Spaces:
Sleeping
Sleeping
Upload 5 files
Browse files- Dockerfile +12 -20
- README.md +13 -16
- app.py +41 -0
- db_connector.py +17 -0
- requirements.txt +10 -2
Dockerfile
CHANGED
@@ -1,21 +1,13 @@
|
|
1 |
-
FROM python:3.
|
2 |
-
|
|
|
3 |
WORKDIR /app
|
4 |
-
|
5 |
-
RUN
|
6 |
-
|
7 |
-
|
8 |
-
|
9 |
-
|
10 |
-
|
11 |
-
|
12 |
-
|
13 |
-
COPY src/ ./src/
|
14 |
-
|
15 |
-
RUN pip3 install -r requirements.txt
|
16 |
-
|
17 |
-
EXPOSE 8501
|
18 |
-
|
19 |
-
HEALTHCHECK CMD curl --fail http://localhost:8501/_stcore/health
|
20 |
-
|
21 |
-
ENTRYPOINT ["streamlit", "run", "src/streamlit_app.py", "--server.port=8501", "--server.address=0.0.0.0"]
|
|
|
1 |
+
# Dockerfile for the BizIntel AI Ultra Streamlit app (Hugging Face Spaces, port 7860).
FROM python:3.10-slim

# Don't write .pyc files; flush stdout/stderr immediately so container logs stream.
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1

WORKDIR /app

# Install dependencies first so this layer is cached across code-only changes.
COPY requirements.txt /app/
RUN pip install --no-cache-dir --upgrade pip \
    && pip install --no-cache-dir -r requirements.txt

COPY . /app

# Streamlit reads config from STREAMLIT_SERVER_* variables; the previous
# STREAMLIT_PORT / STREAMLIT_HEADLESS / STREAMLIT_ENABLECORS names are not
# recognized by Streamlit and were silently ignored.
ENV STREAMLIT_SERVER_PORT=7860
ENV STREAMLIT_SERVER_HEADLESS=true
ENV STREAMLIT_SERVER_ENABLE_CORS=false

EXPOSE 7860

# Bind explicitly to 0.0.0.0:7860 so the Spaces proxy can reach the server.
CMD ["streamlit", "run", "app.py", "--server.port=7860", "--server.address=0.0.0.0"]
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
README.md
CHANGED
@@ -1,19 +1,16 @@
|
|
1 |
-
|
2 |
-
title: BizIntel AI
|
3 |
-
emoji: π
|
4 |
-
colorFrom: red
|
5 |
-
colorTo: red
|
6 |
-
sdk: docker
|
7 |
-
app_port: 8501
|
8 |
-
tags:
|
9 |
-
- streamlit
|
10 |
-
pinned: false
|
11 |
-
short_description: Streamlit template space
|
12 |
-
---
|
13 |
|
14 |
-
|
15 |
|
16 |
-
|
|
|
|
|
|
|
|
|
17 |
|
18 |
-
|
19 |
-
|
|
|
|
|
|
|
|
|
|
1 |
+
# π BizIntel AI Ultra
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
2 |
|
3 |
+
**Next-gen Business Analytics App powered by Google ADK, Gemini Pro, and Streamlit**
|
4 |
|
5 |
+
## Features
|
6 |
+
- Upload CSV or connect to databases (PostgreSQL, MySQL, MSSQL, Oracle, SQLite)
|
7 |
+
- ADK multi-agent orchestration: trend analysis, forecasting, strategic insights
|
8 |
+
- Interactive Streamlit dashboard with visual charts
|
9 |
+
- Hugging Face Spaces Docker-based deployment
|
10 |
|
11 |
+
## Usage
|
12 |
+
1. Select your data source: file upload or SQL DB connection
|
13 |
+
2. Run agent analysis pipeline
|
14 |
+
3. View KPIs, charts, forecasts, and strategy
|
15 |
+
|
16 |
+
Built with ❤️ using Streamlit + Gemini + ADK
|
app.py
ADDED
@@ -0,0 +1,41 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
|
2 |
+
"""Streamlit entry point for BizIntel AI Ultra.

Lets the user load data either from a CSV upload or from a SQL database
(via db_connector), then runs the ADK analytics pipeline on the resulting
CSV path and renders the report plus any chart images the pipeline wrote.
"""
import os

import streamlit as st

from agents.analytics_pipeline import analytics_coordinator
from db_connector import fetch_data_from_db, list_tables, SUPPORTED_ENGINES

st.set_page_config(page_title="BizIntel AI Ultra", layout="wide")
st.title("π BizIntel AI Ultra - Ultimate Business Intelligence")

input_source = st.radio("Select data source", ["Upload CSV", "Connect to SQL Database"])
file_path = None  # CSV path handed to the pipeline; stays None until a source is ready

if input_source == "Upload CSV":
    uploaded_file = st.file_uploader("Upload CSV", type="csv")
    if uploaded_file:
        # Ensure the target directory exists; the original code crashed on a
        # fresh deploy because "data/" was never created anywhere.
        os.makedirs("data", exist_ok=True)
        file_path = os.path.join("data", uploaded_file.name)
        with open(file_path, "wb") as f:
            f.write(uploaded_file.read())
        st.success("File uploaded.")

elif input_source == "Connect to SQL Database":
    engine = st.selectbox("Select database engine", SUPPORTED_ENGINES)
    conn_str = st.text_input("Connection string (SQLAlchemy format)")
    if conn_str:
        # A bad connection string previously dumped a raw traceback into the
        # UI; show the error and fall through with no tables instead.
        try:
            tables = list_tables(conn_str)
        except Exception as exc:
            st.error(f"Could not connect: {exc}")
            tables = []
        if tables:
            table_name = st.selectbox("Choose a table", tables)
            if table_name:
                file_path = fetch_data_from_db(conn_str, table_name)
                st.success(f"Fetched table '{table_name}' as CSV.")

if file_path:
    st.info("Running analytics pipeline...")
    result = analytics_coordinator.run(input=file_path)
    st.subheader("Analysis & Strategy Report")
    st.text(result)

    # The pipeline writes these chart files as a side effect; show them if present.
    if os.path.exists("sales_plot.png"):
        st.image("sales_plot.png", caption="Sales Trend", use_column_width=True)
    if os.path.exists("forecast_plot.png"):
        st.image("forecast_plot.png", caption="Forecast Chart", use_column_width=True)
|
db_connector.py
ADDED
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
|
2 |
+
"""Helpers for pulling tables out of a SQL database into local CSV files."""
import os

import pandas as pd
from sqlalchemy import create_engine, inspect

# Engines offered in the UI; any SQLAlchemy-supported backend works via the URL.
SUPPORTED_ENGINES = ["SQLite", "PostgreSQL", "MySQL", "MSSQL", "Oracle"]


def list_tables(conn_str):
    """Return the table names visible through *conn_str* (a SQLAlchemy URL).

    The engine is disposed before returning so pooled connections are not
    leaked on every call (each UI interaction creates a fresh engine).
    """
    engine = create_engine(conn_str)
    try:
        return inspect(engine).get_table_names()
    finally:
        engine.dispose()


def fetch_data_from_db(conn_str, table):
    """Dump *table* to ``data/<table>_extracted.csv`` and return the CSV path.

    Raises whatever SQLAlchemy/pandas raise on connection or read failure;
    callers are expected to handle that.
    """
    engine = create_engine(conn_str)
    try:
        df = pd.read_sql_table(table, engine)
    finally:
        engine.dispose()  # release the connection even if the read fails
    # Original crashed with FileNotFoundError when data/ did not exist.
    os.makedirs("data", exist_ok=True)
    csv_path = f"data/{table}_extracted.csv"
    df.to_csv(csv_path, index=False)
    return csv_path
|
requirements.txt
CHANGED
@@ -1,3 +1,11 @@
|
|
1 |
-
|
2 |
pandas
|
3 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
google-adk
|
2 |
pandas
|
3 |
+
matplotlib
|
4 |
+
statsmodels
|
5 |
+
streamlit
|
6 |
+
plotly
|
7 |
+
sqlalchemy
|
8 |
+
psycopg2-binary
|
9 |
+
mysql-connector-python
|
10 |
+
pyodbc
|
11 |
+
cx_Oracle
|