Spaces: Runtime error
ikram committed
Commit · 164a5da
1 Parent(s): d0d55cc
2nd commit
Browse files
- Visualisation/__pycache__/app.cpython-39.pyc +0 -0
- Visualisation/app.py +83 -0
- Visualisation/dockerfile +2 -2
- Visualisation/requirements.txt +0 -8
- main.py +7 -0
- requirements.txt +0 -0
- translation/dockerfile +13 -10
- translation/requirements.txt +0 -0
Visualisation/__pycache__/app.cpython-39.pyc
ADDED
Binary file (2.62 kB)
Visualisation/app.py
CHANGED
@@ -0,0 +1,83 @@
+from fastapi import FastAPI, File, UploadFile, Form
+import pandas as pd
+import matplotlib.pyplot as plt
+import io
+from fastapi.responses import StreamingResponse
+from transformers import pipeline, AutoModelForSeq2SeqLM, AutoTokenizer
+
+app = FastAPI()
+
+# ✅ Load Hugging Face models dynamically from the internet
+code_generator = pipeline("text-generation", model="bigcode/starcoder")
+user_input_processor = pipeline("text-generation", model="tiiuae/falcon-7b-instruct")  # understands natural-language requests
+table_analyzer = pipeline("table-question-answering", model="google/tapas-large")
+#image_captioner = pipeline("image-to-text", model="Salesforce/blip2-opt-2.7b")
+
+# ✅ Load T5 model (ensure correct architecture)
+model_name = "t5-small"  # Change to the correct T5 model if needed
+tokenizer = AutoTokenizer.from_pretrained(model_name)
+model = AutoModelForSeq2SeqLM.from_pretrained(model_name)
+
+@app.post("/visualize/")
+async def visualize(
+    file: UploadFile = File(...),
+    description: str = Form(None),
+    chart_type: str = Form(None),
+    x_column: str = Form(None),
+    y_column: str = Form(None)
+):
+    print("🔵 Starting processing...")
+
+    contents = await file.read()
+    excel_data = io.BytesIO(contents)
+    print("✅ File received and loaded into memory.")
+
+    try:
+        df = pd.read_excel(excel_data)
+        print("✅ Excel file read successfully.")
+    except Exception as e:
+        print(f"❌ Error while reading the Excel file: {e}")
+        return {"error": "Unable to read the Excel file."}
+
+    df.columns = df.columns.str.strip().str.lower()
+    print("📌 Columns after cleaning:", df.columns.tolist())
+
+    # If no specific chart details are given, infer them from the description
+    if description:
+        print("📝 Analysing the user description...")
+        response = user_input_processor(description, max_length=50)
+        inferred_data = response[0]['generated_text']
+        print("🔍 AI inference:", inferred_data)
+        # TODO: Extract structured data from the response (chart_type, x_column, y_column)
+
+    # Ensure x_column and y_column were provided and exist in the dataframe
+    if not x_column or not y_column or x_column.lower() not in df.columns or y_column.lower() not in df.columns:
+        print(f"❌ Error: '{x_column}' or '{y_column}' not found.")
+        return {"error": f"The columns '{x_column}' or '{y_column}' do not exist."}
+
+    print("✅ Columns are valid, preparing the chart...")
+
+    plt.figure(figsize=(20, 12))
+    if chart_type == "bar":
+        df.plot(kind="bar", x=x_column.lower(), y=y_column.lower())
+    elif chart_type == "line":
+        df.plot(kind="line", x=x_column.lower(), y=y_column.lower())
+    elif chart_type == "scatter":
+        df.plot(kind="scatter", x=x_column.lower(), y=y_column.lower())
+    elif chart_type == "pie":
+        df.set_index(x_column.lower())[y_column.lower()].plot(kind="pie", autopct="%1.1f%%")
+    elif chart_type == "histogram":
+        df[y_column.lower()].plot(kind="hist", bins=10)
+    else:
+        return {"error": "Invalid chart type"}
+
+    img_stream = io.BytesIO()
+    plt.savefig(img_stream, format="png")
+    img_stream.seek(0)
+    plt.close()
+
+    # Generate image caption
+    #image_description = image_captioner(img_stream)
+    #print("🖼️ Chart description:", image_description)
+
+    return StreamingResponse(img_stream, media_type="image/png")
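Reviewer note: for quick testing of the new /visualize/ endpoint, a minimal client sketch follows. The host/port, the sales.xlsx workbook and the month/revenue column names are illustrative assumptions, not part of this commit.

# Minimal client sketch for the /visualize/ endpoint (assumed host/port,
# file name and column names; adjust to the actual deployment).
import requests

with open("sales.xlsx", "rb") as workbook:  # hypothetical Excel file
    resp = requests.post(
        "http://localhost:8000/visualize/",
        files={"file": ("sales.xlsx", workbook)},
        data={"chart_type": "bar", "x_column": "month", "y_column": "revenue"},
    )

if resp.headers.get("content-type") == "image/png":
    with open("chart.png", "wb") as out:  # save the rendered chart
        out.write(resp.content)
else:
    print(resp.json())  # error dict returned by the endpoint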
Visualisation/dockerfile
CHANGED
@@ -8,7 +8,7 @@ WORKDIR /app
 COPY requirements.txt .

 # Install the dependencies
-RUN pip install --no-cache-dir -r
+RUN pip install --no-cache-dir -r project/requirements.txt

 # Copy the entire project into the container
 COPY . .
@@ -17,4 +17,4 @@ COPY . .
 EXPOSE 8000

 # Command to start FastAPI with Uvicorn
-CMD ["uvicorn", "
+CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]
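Reviewer note: the new install line points at project/requirements.txt, but the preceding "COPY requirements.txt ." places the file at /app/requirements.txt (the WORKDIR), so pip will only find it as requirements.txt unless a project/ directory actually exists in the image. Also, Visualisation/requirements.txt is deleted in this same commit, so the COPY step now depends on a requirements file being available in the build context.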
Visualisation/requirements.txt
DELETED
@@ -1,8 +0,0 @@
-fastapi
-uvicorn
-pandas
-openpyxl
-matplotlib
-seaborn
-pydantic
-python-multipart
main.py
ADDED
@@ -0,0 +1,7 @@
+import subprocess
+
+# Start Data Visualization Service
+subprocess.Popen(["uvicorn", "data-visualisation.app:app", "--host", "0.0.0.0", "--port", "8001"])
+
+# Start Document Translation Service
+subprocess.Popen(["uvicorn", "document-translation.app:app", "--host", "0.0.0.0", "--port", "8002"])
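Reviewer note: uvicorn loads the application from a Python module path, and "data-visualisation" / "document-translation" contain hyphens, which are not valid in module names; the directories in this commit are Visualisation/ and translation/. A sketch of a launcher matching that layout (module paths assumed from the repo tree, not verified), which also waits on the children so the parent process stays alive:

import subprocess

# Assumed module paths based on the directories in this commit
# (Visualisation/app.py and translation/app.py); not part of the commit.
processes = [
    subprocess.Popen(["uvicorn", "Visualisation.app:app", "--host", "0.0.0.0", "--port", "8001"]),
    subprocess.Popen(["uvicorn", "translation.app:app", "--host", "0.0.0.0", "--port", "8002"]),
]

# Block on the child processes so the launcher (and its container) does not exit immediately.
for proc in processes:
    proc.wait()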
requirements.txt
ADDED
Binary file (350 Bytes)
translation/dockerfile
CHANGED
@@ -1,20 +1,23 @@
-#
+# Use Python 3.9 as base image
 FROM python:3.9

-#
+# Set working directory inside the container
 WORKDIR /app

-#
+# Copy project files
 COPY . .

-#
-
+# Copy the torch_wheels directory from location
+COPY ../torch_wheels /tmp/torch_wheels

-#
-
+# Install torch wheels
+RUN pip install --no-cache-dir /tmp/torch_wheels/*

-#
-
+# Install other dependencies
+RUN pip install --no-cache-dir -r project/requirements.txt

+# Expose port for FastAPI
+EXPOSE 7860

-
+# Start the application
+CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
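Reviewer note: "COPY ../torch_wheels /tmp/torch_wheels" reaches outside the Docker build context, which Docker does not allow; the build would need to run with the parent directory as the context (and the path adjusted) or the wheels placed inside translation/. The same project/requirements.txt path question raised for Visualisation/dockerfile applies to the final pip install line here.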
translation/requirements.txt
DELETED
Binary file (1.31 kB)