Update app.py
app.py
CHANGED
@@ -4,6 +4,8 @@ import os
 from PIL import Image
 import tempfile
 import io
+import uuid
+import time
 
 app = Flask(__name__)
 
@@ -25,37 +27,60 @@ model = genai.GenerativeModel(
     safety_settings=safety_settings
 )
 
+# Dictionary to store responses that are still being generated
+pending_responses = {}
+
 @app.route('/')
 def home():
     return render_template('index.html')
 
-@app.route('/generate', methods=['POST'
+@app.route('/generate', methods=['POST'])
 def generate():
     if 'image' not in request.files:
         return jsonify({'error': 'No image uploaded'}), 400
 
     image_file = request.files['image']
+    request_id = str(uuid.uuid4())  # Generate a unique identifier
+
+    # Temporarily save the image
+    with tempfile.NamedTemporaryFile(delete=False, suffix='.png') as temp_file:
+        image_file.save(temp_file.name)
+    try:
+        image = Image.open(temp_file.name)
+        # Convert the image to bytes for streaming
+        img_byte_arr = io.BytesIO()
+        image.save(img_byte_arr, format='PNG')
+        img_byte_arr = img_byte_arr.getvalue()
 
-
+        # Store the generation task in the dictionary
+        pending_responses[request_id] = {
+            'status': 'processing',
+            'response': model.generate_content([mm, {"mime_type": "image/png", "data": img_byte_arr}], stream=True)
+        }
+
+        return jsonify({'request_id': request_id})
+    except Exception as e:
+        return jsonify({'error': str(e)}), 500
+    finally:
+        # Clean up the temporary file
+        os.unlink(temp_file.name)
+
+@app.route('/stream/<request_id>')
+def stream(request_id):
     def generate_stream():
-        with tempfile.NamedTemporaryFile(delete=False, suffix='.png') as temp_file:
-            image_file.save(temp_file.name)
+        while request_id in pending_responses and pending_responses[request_id]['status'] == 'processing':
             try:
-                image = Image.open(temp_file.name)
-                # Convert the image to bytes for streaming
-                img_byte_arr = io.BytesIO()
-                image.save(img_byte_arr, format='PNG')
-                img_byte_arr = img_byte_arr.getvalue()
-
-                # Generate the content as a stream
-                response = model.generate_content([mm, {"mime_type": "image/png", "data": img_byte_arr}], stream=True)
-                for chunk in response:
-                    yield f"data: {chunk.text}\n\n"
+                chunk = next(pending_responses[request_id]['response'])
+                yield f"data: {chunk.text}\n\n"
+            except StopIteration:
+                pending_responses[request_id]['status'] = 'completed'
             except Exception as e:
                 yield f"data: Error: {str(e)}\n\n"
-
-
-
+                pending_responses[request_id]['status'] = 'error'
+            time.sleep(0.1)  # Wait briefly before checking again
+
+        if request_id in pending_responses:
+            del pending_responses[request_id]
 
     return Response(generate_stream(), mimetype='text/event-stream')
 
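With this change, a client makes two requests instead of one: it first POSTs the image to /generate and receives a request_id, then reads the Server-Sent Events stream from /stream/<request_id>. A minimal client sketch under assumptions not in the diff itself: the app runs locally on port 5000, the requests package is installed, and example.png is a hypothetical input file; only the /generate and /stream routes and the request_id field come from the commit.

import requests

BASE_URL = "http://localhost:5000"  # assumed local development server

# Step 1: upload the image; the server responds with a request_id
with open("example.png", "rb") as f:  # hypothetical input file
    resp = requests.post(f"{BASE_URL}/generate", files={"image": f})
resp.raise_for_status()
request_id = resp.json()["request_id"]

# Step 2: read the SSE stream for that request_id and print each chunk
with requests.get(f"{BASE_URL}/stream/{request_id}", stream=True) as sse:
    for line in sse.iter_lines(decode_unicode=True):
        if line.startswith("data: "):
            print(line[len("data: "):], flush=True)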