Update app.py
app.py CHANGED
@@ -44,12 +44,20 @@ async def generate_command(
         genai.configure(api_key=GEMINI_API_KEY)
         model = genai.GenerativeModel("gemini-2.0-flash-exp")
         response = model.generate_content(prompt, stream=True)
-
-        full_text = ""
-
+        current_message_content = ""
         for part in response:
-
-
+            text_chunk = part.text
+
+            if len(current_message_content) + len(text_chunk) <= 2000:
+                current_message_content += text_chunk
+            else:
+                # Send the current message
+                await interaction.followup.send(content=current_message_content)
+                current_message_content = text_chunk  # Start new message with the current chunk
+
+        # Send any remaining content
+        if current_message_content:
+            await interaction.followup.send(content=current_message_content)
     except Exception as e:
         print(e)
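
For reference, the accumulation logic this commit introduces can be factored into a standalone helper. The sketch below is illustrative only and is not code from this Space: the name send_in_chunks, the generic async send callback, and the demo stream are all assumptions made here. It mirrors the diff's approach of buffering streamed text and flushing a message whenever adding the next chunk would exceed Discord's 2000-character per-message limit, and it additionally skips sending an empty buffer (which the diff would attempt if the very first chunk were already over the limit). Like the commit, it does not split a single chunk that is itself longer than the limit.

import asyncio
from typing import Awaitable, Callable, Iterable

DISCORD_MESSAGE_LIMIT = 2000  # Discord rejects messages longer than 2000 characters


async def send_in_chunks(
    chunks: Iterable[str],
    send: Callable[[str], Awaitable[object]],
    limit: int = DISCORD_MESSAGE_LIMIT,
) -> None:
    """Accumulate streamed text chunks and flush a message whenever adding
    the next chunk would push the buffer past `limit` characters."""
    current = ""
    for chunk in chunks:
        if len(current) + len(chunk) <= limit:
            current += chunk
        else:
            if current:
                await send(current)  # flush the full buffer
            current = chunk          # start the next message with this chunk
    if current:
        await send(current)          # flush whatever is left


# Stand-alone demonstration with a fake "stream" and a print-based sender.
async def _demo() -> None:
    fake_stream = ["a" * 1500, "b" * 800, "c" * 300]

    async def fake_send(message: str) -> None:
        print(f"sent {len(message)} characters")

    await send_in_chunks(fake_stream, fake_send)


if __name__ == "__main__":
    asyncio.run(_demo())

In the command handler above, this helper could presumably be invoked as something like await send_in_chunks((part.text for part in response), lambda m: interaction.followup.send(content=m)), reusing the same streaming response object and the discord.py followup webhook shown in the diff.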