Alexey Mametyev committed
Commit 46eaca5 · Parent(s): 7dcf767
Remove upload error handler
README.md
CHANGED
@@ -19,7 +19,7 @@ Create beautiful explanatory animations from simple text prompts using AI and Ma
 - 🤖 **AI-Powered**: Uses Gemini AI to generate Manim code from natural language
 - 🎥 **Automatic Rendering**: Creates high-quality MP4 videos
 - 🎵 **Background Music**: Automatically adds background music to all videos
-- 🔄 **Auto-Fix**: AI
+- 🔄 **Auto-Fix**: AI automatically fixes rendering errors and lets you iterate on the result
 - ⚡ **Fast**: Medium quality rendering for quick results
 
 ## How to Use
@@ -29,7 +29,8 @@ Create beautiful explanatory animations from simple text prompts using AI and Ma
 3. The AI will create a scenario and generate Manim code
 4. Type "continue" when prompted to proceed with code generation
 5. Wait for the video to render with background music
-6.
+6. If you want changes, type a new message and the video will be regenerated
+7. Download your animated explanation!
 
 ## Technical Details
 
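The steps above describe the web UI. A Space like this can usually also be driven programmatically with `gradio_client`; the sketch below is a hypothetical illustration only, and both the Space id and the `api_name` endpoint are placeholders, not taken from this repo.

```python
# Hypothetical sketch: calling a Gradio Space from Python.
from gradio_client import Client

client = Client("username/manim-video-generator")  # placeholder Space id
result = client.predict(
    "Explain the Pythagorean theorem",  # the animation prompt
    api_name="/chat",                   # placeholder endpoint name
)
print(result)
```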
demo.py
CHANGED
@@ -9,7 +9,9 @@
 are sent to Gemini; the model replies, and we try to generate the code again:
 a fully automatic loop, just like in your CLI script.
 • State management is reduced to clear phases: `await_task`, `coding_loop`,
-`
+  `await_feedback`, `finished`.
+• After each render the user can give additional instructions:
+  the video is sent to Gemini and the code is regenerated with those notes applied.
 
 Run:
 ```bash
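For orientation, the four phases named in the docstring form a small state machine. A summary sketch of the transitions (not code from the repo; the real logic lives in `chat_handler` and `coding_cycle` below):

```python
# Summary sketch of the phase transitions described above (not repo code).
TRANSITIONS = {
    "await_task": {"coding_loop"},       # user submits a task
    "coding_loop": {"await_feedback"},   # a render succeeded
    "await_feedback": {"coding_loop",    # user asks for changes
                       "finished"},      # user types "finish"
}
```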
@@ -33,7 +35,7 @@ from google.genai.chats import Chat, AsyncChat
 from google.genai.types import GenerateContentConfig, ThinkingConfig, UploadFileConfig
 
 from manim_video_generator.video_executor import VideoExecutor  # type: ignore
-from prompts import SYSTEM_PROMPT_SCENARIO_GENERATOR, SYSTEM_PROMPT_CODEGEN
+from prompts import SYSTEM_PROMPT_SCENARIO_GENERATOR, SYSTEM_PROMPT_CODEGEN
 
 # ──────────────────────────────── Config ─────────────────────────────────────
 
@@ -86,10 +88,47 @@
         raise ValueError("No ```python``` block found in model output.")
     return m.group(1).strip()
 
+
+async def coding_cycle(state: "Session", history: List[Tuple[str, str]], prompt):
+    """Generate code, render video and return once rendering succeeds."""
+    while True:
+        async for chunk in stream_parts(state.chat, prompt):
+            append_bot_chunk(history, chunk.text)
+            yield history, state, state.last_video
+            await asyncio.sleep(0)
+
+        full_answer = history[-1][1]
+        try:
+            py_code = extract_python(full_answer)
+        except ValueError as e:
+            err_msg = f"Error: {e}. Please wrap the code in ```python``` fence."
+            prompt = err_msg
+            add_user_msg(history, err_msg)
+            yield history, state, state.last_video
+            continue
+
+        try:
+            video_path = video_executor.execute_manim_code(py_code)
+            state.last_video = video_path
+        except Exception as e:
+            tb = traceback.format_exc(limit=10)
+            err_msg = (
+                f"Error, your code is not valid: {e}. Traceback: {tb}. Please fix this error and regenerate the code again."
+            )
+            prompt = err_msg
+            add_user_msg(history, err_msg)
+            yield history, state, state.last_video
+            continue
+
+        append_bot_chunk(history, "\n🎞️ Rendering done! Feel free to request changes or type **finish** to end.")
+        state.phase = "await_feedback"
+        yield history, state, state.last_video
+        return
+
 # ────────────────────────── Session state ────────────────────────────────────
 
 class Session(dict):
-    phase: str  # await_task | coding_loop |
+    phase: str  # await_task | coding_loop | await_feedback | finished
     chat: AsyncChat | None
     last_video: Path | None
 
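Note that `coding_cycle` is an async generator and the caller re-yields its output with `async for ... yield out`. Python async generators do not support `yield from` (PEP 525), so delegation must be spelled as an explicit loop; a minimal standalone illustration:

```python
import asyncio
from typing import AsyncIterator

async def inner() -> AsyncIterator[int]:
    for i in range(3):
        yield i

async def outer() -> AsyncIterator[int]:
    # `yield from inner()` would be a SyntaxError in an async generator,
    # so the stream is forwarded item by item.
    async for item in inner():
        yield item

async def main() -> None:
    async for item in outer():
        print(item)  # 0, 1, 2

asyncio.run(main())
```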
@@ -143,74 +182,28 @@ async def chat_handler(user_msg: str, history: List[Tuple[str, str]], state: Ses
     # ── Coding loop ──────────────────────────────────────────────────────────
     if state.phase == "coding_loop":
         prompt = "Thanks. It is good scenario. Now generate code for it.\n\n" + SYSTEM_PROMPT_CODEGEN
-
-
-
-
-
-
-
-
-            full_answer = history[-1][1]
-            try:
-                py_code = extract_python(full_answer)
-            except ValueError as e:
-                # send formatting error to model, loop again
-                err_msg = f"Error: {e}. Please wrap the code in ```python``` fence."
-                prompt = err_msg
-                add_user_msg(history, err_msg)
-                yield history, state, state.last_video
-                continue  # restart loop
-
-            # 2. Render
-            try:
-                video_path = video_executor.execute_manim_code(py_code)
-                state.last_video = video_path
-            except Exception as e:
-                tb = traceback.format_exc(limit=10)
-                err_msg = f"Error, your code is not valid: {e}. Traceback: {tb}. Please fix this error and regenerate the code again."
-                prompt = err_msg
-                add_user_msg(history, err_msg)  # error == user message
-                yield history, state, state.last_video
-                continue  # Gemini will answer with a fix
-
-            append_bot_chunk(history, "\n🎞️ Rendering done, uploading for review…")
+        async for out in coding_cycle(state, history, prompt):
+            yield out
+        return
+    # ── Awaiting user feedback after rendering ───────────────────────────────
+    if state.phase == "await_feedback":
+        if user_msg.strip().lower() in {"finish", "done", "f"}:
+            state.phase = "finished"
+            append_bot_chunk(history, "Session complete. Refresh page to start over.")
             yield history, state, state.last_video
-
-
-
-
-
-            )
-
-
-
-
-
-
-
-                err_msg = f"Upload error: {up_err}"
-                add_user_msg(history, err_msg)
-                yield history, state, state.last_video
-                continue  # ask Gemini to fix
-
-            # 4. Review
-            review_prompt = [file_ref, REVIEW_PROMPT]
-            add_user_msg(history, "# system — review video")
-            async for chunk in stream_parts(state.chat, review_prompt):
-                append_bot_chunk(history, chunk.text)
-                yield history, state, state.last_video
-                await asyncio.sleep(0)
-
-            if "no issues found" in history[-1][1].lower():
-                append_bot_chunk(history, "\n✅ Video accepted! 🎉")
-                state.phase = "finished"
-                yield history, state, state.last_video
-                return
-            else:
-                append_bot_chunk(history, "\n🔄 Issues found. Trying again…")
-                # let the loop run again (Gemini will generate corrected code)
-                continue
+            return
+        file_ref = client.files.upload(file=state.last_video, config=UploadFileConfig(display_name=state.last_video.name))
+        while file_ref.state and file_ref.state.name == "PROCESSING":
+            await asyncio.sleep(3)
+            if file_ref.name:
+                file_ref = client.files.get(name=file_ref.name)
+        if file_ref.state and file_ref.state.name == "FAILED":
+            raise RuntimeError("Gemini failed to process upload")
+        prompt = [file_ref, f"{user_msg}\n\n{SYSTEM_PROMPT_CODEGEN}"]
+        state.phase = "coding_loop"
+        async for out in coding_cycle(state, history, prompt):
+            yield out
+        return
 
     # ── Finished phase ───────────────────────────────────────────────────────
     if state.phase == "finished":
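The `await_feedback` branch uploads the rendered file and polls the Gemini Files API until processing settles. A minimal self-contained sketch of that pattern, assuming the google-genai SDK, an API key in the environment, and a placeholder video path:

```python
import time
from pathlib import Path

from google import genai
from google.genai.types import UploadFileConfig

client = genai.Client()  # assumes GOOGLE_API_KEY / GEMINI_API_KEY is set
video = Path("output/scene.mp4")  # placeholder path, not from the repo

file_ref = client.files.upload(
    file=video,
    config=UploadFileConfig(display_name=video.name),
)

# Uploaded files start in PROCESSING; poll until the state settles
# before referencing the file in a prompt.
while file_ref.state and file_ref.state.name == "PROCESSING":
    time.sleep(3)
    if file_ref.name:
        file_ref = client.files.get(name=file_ref.name)

if file_ref.state and file_ref.state.name == "FAILED":
    raise RuntimeError("Gemini failed to process upload")

# file_ref can now be passed as a content part alongside text.
```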