Spaces:
Running
Running
Commit
·
9b25d3a
1
Parent(s):
481f639
update scenario edits
Browse files
README.md
CHANGED
@@ -4,7 +4,7 @@ emoji: 🎬
|
|
4 |
colorFrom: blue
|
5 |
colorTo: purple
|
6 |
sdk: gradio
|
7 |
-
sdk_version:
|
8 |
app_file: demo.py
|
9 |
pinned: false
|
10 |
license: mit
|
@@ -43,4 +43,4 @@ Create beautiful explanatory animations from simple text prompts using AI and Ma
|
|
43 |
Set your Gemini API key as an environment variable:
|
44 |
```bash
|
45 |
export GEMINI_API_KEY="your_api_key_here"
|
46 |
-
```
|
|
|
4 |
colorFrom: blue
|
5 |
colorTo: purple
|
6 |
sdk: gradio
|
7 |
+
sdk_version: "4.0.0"
|
8 |
app_file: demo.py
|
9 |
pinned: false
|
10 |
license: mit
|
|
|
43 |
Set your Gemini API key as an environment variable:
|
44 |
```bash
|
45 |
export GEMINI_API_KEY="your_api_key_here"
|
46 |
+
```
|
demo.py
CHANGED
@@ -126,8 +126,6 @@ async def chat_handler(user_msg: str, history: List[Tuple[str, str]], state: Ses
|
|
126 |
if user_msg.strip().lower() in {"c", "continue", "Ρ"}:
|
127 |
# User is ready to proceed to code generation
|
128 |
state.phase = "coding_loop"
|
129 |
-
prompt = "Thanks. It is good scenario. Now generate code for it.\n\n" + SYSTEM_PROMPT_CODEGEN
|
130 |
-
# Continue to coding_loop logic below
|
131 |
else:
|
132 |
# User wants to discuss/modify scenario
|
133 |
for chunk in stream_parts(state.chat, user_msg):
|
@@ -140,16 +138,11 @@ async def chat_handler(user_msg: str, history: List[Tuple[str, str]], state: Ses
|
|
140 |
|
141 |
# later phases require chat obj
|
142 |
if not state.chat:
|
143 |
-
|
144 |
-
yield history, state, state.last_video
|
145 |
-
return
|
146 |
|
147 |
# ── Coding loop ─────────────────────────────────────────────────────────────
|
148 |
if state.phase == "coding_loop":
|
149 |
if not user_msg.strip().lower() in {"c", "continue", "Ρ"}:
|
150 |
-
# This should not happen anymore since we handle it in await_task
|
151 |
-
prompt = "Thanks. It is good scenario. Now generate code for it.\n\n" + SYSTEM_PROMPT_CODEGEN
|
152 |
-
else:
|
153 |
prompt = "Thanks. It is good scenario. Now generate code for it.\n\n" + SYSTEM_PROMPT_CODEGEN
|
154 |
|
155 |
while True: # keep cycling until render succeeds
|
|
|
126 |
if user_msg.strip().lower() in {"c", "continue", "Ρ"}:
|
127 |
# User is ready to proceed to code generation
|
128 |
state.phase = "coding_loop"
|
|
|
|
|
129 |
else:
|
130 |
# User wants to discuss/modify scenario
|
131 |
for chunk in stream_parts(state.chat, user_msg):
|
|
|
138 |
|
139 |
# later phases require chat obj
|
140 |
if not state.chat:
|
141 |
+
raise ValueError("Chat not found")
|
|
|
|
|
142 |
|
143 |
# ── Coding loop ─────────────────────────────────────────────────────────────
|
144 |
if state.phase == "coding_loop":
|
145 |
if not user_msg.strip().lower() in {"c", "continue", "Ρ"}:
|
|
|
|
|
|
|
146 |
prompt = "Thanks. It is good scenario. Now generate code for it.\n\n" + SYSTEM_PROMPT_CODEGEN
|
147 |
|
148 |
while True: # keep cycling until render succeeds
|