Update app.py
app.py
CHANGED
@@ -1,31 +1,44 @@
 import gradio as gr
 import re
 import json
-import
 import os
 import tempfile

 # --- build_logic.py is now a hard requirement ---
 from build_logic import (
     create_space as build_logic_create_space,
-    _get_api_token as build_logic_get_api_token,
-    whoami as build_logic_whoami,
     list_space_files_for_browsing,
     get_space_repository_info,
     get_space_file_content,
     update_space_file,
     parse_markdown as build_logic_parse_markdown,
     delete_space_file as build_logic_delete_space_file,
-    get_space_runtime_status
 )
 print("build_logic.py loaded successfully.")
-


 bbb = chr(96) * 3
 parsed_code_blocks_state_cache = []
 BOT_ROLE_NAME = "assistant"
-GROQ_API_ENDPOINT

 DEFAULT_SYSTEM_PROMPT = f"""You are an expert AI programmer. Your role is to generate code and file structures based on user requests, or to modify existing code provided by the user.
 When you provide NEW code for a file, or MODIFIED code for an existing file, use the following format exactly:
@@ -55,7 +68,7 @@ If the user asks to delete a file, simply omit it from your next full ### File:
 If no code is provided, assist the user with their tasks.
 """

-# --- Core Utility, Parsing, Export
 def escape_html_for_markdown(text):
     if not isinstance(text, str): return ""
     # Minimal escaping, expand if needed

@@ -115,90 +128,79 @@ def _clean_filename(filename_line_content):
     return filename_candidate if filename_candidate else text.strip()


-def _parse_chat_stream_logic(
-
     latest_blocks_dict = {}
     if existing_files_state:
-        for
-
-        if not isinstance(ai_chat_history, list): raise ValueError("JSON input must be a list of chat messages.")
-    except json.JSONDecodeError as e: results["error_message"] = f"JSON Parsing Error: {e}."; return results
-    except ValueError as e: results["error_message"] = str(e); return results
-
-    # Ensure the bot's message is the last one and is the only one being parsed for new/updated files
-    # This prevents reprocessing the entire history on every new AI turn
-    message_obj = None
-    if ai_chat_history and isinstance(ai_chat_history[-1], dict) and ai_chat_history[-1].get("role", "").lower() == BOT_ROLE_NAME:
-        message_obj = ai_chat_history[-1]

-    if not message_obj:
-        # If the last message isn't the bot's or is malformed, don't update state based on it
-        results["parsed_code_blocks"] = list(latest_blocks_dict.values()) # Return existing state
-        results["default_selected_filenames"] = [b["filename"] for b in results["parsed_code_blocks"] if not b.get("is_structure_block")]
-        # results["error_message"] = "No bot message found in the last entry for parsing." # Optional: add a debug message
-        return results

-

     file_pattern = re.compile(r"### File:\s*(?P<filename_line>[^\n]+)\n(?:```(?P<lang>[\w\.\-\+]*)\n(?P<code>[\s\S]*?)\n```|(?P<binary_msg>\[Binary file(?: - [^\]]+)?\]))")
     structure_pattern = re.compile(r"## File Structure\n```(?:(?P<struct_lang>[\w.-]*)\n)?(?P<structure_code>[\s\S]*?)\n```")

-    # Process
-
-    #
-
     current_parsed_blocks = list(latest_blocks_dict.values())
     current_parsed_blocks.sort(key=lambda b: (0, b["filename"]) if b.get("is_structure_block") else (1, b["filename"]))
-
     results["parsed_code_blocks"] = current_parsed_blocks
     results["default_selected_filenames"] = [b["filename"] for b in current_parsed_blocks if not b.get("is_structure_block")]
     return results

 def _export_selected_logic(selected_filenames, space_line_name_for_md, parsed_blocks_for_export):
     results = {"output_str": "", "error_message": None, "download_filepath": None}
-
-    parsed_blocks_for_export = parsed_code_blocks_state_cache

-    #
-
-    binary_blocks = [b for b in parsed_blocks_for_export if b.get("is_binary") or b.get("code", "").startswith("[Binary or Skipped file]")]

-    if not
     results["output_str"] = f"# Space: {space_line_name_for_md}\n## File Structure\n{bbb}\nπ Root\n{bbb}\n\n*No files to list in structure or export.*"
     try:
         with tempfile.NamedTemporaryFile(mode="w", delete=False, suffix=".md", encoding='utf-8') as tmpfile:
@@ -215,34 +217,46 @@ def _export_selected_logic(selected_filenames, space_line_name_for_md, parsed_bl
         else:
             # If no structure block from AI, generate a simple one from detected files
             output_lines.extend(["## File Structure", bbb, "π Root"])
-
-            for fname in filenames_for_structure_list: output_lines.append(f"  π {fname}")
         output_lines.extend([bbb, ""])

         output_lines.append("Below are the contents of all files in the space:\n")
         exported_content = False

         # Determine which files to export content for based on selection or default
         files_to_export_content = []
         if selected_filenames:
-            files_to_export_content = [b for b in
         else:
-            files_to_export_content =

-        # Sort blocks for consistent output order
-        files_to_export_content.sort(key=lambda b: (0, b["filename"]) if b.get("is_binary") else (1, b["filename"]))

-        for block in
             output_lines.append(f"### File: {block['filename']}")
-            if block.get('is_binary') or block.get("code", "").startswith("[")
-
             else:
                 output_lines.extend([f"{bbb}{block.get('language', 'plaintext') or 'plaintext'}", block.get('code',''), bbb])
             output_lines.append(""); exported_content = True

-        if not exported_content and not
-        elif not exported_content
         final_output_str = "\n".join(output_lines)
         results["output_str"] = final_output_str
         try:

@@ -251,7 +265,9 @@ def _export_selected_logic(selected_filenames, space_line_name_for_md, parsed_bl
     except Exception as e: print(f"Error creating temp file: {e}"); results["error_message"] = "Could not prepare file for download."
     return results

 def _convert_gr_history_to_api_messages(system_prompt, gr_history, current_user_message=None):
     messages = [{"role": "system", "content": system_prompt}] if system_prompt else []
     for user_msg, bot_msg in gr_history:
         if user_msg: messages.append({"role": "user", "content": user_msg})

@@ -260,22 +276,13 @@ def _convert_gr_history_to_api_messages(system_prompt, gr_history, current_user_
     if current_user_message: messages.append({"role": "user", "content": current_user_message})
     return messages

-
-    for user_msg, bot_msg in reversed(gr_history):
-        if bot_msg and isinstance(bot_msg, str):
-            latest_bot_msg_content = bot_msg
-            break # Found the latest, stop searching
-
-    if latest_bot_msg_content is None:
-        return json.dumps([]) # Return empty list if no bot message found
-
-    # Return JSON containing only the latest bot message for parsing
-    return json.dumps([{"role": BOT_ROLE_NAME, "content": latest_bot_msg_content}], indent=2)


 def _generate_ui_outputs_from_cache(owner, space_name):
     global parsed_code_blocks_state_cache
     preview_md_val = "*No files in cache to display.*"
     formatted_md_val = f"# Space: {owner}/{space_name}\n## File Structure\n{bbb}\nπ Root\n{bbb}\n\n*No files in cache.*" if owner or space_name else "*Load or define a Space to see its Markdown structure.*"

@@ -292,7 +299,7 @@ def _generate_ui_outputs_from_cache(owner, space_name):

         # Handle content display
         content = block.get('code', '')
-        if block.get('is_binary') or content.startswith("[")
             preview_md_lines.append(f"\n`{escape_html_for_markdown(content)}`\n")
         elif block.get('is_structure_block'):
             preview_md_lines.append(f"\n{bbb}{block.get('language', 'plaintext') or 'plaintext'}\n{content}\n{bbb}\n")

@@ -304,15 +311,20 @@ def _generate_ui_outputs_from_cache(owner, space_name):
     space_line_name = f"{owner}/{space_name}" if owner and space_name else (owner or space_name or "your-space")

     # _export_selected_logic handles selecting which files to include in the export MD
-
     formatted_md_val = export_result["output_str"]
     download_file = export_result["download_filepath"]

     return formatted_md_val, preview_md_val, gr.update(value=download_file, interactive=download_file is not None)

-
     global parsed_code_blocks_state_cache
-    _chat_msg_in
     _detected_files_update, _formatted_output_update, _download_btn_update = gr.update(), gr.update(), gr.update(interactive=False, value=None)

     # --- Before sending to AI: Parse existing files from the current formatted markdown ---
@@ -322,27 +334,47 @@ def handle_groq_chat_submit(user_message, chat_history, api_key_input, model_sel
     if user_message and _current_formatted_markdown:
         try:
             parsed_from_md = build_logic_parse_markdown(_current_formatted_markdown)
-            # Update cache
-

             for f_info in parsed_from_md.get("files", []):
-                # Only add if it has
-                if f_info.get("path") and f_info
                     # Check if it's a binary representation string
                     is_binary_repr = isinstance(f_info.get("content"), str) and (f_info["content"].startswith("[Binary file") or f_info["content"].startswith("[Error loading content:") or f_info["content"].startswith("[Binary or Skipped file]"))
-

         except Exception as e:
             # Log error but don't block chat submission

@@ -354,18 +386,19 @@ def handle_groq_chat_submit(user_message, chat_history, api_key_input, model_sel

     if not user_message.strip():
         _status = "Cannot send an empty message."
-        yield (
-    _chat_hist.append((user_message, None)); _status = "Sending to
     yield (_chat_msg_in, _chat_hist, _status, _detected_files_update, _formatted_output_update, _download_btn_update)

-

     current_sys_prompt = system_prompt.strip() or DEFAULT_SYSTEM_PROMPT

-    # Include current file contents in the prompt as context
     current_files_context = ""
     if parsed_code_blocks_state_cache:
         current_files_context = "\n\n## Current Files in Space\n"

@@ -374,90 +407,114 @@ def handle_groq_chat_submit(user_message, chat_history, api_key_input, model_sel
             current_files_context += f"### File: {block['filename']}\n{bbb}\n{block['code']}\n{bbb}\n"
         else:
             current_files_context += f"### File: {block['filename']}\n"
-            if block.get("is_binary"):
                 current_files_context += f"{block['code']}\n" # e.g. [Binary file...]
             else:
                 current_files_context += f"{bbb}{block.get('language', 'plaintext') or 'plaintext'}\n{block.get('code','')}\n{bbb}\n"
         current_files_context += "\n"

     # Append current file context to the user message
     user_message_with_context = user_message.strip()
     if current_files_context.strip():
-        user_message_with_context = user_message_with_context + current_files_context + "

     api_msgs = _convert_gr_history_to_api_messages(current_sys_prompt, _chat_hist[:-1], user_message_with_context)

-
-    payload = {"model": model_select, "messages": api_msgs}
-
     try:
-        _status = f"Waiting for {model_select}..."; yield (_chat_msg_in, _chat_hist, _status, _detected_files_update, _formatted_output_update, _download_btn_update)
-
-        response
-
-        #
-

     if parsing_res["error_message"]:
         _status = f"Parsing Error: {parsing_res['error_message']}"
-        # Append parsing error to the bot's response in chat? Or
-        # For now, update status and detected files area with error
         _detected_files_update = gr.Markdown(f"## Parsing Error\n`{escape_html_for_markdown(parsing_res['error_message'])}`")
     else:
-        #
     _formatted_output_update, _detected_files_update, _download_btn_update = _generate_ui_outputs_from_cache(hf_owner_name, hf_repo_name)
     _status = "Processing complete. Previews updated."
-
-    yield (_chat_msg_in, _chat_hist, _status, _detected_files_update, _formatted_output_update, _download_btn_update); return
     else:
-        # Handle cases where
-
-        #
-
-    except
-
-        _chat_hist.append((user_message, error_msg)) # Append as a new user/bot turn if structure unexpected

-
     yield (_chat_msg_in, _chat_hist, _status, _detected_files_update, _formatted_output_update, _download_btn_update)


 def handle_load_existing_space(hf_api_key_ui, ui_owner_name, ui_space_name):
     global parsed_code_blocks_state_cache
     _formatted_md_val, _detected_preview_val, _status_val = "*Loading files...*", "*Loading files...*", f"Loading Space: {ui_owner_name}/{ui_space_name}..."
     _file_browser_update, _iframe_html_update, _download_btn_update = gr.update(visible=False, choices=[], value=None), gr.update(value=None, visible=False), gr.update(interactive=False, value=None)
     _build_status_clear, _edit_status_clear, _runtime_status_clear = "*Build status will appear here.*", "*Select a file to load or delete.*", "*Space runtime status will appear here after refresh.*"

     # Yield initial state to update UI
-    yield (_formatted_md_val, _detected_preview_val, _status_val, _file_browser_update, gr.update(value=ui_owner_name), gr.update(value=ui_space_name), _iframe_html_update, _download_btn_update, _build_status_clear, _edit_status_clear, _runtime_status_clear)

     owner_to_use, updated_owner_name_val = ui_owner_name, ui_owner_name
     error_occurred = False
@@ -481,7 +538,8 @@ def handle_load_existing_space(hf_api_key_ui, ui_owner_name, ui_space_name):

     if error_occurred:
         # Yield error state
-        yield (f"*Error: {_status_val}*", f"*Error: {_status_val}*", _status_val, _file_browser_update, updated_owner_name_val, ui_space_name, _iframe_html_update, _download_btn_update, _build_status_clear, _edit_status_clear, _runtime_status_clear)
         return # Stop execution

     sdk_for_iframe, file_list, err_list_files = get_space_repository_info(hf_api_key_ui, ui_space_name, owner_to_use)

@@ -498,7 +556,7 @@ def handle_load_existing_space(hf_api_key_ui, ui_owner_name, ui_space_name):
         parsed_code_blocks_state_cache = [] # Clear cache on error
         _formatted_md_val, _detected_preview_val, _download_btn_update = _generate_ui_outputs_from_cache(owner_to_use, ui_space_name)
         _file_browser_update = gr.update(visible=True, choices=[], value="Error loading files") # Update file browser with error state
-        yield (f"*Error: {err_list_files}*", "*Error loading files*", _status_val, _file_browser_update, updated_owner_name_val, ui_space_name, _iframe_html_update, _download_btn_update, _build_status_clear, _edit_status_clear, _runtime_status_clear)
         return # Stop execution

     if not file_list:

@@ -506,14 +564,14 @@ def handle_load_existing_space(hf_api_key_ui, ui_owner_name, ui_space_name):
         parsed_code_blocks_state_cache = []
         _formatted_md_val, _detected_preview_val, _download_btn_update = _generate_ui_outputs_from_cache(owner_to_use, ui_space_name)
         _file_browser_update = gr.update(visible=True, choices=[], value="No files found")
-        yield (_formatted_md_val, _detected_preview_val, _status_val, _file_browser_update, updated_owner_name_val, ui_space_name, _iframe_html_update, _download_btn_update, _build_status_clear, _edit_status_clear, _runtime_status_clear)
         return # Stop execution

-
     _status_val = f"Loading {len(file_list)} files from {owner_to_use}/{ui_space_name} (SDK: {sdk_for_iframe or 'unknown'})...";
     # Yield intermediate status while loading files
-    yield (_formatted_md_val, _detected_preview_val, _status_val, gr.update(visible=True, choices=sorted(file_list or []), value=None), updated_owner_name_val, ui_space_name, _iframe_html_update, _download_btn_update, _build_status_clear, _edit_status_clear, _runtime_status_clear)

     for file_path in file_list:

@@ -527,35 +585,43 @@ def handle_load_existing_space(hf_api_key_ui, ui_owner_name, ui_space_name):
         file_path.startswith("__pycache__/") or "/__pycache__/" in file_path or \
         file_path.startswith("node_modules/") or "/node_modules/" in file_path or \
         file_path.startswith("venv/") or "/venv/" in file_path or \
-        file_path.startswith(".venv/") or "/.venv/" in file_path
-

         # Handle potential issues with reading large files or non-utf8 files
         try:
             content, err_get = get_space_file_content(hf_api_key_ui, ui_space_name, owner_to_use, file_path)
             if err_get:
                 # If there's an error getting content, record it but don't fail the whole load
-
                 print(f"Error loading {file_path}: {err_get}");
                 continue
             # If content is successfully loaded
-
         except Exception as content_ex:
             # Catch any other unexpected exceptions during file content fetching
-
             print(f"Unexpected error loading {file_path}: {content_ex}")
             continue

-    parsed_code_blocks_state_cache =
     _formatted_md_val, _detected_preview_val, _download_btn_update = _generate_ui_outputs_from_cache(owner_to_use, ui_space_name)
-    _status_val = f"Successfully loaded Space: {owner_to_use}/{ui_space_name}. Markdown ready."
     _file_browser_update = gr.update(visible=True, choices=sorted(file_list or []), value=None) # Use the full file list for the dropdown

-


 def handle_build_space_button(hf_api_key_ui, ui_space_name_part, ui_owner_name_part, space_sdk_ui, formatted_markdown_content):
     _build_status, _iframe_html, _file_browser_update = "Starting space build process...", gr.update(value=None, visible=False), gr.update(visible=False, choices=[], value=None)
     yield _build_status, _iframe_html, _file_browser_update # Yield initial status
     if not ui_space_name_part or "/" in ui_space_name_part: _build_status = f"Build Error: HF Space Name '{ui_space_name_part}' must be repo name only (no '/')."; yield _build_status, _iframe_html, _file_browser_update; return
@@ -573,25 +639,73 @@ def handle_build_space_button(hf_api_key_ui, ui_space_name_part, ui_owner_name_p

     if not final_owner_for_build: _build_status = "Build Error: HF Owner Name could not be determined. Please specify it."; yield _build_status, _iframe_html, _file_browser_update; return

     result_message = build_logic_create_space(hf_api_key_ui, ui_space_name_part, final_owner_for_build, space_sdk_ui, formatted_markdown_content)
     _build_status = f"Build Process: {result_message}"

     if "Successfully" in result_message:
-
         sub_repo = re.sub(r'[^a-z0-9\-]+', '-', ui_space_name_part.lower()).strip('-') or 'space'
         iframe_url = f"https://{sub_owner}-{sub_repo}{'.static.hf.space' if space_sdk_ui == 'static' else '.hf.space'}"
         _iframe_html = gr.update(value=f'<iframe src="{iframe_url}?__theme=light&embed=true" width="100%" height="700px" style="border:1px solid #eee; border-radius:8px;"></iframe>', visible=True)
-        _build_status += f"\nSpace live at: [Link]({iframe_url}) (Repo: https://huggingface.co/spaces/{

         # Refresh file list after successful build
-        file_list, err_list = list_space_files_for_browsing(hf_api_key_ui, ui_space_name_part,
-        if err_list: _build_status += f"\nFile list refresh error after build: {err_list}"; _file_browser_update = gr.update(visible=True, choices=[], value="Error refreshing files")
         else: _file_browser_update = gr.update(visible=True, choices=sorted(file_list or []), value=None if file_list else "No files found")

-


 def handle_load_file_for_editing(hf_api_key_ui, ui_space_name_part, ui_owner_name_part, selected_file_path):
     _file_content_val, _edit_status_val, _commit_msg_val, _lang_update = "", "Error: No file selected.", gr.update(value=""), gr.update(language="python") # Reset values
     if not selected_file_path or selected_file_path in ["No files found", "Error loading files", "Error refreshing files"]:
         yield _file_content_val, "Select a file from the dropdown.", _commit_msg_val, _lang_update # Clear editor and status

@@ -619,7 +733,7 @@ def handle_load_file_for_editing(hf_api_key_ui, ui_space_name_part, ui_owner_nam
         _edit_status_val = f"Error loading '{selected_file_path}': {err}"
         _commit_msg_val = f"Error loading {selected_file_path}"
         _file_content_val = f"Error loading {selected_file_path}:\n{err}"
-        _lang_update = gr.update(language="python")
         yield _file_content_val, _edit_status_val, _commit_msg_val, _lang_update
         return

@@ -633,11 +747,15 @@ def handle_load_file_for_editing(hf_api_key_ui, ui_space_name_part, ui_owner_nam
 def handle_commit_file_changes(hf_api_key_ui, ui_space_name_part, ui_owner_name_part, file_to_edit_path, edited_content, commit_message):
     global parsed_code_blocks_state_cache
     _edit_status_val = "Processing commit..."
-

     yield _edit_status_val, _file_browser_update_val, _formatted_md_out, _detected_preview_out, _download_btn_out # Yield initial status

     if not file_to_edit_path or file_to_edit_path in ["No files found", "Error loading files", "Error refreshing files"]:
         _edit_status_val = "Error: No valid file selected for commit.";
         yield _edit_status_val, gr.update(), gr.update(), gr.update(), gr.update(); return

@@ -670,7 +788,10 @@ def handle_commit_file_changes(hf_api_key_ui, ui_space_name_part, ui_owner_name_
                 found_in_cache = True
                 break
         if not found_in_cache:
-            # If file was added via editor
             parsed_code_blocks_state_cache.append({
                 "filename": file_to_edit_path,
                 "code": edited_content,
@@ -698,12 +819,16 @@ def handle_commit_file_changes(hf_api_key_ui, ui_space_name_part, ui_owner_name_
 def handle_delete_file(hf_api_key_ui, ui_space_name_part, ui_owner_name_part, file_to_delete_path):
     global parsed_code_blocks_state_cache
     _edit_status_val = "Processing deletion..."
-
     _file_content_editor_update = gr.update(value="") # Clear editor content
     _commit_msg_update = gr.update(value="") # Clear commit message
     _lang_update = gr.update(language="plaintext") # Reset editor language
-    _formatted_md_out

     yield (_edit_status_val, _file_browser_choices_update, _file_browser_value_update, _file_content_editor_update, _commit_msg_update, _lang_update, _formatted_md_out, _detected_preview_out, _download_btn_out) # Yield initial status

@@ -729,6 +854,9 @@ def handle_delete_file(hf_api_key_ui, ui_space_name_part, ui_owner_name_part, fi
     deletion_status_msg = build_logic_delete_space_file(hf_api_key_ui, ui_space_name_part, owner_to_use, file_to_delete_path)
     _edit_status_val = deletion_status_msg

     if "Successfully deleted" in deletion_status_msg:
         # Remove the file from the cache
         parsed_code_blocks_state_cache = [b for b in parsed_code_blocks_state_cache if b["filename"] != file_to_delete_path]

@@ -736,26 +864,35 @@ def handle_delete_file(hf_api_key_ui, ui_space_name_part, ui_owner_name_part, fi
         # Regenerate markdown and preview from the updated cache
         _formatted_md_out, _detected_preview_out, _download_btn_out = _generate_ui_outputs_from_cache(owner_to_use, ui_space_name_part)

-        new_file_list, err_list = list_space_files_for_browsing(hf_api_key_ui, ui_space_name_part, owner_to_use)
         if err_list:
             _edit_status_val += f"\nFile list refresh error: {err_list}"
-            _file_browser_choices_update = gr.update(choices=sorted(new_file_list or []), value="Error refreshing files")
         else:
-            _file_browser_choices_update = gr.update(choices=sorted(new_file_list or []), value=None) # Clear selection

         _file_browser_value_update = None # Explicitly set value to None to clear selection visual

-

     yield (_edit_status_val, _file_browser_choices_update, _file_browser_value_update, _file_content_editor_update, _commit_msg_update, _lang_update, _formatted_md_out, _detected_preview_out, _download_btn_out)


 def handle_refresh_space_status(hf_api_key_ui, ui_owner_name, ui_space_name):
     yield "*Fetching space status...*" # Initial feedback
     owner_to_use = ui_owner_name
     if not owner_to_use:

@@ -832,7 +969,7 @@ body {
 .gr-markdown {
     background-color: rgba(44, 62, 80, 0.7) !important; /* Transparent dark background */
     padding: 10px; /* Add some padding */
-    border-radius:
 }
 /* Style markdown headers for better contrast */
 .gr-markdown h1, .gr-markdown h2, .gr-markdown h3, .gr-markdown h4, .gr-markdown h5, .gr-markdown h6 {

@@ -844,7 +981,6 @@ body {
     background-color: rgba(52, 73, 94, 0.95) !important; /* Darker code background */
     border-color: rgba(189, 195, 199, 0.3) !important;
 }
-
 /* Chatbot specific styling */
 .gr-chatbot {
     background-color: rgba(44, 62, 80, 0.7) !important;

@@ -859,9 +995,17 @@ body {
     background-color: rgba(46, 204, 113, 0.9) !important; /* Greenish background for user messages */
     color: black !important; /* Dark text for green background */
 }

-

 with gr.Blocks(theme=custom_theme, css=custom_css) as demo:
@@ -870,58 +1014,88 @@ with gr.Blocks(theme=custom_theme, css=custom_css) as demo:
     with gr.Row():
         with gr.Sidebar():
             gr.Markdown("## βοΈ Configuration")
-            with gr.Group(): gr.Markdown("### API Keys & Tokens");
             with gr.Group(): gr.Markdown("### Hugging Face Space"); owner_name_input = gr.Textbox(label="HF Owner Name", placeholder="e.g., your-username"); space_name_input = gr.Textbox(label="HF Space Name", value="my-ai-space", placeholder="e.g., my-cool-app"); space_sdk_select = gr.Dropdown(label="Space SDK", choices=["gradio", "streamlit", "docker", "static"], value="gradio", info="Used for new/build."); load_space_button = gr.Button("π Load Existing Space", variant="secondary", size="sm")
-            with gr.Group(): gr.Markdown("### AI Model Settings");
         with gr.Column(scale=3):
             gr.Markdown("## π¬ AI Chat & Code Generation")
-
             gr.Markdown("---")
             with gr.Tabs():
                 with gr.TabItem("π Formatted Space Markdown"): gr.Markdown("Complete Markdown definition for your Space."); formatted_space_output_display = gr.Textbox(label="Current Space Definition", lines=15, interactive=True, show_copy_button=True, value="*Space definition...*"); download_button = gr.DownloadButton(label="Download .md", interactive=False, size="sm")
                 with gr.TabItem("π Detected Files Preview"):
-                    # --- CORRECTED LINE ---
                     detected_files_preview = gr.Markdown(value="*Files preview...*")
-
             gr.Markdown("---")
             with gr.Tabs():
                 with gr.TabItem("π Build & Preview Space"):
                     with gr.Row(): build_space_button = gr.Button("Build / Update Space on HF", variant="primary", scale=2); refresh_status_button = gr.Button("π Refresh Space Status", scale=1)
                     build_status_display = gr.Textbox(label="Build Operation Status", interactive=False, lines=2, value="*Build status will appear here.*"); gr.Markdown("---"); space_runtime_status_display = gr.Markdown("*Space runtime status will appear here after refresh.*"); gr.Markdown("---"); space_iframe_display = gr.HTML(value="<!-- Space Iframe -->", visible=False)
                 with gr.TabItem("βοΈ Edit Space Files"):
                     gr.Markdown("Select a file to view, edit, or delete. Changes are committed to HF Hub.")
                     file_browser_dropdown = gr.Dropdown(label="Select File in Space", choices=[], interactive=True, visible=False, info="Load/build Space first.")
-                    file_content_editor = gr.Code(label="File Content Editor", language="python", lines=15, interactive=True)
                     commit_message_input = gr.Textbox(label="Commit Message", placeholder="e.g., Updated app.py", value="Update via AI Space Editor")
                     with gr.Row(): update_file_button = gr.Button("Commit Changes", variant="primary", scale=2); delete_file_button = gr.Button("ποΈ Delete Selected File", variant="stop", scale=1)
                     edit_status_display = gr.Textbox(label="File Edit/Delete Status", interactive=False, lines=2, value="*Select file...*")

-

     load_space_button.click(fn=handle_load_existing_space, inputs=[hf_api_key_input, owner_name_input, space_name_input], outputs=load_space_outputs)

-
     build_space_button.click(fn=handle_build_space_button, inputs=[hf_api_key_input, space_name_input, owner_name_input, space_sdk_select, formatted_space_output_display], outputs=build_outputs)

-    #
     file_edit_load_outputs = [file_content_editor, edit_status_display, commit_message_input, file_content_editor]
     file_browser_dropdown.change(fn=handle_load_file_for_editing, inputs=[hf_api_key_input, space_name_input, owner_name_input, file_browser_dropdown], outputs=file_edit_load_outputs)

-    #
     commit_file_outputs = [edit_status_display, file_browser_dropdown, formatted_space_output_display, detected_files_preview, download_button]
     update_file_button.click(fn=handle_commit_file_changes, inputs=[hf_api_key_input, space_name_input, owner_name_input, file_browser_dropdown, file_content_editor, commit_message_input], outputs=commit_file_outputs)

-    #
-    delete_file_outputs = [edit_status_display, file_browser_dropdown, file_browser_dropdown, file_content_editor, commit_message_input, file_content_editor, formatted_space_output_display, detected_files_preview, download_button]
     delete_file_button.click(fn=handle_delete_file, inputs=[hf_api_key_input, space_name_input, owner_name_input, file_browser_dropdown], outputs=delete_file_outputs)

     refresh_status_button.click(fn=handle_refresh_space_status, inputs=[hf_api_key_input, owner_name_input, space_name_input], outputs=[space_runtime_status_display])

 if __name__ == "__main__":
app.py (updated version, new side of the diff):

+# app.py
 import gradio as gr
 import re
 import json
+# Remove direct requests import, will use model_logic
+# import requests
 import os
 import tempfile

 # --- build_logic.py is now a hard requirement ---
 from build_logic import (
     create_space as build_logic_create_space,
+    _get_api_token as build_logic_get_api_token, # Keep this for HF Hub token logic
+    whoami as build_logic_whoami, # Keep this for HF user info
     list_space_files_for_browsing,
     get_space_repository_info,
     get_space_file_content,
     update_space_file,
     parse_markdown as build_logic_parse_markdown,
     delete_space_file as build_logic_delete_space_file,
+    get_space_runtime_status
 )
 print("build_logic.py loaded successfully.")
+
+# --- model_logic.py is now a hard requirement ---
+from model_logic import (
+    get_available_providers,
+    get_models_for_provider,
+    get_default_model_for_provider,
+    get_model_id_from_display_name, # Might not be strictly needed in app.py, but good practice
+    generate_stream # This is the core function we'll use
+)
+print("model_logic.py loaded successfully.")
+# --- End imports ---

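For orientation, here is a minimal sketch of the interface that app.py assumes model_logic.py exposes. The function names come from the import block above; the signatures, provider registry, and return conventions below are illustrative assumptions only, since model_logic.py itself is not part of this diff.

# model_logic interface sketch -- illustrative, not the real module.
_PROVIDERS = {  # hypothetical registry; real provider and model names may differ
    "Groq": {"models": ["llama3-70b-8192"], "default": "llama3-70b-8192"},
}

def get_available_providers():
    return list(_PROVIDERS)

def get_models_for_provider(provider):
    return _PROVIDERS.get(provider, {}).get("models", [])

def get_default_model_for_provider(provider):
    return _PROVIDERS.get(provider, {}).get("default")

def get_model_id_from_display_name(provider, display_name):
    return display_name  # assumed 1:1 mapping in this sketch

def generate_stream(provider, model_display_name, api_key_override, messages):
    # Assumed contract, inferred from how handle_chat_submit consumes it below:
    # yield successive text chunks, or a single "Error: ..." string on failure.
    yield "Error: generate_stream is only sketched here."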
 bbb = chr(96) * 3
 parsed_code_blocks_state_cache = []
 BOT_ROLE_NAME = "assistant"
+# Removed GROQ_API_ENDPOINT as it's now in model_logic
+

 DEFAULT_SYSTEM_PROMPT = f"""You are an expert AI programmer. Your role is to generate code and file structures based on user requests, or to modify existing code provided by the user.
 When you provide NEW code for a file, or MODIFIED code for an existing file, use the following format exactly:

 If no code is provided, assist the user with their tasks.
 """

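For illustration, a bot reply that follows the format the prompt asks for (and that the regexes below can parse) would look like the string built here; the filenames and contents are invented.

# Illustrative example of the expected response format (invented content):
example_response = f"""## File Structure
{bbb}
π Root
 π app.py
 π requirements.txt
{bbb}

### File: requirements.txt
{bbb}text
gradio
huggingface_hub
{bbb}
"""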
+# --- Core Utility, Parsing, Export functions (mostly unchanged) ---
 def escape_html_for_markdown(text):
     if not isinstance(text, str): return ""
     # Minimal escaping, expand if needed

     return filename_candidate if filename_candidate else text.strip()

+def _parse_chat_stream_logic(latest_bot_message_content, existing_files_state=None):
+    """
+    Parses a single bot message content string to find file blocks and updates the state.
+    Assumes existing_files_state is the current state *before* this message.
+    """
     latest_blocks_dict = {}
     if existing_files_state:
+        # Copy existing blocks, except for potential structure blocks that might be overwritten
+        for block in existing_files_state:
+            if not block.get("is_structure_block"):
+                latest_blocks_dict[block["filename"]] = block.copy()
+        # Keep existing structure block for now, it might be replaced below

+    results = {"parsed_code_blocks": [], "preview_md": "", "default_selected_filenames": [], "error_message": None}
+    content = latest_bot_message_content or ""

     file_pattern = re.compile(r"### File:\s*(?P<filename_line>[^\n]+)\n(?:```(?P<lang>[\w\.\-\+]*)\n(?P<code>[\s\S]*?)\n```|(?P<binary_msg>\[Binary file(?: - [^\]]+)?\]))")
     structure_pattern = re.compile(r"## File Structure\n```(?:(?P<struct_lang>[\w.-]*)\n)?(?P<structure_code>[\s\S]*?)\n```")

+    # Process the latest bot message for updates to file blocks
+    structure_match = structure_pattern.search(content)
+    if structure_match:
+        # Add/Overwrite the structure block from the latest response
+        latest_blocks_dict["File Structure (original)"] = {"filename": "File Structure (original)", "language": structure_match.group("struct_lang") or "plaintext", "code": structure_match.group("structure_code").strip(), "is_binary": False, "is_structure_block": True}
+    else:
+        # If the latest message *doesn't* have a structure block, keep the previous one if it existed
+        existing_structure_block = next((b for b in (existing_files_state or []) if b.get("is_structure_block")), None)
+        if existing_structure_block:
+            latest_blocks_dict["File Structure (original)"] = existing_structure_block.copy()
+
+    # Find all file blocks in the latest message
+    current_message_file_blocks = {}
+    for match in file_pattern.finditer(content):
+        filename = _clean_filename(match.group("filename_line"))
+        if not filename: continue
+        lang, code_block, binary_msg = match.group("lang"), match.group("code"), match.group("binary_msg")
+        item_data = {"filename": filename, "is_binary": False, "is_structure_block": False}
+        if code_block is not None:
+            item_data["code"], item_data["language"] = code_block.strip(), (lang.strip().lower() if lang else _infer_lang_from_filename(filename))
+        elif binary_msg is not None:
+            item_data["code"], item_data["language"], item_data["is_binary"] = binary_msg.strip(), "binary", True
+        else: continue # Should not happen with the regex
+        current_message_file_blocks[filename] = item_data
+
+    # Update latest_blocks_dict with blocks from the current message
+    # Any file mentioned in the latest message replaces its old version
+    latest_blocks_dict.update(current_message_file_blocks)
+
+    # Convert dictionary values back to a list
     current_parsed_blocks = list(latest_blocks_dict.values())
+    # Sort: structure block first, then files alphabetically
     current_parsed_blocks.sort(key=lambda b: (0, b["filename"]) if b.get("is_structure_block") else (1, b["filename"]))
+
+    # Update the global cache outside this function if needed, or pass it back
+    # For now, let's return the new state and let the caller update the cache.
     results["parsed_code_blocks"] = current_parsed_blocks
     results["default_selected_filenames"] = [b["filename"] for b in current_parsed_blocks if not b.get("is_structure_block")]
     return results

|
193 |
def _export_selected_logic(selected_filenames, space_line_name_for_md, parsed_blocks_for_export):
|
194 |
+
# This function remains largely the same, using the provided parsed_blocks_for_export
|
195 |
results = {"output_str": "", "error_message": None, "download_filepath": None}
|
196 |
+
# Filter out structure blocks for file listing/export content
|
197 |
+
exportable_blocks_content = [b for b in parsed_blocks_for_export if not b.get("is_structure_block") and not b.get("is_binary") and not (b.get("code", "").startswith("[Error loading content:") or b.get("code", "").startswith("[Binary or Skipped file]"))]
|
198 |
+
binary_blocks_content = [b for b in parsed_blocks_for_export if b.get("is_binary") or b.get("code", "").startswith("[Binary or Skipped file]")]
|
|
|
199 |
|
200 |
+
# Collect all filenames (including binary/error ones) for the structure list
|
201 |
+
all_filenames_in_state = sorted(list(set(b["filename"] for b in parsed_blocks_for_export if not b.get("is_structure_block"))))
|
|
|
202 |
|
203 |
+
if not all_filenames_in_state and not any(b.get("is_structure_block") for b in parsed_blocks_for_export):
|
204 |
results["output_str"] = f"# Space: {space_line_name_for_md}\n## File Structure\n{bbb}\nπ Root\n{bbb}\n\n*No files to list in structure or export.*"
|
205 |
try:
|
206 |
with tempfile.NamedTemporaryFile(mode="w", delete=False, suffix=".md", encoding='utf-8') as tmpfile:
|
|
|
217 |
else:
|
218 |
# If no structure block from AI, generate a simple one from detected files
|
219 |
output_lines.extend(["## File Structure", bbb, "π Root"])
|
220 |
+
if all_filenames_in_state:
|
221 |
+
for fname in all_filenames_in_state: output_lines.append(f" π {fname}")
|
|
|
222 |
output_lines.extend([bbb, ""])
|
223 |
|
224 |
output_lines.append("Below are the contents of all files in the space:\n")
|
225 |
exported_content = False
|
226 |
|
227 |
# Determine which files to export content for based on selection or default
|
228 |
+
# Exportable content blocks only
|
229 |
files_to_export_content = []
|
230 |
if selected_filenames:
|
231 |
+
files_to_export_content = [b for b in exportable_blocks_content if b["filename"] in selected_filenames]
|
232 |
else:
|
233 |
+
files_to_export_content = exportable_blocks_content # Export all content blocks by default
|
234 |
+
|
235 |
+
# Add binary/error blocks if they were selected or if exporting all (and they exist)
|
236 |
+
# Binary/error blocks are listed in the structure, but their *content* is just the marker string
|
237 |
+
binary_error_blocks_to_export = []
|
238 |
+
if selected_filenames:
|
239 |
+
binary_error_blocks_to_export = [b for b in binary_blocks_content if b["filename"] in selected_filenames]
|
240 |
+
elif binary_blocks_content:
|
241 |
+
binary_error_blocks_to_export = binary_blocks_content # Include all binary/error if exporting all
|
242 |
+
|
243 |
+
# Combine and sort all blocks whose content/marker should be included
|
244 |
+
all_blocks_to_export_content = sorted(files_to_export_content + binary_error_blocks_to_export, key=lambda b: b["filename"])
|
245 |
|
|
|
|
|
246 |
|
247 |
+
for block in all_blocks_to_export_content:
|
248 |
output_lines.append(f"### File: {block['filename']}")
|
249 |
+
if block.get('is_binary') or block.get("code", "").startswith("[Binary file") or block.get("code", "").startswith("[Error loading content:") or block.get("code", "").startswith("[Binary or Skipped file]"):
|
250 |
+
# For binary/error placeholders, just add the marker line
|
251 |
+
output_lines.append(block.get('code','[Binary or Skipped file]'))
|
252 |
else:
|
253 |
+
# For actual code/text content
|
254 |
output_lines.extend([f"{bbb}{block.get('language', 'plaintext') or 'plaintext'}", block.get('code',''), bbb])
|
255 |
output_lines.append(""); exported_content = True
|
256 |
|
257 |
+
if not exported_content and not all_filenames_in_state: output_lines.append("*No files in state.*")
|
258 |
+
elif not exported_content: output_lines.append("*No files with editable content are in the state or selected.*") # Message updated
|
259 |
+
|
260 |
final_output_str = "\n".join(output_lines)
|
261 |
results["output_str"] = final_output_str
|
262 |
try:
|
|
|
265 |
except Exception as e: print(f"Error creating temp file: {e}"); results["error_message"] = "Could not prepare file for download."
|
266 |
return results
|
267 |
|
268 |
+
|
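The exporter above emits one Markdown document per Space; with invented file names and contents, its output has roughly this shape.

# Rough shape of the exported Markdown (invented example):
#
#   # Space: your-username/my-ai-space
#   ## File Structure
#   ```
#   π Root
#    π app.py
#   ```
#
#   Below are the contents of all files in the space:
#
#   ### File: app.py
#   ```python
#   print("hello")
#   ```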
 def _convert_gr_history_to_api_messages(system_prompt, gr_history, current_user_message=None):
+    # This function is fine as is, it produces standard OpenAI format
     messages = [{"role": "system", "content": system_prompt}] if system_prompt else []
     for user_msg, bot_msg in gr_history:
         if user_msg: messages.append({"role": "user", "content": user_msg})

     if current_user_message: messages.append({"role": "user", "content": current_user_message})
     return messages

+# This function is no longer needed as we process the *latest* message content directly from the stream handler
+# def get_latest_bot_message_as_json(gr_history):
+#     ...

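For illustration, a call to the converter above produces an OpenAI-style message list; the history here is invented, and the assistant turn is assumed to be appended with BOT_ROLE_NAME in the lines elided from this hunk.

# Illustrative call (invented history):
msgs = _convert_gr_history_to_api_messages(
    "You are an expert AI programmer.",
    [("Make a hello app", "### File: app.py\n...")],
    current_user_message="Now add a README",
)
# msgs is roughly:
# [{"role": "system", "content": "You are an expert AI programmer."},
#  {"role": "user", "content": "Make a hello app"},
#  {"role": "assistant", "content": "### File: app.py\n..."},  # assumed, via BOT_ROLE_NAME
#  {"role": "user", "content": "Now add a README"}]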
 def _generate_ui_outputs_from_cache(owner, space_name):
+    # This function remains largely the same, generating UI previews and the export MD
     global parsed_code_blocks_state_cache
     preview_md_val = "*No files in cache to display.*"
     formatted_md_val = f"# Space: {owner}/{space_name}\n## File Structure\n{bbb}\nπ Root\n{bbb}\n\n*No files in cache.*" if owner or space_name else "*Load or define a Space to see its Markdown structure.*"

         # Handle content display
         content = block.get('code', '')
+        if block.get('is_binary') or content.startswith("["): # Treat errors/skipped as binary for preview display
             preview_md_lines.append(f"\n`{escape_html_for_markdown(content)}`\n")
         elif block.get('is_structure_block'):
             preview_md_lines.append(f"\n{bbb}{block.get('language', 'plaintext') or 'plaintext'}\n{content}\n{bbb}\n")

     space_line_name = f"{owner}/{space_name}" if owner and space_name else (owner or space_name or "your-space")

     # _export_selected_logic handles selecting which files to include in the export MD
+    # Passing None means export all non-structure/non-binary/non-error content + list all files in structure
+    export_result = _export_selected_logic(None, space_line_name, parsed_code_blocks_state_cache)
     formatted_md_val = export_result["output_str"]
     download_file = export_result["download_filepath"]

     return formatted_md_val, preview_md_val, gr.update(value=download_file, interactive=download_file is not None)

+
+# --- Refactored Chat Submit Handler ---
+def handle_chat_submit(user_message, chat_history, api_key_input, provider_select, model_select, system_prompt, hf_owner_name, hf_repo_name, _current_formatted_markdown):
     global parsed_code_blocks_state_cache
+    _chat_msg_in = ""
+    _chat_hist = list(chat_history)
+    _status = "Initializing..."
     _detected_files_update, _formatted_output_update, _download_btn_update = gr.update(), gr.update(), gr.update(interactive=False, value=None)

     # --- Before sending to AI: Parse existing files from the current formatted markdown ---

     if user_message and _current_formatted_markdown:
         try:
             parsed_from_md = build_logic_parse_markdown(_current_formatted_markdown)
+            # Update cache with files parsed from the markdown.
+            # Structure block from AI is volatile, always prefer structure from latest AI message.
+            # Files from markdown overwrite any previous file blocks.
+            new_cache_state = []
+            # Add structure block from *current cache* if it exists, it will be replaced if the AI provides a new one
+            existing_structure_block = next((b for b in parsed_code_blocks_state_cache if b.get("is_structure_block")), None)
+            if existing_structure_block:
+                new_cache_state.append(existing_structure_block.copy()) # Add copy

             for f_info in parsed_from_md.get("files", []):
+                # Only add if it has a path and isn't the structure block representation placeholder
+                if f_info.get("path") and f_info["path"] != "File Structure (original)":
                     # Check if it's a binary representation string
                     is_binary_repr = isinstance(f_info.get("content"), str) and (f_info["content"].startswith("[Binary file") or f_info["content"].startswith("[Error loading content:") or f_info["content"].startswith("[Binary or Skipped file]"))
+                    # Check if a block with this filename already exists in new_cache_state and replace it
+                    found_existing = False
+                    for i, block in enumerate(new_cache_state):
+                        if block["filename"] == f_info["path"] and not block.get("is_structure_block"): # Only replace non-structure blocks
+                            new_cache_state[i] = {
+                                "filename": f_info["path"],
+                                "code": f_info.get("content", ""),
+                                "language": "binary" if is_binary_repr else _infer_lang_from_filename(f_info["path"]),
+                                "is_binary": is_binary_repr,
+                                "is_structure_block": False
+                            }
+                            found_existing = True
+                            break
+                    if not found_existing:
+                        new_cache_state.append({
+                            "filename": f_info["path"],
+                            "code": f_info.get("content", ""),
+                            "language": "binary" if is_binary_repr else _infer_lang_from_filename(f_info["path"]),
+                            "is_binary": is_binary_repr,
+                            "is_structure_block": False
+                        })
+
+            # Sort the updated cache state
+            new_cache_state.sort(key=lambda b: (0, b["filename"]) if b.get("is_structure_block") else (1, b["filename"]))
+            parsed_code_blocks_state_cache = new_cache_state # Update global cache
+
         except Exception as e:
             # Log error but don't block chat submission

     if not user_message.strip():
         _status = "Cannot send an empty message."
+        yield (_chat_msg_in, _chat_hist, _status, _detected_files_update, _formatted_output_update, _download_btn_update); return
+    _chat_hist.append((user_message, None)); _status = f"Sending to {model_select} via {provider_select}..."
     yield (_chat_msg_in, _chat_hist, _status, _detected_files_update, _formatted_output_update, _download_btn_update)

+    # Pass the API key from the UI directly to model_logic
+    api_key_override = api_key_input
+    # model_id = get_model_id_from_display_name(provider_select, model_select) # model_logic handles display name to ID

     current_sys_prompt = system_prompt.strip() or DEFAULT_SYSTEM_PROMPT

+    # Include current file contents in the prompt as context for the AI
+    # This context is built from the *current cache state* (which was just updated from the formatted markdown)
     current_files_context = ""
     if parsed_code_blocks_state_cache:
         current_files_context = "\n\n## Current Files in Space\n"

             current_files_context += f"### File: {block['filename']}\n{bbb}\n{block['code']}\n{bbb}\n"
         else:
             current_files_context += f"### File: {block['filename']}\n"
+            if block.get("is_binary") or block.get("code", "").startswith("["): # Include binary/error markers
                 current_files_context += f"{block['code']}\n" # e.g. [Binary file...]
             else:
                 current_files_context += f"{bbb}{block.get('language', 'plaintext') or 'plaintext'}\n{block.get('code','')}\n{bbb}\n"
         current_files_context += "\n"

     # Append current file context to the user message
+    # This combined message structure helps the model understand the context and the expected output format
     user_message_with_context = user_message.strip()
     if current_files_context.strip():
+        user_message_with_context = user_message_with_context + current_files_context + "\nBased on the current files above and our chat history, please provide updated file contents using the `### File: ...\n```...\n```\n` format for any files you are creating, modifying, or want to include in the final output. If you are providing a file structure, use the `## File Structure\n```\n...\n```\n` format. Omit files you want to delete from your response."

+    # Convert history to API messages, including the user message with context
     api_msgs = _convert_gr_history_to_api_messages(current_sys_prompt, _chat_hist[:-1], user_message_with_context)

+    # --- Call the new model_logic streaming function ---
     try:
+        _status = f"Waiting for {model_select} via {provider_select}..."; yield (_chat_msg_in, _chat_hist, _status, _detected_files_update, _formatted_output_update, _download_btn_update)
+
+        # Accumulate the full response content for parsing *after* streaming
+        full_bot_response_content = ""
+        error_during_stream = None
+
+        # Generate stream from model_logic
+        for chunk in generate_stream(provider_select, model_select, api_key_override, api_msgs):
+            if chunk is None: continue # Skip None chunks if any
+            if chunk.startswith("Error: ") or chunk.startswith("API HTTP Error"):
+                # If an error chunk is received, treat it as the final output and stop
+                full_bot_response_content = chunk
+                error_during_stream = chunk
+                break # Stop processing stream
+            else:
+                # Accumulate response and update the last message in chat_hist
+                full_bot_response_content += chunk
+                _chat_hist[-1] = (user_message, full_bot_response_content)
+                _status = f"Streaming from {model_select}..."
+                # Yield update immediately after receiving chunk
+                yield (_chat_msg_in, _chat_hist, _status, _detected_files_update, _formatted_output_update, _download_btn_update)
+
+        # After the stream finishes or breaks
+        if error_during_stream:
+            _status = error_during_stream # Set status to the error message
+        elif full_bot_response_content:
+            _status = f"Streaming complete. Processing files from {model_select} response..."
+
+            # Pass the *current state* (updated from markdown at the start)
+            # and the *latest bot message content* to the parsing logic.
+            # _parse_chat_stream_logic will merge and update based on the bot's response.
+            parsing_res = _parse_chat_stream_logic(full_bot_response_content, existing_files_state=parsed_code_blocks_state_cache)

             if parsing_res["error_message"]:
                 _status = f"Parsing Error: {parsing_res['error_message']}"
+                # Append parsing error to the bot's response in chat for visibility? Or just status?
+                # For now, update status and detected files area with error message
                 _detected_files_update = gr.Markdown(f"## Parsing Error\n`{escape_html_for_markdown(parsing_res['error_message'])}`")
             else:
+                # Update the global cache with the new state returned by the parser
+                global parsed_code_blocks_state_cache
+                parsed_code_blocks_state_cache = parsing_res["parsed_code_blocks"]
+
+                # Regenerate UI outputs from the *updated* cache
                 _formatted_output_update, _detected_files_update, _download_btn_update = _generate_ui_outputs_from_cache(hf_owner_name, hf_repo_name)
                 _status = "Processing complete. Previews updated."
         else:
+            # Handle cases where the stream finished but yielded no content (e.g., filter)
+            _status = "AI response complete, but returned no content."
+            # Keep existing previews/markdown if no content was generated to parse
+
     except Exception as e:
+        # Catch any errors that occurred *before* or *during* the stream setup/iteration
+        error_msg = f"An unexpected error occurred during AI generation: {e}"
+        print(f"Unexpected error in chat submit stream: {e}")
+        # Update the last chat message with the error
+        if _chat_hist and len(_chat_hist) > 0 and _chat_hist[-1][1] is None:
+            _chat_hist[-1] = (_chat_hist[-1][0], error_msg) # Keep user message, add error as bot message
+        else:
+            _chat_hist.append((user_message, error_msg)) # Append as a new user/bot turn if structure unexpected
+        _status = error_msg
+        # Previews and markdown might not be affected by a generation error, keep existing state

+    # Final yield to update UI after all processing
     yield (_chat_msg_in, _chat_hist, _status, _detected_files_update, _formatted_output_update, _download_btn_update)

494 |
|
495 |
+
# --- Handler to update model dropdown based on provider selection ---
|
496 |
+
def update_models_dropdown(provider_select):
|
497 |
+
"""Updates the model dropdown choices and selects the default model."""
|
498 |
+
if not provider_select:
|
499 |
+
return gr.update(choices=[], value=None)
|
500 |
+
models = get_models_for_provider(provider_select)
|
501 |
+
default_model = get_default_model_for_provider(provider_select)
|
502 |
+
return gr.update(choices=models, value=default_model if default_model in models else (models[0] if models else None))
|
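Hedged sketch, not part of this diff: the dropdown handler above and the startup code near the bottom assume that model_logic exposes get_available_providers(), get_models_for_provider(provider) and get_default_model_for_provider(provider). One minimal way such helpers could be backed (the provider/model names below are placeholders, not the app's real catalogue):

_PROVIDER_MODELS = {
    "Groq": {"models": ["example-large-model", "example-small-model"], "default": "example-large-model"},
}

def get_available_providers():
    # All providers the UI can offer
    return list(_PROVIDER_MODELS.keys())

def get_models_for_provider(provider):
    # Model IDs for one provider; empty list if the provider is unknown
    return list(_PROVIDER_MODELS.get(provider, {}).get("models", []))

def get_default_model_for_provider(provider):
    # Preferred model for the provider, or None
    return _PROVIDER_MODELS.get(provider, {}).get("default")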
503 |
+
|
504 |
+
|
505 |
+
# --- Existing handlers for Load, Build, Edit, Delete, Status (Mostly unchanged) ---
|
506 |
+
# Ensure they correctly use hf_api_key_ui and potentially owner_name_input/space_name_input
|
507 |
+
|
508 |
def handle_load_existing_space(hf_api_key_ui, ui_owner_name, ui_space_name):
|
509 |
+
# ... (rest of this function is largely the same, but needs to update global cache)
|
510 |
global parsed_code_blocks_state_cache
|
511 |
_formatted_md_val, _detected_preview_val, _status_val = "*Loading files...*", "*Loading files...*", f"Loading Space: {ui_owner_name}/{ui_space_name}..."
|
512 |
_file_browser_update, _iframe_html_update, _download_btn_update = gr.update(visible=False, choices=[], value=None), gr.update(value=None, visible=False), gr.update(interactive=False, value=None)
|
513 |
_build_status_clear, _edit_status_clear, _runtime_status_clear = "*Build status will appear here.*", "*Select a file to load or delete.*", "*Space runtime status will appear here after refresh.*"
|
514 |
+
_chat_history_clear = [] # Clear chat history on loading a new space
|
515 |
|
516 |
# Yield initial state to update UI
|
517 |
+
yield (_formatted_md_val, _detected_preview_val, _status_val, _file_browser_update, gr.update(value=ui_owner_name), gr.update(value=ui_space_name), _iframe_html_update, _download_btn_update, _build_status_clear, _edit_status_clear, _runtime_status_clear, _chat_history_clear)
|
518 |
|
519 |
owner_to_use, updated_owner_name_val = ui_owner_name, ui_owner_name
|
520 |
error_occurred = False
|
|
|
538 |
|
539 |
if error_occurred:
|
540 |
# Yield error state
|
541 |
+
yield (f"*Error: {_status_val}*", f"*Error: {_status_val}*", _status_val, _file_browser_update, updated_owner_name_val, ui_space_name, _iframe_html_update, _download_btn_update, _build_status_clear, _edit_status_clear, _runtime_status_clear, _chat_history_clear)
|
542 |
+
parsed_code_blocks_state_cache = [] # Clear cache on error
|
543 |
return # Stop execution
|
544 |
|
545 |
sdk_for_iframe, file_list, err_list_files = get_space_repository_info(hf_api_key_ui, ui_space_name, owner_to_use)
|
|
|
556 |
parsed_code_blocks_state_cache = [] # Clear cache on error
|
557 |
_formatted_md_val, _detected_preview_val, _download_btn_update = _generate_ui_outputs_from_cache(owner_to_use, ui_space_name)
|
558 |
_file_browser_update = gr.update(visible=True, choices=[], value="Error loading files") # Update file browser with error state
|
559 |
+
yield (f"*Error: {err_list_files}*", "*Error loading files*", _status_val, _file_browser_update, updated_owner_name_val, ui_space_name, _iframe_html_update, _download_btn_update, _build_status_clear, _edit_status_clear, _runtime_status_clear, _chat_history_clear)
|
560 |
return # Stop execution
|
561 |
|
562 |
if not file_list:
|
|
|
564 |
parsed_code_blocks_state_cache = []
|
565 |
_formatted_md_val, _detected_preview_val, _download_btn_update = _generate_ui_outputs_from_cache(owner_to_use, ui_space_name)
|
566 |
_file_browser_update = gr.update(visible=True, choices=[], value="No files found")
|
567 |
+
yield (_formatted_md_val, _detected_preview_val, _status_val, _file_browser_update, updated_owner_name_val, ui_space_name, _iframe_html_update, _download_btn_update, _build_status_clear, _edit_status_clear, _runtime_status_clear, _chat_history_clear)
|
568 |
return # Stop execution
|
569 |
|
570 |
|
571 |
+
loaded_files_for_cache = [] # Build a list to become the new cache state
|
572 |
_status_val = f"Loading {len(file_list)} files from {owner_to_use}/{ui_space_name} (SDK: {sdk_for_iframe or 'unknown'})...";
|
573 |
# Yield intermediate status while loading files
|
574 |
+
yield (_formatted_md_val, _detected_preview_val, _status_val, gr.update(visible=True, choices=sorted(file_list or []), value=None), updated_owner_name_val, ui_space_name, _iframe_html_update, _download_btn_update, _build_status_clear, _edit_status_clear, _runtime_status_clear, _chat_history_clear)
|
575 |
|
576 |
|
577 |
for file_path in file_list:
|
|
|
585 |
file_path.startswith("__pycache__/") or "/__pycache__/" in file_path or \
|
586 |
file_path.startswith("node_modules/") or "/node_modules/" in file_path or \
|
587 |
file_path.startswith("venv/") or "/venv/" in file_path or \
|
588 |
+
file_path.startswith(".venv/") or "/.venv/" in file_path or \
|
589 |
+
file_path == "README.md" or file_path == "LICENSE": # Optionally skip common non-code files like README/LICENSE
|
590 |
+
loaded_files_for_cache.append({"filename": file_path, "code": "[Binary or Skipped file]", "language": "binary", "is_binary": True, "is_structure_block": False}); continue
|
591 |
|
592 |
# Handle potential issues when reading large or non-UTF-8 files
|
593 |
try:
|
594 |
content, err_get = get_space_file_content(hf_api_key_ui, ui_space_name, owner_to_use, file_path)
|
595 |
if err_get:
|
596 |
# If there's an error getting content, record it but don't fail the whole load
|
597 |
+
loaded_files_for_cache.append({"filename": file_path, "code": f"[Error loading content: {err_get}]", "language": _infer_lang_from_filename(file_path), "is_binary": False, "is_structure_block": False})
|
598 |
print(f"Error loading {file_path}: {err_get}");
|
599 |
continue
|
600 |
# If content is successfully loaded
|
601 |
+
loaded_files_for_cache.append({"filename": file_path, "code": content, "language": _infer_lang_from_filename(file_path), "is_binary": False, "is_structure_block": False})
|
602 |
except Exception as content_ex:
|
603 |
# Catch any other unexpected exceptions during file content fetching
|
604 |
+
loaded_files_for_cache.append({"filename": file_path, "code": f"[Unexpected error loading content: {content_ex}]", "language": _infer_lang_from_filename(file_path), "is_binary": False, "is_structure_block": False})
|
605 |
print(f"Unexpected error loading {file_path}: {content_ex}")
|
606 |
continue
|
607 |
|
608 |
+
# Add a placeholder structure block if none was loaded (AI will generate one later if needed)
|
609 |
+
# This would ensure the cache holds a structure block in addition to the file entries
|
610 |
+
# structure_block = next((b for b in loaded_files_for_cache if b.get("is_structure_block")), None)
|
611 |
+
# if not structure_block:
|
612 |
+
# loaded_files_for_cache.insert(0, {"filename": "File Structure (original)", "code": "📁 Root\n ...\n", "language": "plaintext", "is_binary": False, "is_structure_block": True})
|
613 |
|
614 |
+
parsed_code_blocks_state_cache = loaded_files_for_cache
|
615 |
_formatted_md_val, _detected_preview_val, _download_btn_update = _generate_ui_outputs_from_cache(owner_to_use, ui_space_name)
|
616 |
+
_status_val = f"Successfully loaded Space: {owner_to_use}/{ui_space_name}. Markdown ready. {len(file_list)} files listed."
|
617 |
_file_browser_update = gr.update(visible=True, choices=sorted(file_list or []), value=None) # Use the full file list for the dropdown
|
618 |
|
619 |
+
# Final yield with updated state
|
620 |
+
yield (_formatted_md_val, _detected_preview_val, _status_val, _file_browser_update, updated_owner_name_val, ui_space_name, _iframe_html_update, _download_btn_update, _build_status_clear, _edit_status_clear, _runtime_status_clear, _chat_history_clear)
|
621 |
|
622 |
|
623 |
def handle_build_space_button(hf_api_key_ui, ui_space_name_part, ui_owner_name_part, space_sdk_ui, formatted_markdown_content):
|
624 |
+
# ... (this function calls build_logic_create_space and refreshes file list)
|
625 |
_build_status, _iframe_html, _file_browser_update = "Starting space build process...", gr.update(value=None, visible=False), gr.update(visible=False, choices=[], value=None)
|
626 |
yield _build_status, _iframe_html, _file_browser_update # Yield initial status
|
627 |
if not ui_space_name_part or "/" in ui_space_name_part: _build_status = f"Build Error: HF Space Name '{ui_space_name_part}' must be repo name only (no '/')."; yield _build_status, _iframe_html, _file_browser_update; return
|
|
|
639 |
|
640 |
if not final_owner_for_build: _build_status = "Build Error: HF Owner Name could not be determined. Please specify it."; yield _build_status, _iframe_html, _file_browser_update; return
|
641 |
|
642 |
+
# Before building, parse the markdown to ensure the cache reflects exactly what's being built
|
643 |
+
# This prevents inconsistencies if the user manually edited the markdown output
|
644 |
+
try:
|
645 |
+
parsed_from_md_for_build = build_logic_parse_markdown(formatted_markdown_content)
|
646 |
+
# Replace the global cache state with the state derived from the markdown being built
|
647 |
+
global parsed_code_blocks_state_cache
|
648 |
+
parsed_code_blocks_state_cache = []
|
649 |
+
if parsed_from_md_for_build.get("owner_md"): # Update UI owner/space name if present in MD
|
650 |
+
ui_owner_name_part = parsed_from_md_for_build["owner_md"]
|
651 |
+
if parsed_from_md_for_build.get("repo_name_md"):
|
652 |
+
ui_space_name_part = parsed_from_md_for_build["repo_name_md"]
|
653 |
+
|
654 |
+
# Rebuild cache from parsed markdown files + structure block
|
655 |
+
structure_block_md = next((f for f in parsed_from_md_for_build.get("files", []) if f.get("path") == "File Structure (original)"), None)
|
656 |
+
if structure_block_md:
|
657 |
+
parsed_code_blocks_state_cache.append({
|
658 |
+
"filename": structure_block_md["path"],
|
659 |
+
"code": structure_block_md["content"],
|
660 |
+
"language": "plaintext", # Markdown parser doesn't detect lang for structure block ```
|
661 |
+
"is_binary": False,
|
662 |
+
"is_structure_block": True
|
663 |
+
})
|
664 |
+
|
665 |
+
for f_info in parsed_from_md_for_build.get("files", []):
|
666 |
+
if f_info.get("path") and f_info["path"] != "File Structure (original)":
|
667 |
+
is_binary_repr = isinstance(f_info.get("content"), str) and (f_info["content"].startswith("[Binary file") or f_info["content"].startswith("[Error loading content:") or f_info["content"].startswith("[Binary or Skipped file]"))
|
668 |
+
parsed_code_blocks_state_cache.append({
|
669 |
+
"filename": f_info["path"],
|
670 |
+
"code": f_info.get("content", ""),
|
671 |
+
"language": "binary" if is_binary_repr else _infer_lang_from_filename(f_info["path"]),
|
672 |
+
"is_binary": is_binary_repr,
|
673 |
+
"is_structure_block": False
|
674 |
+
})
|
675 |
+
parsed_code_blocks_state_cache.sort(key=lambda b: (0, b["filename"]) if b.get("is_structure_block") else (1, b["filename"]))
|
676 |
+
|
677 |
+
except Exception as e:
|
678 |
+
_build_status = f"Build Error: Failed to parse Markdown structure before building: {e}";
|
679 |
+
yield _build_status, _iframe_html, _file_browser_update; return # Stop build on parse error
|
680 |
+
|
681 |
+
|
682 |
result_message = build_logic_create_space(hf_api_key_ui, ui_space_name_part, final_owner_for_build, space_sdk_ui, formatted_markdown_content)
|
683 |
_build_status = f"Build Process: {result_message}"
|
684 |
|
685 |
if "Successfully" in result_message:
|
686 |
+
# Use potentially updated owner/space name from markdown parsing
|
687 |
+
sub_owner = re.sub(r'[^a-z0-9\-]+', '-', ui_owner_name_part.lower()).strip('-') or 'owner'
|
688 |
sub_repo = re.sub(r'[^a-z0-9\-]+', '-', ui_space_name_part.lower()).strip('-') or 'space'
|
689 |
iframe_url = f"https://{sub_owner}-{sub_repo}{'.static.hf.space' if space_sdk_ui == 'static' else '.hf.space'}"
|
690 |
_iframe_html = gr.update(value=f'<iframe src="{iframe_url}?__theme=light&embed=true" width="100%" height="700px" style="border:1px solid #eee; border-radius:8px;"></iframe>', visible=True)
|
691 |
+
_build_status += f"\nSpace live at: [Link]({iframe_url}) (Repo: https://huggingface.co/spaces/{ui_owner_name_part}/{ui_space_name_part})"
|
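For reference, a quick illustration of how the slug logic above turns owner/repo names into the Space's direct URL (the names below are hypothetical, not taken from this app):

import re
slug = lambda s: re.sub(r'[^a-z0-9\-]+', '-', s.lower()).strip('-')
owner, repo = "My_User", "My Cool App"  # hypothetical example values
print(f"https://{slug(owner)}-{slug(repo)}.hf.space")
# -> https://my-user-my-cool-app.hf.space (the code above appends .static.hf.space for the static SDK)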
692 |
|
693 |
# Refresh file list after successful build
|
694 |
+
file_list, err_list = list_space_files_for_browsing(hf_api_key_ui, ui_space_name_part, ui_owner_name_part)
|
695 |
+
if err_list: _build_status += f"\nFile list refresh error after build: {err_list}"; _file_browser_update = gr.update(visible=True, choices=sorted(file_list or []), value="Error refreshing files")
|
696 |
else: _file_browser_update = gr.update(visible=True, choices=sorted(file_list or []), value=None if file_list else "No files found")
|
697 |
|
698 |
+
# Update UI with owner/space names extracted from markdown if present
|
699 |
+
owner_name_output = gr.update(value=ui_owner_name_part)
|
700 |
+
space_name_output = gr.update(value=ui_space_name_part)
|
701 |
+
|
702 |
|
703 |
+
yield _build_status, _iframe_html, _file_browser_update, owner_name_output, space_name_output
|
704 |
|
705 |
+
|
706 |
+
# File editing handlers: they must also keep the global cache in sync after commit/delete
|
707 |
def handle_load_file_for_editing(hf_api_key_ui, ui_space_name_part, ui_owner_name_part, selected_file_path):
|
708 |
+
# ... (rest of this function is the same)
|
709 |
_file_content_val, _edit_status_val, _commit_msg_val, _lang_update = "", "Error: No file selected.", gr.update(value=""), gr.update(language="python") # Reset values
|
710 |
if not selected_file_path or selected_file_path in ["No files found", "Error loading files", "Error refreshing files"]:
|
711 |
yield _file_content_val, "Select a file from the dropdown.", _commit_msg_val, _lang_update # Clear editor and status
|
|
|
733 |
_edit_status_val = f"Error loading '{selected_file_path}': {err}"
|
734 |
_commit_msg_val = f"Error loading {selected_file_path}"
|
735 |
_file_content_val = f"Error loading {selected_file_path}:\n{err}"
|
736 |
+
_lang_update = gr.update(language="python") # Default language for error display
|
737 |
yield _file_content_val, _edit_status_val, _commit_msg_val, _lang_update
|
738 |
return
|
739 |
|
|
|
747 |
def handle_commit_file_changes(hf_api_key_ui, ui_space_name_part, ui_owner_name_part, file_to_edit_path, edited_content, commit_message):
|
748 |
global parsed_code_blocks_state_cache
|
749 |
_edit_status_val = "Processing commit..."
|
750 |
+
# Initialize updates for components that might change
|
751 |
+
_file_browser_update_val = gr.update() # Will update choices or value
|
752 |
+
_formatted_md_out = gr.update() # Will update markdown
|
753 |
+
_detected_preview_out = gr.update() # Will update markdown preview
|
754 |
+
_download_btn_out = gr.update() # Will update download button
|
755 |
|
756 |
yield _edit_status_val, _file_browser_update_val, _formatted_md_out, _detected_preview_out, _download_btn_out # Yield initial status
|
757 |
|
758 |
+
|
759 |
if not file_to_edit_path or file_to_edit_path in ["No files found", "Error loading files", "Error refreshing files"]:
|
760 |
_edit_status_val = "Error: No valid file selected for commit.";
|
761 |
yield _edit_status_val, gr.update(), gr.update(), gr.update(), gr.update(); return
|
|
|
788 |
found_in_cache = True
|
789 |
break
|
790 |
if not found_in_cache:
|
791 |
+
# If the file was added/edited via the editor and wasn't in the initial load cache (e.g. a binary/error placeholder), add or replace it
|
792 |
+
# First remove any existing placeholder for this file
|
793 |
+
parsed_code_blocks_state_cache = [b for b in parsed_code_blocks_state_cache if b["filename"] != file_to_edit_path]
|
794 |
+
# Then add the new text content block
|
795 |
parsed_code_blocks_state_cache.append({
|
796 |
"filename": file_to_edit_path,
|
797 |
"code": edited_content,
|
|
|
819 |
def handle_delete_file(hf_api_key_ui, ui_space_name_part, ui_owner_name_part, file_to_delete_path):
|
820 |
global parsed_code_blocks_state_cache
|
821 |
_edit_status_val = "Processing deletion..."
|
822 |
+
# Initialize updates for components that might change/clear
|
823 |
+
_file_browser_choices_update = gr.update() # Update choices
|
824 |
+
_file_browser_value_update = None # Clear selected file value
|
825 |
_file_content_editor_update = gr.update(value="") # Clear editor content
|
826 |
_commit_msg_update = gr.update(value="") # Clear commit message
|
827 |
_lang_update = gr.update(language="plaintext") # Reset editor language
|
828 |
+
_formatted_md_out = gr.update() # Update markdown
|
829 |
+
_detected_preview_out = gr.update() # Update markdown preview
|
830 |
+
_download_btn_out = gr.update() # Update download button
|
831 |
+
|
832 |
|
833 |
yield (_edit_status_val, _file_browser_choices_update, _file_browser_value_update, _file_content_editor_update, _commit_msg_update, _lang_update, _formatted_md_out, _detected_preview_out, _download_btn_out) # Yield initial status
|
834 |
|
|
|
854 |
deletion_status_msg = build_logic_delete_space_file(hf_api_key_ui, ui_space_name_part, owner_to_use, file_to_delete_path)
|
855 |
_edit_status_val = deletion_status_msg
|
856 |
|
857 |
+
# Always refresh the file list dropdown choices after a delete attempt, successful or not
|
858 |
+
new_file_list, err_list = list_space_files_for_browsing(hf_api_key_ui, ui_space_name_part, owner_to_use)
|
859 |
+
|
860 |
if "Successfully deleted" in deletion_status_msg:
|
861 |
# Remove the file from the cache
|
862 |
parsed_code_blocks_state_cache = [b for b in parsed_code_blocks_state_cache if b["filename"] != file_to_delete_path]
|
|
|
864 |
# Regenerate markdown and preview from the updated cache
|
865 |
_formatted_md_out, _detected_preview_out, _download_btn_out = _generate_ui_outputs_from_cache(owner_to_use, ui_space_name_part)
|
866 |
|
867 |
+
|
|
|
868 |
if err_list:
|
869 |
_edit_status_val += f"\nFile list refresh error: {err_list}"
|
870 |
+
_file_browser_choices_update = gr.update(choices=sorted(new_file_list or []), value="Error refreshing files") # Set value to error state
|
871 |
else:
|
872 |
+
_file_browser_choices_update = gr.update(choices=sorted(new_file_list or []), value=None) # Clear selection visually and internally
|
873 |
|
874 |
_file_browser_value_update = None # Explicitly set value to None to clear the selection visually
|
875 |
|
876 |
+
|
877 |
+
else: # If deletion failed
|
878 |
+
if err_list:
|
879 |
+
_edit_status_val += f"\nFile list refresh error: {err_list}"
|
880 |
+
_file_browser_choices_update = gr.update(choices=sorted(new_file_list or []), value="Error refreshing files")
|
881 |
+
_file_browser_value_update = "Error refreshing files" # Keep error state in value if list failed
|
882 |
+
else:
|
883 |
+
# If list refresh succeeded but delete failed, refresh choices and keep the *failed-to-delete* file selected
|
884 |
+
_file_browser_choices_update = gr.update(choices=sorted(new_file_list or []), value=file_to_delete_path)
|
885 |
+
_file_browser_value_update = file_to_delete_path # Keep the file selected visually
|
886 |
+
|
887 |
+
# Markdown and preview are not changed if deletion failed; keep the current updates as gr.update()
|
888 |
+
|
889 |
|
890 |
yield (_edit_status_val, _file_browser_choices_update, _file_browser_value_update, _file_content_editor_update, _commit_msg_update, _lang_update, _formatted_md_out, _detected_preview_out, _download_btn_out)
|
891 |
|
892 |
|
893 |
+
# Space runtime status handler
|
894 |
def handle_refresh_space_status(hf_api_key_ui, ui_owner_name, ui_space_name):
|
895 |
+
# ... (rest of this function is the same)
|
896 |
yield "*Fetching space status...*" # Initial feedback
|
897 |
owner_to_use = ui_owner_name
|
898 |
if not owner_to_use:
|
|
|
969 |
.gr-markdown {
|
970 |
background-color: rgba(44, 62, 80, 0.7) !important; /* Transparent dark background */
|
971 |
padding: 10px; /* Add some padding */
|
972 |
+
border-radius: 5px;
|
973 |
}
|
974 |
/* Style markdown headers for better contrast */
|
975 |
.gr-markdown h1, .gr-markdown h2, .gr-markdown h3, .gr-markdown h4, .gr-markdown h5, .gr-markdown h6 {
|
|
|
981 |
background-color: rgba(52, 73, 94, 0.95) !important; /* Darker code background */
|
982 |
border-color: rgba(189, 195, 199, 0.3) !important;
|
983 |
}
|
|
|
984 |
/* Chatbot specific styling */
|
985 |
.gr-chatbot {
|
986 |
background-color: rgba(44, 62, 80, 0.7) !important;
|
|
|
995 |
background-color: rgba(46, 204, 113, 0.9) !important; /* Greenish background for user messages */
|
996 |
color: black !important; /* Dark text for green background */
|
997 |
}
|
998 |
+
"""
|
999 |
|
1000 |
|
1001 |
+
# Get initial providers and models for UI setup
|
1002 |
+
available_providers = get_available_providers()
|
1003 |
+
default_provider = available_providers[0] if available_providers else None
|
1004 |
+
initial_models = get_models_for_provider(default_provider) if default_provider else []
|
1005 |
+
initial_default_model = get_default_model_for_provider(default_provider) if default_provider else None
|
1006 |
+
# Ensure initial_default_model is in the initial_models list; fall back to the first model if not
|
1007 |
+
if initial_default_model not in initial_models and initial_models:
|
1008 |
+
initial_default_model = initial_models[0]
|
1009 |
|
1010 |
|
1011 |
with gr.Blocks(theme=custom_theme, css=custom_css) as demo:
|
|
|
1014 |
with gr.Row():
|
1015 |
with gr.Sidebar():
|
1016 |
gr.Markdown("## βοΈ Configuration")
|
1017 |
+
with gr.Group(): gr.Markdown("### API Keys & Tokens");
|
1018 |
+
# Single API key input; model_logic decides which provider env var to check, or uses this override
|
1019 |
+
api_key_input = gr.Textbox(label="AI Provider API Key (Optional Override)", type="password", placeholder="Paste key here or set env var (e.g., GROQ_API_KEY)");
|
1020 |
+
hf_api_key_input = gr.Textbox(label="Hugging Face Token (for building/loading)", type="password", placeholder="hf_...")
|
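# Note (assumption, not verified against model_logic): the intent of the override box above is
# presumably "UI key wins, otherwise fall back to the provider's env var", i.e. roughly
#   key = ui_api_key or os.getenv(f"{provider.upper()}_API_KEY")
# with the exact env-var naming decided inside model_logic.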
1021 |
with gr.Group(): gr.Markdown("### Hugging Face Space"); owner_name_input = gr.Textbox(label="HF Owner Name", placeholder="e.g., your-username"); space_name_input = gr.Textbox(label="HF Space Name", value="my-ai-space", placeholder="e.g., my-cool-app"); space_sdk_select = gr.Dropdown(label="Space SDK", choices=["gradio", "streamlit", "docker", "static"], value="gradio", info="Used for new/build."); load_space_button = gr.Button("π Load Existing Space", variant="secondary", size="sm")
|
1022 |
+
with gr.Group(): gr.Markdown("### AI Model Settings");
|
1023 |
+
provider_select = gr.Dropdown(label="AI Provider", choices=available_providers, value=default_provider, info="Select an AI model provider.");
|
1024 |
+
model_select = gr.Dropdown(label="AI Model", choices=initial_models, value=initial_default_model, info="Select a model.");
|
1025 |
+
system_prompt_input = gr.Textbox(label="System Prompt", lines=8, value=DEFAULT_SYSTEM_PROMPT, interactive=True)
|
1026 |
with gr.Column(scale=3):
|
1027 |
gr.Markdown("## π¬ AI Chat & Code Generation")
|
1028 |
+
# Updated chatbot avatar
|
1029 |
+
chatbot_display = gr.Chatbot(label="AI Chat", height=400, bubble_full_width=False, avatar_images=(None, "https://huggingface.co/datasets/huggingface/badges/resolve/main/huggingface-bot-avatar.svg"))
|
1030 |
+
with gr.Row(): chat_message_input = gr.Textbox(show_label=False, placeholder="Your Message...", scale=7); send_chat_button = gr.Button("Send", variant="primary", scale=1, size="lg")
|
1031 |
+
status_output = gr.Textbox(label="Chat/Process Status", interactive=False, lines=1, value="Ready.")
|
1032 |
gr.Markdown("---")
|
1033 |
with gr.Tabs():
|
1034 |
with gr.TabItem("π Formatted Space Markdown"): gr.Markdown("Complete Markdown definition for your Space."); formatted_space_output_display = gr.Textbox(label="Current Space Definition", lines=15, interactive=True, show_copy_button=True, value="*Space definition...*"); download_button = gr.DownloadButton(label="Download .md", interactive=False, size="sm")
|
1035 |
with gr.TabItem("π Detected Files Preview"):
|
|
|
1036 |
detected_files_preview = gr.Markdown(value="*Files preview...*")
|
1037 |
+
|
1038 |
gr.Markdown("---")
|
1039 |
with gr.Tabs():
|
1040 |
with gr.TabItem("π Build & Preview Space"):
|
1041 |
with gr.Row(): build_space_button = gr.Button("Build / Update Space on HF", variant="primary", scale=2); refresh_status_button = gr.Button("🔄 Refresh Space Status", scale=1)
|
1042 |
+
# Build status outputs also include updating owner/space names in the textboxes
|
1043 |
build_status_display = gr.Textbox(label="Build Operation Status", interactive=False, lines=2, value="*Build status will appear here.*"); gr.Markdown("---"); space_runtime_status_display = gr.Markdown("*Space runtime status will appear here after refresh.*"); gr.Markdown("---"); space_iframe_display = gr.HTML(value="<!-- Space Iframe -->", visible=False)
|
1044 |
with gr.TabItem("βοΈ Edit Space Files"):
|
1045 |
gr.Markdown("Select a file to view, edit, or delete. Changes are committed to HF Hub.")
|
1046 |
file_browser_dropdown = gr.Dropdown(label="Select File in Space", choices=[], interactive=True, visible=False, info="Load/build Space first.")
|
1047 |
+
file_content_editor = gr.Code(label="File Content Editor", language="python", lines=15, interactive=True)
|
1048 |
commit_message_input = gr.Textbox(label="Commit Message", placeholder="e.g., Updated app.py", value="Update via AI Space Editor")
|
1049 |
with gr.Row(): update_file_button = gr.Button("Commit Changes", variant="primary", scale=2); delete_file_button = gr.Button("🗑️ Delete Selected File", variant="stop", scale=1)
|
1050 |
edit_status_display = gr.Textbox(label="File Edit/Delete Status", interactive=False, lines=2, value="*Select file...*")
|
1051 |
|
1052 |
+
# --- Event Handlers ---
|
1053 |
+
|
1054 |
+
# Provider dropdown change event to update model dropdown
|
1055 |
+
provider_select.change(
|
1056 |
+
fn=update_models_dropdown,
|
1057 |
+
inputs=provider_select,
|
1058 |
+
outputs=model_select
|
1059 |
+
)
|
1060 |
+
|
1061 |
+
# Chat submit handler outputs
|
1062 |
+
chat_outputs = [chat_message_input, chatbot_display, status_output, detected_files_preview, formatted_space_output_display, download_button]
|
1063 |
+
# Chat submit handler inputs
|
1064 |
+
chat_inputs = [chat_message_input, chatbot_display, api_key_input, provider_select, model_select, system_prompt_input, owner_name_input, space_name_input, formatted_space_output_display] # Pass current formatted markdown as context
|
1065 |
+
|
1066 |
+
# Wire chat buttons
|
1067 |
+
send_chat_button.click(
|
1068 |
+
fn=handle_chat_submit,
|
1069 |
+
inputs=chat_inputs,
|
1070 |
+
outputs=chat_outputs
|
1071 |
+
)
|
1072 |
+
chat_message_input.submit( # Allow submitting with Enter key
|
1073 |
+
fn=handle_chat_submit,
|
1074 |
+
inputs=chat_inputs,
|
1075 |
+
outputs=chat_outputs
|
1076 |
+
)
|
1077 |
+
|
1078 |
+
# Load space outputs include clearing chat history
|
1079 |
+
load_space_outputs = [formatted_space_output_display, detected_files_preview, status_output, file_browser_dropdown, owner_name_input, space_name_input, space_iframe_display, download_button, build_status_display, edit_status_display, space_runtime_status_display, chatbot_display] # Added chatbot_display
|
1080 |
load_space_button.click(fn=handle_load_existing_space, inputs=[hf_api_key_input, owner_name_input, space_name_input], outputs=load_space_outputs)
|
1081 |
|
1082 |
+
# Build outputs now include updating owner/space name textboxes
|
1083 |
+
build_outputs = [build_status_display, space_iframe_display, file_browser_dropdown, owner_name_input, space_name_input] # Added owner_name_input, space_name_input
|
1084 |
build_space_button.click(fn=handle_build_space_button, inputs=[hf_api_key_input, space_name_input, owner_name_input, space_sdk_select, formatted_space_output_display], outputs=build_outputs)
|
1085 |
|
1086 |
+
# File edit load outputs include clearing/setting commit message and language
|
1087 |
file_edit_load_outputs = [file_content_editor, edit_status_display, commit_message_input, file_content_editor]
|
1088 |
file_browser_dropdown.change(fn=handle_load_file_for_editing, inputs=[hf_api_key_input, space_name_input, owner_name_input, file_browser_dropdown], outputs=file_edit_load_outputs)
|
1089 |
|
1090 |
+
# Commit file outputs include refreshing previews and file browser state
|
1091 |
commit_file_outputs = [edit_status_display, file_browser_dropdown, formatted_space_output_display, detected_files_preview, download_button]
|
1092 |
update_file_button.click(fn=handle_commit_file_changes, inputs=[hf_api_key_input, space_name_input, owner_name_input, file_browser_dropdown, file_content_editor, commit_message_input], outputs=commit_file_outputs)
|
1093 |
|
1094 |
+
# Delete file outputs include refreshing previews, file browser state, and clearing editor
|
1095 |
+
delete_file_outputs = [edit_status_display, file_browser_dropdown, file_browser_dropdown, file_content_editor, commit_message_input, file_content_editor, formatted_space_output_display, detected_files_preview, download_button] # Two file_browser_dropdown outputs: choices and value
|
1096 |
delete_file_button.click(fn=handle_delete_file, inputs=[hf_api_key_input, space_name_input, owner_name_input, file_browser_dropdown], outputs=delete_file_outputs)
|
1097 |
|
1098 |
+
# Refresh the Space runtime status on demand
|
1099 |
refresh_status_button.click(fn=handle_refresh_space_status, inputs=[hf_api_key_input, owner_name_input, space_name_input], outputs=[space_runtime_status_display])
|
1100 |
|
1101 |
if __name__ == "__main__":
|