broadfield-dev committed · verified
Commit 6948cd0 · 1 Parent(s): 08948ea

Update app.py

Files changed (1)
  1. app.py +8 -5
app.py CHANGED
@@ -132,7 +132,7 @@ def _parse_chat_stream_logic(chat_json_string, existing_files_state=None):
     message_obj = None
     if ai_chat_history and isinstance(ai_chat_history[-1], dict) and ai_chat_history[-1].get("role", "").lower() == BOT_ROLE_NAME:
         message_obj = ai_chat_history[-1]
-
+
     if not message_obj:
         # If the last message isn't the bot's or is malformed, don't update state based on it
         results["parsed_code_blocks"] = list(latest_blocks_dict.values()) # Return existing state
@@ -152,7 +152,7 @@ def _parse_chat_stream_logic(chat_json_string, existing_files_state=None):
     if structure_match:
         # Overwrite or add the structure block from the latest response
         latest_blocks_dict["File Structure (original)"] = {"filename": "File Structure (original)", "language": structure_match.group("struct_lang") or "plaintext", "code": structure_match.group("structure_code").strip(), "is_binary": False, "is_structure_block": True}
-
+
     # Find all file blocks in the latest message
     current_message_file_blocks = {}
     for match in file_pattern.finditer(content):
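The structure_match used in this hunk comes from a regex with struct_lang and structure_code named groups. The pattern itself is not part of this diff, so the following is only a hedged sketch of what such a pattern could look like (the heading marker and fence format are assumptions):

import re

# Hypothetical pattern; the real structure_pattern in app.py is not shown in this diff.
structure_pattern = re.compile(
    r"### File Structure\s*```(?P<struct_lang>\w+)?\n(?P<structure_code>.*?)```",
    re.DOTALL,
)

content = "### File Structure\n```text\napp.py\nrequirements.txt\n```"
structure_match = structure_pattern.search(content)
if structure_match:
    print(structure_match.group("struct_lang") or "plaintext")  # -> text
    print(structure_match.group("structure_code").strip())      # -> app.py / requirements.txt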
@@ -678,7 +678,7 @@ def handle_commit_file_changes(hf_api_key_ui, ui_space_name_part, ui_owner_name_
             "is_structure_block": False
         })
     # Re-sort the cache to maintain consistent order
-    parsed_code_blocks_state_cache.sort(key=lambda b: (0, b["filename"]) if b.get("is_structure_block") else (1, b["filename"]))
+    parsed_code_blocks_state_cache.sort(key=lambda b: (0, b["filename"]) if b.get("is_structure_block") else (1, b["filename"]))
 
     # Regenerate markdown and preview from the updated cache
     _formatted_md_out, _detected_preview_out, _download_btn_out = _generate_ui_outputs_from_cache(owner_to_use, ui_space_name_part)
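The re-sorted cache always lists the structure block first and the file blocks alphabetically after it, because the key maps structure blocks to group 0 and everything else to group 1 before comparing filenames. A small self-contained illustration (the sample entries are hypothetical):

# Hypothetical sample of the cache shape; only the two keys used by the sort matter here.
parsed_code_blocks_state_cache = [
    {"filename": "requirements.txt", "is_structure_block": False},
    {"filename": "File Structure (original)", "is_structure_block": True},
    {"filename": "app.py", "is_structure_block": False},
]

parsed_code_blocks_state_cache.sort(
    key=lambda b: (0, b["filename"]) if b.get("is_structure_block") else (1, b["filename"])
)

print([b["filename"] for b in parsed_code_blocks_state_cache])
# -> ['File Structure (original)', 'app.py', 'requirements.txt']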
@@ -786,7 +786,7 @@ with gr.Blocks(theme=gr.themes.Soft(primary_hue="teal", secondary_hue="orange"))
     with gr.Row():
         with gr.Sidebar():
             gr.Markdown("## ⚙️ Configuration")
-            with gr.Group(): gr.Markdown("### API Keys & Tokens"); groq_api_key_input = gr.Textbox(label="Groq API Key", type="password", placeholder="gsk_..."); hf_api_key_input = gr.Textbox(label="Hugging Face Token", type="password", placeholder="hf_...")
+            with gr.Group(): gr.Markdown("### API Keys & Tokens"); groq_api_key_input = gr.Textbox(label="Groq API Key", type="password", placeholder="gsk_...", allow_paste=True); hf_api_key_input = gr.Textbox(label="Hugging Face Token", type="password", placeholder="hf_...", allow_paste=True)
             with gr.Group(): gr.Markdown("### Hugging Face Space"); owner_name_input = gr.Textbox(label="HF Owner Name", placeholder="e.g., your-username"); space_name_input = gr.Textbox(label="HF Space Name", value="my-ai-space", placeholder="e.g., my-cool-app"); space_sdk_select = gr.Dropdown(label="Space SDK", choices=["gradio", "streamlit", "docker", "static"], value="gradio", info="Used for new/build."); load_space_button = gr.Button("🔄 Load Existing Space", variant="secondary", size="sm")
             with gr.Group(): gr.Markdown("### AI Model Settings"); groq_model_select = gr.Dropdown(label="Groq Model", choices=["mixtral-8x7b-32768", "llama3-8b-8192", "llama3-70b-8192", "gemma-7b-it"], value="llama3-8b-8192"); groq_system_prompt_input = gr.Textbox(label="System Prompt", lines=8, value=DEFAULT_SYSTEM_PROMPT, interactive=True)
         with gr.Column(scale=3):
@@ -797,7 +797,10 @@ with gr.Blocks(theme=gr.themes.Soft(primary_hue="teal", secondary_hue="orange"))
     gr.Markdown("---")
     with gr.Tabs():
         with gr.TabItem("📝 Formatted Space Markdown"): gr.Markdown("Complete Markdown definition for your Space."); formatted_space_output_display = gr.Textbox(label="Current Space Definition", lines=15, interactive=True, show_copy_button=True, value="*Space definition...*"); download_button = gr.DownloadButton(label="Download .md", interactive=False, size="sm")
-        with gr.TabItem("🔍 Detected Files Preview"): gr.Markdown(value="*Files preview...*")
+        with gr.TabItem("🔍 Detected Files Preview"):
+            # --- CORRECTED LINE ---
+            detected_files_preview = gr.Markdown(value="*Files preview...*")
+            # --- END CORRECTED LINE ---
     gr.Markdown("---")
     with gr.Tabs():
         with gr.TabItem("🚀 Build & Preview Space"):
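The hunk above is the substance of the commit: the preview tab's gr.Markdown is now bound to a detected_files_preview variable, presumably so that event handlers elsewhere in app.py can target it as an output component instead of referencing an undefined name. A minimal sketch of that pattern, assuming a hypothetical refresh button as the trigger:

import gradio as gr

with gr.Blocks() as demo:
    with gr.Tabs():
        with gr.TabItem("🔍 Detected Files Preview"):
            # Keeping a reference lets later event handlers update this component.
            detected_files_preview = gr.Markdown(value="*Files preview...*")
    refresh_button = gr.Button("Refresh preview")  # hypothetical trigger, not part of app.py
    refresh_button.click(
        fn=lambda: "### Detected files\n- app.py\n- requirements.txt",
        inputs=None,
        outputs=[detected_files_preview],
    )

if __name__ == "__main__":
    demo.launch()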
 