mgbam committed on
Commit 51d8a3f · verified · 1 Parent(s): f22daae

Upload 6 files

Files changed (4)
  1. api_clients.py +4 -2
  2. app.py +3 -1
  3. chat_processing.py +20 -0
  4. file_processing.py +21 -1
api_clients.py CHANGED
@@ -10,11 +10,13 @@ from config import (
     GENERIC_SYSTEM_PROMPT_WITH_SEARCH, FollowUpSystemPrompt
 )
 from chat_processing import (
-    history_to_messages, messages_to_history, create_multimodal_message,
+    history_to_messages, messages_to_history,
     remove_code_block, apply_search_replace_changes, send_to_sandbox,
     history_to_chatbot_messages, get_gradio_language
 )
-from file_processing import extract_text_from_file
+from file_processing import (
+    extract_text_from_file, create_multimodal_message
+)
 from web_extraction import extract_website_content
 
 # HF Inference Client
app.py CHANGED
@@ -9,8 +9,10 @@ from config import (
 from api_clients import generation_code, tavily_client
 from chat_processing import (
     clear_history, history_to_chatbot_messages, update_image_input_visibility,
-    get_gradio_language, send_to_sandbox
+    get_gradio_language, send_to_sandbox,
 )
+from file_processing import create_multimodal_message
+from web_extraction import enhance_query_with_search
 
 
 def demo_card_click(e: gr.EventData):
chat_processing.py CHANGED
@@ -88,6 +88,26 @@ def update_image_input_visibility(model):
     is_glm_vl = model.get("id") == "THUDM/GLM-4.1V-9B-Thinking"
     return gr.update(visible=is_ernie_vl or is_glm_vl)
 
+def create_multimodal_message(text, image=None):
+    """Create a multimodal message with text and optional image"""
+    if image is None:
+        return {"role": "user", "content": text}
+
+    from file_processing import process_image_for_model
+    content = [
+        {
+            "type": "text",
+            "text": text
+        },
+        {
+            "type": "image_url",
+            "image_url": {
+                "url": process_image_for_model(image)
+            }
+        }
+    ]
+
+    return {"role": "user", "content": content}
 def apply_search_replace_changes(original_html: str, changes_text: str) -> str:
     """Apply search/replace changes to HTML content"""
     if not changes_text.strip():
file_processing.py CHANGED
@@ -67,4 +67,24 @@ def extract_text_from_file(file_path):
         else:
             return ""
     except Exception as e:
-        return f"Error extracting text: {e}"
+        return f"Error extracting text: {e}"
+
+def create_multimodal_message(text, image=None):
+    """Create a multimodal message with text and optional image"""
+    if image is None:
+        return {"role": "user", "content": text}
+
+    content = [
+        {
+            "type": "text",
+            "text": text
+        },
+        {
+            "type": "image_url",
+            "image_url": {
+                "url": process_image_for_model(image)
+            }
+        }
+    ]
+
+    return {"role": "user", "content": content}
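For context, a minimal usage sketch of the relocated helper after this commit. It assumes the repo's file_processing module is importable and that process_image_for_model(image) returns a URL-like string (e.g. a data URL) suitable for an OpenAI-style "image_url" part; the prompt text and the file-path argument below are illustrative assumptions, not part of the diff.

    from file_processing import create_multimodal_message

    # Text-only: content stays a plain string.
    text_msg = create_multimodal_message("Summarize the uploaded document")
    # -> {"role": "user", "content": "Summarize the uploaded document"}

    # Text + image: content becomes a list with "text" and "image_url" parts,
    # where the URL is produced by process_image_for_model(image).
    image_msg = create_multimodal_message("Describe this screenshot",
                                          image="screenshot.png")  # path is an assumption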