Update app.py
app.py CHANGED

@@ -72,31 +72,26 @@ def query_model(model_name: str, messages: List[Dict[str, str]]) -> str:
         return f"{model_name} error: {str(e)}"

 def respond(message: str, history: List[List[str]]) -> str:
-    """Handle sequential model responses with
+    """Handle sequential model responses with progressive context"""
     messages = [{"role": "user", "content": message}]
     responses = []

-    #
-    models = [
-        "Qwen2.5-Coder-32B-Instruct",
-        "Qwen2.5-72B-Instruct",
-        "Llama3.3-70B-Instruct"
-    ]
+    # First model: Initial response
+    reply1 = query_model("Qwen2.5-Coder-32B-Instruct", messages)
+    responses.append(f"**Qwen2.5-Coder-32B-Instruct**:\n{reply1}")
+    messages.append({"role": "assistant", "content": reply1})

-    #
-    for model_name in models:
-        response = query_model(model_name, messages)
-        responses.append(f"**{model_name}**:\n{response}")
-
-        # Add model's response to message history for next model
-        messages.append({
-            "role": "assistant",
-            "content": f"{model_name} response: {response}"
-        })
+    # Second model: Contextualize with first response
+    reply2 = query_model("Qwen2.5-72B-Instruct", messages)
+    responses.append(f"\n\n**Qwen2.5-72B-Instruct**:\n{reply2}")
+    messages.append({"role": "assistant", "content": reply2})

-    #
-    return "\n\n".join(responses)
+    # Third model: Final synthesis
+    reply3 = query_model("Llama3.3-70B-Instruct", messages)
+    responses.append(f"\n\n**Llama3.3-70B-Instruct**:\n{reply3}")
+
+    # Return progressive responses with clear separation
+    return "\n\n".join(responses)

 # Create the Gradio interface
 chat_interface = gr.ChatInterface(
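The hunk above relies on a query_model() helper and Gradio wiring that sit outside the changed lines. The sketch below shows one way those surrounding pieces could look; it is not taken from this Space's app.py. In particular, the MODEL_REPOS mapping, the use of huggingface_hub.InferenceClient, the max_tokens value, and the interface title/description are assumptions, and respond() is condensed back into a loop rather than the three unrolled steps the commit introduces.

# Minimal sketch of the surrounding app, assuming inference via huggingface_hub.
from typing import Dict, List

import gradio as gr
from huggingface_hub import InferenceClient

# Hypothetical mapping from the short names used in respond() to full repo IDs.
MODEL_REPOS = {
    "Qwen2.5-Coder-32B-Instruct": "Qwen/Qwen2.5-Coder-32B-Instruct",
    "Qwen2.5-72B-Instruct": "Qwen/Qwen2.5-72B-Instruct",
    "Llama3.3-70B-Instruct": "meta-llama/Llama-3.3-70B-Instruct",
}

def query_model(model_name: str, messages: List[Dict[str, str]]) -> str:
    """Send the accumulated chat history to one model and return its reply."""
    try:
        client = InferenceClient(model=MODEL_REPOS[model_name])
        result = client.chat_completion(messages=messages, max_tokens=1024)
        return result.choices[0].message.content
    except Exception as e:
        return f"{model_name} error: {str(e)}"

def respond(message: str, history: List[List[str]]) -> str:
    """Chain the models so each one sees the previous models' replies."""
    messages = [{"role": "user", "content": message}]
    responses = []
    for model_name in MODEL_REPOS:
        reply = query_model(model_name, messages)
        responses.append(f"**{model_name}**:\n{reply}")
        messages.append({"role": "assistant", "content": reply})
    return "\n\n".join(responses)

# Create the Gradio interface
chat_interface = gr.ChatInterface(
    fn=respond,
    title="Sequential Multi-Model Chat",
    description="Each model answers after seeing the previous models' replies.",
)

if __name__ == "__main__":
    chat_interface.launch()

The commit itself trades the loop for three explicit, labeled steps, which makes the progressive-context flow and each model's role easier to follow in the rendered chat output.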