Update app.py
app.py CHANGED
@@ -55,11 +55,11 @@ def search(query):
 client_gemma = InferenceClient("google/gemma-1.1-7b-it")
 client_mixtral = InferenceClient("NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO")
 client_llama = InferenceClient("meta-llama/Meta-Llama-3-8B-Instruct")
+messages = []
 
 # Define the main chat function
 def respond(message, history):
     global messages  # Make messages global for persistent storage
-    messages = []  # Initialize messages list (this gets overwritten each turn)
     vqa = ""
 
     # Handle image processing
@@ -98,6 +98,8 @@ def respond(message, history):
     response = response.replace('\\"', '"')
     print(f"\n{response}")
 
+    messages.append({"role": "assistant", "content": f"<functioncall>{str(response)}</functioncall>"})
+
     # Process and return the response based on the function call
     try:
         json_data = json.loads(str(response))
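
The net effect of the two hunks: `messages` now lives at module scope so it persists between calls to `respond` instead of being reset every turn, and each function-call response from the model is appended to it. Below is a minimal sketch of that pattern; apart from `messages`, `respond`, and the `<functioncall>` wrapper, the names and the hard-coded response are simplified stand-ins, not the actual app.py implementation.

# Minimal sketch of the persistence pattern this commit adopts. The hard-coded
# `response` is a placeholder; the real app.py builds it from an InferenceClient call.

messages = []  # module-level, so it survives across chat turns instead of being reset


def respond(message, history):
    global messages  # reuse the same list on every call
    messages.append({"role": "user", "content": message})

    # Placeholder for the generated text (app.py gets this from the model)
    response = '{"name": "web_search", "arguments": {"query": "example"}}'

    # Record the model's function call so later turns can see it
    messages.append({"role": "assistant", "content": f"<functioncall>{response}</functioncall>"})
    return response


# After two turns, both exchanges are still in `messages`:
respond("hello", [])
respond("search for cats", [])
print(len(messages))  # 4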