Hugging Face Spaces (status: Sleeping) — commit "fix error content" — Browse files
File changed: app.py
app.py — BEFORE this commit (lines removed by the commit are marked "-"):

@@ -31,7 +31,7 @@ hf_client = InferenceClient(api_key=HF_TOKEN)
 31       # Function to process message and get response
 32       async def get_ai_response(message_content):
 33           try:
-34               messages = [{
                 [NOTE: the removed line 34 appears truncated in this view — its full content was not captured]
 35               response = ""
 36               stream = hf_client.chat.completions.create(
 37                   model="Qwen/Qwen2.5-72B-Instruct",
@@ -42,11 +42,17 @@ async def get_ai_response(message_content):
 42                   stream=True
 43               )
 44               for chunk in stream:
-45
-46
-47
                 [NOTE: three lines (45–47) were removed here; their content was not captured in this view]
 48               return response if response else "I couldn't generate a response."
 49           except Exception as e:
 50               return f"An error occurred: {str(e)}"
 51
 52       @client.event
|
|
|
app.py — AFTER this commit (lines added by the commit are marked "+"):

 31       # Function to process message and get response
 32       async def get_ai_response(message_content):
 33           try:
+34               messages = [{"role": "user", "content": message_content}]
 35               response = ""
 36               stream = hf_client.chat.completions.create(
 37                   model="Qwen/Qwen2.5-72B-Instruct",
 42                   stream=True
 43               )
 44               for chunk in stream:
+45                   # Safely handle the chunk content
+46                   try:
+47                       delta_content = chunk.choices[0].delta.content
+48                       if delta_content is not None:  # Only append if content exists
+49                           response += delta_content
+50                   except (AttributeError, IndexError) as e:
+51                       logging.warning(f"Skipping invalid chunk: {e}")
+52                       continue
 53               return response if response else "I couldn't generate a response."
 54           except Exception as e:
+55               logging.error(f"Error in get_ai_response: {e}")
 56               return f"An error occurred: {str(e)}"
 57
 58       @client.event