leilaaaaa committed on
Commit d605348 · verified · 1 Parent(s): 4770ccc

Update app.py

Files changed (1)
  1. app.py +28 -27
app.py CHANGED
@@ -26,39 +26,40 @@ def respond(
     top_p,
     image=None
 ):
-    try:
-        messages = [{"role": "system", "content": system_message}]
+    messages = [{"role": "system", "content": system_message}]

-        for val in history:
-            if val[0]:
-                messages.append({"role": "user", "content": val[0]})
-            if val[1]:
-                messages.append({"role": "assistant", "content": val[1]})
+    for val in history:
+        if val[0]:
+            messages.append({"role": "user", "content": val[0]})
+        if val[1]:
+            messages.append({"role": "assistant", "content": val[1]})

-        messages.append({"role": "user", "content": message})
+    messages.append({"role": "user", "content": message})

-        if image:
-            # Handle single image input
-            image_b64 = image_to_base64(image)  # Convert image to base64
+    if image:
+        # Convert image to base64
+        if isinstance(image, Image.Image):
+            image_b64 = image_to_base64(image)
             messages.append({"role": "user", "content": "Image uploaded", "image": image_b64})
+        else:
+            # Handle multiple images if necessary
+            for img in image:
+                image_b64 = image_to_base64(img)
+                messages.append({"role": "user", "content": "Image uploaded", "image": image_b64})

-        # Call Hugging Face model for response
-        response = ""
-        for message in client.chat_completion(
-            messages,
-            max_tokens=max_tokens,
-            stream=True,
-            temperature=temperature,
-            top_p=top_p,
-        ):
-            token = message.choices[0].delta.content
+    # Call Hugging Face model for response
+    response = ""
+    for message in client.chat_completion(
+        messages,
+        max_tokens=max_tokens,
+        stream=True,
+        temperature=temperature,
+        top_p=top_p,
+    ):
+        token = message.choices[0].delta.content

-            response += token
-            yield response
-
-    except Exception as e:
-        print(f"Error in respond function: {str(e)}")
-        yield f"Error occurred: {str(e)}"
+        response += token
+        yield response


 # Debugging print statements
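
Both branches of the updated image handling call an image_to_base64 helper that is defined elsewhere in app.py and does not appear in this diff. A minimal sketch of what such a helper might look like, assuming a PIL Image input and PNG encoding (both assumptions, not confirmed by this commit):

import base64
import io

from PIL import Image


def image_to_base64(image: Image.Image) -> str:
    # Serialize the PIL image to an in-memory buffer, then base64-encode it.
    # PNG is an assumed format; the actual helper in app.py may differ.
    buffer = io.BytesIO()
    image.save(buffer, format="PNG")
    return base64.b64encode(buffer.getvalue()).decode("utf-8")

In the new else branch, passing each element of an image list through a helper like this yields one base64 payload per uploaded file, each appended to messages as its own entry.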