leilaaaaa committed on
Commit
40b3e0b
·
verified ·
1 Parent(s): dbcfad5

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +56 -43
app.py CHANGED
@@ -14,6 +14,7 @@ def image_to_base64(image):
14
  img_str = base64.b64encode(buffered.getvalue()).decode('utf-8')
15
  return img_str
16
 
 
17
  # Function to interact with LLAVA model
18
  def respond(
19
  message,
@@ -24,53 +25,65 @@ def respond(
24
  top_p,
25
  image=None
26
  ):
27
- messages = [{"role": "system", "content": system_message}]
 
 
 
 
 
 
 
 
 
28
 
29
- for val in history:
30
- if val[0]:
31
- messages.append({"role": "user", "content": val[0]})
32
- if val[1]:
33
- messages.append({"role": "assistant", "content": val[1]})
34
 
35
- messages.append({"role": "user", "content": message})
 
 
 
 
 
 
 
 
 
36
 
37
- if image:
38
- # Convert image to base64
39
- image_b64 = image_to_base64(image)
40
- messages.append({"role": "user", "content": "Image uploaded", "image": image_b64})
41
 
42
- # Call Hugging Face model for response
43
- response = ""
44
- for message in client.chat_completion(
45
- messages,
46
- max_tokens=max_tokens,
47
- stream=True,
48
- temperature=temperature,
49
- top_p=top_p,
50
- ):
51
- token = message.choices[0].delta.content
52
 
53
- response += token
54
- yield response
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
55
 
56
- # Create a Gradio interface
57
- demo = gr.Interface(
58
- fn=respond,
59
- inputs=[
60
- gr.Textbox(label="Message"),
61
- gr.Image(label="Upload Medical Image", type="pil")
62
- ],
63
- outputs=gr.Textbox(label="Response", placeholder="Model response will appear here..."),
64
- title="LLAVA Model - Medical Image and Question",
65
- description="Upload a medical image and ask a specific question about the image for a medical description.",
66
- additional_inputs=[
67
- gr.Textbox(label="System message", value="You are a friendly Chatbot."),
68
- gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
69
- gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
70
- gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)")
71
- ]
72
- )
73
 
74
- # Launch the Gradio interface
75
- if __name__ == "__main__":
76
- demo.launch()
 
14
  img_str = base64.b64encode(buffered.getvalue()).decode('utf-8')
15
  return img_str
16
 
17
# Function to interact with LLAVA model
def respond(
    message,
    history,
    system_message,
    max_tokens,
    temperature,
    top_p,
    image=None
):
    """Stream a chat completion for *message*, optionally attaching an image.

    Parameters
    ----------
    message : str
        The current user message.
    history : iterable of (user, assistant) pairs
        Prior turns; falsy entries on either side are skipped.
    system_message : str
        System prompt placed first in the message list.
    max_tokens, temperature, top_p :
        Sampling parameters forwarded to ``client.chat_completion``.
    image : PIL.Image or None
        Optional image, base64-encoded via ``image_to_base64``.

    Yields
    ------
    str
        The accumulated response text after each streamed token, or a
        single error string if anything raises.
    """
    # NOTE(review): signature reconstructed from body usage — the diff view
    # elides the middle parameters; confirm against the full file.
    try:
        messages = [{"role": "system", "content": system_message}]

        # Replay prior conversation turns in order.
        for user_turn, assistant_turn in history:
            if user_turn:
                messages.append({"role": "user", "content": user_turn})
            if assistant_turn:
                messages.append({"role": "assistant", "content": assistant_turn})

        messages.append({"role": "user", "content": message})

        if image:
            # Convert image to base64
            image_b64 = image_to_base64(image)
            # NOTE(review): chat_completion may ignore the non-standard
            # "image" key on a text message — verify against the API in use.
            messages.append({"role": "user", "content": "Image uploaded", "image": image_b64})

        # Call Hugging Face model for response. Use a distinct loop name:
        # the original shadowed the `message` parameter here.
        response = ""
        for chunk in client.chat_completion(
            messages,
            max_tokens=max_tokens,
            stream=True,
            temperature=temperature,
            top_p=top_p,
        ):
            token = chunk.choices[0].delta.content
            # The final streamed chunk can carry a None delta; skip it so
            # `response += token` cannot raise TypeError.
            if token:
                response += token
                yield response

    except Exception as e:
        print(f"Error in respond function: {str(e)}")
        yield f"Error occurred: {str(e)}"
 
 
 
 
 
 
 
61
 
62
# Debugging print statements
print("Starting Gradio interface setup...")
try:
    # gr.inputs / gr.outputs were removed in Gradio 3.x; use the top-level
    # component classes instead. Also, `additional_inputs` is a parameter of
    # gr.ChatInterface, not gr.Interface — ChatInterface additionally supplies
    # the (message, history) pair that respond expects, which gr.Interface
    # never wired up. Extra inputs are listed in respond's parameter order.
    demo = gr.ChatInterface(
        respond,
        title="LLAVA Model - Medical Image and Question",
        description="Upload a medical image and ask a specific question about the image for a medical description.",
        additional_inputs=[
            gr.Textbox(label="System message", value="You are a friendly Chatbot."),
            gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
            gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
            gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)"),
            gr.Image(label="Upload Medical Image", type="pil"),
        ],
    )

    # Launch the Gradio interface
    if __name__ == "__main__":
        print("Launching Gradio interface...")
        demo.launch()

except Exception as e:
    # Top-level boundary: surface setup failures instead of crashing the Space.
    print(f"Error during Gradio setup: {str(e)}")