Update app.py
Browse files
app.py
CHANGED
@@ -51,15 +51,9 @@ def chat_with_text(user_message, max_new_tokens=250):
|
|
51 |
return assistant_reply
|
52 |
|
53 |
# Resim ve/veya metin tabanlı sohbet fonksiyonu
|
54 |
-
def
|
55 |
global history
|
56 |
|
57 |
-
if history is None: # Eğer `history` verilmemişse global `history`yi kullan
|
58 |
-
history = []
|
59 |
-
|
60 |
-
user_message = message.get("text", "")
|
61 |
-
image = message.get("image", None)
|
62 |
-
|
63 |
if image: # Resim varsa
|
64 |
response = describe_image(image, user_message)
|
65 |
else: # Sadece metin mesajı varsa
|
@@ -68,27 +62,33 @@ def bot_streaming(message, history=None, max_new_tokens=250):
|
|
68 |
return response
|
69 |
|
70 |
# Gradio arayüzü
|
71 |
-
|
72 |
-
|
73 |
-
|
74 |
-
|
75 |
-
|
76 |
-
|
77 |
-
|
78 |
-
|
79 |
-
|
80 |
-
label="
|
81 |
-
|
82 |
-
|
83 |
-
|
84 |
-
|
85 |
-
|
86 |
-
|
87 |
-
|
88 |
-
|
89 |
-
|
90 |
-
|
91 |
-
)
|
|
|
|
|
|
|
|
|
|
|
|
|
92 |
|
93 |
if __name__ == "__main__":
|
94 |
demo.launch(debug=True)
|
|
|
51 |
return assistant_reply
|
52 |
|
53 |
# Chat function for image and/or text input
|
54 |
+
def process_input(user_message="", image=None, max_new_tokens=250):
|
55 |
global history
|
56 |
|
|
|
|
|
|
|
|
|
|
|
|
|
57 |
if image: # Resim varsa
|
58 |
response = describe_image(image, user_message)
|
59 |
else: # Sadece metin mesajı varsa
|
|
|
62 |
return response
|
63 |
|
64 |
# Gradio interface: two-column layout wired to process_input.
with gr.Blocks() as demo:
    gr.Markdown("# Multimodal Chat Assistant")
    gr.Markdown(
        "This demo combines text and image understanding using Moondream2 for visual tasks and LLaMA for conversational AI."
    )

    with gr.Row():
        # Left column: user inputs (message, optional image, generation length).
        with gr.Column():
            message_box = gr.Textbox(label="Enter your message")
            picture_box = gr.Image(label="Upload an image (optional)", type="pil")
            tokens_slider = gr.Slider(
                minimum=10,
                maximum=500,
                value=250,
                step=10,
                label="Maximum number of new tokens to generate",
            )
            send_button = gr.Button("Submit")

        # Right column: the assistant's reply.
        with gr.Column():
            reply_box = gr.Textbox(label="Assistant Reply", lines=10)

    # On click, route (text, image, token budget) through process_input
    # and display the returned string in the output textbox.
    send_button.click(
        fn=process_input,
        inputs=[message_box, picture_box, tokens_slider],
        outputs=reply_box,
    )
|
92 |
|
93 |
# Script entry point: start the Gradio server with debug output enabled.
if __name__ == "__main__":
    demo.launch(debug=True)
|