import cohere
import gradio as gr
import os

# System prompt definition
prompt = """
You are a helpful chatbot. Try to help the user with their problems in the best possible way and
speak in language that is as natural as possible. You are a machine the user can chat with from time to time.
Be friendly and keep things simple. Your main task, however, remains helping the user
with their problems. Do not react to offensive or illegal questions or content. Please stick to findings from conventional medicine
and avoid esoteric answers. You were developed by Tim Seufert in 2024. Please give an answer of at most 8 sentences.
If the user asks something in another language, respond in that language. Do not harm the user in any way.
The user's question is: """

def respond(message, image, chat_history):
    """
    Handle chat responses with optional image support
    """
    # Initialize Cohere client
    co = cohere.Client(api_key=os.environ.get("apikeysimple"))

    # Prepare message content
    message_content = message
    if image is not None:
        message_content += "\n(Image received)"  # Placeholder for image processing

    try:
        # Generate response using Cohere
        stream = co.chat_stream(
            model='command-r-plus-08-2024',
            message=f"{prompt} '{message_content}'",
            temperature=0.3,
            chat_history=[],  # Consider using chat_history for context
            prompt_truncation='AUTO',
            connectors=[{"id": "web-search"}]
        )

        # Collect response from stream
        response = "".join([
            event.text 
            for event in stream 
            if event.event_type == "text-generation"
        ])

        # Update chat history
        chat_history.append((message, response))
        return "", chat_history

    except Exception as e:
        # list.append returns None, so append first and then return the updated history
        chat_history.append((message, f"Error: {str(e)}"))
        return "", chat_history

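# A minimal sketch (an addition, not part of the original app) of how the Gradio
# history could be forwarded to Cohere instead of the empty chat_history=[] used above.
# It assumes the tuple-style (user, bot) pairs that gr.Chatbot stores here and
# Cohere's v1 chat format of {"role": "USER"|"CHATBOT", "message": ...} dicts.
def to_cohere_history(chat_history):
    """Convert Gradio (user, bot) message pairs into Cohere chat_history entries."""
    history = []
    for user_msg, bot_msg in chat_history:
        history.append({"role": "USER", "message": user_msg})
        history.append({"role": "CHATBOT", "message": bot_msg})
    return history
# With this helper, the chat_stream call above could pass
# chat_history=to_cohere_history(chat_history) to keep conversational context.
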
# Create Gradio interface
with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    msg = gr.Textbox()
    img = gr.Image(type="filepath")
    clear = gr.ClearButton([msg, img, chatbot])
    # Set up message submission
    msg.submit(respond, [msg, img, chatbot], [msg, chatbot])

    # Launch the demo
    demo.launch(
        share=True,
        server_name="0.0.0.0",
        allowed_paths=["*"]
    )
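    # Usage note (added as a hint, not part of the original file): the Cohere client
    # above reads its key from the "apikeysimple" environment variable, so export it
    # before starting the app, e.g.
    #   export apikeysimple="your-cohere-api-key"   # placeholder value
    # and then run this script with Python.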


#with gr.Interface() as iface:
#iface.add_textbox("Message", lines=7, label="Message")
#iface.add_image("Image")
#iface.add_textbox("Response", lines=7, label="Response")
#iface.launch()
#fn=respond,
#inputs=[gr.Textbox(lines=7, label="Message"), gr.Image(label="Image")],
#outputs=[gr.Textbox(lines=7, label="Response")],
#title="SimplestMachine",
#description="A simple chatbot interface powered by Cohere.",
#allow_screenshot=True,
#allow_file_upload=True,
#theme="huggingface",