import cohere
import gradio as gr
import os

# System prompt definition
prompt = """
You are a helpful chatbot. Try to help the user with their problems as well as you can and
speak in as natural a language as possible. You are a machine the user can chat with from time to time.
Be friendly and keep things simple. Your main task, however, remains helping the user
with their problems. Do not respond to offensive or illegal questions or content. Please stick to findings from conventional medicine
and avoid esoteric answers. You were developed by Tim Seufert in 2024. Please keep your answer to a maximum of 8 sentences.
If the user asks something in another language, respond in that language. Never harm the user.
The user's question is: """

def respond(message, image, chat_history):
    """
    Handle chat responses with optional image support
    """
    # Initialize Cohere client
    co = cohere.Client(api_key=os.environ.get("apikeysimple"))

    # Prepare message content
    message_content = message
    if image is not None:
        message_content += "\n(Image received)"  # Placeholder for image processing
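
    # Sketch (not active code): Gradio's Chatbot history arrives as a list of
    # (user, bot) tuples, while Cohere's legacy Client.chat_stream expects a list
    # of dicts with "role" and "message" keys. Assuming that format, the history
    # could be forwarded for conversational context roughly like this:
    #
    #     cohere_history = []
    #     for user_msg, bot_msg in chat_history:
    #         cohere_history.append({"role": "USER", "message": user_msg})
    #         cohere_history.append({"role": "CHATBOT", "message": bot_msg})
    #
    # and then passed as chat_history=cohere_history in the call below.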

    try:
        # Generate response using Cohere
        stream = co.chat_stream(
            model='command-r-plus-08-2024',
            message=f"{prompt} '{message_content}'",
            temperature=0.3,
            chat_history=[],  # Consider using chat_history for context (see the sketch above)
            prompt_truncation='AUTO',
            connectors=[{"id": "web-search"}]
        )

        # Collect response from stream
        response = "".join([
            event.text 
            for event in stream 
            if event.event_type == "text-generation"
        ])

        # Update chat history
        chat_history.append((message, response))
        return "", chat_history

    except Exception as e:
        # Append the error to the visible chat history; list.append returns None,
        # so it must not be returned directly.
        chat_history.append((message, f"Error: {str(e)}"))
        return "", chat_history

# Create Gradio interface
with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    msg = gr.Textbox()
    img = gr.Image(type="filepath")
    clear = gr.ClearButton([msg, img, chatbot])

    # Set up message submission
    msg.submit(respond, [msg, img, chatbot], [msg, chatbot])

# Launch the demo (outside the Blocks context)
demo.launch(
    share=True,
    server_name="0.0.0.0",
    allowed_paths=["*"]
)
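
# Deployment note (assumption about the hosting setup): share=True requests a
# temporary public Gradio link, and server_name="0.0.0.0" binds to all network
# interfaces, which is the usual configuration for containerized hosts such as
# Hugging Face Spaces.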