import gradio as gr
import os
import time
import google.generativeai as palm

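# Configure the PaLM client with the API key stored in the `palm_key` environment variable.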
palm.configure(api_key=os.environ.get("palm_key"))

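# Default generation settings forwarded to palm.chat() in bot() below.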
defaults = {
    'model': 'models/chat-bison-001',
    'temperature': 0.25,
    'candidate_count': 1,
    'top_k': 40,
    'top_p': 0.95,
}

context = "You're a computer failure assistant"

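# A single example exchange that primes the model's tone for the chat.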
examples = [
    [
        "Hey my computer is broken",
        "Hey, what is the issue with your computer?"
    ]
]

# user_message = ['']
# history = ['']


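# Build the UI: a chat window, a message textbox, and a clear button.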
with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    msg = gr.Textbox()
    clear = gr.ClearButton([msg, chatbot])

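    # Append the user's message to the history (bot reply left as None for now)
    # and disable the textbox while the response is generated.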
    def user(user_message, history):
        return gr.update(value="", interactive=False), history + [[user_message, None]]
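
    # Query PaLM with the conversation so far and stream the reply into the
    # chat window one character at a time to simulate typing.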
    def bot(history):
        # Flatten the Gradio history (a list of [user, bot] pairs, where the
        # newest bot slot is still None) into the flat message list that
        # palm.chat() expects, then request a reply using the generation defaults.
        messages = [m for pair in history for m in pair if m is not None]
        response = palm.chat(
            **defaults,
            context=context,
            examples=examples,
            messages=messages,
        )
        bot_message = response.last or ""  # .last is None if the reply was blocked
        history[-1][1] = ""
        for character in bot_message:
            history[-1][1] += character
            time.sleep(0.05)
            yield history
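
    # Submitting the message runs user() first, then streams bot()'s reply,
    # and finally re-enables the textbox once the response is complete.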
    response = msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
        bot, chatbot, chatbot
    )
    response.then(lambda: gr.update(interactive=True), None, [msg], queue=False)

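# Queueing is required because bot() is a generator that yields partial replies.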
demo.queue()
demo.launch()