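# Gradio chat tab backed by the Gemini API (google-generativeai).
# Reads GEMINI_API_KEY_PREVIEW from a .env file and exposes `gemini_chatbot_preview`,
# a multimodal gr.ChatInterface with model, system-prompt and sampling controls.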
import os
import google.generativeai as genai
import gradio as gr
from dotenv import load_dotenv

# Load the API key from a local .env file and configure the Gemini client.
load_dotenv()

GEMINI_API_KEY_PREVIEW = os.getenv("GEMINI_API_KEY_PREVIEW")
model_list = [
    "gemini-1.5-pro",
    "gemini-1.5-pro-002",
    "gemini-1.5-pro-exp-0827",
    "gemini-1.5-flash",
    "gemini-1.5-flash-002",
    "gemini-1.5-flash-8b-exp-0924",
]

genai.configure(api_key=GEMINI_API_KEY_PREVIEW)

def clear_chat_history_preview():
    # Reset the backend Gemini chat session (it only exists after the first message).
    if "chat" in globals():
        chat.history = []

def undo_chat_preview():
    # Drop the last user/model exchange from the backend Gemini chat session.
    if "chat" in globals():
        last_send, last_received = chat.rewind()

def transform_history(history):
    # Convert Gradio's (user, model) history pairs into the Gemini chat history format.
    # File attachments appear in the Gradio history as tuples, so only plain-text
    # turns are replayed to the model.
    new_history = []
    for user_msg, model_msg in history:
        if isinstance(user_msg, str) and isinstance(model_msg, str):
            new_history.append({"role": "user", "parts": [user_msg]})
            new_history.append({"role": "model", "parts": [model_msg]})
    return new_history

def chatbot_preview(message, history, model_id, system_message, max_tokens, temperature, top_p):
    # Rebuild the model on every turn so changes to the UI controls take effect immediately.
    global model, chat
    model = genai.GenerativeModel(
        model_name=model_id,
        system_instruction=system_message,
        safety_settings=[
            {
                "category": "HARM_CATEGORY_HARASSMENT",
                "threshold": "BLOCK_NONE"
            },
            {
                "category": "HARM_CATEGORY_HATE_SPEECH",
                "threshold": "BLOCK_NONE"
            },
            {
                "category": "HARM_CATEGORY_SEXUALLY_EXPLICIT",
                "threshold": "BLOCK_NONE"
            },
            {
                "category": "HARM_CATEGORY_DANGEROUS_CONTENT",
                "threshold": "BLOCK_NONE"
            }
        ],
        generation_config={
            "temperature": temperature,
            "top_p": top_p,
            "top_k": 64,
            "max_output_tokens": max_tokens,
            "response_mime_type": "text/plain",
        }
    )
    # Replay the prior Gradio turns so the model keeps conversational context.
    chat = model.start_chat(history=transform_history(history))
    message_text = message["text"]
    message_files = message["files"]
    if message_files:
        # Upload any attached files to the Gemini Files API and send them with the text.
        image_uris = [genai.upload_file(path=file_path["path"]) for file_path in message_files]
        message_content = [message_text] + image_uris
    else:
        message_content = [message_text]

    # stream=True returns an iterable response; resolve() waits for all chunks,
    # after which the full reply is available as response.text.
    response = chat.send_message(message_content, stream=True)
    response.resolve()

    return response.text

# Chat display component reused by the ChatInterface below.
gemini_chatbot_interface_preview = gr.Chatbot(
    height=500,
    likeable=True,
    avatar_images=(
        None,
        "https://media.roboflow.com/spaces/gemini-icon.png"
    ),
    show_copy_button=True,
    show_share_button=True,
    render_markdown=True
)

clear_chat_button_preview = gr.ClearButton(
    components=[gemini_chatbot_interface_preview],
    value="🗑️ Clear"
)

undo_chat_button_preview = gr.Button(
    value="↩️ Undo"
)

# Multimodal chat UI; the additional inputs are passed to chatbot_preview in order.
gemini_chatbot_preview = gr.ChatInterface(
    fn=chatbot_preview,
    chatbot=gemini_chatbot_interface_preview,
    multimodal=True,
    clear_btn=clear_chat_button_preview,
    undo_btn=undo_chat_button_preview,
    additional_inputs=[
        gr.Dropdown(
            choices=model_list,
            # Default model; must be one of the choices in model_list.
            value="gemini-1.5-flash",
            label="Models"
        ),
        gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
        gr.Slider(minimum=1, maximum=8192, value=4096, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=1.0, value=1, step=0.1, label="Temperature"),
        gr.Slider(
            minimum=0.1,
            maximum=1.0,
            value=0.95,
            step=0.05,
            label="Top-p (nucleus sampling)",
        ),
    ],
)
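
# Optional local entry point (an assumption: this module looks intended to be imported
# by a larger app that composes several tabs). Running the file directly launches just
# this preview chat UI for quick manual testing.
if __name__ == "__main__":
    gemini_chatbot_preview.launch()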