File size: 3,097 Bytes
2cc7027
2d59812
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2cc7027
 
 
 
 
 
 
 
 
 
 
 
 
2d59812
2cc7027
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
12f10e6
148a716
 
 
 
 
 
2cc7027
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
import gradio as gr
from mistralai.client import MistralClient
from mistralai.models.chat_completion import ChatMessage


def get_stream_chat_completion(
    message, chat_history, model, api_key, system=None, **kwargs
):
    """Stream completion text chunks from the Mistral chat API.

    Builds the full conversation transcript (optional system prompt, prior
    user/assistant turns, then the new user message) and yields each
    non-empty content delta as it arrives from the streaming endpoint.

    Args:
        message: The new user message to send.
        chat_history: Iterable of (user_message, assistant_message) pairs.
        model: Mistral model identifier (e.g. "mistral-tiny").
        api_key: Mistral API key used to construct the client.
        system: Optional system prompt; omitted from the transcript if None.
        **kwargs: Extra parameters forwarded to ``chat_stream``
            (e.g. temperature, top_p, max_tokens).

    Yields:
        str: Each streamed content delta, skipping None deltas.
    """
    transcript = []
    if system is not None:
        transcript.append(ChatMessage(role="system", content=system))
    for user_turn, assistant_turn in chat_history:
        transcript.append(ChatMessage(role="user", content=user_turn))
        transcript.append(ChatMessage(role="assistant", content=assistant_turn))
    transcript.append(ChatMessage(role="user", content=message))

    stream = MistralClient(api_key=api_key).chat_stream(
        model=model,
        messages=transcript,
        **kwargs,
    )
    for chunk in stream:
        delta = chunk.choices[0].delta.content
        if delta is not None:
            yield delta

def respond_stream(
    message,
    chat_history,
    api_key,
    model,
    temperature,
    top_p,
    max_tokens,
    system,
):
    """Gradio ChatInterface callback: stream the assistant reply incrementally.

    Yields the accumulated response text after every chunk so the chat UI
    updates progressively. If the API call fails (e.g. an invalid key makes
    the Mistral client raise) or the stream yields nothing, a Gradio warning
    is shown instead of letting the traceback crash the interface.

    Args:
        message: New user message from the chat box.
        chat_history: List of (user, assistant) message pairs.
        api_key: Mistral API key textbox value.
        model: Selected model name.
        temperature: Sampling temperature slider value.
        top_p: Nucleus-sampling slider value.
        max_tokens: Max tokens slider value (float from the slider; cast to int).
        system: System-message textbox value; empty string means "no system prompt".

    Yields:
        str: The response accumulated so far (empty string on total failure).
    """
    response = ""
    received_anything = False
    try:
        for chunk in get_stream_chat_completion(
            message=message,
            chat_history=chat_history,
            model=model,
            api_key=api_key,
            temperature=temperature,
            top_p=top_p,
            max_tokens=int(max_tokens),  # slider delivers a float
            system=system if system else None,  # treat "" as no system prompt
        ):
            response += chunk
            # Set the flag before yielding so a consumer that stops iterating
            # early still counts the stream as having produced output.
            received_anything = True
            yield response
    except Exception:
        # An invalid API key (or network failure) raises from the client
        # rather than yielding nothing, so the original post-loop check never
        # fired. Surface the problem as a UI warning and keep any partial text.
        gr.Warning("Error: Invalid API Key")
        yield response
        return
    if not received_anything:
        gr.Warning("Error: Invalid API Key")
        yield ""

# Custom CSS for the header row: vertically centers the title text against the
# 80px logo and left-aligns both.
css = """
.header-text p {line-height: 80px !important; text-align: left; font-size: 26px;}
.header-logo {text-align: left}
"""

# Top-level UI definition. Component variables created here (api_key, model,
# temperature, top_p, max_tokens, system) are wired into the ChatInterface
# below via additional_inputs, so they must be constructed first.
with gr.Blocks(title="Mistral Playground", css=css) as mistral_playground:
    # Header: company logo next to the playground title.
    with gr.Row():
        with gr.Column(scale=1, min_width=80):
            gr.Image("tt-logo.jpg", width=80, height=80, show_download_button=False, show_share_button=False, interactive=False, show_label=False, elem_id="thinktecture-logo", elem_classes="header-logo", container=False)
        with gr.Column(scale=11):
            gr.Markdown("Thinktecture Mistral AI Playground", elem_classes="header-text")
            
    # Credentials and model selection.
    with gr.Row():
        api_key = gr.Textbox(lines=1, label="Mistral API Key")
        model = gr.Radio(
            choices=["mistral-tiny", "mistral-small", "mistral-medium"],
            value="mistral-tiny",
        )
    # Sampling parameters forwarded to the chat completion call.
    # NOTE(review): top_p has no explicit value=, so it starts at the slider
    # minimum (0.01) — confirm that is the intended default.
    with gr.Row():
        temperature = gr.Slider(minimum=0.01, maximum=1.0, value=0.2, step=0.1, label="Temperature")
        top_p = gr.Slider(minimum=0.01, maximum=1.0, step=0.01, label="Top P")
        max_tokens = gr.Slider(minimum=1, maximum=16000, step=500, label="Max Tokens", value=4000)

    # System prompt plus the chat widget itself; the extra components are
    # passed to respond_stream as positional arguments after (message, history).
    with gr.Row():
        system = gr.Textbox(lines=10, label="System Message")
        gr.ChatInterface(
            respond_stream,
            additional_inputs=[
                api_key,
                model,
                temperature,
                top_p,
                max_tokens,
                system,
            ],
        )

# Start the Gradio server (blocking call).
mistral_playground.launch()