import gradio as gr
from huggingface_hub import InferenceClient

"""
For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
"""
client = InferenceClient("T3lli/test")
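
# A hedged sketch of the same Inference API without streaming (not part of the
# original app; it reuses the client above and would make a real network call
# if uncommented):
#
#     reply = client.chat_completion(
#         [{"role": "user", "content": "Hello"}],
#         max_tokens=32,
#     )
#     print(reply.choices[0].message.content)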

name = "Elli"  # defined but not currently used anywhere in the app

def respond(
    message,
    history: list[tuple[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
):
    messages = [{"role": "system", "content": system_message}]

    # Replay earlier turns so the model sees the full conversation.
    for user_turn, assistant_turn in history:
        if user_turn:
            messages.append({"role": "user", "content": user_turn})
        if assistant_turn:
            messages.append({"role": "assistant", "content": assistant_turn})

    messages.append({"role": "user", "content": message})

    response = ""

    # Stream the completion and yield the growing partial response.
    # (The loop variable is named `chunk` so it does not shadow the `message` argument.)
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = chunk.choices[0].delta.content

        # Some stream chunks carry no text (e.g. the final one); skip them
        # instead of concatenating None.
        if token:
            response += token
            yield response
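
# Hedged usage note (an assumption, not part of the original app): `respond` is
# a generator, so it can also be driven directly without the UI, e.g.
#
#     for partial in respond("Hi", [], "You are a friendly Chatbot.", 64, 0.7, 0.95):
#         print(partial)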


"""
For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
"""

"""demo = gr.ChatInterface(
    respond,
    title="**Your word** \\ Score \\ Prompts left",
    description="This is a friendly chatbot.",
    additional_inputs=[
        gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.75, step=0.1, label="Temperature"),
        gr.Slider(
            minimum=0.1,
            maximum=1.0,
            value=0.95,
            step=0.05,
            label="Top-p (nucleus sampling)",
        ),
    ],
    theme="soft",
)"""

with gr.Blocks() as demo:
    title_input = gr.Textbox(label="Enter Title", value="Initial Title")
    chat_interface = gr.ChatInterface(
        respond,
        title="Initial Title",  # ChatInterface's title is a plain string; see the sketch below for a live title
        additional_inputs=[
            # respond() expects exactly these four extra inputs after (message, history).
            gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
            gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
            gr.Slider(minimum=0.1, maximum=4.0, value=0.75, step=0.1, label="Temperature"),
            gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)"),
        ],
    )
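
    # Sketch (an assumption, not in the original app): ChatInterface's `title`
    # cannot be bound to a Textbox, so a live, editable title needs its own
    # component. One option is to render it with gr.Markdown and update it on
    # change; because it is created after the chat, it appears below it.
    live_title = gr.Markdown(f"## {title_input.value}")
    title_input.change(lambda text: f"## {text}", inputs=title_input, outputs=live_title)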

if __name__ == "__main__":
    demo.launch()