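"""Gradio chat demo for unsloth/Llama-3.2-1B-Instruct served through the Hugging Face Inference API.

The script also loads the PleIAs/common_corpus dataset and prints one example entry at
startup; the chat logic itself does not read from the dataset.
"""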
import gradio as gr
from huggingface_hub import InferenceClient
from datasets import load_dataset

# Safely load the PleIAs/common_corpus dataset
def load_common_corpus():
    try:
        print("Loading dataset...")
        dataset = load_dataset("PleIAs/common_corpus")
        print("Dataset loaded successfully!")
        return dataset
    except Exception as e:
        print(f"Error loading dataset: {e}")
        return None

common_corpus = load_common_corpus()
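
# Note: PleIAs/common_corpus is a very large dataset, and a full (non-streaming) load
# downloads everything locally. For quick experiments, load_dataset(..., streaming=True)
# or a smaller subset may be preferable, though get_example_from_corpus below assumes a
# map-style (indexable) dataset rather than an iterable one.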

# Retrieve an example safely
def get_example_from_corpus(dataset, index):
    if dataset and "train" in dataset:
        try:
            return dataset["train"][index]
        except IndexError:
            print("Index out of range for dataset.")
            return {"text": "No example available"}
    else:
        print("Dataset not loaded correctly.")
        return {"text": "Dataset not available."}

# Safely initialize the inference client
def initialize_client():
    try:
        print("Initializing inference client...")
        client = InferenceClient("unsloth/Llama-3.2-1B-Instruct")
        print("Inference client initialized successfully!")
        return client
    except Exception as e:
        print(f"Error initializing inference client: {e}")
        return None

client = initialize_client()
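
# Note: InferenceClient normally picks up credentials from the HF_TOKEN environment
# variable or a cached `huggingface-cli login` token; without one, requests to the
# Inference API may be rate limited or rejected.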

# Chatbot response logic
def respond(
    message,
    history: list[tuple[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
):
    if not client:
        return "Error: Inference client not initialized."

    messages = [{"role": "system", "content": system_message}]

    # Add prior conversation turns (history is a list of (user, assistant) tuples)
    for val in history:
        if val[0]:
            messages.append({"role": "user", "content": val[0]})
        if val[1]:
            messages.append({"role": "assistant", "content": val[1]})

    # Add user message
    messages.append({"role": "user", "content": message})

    try:
        print("Sending request to model...")
        response = client.chat_completion(
            messages,
            max_tokens=max_tokens,
            temperature=temperature,
            top_p=top_p,
        ).choices[0].message.content
        print("Response received successfully!")
        return response
    except Exception as e:
        print(f"Error during inference: {e}")
        return "An error occurred while generating a response."
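
# Illustrative only (not executed): respond can also be exercised directly, outside
# Gradio, with an empty history and the same default sampling settings used below.
# reply = respond("Hello!", [], "You are a friendly Chatbot. Your name is Juninho.", 512, 0.7, 0.95)
# print(reply)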

# Example: Retrieve an entry from the dataset
example_data = get_example_from_corpus(common_corpus, 0)
print("Example from dataset:", example_data)

# Gradio interface
def launch_demo():
    try:
        demo = gr.ChatInterface(
            respond,
            additional_inputs=[
                gr.Textbox(value="You are a friendly Chatbot. Your name is Juninho.", label="System message"),
                gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
                gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
                gr.Slider(
                    minimum=0.1,
                    maximum=1.0,
                    value=0.95,
                    step=0.05,
                    label="Top-p (nucleus sampling)",
                ),
            ],
        )
        demo.launch()
    except Exception as e:
        print(f"Error launching Gradio app: {e}")

if __name__ == "__main__":
    launch_demo()