Thziin committed on
Commit 354eea2 · verified · 1 Parent(s): 47544b4

Update app.py

Files changed (1)
  app.py +23 -18
app.py CHANGED
@@ -1,14 +1,20 @@
 import gradio as gr
 from huggingface_hub import InferenceClient
+from datasets import load_dataset  # Import datasets library
 
-"""
-For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
-"""
-#client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
-#client = InferenceClient("meta-llama/Llama-3.2-1B-Instruct")
-#client = InferenceClient("microsoft/Phi-3.5-mini-instruct")
-client = InferenceClient("PleIAs/common_corpus")
+# Load the PleIAs/common_corpus dataset
+common_corpus = load_dataset("PleIAs/common_corpus")
 
+# Function to retrieve an example from the dataset
+def get_example_from_corpus(dataset, index):
+    if "train" in dataset:
+        example = dataset["train"][index]
+        return example
+    else:
+        raise ValueError("Dataset does not have a 'train' split.")
+
+# Initialize inference client
+client = InferenceClient("unsloth/Llama-3.2-1B-Instruct")
 
 def respond(
     message,
@@ -20,36 +26,36 @@ def respond(
 ):
     messages = [{"role": "system", "content": system_message}]
 
+    # Add historical interactions
     for val in history:
         if val[0]:
             messages.append({"role": "user", "content": val[0]})
         if val[1]:
             messages.append({"role": "assistant", "content": val[1]})
 
+    # Add user's message
     messages.append({"role": "user", "content": message})
 
-    response = ""
-
-
-    mensagens = client.chat_completion(
+    # Get response from model
+    response = client.chat_completion(
         messages,
         max_tokens=max_tokens,
         temperature=temperature,
         top_p=top_p,
-    )
-    response = mensagens.choices[0].message.content
+    ).choices[0].message.content
 
     return response
 
+# Example usage of the dataset
+example_data = get_example_from_corpus(common_corpus, index=0)
+print("Example from PleIAs/common_corpus:", example_data)
 
-"""
-For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
-"""
+# Gradio ChatInterface
 demo = gr.ChatInterface(
     respond,
     additional_inputs=[
         gr.Textbox(value="You are a friendly Chatbot. Your name is Juninho.", label="System message"),
-        gr.Slider(minimum=1, maximum=5096, value=1024, step=1, label="Max new tokens"),
+        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
         gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
         gr.Slider(
             minimum=0.1,
@@ -61,6 +67,5 @@ demo = gr.ChatInterface(
     ],
 )
 
-
 if __name__ == "__main__":
     demo.launch()
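
A quick way to sanity-check the updated respond() flow outside the Space is to call it directly. The sketch below is not part of the commit; it assumes the parameters elided from the hunk (history, system_message, max_tokens, temperature, top_p) follow the stock gr.ChatInterface template, and that the module is importable as app.

# Minimal sketch under the assumptions above: call respond() without the Gradio UI.
from app import respond  # hypothetical import; loading app also triggers the dataset load and the example print

reply = respond(
    message="Hi, what's your name?",
    history=[("Hello", "Hi! I'm Juninho.")],  # (user, assistant) pairs, as iterated in respond()
    system_message="You are a friendly Chatbot. Your name is Juninho.",
    max_tokens=256,
    temperature=0.7,
    top_p=0.95,
)
print(reply)

Note that load_dataset("PleIAs/common_corpus") pulls the full corpus at import time; if that is too heavy for the Space, datasets also supports load_dataset("PleIAs/common_corpus", split="train", streaming=True), which yields examples lazily instead of downloading everything up front.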