Thziin committed (verified)
Commit 5e292fc · Parent: 5ace836

Update app.py

Files changed (1)
  1. app.py +34 -34
app.py CHANGED
@@ -2,45 +2,45 @@ import gradio as gr
 from huggingface_hub import InferenceClient
 from datasets import load_dataset
 
-# Safely load the PleIAs/common_corpus dataset
-def load_common_corpus():
+# Função para carregar o dataset `aicoder69/aicoder69` com tratamento de erros
+def load_aicoder_dataset():
     try:
-        print("Loading dataset...")
-        dataset = load_dataset("PleIAs/common_corpus")
-        print("Dataset loaded successfully!")
+        print("Carregando o dataset...")
+        dataset = load_dataset("aicoder69/aicoder69")
+        print("Dataset carregado com sucesso!")
         return dataset
     except Exception as e:
-        print(f"Error loading dataset: {e}")
+        print(f"Erro ao carregar o dataset: {e}")
         return None
 
-common_corpus = load_common_corpus()
+aicoder_dataset = load_aicoder_dataset()
 
-# Retrieve an example safely
-def get_example_from_corpus(dataset, index):
+# Função para recuperar um exemplo do dataset com segurança
+def get_example_from_aicoder(dataset, index):
     if dataset and "train" in dataset:
         try:
             return dataset["train"][index]
         except IndexError:
-            print("Index out of range for dataset.")
-            return {"text": "No example available"}
+            print("Índice fora do intervalo no dataset.")
+            return {"text": "Nenhum exemplo disponível."}
     else:
-        print("Dataset not loaded correctly.")
-        return {"text": "Dataset not available."}
+        print("O dataset não foi carregado corretamente.")
+        return {"text": "Dataset não disponível."}
 
-# Safely initialize the inference client
+# Inicializar o cliente de inferência com tratamento de erros
 def initialize_client():
     try:
-        print("Initializing inference client...")
+        print("Inicializando o cliente de inferência...")
         client = InferenceClient("unsloth/Llama-3.2-1B-Instruct")
-        print("Inference client initialized successfully!")
+        print("Cliente de inferência inicializado com sucesso!")
         return client
     except Exception as e:
-        print(f"Error initializing inference client: {e}")
+        print(f"Erro ao inicializar o cliente de inferência: {e}")
         return None
 
 client = initialize_client()
 
-# Chatbot response logic
+# Função de resposta do chatbot
 def respond(
     message,
     history: list[tuple[str, str]],
@@ -50,59 +50,59 @@ def respond(
     top_p,
 ):
     if not client:
-        return "Error: Inference client not initialized."
+        return "Erro: O cliente de inferência não foi inicializado."
 
     messages = [{"role": "system", "content": system_message}]
 
-    # Add historical interactions
+    # Adicionar interações históricas
     for val in history:
         if val[0]:
             messages.append({"role": "user", "content": val[0]})
         if val[1]:
             messages.append({"role": "assistant", "content": val[1]})
 
-    # Add user message
+    # Adicionar mensagem do usuário
     messages.append({"role": "user", "content": message})
 
     try:
-        print("Sending request to model...")
+        print("Enviando solicitação ao modelo...")
         response = client.chat_completion(
             messages,
             max_tokens=max_tokens,
             temperature=temperature,
             top_p=top_p,
         ).choices[0].message.content
-        print("Response received successfully!")
+        print("Resposta recebida com sucesso!")
         return response
     except Exception as e:
-        print(f"Error during inference: {e}")
-        return "An error occurred while generating a response."
+        print(f"Erro durante a inferência: {e}")
+        return "Ocorreu um erro ao gerar uma resposta."
 
-# Example: Retrieve an entry from the dataset
-example_data = get_example_from_corpus(common_corpus, 0)
-print("Example from dataset:", example_data)
+# Exemplo: Recuperar uma entrada do dataset
+example_data = get_example_from_aicoder(aicoder_dataset, 0)
+print("Exemplo do dataset:", example_data)
 
-# Gradio interface
+# Interface Gradio
 def launch_demo():
     try:
         demo = gr.ChatInterface(
             respond,
             additional_inputs=[
-                gr.Textbox(value="You are a friendly Chatbot. Your name is Juninho.", label="System message"),
-                gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
-                gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
+                gr.Textbox(value="Você é um chatbot amigável. Seu nome é Juninho.", label="Mensagem do sistema"),
+                gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Máximo de novos tokens"),
+                gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperatura"),
                 gr.Slider(
                     minimum=0.1,
                     maximum=1.0,
                     value=0.95,
                     step=0.05,
-                    label="Top-p (nucleus sampling)",
+                    label="Top-p (amostragem núcleo)",
                 ),
             ],
         )
         demo.launch()
     except Exception as e:
-        print(f"Error launching Gradio app: {e}")
+        print(f"Erro ao iniciar o aplicativo Gradio: {e}")
 
 if __name__ == "__main__":
     launch_demo()
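
For local testing, a minimal sketch of how the updated respond() could be exercised outside the Gradio UI, assuming the file above is saved as app.py on the import path; the sample history, user message, and sampling values below are illustrative assumptions, not part of the commit.

# Illustrative sketch (not part of the commit): call respond() from the updated app.py directly.
# Importing app runs its module-level code (dataset load, client init, example print).
from app import respond

# One previous (user, assistant) turn, in the tuple format gr.ChatInterface passes as history.
history = [("Oi, quem é você?", "Olá! Eu sou o Juninho.")]

reply = respond(
    message="Qual dataset você usa?",   # made-up new user message
    history=history,
    system_message="Você é um chatbot amigável. Seu nome é Juninho.",
    max_tokens=512,                     # mirrors the slider defaults in launch_demo()
    temperature=0.7,
    top_p=0.95,
)
print(reply)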