ZeusCabanas committed
Commit 8ba4838 · 1 Parent(s): 64b5a1f

fixes 4

Files changed (1)
  1. app.py +31 -16
app.py CHANGED
@@ -7,33 +7,48 @@ client = InferenceClient("AuriLab/gpt-bi-instruct-cesar")
 def format_messages(history: List[Tuple[str, str]], system_message: str, user_message: str) -> List[Dict[str, str]]:
     messages = [{"role": "system", "content": system_message}]
     messages.extend([
-        {"role": "user" if i % 2 == 0 else "assistant", "content": msg}
+        {"role": "user" if i % 2 == 0 else "assistant", "content": str(msg)}  # Convert msg to string
         for turn in history
         for i, msg in enumerate(turn)
-        if msg
+        if msg is not None
     ])
-    messages.append({"role": "user", "content": user_message})
+    messages.append({"role": "user", "content": str(user_message)})  # Convert user_message to string
     return messages
 
-def respond(message: str, history: List[Tuple[str, str]], system_message: str, max_tokens: int, temperature: float, top_p: float) -> str:
+def respond(message: str, history: List[Tuple[str, str]]) -> str:
+    # Default values for parameters
+    system_message = "You are a helpful AI assistant."
+    max_tokens = 1000
+    temperature = 0.7
+    top_p = 0.85
+
     messages = format_messages(history, system_message, message)
     response = ""
 
-    for msg in client.chat_completion(
-        messages,
-        max_tokens=max_tokens,
-        stream=True,
-        temperature=0.7,  # Increased for more variety
-        top_p=0.85,  # Adjusted for better balance
-    ):
-        token = msg.choices[0].delta.content
-        response += token
-        yield response
+    try:
+        for msg in client.chat_completion(
+            messages,
+            max_tokens=max_tokens,
+            stream=True,
+            temperature=temperature,
+            top_p=top_p,
+        ):
+            if hasattr(msg.choices[0].delta, 'content'):
+                token = msg.choices[0].delta.content
+                if token is not None:
+                    response += token
+                    yield response
+    except Exception as e:
+        return f"Error: {str(e)}"
 
 demo = gr.ChatInterface(
-    respond,
+    fn=respond,
     title="Demo GPT-BI instruct",
+    examples=["nola duzu izena?", "Nola egiten duzu?"],
+    retry_btn="Retry",
+    undo_btn="Undo",
+    clear_btn="Clear",
 )
 
 if __name__ == "__main__":
-    demo.launch()
+    demo.launch(share=False)
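
For context on the message-formatting change, below is a minimal, self-contained sketch of what the reworked format_messages produces. The function body mirrors the "+" side of the diff; the sample history, system prompt, and question are illustrative values only, not taken from the commit.

from typing import Dict, List, Tuple


def format_messages(history: List[Tuple[str, str]], system_message: str, user_message: str) -> List[Dict[str, str]]:
    # System prompt first, then the flattened chat history, then the new user message.
    messages = [{"role": "system", "content": system_message}]
    messages.extend([
        # Even positions in each (user, assistant) tuple are user turns,
        # odd positions are assistant turns; None placeholders are skipped.
        {"role": "user" if i % 2 == 0 else "assistant", "content": str(msg)}
        for turn in history
        for i, msg in enumerate(turn)
        if msg is not None
    ])
    messages.append({"role": "user", "content": str(user_message)})
    return messages


# Illustrative values (not from the commit): one prior exchange plus a new question.
history = [("Kaixo!", "Kaixo, zertan lagundu dezaket?")]
print(format_messages(history, "You are a helpful AI assistant.", "nola duzu izena?"))
# [{'role': 'system', 'content': 'You are a helpful AI assistant.'},
#  {'role': 'user', 'content': 'Kaixo!'},
#  {'role': 'assistant', 'content': 'Kaixo, zertan lagundu dezaket?'},
#  {'role': 'user', 'content': 'nola duzu izena?'}]

Each (user, assistant) tuple in Gradio's history is flattened into alternating roles via the i % 2 index, and the added str() / is not None guards keep missing or non-string turns from reaching client.chat_completion.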