ZoniaChatbot commited on
Commit
47f9fc4
verified
1 Parent(s): c7c4cf1

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +19 -24
app.py CHANGED
@@ -1,8 +1,10 @@
1
  import argparse
2
  import os
 
3
  import gradio as gr
4
  from loguru import logger
5
  from similarities import BertSimilarity
 
6
  from chatpdf import ChatPDF
7
 
8
  pwd_path = os.path.abspath(os.path.dirname(__file__))
@@ -23,9 +25,9 @@ if __name__ == '__main__':
23
  parser.add_argument("--server_port", type=int, default=8082)
24
  parser.add_argument("--share", action='store_true', help="share model")
25
  args = parser.parse_args()
 
26
  logger.info(args)
27
-
28
- # Inicializar el modelo
29
  sim_model = BertSimilarity(model_name_or_path=args.sim_model_name, device=args.device)
30
  model = ChatPDF(
31
  similarity_model=sim_model,
@@ -40,7 +42,7 @@ if __name__ == '__main__':
40
  rerank_model_name_or_path=args.rerank_model_name,
41
  )
42
  logger.info(f"chatpdf model: {model}")
43
-
44
  def predict_stream(message, history):
45
  history_format = []
46
  for human, assistant in history:
@@ -48,18 +50,14 @@ if __name__ == '__main__':
48
  model.history = history_format
49
  for chunk in model.predict_stream(message):
50
  yield chunk
51
-
52
- # Función para generar respuesta (sin yield)
53
  def predict(message, history):
54
- history_format = []
55
- for human, assistant in history:
56
- history_format.append([human, assistant])
57
- model.history = history_format
58
  response, reference_results = model.predict(message)
59
  r = response + "\n\n" + '\n'.join(reference_results)
60
- return r, history_format
 
61
 
62
- # Crear el chatbot
63
  chatbot_stream = gr.Chatbot(
64
  height=600,
65
  avatar_images=(
@@ -68,27 +66,24 @@ if __name__ == '__main__':
68
  ),
69
  bubble_full_width=False
70
  )
71
-
72
- title = " 馃ChatPDF Zonia馃 "
73
  css = """.toast-wrap { display: none !important } """
74
  examples = ['Puede hablarme del PNL?', 'Introducción a la PNL']
75
 
76
- # Crear la interfaz sin utilizar la función que usa yield
77
- chat_interface = gr.Interface(
78
- predict_stream,
79
- textbox=gr.Textbox(lines=4, placeholder="Ask me question", scale=7), # Añadir submit=True
80
  title=title,
81
- # description=description,
82
  chatbot=chatbot_stream,
83
  css=css,
84
  examples=examples,
85
  theme='soft',
86
- ).queue() # Asegúrate de habilitar la cola aquí
87
 
88
  with gr.Blocks() as demo:
89
- chat_interface.render()
90
 
91
- # Lanzar la aplicación con `.queue()`
92
- demo.queue().launch(
93
- server_name=args.server_name, server_port=args.server_port, share=args.share
94
- )
 
1
  import argparse
2
  import os
3
+
4
  import gradio as gr
5
  from loguru import logger
6
  from similarities import BertSimilarity
7
+
8
  from chatpdf import ChatPDF
9
 
10
  pwd_path = os.path.abspath(os.path.dirname(__file__))
 
25
  parser.add_argument("--server_port", type=int, default=8082)
26
  parser.add_argument("--share", action='store_true', help="share model")
27
  args = parser.parse_args()
28
+
29
  logger.info(args)
30
+
 
31
  sim_model = BertSimilarity(model_name_or_path=args.sim_model_name, device=args.device)
32
  model = ChatPDF(
33
  similarity_model=sim_model,
 
42
  rerank_model_name_or_path=args.rerank_model_name,
43
  )
44
  logger.info(f"chatpdf model: {model}")
45
+
46
  def predict_stream(message, history):
47
  history_format = []
48
  for human, assistant in history:
 
50
  model.history = history_format
51
  for chunk in model.predict_stream(message):
52
  yield chunk
53
+
 
54
  def predict(message, history):
55
+ logger.debug(message)
 
 
 
56
  response, reference_results = model.predict(message)
57
  r = response + "\n\n" + '\n'.join(reference_results)
58
+ logger.debug(r)
59
+ return r
60
 
 
61
  chatbot_stream = gr.Chatbot(
62
  height=600,
63
  avatar_images=(
 
66
  ),
67
  bubble_full_width=False
68
  )
69
+
70
+ title = "馃ChatPDF Zonia馃"
71
  css = """.toast-wrap { display: none !important } """
72
  examples = ['Puede hablarme del PNL?', 'Introducción a la PNL']
73
 
74
+ # Configura la interfaz de chat
75
+ chat_interface_stream = gr.ChatInterface(
76
+ predict_stream,
77
+ textbox=gr.Textbox(lines=4, placeholder="Hazme una pregunta", scale=7),
78
  title=title,
 
79
  chatbot=chatbot_stream,
80
  css=css,
81
  examples=examples,
82
  theme='soft',
83
+ )
84
 
85
  with gr.Blocks() as demo:
86
+ chat_interface_stream.render()
87
 
88
+ # Lanza la aplicación
89
+ demo.queue().launch(server_name=args.server_name, server_port=args.server_port, share=args.share)