hiwei committed on
Commit
1fd4334
·
verified ·
1 Parent(s): 129b752

extract result from llm response

Browse files
Files changed (1) hide show
  1. app.py +3 -2
app.py CHANGED
@@ -76,7 +76,8 @@ class RAGDemo(object):
76
  chain_type_kwargs={"prompt": QA_CHAIN_PROMPT},
77
  verbose=True,
78
  )
79
- return basic_qa.invoke(input_text)
 
80
 
81
  def __call__(self):
82
  with gr.Blocks() as demo:
@@ -87,7 +88,7 @@ class RAGDemo(object):
87
  with gr.Column():
88
  model_name = gr.Dropdown(
89
  choices=['gemini-1.0-pro'],
90
- value='glm-3-turbo',
91
  label="model"
92
  )
93
  api_key = gr.Textbox(placeholder="your api key for LLM", label="api key")
 
76
  chain_type_kwargs={"prompt": QA_CHAIN_PROMPT},
77
  verbose=True,
78
  )
79
+ resp = basic_qa.invoke(input_text)
80
+ return resp['result']
81
 
82
  def __call__(self):
83
  with gr.Blocks() as demo:
 
88
  with gr.Column():
89
  model_name = gr.Dropdown(
90
  choices=['gemini-1.0-pro'],
91
+ value='gemini-1.0-pro',
92
  label="model"
93
  )
94
  api_key = gr.Textbox(placeholder="your api key for LLM", label="api key")