Commit 04cd75a · Parent: 10351a2
adding open llama

app.py CHANGED
@@ -31,6 +31,9 @@ def create_logger():
     return logger
 
 
+def clear_chat():
+    return []
+
 def create_prompt():
     prompt_template = """Asnwer the questions regarding the content in the Audio .
     Use the following context to answer.
@@ -181,6 +184,9 @@ with gr.Blocks(css=css) as demo:
     if audio_file:
         load_audio.click(loading_file, None, langchain_status, queue=False)
         load_audio.click(audio_processor, inputs=[audio_file,API_key,wav_model,LLM_option,temperature,max_new_tokens], outputs=[langchain_status], queue=False)
-
+
+    clean_chat_btn.click(clear_chat, [], chatbot)
+    question.submit(add_text, inputs=[chatbot, question], outputs=[chatbot, question]).then(bot, chatbot, chatbot)
+    submit_btn.click(add_text, inputs=[chatbot, question], outputs=[chatbot, question]).then(bot, chatbot, chatbot)
 
 demo.launch()
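Taken together, the commit wires a simple chat loop into the Gradio UI: clear_chat() resets the chatbot history, clean_chat_btn.click binds it to a clear button, and question.submit / submit_btn.click chain add_text (append the user's question) with bot (produce the answer) via .then(). The sketch below shows that wiring pattern in a self-contained form; the add_text and bot bodies are placeholders, since their real implementations live elsewhere in app.py, and the tuple-style chat history assumes a Gradio 3.x/4.x Chatbot.

import gradio as gr

def clear_chat():
    # Reset the chatbot history, as in the clear_chat() added by this commit.
    return []

def add_text(history, question):
    # Placeholder: append the user's question with an empty answer slot
    # (the real add_text in app.py may differ).
    return history + [(question, None)], ""

def bot(history):
    # Placeholder answer; in app.py this step would query the audio/LLM pipeline.
    question, _ = history[-1]
    history[-1] = (question, f"You asked: {question}")
    return history

with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    question = gr.Textbox(placeholder="Ask about the audio content...")
    with gr.Row():
        submit_btn = gr.Button("Submit")
        clean_chat_btn = gr.Button("Clear chat")

    # Same event wiring as the commit: the clear button resets the history;
    # Enter or the Submit button appends the question, then .then() runs the bot step.
    clean_chat_btn.click(clear_chat, [], chatbot)
    question.submit(add_text, inputs=[chatbot, question], outputs=[chatbot, question]).then(bot, chatbot, chatbot)
    submit_btn.click(add_text, inputs=[chatbot, question], outputs=[chatbot, question]).then(bot, chatbot, chatbot)

demo.launch()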