Amirizaniani committed · Commit a3e70ba · verified · 1 Parent(s): fba902b

Update app.py

Files changed (1)
  1. app.py +11 -6
app.py CHANGED
@@ -277,21 +277,26 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
     gr.Markdown("To tailor the generation of these five prompts from your original question, you can adjust the relevance and diversity scores. The relevance score determines how closely the generated prompts should align with the original question, while the diversity score dictates the variance among the prompts themselves.")
     gr.Markdown("Upon completion, please provide your email address. We will compile and send the answers to you promptly.")

-    llm_dropdown = gr.Dropdown([("Llama", "TheBloke/Llama-2-7B-Chat-GGML"), ("Falcon", "TheBloke/Falcon-180B-Chat-GGUF"), ("Zephyr", "TheBloke/zephyr-quiklang-3b-4K-GGUF"), ("Vicuna", "TheBloke/vicuna-33B-GGUF"), ("Claude", "TheBloke/claude2-alpaca-13B-GGUF"), ("Alpaca", "TheBloke/LeoScorpius-GreenNode-Alpaca-7B-v1-GGUF")], label="Large Language Model")
-    file_upload = gr.File(label="Upload a File with Questions", file_types=["xlsx"])
+    llm_dropdown = gr.Dropdown(choices=[
+        ("Llama", "TheBloke/Llama-2-7B-Chat-GGML"),
+        ("Falcon", "TheBloke/Falcon-180B-Chat-GGUF"),
+        ("Zephyr", "TheBloke/zephyr-quiklang-3b-4K-GGUF"),
+        ("Vicuna", "TheBloke/vicuna-33B-GGUF"),
+        ("Claude", "TheBloke/claude2-alpaca-13B-GGUF"),
+        ("Alpaca", "TheBloke/LeoScorpius-GreenNode-Alpaca-7B-v1-GGUF")],
+        label="Large Language Model")
+    file_upload = gr.File(label="Upload a File with Questions", file_types=[".xlsx"])
     relevance_slider = gr.Slider(0, 100, value=70, step=1, label="Relevance")
     diversity_slider = gr.Slider(0, 100, value=25, step=1, label="Diversity")

     submit_button = gr.Button("Submit")
-    #output_textbox = gr.Textbox(label="Output")
     download_button = gr.File(label="Download Processed File")

     def on_submit(llm, file, relevance, diversity):
         result, output_file = process_inputs(llm, file, relevance, diversity)
-        return result, output_file
-
-    submit_button.click(fn=on_submit, inputs=[llm_dropdown, file_upload, relevance_slider, diversity_slider], outputs=[download_button])
+        return output_file

+    submit_button.click(fn=on_submit, inputs=[llm_dropdown, file_upload, relevance_slider, diversity_slider], outputs=download_button)

 # Launch the Gradio app
 demo.launch()
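
Note on the changed return value: on_submit now returns only output_file, which matches the single gr.File component wired via outputs=download_button (a gr.File output expects a file path rather than a (text, file) tuple). The sketch below illustrates the process_inputs contract this assumes; the function body, the pandas/openpyxl dependency, and the processed_questions.xlsx filename are illustrative placeholders, not the Space's actual implementation.

import pandas as pd  # assumed dependency for reading the uploaded .xlsx

def process_inputs(llm, file, relevance, diversity):
    # Hypothetical stand-in for the real function defined earlier in app.py.
    # 'file' is the gr.File upload value; older Gradio versions pass a tempfile
    # wrapper whose .name attribute holds the path, newer ones pass the path itself.
    path = file if isinstance(file, str) else file.name
    questions = pd.read_excel(path)
    # Placeholder "processing": the real app generates prompt variants with the
    # selected llm, guided by the relevance/diversity settings.
    questions["relevance"] = relevance
    questions["diversity"] = diversity
    output_file = "processed_questions.xlsx"
    questions.to_excel(output_file, index=False)
    # Returns (status, path); on_submit forwards only the path, which is what
    # the gr.File download component needs.
    return "Done", output_file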