SrijitMukherjee committed on
Commit
a0d8a54
·
verified ·
1 Parent(s): 3d8291f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +10 -20
app.py CHANGED
@@ -1,29 +1,19 @@
1
  import gradio as gr
2
  from huggingface_hub import InferenceClient
3
 
4
- # Initialize the InferenceClient
5
  client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
6
 
7
- # Define the function for generating output based on a prompt
8
- def generate_output():
9
- # Initialize the prompt
10
- prompt = "Initialize your prompt here."
11
- # Generate output based on the prompt
12
  response = client.text_generation(prompt, max_new_tokens=512, temperature=0.7, top_p=0.95)
13
  return response[0]['generated_text']
14
 
15
- # Define the action to be taken when the button is clicked
16
- def button_click():
17
- # Generate output based on the prompt when the button is clicked
18
- output_textbox.update(generate_output())
 
 
 
 
19
 
20
- # Create a Gradio interface with a button and a text output
21
- button = gr.Button("Generate Output", onclick=button_click)
22
- output_textbox = gr.outputs.Textbox(label="Output")
23
-
24
- # Combine the button and the text output in a Gradio interface
25
- interface = gr.Interface(button, output_textbox)
26
-
27
- # Launch the app
28
- if __name__ == "__main__":
29
- interface.launch()
 
1
  import gradio as gr
2
  from huggingface_hub import InferenceClient
3
 
 
4
  client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
5
 
6
def get_output(prompt):
    """Generate a text completion for *prompt* using the zephyr-7b-beta endpoint.

    Parameters
    ----------
    prompt : str
        The user prompt forwarded to the hosted inference endpoint.

    Returns
    -------
    str
        The generated completion text.
    """
    # With the default details=False, text_generation returns the generated
    # string itself -- NOT a list of dicts.  The previous
    # `response[0]['generated_text']` indexed the first *character* of the
    # string with a string key and raised TypeError on every call.
    response = client.text_generation(
        prompt, max_new_tokens=512, temperature=0.7, top_p=0.95
    )
    return response
9
 
10
# Wire the generator into a simple text-in / text-out Gradio UI.
# NOTE: gr.Interface has no `button` constructor parameter -- passing
# `button="Get Output"` raised TypeError at import time, so the app could
# never start.  Gradio renders a Submit button automatically.
demo = gr.Interface(
    fn=get_output,
    inputs="text",
    outputs="text",
    title="LLaMA Gradio App",
    description="Click the button to get the output",
)

# Start the Gradio server (blocking call).
demo.launch()