eagleswim committed
Commit d9b37a9 · verified · 1 Parent(s): ddd1d6b

Update app.py

Files changed (1)
  1. app.py +4 -8
app.py CHANGED
@@ -12,12 +12,10 @@ vllm_model = LLM(model=model_name, tensor_parallel_size=1, device="cpu")
 
 
 
-def generate_response(prompt, max_tokens, temperature, top_p):
+def generate_response(prompt ):
     # Define sampling parameters
     sampling_params = SamplingParams(
-        max_tokens=max_tokens,
-        temperature=temperature,
-        top_p=top_p,
+
     )
 
     # Generate text using vLLM (input is the raw string `prompt`)
@@ -31,10 +29,8 @@ def generate_response(prompt, max_tokens, temperature, top_p):
 
 
 prompt =gr.Textbox()
-max_tokens = gr.Textbox()
-temperature = gr.Textbox()
-top_p = gr.Textbox()
-demo=gr.Interface(generate_response, inputs=[prompt, max_tokens,temperature, top_p], outputs="text")
+
+demo=gr.Interface(generate_response, inputs=[prompt ], outputs="text")
 
 # Launch the app
 demo.launch()
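
For reference, a minimal sketch of what app.py plausibly looks like after this commit: the tunable generation parameters are gone, SamplingParams() falls back to vLLM's defaults, and the Gradio interface exposes only the prompt box. The imports, the actual model id, and the body of generate_response below the SamplingParams call are not visible in this diff, so the placeholder model_name, the vllm_model.generate call, and the returned text are assumptions, not the repository's exact code.

# Minimal sketch of app.py after this commit; lines marked "assumed" are not shown in the diff.
import gradio as gr
from vllm import LLM, SamplingParams

model_name = "<model-id>"  # assumed placeholder; the real model id is not visible in the diff
vllm_model = LLM(model=model_name, tensor_parallel_size=1, device="cpu")

def generate_response(prompt):
    # Define sampling parameters (all vLLM defaults after this commit)
    sampling_params = SamplingParams()

    # Generate text using vLLM (input is the raw string `prompt`)
    outputs = vllm_model.generate([prompt], sampling_params)  # assumed call shape
    return outputs[0].outputs[0].text  # assumed: return the first completion's text

prompt = gr.Textbox()

demo = gr.Interface(generate_response, inputs=[prompt], outputs="text")

# Launch the app
demo.launch()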