MaxLSB committed
Commit 2918965 · verified · 1 Parent(s): cee13f4

Update app.py

Files changed (1)
  1. app.py +7 -9
app.py CHANGED
@@ -6,22 +6,20 @@ client = InferenceClient("MaxLSB/LeCarnet-8M")
 
 
 def respond(
-    message,
+    prompt,
     max_tokens,
     temperature,
     top_p,
 ):
     response = ""
 
-    for message in client.chat_completion(
-        messages,
-        max_tokens=max_tokens,
-        stream=True,
+    for token in client.text_generation(
+        prompt=prompt,
+        max_new_tokens=max_tokens,
         temperature=temperature,
         top_p=top_p,
+        stream=True,
     ):
-        token = message.choices[0].delta.content
-
         response += token
         yield response
 
@@ -29,8 +27,8 @@ def respond(
 demo = gr.ChatInterface(
     respond,
     additional_inputs=[
-        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
-        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
+        gr.Slider(minimum=1, maximum=512, value=512, step=1, label="Max new tokens"),
+        gr.Slider(minimum=0.1, maximum=2.0, value=0.7, step=0.1, label="Temperature"),
         gr.Slider(
             minimum=0.1,
             maximum=1.0,
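
For reference, the core of this change is swapping the chat-completion call for streamed text generation. Below is a minimal standalone sketch of the new call, assuming huggingface_hub is installed; the prompt text, generation parameters, and print loop are illustrative placeholders, not part of app.py.

from huggingface_hub import InferenceClient

client = InferenceClient("MaxLSB/LeCarnet-8M")

response = ""
# With stream=True (and the default details=False), text_generation yields
# each newly generated token as a plain string, so they can be concatenated
# directly, just as the updated respond() does.
for token in client.text_generation(
    prompt="Once upon a time",  # placeholder prompt, not from the commit
    max_new_tokens=64,
    temperature=0.7,
    top_p=0.95,
    stream=True,
):
    response += token
    print(token, end="", flush=True)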