Spaces: Raven7 / Runtime error

Raven7 committed · Commit 9fd2710 · verified · 1 Parent(s): eb04e6a

Update app.py

Files changed (1)
  1. app.py +18 -16
app.py CHANGED
@@ -1,21 +1,21 @@
 import gradio as gr
 from huggingface_hub import InferenceClient
 import os
 import pandas as pd
 
 client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1", token=os.getenv("HF_TOKEN"))
 
 def load_prompts():
     prompts = pd.read_csv("prompts.csv")
     return prompts
 
 def respond(
+    systemmessage,
     message,
     history,
-    systemmessage,
     maxtokens,
     temperature,
     top_p,
     prompts,
 ):
     messages = [{"role": "system", "content": systemmessage}]
@@ -30,36 +30,38 @@ def respond(
 
     response = ""
 
     for message in client.chat_completion(
         messages,
         max_tokens=maxtokens,
         stream=True,
         temperature=temperature,
         top_p=top_p,
     ):
         token = message.choices[0].delta.content
 
         response += token
         yield response
 
 prompts = load_prompts()
 
 demo = gr.ChatInterface(
     respond,
     inputs=[
-        gr.Textbox(value="반드시 한글로 답변하라. 너의 이름은 '한글로'입니다. 출력시 markdown 형식으로 출력하며 한글(한국어)로 출력되게 하고 필요하면 출력문을 한글로 번역하여 출력하라. 너는 항상 친절하고 자세하게 답변을 하라. 너는 대화 시작시 상대방의 이름을 물어보고 호칭은 '친구'을 사용할것. 반드시 한글로 된 '반말'로 답변할것. 너는 Assistant 역할에 충실하여야 한다. 너", label="System message"),
-        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
-        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
+        gr.Textbox(label="시스템 메시지", value="친구, 반드시 한글로 답변하라. 너의 이름은 '한글로'입니다. 출력시 markdown 형식으로 출력하며 한글(한국어)로 출력되게 하고 필요하면 출력문을 한글로 번역하여 출력하라. 너는 항상 친절하고 자세하게 답변을 하라. 너는 대화 시작시 상대방의 이름을 물어보고 호칭은 '친구'을 사용할것. 반드시 한글로 된 '반말'로 답변할것. 너는 Assistant 역할에 충실하여야 한다. 너"),
+        gr.Textbox(label="사용자 입력"),
+        gr.State(default=[]),
+        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="최대 새로운 토큰"),
+        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="온도"),
         gr.Slider(
             minimum=0.1,
             maximum=1.0,
             value=0.95,
             step=0.05,
-            label="Top-p (nucleus sampling)",
+            label="Top-P (핵심 샘플링)",
         ),
     ],
     outputs="text",
 )
 
 if __name__ == "__main__":
     demo.launch()
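
For reference, a minimal self-contained sketch of the streaming chat pattern this file implements, wired through gr.ChatInterface's additional_inputs hook rather than the inputs/outputs keywords used in the diff above; the HF_TOKEN variable, the English labels, and the parameter names here are illustrative assumptions, not part of this commit:

# Illustrative sketch only: assumes an HF_TOKEN env var and gr.ChatInterface's
# additional_inputs hook; labels and defaults roughly mirror the file above.
import os

import gradio as gr
from huggingface_hub import InferenceClient

client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1", token=os.getenv("HF_TOKEN"))

def respond(message, history, system_message, max_tokens, temperature, top_p):
    # gr.ChatInterface calls fn(message, history, *additional_inputs).
    messages = [{"role": "system", "content": system_message}]
    for user_msg, bot_msg in history:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if bot_msg:
            messages.append({"role": "assistant", "content": bot_msg})
    messages.append({"role": "user", "content": message})

    # Stream partial completions so the chat window updates token by token.
    response = ""
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        response += chunk.choices[0].delta.content or ""
        yield response

demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Textbox(value="You are a friendly assistant.", label="System message"),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)"),
    ],
)

if __name__ == "__main__":
    demo.launch()

gr.ChatInterface manages the message box and chat history itself and passes any additional_inputs components after (message, history), which is why the sketch keeps that argument order.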