Sunghokim committed · verified
Commit 5ae642c · 1 Parent(s): 094a65d

Update app.py

Files changed (1)
  1. app.py +10 -10
app.py CHANGED
@@ -7,20 +7,20 @@ import json
 import pyarrow.parquet as pq
 
 # Hugging Face 토큰 확인
-hf_token = "새로운 토큰"
+hftoken = "새로운 토큰"
 
-if not hf_token:
-    raise ValueError("HF_TOKEN 환경 변수가 설정되지 않았습니다.")
+if not hftoken:
+    raise ValueError("H 환경 변수가 설정되지 않았습니다.")
 
 # 모델 정보 확인
-api = HfApi(token=hf_token)
+api = HfApi(token=hftoken)
 
 try:
-    client = InferenceClient("meta-llama/Meta-Llama-3-70B-Instruct", token=hf_token)
+    client = InferenceClient("meta-llama/Meta-Llama-3-70B-Instruct", token=hftoken)
 except Exception as e:
     print(f"Error initializing InferenceClient: {e}")
     # 대체 모델을 사용하거나 오류 처리를 수행하세요.
-    # 예: client = InferenceClient("gpt2", token=hf_token)
+    # 예: client = InferenceClient("gpt2", token=hftoken)
 
 # 현재 스크립트의 디렉토리를 기준으로 상대 경로 설정
 current_dir = os.path.dirname(os.path.abspath(__file__))
@@ -34,7 +34,7 @@ try:
     print(f"컬럼: {df.columns}")
 except Exception as e:
     print(f"Parquet 파일 로드 중 오류 발생: {e}")
-    df = pd.DataFrame(columns=['instruction', 'responsea'])  # 빈 Datarame 생성
+    df = pd.DataFrame(columns=['instruction', 'responsea'])  # 빈 DataFrame 생성
 
 def get_answer(question):
     matching_answer = df[df['instruction'] == question]['responsea'].values
@@ -69,7 +69,7 @@ def respond(
     full_prompt += f"Human: {message}\nAI:"
 
     API_URL = "https://api-inference.huggingface.co/models/meta-llama/Meta-Llama-3-70B-Instruct"
-    headers = {"Authorization": f"Bearer {hf_token}"}
+    headers = {"Authorization": f"Bearer {hftoken}"}
 
     def query(payload):
         response = requests.post(API_URL, headers=headers, json=payload)
@@ -114,13 +114,13 @@ demo = gr.ChatInterface(
         Parquet 파일에 없는 내용에 대해서는 적절한 대답을 생성해 주세요.
         """, label="시스템 프롬프트"),
         gr.Slider(minimum=1, maximum=4000, value=1000, step=1, label="Max new tokens"),
-        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="emperature"),
+        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="temperature"),
         gr.Slider(
             minimum=0.1,
             maximum=1.0,
             value=0.95,
             step=0.05,
-            label="op-p (nucleus sampling)",
+            label="top-p (nucleus sampling)",
        ),
     ],
     examples=[
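For context, the second hunk touches the Parquet-backed lookup that get_answer() performs. Below is a minimal sketch, not the committed file: it loads a Parquet file into a pandas DataFrame and returns an exact match on the 'instruction' column. The Parquet file name is an assumption (the diff does not show it); the column names follow the diff.

```python
import os
import pandas as pd

# Resolve a path relative to this script, as app.py does.
current_dir = os.path.dirname(os.path.abspath(__file__))
parquet_path = os.path.join(current_dir, "train.parquet")  # hypothetical file name

try:
    df = pd.read_parquet(parquet_path)
    print(f"Columns: {df.columns}")
except Exception as e:
    print(f"Failed to load the Parquet file: {e}")
    df = pd.DataFrame(columns=["instruction", "responsea"])  # empty fallback, as in the diff

def get_answer(question):
    # Exact match on the 'instruction' column; return the stored response or None.
    matching = df[df["instruction"] == question]["responsea"].values
    return matching[0] if len(matching) > 0 else None
```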
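The respond() hunk posts to the hosted Inference API with a bearer token. Here is a hedged sketch of that call that reads the token from the HF_TOKEN environment variable (the variable the original error message refers to) instead of hard-coding it; the payload shape follows the standard text-generation form and is not shown in the diff.

```python
import os
import requests

API_URL = "https://api-inference.huggingface.co/models/meta-llama/Meta-Llama-3-70B-Instruct"

# Read the token from the environment rather than embedding it in app.py.
hf_token = os.getenv("HF_TOKEN")
if not hf_token:
    raise ValueError("HF_TOKEN environment variable is not set.")
headers = {"Authorization": f"Bearer {hf_token}"}

def query(payload):
    # POST the payload to the hosted model and return the decoded JSON response.
    response = requests.post(API_URL, headers=headers, json=payload)
    response.raise_for_status()
    return response.json()

# Hypothetical call; parameter names follow the Inference API text-generation task.
result = query({
    "inputs": "Human: Hello\nAI:",
    "parameters": {"max_new_tokens": 1000, "temperature": 0.7, "top_p": 0.95},
})
```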
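The last hunk only relabels two sliders. As a reference, this is a minimal sketch of how those additional_inputs reach the chat callback in gr.ChatInterface; the respond() signature is an assumption based on the common Gradio chatbot template, not the committed file, and only the sliders and labels come from the diff.

```python
import gradio as gr

def respond(message, history, system_message, max_tokens, temperature, top_p):
    # Placeholder body: the real app builds a prompt and queries the model here.
    return f"(temperature={temperature}, top_p={top_p}) {message}"

demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Textbox(value="You are a helpful assistant.", label="시스템 프롬프트"),
        gr.Slider(minimum=1, maximum=4000, value=1000, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="temperature"),
        gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="top-p (nucleus sampling)"),
    ],
)

if __name__ == "__main__":
    demo.launch()
```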