seawolf2357 committed
Commit 0cc97e8 · verified · 1 Parent(s): f8f486f

Update app.py

Files changed (1):
  app.py +12 -15
app.py CHANGED
@@ -52,23 +52,20 @@ def format_prompt(message, history):
 
 def generate(prompt, history=[], temperature=0.1, max_new_tokens=10000, top_p=0.95, repetition_penalty=1.0):
     global total_tokens_used
-    input_tokens = len(tokenizer.encode(prompt))
-    total_tokens_used += input_tokens
-    available_tokens = 32768 - total_tokens_used
-    if available_tokens <= 0:
-        return f"Error: 입력이 최대 허용 토큰 수를 초과합니다. Total tokens used: {total_tokens_used}"
-
-    formatted_prompt = format_prompt(prompt, history)
-    output_accumulated = ""
+    input_tokens = tokenizer.encode(prompt)
+    total_tokens_used += len(input_tokens)
+    if total_tokens_used >= 32768:
+        return "Error: 입력이 최대 허용 토큰 수를 초과하였습니다."
     try:
-        stream = client.text_generation(formatted_prompt, temperature=temperature, max_new_tokens=min(max_new_tokens, available_tokens),
-                                        top_p=top_p, repetition_penalty=repetition_penalty, do_sample=True, seed=42, stream=True)
-        for response in stream:
-            output_part = response['generated_text'] if 'generated_text' in response else str(response)
-            output_accumulated += output_part
-        return output_accumulated + f"\n\n---\nTotal tokens used: {total_tokens_used}"
+        response = client(text=prompt, temperature=temperature, max_tokens=max_new_tokens)
+        response_text = response.get('generated_text', '')
+        if "티커" in prompt:
+            ticker = prompt.split()[-1]
+            response_text += "\n" + fetch_ticker_info(ticker)
+        return response_text
     except Exception as e:
-        return f"Error: {str(e)}\nTotal tokens used: {total_tokens_used}"
+        return f"오류 발생: {str(e)}"
+
 
 def postprocess(history):
     user_prompt = history[-1][0]
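
Note on the changed client call: the removed code used client.text_generation(...) from huggingface_hub's InferenceClient, while the added line client(text=prompt, temperature=temperature, max_tokens=max_new_tokens) assumes the client object is callable with those keyword arguments and returns a dict containing 'generated_text'. If client is still an InferenceClient, a non-streaming call with the old sampling parameters would look roughly like the sketch below; the model name is illustrative, not taken from this commit.

    # Sketch only: assumes `client` is a huggingface_hub.InferenceClient,
    # as the removed code suggests; the model name is illustrative.
    from huggingface_hub import InferenceClient

    client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")

    def generate_once(formatted_prompt, temperature=0.1, max_new_tokens=512,
                      top_p=0.95, repetition_penalty=1.0):
        # Non-streaming text generation; returns the generated string directly.
        return client.text_generation(
            formatted_prompt,
            temperature=temperature,
            max_new_tokens=max_new_tokens,
            top_p=top_p,
            repetition_penalty=repetition_penalty,
            do_sample=True,
            seed=42,
        )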
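
The new branch also calls fetch_ticker_info(ticker), which does not appear in this hunk. A minimal sketch of what such a helper could look like, assuming the yfinance package and a one-line text summary as the return value (the real helper in app.py may differ):

    # Sketch only: fetch_ticker_info is referenced but not defined in this hunk.
    # Assumes the yfinance package is available.
    import yfinance as yf

    def fetch_ticker_info(ticker: str) -> str:
        """Return a one-line text summary for a stock ticker symbol."""
        try:
            info = yf.Ticker(ticker).info  # metadata dict for the symbol
            name = info.get("shortName", ticker)
            price = info.get("regularMarketPrice", "N/A")
            currency = info.get("currency", "")
            return f"{name} ({ticker}): {price} {currency}".strip()
        except Exception as e:
            return f"Could not fetch ticker info for {ticker}: {e}"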