Update app.py
Browse files
app.py
CHANGED
@@ -75,6 +75,7 @@ const feedbackLink = `https://promptspellsmith.com/feedback`
|
|
75 |
// Think logically and methodically, asking probing questions to encourage the user to articulate their thought process and reasoning. This approach not only helps
|
76 |
"""
|
77 |
|
|
|
78 |
def format_prompt(message, history):
|
79 |
prompt = "<s>[SYSTEM] {} [/SYSTEM]".format(system_instruction)
|
80 |
for user_prompt, bot_response in history:
|
@@ -83,11 +84,14 @@ def format_prompt(message, history):
|
|
83 |
return prompt
|
84 |
|
85 |
def generate(prompt, history, temperature=0.1, max_new_tokens=25000, top_p=0.95, repetition_penalty=1.0):
|
86 |
-
# 토큰 수 계산
|
87 |
input_tokens = len(tokenizer.encode(prompt))
|
88 |
-
available_tokens = 32768 - input_tokens
|
89 |
-
max_new_tokens = min(max_new_tokens, available_tokens)
|
90 |
|
|
|
|
|
|
|
|
|
91 |
formatted_prompt = format_prompt(prompt, history)
|
92 |
|
93 |
try:
|
@@ -102,7 +106,7 @@ def generate(prompt, history, temperature=0.1, max_new_tokens=25000, top_p=0.95,
|
|
102 |
|
103 |
mychatbot = gr.Chatbot(
|
104 |
avatar_images=["./user.png", "./botm.png"],
|
105 |
-
bubble_full_width=
|
106 |
show_label=False,
|
107 |
show_copy_button=True,
|
108 |
likeable=True,
|
@@ -118,10 +122,11 @@ examples = [
|
|
118 |
|
119 |
demo = gr.Interface(
|
120 |
fn=generate,
|
121 |
-
inputs=gr.Textbox(placeholder="여기에 질문을 입력하세요...", lines=2),
|
122 |
outputs=gr.Markdown(),
|
123 |
examples=examples,
|
124 |
-
title="AIQ 코드파일럿: OpenLLM v1.12"
|
|
|
125 |
)
|
126 |
|
127 |
demo.launch(show_api=False)
|
|
|
75 |
// Think logically and methodically, asking probing questions to encourage the user to articulate their thought process and reasoning. This approach not only helps
|
76 |
"""
|
77 |
|
78 |
+
|
79 |
def format_prompt(message, history):
|
80 |
prompt = "<s>[SYSTEM] {} [/SYSTEM]".format(system_instruction)
|
81 |
for user_prompt, bot_response in history:
|
|
|
84 |
return prompt
|
85 |
|
86 |
def generate(prompt, history, temperature=0.1, max_new_tokens=25000, top_p=0.95, repetition_penalty=1.0):
|
|
|
87 |
input_tokens = len(tokenizer.encode(prompt))
|
88 |
+
available_tokens = 32768 - input_tokens
|
89 |
+
max_new_tokens = min(max_new_tokens, available_tokens)
|
90 |
|
91 |
+
if available_tokens <= 0:
|
92 |
+
yield "Error: 입력이 최대 허용 토큰 수를 초과합니다."
|
93 |
+
return
|
94 |
+
|
95 |
formatted_prompt = format_prompt(prompt, history)
|
96 |
|
97 |
try:
|
|
|
106 |
|
107 |
mychatbot = gr.Chatbot(
|
108 |
avatar_images=["./user.png", "./botm.png"],
|
109 |
+
bubble_full_width=True,
|
110 |
show_label=False,
|
111 |
show_copy_button=True,
|
112 |
likeable=True,
|
|
|
122 |
|
123 |
demo = gr.Interface(
|
124 |
fn=generate,
|
125 |
+
inputs=[gr.Textbox(label="질문을 입력하세요", placeholder="여기에 질문을 입력하세요...", lines=2), gr.JSON(label="History", value=[])],
|
126 |
outputs=gr.Markdown(),
|
127 |
examples=examples,
|
128 |
+
title="AIQ 코드파일럿: OpenLLM v1.12",
|
129 |
+
description="AIQ Codepilot과 상호작용해 보세요."
|
130 |
)
|
131 |
|
132 |
demo.launch(show_api=False)
|