Update app.py
app.py
CHANGED
@@ -39,7 +39,7 @@ llm = Llama(model_path = 'Llama-2-ko-7B-chat-gguf-q4_0.bin',
             n_ctx=2048,
             )
 # App code
-def
+def chat(x):
     output = llm(f"다음은 A와 B의 역할극이야. 너는 B야. A와 대화하고 있어. 친구에게 친근하고 간결하게 잘 대답해줘.\n\n### A:\n{x}\n\n### B:\n", max_tokens=80, stop=["###"], echo=True)
     return output['choices'][0]['text']

The Korean prompt in the f-string reads (translated): "The following is a role-play between A and B. You are B, talking with A. Give your friend a friendly, concise answer."
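For context, a minimal sketch of what the full app.py might look like after this commit. Only the Llama(...) call, the chat() function, and the model filename are confirmed by the diff; the Gradio wiring at the bottom is an assumption about how the Space exposes the function, not something shown in this commit.

# Minimal sketch of app.py after this commit (assumptions flagged in comments).
from llama_cpp import Llama
import gradio as gr  # assumption: the Space serves chat() through a Gradio interface

# Load the quantized Korean Llama-2 chat model (path and n_ctx come from the diff).
llm = Llama(model_path='Llama-2-ko-7B-chat-gguf-q4_0.bin',
            n_ctx=2048,
            )

# App code
def chat(x):
    # Role-play prompt (Korean): "You are B, talking with A; give your friend a
    # friendly, concise answer." Generation stops at the next "###" marker.
    output = llm(f"다음은 A와 B의 역할극이야. 너는 B야. A와 대화하고 있어. 친구에게 친근하고 간결하게 잘 대답해줘.\n\n### A:\n{x}\n\n### B:\n",
                 max_tokens=80, stop=["###"], echo=True)
    # With echo=True, the returned text includes the prompt followed by B's reply.
    return output['choices'][0]['text']

# Assumed UI wiring -- not part of this diff.
gr.Interface(fn=chat, inputs="text", outputs="text").launch()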