Spaces:
Runtime error
Runtime error
Update app.py
Browse files
app.py
CHANGED
@@ -40,10 +40,10 @@ llm = Llama(model_path = 'Llama-2-ko-7B-chat-gguf-q4_0.bin',
|
|
40 |
)
|
41 |
# App code
|
42 |
def chat(x):
    """Generate a short reply to the user message *x* with the local Llama model.

    The prompt frames a role-play in Korean: the model plays "B" answering
    its friend "A" (the user). Returns the completion text with the echoed
    prompt stripped off the front.
    """
    # Korean prompt, restored from a mojibake-corrupted literal (UTF-8 bytes
    # mis-decoded as ISO-8859-7). It reads: "The following is a role-play
    # between A and B. You are B, talking with A. Answer your friend in a
    # friendly and concise way."
    prom = f"다음은 A와 B의 상황 극이야. 너는 B야. A와 대화하고 있어. 친구에게 친근하고 간결하게 잘 대답해줘.\n\n### A:\n{x}\n\n### B:\n"
    # stop=["###"] halts generation before the model begins a new turn;
    # echo=True makes the output contain the prompt, which is sliced off below.
    output = llm(prom, max_tokens=20, stop=["###"], echo=True)
    # NOTE(review): the trailing [:-1] drops the completion's final character
    # (presumably a newline from the model) — confirm that is intentional.
    return output['choices'][0]['text'][len(prom):-1]
|
46 |
-
|
47 |
|
48 |
with gr.Blocks() as demo:
|
49 |
count = 0
|
|
|
40 |
)
|
41 |
# App code
|
42 |
def chat(x):
    """Return a fixed placeholder reply, ignoring the user message *x*.

    The original llama.cpp call was disabled in this commit (the Space hit a
    runtime error), so every input receives the same canned Korean answer,
    "AI 응답입니다." ("This is an AI response.").
    """
    # NOTE(review): *x* is intentionally unused while the model call is
    # stubbed out; re-enable the llm(...) pipeline to produce real replies.
    # The literal below is restored from a mojibake-corrupted string that
    # contained an embedded NEL line break (a syntax error as rendered).
    return "AI 응답입니다."
|
47 |
|
48 |
with gr.Blocks() as demo:
|
49 |
count = 0
|