Spaces:
Sleeping
Sleeping
Samuel L Meyers
committed on
Commit
·
2e33da4
1
Parent(s):
275b73b
Actual test 7
Browse files — code/app.py +2 -2
code/app.py
CHANGED
@@ -16,7 +16,7 @@ model_path = "./starling-lm-7b-alpha.Q6_K.gguf"
|
|
16 |
|
17 |
mdlpath = hf_hub_download(repo_id="TheBloke/Starling-LM-7B-alpha-GGUF", filename=model_path, local_dir="./")
|
18 |
|
19 |
-
lcpp_model = Llama(model_path=model_path)
|
20 |
global otxt, txtinput, txtoutput, running, result
|
21 |
otxt = ""
|
22 |
running = False
|
@@ -61,7 +61,7 @@ def talk(txt, jsn):
|
|
61 |
if not jsn:
|
62 |
jsn = txt
|
63 |
if not running:
|
64 |
-
result = lcpp_model.create_chat_completion(messages=txt,stream=True
|
65 |
running = True
|
66 |
for r in result:
|
67 |
txt2 = None
|
|
|
16 |
|
17 |
mdlpath = hf_hub_download(repo_id="TheBloke/Starling-LM-7B-alpha-GGUF", filename=model_path, local_dir="./")
|
18 |
|
19 |
+
lcpp_model = Llama(model_path=model_path, n_ctx=16768)
|
20 |
global otxt, txtinput, txtoutput, running, result
|
21 |
otxt = ""
|
22 |
running = False
|
|
|
61 |
if not jsn:
|
62 |
jsn = txt
|
63 |
if not running:
|
64 |
+
result = lcpp_model.create_chat_completion(messages=txt,stream=True)
|
65 |
running = True
|
66 |
for r in result:
|
67 |
txt2 = None
|