Update llm.py
llm.py CHANGED
@@ -165,7 +165,8 @@ elif thinker in "70b|405b":
 LLM_HOST += f"__{round(CTXLEN/1024)}k_ctxlen"
 who_are_you()
 
-
+flash_chat = thinker_chat
+chat = thinker_chat
 
 from prompts import summary_template, docchat_template
 from prompts import contextual_template, clean_view_template
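The net effect of the hunk is that both chat entry points become aliases of the thinker backend: any caller of flash_chat(...) or chat(...) is served by the same function object as thinker_chat(...). A minimal sketch of the pattern, assuming thinker_chat is a callable defined earlier in llm.py (the placeholder body below is illustrative, not the repository's implementation):

```python
# Minimal sketch only; everything except the names flash_chat, chat and
# thinker_chat is an assumption for illustration.
def thinker_chat(prompt: str) -> str:
    # Stand-in for the call that routes a prompt to the larger "thinker" model.
    return f"[thinker] {prompt}"

# As in the diff above: both entry points now alias the thinker backend,
# so existing call sites need no changes to be rerouted.
flash_chat = thinker_chat
chat = thinker_chat

assert flash_chat is thinker_chat and chat is thinker_chat
print(chat("hello"))
```

Aliasing the names (rather than wrapping thinker_chat in new functions) keeps the change to two lines and means later reassignment of thinker_chat alone would not affect the aliases, which already point at the original function object.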