Commit: "Fix Bug" — changed file: app.py
@@ -112,7 +112,7 @@ def get_completion(
         model=model,
         messages=messages,
         temperature=temperature, # this is the degree of randomness of the model's output
-
+        max_tokens=250, # this is the number of new tokens being generated
         top_p=top_p,
         top_k=top_k,
         stream=stream,
@@ -256,7 +256,7 @@ def generate(
         qa_prompt,
         system_prompt=system_prompt,
         #stream=True,
-
+        max_new_tokens=max_new_tokens,
         temperature=temperature,
         top_k=top_k,
         top_p=top_p,