Warlord-K committed
Commit 8846d1e · 1 Parent(s): 6f56f7d
Files changed (1)
  1. app.py +2 -2
app.py CHANGED
@@ -112,7 +112,7 @@ def get_completion(
         model=model,
         messages=messages,
         temperature=temperature, # this is the degree of randomness of the model's output
-        max_new_tokens=250, # this is the number of new tokens being generated
+        max_tokens=250, # this is the number of new tokens being generated
         top_p=top_p,
         top_k=top_k,
         stream=stream,
@@ -256,7 +256,7 @@ def generate(
         qa_prompt,
         system_prompt=system_prompt,
         #stream=True,
-        max_tokens=max_new_tokens,
+        max_new_tokens=max_new_tokens,
         temperature=temperature,
         top_k=top_k,
         top_p=top_p,
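The commit swaps the token-limit keyword in each call so it matches what the underlying API expects: the chat-completions-style call in get_completion now passes max_tokens, while the generate call now passes max_new_tokens. A minimal sketch of that distinction, using hypothetical names (token_limit_kwargs, the backend labels) that are not part of app.py — chat-completions-style clients generally take max_tokens, while Hugging Face generation utilities take max_new_tokens:

# Minimal sketch, not from app.py: route one "tokens to generate" value
# to whichever keyword the backend expects.
def token_limit_kwargs(backend: str, limit: int) -> dict:
    # "chat" stands in for an OpenAI-style chat-completions client,
    # "hf" for a transformers/TGI-style generation call.
    if backend == "chat":
        return {"max_tokens": limit}
    if backend == "hf":
        return {"max_new_tokens": limit}
    raise ValueError(f"unknown backend: {backend}")

# Example usage (hypothetical): unpack the right keyword into the call.
# completion = client.chat.completions.create(
#     model=model, messages=messages, **token_limit_kwargs("chat", 250))
# output = pipe(prompt, **token_limit_kwargs("hf", 250))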