Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -223,7 +223,7 @@ def llm_call(question_prompt, model_name,
              top_p=1, n_samples=64, stop=None):
     if HUGGINGFACE:
         model_inputs = hug_tokenizer([question_prompt], return_tensors="pt").to('cuda')
-        generated_ids = hug_model.generate(**model_inputs, max_length=1400, temperature=1, num_return_sequences=
+        generated_ids = hug_model.generate(**model_inputs, max_length=1400, temperature=1, num_return_sequences=8, do_sample=True)
         responses = hug_tokenizer.batch_decode(generated_ids, skip_special_tokens=True)
         codes = []
         for response in responses: