Spaces · Runtime error
Commit 273a659
Parent(s): e94c1c4
Update main.py
main.py CHANGED
@@ -34,15 +34,8 @@ def generate(prompt, history, temperature=0.2, max_new_tokens=30000, top_p=0.95,
     )
     formatted_prompt = format_prompt(prompt, history)
     stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
-    output = ""
-
-    #
-
-    for response in stream:
-        output_list.append(response.token.text)
-
-    #
-    generated_text = "".join(output_list)
-    return generated_text  # Return the entire generated sequence
-
-
+
+    # Get the full output using next() on the iterator
+    generated_output = next(stream).token.text
+
+    return generated_output  # Return the entire generated sequence
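For reference, the streaming API used here (huggingface_hub's InferenceClient.text_generation with stream=True and details=True) yields one item per generated token, so next(stream).token.text reads only the first token of the response. Below is a minimal sketch of draining the stream in full, in the spirit of the removed loop; the model id and the helper name generate_full_text are placeholders and are not taken from this Space.

from huggingface_hub import InferenceClient

# Placeholder client; the actual model id used by this Space is not shown in the diff.
client = InferenceClient("mistralai/Mistral-7B-Instruct-v0.2")

def generate_full_text(formatted_prompt, **generate_kwargs):
    # With stream=True, text_generation returns an iterator that yields
    # one stream output object per generated token.
    stream = client.text_generation(
        formatted_prompt,
        **generate_kwargs,
        stream=True,
        details=True,
        return_full_text=False,
    )
    chunks = []
    for response in stream:              # iterate over every streamed token
        chunks.append(response.token.text)
    return "".join(chunks)               # join the tokens into the full sequence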