Update main.py
main.py CHANGED
@@ -76,37 +76,6 @@ def format_prompt(message, history):
     now = datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")
     prompt += f"[{now}] [INST] {message} [/INST]"
     return prompt
-
-#--------------------------------------------------- Generazione TESTO Asincrono ------------------------------------------------------
-@app.post("/GeneraAsincrono")
-async def read_root_async(request: Request, input_data: InputData):
-    input_text = input_data.input
-    temperature = input_data.temperature
-    max_new_tokens = input_data.max_new_tokens
-    top_p = input_data.top_p
-    repetition_penalty = input_data.repetition_penalty
-    history = []
-    async with aiohttp.ClientSession() as session:
-        tasks = [generate_async(input_data.input, history, input_data.temperature, input_data.max_new_tokens, input_data.top_p, input_data.repetition_penalty) for _ in range(10)]
-        responses = await asyncio.gather(*tasks)
-        return {"responses": responses}
-
-async def generate_async(prompt, history, temperature=0.2, max_new_tokens=30000, top_p=0.95, repetition_penalty=1.0):
-    temperature = float(temperature)
-    if temperature < 1e-2:
-        temperature = 1e-2
-    top_p = float(top_p)
-    generate_kwargs = dict(
-        temperature=temperature,
-        max_new_tokens=max_new_tokens,
-        top_p=top_p,
-        repetition_penalty=repetition_penalty,
-        do_sample=True,
-        seed=42,
-    )
-    formatted_prompt = format_prompt(prompt, history)
-    output = client.text_generation(formatted_prompt, **generate_kwargs, stream=False, details=False)
-    return output
 
 #--------------------------------------------------- Generazione IMMAGINE ------------------------------------------------------
 style_image = {
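Note on the removed hunk: the deleted /GeneraAsincrono endpoint fanned ten generate_async calls out through asyncio.gather, but generate_async never awaits anything (client.text_generation is a blocking call), and the aiohttp.ClientSession it opens is never used, like the input_text local, so the ten generations effectively ran one after another on the event loop. Below is a minimal sketch of how that fan-out could be made genuinely concurrent. It is not the Space's actual code: the model id and the stand-in format_prompt are assumptions.

# Sketch only. Push the blocking InferenceClient.text_generation call onto
# worker threads with asyncio.to_thread so the ten requests overlap instead
# of running back-to-back inside coroutines that never await.
import asyncio
from huggingface_hub import InferenceClient

MODEL_ID = "mistralai/Mistral-7B-Instruct-v0.2"  # assumption; main.py's model is not shown in this diff
client = InferenceClient(MODEL_ID)

def format_prompt(message: str, history: list) -> str:
    # Stand-in for the format_prompt helper kept in the hunk above.
    return f"[INST] {message} [/INST]"

def generate_blocking(prompt: str, history: list, temperature: float = 0.2,
                      max_new_tokens: int = 30000, top_p: float = 0.95,
                      repetition_penalty: float = 1.0) -> str:
    # Same call the removed generate_async made, declared as a plain function
    # because it performs no awaits.
    temperature = max(float(temperature), 1e-2)  # clamp, as in the removed code
    return client.text_generation(
        format_prompt(prompt, history),
        temperature=temperature,
        max_new_tokens=max_new_tokens,
        top_p=float(top_p),
        repetition_penalty=repetition_penalty,
        do_sample=True,
        seed=42,
        stream=False,
        details=False,
    )

async def generate_many(prompt: str, n: int = 10) -> list[str]:
    # asyncio.to_thread runs each blocking call in a thread, so gather
    # actually overlaps the n generations.
    tasks = [asyncio.to_thread(generate_blocking, prompt, []) for _ in range(n)]
    return await asyncio.gather(*tasks)

if __name__ == "__main__":
    print(asyncio.run(generate_many("Ciao!", n=3)))

In a FastAPI handler, a call like await generate_many(input_data.input) could replace the whole async with aiohttp.ClientSession() block, since the session was never passed to generate_async in the first place.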
|