Update app.py
app.py CHANGED
@@ -292,7 +292,8 @@ def generate(text, history, rag_option, model_option, temperature=0.5, max_new_
     #payload = tokenizer.apply_chat_template(prompt,tokenize=False)
     #result = client.text_generation(payload, do_sample=True,return_full_text=False, max_new_tokens=2048,top_p=0.9,temperature=0.6,)
     #inference allg:
-
+    response= requests.post(API_URL, headers=HEADERS, json={"inputs": prompt})
+    result = response.json()
     print("prompt:..................")
     print(prompt)
     print("result:..................")
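
In context, the change replaces the commented-out `client.text_generation(...)` path with a direct POST to the Hugging Face Inference API endpoint. A minimal sketch of that pattern follows, assuming `API_URL` and `HEADERS` are defined elsewhere in app.py (they are not shown in this hunk); the model id and the `HF_TOKEN` environment variable here are hypothetical placeholders.

import os
import requests

# Assumed values; in app.py these are set outside the hunk shown above.
API_URL = "https://api-inference.huggingface.co/models/<model-id>"   # hypothetical model id
HEADERS = {"Authorization": f"Bearer {os.environ['HF_TOKEN']}"}      # hypothetical token variable

def query(prompt: str):
    """POST the prompt to the Inference API and return the parsed JSON response."""
    response = requests.post(API_URL, headers=HEADERS, json={"inputs": prompt})
    response.raise_for_status()  # surface HTTP errors instead of parsing an error body
    return response.json()

# Usage: for text-generation models the result is typically a list such as
# [{"generated_text": "..."}], so the new `result = response.json()` line in the
# diff holds that parsed structure before it is printed.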