Update app.py
app.py CHANGED
@@ -294,14 +294,16 @@ def generate(text, history, rag_option, model_option, temperature=0.5, max_new_
         #inference allg:
         response= requests.post(API_URL, headers=HEADERS, json={"inputs": prompt})
         result = response.json()
-
+        # Convert the string into a Python dictionary
+        parsed_data = json.loads(json.dumps(result))
+        print("prompt generate:..................")
         print(prompt)
-        print("result:..................")
-        print(
+        print("result generate:..................")
+        print(parsed_data['generated_text'])
     except Exception as e:
         raise gr.Error(e)
 
-    return result.json()
+    return parsed_data['generated_text'] #result.json()
 
     #Output the answer as a stream...
     #for i in range(len(result)):
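For reference, below is a minimal, self-contained sketch of the response handling this change implements, assuming the Space calls the Hugging Face serverless Inference API for a text-generation model. The API_URL, HEADERS, and the helper name generate_text are illustrative placeholders rather than values taken from the Space; text-generation endpoints commonly reply with a list such as [{"generated_text": "..."}], so the sketch handles both that shape and a plain dict.

import json
import requests

# Hypothetical endpoint and token; the Space defines its own API_URL and HEADERS.
API_URL = "https://api-inference.huggingface.co/models/<model-id>"
HEADERS = {"Authorization": "Bearer hf_..."}

def generate_text(prompt: str) -> str:
    # POST the prompt to the inference endpoint, as in the diff above.
    response = requests.post(API_URL, headers=HEADERS, json={"inputs": prompt})
    result = response.json()

    # response.json() already returns Python objects, so a loads/dumps
    # round trip (as used in the diff) leaves the data unchanged.
    parsed_data = json.loads(json.dumps(result))

    # Text-generation models usually answer with [{"generated_text": "..."}];
    # fall back to a plain dict lookup otherwise.
    if isinstance(parsed_data, list):
        return parsed_data[0]["generated_text"]
    return parsed_data["generated_text"]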