Update main.py
main.py
CHANGED
@@ -58,7 +58,7 @@ async def PromptLLM(file: UploadFile = File(...)):
     result = chain({"question": "Summarise this report", "chat_history": ''})
     summary = result['answer']
 
-    response = model.generate_content(summary + "\nBased on the information provided, what are the key medical insights and considerations for this patient?")
+    response = model.generate_content(summary + "\nBased on the information provided, what are the key medical insights and considerations for this patient?(100 words)")
 
     ans = {"summary": summary, "insights": response.text}
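For trying the changed call outside the Space, below is a minimal sketch of the Gemini request, assuming `model` is a configured google.generativeai GenerativeModel and that `summary` would normally come from the retrieval chain shown in the diff. The model name, API key, and sample summary are placeholders, not part of this commit.

# Minimal sketch (assumed setup; not part of the commit shown above).
import google.generativeai as genai

genai.configure(api_key="YOUR_API_KEY")            # placeholder API key
model = genai.GenerativeModel("gemini-1.5-flash")  # assumed model name

# Stand-in for the summary normally produced by the retrieval chain.
summary = "Example summary of the uploaded medical report."

# Same prompt construction as in main.py, including the "(100 words)" length hint.
response = model.generate_content(
    summary
    + "\nBased on the information provided, what are the key medical insights "
    "and considerations for this patient?(100 words)"
)

ans = {"summary": summary, "insights": response.text}
print(ans)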