hypeconqueror1 committed on
Commit
6a4b65e
·
verified ·
1 Parent(s): 6b7464b

Update main.py

Browse files
Files changed (1) hide show
  1. main.py +3 -3
main.py CHANGED
@@ -5,7 +5,7 @@
5
 
6
 
7
 
8
- from fastapi import FastAPI, File, UploadFile, Form
9
  import os
10
  import shutil
11
  import tempfile
@@ -24,7 +24,7 @@ async def home():
24
  return "API Server Running"
25
 
26
  @app.post('/PromptBuddy')
27
- async def PromptLLM(file: UploadFile = File(...), query: str = Form(...)):
28
 
29
  with tempfile.NamedTemporaryFile(delete=False) as temp_file: # Create temporary file
30
  temp_file_path = temp_file.name
@@ -48,7 +48,7 @@ async def PromptLLM(file: UploadFile = File(...), query: str = Form(...)):
48
  # Create a conversational chain
49
  chain = ConversationalRetrievalChain.from_llm(llm=llm, retriever=db.as_retriever())
50
 
51
- result = chain({"question": query, "chat_history": ''})
52
  return result['answer']
53
 
54
 
 
5
 
6
 
7
 
8
+ from fastapi import FastAPI, File, UploadFile
9
  import os
10
  import shutil
11
  import tempfile
 
24
  return "API Server Running"
25
 
26
  @app.post('/PromptBuddy')
27
+ async def PromptLLM(file: UploadFile = File(...)):
28
 
29
  with tempfile.NamedTemporaryFile(delete=False) as temp_file: # Create temporary file
30
  temp_file_path = temp_file.name
 
48
  # Create a conversational chain
49
  chain = ConversationalRetrievalChain.from_llm(llm=llm, retriever=db.as_retriever())
50
 
51
+ result = chain({"question": "Summarise the report", "chat_history": ''})
52
  return result['answer']
53
 
54