hypeconqueror1 committed on
Commit d631e70 · verified · 1 Parent(s): eb2c6e0

Update app.py

Files changed (1)
  1. app.py +13 -19
app.py CHANGED
@@ -1,6 +1,4 @@
-from ctransformers import AutoModelForCausalLM
-from flask import Flask, request, jsonify
-import os
+from fastapi import FastAPI, File, UploadFile, Form
 from langchain_community.document_loaders import PyMuPDFLoader
 from LoadLLM import Loadllm
 from langchain_community.embeddings import HuggingFaceEmbeddings
@@ -9,25 +7,21 @@ from langchain.chains import ConversationalRetrievalChain

 DB_FAISS_PATH = 'vectorstore/db_faiss'

-app = Flask(__name__)
+app = FastAPI()

-@app.route('/')
-def home():
+@app.get('/')
+async def home():
     return "API Server Running"

-
-@app.route('/PromptBuddy', methods=['GET', 'POST'])
-def PromptLLM():
-    pdf_file = request.files['file']
-    pdf_name = pdf_file.filename
-    user_prompt = request.form.get('query')
-    pdf_file.save(pdf_name)
-
+@app.post('/PromptBuddy')
+async def PromptLLM(file: UploadFile = File(...), query: str = Form(...)):
+    pdf_name = file.filename
+    with open(pdf_name, 'wb') as f:
+        f.write(file.file.read())

     loader = PyMuPDFLoader(file_path=pdf_name)
     data = loader.load()

-
     # Create embeddings using Sentence Transformers
     embeddings = HuggingFaceEmbeddings(model_name='sentence-transformers/all-MiniLM-L6-v2')

@@ -41,9 +35,9 @@ def PromptLLM():
     # Create a conversational chain
     chain = ConversationalRetrievalChain.from_llm(llm=llm, retriever=db.as_retriever())

-    result = chain({"question": user_prompt, "chat_history":''})
-    return result["answer"]
-
+    result = chain({"question": query, "chat_history": ''})
+    return {"answer": result["answer"]}

 if __name__ == '__main__':
-    app.run(debug=True)
+    import uvicorn
+    uvicorn.run(app, host="127.0.0.1", port=8000, log_level="info")
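For context, a minimal, untested sketch of how a client could call the new endpoint once the server is started via the uvicorn.run(...) call above (host 127.0.0.1, port 8000). The file name sample.pdf, the query text, and the use of the requests library are illustrative assumptions, not part of this commit:

import requests

# Hypothetical smoke test for the POST /PromptBuddy endpoint added in this commit.
# "sample.pdf" is a placeholder path; point it at any PDF on disk.
url = "http://127.0.0.1:8000/PromptBuddy"
with open("sample.pdf", "rb") as pdf:
    resp = requests.post(
        url,
        files={"file": ("sample.pdf", pdf, "application/pdf")},  # matches the UploadFile parameter name
        data={"query": "Summarize this document."},               # matches the Form parameter name
    )
print(resp.json()["answer"])  # the endpoint now returns {"answer": ...}

The same request should also work from the command line, e.g. curl -F "file=@sample.pdf" -F "query=Summarize this document." http://127.0.0.1:8000/PromptBuddy.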