0504ankitsharma committed on
Commit
de91770
·
verified ·
1 Parent(s): 6fa6752

Update app/main.py

Browse files
Files changed (1) hide show
  1. app/main.py +38 -68
app/main.py CHANGED
@@ -12,92 +12,57 @@ from fastapi.middleware.cors import CORSMiddleware
12
  from fastapi import FastAPI
13
  from pydantic import BaseModel
14
  from langchain_community.embeddings import HuggingFaceBgeEmbeddings
15
- import nltk # Importing NLTK
16
  import time
17
 
18
- # Set writable paths for cache and data
19
- raise
20
-
21
- def clean_response(response):
22
- # Remove any leading/trailing whitespace, including newlines
23
- cleaned = response.strip()
24
-
25
- # Remove any enclosing quotation marks
26
- cleaned = re.sub(r'^["\']+|["\']+$', '', cleaned)
27
-
28
- # Replace multiple newlines with a single newline
29
- cleaned = re.sub(r'\n+', '\n', cleaned)
30
-
31
- # Remove any remaining '\n' characters
32
- cleaned = cleaned.replace('\\n', '')
33
-
34
- return cleaned
35
-
36
  app = FastAPI()
 
 
37
  openai_api_key = os.environ.get('OPENAI_API_KEY')
38
  llm = ChatOpenAI(
39
  api_key=openai_api_key,
40
- model_name="gpt-4-turbo-preview", # or "gpt-3.5-turbo" for a more economical option
41
- temperature=0.7
42
  )
43
 
44
-
45
-
46
  @app.get("/")
47
  def read_root():
48
  return {"Hello": "World"}
49
 
50
  class Query(BaseModel):
51
-
52
  query_text: str
53
 
 
 
 
 
 
 
 
54
  prompt = ChatPromptTemplate.from_template(
55
  """
56
- You are a helpful assistant designed specifically for the Thapar Institute of Engineering and Technology (TIET), a renowned technical college. Your task is to answer all queries related to TIET. Every response you provide should be relevant to the context of TIET. If a question falls outside of this context, please decline by stating, 'Sorry, I cannot help with that.' If you do not know the answer to a question, do not attempt to fabricate a response; instead, politely decline.
57
- You may elaborate on your answers slightly to provide more information, but avoid sounding boastful or exaggerating. Stay focused on the context provided.
58
- If the query is not related to TIET or falls outside the context of education, respond with:
59
- "Sorry, I cannot help with that. I'm specifically designed to answer questions about the Thapar Institute of Engineering and Technology.
60
- For more information, please contact at our toll-free number: 18002024100 or E-mail us at [email protected]
61
  <context>
62
  {context}
63
  </context>
64
- Question: {input}
65
  """
66
  )
67
 
68
- def vector_embedding():
69
-
70
- text_splitter = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=100)
71
- chunks = text_splitter.split_documents(documents)
72
-
73
- print(f"Created {len(chunks)} chunks.")
74
-
75
- model_name = "BAAI/bge-base-en"
76
- encode_kwargs = {'normalize_embeddings': True}
77
- model_norm = HuggingFaceBgeEmbeddings(model_name=model_name, encode_kwargs=encode_kwargs)
78
-
79
- db = FAISS.from_documents(chunks, model_norm)
80
- db.save_local("./vectors_db")
81
-
82
- print("Vector store created and saved successfully.")
83
- return {"response": "Vector Store DB Is Ready"}
84
-
85
- model_norm = HuggingFaceBgeEmbeddings(model_name=model_name, encode_kwargs=encode_kwargs)
86
- return model_norm
87
-
88
- @app.post("/chat") # Changed from /anthropic to /chat
89
  def read_item(query: Query):
90
  try:
91
-
92
-
93
-
94
-
95
- embeddings = get_embeddings()
96
  vectors = FAISS.load_local("./vectors_db", embeddings, allow_dangerous_deserialization=True)
97
  except Exception as e:
98
  print(f"Error loading vector store: {str(e)}")
99
  return {"response": "Vector Store Not Found or Error Loading. Please run /setup first."}
100
-
101
  prompt1 = query.query_text
102
  if prompt1:
103
  start = time.process_time()
@@ -106,23 +71,28 @@ def read_item(query: Query):
106
  retrieval_chain = create_retrieval_chain(retriever, document_chain)
107
  response = retrieval_chain.invoke({'input': prompt1})
108
 
109
- print("Response time:", time.process_time() - start)
110
-
111
- # Apply the cleaning function to the response
112
  cleaned_response = clean_response(response['answer'])
113
-
114
- # For debugging, print the cleaned response
115
- print("Cleaned response:", repr(cleaned_response))
116
-
117
- return cleaned_response
118
  else:
119
- return "No Query Found"
120
 
121
  @app.get("/setup")
122
  def setup():
 
 
 
 
 
 
 
 
 
 
 
 
 
123
 
124
  if __name__ == "__main__":
125
  import uvicorn
126
  uvicorn.run(app, host="0.0.0.0", port=8000)
127
-
128
-
 
12
  from fastapi import FastAPI
13
  from pydantic import BaseModel
14
  from langchain_community.embeddings import HuggingFaceBgeEmbeddings
15
+ import nltk
16
  import time
17
 
18
+ # Set up FastAPI app
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
19
  app = FastAPI()
20
+
21
+ # Get OpenAI API key
22
  openai_api_key = os.environ.get('OPENAI_API_KEY')
23
  llm = ChatOpenAI(
24
  api_key=openai_api_key,
25
+ model_name="gpt-4-turbo-preview",
26
+ temperature=0.7,
27
  )
28
 
 
 
29
  @app.get("/")
30
  def read_root():
31
  return {"Hello": "World"}
32
 
33
  class Query(BaseModel):
 
34
  query_text: str
35
 
36
+ def clean_response(response):
37
+ cleaned = response.strip()
38
+ cleaned = re.sub(r'^["\']+|["\']+$', '', cleaned)
39
+ cleaned = re.sub(r'\n+', '\n', cleaned)
40
+ cleaned = cleaned.replace('\\n', '')
41
+ return cleaned
42
+
43
  prompt = ChatPromptTemplate.from_template(
44
  """
45
+ You are a helpful assistant designed specifically for the Thapar Institute of Engineering and Technology (TIET), a renowned technical college. Your task is to answer all queries related to TIET. If a question falls outside of this context, please decline by stating, 'Sorry, I cannot help with that.' For more information, please contact our toll-free number: 18002024100 or email us at admissions@thapar.edu.
 
 
 
 
46
  <context>
47
  {context}
48
  </context>
49
+ Question: {input}
50
  """
51
  )
52
 
53
+ @app.post("/chat")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
54
  def read_item(query: Query):
55
  try:
56
+ # Load vector store
57
+ embeddings = HuggingFaceBgeEmbeddings(
58
+ model_name="BAAI/bge-base-en",
59
+ encode_kwargs={'normalize_embeddings': True}
60
+ )
61
  vectors = FAISS.load_local("./vectors_db", embeddings, allow_dangerous_deserialization=True)
62
  except Exception as e:
63
  print(f"Error loading vector store: {str(e)}")
64
  return {"response": "Vector Store Not Found or Error Loading. Please run /setup first."}
65
+
66
  prompt1 = query.query_text
67
  if prompt1:
68
  start = time.process_time()
 
71
  retrieval_chain = create_retrieval_chain(retriever, document_chain)
72
  response = retrieval_chain.invoke({'input': prompt1})
73
 
 
 
 
74
  cleaned_response = clean_response(response['answer'])
75
+ print("Response time:", time.process_time() - start)
76
+ return {"response": cleaned_response}
 
 
 
77
  else:
78
+ return {"response": "No Query Found"}
79
 
80
  @app.get("/setup")
81
  def setup():
82
+ # Example setup function for vector embedding
83
+ documents = [] # Load your documents here
84
+ text_splitter = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=100)
85
+ chunks = text_splitter.split_documents(documents)
86
+
87
+ model_name = "BAAI/bge-base-en"
88
+ encode_kwargs = {'normalize_embeddings': True}
89
+ embeddings = HuggingFaceBgeEmbeddings(model_name=model_name, encode_kwargs=encode_kwargs)
90
+
91
+ db = FAISS.from_documents(chunks, embeddings)
92
+ db.save_local("./vectors_db")
93
+ print("Vector store created and saved successfully.")
94
+ return {"response": "Vector Store DB Is Ready"}
95
 
96
  if __name__ == "__main__":
97
  import uvicorn
98
  uvicorn.run(app, host="0.0.0.0", port=8000)