shukdevdatta123 committed
Commit d8ae64a · verified · 1 Parent(s): ede883f

Update generate_answer.py

Files changed (1):
  1. generate_answer.py  +2 -6
generate_answer.py CHANGED
@@ -1,15 +1,11 @@
-# generate_answer.py
-
 import os
 from glob import glob
 import openai
 from dotenv import load_dotenv
-
 from langchain.embeddings import OpenAIEmbeddings
 from langchain.vectorstores import Chroma
 from langchain.document_loaders import PyPDFLoader
 from langchain.text_splitter import RecursiveCharacterTextSplitter
-
 from langchain_community.chat_models import ChatOpenAI
 from langchain.chains import RetrievalQA
 from langchain.memory import ConversationBufferMemory
@@ -28,7 +24,7 @@ def base_model_chatbot(messages):
     response = openai.ChatCompletion.create(
         model="gpt-3.5-turbo",
         messages=messages,
-        max_tokens=2048 # Increase max_tokens limit (or adjust based on your needs)
+        max_tokens=1500 # Increase max_tokens limit
     )
     return response.choices[0].message['content']
 
@@ -70,7 +66,7 @@ class ConversationalRetrievalChain:
             retriever=retriever,
             memory=memory,
         )
-
+
 def with_pdf_chatbot(messages):
     """Main function to execute the QA system."""
     query = messages[-1]['content'].strip()
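
For reference, a minimal, hypothetical usage sketch of the updated base_model_chatbot after this commit. It assumes the rest of generate_answer.py is unchanged, that the legacy openai<1.0 ChatCompletion API is installed (which the module's openai.ChatCompletion.create call requires), and that OPENAI_API_KEY is available via the .env file loaded with python-dotenv; the example messages are illustrative only and not part of the repository.

# Hypothetical usage sketch (not part of this commit).
# Assumes OPENAI_API_KEY is set in .env and openai<1.0 is installed,
# since generate_answer.py uses the legacy openai.ChatCompletion API.
from generate_answer import base_model_chatbot

messages = [
    {"role": "system", "content": "You are a helpful assistant."},    # illustrative
    {"role": "user", "content": "Explain what a vector store does."}  # illustrative
]

# After this commit, completions are capped at max_tokens=1500 (previously 2048).
answer = base_model_chatbot(messages)
print(answer)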