firqaaa committed
Commit 1209449 · 1 Parent(s): c8c543f

Update app.py

Files changed (1)
  1. app.py +3 -3
app.py CHANGED
@@ -68,7 +68,7 @@ class Journal:
     def __repr__(self):
         return f"Journal(name='{self.name}', bytes='{self.bytes}')"
 
-llm = ChatOpenAI(temperature=0, model="gpt-3.5-turbo-1106")
+llm = ChatOpenAI(temperature=0, model="gpt-4-1106-preview")
 
 textex_chain = create_extraction_chain(textex_schema, llm)
 tablex_chain = create_extraction_chain(tablex_schema, llm)
@@ -198,7 +198,7 @@ if uploaded_files:
     embeddings = OpenAIEmbeddings()
 
     db = Chroma.from_documents(docs, embeddings)
-    llm_table = ChatOpenAI(model_name="gpt-3.5-turbo-1106", temperature=0)
+    llm_table = ChatOpenAI(model_name="gpt-4-1106-preview", temperature=0)
     qa_chain = RetrievalQA.from_chain_type(llm_table, retriever=db.as_retriever())
 
     # List of questions
@@ -649,7 +649,7 @@ if uploaded_files:
     embeddings = OpenAIEmbeddings()
 
     db = Chroma.from_documents(docs, embeddings)
-    llm_table = ChatOpenAI(model_name="gpt-3.5-turbo-16k", temperature=0)
+    llm_table = ChatOpenAI(model_name="gpt-4-1106-preview", temperature=0)
     qa_chain = RetrievalQA.from_chain_type(llm_table, retriever=db.as_retriever())
 
     # List of questions
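For context, here is a minimal sketch of the two patterns the changed lines belong to: schema extraction with a chat model, and retrieval QA over a Chroma store. It assumes the legacy langchain imports that app.py appears to use; the schema, sample document, and question below are illustrative placeholders, not values from the app.

# A minimal sketch (not the app's full code) of how the pieces touched by this
# commit fit together. `journal_schema`, `pages`, and the question are
# placeholders for illustration only.
from langchain.chat_models import ChatOpenAI
from langchain.chains import create_extraction_chain, RetrievalQA
from langchain.embeddings import OpenAIEmbeddings
from langchain.vectorstores import Chroma
from langchain.docstore.document import Document

# Extraction chain: pulls structured fields out of raw text with the chat model.
journal_schema = {
    "properties": {
        "name": {"type": "string"},
        "bytes": {"type": "integer"},
    },
    "required": ["name"],
}
llm = ChatOpenAI(temperature=0, model="gpt-4-1106-preview")
extraction_chain = create_extraction_chain(journal_schema, llm)
records = extraction_chain.run("Journal of Examples, 2048 bytes")

# Retrieval QA: embed documents into Chroma, then answer questions over them.
pages = [Document(page_content="Table 1 reports a sample size of 120 patients.")]
embeddings = OpenAIEmbeddings()
db = Chroma.from_documents(pages, embeddings)
llm_table = ChatOpenAI(model_name="gpt-4-1106-preview", temperature=0)
qa_chain = RetrievalQA.from_chain_type(llm_table, retriever=db.as_retriever())
answer = qa_chain.run("What is the sample size?")

Note that both chains take the model purely as a constructor argument, so this commit only swaps the model strings to gpt-4-1106-preview; no other code changes are needed.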