Update app.py
app.py CHANGED

@@ -2,7 +2,6 @@ import numpy as np
 import pandas as pd
 import configparser
 import streamlit as st
-import streamlit
 import chromadb
 import langchain
 from langchain.embeddings.openai import OpenAIEmbeddings
@@ -15,8 +14,8 @@ import shutil
 import os
 import openai
 
-#
-api_key =
+# Replace with your OpenAI API key
+api_key = "your_openai_api_key_here"
 
 if not api_key:
     st.error("I can not find API key")
@@ -32,17 +31,17 @@ else:
         shutil.copyfileobj(uploaded_file, tmpfile)
         file_path = tmpfile.name
 
-    # PyPDFLoader
+    # Load and split PDF using PyPDFLoader
     loader = PyPDFLoader(file_path)
     pages = loader.load_and_split()
 
-    #
+    # Initialize model and vectorstore
+    llm = ChatOpenAI(temperature=0, model_name="gpt-3.5-turbo")
     embeddings = OpenAIEmbeddings()
     vectorstore = Chroma.from_documents(pages, embedding=embeddings, persist_directory=".")
     vectorstore.persist()
-    llm = ChatOpenAI(temperature=0, model_name="gpt-3.5-turbo")
 
-    # Q&A chain
+    # Create Q&A chain
     pdf_qa = ConversationalRetrievalChain.from_llm(llm, vectorstore.as_retriever(), return_source_documents=True)
 
     # Get question from user
@@ -53,4 +52,5 @@ else:
     st.write("Answer:", answer)
 
     # Delete tempfile
-
+    if file_path:
+        os.unlink(file_path)
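The second hunk replaces the empty api_key assignment (the likely cause of the build error) with a placeholder string, but the hunks shown here never pass that value to the OpenAI clients. A minimal sketch of how it is usually wired up with this legacy openai/langchain stack, assuming the pre-1.0 openai module attribute and the OPENAI_API_KEY environment-variable fallback; none of this is taken from the commit itself:

import os
import openai

# Placeholder value, exactly as the patch writes it.
api_key = "your_openai_api_key_here"

# Pre-1.0 openai versions read the key from this module attribute.
openai.api_key = api_key

# Legacy langchain's OpenAIEmbeddings / ChatOpenAI fall back to this
# environment variable when no openai_api_key argument is given.
os.environ.setdefault("OPENAI_API_KEY", api_key)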
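The question/answer round trip sits between the last two hunks, so only st.write("Answer:", answer) is visible here. With the ConversationalRetrievalChain built above, the usual call convention in this langchain generation is a dict carrying question and chat_history; a sketch, where the text_input label and the empty history are assumptions rather than code from the commit:

# Ask the retrieval chain a question about the uploaded PDF.
question = st.text_input("Ask a question about the PDF")  # label assumed
if question:
    result = pdf_qa({"question": question, "chat_history": []})
    answer = result["answer"]
    # return_source_documents=True also exposes result["source_documents"]
    st.write("Answer:", answer)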
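The final hunk replaces an empty line under "# Delete tempfile" with a guarded os.unlink. The surrounding context (shutil.copyfileobj into tmpfile, then tmpfile.name) points to the standard NamedTemporaryFile(delete=False) pattern; a self-contained sketch of that pattern, with the uploader label assumed:

import os
import shutil
import tempfile

import streamlit as st

uploaded_file = st.file_uploader("Upload a PDF", type="pdf")  # label assumed
file_path = None
if uploaded_file is not None:
    # delete=False keeps the file on disk after the with-block closes it
    with tempfile.NamedTemporaryFile(delete=False, suffix=".pdf") as tmpfile:
        shutil.copyfileobj(uploaded_file, tmpfile)
        file_path = tmpfile.name

    # ... load, index, and query the PDF here ...

    # Delete tempfile, guarded as in the patch
    if file_path:
        os.unlink(file_path)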