Build error
Update app.py
app.py
CHANGED
@@ -28,6 +28,7 @@ qa_chain = None
 
 def invoke(openai_api_key, youtube_url, prompt):
     openai.api_key = openai_api_key
+    global qa_chain
     if (os.path.isdir("docs/chroma/") == False):
         print(1)
         youtube_dir = "docs/youtube/"
@@ -38,9 +39,9 @@ def invoke(openai_api_key, youtube_url, prompt):
         chroma_dir = "docs/chroma/"
         vectordb = Chroma.from_documents(documents = splits, embedding = OpenAIEmbeddings(), persist_directory = chroma_dir)
         llm = ChatOpenAI(model_name = "gpt-4", temperature = 0)
-
+        qa_chain = RetrievalQA.from_chain_type(llm, retriever = vectordb.as_retriever(), return_source_documents = True, chain_type_kwargs = {"prompt": QA_CHAIN_PROMPT})
     print(2)
-    result =
+    result = qa_chain({"query": prompt})
     shutil.rmtree(youtube_dir)
     #shutil.rmtree(chroma_dir)
     return result["result"]
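Taken together, the commit stores the RetrievalQA chain in the module-level qa_chain (hence the added global statement) rather than in a local variable, presumably so the chain built while indexing the video can be reused once docs/chroma/ exists, and it restores the chain construction and the query call whose earlier truncation (a bare `result =`) would be a syntax error and is the likely cause of the build error. Below is a minimal sketch of that build-once, reuse-later pattern against the legacy LangChain API the diff uses; the names answer, ANSWER_PROMPT, and docs are illustrative stand-ins, not code from app.py.

import openai
from langchain.chains import RetrievalQA
from langchain.chat_models import ChatOpenAI
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.prompts import PromptTemplate
from langchain.vectorstores import Chroma

qa_chain = None  # module-level cache, mirroring qa_chain = None in app.py

# Illustrative prompt; app.py's QA_CHAIN_PROMPT is defined outside the lines shown in the diff.
ANSWER_PROMPT = PromptTemplate(
    input_variables = ["context", "question"],
    template = "Use the following context to answer the question.\n{context}\nQuestion: {question}\nAnswer:")

def answer(openai_api_key, docs, question):
    # Declare the global before assigning, otherwise the assignment below would
    # create a function-local qa_chain and the module-level cache would stay None.
    global qa_chain
    openai.api_key = openai_api_key
    if qa_chain is None:
        # First call: embed the documents into Chroma and build the chain once.
        vectordb = Chroma.from_documents(documents = docs, embedding = OpenAIEmbeddings(), persist_directory = "docs/chroma/")
        llm = ChatOpenAI(model_name = "gpt-4", temperature = 0)
        qa_chain = RetrievalQA.from_chain_type(llm,
                                               retriever = vectordb.as_retriever(),
                                               return_source_documents = True,
                                               chain_type_kwargs = {"prompt": ANSWER_PROMPT})
    # Later calls skip straight to the cached chain.
    result = qa_chain({"query": question})
    return result["result"]

app.py additionally keys the expensive branch on os.path.isdir("docs/chroma/") and cleans up the downloaded audio afterwards; the sketch checks the cached chain itself only to stay self-contained.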