bstraehle committed
Commit e9e1421 · 1 Parent(s): a0ed7f1

Update app.py
Files changed (1): app.py (+3 −5)
app.py CHANGED
@@ -22,13 +22,11 @@ template = """Use the following pieces of context to answer the question at the
 
 QA_CHAIN_PROMPT = PromptTemplate(input_variables = ["context", "question"], template = template)
 
-initial_load = True
-
-print(initial_load)
+is_initial_load = True
 
 def invoke(openai_api_key, youtube_url, prompt):
     openai.api_key = openai_api_key
-    if initial_load == True:
+    if is_initial_load:
         youtube_dir = "docs/youtube/"
         loader = GenericLoader(YoutubeAudioLoader([youtube_url], youtube_dir), OpenAIWhisperParser())
         docs = loader.load()
@@ -38,7 +36,7 @@ def invoke(openai_api_key, youtube_url, prompt):
         vectordb = Chroma.from_documents(documents = splits, embedding = OpenAIEmbeddings(), persist_directory = chroma_dir)
         llm = ChatOpenAI(model_name = "gpt-4", temperature = 0)
         qa_chain = RetrievalQA.from_chain_type(llm, retriever = vectordb.as_retriever(), return_source_documents = True, chain_type_kwargs = {"prompt": QA_CHAIN_PROMPT})
-        initial_load = False
+        is_initial_load = False
         result = qa_chain({"query": prompt})
         #shutil.rmtree(youtube_dir)
         #shutil.rmtree(chroma_dir)
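For context, this commit renames the module-level initial_load flag to is_initial_load and simplifies its truth test. Below is a minimal, hypothetical sketch of the once-only setup pattern the flag implements; build_chain is a stand-in for the Whisper transcription / Chroma indexing / RetrievalQA setup in app.py, not a function from the repo. One Python detail the sketch makes explicit: because invoke() assigns to the flag, the name must be declared global, otherwise the assignment makes it a local variable and reading it first raises UnboundLocalError.

# Hypothetical sketch of the lazy, once-only setup pattern; build_chain is a stub.
is_initial_load = True  # module-level flag: expensive setup should run only once
qa_chain = None         # cache for the chain built on the first call

def build_chain(openai_api_key, youtube_url):
    """Stand-in for the transcription, vector store, and RetrievalQA setup."""
    return lambda query: {"result": f"answer for {query['query']}"}

def invoke(openai_api_key, youtube_url, prompt):
    global is_initial_load, qa_chain  # required because both names are reassigned here
    if is_initial_load:
        qa_chain = build_chain(openai_api_key, youtube_url)
        is_initial_load = False
    return qa_chain({"query": prompt})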