bstraehle committed on
Commit
d0400f6
·
1 Parent(s): b2c4bf8

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +8 -10
app.py CHANGED
@@ -30,22 +30,20 @@ RAG_CHAIN_PROMPT = PromptTemplate(input_variables = ["context", "question"],
30
  CHROMA_DIR = "/data/chroma"
31
  YOUTUBE_DIR = "/data/youtube"
32
 
33
- YOUTUBE_URL = "https://www.youtube.com/watch?v=--khbXchTeE"
34
- #YOUTUBE_URL = "https://www.youtube.com/watch?v=RfvL_423a-I&list=PL2yQDdvlhXf_hIzmfHCdbcXj2hS52oP9r&index=2"
35
 
36
  MODEL_NAME = "gpt-4"
37
 
38
  def invoke(openai_api_key, use_rag, prompt):
39
- print(os.listdir("../"))
40
- print(os.listdir("../app/"))
41
  llm = ChatOpenAI(model_name = MODEL_NAME,
42
  openai_api_key = openai_api_key,
43
  temperature = 0)
44
  if (use_rag):
45
- if (os.path.isdir(CHROMA_DIR)):
46
- vector_db = Chroma(embedding_function = OpenAIEmbeddings(),
47
- persist_directory = CHROMA_DIR)
48
- print("Load DB")
49
  else:
50
  loader = GenericLoader(YoutubeAudioLoader([YOUTUBE_URL], YOUTUBE_DIR),
51
  OpenAIWhisperParser())
@@ -71,8 +69,8 @@ def invoke(openai_api_key, use_rag, prompt):
71
  return result
72
 
73
  description = """<strong>Overview:</strong> The app demonstrates how to use a Large Language Model (LLM) with Retrieval Augmented Generation (RAG) on external data
74
- (in this case a YouTube video, but it could be PDFs, URLs, or other
75
- <a href='https://raw.githubusercontent.com/bstraehle/ai-ml-dl/c38b224c196fc984aab6b6cc6bdc666f8f4fbcff/langchain/document-loaders.png'>data sources</a>).\n\n
76
  <strong>Instructions:</strong> Enter an OpenAI API key and perform LLM use cases (semantic search, sentiment analysis, summarization, translation, etc.) on
77
  a <a href='https://www.youtube.com/watch?v=--khbXchTeE'>short video of GPT-4</a>.
78
  <ul style="list-style-type:square;">
 
30
  CHROMA_DIR = "/data/chroma"
31
  YOUTUBE_DIR = "/data/youtube"
32
 
33
+ #YOUTUBE_URL = "https://www.youtube.com/watch?v=--khbXchTeE"
34
+ YOUTUBE_URL = "https://www.youtube.com/watch?v=Iy1IpvcJH7I&list=PL2yQDdvlhXf9XsB2W76_seM6dJxcE2Pdc&index=2"
35
 
36
  MODEL_NAME = "gpt-4"
37
 
38
  def invoke(openai_api_key, use_rag, prompt):
 
 
39
  llm = ChatOpenAI(model_name = MODEL_NAME,
40
  openai_api_key = openai_api_key,
41
  temperature = 0)
42
  if (use_rag):
43
+ # if (os.path.isdir(CHROMA_DIR)):
44
+ # vector_db = Chroma(embedding_function = OpenAIEmbeddings(),
45
+ # persist_directory = CHROMA_DIR)
46
+ # print("Load DB")
47
  else:
48
  loader = GenericLoader(YoutubeAudioLoader([YOUTUBE_URL], YOUTUBE_DIR),
49
  OpenAIWhisperParser())
 
69
  return result
70
 
71
  description = """<strong>Overview:</strong> The app demonstrates how to use a Large Language Model (LLM) with Retrieval Augmented Generation (RAG) on external data
72
+ (in this case the <a href='https://www.youtube.com/playlist?list=PL2yQDdvlhXf9XsB2W76_seM6dJxcE2Pdc'>AWS re:Invent 2022 - AI/ML YouTube playlist</a>,
73
+ but it could be PDFs, URLs, or other <a href='https://raw.githubusercontent.com/bstraehle/ai-ml-dl/c38b224c196fc984aab6b6cc6bdc666f8f4fbcff/langchain/document-loaders.png'>data sources</a>).\n\n
74
  <strong>Instructions:</strong> Enter an OpenAI API key and perform LLM use cases (semantic search, sentiment analysis, summarization, translation, etc.) on
75
  a <a href='https://www.youtube.com/watch?v=--khbXchTeE'>short video of GPT-4</a>.
76
  <ul style="list-style-type:square;">