Update app.py
app.py CHANGED
@@ -4,7 +4,7 @@ import logging, os, sys, threading, time
 from dotenv import load_dotenv, find_dotenv
 
 from rag_langchain import LangChainRAG
-from rag_llamaindex import LlamaIndexRAG
+#from rag_llamaindex import LlamaIndexRAG
 from trace import trace_wandb
 
 lock = threading.Lock()
@@ -43,9 +43,9 @@ def invoke(openai_api_key, prompt, rag_option):
     if (rag_option == RAG_LANGCHAIN):
         rag = LangChainRAG()
         rag.ingestion(config)
-    elif (rag_option == RAG_LLAMAINDEX):
-        rag = LlamaIndexRAG()
-        rag.ingestion(config)
+    #elif (rag_option == RAG_LLAMAINDEX):
+    #    rag = LlamaIndexRAG()
+    #    rag.ingestion(config)
 
     completion = ""
     result = ""
@@ -59,9 +59,9 @@ def invoke(openai_api_key, prompt, rag_option):
         rag = LangChainRAG()
         completion, callback = rag.rag_chain(config, prompt)
         result = completion["result"]
-    elif (rag_option == RAG_LLAMAINDEX):
-        rag = LlamaIndexRAG()
-        result, callback = rag.retrieval(config, prompt)
+    #elif (rag_option == RAG_LLAMAINDEX):
+    #    rag = LlamaIndexRAG()
+    #    result, callback = rag.retrieval(config, prompt)
     else:
         rag = LangChainRAG()
         completion, callback = rag.llm_chain(config, prompt)
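For reference, the branches commented out above assume a LlamaIndexRAG class from rag_llamaindex that exposes ingestion(config) and retrieval(config, prompt), with retrieval returning a (result, callback) pair. The stub below is a minimal sketch of that assumed interface only, inferred from the calls in invoke(); it is not the actual rag_llamaindex implementation.

# Hypothetical stub of the interface app.py expects from rag_llamaindex.
# Method names and signatures come from the commented-out calls in the diff;
# the bodies are placeholders, not the real ingestion/retrieval logic.
class LlamaIndexRAG:
    def ingestion(self, config):
        # Would build and persist the LlamaIndex index from the sources
        # described in `config`.
        raise NotImplementedError("LlamaIndex ingestion is currently disabled")

    def retrieval(self, config, prompt):
        # Would query the index with `prompt` and return (result, callback),
        # mirroring how the RAG_LLAMAINDEX branch unpacks the return value.
        raise NotImplementedError("LlamaIndex retrieval is currently disabled")

Re-enabling the RAG_LLAMAINDEX option would then only require uncommenting the import and the two elif branches shown in the diff.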