Update app.py
Browse files
app.py
CHANGED
@@ -24,7 +24,7 @@ def construct_index(directory_path):
|
|
24 |
temperature = 0.1
|
25 |
|
26 |
prompt_helper = PromptHelper(max_input_size, num_outputs, max_chunk_overlap, chunk_size_limit=chunk_size_limit)
|
27 |
-
llm_predictor = LLMPredictor(llm=ChatOpenAI(temperature=temperature, model_name="text-embedding-
|
28 |
documents = SimpleDirectoryReader(directory_path).load_data()
|
29 |
#index = GPTVectorStoreIndex(documents, llm_predictor=llm_predictor, prompt_helper=prompt_helper)
|
30 |
index = GPTVectorStoreIndex.from_documents(documents, urls=[
|
@@ -57,13 +57,13 @@ def construct_index(directory_path):
|
|
57 |
|
58 |
return index
|
59 |
|
60 |
-
def chatbotCustom(input):
    """Answer *input* by querying the locally persisted vector index.

    Reloads the index from disk on every call, wraps it in a query
    engine, and returns the answer as a plain string.

    NOTE(review): ``persist_dir`` conventionally names a directory;
    "index.json" looks like a file path — confirm it matches how the
    index was persisted.
    """
    # Rehydrate the previously saved index from local storage.
    ctx = StorageContext.from_defaults(persist_dir="index.json")
    loaded_index = load_index_from_storage(ctx)
    engine = loaded_index.as_query_engine()
    result = engine.query(input)
    # Alternative query form kept from the original for reference:
    # response = index.query(input, similarity_top_k=5, response_mode="tree_summarize")
    return result.response
|
67 |
|
68 |
def chatbotGPT(input):
|
69 |
if input:
|
|
|
24 |
temperature = 0.1
|
25 |
|
26 |
prompt_helper = PromptHelper(max_input_size, num_outputs, max_chunk_overlap, chunk_size_limit=chunk_size_limit)
|
27 |
+
llm_predictor = LLMPredictor(llm=ChatOpenAI(temperature=temperature, model_name="text-embedding-3-large", max_tokens=num_outputs))
|
28 |
documents = SimpleDirectoryReader(directory_path).load_data()
|
29 |
#index = GPTVectorStoreIndex(documents, llm_predictor=llm_predictor, prompt_helper=prompt_helper)
|
30 |
index = GPTVectorStoreIndex.from_documents(documents, urls=[
|
|
|
57 |
|
58 |
return index
|
59 |
|
60 |
+
#def chatbotCustom(input):
|
61 |
+
# storage_context = StorageContext.from_defaults(persist_dir="index.json")
|
62 |
+
# index = load_index_from_storage(storage_context)
|
63 |
+
# query_engine = index.as_query_engine()
|
64 |
+
# response = query_engine.query(input)
|
65 |
# response = index.query(input, similarity_top_k=5, response_mode="tree_summarize")
|
66 |
+
# return response.response
|
67 |
|
68 |
def chatbotGPT(input):
|
69 |
if input:
|