theekshana commited on
Commit
a624e23
1 Parent(s): 38be0ae

moved app.py to main directory

Browse files
__init__.py ADDED
File without changes
reggpt/app.py → app.py RENAMED
@@ -22,11 +22,11 @@ from dotenv import load_dotenv
22
  from fastapi import FastAPI
23
  from fastapi.middleware.cors import CORSMiddleware
24
 
25
- from configs.api import API_TITLE, API_VERSION, API_DESCRIPTION
26
- from api.router import ChatAPI
27
 
28
  def filer():
29
- return "logs/log"
30
  # today = datetime.datetime.today()
31
  # log_filename = f"logs/{today.year}-{today.month:02d}-{today.day:02d}.log"
32
  # return log_filename
 
22
  from fastapi import FastAPI
23
  from fastapi.middleware.cors import CORSMiddleware
24
 
25
+ from reggpt.configs.api import API_TITLE, API_VERSION, API_DESCRIPTION
26
+ from reggpt.api.router import ChatAPI
27
 
28
  def filer():
29
+ return "reggpt/logs/log"
30
  # today = datetime.datetime.today()
31
  # log_filename = f"logs/{today.year}-{today.month:02d}-{today.day:02d}.log"
32
  # return log_filename
reggpt/agents/__init__.py ADDED
File without changes
reggpt/api/router.py CHANGED
@@ -4,9 +4,9 @@ import logging
4
  from fastapi import APIRouter, HTTPException, status
5
  from fastapi import HTTPException, status
6
 
7
- from schemas.schema import UserQuery, LoginRequest, UserModel
8
- from routers.controller import get_QA_Answers, get_avaliable_models
9
- from configs.api import API_ENDPOINT_LOGIN,API_ENDPOINT_CHAT, API_ENDPOINT_HEALTH, API_ENDPOINT_MODEL
10
 
11
  logger = logging.getLogger(__name__)
12
 
 
4
  from fastapi import APIRouter, HTTPException, status
5
  from fastapi import HTTPException, status
6
 
7
+ from reggpt.schemas.schema import UserQuery, LoginRequest, UserModel
8
+ from reggpt.routers.controller import get_QA_Answers, get_avaliable_models
9
+ from reggpt.configs.api import API_ENDPOINT_LOGIN,API_ENDPOINT_CHAT, API_ENDPOINT_HEALTH, API_ENDPOINT_MODEL
10
 
11
  logger = logging.getLogger(__name__)
12
 
reggpt/chains/llmChain.py CHANGED
@@ -23,16 +23,16 @@ load_dotenv()
23
 
24
  verbose = os.environ.get('VERBOSE')
25
 
26
- from llms.llm import get_model
27
  from langchain.chains import ConversationalRetrievalChain
28
  # from conversationBufferWindowMemory import ConversationBufferWindowMemory
29
 
30
  # from langchain.prompts import PromptTemplate
31
  from langchain.chains import LLMChain
32
- from prompts.document_combine import document_combine_prompt
33
- from prompts.retrieval import retrieval_qa_chain_prompt
34
- from prompts.general import general_qa_chain_prompt
35
- from prompts.router import router_prompt
36
 
37
 
38
  def get_qa_chain(model_type,retriever):
 
23
 
24
  verbose = os.environ.get('VERBOSE')
25
 
26
+ from reggpt.llms.llm import get_model
27
  from langchain.chains import ConversationalRetrievalChain
28
  # from conversationBufferWindowMemory import ConversationBufferWindowMemory
29
 
30
  # from langchain.prompts import PromptTemplate
31
  from langchain.chains import LLMChain
32
+ from reggpt.prompts.document_combine import document_combine_prompt
33
+ from reggpt.prompts.retrieval import retrieval_qa_chain_prompt
34
+ from reggpt.prompts.general import general_qa_chain_prompt
35
+ from reggpt.prompts.router import router_prompt
36
 
37
 
38
  def get_qa_chain(model_type,retriever):
reggpt/controller/agent.py CHANGED
@@ -9,10 +9,10 @@ load_dotenv()
9
 
10
  verbose = os.environ.get('VERBOSE')
11
 
12
- from controller.router import run_router_chain
13
- from routers.out_of_domain import run_out_of_domain_chain
14
- from routers.general import run_general_qa_chain
15
- from routers.qa import run_qa_chain
16
 
17
  def chain_selector(chain_type, query):
18
  chain_type = chain_type.lower().strip()
 
9
 
10
  verbose = os.environ.get('VERBOSE')
11
 
12
+ from reggpt.controller.router import run_router_chain
13
+ from reggpt.routers.out_of_domain import run_out_of_domain_chain
14
+ from reggpt.routers.general import run_general_qa_chain
15
+ from reggpt.routers.qa import run_qa_chain
16
 
17
  def chain_selector(chain_type, query):
18
  chain_type = chain_type.lower().strip()
reggpt/controller/router.py CHANGED
@@ -18,9 +18,9 @@ import time
18
  import logging
19
  logger = logging.getLogger(__name__)
20
 
21
- from chains.llmChain import get_router_chain
22
 
23
- from configs.model import ROUTER_MODEL_TYPE
24
 
25
 
26
  router_model_type=ROUTER_MODEL_TYPE
 
18
  import logging
19
  logger = logging.getLogger(__name__)
20
 
21
+ from reggpt.chains.llmChain import get_router_chain
22
 
23
+ from reggpt.configs.model import ROUTER_MODEL_TYPE
24
 
25
 
26
  router_model_type=ROUTER_MODEL_TYPE
reggpt/llms/__init__.py ADDED
File without changes
reggpt/retriever/__init__.py ADDED
File without changes
reggpt/retriever/multi_query_retriever.py CHANGED
@@ -36,7 +36,7 @@ import pandas as pd
36
 
37
  logger = logging.getLogger(__name__)
38
 
39
- from prompts.multi_query import MULTY_QUERY_PROMPT
40
 
41
  class LineListOutputParser(BaseOutputParser[List[str]]):
42
  """Output parser for a list of lines."""
 
36
 
37
  logger = logging.getLogger(__name__)
38
 
39
+ from reggpt.prompts.multi_query import MULTY_QUERY_PROMPT
40
 
41
  class LineListOutputParser(BaseOutputParser[List[str]]):
42
  """Output parser for a list of lines."""
reggpt/routers/controller.py CHANGED
@@ -16,13 +16,13 @@
16
 
17
  import logging
18
  logger = logging.getLogger(__name__)
19
- from configs.model import AVALIABLE_MODELS , MEMORY_WINDOW_K
20
 
21
  # from qaPipeline import QAPipeline
22
  # from qaPipeline_retriever_only import QAPipeline
23
  # qaPipeline = QAPipeline()
24
 
25
- from controller.agent import run_agent
26
 
27
  def get_QA_Answers(userQuery):
28
  # model=userQuery.model
 
16
 
17
  import logging
18
  logger = logging.getLogger(__name__)
19
+ from reggpt.configs.model import AVALIABLE_MODELS , MEMORY_WINDOW_K
20
 
21
  # from qaPipeline import QAPipeline
22
  # from qaPipeline_retriever_only import QAPipeline
23
  # qaPipeline = QAPipeline()
24
 
25
+ from reggpt.controller.agent import run_agent
26
 
27
  def get_QA_Answers(userQuery):
28
  # model=userQuery.model
reggpt/routers/general.py CHANGED
@@ -3,10 +3,10 @@ import time
3
  import logging
4
  logger = logging.getLogger(__name__)
5
 
6
- from chains.llmChain import get_general_qa_chain
7
- from output_parsers.output_parser import general_qa_chain_output_parser
8
 
9
- from configs.model import GENERAL_QA_MODEL_TYPE
10
 
11
 
12
 
 
3
  import logging
4
  logger = logging.getLogger(__name__)
5
 
6
+ from reggpt.chains.llmChain import get_general_qa_chain
7
+ from reggpt.output_parsers.output_parser import general_qa_chain_output_parser
8
 
9
+ from reggpt.configs.model import GENERAL_QA_MODEL_TYPE
10
 
11
 
12
 
reggpt/routers/out_of_domain.py CHANGED
@@ -1,4 +1,4 @@
1
- from output_parsers.output_parser import out_of_domain_chain_parser
2
 
3
  def run_out_of_domain_chain(query):
4
  return out_of_domain_chain_parser(query)
 
1
+ from reggpt.output_parsers.output_parser import out_of_domain_chain_parser
2
 
3
  def run_out_of_domain_chain(query):
4
  return out_of_domain_chain_parser(query)
reggpt/routers/qa.py CHANGED
@@ -19,11 +19,11 @@ import time
19
  import logging
20
  logger = logging.getLogger(__name__)
21
  from dotenv import load_dotenv
22
- from chains.llmChain import get_qa_chain
23
- from output_parsers.output_parser import qa_chain_output_parser
24
 
25
- from configs.model import QA_MODEL_TYPE
26
- from utils.retriever import load_ensemble_retriever
27
  load_dotenv()
28
 
29
  verbose = os.environ.get('VERBOSE')
 
19
  import logging
20
  logger = logging.getLogger(__name__)
21
  from dotenv import load_dotenv
22
+ from reggpt.chains.llmChain import get_qa_chain
23
+ from reggpt.output_parsers.output_parser import qa_chain_output_parser
24
 
25
+ from reggpt.configs.model import QA_MODEL_TYPE
26
+ from reggpt.utils.retriever import load_ensemble_retriever
27
  load_dotenv()
28
 
29
  verbose = os.environ.get('VERBOSE')
reggpt/routers/qaPipeline.py CHANGED
@@ -20,11 +20,10 @@ import logging
20
  logger = logging.getLogger(__name__)
21
  from dotenv import load_dotenv
22
  from fastapi import HTTPException
23
- from chains.llmChain import get_qa_chain, get_general_qa_chain, get_router_chain
24
- from output_parsers.output_parser import general_qa_chain_output_parser, qa_chain_output_parser, out_of_domain_chain_parser
25
 
26
- from configs.model import QA_MODEL_TYPE, GENERAL_QA_MODEL_TYPE, ROUTER_MODEL_TYPE, Multi_Query_MODEL_TYPE
27
- from utils.retriever import load_faiss_retriever, load_ensemble_retriever, load_multi_query_retriever
28
  load_dotenv()
29
 
30
  verbose = os.environ.get('VERBOSE')
 
20
  logger = logging.getLogger(__name__)
21
  from dotenv import load_dotenv
22
  from fastapi import HTTPException
23
+ from reggpt.chains.llmChain import get_qa_chain, get_general_qa_chain, get_router_chain
 
24
 
25
+ from reggpt.configs.model import QA_MODEL_TYPE, GENERAL_QA_MODEL_TYPE, ROUTER_MODEL_TYPE, Multi_Query_MODEL_TYPE
26
+ from reggpt.utils.retriever import load_ensemble_retriever
27
  load_dotenv()
28
 
29
  verbose = os.environ.get('VERBOSE')
reggpt/utils/retriever.py CHANGED
@@ -23,7 +23,7 @@ multiple retrievers by using weighted Reciprocal Rank Fusion
23
  import logging
24
  logger = logging.getLogger(__name__)
25
 
26
- from vectorstores.faissDb import load_FAISS_store
27
 
28
  from langchain_community.retrievers import BM25Retriever
29
  from langchain_community.document_loaders import PyPDFLoader
@@ -74,11 +74,11 @@ def split_documents():
74
  for text in texts:
75
  splits_list.append(text)
76
 
77
- splitted_texts_file='../data/splitted_texts.jsonl'
78
  save_docs_to_jsonl(splits_list,splitted_texts_file)
79
 
80
- from retriever.ensemble_retriever import EnsembleRetriever
81
- from retriever.multi_query_retriever import MultiQueryRetriever
82
 
83
  def load_faiss_retriever():
84
  try:
@@ -97,7 +97,7 @@ def load_faiss_retriever():
97
  def load_ensemble_retriever():
98
  try:
99
  # splitted_texts_file=os.path.dirname(os.path.abspath(__file__).join('/data/splitted_texts.jsonl'))
100
- splitted_texts_file='./data/splitted_texts.jsonl'
101
  sementic_k = 4
102
  bm25_k = 2
103
  splits_list = load_docs_from_jsonl(splitted_texts_file)
@@ -118,7 +118,7 @@ def load_ensemble_retriever():
118
  logger.exception(e)
119
  raise e
120
 
121
- from llms.llm import get_model
122
 
123
  def load_multi_query_retriever(multi_query_model_type):
124
  #multi query
 
23
  import logging
24
  logger = logging.getLogger(__name__)
25
 
26
+ from reggpt.vectorstores.faissDb import load_FAISS_store
27
 
28
  from langchain_community.retrievers import BM25Retriever
29
  from langchain_community.document_loaders import PyPDFLoader
 
74
  for text in texts:
75
  splits_list.append(text)
76
 
77
+ splitted_texts_file='../reggpt/data/splitted_texts.jsonl'
78
  save_docs_to_jsonl(splits_list,splitted_texts_file)
79
 
80
+ from reggpt.retriever.ensemble_retriever import EnsembleRetriever
81
+ from reggpt.retriever.multi_query_retriever import MultiQueryRetriever
82
 
83
  def load_faiss_retriever():
84
  try:
 
97
  def load_ensemble_retriever():
98
  try:
99
  # splitted_texts_file=os.path.dirname(os.path.abspath(__file__).join('/data/splitted_texts.jsonl'))
100
+ splitted_texts_file='./reggpt/data/splitted_texts.jsonl'
101
  sementic_k = 4
102
  bm25_k = 2
103
  splits_list = load_docs_from_jsonl(splitted_texts_file)
 
118
  logger.exception(e)
119
  raise e
120
 
121
+ from reggpt.llms.llm import get_model
122
 
123
  def load_multi_query_retriever(multi_query_model_type):
124
  #multi query
reggpt/vectorstores/faissDb.py CHANGED
@@ -31,7 +31,7 @@ chunk_size=2000
31
  chunk_overlap=100
32
 
33
  embeddings_model_name = "BAAI/bge-large-en-v1.5"
34
- persist_directory = "./vectorstores/faiss_embeddings_2024"
35
 
36
  from langchain_community.embeddings import HuggingFaceEmbeddings
37
  embeddings = HuggingFaceEmbeddings(model_name=embeddings_model_name)
 
31
  chunk_overlap=100
32
 
33
  embeddings_model_name = "BAAI/bge-large-en-v1.5"
34
+ persist_directory = "./reggpt/vectorstores/faiss_embeddings_2024"
35
 
36
  from langchain_community.embeddings import HuggingFaceEmbeddings
37
  embeddings = HuggingFaceEmbeddings(model_name=embeddings_model_name)