File size: 688 Bytes
10330bc
 
 
99a3f34
10330bc
 
 
 
 
 
 
 
 
 
99a3f34
10330bc
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
from langchain.chains import RetrievalQAWithSourcesChain
from langchain.chat_models import ChatOpenAI
import logging
import os


from src.config import Config





def load_model():
    """Build the ChatOpenAI chat model configured from ``Config``.

    Temperature and streaming behaviour come from ``Config``; the API key is
    read from the ``OPENAI_API_KEY`` environment variable.

    Returns:
        A configured ``ChatOpenAI`` instance.
    """
    api_key = os.getenv('OPENAI_API_KEY')
    return ChatOpenAI(
        temperature=Config.temperature,
        streaming=Config.streaming,
        api_key=api_key,
    )


def load_chain(docsearch):
    """Assemble a ``RetrievalQAWithSourcesChain`` over *docsearch*.

    Args:
        docsearch: A vector store exposing ``as_retriever``; the retriever is
            capped at ``Config.max_token_limit`` tokens.

    Returns:
        A ``RetrievalQAWithSourcesChain`` backed by the model from
        ``load_model`` with chain type ``Config.chain_type``.
    """
    llm = load_model()
    retriever = docsearch.as_retriever(max_tokens_limit=Config.max_token_limit)
    return RetrievalQAWithSourcesChain.from_chain_type(
        llm,
        chain_type=Config.chain_type,
        retriever=retriever,
    )