import logging
import os

from langchain.chains.qa_with_sources.retrieval import RetrievalQAWithSourcesChain
from langchain.chat_models import ChatOpenAI

from src.config import Config


def load_model():
    """Build the chat model using the temperature and streaming settings from Config."""
    model = ChatOpenAI(
        temperature=Config.temperature,
        streaming=Config.streaming,
    )
    return model


def load_chain(docsearch):
    """Build a RetrievalQAWithSourcesChain over the given vector store.

    Uses the configured chat model with a "stuff" combine-documents step,
    retrieving from `docsearch` with a token budget of 4097.
    """
    # Use the model built from Config instead of instantiating a second,
    # hard-coded ChatOpenAI (the original assignment was otherwise unused).
    model = load_model()
    chain = RetrievalQAWithSourcesChain.from_chain_type(
        model,
        chain_type="stuff",
        retriever=docsearch.as_retriever(max_tokens_limit=4097),
    )
    return chain
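

# A minimal usage sketch, not part of the original module: it assumes an
# OPENAI_API_KEY is set and that chromadb is installed, and it builds a tiny
# in-memory Chroma index from two illustrative strings purely to exercise
# load_chain(). The example texts, metadata, and the Chroma/OpenAIEmbeddings
# choice are assumptions for demonstration, not project requirements.
if __name__ == "__main__":
    from langchain.embeddings.openai import OpenAIEmbeddings
    from langchain.vectorstores import Chroma

    # Throwaway vector store so the chain has something to retrieve from.
    docsearch = Chroma.from_texts(
        [
            "LangChain retrieval chains can return the sources they used.",
            "The 'stuff' chain type passes all retrieved documents to the model.",
        ],
        OpenAIEmbeddings(),
        metadatas=[{"source": "doc-1"}, {"source": "doc-2"}],
    )

    chain = load_chain(docsearch)
    # RetrievalQAWithSourcesChain expects a "question" key and returns a dict
    # containing "answer" and "sources".
    result = chain({"question": "What does the 'stuff' chain type do?"})
    print(result["answer"])
    print(result["sources"])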