import os

import openai
from langchain.chains import RetrievalQA
from langchain.chat_models import ChatOpenAI
from langchain.vectorstores.pinecone import Pinecone

# Read once at import time; missing key yields None (no error until first API call).
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")


def get_chain(vectorstore: Pinecone) -> RetrievalQA:
    """Build a question-answering chain over the given Pinecone vector store.

    Wires a deterministic (temperature=0) ``gpt-3.5-turbo`` chat model into a
    ``RetrievalQA`` chain using the "stuff" strategy (all retrieved documents
    are stuffed into a single prompt).

    Args:
        vectorstore: Pinecone vector store whose retriever supplies context.

    Returns:
        A ``RetrievalQA`` chain configured with ``return_source_documents=True``,
        so calls yield both the answer and the source documents used.
    """
    # NOTE(review): mutating global openai module state is a side effect kept
    # for backward compatibility — ChatOpenAI also reads OPENAI_API_KEY from
    # the environment itself, so this line is presumably redundant; confirm
    # before removing.
    openai.api_key = OPENAI_API_KEY
    llm = ChatOpenAI(temperature=0, model_name="gpt-3.5-turbo")
    return RetrievalQA.from_chain_type(
        llm=llm,
        chain_type="stuff",
        retriever=vectorstore.as_retriever(),
        return_source_documents=True,
    )