import gradio as gr

from langchain.vectorstores import Chroma
from langchain.chains import RetrievalQA
from langchain.embeddings import HuggingFaceInstructEmbeddings
from langchain.llms import HuggingFacePipeline
from transformers import Tool  # Tool base class from the Transformers agents API

# Instructor embeddings for documents and queries
# (requires the InstructorEmbedding and sentence-transformers packages).
hf = HuggingFaceInstructEmbeddings(
    model_name="hkunlp/instructor-large",
    embed_instruction="Represent the document for retrieval: ",
    query_instruction="Represent the query for retrieval: ",
)

# Sample documents to index.
texts = [
    "The meaning of life is to love",
    "The meaning of vacation is to relax",
    "Roses are red.",
    "Hack the planet!",
]

# Build an in-memory Chroma vector store from the sample texts.
db = Chroma.from_texts(texts, hf, collection_name="my-collection")
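
# Quick check of retrieval on its own: a raw similarity search shows what the
# retriever hands to the chain; the query string is purely illustrative.
# print(db.similarity_search("What helps you relax?", k=1))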

# Load the gpt2-dolly model locally through a transformers pipeline.
llm = HuggingFacePipeline.from_model_id(
    model_id="lgaalves/gpt2-dolly",
    task="text-generation",
)

# RetrievalQA chain that "stuffs" the single best-matching document into the prompt.
docsearcher = RetrievalQA.from_chain_type(
    llm=llm,
    chain_type="stuff",
    return_source_documents=False,
    retriever=db.as_retriever(search_type="similarity", search_kwargs={"k": 1}),
)
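
# Optional sanity check: the chain can also be queried directly before it is
# wired into the UI; the question below is only an illustrative example.
# print(docsearcher.run("What is the meaning of life?"))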

# Gradio-compatible tool following the Transformers agents Tool interface
# (name, description, inputs, outputs and a __call__ method).
class VectorStoreRetrieverTool(Tool):
    name = "vectorstore_retriever"
    description = (
        "This tool uses LangChain's RetrievalQA to find relevant answers "
        "from a vector store based on a given query."
    )

    inputs = ["text"]
    outputs = ["text"]

    def __call__(self, query: str):
        # Run the RetrievalQA chain and return its answer as plain text.
        return docsearcher.run(query)

tool = gr.Interface(
    fn=VectorStoreRetrieverTool(),
    inputs="text",
    outputs="text",
    live=True,
    title="LangChain-Application: Vectorstore-Retriever",
    description="This tool uses LangChain's RetrievalQA to find relevant answers from a vector store based on a given query.",
)

tool.launch()
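
# When running locally (outside a hosted Space), a temporary public link can be
# created by passing Gradio's share flag instead of the plain launch() above:
# tool.launch(share=True)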