# RAG chatbot over the IFAL student manual (LangChain + Chroma + Gradio).
# OpenAI chat model and OpenAI embeddings.
import gradio as gr
from langchain.chains import RetrievalQA
from langchain.chat_models import init_chat_model
from langchain.text_splitter import CharacterTextSplitter
from langchain_community.document_loaders import UnstructuredMarkdownLoader
from langchain_community.embeddings import OpenAIEmbeddings
from langchain_community.vectorstores import Chroma
from langchain_core.documents import Document
# Chat model used to generate the final answer (requires OPENAI_API_KEY).
llm = init_chat_model("gpt-4o-mini", model_provider="openai")

# Load the student manual and split it into overlapping chunks so that
# each chunk fits comfortably in the prompt while preserving context.
loader = UnstructuredMarkdownLoader("manual.md")
documentos = loader.load()
text_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=200)
textos = text_splitter.split_documents(documentos)

# Embed the chunks and index them in an in-memory Chroma vector store.
embeddings = OpenAIEmbeddings()
db = Chroma.from_documents(textos, embeddings)
# Retrieve the 3 most similar chunks for each question.
retriever = db.as_retriever(search_kwargs={"k": 3})

# "stuff" chain type: all retrieved chunks are stuffed into one prompt.
qa_chain = RetrievalQA.from_chain_type(
    llm=llm,
    chain_type="stuff",
    retriever=retriever,
    verbose=True,
)
def consultar_base_conhecimento(pergunta, history):
    """Answer a question using the RetrievalQA chain over the manual.

    Args:
        pergunta: The user's question as a string (from the Gradio chat box).
        history: Chat history supplied by gr.ChatInterface; ignored here,
            so every question is answered statelessly.

    Returns:
        The chain's answer as a string.
    """
    # NOTE(review): qa_chain.run() is deprecated in newer LangChain releases
    # in favour of .invoke(); kept as-is to preserve behavior.
    resposta = qa_chain.run(pergunta)
    return resposta
# CSS injected into the Gradio page to hide the default footer.
# Fix: the original had a stray '"' after the last rule, which leaked a
# literal quote character into the injected stylesheet.
css = """
footer { display: none !important; }
.footer { display: none !important; }
.gradio-footer { display: none !important; }
"""
# Gradio chat UI wired to the retrieval function above.
demo = gr.ChatInterface(
    css=css,
    fn=consultar_base_conhecimento,
    title="Este chatbot responde perguntas com base no manual do aluno do IFAL",
    examples=[
        "O que você sabe?",
        "Quem é o reitor?",
        "Como funciona o processo de matrícula?",
        "Quais são as regras para aprovação nas disciplinas?",
    ],
)
# Start the Gradio server only when run as a script (not on import).
if __name__ == "__main__":
    demo.launch()