# experto_b18 / app.py
# NOTE(review): the lines below were Hugging Face file-viewer page chrome
# accidentally pasted into the source. They are not Python and raised a
# SyntaxError at import time; kept here as comments for provenance.
# jdmorzan's picture / Update app.py / 36561d9 verified / raw / history blame / 3.29 kB
from langchain.chains import ConversationalRetrievalChain
from langchain.memory import ConversationBufferMemory, ConversationSummaryMemory
from langchain.chat_models import ChatOpenAI
from langsmith import traceable
from langchain.text_splitter import CharacterTextSplitter, RecursiveCharacterTextSplitter
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.vectorstores import Chroma
import os
import sys
from langchain.schema import (
AIMessage,
HumanMessage,
SystemMessage
)
from langchain.prompts import ChatPromptTemplate
from langchain.prompts.chat import SystemMessagePromptTemplate, HumanMessagePromptTemplate
#embeddings = HuggingFaceEmbeddings(model_name='sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2')
# Embedding model used both to index and to query the vector store.
# Reads the OpenAI API key from the environment (OPENAI_API_KEY).
embeddings = OpenAIEmbeddings()
#vectordb=Chroma.from_documents(document_chunks,embedding=embeddings, persist_directory='./ai_vocacional_v2')
# Load an already-persisted Chroma index from disk; the commented line above
# shows how it was originally built from document chunks.
vectordb = Chroma(persist_directory="./ai_vocacional_v2", embedding_function=embeddings)
# Deterministic (temperature=0) chat model used for answering.
llm=ChatOpenAI(temperature=0, model_name='gpt-4o-mini')
# Conversation state shared across calls to the QA chain; 'chat_history' is
# the key ConversationalRetrievalChain expects.
memory = ConversationBufferMemory(
    memory_key='chat_history',
    return_messages=True)
# System prompt (Spanish): a career-orientation assistant that must answer
# only from the retrieved {context} documents.
general_system_template = r"""
Eres el Orientador de carreras. Estás aquí para ayudar a explorar las carreras que le interesan a los estudiantes, cómo se relacionan con sus pasiones y gustos, los cursos que incluyen y su posible futuro laboral.
Toma los siguientes documentos de contexto {context} y responde únicamente basado en este contexto.
"""
general_user_template = "Pregunta:```{question}```"
messages = [
            SystemMessagePromptTemplate.from_template(general_system_template),
            HumanMessagePromptTemplate.from_template(general_user_template)
]
# Prompt injected into the chain's combine-docs step (see pdf_qa below).
qa_prompt = ChatPromptTemplate.from_messages( messages )
# Cache for the retrieval chain: the original code rebuilt the whole
# ConversationalRetrievalChain on every query, which wastes work — the chain
# is pure wiring over the same module-level llm/vectordb/memory, so building
# it once yields identical behavior.
_qa_chain = None

@traceable
def pdf_qa(query):
    """Answer one question against the persisted Chroma index.

    Args:
        query: the user's question (plain string).

    Returns:
        The chain's result dict; callers read result["answer"].

    Conversation history is carried by the shared ConversationBufferMemory,
    so only the new question is passed in.
    """
    global _qa_chain
    if _qa_chain is None:
        _qa_chain = ConversationalRetrievalChain.from_llm(
            llm=llm,
            # k=16 retrieved chunks per query — generous context window use.
            retriever=vectordb.as_retriever(search_kwargs={'k': 16}),
            combine_docs_chain_kwargs={'prompt': qa_prompt},
            memory=memory,  # max_tokens_limit=4000 was considered and left off
        )
    return _qa_chain({"question": query})
import gradio as gr

# Gradio UI: a chatbot seeded with a Spanish greeting, a textbox for the
# question, and a clear button. Chat memory for the LLM lives inside the
# chain's ConversationBufferMemory; the Chatbot component only holds the
# visible (question, answer) pairs.
with gr.Blocks() as demo:
    chatbot = gr.Chatbot(value=[[None,'''
    ¡Hola! Soy tu Orientador de carreras. Estoy aquí para ayudarte a explorar las carreras que te interesan, cómo se relacionan con tus pasiones y gustos, los cursos que incluyen y tu posible futuro laboral.
    '''
    ]])
    msg = gr.Textbox()
    clear = gr.Button("Clear")

    def user(query, chat_history):
        """Handle one user turn: run the QA chain, append (question, answer)
        to the visible history, and clear the textbox.

        Fixes vs. original: the call `pdf_qa(query))` had an unbalanced
        parenthesis (SyntaxError); the loop copying chat_history into
        `chat_history_tuples` was dead code (never passed to the chain,
        which uses its own memory) and is removed.
        """
        print("User query:", query)
        print("Chat history:", chat_history)
        result = pdf_qa(query)
        chat_history.append((query, result["answer"]))
        print("Updated chat history:", chat_history)
        return gr.update(value=""), chat_history

    msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False)
    clear.click(lambda: None, None, chatbot, queue=False)

if __name__ == "__main__":
    demo.launch()