# LangChain + Gradio chatbot: forwards each user message to an OpenAI chat model.
from langchain_openai import ChatOpenAI  # requires the langchain-openai package
from langchain_core.prompts import PromptTemplate
from langchain.memory import ConversationBufferMemory
import gradio as gr

# Prompt that simply passes the user's message through to the model
prompt = PromptTemplate.from_template("{user_message}")

# Chat model; temperature 0.5 gives moderately varied replies
llm = ChatOpenAI(temperature=0.5, model_name="gpt-3.5-turbo")

# The | operator already builds a RunnableSequence (prompt -> llm),
# so no explicit RunnableSequence(...) wrapper or import is needed
runnable_sequence = prompt | llm

# Buffer that records the full conversation
# (ConversationBufferMemory is deprecated in recent LangChain releases but still works)
memory = ConversationBufferMemory()


def get_text_response(user_message, history):
    # Store the user's turn in the conversation buffer
    memory.chat_memory.add_user_message(user_message)

    # Runnables are executed with .invoke(); a chat model returns an AIMessage
    response = runnable_sequence.invoke({"user_message": user_message})

    # Store the assistant's turn and return plain text for Gradio to display
    memory.chat_memory.add_ai_message(response.content)
    return response.content
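
# A hedged sketch, not part of the original app: the chain above never reads what
# ConversationBufferMemory stores, so each reply is generated statelessly. If you
# wanted the model to see earlier turns, one option (names like contextual_prompt
# and contextual_chain are illustrative) would be a prompt with a {history} slot:
#
#     contextual_prompt = PromptTemplate.from_template(
#         "Conversation so far:\n{history}\n\nUser: {user_message}"
#     )
#     contextual_chain = contextual_prompt | llm
#     reply = contextual_chain.invoke(
#         {"history": memory.buffer, "user_message": user_message}
#     )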


# UI settings: theme name and button labels
# (the *_btn keyword arguments below are accepted by gr.ChatInterface in
# Gradio 4.x; Gradio 5 removed most of them)
theme = "default"
clear_btn = "Clear"
retry_btn = "Retry"
stop_btn = "Stop"
undo_btn = "Undo"

demo = gr.ChatInterface(
    get_text_response,
    clear_btn=clear_btn,
    retry_btn=retry_btn,
    stop_btn=stop_btn,
    undo_btn=undo_btn,
    theme=theme,
)


if __name__ == "__main__":
    demo.launch()