Update app.py
app.py CHANGED
@@ -1,9 +1,12 @@
 import os
 import gradio as gr
 # from langchain.chat_models import ChatOpenAI
-from langchain_community.chat_models import ChatOpenAI
-from
-from langchain
+# from langchain_community.chat_models import ChatOpenAI
+from langchain_openai import ChatOpenAI
+# from langchain import PromptTemplate
+from langchain_core.prompts import PromptTemplate
+# from langchain.chains import LLMChain
+from langchain_core.runnables import RunnableSequence
 from langchain.memory import ConversationBufferMemory

 OPENAI_API_KEY=os.getenv('OPENAI_API_KEY')

@@ -80,18 +83,15 @@ prompt = PromptTemplate(

 memory = ConversationBufferMemory(memory_key="chat_history")

-
-
-
-verbose=True,
-memory=memory,
-)
+llm = ChatOpenAI(temperature=0.5, model_name="gpt-3.5-turbo")
+
+llm_chain = RunnableSequence(prompt | llm | memory)

 def get_text_response(user_message,history):
-    response = llm_chain.
+    response = llm_chain.run(user_message = user_message)
     return response

-theme =gr.themes.Default(primary_hue='
+theme =gr.themes.Default(primary_hue='purple')
 demo = gr.ChatInterface(get_text_response,clear_btn=None,retry_btn=None,stop_btn=None,undo_btn=None,theme=theme,
 chatbot= gr.Chatbot(bubble_full_width=False,label='Kum bot',show_label=True,height=350,show_share_button=False,
 avatar_images=('https://res.cloudinary.com/dtbarluca/image/upload/v1692694826/user_1177568_mmmdi6.png','https://res.cloudinary.com/dtbarluca/image/upload/v1690875247/hlogo.ico_nqdhd6.png')),examples=["Where did you learn this techonologies?","What are your interests?","Which places do you like to visit?","Your greatest Achievements?","how can connect to you through linkedin?"])
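For context on the import changes: ChatOpenAI and PromptTemplate now come from LangChain's split distributions rather than the monolithic langchain package, so the Space's dependencies have to include them. A rough sketch of what the migrated imports assume (the PyPI package names are the published ones; treating them as this Space's actual requirements.txt contents is an assumption):

# Packages the new imports resolve against:
#   pip install gradio langchain langchain-core langchain-openai
from langchain_openai import ChatOpenAI                # provided by langchain-openai
from langchain_core.prompts import PromptTemplate      # provided by langchain-core
from langchain_core.runnables import RunnableSequence  # provided by langchain-core
from langchain.memory import ConversationBufferMemory  # still in the legacy langchain package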
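On the new chain itself: ConversationBufferMemory is not a runnable, and composed runnables are called with .invoke() rather than .run(), so an equivalent pipe-style wiring would more likely look like the sketch below. This is a minimal sketch, assuming the PromptTemplate defined earlier in app.py takes {chat_history} and {user_message} as input variables (implied by memory_key="chat_history" and the run(user_message=...) call) and that OPENAI_API_KEY is set in the environment as above.

from langchain_openai import ChatOpenAI
from langchain_core.prompts import PromptTemplate
from langchain.memory import ConversationBufferMemory

# Stand-in template; the real one is defined earlier in app.py.
prompt = PromptTemplate.from_template(
    "Previous conversation:\n{chat_history}\n\nUser: {user_message}\nAssistant:"
)
llm = ChatOpenAI(temperature=0.5, model_name="gpt-3.5-turbo")
memory = ConversationBufferMemory(memory_key="chat_history")

# prompt | llm already is a RunnableSequence; memory stays outside the pipe.
llm_chain = prompt | llm

def get_text_response(user_message, history):
    # `history` is Gradio's own transcript for the ChatInterface; the LangChain
    # buffer memory keeps a separate one, so only the new message is passed in.
    past = memory.load_memory_variables({})["chat_history"]
    reply = llm_chain.invoke({"chat_history": past, "user_message": user_message})
    memory.save_context({"user_message": user_message}, {"output": reply.content})
    return reply.content

The gr.ChatInterface(get_text_response, ...) call can stay exactly as in the diff, since the callback keeps the same (user_message, history) signature and still returns a plain string.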