Update app.py
Browse files
app.py
CHANGED
@@ -1,14 +1,8 @@
|
|
1 |
import os
|
2 |
import gradio as gr
|
3 |
-
|
4 |
-
|
5 |
-
from langchain_openai import ChatOpenAI
|
6 |
-
# from langchain import PromptTemplate
|
7 |
-
from langchain_core.prompts import PromptTemplate
|
8 |
-
# from langchain.chains import LLMChain
|
9 |
-
from langchain_core.runnables import RunnableSequence
|
10 |
from langchain.memory import ConversationBufferMemory
|
11 |
-
from langchain.chains import ConversationChain
|
12 |
|
13 |
OPENAI_API_KEY=os.getenv('OPENAI_API_KEY')
|
14 |
|
@@ -83,20 +77,26 @@ prompt = PromptTemplate(
|
|
83 |
)
|
84 |
|
85 |
memory = ConversationBufferMemory(memory_key="chat_history")
|
|
|
|
|
|
|
|
|
|
|
|
|
86 |
|
87 |
-
llm = ChatOpenAI(temperature=0.5, model_name="gpt-3.5-turbo")
|
88 |
-
|
89 |
-
llm_chain = ConversationChain(llm=llm,verbose=True,memory=memory)
|
90 |
|
91 |
def get_text_response(user_message,history):
|
92 |
-
|
93 |
response = llm_chain.predict(user_message = user_message)
|
94 |
-
|
95 |
return response
|
96 |
|
97 |
-
theme =gr.themes.Default(primary_hue=
|
|
|
|
|
|
|
|
|
|
|
98 |
demo = gr.ChatInterface(get_text_response,theme=theme,
|
99 |
-
chatbot= gr.Chatbot(bubble_full_width=False,label='Kum bot',show_label=True,height=350,show_share_button=False,
|
100 |
avatar_images=('https://res.cloudinary.com/dtbarluca/image/upload/v1692694826/user_1177568_mmmdi6.png','https://res.cloudinary.com/dtbarluca/image/upload/v1690875247/hlogo.ico_nqdhd6.png')),examples=["Where did you learn this techonologies?","What are your interests?","Which places do you like to visit?","Your greatest Achievements?","how can connect to you through linkedin?"])
|
101 |
if __name__ == "__main__":
|
102 |
demo.launch()
|
|
|
1 |
import os
|
2 |
import gradio as gr
|
3 |
+
from langchain.chat_models import ChatOpenAI
|
4 |
+
from langchain import LLMChain, PromptTemplate
|
|
|
|
|
|
|
|
|
|
|
5 |
from langchain.memory import ConversationBufferMemory
|
|
|
6 |
|
7 |
# Read the OpenAI key from the environment; ChatOpenAI picks it up implicitly.
OPENAI_API_KEY = os.getenv('OPENAI_API_KEY')
|
8 |
|
|
|
77 |
)
|
78 |
|
79 |
# Shared conversation buffer; past turns are exposed to the prompt under
# the "chat_history" variable (must match the PromptTemplate's input name).
memory = ConversationBufferMemory(memory_key="chat_history")
|
80 |
+
# Conversation chain: the module-level prompt template + a GPT-3.5 chat model,
# with the shared buffer memory so replies can reference earlier turns.
# verbose=True logs each fully-rendered prompt, useful while developing.
llm_chain = LLMChain(
    # Fix: temperature was passed as the string '0.5'; the API expects a float.
    llm=ChatOpenAI(temperature=0.5, model_name="gpt-3.5-turbo"),
    prompt=prompt,
    verbose=True,
    memory=memory,
)
|
86 |
|
|
|
|
|
|
|
87 |
|
88 |
def get_text_response(user_message, history):
    """Return the chatbot's reply to *user_message* via the shared LLM chain.

    *history* is supplied by gr.ChatInterface but is intentionally unused:
    conversation state lives in the chain's own memory buffer.
    """
    return llm_chain.predict(user_message=user_message)
|
91 |
|
92 |
+
# Start from Gradio's default theme tinted blue, then make primary buttons
# slightly lighter on hover.
_base_theme = gr.themes.Default(primary_hue="blue")
theme = _base_theme.set(
    button_primary_background_fill="*primary_400",
    button_primary_background_fill_hover="*primary_300",
)
|
98 |
# Chat UI: custom avatar images (user, bot), no share/undo/clear/retry
# controls, and canned example prompts shown beneath the input box.
demo = gr.ChatInterface(
    get_text_response,
    theme=theme,
    chatbot=gr.Chatbot(
        bubble_full_width=False,
        label='Kum bot',
        show_label=True,
        height=350,
        show_share_button=False,
        undo_btn=None,
        clear_btn=None,
        retry_btn=None,
        avatar_images=(
            'https://res.cloudinary.com/dtbarluca/image/upload/v1692694826/user_1177568_mmmdi6.png',
            'https://res.cloudinary.com/dtbarluca/image/upload/v1690875247/hlogo.ico_nqdhd6.png',
        ),
    ),
    # Fix: user-facing typos ("this techonologies", "how can connect to you").
    examples=[
        "Where did you learn these technologies?",
        "What are your interests?",
        "Which places do you like to visit?",
        "Your greatest Achievements?",
        "How can I connect with you on LinkedIn?",
    ],
)
|
101 |
# Launch the Gradio server only when run as a script (not when imported).
if __name__ == "__main__":
    demo.launch()
|