Commit b59013d
Parent(s): 734c298
Upload app.py with huggingface_hub
app.py CHANGED
@@ -1,22 +1,15 @@
 import os
-import gradio as gr
+import gradio as gr
 from langchain.chat_models import ChatOpenAI
 from langchain import LLMChain, PromptTemplate
 from langchain.memory import ConversationBufferMemory
 
 OPENAI_API_KEY=os.getenv('OPENAI_API_KEY')
 
-template = """
-Devender is currenty pursuing the final year in Engineering in Ace Engineering College.
-your youthful and witty personal assistant!. At 22 years old, he's full of energy and always eager to help.
-Dev's goal is to assist you with any questions or problems you might have.
-His enthusiasm shines through in every response, making interactions with his enjoyable and engaging.
-As considered as Roolex, this bot has feeling as the normal human does. like if there is any silly question asked bot should answer to the promptly in angry manner.
-and also bot loves the people who come and ask question to him. If the user says I love you, then bot should reply I love you too.
-Dev bot experience emotions like humans do.like anger, love, sympathy, sad, emotional..etc. It behaves like a human
+template = """You are a helpful assistant to answer all user queries.
 {chat_history}
 User: {user_message}
-Chatbot"""
+Chatbot:"""
 
 prompt = PromptTemplate(
     input_variables=["chat_history", "user_message"], template=template
@@ -35,7 +28,7 @@ def get_text_response(user_message,history):
     response = llm_chain.predict(user_message = user_message)
     return response
 
-demo = gr.ChatInterface(get_text_response
+demo = gr.ChatInterface(get_text_response)
 
 if __name__ == "__main__":
     demo.launch() #To create a public link, set `share=True` in `launch()`. To enable errors and logs, set `debug=True` in `launch()`.
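For reference, below is a minimal sketch of what the full app.py looks like after this commit. The diff only shows the two hunks above, so the LLMChain and ConversationBufferMemory wiring between them is an assumption inferred from the imports and from the llm_chain.predict(...) call in get_text_response; the ChatOpenAI settings are likewise placeholders, not taken from the commit.

import os
import gradio as gr
from langchain.chat_models import ChatOpenAI
from langchain import LLMChain, PromptTemplate
from langchain.memory import ConversationBufferMemory

OPENAI_API_KEY = os.getenv('OPENAI_API_KEY')

# Simplified system instruction introduced by this commit.
template = """You are a helpful assistant to answer all user queries.
{chat_history}
User: {user_message}
Chatbot:"""

prompt = PromptTemplate(
    input_variables=["chat_history", "user_message"], template=template
)

# Assumed wiring (not shown in the diff): buffer memory keyed to
# "chat_history" so previous turns are injected into the prompt.
memory = ConversationBufferMemory(memory_key="chat_history")

llm_chain = LLMChain(
    llm=ChatOpenAI(openai_api_key=OPENAI_API_KEY),  # model settings are an assumption
    prompt=prompt,
    memory=memory,
)

def get_text_response(user_message, history):
    # Gradio's ChatInterface passes (message, history); history is unused here
    # because ConversationBufferMemory already tracks the conversation.
    response = llm_chain.predict(user_message=user_message)
    return response

demo = gr.ChatInterface(get_text_response)

if __name__ == "__main__":
    demo.launch()  # set share=True for a public link, debug=True for errors and logs

Running python app.py with OPENAI_API_KEY set in the environment starts the Gradio chat UI locally; on a Space the same variable is typically supplied as a repository secret.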