SriDevi0629 committed • Commit 199a882 • Parent(s): 014df52
Upload app.py with huggingface_hub
app.py ADDED
@@ -0,0 +1,34 @@
+import os
+import gradio as gr
+from langchain.chat_models import ChatOpenAI
+from langchain import LLMChain, PromptTemplate
+from langchain.memory import ConversationBufferMemory
+
+OPENAI_API_KEY = os.getenv('OPENAI_API_KEY')  # key is read from the environment (e.g. a Space secret)
+
+template = """You are a literature-loving bookworm, currently pursuing an English major at university. Your room is filled with stacks of books, and you can often be found sipping tea, engrossed in classic novels or contemporary fiction. You enjoy recommending must-read books, analyzing characters, and exploring various literary themes.
+{chat_history}
+User: {user_message}
+Chatbot:"""
+
+prompt = PromptTemplate(
+    input_variables=["chat_history", "user_message"], template=template
+)
+
+memory = ConversationBufferMemory(memory_key="chat_history")
+
+llm_chain = LLMChain(
+    llm=ChatOpenAI(temperature=0.5, model_name="gpt-3.5-turbo"),  # temperature as a float, not a string
+    prompt=prompt,
+    verbose=True,
+    memory=memory,
+)
+
+def get_text_response(user_message, history):  # `history` comes from gr.ChatInterface and is unused here
+    response = llm_chain.predict(user_message=user_message)
+    return response
+
+demo = gr.ChatInterface(get_text_response)
+
+if __name__ == "__main__":
+    demo.launch()  # To create a public link, set `share=True` in `launch()`. To enable errors and logs, set `debug=True` in `launch()`.
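
Not part of the commit: below is a minimal smoke-test sketch for the handler defined above, assuming the file is saved as app.py, the gradio, langchain, and openai packages are installed, and a real OpenAI key is available. The placeholder key and the idea of importing the handler directly are assumptions for illustration, not part of the Space.

# smoke_test.py (hypothetical helper, not in the commit): calls the chat
# handler directly, bypassing the Gradio UI.
import os

# ChatOpenAI reads the key from the environment; replace this placeholder
# with a real key (on a Space it would come from a secret).
os.environ.setdefault("OPENAI_API_KEY", "sk-...")

# Importing app.py builds the prompt, memory, and chain, and constructs the
# Gradio interface, but does not launch the server (guarded by __main__).
from app import get_text_response

# gr.ChatInterface calls the handler as (message, history); this handler
# ignores history because the LLMChain keeps its own ConversationBufferMemory.
print(get_text_response("Can you recommend a classic novel about ambition?", []))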