Commit b918d4f
Parent(s): b0d38d0
Create app.py
app.py (new file)
import os

import streamlit as st
from langchain import HuggingFaceHub, PromptTemplate, LLMChain

# AI assistant setup and configuration.
# Read the Hugging Face API token from the environment; never hardcode
# secrets in source files.
api_token = os.environ['HUGGINGFACEHUB_API_TOKEN']

model_id = 'tiiuae/falcon-7b-instruct'
falcon_llm = HuggingFaceHub(huggingfacehub_api_token=api_token,
                            repo_id=model_id,
                            model_kwargs={"temperature": 0.8, "max_new_tokens": 2000})

template = """
You are an AI assistant that provides helpful answers to user queries.

{conversation}
"""

prompt = PromptTemplate(template=template, input_variables=['conversation'])

falcon_chain = LLMChain(llm=falcon_llm,
                        prompt=prompt,
                        verbose=True)


# Define the Streamlit app
def main():
    st.title("Mouli's AI Assistant")

    # Initialize conversation history from session state (empty on first run)
    conversation_history = st.session_state.get("conversation_history", [])

    # Input box for the user's message
    user_message = st.text_input("Your message:")

    # If the user's message is not empty, process it.
    # Note: Streamlit reruns the whole script on every interaction, so a bare
    # text_input can append the same message more than once; wrapping the
    # input in st.form (or using st.chat_input) would avoid that.
    if user_message:
        # Add the user's message to the conversation history
        conversation_history.append(("User", user_message))

        # Flatten the history into a single prompt for the model
        conversation_input = "\n".join(
            f"{author}: {message}" for author, message in conversation_history
        )

        # Generate a response based on the conversation so far
        response = falcon_chain.run(conversation_input)

        # Add the AI's response to the conversation history
        conversation_history.append(("AI", response))

        # Persist the updated conversation history in session state
        st.session_state.conversation_history = conversation_history

    # Display the conversation history
    display_conversation(conversation_history)


def display_conversation(conversation_history):
    # Stack messages vertically, with user messages aligned left and AI
    # messages aligned right (align-self is the valid CSS property for this
    # in a flex column).
    st.markdown("<style>.message-container { display: flex; flex-direction: column; padding: 16px; }</style>", unsafe_allow_html=True)
    st.markdown("<style>.user-message { align-self: flex-start; background-color: green; padding: 8px; border-radius: 8px; margin: 4px; }</style>", unsafe_allow_html=True)
    st.markdown("<style>.ai-message { align-self: flex-end; background-color: black; padding: 8px; border-radius: 8px; margin: 4px; }</style>", unsafe_allow_html=True)

    st.markdown("<div class='message-container'>", unsafe_allow_html=True)

    for author, message in conversation_history:
        if author == "AI":
            st.markdown(f"<div class='ai-message'>{message}</div>", unsafe_allow_html=True)
        else:
            st.markdown(f"<div class='user-message'>{message}</div>", unsafe_allow_html=True)

    st.markdown("</div>", unsafe_allow_html=True)


if __name__ == "__main__":
    main()
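
To try the app locally, a minimal sketch (the package list is an assumption inferred from the imports above; the commit itself ships no requirements.txt):

    pip install streamlit langchain huggingface_hub
    export HUGGINGFACEHUB_API_TOKEN=<your Hugging Face token>
    streamlit run app.py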