File size: 2,890 Bytes
b918d4f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
e49f323
b918d4f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
import streamlit as st
import os
from langchain import HuggingFaceHub, PromptTemplate, LLMChain

# --- AI assistant configuration --------------------------------------------
# SECURITY NOTE(review): the HuggingFace API token was previously hardcoded
# in this file. Prefer supplying it via the API_KEY environment variable; the
# inline value is kept only as a backward-compatible fallback and should be
# rotated and removed from source control.
os.environ['API_KEY'] = os.environ.get('API_KEY', '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')

# Instruction-tuned Falcon 7B served through the HuggingFace Inference API.
model_id = 'tiiuae/falcon-7b-instruct'
falcon_llm = HuggingFaceHub(huggingfacehub_api_token=os.environ['API_KEY'],
                            repo_id=model_id,
                            model_kwargs={"temperature": 0.8, "max_new_tokens": 2000})

# The whole running conversation is injected as a single {conversation} slot.
template = """
You are an AI assistant that provides helpful answers to user queries.
{conversation}
"""

prompt = PromptTemplate(template=template, input_variables=['conversation'])

falcon_chain = LLMChain(llm=falcon_llm,
                        prompt=prompt,
                        verbose=True)

# Define the Streamlit app
def main():
    """Render the chat UI: read one user message, run the LLM chain, show history.

    Conversation state is a list of ``(author, message)`` tuples persisted in
    ``st.session_state["conversation_history"]`` across Streamlit reruns.
    """
    st.title("Mouli's AI Assistant")

    # Pull prior turns from session state (empty list on first run).
    conversation_history = st.session_state.get("conversation_history", [])

    # Input box for the user's next message.
    user_message = st.text_input("Your message:")

    # Streamlit re-executes this script on every widget interaction, and
    # text_input keeps its value between reruns. Without this guard the same
    # message would be re-appended — and the LLM re-invoked — on every rerun.
    last_user_message = next(
        (msg for author, msg in reversed(conversation_history) if author == "user"),
        None,
    )

    if user_message and user_message != last_user_message:
        # Record the user's turn.
        conversation_history.append(("user", user_message))

        # Flatten the full history into the single prompt slot the chain expects.
        conversation_input = "\n".join(
            f"{author}: {message}" for author, message in conversation_history
        )

        # Generate the assistant's reply from the accumulated conversation.
        response = falcon_chain.run(conversation_input)

        # Record the assistant's turn and persist both across reruns.
        conversation_history.append(("AI", response))
        st.session_state.conversation_history = conversation_history

    # Always render the (possibly unchanged) history.
    display_conversation(conversation_history)

def display_conversation(conversation_history):
    """Render ``(author, message)`` pairs as styled chat bubbles.

    Messages from the ``"AI"`` author get the dark right-aligned style; all
    other authors get the green left-aligned style.
    """
    # BUG FIX(review): the original CSS used `align:`, which is not a valid
    # CSS property — the intended property is `text-align`. The three style
    # injections are also consolidated into a single call.
    st.markdown(
        """
        <style>
        .message-container { display: flex; flex-direction: row; padding: 16px; }
        .user-message { text-align: left; background-color: green; padding: 8px; border-radius: 8px; margin: 4px; }
        .ai-message { text-align: right; background-color: black; padding: 8px; border-radius: 8px; margin: 4px; }
        </style>
        """,
        unsafe_allow_html=True,
    )

    st.markdown("<div class='message-container'>", unsafe_allow_html=True)

    for author, message in conversation_history:
        css_class = "ai-message" if author == "AI" else "user-message"
        st.markdown(f"<div class='{css_class}'>{message}</div>", unsafe_allow_html=True)

    st.markdown("</div>", unsafe_allow_html=True)

if __name__ == "__main__":
    main()