from dataclasses import dataclass
from typing import Literal
import streamlit as st
from langchain_pinecone.vectorstores import PineconeVectorStore
from langchain_huggingface import HuggingFaceEmbeddings, HuggingFaceEndpoint
from langchain.prompts import PromptTemplate
from pinecone import Pinecone #, ServerlessSpec
from langchain_community.chat_message_histories import ChatMessageHistory
from langchain.memory import ConversationBufferMemory
from langchain.chains import ConversationalRetrievalChain
# from dotenv import load_dotenv
import os

# Load environment variables from the .env file
# load_dotenv()
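# Expected environment variables (read below; no defaults are set here):
#   PINECONE_API_KEY          - Pinecone client / vector store access
#   HUGGINGFACEHUB_API_TOKEN  - Hugging Face Inference endpoint access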

@dataclass
class Message:
    """Class for keeping track of a chat message."""
    origin: Literal["👤 Human", "👨🏻‍⚖️ AI"]
    message: str


def download_hugging_face_embeddings():
    """Load the sentence-transformers/all-MiniLM-L6-v2 embedding model."""
    embeddings = HuggingFaceEmbeddings(model_name='sentence-transformers/all-MiniLM-L6-v2')
    return embeddings


def initialize_session_state():
    """Create the chat history and the conversational retrieval chain once per session."""
    if "history" not in st.session_state:
        st.session_state.history = []
    if "conversation" not in st.session_state:
        embeddings = download_hugging_face_embeddings()
        # Connect to the existing "il-legal" Pinecone index used for retrieval.
        pc = Pinecone(api_key=os.getenv("PINECONE_API_KEY"))
        index = pc.Index("il-legal")
        docsearch = PineconeVectorStore.from_existing_index(index_name="il-legal", embedding=embeddings)
        
        repo_id = "mistralai/Mixtral-8x7B-Instruct-v0.1"
        llm = HuggingFaceEndpoint(
            repo_id=repo_id,
            huggingfacehub_api_token=os.getenv("HUGGINGFACEHUB_API_TOKEN"),
            temperature=0.5,
            top_k=10,
        )

        prompt_template = """
            You are a trained bot that guides people through the Illinois Criminal Law Statutes and the SAFE-T Act. Answer the user's query using your own knowledge and the context provided.
            If a question does not make sense or is not factually coherent, explain why instead of answering with something incorrect. If you don't know the answer to a question, do not share false information.
            Do not thank the user or announce that you are an AI assistant, and be open about everything.
            Use the following pieces of context to answer the user's question.
            Context: {context}
            Question: {question}
            Only return the helpful answer below and nothing else.
            Helpful answer:
            """

        PROMPT = PromptTemplate(
            template=prompt_template, 
            input_variables=["context", "question"])
        
        #chain_type_kwargs = {"prompt": PROMPT}
        message_history = ChatMessageHistory()
        memory = ConversationBufferMemory(
            memory_key="chat_history",
            output_key="answer",
            chat_memory=message_history,
            return_messages=True,
            )
        retrieval_chain = ConversationalRetrievalChain.from_llm(
            llm=llm,
            chain_type="stuff",
            # Pinecone metadata filter: only retrieve chunks whose 'source'
            # metadata equals the literal string 'user_id'.
            retriever=docsearch.as_retriever(
                search_kwargs={
                    'filter': {'source': 'user_id'},
                    }),
            return_source_documents=True,
            combine_docs_chain_kwargs={"prompt": PROMPT},
            memory=memory,
            )

        st.session_state.conversation = retrieval_chain


def on_click_callback():
    """Handle a form submission: run the chain and record both turns in the history."""
    human_prompt = st.session_state.human_prompt
    # Clear the input box (widget state may be updated from inside a callback).
    st.session_state.human_prompt = ""
    response = st.session_state.conversation.invoke(
        {"question": human_prompt}
    )
    llm_response = response["answer"]
    st.session_state.history.append(
        Message("👤 Human", human_prompt)
    )
    st.session_state.history.append(
        Message("👨🏻‍⚖️ AI", llm_response)
    )


# Build the per-session state (chat history + retrieval chain) before rendering the UI.
initialize_session_state()

st.title("IL-Legal Advisor Chatbot")

st.markdown(
    """
    👋 **Welcome to IL-Legal Advisor!**
    I can help with legal queries within the framework of Illinois criminal law, whether you're navigating a specific legal issue or looking for general information.

    📚 **How I Can Assist:**

    - Answer questions on various aspects of Illinois criminal law.
    - Guide you through legal processes relevant to Illinois.
    - Provide information on your rights and responsibilities under Illinois legal standards.

    ⚖️ **Disclaimer:**

    While I can provide general information, you may need to consult a qualified Illinois attorney for advice tailored to your specific situation.

    🤖 **Getting Started:**

    Ask any legal question related to Illinois law, using keywords like "pre-trial release," "motions," or "procedure."
    Let's get started! How may I help you today?
    """
)

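# Layout: the chat transcript renders in a container above a form that holds the
# input box and its Submit button (the button's on_click callback handles each question).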
chat_placeholder = st.container()
prompt_placeholder = st.form("chat-form")

with chat_placeholder:
    for chat in st.session_state.history:
        st.markdown(f"{chat.origin} : {chat.message}")

with prompt_placeholder:
    st.markdown("**Chat**")
    cols = st.columns((6, 1))
    cols[0].text_input(
        "Chat",
        label_visibility="collapsed",
        key="human_prompt",
    )
    cols[1].form_submit_button(
        "Submit",
        type="primary",
        on_click=on_click_callback,
    )