Sidoine committed
Commit 4d541cc · verified · 1 Parent(s): 9ceb97f

Delete app.py

Files changed (1)
  1. app.py +0 -79
app.py DELETED
@@ -1,79 +0,0 @@
- import streamlit as st
- import json
- import time
- import requests
- from langchain.chains import LLMChain
- from langchain_core.prompts import ChatPromptTemplate, HumanMessagePromptTemplate, MessagesPlaceholder
- from langchain_core.messages import SystemMessage
- from langchain.chains.conversation.memory import ConversationBufferWindowMemory
- from langchain_groq import ChatGroq
-
-
- # Change the application's logo and title
- st.set_page_config(page_title="LOG-CHAT", page_icon="BEAC.jpg", layout="centered", menu_items=None)
- st.image("BEAC.jpg")
- # Loading page: pick the view from the URL query parameters
- query_params = st.experimental_get_query_params()
- page = query_params.get("page", ["chatbot"])[0]
-
-
- st.markdown('<div class="content">', unsafe_allow_html=True)
-
-
- if page == "chatbot":
-     st.header("LOG-CHAT")
-
- def main():
-     groq_api_key = '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'  # Use environment variables or secrets management for API keys
-     st.markdown('<div id="chatbot"></div>', unsafe_allow_html=True)
-
-     system_prompt = st.text_input("System prompt:", "You are a helpful assistant.")
-     model = st.selectbox('Choose a model', ['llama3-8b-8192', 'mixtral-8x7b-32768', 'gemma-7b-it'])
-     conversational_memory_length = st.slider('Conversational memory length:', 1, 10, value=5)
-
-     memory = ConversationBufferWindowMemory(k=conversational_memory_length, memory_key="chat_history", return_messages=True)
-
-     user_question = st.text_input("Ask me a question:")
-     send_question_to_ai = st.button("Send")
-
-     if 'chat_history' not in st.session_state:
-         st.session_state.chat_history = []
-     else:
-         for message in st.session_state.chat_history:
-             memory.save_context({'input': message['human']}, {'output': message['AI']})
-
-     groq_chat = ChatGroq(groq_api_key=groq_api_key, model_name=model)
-
-     if send_question_to_ai:
-         prompt = ChatPromptTemplate.from_messages(
-             [
-                 SystemMessage(content=system_prompt),
-                 MessagesPlaceholder(variable_name="chat_history"),
-                 HumanMessagePromptTemplate.from_template("{human_input}")
-             ]
-         )
-
-         conversation = LLMChain(
-             llm=groq_chat,
-             prompt=prompt,
-             verbose=True,
-             memory=memory
-         )
-
-         response = conversation.predict(human_input=user_question)
-         message = {'human': user_question, 'AI': response}
-         st.session_state.chat_history.append(message)
-         st.write("chatbot:", response)
-
- if __name__ == "__main__":
-     main()
-
-
-
- # Footer
- st.markdown("""
- <footer class="footer">
-     <p>Contact us: <a href="mailto:[email protected]">[email protected]</a></p>
-     <p>© 2024 Your Company. All rights reserved.</p>
- </footer>
- """, unsafe_allow_html=True)