PrabhakarVenkat commited on
Commit
064fadb
·
verified ·
1 Parent(s): 8edae4d

Upload 2 files

Browse files
Files changed (2) hide show
  1. app.py +51 -0
  2. requirements.txt +4 -0
app.py ADDED
@@ -0,0 +1,51 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import streamlit as st
import os
import random
from groq import Groq
from langchain.chains import ConversationChain
from langchain.chains.conversation.memory import ConversationBufferMemory
from langchain_groq import ChatGroq
from langchain.prompts import PromptTemplate

# FIX: the original hard-coded the literal string "GROQ_API_KEY" as the key,
# which would be sent verbatim to the Groq API and always fail auth (and
# hard-coding real secrets in source is worse). Read it from the environment
# instead; set the GROQ_API_KEY env var before launching the app.
groq_api_key = os.environ.get("GROQ_API_KEY", "")
12
def main():
    """Render the Groq chatbot UI and process one chat turn per rerun.

    Side effects: draws Streamlit widgets, reads/writes
    ``st.session_state.chat_history`` (list of ``{"human": ..., "AI": ...}``
    dicts), and calls the Groq API via LangChain when a question is entered.
    """
    st.title("Groq Chatbot")
    st.sidebar.title("Select an LLM")
    model = st.sidebar.selectbox(
        'Choose a model',
        ['Mixtral-8x7b-32768', 'llama2-70b-4096']
    )

    conversational_memory_length = st.sidebar.slider('Conversational Memory Length:', 1, 10, value=5)

    # BUG FIX: ConversationBufferMemory has no `k` parameter, so the slider
    # above had no effect. ConversationBufferWindowMemory is the class that
    # honors `k` by keeping only the last k exchanges. Imported locally so
    # this function is self-contained.
    from langchain.chains.conversation.memory import ConversationBufferWindowMemory
    memory = ConversationBufferWindowMemory(k=conversational_memory_length)

    user_question = st.text_area("Ask a question...")

    # Streamlit reruns the script on every interaction, so the persisted
    # history must be replayed into the fresh memory object each run.
    if 'chat_history' not in st.session_state:
        st.session_state.chat_history = []
    else:
        for message in st.session_state.chat_history:
            memory.save_context({'input': message['human']}, {'output': message['AI']})

    groq_chat = ChatGroq(
        groq_api_key=groq_api_key,
        model_name=model
    )

    conversation = ConversationChain(
        llm=groq_chat,
        memory=memory
    )

    if user_question:
        response = conversation(user_question)
        message = {"human": user_question, "AI": response['response']}
        st.session_state.chat_history.append(message)
        st.write("ChatBot:", response['response'])
50
# Script entry point: run the Streamlit app only when executed directly
# (e.g. via `streamlit run app.py`), not when imported as a module.
if __name__ == "__main__":
    main()
requirements.txt ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ groq
2
+ langchain
3
+ langchain-groq
4
+ streamlit