# Streamlit chatbot app ("CRETA") — Google Gemini via LangChain.
# Third-party stack: Streamlit UI, streamlit_chat message bubbles,
# the Google Gemini SDK, and LangChain glue for prompting.
import streamlit as st
import os
from streamlit_chat import message
import google.generativeai as genai
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain  # root-level `from langchain import LLMChain` is deprecated
from langchain_google_genai import ChatGoogleGenerativeAI

# Configure the Gemini SDK from the environment.
# Raises KeyError at startup if GOOGLE_API_KEY is not set — fail fast.
genai.configure(api_key=os.environ["GOOGLE_API_KEY"])

# Text chat model shared by the whole app. The original used
# "gemini-pro-vision", which is the multimodal variant and rejects
# text-only prompts; "gemini-pro" is the correct text-chat model.
llm = ChatGoogleGenerativeAI(model="gemini-pro",
                             temperature=0.7)
# Prompt: persona line, then the running transcript, then the new turn.
# {chat_history} and {human_input} are filled in on every predict() call.
# (Original persona text was garbled: "who give clear an well having a
# conversation with a human" — rewritten into coherent English.)
template = """You are a friendly chatbot called "CRETA" who gives clear and helpful answers while having a conversation with a human, and you were created by Suriya, an AI enthusiast.
previous_chat:
{chat_history}
Human: {human_input}
Chatbot:"""

prompt = PromptTemplate(
    input_variables=["chat_history", "human_input"], template=template
)

# Chain that renders the prompt and calls the Gemini model.
# verbose=True prints each fully-rendered prompt to stdout for debugging.
llm_chain = LLMChain(
    llm=llm,
    prompt=prompt,
    verbose=True,
)
# Last transcript sent to the model; kept as a module global for
# backward compatibility (and visible in debug output).
previous_response = ""

def conversational_chat(query):
    """Answer `query` using the LLM, with the session's chat transcript as context.

    Renders st.session_state['history'] (a list of (human, bot) tuples) into a
    flat transcript, asks `llm_chain` for a reply, records the new turn back
    into session state, and returns the model's answer string.
    """
    global previous_response
    # Rebuild the transcript from scratch on every call. The original code
    # appended to the never-reset global with +=, so each call re-added the
    # ENTIRE history and the prompt grew with duplicated lines every turn.
    previous_response = "".join(
        f"Human: {human}\n Chatbot: {bot}"
        for human, bot in (turn for turn in st.session_state['history']
                           if turn is not None)
    )
    print(previous_response)  # debug: show exactly what context the model sees
    result = llm_chain.predict(chat_history=previous_response, human_input=query)
    st.session_state['history'].append((query, result))
    return result
st.title("ASSISTANT BOT:")

# Seed the per-session buffers on the first run; Streamlit reruns the
# script on every interaction, so existing state must be left untouched.
st.session_state.setdefault('history', [])                             # (human, bot) turns fed to the LLM
st.session_state.setdefault('generated', ["Hello ! Ask me anything"])  # bot-side messages (index-aligned)
st.session_state.setdefault('past', [" "])                             # user-side messages; blank placeholder at 0
# Two layout slots: the transcript above, the input row below.
response_container = st.container()
container = st.container()

# Chat-style text box pinned to the bottom of the page; returns None
# until the user submits a message.
user_input = st.chat_input("Ask Your Questions π..")

with container:
    if user_input:
        # Ask the model, then mirror both sides of the turn into the
        # index-aligned display buffers.
        answer = conversational_chat(user_input)
        st.session_state['past'].append(user_input)
        st.session_state['generated'].append(answer)
# Render the transcript. Index 0 holds the seeded greeting pair, whose
# user side is a blank placeholder, so the user bubble is skipped there.
# NOTE(review): the source's indentation was flattened — assuming the bot
# message renders for every index (greeting visible); confirm against the
# original layout.
if st.session_state['generated']:
    with response_container:
        for idx, reply in enumerate(st.session_state['generated']):
            if idx:
                message(st.session_state["past"][idx], is_user=True,
                        key=f"{idx}_user", avatar_style="adventurer")
            message(reply, key=str(idx), avatar_style="bottts")