# Generic imports
import os
import time
import keyfile
import warnings
import streamlit as st
warnings.filterwarnings("ignore")
# LangChain packages
from langchain.document_loaders import TextLoader
from langchain.text_splitter import CharacterTextSplitter
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.vectorstores import Pinecone as PineconeVectorStore
from langchain.llms import HuggingFaceHub
from langchain import PromptTemplate
from langchain.schema import SystemMessage, HumanMessage, AIMessage
from langchain.schema.runnable import RunnablePassthrough
from langchain.schema.output_parser import StrOutputParser
from langchain_google_genai import ChatGoogleGenerativeAI
from pinecone import Pinecone, ServerlessSpec
pc = Pinecone(api_key=keyfile.PINECONE_API_KEY)
os.environ["PINECONE_API_KEY"] = keyfile.PINECONE_API_KEY
cloud = os.environ.get("PINECONE_CLOUD") or "aws"
region = os.environ.get("PINECONE_REGION") or "us-east-1"
serv = ServerlessSpec(cloud=cloud, region=region)
model_id = "mistralai/Mixtral-8x7B-Instruct-v0.1"
llm = HuggingFaceHub(
    repo_id=model_id,
    model_kwargs={"temperature": 0.8, "top_k": 50},
    # The original called userdata.get("HFToken"), which only works in Colab;
    # assuming keyfile also defines HF_TOKEN alongside the other keys it provides.
    huggingfacehub_api_token=keyfile.HF_TOKEN,
)
index_name = "parasgupta" | |
# We are check if the name of our index is not existing in pinecone directory | |
if index_name not in pc.list_indexes().names(): | |
# if not then we will create a index for us | |
pc.create_index( | |
name = index_name, | |
dimension = 768, | |
metric = "cosine", | |
spec = serv | |
) | |
while not pc.describe_index(index_name).status['ready']: | |
time.sleep(1) | |
# IF the index is not there in the index list | |
if index_name not in pc.list_indexes(): | |
docsearch = PineconeVectorStore.from_documents(docs, embeddings, index_name = index_name) | |
else: | |
docsearch = PineconeVectorStore.from_existing_index(index_name, embeddings, pinecone_index = pc.Index(index_name)) | |
loader = TextLoader("/content/drive/MyDrive/Intelli_GenAI/RAG/Machine Learning Operations.txt") | |
documents = loader.load() | |
text_splitter = CharacterTextSplitter(chunk_size = 1000, chunk_overlap = 4) | |
docs = text_splitter.split_documents(documents) | |
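
# A minimal sketch of the retrieval chain that the imports above point toward
# (PromptTemplate, RunnablePassthrough, and StrOutputParser are otherwise unused).
# The prompt wording is an assumption for illustration, not part of the original script.
rag_template = """Answer the question using only the context below.

Context: {context}

Question: {question}
"""
rag_prompt = PromptTemplate.from_template(rag_template)
retriever = docsearch.as_retriever()
rag_chain = (
    {"context": retriever, "question": RunnablePassthrough()}
    | rag_prompt
    | llm
    | StrOutputParser()
)
# Example usage: rag_chain.invoke("What is MLOps?")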
# Initialize the chat history with the healer persona on first run
# (uses LangChain's message classes imported above rather than a custom pydantic model)
if "sessionMessages" not in st.session_state:
    st.session_state.sessionMessages = [
        SystemMessage(content="You are a medieval magical healer known for your peculiar sarcasm")
    ]
# Configuring the key
os.environ["GOOGLE_API_KEY"] = keyfile.GOOGLEKEY
# Create the chat model (named chat_llm so it does not shadow the HuggingFaceHub llm above)
chat_llm = ChatGoogleGenerativeAI(
    model="gemini-1.5-pro",
    temperature=0.7,
    convert_system_message_to_human=True,
)
# Response function
def load_answer(question):
    st.session_state.sessionMessages.append(HumanMessage(content=question))
    assistant_response = chat_llm.invoke(st.session_state.sessionMessages)
    # The chat model returns a message object with a 'content' attribute
    if hasattr(assistant_response, "content") and isinstance(assistant_response.content, str):
        processed_content = assistant_response.content
        st.session_state.sessionMessages.append(AIMessage(content=processed_content))
    else:
        st.error("Invalid response received from AI.")
        processed_content = "Sorry, I couldn't process your request."
    return processed_content
# User message
def get_text():
    # key must be a string; the original passed the builtin `input` by mistake
    input_text = st.text_input("You: ", key="input")
    return input_text
# Implementation
user_input = get_text()
submit = st.button("Generate")
if submit and user_input:
    resp = load_answer(user_input)
    st.subheader("Answer: ")
    # st.write does not accept a key argument
    st.write(resp)