import os
import re

import streamlit as st
from llmware.prompts import Prompt

# Note: the imports below are not referenced anywhere in this script; they can be dropped
# unless the PDF-parsing / embedding pipeline they support is added later.
import io
import requests
import PyPDF2
from langchain.text_splitter import CharacterTextSplitter
from langchain.embeddings import HuggingFaceInstructEmbeddings
from langchain.vectorstores import FAISS

def register_gguf_model():
    """Register the quantized GGUF chat models with llmware's model catalog."""
    prompter = Prompt()
    gguf_models = [
        # (catalog name, Hugging Face repo, GGUF file)
        ("llama", "TheBloke/Llama-2-7B-Chat-GGUF", "llama-2-7b-chat.Q5_K_S.gguf"),
        ("open_gpt4", "TheBloke/Open_Gpt4_8x7B-GGUF", "open_gpt4_8x7b.Q4_K_M.gguf"),
        ("phi2", "TheBloke/phi-2-GGUF", "phi-2.Q4_K_M.gguf"),
        ("mistral", "TheBloke/Mistral-7B-Instruct-v0.2-GGUF", "mistral-7b-instruct-v0.2.Q4_K_M.gguf"),
    ]
    print("registering models")
    for model_name, hf_repo_name, model_file in gguf_models:
        prompter.model_catalog.register_gguf_model(
            model_name, hf_repo_name, model_file, prompt_wrapper="open_chat"
        )
    # The selected model itself is loaded later in main(), once the user picks one in the sidebar.
    return prompter
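
# Optional sketch (not called anywhere in the app): a minimal command-line smoke test of the
# same registration -> load -> prompt-with-source flow used by the Streamlit UI below. It
# assumes a PDF named "sample.pdf" exists under data/; both that filename and the test query
# are placeholders.
def smoke_test(pdf_name="sample.pdf"):
    prompter = register_gguf_model()
    prompter.load_model("phi2")  # smallest of the registered models, quickest to load
    prompter.add_source_document("data/", pdf_name, query=None)
    responses = prompter.prompt_with_source(
        "Summarize the key ideas of this document.",
        prompt_name="just_the_facts",
        temperature=0.3,
    )
    for response in responses:
        print(re.sub(r"\n", " ", response["llm_response"]).strip())
    prompter.clear_source_materials()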

def main():
    st.title("BetterZila RAG Enabled LLM")
    with st.spinner("Registering Models for use..."):
        prompter = register_gguf_model()

    data_path = "data/"

    # TODO: keep "llama" as the default selection, and add a "Select Model" button below the
    # selectbox so the model is only loaded after the user confirms a choice.
    st.sidebar.subheader("Select Model")
    model_name = st.sidebar.selectbox("Select Model", ["llama", "open_gpt4", "phi2", "mistral"])

    with st.spinner("Loading Model..."):
        prompter.load_model(model_name)
    st.success("Model Loaded!")
    queries = [
        "Can you give me an example from history where the enemy was crushed totally from the book?",
        "What's the point of making myself less accessible?",
        "Can you tell me the story of Queen Elizabeth I from this 48 laws of power book?",
    ]

    for query in queries:
        st.subheader(f"Query: {query}")
        with st.spinner("Generating response..."):
            for file in os.listdir(data_path):
                if file.endswith(".pdf"):
                    # Attach the PDF as source context, run the query against it, display each
                    # answer, then clear the source before moving on to the next file.
                    prompter.add_source_document(data_path, file, query=None)
                    responses = prompter.prompt_with_source(
                        query, prompt_name="just_the_facts", temperature=0.3
                    )
                    for response in responses:
                        answer = re.sub(r"\n", " ", response["llm_response"]).strip()
                        print(query, ":", answer)
                        st.write(answer)
                    prompter.clear_source_materials()
        st.success("Response generated!")

if __name__ == "__main__":
    main()
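
# Usage note: launch the app with Streamlit from the project root, with the source PDFs in ./data:
#   streamlit run app.py
# ("app.py" is a placeholder for whatever this script is named in the Space.)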