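"""Streamlit demo: RAG-style Q&A over a local PDF with llmware GGUF models.

Registers four quantized chat models (Llama-2, Open_Gpt4 8x7B, Phi-2, Mistral),
lets the user pick one in the sidebar, loads the first PDF found in data/ as a
source document, and runs a fixed set of queries against it.
"""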
import os
import re

import streamlit as st
from llmware.prompts import Prompt
def register_gguf_model():
    """Register the GGUF chat models used by this app in llmware's model catalog."""
    prompter = Prompt()
    gguf_models = [
        ("llama", "TheBloke/Llama-2-7B-Chat-GGUF", "llama-2-7b-chat.Q3_K_M.gguf"),
        ("open_gpt4", "TheBloke/Open_Gpt4_8x7B-GGUF", "open_gpt4_8x7b.Q3_K_M.gguf"),
        ("phi2", "TheBloke/phi-2-GGUF", "phi-2.Q3_K_M.gguf"),
        ("mistral", "TheBloke/Mistral-7B-Instruct-v0.2-GGUF", "mistral-7b-instruct-v0.2.Q3_K_M.gguf"),
    ]
    print("registering models")
    for model_name, hf_repo_name, model_file in gguf_models:
        prompter.model_catalog.register_gguf_model(
            model_name, hf_repo_name, model_file, prompt_wrapper="open_chat"
        )
    return prompter
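
# Note: register_gguf_model() only adds catalog entries; the GGUF weights are
# pulled from each Hugging Face repo the first time load_model() is called, so
# the initial model load can take a while.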

def main():
    st.title("BetterZila RAG Enabled LLM")

    with st.spinner("Registering Models for use..."):
        prompter = register_gguf_model()

    data_path = "data/"

    st.sidebar.subheader("Select Model")
    model_name = st.sidebar.selectbox("Select Model", ["llama", "open_gpt4", "phi2", "mistral"])

    with st.spinner("Loading Model..."):
        prompter.load_model(model_name)
    st.success("Model Loaded!")

    # Sample queries aimed at "The 48 Laws of Power", the PDF expected in data/.
    queries = [
        "Can you give me an example from history where the enemy was crushed totally from the book?",
        "What's the point of making myself less accessible?",
        "Can you tell me the story of Queen Elizabeth I from this 48 laws of power book?",
    ]
st.subheader("Query")
with st.spinner("Loading PDF file..."):
for file in os.listdir(data_path):
if file.endswith(".pdf"):
print("Found PDF file: ", file)
pdf_file = file
break
print("loading Source...")
source = prompter.add_source_document(data_path, pdf_file, query=None)
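    # add_source_document() parses the PDF and attaches its text as context, so
    # every prompt_with_source() call below is answered against the document.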
    for query in queries:
        st.subheader(f"Query: {query}")
        with st.spinner("Generating response..."):
            responses = prompter.prompt_with_source(query, prompt_name="just_the_facts", temperature=0.3)
            for response in responses:
                st.write(re.sub(r"\n", " ", response["llm_response"]).strip())
    st.success("Responses generated!")

    # To run against several PDFs in sequence, add each file with
    # add_source_document() and call prompter.clear_source_materials()
    # between documents so earlier sources do not bleed into later prompts.

if __name__ == "__main__":
    main()