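# orbgpt — Streamlit chat demo: Yi-6B-Chat (8-bit) served through a local
# transformers pipeline via LangChain, with chat history kept in Streamlit
# session state and a Supabase vector store (Voyage embeddings) set up for retrieval.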
import streamlit as st
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
from langchain.memory import ConversationBufferMemory
from langchain.memory.chat_message_histories import StreamlitChatMessageHistory
from langchain.prompts import PromptTemplate
from langchain.embeddings import VoyageEmbeddings
from langchain.vectorstores import SupabaseVectorStore
from langchain.llms.huggingface_pipeline import HuggingFacePipeline
from st_supabase_connection import SupabaseConnection
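# Chat history lives in Streamlit session state so it survives reruns;
# the ConversationBufferMemory below is set up but not yet attached to a chain.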
msgs = StreamlitChatMessageHistory()
memory = ConversationBufferMemory(memory_key="history", chat_memory=msgs)
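# Cached Supabase connection; ttl=None means the cached connection never expires.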
supabase_client = st.connection(
    name="orbgpt",
    type=SupabaseConnection,
    ttl=None,
)
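# Voyage AI embeddings backing a Supabase pgvector store; "match_documents"
# is the similarity-search RPC the LangChain integration calls. Note the
# store is built here but not yet queried by the chain below.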
embeddings = VoyageEmbeddings(model="voyage-01")
vector_store = SupabaseVectorStore(
    embedding=embeddings,
    client=supabase_client,
    table_name="documents",
    query_name="match_documents",
)
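# Load the 8-bit quantized Yi-6B chat model; device_map="auto" places weights
# across available devices, and eval() disables dropout for inference.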
model_path = "01-ai/Yi-6B-Chat-8bits"
tokenizer = AutoTokenizer.from_pretrained(model_path, use_fast=False)
model = AutoModelForCausalLM.from_pretrained(
    model_path, device_map="auto", torch_dtype="auto"
).eval()
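# Wrap model + tokenizer in a text-generation pipeline and expose it to
# LangChain through HuggingFacePipeline.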
pipe = pipeline(
    "text-generation",
    model=model,
    tokenizer=tokenizer,
    max_new_tokens=10,  # very short completions; raise for fuller answers
)
hf = HuggingFacePipeline(pipeline=pipe)
template = """Question: {question}
Answer: Let's think step by step."""
prompt = PromptTemplate.from_template(template)
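# Compose prompt and LLM with LCEL; invoking the chain formats the prompt
# and runs generation in one call.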
chain = prompt | hf
# Smoke test: Streamlit reruns the whole script on each interaction, so this
# triggers a model call every rerun.
question = "What is electroencephalography?"
st.text(chain.invoke({"question": question}))
st.title("🪩🤖")
if len(msgs.messages) == 0:
    msgs.add_ai_message("Ask me anything about orb community projects!")
for msg in msgs.messages:
    st.chat_message(msg.type).write(msg.content)
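# Handle new input: echo the user message, run the chain, and record both
# sides in the message history.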
# `user_input` keeps the name `prompt` free for the PromptTemplate above
if user_input := st.chat_input("Ask something"):
    st.chat_message("human").write(user_input)
    msgs.add_user_message(user_input)
    response = chain.invoke({"question": user_input})
    st.chat_message("ai").write(response)
    msgs.add_ai_message(response)