import streamlit as st  # Don't forget to include `streamlit` in your `requirements.txt`
from transformers import PaliGemmaProcessor, PaliGemmaForConditionalGeneration
from PIL import Image  # used to open the uploaded image before passing it to the processor
# Set up authentication: ask the user for a Hugging Face access token
if "hf_token" not in st.session_state:
    st.title("Authentication Required")
    st.write("Please authenticate with Hugging Face using your access token:")
    hf_token = st.text_input("Enter your token", type="password", key="hf_token_input")
    if hf_token:
        st.session_state.hf_token = hf_token  # remember the token across reruns
else:
    hf_token = st.session_state.hf_token

# Load Token from Storage: alternatively, reuse a token saved elsewhere
if "hf_token_local" not in st.session_state:
    st.title("Load Token from Storage")
    st.write("Please load your token from storage (e.g., environment variable, file)")
    hf_token_local = st.text_input("Enter your stored token", type="password", key="hf_token_local_input")
    if hf_token_local:
        st.session_state.hf_token_local = hf_token_local
else:
    hf_token_local = st.session_state.hf_token_local
# Load Processor and Model
if hf_token or hf_token_local:
    token = hf_token or hf_token_local  # use whichever token was provided
    processor = PaliGemmaProcessor.from_pretrained(
        "google/paligemma2",  # replace with the exact PaliGemma 2 checkpoint you have access to
        token=token,
        cache_dir="/tmp/",  # `cache_dir` controls where the downloaded files are stored
    )
    model = PaliGemmaForConditionalGeneration.from_pretrained(
        "google/paligemma2",
        token=token,
        cache_dir="/tmp/",
    )
    # Rest of your code
else:
    st.title("No Token Found")
    st.write("Please authenticate with Hugging Face or load token from storage")
    st.stop()  # stop here so the code below never runs without a loaded model
# Use the model
def main():
    if "output" not in st.session_state:
        st.write("Model output will appear here.")
    else:
        st.write(st.session_state.output)

    # PaliGemma is a vision-language model, so the processor expects an image
    # alongside the text prompt. Collect both inputs before the button so their
    # values survive the rerun triggered by the click.
    uploaded_image = st.file_uploader("Upload an image", type=["png", "jpg", "jpeg"])
    input_text = st.text_input("Input text")
    if st.button("Generate Text") and uploaded_image and input_text:
        image = Image.open(uploaded_image).convert("RGB")
        inputs = processor(text=input_text, images=image, return_tensors="pt")
        output_ids = model.generate(**inputs, max_new_tokens=50)
        st.session_state.output = processor.decode(output_ids[0], skip_special_tokens=True)
        st.rerun()  # rerun so the stored output is displayed above

if __name__ == "__main__":
    main()
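
# A minimal `requirements.txt` sketch for deploying this app, as hinted by the
# comment on the first import (assumed package list; pin versions as needed
# for your environment):
#
#     streamlit
#     transformers
#     torch
#     Pillow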