"""Streamlit app that visualizes how the BigScience tokenizer splits input text.

Loads the `bigscience/tokenizer` model from the Hugging Face Hub (gated —
requires an auth token stored in Streamlit secrets), then renders an
interactive token-boundary visualization for user-supplied text.
"""

import streamlit as st
from tokenizers.tools import EncodingVisualizer
from transformers import AutoTokenizer

st.set_page_config(
    page_title="BigScience Tokenizer",
    page_icon='👩‍💻',
    layout="wide",
)

# The repo is gated, so authentication is required; the token lives in
# Streamlit's secrets store, never in source.
# NOTE(review): `use_auth_token` is deprecated in newer transformers releases
# in favor of `token=` — confirm the pinned transformers version before
# migrating.
tokenizer = AutoTokenizer.from_pretrained(
    'bigscience/tokenizer',
    use_auth_token=st.secrets["AUTH_TOKEN"],
)

# EncodingVisualizer wants the underlying `tokenizers` Rust tokenizer, not the
# transformers wrapper, hence the private `_tokenizer` attribute.
visualizer = EncodingVisualizer(tokenizer=tokenizer._tokenizer, default_to_notebook=False)

text = st.text_area(label="", placeholder="Text to tokenize")
button_clicked = st.button("Tokenize")

# Re-render on any non-empty input or an explicit button press (Streamlit
# reruns the script on every widget interaction).
if text or button_clicked:
    st.write(f"The input was split into {len(tokenizer.tokenize(text))} tokens.")
    # The visualizer returns raw HTML; embed it in the page.
    st.components.v1.html(visualizer(text), height=1500)