import streamlit as st
from tokenizers.tools import EncodingVisualizer
from transformers import AutoTokenizer

# Configure the Streamlit page for the BigScience tokenizer demo.
st.set_page_config(page_title="BigScience Tokenizer", page_icon="👩‍💻", layout="wide")
# Free-form input box for the text to tokenize.
text = st.text_area(label="", placeholder="Text to tokenize")
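# A minimal sketch of how the app could continue so the imports above are used:
# load the tokenizer, build the visualizer, and render its HTML in the page.
# The "bigscience/tokenizer" checkpoint name and the 600 px frame height are
# assumptions, not taken from the original file.
import streamlit.components.v1 as components

tokenizer = AutoTokenizer.from_pretrained("bigscience/tokenizer")  # assumed checkpoint
# EncodingVisualizer works on the underlying fast tokenizer; default_to_notebook=False
# makes the call return the HTML string instead of displaying it in a notebook.
visualizer = EncodingVisualizer(tokenizer.backend_tokenizer, default_to_notebook=False)

if text:
    # Embed the token-boundary visualization produced by EncodingVisualizer.
    components.html(visualizer(text), height=600, scrolling=True)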