# demo-cop-gemma / app.py
# Streamlit demo for the google/gemma-3-1b-pt text-generation model
# (originally hosted on Hugging Face Spaces; revision 9a74ca9).
import streamlit as st
from transformers import pipeline
import torch
import os
from huggingface_hub import login
# Authenticate with the Hugging Face Hub only when a token is configured.
# Gemma is a gated model, so an HF_TOKEN with an accepted license is
# required to download the weights; without one we skip login rather than
# crash on login(token=None).
hf_token = os.getenv("HF_TOKEN")
if hf_token:
    login(token=hf_token)


@st.cache_resource(show_spinner="Loading google/gemma-3-1b-pt ...")
def _load_pipeline():
    """Build the text-generation pipeline once per server process.

    Streamlit re-executes this script on every widget interaction;
    st.cache_resource keeps the (multi-GB) model in memory across reruns
    instead of reloading it each time.

    Returns:
        A transformers text-generation pipeline for google/gemma-3-1b-pt.
    """
    # Fall back to CPU when no GPU is available instead of crashing on
    # a hard-coded device="cuda".
    device = "cuda" if torch.cuda.is_available() else "cpu"
    return pipeline(
        "text-generation",
        model="google/gemma-3-1b-pt",
        device=device,
        torch_dtype=torch.bfloat16,
    )


pipe = _load_pipeline()

txt = st.text_area("Test this")
if txt:
    # pipeline() returns a list of {"generated_text": ...} dicts;
    # st.write renders it as-is, matching the original behavior.
    st.write(pipe(txt, max_new_tokens=50))