import os

import streamlit as st
import torch
from transformers import pipeline

# Minimal Streamlit app that serves Llama-3.2-1B for text generation.
# Run with: streamlit run <this_file> (HF_TOKEN must grant access to the gated model).
model_id = "meta-llama/Llama-3.2-1B"

pipe = pipeline(
    "text-generation",
    model=model_id,
    torch_dtype=torch.bfloat16,
    device_map="auto",
    token=os.getenv("HF_TOKEN"),
)

prompt = st.text_input("Enter the prompt")
if prompt:  # skip generation while the input box is still empty
    result = pipe(prompt)
    st.write(result[0]["generated_text"])  # the pipeline returns a list of dicts