# Hugging Face Space (Streamlit): Llama 3.2 1B text-generation demo.
import os

import torch
from transformers import pipeline

import streamlit as st

# Gated model: downloading it requires an HF access token (HF_TOKEN env var).
model_id = "meta-llama/Llama-3.2-1B"


@st.cache_resource
def _load_pipeline():
    """Build the text-generation pipeline once and reuse it across reruns.

    Without caching, Streamlit re-executes the whole script on every widget
    interaction and would reload the multi-GB model each time.
    """
    return pipeline(
        "text-generation",
        model=model_id,
        torch_dtype=torch.bfloat16,  # halves memory vs. fp32; fine for inference
        device_map="auto",           # place weights on GPU when one is available
        token=os.getenv("HF_TOKEN"),
    )


pipe = _load_pipeline()

input_text = st.text_input("Ingrese el Prompt")
if input_text:
    # Limit how many NEW tokens are generated. (`max_length` counts the
    # prompt tokens too, so long prompts would silently truncate the reply.)
    max_new_tokens = 100  # adjust to taste
    response = pipe(input_text, max_new_tokens=max_new_tokens)
    generated_text = response[0]["generated_text"]
    st.write(generated_text)