import os
import gradio as gr
import spaces
from transformers import pipeline
import torch
# ZeroGPU sanity check: outside a @spaces.GPU function this tensor still
# reports 'cpu'; it is only placed on 'cuda:0' while a GPU-decorated call runs.
zero = torch.Tensor([0]).cuda()
print(zero.device)  # <-- 'cpu' 🤔
token = os.getenv("HF_TOKEN")
# Alternative: serve the hosted model directly with gr.load
# gr.load("models/ICILS/xlm-r-icils-ilo", hf_token=token).launch()
# Load the pre-trained classification pipeline (the token grants access if the repo is gated)
classifier = pipeline("text-classification", model="ICILS/xlm-r-icils-ilo", token=token)
# Define the prediction function
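# @spaces.GPU allocates a ZeroGPU device for the duration of each call to the decorated function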
@spaces.GPU
def classify_text(text):
    # Return the prediction (label and score) for the submitted text
    return classifier(text)[0]
# Create the Gradio interface
demo = gr.Interface(
    fn=classify_text,
    inputs=gr.Textbox(lines=2, placeholder="Enter text here..."),
    outputs=gr.Text(),
    title="XLM-R ISCO classification with ZeroGPU",
    description="Classify occupations using a pre-trained XLM-R ISCO model on Hugging Face Spaces with ZeroGPU",
)
demo.launch()