# app.py — Hugging Face Space (author: danieldux, commit 89acf17, 837 bytes)
import os
import gradio as gr
import spaces
from transformers import pipeline
import torch
import huggingface_hub
# Authenticate with the Hugging Face Hub so the (possibly gated) model can
# be downloaded. Only log in when a token is configured: login(token=None)
# falls back to an interactive prompt, which fails in a headless Space.
token = os.getenv("HF_TOKEN")
if token:
    huggingface_hub.login(token=token)

# Load the pre-trained text-classification pipeline once at startup.
# `token=` replaces the deprecated `use_auth_token=` argument.
# NOTE(review): device='cuda:0' at import time relies on ZeroGPU's patched
# CUDA initialization — confirm against the Space's runtime hardware.
classifier = pipeline(
    "text-classification",
    model="ICILS/xlm-r-icils-ilo",
    token=token,
    device="cuda:0",
)
# Define the prediction function
@spaces.GPU
def classify_text(text):
    """Classify *text* with the ISCO pipeline and return the top prediction dict."""
    predictions = classifier(text)
    return predictions[0]
# Assemble the Gradio UI and start serving it.
text_input = gr.Textbox(lines=2, placeholder="Enter text here...")
demo = gr.Interface(
    fn=classify_text,
    inputs=text_input,
    outputs=gr.Text(),
    title="XLM-R ISCO classification with ZeroGPU",
    description="Classify occupations using a pre-trained XLM-R-ISCO model on Hugging Face Spaces with ZeroGPU",
)
demo.launch()