# 1. Import the required packages
import torch
import gradio as gr
from typing import Dict
from transformers import pipeline

# 2. Define function to use our model on given text
def food_not_food_classifier(text: str) -> Dict[str, float]:
    # Set up text classification pipeline
    classifier = pipeline(task="text-classification",
                          # Because our model is already on Hugging Face, we can pass in the model name directly
                          model="devagonal/bert-f1-durga-muhammad-c",  # link to model on HF Hub
                          device="cuda" if torch.cuda.is_available() else "cpu",
                          top_k=None)  # return all possible scores (not just top-1)
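    # NOTE: the pipeline is rebuilt on every call; for a long-running demo it
    # could instead be created once at module level and reused.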

    # Get outputs from pipeline (as a list of dicts)
    outputs = classifier(text)[0]

    # Format output for Gradio (e.g. {"label_1": probability_1, "label_2": probability_2})
    output_dict = {}
    for item in outputs:
        output_dict[item["label"]] = item["score"]

    return output_dict
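
# Example (illustrative only; actual label names depend on the model's config):
#     food_not_food_classifier("siapakah durga")
#     -> {"LABEL_0": 0.98, "LABEL_1": 0.02}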

# 3. Create a Gradio interface with details about our app
description = """
A text classifier to determine the class of a question.
label0 = durga
label1 = muhammad
"""

demo = gr.Interface(fn=food_not_food_classifier,
                    inputs="text",
                    outputs=gr.Label(num_top_classes=2),  # show top 2 classes (that's all we have)
                    title="Bert F1 Durga Muhammad c",
                    description=description,
                    examples=[["siapakah durga"],
                              ["siapakah muhammad"]])

# 4. Launch the interface
if __name__ == "__main__":
    demo.launch()
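
# A minimal usage sketch (assumes the gradio_client package and a deployed
# Space; the Space id below is a hypothetical placeholder):
#
#     from gradio_client import Client
#
#     client = Client("devagonal/bert-f1-durga-muhammad-c")  # hypothetical Space id
#     result = client.predict("siapakah durga", api_name="/predict")
#     print(result)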