import gradio as gr
title = "BERT"
description = "Gradio demo for BERT masked-language-model inference. To use it, enter a sentence containing the [MASK] token, or click one of the examples to load it. Read more at the link below."
article = "<p style='text-align: center'><a href='https://arxiv.org/abs/1810.04805' target='_blank'>BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding</a></p>"
examples = [
    ['Paris is the [MASK] of France.', 'bert-base-cased']
]
# Load the hosted inference interfaces for the two BERT fill-mask checkpoints
io1 = gr.Interface.load("huggingface/bert-base-cased")
io2 = gr.Interface.load("huggingface/bert-base-uncased")
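# Note: on newer Gradio releases, gr.Interface.load has been superseded by gr.load.
# A roughly equivalent call (version-dependent, so treat this as an assumption) would be:
#   io1 = gr.load("models/bert-base-cased")
#   io2 = gr.load("models/bert-base-uncased")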
def inference(inputtext, model):
    # Fill-mask inference requires a [MASK] token in the input text.
    if "[MASK]" not in inputtext:
        return {"error": "The input text must contain the [MASK] token."}
    # Route the request to the interface for the selected checkpoint.
    if model == "bert-base-cased":
        return io1(inputtext)
    elif model == "bert-base-uncased":
        return io2(inputtext)
    else:
        return {"error": "Invalid model selected."}
iface = gr.Interface(
    fn=inference,
    inputs=[
        gr.Textbox(label="Context", lines=10, placeholder="Enter text containing a [MASK] token"),
        gr.Dropdown(choices=["bert-base-cased", "bert-base-uncased"], value="bert-base-cased", label="Model")
    ],
    outputs=gr.JSON(label="Output"),  # JSON output shows either the model predictions or an error message
    examples=examples,
    article=article,
    title=title,
    description=description
)
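# Example direct calls to inference() for local testing (a minimal sketch; uncomment to try.
# Each call hits the hosted Hugging Face interfaces loaded above, and the exact structure of
# the returned predictions depends on the Gradio/Inference API version, so treat it as an assumption):
#   print(inference("Paris is the [MASK] of France.", "bert-base-cased"))
#   print(inference("Paris is the capital of France.", "bert-base-cased"))  # no [MASK] -> error dict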
iface.launch(share=True)