import gradio as gr
title = "BERT"
description = "Gradio demo for BERT fill-mask prediction. To use it, enter text containing a [MASK] token, or click one of the examples to load it. Read more at the link below."
article = "<p style='text-align: center'><a href='https://arxiv.org/abs/1810.04805' target='_blank'>BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding</a></p>"
examples = [
['Paris is the [MASK] of France.', 'bert-base-cased']
]
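# Each example row supplies values for the inputs defined below, in order: [masked text, model choice]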
# Load the interfaces for the two BERT models
io1 = gr.load("huggingface/bert-base-cased")
io2 = gr.load("huggingface/bert-base-uncased")
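# Note: gr.load returns a Gradio interface that can be called like a function,
# proxying requests to the hosted Hugging Face Inference API for each model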
def inference(inputtext, model):
    if "[MASK]" not in inputtext:
        return "Error: The input text must contain the [MASK] token."
    if model == "bert-base-cased":
        outlabel = io1(inputtext)
    elif model == "bert-base-uncased":
        outlabel = io2(inputtext)
    else:
        outlabel = "Invalid model selected"
    return str(outlabel)
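# Example call (assuming the hosted Inference API is reachable):
#   inference("Paris is the [MASK] of France.", "bert-base-cased")
# The hosted fill-mask pipeline returns its top token predictions, stringified here.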
# Build the demo with the updated Gradio syntax
gr.Interface(
    fn=inference,
    inputs=[
        gr.Textbox(label="Context", lines=10, placeholder="Enter text with [MASK] token"),
        gr.Dropdown(choices=["bert-base-cased", "bert-base-uncased"], value="bert-base-cased", label="Model")
    ],
    outputs=gr.Label(label="Output"),
    examples=examples,
    article=article,
    title=title,
    description=description
).launch(share=True)