ShaohanTian committed
Commit 7742f88 · 1 Parent(s): e17b63e
Files changed (1)
  1. app.py +46 -30
app.py CHANGED
@@ -1,6 +1,33 @@
+# import gradio as gr
+
+# title = "SteelBERT"
+
+# examples = [
+#     ['Paris is the [MASK] of France.', 'SteelBERT'],
+#     ["A composite steel plate for marine construction was fabricated using 316L stainless steel.", 'SteelBERT'],
+#     ["The use of composite [MASK] in construction is growing rapidly.", 'SteelBERT'],
+#     ["Advances in [MASK] science are leading to stronger and more durable steel products.", 'SteelBERT'],
+#     ["The corrosion resistance of stainless steel is attributed to the [MASK] of a passive film on the surface.", 'SteelBERT'],
+#     ["Heat treatment of steel involves a controlled [MASK] and cooling process to alter its mechanical properties.", 'SteelBERT'],
+#     ["Nano-engineered [MASK] have the potential to revolutionize the steel industry with their superior properties.", 'SteelBERT']
+# ]
+
+# # Load interfaces for different models
+# try:
+#     io1 = gr.Interface.load("MGE-LLMs/SteelBERT")
+
+
 import gradio as gr
 
-title = "SteelBERT"
+title = "BERT"
+
+description = "Gradio Demo for BERT. To use it, simply add your text, or click one of the examples to load them. Read more at the links below."
+
+article = "<p style='text-align: center'><a href='https://arxiv.org/abs/1810.04805' target='_blank'>BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding</a></p>"
+
+# examples = [
+#     ['Paris is the [MASK] of France.','bert-base-uncased']
+# ]
 
 examples = [
     ['Paris is the [MASK] of France.', 'SteelBERT'],
@@ -12,36 +39,25 @@ examples = [
     ["Nano-engineered [MASK] have the potential to revolutionize the steel industry with their superior properties.", 'SteelBERT']
 ]
 
-# Load interfaces for different models
-try:
-    io1 = gr.Interface.load("MGE-LLMs/SteelBERT")
-except Exception as e:
-    print(f"Failed to load SteelBERT: {e}")
-    io1 = None
 
-try:
-    io2 = gr.Interface.load("huggingface/bert-base-uncased")
-except Exception as e:
-    print(f"Failed to load bert-base-uncased: {e}")
-    io2 = None
+io1 = gr.Interface.load("MGE-LLMs/SteelBERT")
 
-def inference(inputtext, model):
-    if model == "SteelBERT" and io1 is not None:
-        return io1(inputtext)
-    elif model == "bert-base-uncased" and io2 is not None:
-        return io2(inputtext)
-    else:
-        return "Model not available."
-
-with gr.Blocks(title=title) as demo:
-    gr.Markdown(f"# {title}")
-
-    context = gr.Textbox(label="Context", lines=10)
-    model = gr.Dropdown(choices=["SteelBERT", "bert-base-uncased"], label="Model")
-    output = gr.Textbox(label="Output")
+io2 = gr.Interface.load("huggingface/bert-base-uncased")
 
-    examples_component = gr.Examples(examples=examples, inputs=[context, model])
 
-    btn = gr.Button("Run", fn=inference, inputs=[context, model], outputs=output)
-
-    demo.launch()
+def inference(inputtext, model):
+    if model == "SteelBERT":
+        outlabel = io1(inputtext)
+    else:
+        outlabel = io2(inputtext)
+    return outlabel
+
+
+gr.Interface(
+    inference,
+    [gr.inputs.Textbox(label="Context",lines=10),gr.inputs.Dropdown(choices=["SteelBERT","bert-base-uncased"], type="value", default="SteelBERT", label="model")],
+    [gr.outputs.Label(label="Output")],
+    examples=examples,
+    article=article,
+    title=title,
+    description=description).launch(enable_queue=True)
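
Note that the added code relies on APIs that more recent Gradio releases have deprecated or removed: gr.Interface.load, the gr.inputs/gr.outputs namespaces, the default= keyword on components, and the enable_queue launch flag. Below is a minimal, untested sketch of the same demo written against the newer component API (assuming a Gradio 4.x install, where gr.load, gr.Textbox, gr.Dropdown, and gr.Label are the usual replacements); it is illustrative only and not part of this commit.

# Untested sketch against the current Gradio API (assumed 4.x); not part of this commit.
# Assumes the objects returned by gr.load() remain callable like the old
# gr.Interface.load() interfaces, which is how the committed inference() uses them.
import gradio as gr

title = "BERT"
description = "Gradio Demo for BERT. To use it, simply add your text, or click one of the examples to load them."

# gr.load() supersedes the deprecated gr.Interface.load(); the "models/" prefix
# loads the checkpoints through the Hugging Face Inference API.
io1 = gr.load("models/MGE-LLMs/SteelBERT")
io2 = gr.load("models/bert-base-uncased")


def inference(inputtext, model):
    # Route the masked sentence to whichever backend was selected.
    if model == "SteelBERT":
        return io1(inputtext)
    return io2(inputtext)


demo = gr.Interface(
    fn=inference,
    inputs=[
        gr.Textbox(label="Context", lines=10),          # was gr.inputs.Textbox
        gr.Dropdown(                                    # was gr.inputs.Dropdown
            choices=["SteelBERT", "bert-base-uncased"],
            value="SteelBERT",                          # `default=` is now `value=`
            label="model",
        ),
    ],
    outputs=gr.Label(label="Output"),                   # was gr.outputs.Label
    title=title,
    description=description,
)

# launch(enable_queue=True) is replaced by enabling the queue on the app itself.
demo.queue().launch()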