Jingjing Zhai committed
Commit ef10b63 · 1 Parent(s): 9bc098b

initial commit

Files changed (2)
  1. app.py +28 -0
  2. requirements.txt +5 -0
app.py ADDED
@@ -0,0 +1,28 @@
+ from transformers import pipeline, AutoModelForMaskedLM, AutoTokenizer
+ import gradio as gr
+
+ # Load the PlantCaduceus masked language model and its tokenizer.
+ # trust_remote_code=True is required because the checkpoint ships custom architecture code.
+ model_name = "kuleshov-group/PlantCaduceus_l20"
+ model = AutoModelForMaskedLM.from_pretrained(model_name, trust_remote_code=True)
+ tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
+
+ # Wrap the model in a fill-mask pipeline.
+ nlp = pipeline("fill-mask", model=model, tokenizer=tokenizer)
+
+ def predict_masked_text(text):
+     # Return the completed sequence for each of the pipeline's predictions.
+     results = nlp(text)
+     return [result['sequence'] for result in results]
+
+ # Create the Gradio interface
+ iface = gr.Interface(
+     fn=predict_masked_text,
+     inputs=gr.Textbox(lines=2, placeholder="Enter text with a [MASK] token..."),
+     outputs=gr.Textbox(),
+     title="Masked Language Modeling",
+     description="Fill in the masked token in the input text."
+ )
+
+ # Launch the interface
+ iface.launch(server_port=8020)
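
For reference, below is a minimal standalone sketch of the same fill-mask pattern used in app.py, shown only to illustrate the structure the pipeline returns and what predict_masked_text() keeps from it. It swaps in a generic masked language model (bert-base-uncased) as a stand-in, since running the PlantCaduceus checkpoint requires the mamba-ssm dependency pinned in requirements.txt and expects nucleotide sequences; the English example sentence is purely illustrative.

# Illustrative sketch, not the Space's own code: bert-base-uncased stands in here
# only because it accepts the [MASK] token shown in the UI placeholder; the Space
# itself loads kuleshov-group/PlantCaduceus_l20 as in app.py above.
from transformers import pipeline

fill = pipeline("fill-mask", model="bert-base-uncased")

# Each prediction is a dict with 'score', 'token', 'token_str', and 'sequence';
# predict_masked_text() in app.py returns only the 'sequence' strings to Gradio.
for pred in fill("The capital of France is [MASK]."):
    print(f"{pred['score']:.3f}  {pred['sequence']}")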
requirements.txt ADDED
@@ -0,0 +1,5 @@
+ gradio==3.1
+ transformers==4.33.3
+ torch==2.0.1
+ mamba-ssm==1.1.3.post1
+