File size: 807 Bytes
76ceb1a
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
import torch
from transformers import BioGptTokenizer, BioGptForCausalLM, set_seed

# Load the BioGPT tokenizer and causal-LM weights from the Hugging Face hub.
# NOTE(review): this downloads the model on first run — network/disk I/O at import time.
tokenizer = BioGptTokenizer.from_pretrained("microsoft/biogpt")
model = BioGptForCausalLM.from_pretrained("microsoft/biogpt")

# NOTE(review): `sentence` is never read anywhere visible in this file — the
# Gradio interface passes its own input to get_beam_output. Dead constant?
sentence = "COVID-19 is"


# Fix the RNG seed so generation is reproducible across runs.
set_seed(42)

def get_beam_output(sentence):
    """Continue *sentence* with BioGPT beam-search generation.

    Parameters
    ----------
    sentence : str
        Prompt text to continue.

    Returns
    -------
    str
        The decoded generation (prompt + continuation), with special
        tokens stripped.
    """
    inputs = tokenizer(sentence, return_tensors="pt")
    # Inference only — disable autograd bookkeeping.
    with torch.no_grad():
        beam_output = model.generate(
            **inputs,
            min_length=100,
            max_length=1024,
            num_beams=5,
            early_stopping=True,
        )
    # Bug fix: the original decoded the output but never returned it, so the
    # Gradio output box was always empty. Return the decoded string.
    return tokenizer.decode(beam_output[0], skip_special_tokens=True)


# Wire the generator into a minimal Gradio UI: one text input, one text output.
# NOTE(review): `gr` is used here but no gradio import is visible in this file —
# confirm `import gradio as gr` exists at the top, otherwise this raises NameError.
demo = gr.Interface(fn=get_beam_output, inputs="text", outputs="text")
demo.launch()