import gradio as gr
import torch
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

# Load the locally fine-tuned BART checkpoint; the tokenizer comes from the base model.
SAVED_MODEL_PATH = "bart_base_full_finetune_save"
model_name = "facebook/bart-base"
model = AutoModelForSeq2SeqLM.from_pretrained(SAVED_MODEL_PATH).to(device)
tokenizer = AutoTokenizer.from_pretrained(model_name)

def summarize(text):
    # Tokenize the prompted dialogue, truncating/padding to the configured input length.
    inputs = tokenizer(
        f"Summarize dialogue >>\n {text}",
        return_tensors="pt", max_length=1000, truncation=True, padding="max_length",
    ).to(device)
    # Pass the attention mask so padded positions are ignored during beam-search generation.
    summary_ids = model.generate(
        inputs.input_ids, attention_mask=inputs.attention_mask,
        num_beams=4, max_length=100, early_stopping=True,
    )
    summary = [tokenizer.decode(g, skip_special_tokens=True, clean_up_tokenization_spaces=False) for g in summary_ids]
    return summary[0]

# Gradio UI: dialogue text in, generated summary out.
iface = gr.Interface(
    fn=summarize,
    inputs=gr.Textbox(lines=10, label="Input Dialogue"),
    outputs=gr.Textbox(label="Generated Summary"),
)

iface.launch()
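
# A minimal local smoke test (a sketch, assuming the fine-tuned checkpoint exists at
# SAVED_MODEL_PATH and loads successfully); uncomment to print a summary for a sample
# dialogue instead of launching the UI. The sample text below is purely illustrative.
#
# sample = "Anna: Are we still on for lunch tomorrow?\nTom: Yes, noon at the usual place.\nAnna: Great, see you then."
# print(summarize(sample))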