"""Gradio demo that serves text suggestions from a fine-tuned GPT-2 model."""

# NOTE(review): `transformers`, `notebook_login`, `load_dataset`, and
# `AutoModelForSequenceClassification` are not used below (leftovers from a
# notebook export); kept in case another consumer of this module relies on
# the import side effects — confirm before deleting.
import transformers
from huggingface_hub import notebook_login
from datasets import load_dataset
from transformers import AutoModelForSequenceClassification
from transformers import AutoTokenizer, pipeline

import gradio as gr

# The suggestion model was fine-tuned on top of distilgpt2, so it reuses the
# distilgpt2 tokenizer rather than shipping its own.
model_checkpoint = "distilgpt2"
tokenizer = AutoTokenizer.from_pretrained(model_checkpoint, use_fast=True)

generator = pipeline(
    'text-generation',
    model='DioLiu/GPT2_Suggestion',
    tokenizer=tokenizer,
)


def get_suggestion(input_text):
    """Generate a suggestion for *input_text* and trim it to a clean ending.

    Parameters
    ----------
    input_text : str
        Prompt text typed by the user.

    Returns
    -------
    str
        The generated continuation, truncated either at the first
        ``'\\n\\nGiven'`` marker (the model tends to start a new prompt
        block there) or at the last full sentence.
    """
    answer = generator(
        input_text, max_length=200, num_return_sequences=1
    )[0]['generated_text']

    # Cut at the spurious "Given ..." block the fine-tuned model appends.
    marker = answer.find('\n\nGiven')
    if marker != -1:
        return answer[:marker]

    # Otherwise trim to the last complete sentence. If there is no period at
    # all, rfind() returns -1 and the old code produced answer[0:0] == "" —
    # return the whole answer instead of an empty suggestion.
    last_period = answer.rfind('.')
    if last_period == -1:
        return answer
    return answer[:last_period + 1]


iface = gr.Interface(
    fn=get_suggestion,
    inputs="text",
    outputs=["text"],
    title="Suggestions",
)

if __name__ == "__main__":
    iface.launch()