import gradio as gr
from transformers import GPT2LMHeadModel, GPT2Tokenizer

# Load the pretrained GPT-2 model and tokenizer
model_name = "gpt2"
model = GPT2LMHeadModel.from_pretrained(model_name)
tokenizer = GPT2Tokenizer.from_pretrained(model_name)


def complete_sentence(sentence):
    # Encode the prompt into token IDs
    input_ids = tokenizer.encode(sentence, return_tensors="pt")
    # Generate a single continuation of up to 50 tokens (prompt included);
    # pad_token_id is set explicitly to silence the GPT-2 "no pad token" warning
    output = model.generate(input_ids, max_length=50, num_return_sequences=1, pad_token_id=tokenizer.eos_token_id)
    # Decode the generated tokens back into text
    completed_sentence = tokenizer.decode(output[0], skip_special_tokens=True)
    return completed_sentence


# Build a simple text-in/text-out Gradio interface around the completion function
iface = gr.Interface(
    fn=complete_sentence,
    inputs="text",
    outputs="text",
    title="Sentence Completion",
    description="Enter a sentence to complete",
    examples=["I love to"],
)


# Launch the web app when the script is run directly
if __name__ == "__main__":
    iface.launch()