from transformers import AutoTokenizer, AutoModelForCausalLM
import torch
import gradio as gr

# Load the pre-trained tokenizer and model
tokenizer = AutoTokenizer.from_pretrained('huggingartists/ed-sheeran')
model = AutoModelForCausalLM.from_pretrained('huggingartists/ed-sheeran', pad_token_id=50269)
# Generate lyrics from a text prompt
def ed_lyrics(prompt):
    encoded_prompt = tokenizer.encode(prompt + "\n\nLyrics: ", add_special_tokens=False, return_tensors='pt').to('cpu')
    # max_length counts the prompt tokens too, so extend it by the prompt length (shape[1], not the batch dim)
    output_sequences = model.generate(encoded_prompt, max_length=75 + encoded_prompt.shape[1], top_p=0.8, do_sample=True)[0].tolist()
    generated_song = tokenizer.decode(output_sequences, clean_up_tokenization_spaces=True)
    # Keep only the text after the last blank line, i.e. the generated lyrics, as a string
    final_result = generated_song.split("\n\n")[-1]
    return final_result
# Launch the interactive web demo
iface = gr.Interface(fn=ed_lyrics, inputs="textbox", outputs="text")
iface.launch()
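
# A minimal sketch of calling the generator directly, without the Gradio UI.
# Since generation samples with do_sample=True, seeding torch makes runs
# repeatable; the seed and prompt below are arbitrary examples, not part of
# the original app.
torch.manual_seed(42)
print(ed_lyrics("I found a love"))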