import torch
import gradio as gr

from model import BigramLanguageModel, decode

# Use a GPU if one is available, otherwise fall back to the CPU
device = 'cuda' if torch.cuda.is_available() else 'cpu'

# Load the trained weights and put the model in evaluation mode
model = BigramLanguageModel()
model.load_state_dict(torch.load("./neo_gpt.pth", map_location=device))
model.to(device)
model.eval()


def generate_text(max_new_tokens):
    # Start generation from a single zero token (an "empty" context)
    context = torch.zeros((1, 1), dtype=torch.long, device=device)
    # Gradio's Number component returns a float, so cast to int before generating
    return decode(model.generate(context, max_new_tokens=int(max_new_tokens))[0].tolist())
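
# Quick sanity check (assumes the weights file above is present); uncomment to
# run a short generation from the command line without starting the UI:
# print(generate_text(50))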


# Define the application components
title = "Text Generation: Write Like Shakespeare"
description = "This Gradio app uses a GPT-style language model to generate text in the style of William Shakespeare."


# Create a Gradio interface
g_app = gr.Interface(
    fn=generate_text,
    inputs=[gr.Number(value=10, label="Number of Output Tokens",
                      info="Specify the desired length of the text to be generated.")],
    outputs=[gr.TextArea(lines=5, label="Generated Text")],
    title=title,
    description=description,
)

# Launch the Gradio app
g_app.launch()
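
# Optional: pass share=True to launch() to expose a temporary public URL
# instead of serving only on localhost:
# g_app.launch(share=True)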