File size: 901 Bytes
0a144a1
bc33552
bf2dc83
0a144a1
ea04b6c
5b42b54
 
 
 
0a144a1
bf2dc83
 
 
 
bc33552
bf2dc83
bc33552
bf2dc83
bc33552
5b42b54
bc33552
 
bf2dc83
 
230c5ac
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
from transformers import GPT2Tokenizer, GPTNeoForCausalLM
import torch

# Load the tokenizer and model for GPT-Neo 2.7B.
# NOTE: this downloads ~10 GB of weights on first run.
tokenizer = GPT2Tokenizer.from_pretrained("EleutherAI/gpt-neo-2.7B")

# GPT-2/Neo tokenizers ship without a pad token; reuse EOS so that
# padding (and generate()'s internal padding logic) is well-defined.
tokenizer.pad_token_id = tokenizer.eos_token_id

model = GPTNeoForCausalLM.from_pretrained("EleutherAI/gpt-neo-2.7B")
model.eval()  # inference mode: disables dropout

# Input text description for UI
input_text = "Generate a login form with username, password, and a submit button."

# Tokenize with the tokenizer's __call__ so we get the attention mask it
# actually computed, rather than hand-building one with torch.ones.
encoded = tokenizer(input_text, return_tensors="pt")
input_ids = encoded["input_ids"]
attention_mask = encoded["attention_mask"]

# Generate code.
# BUG FIX: the previous max_length=10000 exceeds GPT-Neo's 2048-token
# context window (max_position_embeddings) and fails past that point.
# max_new_tokens bounds only the *generated* continuation, keeping
# prompt + output safely within the model's context.
with torch.no_grad():  # no gradients needed for generation; saves memory
    output = model.generate(
        input_ids,
        attention_mask=attention_mask,
        max_new_tokens=512,
        pad_token_id=tokenizer.eos_token_id,  # silence the pad-token warning
    )

# Decode and print the generated code
generated_code = tokenizer.decode(output[0], skip_special_tokens=True)
print(generated_code)