File size: 803 Bytes
0a144a1
bc33552
bf2dc83
0a144a1
ea04b6c
0a144a1
bf2dc83
 
 
 
bc33552
bf2dc83
bc33552
bf2dc83
bc33552
 
 
 
bf2dc83
 
65d72cc
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
from transformers import GPT2Tokenizer, GPTNeoForCausalLM
import torch

# Load the tokenizer and model for GPT-Neo (2.7B; downloads weights on first run).
tokenizer = GPT2Tokenizer.from_pretrained("EleutherAI/gpt-neo-2.7B")
model = GPTNeoForCausalLM.from_pretrained("EleutherAI/gpt-neo-2.7B")
model.eval()  # inference mode: disables dropout

# The GPT-2 tokenizer has no pad token; reuse EOS so generate() has an
# explicit pad_token_id and does not emit the "Setting pad_token_id" warning.
tokenizer.pad_token = tokenizer.eos_token

# Input text description for UI
input_text = "Generate a login form with username, password, and a submit button."

# Tokenize the input. Calling the tokenizer directly returns both input_ids
# and a matching attention_mask — preferred over hand-building the mask with
# torch.ones, which only coincides with the real mask when there is no padding.
encoded = tokenizer(input_text, return_tensors="pt")

# Generate code. no_grad() avoids building autograd state, which matters
# for a 2.7B-parameter model.
with torch.no_grad():
    output = model.generate(
        encoded["input_ids"],
        attention_mask=encoded["attention_mask"],
        max_length=100,
        pad_token_id=tokenizer.eos_token_id,
    )

# Decode and print the generated code
generated_code = tokenizer.decode(output[0], skip_special_tokens=True)
print(generated_code)