"""Generate UI code from a natural-language description using GPT-Neo 2.7B."""

from transformers import GPT2Tokenizer, GPTNeoForCausalLM
import torch

MODEL_NAME = "EleutherAI/gpt-neo-2.7B"
# GPT-Neo's context window is 2048 tokens; the original max_length=10000
# exceeded it and would raise an index error in the position embeddings
# once generation passed position 2047. Bound the *new* tokens instead.
MAX_NEW_TOKENS = 256


def generate_ui_code(prompt: str, max_new_tokens: int = MAX_NEW_TOKENS) -> str:
    """Return GPT-Neo's generated code for *prompt*.

    Args:
        prompt: Natural-language description of the UI to generate.
        max_new_tokens: Upper bound on tokens generated beyond the prompt;
            prompt length + this must stay within the 2048-token context.

    Returns:
        The decoded generation (prompt included), special tokens stripped.
    """
    tokenizer = GPT2Tokenizer.from_pretrained(MODEL_NAME)
    # GPT-2-style tokenizers ship without a pad token; reuse EOS so
    # generate() does not warn and padding semantics are well-defined.
    tokenizer.pad_token_id = tokenizer.eos_token_id

    model = GPTNeoForCausalLM.from_pretrained(MODEL_NAME)
    model.eval()  # inference: disable dropout

    # Calling the tokenizer (rather than .encode) returns the attention
    # mask alongside the input ids — no need to build one by hand.
    inputs = tokenizer(prompt, return_tensors="pt")

    with torch.no_grad():  # inference only: skip autograd bookkeeping
        output = model.generate(
            inputs["input_ids"],
            attention_mask=inputs["attention_mask"],
            max_new_tokens=max_new_tokens,
            pad_token_id=tokenizer.eos_token_id,
        )

    return tokenizer.decode(output[0], skip_special_tokens=True)


if __name__ == "__main__":
    prompt = "Generate a login form with username, password, and a submit button."
    print(generate_ui_code(prompt))