nikshep01 committed on
Commit
bc33552
·
verified ·
1 Parent(s): 0a144a1

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +7 -3
"""Generate UI code from a natural-language prompt using GPT-Neo.

Loads EleutherAI's GPT-Neo 2.7B causal-LM, feeds it a text description
of a UI component, and prints the generated code to stdout.
"""
from transformers import GPT2Tokenizer, GPTNeoForCausalLM
import torch

# Load the tokenizer and model for GPT-Neo.
tokenizer = GPT2Tokenizer.from_pretrained("EleutherAI/gpt-neo-2.7B")
model = GPTNeoForCausalLM.from_pretrained("EleutherAI/gpt-neo-2.7B")

# Input text description for UI.
input_text = "Generate a login form with username, password, and a submit button."

# Tokenize the input. Calling the tokenizer directly returns BOTH
# input_ids and a correct attention_mask, so there is no need to build
# the mask by hand with torch.ones.
encoded = tokenizer(input_text, return_tensors="pt")

# Generate code. GPT-2's tokenizer defines no pad token, so point
# pad_token_id at eos_token_id explicitly — this silences the
# "Setting `pad_token_id` to `eos_token_id`" warning and makes the
# open-ended-generation padding behavior deliberate.
output = model.generate(
    encoded["input_ids"],
    attention_mask=encoded["attention_mask"],
    max_length=100,
    pad_token_id=tokenizer.eos_token_id,
)

# Decode and print the generated code.
generated_code = tokenizer.decode(output[0], skip_special_tokens=True)
print(generated_code)