loubnabnl (HF Staff) committed
Commit 101e4d0 · Parent: 04f8503

update app

Files changed (1)
  1. app.py +6 -3
app.py CHANGED
@@ -5,9 +5,12 @@ from transformers import pipeline
 
 title = "CodeParrot Generator"
 description = "This is a subspace to make code generation with CodeParrot, it is used in a larger space for model comparison."
-example = [["def print_hello_world():\n \"""Print 'Hello world' \""" "], ["def get_file_size(filepath):"]]
+example = [
+    ["def print_hello_world():", "Sample", 8, 42],
+    ["\"""import GPT2 from transformers\""" ", "Sample", 16, 42],
+    ["def get_file_size(filepath):", "Sample", 8, 42]]
 tokenizer = AutoTokenizer.from_pretrained("lvwerra/codeparrot")
-model = AutoModelForCausalLM.from_pretrained("lvwerra/codeparrot")
+model = AutoModelForCausalLM.from_pretrained("lvwerra/codeparrot", low_cpu_mem_usage=True)
 
 
 def code_generation(gen_prompt, strategy, max_tokens, seed=42):
@@ -45,7 +48,7 @@ iface = gr.Interface(
         )
     ],
     outputs=gr.Textbox(label="Predicted code", lines=10),
-    #examples=example,
+    examples=example,
     layout="horizontal",
     theme="peach",
     description=description,
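
For context, a minimal sketch of how a Gradio examples list like the one added above lines up with the interface inputs: each row supplies one value per input component, in the order the inputs are declared (prompt, decoding strategy, max tokens, seed). The components, labels, and ranges below are illustrative assumptions, not the Space's actual definitions.

import gradio as gr

# Illustrative sketch only: component choices, labels, and ranges are assumptions.
def code_generation(gen_prompt, strategy, max_tokens, seed=42):
    # stand-in for the real tokenizer/model generation call
    return gen_prompt

example = [
    ["def print_hello_world():", "Sample", 8, 42],
    ["def get_file_size(filepath):", "Sample", 8, 42],
]

iface = gr.Interface(
    fn=code_generation,
    inputs=[
        gr.Textbox(lines=10, label="Input code"),                      # gen_prompt
        gr.Dropdown(["Greedy", "Sample"], label="Decoding strategy"),  # strategy
        gr.Slider(8, 64, step=8, label="Max tokens"),                  # max_tokens
        gr.Number(value=42, label="Seed"),                             # seed
    ],
    outputs=gr.Textbox(label="Predicted code", lines=10),
    examples=example,  # each row maps onto the four inputs above, in order
)
iface.launch()

The low_cpu_mem_usage=True flag passed to from_pretrained reduces peak RAM usage while the checkpoint is being loaded, which is useful when memory is limited.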