Update app.py
app.py CHANGED
@@ -3,8 +3,8 @@ from transformers import AutoTokenizer, AutoModelForCausalLM, set_seed, pipeline
 
 #https://huggingface.co/spaces/lvwerra/codeparrot-generation
 
-title = "SantaCoder+
-description = "This is a subspace to make code generation with [SantaCoder](https://huggingface.co/bigcode/santacoder)
+title = "SantaCoder+Stack Exchange Generator ππΎ+π"
+description = "This is a subspace to make code generation with [SantaCoder](https://huggingface.co/bigcode/santacoder) fine-tuned on [Stack Exchange](https://huggingface.co/datasets/ArmelR/stack-exchange-instruction). Feel free to check this larger [space](https://huggingface.co/spaces/loubnabnl/Code-generation-models-v1) for more information about code generation with 🤗."
 
 example = [
     ["def print_hello_world():", 8, 0.6, 42],
@@ -12,8 +12,8 @@ example = [
     ["def count_lines(filename):", 40, 0.6, 42],
     ["def count_words(filename):", 40, 0.6, 42]]
 
-tokenizer = AutoTokenizer.from_pretrained("ArmelR/
-model = AutoModelForCausalLM.from_pretrained("ArmelR/
+tokenizer = AutoTokenizer.from_pretrained("ArmelR/Instruction10K2048")
+model = AutoModelForCausalLM.from_pretrained("ArmelR/Instruction10K2048", trust_remote_code=True)
 
 
 def code_generation(gen_prompt, max_tokens, temperature=0.6, seed=42):
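The diff only shows the signature of code_generation, not its body. Below is a minimal sketch of how such a function could be wired up, assuming only the imports from the hunk header (AutoTokenizer, AutoModelForCausalLM, set_seed, pipeline) and the (prompt, max_tokens, temperature, seed) layout of the example rows; it is an illustration, not the Space's actual implementation.

# Sketch only: plausible wiring for the Space's code_generation helper,
# based on the imports and signature visible in the diff above.
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline, set_seed

checkpoint = "ArmelR/Instruction10K2048"
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = AutoModelForCausalLM.from_pretrained(checkpoint, trust_remote_code=True)

# Build one text-generation pipeline up front and reuse it for every request.
generator = pipeline("text-generation", model=model, tokenizer=tokenizer)

def code_generation(gen_prompt, max_tokens, temperature=0.6, seed=42):
    # Fix the RNG so identical inputs reproduce the same completion.
    set_seed(seed)
    output = generator(
        gen_prompt,
        do_sample=True,
        max_new_tokens=max_tokens,
        temperature=temperature,
    )
    return output[0]["generated_text"]

# Usage matching the first row of `example`:
# code_generation("def print_hello_world():", 8, 0.6, 42)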