IEIT-Yuan committed on
Commit
4a3cca5
·
verified ·
1 Parent(s): 9efb3d2

modified config file

Browse files
Files changed (1) hide show
  1. config.json +5 -5
config.json CHANGED
@@ -7,12 +7,13 @@
7
  "AutoConfig":"configuration_yuan.YuanConfig",
8
  "AutoModelForCausalLM":"yuan_hf_model.YuanForCausalLM"
9
  },
 
10
  "hidden_act": "silu",
11
  "hidden_size": 2048,
12
  "initializer_range": 0.02,
13
  "intermediate_size": 8192,
14
  "max_position_embeddings": 8192,
15
- "model_type": "llama",
16
  "num_attention_heads": 32,
17
  "num_hidden_layers": 24,
18
  "rms_norm_eps": 1e-06,
@@ -20,7 +21,7 @@
20
  "tie_word_embeddings": true,
21
  "torch_dtype": "bfloat16",
22
  "transformers_version": "4.30.0.dev0",
23
- "use_cache": false,
24
  "causal_mask": true,
25
  "use_flash_attention": false,
26
  "reset_attention_mask": true,
@@ -32,8 +33,7 @@
32
  "sep_token_id": 77185,
33
  "pad_token_id": 77185,
34
  "bos_token_id": 77185,
35
- "eos_token_id": 2324,
36
  "mask_token_id": 77185,
37
  "vocab_size": 135040
38
- }
39
-
 
7
  "AutoConfig":"configuration_yuan.YuanConfig",
8
  "AutoModelForCausalLM":"yuan_hf_model.YuanForCausalLM"
9
  },
10
+ "tokenizer_class":"YuanTokenizer",
11
  "hidden_act": "silu",
12
  "hidden_size": 2048,
13
  "initializer_range": 0.02,
14
  "intermediate_size": 8192,
15
  "max_position_embeddings": 8192,
16
+ "model_type": "yuan",
17
  "num_attention_heads": 32,
18
  "num_hidden_layers": 24,
19
  "rms_norm_eps": 1e-06,
 
21
  "tie_word_embeddings": true,
22
  "torch_dtype": "bfloat16",
23
  "transformers_version": "4.30.0.dev0",
24
+ "use_cache": true,
25
  "causal_mask": true,
26
  "use_flash_attention": false,
27
  "reset_attention_mask": true,
 
33
  "sep_token_id": 77185,
34
  "pad_token_id": 77185,
35
  "bos_token_id": 77185,
36
+ "eos_token_id": 77185,
37
  "mask_token_id": 77185,
38
  "vocab_size": 135040
39
+ }