NotoriousH2 committed on
Commit
04aa989
1 Parent(s): c7a48c0

Upload Qwen2ForCausalLM

Files changed (4):
  1. README.md +3 -1
  2. config.json +10 -5
  3. generation_config.json +2 -10
  4. model.safetensors +2 -2
README.md CHANGED
@@ -1,6 +1,8 @@
 ---
 library_name: transformers
-tags: []
+tags:
+- trl
+- sft
 ---

 # Model Card for Model ID
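
The added trl / sft tags indicate the checkpoint was produced with TRL's supervised fine-tuning pipeline. A minimal sketch of how such a run is typically set up (the dataset and every hyperparameter below are placeholders; the actual training script is not part of this commit):

# Minimal sketch, assuming TRL's SFTTrainer (dataset and settings are
# placeholders, not the configuration behind this commit).
from datasets import load_dataset
from trl import SFTConfig, SFTTrainer

train_dataset = load_dataset("trl-lib/Capybara", split="train")  # placeholder dataset

trainer = SFTTrainer(
    model="Qwen/Qwen2.5-1.5B-Instruct",     # base model named in the old config.json
    args=SFTConfig(output_dir="test1"),     # "test1" matches the new _name_or_path
    train_dataset=train_dataset,
)
trainer.train()
trainer.save_model("test1")
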
config.json CHANGED
@@ -1,20 +1,24 @@
 {
-  "_name_or_path": "Qwen/Qwen2.5-1.5B-instruct",
+  "_name_or_path": "test1",
   "architectures": [
     "Qwen2ForCausalLM"
   ],
   "attention_dropout": 0.0,
   "bos_token_id": 151643,
-  "eos_token_id": 151645,
+  "eos_token_id": 151643,
   "hidden_act": "silu",
   "hidden_size": 1536,
   "initializer_range": 0.02,
   "intermediate_size": 8960,
-  "max_position_embeddings": 32768,
-  "max_window_layers": 21,
+  "max_position_embeddings": 131072,
+  "max_window_layers": 28,
   "model_type": "qwen2",
+  "new_layer_indices": [
+    29,
+    14
+  ],
   "num_attention_heads": 12,
-  "num_hidden_layers": 28,
+  "num_hidden_layers": 30,
   "num_key_value_heads": 2,
   "rms_norm_eps": 1e-06,
   "rope_scaling": null,
@@ -24,6 +28,7 @@
   "torch_dtype": "bfloat16",
   "transformers_version": "4.46.2",
   "use_cache": true,
+  "use_mrope": false,
   "use_sliding_window": false,
   "vocab_size": 151936
 }
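
Taken together, these config changes describe a depth up-scaled model: the stack grows from 28 to 30 decoder layers, max_position_embeddings jumps from 32768 to 131072, eos_token_id switches to 151643, and a non-standard new_layer_indices key (not part of the stock Qwen2Config) records positions 29 and 14, presumably where the extra layers were inserted. A sketch of the kind of duplicate-and-insert expansion this implies, assuming new_layer_indices marks insertion positions (the actual expansion script is not shown in this commit):

# Sketch only: duplicate two decoder layers of the base model and insert
# them, assuming new_layer_indices marks the insertion positions.
import copy
from transformers import AutoModelForCausalLM

model = AutoModelForCausalLM.from_pretrained("Qwen/Qwen2.5-1.5B-Instruct")
layers = model.model.layers                      # nn.ModuleList of 28 decoder layers

for idx in sorted([14, 29]):                     # insert the lower index first
    layers.insert(idx, copy.deepcopy(layers[idx - 1]))

for i, layer in enumerate(layers):               # renumber so the KV cache indexes correctly
    layer.self_attn.layer_idx = i

model.config.num_hidden_layers = len(layers)     # 28 -> 30
model.save_pretrained("test1")
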
generation_config.json CHANGED
@@ -1,14 +1,6 @@
 {
+  "_from_model_config": true,
   "bos_token_id": 151643,
-  "do_sample": true,
-  "eos_token_id": [
-    151645,
-    151643
-  ],
-  "pad_token_id": 151643,
-  "repetition_penalty": 1.1,
-  "temperature": 0.7,
-  "top_k": 20,
-  "top_p": 0.8,
+  "eos_token_id": 151643,
   "transformers_version": "4.46.2"
 }
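
All sampling defaults (do_sample, temperature, top_p, top_k, repetition_penalty, pad_token_id) are stripped and the file is regenerated from the model config, so model.generate() now falls back to greedy decoding unless parameters are supplied per call. A sketch of restoring the deleted defaults at call time (the repo id is a placeholder, since the diff does not name it):

# Sketch: pass the removed sampling settings explicitly at generation time.
from transformers import AutoModelForCausalLM, AutoTokenizer

repo = "path/to/this-repo"                       # placeholder; the repo id is not in the diff
tokenizer = AutoTokenizer.from_pretrained(repo)
model = AutoModelForCausalLM.from_pretrained(repo)

inputs = tokenizer("Hello, who are you?", return_tensors="pt")
output = model.generate(
    **inputs,
    max_new_tokens=64,
    do_sample=True,                              # values below mirror the deleted defaults
    temperature=0.7,
    top_p=0.8,
    top_k=20,
    repetition_penalty=1.1,
    pad_token_id=151643,
)
print(tokenizer.decode(output[0], skip_special_tokens=True))
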
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:dd924a11b4c220f385b51ffa522daea7c9f3d850e31b162bb5661df483c6d3ee
-size 3087467144
+oid sha256:bc4dba4f492953e718c3116423c264511e84060d4dc760f34cacf19d36818f35
+size 3274661200
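
The weight file grows by 3,274,661,200 − 3,087,467,144 = 187,194,056 bytes, consistent with the two added decoder layers: each Qwen2.5-1.5B layer holds about 46.8M parameters, roughly 93.6 MB in bfloat16. A back-of-the-envelope check, assuming the standard Qwen2 layer shapes from config.json:

# Check that the size delta matches two extra bf16 decoder layers.
h, inter, kv = 1536, 8960, 256                   # hidden, intermediate, k/v projection width
attn = (h * h + h) + 2 * (h * kv + kv) + h * h   # q/k/v (with bias) + o projections
mlp = 3 * h * inter                              # gate, up, down projections
norms = 2 * h                                    # two RMSNorm weight vectors
per_layer = attn + mlp + norms                   # 46,797,824 parameters

print(2 * per_layer * 2)                         # 187,191,296 bytes for two bf16 layers
print(3274661200 - 3087467144)                   # 187,194,056 bytes observed (header overhead aside)
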