ehristoforu committed
Commit 073f8ad · verified · 1 Parent(s): 5e6fd12

Update config.json

Files changed (1)
  1. config.json +15 -14
config.json CHANGED
@@ -1,30 +1,30 @@
 {
-  "_name_or_path": "unsloth/Phi-3.5-mini-instruct-bnb-4bit",
+  "_name_or_path": "Phi-3.5-mini-instruct",
   "architectures": [
-    "LlamaForCausalLM"
+    "Phi3ForCausalLM"
   ],
-  "attention_bias": false,
   "attention_dropout": 0.0,
+  "auto_map": {
+    "AutoConfig": "configuration_phi3.Phi3Config",
+    "AutoModelForCausalLM": "modeling_phi3.Phi3ForCausalLM"
+  },
   "bos_token_id": 1,
+  "embd_pdrop": 0.0,
   "eos_token_id": 32000,
-  "head_dim": 96,
   "hidden_act": "silu",
   "hidden_size": 3072,
   "initializer_range": 0.02,
   "intermediate_size": 8192,
   "max_position_embeddings": 131072,
-  "mlp_bias": false,
-  "model_type": "llama",
+  "model_type": "phi3",
   "num_attention_heads": 32,
   "num_hidden_layers": 32,
   "num_key_value_heads": 32,
   "original_max_position_embeddings": 4096,
   "pad_token_id": 32000,
-  "pretraining_tp": 1,
+  "resid_pdrop": 0.0,
   "rms_norm_eps": 1e-05,
   "rope_scaling": {
-    "attention_factor": 32.0,
-    "factor": 32.0,
     "long_factor": [
       1.0800000429153442,
       1.1100000143051147,
@@ -75,7 +75,6 @@
       64.52999877929688,
       64.83999633789062
     ],
-    "rope_type": "longrope",
     "short_factor": [
       1.0,
       1.0199999809265137,
@@ -125,13 +124,15 @@
       2.729999542236328,
       2.749999523162842,
       2.8399994373321533
-    ]
+    ],
+    "type": "longrope"
   },
   "rope_theta": 10000.0,
+  "sliding_window": 262144,
   "tie_word_embeddings": false,
-  "torch_dtype": "float16",
-  "transformers_version": "4.47.1",
-  "unsloth_version": "2024.12.12",
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.43.3",
   "use_cache": true,
+  "attention_bias": false,
   "vocab_size": 32064
 }
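
For context, a minimal sketch (not part of the commit) of how the updated config.json can be sanity-checked with the transformers AutoConfig API. The repository id below is a placeholder, since the full repo path is not shown on this page; a local directory containing the updated file works the same way.

# Minimal sanity check, assuming a repo or local path that contains this commit's
# config.json. "ehristoforu/<repo-name>" is a placeholder, not a confirmed repo id.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("ehristoforu/<repo-name>")

assert config.model_type == "phi3"                  # was "llama" before this commit
assert config.architectures == ["Phi3ForCausalLM"]  # was ["LlamaForCausalLM"]
assert config.max_position_embeddings == 131072     # unchanged by this commit
print(type(config).__name__)                        # expected: Phi3Config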