joshuahor committed
Commit a6014ec · Parent: 7b9d24e

Update config.json

Files changed (1)
  1. config.json +24 -19
config.json CHANGED
@@ -1,21 +1,26 @@
 {
-  "auto_mapping": null,
-  "base_model_name_or_path": "microsoft/phi-1_5",
-  "bias": "none",
-  "fan_in_fan_out": false,
-  "inference_mode": true,
-  "init_lora_weights": true,
-  "layers_pattern": null,
-  "layers_to_transform": null,
-  "lora_alpha": 16,
-  "lora_dropout": 0.05,
-  "modules_to_save": null,
-  "peft_type": "LORA",
-  "r": 16,
-  "revision": null,
-  "target_modules": [
-    "Wqkv",
-    "out_proj"
+  "_name_or_path": "phi-1.5-half",
+  "activation_function": "gelu_new",
+  "architectures": [
+    "MixFormerSequentialForCausalLM"
   ],
-  "task_type": "CAUSAL_LM"
-}
+  "auto_map": {
+    "AutoConfig": "configuration_mixformer_sequential.MixFormerSequentialConfig",
+    "AutoModelForCausalLM": "modeling_mixformer_sequential.MixFormerSequentialForCausalLM"
+  },
+  "embd_pdrop": 0.0,
+  "initializer_range": 0.02,
+  "layer_norm_epsilon": 1e-05,
+  "model_type": "mixformer-sequential",
+  "n_embd": 2048,
+  "n_head": 32,
+  "n_inner": null,
+  "n_layer": 24,
+  "n_positions": 2048,
+  "resid_pdrop": 0.0,
+  "rotary_dim": 32,
+  "tie_word_embeddings": false,
+  "torch_dtype": "float16",
+  "transformers_version": "4.32.1",
+  "vocab_size": 51200
+}
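
Note: the removed config.json was a PEFT LoRA adapter config (base model microsoft/phi-1_5, rank 16, targeting the Wqkv and out_proj projections), not a model config. As a rough sketch of how such a file is produced, assuming an installed peft library; the output directory name below is a placeholder, and peft normally serializes this file as adapter_config.json rather than config.json:

```python
# Sketch only: reproduces the removed adapter config's settings with peft.
# "adapter_dir" is a placeholder output directory, not part of this commit.
from peft import LoraConfig

lora_config = LoraConfig(
    base_model_name_or_path="microsoft/phi-1_5",
    r=16,                                 # LoRA rank, as in the removed file
    lora_alpha=16,                        # LoRA scaling factor
    lora_dropout=0.05,
    bias="none",
    target_modules=["Wqkv", "out_proj"],  # phi-1.5 attention projections
    task_type="CAUSAL_LM",
)
# Writes the adapter config (adapter_config.json) to the given directory.
lora_config.save_pretrained("adapter_dir")
```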
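
The new config's "auto_map" points AutoConfig and AutoModelForCausalLM at repo-local modules (configuration_mixformer_sequential.py and modeling_mixformer_sequential.py), so loading the checkpoint requires trust_remote_code=True. A minimal sketch, assuming those files ship with this repository; the repo path is a placeholder:

```python
# Sketch only: load a checkpoint whose config.json carries an "auto_map".
import torch
from transformers import AutoModelForCausalLM

model = AutoModelForCausalLM.from_pretrained(
    "path/to/this/repo",        # placeholder: local path or Hub repo id
    trust_remote_code=True,     # needed because auto_map targets repo-local code
    torch_dtype=torch.float16,  # matches "torch_dtype": "float16" in config.json
)
```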