mgoin committed
Commit 9024356 · 1 Parent(s): 7658a95

Update recipe.yaml

Files changed (1)
  1. recipe.yaml +62 -0
recipe.yaml CHANGED
@@ -0,0 +1,62 @@
+test_stage:
+  obcq_modifiers:
+    QuantizationModifier:
+      ignore:
+        - MistralRotaryEmbedding
+        - MistralRMSNorm
+        - SiLUActivation
+        - model.layers.1.mlp.down_proj
+        - model.layers.31.mlp.down_proj
+        - model.layers.30.mlp.down_proj
+        - model.layers.30.mlp.gate_proj
+        - model.layers.30.mlp.up_proj
+      post_oneshot_calibration: True
+      scheme_overrides:
+        Embedding:
+          input_activations: null
+          weights:
+            num_bits: 8
+            symmetric: False
+    SparseGPTModifier:
+      sparsity: 0.5
+      block_size: 128
+      sequential_update: False
+      quantize: True
+      percdamp: 0.01
+      prunen: 0
+      prunem: 0
+      targets: [
+        "model.layers.0",
+        "model.layers.1",
+        "model.layers.2",
+        "model.layers.3",
+        "model.layers.4",
+        "model.layers.5",
+        "model.layers.6",
+        "model.layers.7",
+        "model.layers.8",
+        "model.layers.9",
+        "model.layers.10",
+        "model.layers.11",
+        "model.layers.12",
+        "model.layers.13",
+        "model.layers.14",
+        "model.layers.15",
+        "model.layers.16",
+        "model.layers.17",
+        "model.layers.18",
+        "model.layers.19",
+        "model.layers.20",
+        "model.layers.21",
+        "model.layers.22",
+        "model.layers.23",
+        "model.layers.24",
+        "model.layers.25",
+        "model.layers.26",
+        "model.layers.27",
+        "model.layers.28",
+        "model.layers.29",
+        "model.layers.30",
+        "model.layers.31",
+      ]
+      target_ids: ["attention_mask", "position_ids"]
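
For context, the recipe added here combines one-shot INT8 quantization (QuantizationModifier, with rotary embeddings, RMSNorm, SiLU activations, and a few sensitive MLP projections excluded) with 50% unstructured SparseGPT pruning applied to all 32 decoder layers. A minimal sketch of how a recipe like this is typically applied with SparseML's one-shot entrypoint follows; the model ID, calibration dataset, and sample counts are illustrative assumptions and are not specified by this commit.

# Minimal sketch, assuming SparseML's oneshot entrypoint is available.
# The model, dataset, and calibration settings below are placeholders,
# not values taken from this commit.
from sparseml.transformers import oneshot

oneshot(
    model="mistralai/Mistral-7B-v0.1",   # assumed base model
    dataset="open_platypus",             # assumed calibration dataset
    recipe="recipe.yaml",                # the recipe added in this commit
    max_seq_length=512,                  # assumed calibration sequence length
    num_calibration_samples=512,         # assumed number of calibration samples
    output_dir="./obcq_deployment",      # assumed output location
)

With post_oneshot_calibration: True, the recipe re-runs calibration after the one-shot pruning pass so the quantization parameters are fit to the sparsified weights rather than the dense ones.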