automerger committed on
Commit
0ef6656
·
verified ·
1 Parent(s): 3481227

Upload folder using huggingface_hub

Browse files
README.md CHANGED
@@ -1,33 +1,39 @@
1
  ---
2
- license: cc-by-nc-4.0
3
  tags:
4
  - merge
5
  - mergekit
6
  - lazymergekit
7
  - automerger
8
  base_model:
9
- - rwitz/experiment26-truthy-iter-1
 
10
  ---
11
 
12
  # Experiment26Experiment26-7B
13
 
14
  Experiment26Experiment26-7B is an automated merge created by [Maxime Labonne](https://huggingface.co/mlabonne) using the following configuration.
15
- * [rwitz/experiment26-truthy-iter-1](https://huggingface.co/rwitz/experiment26-truthy-iter-1)
 
16
 
17
  ## 🧩 Configuration
18
 
19
  ```yaml
20
- models:
21
- - model: rwitz/experiment26-truthy-iter-0
22
- # No parameters necessary for base model
23
- - model: rwitz/experiment26-truthy-iter-1
24
- parameters:
25
- density: 0.53
26
- weight: 0.6
27
- merge_method: dare_ties
28
  base_model: rwitz/experiment26-truthy-iter-0
29
  parameters:
30
- int8_mask: true
 
 
 
 
 
31
  dtype: bfloat16
32
  random_seed: 0
33
  ```
 
1
  ---
2
+ license: apache-2.0
3
  tags:
4
  - merge
5
  - mergekit
6
  - lazymergekit
7
  - automerger
8
  base_model:
9
+ - rwitz/experiment26-truthy-iter-0
10
+ - yam-peleg/Experiment26-7B
11
  ---
12
 
13
  # Experiment26Experiment26-7B
14
 
15
  Experiment26Experiment26-7B is an automated merge created by [Maxime Labonne](https://huggingface.co/mlabonne) using the following configuration.
16
+ * [rwitz/experiment26-truthy-iter-0](https://huggingface.co/rwitz/experiment26-truthy-iter-0)
17
+ * [yam-peleg/Experiment26-7B](https://huggingface.co/yam-peleg/Experiment26-7B)
18
 
19
  ## 🧩 Configuration
20
 
21
  ```yaml
22
+ slices:
23
+ - sources:
24
+ - model: rwitz/experiment26-truthy-iter-0
25
+ layer_range: [0, 32]
26
+ - model: yam-peleg/Experiment26-7B
27
+ layer_range: [0, 32]
28
+ merge_method: slerp
 
29
  base_model: rwitz/experiment26-truthy-iter-0
30
  parameters:
31
+ t:
32
+ - filter: self_attn
33
+ value: [0, 0.5, 0.3, 0.7, 1]
34
+ - filter: mlp
35
+ value: [1, 0.5, 0.7, 0.3, 0]
36
+ - value: 0.5
37
  dtype: bfloat16
38
  random_seed: 0
39
  ```
config.json CHANGED
@@ -20,7 +20,7 @@
20
  "sliding_window": 4096,
21
  "tie_word_embeddings": false,
22
  "torch_dtype": "bfloat16",
23
- "transformers_version": "4.38.2",
24
  "use_cache": true,
25
  "vocab_size": 32000
26
  }
 
20
  "sliding_window": 4096,
21
  "tie_word_embeddings": false,
22
  "torch_dtype": "bfloat16",
23
+ "transformers_version": "4.39.0",
24
  "use_cache": true,
25
  "vocab_size": 32000
26
  }
mergekit_config.yml CHANGED
@@ -1,15 +1,19 @@
1
 
2
- models:
3
- - model: rwitz/experiment26-truthy-iter-0
4
- # No parameters necessary for base model
5
- - model: rwitz/experiment26-truthy-iter-1
6
- parameters:
7
- density: 0.53
8
- weight: 0.6
9
- merge_method: dare_ties
10
  base_model: rwitz/experiment26-truthy-iter-0
11
  parameters:
12
- int8_mask: true
 
 
 
 
 
13
  dtype: bfloat16
14
  random_seed: 0
15
 
 
1
 
2
+ slices:
3
+ - sources:
4
+ - model: rwitz/experiment26-truthy-iter-0
5
+ layer_range: [0, 32]
6
+ - model: yam-peleg/Experiment26-7B
7
+ layer_range: [0, 32]
8
+ merge_method: slerp
 
9
  base_model: rwitz/experiment26-truthy-iter-0
10
  parameters:
11
+ t:
12
+ - filter: self_attn
13
+ value: [0, 0.5, 0.3, 0.7, 1]
14
+ - filter: mlp
15
+ value: [1, 0.5, 0.7, 0.3, 0]
16
+ - value: 0.5
17
  dtype: bfloat16
18
  random_seed: 0
19
 
model-00001-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:fdf075b12c2126c91988cac825b9ce34c3728b89e763861273deeb2065f1d3f2
3
  size 9942981696
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0c2040230cbe78f2d058b7866da799b07a846eb7bb8ae405b4a29330bf44e587
3
  size 9942981696
model-00002-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:985e5c387b87e00718735b983905e732e9a8a7056628c03d0db1e8f84b1bfb69
3
  size 4540516344
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:135770e8e6aed94e95184c999e8e081160cf9d0f551353d8565cc510ede4e382
3
  size 4540516344