Daniël de Kok committed on
Commit
54e8cc9
·
1 Parent(s): 7ea1ac6

Fixup settings

Browse files
Files changed (2) hide show
  1. config.json +2 -4
  2. pytorch_model.bin +2 -2
config.json CHANGED
@@ -22,12 +22,10 @@
22
  "initializer_range": 0.02,
23
  "layer_norm_epsilon": 1e-05,
24
  "model_type": "RefinedWebModel",
25
- "multi_query": true,
26
  "n_head": 4,
27
  "n_layer": 5,
28
- "parallel_attn": [
29
- false
30
- ],
31
  "torch_dtype": "float32",
32
  "transformers_version": "4.30.2",
33
  "type_vocab_size": 16,
 
22
  "initializer_range": 0.02,
23
  "layer_norm_epsilon": 1e-05,
24
  "model_type": "RefinedWebModel",
25
+ "multi_query": false,
26
  "n_head": 4,
27
  "n_layer": 5,
28
+ "parallel_attn": false,
 
 
29
  "torch_dtype": "float32",
30
  "transformers_version": "4.30.2",
31
  "type_vocab_size": 16,
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:4bdf6f4a22c6df0162bc34601c8251b155ba79b466c2b18bc5bb5b427a5eb6af
3
- size 358036
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:be94ab6a725077850a3922d42291817ec283ae9b94e4ea349975fcb81a01c633
3
+ size 393266