mwjohnson committed
Commit 2714578 · verified · 1 Parent(s): 6573b3b

Upload 3 files

Files changed (2)
  1. config.json +1 -1
  2. model.safetensors +2 -2
config.json CHANGED
@@ -25,7 +25,7 @@
   "rope_scaling": null,
   "rope_theta": 10000000.0,
   "tie_word_embeddings": false,
-  "torch_dtype": "float32",
+  "torch_dtype": "bfloat16",
   "transformers_version": "4.52.4",
   "use_cache": true,
   "vocab_size": 49159
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:2f06a932f2c77e18ad1c6ecb527b14525be054771b879ac686207531f831520f
-size 4001713496
+oid sha256:54017ad30d2212439b34c4cd1103b7d8dc4578f5821548cc79d1d2ae073e4a3d
+size 2000858480
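The LFS pointer update mirrors the dtype change: the file shrinks from roughly 4.00 GB to roughly 2.00 GB, consistent with each parameter going from 4 bytes (float32) to 2 bytes (bfloat16). A rough sanity check, assuming the file is almost entirely tensor data (the small remainder is safetensors header/metadata):

```python
float32_size = 4_001_713_496   # bytes, previous checkpoint
bfloat16_size = 2_000_858_480  # bytes, new checkpoint

approx_params = float32_size / 4   # ~1.00e9 parameters at 4 bytes each
expected_bf16 = approx_params * 2  # ~2.00e9 bytes at 2 bytes each

print(f"~{approx_params / 1e9:.2f}B params, "
      f"expected bf16 size ~{expected_bf16 / 1e9:.2f} GB, "
      f"actual {bfloat16_size / 1e9:.2f} GB")
```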