nielsbantilan committed on
Commit
073797c
1 Parent(s): 0e0b629

Upload GPTNeoXForCausalLM

Browse files
Files changed (2) hide show
  1. config.json +2 -2
  2. pytorch_model.bin +2 -2
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "EleutherAI/pythia-1B-deduped",
3
  "architectures": [
4
  "GPTNeoXForCausalLM"
5
  ],
@@ -18,7 +18,7 @@
18
  "rotary_emb_base": 10000,
19
  "rotary_pct": 0.25,
20
  "tie_word_embeddings": false,
21
- "torch_dtype": "float16",
22
  "transformers_version": "4.29.2",
23
  "use_cache": true,
24
  "use_parallel_residual": true,
 
1
  {
2
+ "_name_or_path": "/tmp/flyteav9zvdg1/local_flytekit/3dc3fdcbe70c98829ce1a05b3cd622c0",
3
  "architectures": [
4
  "GPTNeoXForCausalLM"
5
  ],
 
18
  "rotary_emb_base": 10000,
19
  "rotary_pct": 0.25,
20
  "tie_word_embeddings": false,
21
+ "torch_dtype": "float32",
22
  "transformers_version": "4.29.2",
23
  "use_cache": true,
24
  "use_parallel_residual": true,
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:247c2c8d4e70d963e87cfcd9c5ff7c32aac07fc279083a9c02af4c141ad296fc
3
- size 2090751965
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3ff095431efbef5d7a3b7e02f6a0888af48d5e1c2fe8de04c43c1c810c7e1b49
3
+ size 4114316253