at676 committed on
Commit 5a9dcfc · verified · 1 Parent(s): a07aa32

dd284c1a01c3f7753aefd7ac368804dcc3e9bdbfcb9662113ede94a8d65b0fdc

config.json CHANGED
@@ -4,6 +4,7 @@
     "LlamaForCausalLM"
   ],
   "attention_bias": false,
+  "attention_dropout": 0.0,
   "bos_token_id": 1,
   "eos_token_id": 2,
   "hidden_act": "silu",
@@ -34,7 +35,7 @@
   "rope_theta": 10000.0,
   "tie_word_embeddings": false,
   "torch_dtype": "float16",
-  "transformers_version": "4.34.0",
+  "transformers_version": "4.36.2",
   "use_cache": true,
   "vocab_size": 32000
 }
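The two fields touched here can be checked directly from a local copy of config.json; a minimal sketch in Python (assumes the file sits in the current directory):

import json

with open("config.json") as f:
    cfg = json.load(f)

print(cfg["attention_dropout"])     # 0.0, newly added by this commit
print(cfg["transformers_version"])  # "4.36.2", bumped from "4.34.0"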
generation_config.json CHANGED
@@ -6,5 +6,5 @@
   "pad_token_id": 0,
   "temperature": 0.6,
   "top_p": 0.9,
-  "transformers_version": "4.34.0"
+  "transformers_version": "4.36.2"
 }
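These sampling defaults are picked up by transformers at generation time; a minimal sketch (assumes the transformers library and a local checkout of this repo at "."):

from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained(".")
print(gen_cfg.temperature, gen_cfg.top_p, gen_cfg.pad_token_id)  # 0.6 0.9 0
# model.generate(**inputs, generation_config=gen_cfg) would apply these values.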
model-00006-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2efc4d3f1b89667e967eab529df041d63877224bb5104b3e09be820610a52bc6
+size 1897188890
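The three lines above are a Git LFS pointer: the oid is the SHA-256 of the actual shard and size is its byte count. A minimal sketch for verifying a downloaded shard against this pointer (assumes the file has already been fetched, e.g. via git lfs pull):

import hashlib, os

path = "model-00006-of-00006.safetensors"
expected_oid = "2efc4d3f1b89667e967eab529df041d63877224bb5104b3e09be820610a52bc6"
expected_size = 1897188890

h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

assert os.path.getsize(path) == expected_size, "size mismatch"
assert h.hexdigest() == expected_oid, "sha256 mismatch"
print("shard matches the LFS pointer")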
model.safetensors.index.json CHANGED
The diff for this file is too large to render. See raw diff
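Although the index diff is not rendered here, the file follows the standard sharded-checkpoint layout: a metadata.total_size entry plus a weight_map from tensor names to shard files (now including model-00006-of-00006.safetensors). A minimal sketch for inspecting it from a local checkout of this repo:

import json
from collections import Counter

with open("model.safetensors.index.json") as f:
    index = json.load(f)

print(index["metadata"]["total_size"])        # total checkpoint size in bytes
print(Counter(index["weight_map"].values()))  # number of tensors per shard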