transmogrifier committed on
Commit
97d2c87
1 Parent(s): 685ac4e

Upload model

Browse files
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "tiiuae/falcon-7b",
3
  "alibi": false,
4
  "apply_residual_connection_post_layernorm": false,
5
  "architectures": [
@@ -8,12 +8,8 @@
8
  "attention_dropout": 0.0,
9
  "attention_probs_dropout_prob": 0.0,
10
  "auto_map": {
11
- "AutoConfig": "tiiuae/falcon-7b--configuration_RW.RWConfig",
12
- "AutoModel": "tiiuae/falcon-7b--modelling_RW.RWModel",
13
- "AutoModelForCausalLM": "tiiuae/falcon-7b--modelling_RW.RWForCausalLM",
14
- "AutoModelForQuestionAnswering": "tiiuae/falcon-7b--modelling_RW.RWForQuestionAnswering",
15
- "AutoModelForSequenceClassification": "tiiuae/falcon-7b--modelling_RW.RWForSequenceClassification",
16
- "AutoModelForTokenClassification": "tiiuae/falcon-7b--modelling_RW.RWForTokenClassification"
17
  },
18
  "bias": false,
19
  "bos_token_id": 11,
@@ -38,5 +34,5 @@
38
  "torch_dtype": "float16",
39
  "transformers_version": "4.30.2",
40
  "use_cache": true,
41
- "vocab_size": 65024
42
  }
 
1
  {
2
+ "_name_or_path": "tiiuae/falcon-7b-instruct",
3
  "alibi": false,
4
  "apply_residual_connection_post_layernorm": false,
5
  "architectures": [
 
8
  "attention_dropout": 0.0,
9
  "attention_probs_dropout_prob": 0.0,
10
  "auto_map": {
11
+ "AutoConfig": "tiiuae/falcon-7b-instruct--configuration_RW.RWConfig",
12
+ "AutoModelForCausalLM": "tiiuae/falcon-7b-instruct--modelling_RW.RWForCausalLM"
 
 
 
 
13
  },
14
  "bias": false,
15
  "bos_token_id": 11,
 
34
  "torch_dtype": "float16",
35
  "transformers_version": "4.30.2",
36
  "use_cache": true,
37
+ "vocab_size": 65027
38
  }
model-00001-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:c2ce1cd8870a3e23b5ec184aba7e40817decc8f2da5007739119c535ed1dcc2a
3
- size 9950994688
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d9cbb4d9fa805ea8393e9aafceadcbf9301bec79ac6b9c990358f37806b1d8b9
3
+ size 9951021952
model-00002-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:e903db723e4060119decd0a9cb2a766759f0f6b8ef6ec12b1bdc328914308188
3
  size 3892469864
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:65a2134e95fdbae0ca8cd83574702d1beac2c536996c39147f6031e1822e1dfc
3
  size 3892469864
model.safetensors.index.json CHANGED
@@ -1,6 +1,6 @@
1
  {
2
  "metadata": {
3
- "total_size": 13843441408
4
  },
5
  "weight_map": {
6
  "transformer.h.0.input_layernorm.bias": "model-00001-of-00002.safetensors",
 
1
  {
2
  "metadata": {
3
+ "total_size": 13843468672
4
  },
5
  "weight_map": {
6
  "transformer.h.0.input_layernorm.bias": "model-00001-of-00002.safetensors",