Suparious committed on
Commit 6555571 · verified · 1 Parent(s): 5f3b7ae

Upload quantized AWQ model

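For context, below is a minimal sketch of how an AWQ-quantized checkpoint like this one is typically loaded with transformers. The repo id is hypothetical (the commit page does not name the repository), and autoawq plus accelerate are assumed to be installed alongside transformers for the AWQ kernels.

    # Minimal sketch; "solidrust/ChimeraLlama-3-8B-v3-AWQ" is a hypothetical
    # repo id, and autoawq + accelerate are assumed to be installed.
    from transformers import AutoModelForCausalLM, AutoTokenizer

    repo_id = "solidrust/ChimeraLlama-3-8B-v3-AWQ"  # hypothetical
    tokenizer = AutoTokenizer.from_pretrained(repo_id)
    model = AutoModelForCausalLM.from_pretrained(repo_id, device_map="auto")

    inputs = tokenizer("The capital of France is", return_tensors="pt").to(model.device)
    print(tokenizer.decode(model.generate(**inputs, max_new_tokens=16)[0]))
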
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "/home/ubuntu/.cache/huggingface/hub/models--mlabonne--ChimeraLlama-3-8B-v3/snapshots/c8c1787e1426e3979ae82134f4eb7fa332f58ae0",
+  "_name_or_path": "/opt/openbet/inference/data/mlabonne-ChimeraLlama-3-8B-v3",
   "architectures": [
     "LlamaForCausalLM"
   ],
@@ -12,6 +12,7 @@
   "initializer_range": 0.02,
   "intermediate_size": 14336,
   "max_position_embeddings": 8192,
+  "mlp_bias": false,
   "model_type": "llama",
   "num_attention_heads": 32,
   "num_hidden_layers": 32,
@@ -30,7 +31,7 @@
   "rope_theta": 500000.0,
   "tie_word_embeddings": false,
   "torch_dtype": "float16",
-  "transformers_version": "4.38.2",
-  "use_cache": false,
+  "transformers_version": "4.44.2",
+  "use_cache": true,
   "vocab_size": 128256
 }
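
The notable changes above are the local `_name_or_path` left over from the quantization run, the `mlp_bias` field emitted by newer transformers releases, the version bump to 4.44.2, and `use_cache` flipping to true. A minimal sketch of reading these fields back, assuming the same hypothetical repo id as above:

    from transformers import AutoConfig

    config = AutoConfig.from_pretrained("solidrust/ChimeraLlama-3-8B-v3-AWQ")  # hypothetical
    print(config.use_cache)                   # True after this commit
    print(getattr(config, "mlp_bias", None))  # False; field added by newer transformers
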
generation_config.json CHANGED
@@ -3,6 +3,5 @@
   "bos_token_id": 128000,
   "do_sample": true,
   "eos_token_id": 128001,
-  "transformers_version": "4.38.2",
-  "use_cache": false
+  "transformers_version": "4.44.2"
 }
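
The generation defaults lose the `use_cache` override and pick up the new transformers version. They can be read back with `GenerationConfig` (same hypothetical repo id):

    from transformers import GenerationConfig

    gen = GenerationConfig.from_pretrained("solidrust/ChimeraLlama-3-8B-v3-AWQ")  # hypothetical
    print(gen.do_sample, gen.bos_token_id, gen.eos_token_id)  # True 128000 128001
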
model-00001-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:e91c5029bc967a898aa8dcc7568d511dc7758038f6bee13f7c913225a7e6656e
+oid sha256:ae369237ef085a9570b3e3b34293faec0f9c6f75e2fb93e048106be45195cb0f
 size 4677265296
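
The shard itself is stored via Git LFS, so only the pointer (hash and size) appears in the diff. A minimal sketch for verifying a downloaded shard against the new pointer, assuming the file sits in the current directory:

    import hashlib

    def sha256_of(path, chunk_size=1 << 20):
        """Stream the file in 1 MiB chunks so the ~4.7 GB shard never sits in RAM."""
        h = hashlib.sha256()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(chunk_size), b""):
                h.update(chunk)
        return h.hexdigest()

    assert sha256_of("model-00001-of-00002.safetensors") == (
        "ae369237ef085a9570b3e3b34293faec0f9c6f75e2fb93e048106be45195cb0f"
    )
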
tokenizer.json CHANGED
@@ -2407,6 +2407,7 @@
   "end_of_word_suffix": null,
   "fuse_unk": false,
   "byte_fallback": false,
+  "ignore_merges": true,
   "vocab": {
     "!": 0,
     "\"": 1,