minpeter committed
Commit fb28f0e · verified · 1 parent: fb9a492

Upload model.safetensors.index.json with huggingface_hub
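
The commit message says the index file was uploaded with huggingface_hub. As a minimal sketch of how such an upload is typically done, assuming the standard HfApi.upload_file call; the repo id and local path below are placeholders, not values taken from this commit:

    # Sketch: upload an updated safetensors index with huggingface_hub.
    # Repo id and local path are placeholders for illustration only.
    from huggingface_hub import HfApi

    api = HfApi()  # picks up the token from `huggingface-cli login` or HF_TOKEN

    api.upload_file(
        path_or_fileobj="model.safetensors.index.json",   # local index file
        path_in_repo="model.safetensors.index.json",      # destination path in the repo
        repo_id="minpeter/your-model",                    # placeholder repo id
        repo_type="model",
        commit_message="Upload model.safetensors.index.json with huggingface_hub",
    )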

Files changed (1)
  1. model.safetensors.index.json +24 -1
model.safetensors.index.json CHANGED
@@ -1,6 +1,6 @@
 {
   "metadata": {
-    "total_size": 108357120
+    "total_size": 111733248
   },
   "weight_map": {
     "model.embed_tokens.biases": "model.safetensors",
@@ -489,6 +489,29 @@
     "model.layers.27.self_attn.v_proj.biases": "model.safetensors",
     "model.layers.27.self_attn.v_proj.scales": "model.safetensors",
     "model.layers.27.self_attn.v_proj.weight": "model.safetensors",
+    "model.layers.28.input_layernorm.weight": "model.safetensors",
+    "model.layers.28.mlp.down_proj.biases": "model.safetensors",
+    "model.layers.28.mlp.down_proj.scales": "model.safetensors",
+    "model.layers.28.mlp.down_proj.weight": "model.safetensors",
+    "model.layers.28.mlp.gate_proj.biases": "model.safetensors",
+    "model.layers.28.mlp.gate_proj.scales": "model.safetensors",
+    "model.layers.28.mlp.gate_proj.weight": "model.safetensors",
+    "model.layers.28.mlp.up_proj.biases": "model.safetensors",
+    "model.layers.28.mlp.up_proj.scales": "model.safetensors",
+    "model.layers.28.mlp.up_proj.weight": "model.safetensors",
+    "model.layers.28.post_attention_layernorm.weight": "model.safetensors",
+    "model.layers.28.self_attn.k_proj.biases": "model.safetensors",
+    "model.layers.28.self_attn.k_proj.scales": "model.safetensors",
+    "model.layers.28.self_attn.k_proj.weight": "model.safetensors",
+    "model.layers.28.self_attn.o_proj.biases": "model.safetensors",
+    "model.layers.28.self_attn.o_proj.scales": "model.safetensors",
+    "model.layers.28.self_attn.o_proj.weight": "model.safetensors",
+    "model.layers.28.self_attn.q_proj.biases": "model.safetensors",
+    "model.layers.28.self_attn.q_proj.scales": "model.safetensors",
+    "model.layers.28.self_attn.q_proj.weight": "model.safetensors",
+    "model.layers.28.self_attn.v_proj.biases": "model.safetensors",
+    "model.layers.28.self_attn.v_proj.scales": "model.safetensors",
+    "model.layers.28.self_attn.v_proj.weight": "model.safetensors",
     "model.layers.3.input_layernorm.weight": "model.safetensors",
     "model.layers.3.mlp.down_proj.biases": "model.safetensors",
     "model.layers.3.mlp.down_proj.scales": "model.safetensors",