Add checkpoint
- .DS_Store +0 -0
- checkpoints/.DS_Store +0 -0
- checkpoints/adapter_0.pt +3 -0
- checkpoints/adapter_100.pt +3 -0
- checkpoints/adapter_150.pt +3 -0
- checkpoints/adapter_200.pt +3 -0
- checkpoints/adapter_250.pt +3 -0
- checkpoints/adapter_50.pt +3 -0
- checkpoints/adapter_config.json +1 -0
- checkpoints/adapter_model.bin +3 -0
- checkpoints/config.json +1 -0
.DS_Store
ADDED
Binary file (6.15 kB)
checkpoints/.DS_Store
ADDED
Binary file (6.15 kB)
checkpoints/adapter_0.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1badf8326c9aa2c85850fcd65c7a22b70e8037fc7d249b6f33de170cb90b4a73
+size 390897466
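Each adapter_*.pt in this commit is stored as a Git LFS pointer: a three-line text stub recording the pointer spec version, the sha256 oid of the actual blob, and its size in bytes (about 391 MB per checkpoint here). The real weights live in LFS storage and are fetched by `git lfs pull`. Below is a minimal sketch of verifying a fetched blob against its pointer text; the function names and the idea of keeping the pointer text alongside the blob are illustrative assumptions, not part of this repo.

```python
import hashlib
from pathlib import Path

def parse_lfs_pointer(pointer_path: str) -> dict:
    """Parse a Git LFS pointer file ("key value" per line) into a dict."""
    fields = {}
    for line in Path(pointer_path).read_text().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

def verify_lfs_blob(pointer_path: str, blob_path: str) -> bool:
    """Check a downloaded blob against the pointer's sha256 oid and byte size."""
    fields = parse_lfs_pointer(pointer_path)
    expected_oid = fields["oid"].removeprefix("sha256:")
    expected_size = int(fields["size"])
    data = Path(blob_path).read_bytes()
    return len(data) == expected_size and hashlib.sha256(data).hexdigest() == expected_oid

# Hypothetical usage: pointer text saved aside before `git lfs pull` replaced it.
# print(verify_lfs_blob("adapter_0.pt.pointer", "checkpoints/adapter_0.pt"))
```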
checkpoints/adapter_100.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4f3aa1061fec853140dc4dbf8a350ac5ecff992e202036208e565a360702fea1
+size 390899394
checkpoints/adapter_150.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e7294851174cb67c19453e294dd6c8c57e6f7935e7ec5b336b35517b218de985
+size 390899394
checkpoints/adapter_200.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3986ffd99fc7d9f462615142a7a22d25071e1b8ae1729d3989c020393126706c
+size 390899394
checkpoints/adapter_250.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3248663545ad554314213545b87d267fb6dd6fa7649f10bd85f1a75cc0482ed3
+size 390899394
checkpoints/adapter_50.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e1fd42a31cd95674146d8075abcf5277b0facec364b7471796eb1203871766b1
+size 390898430
checkpoints/adapter_config.json
ADDED
@@ -0,0 +1 @@
+{"r": 16, "lora_alpha": 32, "target_modules": ["q_proj", "v_proj", "o_proj", "gate_proj", "down_proj", "up_proj"], "peft_type": "LORA"}
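adapter_config.json pins down a PEFT-style LoRA setup: rank r=16, scaling lora_alpha=32, with adapters injected into all six attention and MLP projections of each layer. The step-numbered files above (adapter_0 through adapter_250, every 50 steps) look like periodic training checkpoints. Below is a minimal sketch of rebuilding that configuration with the peft library; the base-model id is a placeholder (the commit does not name the base model), and it is an assumption that the per-step adapter_*.pt files hold plain LoRA state dicts whose key names line up with what get_peft_model produces, so checkpoints written by another trainer may need key remapping.

```python
import torch
from peft import LoraConfig, get_peft_model
from transformers import AutoModelForCausalLM

# Values copied verbatim from checkpoints/adapter_config.json.
lora_config = LoraConfig(
    r=16,
    lora_alpha=32,
    target_modules=["q_proj", "v_proj", "o_proj",
                    "gate_proj", "down_proj", "up_proj"],
)

# "base-model-id" is a placeholder; config.json below suggests a Llama base.
base = AutoModelForCausalLM.from_pretrained("base-model-id", torch_dtype=torch.bfloat16)
model = get_peft_model(base, lora_config)

# Restore one of the per-step checkpoints (assumed to be a LoRA-only state dict).
state = torch.load("checkpoints/adapter_250.pt", map_location="cpu")
model.load_state_dict(state, strict=False)
```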
checkpoints/adapter_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:502c03f6c3ca91e61c81b0c61f39cb8921d61988a433be6f8292b06a5f14dd09
+size 390928074
checkpoints/config.json
ADDED
@@ -0,0 +1 @@
+{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": [128001, 128008, 128009], "head_dim": 128, "hidden_act": "silu", "hidden_size": 8192, "initializer_range": 0.02, "intermediate_size": 28672, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "num_attention_heads": 64, "num_hidden_layers": 80, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "transformers_version": "4.47.0.dev0", "use_cache": true, "vocab_size": 128256}
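config.json describes the base architecture rather than the adapter: an 80-layer LlamaForCausalLM with hidden size 8192, grouped-query attention (64 query heads sharing 8 KV heads), a 128,256-token vocabulary, and llama3-style RoPE scaling from 8,192 to 131,072 positions, dimensions consistent with a Llama-3.1-70B-class model. A minimal sketch of loading it with transformers, assuming the repo is checked out locally:

```python
from transformers import LlamaConfig, LlamaForCausalLM

# from_pretrained on a config class reads config.json from the given directory.
config = LlamaConfig.from_pretrained("checkpoints")

print(config.num_hidden_layers)    # 80
print(config.hidden_size)          # 8192
print(config.num_key_value_heads)  # 8 (grouped-query attention)

# Instantiating from the config alone yields randomly initialized weights;
# the actual base weights are not part of this commit.
# model = LlamaForCausalLM(config)
```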