SystemAdmin123 committed
Commit 18f72e5 · verified · 1 Parent(s): f8b226a

Training in progress, step 40, checkpoint

last-checkpoint/config.json CHANGED
@@ -1,28 +1,31 @@
 {
-  "_name_or_path": "unsloth/OpenHermes-2.5-Mistral-7B",
+  "_name_or_path": "facebook/opt-125m",
+  "_remove_final_layer_norm": false,
+  "activation_dropout": 0.0,
+  "activation_function": "relu",
   "architectures": [
-    "MistralForCausalLM"
+    "OPTForCausalLM"
   ],
   "attention_dropout": 0.0,
-  "bos_token_id": 1,
-  "eos_token_id": 32000,
-  "head_dim": 128,
-  "hidden_act": "silu",
-  "hidden_size": 4096,
-  "initializer_range": 0.02,
-  "intermediate_size": 14336,
-  "max_position_embeddings": 32768,
-  "model_type": "mistral",
-  "num_attention_heads": 32,
-  "num_hidden_layers": 32,
-  "num_key_value_heads": 8,
-  "pad_token_id": 0,
-  "rms_norm_eps": 1e-05,
-  "rope_theta": 10000.0,
-  "sliding_window": 4096,
-  "tie_word_embeddings": false,
+  "bos_token_id": 2,
+  "do_layer_norm_before": true,
+  "dropout": 0.1,
+  "enable_bias": true,
+  "eos_token_id": 2,
+  "ffn_dim": 3072,
+  "hidden_size": 768,
+  "init_std": 0.02,
+  "layer_norm_elementwise_affine": true,
+  "layerdrop": 0.0,
+  "max_position_embeddings": 2048,
+  "model_type": "opt",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "pad_token_id": 1,
+  "prefix": "</s>",
   "torch_dtype": "bfloat16",
   "transformers_version": "4.48.1",
   "use_cache": false,
-  "vocab_size": 32002
+  "vocab_size": 50265,
+  "word_embed_proj_dim": 768
 }
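
The base model for this checkpoint changed from unsloth/OpenHermes-2.5-Mistral-7B (a 7B Mistral) to facebook/opt-125m (a 125M OPT), so nearly every architecture field differs. A minimal sanity-check sketch using the standard transformers API, assuming the `last-checkpoint` directory from this commit is available locally:

```python
from transformers import AutoConfig

# Load the config committed in this checkpoint.
config = AutoConfig.from_pretrained("last-checkpoint")

# Values taken directly from the new config.json above.
assert config.model_type == "opt"
assert config.hidden_size == 768
assert config.num_hidden_layers == 12
assert config.num_attention_heads == 12
assert config.vocab_size == 50265
```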
last-checkpoint/generation_config.json CHANGED
@@ -1,7 +1,8 @@
 {
   "_from_model_config": true,
-  "bos_token_id": 1,
+  "bos_token_id": 2,
   "do_sample": true,
-  "eos_token_id": 32000,
+  "eos_token_id": 2,
+  "pad_token_id": 1,
   "transformers_version": "4.48.1"
 }
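
The new ids follow OPT's convention of reusing `</s>` (id 2) as both bos and eos, with `<pad>` at id 1. A short sketch of reading them back, again assuming the local checkpoint directory:

```python
from transformers import GenerationConfig

gen = GenerationConfig.from_pretrained("last-checkpoint")
# Matches the new generation_config.json above.
print(gen.bos_token_id, gen.eos_token_id, gen.pad_token_id)  # 2 2 1
print(gen.do_sample)  # True
```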
last-checkpoint/merges.txt CHANGED
The diff for this file is too large to render. See raw diff
 
last-checkpoint/model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:7632ba1b08e41a9688dc652015271c5554d97ca9c7a3699508fa42877e5f157f
-size 136062744
+oid sha256:020fb30959dcfbcac82451f41a438ae05e3b42759d7aef54dbd8b1b18fa92a0b
+size 250490408
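
The binary files in this commit are tracked with Git LFS, so the repository stores only a plain-text pointer (a sha256 oid plus a byte size); the new ~250 MB blob is consistent with roughly 125M bf16 parameters at two bytes each. A minimal sketch, assuming the blob has already been downloaded locally, of verifying a file against its pointer:

```python
import hashlib

def verify_lfs_pointer(blob_path: str, expected_oid: str, expected_size: int) -> bool:
    """Check a downloaded file against the oid/size from its LFS pointer."""
    sha = hashlib.sha256()
    size = 0
    with open(blob_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            sha.update(chunk)
            size += len(chunk)
    return sha.hexdigest() == expected_oid and size == expected_size

# Values from the new model.safetensors pointer above.
ok = verify_lfs_pointer(
    "last-checkpoint/model.safetensors",
    "020fb30959dcfbcac82451f41a438ae05e3b42759d7aef54dbd8b1b18fa92a0b",
    250490408,
)
```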
last-checkpoint/optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:5dcb4aaff4751c7031e867ff29e6e2b9b4b0674cc9dd1b3848e0ceb758301a28
-size 14710155092
+oid sha256:9872fc9d64a3810789ce509a7a6922036ba50d567eec659feeb8df060129157f
+size 255265850
last-checkpoint/rng_state_0.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:bcc743e915af10eb8f02d681d8900320ed9cdeb606f6a873967440c8da7f4171
+oid sha256:4bd18683b863492ce631a94ee5584e11dd4d36cc792cbaecef9af926cbae3258
 size 15984
last-checkpoint/rng_state_1.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:af355680189f24fb86dbec91733b793b613ef2007cc1d98d0dda1baea0cfe447
+oid sha256:150e9d8ac7ca53b6542b3bcb586e54185b9e3192646438983c13148331580cfb
 size 15984
last-checkpoint/rng_state_2.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:0cc519c300189fd8bf8532291017b4f18ca0e4ed4f300c5f1db28f7c2a306b1f
+oid sha256:734cf14d86163a569080064839ed18c1874440d23b1f6d1c9853d47632a8afe9
 size 15984
last-checkpoint/rng_state_3.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:43ab8fc2e16a3a3a411c16a904c939083a120546d1ba4e6b33896a8d98f82434
+oid sha256:807f31446b661fa88560ba5f27adce494a63b711698296f75f4ec55596cb0d16
 size 15984
last-checkpoint/rng_state_4.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:bf50222ecfd70b80cfe785ca128782a0335e8f9b2dc7d9c6f9de53c31acf54fb
+oid sha256:5ddacfc5985aaaf9c5485d233a2647099f91b76cf2d14ec27180de14a96e3a08
 size 15984
last-checkpoint/rng_state_5.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:d5459d8790300f0c26473f655cab63f95e275592efb1f11439d5dfe599c61078
+oid sha256:6362ec4db59fac739742d93389181b84a0b42aced1dd528d1bb6541d165d11b6
 size 15984
last-checkpoint/rng_state_6.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:b4cc73916421b8a9ff178b961a13691f77d415880f0b12c4e0a3d44cdc1a5891
+oid sha256:ad8f7136ea7db91b7b021660c4455507a8ca0de1d5b8ee7e42f733779d6ecc04
 size 15984
last-checkpoint/rng_state_7.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:9c72813dc00c3ca644b08ae1a69d75889c7a46e91ae3e1c0401df8a2aa23a9c8
+oid sha256:0b119d7dc6c4723bd70ab53c942895b56911d3f61b5a7093b269d2e567ab0602
 size 15984
last-checkpoint/scheduler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:572996e9dc190ff3f3a6efdf58de929f593fa2dd2f97d74fed7646828651b17f
+oid sha256:1bd85be8ff4bc728c66eadc82b696b848e3baf1ad51034e1c2a8d5d22b53d6ea
 size 1064
last-checkpoint/special_tokens_map.json CHANGED
@@ -1,29 +1,29 @@
 {
   "bos_token": {
-    "content": "<s>",
+    "content": "</s>",
     "lstrip": false,
-    "normalized": false,
+    "normalized": true,
     "rstrip": false,
     "single_word": false
   },
   "eos_token": {
-    "content": "<|im_end|>",
+    "content": "</s>",
     "lstrip": false,
-    "normalized": false,
+    "normalized": true,
     "rstrip": false,
     "single_word": false
   },
   "pad_token": {
-    "content": "<unk>",
+    "content": "<pad>",
     "lstrip": false,
-    "normalized": false,
+    "normalized": true,
     "rstrip": false,
     "single_word": false
   },
   "unk_token": {
-    "content": "<unk>",
+    "content": "</s>",
     "lstrip": false,
-    "normalized": false,
+    "normalized": true,
     "rstrip": false,
     "single_word": false
   }
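
The new map reuses `</s>` for bos, eos, and unk and adds a dedicated `<pad>`, again following OPT's defaults. A quick sketch to confirm the saved tokenizer picks these up:

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("last-checkpoint")
# Expected from the new special_tokens_map.json: </s> </s> </s> <pad>
print(tok.bos_token, tok.eos_token, tok.unk_token, tok.pad_token)
```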
last-checkpoint/tokenizer.json CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:04222cd76979c181cd3f72c3bf6982fe2a09d9f4b8f23d82902efde18f1d0668
-size 3506125
+oid sha256:4a80800503667fe0bd639ad10c33879f747ba1582f369b06abb21f3f65d5ad3b
+size 3558658
last-checkpoint/tokenizer_config.json CHANGED
@@ -1,20 +1,11 @@
 {
   "add_bos_token": true,
-  "add_eos_token": false,
-  "add_prefix_space": true,
+  "add_prefix_space": false,
   "added_tokens_decoder": {
-    "0": {
-      "content": "<unk>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
     "1": {
-      "content": "<s>",
+      "content": "<pad>",
       "lstrip": false,
-      "normalized": false,
+      "normalized": true,
       "rstrip": false,
       "single_word": false,
       "special": true
@@ -22,43 +13,21 @@
     "2": {
       "content": "</s>",
       "lstrip": false,
-      "normalized": false,
+      "normalized": true,
       "rstrip": false,
       "single_word": false,
       "special": true
-    },
-    "32000": {
-      "content": "<|im_end|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "32001": {
-      "content": "<|im_start|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
     }
   },
-  "additional_special_tokens": [],
-  "bos_token": "<s>",
-  "chat_template": "{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
+  "bos_token": "</s>",
+  "chat_template": "{% for message in messages %}{% if message['role'] == 'user' %}{{ '### Instruction: ' + message['content'] + '\n\n' }}{% elif message['role'] == 'assistant' %}{{ '### Response: ' + message['content'] + eos_token}}{% endif %}{% endfor %}",
   "clean_up_tokenization_spaces": false,
-  "eos_token": "<|im_end|>",
+  "eos_token": "</s>",
+  "errors": "replace",
   "extra_special_tokens": {},
-  "legacy": true,
-  "model_max_length": 32768,
-  "pad_token": "<unk>",
-  "padding_side": "right",
-  "sp_model_kwargs": {},
-  "spaces_between_special_tokens": false,
-  "tokenizer_class": "LlamaTokenizer",
-  "trust_remote_code": false,
-  "unk_token": "<unk>",
-  "use_default_system_prompt": true,
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": "<pad>",
+  "tokenizer_class": "GPT2Tokenizer",
+  "unk_token": "</s>",
   "use_fast": true
 }
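
The chat template switched from ChatML (`<|im_start|>`/`<|im_end|>`) to an Alpaca-style Instruction/Response format. A sketch of rendering it via `apply_chat_template`; the messages here are purely illustrative:

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("last-checkpoint")
messages = [
    {"role": "user", "content": "What is 2 + 2?"},
    {"role": "assistant", "content": "4"},
]
print(tok.apply_chat_template(messages, tokenize=False))
# With the new template above, this renders as:
# ### Instruction: What is 2 + 2?
#
# ### Response: 4</s>
```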
last-checkpoint/trainer_state.json CHANGED
@@ -1,7 +1,7 @@
 {
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch": 0.7692307692307693,
+  "epoch": 8.0,
   "eval_steps": 200,
   "global_step": 40,
   "is_hyper_param_search": false,
@@ -9,46 +9,46 @@
   "is_world_process_zero": true,
   "log_history": [
     {
-      "epoch": 0.019230769230769232,
-      "eval_loss": 1.8673125505447388,
-      "eval_runtime": 14.7715,
-      "eval_samples_per_second": 101.615,
-      "eval_steps_per_second": 6.364,
+      "epoch": 0.2,
+      "eval_loss": 3.4542977809906006,
+      "eval_runtime": 1.9768,
+      "eval_samples_per_second": 759.323,
+      "eval_steps_per_second": 6.071,
       "step": 1
     },
     {
-      "epoch": 0.19230769230769232,
-      "grad_norm": 18.375,
-      "learning_rate": 6.666666666666667e-05,
-      "loss": 2.4377,
+      "epoch": 2.0,
+      "grad_norm": 2.578125,
+      "learning_rate": 0.00019980267284282717,
+      "loss": 6.0334,
       "step": 10
     },
     {
-      "epoch": 0.38461538461538464,
-      "grad_norm": 80.5,
-      "learning_rate": 0.00013333333333333334,
-      "loss": 2.8471,
+      "epoch": 4.0,
+      "grad_norm": 4.4375,
+      "learning_rate": 0.0001992114701314478,
+      "loss": 5.3149,
       "step": 20
     },
     {
-      "epoch": 0.5769230769230769,
-      "grad_norm": 1200.0,
-      "learning_rate": 0.0002,
-      "loss": 7.8725,
+      "epoch": 6.0,
+      "grad_norm": 6.65625,
+      "learning_rate": 0.0001982287250728689,
+      "loss": 4.8724,
       "step": 30
     },
     {
-      "epoch": 0.7692307692307693,
-      "grad_norm": 37.25,
-      "learning_rate": 0.00019984815164333163,
-      "loss": 13.1972,
+      "epoch": 8.0,
+      "grad_norm": 3.5,
+      "learning_rate": 0.0001968583161128631,
+      "loss": 4.5259,
       "step": 40
     }
   ],
   "logging_steps": 10,
-  "max_steps": 600,
+  "max_steps": 500,
   "num_input_tokens_seen": 0,
-  "num_train_epochs": 12,
+  "num_train_epochs": 100,
   "save_steps": 40,
   "stateful_callbacks": {
     "TrainerControl": {
@@ -62,8 +62,8 @@
       "attributes": {}
     }
   },
-  "total_flos": 5.592056868320051e+16,
-  "train_batch_size": 2,
+  "total_flos": 5351260815360000.0,
+  "train_batch_size": 16,
   "trial_name": null,
   "trial_params": null
 }
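
The step-to-epoch ratio shifted sharply: the old run logged epoch 0.769 at step 40 (52 optimizer steps per epoch), while the new run logs epoch 8.0 at the same step (5 steps per epoch), consistent with a much smaller dataset and the larger train_batch_size of 16. A sketch of recovering these numbers from the saved state:

```python
import json

with open("last-checkpoint/trainer_state.json") as f:
    state = json.load(f)

# 40 / 8.0 = 5.0 optimizer steps per epoch in the new run.
print(state["global_step"] / state["epoch"])

# Most recent logged training entry (step 40 in this checkpoint).
last = [e for e in state["log_history"] if "loss" in e][-1]
print(last["loss"], last["learning_rate"])  # 4.5259, ~1.97e-4
```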
last-checkpoint/training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:53ebcd35271d188c087a5f00ee35959f144cc8fbcdfc0d1744678c3f065510bd
-size 6776
+oid sha256:f3d5f1e240ecc8a87b6c5953fd6de84410c7aff5f9646db311ea17ff72210b93
+size 6840
last-checkpoint/vocab.json CHANGED
The diff for this file is too large to render. See raw diff