aadityap committed
Commit 66b2049 · verified · 1 Parent(s): 49d9614

End of training
Files changed (2):
  1. README.md +4 -1
  2. config.json +30 -0
README.md CHANGED
@@ -3,9 +3,12 @@ library_name: peft
 license: mit
 base_model: deepseek-ai/DeepSeek-R1-Distill-Qwen-32B
 tags:
+- alignment-handbook
 - trl
 - sft
 - generated_from_trainer
+datasets:
+- tttx/ttt-bigestrun-021225-night-big-collated
 model-index:
 - name: ttt-collate-datasets-test-13feb
   results: []
@@ -16,7 +19,7 @@ should probably proofread and complete it, then remove this comment. -->
 
 # ttt-collate-datasets-test-13feb
 
-This model is a fine-tuned version of [deepseek-ai/DeepSeek-R1-Distill-Qwen-32B](https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B) on an unknown dataset.
+This model is a fine-tuned version of [tttx/sft-32b-020925-19k-5ep](https://huggingface.co/tttx/sft-32b-020925-19k-5ep) on the tttx/ttt-bigestrun-021225-night-big-collated dataset.
 
 ## Model description
 
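The card's library_name is peft and its tags include trl and sft, so this commit closes out training of a PEFT (LoRA-style) adapter rather than a full checkpoint. Below is a minimal loading sketch under two assumptions: the adapter's Hub path is tttx/ttt-collate-datasets-test-13feb (inferred from the model-index name, not stated in the diff), and the base is the tttx/sft-32b-020925-19k-5ep checkpoint that the updated card text links to (note the YAML base_model field still points at the DeepSeek distill).

```python
# Minimal sketch: load the adapter on top of its base model.
# Both repo ids are assumptions -- substitute the real Hub paths if they differ.
import torch
from peft import PeftModel
from transformers import AutoModelForCausalLM, AutoTokenizer

BASE_ID = "tttx/sft-32b-020925-19k-5ep"              # base named in the updated card text
ADAPTER_ID = "tttx/ttt-collate-datasets-test-13feb"  # assumed from the model-index name

tokenizer = AutoTokenizer.from_pretrained(BASE_ID)
base = AutoModelForCausalLM.from_pretrained(
    BASE_ID,
    torch_dtype=torch.bfloat16,  # matches torch_dtype in the committed config.json
    device_map="auto",
)
model = PeftModel.from_pretrained(base, ADAPTER_ID)
model.eval()
```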
config.json ADDED
@@ -0,0 +1,30 @@
+{
+  "_attn_implementation_autoset": true,
+  "_name_or_path": "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B",
+  "architectures": [
+    "Qwen2ForCausalLM"
+  ],
+  "attention_dropout": 0.0,
+  "bos_token_id": 151643,
+  "eos_token_id": 151643,
+  "hidden_act": "silu",
+  "hidden_size": 5120,
+  "initializer_range": 0.02,
+  "intermediate_size": 27648,
+  "max_position_embeddings": 131072,
+  "max_window_layers": 64,
+  "model_type": "qwen2",
+  "num_attention_heads": 40,
+  "num_hidden_layers": 64,
+  "num_key_value_heads": 8,
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": null,
+  "rope_theta": 1000000.0,
+  "sliding_window": null,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.47.0.dev0",
+  "use_cache": true,
+  "use_sliding_window": false,
+  "vocab_size": 152064
+}
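The added config.json pins the Qwen2 backbone geometry the adapter expects: 64 layers, a hidden size of 5120, and grouped-query attention with 40 query heads over 8 KV heads. A quick sanity-check sketch, assuming the file is read from the same hypothetical adapter path used above:

```python
# Sanity-check the committed config against the Qwen2-32B geometry it declares.
from transformers import AutoConfig

# Assumed repo path (inferred from the model-index name, not stated in the diff).
cfg = AutoConfig.from_pretrained("tttx/ttt-collate-datasets-test-13feb")

assert cfg.model_type == "qwen2"
assert cfg.hidden_size == 5120 and cfg.num_hidden_layers == 64
# Grouped-query attention: 40 query heads share 8 KV heads (5 per group).
assert cfg.num_attention_heads == 40 and cfg.num_key_value_heads == 8
print(cfg.max_position_embeddings)  # 131072, with rope_theta = 1e6 and no sliding window
```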