Jason0214 committed
Commit 89f7e70
1 Parent(s): ced2d75

Upload folder using huggingface_hub

Files changed (3):
  1. config.json +35 -30
  2. special_tokens_map.json +5 -0
  3. tokenizer_config.json +9 -0
config.json CHANGED
@@ -1,31 +1,36 @@
 {
-  "_name_or_path": "MBZUAI-LLM/SlimPajama-DC",
-  "architectures": [
-    "BTLMLMHeadModel"
-  ],
-  "auto_map": {
-    "AutoConfig": "MBZUAI-LLM/SlimPajama-DC--configuration_btlm.BTLMConfig",
-    "AutoModel": "MBZUAI-LLM/SlimPajama-DC--modeling_btlm.BTLMModel",
-    "AutoModelForCausalLM": "MBZUAI-LLM/SlimPajama-DC--modeling_btlm.BTLMLMHeadModel"
-  },
-  "model_type": "btlm",
-  "eos_token_id": 0,
-  "pad_token_id": 0,
-  "attn_pdrop": 0.0,
-  "scale_attn_weights": true,
-  "resid_pdrop": 0.0,
-  "embeddings_scale": 14.6,
-  "n_inner": 5461,
-  "n_embd": 2048,
-  "layer_norm_epsilon": 1e-05,
-  "n_positions": 2048,
-  "activation_function": "swiglu",
-  "n_head": 16,
-  "n_layer": 24,
-  "width_scale": 0.2775,
-  "position_embedding_type": "alibi",
-  "scale_qk_dot_by_d": true,
-  "tie_word_embeddings": true,
-  "vocab_size": 50277,
-  "embd_pdrop": 0.0
-}
+  "_name_or_path": "MBZUAI-LLM/SlimPajama-DC",
+  "activation_function": "swiglu",
+  "architectures": [
+    "BTLMLMHeadModel"
+  ],
+  "attn_pdrop": 0.0,
+  "auto_map": {
+    "AutoConfig": "configuration_btlm.BTLMConfig",
+    "AutoModel": "MBZUAI-LLM/SlimPajama-DC--modeling_btlm.BTLMModel",
+    "AutoModelForCausalLM": "MBZUAI-LLM/SlimPajama-DC--modeling_btlm.BTLMLMHeadModel"
+  },
+  "bos_token_id": 50256,
+  "embd_pdrop": 0.0,
+  "embeddings_scale": 14.6,
+  "eos_token_id": 0,
+  "initializer_range": 0.02,
+  "layer_norm_epsilon": 1e-05,
+  "model_type": "btlm",
+  "n_embd": 2048,
+  "n_head": 16,
+  "n_inner": 5461,
+  "n_layer": 24,
+  "n_positions": 2048,
+  "pad_token_id": 0,
+  "position_embedding_type": "alibi",
+  "reorder_and_upcast_attn": false,
+  "resid_pdrop": 0.0,
+  "scale_attn_by_inverse_layer_idx": false,
+  "scale_attn_weights": true,
+  "scale_qk_dot_by_d": true,
+  "transformers_version": "4.32.1",
+  "use_cache": true,
+  "vocab_size": 50277,
+  "width_scale": 0.2775
+}
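
The rewritten config keeps the "auto_map" block, which routes the Transformers Auto classes to the custom BTLM code (configuration_btlm.py / modeling_btlm.py) shipped with the repository rather than to a class built into the library, so loading requires trust_remote_code=True. A minimal loading sketch in Python, assuming the checkpoint is fetched by the repo id from "_name_or_path" (this fork's actual id may differ):

from transformers import AutoConfig, AutoModelForCausalLM

# Repo id taken from "_name_or_path" in the config above; substitute the
# actual path of this checkpoint if it lives elsewhere (an assumption,
# not something the diff confirms).
repo_id = "MBZUAI-LLM/SlimPajama-DC"

# trust_remote_code=True is required because "auto_map" resolves
# AutoConfig / AutoModelForCausalLM to configuration_btlm.BTLMConfig and
# modeling_btlm.BTLMLMHeadModel inside the repo.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)

print(config.model_type)               # btlm
print(config.position_embedding_type)  # alibi
print(config.n_positions)              # 2048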
special_tokens_map.json ADDED
@@ -0,0 +1,5 @@
+{
+  "bos_token": "<|endoftext|>",
+  "eos_token": "<|endoftext|>",
+  "unk_token": "<|endoftext|>"
+}
tokenizer_config.json ADDED
@@ -0,0 +1,9 @@
+{
+  "add_prefix_space": false,
+  "bos_token": "<|endoftext|>",
+  "clean_up_tokenization_spaces": true,
+  "eos_token": "<|endoftext|>",
+  "model_max_length": 8192,
+  "tokenizer_class": "GPT2Tokenizer",
+  "unk_token": "<|endoftext|>"
+}
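
Together, the two new files configure a GPT-2 style tokenizer in which the single <|endoftext|> marker doubles as BOS, EOS, and unknown token. Note that model_max_length is 8192 even though the config's n_positions is 2048; ALiBi position embeddings let the model attend beyond its training context, which presumably motivates the larger limit. A quick sanity check of what the tokenizer picks up from these files (a sketch; the repo id is the same assumption as above):

from transformers import AutoTokenizer

# Same hypothetical repo id as in the loading sketch above.
tokenizer = AutoTokenizer.from_pretrained("MBZUAI-LLM/SlimPajama-DC")

# special_tokens_map.json points bos/eos/unk at the one GPT-2 marker.
assert tokenizer.bos_token == "<|endoftext|>"
assert tokenizer.eos_token == "<|endoftext|>"
assert tokenizer.unk_token == "<|endoftext|>"

# tokenizer_config.json selects GPT2Tokenizer and caps inputs at 8192 tokens.
print(tokenizer.model_max_length)  # 8192
print(type(tokenizer).__name__)    # GPT2TokenizerFast (the fast wrapper)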