Dampfinchen committed
Commit a54ecf9
1 Parent(s): ef15e71

Upload config.json

Files changed (1)
  config.json +7 -4
config.json CHANGED
@@ -1,12 +1,15 @@
 {
-  "_name_or_path": "NousResearch/Meta-Llama-3.1-8B",
   "architectures": [
     "LlamaForCausalLM"
   ],
   "attention_bias": false,
   "attention_dropout": 0.0,
   "bos_token_id": 128000,
-  "eos_token_id": 128001,
+  "eos_token_id": [
+    128001,
+    128008,
+    128009
+  ],
   "hidden_act": "silu",
   "hidden_size": 4096,
   "initializer_range": 0.02,
@@ -21,15 +24,15 @@
   "rms_norm_eps": 1e-05,
   "rope_scaling": {
     "factor": 8.0,
-    "high_freq_factor": 4.0,
     "low_freq_factor": 1.0,
+    "high_freq_factor": 4.0,
     "original_max_position_embeddings": 8192,
     "rope_type": "llama3"
   },
   "rope_theta": 500000.0,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.44.1",
+  "transformers_version": "4.42.3",
   "use_cache": true,
   "vocab_size": 128256
 }
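
As a quick sanity check on this change (not part of the commit itself), the sketch below shows how transformers would pick up the new eos_token_id list at generation time. The repo path is a hypothetical placeholder for wherever this config.json is hosted, and the snippet assumes a transformers release that accepts a list of end-of-sequence token ids in generate().

```python
# Minimal sketch (not from this commit): verify that the updated config's
# eos_token_id list is honored during generation. The repo path below is a
# hypothetical placeholder for the repository containing this config.json.
import torch
from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer

repo_id = "path/to/this/model"  # hypothetical placeholder

config = AutoConfig.from_pretrained(repo_id)
print(config.eos_token_id)  # expected after this commit: [128001, 128008, 128009]

# generate() accepts either a single token id or a list of ids as
# end-of-sequence markers, so decoding stops on any of the three terminators.
tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(repo_id, torch_dtype=torch.bfloat16)

inputs = tokenizer("Hello, world", return_tensors="pt")
output = model.generate(**inputs, max_new_tokens=32, eos_token_id=config.eos_token_id)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```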