LHC88 committed
Commit c1b4b3c · 1 Parent(s): 1d427be
Files changed (1)
  1. config.json +5 -14
config.json CHANGED
@@ -1,22 +1,13 @@
 {
-  "_name_or_path": "/root/.cache/huggingface/hub/models--meetkai--functionary-small-v3.1/snapshots/29db9eb1954e151d0d845da70bc549c1c1e30f16",
-  "architectures": [
-    "LlamaForCausalLM"
-  ],
-  "auto_map": {
+  "_name_or_path": "meetkai/functionary-small-v3.1",
+  "architectures": ["LlamaForCausalLM"],
+  "auto_map": {
     "AutoModelForCausalLM": "modeling_functionary.FunctionaryForCausalLM"
   },
   "attention_bias": false,
   "attention_dropout": 0.0,
-  "auto_map": {
-    "AutoModelForCausalLM": "modeling_functionary.FunctionaryForCausalLM"
-  },
   "bos_token_id": 128000,
-  "eos_token_id": [
-    128001,
-    128008,
-    128009
-  ],
+  "eos_token_id": [128001, 128008, 128009],
   "hidden_act": "silu",
   "hidden_size": 4096,
   "initializer_range": 0.02,
@@ -36,7 +27,7 @@
     "version": "gemm",
     "zero_point": true
   },
-  "rms_norm_eps": 1e-05,
+  "rms_norm_eps": 1e-5,
   "rope_scaling": {
     "factor": 8.0,
     "high_freq_factor": 4.0,