beamaia committed
Commit a8fcd18 · verified · 1 Parent(s): 72147ad

Model save

README.md CHANGED
@@ -20,7 +20,7 @@ should probably proofread and complete it, then remove this comment. -->
 
 This model is a fine-tuned version of [NousResearch/Nous-Hermes-llama-2-7b](https://huggingface.co/NousResearch/Nous-Hermes-llama-2-7b) on the generator dataset.
 It achieves the following results on the evaluation set:
-- Loss: 1.2589
+- Loss: 1.2621
 
 ## Model description
 
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:0ed1be0fd08ffc79c17790bfe501494d82b9ea6986cd90e5b70c1bbd3e73f1e6
+oid sha256:12ad68598cb12a43a8606771489ebe4069849b8e9854c5981887e5276c5fc7f0
 size 67126104
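
Both binary artifacts in this commit are Git LFS pointers, so the diff only swaps the `oid sha256:` line; the `size` stays 67126104 bytes, meaning the adapter weights changed in value but not in shape or layout. A minimal sketch (the local filename is hypothetical; oid and size are copied from the new pointer above) of verifying a downloaded object against its pointer:

```python
# Sketch: check a downloaded LFS object against its pointer file
# (format: version / oid sha256:<hex> / size <bytes>).
import hashlib
from pathlib import Path

blob = Path("adapter_model.safetensors").read_bytes()  # hypothetical path

assert len(blob) == 67126104  # the pointer's "size" line
assert hashlib.sha256(blob).hexdigest() == (
    "12ad68598cb12a43a8606771489ebe4069849b8e9854c5981887e5276c5fc7f0"
)  # the pointer's "oid sha256:" line (new side of this diff)
```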
special_tokens_map.json CHANGED
@@ -13,7 +13,13 @@
     "rstrip": false,
     "single_word": false
   },
-  "pad_token": "</s>",
+  "pad_token": {
+    "content": "<unk>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
   "unk_token": {
     "content": "<unk>",
     "lstrip": false,
tokenizer.json CHANGED
@@ -71,12 +71,6 @@
           "id": "A",
           "type_id": 0
         }
-      },
-      {
-        "SpecialToken": {
-          "id": "</s>",
-          "type_id": 0
-        }
       }
     ],
     "pair": [
@@ -92,12 +86,6 @@
           "type_id": 0
         }
       },
-      {
-        "SpecialToken": {
-          "id": "</s>",
-          "type_id": 0
-        }
-      },
       {
         "SpecialToken": {
           "id": "<s>",
@@ -109,24 +97,9 @@
           "id": "B",
           "type_id": 1
         }
-      },
-      {
-        "SpecialToken": {
-          "id": "</s>",
-          "type_id": 1
-        }
       }
     ],
     "special_tokens": {
-      "</s>": {
-        "id": "</s>",
-        "ids": [
-          2
-        ],
-        "tokens": [
-          "</s>"
-        ]
-      },
       "<s>": {
         "id": "<s>",
         "ids": [
tokenizer_config.json CHANGED
@@ -1,6 +1,6 @@
 {
   "add_bos_token": true,
-  "add_eos_token": true,
+  "add_eos_token": false,
   "added_tokens_decoder": {
     "0": {
       "content": "<unk>",
@@ -39,10 +39,8 @@
   "clean_up_tokenization_spaces": false,
   "eos_token": "</s>",
   "legacy": false,
-  "max_lenght": 2048,
   "model_max_length": 1000000000000000019884624838656,
-  "pad_token": "</s>",
-  "padding": true,
+  "pad_token": "<unk>",
   "sp_model_kwargs": {},
   "tokenizer_class": "LlamaTokenizer",
   "unk_token": "<unk>",
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:cb7c97a1558730520d8dc471422f9590d275c8aea1a3edac1992711a02a32491
+oid sha256:804acc3e6b044fd37207b3754ad0a10b52ceac13a20bf9b7a5c09a4056852860
 size 5112