Upload folder using huggingface_hub
Browse files
README.md
CHANGED
@@ -1793,9 +1793,9 @@ def get_new_queries(queries, query_max_len, examples_prefix, tokenizer):
             return_tensors=None,
             add_special_tokens=False
         )
-        prefix_ids = tokenizer(examples_prefix)['input_ids']
-        suffix_ids = tokenizer('\n<response>')['input_ids']
-        new_max_length = (len(prefix_ids) + len(suffix_ids) + query_max_len) // 8 * 8 + 8
+        prefix_ids = tokenizer(examples_prefix, add_special_tokens=False)['input_ids']
+        suffix_ids = tokenizer('\n<response>', add_special_tokens=False)['input_ids']
+        new_max_length = (len(prefix_ids) + len(suffix_ids) + query_max_len + 8) // 8 * 8 + 8
         new_queries = tokenizer.batch_decode(inputs['input_ids'])
         for i in range(len(new_queries)):
             new_queries[i] = examples_prefix + new_queries[i] + '\n<response>'