(Trained with Unsloth)
model-00001-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:c7dd2feb307785c6143f3821c0acb3f6b2dcf423d11dba86944fe906033053b9
 size 4976698672
model-00002-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:8aed1cadc8ae3e17f201f80e582f52d75393c9c3501cbf12291551d76e64518d
 size 4999802720
model-00003-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:3cd2886eff0db0b021398c64731def9edc02b37f55adcdf0e3e319353d8dadcc
 size 4915916176
model-00004-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:b86953bf6eb3a4c19bf79882ea523f610235863dbd4387a636d37d580f938fce
 size 1168138808
tokenizer_config.json CHANGED
@@ -2050,7 +2050,7 @@
 }
 },
 "bos_token": "<|begin_of_text|>",
-"chat_template": "{
+"chat_template": "{{ bos_token + '\n'}}{{ 'Cutting Knowledge Date: December 2023' + '\n' 'Today Date: 23 July 2024'+ '\n'}}{{ 'When you receive a tool call response, use the output to format an answer to the orginal user question'+ '\n'}}{{ 'You are an intelligent AI that controls a drone. Given a command or request from the user'+ '\n'}}{{ 'call one of your functions to complete the request. If the request cannot be completed by your available functions, call the reject_request function.'+ '\n'}}{{ 'If the request is ambiguous or unclear, reject the request.'+ '\n'}}{% for message in messages %}{% if message['role'] == 'user' %}{{ 'Respond in the format {name: function name, parameters: dictionary of argument name and its value}. Do not use variables.'+ '\n'}}{{ '>>> User: ' + message['content'] + '\n' }}{% elif message['role'] == 'assistant' %}{{ '>>> Assistant: ' + message['content'] + eos_token + '\n' }}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ '>>> Assistant: ' }}{% endif %}",
 "clean_up_tokenization_spaces": true,
 "eos_token": "<|eot_id|>",
 "model_input_names": [
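For reference, the updated chat_template can be previewed with the transformers apply_chat_template API. This is a minimal sketch only: the repo id below is a placeholder for this model, and the exact rendered prompt depends on the tokenizer files committed here.

# Minimal sketch, assuming the transformers library is installed.
from transformers import AutoTokenizer

# Placeholder repo id; substitute the actual model path for this repository.
tokenizer = AutoTokenizer.from_pretrained("path/to/this-repo")

messages = [
    {"role": "user", "content": "Take off and hover at 10 meters."},
]

# Renders the Jinja template added in this commit: the fixed system preamble,
# a ">>> User: ..." line per user message, and a trailing ">>> Assistant: "
# generation prompt when add_generation_prompt=True.
prompt = tokenizer.apply_chat_template(
    messages,
    tokenize=False,
    add_generation_prompt=True,
)
print(prompt)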