ayyylol committed on
Commit
778b571
1 Parent(s): 16e280f

Fix TypeError in _pad method by adding missing padding_side field

Browse files

Hi,
Thank you for this model!
I noticed that the _pad method in the ChatGLM4Tokenizer class is missing the padding_side field, which is causing a TypeError when calling the encode method.
This issue comes up when making quants with llama.cpp:

```
Traceback (most recent call last):
File "/home/glm4/llama.cpp/convert_hf_to_gguf.py", line 4430, in <module>
main()
File "/home/glm4/llama.cpp/convert_hf_to_gguf.py", line 4424, in main
model_instance.write()
File "/home/glm4/llama.cpp/convert_hf_to_gguf.py", line 434, in write
self.prepare_metadata(vocab_only=False)
File "/home/glm4/llama.cpp/convert_hf_to_gguf.py", line 427, in prepare_metadata
self.set_vocab()
File "/home/glm4/llama.cpp/convert_hf_to_gguf.py", line 3928, in set_vocab
tokpre = self.get_vocab_base_pre(tokenizer)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/glm4/llama.cpp/convert_hf_to_gguf.py", line 550, in get_vocab_base_pre
chktok = tokenizer.encode(chktxt)
^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/glm4/llama.cpp/venv/lib/python3.12/site-packages/transformers/tokenization_utils_base.py", line 2791, in encode
encoded_inputs = self.encode_plus(
^^^^^^^^^^^^^^^^^
File "/home/glm4/llama.cpp/venv/lib/python3.12/site-packages/transformers/tokenization_utils_base.py", line 3210, in encode_plus
return self._encode_plus(
^^^^^^^^^^^^^^^^^^
File "/home/glm4/llama.cpp/venv/lib/python3.12/site-packages/transformers/tokenization_utils.py", line 801, in _encode_plus
return self.prepare_for_model(
^^^^^^^^^^^^^^^^^^^^^^^
File "/home/glm4/llama.cpp/venv/lib/python3.12/site-packages/transformers/tokenization_utils_base.py", line 3706, in prepare_for_model
encoded_inputs = self.pad(
^^^^^^^^^
File "/home/glm4/llama.cpp/venv/lib/python3.12/site-packages/transformers/tokenization_utils_base.py", line 3508, in pad
encoded_inputs = self._pad(
^^^^^^^^^^
TypeError: ChatGLM4Tokenizer._pad() got an unexpected keyword argument 'padding_side'
```
Thank you!

Files changed (1) hide show
  1. tokenization_chatglm.py +1 -0
tokenization_chatglm.py CHANGED
@@ -207,6 +207,7 @@ class ChatGLM4Tokenizer(PreTrainedTokenizer):
207
  padding_strategy: PaddingStrategy = PaddingStrategy.DO_NOT_PAD,
208
  pad_to_multiple_of: Optional[int] = None,
209
  return_attention_mask: Optional[bool] = None,
 
210
  ) -> dict:
211
  """
212
  Pad encoded inputs (on left/right and up to predefined length or max length in the batch)
 
207
  padding_strategy: PaddingStrategy = PaddingStrategy.DO_NOT_PAD,
208
  pad_to_multiple_of: Optional[int] = None,
209
  return_attention_mask: Optional[bool] = None,
210
+ padding_side: Optional[str] = None,
211
  ) -> dict:
212
  """
213
  Pad encoded inputs (on left/right and up to predefined length or max length in the batch)