katuni4ka committed
Commit a9fcd3e · verified · 1 Parent(s): a157fa3

fix for transformers 4.49 compatibility


past_key_values.get_max_length() is no longer available as of transformers 4.49; fall back to get_max_cache_shape() when the old accessor is missing.
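
A minimal, self-contained sketch of the same pattern, assuming transformers >= 4.36 (where DynamicCache is available). On releases that still expose the deprecated Cache.get_max_length() the old accessor is used; on 4.49+, where it has been removed, the code falls back to get_max_cache_shape(), which newer releases provide as its replacement:

# Sketch of the compatibility shim this commit applies (not part of the commit itself).
from transformers import DynamicCache

past_key_values = DynamicCache()

if hasattr(past_key_values, "get_max_length"):
    # Pre-4.49 path: the deprecated accessor is still present.
    max_cache_length = past_key_values.get_max_length()
else:
    # 4.49+ path: get_max_cache_shape() is the replacement; like the old
    # method, it returns None for dynamically growing caches.
    max_cache_length = past_key_values.get_max_cache_shape()

print(max_cache_length)  # None for a DynamicCache, which grows without bound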

Files changed (1)
  1. modeling_deepseek.py +1 -1
modeling_deepseek.py CHANGED
@@ -1652,7 +1652,7 @@ class DeepseekV3ForCausalLM(DeepseekV3PreTrainedModel):
         if isinstance(past_key_values, Cache):
             cache_length = past_key_values.get_seq_length()
             past_length = past_key_values.seen_tokens
-            max_cache_length = past_key_values.get_max_length()
+            max_cache_length = past_key_values.get_max_length() if hasattr(past_key_values, "get_max_length") else past_key_values.get_max_cache_shape()
         else:
             cache_length = past_length = past_key_values[0][0].shape[2]
             max_cache_length = None
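
To verify which side of the branch a given environment will take, a quick check like the following (not part of the commit) can be run against the installed transformers:

import transformers
from transformers import DynamicCache

cache = DynamicCache()
print("transformers", transformers.__version__)
print("get_max_length available:", hasattr(cache, "get_max_length"))          # False on 4.49+
print("get_max_cache_shape available:", hasattr(cache, "get_max_cache_shape"))  # True on recent releases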