katuni4ka and echarlaix (HF staff) committed
Commit fba44ea · verified · 1 Parent(s): 71acfe8

fix-transformers-4.49 (#1)


- replace get_max_length, which is deprecated in transformers 4.49 (18dad74787382de2ac7e42f7fc9f94c177499f7f)


Co-authored-by: Ella Charlaix <[email protected]>

Files changed (1):
  1. modeling_minicpm.py (+1, -1)
modeling_minicpm.py CHANGED
@@ -1369,7 +1369,7 @@ class MiniCPM3ForCausalLM(MiniCPM3PreTrainedModel):
         if isinstance(past_key_values, Cache):
             cache_length = past_key_values.get_seq_length()
             past_length = past_key_values.seen_tokens
-            max_cache_length = past_key_values.get_max_length()
+            max_cache_length = past_key_values.get_max_cache_shape()
         else:
             cache_length = past_length = past_key_values[0][0].shape[2]
             max_cache_length = None
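
For reference, a minimal sketch of the same version-compatible pattern outside the model code (assuming transformers is installed; the hasattr-based fallback and the DynamicCache instance are illustrative choices, not part of this commit):

    from transformers import DynamicCache

    # An empty cache for illustration; in MiniCPM3ForCausalLM this would be the
    # past_key_values object handled in prepare_inputs_for_generation.
    past_key_values = DynamicCache()

    if hasattr(past_key_values, "get_max_cache_shape"):
        # Newer transformers (per the commit message, get_max_length is
        # deprecated as of 4.49): use the replacement accessor.
        max_cache_length = past_key_values.get_max_cache_shape()
    else:
        # Older transformers releases still expose only the deprecated call.
        max_cache_length = past_key_values.get_max_length()

    print(max_cache_length)  # None for a DynamicCache, which has no fixed maximum

The committed fix simply calls get_max_cache_shape() unconditionally, which is sufficient when the pinned transformers version already provides it.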