Replaced the deprecated `Cache.get_max_length()` with its replacement `Cache.get_max_cache_shape()`

#12
Files changed (1) hide show
  1. modeling_hymba.py +1 -1
modeling_hymba.py CHANGED
@@ -2573,7 +2573,7 @@ class HymbaForCausalLM(HymbaPreTrainedModel):
2573
  if isinstance(past_key_values, Cache):
2574
  cache_length = past_key_values.get_seq_length()
2575
  past_length = past_key_values.seen_tokens
2576
- max_cache_length = past_key_values.get_max_length()
2577
 
2578
  past_length = cache_length
2579
 
 
2573
  if isinstance(past_key_values, Cache):
2574
  cache_length = past_key_values.get_seq_length()
2575
  past_length = past_key_values.seen_tokens
2576
+ max_cache_length = past_key_values.get_max_cache_shape()
2577
 
2578
  past_length = cache_length
2579