seen_tokens and get_max_length deprecated

#8
Files changed (1)
  1. modeling_deepseek.py +2 -2
modeling_deepseek.py CHANGED
@@ -1724,8 +1724,8 @@ class DeepseekV2ForCausalLM(DeepseekV2PreTrainedModel):
         if past_key_values is not None:
             if isinstance(past_key_values, Cache):
                 cache_length = past_key_values.get_seq_length()
-                past_length = past_key_values.seen_tokens
-                max_cache_length = past_key_values.get_max_length()
+                past_length = past_key_values.cache_position
+                max_cache_length = past_key_values.get_max_cache_shape()
             else:
                 cache_length = past_length = past_key_values[0][0].shape[2]
                 max_cache_length = None
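
For context, both calls removed here were deprecated upstream: Cache.seen_tokens and Cache.get_max_length() have been slated for removal in recent transformers releases. Below is a minimal sketch (not part of this PR) of the surviving Cache API, assuming a transformers version new enough to ship DynamicCache.get_max_cache_shape():

    import torch
    from transformers import DynamicCache

    cache = DynamicCache()
    # Store one layer's key/value states: (batch, heads, seq_len, head_dim).
    key = torch.zeros(1, 2, 4, 8)
    value = torch.zeros(1, 2, 4, 8)
    cache.update(key, value, layer_idx=0)

    print(cache.get_seq_length())       # 4: tokens cached so far
    print(cache.get_max_cache_shape())  # None: a DynamicCache is unbounded

get_max_cache_shape() returns None for dynamic (unbounded) caches, which matches the max_cache_length = None fallback already used in the non-Cache branch above.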