Update modeling_phi3.py for compatibility with transformers 4.49 (#13)
- Update modeling_minicpm.py for compatibility with transformers 4.49 (2eb01a26f2b1ca7e145e8a123b748dae7cd04714)
Co-authored-by: Sylwia Kuros <[email protected]>
- modeling_phi3.py +1 -1
modeling_phi3.py CHANGED
@@ -1332,7 +1332,7 @@ class Phi3ForCausalLM(Phi3PreTrainedModel):
         if isinstance(past_key_values, Cache):
             cache_length = past_key_values.get_seq_length()
             past_length = past_key_values.seen_tokens
-            max_cache_length = past_key_values.get_max_length()
+            max_cache_length = past_key_values.get_max_length() if hasattr(past_key_values, "get_max_length") else past_key_values.get_max_cache_shape()
         else:
             cache_length = past_length = past_key_values[0][0].shape[2]
             max_cache_length = None
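For context: transformers 4.49 removes Cache.get_max_length(), which earlier releases provided, in favor of get_max_cache_shape(), so the hasattr guard above keeps the file working on both sides of that change. Below is a minimal standalone sketch of the same version guard; the helper name get_max_cache_length is hypothetical and not part of this repo.

from transformers.cache_utils import Cache, DynamicCache

def get_max_cache_length(past_key_values: Cache):
    # transformers < 4.49 expose Cache.get_max_length(); newer releases
    # drop it in favor of get_max_cache_shape(). Both report the cache's
    # maximum capacity, or None for an unbounded cache like DynamicCache.
    if hasattr(past_key_values, "get_max_length"):
        return past_key_values.get_max_length()
    return past_key_values.get_max_cache_shape()

# Usage: a DynamicCache grows on demand, so either API reports None.
print(get_max_cache_length(DynamicCache()))  # None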