Skip to content

Commit cd719de

Browse files
authored
Fix RoPE failures in Transformers nightly (#29700)
Signed-off-by: Harry Mellor <19981378+hmellor@users.noreply.github.com>
1 parent 8c363ed commit cd719de

File tree

2 files changed

+2
-19
lines changed

2 files changed

+2
-19
lines changed

vllm/transformers_utils/config.py

Lines changed: 2 additions & 17 deletions
Original file line number | Diff line number | Diff line change
@@ -300,25 +300,10 @@ def set_default_rope_theta(config: PretrainedConfig, default_theta: float) -> No
300300

301301
def patch_rope_parameters(config: PretrainedConfig) -> None:
302302
"""Provide backwards compatibility for RoPE."""
303-
# Patch rope_parameters differently based on Transformers version
304-
if Version(version("transformers")) >= Version("5.0.0.dev0"):
305-
from transformers.modeling_rope_utils import (
306-
rope_config_validation,
307-
standardize_rope_params,
308-
)
309-
310-
# When Transformers v5 is installed, legacy rope_theta may be present
311-
# when using custom code models written for Transformers v4
312-
if (rope_theta := getattr(config, "rope_theta", None)) is not None:
313-
standardize_rope_params(config, rope_theta=rope_theta)
314-
rope_config_validation(config)
315-
# Delete rope_theta to avoid confusion in downstream code
316-
del config.rope_theta
317-
else:
318-
# When Transformers v4 is installed, legacy rope_scaling may be present
303+
if Version(version("transformers")) < Version("5.0.0.dev0"):
304+
# Transformers v4 installed, legacy config fields may be present
319305
if (rope_scaling := getattr(config, "rope_scaling", None)) is not None:
320306
config.rope_parameters = rope_scaling
321-
# When Transformers v4 is installed, legacy rope_theta may be present
322307
if (rope_theta := getattr(config, "rope_theta", None)) is not None:
323308
if not hasattr(config, "rope_parameters"):
324309
config.rope_parameters = {"rope_type": "default"}

vllm/transformers_utils/configs/qwen3_next.py

Lines changed: 0 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -17,7 +17,6 @@
1717
"""Qwen3-Next model configuration"""
1818

1919
from transformers.configuration_utils import PretrainedConfig, layer_type_validation
20-
from transformers.modeling_rope_utils import rope_config_validation
2120
from transformers.utils import logging
2221

2322
logger = logging.get_logger(__name__)
@@ -245,7 +244,6 @@ def __init__(
245244
self.attention_bias = attention_bias
246245
self.attention_dropout = attention_dropout
247246
self.head_dim = head_dim
248-
rope_config_validation(self)
249247

250248
self.layer_types = layer_types
251249
if self.layer_types is None:

0 commit comments

Comments (0)