@@ -300,25 +300,10 @@ def set_default_rope_theta(config: PretrainedConfig, default_theta: float) -> No
 
 def patch_rope_parameters(config: PretrainedConfig) -> None:
     """Provide backwards compatibility for RoPE."""
-    # Patch rope_parameters differently based on Transformers version
-    if Version(version("transformers")) >= Version("5.0.0.dev0"):
-        from transformers.modeling_rope_utils import (
-            rope_config_validation,
-            standardize_rope_params,
-        )
-
-        # When Transformers v5 is installed, legacy rope_theta may be present
-        # when using custom code models written for Transformers v4
-        if (rope_theta := getattr(config, "rope_theta", None)) is not None:
-            standardize_rope_params(config, rope_theta=rope_theta)
-            rope_config_validation(config)
-            # Delete rope_theta to avoid confusion in downstream code
-            del config.rope_theta
-    else:
-        # When Transformers v4 is installed, legacy rope_scaling may be present
+    if Version(version("transformers")) < Version("5.0.0.dev0"):
+        # Transformers v4 installed, legacy config fields may be present
         if (rope_scaling := getattr(config, "rope_scaling", None)) is not None:
             config.rope_parameters = rope_scaling
-        # When Transformers v4 is installed, legacy rope_theta may be present
         if (rope_theta := getattr(config, "rope_theta", None)) is not None:
             if not hasattr(config, "rope_parameters"):
                 config.rope_parameters = {"rope_type": "default"}
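
Note for reviewers: a minimal sketch of what the surviving v4 branch does, assuming Transformers v4 is installed and patch_rope_parameters is importable from this module; the rope_scaling values are hypothetical.

# Minimal sketch of the v4 compatibility path; the rope_scaling dict
# below is hypothetical and serves only to illustrate the mapping.
from transformers import PretrainedConfig

config = PretrainedConfig()
config.rope_scaling = {"rope_type": "linear", "factor": 2.0}  # legacy v4 field

patch_rope_parameters(config)

# The legacy dict is copied verbatim onto the field name downstream code reads.
assert config.rope_parameters == {"rope_type": "linear", "factor": 2.0}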