diff --git a/Cargo.lock b/Cargo.lock index 54c9505a6..13bbb6b9c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -288,8 +288,7 @@ checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" [[package]] name = "jiter" version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed1be5dfeadf968b30fa03a012a2f161de8be6df2d91bd8085c62cfb5efca65a" +source = "git+https://github.com/pydantic/jiter.git?branch=dh/pyo3-0.27#c74454182da301c82d5c3f660a267a3ef5e1d972" dependencies = [ "ahash", "bitvec", @@ -458,14 +457,14 @@ dependencies = [ [[package]] name = "pyo3" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ba0117f4212101ee6544044dae45abe1083d30ce7b29c4b5cbdfa2354e07383" +version = "0.27.0" +source = "git+https://github.com/pyo3/pyo3.git?branch=release-0.27.0#a5cadba2c71748e0918762d951798812d96fc8eb" dependencies = [ "indoc", "libc", "memoffset", "num-bigint", + "num-traits", "once_cell", "portable-atomic", "pyo3-build-config", @@ -476,9 +475,8 @@ dependencies = [ [[package]] name = "pyo3-build-config" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fc6ddaf24947d12a9aa31ac65431fb1b851b8f4365426e182901eabfb87df5f" +version = "0.27.0" +source = "git+https://github.com/pyo3/pyo3.git?branch=release-0.27.0#a5cadba2c71748e0918762d951798812d96fc8eb" dependencies = [ "python3-dll-a", "target-lexicon", @@ -486,9 +484,8 @@ dependencies = [ [[package]] name = "pyo3-ffi" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "025474d3928738efb38ac36d4744a74a400c901c7596199e20e45d98eb194105" +version = "0.27.0" +source = "git+https://github.com/pyo3/pyo3.git?branch=release-0.27.0#a5cadba2c71748e0918762d951798812d96fc8eb" dependencies = [ "libc", "pyo3-build-config", @@ -496,9 +493,8 @@ dependencies = [ [[package]] name = "pyo3-macros" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e64eb489f22fe1c95911b77c44cc41e7c19f3082fc81cce90f657cdc42ffded" +version = "0.27.0" +source = "git+https://github.com/pyo3/pyo3.git?branch=release-0.27.0#a5cadba2c71748e0918762d951798812d96fc8eb" dependencies = [ "proc-macro2", "pyo3-macros-backend", @@ -508,9 +504,8 @@ dependencies = [ [[package]] name = "pyo3-macros-backend" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "100246c0ecf400b475341b8455a9213344569af29a3c841d29270e53102e0fcf" +version = "0.27.0" +source = "git+https://github.com/pyo3/pyo3.git?branch=release-0.27.0#a5cadba2c71748e0918762d951798812d96fc8eb" dependencies = [ "heck", "proc-macro2", diff --git a/Cargo.toml b/Cargo.toml index 60e49387f..74447a091 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -27,7 +27,7 @@ rust-version = "1.75" [dependencies] # TODO it would be very nice to remove the "py-clone" feature as it can panic, # but needs a bit of work to make sure it's not used in the codebase -pyo3 = { version = "0.26", features = ["generate-import-lib", "num-bigint", "py-clone"] } +pyo3 = { version = "0.27", features = ["generate-import-lib", "num-bigint", "py-clone"] } regex = "1.11.3" strum = { version = "0.27", features = ["derive"] } strum_macros = "0.27" @@ -69,12 +69,12 @@ debug = true strip = false [dev-dependencies] -pyo3 = { version = "0.26", features = ["auto-initialize"] } +pyo3 = { version = "0.27", features = ["auto-initialize"] } [build-dependencies] version_check = "0.9.5" # used where logic has to be 
version/distribution specific, e.g. pypy -pyo3-build-config = { version = "0.26" } +pyo3-build-config = { version = "0.27" } [lints.clippy] dbg_macro = "warn" @@ -105,3 +105,8 @@ too_many_lines = "allow" unnecessary_wraps = "allow" unused_self = "allow" used_underscore_binding = "allow" + +[patch.crates-io] +pyo3 = { git = "https://github.com/pyo3/pyo3.git", branch = "release-0.27.0" } +pyo3-build-config = { git = "https://github.com/pyo3/pyo3.git", branch = "release-0.27.0" } +jiter = { git = "https://github.com/pydantic/jiter.git", branch = "dh/pyo3-0.27" } diff --git a/src/build_tools.rs b/src/build_tools.rs index 2ccb82728..01a4c1036 100644 --- a/src/build_tools.rs +++ b/src/build_tools.rs @@ -7,7 +7,7 @@ use std::sync::OnceLock; use pyo3::exceptions::PyException; use pyo3::prelude::*; use pyo3::types::{PyDict, PyList, PyString}; -use pyo3::{intern, FromPyObject, PyErrArguments}; +use pyo3::{intern, PyErrArguments}; use crate::errors::{PyLineError, ValError}; use crate::input::InputType; @@ -21,7 +21,7 @@ pub fn schema_or_config<'py, T>( config_key: &Bound<'py, PyString>, ) -> PyResult> where - T: FromPyObject<'py>, + T: FromPyObjectOwned<'py>, { match schema.get_as(schema_key)? { Some(v) => Ok(Some(v)), @@ -38,7 +38,7 @@ pub fn schema_or_config_same<'py, T>( key: &Bound<'py, PyString>, ) -> PyResult> where - T: FromPyObject<'py>, + T: FromPyObjectOwned<'py>, { schema_or_config(schema, config, key, key) } diff --git a/src/errors/line_error.rs b/src/errors/line_error.rs index 346226be2..bb5226113 100644 --- a/src/errors/line_error.rs +++ b/src/errors/line_error.rs @@ -2,8 +2,8 @@ use std::convert::Infallible; use pyo3::exceptions::PyTypeError; use pyo3::prelude::*; -use pyo3::DowncastError; -use pyo3::DowncastIntoError; +use pyo3::CastError; +use pyo3::CastIntoError; use jiter::JsonValue; @@ -45,15 +45,15 @@ impl From for ValError { } } -impl From> for ValError { - fn from(py_downcast: DowncastError) -> Self { - Self::InternalErr(PyTypeError::new_err(py_downcast.to_string())) +impl From> for ValError { + fn from(py_cast: CastError) -> Self { + Self::InternalErr(PyTypeError::new_err(py_cast.to_string())) } } -impl From> for ValError { - fn from(py_downcast: DowncastIntoError) -> Self { - Self::InternalErr(PyTypeError::new_err(py_downcast.to_string())) +impl From> for ValError { + fn from(py_cast: CastIntoError) -> Self { + Self::InternalErr(PyTypeError::new_err(py_cast.to_string())) } } diff --git a/src/errors/location.rs b/src/errors/location.rs index c79848a14..8ebe58f9d 100644 --- a/src/errors/location.rs +++ b/src/errors/location.rs @@ -176,9 +176,9 @@ impl TryFrom>> for Location { /// Thus this expects the location to *not* be reversed and reverses it before storing it. 
fn try_from(location: Option<&Bound<'_, PyAny>>) -> PyResult { if let Some(location) = location { - let mut loc_vec: Vec = if let Ok(tuple) = location.downcast::() { + let mut loc_vec: Vec = if let Ok(tuple) = location.cast::() { tuple.iter().map(Into::into).collect() - } else if let Ok(list) = location.downcast::() { + } else if let Ok(list) = location.cast::() { list.iter().map(Into::into).collect() } else { return Err(PyTypeError::new_err( diff --git a/src/errors/types.rs b/src/errors/types.rs index 922c9bfeb..a1467888f 100644 --- a/src/errors/types.rs +++ b/src/errors/types.rs @@ -42,7 +42,7 @@ pub fn list_all_errors(py: Python<'_>) -> PyResult> { PyList::new(py, errors) } -fn field_from_context<'py, T: FromPyObject<'py>>( +fn field_from_context<'py, T: FromPyObjectOwned<'py>>( context: Option<&Bound<'py, PyDict>>, field_name: &str, enum_name: &str, @@ -56,7 +56,7 @@ fn field_from_context<'py, T: FromPyObject<'py>>( .map_err(|_| py_error_type!(PyTypeError; "{}: '{}' context value must be a {}", enum_name, field_name, type_name_fn())) } -fn cow_field_from_context<'py, T: FromPyObject<'py>, B: ToOwned + ?Sized + 'static>( +fn cow_field_from_context<'py, T: FromPyObjectOwned<'py>, B: ToOwned + ?Sized + 'static>( context: Option<&Bound<'py, PyDict>>, field_name: &str, enum_name: &str, @@ -127,7 +127,7 @@ macro_rules! error_types { dict.set_item(stringify!($key), $key)?; )* if let Some(ctx) = context { - dict.update(ctx.bind(py).downcast::()?)?; + dict.update(ctx.bind(py).cast::()?)?; Ok(true) } else { Ok(false) @@ -809,9 +809,11 @@ impl From for Number { } } -impl FromPyObject<'_> for Number { - fn extract_bound(obj: &Bound<'_, PyAny>) -> PyResult { - if let Some(int) = extract_i64(obj) { +impl FromPyObject<'_, '_> for Number { + type Error = PyErr; + + fn extract(obj: Borrowed<'_, '_, PyAny>) -> PyResult { + if let Some(int) = extract_i64(&obj) { Ok(Number::Int(int)) } else if let Ok(float) = obj.extract::() { Ok(Number::Float(float)) diff --git a/src/errors/validation_exception.rs b/src/errors/validation_exception.rs index 2475d79dd..c4bd8c427 100644 --- a/src/errors/validation_exception.rs +++ b/src/errors/validation_exception.rs @@ -430,14 +430,14 @@ impl TryFrom<&Bound<'_, PyAny>> for PyLineError { type Error = PyErr; fn try_from(value: &Bound<'_, PyAny>) -> PyResult { - let dict = value.downcast::()?; + let dict = value.cast::()?; let py = value.py(); let type_raw = dict .get_item(intern!(py, "type"))? .ok_or_else(|| PyKeyError::new_err("type"))?; - let error_type = if let Ok(type_str) = type_raw.downcast::() { + let error_type = if let Ok(type_str) = type_raw.cast::() { let context: Option> = dict.get_as(intern!(py, "ctx"))?; ErrorType::new(py, type_str.to_str()?, context)? 
} else if let Ok(custom_error) = type_raw.extract::() { diff --git a/src/errors/value_exception.rs b/src/errors/value_exception.rs index 964899905..743e7f307 100644 --- a/src/errors/value_exception.rs +++ b/src/errors/value_exception.rs @@ -120,8 +120,8 @@ impl PydanticCustomError { let mut message = message_template.to_string(); if let Some(ctx) = context { for (key, value) in ctx.iter() { - let key = key.downcast::()?; - if let Ok(py_str) = value.downcast::() { + let key = key.cast::()?; + if let Ok(py_str) = value.cast::() { message = message.replace(&format!("{{{}}}", key.to_str()?), py_str.to_str()?); } else if let Some(value_int) = extract_i64(&value) { message = message.replace(&format!("{{{}}}", key.to_str()?), &value_int.to_string()); diff --git a/src/input/datetime.rs b/src/input/datetime.rs index 2acc15b5c..267f0162b 100644 --- a/src/input/datetime.rs +++ b/src/input/datetime.rs @@ -226,10 +226,10 @@ impl<'py> TryFrom<&'_ Bound<'py, PyAny>> for EitherTimedelta<'py> { type Error = PyErr; fn try_from(value: &Bound<'py, PyAny>) -> PyResult { - if let Ok(dt) = value.downcast_exact() { + if let Ok(dt) = value.cast_exact() { Ok(EitherTimedelta::PyExact(dt.clone())) } else { - let dt = value.downcast()?; + let dt = value.cast()?; Ok(EitherTimedelta::PySubclass(dt.clone())) } } @@ -344,7 +344,7 @@ fn time_as_tzinfo<'py>(py: Python<'py>, time: &Time) -> PyResult { let tz_info: TzInfo = offset.try_into()?; - Ok(Some(Bound::new(py, tz_info)?.into_any().downcast_into()?)) + Ok(Some(Bound::new(py, tz_info)?.into_any().cast_into()?)) } None => Ok(None), } diff --git a/src/input/input_python.rs b/src/input/input_python.rs index 5a1b9a4b0..e26dde62f 100644 --- a/src/input/input_python.rs +++ b/src/input/input_python.rs @@ -63,7 +63,7 @@ pub fn get_fraction_type(py: Python<'_>) -> &Bound<'_, PyType> { } pub(crate) fn downcast_python_input<'py, T: PyTypeCheck>(input: &(impl Input<'py> + ?Sized)) -> Option<&Bound<'py, T>> { - input.as_python().and_then(|any| any.downcast::().ok()) + input.as_python().and_then(|any| any.cast::().ok()) } pub(crate) fn input_as_python_instance<'a, 'py>( @@ -75,7 +75,7 @@ pub(crate) fn input_as_python_instance<'a, 'py>( impl From<&Bound<'_, PyAny>> for LocItem { fn from(py_any: &Bound<'_, PyAny>) -> Self { - if let Ok(py_str) = py_any.downcast::() { + if let Ok(py_str) = py_any.cast::() { py_str.to_string_lossy().as_ref().into() } else if let Some(key_int) = extract_i64(py_any) { key_int.into() @@ -110,7 +110,7 @@ impl<'py> Input<'py> for Bound<'py, PyAny> { } fn as_kwargs(&self, _py: Python<'py>) -> Option> { - self.downcast::().ok().map(Bound::to_owned) + self.cast::().ok().map(Bound::to_owned) } type Arguments<'a> @@ -119,15 +119,15 @@ impl<'py> Input<'py> for Bound<'py, PyAny> { Self: 'a; fn validate_args(&self) -> ValResult> { - if let Ok(dict) = self.downcast::() { + if let Ok(dict) = self.cast::() { Ok(PyArgs::new(None, Some(dict.clone()))) } else if let Ok(args_kwargs) = self.extract::() { let args = args_kwargs.args.into_bound(self.py()); let kwargs = args_kwargs.kwargs.map(|d| d.into_bound(self.py())); Ok(PyArgs::new(Some(args), kwargs)) - } else if let Ok(tuple) = self.downcast::() { + } else if let Ok(tuple) = self.cast::() { Ok(PyArgs::new(Some(tuple.clone()), None)) - } else if let Ok(list) = self.downcast::() { + } else if let Ok(list) = self.cast::() { Ok(PyArgs::new(Some(list.to_tuple()), None)) } else { Err(ValError::new(ErrorTypeDefaults::ArgumentsType, self)) @@ -145,7 +145,7 @@ impl<'py> Input<'py> for Bound<'py, PyAny> { } fn 
validate_dataclass_args<'a>(&'a self, class_name: &str) -> ValResult> { - if let Ok(dict) = self.downcast::() { + if let Ok(dict) = self.cast::() { Ok(PyArgs::new(None, Some(dict.clone()))) } else if let Ok(args_kwargs) = self.extract::() { let args = args_kwargs.args.into_bound(self.py()); @@ -168,9 +168,9 @@ impl<'py> Input<'py> for Bound<'py, PyAny> { strict: bool, coerce_numbers_to_str: bool, ) -> ValResult>> { - if let Ok(py_str) = self.downcast_exact::() { + if let Ok(py_str) = self.cast_exact::() { return Ok(ValidationMatch::exact(py_str.clone().into())); - } else if let Ok(py_str) = self.downcast::() { + } else if let Ok(py_str) = self.cast::() { // force to a rust string to make sure behavior is consistent whether or not we go via a // rust string in StrConstrainedValidator - e.g. to_lower return Ok(ValidationMatch::strict(py_string_str(py_str)?.into())); @@ -178,12 +178,12 @@ impl<'py> Input<'py> for Bound<'py, PyAny> { 'lax: { if !strict { - return if let Ok(bytes) = self.downcast::() { + return if let Ok(bytes) = self.cast::() { match from_utf8(bytes.as_bytes()) { Ok(str) => Ok(str.into()), Err(_) => Err(ValError::new(ErrorTypeDefaults::StringUnicode, self)), } - } else if let Ok(py_byte_array) = self.downcast::() { + } else if let Ok(py_byte_array) = self.cast::() { match bytearray_to_str(py_byte_array) { Ok(py_str) => Ok(py_str.into()), Err(_) => Err(ValError::new(ErrorTypeDefaults::StringUnicode, self)), @@ -215,21 +215,21 @@ impl<'py> Input<'py> for Bound<'py, PyAny> { strict: bool, mode: ValBytesMode, ) -> ValResult>> { - if let Ok(py_bytes) = self.downcast_exact::() { + if let Ok(py_bytes) = self.cast_exact::() { return Ok(ValidationMatch::exact(py_bytes.into())); - } else if let Ok(py_bytes) = self.downcast::() { + } else if let Ok(py_bytes) = self.cast::() { return Ok(ValidationMatch::strict(py_bytes.into())); } 'lax: { if !strict { - return if let Ok(py_str) = self.downcast::() { + return if let Ok(py_str) = self.cast::() { let str = py_string_str(py_str)?; match mode.deserialize_string(str) { Ok(b) => Ok(b), Err(e) => Err(ValError::new(e, self)), } - } else if let Ok(py_byte_array) = self.downcast::() { + } else if let Ok(py_byte_array) = self.cast::() { Ok(py_byte_array.to_vec().into()) } else { break 'lax; @@ -242,7 +242,7 @@ impl<'py> Input<'py> for Bound<'py, PyAny> { } fn validate_bool(&self, strict: bool) -> ValResult> { - if let Ok(bool) = self.downcast::() { + if let Ok(bool) = self.cast::() { return Ok(ValidationMatch::exact(bool.is_true())); } @@ -315,7 +315,7 @@ impl<'py> Input<'py> for Bound<'py, PyAny> { } fn exact_str(&self) -> ValResult> { - if let Ok(py_str) = self.downcast_exact() { + if let Ok(py_str) = self.cast_exact() { Ok(EitherString::Py(py_str.clone())) } else { Err(ValError::new(ErrorTypeDefaults::IntType, self)) @@ -323,7 +323,7 @@ impl<'py> Input<'py> for Bound<'py, PyAny> { } fn validate_float(&self, strict: bool) -> ValResult>> { - if let Ok(float) = self.downcast_exact::() { + if let Ok(float) = self.cast_exact::() { return Ok(ValidationMatch::exact(EitherFloat::Py(float.clone()))); } @@ -395,19 +395,19 @@ impl<'py> Input<'py> for Bound<'py, PyAny> { Self: 'a; fn strict_dict<'a>(&'a self) -> ValResult> { - if let Ok(dict) = self.downcast_exact::() { + if let Ok(dict) = self.cast_exact::() { Ok(GenericPyMapping::Dict(dict)) } else if self.is_instance_of::() { - Ok(GenericPyMapping::Mapping(self.downcast::()?)) + Ok(GenericPyMapping::Mapping(self.cast::()?)) } else { Err(ValError::new(ErrorTypeDefaults::DictType, self)) } } fn 
lax_dict<'a>(&'a self) -> ValResult> { - if let Ok(dict) = self.downcast_exact::() { + if let Ok(dict) = self.cast_exact::() { Ok(GenericPyMapping::Dict(dict)) - } else if let Ok(mapping) = self.downcast::() { + } else if let Ok(mapping) = self.cast::() { Ok(GenericPyMapping::Mapping(mapping)) } else { Err(ValError::new(ErrorTypeDefaults::DictType, self)) @@ -421,10 +421,10 @@ impl<'py> Input<'py> for Bound<'py, PyAny> { ) -> ValResult> { if from_attributes { // if from_attributes, first try a dict, then mapping then from_attributes - if let Ok(dict) = self.downcast::() { + if let Ok(dict) = self.cast::() { return Ok(GenericPyMapping::Dict(dict)); } else if !strict { - if let Ok(mapping) = self.downcast::() { + if let Ok(mapping) = self.cast::() { return Ok(GenericPyMapping::Mapping(mapping)); } } @@ -454,7 +454,7 @@ impl<'py> Input<'py> for Bound<'py, PyAny> { Self: 'a; fn validate_list<'a>(&'a self, strict: bool) -> ValMatch> { - if let Ok(list) = self.downcast::() { + if let Ok(list) = self.cast::() { return Ok(ValidationMatch::exact(PySequenceIterable::List(list))); } else if !strict { if let Ok(other) = extract_sequence_iterable(self) { @@ -471,7 +471,7 @@ impl<'py> Input<'py> for Bound<'py, PyAny> { Self: 'a; fn validate_tuple<'a>(&'a self, strict: bool) -> ValMatch> { - if let Ok(tup) = self.downcast::() { + if let Ok(tup) = self.cast::() { return Ok(ValidationMatch::exact(PySequenceIterable::Tuple(tup))); } else if !strict { if let Ok(other) = extract_sequence_iterable(self) { @@ -488,7 +488,7 @@ impl<'py> Input<'py> for Bound<'py, PyAny> { Self: 'a; fn validate_set<'a>(&'a self, strict: bool) -> ValMatch> { - if let Ok(set) = self.downcast::() { + if let Ok(set) = self.cast::() { return Ok(ValidationMatch::exact(PySequenceIterable::Set(set))); } else if !strict { if let Ok(other) = extract_sequence_iterable(self) { @@ -500,7 +500,7 @@ impl<'py> Input<'py> for Bound<'py, PyAny> { } fn validate_frozenset<'a>(&'a self, strict: bool) -> ValMatch> { - if let Ok(frozenset) = self.downcast::() { + if let Ok(frozenset) = self.cast::() { return Ok(ValidationMatch::exact(PySequenceIterable::FrozenSet(frozenset))); } else if !strict { if let Ok(other) = extract_sequence_iterable(self) { @@ -520,21 +520,21 @@ impl<'py> Input<'py> for Bound<'py, PyAny> { } fn validate_date(&self, strict: bool, mode: TemporalUnitMode) -> ValResult>> { - if let Ok(date) = self.downcast_exact::() { + if let Ok(date) = self.cast_exact::() { Ok(ValidationMatch::exact(date.clone().into())) } else if self.is_instance_of::() { // have to check if it's a datetime first, otherwise the line below converts to a date // even if we later try coercion from a datetime, we don't want to return a datetime now Err(ValError::new(ErrorTypeDefaults::DateType, self)) - } else if let Ok(date) = self.downcast::() { + } else if let Ok(date) = self.cast::() { Ok(ValidationMatch::strict(date.clone().into())) } else if let Some(bytes) = { if strict { None - } else if let Ok(py_str) = self.downcast::() { + } else if let Ok(py_str) = self.cast::() { let str = py_string_str(py_str)?; Some(str.as_bytes()) - } else if let Ok(py_bytes) = self.downcast::() { + } else if let Ok(py_bytes) = self.cast::() { Some(py_bytes.as_bytes()) } else { None @@ -551,18 +551,18 @@ impl<'py> Input<'py> for Bound<'py, PyAny> { strict: bool, microseconds_overflow_behavior: MicrosecondsPrecisionOverflowBehavior, ) -> ValResult>> { - if let Ok(time) = self.downcast_exact::() { + if let Ok(time) = self.cast_exact::() { return 
Ok(ValidationMatch::exact(time.clone().into())); - } else if let Ok(time) = self.downcast::() { + } else if let Ok(time) = self.cast::() { return Ok(ValidationMatch::strict(time.clone().into())); } 'lax: { if !strict { - return if let Ok(py_str) = self.downcast::() { + return if let Ok(py_str) = self.cast::() { let str = py_string_str(py_str)?; bytes_as_time(self, str.as_bytes(), microseconds_overflow_behavior) - } else if let Ok(py_bytes) = self.downcast::() { + } else if let Ok(py_bytes) = self.cast::() { bytes_as_time(self, py_bytes.as_bytes(), microseconds_overflow_behavior) } else if self.is_exact_instance_of::() { Err(ValError::new(ErrorTypeDefaults::TimeType, self)) @@ -586,18 +586,18 @@ impl<'py> Input<'py> for Bound<'py, PyAny> { microseconds_overflow_behavior: MicrosecondsPrecisionOverflowBehavior, mode: TemporalUnitMode, ) -> ValResult>> { - if let Ok(dt) = self.downcast_exact::() { + if let Ok(dt) = self.cast_exact::() { return Ok(ValidationMatch::exact(dt.clone().into())); - } else if let Ok(dt) = self.downcast::() { + } else if let Ok(dt) = self.cast::() { return Ok(ValidationMatch::strict(dt.clone().into())); } 'lax: { if !strict { - return if let Ok(py_str) = self.downcast::() { + return if let Ok(py_str) = self.cast::() { let str = py_string_str(py_str)?; bytes_as_datetime(self, str.as_bytes(), microseconds_overflow_behavior, mode) - } else if let Ok(py_bytes) = self.downcast::() { + } else if let Ok(py_bytes) = self.cast::() { bytes_as_datetime(self, py_bytes.as_bytes(), microseconds_overflow_behavior, mode) } else if self.is_exact_instance_of::() { Err(ValError::new(ErrorTypeDefaults::DatetimeType, self)) @@ -605,7 +605,7 @@ impl<'py> Input<'py> for Bound<'py, PyAny> { int_as_datetime(self, int, 0, mode) } else if let Ok(float) = self.extract::() { float_as_datetime(self, float, mode) - } else if let Ok(date) = self.downcast::() { + } else if let Ok(date) = self.cast::() { Ok(date_as_datetime(date)?) 
} else { break 'lax; @@ -633,10 +633,10 @@ impl<'py> Input<'py> for Bound<'py, PyAny> { 'lax: { if !strict { - return if let Ok(py_str) = self.downcast::() { + return if let Ok(py_str) = self.cast::() { let str = py_string_str(py_str)?; bytes_as_timedelta(self, str.as_bytes(), microseconds_overflow_behavior) - } else if let Ok(py_bytes) = self.downcast::() { + } else if let Ok(py_bytes) = self.cast::() { bytes_as_timedelta(self, py_bytes.as_bytes(), microseconds_overflow_behavior) } else if let Some(int) = extract_i64(self) { Ok(int_as_duration(self, int)?.into()) @@ -653,7 +653,7 @@ impl<'py> Input<'py> for Bound<'py, PyAny> { } fn validate_complex<'a>(&'a self, strict: bool, py: Python<'py>) -> ValResult>> { - if let Ok(complex) = self.downcast::() { + if let Ok(complex) = self.cast::() { return Ok(ValidationMatch::strict(EitherComplex::Py(complex.to_owned()))); } if strict { @@ -669,7 +669,7 @@ impl<'py> Input<'py> for Bound<'py, PyAny> { )); } - if let Ok(s) = self.downcast::() { + if let Ok(s) = self.cast::() { // If input is not a valid complex string, instead of telling users to correct // the string, it makes more sense to tell them to provide any acceptable value // since they might have just given values of some incorrect types instead @@ -713,7 +713,7 @@ fn from_attributes_applicable(obj: &Bound<'_, PyAny>) -> bool { .get_type() .getattr(intern!(obj.py(), "__module__")) .ok() - .and_then(|module_name| module_name.downcast_into::().ok()) + .and_then(|module_name| module_name.cast_into::().ok()) else { return false; }; @@ -726,9 +726,9 @@ fn from_attributes_applicable(obj: &Bound<'_, PyAny>) -> bool { /// Utility for extracting a string from a PyAny, if possible. fn maybe_as_string<'a>(v: &'a Bound<'_, PyAny>, unicode_error: ErrorType) -> ValResult> { - if let Ok(py_string) = v.downcast::() { + if let Ok(py_string) = v.cast::() { py_string_str(py_string).map(Some) - } else if let Ok(bytes) = v.downcast::() { + } else if let Ok(bytes) = v.cast::() { match from_utf8(bytes.as_bytes()) { Ok(s) => Ok(Some(s)), Err(_) => Err(ValError::new(unicode_error, v)), @@ -746,7 +746,7 @@ fn bytearray_to_str<'py>(bytearray: &Bound<'py, PyByteArray>) -> PyResult { /// or frozenset fn extract_sequence_iterable<'a, 'py>(obj: &'a Bound<'py, PyAny>) -> ValResult> { // Handle concrete non-overlapping types first, then abstract types - if let Ok(iterable) = obj.downcast::() { + if let Ok(iterable) = obj.cast::() { Ok(PySequenceIterable::List(iterable)) - } else if let Ok(iterable) = obj.downcast::() { + } else if let Ok(iterable) = obj.cast::() { Ok(PySequenceIterable::Tuple(iterable)) - } else if let Ok(iterable) = obj.downcast::() { + } else if let Ok(iterable) = obj.cast::() { Ok(PySequenceIterable::Set(iterable)) - } else if let Ok(iterable) = obj.downcast::() { + } else if let Ok(iterable) = obj.cast::() { Ok(PySequenceIterable::FrozenSet(iterable)) } else { // Try to get this as a generable iterable thing, but exclude string and mapping types @@ -930,7 +930,7 @@ fn extract_sequence_iterable<'a, 'py>(obj: &'a Bound<'py, PyAny>) -> ValResult

() || obj.is_instance_of::() || obj.is_instance_of::() - || obj.downcast::().is_ok()) + || obj.cast::().is_ok()) { if let Ok(iter) = obj.try_iter() { return Ok(PySequenceIterable::Iterator(iter)); diff --git a/src/input/input_string.rs b/src/input/input_string.rs index a635188a8..38878a256 100644 --- a/src/input/input_string.rs +++ b/src/input/input_string.rs @@ -30,7 +30,7 @@ pub enum StringMapping<'py> { impl<'py> StringMapping<'py> { pub fn new_key(py_key: Bound<'py, PyAny>) -> ValResult { - match py_key.downcast_into::() { + match py_key.cast_into::() { Ok(value) => Ok(Self::String(value)), Err(downcast_error) => Err(ValError::new( ErrorTypeDefaults::StringType, @@ -40,9 +40,9 @@ impl<'py> StringMapping<'py> { } pub fn new_value(py_value: Bound<'py, PyAny>) -> ValResult { - match py_value.downcast_into::() { + match py_value.cast_into::() { Ok(py_str) => Ok(Self::String(py_str)), - Err(downcast_error) => match downcast_error.into_inner().downcast_into::() { + Err(downcast_error) => match downcast_error.into_inner().cast_into::() { Ok(value) => Ok(Self::Mapping(value)), Err(downcast_error) => Err(ValError::new( ErrorTypeDefaults::StringType, diff --git a/src/input/return_enums.rs b/src/input/return_enums.rs index 526d2c970..40ed25d16 100644 --- a/src/input/return_enums.rs +++ b/src/input/return_enums.rs @@ -334,7 +334,7 @@ pub(crate) fn iterate_attributes<'a, 'py>( // or we get to the end of the list of attributes let name = attributes_iterator.next()?; // from benchmarks this is 14x faster than using the python `startswith` method - let name_cow = match name.downcast::() { + let name_cow = match name.cast::() { Ok(name) => name.to_string_lossy(), Err(e) => return Some(Err(e.into())), }; @@ -706,9 +706,11 @@ impl Rem for &Int { } } -impl FromPyObject<'_> for Int { - fn extract_bound(obj: &Bound<'_, PyAny>) -> PyResult { - match extract_int(obj) { +impl FromPyObject<'_, '_> for Int { + type Error = PyErr; + + fn extract(obj: Borrowed<'_, '_, PyAny>) -> PyResult { + match extract_int(&obj) { Some(i) => Ok(i), None => py_err!(PyTypeError; "Expected int, got {}", obj.get_type()), } diff --git a/src/lookup_key.rs b/src/lookup_key.rs index 7fe5d61e9..79356ea43 100644 --- a/src/lookup_key.rs +++ b/src/lookup_key.rs @@ -47,7 +47,7 @@ impl fmt::Display for LookupKey { impl LookupKey { pub fn from_py(py: Python, value: &Bound<'_, PyAny>, alt_alias: Option<&str>) -> PyResult { - if let Ok(alias_py) = value.downcast::() { + if let Ok(alias_py) = value.cast::() { let alias: String = alias_py.extract()?; let path1 = LookupPath::from_str(py, &alias, Some(alias_py.clone())); match alt_alias { @@ -58,11 +58,11 @@ impl LookupKey { None => Ok(Self::Simple(path1)), } } else { - let list = value.downcast::()?; + let list = value.cast::()?; let Ok(first) = list.get_item(0) else { return py_schema_err!("Lookup paths should have at least one element"); }; - let mut locs: Vec = if first.downcast::().is_ok() { + let mut locs: Vec = if first.cast::().is_ok() { // list of strings rather than list of lists vec![LookupPath::from_list(list)?] 
} else { @@ -385,13 +385,13 @@ impl LookupPath { } fn from_list(obj: &Bound<'_, PyAny>) -> PyResult { - let mut iter = obj.downcast::()?.iter(); + let mut iter = obj.cast::()?.iter(); let Some(first_item) = iter.next() else { return py_schema_err!("Each alias path should have at least one element"); }; - let Ok(first_item_py_str) = first_item.downcast_into::() else { + let Ok(first_item_py_str) = first_item.cast_into::() else { return py_err!(PyTypeError; "The first item in an alias path should be a string"); }; @@ -485,7 +485,7 @@ impl<'a, 'py> IntoPyObject<'py> for &'a PathItemString { impl PathItem { pub fn from_py(obj: Bound<'_, PyAny>) -> PyResult { - let obj = match obj.downcast_into::() { + let obj = match obj.cast_into::() { Ok(py_str_key) => { let str_key = py_str_key.to_str()?.to_string(); return Ok(Self::S(PathItemString { @@ -507,7 +507,7 @@ impl PathItem { pub fn py_get_item<'py>(&self, py_any: &Bound<'py, PyAny>) -> Option> { // we definitely don't want to index strings, so explicitly omit this case - if py_any.downcast::().is_ok() { + if py_any.cast::().is_ok() { None } else { // otherwise, blindly try getitem on v since no better logic is realistic @@ -560,7 +560,7 @@ impl PathItem { impl PathItemString { fn py_get_attrs<'py>(&self, obj: &Bound<'py, PyAny>) -> PyResult>> { // if obj is a dict, we want to use get_item, not getattr - if obj.downcast::().is_ok() { + if obj.cast::().is_ok() { Ok(py_get_item(obj, self)) } else { py_get_attrs(obj, &self.py_key) diff --git a/src/serializers/computed_fields.rs b/src/serializers/computed_fields.rs index 6589f41b0..63d398f47 100644 --- a/src/serializers/computed_fields.rs +++ b/src/serializers/computed_fields.rs @@ -197,7 +197,7 @@ impl ComputedField { definitions: &mut DefinitionsBuilder>, ) -> PyResult { let py = schema.py(); - let schema: &Bound<'_, PyDict> = schema.downcast()?; + let schema: &Bound<'_, PyDict> = schema.cast()?; let property_name: Bound<'_, PyString> = schema.get_as_req(intern!(py, "property_name"))?; let return_schema = schema.get_as_req(intern!(py, "return_schema"))?; let serializer = CombinedSerializer::build(&return_schema, config, definitions) diff --git a/src/serializers/config.rs b/src/serializers/config.rs index 0779e2964..0dcacd0e6 100644 --- a/src/serializers/config.rs +++ b/src/serializers/config.rs @@ -347,8 +347,9 @@ pub fn utf8_py_error(py: Python, err: Utf8Error, data: &[u8]) -> PyErr { } } -impl FromPyObject<'_> for InfNanMode { - fn extract_bound(ob: &Bound<'_, PyAny>) -> PyResult { - Self::from_str(ob.downcast::()?.to_str()?) +impl FromPyObject<'_, '_> for InfNanMode { + type Error = PyErr; + fn extract(ob: Borrowed<'_, '_, PyAny>) -> PyResult { + Self::from_str(ob.cast::()?.to_str()?) 
} } diff --git a/src/serializers/extra.rs b/src/serializers/extra.rs index 8d4afddf4..47bf17521 100644 --- a/src/serializers/extra.rs +++ b/src/serializers/extra.rs @@ -317,9 +317,11 @@ pub enum WarningsMode { Error, } -impl<'py> FromPyObject<'py> for WarningsMode { - fn extract_bound(ob: &Bound<'py, PyAny>) -> PyResult { - if let Ok(bool_mode) = ob.downcast::() { +impl<'py> FromPyObject<'_, 'py> for WarningsMode { + type Error = PyErr; + + fn extract(ob: Borrowed<'_, 'py, PyAny>) -> PyResult { + if let Ok(bool_mode) = ob.cast::() { Ok(bool_mode.is_true().into()) } else if let Ok(str_mode) = ob.extract::<&str>() { match str_mode { diff --git a/src/serializers/fields.rs b/src/serializers/fields.rs index 3793bdd5d..27641b458 100644 --- a/src/serializers/fields.rs +++ b/src/serializers/fields.rs @@ -154,7 +154,7 @@ impl GeneralFieldsSerializer { match self.mode { FieldsMode::ModelExtra => value.extract().ok(), _ => { - if let Ok(main_dict) = value.downcast::() { + if let Ok(main_dict) = value.cast::() { Some((main_dict.clone(), None)) } else { None @@ -511,7 +511,7 @@ impl TypeSerializer for GeneralFieldsSerializer { } fn key_str<'a>(key: &'a Bound<'_, PyAny>) -> PyResult<&'a str> { - key.downcast::()?.to_str() + key.cast::()?.to_str() } fn dict_items<'py>( diff --git a/src/serializers/filter.rs b/src/serializers/filter.rs index 7f893726a..147db2ed6 100644 --- a/src/serializers/filter.rs +++ b/src/serializers/filter.rs @@ -39,13 +39,13 @@ fn map_negative_indices<'py>( len: Option, ) -> PyResult> { let py = include_or_exclude.py(); - if let Ok(exclude_dict) = include_or_exclude.downcast::() { + if let Ok(exclude_dict) = include_or_exclude.cast::() { let out = PyDict::new(py); for (k, v) in exclude_dict.iter() { out.set_item(map_negative_index(&k, len)?, v)?; } Ok(out.into_any()) - } else if let Ok(exclude_set) = include_or_exclude.downcast::() { + } else if let Ok(exclude_set) = include_or_exclude.cast::() { let mut values = Vec::with_capacity(exclude_set.len()); for v in exclude_set.iter() { values.push(map_negative_index(&v, len)?); @@ -78,7 +78,7 @@ impl SchemaFilter { if value.is_none() { Ok(None) } else { - let py_set = value.downcast::()?; + let py_set = value.cast::()?; let mut set: AHashSet = AHashSet::with_capacity(py_set.len()); for item in py_set { @@ -117,7 +117,7 @@ impl SchemaFilter { if value.is_none() { Ok(None) } else { - let py_set = value.downcast::()?; + let py_set = value.cast::()?; let mut set: AHashSet = AHashSet::with_capacity(py_set.len()); for item in py_set.iter() { @@ -161,7 +161,7 @@ trait FilterLogic { if let Some(exclude) = exclude { if exclude.is_none() { // Do nothing; place this check at the top for performance in the common case - } else if let Ok(exclude_dict) = exclude.downcast::() { + } else if let Ok(exclude_dict) = exclude.cast::() { let op_exc_value = merge_all_value(exclude_dict, py_key)?; if let Some(exc_value) = op_exc_value { if is_ellipsis_like(&exc_value) { @@ -172,7 +172,7 @@ trait FilterLogic { // we want to return `Some((..., Some(next_exclude))` next_exclude = Some(exc_value); } - } else if let Ok(exclude_set) = exclude.downcast::() { + } else if let Ok(exclude_set) = exclude.cast::() { if exclude_set.contains(py_key)? || exclude_set.contains(intern!(exclude_set.py(), "__all__"))? 
{ // index is in the exclude set, we return Ok(None) to omit this index return Ok(None); @@ -189,7 +189,7 @@ trait FilterLogic { if let Some(include) = include { if include.is_none() { // Do nothing; place this check at the top for performance in the common case - } else if let Ok(include_dict) = include.downcast::() { + } else if let Ok(include_dict) = include.cast::() { let op_inc_value = merge_all_value(include_dict, py_key)?; if let Some(inc_value) = op_inc_value { @@ -204,7 +204,7 @@ trait FilterLogic { // this index should be omitted return Ok(None); } - } else if let Ok(include_set) = include.downcast::() { + } else if let Ok(include_set) = include.cast::() { if include_set.contains(py_key)? || include_set.contains(intern!(include_set.py(), "__all__"))? { return Ok(Some((None, next_exclude))); } else if !self.explicit_include(int_key) { @@ -321,7 +321,7 @@ where /// detect both ellipsis and `True` to be compatible with pydantic V1 fn is_ellipsis_like(v: &Bound<'_, PyAny>) -> bool { v.is(v.py().Ellipsis()) - || match v.downcast::() { + || match v.cast::() { Ok(b) => b.is_true(), Err(_) => false, } @@ -352,9 +352,9 @@ fn merge_all_value<'py>( } fn as_dict<'py>(value: &Bound<'py, PyAny>) -> PyResult> { - if let Ok(dict) = value.downcast::() { + if let Ok(dict) = value.cast::() { dict.copy() - } else if let Ok(set) = value.downcast::() { + } else if let Ok(set) = value.cast::() { let py = value.py(); let dict = PyDict::new(py); for item in set.iter() { @@ -370,7 +370,7 @@ fn as_dict<'py>(value: &Bound<'py, PyAny>) -> PyResult> { fn merge_dicts<'py>(item_dict: &Bound<'py, PyDict>, all_value: &Bound<'py, PyAny>) -> PyResult> { let item_dict = item_dict.copy()?; - if let Ok(all_dict) = all_value.downcast::() { + if let Ok(all_dict) = all_value.cast::() { for (all_key, all_value) in all_dict.iter() { if let Some(item_value) = item_dict.get_item(&all_key)? { if is_ellipsis_like(&item_value) { @@ -385,7 +385,7 @@ fn merge_dicts<'py>(item_dict: &Bound<'py, PyDict>, all_value: &Bound<'py, PyAny item_dict.set_item(all_key, all_value)?; } } - } else if let Ok(set) = all_value.downcast::() { + } else if let Ok(set) = all_value.cast::() { for item in set.iter() { if !item_dict.contains(&item)? { item_dict.set_item(item, set.py().Ellipsis())?; diff --git a/src/serializers/infer.rs b/src/serializers/infer.rs index ee2a788a7..acffc01e6 100644 --- a/src/serializers/infer.rs +++ b/src/serializers/infer.rs @@ -62,7 +62,7 @@ pub(crate) fn infer_to_python_known( macro_rules! serialize_seq { ($t:ty) => { value - .downcast::<$t>()? + .cast::<$t>()? .iter() .map(|v| infer_to_python(&v, None, None, extra)) .collect::>>>()? @@ -71,7 +71,7 @@ pub(crate) fn infer_to_python_known( macro_rules! serialize_seq_filter { ($t:ty) => {{ - let py_seq = value.downcast::<$t>()?; + let py_seq = value.cast::<$t>()?; let mut items = Vec::with_capacity(py_seq.len()); let filter = AnyFilter::new(); let len = value.len().ok(); @@ -137,14 +137,14 @@ pub(crate) fn infer_to_python_known( v.into_py_any(py)? } ObType::Decimal => value.to_string().into_py_any(py)?, - ObType::StrSubclass => PyString::new(py, value.downcast::()?.to_str()?).into(), + ObType::StrSubclass => PyString::new(py, value.cast::()?.to_str()?).into(), ObType::Bytes => extra .config .bytes_mode - .bytes_to_string(py, value.downcast::()?.as_bytes())? + .bytes_to_string(py, value.cast::()?.as_bytes())? 
.into_py_any(py)?, ObType::Bytearray => { - let py_byte_array = value.downcast::()?; + let py_byte_array = value.cast::()?; pyo3::sync::with_critical_section(py_byte_array, || { // SAFETY: `py_byte_array` is protected by a critical section, // which guarantees no mutation, and `bytes_to_string` does not @@ -171,24 +171,21 @@ pub(crate) fn infer_to_python_known( PyList::new(py, elements)?.into() } ObType::Dict => { - let dict = value.downcast::()?; + let dict = value.cast::()?; serialize_pairs_python(py, dict.iter().map(Ok), include, exclude, extra, |k| { Ok(PyString::new(py, &infer_json_key(&k, extra)?).into_any()) })? } ObType::Datetime => { - let datetime = extra - .config - .temporal_mode - .datetime_to_json(value.py(), value.downcast()?)?; + let datetime = extra.config.temporal_mode.datetime_to_json(value.py(), value.cast()?)?; datetime.into_py_any(py)? } ObType::Date => { - let date = extra.config.temporal_mode.date_to_json(value.py(), value.downcast()?)?; + let date = extra.config.temporal_mode.date_to_json(value.py(), value.cast()?)?; date.into_py_any(py)? } ObType::Time => { - let time = extra.config.temporal_mode.time_to_json(value.py(), value.downcast()?)?; + let time = extra.config.temporal_mode.time_to_json(value.py(), value.cast()?)?; time.into_py_any(py)? } ObType::Timedelta => { @@ -218,7 +215,7 @@ pub(crate) fn infer_to_python_known( infer_to_python(&v, include, exclude, extra)? } ObType::Generator => { - let py_seq = value.downcast::()?; + let py_seq = value.cast::()?; let mut items = Vec::new(); let filter = AnyFilter::new(); @@ -237,7 +234,7 @@ pub(crate) fn infer_to_python_known( PyList::new(py, items)?.into() } ObType::Complex => { - let v = value.downcast::()?; + let v = value.cast::()?; let complex_str = type_serializers::complex::complex_to_str(v); complex_str.into_py_any(py)? } @@ -273,14 +270,14 @@ pub(crate) fn infer_to_python_known( PyFrozenSet::new(py, &elements)?.into() } ObType::Dict => { - let dict = value.downcast::()?; + let dict = value.cast::()?; serialize_pairs_python(py, dict.iter().map(Ok), include, exclude, extra, Ok)? } ObType::PydanticSerializable => serialize_with_serializer()?, ObType::Dataclass => serialize_pairs_python(py, any_dataclass_iter(value)?.0, include, exclude, extra, Ok)?, ObType::Generator => { let iter = super::type_serializers::generator::SerializationIterator::new( - value.downcast()?, + value.cast()?, super::type_serializers::any::AnySerializer::get(), SchemaFilter::default(), include, @@ -290,7 +287,7 @@ pub(crate) fn infer_to_python_known( iter.into_py_any(py)? } ObType::Complex => { - let v = value.downcast::()?; + let v = value.cast::()?; v.into_py_any(py)? } ObType::Unknown => { @@ -386,7 +383,7 @@ pub(crate) fn infer_serialize_known( macro_rules! serialize_seq { ($t:ty) => {{ - let py_seq = value.downcast::<$t>().map_err(py_err_se_err)?; + let py_seq = value.cast::<$t>().map_err(py_err_se_err)?; let mut seq = serializer.serialize_seq(Some(py_seq.len()))?; for element in py_seq.iter() { let item_serializer = SerializeInfer::new(&element, include, exclude, extra); @@ -398,7 +395,7 @@ pub(crate) fn infer_serialize_known( macro_rules! 
serialize_seq_filter { ($t:ty) => {{ - let py_seq = value.downcast::<$t>().map_err(py_err_se_err)?; + let py_seq = value.cast::<$t>().map_err(py_err_se_err)?; let mut seq = serializer.serialize_seq(Some(py_seq.len()))?; let filter = AnyFilter::new(); let len = value.len().ok(); @@ -422,7 +419,7 @@ pub(crate) fn infer_serialize_known( ObType::Int | ObType::IntSubclass => serialize!(Int), ObType::Bool => serialize!(bool), ObType::Complex => { - let v = value.downcast::().map_err(py_err_se_err)?; + let v = value.cast::().map_err(py_err_se_err)?; let complex_str = type_serializers::complex::complex_to_str(v); Ok(serializer.collect_str::(&complex_str)?) } @@ -432,15 +429,15 @@ pub(crate) fn infer_serialize_known( } ObType::Decimal => value.to_string().serialize(serializer), ObType::Str | ObType::StrSubclass => { - let py_str = value.downcast::().map_err(py_err_se_err)?; + let py_str = value.cast::().map_err(py_err_se_err)?; super::type_serializers::string::serialize_py_str(py_str, serializer) } ObType::Bytes => { - let py_bytes = value.downcast::().map_err(py_err_se_err)?; + let py_bytes = value.cast::().map_err(py_err_se_err)?; extra.config.bytes_mode.serialize_bytes(py_bytes.as_bytes(), serializer) } ObType::Bytearray => { - let py_byte_array = value.downcast::().map_err(py_err_se_err)?; + let py_byte_array = value.cast::().map_err(py_err_se_err)?; pyo3::sync::with_critical_section(py_byte_array, || { // SAFETY: `py_byte_array` is protected by a critical section, // which guarantees no mutation, and `serialize_bytes` does not @@ -451,7 +448,7 @@ pub(crate) fn infer_serialize_known( }) } ObType::Dict => { - let dict = value.downcast::().map_err(py_err_se_err)?; + let dict = value.cast::().map_err(py_err_se_err)?; serialize_pairs_json(dict.iter().map(Ok), dict.len(), serializer, include, exclude, extra) } ObType::List => serialize_seq_filter!(PyList), @@ -459,15 +456,15 @@ pub(crate) fn infer_serialize_known( ObType::Set => serialize_seq!(PySet), ObType::Frozenset => serialize_seq!(PyFrozenSet), ObType::Datetime => { - let py_datetime = value.downcast().map_err(py_err_se_err)?; + let py_datetime = value.cast().map_err(py_err_se_err)?; extra.config.temporal_mode.datetime_serialize(py_datetime, serializer) } ObType::Date => { - let py_date = value.downcast().map_err(py_err_se_err)?; + let py_date = value.cast().map_err(py_err_se_err)?; extra.config.temporal_mode.date_serialize(py_date, serializer) } ObType::Time => { - let py_time = value.downcast().map_err(py_err_se_err)?; + let py_time = value.cast().map_err(py_err_se_err)?; extra.config.temporal_mode.time_serialize(py_time, serializer) } ObType::Timedelta => { @@ -524,7 +521,7 @@ pub(crate) fn infer_serialize_known( infer_serialize(&v, serializer, include, exclude, extra) } ObType::Generator => { - let py_seq = value.downcast::().map_err(py_err_se_err)?; + let py_seq = value.cast::().map_err(py_err_se_err)?; let mut seq = serializer.serialize_seq(None)?; let filter = AnyFilter::new(); for (index, r) in py_seq.try_iter().map_err(py_err_se_err)?.enumerate() { @@ -614,13 +611,13 @@ pub(crate) fn infer_json_key_known<'a>( } ObType::Decimal => Ok(Cow::Owned(key.to_string())), ObType::Bool => super::type_serializers::simple::bool_json_key(key), - ObType::Str | ObType::StrSubclass => key.downcast::()?.to_cow(), + ObType::Str | ObType::StrSubclass => key.cast::()?.to_cow(), ObType::Bytes => extra .config .bytes_mode - .bytes_to_string(key.py(), key.downcast::()?.as_bytes()), + .bytes_to_string(key.py(), key.cast::()?.as_bytes()), ObType::Bytearray => 
{ - let py_byte_array = key.downcast::()?; + let py_byte_array = key.cast::()?; pyo3::sync::with_critical_section(py_byte_array, || { // SAFETY: `py_byte_array` is protected by a critical section, // which guarantees no mutation, and `bytes_to_string` does not @@ -631,9 +628,9 @@ pub(crate) fn infer_json_key_known<'a>( }) .map(|cow| Cow::Owned(cow.into_owned())) } - ObType::Datetime => extra.config.temporal_mode.datetime_json_key(key.downcast()?), - ObType::Date => extra.config.temporal_mode.date_json_key(key.downcast()?), - ObType::Time => extra.config.temporal_mode.time_json_key(key.downcast()?), + ObType::Datetime => extra.config.temporal_mode.datetime_json_key(key.cast()?), + ObType::Date => extra.config.temporal_mode.date_json_key(key.cast()?), + ObType::Time => extra.config.temporal_mode.time_json_key(key.cast()?), ObType::Uuid => { let uuid = super::type_serializers::uuid::uuid_to_string(key)?; Ok(Cow::Owned(uuid)) @@ -652,7 +649,7 @@ pub(crate) fn infer_json_key_known<'a>( } ObType::Tuple => { let mut key_build = super::type_serializers::tuple::KeyBuilder::new(); - for element in key.downcast::()?.iter_borrowed() { + for element in key.cast::()?.iter_borrowed() { key_build.push(&infer_json_key(&element, extra)?); } Ok(Cow::Owned(key_build.finish())) @@ -675,7 +672,7 @@ pub(crate) fn infer_json_key_known<'a>( Ok(Cow::Owned(key.str()?.to_string_lossy().into_owned())) } ObType::Complex => { - let v = key.downcast::()?; + let v = key.cast::()?; Ok(type_serializers::complex::complex_to_str(v).into()) } ObType::Pattern => Ok(Cow::Owned( diff --git a/src/serializers/mod.rs b/src/serializers/mod.rs index 1b9ae8493..8aaab0516 100644 --- a/src/serializers/mod.rs +++ b/src/serializers/mod.rs @@ -95,7 +95,7 @@ impl SchemaSerializer { #[pyo3(signature = (schema, config=None))] pub fn py_new(schema: Bound<'_, PyDict>, config: Option<&Bound<'_, PyDict>>) -> PyResult { let mut definitions_builder = DefinitionsBuilder::new(); - let serializer = CombinedSerializer::build_base(schema.downcast()?, config, &mut definitions_builder)?; + let serializer = CombinedSerializer::build_base(schema.cast()?, config, &mut definitions_builder)?; Ok(Self { serializer, definitions: definitions_builder.finish()?, diff --git a/src/serializers/ob_type.rs b/src/serializers/ob_type.rs index f1c161dfb..03d12fe62 100644 --- a/src/serializers/ob_type.rs +++ b/src/serializers/ob_type.rs @@ -363,7 +363,7 @@ fn is_pydantic_serializable(op_value: Option<&Bound<'_, PyAny>>) -> bool { fn is_generator(op_value: Option<&Bound<'_, PyAny>>) -> bool { if let Some(value) = op_value { - value.downcast::().is_ok() + value.cast::().is_ok() } else { false } diff --git a/src/serializers/shared.rs b/src/serializers/shared.rs index bffd73b2b..71987df7d 100644 --- a/src/serializers/shared.rs +++ b/src/serializers/shared.rs @@ -598,13 +598,13 @@ where let py = dataclass.py(); let fields = dataclass .getattr(intern!(py, "__dataclass_fields__"))? 
- .downcast_into::()?; + .cast_into::()?; let field_type_marker = get_field_marker(py)?; let next = move |(field_name, field): (Bound<'py, PyAny>, Bound<'py, PyAny>)| -> PyResult, Bound<'py, PyAny>)>> { let field_type = field.getattr(intern!(py, "_field_type"))?; if field_type.is(field_type_marker) { - let value = dataclass.getattr(field_name.downcast::()?)?; + let value = dataclass.getattr(field_name.cast::()?)?; Ok(Some((field_name, value))) } else { Ok(None) diff --git a/src/serializers/type_serializers/bytes.rs b/src/serializers/type_serializers/bytes.rs index 8ace2091a..bf75b58ab 100644 --- a/src/serializers/type_serializers/bytes.rs +++ b/src/serializers/type_serializers/bytes.rs @@ -73,7 +73,7 @@ impl TypeSerializer for BytesSerializer { extra: &Extra, ) -> PyResult> { let py = value.py(); - match value.downcast::() { + match value.cast::() { Ok(py_bytes) => match extra.mode { SerMode::Json => self .bytes_mode @@ -89,7 +89,7 @@ impl TypeSerializer for BytesSerializer { } fn json_key<'a>(&self, key: &'a Bound<'_, PyAny>, extra: &Extra) -> PyResult> { - match key.downcast::() { + match key.cast::() { Ok(py_bytes) => self.bytes_mode.bytes_to_string(key.py(), py_bytes.as_bytes()), Err(_) => { extra.warnings.on_fallback_py(self.get_name(), key, extra)?; @@ -106,7 +106,7 @@ impl TypeSerializer for BytesSerializer { exclude: Option<&Bound<'_, PyAny>>, extra: &Extra, ) -> Result { - match value.downcast::() { + match value.cast::() { Ok(py_bytes) => self.bytes_mode.serialize_bytes(py_bytes.as_bytes(), serializer), Err(_) => { extra.warnings.on_fallback_ser::(self.get_name(), value, extra)?; diff --git a/src/serializers/type_serializers/complex.rs b/src/serializers/type_serializers/complex.rs index 8156528f2..d8452bf03 100644 --- a/src/serializers/type_serializers/complex.rs +++ b/src/serializers/type_serializers/complex.rs @@ -36,7 +36,7 @@ impl TypeSerializer for ComplexSerializer { extra: &Extra, ) -> PyResult> { let py = value.py(); - match value.downcast::() { + match value.cast::() { Ok(py_complex) => match extra.mode { SerMode::Json => complex_to_str(py_complex).into_py_any(py), _ => Ok(value.clone().unbind()), @@ -60,7 +60,7 @@ impl TypeSerializer for ComplexSerializer { exclude: Option<&Bound<'_, PyAny>>, extra: &Extra, ) -> Result { - match value.downcast::() { + match value.cast::() { Ok(py_complex) => { let s = complex_to_str(py_complex); Ok(serializer.collect_str::(&s)?) diff --git a/src/serializers/type_serializers/dataclass.rs b/src/serializers/type_serializers/dataclass.rs index 6b9f34923..6a5fa3841 100644 --- a/src/serializers/type_serializers/dataclass.rs +++ b/src/serializers/type_serializers/dataclass.rs @@ -40,7 +40,7 @@ impl BuildSerializer for DataclassArgsBuilder { let serialize_by_alias = config.get_as(intern!(py, "serialize_by_alias"))?; for (index, item) in fields_list.iter().enumerate() { - let field_info = item.downcast::()?; + let field_info = item.cast::()?; let name: String = field_info.get_as_req(intern!(py, "name"))?; let key_py: Py = PyString::new(py, &name).into(); @@ -107,7 +107,7 @@ impl BuildSerializer for DataclassSerializer { let fields = schema .get_as_req::>(intern!(py, "fields"))? 
.iter() - .map(|s| Ok(s.downcast_into::()?.unbind())) + .map(|s| Ok(s.cast_into::()?.unbind())) .collect::>>()?; Ok(CombinedSerializer::Dataclass(Self { diff --git a/src/serializers/type_serializers/datetime_etc.rs b/src/serializers/type_serializers/datetime_etc.rs index 9d267cb39..da237853c 100644 --- a/src/serializers/type_serializers/datetime_etc.rs +++ b/src/serializers/type_serializers/datetime_etc.rs @@ -71,7 +71,7 @@ pub(crate) fn time_to_milliseconds(py_time: &Bound<'_, PyTime>) -> PyResult } fn downcast_date_reject_datetime<'a, 'py>(py_date: &'a Bound<'py, PyAny>) -> PyResult<&'a Bound<'py, PyDate>> { - if let Ok(py_date) = py_date.downcast::() { + if let Ok(py_date) = py_date.cast::() { // because `datetime` is a subclass of `date` we have to check that the value is not a // `datetime` to avoid lossy serialization if !py_date.is_instance_of::() { @@ -170,7 +170,7 @@ macro_rules! build_temporal_serializer { build_temporal_serializer!( DatetimeSerializer, "datetime", - PyAnyMethods::downcast::, + Bound::cast::, datetime_to_json, datetime_json_key, datetime_serialize @@ -188,7 +188,7 @@ build_temporal_serializer!( build_temporal_serializer!( TimeSerializer, "time", - PyAnyMethods::downcast::, + Bound::cast::, time_to_json, time_json_key, time_serialize diff --git a/src/serializers/type_serializers/definitions.rs b/src/serializers/type_serializers/definitions.rs index 384cd5d3b..4849e8803 100644 --- a/src/serializers/type_serializers/definitions.rs +++ b/src/serializers/type_serializers/definitions.rs @@ -29,7 +29,7 @@ impl BuildSerializer for DefinitionsSerializerBuilder { let schema_definitions: Bound<'_, PyList> = schema.get_as_req(intern!(py, "definitions"))?; for schema_definition in schema_definitions { - let schema = schema_definition.downcast()?; + let schema = schema_definition.cast()?; let reference = schema.get_as_req::(intern!(py, "ref"))?; let serializer = CombinedSerializer::build(schema, config, definitions)?; definitions.add_definition(reference, serializer)?; diff --git a/src/serializers/type_serializers/dict.rs b/src/serializers/type_serializers/dict.rs index 99908392c..75dacb367 100644 --- a/src/serializers/type_serializers/dict.rs +++ b/src/serializers/type_serializers/dict.rs @@ -81,7 +81,7 @@ impl TypeSerializer for DictSerializer { extra: &Extra, ) -> PyResult> { let py = value.py(); - match value.downcast::() { + match value.cast::() { Ok(py_dict) => { let value_serializer = self.value_serializer.as_ref(); @@ -119,7 +119,7 @@ impl TypeSerializer for DictSerializer { exclude: Option<&Bound<'_, PyAny>>, extra: &Extra, ) -> Result { - match value.downcast::() { + match value.cast::() { Ok(py_dict) => { let mut map = serializer.serialize_map(Some(py_dict.len()))?; let key_serializer = self.key_serializer.as_ref(); diff --git a/src/serializers/type_serializers/format.rs b/src/serializers/type_serializers/format.rs index 51d0dc859..03ebe0972 100644 --- a/src/serializers/type_serializers/format.rs +++ b/src/serializers/type_serializers/format.rs @@ -128,7 +128,7 @@ impl TypeSerializer for FormatSerializer { .call(key) .map_err(PydanticSerializationError::new_err)? 
.into_bound(key.py()) - .downcast_into::()?; + .cast_into::()?; Ok(Cow::Owned(py_str.to_str()?.to_owned())) } else { none_json_key() @@ -146,7 +146,7 @@ impl TypeSerializer for FormatSerializer { if self.when_used.should_use_json(value) { match self.call(value) { Ok(v) => { - let py_str = v.bind(value.py()).downcast().map_err(py_err_se_err)?; + let py_str = v.bind(value.py()).cast().map_err(py_err_se_err)?; serialize_py_str(py_str, serializer) } Err(e) => Err(S::Error::custom(e)), diff --git a/src/serializers/type_serializers/generator.rs b/src/serializers/type_serializers/generator.rs index b140acb2e..35048618e 100644 --- a/src/serializers/type_serializers/generator.rs +++ b/src/serializers/type_serializers/generator.rs @@ -56,7 +56,7 @@ impl TypeSerializer for GeneratorSerializer { exclude: Option<&Bound<'_, PyAny>>, extra: &Extra, ) -> PyResult> { - match value.downcast::() { + match value.cast::() { Ok(py_iter) => { let py = value.py(); match extra.mode { @@ -113,7 +113,7 @@ impl TypeSerializer for GeneratorSerializer { exclude: Option<&Bound<'_, PyAny>>, extra: &Extra, ) -> Result { - match value.downcast::() { + match value.cast::() { Ok(py_iter) => { let len = value.len().ok(); let mut seq = serializer.serialize_seq(len)?; diff --git a/src/serializers/type_serializers/list.rs b/src/serializers/type_serializers/list.rs index f2391816d..f1dcfab9f 100644 --- a/src/serializers/type_serializers/list.rs +++ b/src/serializers/type_serializers/list.rs @@ -59,7 +59,7 @@ impl TypeSerializer for ListSerializer { exclude: Option<&Bound<'_, PyAny>>, extra: &Extra, ) -> PyResult> { - match value.downcast::() { + match value.cast::() { Ok(py_list) => { let py = value.py(); let item_serializer = self.item_serializer.as_ref(); @@ -97,7 +97,7 @@ impl TypeSerializer for ListSerializer { exclude: Option<&Bound<'_, PyAny>>, extra: &Extra, ) -> Result { - match value.downcast::() { + match value.cast::() { Ok(py_list) => { let mut seq = serializer.serialize_seq(Some(py_list.len()))?; let item_serializer = self.item_serializer.as_ref(); diff --git a/src/serializers/type_serializers/literal.rs b/src/serializers/type_serializers/literal.rs index 5f94b4daf..d5410dc35 100644 --- a/src/serializers/type_serializers/literal.rs +++ b/src/serializers/type_serializers/literal.rs @@ -46,11 +46,11 @@ impl BuildSerializer for LiteralSerializer { let mut repr_args: Vec = Vec::new(); for item in expected { repr_args.push(item.repr()?.extract()?); - if let Ok(bool) = item.downcast::() { + if let Ok(bool) = item.cast::() { expected_py.append(bool)?; } else if let Some(int) = extract_i64(&item) { expected_int.insert(int); - } else if let Ok(py_str) = item.downcast::() { + } else if let Ok(py_str) = item.cast::() { expected_str.insert(py_str.to_str()?.to_string()); } else { expected_py.append(item)?; @@ -90,7 +90,7 @@ impl LiteralSerializer { } } if !self.expected_str.is_empty() { - if let Ok(py_str) = value.downcast::() { + if let Ok(py_str) = value.cast::() { let s = py_str.to_str()?; if self.expected_str.contains(s) { return Ok(OutputValue::OkStr(PyString::new(value.py(), s))); diff --git a/src/serializers/type_serializers/model.rs b/src/serializers/type_serializers/model.rs index 7c3cea3b3..eedbd69ac 100644 --- a/src/serializers/type_serializers/model.rs +++ b/src/serializers/type_serializers/model.rs @@ -52,9 +52,9 @@ impl BuildSerializer for ModelFieldsBuilder { let serialize_by_alias = config.get_as(intern!(py, "serialize_by_alias"))?; for (key, value) in fields_dict { - let key_py = key.downcast_into::()?; + let 
key_py = key.cast_into::()?; let key: String = key_py.extract()?; - let field_info = value.downcast()?; + let field_info = value.cast()?; let key_py: Py = key_py.into(); @@ -214,12 +214,12 @@ impl ModelSerializer { fn get_inner_value<'py>(&self, model: &Bound<'py, PyAny>, extra: &Extra) -> PyResult> { let py: Python<'_> = model.py(); - let mut attrs = model.getattr(intern!(py, "__dict__"))?.downcast_into::()?; + let mut attrs = model.getattr(intern!(py, "__dict__"))?.cast_into::()?; if extra.exclude_unset { let fields_set = model .getattr(intern!(py, "__pydantic_fields_set__"))? - .downcast_into::()?; + .cast_into::()?; let new_attrs = attrs.copy()?; for key in new_attrs.keys() { diff --git a/src/serializers/type_serializers/other.rs b/src/serializers/type_serializers/other.rs index 56dae66ac..6bdff02fd 100644 --- a/src/serializers/type_serializers/other.rs +++ b/src/serializers/type_serializers/other.rs @@ -26,7 +26,7 @@ impl BuildSerializer for ChainBuilder { .iter() .last() .unwrap() - .downcast_into()?; + .cast_into()?; CombinedSerializer::build(&last_schema, config, definitions) } } diff --git a/src/serializers/type_serializers/set_frozenset.rs b/src/serializers/type_serializers/set_frozenset.rs index 6387fd2f4..ebc98905f 100644 --- a/src/serializers/type_serializers/set_frozenset.rs +++ b/src/serializers/type_serializers/set_frozenset.rs @@ -59,7 +59,7 @@ macro_rules! build_serializer { extra: &Extra, ) -> PyResult> { let py = value.py(); - match value.downcast::<$py_type>() { + match value.cast::<$py_type>() { Ok(py_set) => { let item_serializer = self.item_serializer.as_ref(); @@ -91,7 +91,7 @@ macro_rules! build_serializer { exclude: Option<&Bound<'_, PyAny>>, extra: &Extra, ) -> Result { - match value.downcast::<$py_type>() { + match value.cast::<$py_type>() { Ok(py_set) => { let mut seq = serializer.serialize_seq(Some(py_set.len()))?; let item_serializer = self.item_serializer.as_ref(); diff --git a/src/serializers/type_serializers/string.rs b/src/serializers/type_serializers/string.rs index be54e9619..33cfeb498 100644 --- a/src/serializers/type_serializers/string.rs +++ b/src/serializers/type_serializers/string.rs @@ -49,7 +49,7 @@ impl TypeSerializer for StrSerializer { match extra.ob_type_lookup.is_type(value, ObType::Str) { IsType::Exact => Ok(value.clone().unbind()), IsType::Subclass => match extra.mode { - SerMode::Json => value.downcast::()?.to_str()?.into_py_any(py), + SerMode::Json => value.cast::()?.to_str()?.into_py_any(py), _ => Ok(value.clone().unbind()), }, IsType::False => { @@ -60,7 +60,7 @@ impl TypeSerializer for StrSerializer { } fn json_key<'a>(&self, key: &'a Bound<'_, PyAny>, extra: &Extra) -> PyResult> { - if let Ok(py_str) = key.downcast::() { + if let Ok(py_str) = key.cast::() { // FIXME py cow to avoid the copy Ok(Cow::Owned(py_str.to_string_lossy().into_owned())) } else { @@ -77,7 +77,7 @@ impl TypeSerializer for StrSerializer { exclude: Option<&Bound<'_, PyAny>>, extra: &Extra, ) -> Result { - match value.downcast::() { + match value.cast::() { Ok(py_str) => serialize_py_str(py_str, serializer), Err(_) => { extra.warnings.on_fallback_ser::(self.get_name(), value, extra)?; diff --git a/src/serializers/type_serializers/tuple.rs b/src/serializers/type_serializers/tuple.rs index 20315e8a4..c82b92da3 100644 --- a/src/serializers/type_serializers/tuple.rs +++ b/src/serializers/type_serializers/tuple.rs @@ -38,7 +38,7 @@ impl BuildSerializer for TupleSerializer { let items: Bound<'_, PyList> = schema.get_as_req(intern!(py, "items_schema"))?; let serializers: 
             .iter()
-            .map(|item| CombinedSerializer::build(item.downcast()?, config, definitions))
+            .map(|item| CombinedSerializer::build(item.cast()?, config, definitions))
             .collect::<PyResult<_>>()?;
 
         let mut serializer_names = serializers.iter().map(|v| v.get_name()).collect::<Vec<_>>();
@@ -68,7 +68,7 @@ impl TypeSerializer for TupleSerializer {
         exclude: Option<&Bound<'_, PyAny>>,
         extra: &Extra,
     ) -> PyResult<PyObject> {
-        match value.downcast::<PyTuple>() {
+        match value.cast::<PyTuple>() {
             Ok(py_tuple) => {
                 let py = value.py();
 
@@ -95,7 +95,7 @@ impl TypeSerializer for TupleSerializer {
     }
 
     fn json_key<'a>(&self, key: &'a Bound<'_, PyAny>, extra: &Extra) -> PyResult<Cow<'a, str>> {
-        match key.downcast::<PyTuple>() {
+        match key.cast::<PyTuple>() {
             Ok(py_tuple) => {
                 let mut key_builder = KeyBuilder::new();
 
@@ -123,9 +123,9 @@ impl TypeSerializer for TupleSerializer {
         exclude: Option<&Bound<'_, PyAny>>,
         extra: &Extra,
     ) -> Result<S::Ok, S::Error> {
-        match value.downcast::<PyTuple>() {
+        match value.cast::<PyTuple>() {
             Ok(py_tuple) => {
-                let py_tuple = py_tuple.downcast::<PyTuple>().map_err(py_err_se_err)?;
+                let py_tuple = py_tuple.cast::<PyTuple>().map_err(py_err_se_err)?;
 
                 let n_items = py_tuple.len();
                 let mut seq = serializer.serialize_seq(Some(n_items))?;
diff --git a/src/serializers/type_serializers/typed_dict.rs b/src/serializers/type_serializers/typed_dict.rs
index 9b1751bc2..3a29ce601 100644
--- a/src/serializers/type_serializers/typed_dict.rs
+++ b/src/serializers/type_serializers/typed_dict.rs
@@ -48,9 +48,9 @@ impl BuildSerializer for TypedDictBuilder {
         };
 
         for (key, value) in fields_dict {
-            let key_py = key.downcast_into::<PyString>()?;
+            let key_py = key.cast_into::<PyString>()?;
             let key: String = key_py.extract()?;
-            let field_info = value.downcast()?;
+            let field_info = value.cast()?;
 
             let key_py: Py<PyString> = key_py.into();
             let required = field_info.get_as(intern!(py, "required"))?.unwrap_or(total);
diff --git a/src/serializers/type_serializers/union.rs b/src/serializers/type_serializers/union.rs
index 529a4e27e..1bbc18d43 100644
--- a/src/serializers/type_serializers/union.rs
+++ b/src/serializers/type_serializers/union.rs
@@ -36,11 +36,11 @@ impl BuildSerializer for UnionSerializer {
             .get_as_req::<Bound<'_, PyList>>(intern!(py, "choices"))?
             .iter()
             .map(|choice| {
-                let choice = match choice.downcast::<PyTuple>() {
+                let choice = match choice.cast::<PyTuple>() {
                     Ok(py_tuple) => py_tuple.get_item(0)?,
                     Err(_) => choice,
                 };
-                CombinedSerializer::build(choice.downcast()?, config, definitions)
+                CombinedSerializer::build(choice.cast()?, config, definitions)
             })
             .collect::<PyResult<_>>()?;
 
@@ -208,7 +208,7 @@ impl BuildSerializer for TaggedUnionSerializer {
         let mut choices = Vec::with_capacity(choices_map.len());
         for (idx, (choice_key, choice_schema)) in choices_map.into_iter().enumerate() {
-            let serializer = CombinedSerializer::build(choice_schema.downcast()?, config, definitions)?;
+            let serializer = CombinedSerializer::build(choice_schema.cast()?, config, definitions)?;
             choices.push(serializer);
             lookup.insert(choice_key.to_string(), idx);
         }
@@ -292,7 +292,7 @@ impl TaggedUnionSerializer {
         // we're pretty lax here, we allow either dict[key] or object.key, as we very well could
         // be doing a discriminator lookup on a typed dict, and there's no good way to check that
         // at this point. we could be more strict and only do this in lax mode...
-        if let Ok(value_dict) = value.downcast::<PyDict>() {
+        if let Ok(value_dict) = value.cast::<PyDict>() {
             lookup_key.py_get_dict_item(value_dict).ok().flatten()
         } else {
             lookup_key.simple_py_get_attr(value).ok().flatten()
diff --git a/src/tools.rs b/src/tools.rs
index edf9b6bac..b49fe0158 100644
--- a/src/tools.rs
+++ b/src/tools.rs
@@ -3,9 +3,9 @@ use core::fmt;
 
 use num_bigint::BigInt;
 use pyo3::exceptions::PyKeyError;
+use pyo3::intern;
 use pyo3::prelude::*;
 use pyo3::types::{PyDict, PyString};
-use pyo3::{intern, FromPyObject};
 
 use crate::input::Int;
 use jiter::{cached_py_string, StringCacheMode};
@@ -13,30 +13,30 @@ use jiter::{cached_py_string, StringCacheMode};
 pub trait SchemaDict<'py> {
     fn get_as<T>(&self, key: &Bound<'py, PyString>) -> PyResult<Option<T>>
     where
-        T: FromPyObject<'py>;
+        T: FromPyObjectOwned<'py>;
 
     fn get_as_req<T>(&self, key: &Bound<'py, PyString>) -> PyResult<T>
     where
-        T: FromPyObject<'py>;
+        T: FromPyObjectOwned<'py>;
 }
 
 impl<'py> SchemaDict<'py> for Bound<'py, PyDict> {
     fn get_as<T>(&self, key: &Bound<'py, PyString>) -> PyResult<Option<T>>
     where
-        T: FromPyObject<'py>,
+        T: FromPyObjectOwned<'py>,
     {
         match self.get_item(key)? {
-            Some(t) => t.extract().map(Some),
+            Some(t) => t.extract().map_err(Into::into).map(Some),
             None => Ok(None),
         }
     }
 
     fn get_as_req<T>(&self, key: &Bound<'py, PyString>) -> PyResult<T>
     where
-        T: FromPyObject<'py>,
+        T: FromPyObjectOwned<'py>,
     {
         match self.get_item(key)? {
-            Some(t) => t.extract(),
+            Some(t) => t.extract().map_err(Into::into),
             None => py_err!(PyKeyError; "{}", key),
         }
     }
@@ -45,7 +45,7 @@ impl<'py> SchemaDict<'py> for Bound<'py, PyDict> {
 impl<'py> SchemaDict<'py> for Option<&Bound<'py, PyDict>> {
     fn get_as<T>(&self, key: &Bound<'py, PyString>) -> PyResult<Option<T>>
     where
-        T: FromPyObject<'py>,
+        T: FromPyObjectOwned<'py>,
     {
         match self {
             Some(d) => d.get_as(key),
@@ -56,7 +56,7 @@ impl<'py> SchemaDict<'py> for Option<&Bound<'py, PyDict>> {
     #[cfg_attr(has_coverage_attribute, coverage(off))]
     fn get_as_req<T>(&self, key: &Bound<'py, PyString>) -> PyResult<T>
     where
-        T: FromPyObject<'py>,
+        T: FromPyObjectOwned<'py>,
     {
         match self {
             Some(d) => d.get_as_req(key),
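The `SchemaDict` changes above capture the core extraction pattern of this upgrade: the `T: FromPyObject<'py>` bound becomes `T: FromPyObjectOwned<'py>`, and the value returned by `extract()` now carries the extractor's associated error type, which has to be converted into `PyErr` explicitly. A minimal sketch of that pattern, assuming only the pyo3 0.27 API as exercised by this patch (the helper name `get_optional` is illustrative, not part of the codebase):

use pyo3::prelude::*;
use pyo3::types::{PyDict, PyString};

// Extract an optional, owned value from a dict entry, mirroring
// `SchemaDict::get_as` above. The `FromPyObjectOwned` bound and the
// `map_err(Into::into)` conversion to `PyErr` follow the diff exactly;
// nothing else here is taken from the codebase.
fn get_optional<'py, T>(dict: &Bound<'py, PyDict>, key: &Bound<'py, PyString>) -> PyResult<Option<T>>
where
    T: FromPyObjectOwned<'py>,
{
    match dict.get_item(key)? {
        Some(value) => value.extract().map_err(Into::into).map(Some),
        None => Ok(None),
    }
}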
diff --git a/src/url.rs b/src/url.rs
index c2d9c3579..bc7b70dd7 100644
--- a/src/url.rs
+++ b/src/url.rs
@@ -103,7 +103,7 @@ impl PyUrl {
             };
             ValidationError::from_val_error(py, name, InputType::Python, e, None, false, false)
         })?
-        .downcast_bound::<PyUrl>(py)?
+        .cast_bound::<PyUrl>(py)?
         .get()
         .clone(); // FIXME: avoid the clone, would need to make `validate` be aware of what URL subclass to create
         Ok(url_obj)
@@ -308,7 +308,7 @@ impl PyMultiHostUrl {
             };
             ValidationError::from_val_error(py, name, InputType::Python, e, None, false, false)
         })?
-        .downcast_bound::<PyMultiHostUrl>(py)?
+        .cast_bound::<PyMultiHostUrl>(py)?
         .get()
         .clone(); // FIXME: avoid the clone, would need to make `validate` be aware of what URL subclass to create
         Ok(url_obj)
@@ -521,10 +521,12 @@ impl UrlHostParts {
     }
 }
 
-impl FromPyObject<'_> for UrlHostParts {
-    fn extract_bound(ob: &Bound<'_, PyAny>) -> PyResult<Self> {
+impl FromPyObject<'_, '_> for UrlHostParts {
+    type Error = PyErr;
+
+    fn extract(ob: Borrowed<'_, '_, PyAny>) -> PyResult<Self> {
         let py = ob.py();
-        let dict = ob.downcast::<PyDict>()?;
+        let dict = ob.cast::<PyDict>()?;
         Ok(UrlHostParts {
             username: dict.get_as(intern!(py, "username"))?,
             password: dict.get_as(intern!(py, "password"))?,
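The `UrlHostParts` impl above shows the new shape of `FromPyObject` in the pyo3 version this patch targets: two lifetime parameters, an associated `Error` type, and an `extract` method taking `Borrowed` instead of `&Bound`. A minimal sketch of the same shape, assuming only what the diff itself shows (the `HostFlags` type and its field are hypothetical):

use pyo3::prelude::*;
use pyo3::types::PyDict;
use pyo3::Borrowed;

// Hypothetical type used only to illustrate the trait shape rewritten above.
struct HostFlags {
    has_username: bool,
}

impl FromPyObject<'_, '_> for HostFlags {
    type Error = PyErr;

    fn extract(ob: Borrowed<'_, '_, PyAny>) -> PyResult<Self> {
        // `cast` replaces `downcast`, as throughout this patch.
        let dict = ob.cast::<PyDict>()?;
        Ok(HostFlags {
            has_username: dict.contains("username")?,
        })
    }
}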
diff --git a/src/validators/arguments.rs b/src/validators/arguments.rs
index ad13e6ade..6481f211e 100644
--- a/src/validators/arguments.rs
+++ b/src/validators/arguments.rs
@@ -80,7 +80,7 @@ impl BuildValidator for ArgumentsValidator {
         let mut had_keyword_only = false;
 
         for (arg_index, arg) in arguments_schema.iter().enumerate() {
-            let arg = arg.downcast::<PyDict>()?;
+            let arg = arg.cast::<PyDict>()?;
 
             let py_name: Bound<'_, PyString> = arg.get_as_req(intern!(py, "name"))?;
             let name = py_name.to_string();
@@ -385,7 +385,7 @@ impl Validator for ArgumentsValidator {
                     .validate(py, remaining_kwargs.as_any(), state)
                 {
                     Ok(value) => {
-                        output_kwargs.update(value.downcast_bound::<PyDict>(py).unwrap().as_mapping())?;
+                        output_kwargs.update(value.cast_bound::<PyDict>(py).unwrap().as_mapping())?;
                     }
                     Err(ValError::LineErrors(line_errors)) => {
                         errors.extend(line_errors);
diff --git a/src/validators/arguments_v3.rs b/src/validators/arguments_v3.rs
index 8e5781051..d7d8bbe90 100644
--- a/src/validators/arguments_v3.rs
+++ b/src/validators/arguments_v3.rs
@@ -97,7 +97,7 @@ impl BuildValidator for ArgumentsV3Validator {
         let mut names: AHashSet<String> = AHashSet::with_capacity(arguments_schema.len());
 
         for arg in arguments_schema.iter() {
-            let arg = arg.downcast::<PyDict>()?;
+            let arg = arg.cast::<PyDict>()?;
 
             let py_name: Bound<'_, PyString> = arg.get_as_req(intern!(py, "name"))?;
             let name = py_name.to_string();
@@ -370,7 +370,7 @@ impl ArgumentsV3Validator {
                         ParameterMode::VarKwargsUnpackedTypedDict => {
                             match parameter.validator.validate(py, dict_value.borrow_input(), state) {
                                 Ok(value) => {
-                                    output_kwargs.update(value.downcast_bound::<PyDict>(py).unwrap().as_mapping())?;
+                                    output_kwargs.update(value.cast_bound::<PyDict>(py).unwrap().as_mapping())?;
                                 }
                                 Err(ValError::LineErrors(line_errors)) => {
                                     errors.extend(
@@ -421,7 +421,7 @@ impl ArgumentsV3Validator {
                         ParameterMode::VarKwargsUnpackedTypedDict => {
                             match parameter.validator.validate(py, PyDict::new(py).borrow_input(), state) {
                                 Ok(value) => {
-                                    output_kwargs.update(value.downcast_bound::<PyDict>(py).unwrap().as_mapping())?;
+                                    output_kwargs.update(value.cast_bound::<PyDict>(py).unwrap().as_mapping())?;
                                 }
                                 Err(ValError::LineErrors(line_errors)) => {
                                     errors.extend(
@@ -742,7 +742,7 @@ impl ArgumentsV3Validator {
                     .validate(py, remaining_kwargs.as_any(), state)
                 {
                     Ok(value) => {
-                        output_kwargs.update(value.downcast_bound::<PyDict>(py).unwrap().as_mapping())?;
+                        output_kwargs.update(value.cast_bound::<PyDict>(py).unwrap().as_mapping())?;
                     }
                     Err(ValError::LineErrors(line_errors)) => {
                         errors.extend(line_errors);
diff --git a/src/validators/call.rs b/src/validators/call.rs
index 48509a92f..4b8d2b177 100644
--- a/src/validators/call.rs
+++ b/src/validators/call.rs
@@ -87,7 +87,7 @@ impl Validator for CallValidator {
         let return_value = if let Ok((args, kwargs)) = args.extract::<(Bound<'_, PyTuple>, Bound<'_, PyDict>)>() {
             self.function.call(py, args, Some(&kwargs))?
-        } else if let Ok(kwargs) = args.downcast::<PyDict>() {
+        } else if let Ok(kwargs) = args.cast::<PyDict>() {
             self.function.call(py, (), Some(kwargs))?
         } else {
             let msg = "Arguments validator should return a tuple of (args, kwargs) or a dict of kwargs";
diff --git a/src/validators/complex.rs b/src/validators/complex.rs
index 2e6cc3c50..b39b2a16c 100644
--- a/src/validators/complex.rs
+++ b/src/validators/complex.rs
@@ -79,6 +79,6 @@ pub(crate) fn string_to_complex<'py>(
             ValError::InternalErr(err)
         }
     })?
-    .downcast::<PyComplex>()?
+    .cast::<PyComplex>()?
     .to_owned())
 }
diff --git a/src/validators/dataclass.rs b/src/validators/dataclass.rs
index 8d3f2adf8..8f445be59 100644
--- a/src/validators/dataclass.rs
+++ b/src/validators/dataclass.rs
@@ -72,7 +72,7 @@ impl BuildValidator for DataclassArgsValidator {
         let mut positional_count = 0;
 
         for field in fields_schema {
-            let field = field.downcast::<PyDict>()?;
+            let field = field.cast::<PyDict>()?;
 
             let name_py: Bound<'_, PyString> = field.get_as_req(intern!(py, "name"))?;
             let name: String = name_py.extract()?;
@@ -386,7 +386,7 @@ impl Validator for DataclassArgsValidator {
         field_value: &Bound<'py, PyAny>,
         state: &mut ValidationState<'_, 'py>,
     ) -> ValResult<PyObject> {
-        let dict = obj.downcast::<PyDict>()?;
+        let dict = obj.cast::<PyDict>()?;
 
         let extra_behavior = state.extra_behavior_or(self.extra_behavior);
 
         let ok = |output: Py<PyAny>| {
@@ -666,7 +666,7 @@ impl DataclassValidator {
     ) -> ValResult<()> {
         let (dc_dict, post_init_kwargs): (Bound<'_, PyAny>, Bound<'_, PyAny>) = val_output.extract(py)?;
         if self.slots {
-            let dc_dict = dc_dict.downcast::<PyDict>()?;
+            let dc_dict = dc_dict.cast::<PyDict>()?;
             for (key, value) in dc_dict.iter() {
                 force_setattr(py, dc, key, value)?;
             }
@@ -679,7 +679,7 @@ impl DataclassValidator {
             let r = if PyAnyMethods::is_none(&post_init_kwargs) {
                 dc.call_method0(post_init)
             } else {
-                let args = post_init_kwargs.downcast::<PyTuple>()?;
+                let args = post_init_kwargs.cast::<PyTuple>()?;
                 dc.call_method1(post_init, args.clone()) // FIXME should not need clone here
             };
             r.map_err(|e| convert_err(py, e, input))?;
diff --git a/src/validators/datetime.rs b/src/validators/datetime.rs
index 4c6810d4c..8761350c6 100644
--- a/src/validators/datetime.rs
+++ b/src/validators/datetime.rs
@@ -305,7 +305,7 @@ impl TZConstraint {
         let Some(tz_constraint) = schema.get_item(intern!(py, "tz_constraint"))? else {
             return Ok(None);
         };
-        if let Ok(s) = tz_constraint.downcast::<PyString>() {
+        if let Ok(s) = tz_constraint.cast::<PyString>() {
             let s = s.to_str()?;
             Ok(Some(Self::from_str(s)?))
         } else {
diff --git a/src/validators/is_instance.rs b/src/validators/is_instance.rs
index 62d995b54..30bcf8b86 100644
--- a/src/validators/is_instance.rs
+++ b/src/validators/is_instance.rs
@@ -86,7 +86,7 @@ impl Validator for IsInstanceValidator {
 pub fn class_repr(schema: &Bound<'_, PyDict>, class: &Bound<'_, PyAny>) -> PyResult<String> {
     match schema.get_as(intern!(schema.py(), "cls_repr"))? {
         Some(s) => Ok(s),
-        None => match class.downcast::<PyType>() {
+        None => match class.cast::<PyType>() {
             Ok(t) => Ok(t.qualname()?.to_string()),
             Err(_) => Ok(class.repr()?.extract()?),
         },
diff --git a/src/validators/is_subclass.rs b/src/validators/is_subclass.rs
index 6e6b8598b..acee58da5 100644
--- a/src/validators/is_subclass.rs
+++ b/src/validators/is_subclass.rs
@@ -61,7 +61,7 @@ impl Validator for IsSubclassValidator {
                 input,
             ));
         };
-        match obj.downcast::<PyType>() {
+        match obj.cast::<PyType>() {
             Ok(py_type) if py_type.is_subclass(self.class.bind(py))? => Ok(obj.clone().unbind()),
             _ => Err(ValError::new(
                 ErrorType::IsSubclassOf {
diff --git a/src/validators/mod.rs b/src/validators/mod.rs
index adcf1ba55..48ba6b709 100644
--- a/src/validators/mod.rs
+++ b/src/validators/mod.rs
@@ -552,7 +552,7 @@ fn build_validator_inner(
     definitions: &mut DefinitionsBuilder<CombinedValidator>,
     use_prebuilt: bool,
 ) -> PyResult<CombinedValidator> {
-    let dict = schema.downcast::<PyDict>()?;
+    let dict = schema.cast::<PyDict>()?;
     let py = schema.py();
     let type_: Bound<'_, PyString> = dict.get_as_req(intern!(py, "type"))?;
     let type_ = type_.to_str()?;
diff --git a/src/validators/model.rs b/src/validators/model.rs
index 5f7a5970d..a6607a908 100644
--- a/src/validators/model.rs
+++ b/src/validators/model.rs
@@ -166,8 +166,8 @@ impl Validator for ModelValidator {
         let inner_input = if PyAnyMethods::is_none(&model_extra) {
             dict
         } else {
-            let full_model_dict = dict.downcast::<PyDict>()?.copy()?;
-            full_model_dict.update(model_extra.downcast()?)?;
+            let full_model_dict = dict.cast::<PyDict>()?.copy()?;
+            full_model_dict.update(model_extra.cast()?)?;
             full_model_dict.into_any()
         };
         self.validate_construct(py, &inner_input, Some(&fields_set), state)
@@ -210,10 +210,10 @@ impl Validator for ModelValidator {
                 Ok(model.into_py_any(py)?)
             };
         }
-        let old_dict = model.getattr(intern!(py, DUNDER_DICT))?.downcast_into::<PyDict>()?;
+        let old_dict = model.getattr(intern!(py, DUNDER_DICT))?.cast_into::<PyDict>()?;
         let input_dict = old_dict.copy()?;
-        if let Ok(old_extra) = model.getattr(intern!(py, DUNDER_MODEL_EXTRA_KEY))?.downcast::<PyDict>() {
+        if let Ok(old_extra) = model.getattr(intern!(py, DUNDER_MODEL_EXTRA_KEY))?.cast::<PyDict>() {
             input_dict.update(old_extra.as_mapping())?;
         }
         input_dict.set_item(field_name, field_value)?;
@@ -229,7 +229,7 @@
         ) = output.extract(py)?;
 
         if let Ok(fields_set) = model.getattr(intern!(py, DUNDER_FIELDS_SET_KEY)) {
-            let fields_set = fields_set.downcast::<PySet>()?;
+            let fields_set = fields_set.cast::<PySet>()?;
             for field_name in validated_fields_set {
                 fields_set.add(field_name)?;
             }
diff --git a/src/validators/model_fields.rs b/src/validators/model_fields.rs
index 17cbbf542..408021247 100644
--- a/src/validators/model_fields.rs
+++ b/src/validators/model_fields.rs
@@ -78,7 +78,7 @@ impl BuildValidator for ModelFieldsValidator {
         let mut fields: Vec<Field> = Vec::with_capacity(fields_dict.len());
 
         for (key, value) in fields_dict {
-            let field_info = value.downcast::<PyDict>()?;
+            let field_info = value.cast::<PyDict>()?;
             let field_name_py: Bound<'_, PyString> = key.extract()?;
             let field_name = field_name_py.to_str()?;
@@ -319,7 +319,7 @@ impl Validator for ModelFieldsValidator {
                     let py_key = match self.extras_keys_validator {
                         Some(validator) => {
                             match validator.validate(self.py, raw_key.borrow_input(), self.state) {
-                                Ok(value) => value.downcast_bound::<PyString>(self.py)?.clone(),
+                                Ok(value) => value.cast_bound::<PyString>(self.py)?.clone(),
                                 Err(ValError::LineErrors(line_errors)) => {
                                     for err in line_errors {
                                         self.errors.push(err.with_outer_location(raw_key.clone()));
@@ -396,7 +396,7 @@ impl Validator for ModelFieldsValidator {
         field_value: &Bound<'py, PyAny>,
         state: &mut ValidationState<'_, 'py>,
     ) -> ValResult<PyObject> {
-        let dict = obj.downcast::<PyDict>()?;
+        let dict = obj.cast::<PyDict>()?;
 
         let extra_behavior = state.extra_behavior_or(self.extra_behavior);
 
         let get_updated_dict = |output: &Bound<'py, PyAny>| {
diff --git a/src/validators/typed_dict.rs b/src/validators/typed_dict.rs
index 26af98fd5..b66cea48d 100644
--- a/src/validators/typed_dict.rs
+++ b/src/validators/typed_dict.rs
@@ -81,8 +81,8 @@ impl BuildValidator for TypedDictValidator {
         };
 
         for (key, value) in fields_dict {
-            let field_info = value.downcast::<PyDict>()?;
-            let field_name_py = key.downcast_into::<PyString>()?;
+            let field_info = value.cast::<PyDict>()?;
+            let field_name_py = key.cast_into::<PyString>()?;
             let field_name = field_name_py.to_str()?;
 
             let schema = field_info.get_as_req(intern!(py, "schema"))?;
diff --git a/src/validators/union.rs b/src/validators/union.rs
index 3b88bbc1d..e4b074075 100644
--- a/src/validators/union.rs
+++ b/src/validators/union.rs
@@ -61,7 +61,7 @@ impl BuildValidator for UnionValidator {
             .iter()
             .map(|choice| {
                 let mut label: Option<String> = None;
-                let choice = match choice.downcast::<PyTuple>() {
+                let choice = match choice.cast::<PyTuple>() {
                     Ok(py_tuple) => {
                         let choice = py_tuple.get_item(0)?;
                         label = Some(py_tuple.get_item(1)?.to_string());
diff --git a/src/validators/uuid.rs b/src/validators/uuid.rs
index 25b80c55a..62b32b9be 100644
--- a/src/validators/uuid.rs
+++ b/src/validators/uuid.rs
@@ -27,14 +27,8 @@ const UUID_IS_SAFE: &str = "is_safe";
 
 static UUID_TYPE: PyOnceLock<Py<PyType>> = PyOnceLock::new();
 
-fn import_type(py: Python, module: &str, attr: &str) -> PyResult<Py<PyType>> {
-    py.import(module)?.getattr(attr)?.extract()
-}
-
 fn get_uuid_type(py: Python<'_>) -> PyResult<&Bound<'_, PyType>> {
-    Ok(UUID_TYPE
-        .get_or_init(py, || import_type(py, "uuid", "UUID").unwrap())
-        .bind(py))
+    UUID_TYPE.import(py, "uuid", "UUID")
 }
 
 #[derive(Debug, Clone, Copy)]
diff --git a/tests/test.rs b/tests/test.rs
index 8a3f1d5dc..38edc9179 100644
--- a/tests/test.rs
+++ b/tests/test.rs
@@ -133,7 +133,7 @@ dump_json_input_2 = {'a': 'something'}
         .get_item("schema")
         .unwrap()
         .unwrap()
-        .downcast_into::<PyDict>()
+        .cast_into::<PyDict>()
         .unwrap();
     let dump_json_input_1 = locals.get_item("dump_json_input_1").unwrap().unwrap();
     let dump_json_input_2 = locals.get_item("dump_json_input_2").unwrap().unwrap();
diff --git a/tests/test_errors.py b/tests/test_errors.py
index 249f18e7e..4e1dfe2d0 100644
--- a/tests/test_errors.py
+++ b/tests/test_errors.py
@@ -94,10 +94,10 @@ def my_function(input_value, info):
 
     assert str(exc_info.value) == (
         '1 validation error for function-plain[my_function()]\n'
-        " (error rendering message: TypeError: 'tuple' object cannot be converted to 'PyString') "
+        " (error rendering message: TypeError: 'tuple' object cannot be cast as 'str') "
         '[type=my_error, input_value=42, input_type=int]'
     )
 
-    with pytest.raises(TypeError, match="'tuple' object cannot be converted to 'PyString'"):
+    with pytest.raises(TypeError, match="'tuple' object cannot be cast as 'str'"):
         exc_info.value.errors(include_url=False)
 
 
@@ -107,7 +107,7 @@ def f(input_value, info):
 
     v = SchemaValidator(core_schema.with_info_plain_validator_function(f))
 
-    with pytest.raises(TypeError, match="argument 'context': 'list' object cannot be converted to 'PyDict'"):
+    with pytest.raises(TypeError, match="argument 'context': 'list' object cannot be cast as 'dict'"):
         v.validate_python(42)
diff --git a/tests/validators/test_is_subclass.py b/tests/validators/test_is_subclass.py
index 43e65b6b1..bbe50d527 100644
--- a/tests/validators/test_is_subclass.py
+++ b/tests/validators/test_is_subclass.py
@@ -58,7 +58,7 @@ def test_not_parent():
 
 
 def test_invalid_type():
-    with pytest.raises(SchemaError, match="TypeError: 'Foo' object cannot be converted to 'PyType"):
+    with pytest.raises(SchemaError, match="TypeError: 'Foo' object cannot be cast as 'type'"):
         SchemaValidator(core_schema.is_subclass_schema(Foo()))
diff --git a/tests/validators/test_model.py b/tests/validators/test_model.py
index 179b75304..411b529eb 100644
--- a/tests/validators/test_model.py
+++ b/tests/validators/test_model.py
@@ -475,7 +475,7 @@ def f(input_value, info):
 
 
 def test_model_class_not_type():
-    with pytest.raises(SchemaError, match=re.escape("TypeError: 'int' object cannot be converted to 'PyType'")):
+    with pytest.raises(SchemaError, match=re.escape("TypeError: 'int' object cannot be cast as 'type'")):
         SchemaValidator(
             schema=core_schema.model_schema(
                 cls=123,
diff --git a/tests/validators/test_model_fields.py b/tests/validators/test_model_fields.py
index fc80feb78..f221bce01 100644
--- a/tests/validators/test_model_fields.py
+++ b/tests/validators/test_model_fields.py
@@ -743,7 +743,7 @@ def test_paths_allow_by_name(py_and_json: PyAndJson, input_value):
     [
         ({'validation_alias': []}, 'Lookup paths should have at least one element'),
         ({'validation_alias': [[]]}, 'Each alias path should have at least one element'),
-        ({'validation_alias': [123]}, "TypeError: 'int' object cannot be converted to 'PyList'"),
+        ({'validation_alias': [123]}, "TypeError: 'int' object cannot be cast as 'list'"),
         ({'validation_alias': [[1, 'foo']]}, 'TypeError: The first item in an alias path should be a string'),
     ],
     ids=repr,
diff --git a/tests/validators/test_typed_dict.py b/tests/validators/test_typed_dict.py
index 88c05c616..d96281560 100644
--- a/tests/validators/test_typed_dict.py
+++ b/tests/validators/test_typed_dict.py
@@ -629,7 +629,7 @@ def test_paths_allow_by_name(py_and_json: PyAndJson, input_value):
     [
         ({'validation_alias': []}, 'Lookup paths should have at least one element'),
         ({'validation_alias': [[]]}, 'Each alias path should have at least one element'),
-        ({'validation_alias': [123]}, "TypeError: 'int' object cannot be converted to 'PyList'"),
+        ({'validation_alias': [123]}, "TypeError: 'int' object cannot be cast as 'list'"),
        ({'validation_alias': [[1, 'foo']]}, 'TypeError: The first item in an alias path should be a string'),
     ],
     ids=repr,
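Beyond the dependency bumps, the Rust-side churn in this diff is dominated by mechanical renames: `downcast`/`downcast_into`/`downcast_bound` become `cast`/`cast_into`/`cast_bound`, and `DowncastError`/`DowncastIntoError` become `CastError`/`CastIntoError`. The Python-visible effect is the new `TypeError` wording checked by the test updates above ("cannot be cast as 'dict'" instead of "cannot be converted to 'PyDict'"). A minimal before/after sketch, assuming the pyo3 0.27 API as used in this patch (`require_dict` is an illustrative helper, not part of the codebase):

use pyo3::prelude::*;
use pyo3::types::PyDict;

// pyo3 0.26: value.downcast::<PyDict>() returning Result<_, DowncastError>
// pyo3 0.27 (this patch): value.cast::<PyDict>() returning Result<_, CastError>
fn require_dict<'py>(value: &Bound<'py, PyAny>) -> PyResult<Bound<'py, PyDict>> {
    let dict = value.cast::<PyDict>()?; // `?` converts the cast error into PyErr
    Ok(dict.clone())
}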