diff --git a/Cargo.lock b/Cargo.lock
index 1e09a9ea..e8716a71 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -332,6 +332,33 @@ dependencies = [
  "windows-targets",
 ]
 
+[[package]]
+name = "ciborium"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "42e69ffd6f0917f5c029256a24d0161db17cea3997d185db0d35926308770f0e"
+dependencies = [
+ "ciborium-io",
+ "ciborium-ll",
+ "serde",
+]
+
+[[package]]
+name = "ciborium-io"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "05afea1e0a06c9be33d539b876f1ce3692f4afea2cb41f740e7743225ed1c757"
+
+[[package]]
+name = "ciborium-ll"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "57663b653d948a338bfb3eeba9bb2fd5fcfaecb9e199e87e1eda4d9e8b240fd9"
+dependencies = [
+ "ciborium-io",
+ "half",
+]
+
 [[package]]
 name = "cipher"
 version = "0.4.4"
@@ -424,6 +451,12 @@ dependencies = [
  "libc",
 ]
 
+[[package]]
+name = "crunchy"
+version = "0.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5"
+
 [[package]]
 name = "crypto-bigint"
 version = "0.5.3"
@@ -851,6 +884,17 @@ dependencies = [
  "tracing",
 ]
 
+[[package]]
+name = "half"
+version = "2.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6ea2d84b969582b4b1864a92dc5d27cd2b77b622a8d79306834f1be5ba20d84b"
+dependencies = [
+ "cfg-if",
+ "crunchy",
+ "zerocopy",
+]
+
 [[package]]
 name = "hashbrown"
 version = "0.12.3"
@@ -1467,9 +1511,9 @@ dependencies = [
 
 [[package]]
 name = "proc-macro2"
-version = "1.0.69"
+version = "1.0.103"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "134c189feb4956b20f6f547d2cf727d4c0fe06722b20a0eec87ed445a97f92da"
+checksum = "5ee95bc4ef87b8d5ba32e8b7714ccc834865276eab0aed5c9958d00ec45f49e8"
 dependencies = [
  "unicode-ident",
 ]
@@ -1530,9 +1574,9 @@ dependencies = [
 
 [[package]]
 name = "quote"
-version = "1.0.33"
+version = "1.0.42"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae"
+checksum = "a338cc41d27e6cc6dce6cefc13a0729dfbb81c262b1f519331575dd80ef3067f"
 dependencies = [
  "proc-macro2",
 ]
@@ -1578,6 +1622,7 @@ dependencies = [
  "bcder",
  "bytes",
  "chrono",
+ "ciborium",
  "clap",
  "clio",
  "data-url",
@@ -2028,9 +2073,9 @@ checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc"
 
 [[package]]
 name = "syn"
-version = "2.0.38"
+version = "2.0.111"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e96b79aaa137db8f61e26363a0c9b47d8b4ec75da28b7d1d614c2303e232408b"
+checksum = "390cc9a294ab71bdb1aa2e99d13be9c753cd2d7bd6560c77118597410c4d2e87"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -2667,6 +2712,26 @@ dependencies = [
  "thiserror",
 ]
 
+[[package]]
+name = "zerocopy"
+version = "0.8.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fd74ec98b9250adb3ca554bdde269adf631549f51d8a8f8f0a10b50f1cb298c3"
+dependencies = [
+ "zerocopy-derive",
+]
+
+[[package]]
+name = "zerocopy-derive"
+version = "0.8.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d8a8d209fdf45cf5138cbb5a506f6b52522a25afccc534d1475dad8e31105c6a"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
 [[package]]
 name = "zeroize"
 version = "1.6.0"
diff --git a/Cargo.toml b/Cargo.toml
index 712e36d6..91b0c7be 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -60,6 +60,7 @@ aes-gcm = "0.10.3"
 dyn-clone = "1.0.17"
 hex = "0.4.3"
 serde_with = { version = "3.11.0", features = ["base64"] }
+ciborium = "0.2.2"
 
 [build-dependencies]
 prost-build = "0.12.1"
diff --git a/src/cluster_crypto/cert_key_pair.rs b/src/cluster_crypto/cert_key_pair.rs
index 37575d44..f36d055f 100644
--- a/src/cluster_crypto/cert_key_pair.rs
+++ b/src/cluster_crypto/cert_key_pair.rs
@@ -357,7 +357,8 @@ impl CertKeyPair {
                     .as_bytes()
                     .to_vec(),
             )
-            .await;
+            .await
+            .context("putting in etcd")?;
 
         Ok(())
     }
diff --git a/src/cluster_crypto/distributed_jwt.rs b/src/cluster_crypto/distributed_jwt.rs
index bd899bb6..515414ce 100644
--- a/src/cluster_crypto/distributed_jwt.rs
+++ b/src/cluster_crypto/distributed_jwt.rs
@@ -86,7 +86,8 @@ impl DistributedJwt {
                 &k8slocation.resource_location.as_etcd_key(),
                 serde_json::to_string(&resource)?.as_bytes().to_vec(),
             )
-            .await;
+            .await
+            .context("putting in etcd")?;
 
         Ok(())
     }
diff --git a/src/cluster_crypto/distributed_private_key.rs b/src/cluster_crypto/distributed_private_key.rs
index 9166491a..66cbd1ea 100644
--- a/src/cluster_crypto/distributed_private_key.rs
+++ b/src/cluster_crypto/distributed_private_key.rs
@@ -96,7 +96,8 @@ impl DistributedPrivateKey {
                     .as_bytes()
                     .to_vec(),
             )
-            .await;
+            .await
+            .context("putting in etcd")?;
 
         Ok(())
     }
diff --git a/src/cluster_crypto/distributed_public_key.rs b/src/cluster_crypto/distributed_public_key.rs
index bc378654..1d039061 100644
--- a/src/cluster_crypto/distributed_public_key.rs
+++ b/src/cluster_crypto/distributed_public_key.rs
@@ -129,7 +129,8 @@ impl DistributedPublicKey {
                     .as_bytes()
                     .to_vec(),
             )
-            .await;
+            .await
+            .context("putting in etcd")?;
 
         Ok(())
     }
diff --git a/src/etcd_encoding.rs b/src/etcd_encoding.rs
index 232965c0..c9660484 100644
--- a/src/etcd_encoding.rs
+++ b/src/etcd_encoding.rs
@@ -63,9 +63,26 @@ k8s_type!(ValidatingWebhookConfigurationWithMeta, ValidatingWebhookConfiguration
 k8s_type!(MutatingWebhookConfigurationWithMeta, MutatingWebhookConfiguration);
 k8s_type!(OAuthClientWithMeta, OAuthClient);
 
-pub(crate) async fn decode(data: &[u8]) -> Result<Vec<u8>> {
+mod k8s_cbor;
+
+#[derive(Clone)]
+pub(crate) enum Encoding {
+    Protobuf,
+    Cbor,
+    Json,
+}
+
+pub(crate) async fn decode(data: &[u8]) -> Result<(Vec<u8>, Encoding)> {
     if !data.starts_with("k8s\x00".as_bytes()) {
-        return Ok(data.to_vec());
+        // k8s uses CBOR with the self-describing tag 55799, we can use its bytes to detect CBOR
+        if data.starts_with([0xd9, 0xd9, 0xf7].as_ref()) {
+            // It's CBOR, just convert to JSON
+            let json_value = k8s_cbor::k8s_cbor_bytes_to_json(data).context("converting CBOR to JSON")?;
+            return Ok((serde_json::to_vec(&json_value)?, Encoding::Cbor));
+        }
+
+        // Not CBOR, not protobuf, it's probably just raw JSON, return as-is
+        return Ok((data.to_vec(), Encoding::Json));
     }
 
     let data = &data[4..];
@@ -79,7 +96,7 @@ pub(crate) async fn decode(data: &[u8]) -> Result<Vec<u8>> {
         .context("missing kind")?
         .as_str();
 
-    Ok(match kind {
+    let decoded_data = match kind {
         "Route" => serde_json::to_vec(&RouteWithMeta::try_from(unknown)?)?,
         "Deployment" => serde_json::to_vec(&DeploymentWithMeta::try_from(unknown)?)?,
         "ControllerRevision" => serde_json::to_vec(&ControllerRevisionWithMeta::try_from(unknown)?)?,
@@ -95,11 +112,20 @@ pub(crate) async fn decode(data: &[u8]) -> Result<Vec<u8>> {
         "MutatingWebhookConfiguration" => serde_json::to_vec(&MutatingWebhookConfigurationWithMeta::try_from(unknown)?)?,
         "OAuthClient" => serde_json::to_vec(&OAuthClientWithMeta::try_from(unknown)?)?,
         _ => bail!("unknown kind {}", kind),
-    })
+    };
+
+    Ok((decoded_data, Encoding::Protobuf))
 }
 
-pub(crate) async fn encode(data: &[u8]) -> Result<Vec<u8>> {
+pub(crate) async fn encode(data: &[u8], encoding: Encoding) -> Result<Vec<u8>> {
     let value: Value = serde_json::from_slice(data)?;
+
+    if matches!(encoding, Encoding::Cbor) {
+        return k8s_cbor::json_to_k8s_cbor_bytes(value).context("converting JSON to CBOR");
+    }
+
+    // If kind is a known protobuf kind, write it back as protobuf, otherwise return raw JSON
+    // TODO: Just look at the new encoding param?
     let kind = value
         .pointer("/kind")
         .context("missing kind")?
diff --git a/src/etcd_encoding/k8s_cbor.rs b/src/etcd_encoding/k8s_cbor.rs
new file mode 100644
index 00000000..7beeb37d
--- /dev/null
+++ b/src/etcd_encoding/k8s_cbor.rs
@@ -0,0 +1,104 @@
+use anyhow::{bail, Context, Result};
+use ciborium::value::Value as CborValue;
+use serde_json::{value::Number as JsonNumber, Value as JsonValue};
+
+const SELF_DESCRIBING_CBOR_TAG: u64 = 55799;
+
+fn cbor_to_json(cbor: CborValue) -> Result<JsonValue> {
+    Ok(match cbor {
+        CborValue::Null => JsonValue::Null,
+        CborValue::Bool(boolean) => JsonValue::Bool(boolean),
+        CborValue::Text(string) => JsonValue::String(string),
+        CborValue::Integer(int) => JsonValue::Number({
+            let int: i128 = int.into();
+            if let Ok(int) = u64::try_from(int) {
+                JsonNumber::from(int)
+            } else if let Ok(int) = i64::try_from(int) {
+                JsonNumber::from(int)
+            } else {
+                JsonNumber::from_f64(int as f64).context("Integer not JSON compatible")?
+            }
+        }),
+        CborValue::Float(float) => JsonValue::Number(JsonNumber::from_f64(float).context("Float not JSON compatible")?),
+        CborValue::Array(vec) => JsonValue::Array(vec.into_iter().map(cbor_to_json).collect::<Result<Vec<_>>>()?),
+        CborValue::Map(map) => JsonValue::Object(serde_json::Map::from_iter(
+            map.into_iter()
+                .map(|(k, v)| {
+                    let key_str = match k {
+                        CborValue::Bytes(bytes) => String::from_utf8(bytes).context("Invalid UTF-8 in CBOR map key")?,
+                        CborValue::Text(text) => text,
+                        _ => bail!("Unsupported CBOR map key type {:?}", k),
+                    };
+                    Ok((key_str, cbor_to_json(v)?))
+                })
+                .collect::<Result<Vec<_>>>()?,
+        )),
+        // TODO: Handle proposed-encoding tags for CBOR bytes? https://github.com/kubernetes/kubernetes/pull/125419
+        // It seems that in a typical k8s cluster these are not used anywhere (secrets are
+        // protobuf, and they're pretty much the only place where raw bytes are used in
+        // values), so I don't have an example to test that implementation on. For now we will
+        // crash on unhandled tags below to be safe.
+        CborValue::Bytes(vec) => JsonValue::String(String::from_utf8(vec).context("Invalid UTF-8 in CBOR bytes")?),
+        CborValue::Tag(value, _tag) => unimplemented!("Unsupported CBOR tag {:?}", value),
+        _ => unimplemented!("Unsupported CBOR type {:?}", cbor),
+    })
+}
+
+fn json_to_cbor(json: JsonValue) -> Result<CborValue> {
+    Ok(match json {
+        JsonValue::Null => CborValue::Null,
+        JsonValue::Bool(boolean) => CborValue::Bool(boolean),
+        JsonValue::String(string) => CborValue::Bytes(string.into_bytes()),
+        JsonValue::Number(number) => {
+            if let Some(int) = number.as_i64() {
+                CborValue::Integer(int.into())
+            } else if let Some(uint) = number.as_u64() {
+                CborValue::Integer(uint.into())
+            } else if let Some(float) = number.as_f64() {
+                CborValue::Float(float)
+            } else {
+                bail!("Unsupported number type")
+            }
+        }
+        JsonValue::Array(arr) => CborValue::Array(arr.into_iter().map(json_to_cbor).collect::<Result<Vec<_>>>()?),
+        JsonValue::Object(map) => {
+            // Fallback for regular JSON objects (shouldn't happen in our flow)
+            let map_entries: Vec<(CborValue, CborValue)> = map
+                .into_iter()
+                .map(|(k, v)| Ok((CborValue::Bytes(k.into_bytes()), json_to_cbor(v)?)))
+                .collect::<Result<Vec<_>>>()?;
+            CborValue::Map(map_entries)
+        }
+    })
+}
+
+pub(crate) fn k8s_cbor_bytes_to_json(cbor_bytes: &[u8]) -> Result<JsonValue> {
+    let v: CborValue = ciborium::de::from_reader(cbor_bytes)?;
+
+    let (v, had_self_describing_tag) = match v {
+        CborValue::Tag(value, contents) => match value {
+            SELF_DESCRIBING_CBOR_TAG => {
+                // Self-describing CBOR tag, unwrap the contents
+                (*contents, true)
+            }
+            _ => panic!("Unsupported CBOR tag {}", value),
+        },
+        // We expected a self-describing CBOR tag at the root. Of course we could just proceed
+        // as is (since it's just raw CBOR) but it's a bit fishy, so just bail
+        _ => bail!("CBOR data that does not start with self-describing tag is not supported"),
+    };
+
+    cbor_to_json(v)
+}
+
+pub(crate) fn json_to_k8s_cbor_bytes(json: JsonValue) -> Result<Vec<u8>> {
+    let cbor = json_to_cbor(json)?;
+
+    // Put back the self-describing CBOR tag that we stripped
+    let tagged_cbor = CborValue::Tag(SELF_DESCRIBING_CBOR_TAG, Box::new(cbor));
+
+    let mut bytes = Vec::new();
+    ciborium::ser::into_writer(&tagged_cbor, &mut bytes)?;
+
+    Ok(bytes)
+}
diff --git a/src/k8s_etcd.rs b/src/k8s_etcd.rs
index d84afdc5..390b0972 100644
--- a/src/k8s_etcd.rs
+++ b/src/k8s_etcd.rs
@@ -3,6 +3,7 @@ use crate::encrypt::ResourceTransformers;
 use crate::etcd_encoding;
 use anyhow::{bail, ensure, Context, Result};
 use etcd_client::{Client as EtcdClient, GetOptions};
+use etcd_encoding::Encoding;
 use futures_util::future::join_all;
 use serde_json::Value;
 use std::collections::{HashMap, HashSet};
@@ -22,7 +23,7 @@ pub(crate) struct EtcdResult {
 /// have to go through etcd for every single edit.
 pub(crate) struct InMemoryK8sEtcd {
     pub(crate) etcd_client: Option<Arc<EtcdClient>>,
-    etcd_keyvalue_hashmap: Mutex<HashMap<String, Vec<u8>>>,
+    etcd_keyvalue_hashmap: Mutex<HashMap<String, (Encoding, Vec<u8>)>>,
     edited: Mutex<HashMap<String, Vec<u8>>>,
     deleted_keys: Mutex<HashSet<String>>,
     decrypt_resource_transformers: Option<ResourceTransformers>,
@@ -105,10 +106,10 @@ impl InMemoryK8sEtcd {
                     continue;
                 }
                 let key = key.clone();
-                let value = value.clone();
+                let (encoding, value) = value.clone();
                 let etcd_client = Arc::clone(etcd_client);
 
-                let mut value = etcd_encoding::encode(value.as_slice()).await.context("encoding value")?;
+                let mut value = etcd_encoding::encode(value.as_slice(), encoding).await.context("encoding value")?;
 
                 if let Some(resource_transformers) = &self.encrypt_resource_transformers {
                     // https://github.com/kubernetes/apiserver/blob/3423727e46efe7dfa40dcdb1a9c5c5027b07303d/pkg/storage/value/transformer.go#L172
@@ -184,7 +185,7 @@
 
         {
             let hashmap = self.etcd_keyvalue_hashmap.lock().await;
-            if let Some(value) = hashmap.get(&key) {
+            if let Some((_encoding, value)) = hashmap.get(&key) {
                 result.value.clone_from(value);
                 return Ok(Some(result));
             }
@@ -195,7 +196,7 @@
 
         if let Some(value) = get_result.kvs().first() {
             let raw_etcd_value = value.value();
-            let mut decoded_value = etcd_encoding::decode(raw_etcd_value).await.context("decoding value")?;
+            let (mut decoded_value, mut encoding) = etcd_encoding::decode(raw_etcd_value).await.context("decoding value")?;
 
             if let Some(resource_transformers) = &self.decrypt_resource_transformers {
                 // https://github.com/kubernetes/apiserver/blob/3423727e46efe7dfa40dcdb1a9c5c5027b07303d/pkg/storage/value/transformer.go#L110
@@ -209,7 +210,7 @@ impl InMemoryK8sEtcd {
                         .decrypt(key.to_string(), raw_etcd_value.to_vec())
                         .await
                         .context("decrypting etcd value")?;
-                    decoded_value = etcd_encoding::decode(&plaintext_value).await.context("decoding value")?;
+                    (decoded_value, encoding) = etcd_encoding::decode(&plaintext_value).await.context("decoding value")?;
                     break;
                 }
             }
@@ -219,7 +220,7 @@ impl InMemoryK8sEtcd {
             self.etcd_keyvalue_hashmap
                 .lock()
                 .await
-                .insert(key.to_string(), decoded_value.clone());
+                .insert(key.to_string(), (encoding, decoded_value.clone()));
 
             result.value = decoded_value;
             return Ok(Some(result));
@@ -228,10 +229,18 @@
         Ok(None)
     }
 
-    pub(crate) async fn put(&self, key: &str, value: Vec<u8>) {
-        self.etcd_keyvalue_hashmap.lock().await.insert(key.to_string(), value.clone());
+    pub(crate) async fn put(&self, key: &str, value: Vec<u8>) -> Result<()> {
+        let mut hashmap = self.etcd_keyvalue_hashmap.lock().await;
+
+        // Only put if the key already exists in the cache, preserving the encoding
+        let (encoding, _) = hashmap.get(key).context(format!("key '{}' not found in cache", key))?;
+        let encoding = encoding.clone(); // Clone the encoding
+        hashmap.insert(key.to_string(), (encoding, value.clone()));
+        drop(hashmap); // Release the lock early
+
         self.deleted_keys.lock().await.remove(key);
         self.edited.lock().await.insert(key.to_string(), value);
+        Ok(())
     }
 
     pub(crate) async fn list_keys(&self, resource_kind: &str) -> Result<Vec<String>> {
@@ -337,6 +346,7 @@ pub(crate) async fn get_etcd_json(client: &InMemoryK8sEtcd, k8slocation: &K8sRes
 pub(crate) async fn put_etcd_yaml(client: &InMemoryK8sEtcd, k8slocation: &K8sResourceLocation, value: Value) -> Result<()> {
     client
         .put(&k8slocation.as_etcd_key(), serde_json::to_string(&value)?.as_bytes().into())
-        .await;
+        .await
+        .context("putting in etcd")?;
     Ok(())
 }
diff --git a/src/ocp_postprocess/encryption_config/etcd_rename.rs b/src/ocp_postprocess/encryption_config/etcd_rename.rs
index 
e8d6c60e..bcbd628e 100644 --- a/src/ocp_postprocess/encryption_config/etcd_rename.rs +++ b/src/ocp_postprocess/encryption_config/etcd_rename.rs @@ -177,7 +177,8 @@ async fn update_encryption_key(component: &str, etcd_client: &Arc, origina &(format!("/kubernetes.io/secrets/openshift-etcd/{new_secret_name}")), serde_json::to_string(&etcd_value).context("serializing value")?.as_bytes().to_vec(), ) - .await; + .await + .context("putting in etcd")?; etcd_client.delete(&key).await.context(format!("deleting {}", key))?; diff --git a/vendor/ciborium-io/.cargo-checksum.json b/vendor/ciborium-io/.cargo-checksum.json new file mode 100644 index 00000000..445d07fa --- /dev/null +++ b/vendor/ciborium-io/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{".cargo_vcs_info.json":"87e99846a22dd6bb034e9349b67c9685639e2d5e097ac62f6685f4707210a6e2","Cargo.toml":"abeba525640ec9d3c21a2505d26a94be52680803baa2c0c3e6a46d0d11a7e534","Cargo.toml.orig":"00e61d2e3c518e74031b38509ee844f1806109f2bd1c01d1c61a55d04f23290c","LICENSE":"c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4","README.md":"005ef797bef5dc4441d977a8dcca9783a36e3f9c0a0573a492c055080cdff6d5","src/lib.rs":"ef5797420095e3f89a610031f962889472ccba079bfaa3241a0733590bf38659"},"package":"05afea1e0a06c9be33d539b876f1ce3692f4afea2cb41f740e7743225ed1c757"} \ No newline at end of file diff --git a/vendor/ciborium-io/.cargo_vcs_info.json b/vendor/ciborium-io/.cargo_vcs_info.json new file mode 100644 index 00000000..8ffbb09c --- /dev/null +++ b/vendor/ciborium-io/.cargo_vcs_info.json @@ -0,0 +1,6 @@ +{ + "git": { + "sha1": "fdf3ec5c53256000f8e12f06c361ca18026a8d99" + }, + "path_in_vcs": "ciborium-io" +} \ No newline at end of file diff --git a/vendor/ciborium-io/Cargo.toml b/vendor/ciborium-io/Cargo.toml new file mode 100644 index 00000000..e5dbc39b --- /dev/null +++ b/vendor/ciborium-io/Cargo.toml @@ -0,0 +1,42 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g., crates.io) dependencies. +# +# If you are reading this file be aware that the original Cargo.toml +# will likely look very different (and much more reasonable). +# See Cargo.toml.orig for the original contents. 
+ +[package] +edition = "2021" +rust-version = "1.58" +name = "ciborium-io" +version = "0.2.2" +authors = ["Nathaniel McCallum "] +description = "Simplified Read/Write traits for no_std usage" +homepage = "https://github.com/enarx/ciborium" +readme = "README.md" +keywords = ["io", "read", "write"] +categories = ["data-structures", "embedded", "no-std"] +license = "Apache-2.0" +repository = "https://github.com/enarx/ciborium" +[package.metadata.docs.rs] +all-features = true + +[features] +alloc = [] +std = ["alloc"] +[badges.github] +repository = "enarx/ciborium" +workflow = "test" + +[badges.is-it-maintained-issue-resolution] +repository = "enarx/ciborium" + +[badges.is-it-maintained-open-issues] +repository = "enarx/ciborium" + +[badges.maintenance] +status = "actively-developed" diff --git a/vendor/ciborium-io/Cargo.toml.orig b/vendor/ciborium-io/Cargo.toml.orig new file mode 100644 index 00000000..d66bf51f --- /dev/null +++ b/vendor/ciborium-io/Cargo.toml.orig @@ -0,0 +1,28 @@ +[package] +name = "ciborium-io" +version = "0.2.2" +authors = ["Nathaniel McCallum "] +license = "Apache-2.0" +edition = "2021" +rust-version = "1.58" +homepage = "https://github.com/enarx/ciborium" +repository = "https://github.com/enarx/ciborium" +description = "Simplified Read/Write traits for no_std usage" +readme = "README.md" +keywords = ["io", "read", "write"] +categories = ["data-structures", "embedded", "no-std"] + +[badges] +# See https://doc.rust-lang.org/cargo/reference/manifest.html#the-badges-section +github = { repository = "enarx/ciborium", workflow = "test" } +#github = { repository = "enarx/ciborium", workflow = "lint" } +maintenance = { status = "actively-developed" } +is-it-maintained-issue-resolution = { repository = "enarx/ciborium" } +is-it-maintained-open-issues = { repository = "enarx/ciborium" } + +[features] +alloc = [] +std = ["alloc"] + +[package.metadata.docs.rs] +all-features = true diff --git a/vendor/ciborium-io/LICENSE b/vendor/ciborium-io/LICENSE new file mode 100644 index 00000000..261eeb9e --- /dev/null +++ b/vendor/ciborium-io/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/ciborium-io/README.md b/vendor/ciborium-io/README.md new file mode 100644 index 00000000..2ba5cc68 --- /dev/null +++ b/vendor/ciborium-io/README.md @@ -0,0 +1,22 @@ +[![Workflow Status](https://github.com/enarx/ciborium/workflows/test/badge.svg)](https://github.com/enarx/ciborium/actions?query=workflow%3A%22test%22) +[![Average time to resolve an issue](https://isitmaintained.com/badge/resolution/enarx/ciborium.svg)](https://isitmaintained.com/project/enarx/ciborium "Average time to resolve an issue") +[![Percentage of issues still open](https://isitmaintained.com/badge/open/enarx/ciborium.svg)](https://isitmaintained.com/project/enarx/ciborium "Percentage of issues still open") +![Maintenance](https://img.shields.io/badge/maintenance-activly--developed-brightgreen.svg) + +# ciborium-io + +Simple, Low-level I/O traits + +This crate provides two simple traits: `Read` and `Write`. 
These traits +mimic their counterparts in `std::io`, but are trimmed for simplicity +and can be used in `no_std` and `no_alloc` environments. Since this +crate contains only traits, inline functions and unit structs, it should +be a zero-cost abstraction. + +If the `std` feature is enabled, we provide blanket implementations for +all `std::io` types. If the `alloc` feature is enabled, we provide +implementations for `Vec`. In all cases, you get implementations +for byte slices. You can, of course, implement the traits for your own +types. + +License: Apache-2.0 diff --git a/vendor/ciborium-io/src/lib.rs b/vendor/ciborium-io/src/lib.rs new file mode 100644 index 00000000..fef5b704 --- /dev/null +++ b/vendor/ciborium-io/src/lib.rs @@ -0,0 +1,265 @@ +// SPDX-License-Identifier: Apache-2.0 + +//! Simple, Low-level I/O traits +//! +//! This crate provides two simple traits: `Read` and `Write`. These traits +//! mimic their counterparts in `std::io`, but are trimmed for simplicity +//! and can be used in `no_std` and `no_alloc` environments. Since this +//! crate contains only traits, inline functions and unit structs, it should +//! be a zero-cost abstraction. +//! +//! If the `std` feature is enabled, we provide blanket implementations for +//! all `std::io` types. If the `alloc` feature is enabled, we provide +//! implementations for `Vec`. In all cases, you get implementations +//! for byte slices. You can, of course, implement the traits for your own +//! types. + +#![cfg_attr(not(feature = "std"), no_std)] +#![deny(missing_docs)] +#![deny(clippy::all)] +#![deny(clippy::cargo)] + +#[cfg(feature = "alloc")] +extern crate alloc; + +/// A trait indicating a type that can read bytes +/// +/// Note that this is similar to `std::io::Read`, but simplified for use in a +/// `no_std` context. +pub trait Read { + /// The error type + type Error; + + /// Reads exactly `data.len()` bytes or fails + fn read_exact(&mut self, data: &mut [u8]) -> Result<(), Self::Error>; +} + +/// A trait indicating a type that can write bytes +/// +/// Note that this is similar to `std::io::Write`, but simplified for use in a +/// `no_std` context. 
+pub trait Write { + /// The error type + type Error; + + /// Writes all bytes from `data` or fails + fn write_all(&mut self, data: &[u8]) -> Result<(), Self::Error>; + + /// Flushes all output + fn flush(&mut self) -> Result<(), Self::Error>; +} + +#[cfg(feature = "std")] +impl Read for T { + type Error = std::io::Error; + + #[inline] + fn read_exact(&mut self, data: &mut [u8]) -> Result<(), Self::Error> { + self.read_exact(data) + } +} + +#[cfg(feature = "std")] +impl Write for T { + type Error = std::io::Error; + + #[inline] + fn write_all(&mut self, data: &[u8]) -> Result<(), Self::Error> { + self.write_all(data) + } + + #[inline] + fn flush(&mut self) -> Result<(), Self::Error> { + self.flush() + } +} + +#[cfg(not(feature = "std"))] +impl Read for &mut R { + type Error = R::Error; + + #[inline] + fn read_exact(&mut self, data: &mut [u8]) -> Result<(), Self::Error> { + (**self).read_exact(data) + } +} + +#[cfg(not(feature = "std"))] +impl Write for &mut W { + type Error = W::Error; + + #[inline] + fn write_all(&mut self, data: &[u8]) -> Result<(), Self::Error> { + (**self).write_all(data) + } + + #[inline] + fn flush(&mut self) -> Result<(), Self::Error> { + (**self).flush() + } +} + +/// An error indicating there are no more bytes to read +#[cfg(not(feature = "std"))] +#[derive(Debug)] +pub struct EndOfFile(()); + +#[cfg(not(feature = "std"))] +impl Read for &[u8] { + type Error = EndOfFile; + + #[inline] + fn read_exact(&mut self, data: &mut [u8]) -> Result<(), Self::Error> { + if data.len() > self.len() { + return Err(EndOfFile(())); + } + + let (prefix, suffix) = self.split_at(data.len()); + data.copy_from_slice(prefix); + *self = suffix; + Ok(()) + } +} + +/// An error indicating that the output cannot accept more bytes +#[cfg(not(feature = "std"))] +#[derive(Debug)] +pub struct OutOfSpace(()); + +#[cfg(not(feature = "std"))] +impl Write for &mut [u8] { + type Error = OutOfSpace; + + #[inline] + fn write_all(&mut self, data: &[u8]) -> Result<(), Self::Error> { + if data.len() > self.len() { + return Err(OutOfSpace(())); + } + + let (prefix, suffix) = core::mem::take(self).split_at_mut(data.len()); + prefix.copy_from_slice(data); + *self = suffix; + Ok(()) + } + + #[inline] + fn flush(&mut self) -> Result<(), Self::Error> { + Ok(()) + } +} + +#[cfg(all(not(feature = "std"), feature = "alloc"))] +impl Write for alloc::vec::Vec { + type Error = core::convert::Infallible; + + #[inline] + fn write_all(&mut self, data: &[u8]) -> Result<(), Self::Error> { + self.extend_from_slice(data); + Ok(()) + } + + #[inline] + fn flush(&mut self) -> Result<(), Self::Error> { + Ok(()) + } +} + +#[cfg(test)] +mod test { + use super::*; + + #[test] + fn read_eof() { + let mut reader = &[1u8; 0][..]; + let mut buffer = [0u8; 1]; + + reader.read_exact(&mut buffer[..]).unwrap_err(); + } + + #[test] + fn read_one() { + let mut reader = &[1u8; 1][..]; + let mut buffer = [0u8; 1]; + + reader.read_exact(&mut buffer[..]).unwrap(); + assert_eq!(buffer[0], 1); + + reader.read_exact(&mut buffer[..]).unwrap_err(); + } + + #[test] + fn read_two() { + let mut reader = &[1u8; 2][..]; + let mut buffer = [0u8; 1]; + + reader.read_exact(&mut buffer[..]).unwrap(); + assert_eq!(buffer[0], 1); + + reader.read_exact(&mut buffer[..]).unwrap(); + assert_eq!(buffer[0], 1); + + reader.read_exact(&mut buffer[..]).unwrap_err(); + } + + #[test] + #[cfg(feature = "std")] + fn read_std() { + let mut reader = std::io::repeat(1); + let mut buffer = [0u8; 2]; + + reader.read_exact(&mut buffer[..]).unwrap(); + assert_eq!(buffer[0], 1); 
+ assert_eq!(buffer[1], 1); + } + + #[test] + fn write_oos() { + let mut writer = &mut [0u8; 0][..]; + + writer.write_all(&[1u8; 1][..]).unwrap_err(); + } + + #[test] + fn write_one() { + let mut buffer = [0u8; 1]; + let mut writer = &mut buffer[..]; + + writer.write_all(&[1u8; 1][..]).unwrap(); + writer.write_all(&[1u8; 1][..]).unwrap_err(); + assert_eq!(buffer[0], 1); + } + + #[test] + fn write_two() { + let mut buffer = [0u8; 2]; + let mut writer = &mut buffer[..]; + + writer.write_all(&[1u8; 1][..]).unwrap(); + writer.write_all(&[1u8; 1][..]).unwrap(); + writer.write_all(&[1u8; 1][..]).unwrap_err(); + assert_eq!(buffer[0], 1); + assert_eq!(buffer[1], 1); + } + + #[test] + #[cfg(feature = "alloc")] + fn write_vec() { + let mut buffer = alloc::vec::Vec::new(); + + buffer.write_all(&[1u8; 1][..]).unwrap(); + buffer.write_all(&[1u8; 1][..]).unwrap(); + + assert_eq!(buffer.len(), 2); + assert_eq!(buffer[0], 1); + assert_eq!(buffer[1], 1); + } + + #[test] + #[cfg(feature = "std")] + fn write_std() { + let mut writer = std::io::sink(); + + writer.write_all(&[1u8; 1][..]).unwrap(); + writer.write_all(&[1u8; 1][..]).unwrap(); + } +} diff --git a/vendor/ciborium-ll/.cargo-checksum.json b/vendor/ciborium-ll/.cargo-checksum.json new file mode 100644 index 00000000..499d1084 --- /dev/null +++ b/vendor/ciborium-ll/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{".cargo_vcs_info.json":"bd961f5e27e798106da5cd26e10ec9542ce8fb6c3ceea1af4b2f740df0645e5e","Cargo.toml":"ca45cd09db5f1b8fae6f7f37eb569977ea9e75c9cc19da960b6386e5301438a9","Cargo.toml.orig":"cb3c023b593a90cfbde2e441d68d8b2be359c3f8aec38c4b0b916920c0133404","LICENSE":"c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4","README.md":"b79803cccd55d24fc81e47aa3fac77461caa78c80799b9115e283ac0bc805c3a","src/dec.rs":"282263f49ad5adb0b953f75dcede95938e17d6b94db895ed8977fb2fa66146ee","src/enc.rs":"90388589b9798382c1abcae21d91e9f3d4a35415c3ad534f41312e94a0e43635","src/hdr.rs":"6cdbdf3c9ea430d804c1c64ab70c7a2a6e38e49f9cbd6f157c6542d8c9b2e952","src/lib.rs":"da6178728228ebfc50b13fc32ce707f2cc539f0212a919645ec64a50e21a0977","src/seg.rs":"cb3e552aad59bdbd0023c5a27b9c04bc61609175e03e6400d38767d964152288"},"package":"57663b653d948a338bfb3eeba9bb2fd5fcfaecb9e199e87e1eda4d9e8b240fd9"} \ No newline at end of file diff --git a/vendor/ciborium-ll/.cargo_vcs_info.json b/vendor/ciborium-ll/.cargo_vcs_info.json new file mode 100644 index 00000000..b19fea32 --- /dev/null +++ b/vendor/ciborium-ll/.cargo_vcs_info.json @@ -0,0 +1,6 @@ +{ + "git": { + "sha1": "fdf3ec5c53256000f8e12f06c361ca18026a8d99" + }, + "path_in_vcs": "ciborium-ll" +} \ No newline at end of file diff --git a/vendor/ciborium-ll/Cargo.toml b/vendor/ciborium-ll/Cargo.toml new file mode 100644 index 00000000..80db55e7 --- /dev/null +++ b/vendor/ciborium-ll/Cargo.toml @@ -0,0 +1,50 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g., crates.io) dependencies. +# +# If you are reading this file be aware that the original Cargo.toml +# will likely look very different (and much more reasonable). +# See Cargo.toml.orig for the original contents. 
+ +[package] +edition = "2021" +rust-version = "1.58" +name = "ciborium-ll" +version = "0.2.2" +authors = ["Nathaniel McCallum "] +description = "Low-level CBOR codec primitives" +homepage = "https://github.com/enarx/ciborium" +readme = "README.md" +keywords = ["cbor"] +categories = ["data-structures", "embedded", "encoding", "no-std", "parsing"] +license = "Apache-2.0" +repository = "https://github.com/enarx/ciborium" +[package.metadata.docs.rs] +all-features = true +[dependencies.ciborium-io] +version = "0.2.2" + +[dependencies.half] +version = "2.2" +default-features = false +[dev-dependencies.hex] +version = "0.4" + +[features] +alloc = [] +std = ["alloc", "half/std"] +[badges.github] +repository = "enarx/ciborium" +workflow = "test" + +[badges.is-it-maintained-issue-resolution] +repository = "enarx/ciborium" + +[badges.is-it-maintained-open-issues] +repository = "enarx/ciborium" + +[badges.maintenance] +status = "actively-developed" diff --git a/vendor/ciborium-ll/Cargo.toml.orig b/vendor/ciborium-ll/Cargo.toml.orig new file mode 100644 index 00000000..2722030f --- /dev/null +++ b/vendor/ciborium-ll/Cargo.toml.orig @@ -0,0 +1,35 @@ +[package] +name = "ciborium-ll" +version = "0.2.2" +authors = ["Nathaniel McCallum "] +license = "Apache-2.0" +edition = "2021" +rust-version = "1.58" +homepage = "https://github.com/enarx/ciborium" +repository = "https://github.com/enarx/ciborium" +description = "Low-level CBOR codec primitives" +readme = "README.md" +keywords = ["cbor"] +categories = ["data-structures", "embedded", "encoding", "no-std", "parsing"] + +[badges] +# See https://doc.rust-lang.org/cargo/reference/manifest.html#the-badges-section +github = { repository = "enarx/ciborium", workflow = "test" } +#github = { repository = "enarx/ciborium", workflow = "lint" } +maintenance = { status = "actively-developed" } +is-it-maintained-issue-resolution = { repository = "enarx/ciborium" } +is-it-maintained-open-issues = { repository = "enarx/ciborium" } + +[dependencies] +ciborium-io = { path = "../ciborium-io", version = "0.2.2" } +half = { version = "2.2", default-features = false} + +[dev-dependencies] +hex = "0.4" + +[features] +alloc = [] +std = ["alloc", "half/std"] + +[package.metadata.docs.rs] +all-features = true diff --git a/vendor/ciborium-ll/LICENSE b/vendor/ciborium-ll/LICENSE new file mode 100644 index 00000000..261eeb9e --- /dev/null +++ b/vendor/ciborium-ll/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/vendor/ciborium-ll/README.md b/vendor/ciborium-ll/README.md new file mode 100644 index 00000000..2ca6306a --- /dev/null +++ b/vendor/ciborium-ll/README.md @@ -0,0 +1,131 @@ +[![Workflow Status](https://github.com/enarx/ciborium/workflows/test/badge.svg)](https://github.com/enarx/ciborium/actions?query=workflow%3A%22test%22) +[![Average time to resolve an issue](https://isitmaintained.com/badge/resolution/enarx/ciborium.svg)](https://isitmaintained.com/project/enarx/ciborium "Average time to resolve an issue") +[![Percentage of issues still open](https://isitmaintained.com/badge/open/enarx/ciborium.svg)](https://isitmaintained.com/project/enarx/ciborium "Percentage of issues still open") +![Maintenance](https://img.shields.io/badge/maintenance-activly--developed-brightgreen.svg) + +# ciborium-ll + +Low level CBOR parsing tools + +This crate contains low-level types for encoding and decoding items in +CBOR. This crate is usable in both `no_std` and `no_alloc` environments. +To understand how this crate works, first we will look at the structure +of a CBOR item on the wire. + +## Anatomy of a CBOR Item + +This is a brief anatomy of a CBOR item on the wire. + +``` ++------------+-----------+ +| | | +| Major | Minor | +| (3bits) | (5bits) | +| | | ++------------+-----------+ +^ ^ +| | ++-----+ +-----+ + | | + | | + +----------------------------+--------------+ + | | | | + | Prefix | Affix | Suffix | + | (1 byte) | (0-8 bytes) | (0+ bytes) | + | | | | + +------------+---------------+--------------+ + + | | | + +------------+---------------+--------------+ + | | + v v + + Header Body +``` + +The `ciborium` crate works by providing the `Decoder` and `Encoder` types +which provide input and output for a CBOR header (see: `Header`). From +there, you can either handle the body yourself or use the provided utility +functions. + +For more information on the CBOR format, see +[RFC 7049](https://tools.ietf.org/html/rfc7049). + +## Decoding + +In order to decode CBOR, you will create a `Decoder` from a reader. The +decoder instance will allow you to `Decoder::pull()` `Header` instances +from the input. + +Most CBOR items are fully contained in their headers and therefore have no +body. These items can be evaluated directly from the `Header` instance. + +Bytes and text items have a body but do not contain child items. Since +both bytes and text values may be segmented, parsing them can be a bit +tricky. Therefore, we provide helper functions to parse these types. See +`Decoder::bytes()` and `Decoder::text()` for more details. + +Array and map items have a body which contains child items. These can be +parsed by simply doing `Decoder::pull()` to parse the child items. + +### Example + +```rust +use ciborium_ll::{Decoder, Header}; +use ciborium_io::Read as _; + +let input = b"\x6dHello, World!"; +let mut decoder = Decoder::from(&input[..]); +let mut chunks = 0; + +match decoder.pull().unwrap() { + Header::Text(len) => { + let mut segments = decoder.text(len); + while let Some(mut segment) = segments.pull().unwrap() { + let mut buffer = [0u8; 7]; + while let Some(chunk) = segment.pull(&mut buffer[..]).unwrap() { + match chunk { + "Hello, " if chunks == 0 => chunks = 1, + "World!" if chunks == 1 => chunks = 2, + _ => panic!("received unexpected chunk"), + } + } + } + } + + _ => panic!("received unexpected value"), +} + +assert_eq!(chunks, 2); +``` + +## Encoding + +To encode values to CBOR, create an `Encoder` from a writer. 
The encoder +instance provides the `Encoder::push()` method to write a `Header` value +to the wire. CBOR item bodies can be written directly. + +For bytes and text, there are the `Encoder::bytes()` and `Encoder::text()` +utility functions, respectively, which will properly segment the output +on the wire for you. + +### Example + +```rust +use ciborium_ll::{Encoder, Header}; +use ciborium_io::Write as _; + +let mut buffer = [0u8; 19]; +let mut encoder = Encoder::from(&mut buffer[..]); + +// Write the structure +encoder.push(Header::Map(Some(1))).unwrap(); +encoder.push(Header::Positive(7)).unwrap(); +encoder.text("Hello, World!", 7).unwrap(); + +// Validate our output +encoder.flush().unwrap(); +assert_eq!(b"\xa1\x07\x7f\x67Hello, \x66World!\xff", &buffer[..]); +``` + +License: Apache-2.0 diff --git a/vendor/ciborium-ll/src/dec.rs b/vendor/ciborium-ll/src/dec.rs new file mode 100644 index 00000000..3bd0889c --- /dev/null +++ b/vendor/ciborium-ll/src/dec.rs @@ -0,0 +1,174 @@ +use super::*; + +use ciborium_io::Read; + +/// An error that occurred while decoding +#[derive(Debug)] +pub enum Error { + /// An error occurred while reading bytes + /// + /// Contains the underlying error returned while reading. + Io(T), + + /// An error occurred while parsing bytes + /// + /// Contains the offset into the stream where the syntax error occurred. + Syntax(usize), +} + +impl From for Error { + #[inline] + fn from(value: T) -> Self { + Self::Io(value) + } +} + +/// A decoder for deserializing CBOR items +/// +/// This decoder manages the low-level decoding of CBOR items into `Header` +/// objects. It also contains utility functions for parsing segmented bytes +/// and text inputs. +pub struct Decoder { + reader: R, + offset: usize, + buffer: Option, +} + +impl<R: Read> From<R> for Decoder<R> { + #[inline] + fn from(value: R) -> Self { + Self { + reader: value, + offset: 0, + buffer: None, + } + } +} + +impl<R: Read> Read for Decoder<R> { + type Error = R::Error; + + #[inline] + fn read_exact(&mut self, data: &mut [u8]) -> Result<(), Self::Error> { + assert!(self.buffer.is_none()); + self.reader.read_exact(data)?; + self.offset += data.len(); + Ok(()) + } +} + +impl<R: Read> Decoder<R> { + #[inline] + fn pull_title(&mut self) -> Result<Title, Error<R::Error>> { + if let Some(title) = self.buffer.take() { + self.offset += title.1.as_ref().len() + 1; + return Ok(title); + } + + let mut prefix = [0u8; 1]; + self.read_exact(&mut prefix[..])?; + + let major = match prefix[0] >> 5 { + 0 => Major::Positive, + 1 => Major::Negative, + 2 => Major::Bytes, + 3 => Major::Text, + 4 => Major::Array, + 5 => Major::Map, + 6 => Major::Tag, + 7 => Major::Other, + _ => unreachable!(), + }; + + let mut minor = match prefix[0] & 0b00011111 { + x if x < 24 => Minor::This(x), + 24 => Minor::Next1([0; 1]), + 25 => Minor::Next2([0; 2]), + 26 => Minor::Next4([0; 4]), + 27 => Minor::Next8([0; 8]), + 31 => Minor::More, + _ => return Err(Error::Syntax(self.offset - 1)), + }; + + self.read_exact(minor.as_mut())?; + Ok(Title(major, minor)) + } + + #[inline] + fn push_title(&mut self, item: Title) { + assert!(self.buffer.is_none()); + self.buffer = Some(item); + self.offset -= item.1.as_ref().len() + 1; + } + + /// Pulls the next header from the input + #[inline] + pub fn pull(&mut self) -> Result<Header, Error<R::Error>> { + let offset = self.offset; + self.pull_title()? 
+ .try_into() + .map_err(|_| Error::Syntax(offset)) + } + + /// Push a single header into the input buffer + /// + /// # Panics + /// + /// This function panics if called while there is already a header in the + /// input buffer. You should take care to call this function only after + /// pulling a header to ensure there is nothing in the input buffer. + #[inline] + pub fn push(&mut self, item: Header) { + self.push_title(Title::from(item)) + } + + /// Gets the current byte offset into the stream + /// + /// The offset starts at zero when the decoder is created. Therefore, if + /// bytes were already read from the reader before the decoder was created, + /// you must account for this. + #[inline] + pub fn offset(&mut self) -> usize { + self.offset + } + + /// Process an incoming bytes item + /// + /// In CBOR, bytes can be segmented. The logic for this can be a bit tricky, + /// so we encapsulate that logic using this function. This function **MUST** + /// be called immediately after first pulling a `Header::Bytes(len)` from + /// the wire and `len` must be provided to this function from that value. + /// + /// The `buf` parameter provides a buffer used when reading in the segmented + /// bytes. A large buffer will result in fewer calls to read incoming bytes + /// at the cost of memory usage. You should consider this trade off when + /// deciding the size of your buffer. + #[inline] + pub fn bytes(&mut self, len: Option<usize>) -> Segments<R, crate::seg::Bytes> { + self.push(Header::Bytes(len)); + Segments::new(self, |header| match header { + Header::Bytes(len) => Ok(len), + _ => Err(()), + }) + } + + /// Process an incoming text item + /// + /// In CBOR, text can be segmented. The logic for this can be a bit tricky, + /// so we encapsulate that logic using this function. This function **MUST** + /// be called immediately after first pulling a `Header::Text(len)` from + /// the wire and `len` must be provided to this function from that value. + /// + /// The `buf` parameter provides a buffer used when reading in the segmented + /// text. A large buffer will result in fewer calls to read incoming bytes + /// at the cost of memory usage. You should consider this trade off when + /// deciding the size of your buffer. + #[inline] + pub fn text(&mut self, len: Option<usize>) -> Segments<R, crate::seg::Text> { + self.push(Header::Text(len)); + Segments::new(self, |header| match header { + Header::Text(len) => Ok(len), + _ => Err(()), + }) + } +} diff --git a/vendor/ciborium-ll/src/enc.rs b/vendor/ciborium-ll/src/enc.rs new file mode 100644 index 00000000..b8b8a20d --- /dev/null +++ b/vendor/ciborium-ll/src/enc.rs @@ -0,0 +1,127 @@ +use super::*; + +use ciborium_io::Write; + +/// An encoder for serializing CBOR items +/// +/// This structure wraps a writer and provides convenience functions for +/// writing `Header` objects to the wire. 
+pub struct Encoder<W: Write>(W); + +impl<W: Write> From<W> for Encoder<W> { + #[inline] + fn from(value: W) -> Self { + Self(value) + } +} + +impl<W: Write> Write for Encoder<W> { + type Error = W::Error; + + fn write_all(&mut self, data: &[u8]) -> Result<(), Self::Error> { + self.0.write_all(data) + } + + fn flush(&mut self) -> Result<(), Self::Error> { + self.0.flush() + } +} + +impl<W: Write> Encoder<W> { + /// Push a `Header` to the wire + #[inline] + pub fn push(&mut self, header: Header) -> Result<(), W::Error> { + let title = Title::from(header); + + let major = match title.0 { + Major::Positive => 0, + Major::Negative => 1, + Major::Bytes => 2, + Major::Text => 3, + Major::Array => 4, + Major::Map => 5, + Major::Tag => 6, + Major::Other => 7, + }; + + let minor = match title.1 { + Minor::This(x) => x, + Minor::Next1(..) => 24, + Minor::Next2(..) => 25, + Minor::Next4(..) => 26, + Minor::Next8(..) => 27, + Minor::More => 31, + }; + + self.0.write_all(&[major << 5 | minor])?; + self.0.write_all(title.1.as_ref()) + } + + /// Serialize a byte slice as CBOR + /// + /// Optionally, segment the output into `segment` size segments. Note that + /// if `segment == Some(0)` it will be silently upgraded to `Some(1)`. This + /// minimum value is highly inefficient and should not be relied upon. + #[inline] + pub fn bytes( + &mut self, + value: &[u8], + segment: impl Into<Option<usize>>, + ) -> Result<(), W::Error> { + let max = segment.into().unwrap_or(value.len()); + let max = core::cmp::max(max, 1); + + if max >= value.len() { + self.push(Header::Bytes(Some(value.len())))?; + self.write_all(value)?; + } else { + self.push(Header::Bytes(None))?; + + for chunk in value.chunks(max) { + self.push(Header::Bytes(Some(chunk.len())))?; + self.write_all(chunk)?; + } + + self.push(Header::Break)?; + } + + Ok(()) + } + + /// Serialize a string slice as CBOR + /// + /// Optionally, segment the output into `segment` size segments. Note that + /// since care is taken to ensure that each segment is itself a valid UTF-8 + /// string, if `segment` contains a value of less than 4, it will be + /// silently upgraded to 4. This minimum value is highly inefficient and + /// should not be relied upon. + #[inline] + pub fn text(&mut self, value: &str, segment: impl Into<Option<usize>>) -> Result<(), W::Error> { + let max = segment.into().unwrap_or(value.len()); + let max = core::cmp::max(max, 4); + + if max >= value.len() { + self.push(Header::Text(Some(value.len())))?; + self.write_all(value.as_bytes())?; + } else { + self.push(Header::Text(None))?; + + let mut bytes = value.as_bytes(); + while !bytes.is_empty() { + let mut len = core::cmp::min(bytes.len(), max); + while len > 0 && core::str::from_utf8(&bytes[..len]).is_err() { + len -= 1 + } + + let (prefix, suffix) = bytes.split_at(len); + self.push(Header::Text(Some(prefix.len())))?; + self.write_all(prefix)?; + bytes = suffix; + } + + self.push(Header::Break)?; + } + + Ok(()) + } +} diff --git a/vendor/ciborium-ll/src/hdr.rs b/vendor/ciborium-ll/src/hdr.rs new file mode 100644 index 00000000..dec17881 --- /dev/null +++ b/vendor/ciborium-ll/src/hdr.rs @@ -0,0 +1,163 @@ +use super::*; + +use half::f16; + +/// A semantic representation of a CBOR item header +/// +/// This structure represents the valid values of a CBOR item header and is +/// used extensively when serializing or deserializing CBOR items. Note well +/// that this structure **DOES NOT** represent the body (i.e. suffix) of the +/// CBOR item. 
You must parse the body yourself based on the contents of the +/// `Header`. However, utility functions are provided for this (see: +/// `Decoder::bytes()` and `Decoder::text()`). +#[derive(Copy, Clone, Debug, PartialEq)] +pub enum Header { + /// A positive integer + Positive(u64), + + /// A negative integer + /// + /// Note well that this value has all bits inverted from a normal signed + /// integer. For example, to convert the `u64` to a `i128` you would do + /// this: `neg as i128 ^ !0`. + Negative(u64), + + /// A floating point value + Float(f64), + + /// A "simple" value + Simple(u8), + + /// A tag + Tag(u64), + + /// The "break" value + /// + /// This value is used to terminate indefinite length arrays and maps, + /// as well as segmented byte or text items. + Break, + + /// A bytes item + /// + /// The value contained in this variant indicates the length of the bytes + /// which follow or, if `None`, segmented bytes input. + /// + /// A best practice is to call `Decoder::bytes()` immediately after + /// first pulling a bytes item header since this utility function + /// encapsulates all the logic needed to handle segmentation. + Bytes(Option<usize>), + + /// A text item + /// + /// The value contained in this variant indicates the length of the text + /// which follows (in bytes) or, if `None`, segmented text input. + /// + /// A best practice is to call `Decoder::text()` immediately after + /// first pulling a text item header since this utility function + /// encapsulates all the logic needed to handle segmentation. + Text(Option<usize>), + + /// An array item + /// + /// The value contained in this variant indicates the length of the array + /// which follows (in items) or, if `None`, an indefinite length array + /// terminated by a "break" value. + Array(Option<usize>), + + /// An map item + /// + /// The value contained in this variant indicates the length of the map + /// which follows (in item pairs) or, if `None`, an indefinite length map + /// terminated by a "break" value. 
+ Map(Option<usize>), +} + +impl TryFrom<Title> for Header { + type Error = InvalidError; + + fn try_from(title: Title) -> Result<Self, Self::Error> { + let opt = |minor| { + Some(match minor { + Minor::This(x) => x.into(), + Minor::Next1(x) => u8::from_be_bytes(x).into(), + Minor::Next2(x) => u16::from_be_bytes(x).into(), + Minor::Next4(x) => u32::from_be_bytes(x).into(), + Minor::Next8(x) => u64::from_be_bytes(x), + Minor::More => return None, + }) + }; + + let int = |m| opt(m).ok_or(InvalidError(())); + + let len = |m| { + opt(m) + .map(usize::try_from) + .transpose() + .or(Err(InvalidError(()))) + }; + + Ok(match title { + Title(Major::Positive, minor) => Self::Positive(int(minor)?), + Title(Major::Negative, minor) => Self::Negative(int(minor)?), + Title(Major::Bytes, minor) => Self::Bytes(len(minor)?), + Title(Major::Text, minor) => Self::Text(len(minor)?), + Title(Major::Array, minor) => Self::Array(len(minor)?), + Title(Major::Map, minor) => Self::Map(len(minor)?), + Title(Major::Tag, minor) => Self::Tag(int(minor)?), + + Title(Major::Other, Minor::More) => Self::Break, + Title(Major::Other, Minor::This(x)) => Self::Simple(x), + Title(Major::Other, Minor::Next1(x)) => Self::Simple(x[0]), + Title(Major::Other, Minor::Next2(x)) => Self::Float(f16::from_be_bytes(x).into()), + Title(Major::Other, Minor::Next4(x)) => Self::Float(f32::from_be_bytes(x).into()), + Title(Major::Other, Minor::Next8(x)) => Self::Float(f64::from_be_bytes(x)), + }) + } +} + +impl From<Header> for Title { + fn from(header: Header) -> Self { + let int = |i: u64| match i { + x if x <= 23 => Minor::This(i as u8), + x if x <= core::u8::MAX as u64 => Minor::Next1([i as u8]), + x if x <= core::u16::MAX as u64 => Minor::Next2((i as u16).to_be_bytes()), + x if x <= core::u32::MAX as u64 => Minor::Next4((i as u32).to_be_bytes()), + x => Minor::Next8(x.to_be_bytes()), + }; + + let len = |l: Option<usize>| l.map(|x| int(x as u64)).unwrap_or(Minor::More); + + match header { + Header::Positive(x) => Title(Major::Positive, int(x)), + Header::Negative(x) => Title(Major::Negative, int(x)), + Header::Bytes(x) => Title(Major::Bytes, len(x)), + Header::Text(x) => Title(Major::Text, len(x)), + Header::Array(x) => Title(Major::Array, len(x)), + Header::Map(x) => Title(Major::Map, len(x)), + Header::Tag(x) => Title(Major::Tag, int(x)), + + Header::Break => Title(Major::Other, Minor::More), + + Header::Simple(x) => match x { + x @ 0..=23 => Title(Major::Other, Minor::This(x)), + x => Title(Major::Other, Minor::Next1([x])), + }, + + Header::Float(n64) => { + let n16 = f16::from_f64(n64); + let n32 = n64 as f32; + + Title( + Major::Other, + if f64::from(n16).to_bits() == n64.to_bits() { + Minor::Next2(n16.to_be_bytes()) + } else if f64::from(n32).to_bits() == n64.to_bits() { + Minor::Next4(n32.to_be_bytes()) + } else { + Minor::Next8(n64.to_be_bytes()) + }, + ) + } + } + } +} diff --git a/vendor/ciborium-ll/src/lib.rs b/vendor/ciborium-ll/src/lib.rs new file mode 100644 index 00000000..8a1fe90c --- /dev/null +++ b/vendor/ciborium-ll/src/lib.rs @@ -0,0 +1,487 @@ +// SPDX-License-Identifier: Apache-2.0 + +//! Low level CBOR parsing tools +//! +//! This crate contains low-level types for encoding and decoding items in +//! CBOR. This crate is usable in both `no_std` and `no_alloc` environments. +//! To understand how this crate works, first we will look at the structure +//! of a CBOR item on the wire. +//! +//! # Anatomy of a CBOR Item +//! +//! This is a brief anatomy of a CBOR item on the wire. +//! +//! ```text +//! 
+------------+-----------+ +//! | | | +//! | Major | Minor | +//! | (3bits) | (5bits) | +//! | | | +//! +------------+-----------+ +//! ^ ^ +//! | | +//! +-----+ +-----+ +//! | | +//! | | +//! +----------------------------+--------------+ +//! | | | | +//! | Prefix | Affix | Suffix | +//! | (1 byte) | (0-8 bytes) | (0+ bytes) | +//! | | | | +//! +------------+---------------+--------------+ +//! +//! | | | +//! +------------+---------------+--------------+ +//! | | +//! v v +//! +//! Header Body +//! ``` +//! +//! The `ciborium` crate works by providing the `Decoder` and `Encoder` types +//! which provide input and output for a CBOR header (see: `Header`). From +//! there, you can either handle the body yourself or use the provided utility +//! functions. +//! +//! For more information on the CBOR format, see +//! [RFC 7049](https://tools.ietf.org/html/rfc7049). +//! +//! # Decoding +//! +//! In order to decode CBOR, you will create a `Decoder` from a reader. The +//! decoder instance will allow you to `Decoder::pull()` `Header` instances +//! from the input. +//! +//! Most CBOR items are fully contained in their headers and therefore have no +//! body. These items can be evaluated directly from the `Header` instance. +//! +//! Bytes and text items have a body but do not contain child items. Since +//! both bytes and text values may be segmented, parsing them can be a bit +//! tricky. Therefore, we provide helper functions to parse these types. See +//! `Decoder::bytes()` and `Decoder::text()` for more details. +//! +//! Array and map items have a body which contains child items. These can be +//! parsed by simply doing `Decoder::pull()` to parse the child items. +//! +//! ## Example +//! +//! ```rust +//! use ciborium_ll::{Decoder, Header}; +//! use ciborium_io::Read as _; +//! +//! let input = b"\x6dHello, World!"; +//! let mut decoder = Decoder::from(&input[..]); +//! let mut chunks = 0; +//! +//! match decoder.pull().unwrap() { +//! Header::Text(len) => { +//! let mut segments = decoder.text(len); +//! while let Some(mut segment) = segments.pull().unwrap() { +//! let mut buffer = [0u8; 7]; +//! while let Some(chunk) = segment.pull(&mut buffer[..]).unwrap() { +//! match chunk { +//! "Hello, " if chunks == 0 => chunks = 1, +//! "World!" if chunks == 1 => chunks = 2, +//! _ => panic!("received unexpected chunk"), +//! } +//! } +//! } +//! } +//! +//! _ => panic!("received unexpected value"), +//! } +//! +//! assert_eq!(chunks, 2); +//! ``` +//! +//! # Encoding +//! +//! To encode values to CBOR, create an `Encoder` from a writer. The encoder +//! instance provides the `Encoder::push()` method to write a `Header` value +//! to the wire. CBOR item bodies can be written directly. +//! +//! For bytes and text, there are the `Encoder::bytes()` and `Encoder::text()` +//! utility functions, respectively, which will properly segment the output +//! on the wire for you. +//! +//! ## Example +//! +//! ```rust +//! use ciborium_ll::{Encoder, Header}; +//! use ciborium_io::Write as _; +//! +//! let mut buffer = [0u8; 19]; +//! let mut encoder = Encoder::from(&mut buffer[..]); +//! +//! // Write the structure +//! encoder.push(Header::Map(Some(1))).unwrap(); +//! encoder.push(Header::Positive(7)).unwrap(); +//! encoder.text("Hello, World!", 7).unwrap(); +//! +//! // Validate our output +//! encoder.flush().unwrap(); +//! assert_eq!(b"\xa1\x07\x7f\x67Hello, \x66World!\xff", &buffer[..]); +//! 
``` + +#![cfg_attr(not(feature = "std"), no_std)] +#![deny(missing_docs)] +#![deny(clippy::all)] +#![deny(clippy::cargo)] + +#[cfg(feature = "alloc")] +extern crate alloc; + +mod dec; +mod enc; +mod hdr; +mod seg; + +pub use dec::*; +pub use enc::*; +pub use hdr::*; +pub use seg::{Segment, Segments}; + +/// Simple value constants +pub mod simple { + #![allow(missing_docs)] + + pub const FALSE: u8 = 20; + pub const TRUE: u8 = 21; + pub const NULL: u8 = 22; + pub const UNDEFINED: u8 = 23; +} + +/// Tag constants +pub mod tag { + #![allow(missing_docs)] + + pub const BIGPOS: u64 = 2; + pub const BIGNEG: u64 = 3; +} + +#[derive(Debug)] +struct InvalidError(()); + +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +enum Major { + Positive, + Negative, + Bytes, + Text, + Array, + Map, + Tag, + Other, +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +enum Minor { + This(u8), + Next1([u8; 1]), + Next2([u8; 2]), + Next4([u8; 4]), + Next8([u8; 8]), + More, +} + +impl AsRef<[u8]> for Minor { + #[inline] + fn as_ref(&self) -> &[u8] { + match self { + Self::More => &[], + Self::This(..) => &[], + Self::Next1(x) => x.as_ref(), + Self::Next2(x) => x.as_ref(), + Self::Next4(x) => x.as_ref(), + Self::Next8(x) => x.as_ref(), + } + } +} + +impl AsMut<[u8]> for Minor { + #[inline] + fn as_mut(&mut self) -> &mut [u8] { + match self { + Self::More => &mut [], + Self::This(..) => &mut [], + Self::Next1(x) => x.as_mut(), + Self::Next2(x) => x.as_mut(), + Self::Next4(x) => x.as_mut(), + Self::Next8(x) => x.as_mut(), + } + } +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +struct Title(pub Major, pub Minor); + +#[cfg(test)] +mod tests { + use super::*; + + macro_rules! neg { + ($i:expr) => { + Header::Negative((($i as i128) ^ !0) as u64) + }; + } + + #[allow(clippy::excessive_precision)] + #[test] + fn leaf() { + use core::f64::{INFINITY, NAN}; + + let data = &[ + (Header::Positive(0), "00", true), + (Header::Positive(1), "01", true), + (Header::Positive(10), "0a", true), + (Header::Positive(23), "17", true), + (Header::Positive(24), "1818", true), + (Header::Positive(25), "1819", true), + (Header::Positive(100), "1864", true), + (Header::Positive(1000), "1903e8", true), + (Header::Positive(1000000), "1a000f4240", true), + (Header::Positive(1000000000000), "1b000000e8d4a51000", true), + ( + Header::Positive(18446744073709551615), + "1bffffffffffffffff", + true, + ), + (neg!(-18446744073709551616), "3bffffffffffffffff", true), + (neg!(-1), "20", true), + (neg!(-10), "29", true), + (neg!(-100), "3863", true), + (neg!(-1000), "3903e7", true), + (Header::Float(0.0), "f90000", true), + (Header::Float(-0.0), "f98000", true), + (Header::Float(1.0), "f93c00", true), + (Header::Float(1.1), "fb3ff199999999999a", true), + (Header::Float(1.5), "f93e00", true), + (Header::Float(65504.0), "f97bff", true), + (Header::Float(100000.0), "fa47c35000", true), + (Header::Float(3.4028234663852886e+38), "fa7f7fffff", true), + (Header::Float(1.0e+300), "fb7e37e43c8800759c", true), + (Header::Float(5.960464477539063e-8), "f90001", true), + (Header::Float(0.00006103515625), "f90400", true), + (Header::Float(-4.0), "f9c400", true), + (Header::Float(-4.1), "fbc010666666666666", true), + (Header::Float(INFINITY), "f97c00", true), + (Header::Float(NAN), "f97e00", true), + (Header::Float(-INFINITY), "f9fc00", true), + (Header::Float(INFINITY), "fa7f800000", false), + (Header::Float(NAN), "fa7fc00000", false), + (Header::Float(-INFINITY), "faff800000", false), + (Header::Float(INFINITY), "fb7ff0000000000000", false), + (Header::Float(NAN), 
"fb7ff8000000000000", false), + (Header::Float(-INFINITY), "fbfff0000000000000", false), + (Header::Simple(simple::FALSE), "f4", true), + (Header::Simple(simple::TRUE), "f5", true), + (Header::Simple(simple::NULL), "f6", true), + (Header::Simple(simple::UNDEFINED), "f7", true), + (Header::Simple(16), "f0", true), + (Header::Simple(24), "f818", true), + (Header::Simple(255), "f8ff", true), + (Header::Tag(0), "c0", true), + (Header::Tag(1), "c1", true), + (Header::Tag(23), "d7", true), + (Header::Tag(24), "d818", true), + (Header::Tag(32), "d820", true), + (Header::Bytes(Some(0)), "40", true), + (Header::Bytes(Some(4)), "44", true), + (Header::Text(Some(0)), "60", true), + (Header::Text(Some(4)), "64", true), + ]; + + for (header, bytes, encode) in data.iter().cloned() { + let bytes = hex::decode(bytes).unwrap(); + + let mut decoder = Decoder::from(&bytes[..]); + match (header, decoder.pull().unwrap()) { + // NaN equality... + (Header::Float(l), Header::Float(r)) if l.is_nan() && r.is_nan() => (), + + // Everything else... + (l, r) => assert_eq!(l, r), + } + + if encode { + let mut buffer = [0u8; 1024]; + let mut writer = &mut buffer[..]; + let mut encoder = Encoder::from(&mut writer); + encoder.push(header).unwrap(); + + let len = writer.len(); + assert_eq!(&bytes[..], &buffer[..1024 - len]); + } + } + } + + #[test] + fn node() { + let data: &[(&str, &[Header])] = &[ + ("80", &[Header::Array(Some(0))]), + ( + "83010203", + &[ + Header::Array(Some(3)), + Header::Positive(1), + Header::Positive(2), + Header::Positive(3), + ], + ), + ( + "98190102030405060708090a0b0c0d0e0f101112131415161718181819", + &[ + Header::Array(Some(25)), + Header::Positive(1), + Header::Positive(2), + Header::Positive(3), + Header::Positive(4), + Header::Positive(5), + Header::Positive(6), + Header::Positive(7), + Header::Positive(8), + Header::Positive(9), + Header::Positive(10), + Header::Positive(11), + Header::Positive(12), + Header::Positive(13), + Header::Positive(14), + Header::Positive(15), + Header::Positive(16), + Header::Positive(17), + Header::Positive(18), + Header::Positive(19), + Header::Positive(20), + Header::Positive(21), + Header::Positive(22), + Header::Positive(23), + Header::Positive(24), + Header::Positive(25), + ], + ), + ("a0", &[Header::Map(Some(0))]), + ( + "a201020304", + &[ + Header::Map(Some(2)), + Header::Positive(1), + Header::Positive(2), + Header::Positive(3), + Header::Positive(4), + ], + ), + ("9fff", &[Header::Array(None), Header::Break]), + ( + "9f018202039f0405ffff", + &[ + Header::Array(None), + Header::Positive(1), + Header::Array(Some(2)), + Header::Positive(2), + Header::Positive(3), + Header::Array(None), + Header::Positive(4), + Header::Positive(5), + Header::Break, + Header::Break, + ], + ), + ( + "9f01820203820405ff", + &[ + Header::Array(None), + Header::Positive(1), + Header::Array(Some(2)), + Header::Positive(2), + Header::Positive(3), + Header::Array(Some(2)), + Header::Positive(4), + Header::Positive(5), + Header::Break, + ], + ), + ( + "83018202039f0405ff", + &[ + Header::Array(Some(3)), + Header::Positive(1), + Header::Array(Some(2)), + Header::Positive(2), + Header::Positive(3), + Header::Array(None), + Header::Positive(4), + Header::Positive(5), + Header::Break, + ], + ), + ( + "83019f0203ff820405", + &[ + Header::Array(Some(3)), + Header::Positive(1), + Header::Array(None), + Header::Positive(2), + Header::Positive(3), + Header::Break, + Header::Array(Some(2)), + Header::Positive(4), + Header::Positive(5), + ], + ), + ( + 
"9f0102030405060708090a0b0c0d0e0f101112131415161718181819ff", + &[ + Header::Array(None), + Header::Positive(1), + Header::Positive(2), + Header::Positive(3), + Header::Positive(4), + Header::Positive(5), + Header::Positive(6), + Header::Positive(7), + Header::Positive(8), + Header::Positive(9), + Header::Positive(10), + Header::Positive(11), + Header::Positive(12), + Header::Positive(13), + Header::Positive(14), + Header::Positive(15), + Header::Positive(16), + Header::Positive(17), + Header::Positive(18), + Header::Positive(19), + Header::Positive(20), + Header::Positive(21), + Header::Positive(22), + Header::Positive(23), + Header::Positive(24), + Header::Positive(25), + Header::Break, + ], + ), + ]; + + for (bytes, headers) in data { + let bytes = hex::decode(bytes).unwrap(); + + // Test decoding + let mut decoder = Decoder::from(&bytes[..]); + for header in headers.iter().cloned() { + assert_eq!(header, decoder.pull().unwrap()); + } + + // Test encoding + let mut buffer = [0u8; 1024]; + let mut writer = &mut buffer[..]; + let mut encoder = Encoder::from(&mut writer); + + for header in headers.iter().cloned() { + encoder.push(header).unwrap(); + } + + let len = writer.len(); + assert_eq!(&bytes[..], &buffer[..1024 - len]); + } + } +} diff --git a/vendor/ciborium-ll/src/seg.rs b/vendor/ciborium-ll/src/seg.rs new file mode 100644 index 00000000..95eaac9b --- /dev/null +++ b/vendor/ciborium-ll/src/seg.rs @@ -0,0 +1,216 @@ +use super::*; + +use ciborium_io::Read; + +use core::marker::PhantomData; + +/// A parser for incoming segments +pub trait Parser: Default { + /// The type of item that is parsed + type Item: ?Sized; + + /// The parsing error that may occur + type Error; + + /// The main parsing function + /// + /// This function processes the incoming bytes and returns the item. + /// + /// One important detail that **MUST NOT** be overlooked is that the + /// parser may save data from a previous parsing attempt. The number of + /// bytes saved is indicated by the `Parser::saved()` function. The saved + /// bytes will be copied into the beginning of the `bytes` array before + /// processing. Therefore, two requirements should be met. + /// + /// First, the incoming byte slice should be larger than the saved bytes. + /// + /// Second, the incoming byte slice should contain new bytes only after + /// the saved byte prefix. + /// + /// If both criteria are met, this allows the parser to prepend its saved + /// bytes without any additional allocation. + fn parse<'a>(&mut self, bytes: &'a mut [u8]) -> Result<&'a Self::Item, Self::Error>; + + /// Indicates the number of saved bytes in the parser + fn saved(&self) -> usize { + 0 + } +} + +/// A bytes parser +/// +/// No actual processing is performed and the input bytes are directly +/// returned. This implies that this parser never saves any bytes internally. +#[derive(Default)] +pub struct Bytes(()); + +impl Parser for Bytes { + type Item = [u8]; + type Error = core::convert::Infallible; + + fn parse<'a>(&mut self, bytes: &'a mut [u8]) -> Result<&'a [u8], Self::Error> { + Ok(bytes) + } +} + +/// A text parser +/// +/// This parser converts the input bytes to a `str`. This parser preserves +/// trailing invalid UTF-8 sequences in the case that chunking fell in the +/// middle of a valid UTF-8 character. 
+#[derive(Default)] +pub struct Text { + stored: usize, + buffer: [u8; 3], +} + +impl Parser for Text { + type Item = str; + type Error = core::str::Utf8Error; + + fn parse<'a>(&mut self, bytes: &'a mut [u8]) -> Result<&'a str, Self::Error> { + // If we cannot advance, return nothing. + if bytes.len() <= self.stored { + return Ok(""); + } + + // Copy previously invalid data into place. + bytes[..self.stored].clone_from_slice(&self.buffer[..self.stored]); + + Ok(match core::str::from_utf8(bytes) { + Ok(s) => { + self.stored = 0; + s + } + Err(e) => { + let valid_len = e.valid_up_to(); + let invalid_len = bytes.len() - valid_len; + + // If the size of the invalid UTF-8 is large enough to hold + // all valid UTF-8 characters, we have a syntax error. + if invalid_len > self.buffer.len() { + return Err(e); + } + + // Otherwise, store the invalid bytes for the next read cycle. + self.buffer[..invalid_len].clone_from_slice(&bytes[valid_len..]); + self.stored = invalid_len; + + // Decode the valid part of the string. + core::str::from_utf8(&bytes[..valid_len]).unwrap() + } + }) + } + + fn saved(&self) -> usize { + self.stored + } +} + +/// A CBOR segment +/// +/// This type represents a single bytes or text segment on the wire. It can be +/// read out in parsed chunks based on the size of the input scratch buffer. +pub struct Segment<'r, R: Read, P: Parser> { + reader: &'r mut Decoder<R>, + unread: usize, + offset: usize, + parser: P, +} + +impl<'r, R: Read, P: Parser> Segment<'r, R, P> { + /// Gets the number of unprocessed bytes + #[inline] + pub fn left(&self) -> usize { + self.unread + self.parser.saved() + } + + /// Gets the next parsed chunk within the segment + /// + /// Returns `Ok(None)` when all chunks have been read. + #[inline] + pub fn pull<'a>( + &mut self, + buffer: &'a mut [u8], + ) -> Result<Option<&'a P::Item>, Error<R::Error>> { + use core::cmp::min; + + let prev = self.parser.saved(); + match self.unread { + 0 if prev == 0 => return Ok(None), + 0 => return Err(Error::Syntax(self.offset)), + _ => (), + } + + // Determine how many bytes to read. + let size = min(buffer.len(), prev + self.unread); + let full = &mut buffer[..size]; + let next = &mut full[min(size, prev)..]; + + // Read additional bytes. + self.reader.read_exact(next)?; + self.unread -= next.len(); + + self.parser + .parse(full) + .or(Err(Error::Syntax(self.offset))) + .map(Some) + } +} + +/// A sequence of CBOR segments +/// +/// CBOR allows for bytes or text items to be segmented. This type represents +/// the state of that segmented input stream. +pub struct Segments<'r, R: Read, P: Parser> { + reader: &'r mut Decoder<R>, + finish: bool, + nested: usize, + parser: PhantomData<P>, + unwrap: fn(Header) -> Result<Option<usize>, ()>, +} + +impl<'r, R: Read, P: Parser> Segments<'r, R, P> { + #[inline] + pub(crate) fn new( + decoder: &'r mut Decoder<R>, + unwrap: fn(Header) -> Result<Option<usize>, ()>, + ) -> Self { + Self { + reader: decoder, + finish: false, + nested: 0, + parser: PhantomData, + unwrap, + } + } + + /// Gets the next segment in the stream + /// + /// Returns `Ok(None)` at the conclusion of the stream. + #[inline] + pub fn pull(&mut self) -> Result<Option<Segment<R, P>>, Error<R::Error>> { + while !self.finish { + let offset = self.reader.offset(); + match self.reader.pull()? { + Header::Break if self.nested == 1 => return Ok(None), + Header::Break if self.nested > 1 => self.nested -= 1, + header => match (self.unwrap)(header) { + Err(..) 
=> return Err(Error::Syntax(offset)), + Ok(None) => self.nested += 1, + Ok(Some(len)) => { + self.finish = self.nested == 0; + return Ok(Some(Segment { + reader: self.reader, + unread: len, + offset, + parser: P::default(), + })); + } + }, + } + } + + Ok(None) + } +} diff --git a/vendor/ciborium/.cargo-checksum.json b/vendor/ciborium/.cargo-checksum.json new file mode 100644 index 00000000..bdbeaed9 --- /dev/null +++ b/vendor/ciborium/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{".cargo_vcs_info.json":"3e929a9338badddb117608b45c7c123fb2cc3cff61703020d01761f61166196c","Cargo.toml":"b7511734761d1d08e1ab4f7db03f3856a7ec513b018e8c4184157a30869a0520","Cargo.toml.orig":"ff1b4edbeb1487d002e869217c5807a96a04718e268e8672f16ab3b51de04e85","LICENSE":"c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4","README.md":"3ed22336e9c4f8c537588fda015009ad31259876fbfa4a427d10563cc82e0058","src/de/error.rs":"85f7330740f13ff77e33436c473ba9ddf13dba1265bcf71fffae21aeec82fb58","src/de/mod.rs":"d42cf7486385401290cdd52b3fcccd3287837c45071c93b51ddbbd34b9a79a2b","src/lib.rs":"fa12759ba0c428fd46e3395a78406a4a8b3ccb3b5b039ef7ca7d8fbd077a2a55","src/ser/error.rs":"ba6f98ced22e1778c2ae4b422814fd4bf56dda19c6e24684b95ec37748170df2","src/ser/mod.rs":"bc30087a231f962664d9a48f4a3446ff936ab8dc20fbc9e95ec1dc19622109a3","src/tag.rs":"9f64d74512cbbadce2abeff5c08c6dde7e9f4312da1cc028683503559e6a186d","src/value/canonical.rs":"c7b61ff248d4a3ce899839b7dbd721bd0f9842ff08338611bbfe215f68167499","src/value/de.rs":"639aab116125be46867ed1bbd42129f94823b1ab28ed8fb0fa5acda3fbda4dd1","src/value/error.rs":"e94cdb78dfa5168719e6f698103f6a47c7501bb3d27848e83cccec70e871b1b0","src/value/integer.rs":"da0176e415a31841f6c6f8f9e2c8e9a7841690cfd7680d6fb753c25f7747bd04","src/value/mod.rs":"3e02f8e5c29d43404339150d2f8e898c7d6430d4b3025bb172e8bd6498cc92ca","src/value/ser.rs":"07724688b7d658d0eb5885f701d9632e2c7426e624e8e65c98da1112e489365e","tests/canonical.rs":"f15c3c7465941eba4f61862ab794bee896f1b8c6420e13ce48be1ccbfbc7a1c5","tests/codec.rs":"978d9aacbd1fbade5917dbd6b6a179437d937645ab8871830c84153a3262b5be","tests/error.rs":"d84e9dd66dfab485db6ddceea4b6850e9714aee7832e588ebbc5555e424674f8","tests/fuzz.rs":"fca0f7c875957bfff4a3cfa25188e70eba536dd0071d12aa2478511c0aeafc85","tests/macro.rs":"227fb0fbf2c9e832ebfc87e5f110135cd00eb136fbed4a973fd786eb9e5536d6","tests/no_std.rs":"c302efd394c0296b692e1ca4df2aaa3944632f44c0087152d463a2ce8a49f2bc","tests/recursion.rs":"f352a9700853791e5a01ae6a689a6c93e48d83b3de5f76488a5e16d3bafaebaa","tests/tag.rs":"4eb03d53bd5ee3d7dd10f4d38ae071d47fc83090a9b1cd8425cd8cfa89dbc0a4"},"package":"42e69ffd6f0917f5c029256a24d0161db17cea3997d185db0d35926308770f0e"} \ No newline at end of file diff --git a/vendor/ciborium/.cargo_vcs_info.json b/vendor/ciborium/.cargo_vcs_info.json new file mode 100644 index 00000000..f29a2d0d --- /dev/null +++ b/vendor/ciborium/.cargo_vcs_info.json @@ -0,0 +1,6 @@ +{ + "git": { + "sha1": "fdf3ec5c53256000f8e12f06c361ca18026a8d99" + }, + "path_in_vcs": "ciborium" +} \ No newline at end of file diff --git a/vendor/ciborium/Cargo.toml b/vendor/ciborium/Cargo.toml new file mode 100644 index 00000000..30ba9689 --- /dev/null +++ b/vendor/ciborium/Cargo.toml @@ -0,0 +1,64 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g., crates.io) dependencies. 
+# +# If you are reading this file be aware that the original Cargo.toml +# will likely look very different (and much more reasonable). +# See Cargo.toml.orig for the original contents. + +[package] +edition = "2021" +rust-version = "1.58" +name = "ciborium" +version = "0.2.2" +authors = ["Nathaniel McCallum <npmccallum@profian.com>"] +description = "serde implementation of CBOR using ciborium-basic" +homepage = "https://github.com/enarx/ciborium" +readme = "README.md" +keywords = ["cbor", "serde"] +categories = ["data-structures", "embedded", "encoding", "no-std", "parsing"] +license = "Apache-2.0" +repository = "https://github.com/enarx/ciborium" +[package.metadata.docs.rs] +all-features = true +[dependencies.ciborium-io] +version = "0.2.2" +features = ["alloc"] + +[dependencies.ciborium-ll] +version = "0.2.2" + +[dependencies.serde] +version = "1.0.100" +features = ["alloc", "derive"] +default-features = false +[dev-dependencies.hex] +version = "0.4" + +[dev-dependencies.rand] +version = "0.8" + +[dev-dependencies.rstest] +version = "0.11" + +[dev-dependencies.serde_bytes] +version = "0.11" + +[features] +default = ["std"] +std = ["ciborium-io/std", "serde/std"] +[badges.github] +repository = "enarx/ciborium" +workflow = "test" + +[badges.is-it-maintained-issue-resolution] +repository = "enarx/ciborium" + +[badges.is-it-maintained-open-issues] +repository = "enarx/ciborium" + +[badges.maintenance] +status = "actively-developed" diff --git a/vendor/ciborium/Cargo.toml.orig b/vendor/ciborium/Cargo.toml.orig new file mode 100644 index 00000000..f13ee9f9 --- /dev/null +++ b/vendor/ciborium/Cargo.toml.orig @@ -0,0 +1,39 @@ +[package] +name = "ciborium" +version = "0.2.2" +authors = ["Nathaniel McCallum <npmccallum@profian.com>"] +license = "Apache-2.0" +edition = "2021" +rust-version = "1.58" +homepage = "https://github.com/enarx/ciborium" +repository = "https://github.com/enarx/ciborium" +description = "serde implementation of CBOR using ciborium-basic" +readme = "README.md" +keywords = ["cbor", "serde"] +categories = ["data-structures", "embedded", "encoding", "no-std", "parsing"] + +[badges] +# See https://doc.rust-lang.org/cargo/reference/manifest.html#the-badges-section +github = { repository = "enarx/ciborium", workflow = "test" } +#github = { repository = "enarx/ciborium", workflow = "lint" } +maintenance = { status = "actively-developed" } +is-it-maintained-issue-resolution = { repository = "enarx/ciborium" } +is-it-maintained-open-issues = { repository = "enarx/ciborium" } + +[dependencies] +ciborium-ll = { path = "../ciborium-ll", version = "0.2.2" } +ciborium-io = { path = "../ciborium-io", version = "0.2.2", features = ["alloc"] } +serde = { version = "1.0.100", default-features = false, features = ["alloc", "derive"] } + +[dev-dependencies] +serde_bytes = "0.11" +rstest = "0.11" +rand = "0.8" +hex = "0.4" + +[features] +default = ["std"] +std = ["ciborium-io/std", "serde/std"] + +[package.metadata.docs.rs] +all-features = true diff --git a/vendor/ciborium/LICENSE b/vendor/ciborium/LICENSE new file mode 100644 index 00000000..261eeb9e --- /dev/null +++ b/vendor/ciborium/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/vendor/ciborium/README.md b/vendor/ciborium/README.md new file mode 100644 index 00000000..b60c35e5 --- /dev/null +++ b/vendor/ciborium/README.md @@ -0,0 +1,92 @@ +[![Workflow Status](https://github.com/enarx/ciborium/workflows/test/badge.svg)](https://github.com/enarx/ciborium/actions?query=workflow%3A%22test%22) +[![Average time to resolve an issue](https://isitmaintained.com/badge/resolution/enarx/ciborium.svg)](https://isitmaintained.com/project/enarx/ciborium "Average time to resolve an issue") +[![Percentage of issues still open](https://isitmaintained.com/badge/open/enarx/ciborium.svg)](https://isitmaintained.com/project/enarx/ciborium "Percentage of issues still open") +![Maintenance](https://img.shields.io/badge/maintenance-activly--developed-brightgreen.svg) + +# ciborium + +Welcome to Ciborium! + +Ciborium contains CBOR serialization and deserialization implementations for serde. + +## Quick Start + +You're probably looking for [`from_reader()`](crate::de::from_reader) +and [`into_writer()`](crate::ser::into_writer), which are +the main functions. Note that byte slices are also readers and writers and can be +passed to these functions just as streams can. + +For dynamic CBOR value creation/inspection, see [`Value`](crate::value::Value). + +## Design Decisions + +### Always Serialize Numeric Values to the Smallest Size + +Although the CBOR specification has differing numeric widths, this is only +a form of compression on the wire and is not intended to directly +represent an "integer width" or "float width." Therefore, ciborium always +serializes numbers to the smallest possible lossless encoding. For example, +we serialize `1u128` as a single byte (`01`). Likewise, we will also freely +decode that single byte into a `u128`. + +While there is some minor performance cost for this, there are several +reasons for this choice. First, the specification seems to imply it by +using a separate bit for the sign. Second, the specification requires +that implementations handle leading zeroes; a liberal reading of which +implies a requirement for lossless coercion. Third, dynamic languages like +Python have no notion of "integer width," making this is a practical +choice for maximizing wire compatibility with those languages. + +This coercion is **always** lossless. For floats, this implies that we +only coerce to a smaller size if coercion back to the original size has +the same raw bits as the original. + +### Compatibility with Other Implementations + +The ciborium project follows the [Robustness Principle](https://en.wikipedia.org/wiki/Robustness_principle). +Therefore, we aim to be liberal in what we accept. This implies that we +aim to be wire-compatible with other implementations in decoding, but +not necessarily encoding. + +One notable example of this is that `serde_cbor` uses fixed-width encoding +of numbers and doesn't losslessly coerce. This implies that `ciborium` will +successfully decode `serde_cbor` encodings, but the opposite may not be the +case. + +### Representing Map as a Sequence of Values + +Other serde parsers have generally taken the route of using `BTreeMap` or +`HashMap` to implement their encoding's underlying `Map` type. This crate +chooses to represent the `Map` type using `Vec<(Value, Value)>` instead. + +This decision was made because this type preserves the order of the pairs +on the wire. Further, for those that need the properties of `BTreeMap` or +`HashMap`, you can simply `collect()` the values into the respective type. 
+This provides maximum flexibility. + +### Low-level Library + +The ciborium crate has the beginnings of a low-level library in the +(private) `basic` module. We may extend this to be more robust and expose +it for application consumption once we have it in a good state. If you'd +like to collaborate with us on that, please contact us. Alternatively, +we might fork this code into a separate crate with no serde dependency. + +### Internal Types + +The ciborium crate contains a number of internal types that implement +useful serde traits. While these are not currently exposed, we might +choose to expose them in the future if there is demand. Generally, this +crate takes a conservative approach to exposing APIs to avoid breakage. + +### Packed Encoding? + +Packed encoding uses numerical offsets to represent structure field names +and enum variant names. This can save significant space on the wire. + +While the authors of this crate like packed encoding, it should generally +be avoided because it can be fragile as it exposes invariants of your Rust +code to remote actors. We might consider adding this in the future. If you +are interested in this, please contact us. + +License: Apache-2.0 diff --git a/vendor/ciborium/src/de/error.rs b/vendor/ciborium/src/de/error.rs new file mode 100644 index 00000000..996aab18 --- /dev/null +++ b/vendor/ciborium/src/de/error.rs @@ -0,0 +1,73 @@ +// SPDX-License-Identifier: Apache-2.0 + +use alloc::string::{String, ToString}; +use core::fmt::{Debug, Display, Formatter, Result}; + +use serde::de::{Error as DeError, StdError}; + +/// An error occurred during deserialization +#[derive(Debug)] +pub enum Error<T> { + /// An error occurred while reading bytes + /// + /// Contains the underlying error returned while reading. + Io(T), + + /// An error occurred while parsing bytes + /// + /// Contains the offset into the stream where the syntax error occurred. + Syntax(usize), + + /// An error occurred while processing a parsed value + /// + /// Contains a description of the error that occurred and (optionally) + /// the offset into the stream indicating the start of the item being + /// processed when the error occurred. + Semantic(Option<usize>, String), + + /// The input caused serde to recurse too much + /// + /// This error prevents a stack overflow. + RecursionLimitExceeded, +} + +impl<T> Error<T> { + /// A helper method for composing a semantic error + #[inline] + pub fn semantic(offset: impl Into<Option<usize>>, msg: impl Into<String>) -> Self { + Self::Semantic(offset.into(), msg.into()) + } +} + +impl<T> From<T> for Error<T> { + #[inline] + fn from(value: T) -> Self { + Error::Io(value) + } +} + +impl<T> From<ciborium_ll::Error<T>> for Error<T> { + #[inline] + fn from(value: ciborium_ll::Error<T>) -> Self { + match value { + ciborium_ll::Error::Io(x) => Self::Io(x), + ciborium_ll::Error::Syntax(x) => Self::Syntax(x), + } + } +} + +impl<T: Debug> Display for Error<T> { + #[inline] + fn fmt(&self, f: &mut Formatter<'_>) -> Result { + write!(f, "{:?}", self) + } +} + +impl<T: Debug> StdError for Error<T> {} + +impl<T: Debug> DeError for Error<T> { + #[inline] + fn custom<U: Display>(msg: U) -> Self { + Self::Semantic(None, msg.to_string()) + } +} diff --git a/vendor/ciborium/src/de/mod.rs b/vendor/ciborium/src/de/mod.rs new file mode 100644 index 00000000..1952a4b8 --- /dev/null +++ b/vendor/ciborium/src/de/mod.rs @@ -0,0 +1,875 @@ +// SPDX-License-Identifier: Apache-2.0 + +//! 
Serde deserialization support for CBOR + +mod error; + +pub use error::Error; + +use alloc::{string::String, vec::Vec}; + +use ciborium_io::Read; +use ciborium_ll::*; +use serde::{de, de::Deserializer as _, forward_to_deserialize_any}; + +trait Expected<E: de::Error> { + fn expected(self, kind: &'static str) -> E; +} + +impl<E: de::Error> Expected<E> for Header { + #[inline] + fn expected(self, kind: &'static str) -> E { + de::Error::invalid_type( + match self { + Header::Positive(x) => de::Unexpected::Unsigned(x), + Header::Negative(x) => de::Unexpected::Signed(x as i64 ^ !0), + Header::Bytes(..) => de::Unexpected::Other("bytes"), + Header::Text(..) => de::Unexpected::Other("string"), + + Header::Array(..) => de::Unexpected::Seq, + Header::Map(..) => de::Unexpected::Map, + + Header::Tag(..) => de::Unexpected::Other("tag"), + + Header::Simple(simple::FALSE) => de::Unexpected::Bool(false), + Header::Simple(simple::TRUE) => de::Unexpected::Bool(true), + Header::Simple(simple::NULL) => de::Unexpected::Other("null"), + Header::Simple(simple::UNDEFINED) => de::Unexpected::Other("undefined"), + Header::Simple(..) => de::Unexpected::Other("simple"), + + Header::Float(x) => de::Unexpected::Float(x), + Header::Break => de::Unexpected::Other("break"), + }, + &kind, + ) + } +} + +struct Deserializer<'b, R: Read> { + decoder: Decoder<R>, + scratch: &'b mut [u8], + recurse: usize, +} + +impl<'a, R: Read> Deserializer<'a, R> +where + R::Error: core::fmt::Debug, +{ + #[inline] + fn recurse<V, F: FnOnce(&mut Self) -> Result<V, Error<R::Error>>>( + &mut self, + func: F, + ) -> Result<V, Error<R::Error>> { + if self.recurse == 0 { + return Err(Error::RecursionLimitExceeded); + } + + self.recurse -= 1; + let result = func(self); + self.recurse += 1; + result + } + + #[inline] + fn integer(&mut self, mut header: Option<Header>) -> Result<(bool, u128), Error<R::Error>> { + loop { + let header = match header.take() { + Some(h) => h, + None => self.decoder.pull()?, + }; + + let neg = match header { + Header::Positive(x) => return Ok((false, x.into())), + Header::Negative(x) => return Ok((true, x.into())), + Header::Tag(tag::BIGPOS) => false, + Header::Tag(tag::BIGNEG) => true, + Header::Tag(..) => continue, + header => return Err(header.expected("integer")), + }; + + let mut buffer = [0u8; 16]; + let mut value = [0u8; 16]; + let mut index = 0usize; + + return match self.decoder.pull()? { + Header::Bytes(len) => { + let mut segments = self.decoder.bytes(len); + while let Some(mut segment) = segments.pull()? { + while let Some(chunk) = segment.pull(&mut buffer)? { + for b in chunk { + match index { + 16 => return Err(de::Error::custom("bigint too large")), + 0 if *b == 0 => continue, // Skip leading zeros + _ => value[index] = *b, + } + + index += 1; + } + } + } + + value[..index].reverse(); + Ok((neg, u128::from_le_bytes(value))) + } + + h => Err(h.expected("bytes")), + }; + } + } +} + +impl<'de, 'a, 'b, R: Read> de::Deserializer<'de> for &'a mut Deserializer<'b, R> +where + R::Error: core::fmt::Debug, +{ + type Error = Error<R::Error>; + + #[inline] + fn deserialize_any<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + let header = self.decoder.pull()?; + self.decoder.push(header); + + match header { + Header::Positive(..) => self.deserialize_u64(visitor), + Header::Negative(x) => match i64::try_from(x) { + Ok(..) => self.deserialize_i64(visitor), + Err(..) 
=> self.deserialize_i128(visitor), + }, + + Header::Bytes(len) => match len { + Some(len) if len <= self.scratch.len() => self.deserialize_bytes(visitor), + _ => self.deserialize_byte_buf(visitor), + }, + + Header::Text(len) => match len { + Some(len) if len <= self.scratch.len() => self.deserialize_str(visitor), + _ => self.deserialize_string(visitor), + }, + + Header::Array(..) => self.deserialize_seq(visitor), + Header::Map(..) => self.deserialize_map(visitor), + + Header::Tag(tag) => { + let _: Header = self.decoder.pull()?; + + // Peek at the next item. + let header = self.decoder.pull()?; + self.decoder.push(header); + + // If it is bytes, capture the length. + let len = match header { + Header::Bytes(x) => x, + _ => None, + }; + + match (tag, len) { + (tag::BIGPOS, Some(len)) | (tag::BIGNEG, Some(len)) if len <= 16 => { + let result = match self.integer(Some(Header::Tag(tag)))? { + (false, raw) => return visitor.visit_u128(raw), + (true, raw) => i128::try_from(raw).map(|x| x ^ !0), + }; + + match result { + Ok(x) => visitor.visit_i128(x), + Err(..) => Err(de::Error::custom("integer too large")), + } + } + + _ => self.recurse(|me| { + let access = crate::tag::TagAccess::new(me, Some(tag)); + visitor.visit_enum(access) + }), + } + } + + Header::Float(..) => self.deserialize_f64(visitor), + + Header::Simple(simple::FALSE) => self.deserialize_bool(visitor), + Header::Simple(simple::TRUE) => self.deserialize_bool(visitor), + Header::Simple(simple::NULL) => self.deserialize_option(visitor), + Header::Simple(simple::UNDEFINED) => self.deserialize_option(visitor), + h @ Header::Simple(..) => Err(h.expected("known simple value")), + + h @ Header::Break => Err(h.expected("non-break")), + } + } + + #[inline] + fn deserialize_bool<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + loop { + let offset = self.decoder.offset(); + + return match self.decoder.pull()? { + Header::Tag(..) => continue, + Header::Simple(simple::FALSE) => visitor.visit_bool(false), + Header::Simple(simple::TRUE) => visitor.visit_bool(true), + _ => Err(Error::semantic(offset, "expected bool")), + }; + } + } + + #[inline] + fn deserialize_f32<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + self.deserialize_f64(visitor) + } + + #[inline] + fn deserialize_f64<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + loop { + return match self.decoder.pull()? { + Header::Tag(..) => continue, + Header::Float(x) => visitor.visit_f64(x), + h => Err(h.expected("float")), + }; + } + } + + fn deserialize_i8<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + self.deserialize_i64(visitor) + } + + fn deserialize_i16<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + self.deserialize_i64(visitor) + } + + fn deserialize_i32<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + self.deserialize_i64(visitor) + } + + fn deserialize_i64<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + let result = match self.integer(None)? { + (false, raw) => i64::try_from(raw), + (true, raw) => i64::try_from(raw).map(|x| x ^ !0), + }; + + match result { + Ok(x) => visitor.visit_i64(x), + Err(..) => Err(de::Error::custom("integer too large")), + } + } + + fn deserialize_i128<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + let result = match self.integer(None)? 
{ + (false, raw) => i128::try_from(raw), + (true, raw) => i128::try_from(raw).map(|x| x ^ !0), + }; + + match result { + Ok(x) => visitor.visit_i128(x), + Err(..) => Err(de::Error::custom("integer too large")), + } + } + + fn deserialize_u8<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + self.deserialize_u64(visitor) + } + + fn deserialize_u16<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + self.deserialize_u64(visitor) + } + + fn deserialize_u32<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + self.deserialize_u64(visitor) + } + + fn deserialize_u64<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + let result = match self.integer(None)? { + (false, raw) => u64::try_from(raw), + (true, ..) => return Err(de::Error::custom("unexpected negative integer")), + }; + + match result { + Ok(x) => visitor.visit_u64(x), + Err(..) => Err(de::Error::custom("integer too large")), + } + } + + fn deserialize_u128<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + match self.integer(None)? { + (false, raw) => visitor.visit_u128(raw), + (true, ..) => Err(de::Error::custom("unexpected negative integer")), + } + } + + fn deserialize_char<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + loop { + let offset = self.decoder.offset(); + let header = self.decoder.pull()?; + + return match header { + Header::Tag(..) => continue, + + Header::Text(Some(len)) if len <= 4 => { + let mut buf = [0u8; 4]; + self.decoder.read_exact(&mut buf[..len])?; + + match core::str::from_utf8(&buf[..len]) { + Ok(s) => match s.chars().count() { + 1 => visitor.visit_char(s.chars().next().unwrap()), + _ => Err(header.expected("char")), + }, + Err(..) => Err(Error::Syntax(offset)), + } + } + + _ => Err(header.expected("char")), + }; + } + } + + fn deserialize_str<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + loop { + let offset = self.decoder.offset(); + + return match self.decoder.pull()? { + Header::Tag(..) => continue, + + Header::Text(Some(len)) if len <= self.scratch.len() => { + self.decoder.read_exact(&mut self.scratch[..len])?; + + match core::str::from_utf8(&self.scratch[..len]) { + Ok(s) => visitor.visit_str(s), + Err(..) => Err(Error::Syntax(offset)), + } + } + + header => Err(header.expected("str")), + }; + } + } + + fn deserialize_string<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + loop { + return match self.decoder.pull()? { + Header::Tag(..) => continue, + + Header::Text(len) => { + let mut buffer = String::new(); + + let mut segments = self.decoder.text(len); + while let Some(mut segment) = segments.pull()? { + while let Some(chunk) = segment.pull(self.scratch)? { + buffer.push_str(chunk); + } + } + + visitor.visit_string(buffer) + } + + header => Err(header.expected("string")), + }; + } + } + + fn deserialize_bytes<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + loop { + return match self.decoder.pull()? { + Header::Tag(..) 
=> continue, + + Header::Bytes(Some(len)) if len <= self.scratch.len() => { + self.decoder.read_exact(&mut self.scratch[..len])?; + visitor.visit_bytes(&self.scratch[..len]) + } + + Header::Array(len) => self.recurse(|me| { + let access = Access(me, len); + visitor.visit_seq(access) + }), + + header => Err(header.expected("bytes")), + }; + } + } + + fn deserialize_byte_buf<V: de::Visitor<'de>>( + self, + visitor: V, + ) -> Result<V::Value, Self::Error> { + loop { + return match self.decoder.pull()? { + Header::Tag(..) => continue, + + Header::Bytes(len) => { + let mut buffer = Vec::new(); + + let mut segments = self.decoder.bytes(len); + while let Some(mut segment) = segments.pull()? { + while let Some(chunk) = segment.pull(self.scratch)? { + buffer.extend_from_slice(chunk); + } + } + + visitor.visit_byte_buf(buffer) + } + + Header::Array(len) => self.recurse(|me| { + let access = Access(me, len); + visitor.visit_seq(access) + }), + + header => Err(header.expected("byte buffer")), + }; + } + } + + fn deserialize_seq<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + loop { + return match self.decoder.pull()? { + Header::Tag(..) => continue, + + Header::Array(len) => self.recurse(|me| { + let access = Access(me, len); + visitor.visit_seq(access) + }), + + Header::Bytes(len) => { + let mut buffer = Vec::new(); + + let mut segments = self.decoder.bytes(len); + while let Some(mut segment) = segments.pull()? { + while let Some(chunk) = segment.pull(self.scratch)? { + buffer.extend_from_slice(chunk); + } + } + + visitor.visit_seq(BytesAccess::<R>(0, buffer, core::marker::PhantomData)) + } + + header => Err(header.expected("array")), + }; + } + } + + fn deserialize_map<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + loop { + return match self.decoder.pull()? { + Header::Tag(..) => continue, + + Header::Map(len) => self.recurse(|me| { + let access = Access(me, len); + visitor.visit_map(access) + }), + + header => Err(header.expected("map")), + }; + } + } + + fn deserialize_struct<V: de::Visitor<'de>>( + self, + _name: &'static str, + _fields: &'static [&'static str], + visitor: V, + ) -> Result<V::Value, Self::Error> { + self.deserialize_map(visitor) + } + + fn deserialize_tuple<V: de::Visitor<'de>>( + self, + _len: usize, + visitor: V, + ) -> Result<V::Value, Self::Error> { + self.deserialize_seq(visitor) + } + + fn deserialize_tuple_struct<V: de::Visitor<'de>>( + self, + _name: &'static str, + _len: usize, + visitor: V, + ) -> Result<V::Value, Self::Error> { + self.deserialize_seq(visitor) + } + + fn deserialize_identifier<V: de::Visitor<'de>>( + self, + visitor: V, + ) -> Result<V::Value, Self::Error> { + loop { + let offset = self.decoder.offset(); + + return match self.decoder.pull()? { + Header::Tag(..) => continue, + + Header::Text(Some(len)) if len <= self.scratch.len() => { + self.decoder.read_exact(&mut self.scratch[..len])?; + + match core::str::from_utf8(&self.scratch[..len]) { + Ok(s) => visitor.visit_str(s), + Err(..) 
=> Err(Error::Syntax(offset)), + } + } + Header::Bytes(Some(len)) if len <= self.scratch.len() => { + self.decoder.read_exact(&mut self.scratch[..len])?; + visitor.visit_bytes(&self.scratch[..len]) + } + + header => Err(header.expected("str or bytes")), + }; + } + } + + fn deserialize_ignored_any<V: de::Visitor<'de>>( + self, + visitor: V, + ) -> Result<V::Value, Self::Error> { + self.deserialize_any(visitor) + } + + #[inline] + fn deserialize_option<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + match self.decoder.pull()? { + Header::Simple(simple::UNDEFINED) => visitor.visit_none(), + Header::Simple(simple::NULL) => visitor.visit_none(), + header => { + self.decoder.push(header); + visitor.visit_some(self) + } + } + } + + #[inline] + fn deserialize_unit<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + loop { + return match self.decoder.pull()? { + Header::Simple(simple::UNDEFINED) => visitor.visit_unit(), + Header::Simple(simple::NULL) => visitor.visit_unit(), + Header::Tag(..) => continue, + header => Err(header.expected("unit")), + }; + } + } + + #[inline] + fn deserialize_unit_struct<V: de::Visitor<'de>>( + self, + _name: &'static str, + visitor: V, + ) -> Result<V::Value, Self::Error> { + self.deserialize_unit(visitor) + } + + #[inline] + fn deserialize_newtype_struct<V: de::Visitor<'de>>( + self, + _name: &'static str, + visitor: V, + ) -> Result<V::Value, Self::Error> { + visitor.visit_newtype_struct(self) + } + + #[inline] + fn deserialize_enum<V: de::Visitor<'de>>( + self, + name: &'static str, + _variants: &'static [&'static str], + visitor: V, + ) -> Result<V::Value, Self::Error> { + if name == "@@TAG@@" { + let tag = match self.decoder.pull()? { + Header::Tag(x) => Some(x), + header => { + self.decoder.push(header); + None + } + }; + + return self.recurse(|me| { + let access = crate::tag::TagAccess::new(me, tag); + visitor.visit_enum(access) + }); + } + + loop { + match self.decoder.pull()? { + Header::Tag(..) => continue, + Header::Map(Some(1)) => (), + header @ Header::Text(..) => self.decoder.push(header), + header => return Err(header.expected("enum")), + } + + return self.recurse(|me| { + let access = Access(me, Some(0)); + visitor.visit_enum(access) + }); + } + } + + #[inline] + fn is_human_readable(&self) -> bool { + false + } +} + +struct Access<'a, 'b, R: Read>(&'a mut Deserializer<'b, R>, Option<usize>); + +impl<'de, 'a, 'b, R: Read> de::SeqAccess<'de> for Access<'a, 'b, R> +where + R::Error: core::fmt::Debug, +{ + type Error = Error<R::Error>; + + #[inline] + fn next_element_seed<U: de::DeserializeSeed<'de>>( + &mut self, + seed: U, + ) -> Result<Option<U::Value>, Self::Error> { + match self.1 { + Some(0) => return Ok(None), + Some(x) => self.1 = Some(x - 1), + None => match self.0.decoder.pull()? { + Header::Break => return Ok(None), + header => self.0.decoder.push(header), + }, + } + + seed.deserialize(&mut *self.0).map(Some) + } + + #[inline] + fn size_hint(&self) -> Option<usize> { + self.1 + } +} + +impl<'de, 'a, 'b, R: Read> de::MapAccess<'de> for Access<'a, 'b, R> +where + R::Error: core::fmt::Debug, +{ + type Error = Error<R::Error>; + + #[inline] + fn next_key_seed<K: de::DeserializeSeed<'de>>( + &mut self, + seed: K, + ) -> Result<Option<K::Value>, Self::Error> { + match self.1 { + Some(0) => return Ok(None), + Some(x) => self.1 = Some(x - 1), + None => match self.0.decoder.pull()? 
{ + Header::Break => return Ok(None), + header => self.0.decoder.push(header), + }, + } + + seed.deserialize(&mut *self.0).map(Some) + } + + #[inline] + fn next_value_seed<V: de::DeserializeSeed<'de>>( + &mut self, + seed: V, + ) -> Result<V::Value, Self::Error> { + seed.deserialize(&mut *self.0) + } + + #[inline] + fn size_hint(&self) -> Option<usize> { + self.1 + } +} + +impl<'de, 'a, 'b, R: Read> de::EnumAccess<'de> for Access<'a, 'b, R> +where + R::Error: core::fmt::Debug, +{ + type Error = Error<R::Error>; + type Variant = Self; + + #[inline] + fn variant_seed<V: de::DeserializeSeed<'de>>( + self, + seed: V, + ) -> Result<(V::Value, Self::Variant), Self::Error> { + let variant = seed.deserialize(&mut *self.0)?; + Ok((variant, self)) + } +} + +impl<'de, 'a, 'b, R: Read> de::VariantAccess<'de> for Access<'a, 'b, R> +where + R::Error: core::fmt::Debug, +{ + type Error = Error<R::Error>; + + #[inline] + fn unit_variant(self) -> Result<(), Self::Error> { + Ok(()) + } + + #[inline] + fn newtype_variant_seed<U: de::DeserializeSeed<'de>>( + self, + seed: U, + ) -> Result<U::Value, Self::Error> { + seed.deserialize(&mut *self.0) + } + + #[inline] + fn tuple_variant<V: de::Visitor<'de>>( + self, + _len: usize, + visitor: V, + ) -> Result<V::Value, Self::Error> { + self.0.deserialize_any(visitor) + } + + #[inline] + fn struct_variant<V: de::Visitor<'de>>( + self, + _fields: &'static [&'static str], + visitor: V, + ) -> Result<V::Value, Self::Error> { + self.0.deserialize_any(visitor) + } +} + +struct BytesAccess<R: Read>(usize, Vec<u8>, core::marker::PhantomData<R>); + +impl<'de, R: Read> de::SeqAccess<'de> for BytesAccess<R> +where + R::Error: core::fmt::Debug, +{ + type Error = Error<R::Error>; + + #[inline] + fn next_element_seed<U: de::DeserializeSeed<'de>>( + &mut self, + seed: U, + ) -> Result<Option<U::Value>, Self::Error> { + use de::IntoDeserializer; + + if self.0 < self.1.len() { + let byte = self.1[self.0]; + self.0 += 1; + seed.deserialize(byte.into_deserializer()).map(Some) + } else { + Ok(None) + } + } + + #[inline] + fn size_hint(&self) -> Option<usize> { + Some(self.1.len() - self.0) + } +} + +struct TagAccess<'a, 'b, R: Read>(&'a mut Deserializer<'b, R>, usize); + +impl<'de, 'a, 'b, R: Read> de::Deserializer<'de> for &mut TagAccess<'a, 'b, R> +where + R::Error: core::fmt::Debug, +{ + type Error = Error<R::Error>; + + #[inline] + fn deserialize_any<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + let offset = self.0.decoder.offset(); + + match self.0.decoder.pull()? { + Header::Tag(x) => visitor.visit_u64(x), + _ => Err(Error::semantic(offset, "expected tag")), + } + } + + forward_to_deserialize_any! 
{ + i8 i16 i32 i64 i128 + u8 u16 u32 u64 u128 + bool f32 f64 + char str string + bytes byte_buf + seq map + struct tuple tuple_struct + identifier ignored_any + option unit unit_struct newtype_struct enum + } +} + +impl<'de, 'a, 'b, R: Read> de::SeqAccess<'de> for TagAccess<'a, 'b, R> +where + R::Error: core::fmt::Debug, +{ + type Error = Error<R::Error>; + + #[inline] + fn next_element_seed<U: de::DeserializeSeed<'de>>( + &mut self, + seed: U, + ) -> Result<Option<U::Value>, Self::Error> { + self.1 += 1; + + match self.1 { + 1 => seed.deserialize(self).map(Some), + 2 => seed.deserialize(&mut *self.0).map(Some), + _ => Ok(None), + } + } + + #[inline] + fn size_hint(&self) -> Option<usize> { + Some(match self.1 { + 0 => 2, + 1 => 1, + _ => 0, + }) + } +} + +/// Deserializes as CBOR from a type with [`impl +/// ciborium_io::Read`](ciborium_io::Read) using a 4KB buffer on the stack. +/// +/// If you want to deserialize faster at the cost of more memory, consider using +/// [`from_reader_with_buffer`](from_reader_with_buffer) with a larger buffer, +/// for example 64KB. +#[inline] +pub fn from_reader<T: de::DeserializeOwned, R: Read>(reader: R) -> Result<T, Error<R::Error>> +where + R::Error: core::fmt::Debug, +{ + let mut scratch = [0; 4096]; + from_reader_with_buffer(reader, &mut scratch) +} + +/// Deserializes as CBOR from a type with [`impl +/// ciborium_io::Read`](ciborium_io::Read), using a caller-specific buffer as a +/// temporary scratch space. +#[inline] +pub fn from_reader_with_buffer<T: de::DeserializeOwned, R: Read>( + reader: R, + scratch_buffer: &mut [u8], +) -> Result<T, Error<R::Error>> +where + R::Error: core::fmt::Debug, +{ + let mut reader = Deserializer { + decoder: reader.into(), + scratch: scratch_buffer, + recurse: 256, + }; + + T::deserialize(&mut reader) +} + +/// Deserializes as CBOR from a type with [`impl ciborium_io::Read`](ciborium_io::Read), with +/// a specified maximum recursion limit. Inputs that are nested beyond the specified limit +/// will result in [`Error::RecursionLimitExceeded`] . +/// +/// Set a high recursion limit at your own risk (of stack exhaustion)! +#[inline] +pub fn from_reader_with_recursion_limit<T: de::DeserializeOwned, R: Read>( + reader: R, + recurse_limit: usize, +) -> Result<T, Error<R::Error>> +where + R::Error: core::fmt::Debug, +{ + let mut scratch = [0; 4096]; + + let mut reader = Deserializer { + decoder: reader.into(), + scratch: &mut scratch, + recurse: recurse_limit, + }; + + T::deserialize(&mut reader) +} diff --git a/vendor/ciborium/src/lib.rs b/vendor/ciborium/src/lib.rs new file mode 100644 index 00000000..f143943d --- /dev/null +++ b/vendor/ciborium/src/lib.rs @@ -0,0 +1,224 @@ +// SPDX-License-Identifier: Apache-2.0 + +//! Welcome to Ciborium! +//! +//! Ciborium contains CBOR serialization and deserialization implementations for serde. +//! +//! # Quick Start +//! +//! You're probably looking for [`from_reader()`](crate::de::from_reader) +//! and [`into_writer()`](crate::ser::into_writer), which are +//! the main functions. Note that byte slices are also readers and writers and can be +//! passed to these functions just as streams can. +//! +//! For dynamic CBOR value creation/inspection, see [`Value`](crate::value::Value). +//! +//! # Design Decisions +//! +//! ## Always Serialize Numeric Values to the Smallest Size +//! +//! Although the CBOR specification has differing numeric widths, this is only +//! a form of compression on the wire and is not intended to directly +//! represent an "integer width" or "float width." 
Therefore, ciborium always +//! serializes numbers to the smallest possible lossless encoding. For example, +//! we serialize `1u128` as a single byte (`01`). Likewise, we will also freely +//! decode that single byte into a `u128`. +//! +//! While there is some minor performance cost for this, there are several +//! reasons for this choice. First, the specification seems to imply it by +//! using a separate bit for the sign. Second, the specification requires +//! that implementations handle leading zeroes; a liberal reading of which +//! implies a requirement for lossless coercion. Third, dynamic languages like +//! Python have no notion of "integer width," making this is a practical +//! choice for maximizing wire compatibility with those languages. +//! +//! This coercion is **always** lossless. For floats, this implies that we +//! only coerce to a smaller size if coercion back to the original size has +//! the same raw bits as the original. +//! +//! ## Compatibility with Other Implementations +//! +//! The ciborium project follows the [Robustness Principle](https://en.wikipedia.org/wiki/Robustness_principle). +//! Therefore, we aim to be liberal in what we accept. This implies that we +//! aim to be wire-compatible with other implementations in decoding, but +//! not necessarily encoding. +//! +//! One notable example of this is that `serde_cbor` uses fixed-width encoding +//! of numbers and doesn't losslessly coerce. This implies that `ciborium` will +//! successfully decode `serde_cbor` encodings, but the opposite may not be the +//! case. +//! +//! ## Representing Map as a Sequence of Values +//! +//! Other serde parsers have generally taken the route of using `BTreeMap` or +//! `HashMap` to implement their encoding's underlying `Map` type. This crate +//! chooses to represent the `Map` type using `Vec<(Value, Value)>` instead. +//! +//! This decision was made because this type preserves the order of the pairs +//! on the wire. Further, for those that need the properties of `BTreeMap` or +//! `HashMap`, you can simply `collect()` the values into the respective type. +//! This provides maximum flexibility. +//! +//! ## Low-level Library +//! +//! The ciborium crate has the beginnings of a low-level library in the +//! (private) `basic` module. We may extend this to be more robust and expose +//! it for application consumption once we have it in a good state. If you'd +//! like to collaborate with us on that, please contact us. Alternatively, +//! we might fork this code into a separate crate with no serde dependency. +//! +//! ## Internal Types +//! +//! The ciborium crate contains a number of internal types that implement +//! useful serde traits. While these are not currently exposed, we might +//! choose to expose them in the future if there is demand. Generally, this +//! crate takes a conservative approach to exposing APIs to avoid breakage. +//! +//! ## Packed Encoding? +//! +//! Packed encoding uses numerical offsets to represent structure field names +//! and enum variant names. This can save significant space on the wire. +//! +//! While the authors of this crate like packed encoding, it should generally +//! be avoided because it can be fragile as it exposes invariants of your Rust +//! code to remote actors. We might consider adding this in the future. If you +//! are interested in this, please contact us. 
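
The crate-level docs above name `from_reader()` and `into_writer()` as the main entry points and note that byte buffers work as readers and writers. As a reviewer's aside (not part of the vendored sources; the tuple value and variable names are purely illustrative), a minimal round-trip sketch of that usage looks like this:

```rust
// Hedged sketch of the quick-start flow described in the crate docs:
// encode a serde-serializable value to CBOR, then decode it back.
use ciborium::{from_reader, into_writer, Value};

fn main() {
    // Any serde-serializable value works; this tuple is just an illustration.
    let original = ("temperature", 21.5_f64, vec![1_u8, 2, 3]);

    // A Vec<u8> acts as a writer, as the docs note.
    let mut buf = Vec::new();
    into_writer(&original, &mut buf).expect("CBOR encoding failed");

    // A byte slice acts as a reader; decode back into concrete types...
    let decoded: (String, f64, Vec<u8>) =
        from_reader(buf.as_slice()).expect("CBOR decoding failed");
    assert_eq!(decoded.1, 21.5);

    // ...or into the dynamic `Value` type for schema-less inspection.
    let dynamic: Value = from_reader(buf.as_slice()).expect("CBOR decoding failed");
    assert!(matches!(dynamic, Value::Array(_)));
}
```

Per the doc comments in the vendored `de` module, `from_reader` uses a 4 KB scratch buffer on the stack; for larger items, `from_reader_with_buffer` accepts a caller-provided scratch buffer instead.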
+ +#![cfg_attr(not(feature = "std"), no_std)] +#![deny(missing_docs)] +#![deny(clippy::all)] +#![deny(clippy::cargo)] +#![allow(clippy::unit_arg)] + +extern crate alloc; + +pub mod de; +pub mod ser; +pub mod tag; +pub mod value; + +// Re-export the [items recommended by serde](https://serde.rs/conventions.html). +#[doc(inline)] +pub use crate::de::from_reader; +#[doc(inline)] +pub use crate::de::from_reader_with_buffer; + +#[doc(inline)] +pub use crate::ser::into_writer; + +#[doc(inline)] +pub use crate::value::Value; + +/// Build a `Value` conveniently. +/// +/// The syntax should be intuitive if you are familiar with JSON. You can also +/// inline simple Rust expressions, including custom values that implement +/// `serde::Serialize`. Note that this macro returns `Result<Value, Error>`, +/// so you should handle the error appropriately. +/// +/// ``` +/// use ciborium::cbor; +/// +/// let value = cbor!({ +/// "code" => 415, +/// "message" => null, +/// "continue" => false, +/// "extra" => { "numbers" => [8.2341e+4, 0.251425] }, +/// }).unwrap(); +/// ``` +#[macro_export] +macro_rules! cbor { + (@map {$($key:expr => $val:expr),*} $(,)?) => {{ + $crate::value::Value::Map(vec![ + $( + (cbor!( $key )?, cbor!( $val )?) + ),* + ]) + }}; + + (@map {$($key:expr => $val:expr),*} { $($nkey:tt)* } => $($next:tt)*) => { + cbor!( + @map + { $($key => $val),* } + cbor!({ $($nkey)* })? => + $($next)* + ) + }; + + (@map {$($key:expr => $val:expr),*} [ $($nkey:tt)* ] => $($next:tt)*) => { + cbor!( + @map + { $($key => $val),* } + cbor!([ $($nkey)* ])? => + $($next)* + ) + }; + + (@map {$($key:expr => $val:expr),*} $nkey:expr => { $($nval:tt)* }, $($next:tt)*) => { + cbor!( + @map + { $($key => $val,)* $nkey => cbor!({ $($nval)* })? } + $($next)* + ) + }; + + (@map {$($key:expr => $val:expr),*} $nkey:expr => [ $($nval:tt)* ], $($next:tt)*) => { + cbor!( + @map + { $($key => $val,)* $nkey => cbor!([ $($nval)* ])? } + $($next)* + ) + }; + + (@map {$($key:expr => $val:expr),*} $nkey:expr => $nval:expr, $($next:tt)*) => { + cbor!( + @map + { $($key => $val,)* $nkey => cbor!($nval)? } + $($next)* + ) + }; + + (@seq [$($val:expr),*] $(,)?) => { + $crate::value::Value::Array( + vec![$( cbor!($val)? ),*] + ) + }; + + (@seq [$($val:expr),*] { $($item:tt)* }, $($next:tt)*) => { + cbor!( + @seq + [ $($val,)* cbor!({ $($item)* })? ] + $($next)* + ) + }; + + (@seq [$($val:expr),*] [ $($item:tt)* ], $($next:tt)*) => { + cbor!( + @seq + [ $($val,)* cbor!([ $($item)* ])? 
] + $($next)* + ) + }; + + (@seq [$($val:expr),*] $item:expr, $($next:tt)*) => { + cbor!( + @seq + [ $($val,)* $item ] + $($next)* + ) + }; + + ({ $($next:tt)* }) => {(||{ + ::core::result::Result::<_, $crate::value::Error>::from(Ok(cbor!(@map {} $($next)* ,))) + })()}; + + ([ $($next:tt)* ]) => {(||{ + ::core::result::Result::<_, $crate::value::Error>::from(Ok(cbor!(@seq [] $($next)* ,))) + })()}; + + ($val:expr) => {{ + #[allow(unused_imports)] + use $crate::value::Value::Null as null; + $crate::value::Value::serialized(&$val) + }}; +} diff --git a/vendor/ciborium/src/ser/error.rs b/vendor/ciborium/src/ser/error.rs new file mode 100644 index 00000000..e9086284 --- /dev/null +++ b/vendor/ciborium/src/ser/error.rs @@ -0,0 +1,42 @@ +// SPDX-License-Identifier: Apache-2.0 + +use alloc::string::{String, ToString}; +use core::fmt::{Debug, Display, Formatter, Result}; + +use serde::ser::{Error as SerError, StdError}; + +/// An error occurred during serialization +#[derive(Debug)] +pub enum Error<T> { + /// An error occurred while writing bytes + /// + /// Contains the underlying error reaturned while writing. + Io(T), + + /// An error indicating a value that cannot be serialized + /// + /// Contains a description of the problem. + Value(String), +} + +impl<T> From<T> for Error<T> { + #[inline] + fn from(value: T) -> Self { + Error::Io(value) + } +} + +impl<T: Debug> Display for Error<T> { + #[inline] + fn fmt(&self, f: &mut Formatter<'_>) -> Result { + write!(f, "{:?}", self) + } +} + +impl<T: Debug> StdError for Error<T> {} + +impl<T: Debug> SerError for Error<T> { + fn custom<U: Display>(msg: U) -> Self { + Error::Value(msg.to_string()) + } +} diff --git a/vendor/ciborium/src/ser/mod.rs b/vendor/ciborium/src/ser/mod.rs new file mode 100644 index 00000000..ff3d1184 --- /dev/null +++ b/vendor/ciborium/src/ser/mod.rs @@ -0,0 +1,499 @@ +// SPDX-License-Identifier: Apache-2.0 + +//! Serde serialization support for CBOR + +mod error; + +pub use error::Error; + +use alloc::string::ToString; + +use ciborium_io::Write; +use ciborium_ll::*; +use serde::{ser, Serialize as _}; + +struct Serializer<W: Write>(Encoder<W>); + +impl<W: Write> From<W> for Serializer<W> { + #[inline] + fn from(writer: W) -> Self { + Self(writer.into()) + } +} + +impl<W: Write> From<Encoder<W>> for Serializer<W> { + #[inline] + fn from(writer: Encoder<W>) -> Self { + Self(writer) + } +} + +impl<'a, W: Write> ser::Serializer for &'a mut Serializer<W> +where + W::Error: core::fmt::Debug, +{ + type Ok = (); + type Error = Error<W::Error>; + + type SerializeSeq = CollectionSerializer<'a, W>; + type SerializeTuple = CollectionSerializer<'a, W>; + type SerializeTupleStruct = CollectionSerializer<'a, W>; + type SerializeTupleVariant = CollectionSerializer<'a, W>; + type SerializeMap = CollectionSerializer<'a, W>; + type SerializeStruct = CollectionSerializer<'a, W>; + type SerializeStructVariant = CollectionSerializer<'a, W>; + + #[inline] + fn serialize_bool(self, v: bool) -> Result<(), Self::Error> { + Ok(self.0.push(match v { + false => Header::Simple(simple::FALSE), + true => Header::Simple(simple::TRUE), + })?) 
+ } + + #[inline] + fn serialize_i8(self, v: i8) -> Result<(), Self::Error> { + self.serialize_i64(v.into()) + } + + #[inline] + fn serialize_i16(self, v: i16) -> Result<(), Self::Error> { + self.serialize_i64(v.into()) + } + + #[inline] + fn serialize_i32(self, v: i32) -> Result<(), Self::Error> { + self.serialize_i64(v.into()) + } + + #[inline] + fn serialize_i64(self, v: i64) -> Result<(), Self::Error> { + Ok(self.0.push(match v.is_negative() { + false => Header::Positive(v as u64), + true => Header::Negative(v as u64 ^ !0), + })?) + } + + #[inline] + fn serialize_i128(self, v: i128) -> Result<(), Self::Error> { + let (tag, raw) = match v.is_negative() { + false => (tag::BIGPOS, v as u128), + true => (tag::BIGNEG, v as u128 ^ !0), + }; + + match (tag, u64::try_from(raw)) { + (tag::BIGPOS, Ok(x)) => return Ok(self.0.push(Header::Positive(x))?), + (tag::BIGNEG, Ok(x)) => return Ok(self.0.push(Header::Negative(x))?), + _ => {} + } + + let bytes = raw.to_be_bytes(); + + // Skip leading zeros. + let mut slice = &bytes[..]; + while !slice.is_empty() && slice[0] == 0 { + slice = &slice[1..]; + } + + self.0.push(Header::Tag(tag))?; + self.0.push(Header::Bytes(Some(slice.len())))?; + Ok(self.0.write_all(slice)?) + } + + #[inline] + fn serialize_u8(self, v: u8) -> Result<(), Self::Error> { + self.serialize_u64(v.into()) + } + + #[inline] + fn serialize_u16(self, v: u16) -> Result<(), Self::Error> { + self.serialize_u64(v.into()) + } + + #[inline] + fn serialize_u32(self, v: u32) -> Result<(), Self::Error> { + self.serialize_u64(v.into()) + } + + #[inline] + fn serialize_u64(self, v: u64) -> Result<(), Self::Error> { + Ok(self.0.push(Header::Positive(v))?) + } + + #[inline] + fn serialize_u128(self, v: u128) -> Result<(), Self::Error> { + if let Ok(x) = u64::try_from(v) { + return self.serialize_u64(x); + } + + let bytes = v.to_be_bytes(); + + // Skip leading zeros. + let mut slice = &bytes[..]; + while !slice.is_empty() && slice[0] == 0 { + slice = &slice[1..]; + } + + self.0.push(Header::Tag(tag::BIGPOS))?; + self.0.push(Header::Bytes(Some(slice.len())))?; + Ok(self.0.write_all(slice)?) + } + + #[inline] + fn serialize_f32(self, v: f32) -> Result<(), Self::Error> { + self.serialize_f64(v.into()) + } + + #[inline] + fn serialize_f64(self, v: f64) -> Result<(), Self::Error> { + Ok(self.0.push(Header::Float(v))?) + } + + #[inline] + fn serialize_char(self, v: char) -> Result<(), Self::Error> { + self.serialize_str(&v.to_string()) + } + + #[inline] + fn serialize_str(self, v: &str) -> Result<(), Self::Error> { + let bytes = v.as_bytes(); + self.0.push(Header::Text(bytes.len().into()))?; + Ok(self.0.write_all(bytes)?) + } + + #[inline] + fn serialize_bytes(self, v: &[u8]) -> Result<(), Self::Error> { + self.0.push(Header::Bytes(v.len().into()))?; + Ok(self.0.write_all(v)?) + } + + #[inline] + fn serialize_none(self) -> Result<(), Self::Error> { + Ok(self.0.push(Header::Simple(simple::NULL))?) 
+ } + + #[inline] + fn serialize_some<U: ?Sized + ser::Serialize>(self, value: &U) -> Result<(), Self::Error> { + value.serialize(self) + } + + #[inline] + fn serialize_unit(self) -> Result<(), Self::Error> { + self.serialize_none() + } + + #[inline] + fn serialize_unit_struct(self, _name: &'static str) -> Result<(), Self::Error> { + self.serialize_unit() + } + + #[inline] + fn serialize_unit_variant( + self, + _name: &'static str, + _index: u32, + variant: &'static str, + ) -> Result<(), Self::Error> { + self.serialize_str(variant) + } + + #[inline] + fn serialize_newtype_struct<U: ?Sized + ser::Serialize>( + self, + _name: &'static str, + value: &U, + ) -> Result<(), Self::Error> { + value.serialize(self) + } + + #[inline] + fn serialize_newtype_variant<U: ?Sized + ser::Serialize>( + self, + name: &'static str, + _index: u32, + variant: &'static str, + value: &U, + ) -> Result<(), Self::Error> { + if name != "@@TAG@@" || variant != "@@UNTAGGED@@" { + self.0.push(Header::Map(Some(1)))?; + self.serialize_str(variant)?; + } + + value.serialize(self) + } + + #[inline] + fn serialize_seq(self, length: Option<usize>) -> Result<Self::SerializeSeq, Self::Error> { + self.0.push(Header::Array(length))?; + Ok(CollectionSerializer { + encoder: self, + ending: length.is_none(), + tag: false, + }) + } + + #[inline] + fn serialize_tuple(self, length: usize) -> Result<Self::SerializeTuple, Self::Error> { + self.serialize_seq(Some(length)) + } + + #[inline] + fn serialize_tuple_struct( + self, + _name: &'static str, + length: usize, + ) -> Result<Self::SerializeTupleStruct, Self::Error> { + self.serialize_seq(Some(length)) + } + + #[inline] + fn serialize_tuple_variant( + self, + name: &'static str, + _index: u32, + variant: &'static str, + length: usize, + ) -> Result<Self::SerializeTupleVariant, Self::Error> { + match (name, variant) { + ("@@TAG@@", "@@TAGGED@@") => Ok(CollectionSerializer { + encoder: self, + ending: false, + tag: true, + }), + + _ => { + self.0.push(Header::Map(Some(1)))?; + self.serialize_str(variant)?; + self.0.push(Header::Array(Some(length)))?; + Ok(CollectionSerializer { + encoder: self, + ending: false, + tag: false, + }) + } + } + } + + #[inline] + fn serialize_map(self, length: Option<usize>) -> Result<Self::SerializeMap, Self::Error> { + self.0.push(Header::Map(length))?; + Ok(CollectionSerializer { + encoder: self, + ending: length.is_none(), + tag: false, + }) + } + + #[inline] + fn serialize_struct( + self, + _name: &'static str, + length: usize, + ) -> Result<Self::SerializeStruct, Self::Error> { + self.0.push(Header::Map(Some(length)))?; + Ok(CollectionSerializer { + encoder: self, + ending: false, + tag: false, + }) + } + + #[inline] + fn serialize_struct_variant( + self, + _name: &'static str, + _index: u32, + variant: &'static str, + length: usize, + ) -> Result<Self::SerializeStructVariant, Self::Error> { + self.0.push(Header::Map(Some(1)))?; + self.serialize_str(variant)?; + self.0.push(Header::Map(Some(length)))?; + Ok(CollectionSerializer { + encoder: self, + ending: false, + tag: false, + }) + } + + #[inline] + fn is_human_readable(&self) -> bool { + false + } +} + +macro_rules! 
end { + () => { + #[inline] + fn end(self) -> Result<(), Self::Error> { + if self.ending { + self.encoder.0.push(Header::Break)?; + } + + Ok(()) + } + }; +} + +struct CollectionSerializer<'a, W: Write> { + encoder: &'a mut Serializer<W>, + ending: bool, + tag: bool, +} + +impl<'a, W: Write> ser::SerializeSeq for CollectionSerializer<'a, W> +where + W::Error: core::fmt::Debug, +{ + type Ok = (); + type Error = Error<W::Error>; + + #[inline] + fn serialize_element<U: ?Sized + ser::Serialize>( + &mut self, + value: &U, + ) -> Result<(), Self::Error> { + value.serialize(&mut *self.encoder) + } + + end!(); +} + +impl<'a, W: Write> ser::SerializeTuple for CollectionSerializer<'a, W> +where + W::Error: core::fmt::Debug, +{ + type Ok = (); + type Error = Error<W::Error>; + + #[inline] + fn serialize_element<U: ?Sized + ser::Serialize>( + &mut self, + value: &U, + ) -> Result<(), Self::Error> { + value.serialize(&mut *self.encoder) + } + + end!(); +} + +impl<'a, W: Write> ser::SerializeTupleStruct for CollectionSerializer<'a, W> +where + W::Error: core::fmt::Debug, +{ + type Ok = (); + type Error = Error<W::Error>; + + #[inline] + fn serialize_field<U: ?Sized + ser::Serialize>( + &mut self, + value: &U, + ) -> Result<(), Self::Error> { + value.serialize(&mut *self.encoder) + } + + end!(); +} + +impl<'a, W: Write> ser::SerializeTupleVariant for CollectionSerializer<'a, W> +where + W::Error: core::fmt::Debug, +{ + type Ok = (); + type Error = Error<W::Error>; + + #[inline] + fn serialize_field<U: ?Sized + ser::Serialize>( + &mut self, + value: &U, + ) -> Result<(), Self::Error> { + if !self.tag { + return value.serialize(&mut *self.encoder); + } + + self.tag = false; + match value.serialize(crate::tag::Serializer) { + Ok(x) => Ok(self.encoder.0.push(Header::Tag(x))?), + _ => Err(Error::Value("expected tag".into())), + } + } + + end!(); +} + +impl<'a, W: Write> ser::SerializeMap for CollectionSerializer<'a, W> +where + W::Error: core::fmt::Debug, +{ + type Ok = (); + type Error = Error<W::Error>; + + #[inline] + fn serialize_key<U: ?Sized + ser::Serialize>(&mut self, key: &U) -> Result<(), Self::Error> { + key.serialize(&mut *self.encoder) + } + + #[inline] + fn serialize_value<U: ?Sized + ser::Serialize>( + &mut self, + value: &U, + ) -> Result<(), Self::Error> { + value.serialize(&mut *self.encoder) + } + + end!(); +} + +impl<'a, W: Write> ser::SerializeStruct for CollectionSerializer<'a, W> +where + W::Error: core::fmt::Debug, +{ + type Ok = (); + type Error = Error<W::Error>; + + #[inline] + fn serialize_field<U: ?Sized + ser::Serialize>( + &mut self, + key: &'static str, + value: &U, + ) -> Result<(), Self::Error> { + key.serialize(&mut *self.encoder)?; + value.serialize(&mut *self.encoder)?; + Ok(()) + } + + end!(); +} + +impl<'a, W: Write> ser::SerializeStructVariant for CollectionSerializer<'a, W> +where + W::Error: core::fmt::Debug, +{ + type Ok = (); + type Error = Error<W::Error>; + + #[inline] + fn serialize_field<U: ?Sized + ser::Serialize>( + &mut self, + key: &'static str, + value: &U, + ) -> Result<(), Self::Error> { + key.serialize(&mut *self.encoder)?; + value.serialize(&mut *self.encoder) + } + + end!(); +} + +/// Serializes as CBOR into a type with [`impl ciborium_io::Write`](ciborium_io::Write) +#[inline] +pub fn into_writer<T: ?Sized + ser::Serialize, W: Write>( + value: &T, + writer: W, +) -> Result<(), Error<W::Error>> +where + W::Error: core::fmt::Debug, +{ + let mut encoder = Serializer::from(writer); + value.serialize(&mut encoder) +} diff --git a/vendor/ciborium/src/tag.rs 
b/vendor/ciborium/src/tag.rs new file mode 100644 index 00000000..11965758 --- /dev/null +++ b/vendor/ciborium/src/tag.rs @@ -0,0 +1,545 @@ +//! Contains helper types for dealing with CBOR tags + +use serde::{de, de::Error as _, forward_to_deserialize_any, ser, Deserialize, Serialize}; + +#[derive(Deserialize, Serialize)] +#[serde(rename = "@@TAG@@")] +enum Internal<T> { + #[serde(rename = "@@UNTAGGED@@")] + Untagged(T), + + #[serde(rename = "@@TAGGED@@")] + Tagged(u64, T), +} + +/// An optional CBOR tag and its data item +/// +/// No semantic evaluation of the tag is made. +#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct Captured<V>(pub Option<u64>, pub V); + +impl<'de, V: Deserialize<'de>> Deserialize<'de> for Captured<V> { + #[inline] + fn deserialize<D: de::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> { + match Internal::deserialize(deserializer)? { + Internal::Tagged(t, v) => Ok(Captured(Some(t), v)), + Internal::Untagged(v) => Ok(Captured(None, v)), + } + } +} + +impl<V: Serialize> Serialize for Captured<V> { + #[inline] + fn serialize<S: ser::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { + match self.0 { + Some(tag) => Internal::Tagged(tag, &self.1).serialize(serializer), + None => Internal::Untagged(&self.1).serialize(serializer), + } + } +} + +/// A required CBOR tag +/// +/// This data type indicates that the specified tag, and **only** that tag, +/// is required during deserialization. If the tag is missing, deserialization +/// will fail. The tag will always be emitted during serialization. +#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct Required<V, const TAG: u64>(pub V); + +impl<'de, V: Deserialize<'de>, const TAG: u64> Deserialize<'de> for Required<V, TAG> { + #[inline] + fn deserialize<D: de::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> { + match Internal::deserialize(deserializer)? { + Internal::Tagged(t, v) if t == TAG => Ok(Required(v)), + _ => Err(de::Error::custom("required tag not found")), + } + } +} + +impl<V: Serialize, const TAG: u64> Serialize for Required<V, TAG> { + #[inline] + fn serialize<S: ser::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { + Internal::Tagged(TAG, &self.0).serialize(serializer) + } +} + +/// An optional CBOR tag +/// +/// This data type indicates that the specified tag, and **only** that tag, +/// is accepted, but not required, during deserialization. The tag will always +/// be emitted during serialization. +#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct Accepted<V, const TAG: u64>(pub V); + +impl<'de, V: Deserialize<'de>, const TAG: u64> Deserialize<'de> for Accepted<V, TAG> { + #[inline] + fn deserialize<D: de::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> { + match Internal::deserialize(deserializer)? 
{ + Internal::Tagged(t, v) if t == TAG => Ok(Accepted(v)), + Internal::Untagged(v) => Ok(Accepted(v)), + _ => Err(de::Error::custom("required tag not found")), + } + } +} + +impl<V: Serialize, const TAG: u64> Serialize for Accepted<V, TAG> { + #[inline] + fn serialize<S: ser::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { + Internal::Tagged(TAG, &self.0).serialize(serializer) + } +} + +pub(crate) struct TagAccess<D> { + parent: Option<D>, + state: usize, + tag: Option<u64>, +} + +impl<D> TagAccess<D> { + pub fn new(parent: D, tag: Option<u64>) -> Self { + Self { + parent: Some(parent), + state: 0, + tag, + } + } +} + +impl<'de, D: de::Deserializer<'de>> de::Deserializer<'de> for &mut TagAccess<D> { + type Error = D::Error; + + #[inline] + fn deserialize_any<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + self.state += 1; + + match self.state { + 1 => visitor.visit_str(match self.tag { + Some(..) => "@@TAGGED@@", + None => "@@UNTAGGED@@", + }), + + _ => visitor.visit_u64(self.tag.unwrap()), + } + } + + forward_to_deserialize_any! { + i8 i16 i32 i64 i128 + u8 u16 u32 u64 u128 + bool f32 f64 + char str string + bytes byte_buf + seq map + struct tuple tuple_struct + identifier ignored_any + option unit unit_struct newtype_struct enum + } +} + +impl<'de, D: de::Deserializer<'de>> de::EnumAccess<'de> for TagAccess<D> { + type Error = D::Error; + type Variant = Self; + + #[inline] + fn variant_seed<V: de::DeserializeSeed<'de>>( + mut self, + seed: V, + ) -> Result<(V::Value, Self::Variant), Self::Error> { + let variant = seed.deserialize(&mut self)?; + Ok((variant, self)) + } +} + +impl<'de, D: de::Deserializer<'de>> de::VariantAccess<'de> for TagAccess<D> { + type Error = D::Error; + + #[inline] + fn unit_variant(self) -> Result<(), Self::Error> { + Err(Self::Error::custom("expected tag")) + } + + #[inline] + fn newtype_variant_seed<U: de::DeserializeSeed<'de>>( + mut self, + seed: U, + ) -> Result<U::Value, Self::Error> { + seed.deserialize(self.parent.take().unwrap()) + } + + #[inline] + fn tuple_variant<V: de::Visitor<'de>>( + self, + _len: usize, + visitor: V, + ) -> Result<V::Value, Self::Error> { + visitor.visit_seq(self) + } + + #[inline] + fn struct_variant<V: de::Visitor<'de>>( + self, + _fields: &'static [&'static str], + _visitor: V, + ) -> Result<V::Value, Self::Error> { + Err(Self::Error::custom("expected tag")) + } +} + +impl<'de, D: de::Deserializer<'de>> de::SeqAccess<'de> for TagAccess<D> { + type Error = D::Error; + + #[inline] + fn next_element_seed<T: de::DeserializeSeed<'de>>( + &mut self, + seed: T, + ) -> Result<Option<T::Value>, Self::Error> { + if self.state < 2 { + return Ok(Some(seed.deserialize(self)?)); + } + + Ok(match self.parent.take() { + Some(x) => Some(seed.deserialize(x)?), + None => None, + }) + } +} + +#[derive(Debug)] +pub(crate) struct Error; + +impl core::fmt::Display for Error { + #[inline] + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + write!(f, "{:?}", self) + } +} + +impl ser::StdError for Error {} + +impl ser::Error for Error { + fn custom<U: core::fmt::Display>(_msg: U) -> Self { + Error + } +} + +pub(crate) struct Serializer; + +impl ser::Serializer for Serializer { + type Ok = u64; + type Error = Error; + + type SerializeSeq = Self; + type SerializeTuple = Self; + type SerializeTupleStruct = Self; + type SerializeTupleVariant = Self; + type SerializeMap = Self; + type SerializeStruct = Self; + type SerializeStructVariant = Self; + + #[inline] + fn serialize_bool(self, _: bool) 
-> Result<u64, Self::Error> { + Err(Error) + } + + #[inline] + fn serialize_i8(self, _: i8) -> Result<u64, Self::Error> { + Err(Error) + } + + #[inline] + fn serialize_i16(self, _: i16) -> Result<u64, Self::Error> { + Err(Error) + } + + #[inline] + fn serialize_i32(self, _: i32) -> Result<u64, Self::Error> { + Err(Error) + } + + #[inline] + fn serialize_i64(self, _: i64) -> Result<u64, Self::Error> { + Err(Error) + } + + #[inline] + fn serialize_i128(self, _: i128) -> Result<u64, Self::Error> { + Err(Error) + } + + #[inline] + fn serialize_u8(self, v: u8) -> Result<u64, Self::Error> { + Ok(v.into()) + } + + #[inline] + fn serialize_u16(self, v: u16) -> Result<u64, Self::Error> { + Ok(v.into()) + } + + #[inline] + fn serialize_u32(self, v: u32) -> Result<u64, Self::Error> { + Ok(v.into()) + } + + #[inline] + fn serialize_u64(self, v: u64) -> Result<u64, Self::Error> { + Ok(v) + } + + #[inline] + fn serialize_u128(self, _: u128) -> Result<u64, Self::Error> { + Err(Error) + } + + #[inline] + fn serialize_f32(self, _: f32) -> Result<u64, Self::Error> { + Err(Error) + } + + #[inline] + fn serialize_f64(self, _: f64) -> Result<u64, Self::Error> { + Err(Error) + } + + #[inline] + fn serialize_char(self, _: char) -> Result<u64, Self::Error> { + Err(Error) + } + + #[inline] + fn serialize_str(self, _: &str) -> Result<u64, Self::Error> { + Err(Error) + } + + #[inline] + fn serialize_bytes(self, _: &[u8]) -> Result<u64, Self::Error> { + Err(Error) + } + + #[inline] + fn serialize_none(self) -> Result<u64, Self::Error> { + Err(Error) + } + + #[inline] + fn serialize_some<U: ?Sized + ser::Serialize>(self, _: &U) -> Result<u64, Self::Error> { + Err(Error) + } + + #[inline] + fn serialize_unit(self) -> Result<u64, Self::Error> { + Err(Error) + } + + #[inline] + fn serialize_unit_struct(self, _name: &'static str) -> Result<u64, Self::Error> { + Err(Error) + } + + #[inline] + fn serialize_unit_variant( + self, + _name: &'static str, + _index: u32, + _variant: &'static str, + ) -> Result<u64, Self::Error> { + Err(Error) + } + + #[inline] + fn serialize_newtype_struct<U: ?Sized + ser::Serialize>( + self, + _name: &'static str, + _value: &U, + ) -> Result<u64, Self::Error> { + Err(Error) + } + + #[inline] + fn serialize_newtype_variant<U: ?Sized + ser::Serialize>( + self, + _name: &'static str, + _index: u32, + _variant: &'static str, + _value: &U, + ) -> Result<u64, Self::Error> { + Err(Error) + } + + #[inline] + fn serialize_seq(self, _length: Option<usize>) -> Result<Self::SerializeSeq, Self::Error> { + Err(Error) + } + + #[inline] + fn serialize_tuple(self, _length: usize) -> Result<Self::SerializeTuple, Self::Error> { + Err(Error) + } + + #[inline] + fn serialize_tuple_struct( + self, + _name: &'static str, + _length: usize, + ) -> Result<Self::SerializeTupleStruct, Self::Error> { + Err(Error) + } + + #[inline] + fn serialize_tuple_variant( + self, + _name: &'static str, + _index: u32, + _variant: &'static str, + _length: usize, + ) -> Result<Self::SerializeTupleVariant, Self::Error> { + Err(Error) + } + + #[inline] + fn serialize_map(self, _length: Option<usize>) -> Result<Self::SerializeMap, Self::Error> { + Err(Error) + } + + #[inline] + fn serialize_struct( + self, + _name: &'static str, + _length: usize, + ) -> Result<Self::SerializeStruct, Self::Error> { + Err(Error) + } + + #[inline] + fn serialize_struct_variant( + self, + _name: &'static str, + _index: u32, + _variant: &'static str, + _length: usize, + ) -> Result<Self::SerializeStructVariant, Self::Error> { + Err(Error) + } + + #[inline] + fn 
is_human_readable(&self) -> bool { + false + } +} + +impl ser::SerializeSeq for Serializer { + type Ok = u64; + type Error = Error; + + #[inline] + fn serialize_element<U: ?Sized + ser::Serialize>(&mut self, _value: &U) -> Result<(), Error> { + Err(Error) + } + + #[inline] + fn end(self) -> Result<Self::Ok, Self::Error> { + Err(Error) + } +} + +impl ser::SerializeTuple for Serializer { + type Ok = u64; + type Error = Error; + + #[inline] + fn serialize_element<U: ?Sized + ser::Serialize>(&mut self, _value: &U) -> Result<(), Error> { + Err(Error) + } + + #[inline] + fn end(self) -> Result<Self::Ok, Self::Error> { + Err(Error) + } +} + +impl ser::SerializeTupleStruct for Serializer { + type Ok = u64; + type Error = Error; + + #[inline] + fn serialize_field<U: ?Sized + ser::Serialize>(&mut self, _value: &U) -> Result<(), Error> { + Err(Error) + } + + #[inline] + fn end(self) -> Result<Self::Ok, Self::Error> { + Err(Error) + } +} + +impl ser::SerializeTupleVariant for Serializer { + type Ok = u64; + type Error = Error; + + #[inline] + fn serialize_field<U: ?Sized + ser::Serialize>(&mut self, _value: &U) -> Result<(), Error> { + Err(Error) + } + + #[inline] + fn end(self) -> Result<Self::Ok, Self::Error> { + Err(Error) + } +} + +impl ser::SerializeMap for Serializer { + type Ok = u64; + type Error = Error; + + #[inline] + fn serialize_key<U: ?Sized + ser::Serialize>(&mut self, _key: &U) -> Result<(), Error> { + Err(Error) + } + + #[inline] + fn serialize_value<U: ?Sized + ser::Serialize>(&mut self, _value: &U) -> Result<(), Error> { + Err(Error) + } + + #[inline] + fn end(self) -> Result<Self::Ok, Self::Error> { + Err(Error) + } +} + +impl ser::SerializeStruct for Serializer { + type Ok = u64; + type Error = Error; + + #[inline] + fn serialize_field<U: ?Sized + ser::Serialize>( + &mut self, + _key: &'static str, + _value: &U, + ) -> Result<(), Error> { + Err(Error) + } + + #[inline] + fn end(self) -> Result<Self::Ok, Self::Error> { + Err(Error) + } +} + +impl ser::SerializeStructVariant for Serializer { + type Ok = u64; + type Error = Error; + + #[inline] + fn serialize_field<U: ?Sized + ser::Serialize>( + &mut self, + _key: &'static str, + _value: &U, + ) -> Result<(), Self::Error> { + Err(Error) + } + + #[inline] + fn end(self) -> Result<Self::Ok, Self::Error> { + Err(Error) + } +} diff --git a/vendor/ciborium/src/value/canonical.rs b/vendor/ciborium/src/value/canonical.rs new file mode 100644 index 00000000..f1196f4d --- /dev/null +++ b/vendor/ciborium/src/value/canonical.rs @@ -0,0 +1,124 @@ +// SPDX-License-Identifier: Apache-2.0 + +use crate::value::Value; +use alloc::vec::Vec; +use core::cmp::Ordering; +use serde::{de, ser}; + +/// Manually serialize values to compare them. +fn serialized_canonical_cmp(v1: &Value, v2: &Value) -> Ordering { + // There is an optimization to be done here, but it would take a lot more code + // and using mixing keys, Arrays or Maps as CanonicalValue is probably not the + // best use of this type as it is meant mainly to be used as keys. + + let mut bytes1 = Vec::new(); + let _ = crate::ser::into_writer(v1, &mut bytes1); + let mut bytes2 = Vec::new(); + let _ = crate::ser::into_writer(v2, &mut bytes2); + + match bytes1.len().cmp(&bytes2.len()) { + Ordering::Equal => bytes1.cmp(&bytes2), + x => x, + } +} + +/// Compares two values uses canonical comparison, as defined in both +/// RFC 7049 Section 3.9 (regarding key sorting) and RFC 8949 4.2.3 (as errata). 
+/// +/// In short, the comparison follow the following rules: +/// - If two keys have different lengths, the shorter one sorts earlier; +/// - If two keys have the same length, the one with the lower value in +/// (byte-wise) lexical order sorts earlier. +/// +/// This specific comparison allows Maps and sorting that respect these two rules. +pub fn cmp_value(v1: &Value, v2: &Value) -> Ordering { + use Value::*; + + match (v1, v2) { + (Integer(i), Integer(o)) => { + // Because of the first rule above, two numbers might be in a different + // order than regular i128 comparison. For example, 10 < -1 in + // canonical ordering, since 10 serializes to `0x0a` and -1 to `0x20`, + // and -1 < -1000 because of their lengths. + i.canonical_cmp(o) + } + (Text(s), Text(o)) => match s.len().cmp(&o.len()) { + Ordering::Equal => s.cmp(o), + x => x, + }, + (Bool(s), Bool(o)) => s.cmp(o), + (Null, Null) => Ordering::Equal, + (Tag(t, v), Tag(ot, ov)) => match Value::from(*t).partial_cmp(&Value::from(*ot)) { + Some(Ordering::Equal) | None => match v.partial_cmp(ov) { + Some(x) => x, + None => serialized_canonical_cmp(v1, v2), + }, + Some(x) => x, + }, + (_, _) => serialized_canonical_cmp(v1, v2), + } +} + +/// A CBOR Value that impl Ord and Eq to allow sorting of values as defined in both +/// RFC 7049 Section 3.9 (regarding key sorting) and RFC 8949 4.2.3 (as errata). +/// +/// Since a regular [Value] can be +#[derive(Clone, Debug)] +pub struct CanonicalValue(Value); + +impl PartialEq for CanonicalValue { + fn eq(&self, other: &Self) -> bool { + self.cmp(other) == Ordering::Equal + } +} + +impl Eq for CanonicalValue {} + +impl From<Value> for CanonicalValue { + fn from(v: Value) -> Self { + Self(v) + } +} + +impl From<CanonicalValue> for Value { + fn from(v: CanonicalValue) -> Self { + v.0 + } +} + +impl ser::Serialize for CanonicalValue { + fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> + where + S: ser::Serializer, + { + self.0.serialize(serializer) + } +} + +impl<'de> de::Deserialize<'de> for CanonicalValue { + fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> + where + D: de::Deserializer<'de>, + { + Value::deserialize(deserializer).map(Into::into) + } + + fn deserialize_in_place<D>(deserializer: D, place: &mut Self) -> Result<(), D::Error> + where + D: de::Deserializer<'de>, + { + Value::deserialize_in_place(deserializer, &mut place.0) + } +} + +impl Ord for CanonicalValue { + fn cmp(&self, other: &Self) -> Ordering { + cmp_value(&self.0, &other.0) + } +} + +impl PartialOrd for CanonicalValue { + fn partial_cmp(&self, other: &Self) -> Option<Ordering> { + Some(cmp_value(&self.0, &other.0)) + } +} diff --git a/vendor/ciborium/src/value/de.rs b/vendor/ciborium/src/value/de.rs new file mode 100644 index 00000000..38cfbec8 --- /dev/null +++ b/vendor/ciborium/src/value/de.rs @@ -0,0 +1,620 @@ +// SPDX-License-Identifier: Apache-2.0 + +use super::{Error, Integer, Value}; + +use alloc::{boxed::Box, string::String, vec::Vec}; +use core::iter::Peekable; + +use ciborium_ll::tag; +use serde::de::{self, Deserializer as _}; + +impl<'a> From<Integer> for de::Unexpected<'a> { + #[inline] + fn from(value: Integer) -> Self { + u64::try_from(value) + .map(de::Unexpected::Unsigned) + .unwrap_or_else(|_| { + i64::try_from(value) + .map(de::Unexpected::Signed) + .unwrap_or_else(|_| de::Unexpected::Other("large integer")) + }) + } +} + +impl<'a> From<&'a Value> for de::Unexpected<'a> { + #[inline] + fn from(value: &'a Value) -> Self { + match value { + Value::Bool(x) => Self::Bool(*x), + 
Value::Integer(x) => Self::from(*x), + Value::Float(x) => Self::Float(*x), + Value::Bytes(x) => Self::Bytes(x), + Value::Text(x) => Self::Str(x), + Value::Array(..) => Self::Seq, + Value::Map(..) => Self::Map, + Value::Null => Self::Other("null"), + Value::Tag(..) => Self::Other("tag"), + } + } +} + +macro_rules! mkvisit { + ($($f:ident($v:ty)),+ $(,)?) => { + $( + #[inline] + fn $f<E: de::Error>(self, v: $v) -> Result<Self::Value, E> { + Ok(v.into()) + } + )+ + }; +} + +struct Visitor; + +impl<'de> serde::de::Visitor<'de> for Visitor { + type Value = Value; + + fn expecting(&self, formatter: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + write!(formatter, "a valid CBOR item") + } + + mkvisit! { + visit_bool(bool), + visit_f32(f32), + visit_f64(f64), + + visit_i8(i8), + visit_i16(i16), + visit_i32(i32), + visit_i64(i64), + visit_i128(i128), + + visit_u8(u8), + visit_u16(u16), + visit_u32(u32), + visit_u64(u64), + visit_u128(u128), + + visit_char(char), + visit_str(&str), + visit_borrowed_str(&'de str), + visit_string(String), + + visit_bytes(&[u8]), + visit_borrowed_bytes(&'de [u8]), + visit_byte_buf(Vec<u8>), + } + + #[inline] + fn visit_none<E: de::Error>(self) -> Result<Self::Value, E> { + Ok(Value::Null) + } + + #[inline] + fn visit_some<D: de::Deserializer<'de>>( + self, + deserializer: D, + ) -> Result<Self::Value, D::Error> { + deserializer.deserialize_any(self) + } + + #[inline] + fn visit_unit<E: de::Error>(self) -> Result<Self::Value, E> { + Ok(Value::Null) + } + + #[inline] + fn visit_newtype_struct<D: de::Deserializer<'de>>( + self, + deserializer: D, + ) -> Result<Self::Value, D::Error> { + deserializer.deserialize_any(self) + } + + #[inline] + fn visit_seq<A: de::SeqAccess<'de>>(self, mut acc: A) -> Result<Self::Value, A::Error> { + let mut seq = Vec::new(); + + while let Some(elem) = acc.next_element()? { + seq.push(elem); + } + + Ok(Value::Array(seq)) + } + + #[inline] + fn visit_map<A: de::MapAccess<'de>>(self, mut acc: A) -> Result<Self::Value, A::Error> { + let mut map = Vec::<(Value, Value)>::new(); + + while let Some(kv) = acc.next_entry()? { + map.push(kv); + } + + Ok(Value::Map(map)) + } + + #[inline] + fn visit_enum<A: de::EnumAccess<'de>>(self, acc: A) -> Result<Self::Value, A::Error> { + use serde::de::VariantAccess; + + struct Inner; + + impl<'de> serde::de::Visitor<'de> for Inner { + type Value = Value; + + fn expecting(&self, formatter: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + write!(formatter, "a valid CBOR item") + } + + #[inline] + fn visit_seq<A: de::SeqAccess<'de>>(self, mut acc: A) -> Result<Self::Value, A::Error> { + let tag: u64 = acc + .next_element()? + .ok_or_else(|| de::Error::custom("expected tag"))?; + let val = acc + .next_element()? + .ok_or_else(|| de::Error::custom("expected val"))?; + Ok(Value::Tag(tag, Box::new(val))) + } + } + + let (name, data): (String, _) = acc.variant()?; + assert_eq!("@@TAGGED@@", name); + data.tuple_variant(2, Inner) + } +} + +impl<'de> de::Deserialize<'de> for Value { + #[inline] + fn deserialize<D: de::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> { + deserializer.deserialize_any(Visitor) + } +} + +struct Deserializer<T>(T); + +impl<'a> Deserializer<&'a Value> { + fn integer<N>(&self, kind: &'static str) -> Result<N, Error> + where + N: TryFrom<u128>, + N: TryFrom<i128>, + { + fn raw(value: &Value) -> Result<u128, Error> { + let mut buffer = 0u128.to_ne_bytes(); + let length = buffer.len(); + + let bytes = match value { + Value::Bytes(bytes) => { + // Skip leading zeros... 
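+ // A CBOR bignum (tag 2 = BIGPOS, tag 3 = BIGNEG) wraps a big-endian byte string;
+ // e.g. tag 2 over the bytes [0x01, 0x00] decodes to 256 here.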
+ let mut bytes: &[u8] = bytes.as_ref(); + while bytes.len() > buffer.len() && bytes[0] == 0 { + bytes = &bytes[1..]; + } + + if bytes.len() > buffer.len() { + return Err(de::Error::custom("bigint too large")); + } + + bytes + } + + _ => return Err(de::Error::invalid_type(value.into(), &"bytes")), + }; + + buffer[length - bytes.len()..].copy_from_slice(bytes); + Ok(u128::from_be_bytes(buffer)) + } + + let err = || de::Error::invalid_type(self.0.into(), &kind); + + Ok(match self.0 { + Value::Integer(x) => i128::from(*x).try_into().map_err(|_| err())?, + Value::Tag(t, v) if *t == tag::BIGPOS => raw(v)?.try_into().map_err(|_| err())?, + Value::Tag(t, v) if *t == tag::BIGNEG => i128::try_from(raw(v)?) + .map(|x| x ^ !0) + .map_err(|_| err()) + .and_then(|x| x.try_into().map_err(|_| err()))?, + _ => return Err(de::Error::invalid_type(self.0.into(), &"(big)int")), + }) + } +} + +impl<'a, 'de> de::Deserializer<'de> for Deserializer<&'a Value> { + type Error = Error; + + #[inline] + fn deserialize_any<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + match self.0 { + Value::Bytes(x) => visitor.visit_bytes(x), + Value::Text(x) => visitor.visit_str(x), + Value::Array(x) => visitor.visit_seq(Deserializer(x.iter())), + Value::Map(x) => visitor.visit_map(Deserializer(x.iter().peekable())), + Value::Bool(x) => visitor.visit_bool(*x), + Value::Null => visitor.visit_none(), + + Value::Tag(t, v) => { + let parent: Deserializer<&Value> = Deserializer(v); + let access = crate::tag::TagAccess::new(parent, Some(*t)); + visitor.visit_enum(access) + } + + Value::Integer(x) => { + if let Ok(x) = u64::try_from(*x) { + visitor.visit_u64(x) + } else if let Ok(x) = i64::try_from(*x) { + visitor.visit_i64(x) + } else if let Ok(x) = i128::try_from(*x) { + visitor.visit_i128(x) + } else { + unreachable!() + } + } + + Value::Float(x) => visitor.visit_f64(*x), + } + } + + #[inline] + fn deserialize_bool<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + let mut value = self.0; + while let Value::Tag(.., v) = value { + value = v; + } + + match value { + Value::Bool(x) => visitor.visit_bool(*x), + _ => Err(de::Error::invalid_type(value.into(), &"bool")), + } + } + + #[inline] + fn deserialize_f32<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + self.deserialize_f64(visitor) + } + + #[inline] + fn deserialize_f64<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + let mut value = self.0; + while let Value::Tag(.., v) = value { + value = v; + } + + match value { + Value::Float(x) => visitor.visit_f64(*x), + _ => Err(de::Error::invalid_type(value.into(), &"f64")), + } + } + + fn deserialize_i8<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + visitor.visit_i8(self.integer("i8")?) + } + + fn deserialize_i16<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + visitor.visit_i16(self.integer("i16")?) + } + + fn deserialize_i32<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + visitor.visit_i32(self.integer("i32")?) + } + + fn deserialize_i64<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + visitor.visit_i64(self.integer("i64")?) + } + + fn deserialize_i128<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + visitor.visit_i128(self.integer("i128")?) + } + + fn deserialize_u8<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + visitor.visit_u8(self.integer("u8")?) 
+ } + + fn deserialize_u16<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + visitor.visit_u16(self.integer("u16")?) + } + + fn deserialize_u32<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + visitor.visit_u32(self.integer("u32")?) + } + + fn deserialize_u64<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + visitor.visit_u64(self.integer("u64")?) + } + + fn deserialize_u128<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + visitor.visit_u128(self.integer("u128")?) + } + + fn deserialize_char<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + let mut value = self.0; + while let Value::Tag(.., v) = value { + value = v; + } + + match value { + Value::Text(x) => match x.chars().count() { + 1 => visitor.visit_char(x.chars().next().unwrap()), + _ => Err(de::Error::invalid_type(value.into(), &"char")), + }, + + _ => Err(de::Error::invalid_type(value.into(), &"char")), + } + } + + fn deserialize_str<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + let mut value = self.0; + while let Value::Tag(.., v) = value { + value = v; + } + + match value { + Value::Text(x) => visitor.visit_str(x), + _ => Err(de::Error::invalid_type(value.into(), &"str")), + } + } + + fn deserialize_string<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + self.deserialize_str(visitor) + } + + fn deserialize_bytes<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + let mut value = self.0; + while let Value::Tag(.., v) = value { + value = v; + } + + match value { + Value::Bytes(x) => visitor.visit_bytes(x), + _ => Err(de::Error::invalid_type(value.into(), &"bytes")), + } + } + + fn deserialize_byte_buf<V: de::Visitor<'de>>( + self, + visitor: V, + ) -> Result<V::Value, Self::Error> { + self.deserialize_bytes(visitor) + } + + fn deserialize_seq<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + let mut value = self.0; + while let Value::Tag(.., v) = value { + value = v; + } + + match value { + Value::Array(x) => visitor.visit_seq(Deserializer(x.iter())), + _ => Err(de::Error::invalid_type(value.into(), &"array")), + } + } + + fn deserialize_map<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + let mut value = self.0; + while let Value::Tag(.., v) = value { + value = v; + } + + match value { + Value::Map(x) => visitor.visit_map(Deserializer(x.iter().peekable())), + _ => Err(de::Error::invalid_type(value.into(), &"map")), + } + } + + fn deserialize_struct<V: de::Visitor<'de>>( + self, + _name: &'static str, + _fields: &'static [&'static str], + visitor: V, + ) -> Result<V::Value, Self::Error> { + self.deserialize_map(visitor) + } + + fn deserialize_tuple<V: de::Visitor<'de>>( + self, + _len: usize, + visitor: V, + ) -> Result<V::Value, Self::Error> { + self.deserialize_seq(visitor) + } + + fn deserialize_tuple_struct<V: de::Visitor<'de>>( + self, + _name: &'static str, + _len: usize, + visitor: V, + ) -> Result<V::Value, Self::Error> { + self.deserialize_seq(visitor) + } + + fn deserialize_identifier<V: de::Visitor<'de>>( + self, + visitor: V, + ) -> Result<V::Value, Self::Error> { + self.deserialize_str(visitor) + } + + fn deserialize_ignored_any<V: de::Visitor<'de>>( + self, + visitor: V, + ) -> Result<V::Value, Self::Error> { + self.deserialize_any(visitor) + } + + #[inline] + fn deserialize_option<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + 
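+ // CBOR has no dedicated Option encoding: null maps to None and any other
+ // item is handed to the visitor as Some.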
match self.0 { + Value::Null => visitor.visit_none(), + x => visitor.visit_some(Self(x)), + } + } + + #[inline] + fn deserialize_unit<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { + match self.0 { + Value::Null => visitor.visit_unit(), + _ => Err(de::Error::invalid_type(self.0.into(), &"null")), + } + } + + #[inline] + fn deserialize_unit_struct<V: de::Visitor<'de>>( + self, + _name: &'static str, + visitor: V, + ) -> Result<V::Value, Self::Error> { + self.deserialize_unit(visitor) + } + + #[inline] + fn deserialize_newtype_struct<V: de::Visitor<'de>>( + self, + _name: &'static str, + visitor: V, + ) -> Result<V::Value, Self::Error> { + visitor.visit_newtype_struct(self) + } + + #[inline] + fn deserialize_enum<V: de::Visitor<'de>>( + self, + name: &'static str, + variants: &'static [&'static str], + visitor: V, + ) -> Result<V::Value, Self::Error> { + if name == "@@TAG@@" { + let (tag, val) = match self.0 { + Value::Tag(t, v) => (Some(*t), v.as_ref()), + v => (None, v), + }; + + let parent: Deserializer<&Value> = Deserializer(val); + let access = crate::tag::TagAccess::new(parent, tag); + return visitor.visit_enum(access); + } + + match self.0 { + Value::Tag(.., v) => Deserializer(v.as_ref()).deserialize_enum(name, variants, visitor), + Value::Map(x) if x.len() == 1 => visitor.visit_enum(Deserializer(&x[0])), + x @ Value::Text(..) => visitor.visit_enum(Deserializer(x)), + _ => Err(de::Error::invalid_type(self.0.into(), &"map")), + } + } + + fn is_human_readable(&self) -> bool { + false + } +} + +impl<'a, 'de, T: Iterator<Item = &'a Value>> de::SeqAccess<'de> for Deserializer<T> { + type Error = Error; + + #[inline] + fn next_element_seed<U: de::DeserializeSeed<'de>>( + &mut self, + seed: U, + ) -> Result<Option<U::Value>, Self::Error> { + match self.0.next() { + None => Ok(None), + Some(v) => seed.deserialize(Deserializer(v)).map(Some), + } + } +} + +impl<'a, 'de, T: Iterator<Item = &'a (Value, Value)>> de::MapAccess<'de> + for Deserializer<Peekable<T>> +{ + type Error = Error; + + #[inline] + fn next_key_seed<K: de::DeserializeSeed<'de>>( + &mut self, + seed: K, + ) -> Result<Option<K::Value>, Self::Error> { + match self.0.peek() { + None => Ok(None), + Some(x) => Ok(Some(seed.deserialize(Deserializer(&x.0))?)), + } + } + + #[inline] + fn next_value_seed<V: de::DeserializeSeed<'de>>( + &mut self, + seed: V, + ) -> Result<V::Value, Self::Error> { + seed.deserialize(Deserializer(&self.0.next().unwrap().1)) + } +} + +impl<'a, 'de> de::EnumAccess<'de> for Deserializer<&'a (Value, Value)> { + type Error = Error; + type Variant = Deserializer<&'a Value>; + + #[inline] + fn variant_seed<V: de::DeserializeSeed<'de>>( + self, + seed: V, + ) -> Result<(V::Value, Self::Variant), Self::Error> { + let k = seed.deserialize(Deserializer(&self.0 .0))?; + Ok((k, Deserializer(&self.0 .1))) + } +} + +impl<'a, 'de> de::EnumAccess<'de> for Deserializer<&'a Value> { + type Error = Error; + type Variant = Deserializer<&'a Value>; + + #[inline] + fn variant_seed<V: de::DeserializeSeed<'de>>( + self, + seed: V, + ) -> Result<(V::Value, Self::Variant), Self::Error> { + let k = seed.deserialize(self)?; + Ok((k, Deserializer(&Value::Null))) + } +} + +impl<'a, 'de> de::VariantAccess<'de> for Deserializer<&'a Value> { + type Error = Error; + + #[inline] + fn unit_variant(self) -> Result<(), Self::Error> { + match self.0 { + Value::Null => Ok(()), + _ => Err(de::Error::invalid_type(self.0.into(), &"unit")), + } + } + + #[inline] + fn newtype_variant_seed<U: de::DeserializeSeed<'de>>( + self, 
+ seed: U, + ) -> Result<U::Value, Self::Error> { + seed.deserialize(self) + } + + #[inline] + fn tuple_variant<V: de::Visitor<'de>>( + self, + _len: usize, + visitor: V, + ) -> Result<V::Value, Self::Error> { + self.deserialize_seq(visitor) + } + + #[inline] + fn struct_variant<V: de::Visitor<'de>>( + self, + _fields: &'static [&'static str], + visitor: V, + ) -> Result<V::Value, Self::Error> { + self.deserialize_map(visitor) + } +} + +impl Value { + /// Deserializes the `Value` into an object + #[inline] + pub fn deserialized<'de, T: de::Deserialize<'de>>(&self) -> Result<T, Error> { + T::deserialize(Deserializer(self)) + } +} diff --git a/vendor/ciborium/src/value/error.rs b/vendor/ciborium/src/value/error.rs new file mode 100644 index 00000000..bcec128f --- /dev/null +++ b/vendor/ciborium/src/value/error.rs @@ -0,0 +1,32 @@ +// SPDX-License-Identifier: Apache-2.0 + +use alloc::string::{String, ToString}; + +/// The error when serializing to/from a `Value` +#[derive(Debug)] +pub enum Error { + /// A custom error string produced by serde + Custom(String), +} + +impl core::fmt::Display for Error { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + write!(f, "{:?}", self) + } +} + +impl serde::de::StdError for Error {} + +impl serde::de::Error for Error { + #[inline] + fn custom<T: core::fmt::Display>(msg: T) -> Self { + Self::Custom(msg.to_string()) + } +} + +impl serde::ser::Error for Error { + #[inline] + fn custom<T: core::fmt::Display>(msg: T) -> Self { + Self::Custom(msg.to_string()) + } +} diff --git a/vendor/ciborium/src/value/integer.rs b/vendor/ciborium/src/value/integer.rs new file mode 100644 index 00000000..ef6ea3df --- /dev/null +++ b/vendor/ciborium/src/value/integer.rs @@ -0,0 +1,143 @@ +// SPDX-License-Identifier: Apache-2.0 +use core::cmp::Ordering; + +macro_rules! implfrom { + ($( $(#[$($attr:meta)+])? $t:ident)+) => { + $( + $(#[$($attr)+])? + impl From<$t> for Integer { + #[inline] + fn from(value: $t) -> Self { + Self(value as _) + } + } + + impl TryFrom<Integer> for $t { + type Error = core::num::TryFromIntError; + + #[inline] + fn try_from(value: Integer) -> Result<Self, Self::Error> { + $t::try_from(value.0) + } + } + )+ + }; +} + +/// An abstract integer value +/// +/// This opaque type represents an integer value which can be encoded in CBOR +/// without resulting to big integer encoding. Larger values may be encoded +/// using the big integer encoding as described in the CBOR RFC. See the +/// implementations for 128-bit integer conversions on `Value` for more +/// details. +#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub struct Integer(i128); + +impl Integer { + /// Returns the canonical length this integer will have when serialized to bytes. + /// This is called `canonical` as it is only used for canonically comparing two + /// values. It shouldn't be used in any other context. + fn canonical_len(&self) -> usize { + let x = self.0; + + if let Ok(x) = u8::try_from(x) { + if x < 24 { + 1 + } else { + 2 + } + } else if let Ok(x) = i8::try_from(x) { + if x >= -24i8 { + 1 + } else { + 2 + } + } else if u16::try_from(x).is_ok() || i16::try_from(x).is_ok() { + 3 + } else if u32::try_from(x).is_ok() || i32::try_from(x).is_ok() { + 5 + } else if u64::try_from(x).is_ok() || i64::try_from(x).is_ok() { + 9 + } else { + // Ciborium serializes u128/i128 as BigPos if they don't fit in 64 bits. + // In this special case we have to calculate the length. + // The Tag itself will always be 1 byte. 
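+ // i128::to_be_bytes() is always 16 bytes, so the canonical length used for
+ // comparison here works out to 16 + 1 = 17.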
+ x.to_be_bytes().len() + 1 + } + } + + /// Compare two integers as if we were to serialize them, but more efficiently. + pub fn canonical_cmp(&self, other: &Self) -> Ordering { + match self.canonical_len().cmp(&other.canonical_len()) { + Ordering::Equal => { + // Negative numbers are higher in byte-order than positive numbers. + match (self.0.is_negative(), other.0.is_negative()) { + (false, true) => Ordering::Less, + (true, false) => Ordering::Greater, + (true, true) => { + // For negative numbers the byte order puts numbers closer to 0 which + // are lexically higher, lower. So -1 < -2 when sorting by be_bytes(). + match self.0.cmp(&other.0) { + Ordering::Less => Ordering::Greater, + Ordering::Equal => Ordering::Equal, + Ordering::Greater => Ordering::Less, + } + } + (_, _) => self.0.cmp(&other.0), + } + } + x => x, + } + } +} + +implfrom! { + u8 u16 u32 u64 + i8 i16 i32 i64 + + #[cfg(any(target_pointer_width = "32", target_pointer_width = "64"))] + usize + + #[cfg(any(target_pointer_width = "32", target_pointer_width = "64"))] + isize +} + +impl TryFrom<i128> for Integer { + type Error = core::num::TryFromIntError; + + #[inline] + fn try_from(value: i128) -> Result<Self, Self::Error> { + u64::try_from(match value.is_negative() { + false => value, + true => value ^ !0, + })?; + + Ok(Integer(value)) + } +} + +impl TryFrom<u128> for Integer { + type Error = core::num::TryFromIntError; + + #[inline] + fn try_from(value: u128) -> Result<Self, Self::Error> { + Ok(Self(u64::try_from(value)?.into())) + } +} + +impl From<Integer> for i128 { + #[inline] + fn from(value: Integer) -> Self { + value.0 + } +} + +impl TryFrom<Integer> for u128 { + type Error = core::num::TryFromIntError; + + #[inline] + fn try_from(value: Integer) -> Result<Self, Self::Error> { + u128::try_from(value.0) + } +} diff --git a/vendor/ciborium/src/value/mod.rs b/vendor/ciborium/src/value/mod.rs new file mode 100644 index 00000000..7233026f --- /dev/null +++ b/vendor/ciborium/src/value/mod.rs @@ -0,0 +1,685 @@ +// SPDX-License-Identifier: Apache-2.0 + +//! A dynamic CBOR value + +mod canonical; +mod integer; + +mod de; +mod error; +mod ser; + +pub use canonical::CanonicalValue; +pub use error::Error; +pub use integer::Integer; + +use alloc::{boxed::Box, string::String, vec::Vec}; + +/// A representation of a dynamic CBOR value that can handled dynamically +#[non_exhaustive] +#[derive(Clone, Debug, PartialEq, PartialOrd)] +pub enum Value { + /// An integer + Integer(Integer), + + /// Bytes + Bytes(Vec<u8>), + + /// A float + Float(f64), + + /// A string + Text(String), + + /// A boolean + Bool(bool), + + /// Null + Null, + + /// Tag + Tag(u64, Box<Value>), + + /// An array + Array(Vec<Value>), + + /// A map + Map(Vec<(Value, Value)>), +} + +impl Value { + /// Returns true if the `Value` is an `Integer`. Returns false otherwise. + /// + /// ``` + /// # use ciborium::Value; + /// # + /// let value = Value::Integer(17.into()); + /// + /// assert!(value.is_integer()); + /// ``` + pub fn is_integer(&self) -> bool { + self.as_integer().is_some() + } + + /// If the `Value` is a `Integer`, returns a reference to the associated `Integer` data. + /// Returns None otherwise. 
+ /// + /// ``` + /// # use ciborium::Value; + /// # + /// let value = Value::Integer(17.into()); + /// + /// // We can read the number + /// assert_eq!(17, value.as_integer().unwrap().try_into().unwrap()); + /// ``` + pub fn as_integer(&self) -> Option<Integer> { + match self { + Value::Integer(int) => Some(*int), + _ => None, + } + } + + /// If the `Value` is a `Integer`, returns a the associated `Integer` data as `Ok`. + /// Returns `Err(Self)` otherwise. + /// + /// ``` + /// # use ciborium::{Value, value::Integer}; + /// # + /// let value = Value::Integer(17.into()); + /// assert_eq!(value.into_integer(), Ok(Integer::from(17))); + /// + /// let value = Value::Bool(true); + /// assert_eq!(value.into_integer(), Err(Value::Bool(true))); + /// ``` + pub fn into_integer(self) -> Result<Integer, Self> { + match self { + Value::Integer(int) => Ok(int), + other => Err(other), + } + } + + /// Returns true if the `Value` is a `Bytes`. Returns false otherwise. + /// + /// ``` + /// # use ciborium::Value; + /// # + /// let value = Value::Bytes(vec![104, 101, 108, 108, 111]); + /// + /// assert!(value.is_bytes()); + /// ``` + pub fn is_bytes(&self) -> bool { + self.as_bytes().is_some() + } + + /// If the `Value` is a `Bytes`, returns a reference to the associated bytes vector. + /// Returns None otherwise. + /// + /// ``` + /// # use ciborium::Value; + /// # + /// let value = Value::Bytes(vec![104, 101, 108, 108, 111]); + /// + /// assert_eq!(std::str::from_utf8(value.as_bytes().unwrap()).unwrap(), "hello"); + /// ``` + pub fn as_bytes(&self) -> Option<&Vec<u8>> { + match *self { + Value::Bytes(ref bytes) => Some(bytes), + _ => None, + } + } + + /// If the `Value` is a `Bytes`, returns a mutable reference to the associated bytes vector. + /// Returns None otherwise. + /// + /// ``` + /// # use ciborium::Value; + /// # + /// let mut value = Value::Bytes(vec![104, 101, 108, 108, 111]); + /// value.as_bytes_mut().unwrap().clear(); + /// + /// assert_eq!(value, Value::Bytes(vec![])); + /// ``` + pub fn as_bytes_mut(&mut self) -> Option<&mut Vec<u8>> { + match *self { + Value::Bytes(ref mut bytes) => Some(bytes), + _ => None, + } + } + + /// If the `Value` is a `Bytes`, returns a the associated `Vec<u8>` data as `Ok`. + /// Returns `Err(Self)` otherwise. + /// + /// ``` + /// # use ciborium::Value; + /// # + /// let value = Value::Bytes(vec![104, 101, 108, 108, 111]); + /// assert_eq!(value.into_bytes(), Ok(vec![104, 101, 108, 108, 111])); + /// + /// let value = Value::Bool(true); + /// assert_eq!(value.into_bytes(), Err(Value::Bool(true))); + /// ``` + pub fn into_bytes(self) -> Result<Vec<u8>, Self> { + match self { + Value::Bytes(vec) => Ok(vec), + other => Err(other), + } + } + + /// Returns true if the `Value` is a `Float`. Returns false otherwise. + /// + /// ``` + /// # use ciborium::Value; + /// # + /// let value = Value::Float(17.0.into()); + /// + /// assert!(value.is_float()); + /// ``` + pub fn is_float(&self) -> bool { + self.as_float().is_some() + } + + /// If the `Value` is a `Float`, returns a reference to the associated float data. + /// Returns None otherwise. + /// + /// ``` + /// # use ciborium::Value; + /// # + /// let value = Value::Float(17.0.into()); + /// + /// // We can read the float number + /// assert_eq!(value.as_float().unwrap(), 17.0_f64); + /// ``` + pub fn as_float(&self) -> Option<f64> { + match *self { + Value::Float(f) => Some(f), + _ => None, + } + } + + /// If the `Value` is a `Float`, returns a the associated `f64` data as `Ok`. 
+ /// Returns `Err(Self)` otherwise. + /// + /// ``` + /// # use ciborium::Value; + /// # + /// let value = Value::Float(17.); + /// assert_eq!(value.into_float(), Ok(17.)); + /// + /// let value = Value::Bool(true); + /// assert_eq!(value.into_float(), Err(Value::Bool(true))); + /// ``` + pub fn into_float(self) -> Result<f64, Self> { + match self { + Value::Float(f) => Ok(f), + other => Err(other), + } + } + + /// Returns true if the `Value` is a `Text`. Returns false otherwise. + /// + /// ``` + /// # use ciborium::Value; + /// # + /// let value = Value::Text(String::from("hello")); + /// + /// assert!(value.is_text()); + /// ``` + pub fn is_text(&self) -> bool { + self.as_text().is_some() + } + + /// If the `Value` is a `Text`, returns a reference to the associated `String` data. + /// Returns None otherwise. + /// + /// ``` + /// # use ciborium::Value; + /// # + /// let value = Value::Text(String::from("hello")); + /// + /// // We can read the String + /// assert_eq!(value.as_text().unwrap(), "hello"); + /// ``` + pub fn as_text(&self) -> Option<&str> { + match *self { + Value::Text(ref s) => Some(s), + _ => None, + } + } + + /// If the `Value` is a `Text`, returns a mutable reference to the associated `String` data. + /// Returns None otherwise. + /// + /// ``` + /// # use ciborium::Value; + /// # + /// let mut value = Value::Text(String::from("hello")); + /// value.as_text_mut().unwrap().clear(); + /// + /// assert_eq!(value.as_text().unwrap(), &String::from("")); + /// ``` + pub fn as_text_mut(&mut self) -> Option<&mut String> { + match *self { + Value::Text(ref mut s) => Some(s), + _ => None, + } + } + + /// If the `Value` is a `String`, returns a the associated `String` data as `Ok`. + /// Returns `Err(Self)` otherwise. + /// + /// ``` + /// # use ciborium::Value; + /// # + /// let value = Value::Text(String::from("hello")); + /// assert_eq!(value.into_text().as_deref(), Ok("hello")); + /// + /// let value = Value::Bool(true); + /// assert_eq!(value.into_text(), Err(Value::Bool(true))); + /// ``` + pub fn into_text(self) -> Result<String, Self> { + match self { + Value::Text(s) => Ok(s), + other => Err(other), + } + } + + /// Returns true if the `Value` is a `Bool`. Returns false otherwise. + /// + /// ``` + /// # use ciborium::Value; + /// # + /// let value = Value::Bool(false); + /// + /// assert!(value.is_bool()); + /// ``` + pub fn is_bool(&self) -> bool { + self.as_bool().is_some() + } + + /// If the `Value` is a `Bool`, returns a copy of the associated boolean value. Returns None + /// otherwise. + /// + /// ``` + /// # use ciborium::Value; + /// # + /// let value = Value::Bool(false); + /// + /// assert_eq!(value.as_bool().unwrap(), false); + /// ``` + pub fn as_bool(&self) -> Option<bool> { + match *self { + Value::Bool(b) => Some(b), + _ => None, + } + } + + /// If the `Value` is a `Bool`, returns a the associated `bool` data as `Ok`. + /// Returns `Err(Self)` otherwise. + /// + /// ``` + /// # use ciborium::Value; + /// # + /// let value = Value::Bool(false); + /// assert_eq!(value.into_bool(), Ok(false)); + /// + /// let value = Value::Float(17.); + /// assert_eq!(value.into_bool(), Err(Value::Float(17.))); + /// ``` + pub fn into_bool(self) -> Result<bool, Self> { + match self { + Value::Bool(b) => Ok(b), + other => Err(other), + } + } + + /// Returns true if the `Value` is a `Null`. Returns false otherwise. 
+ /// + /// ``` + /// # use ciborium::Value; + /// # + /// let value = Value::Null; + /// + /// assert!(value.is_null()); + /// ``` + pub fn is_null(&self) -> bool { + matches!(self, Value::Null) + } + + /// Returns true if the `Value` is a `Tag`. Returns false otherwise. + /// + /// ``` + /// # use ciborium::Value; + /// # + /// let value = Value::Tag(61, Box::from(Value::Null)); + /// + /// assert!(value.is_tag()); + /// ``` + pub fn is_tag(&self) -> bool { + self.as_tag().is_some() + } + + /// If the `Value` is a `Tag`, returns the associated tag value and a reference to the tag `Value`. + /// Returns None otherwise. + /// + /// ``` + /// # use ciborium::Value; + /// # + /// let value = Value::Tag(61, Box::from(Value::Bytes(vec![104, 101, 108, 108, 111]))); + /// + /// let (tag, data) = value.as_tag().unwrap(); + /// assert_eq!(tag, 61); + /// assert_eq!(data, &Value::Bytes(vec![104, 101, 108, 108, 111])); + /// ``` + pub fn as_tag(&self) -> Option<(u64, &Value)> { + match self { + Value::Tag(tag, data) => Some((*tag, data)), + _ => None, + } + } + + /// If the `Value` is a `Tag`, returns the associated tag value and a mutable reference + /// to the tag `Value`. Returns None otherwise. + /// + /// ``` + /// # use ciborium::Value; + /// # + /// let mut value = Value::Tag(61, Box::from(Value::Bytes(vec![104, 101, 108, 108, 111]))); + /// + /// let (tag, mut data) = value.as_tag_mut().unwrap(); + /// data.as_bytes_mut().unwrap().clear(); + /// assert_eq!(tag, &61); + /// assert_eq!(data, &Value::Bytes(vec![])); + /// ``` + pub fn as_tag_mut(&mut self) -> Option<(&mut u64, &mut Value)> { + match self { + Value::Tag(tag, data) => Some((tag, data.as_mut())), + _ => None, + } + } + + /// If the `Value` is a `Tag`, returns a the associated pair of `u64` and `Box<value>` data as `Ok`. + /// Returns `Err(Self)` otherwise. + /// + /// ``` + /// # use ciborium::Value; + /// # + /// let value = Value::Tag(7, Box::new(Value::Float(12.))); + /// assert_eq!(value.into_tag(), Ok((7, Box::new(Value::Float(12.))))); + /// + /// let value = Value::Bool(true); + /// assert_eq!(value.into_tag(), Err(Value::Bool(true))); + /// ``` + pub fn into_tag(self) -> Result<(u64, Box<Value>), Self> { + match self { + Value::Tag(tag, value) => Ok((tag, value)), + other => Err(other), + } + } + + /// Returns true if the `Value` is an Array. Returns false otherwise. + /// + /// ``` + /// # use ciborium::Value; + /// # + /// let value = Value::Array( + /// vec![ + /// Value::Text(String::from("foo")), + /// Value::Text(String::from("bar")) + /// ] + /// ); + /// + /// assert!(value.is_array()); + /// ``` + pub fn is_array(&self) -> bool { + self.as_array().is_some() + } + + /// If the `Value` is an Array, returns a reference to the associated vector. Returns None + /// otherwise. + /// + /// ``` + /// # use ciborium::Value; + /// # + /// let value = Value::Array( + /// vec![ + /// Value::Text(String::from("foo")), + /// Value::Text(String::from("bar")) + /// ] + /// ); + /// + /// // The length of `value` is 2 elements. + /// assert_eq!(value.as_array().unwrap().len(), 2); + /// ``` + pub fn as_array(&self) -> Option<&Vec<Value>> { + match *self { + Value::Array(ref array) => Some(array), + _ => None, + } + } + + /// If the `Value` is an Array, returns a mutable reference to the associated vector. + /// Returns None otherwise. 
+ /// + /// ``` + /// # use ciborium::Value; + /// # + /// let mut value = Value::Array( + /// vec![ + /// Value::Text(String::from("foo")), + /// Value::Text(String::from("bar")) + /// ] + /// ); + /// + /// value.as_array_mut().unwrap().clear(); + /// assert_eq!(value, Value::Array(vec![])); + /// ``` + pub fn as_array_mut(&mut self) -> Option<&mut Vec<Value>> { + match *self { + Value::Array(ref mut list) => Some(list), + _ => None, + } + } + + /// If the `Value` is a `Array`, returns a the associated `Vec<Value>` data as `Ok`. + /// Returns `Err(Self)` otherwise. + /// + /// ``` + /// # use ciborium::{Value, value::Integer}; + /// # + /// let mut value = Value::Array( + /// vec![ + /// Value::Integer(17.into()), + /// Value::Float(18.), + /// ] + /// ); + /// assert_eq!(value.into_array(), Ok(vec![Value::Integer(17.into()), Value::Float(18.)])); + /// + /// let value = Value::Bool(true); + /// assert_eq!(value.into_array(), Err(Value::Bool(true))); + /// ``` + pub fn into_array(self) -> Result<Vec<Value>, Self> { + match self { + Value::Array(vec) => Ok(vec), + other => Err(other), + } + } + + /// Returns true if the `Value` is a Map. Returns false otherwise. + /// + /// ``` + /// # use ciborium::Value; + /// # + /// let value = Value::Map( + /// vec![ + /// (Value::Text(String::from("foo")), Value::Text(String::from("bar"))) + /// ] + /// ); + /// + /// assert!(value.is_map()); + /// ``` + pub fn is_map(&self) -> bool { + self.as_map().is_some() + } + + /// If the `Value` is a Map, returns a reference to the associated Map data. Returns None + /// otherwise. + /// + /// ``` + /// # use ciborium::Value; + /// # + /// let value = Value::Map( + /// vec![ + /// (Value::Text(String::from("foo")), Value::Text(String::from("bar"))) + /// ] + /// ); + /// + /// // The length of data is 1 entry (1 key/value pair). + /// assert_eq!(value.as_map().unwrap().len(), 1); + /// + /// // The content of the first element is what we expect + /// assert_eq!( + /// value.as_map().unwrap().get(0).unwrap(), + /// &(Value::Text(String::from("foo")), Value::Text(String::from("bar"))) + /// ); + /// ``` + pub fn as_map(&self) -> Option<&Vec<(Value, Value)>> { + match *self { + Value::Map(ref map) => Some(map), + _ => None, + } + } + + /// If the `Value` is a Map, returns a mutable reference to the associated Map Data. + /// Returns None otherwise. + /// + /// ``` + /// # use ciborium::Value; + /// # + /// let mut value = Value::Map( + /// vec![ + /// (Value::Text(String::from("foo")), Value::Text(String::from("bar"))) + /// ] + /// ); + /// + /// value.as_map_mut().unwrap().clear(); + /// assert_eq!(value, Value::Map(vec![])); + /// assert_eq!(value.as_map().unwrap().len(), 0); + /// ``` + pub fn as_map_mut(&mut self) -> Option<&mut Vec<(Value, Value)>> { + match *self { + Value::Map(ref mut map) => Some(map), + _ => None, + } + } + + /// If the `Value` is a `Map`, returns a the associated `Vec<(Value, Value)>` data as `Ok`. + /// Returns `Err(Self)` otherwise. + /// + /// ``` + /// # use ciborium::Value; + /// # + /// let mut value = Value::Map( + /// vec![ + /// (Value::Text(String::from("key")), Value::Float(18.)), + /// ] + /// ); + /// assert_eq!(value.into_map(), Ok(vec![(Value::Text(String::from("key")), Value::Float(18.))])); + /// + /// let value = Value::Bool(true); + /// assert_eq!(value.into_map(), Err(Value::Bool(true))); + /// ``` + pub fn into_map(self) -> Result<Vec<(Value, Value)>, Self> { + match self { + Value::Map(map) => Ok(map), + other => Err(other), + } + } +} + +macro_rules! 
implfrom { + ($($v:ident($t:ty)),+ $(,)?) => { + $( + impl From<$t> for Value { + #[inline] + fn from(value: $t) -> Self { + Self::$v(value.into()) + } + } + )+ + }; +} + +implfrom! { + Integer(Integer), + Integer(u64), + Integer(i64), + Integer(u32), + Integer(i32), + Integer(u16), + Integer(i16), + Integer(u8), + Integer(i8), + + Bytes(Vec<u8>), + Bytes(&[u8]), + + Float(f64), + Float(f32), + + Text(String), + Text(&str), + + Bool(bool), + + Array(&[Value]), + Array(Vec<Value>), + + Map(&[(Value, Value)]), + Map(Vec<(Value, Value)>), +} + +impl From<u128> for Value { + #[inline] + fn from(value: u128) -> Self { + if let Ok(x) = Integer::try_from(value) { + return Value::Integer(x); + } + + let mut bytes = &value.to_be_bytes()[..]; + while let Some(0) = bytes.first() { + bytes = &bytes[1..]; + } + + Value::Tag(ciborium_ll::tag::BIGPOS, Value::Bytes(bytes.into()).into()) + } +} + +impl From<i128> for Value { + #[inline] + fn from(value: i128) -> Self { + if let Ok(x) = Integer::try_from(value) { + return Value::Integer(x); + } + + let (tag, raw) = match value.is_negative() { + true => (ciborium_ll::tag::BIGNEG, value as u128 ^ !0), + false => (ciborium_ll::tag::BIGPOS, value as u128), + }; + + let mut bytes = &raw.to_be_bytes()[..]; + while let Some(0) = bytes.first() { + bytes = &bytes[1..]; + } + + Value::Tag(tag, Value::Bytes(bytes.into()).into()) + } +} + +impl From<char> for Value { + #[inline] + fn from(value: char) -> Self { + let mut v = String::with_capacity(1); + v.push(value); + Value::Text(v) + } +} diff --git a/vendor/ciborium/src/value/ser.rs b/vendor/ciborium/src/value/ser.rs new file mode 100644 index 00000000..3406d0b8 --- /dev/null +++ b/vendor/ciborium/src/value/ser.rs @@ -0,0 +1,438 @@ +// SPDX-License-Identifier: Apache-2.0 + +use super::{Error, Value}; + +use alloc::{vec, vec::Vec}; + +use ::serde::ser::{self, SerializeMap as _, SerializeSeq as _, SerializeTupleVariant as _}; + +impl ser::Serialize for Value { + #[inline] + fn serialize<S: ser::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { + match self { + Value::Bytes(x) => serializer.serialize_bytes(x), + Value::Bool(x) => serializer.serialize_bool(*x), + Value::Text(x) => serializer.serialize_str(x), + Value::Null => serializer.serialize_unit(), + + Value::Tag(t, v) => { + let mut acc = serializer.serialize_tuple_variant("@@TAG@@", 0, "@@TAGGED@@", 2)?; + acc.serialize_field(t)?; + acc.serialize_field(v)?; + acc.end() + } + + Value::Float(x) => { + let y = *x as f32; + if (y as f64).to_bits() == x.to_bits() { + serializer.serialize_f32(y) + } else { + serializer.serialize_f64(*x) + } + } + + Value::Integer(x) => { + if let Ok(x) = u8::try_from(*x) { + serializer.serialize_u8(x) + } else if let Ok(x) = i8::try_from(*x) { + serializer.serialize_i8(x) + } else if let Ok(x) = u16::try_from(*x) { + serializer.serialize_u16(x) + } else if let Ok(x) = i16::try_from(*x) { + serializer.serialize_i16(x) + } else if let Ok(x) = u32::try_from(*x) { + serializer.serialize_u32(x) + } else if let Ok(x) = i32::try_from(*x) { + serializer.serialize_i32(x) + } else if let Ok(x) = u64::try_from(*x) { + serializer.serialize_u64(x) + } else if let Ok(x) = i64::try_from(*x) { + serializer.serialize_i64(x) + } else if let Ok(x) = u128::try_from(*x) { + serializer.serialize_u128(x) + } else if let Ok(x) = i128::try_from(*x) { + serializer.serialize_i128(x) + } else { + unreachable!() + } + } + + Value::Array(x) => { + let mut map = serializer.serialize_seq(Some(x.len()))?; + + for v in x { + map.serialize_element(v)?; + } 
+ + map.end() + } + + Value::Map(x) => { + let mut map = serializer.serialize_map(Some(x.len()))?; + + for (k, v) in x { + map.serialize_entry(k, v)?; + } + + map.end() + } + } + } +} + +macro_rules! mkserialize { + ($($f:ident($v:ty)),+ $(,)?) => { + $( + #[inline] + fn $f(self, v: $v) -> Result<Self::Ok, Self::Error> { + Ok(v.into()) + } + )+ + }; +} + +struct Tagged { + tag: Option<u64>, + val: Option<Value>, +} + +struct Named<T> { + name: &'static str, + data: T, + tag: Option<Tagged>, +} + +struct Map { + data: Vec<(Value, Value)>, + temp: Option<Value>, +} + +struct Serializer<T>(T); + +impl ser::Serializer for Serializer<()> { + type Ok = Value; + type Error = Error; + + type SerializeSeq = Serializer<Vec<Value>>; + type SerializeTuple = Serializer<Vec<Value>>; + type SerializeTupleStruct = Serializer<Vec<Value>>; + type SerializeTupleVariant = Serializer<Named<Vec<Value>>>; + type SerializeMap = Serializer<Map>; + type SerializeStruct = Serializer<Vec<(Value, Value)>>; + type SerializeStructVariant = Serializer<Named<Vec<(Value, Value)>>>; + + mkserialize! { + serialize_bool(bool), + + serialize_f32(f32), + serialize_f64(f64), + + serialize_i8(i8), + serialize_i16(i16), + serialize_i32(i32), + serialize_i64(i64), + serialize_i128(i128), + serialize_u8(u8), + serialize_u16(u16), + serialize_u32(u32), + serialize_u64(u64), + serialize_u128(u128), + + serialize_char(char), + serialize_str(&str), + serialize_bytes(&[u8]), + } + + #[inline] + fn serialize_none(self) -> Result<Value, Error> { + Ok(Value::Null) + } + + #[inline] + fn serialize_some<U: ?Sized + ser::Serialize>(self, value: &U) -> Result<Value, Error> { + value.serialize(self) + } + + #[inline] + fn serialize_unit(self) -> Result<Value, Error> { + self.serialize_none() + } + + #[inline] + fn serialize_unit_struct(self, _name: &'static str) -> Result<Value, Error> { + self.serialize_unit() + } + + #[inline] + fn serialize_unit_variant( + self, + _name: &'static str, + _index: u32, + variant: &'static str, + ) -> Result<Value, Error> { + Ok(variant.into()) + } + + #[inline] + fn serialize_newtype_struct<U: ?Sized + ser::Serialize>( + self, + _name: &'static str, + value: &U, + ) -> Result<Value, Error> { + value.serialize(self) + } + + #[inline] + fn serialize_newtype_variant<U: ?Sized + ser::Serialize>( + self, + name: &'static str, + _index: u32, + variant: &'static str, + value: &U, + ) -> Result<Value, Error> { + Ok(match (name, variant) { + ("@@TAG@@", "@@UNTAGGED@@") => Value::serialized(value)?, + _ => vec![(variant.into(), Value::serialized(value)?)].into(), + }) + } + + #[inline] + fn serialize_seq(self, length: Option<usize>) -> Result<Self::SerializeSeq, Error> { + Ok(Serializer(Vec::with_capacity(length.unwrap_or(0)))) + } + + #[inline] + fn serialize_tuple(self, length: usize) -> Result<Self::SerializeTuple, Error> { + self.serialize_seq(Some(length)) + } + + #[inline] + fn serialize_tuple_struct( + self, + _name: &'static str, + length: usize, + ) -> Result<Self::SerializeTupleStruct, Error> { + self.serialize_seq(Some(length)) + } + + #[inline] + fn serialize_tuple_variant( + self, + name: &'static str, + _index: u32, + variant: &'static str, + length: usize, + ) -> Result<Self::SerializeTupleVariant, Error> { + Ok(Serializer(Named { + name: variant, + data: Vec::with_capacity(length), + tag: match (name, variant) { + ("@@TAG@@", "@@TAGGED@@") => Some(Tagged { + tag: None, + val: None, + }), + + _ => None, + }, + })) + } + + #[inline] + fn serialize_map(self, length: Option<usize>) -> 
Result<Self::SerializeMap, Error> { + Ok(Serializer(Map { + data: Vec::with_capacity(length.unwrap_or(0)), + temp: None, + })) + } + + #[inline] + fn serialize_struct( + self, + _name: &'static str, + length: usize, + ) -> Result<Self::SerializeStruct, Error> { + Ok(Serializer(Vec::with_capacity(length))) + } + + #[inline] + fn serialize_struct_variant( + self, + _name: &'static str, + _index: u32, + variant: &'static str, + length: usize, + ) -> Result<Self::SerializeStructVariant, Error> { + Ok(Serializer(Named { + name: variant, + data: Vec::with_capacity(length), + tag: None, + })) + } + + fn is_human_readable(&self) -> bool { + false + } +} + +impl ser::SerializeSeq for Serializer<Vec<Value>> { + type Ok = Value; + type Error = Error; + + #[inline] + fn serialize_element<U: ?Sized + ser::Serialize>(&mut self, value: &U) -> Result<(), Error> { + self.0.push(Value::serialized(&value)?); + Ok(()) + } + + #[inline] + fn end(self) -> Result<Self::Ok, Self::Error> { + Ok(self.0.into()) + } +} + +impl ser::SerializeTuple for Serializer<Vec<Value>> { + type Ok = Value; + type Error = Error; + + #[inline] + fn serialize_element<U: ?Sized + ser::Serialize>(&mut self, value: &U) -> Result<(), Error> { + self.0.push(Value::serialized(&value)?); + Ok(()) + } + + #[inline] + fn end(self) -> Result<Self::Ok, Self::Error> { + Ok(self.0.into()) + } +} + +impl ser::SerializeTupleStruct for Serializer<Vec<Value>> { + type Ok = Value; + type Error = Error; + + #[inline] + fn serialize_field<U: ?Sized + ser::Serialize>(&mut self, value: &U) -> Result<(), Error> { + self.0.push(Value::serialized(&value)?); + Ok(()) + } + + #[inline] + fn end(self) -> Result<Self::Ok, Self::Error> { + Ok(self.0.into()) + } +} + +impl ser::SerializeTupleVariant for Serializer<Named<Vec<Value>>> { + type Ok = Value; + type Error = Error; + + #[inline] + fn serialize_field<U: ?Sized + ser::Serialize>(&mut self, value: &U) -> Result<(), Error> { + match self.0.tag.as_mut() { + Some(tag) => match tag.tag { + None => match value.serialize(crate::tag::Serializer) { + Ok(t) => tag.tag = Some(t), + Err(..) => return Err(ser::Error::custom("expected tag")), + }, + + Some(..) 
=> tag.val = Some(Value::serialized(value)?), + }, + + None => self.0.data.push(Value::serialized(value)?), + } + + Ok(()) + } + + #[inline] + fn end(self) -> Result<Self::Ok, Self::Error> { + Ok(match self.0.tag { + Some(tag) => match tag { + Tagged { + tag: Some(t), + val: Some(v), + } => Value::Tag(t, v.into()), + _ => return Err(ser::Error::custom("invalid tag input")), + }, + + None => vec![(self.0.name.into(), self.0.data.into())].into(), + }) + } +} + +impl ser::SerializeMap for Serializer<Map> { + type Ok = Value; + type Error = Error; + + #[inline] + fn serialize_key<U: ?Sized + ser::Serialize>(&mut self, key: &U) -> Result<(), Error> { + self.0.temp = Some(Value::serialized(key)?); + Ok(()) + } + + #[inline] + fn serialize_value<U: ?Sized + ser::Serialize>(&mut self, value: &U) -> Result<(), Error> { + let key = self.0.temp.take().unwrap(); + let val = Value::serialized(&value)?; + + self.0.data.push((key, val)); + Ok(()) + } + + #[inline] + fn end(self) -> Result<Self::Ok, Self::Error> { + Ok(self.0.data.into()) + } +} + +impl ser::SerializeStruct for Serializer<Vec<(Value, Value)>> { + type Ok = Value; + type Error = Error; + + #[inline] + fn serialize_field<U: ?Sized + ser::Serialize>( + &mut self, + key: &'static str, + value: &U, + ) -> Result<(), Error> { + let k = Value::serialized(&key)?; + let v = Value::serialized(&value)?; + self.0.push((k, v)); + Ok(()) + } + + #[inline] + fn end(self) -> Result<Self::Ok, Self::Error> { + Ok(self.0.into()) + } +} + +impl ser::SerializeStructVariant for Serializer<Named<Vec<(Value, Value)>>> { + type Ok = Value; + type Error = Error; + + #[inline] + fn serialize_field<U: ?Sized + ser::Serialize>( + &mut self, + key: &'static str, + value: &U, + ) -> Result<(), Self::Error> { + let k = Value::serialized(&key)?; + let v = Value::serialized(&value)?; + self.0.data.push((k, v)); + Ok(()) + } + + #[inline] + fn end(self) -> Result<Self::Ok, Self::Error> { + Ok(vec![(self.0.name.into(), self.0.data.into())].into()) + } +} + +impl Value { + /// Serializes an object into a `Value` + #[inline] + pub fn serialized<T: ?Sized + ser::Serialize>(value: &T) -> Result<Self, Error> { + value.serialize(Serializer(())) + } +} diff --git a/vendor/ciborium/tests/canonical.rs b/vendor/ciborium/tests/canonical.rs new file mode 100644 index 00000000..d4aa33c5 --- /dev/null +++ b/vendor/ciborium/tests/canonical.rs @@ -0,0 +1,111 @@ +// SPDX-License-Identifier: Apache-2.0 + +extern crate std; + +use ciborium::cbor; +use ciborium::tag::Required; +use ciborium::value::CanonicalValue; +use rand::prelude::*; +use std::collections::BTreeMap; + +macro_rules! cval { + ($x:expr) => { + CanonicalValue::from(val!($x)) + }; +} + +macro_rules! val { + ($x:expr) => { + cbor!($x).unwrap() + }; +} + +#[test] +fn rfc8949_example() { + let mut array: Vec<CanonicalValue> = vec![ + cval!(10), + cval!(-1), + cval!(false), + cval!(100), + cval!("z"), + cval!([-1]), + cval!("aa"), + cval!([100]), + ]; + let golden = array.clone(); + + // Shuffle the array. 
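+ // Shuffling and re-sorting should reproduce `golden`, i.e. the canonical
+ // RFC 8949 key order: shorter encodings first, ties broken bytewise.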
+ array.shuffle(&mut rand::thread_rng()); + + array.sort(); + + assert_eq!(array, golden); +} + +#[test] +fn map() { + let mut map = BTreeMap::new(); + map.insert(cval!(false), val!(2)); + map.insert(cval!([-1]), val!(5)); + map.insert(cval!(-1), val!(1)); + map.insert(cval!(10), val!(0)); + map.insert(cval!(100), val!(3)); + map.insert(cval!([100]), val!(7)); + map.insert(cval!("z"), val!(4)); + map.insert(cval!("aa"), val!(6)); + + let mut bytes1 = Vec::new(); + ciborium::ser::into_writer(&map, &mut bytes1).unwrap(); + + assert_eq!( + hex::encode(&bytes1), + "a80a002001f402186403617a048120056261610681186407" + ); +} + +#[test] +fn negative_numbers() { + let mut array: Vec<CanonicalValue> = vec![ + cval!(10), + cval!(-1), + cval!(-2), + cval!(-3), + cval!(-4), + cval!(false), + cval!(100), + cval!(-100), + cval!(-200), + cval!("z"), + cval!([-1]), + cval!(-300), + cval!("aa"), + cval!([100]), + ]; + let golden = array.clone(); + + // Shuffle the array. + array.shuffle(&mut rand::thread_rng()); + + array.sort(); + + assert_eq!(array, golden); +} + +#[test] +fn tagged_option() { + let mut opt = Some(Required::<u64, 0xff>(2u32.into())); + + let mut bytes = Vec::new(); + ciborium::ser::into_writer(&opt, &mut bytes).unwrap(); + + let output = ciborium::de::from_reader(&bytes[..]).unwrap(); + assert_eq!(opt, output); + + opt = None; + + let mut bytes = Vec::new(); + ciborium::ser::into_writer(&opt, &mut bytes).unwrap(); + + let output = ciborium::de::from_reader(&bytes[..]).unwrap(); + assert_eq!(opt, output); +} diff --git a/vendor/ciborium/tests/codec.rs b/vendor/ciborium/tests/codec.rs new file mode 100644 index 00000000..d7370596 --- /dev/null +++ b/vendor/ciborium/tests/codec.rs @@ -0,0 +1,438 @@ +// SPDX-License-Identifier: Apache-2.0 + +extern crate std; + +use std::collections::{BTreeMap, HashMap}; +use std::convert::TryFrom; +use std::fmt::Debug; + +use ciborium::value::Value; +use ciborium::{cbor, de::from_reader, de::from_reader_with_buffer, ser::into_writer}; + +use rstest::rstest; +use serde::{de::DeserializeOwned, Deserialize, Serialize}; + +macro_rules! val { + ($x:expr) => { + Value::try_from($x).unwrap() + }; +} + +macro_rules! hex { + ($x:expr) => { + serde_bytes::ByteBuf::from(hex::decode($x).unwrap()) + }; +} + +macro_rules! map { + ($($k:expr => $v:expr),*) => {{ + let mut map = BTreeMap::new(); + $( + map.insert($k, $v); + )* + map + }} +} + +// Keep the first "case" aligned to a line number ending in 1 for ease in finding tests. 
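+// Each case is (input, value, bytes, alternate, equality): `input` is the Rust value,
+// `value` its ciborium::Value form, `bytes` the hex-encoded CBOR, `alternate` marks
+// encodings that are only expected to decode (not to be produced when encoding), and
+// `equality` maps `input` to the value expected back from decoding.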
+#[allow(clippy::excessive_precision)] +#[rstest(input, value, bytes, alternate, equality, + + case(0u8, val!(0u8), "00", false, same), + case(0u16, val!(0u16), "00", false, same), + case(0u32, val!(0u32), "00", false, same), + case(0u64, val!(0u64), "00", false, same), + case(0u128, val!(0u128), "00", false, same), + case(0i8, val!(0i8), "00", false, same), + case(0i16, val!(0i16), "00", false, same), + case(0i32, val!(0i32), "00", false, same), + case(0i64, val!(0i64), "00", false, same), + case(0i128, val!(0i128), "00", false, same), + case(1u8, val!(1u8), "01", false, same), + case(1u16, val!(1u16), "01", false, same), + case(1u32, val!(1u32), "01", false, same), + case(1u64, val!(1u64), "01", false, same), + case(1u128, val!(1u128), "01", false, same), + case(1i8, val!(1i8), "01", false, same), + case(1i16, val!(1i16), "01", false, same), + case(1i32, val!(1i32), "01", false, same), + case(1i64, val!(1i64), "01", false, same), + case(1i128, val!(1i128), "01", false, same), + case(1u8, val!(1u8), "1b0000000000000001", true, same), + case(1u16, val!(1u16), "1b0000000000000001", true, same), + case(1u32, val!(1u32), "1b0000000000000001", true, same), + case(1u64, val!(1u64), "1b0000000000000001", true, same), + case(1u128, val!(1u128), "1b0000000000000001", true, same), + case(1i8, val!(1i8), "1b0000000000000001", true, same), + case(1i16, val!(1i16), "1b0000000000000001", true, same), + case(1i32, val!(1i32), "1b0000000000000001", true, same), + case(1i64, val!(1i64), "1b0000000000000001", true, same), + case(1i128, val!(1i128), "1b0000000000000001", true, same), + case(1u8, bigint(), "c2540000000000000000000000000000000000000001", true, same), // Not In RFC + case(1u16, bigint(), "c2540000000000000000000000000000000000000001", true, same), // Not In RFC + case(1u32, bigint(), "c2540000000000000000000000000000000000000001", true, same), // Not In RFC + case(1u64, bigint(), "c2540000000000000000000000000000000000000001", true, same), // Not In RFC + case(1u128, bigint(), "c2540000000000000000000000000000000000000001", true, same), // Not In RFC + case(1i8, bigint(), "c2540000000000000000000000000000000000000001", true, same), // Not In RFC + case(1i16, bigint(), "c2540000000000000000000000000000000000000001", true, same), // Not In RFC + case(1i32, bigint(), "c2540000000000000000000000000000000000000001", true, same), // Not In RFC + case(1i64, bigint(), "c2540000000000000000000000000000000000000001", true, same), // Not In RFC + case(1i128, bigint(), "c2540000000000000000000000000000000000000001", true, same), // Not In RFC + case(10u8, val!(10u8), "0a", false, same), + case(10u16, val!(10u16), "0a", false, same), + case(10u32, val!(10u32), "0a", false, same), + case(10u64, val!(10u64), "0a", false, same), + case(10u128, val!(10u128), "0a", false, same), + case(10i8, val!(10i8), "0a", false, same), + case(10i16, val!(10i16), "0a", false, same), + case(10i32, val!(10i32), "0a", false, same), + case(10i64, val!(10i64), "0a", false, same), + case(10i128, val!(10i128), "0a", false, same), + case(23u8, val!(23u8), "17", false, same), + case(23u16, val!(23u16), "17", false, same), + case(23u32, val!(23u32), "17", false, same), + case(23u64, val!(23u64), "17", false, same), + case(23u128, val!(23u128), "17", false, same), + case(23i8, val!(23i8), "17", false, same), + case(23i16, val!(23i16), "17", false, same), + case(23i32, val!(23i32), "17", false, same), + case(23i64, val!(23i64), "17", false, same), + case(23i128, val!(23i128), "17", false, same), + case(24u8, val!(24u8), "1818", false, 
same), + case(24u16, val!(24u16), "1818", false, same), + case(24u32, val!(24u32), "1818", false, same), + case(24u64, val!(24u64), "1818", false, same), + case(24u128, val!(24u128), "1818", false, same), + case(24i8, val!(24i8), "1818", false, same), + case(24i16, val!(24i16), "1818", false, same), + case(24i32, val!(24i32), "1818", false, same), + case(24i64, val!(24i64), "1818", false, same), + case(24i128, val!(24i128), "1818", false, same), + case(25u8, val!(25u8), "1819", false, same), + case(25u16, val!(25u16), "1819", false, same), + case(25u32, val!(25u32), "1819", false, same), + case(25u64, val!(25u64), "1819", false, same), + case(25u128, val!(25u128), "1819", false, same), + case(25i8, val!(25i8), "1819", false, same), + case(25i16, val!(25i16), "1819", false, same), + case(25i32, val!(25i32), "1819", false, same), + case(25i64, val!(25i64), "1819", false, same), + case(25i128, val!(25i128), "1819", false, same), + case(100u8, val!(100u8), "1864", false, same), + case(100u16, val!(100u16), "1864", false, same), + case(100u32, val!(100u32), "1864", false, same), + case(100u64, val!(100u64), "1864", false, same), + case(100u128, val!(100u128), "1864", false, same), + case(100i8, val!(100i8), "1864", false, same), + case(100i16, val!(100i16), "1864", false, same), + case(100i32, val!(100i32), "1864", false, same), + case(100i64, val!(100i64), "1864", false, same), + case(100i128, val!(100i128), "1864", false, same), + case(1000u16, val!(1000u16), "1903e8", false, same), + case(1000u32, val!(1000u32), "1903e8", false, same), + case(1000u64, val!(1000u64), "1903e8", false, same), + case(1000u128, val!(1000u128), "1903e8", false, same), + case(1000i16, val!(1000i16), "1903e8", false, same), + case(1000i32, val!(1000i32), "1903e8", false, same), + case(1000i64, val!(1000i64), "1903e8", false, same), + case(1000i128, val!(1000i128), "1903e8", false, same), + case(1000000u32, val!(1000000u32), "1a000f4240", false, same), + case(1000000u64, val!(1000000u64), "1a000f4240", false, same), + case(1000000u128, val!(1000000u128), "1a000f4240", false, same), + case(1000000i32, val!(1000000i32), "1a000f4240", false, same), + case(1000000i64, val!(1000000i64), "1a000f4240", false, same), + case(1000000i128, val!(1000000i128), "1a000f4240", false, same), + case(1000000000000u64, val!(1000000000000u64), "1b000000e8d4a51000", false, same), + case(1000000000000u128, val!(1000000000000u128), "1b000000e8d4a51000", false, same), + case(1000000000000i64, val!(1000000000000i64), "1b000000e8d4a51000", false, same), + case(1000000000000i128, val!(1000000000000i128), "1b000000e8d4a51000", false, same), + case(18446744073709551615u64, val!(18446744073709551615u64), "1bffffffffffffffff", false, same), + case(18446744073709551615u128, val!(18446744073709551615u128), "1bffffffffffffffff", false, same), + case(18446744073709551615i128, val!(18446744073709551615i128), "1bffffffffffffffff", false, same), + case(18446744073709551616u128, val!(18446744073709551616u128), "c249010000000000000000", false, same), + case(18446744073709551616i128, val!(18446744073709551616i128), "c249010000000000000000", false, same), + case(-18446744073709551617i128, val!(-18446744073709551617i128), "c349010000000000000000", false, same), + case(-18446744073709551616i128, val!(-18446744073709551616i128), "3bffffffffffffffff", false, same), + case(-1000i16, val!(-1000i16), "3903e7", false, same), + case(-1000i32, val!(-1000i32), "3903e7", false, same), + case(-1000i64, val!(-1000i64), "3903e7", false, same), + case(-1000i128, 
val!(-1000i128), "3903e7", false, same), + case(-100i8, val!(-100i8), "3863", false, same), + case(-100i16, val!(-100i16), "3863", false, same), + case(-100i32, val!(-100i32), "3863", false, same), + case(-100i64, val!(-100i64), "3863", false, same), + case(-100i128, val!(-100i128), "3863", false, same), + case(-10i8, val!(-10i8), "29", false, same), + case(-10i16, val!(-10i16), "29", false, same), + case(-10i32, val!(-10i32), "29", false, same), + case(-10i64, val!(-10i64), "29", false, same), + case(-10i128, val!(-10i128), "29", false, same), + case(-1i8, val!(-1i8), "20", false, same), + case(-1i16, val!(-1i16), "20", false, same), + case(-1i32, val!(-1i32), "20", false, same), + case(-1i64, val!(-1i64), "20", false, same), + case(-1i128, val!(-1i128), "20", false, same), + case(-1i8, val!(-1i8), "3b0000000000000000", true, same), + case(-1i16, val!(-1i16), "3b0000000000000000", true, same), + case(-1i32, val!(-1i32), "3b0000000000000000", true, same), + case(-1i64, val!(-1i64), "3b0000000000000000", true, same), + case(-1i128, val!(-1i128), "3b0000000000000000", true, same), + case(0.0f32, val!(0.0f32), "f90000", false, Float), + case(0.0f64, val!(0.0f64), "f90000", false, Float), + case(-0.0f32, val!(-0.0f32), "f98000", false, Float), + case(-0.0f64, val!(-0.0f64), "f98000", false, Float), + case(1.0f32, val!(1.0f32), "f93c00", false, Float), + case(1.0f64, val!(1.0f64), "f93c00", false, Float), + case(1.1f32, val!(1.1f32), "fa3f8ccccd", false, Float), // Not In RFC + case(1.1f64, val!(1.1f64), "fb3ff199999999999a", false, Float), + case(1.5f32, val!(1.5f32), "f93e00", false, Float), + case(1.5f64, val!(1.5f64), "f93e00", false, Float), + case(65504.0f32, val!(65504.0f32), "f97bff", false, Float), + case(65504.0f64, val!(65504.0f64), "f97bff", false, Float), + case(100000.0f32, val!(100000.0f32), "fa47c35000", false, Float), + case(100000.0f64, val!(100000.0f64), "fa47c35000", false, Float), + case(3.4028234663852886e+38f32, val!(3.4028234663852886e+38f32), "fa7f7fffff", false, Float), + case(3.4028234663852886e+38f64, val!(3.4028234663852886e+38f64), "fa7f7fffff", false, Float), + case(1.0e+300f64, val!(1.0e+300f64), "fb7e37e43c8800759c", false, Float), + case(5.960464477539063e-8f32, val!(5.960464477539063e-8f32), "f90001", false, Float), + case(5.960464477539063e-8f64, val!(5.960464477539063e-8f64), "f90001", false, Float), + case(0.00006103515625f32, val!(0.00006103515625f32), "f90400", false, Float), + case(0.00006103515625f64, val!(0.00006103515625f64), "f90400", false, Float), + case(-4.0f32, val!(-4.0f32), "f9c400", false, Float), + case(-4.0f64, val!(-4.0f64), "f9c400", false, Float), + case(-4.1f32, val!(-4.1f32), "fac0833333", false, Float), // Not In RFC + case(-4.1f64, val!(-4.1f64), "fbc010666666666666", false, Float), + case(core::f32::INFINITY, val!(core::f32::INFINITY), "f97c00", false, Float), + case(core::f64::INFINITY, val!(core::f64::INFINITY), "f97c00", false, Float), + case(core::f32::INFINITY, val!(core::f32::INFINITY), "fa7f800000", true, Float), + case(core::f64::INFINITY, val!(core::f64::INFINITY), "fa7f800000", true, Float), + case(core::f32::INFINITY, val!(core::f32::INFINITY), "fb7ff0000000000000", true, Float), + case(core::f64::INFINITY, val!(core::f64::INFINITY), "fb7ff0000000000000", true, Float), + case(-core::f32::INFINITY, val!(-core::f32::INFINITY), "f9fc00", false, Float), + case(-core::f64::INFINITY, val!(-core::f64::INFINITY), "f9fc00", false, Float), + case(-core::f32::INFINITY, val!(-core::f32::INFINITY), "faff800000", true, Float), + 
case(-core::f64::INFINITY, val!(-core::f64::INFINITY), "faff800000", true, Float), + case(-core::f32::INFINITY, val!(-core::f32::INFINITY), "fbfff0000000000000", true, Float), + case(-core::f64::INFINITY, val!(-core::f64::INFINITY), "fbfff0000000000000", true, Float), + case(core::f32::NAN, val!(core::f32::NAN), "f97e00", false, Float), + case(core::f64::NAN, val!(core::f64::NAN), "f97e00", false, Float), + case(core::f32::NAN, val!(core::f32::NAN), "fa7fc00000", true, Float), + case(core::f64::NAN, val!(core::f64::NAN), "fa7fc00000", true, Float), + case(core::f32::NAN, val!(core::f32::NAN), "fb7ff8000000000000", true, Float), + case(core::f64::NAN, val!(core::f64::NAN), "fb7ff8000000000000", true, Float), + case(-core::f32::NAN, val!(-core::f32::NAN), "f9fe00", false, Float), // Not In RFC + case(-core::f64::NAN, val!(-core::f64::NAN), "f9fe00", false, Float), // Not In RFC + case(-core::f32::NAN, val!(-core::f32::NAN), "faffc00000", true, Float), // Not In RFC + case(-core::f64::NAN, val!(-core::f64::NAN), "faffc00000", true, Float), // Not In RFC + case(-core::f32::NAN, val!(-core::f32::NAN), "fbfff8000000000000", true, Float), // Not In RFC + case(-core::f64::NAN, val!(-core::f64::NAN), "fbfff8000000000000", true, Float), // Not In RFC + case(false, val!(false), "f4", false, same), + case(true, val!(true), "f5", false, same), + case(Value::Null, Value::Null, "f6", false, same), + case(hex!(""), val!(&b""[..]), "40", false, same), + case(hex!("01020304"), val!(&b"\x01\x02\x03\x04"[..]), "4401020304", false, same), + case(hex!("0102030405"), val!(&b"\x01\x02\x03\x04\x05"[..]), "5f42010243030405ff", true, same), + case("", val!(""), "60", false, ToOwned::to_owned), + case("a", val!("a"), "6161", false, ToOwned::to_owned), + case('a', val!('a'), "6161", false, same), + case("IETF", val!("IETF"), "6449455446", false, ToOwned::to_owned), + case("\"\\", val!("\"\\"), "62225c", false, ToOwned::to_owned), + case("ü", val!("ü"), "62c3bc", false, ToOwned::to_owned), + case('ü', val!('ü'), "62c3bc", false, same), + case("水", val!("水"), "63e6b0b4", false, ToOwned::to_owned), + case('水', val!('水'), "63e6b0b4", false, same), + case("𐅑", val!("𐅑"), "64f0908591", false, ToOwned::to_owned), + case('𐅑', val!('𐅑'), "64f0908591", false, same), + case("streaming", val!("streaming"), "7f657374726561646d696e67ff", true, ToOwned::to_owned), + case(cbor!([]).unwrap(), Vec::<Value>::new().into(), "80", false, same), + case(cbor!([]).unwrap(), Vec::<Value>::new().into(), "9fff", true, same), + case(cbor!([1, 2, 3]).unwrap(), cbor!([1, 2, 3]).unwrap(), "83010203", false, same), + case(cbor!([1, [2, 3], [4, 5]]).unwrap(), cbor!([1, [2, 3], [4, 5]]).unwrap(), "8301820203820405", false, same), + case(cbor!([1, [2, 3], [4, 5]]).unwrap(), cbor!([1, [2, 3], [4, 5]]).unwrap(), "9f018202039f0405ffff", true, same), + case(cbor!([1, [2, 3], [4, 5]]).unwrap(), cbor!([1, [2, 3], [4, 5]]).unwrap(), "9f01820203820405ff", true, same), + case(cbor!([1, [2, 3], [4, 5]]).unwrap(), cbor!([1, [2, 3], [4, 5]]).unwrap(), "83018202039f0405ff", true, same), + case(cbor!([1, [2, 3], [4, 5]]).unwrap(), cbor!([1, [2, 3], [4, 5]]).unwrap(), "83019f0203ff820405", true, same), + case((1..=25).collect::<Vec<u8>>(), (1..=25).map(|x| x.into()).collect::<Vec<Value>>().into(), "98190102030405060708090a0b0c0d0e0f101112131415161718181819", false, same), + case((1..=25).collect::<Vec<u8>>(), (1..=25).map(|x| x.into()).collect::<Vec<Value>>().into(), "9f0102030405060708090a0b0c0d0e0f101112131415161718181819ff", true, same), + case(HashMap::<u8, 
u8>::new(), Value::Map(vec![]), "a0", false, same), + case(BTreeMap::<u8, u8>::new(), Value::Map(vec![]), "a0", false, same), + case(map!{1 => 2, 3 => 4}, cbor!({1 => 2, 3 => 4}).unwrap(), "a201020304", false, same), + case(cbor!({"a" => 1, "b" => [2, 3]}).unwrap(), cbor!({"a" => 1, "b" => [2, 3]}).unwrap(), "a26161016162820203", false, same), + case(cbor!({"a" => 1, "b" => [2, 3]}).unwrap(), cbor!({"a" => 1, "b" => [2, 3]}).unwrap(), "bf61610161629f0203ffff", true, same), + case(cbor!(["a", {"b" => "c"}]).unwrap(), cbor!(["a", {"b" => "c"}]).unwrap(), "826161a161626163", false, same), + case(cbor!(["a", {"b" => "c"}]).unwrap(), cbor!(["a", {"b" => "c"}]).unwrap(), "826161bf61626163ff", true, same), + case(cbor!({"Fun" => true, "Amt" => -2}).unwrap(), cbor!({"Fun" => true, "Amt" => -2}).unwrap(), "bf6346756ef563416d7421ff", true, same), + case(map_big(), vmap_big(), "a56161614161626142616361436164614461656145", false, same), + case(Option::<u8>::None, Value::Null, "f6", false, same), // Not In RFC + case(Option::Some(7u8), val!(7u8), "07", false, same), // Not In RFC + case((), Value::Null, "f6", false, same), // Not In RFC + case(UnitStruct, Value::Null, "f6", false, same), // Not In RFC + case(Newtype(123), val!(123u8), "187b", false, same), // Not In RFC + case((22u8, 23u16), cbor!([22, 23]).unwrap(), "821617", false, same), // Not In RFC + case(TupleStruct(33, 34), cbor!([33, 34]).unwrap(), "8218211822", false, same), // Not In RFC + case(Enum::Unit, cbor!("Unit").unwrap(), "64556e6974", false, same), // Not In RFC + case(Enum::Newtype(45), cbor!({"Newtype" => 45}).unwrap(), "a1674e657774797065182d", false, same), // Not In RFC + case(Enum::Tuple(56, 67), cbor!({"Tuple" => [56, 67]}).unwrap(), "a1655475706c658218381843", false, same), // Not In RFC + case(Enum::Struct { first: 78, second: 89 }, cbor!({ "Struct" => { "first" => 78, "second" => 89 }}).unwrap(), "a166537472756374a2656669727374184e667365636f6e641859", false, same), // Not In RFC +)] +fn codec<'de, T: Serialize + Clone, V: Debug + PartialEq + DeserializeOwned, F: Fn(T) -> V>( + input: T, + value: Value, + bytes: &str, + alternate: bool, + equality: F, +) { + let bytes = hex::decode(bytes).unwrap(); + + if !alternate { + let mut encoded = Vec::new(); + into_writer(&input, &mut encoded).unwrap(); + eprintln!("{:x?} == {:x?}", bytes, encoded); + assert_eq!(bytes, encoded); + + let mut encoded = Vec::new(); + into_writer(&value, &mut encoded).unwrap(); + eprintln!("{:x?} == {:x?}", bytes, encoded); + assert_eq!(bytes, encoded); + + let encoded = Value::serialized(&input).unwrap(); + eprintln!("{:x?} == {:x?}", &value, &encoded); + assert!(veq(&value, &encoded)); + } + + let decoded: V = from_reader(&bytes[..]).unwrap(); + let answer = equality(input.clone()); + eprintln!("{:x?} == {:x?}", answer, decoded); + assert_eq!(answer, decoded); + + let decoded: Value = from_reader(&bytes[..]).unwrap(); + eprintln!("{:x?} == {:x?}", &value, &decoded); + assert!(veq(&value, &decoded)); + + let mut scratch = vec![0; 65536]; + let decoded: Value = from_reader_with_buffer(&bytes[..], &mut scratch).unwrap(); + eprintln!("{:x?} == {:x?}", &value, &decoded); + assert!(veq(&value, &decoded)); + + let decoded: V = value.deserialized().unwrap(); + let answer = equality(input); + eprintln!("{:x?} == {:x?}", answer, decoded); + assert_eq!(answer, decoded); +} + +#[inline] +fn veq(lhs: &Value, rhs: &Value) -> bool { + if let Value::Float(l) = lhs { + if let Value::Float(r) = rhs { + return Float(*l) == Float(*r); + } + } + + lhs == rhs +} + 
+#[inline] +fn same<T>(x: T) -> T { + x +} + +#[derive(Debug, Deserialize)] +struct Float<T>(T); + +impl PartialEq for Float<f32> { + fn eq(&self, other: &Float<f32>) -> bool { + if self.0.is_nan() && other.0.is_nan() { + return true; + } + + self.0 == other.0 + } +} + +impl PartialEq for Float<f64> { + fn eq(&self, other: &Float<f64>) -> bool { + if self.0.is_nan() && other.0.is_nan() { + return true; + } + + self.0 == other.0 + } +} + +#[inline] +fn map_big() -> BTreeMap<String, String> { + let mut map = BTreeMap::new(); + map.insert("a".into(), "A".into()); + map.insert("b".into(), "B".into()); + map.insert("c".into(), "C".into()); + map.insert("d".into(), "D".into()); + map.insert("e".into(), "E".into()); + map +} + +#[inline] +fn vmap_big() -> Value { + Value::Map( + map_big() + .into_iter() + .map(|x| (x.0.into(), x.1.into())) + .collect(), + ) +} + +#[inline] +fn bigint() -> Value { + let bytes = hex::decode("0000000000000000000000000000000000000001").unwrap(); + Value::Tag(2, Value::Bytes(bytes).into()) +} + +#[derive(Deserialize, Serialize, Copy, Clone, Debug, PartialEq, Eq)] +struct UnitStruct; + +#[derive(Deserialize, Serialize, Copy, Clone, Debug, PartialEq, Eq)] +struct TupleStruct(u8, u16); + +#[derive(Deserialize, Serialize, Copy, Clone, Debug, PartialEq, Eq)] +struct Newtype(u8); + +#[derive(Deserialize, Serialize, Copy, Clone, Debug, PartialEq, Eq)] +enum Enum { + Unit, + Newtype(u8), + Tuple(u8, u16), + Struct { first: u8, second: u16 }, +} + +#[rstest( + input, + case(vec![]), + case(vec![0u8, 1, 2, 3]), +)] +fn byte_vec_serde_bytes_compatibility(input: Vec<u8>) { + use serde_bytes::ByteBuf; + + let mut buf = Vec::new(); + into_writer(&input, &mut buf).unwrap(); + let bytes: ByteBuf = from_reader(&buf[..]).unwrap(); + assert_eq!(input, bytes.to_vec()); + + let mut buf = Vec::new(); + into_writer(&ByteBuf::from(input.clone()), &mut buf).unwrap(); + let bytes: Vec<u8> = from_reader(&buf[..]).unwrap(); + assert_eq!(input, bytes); +} + +#[derive(Serialize, Deserialize, Eq, PartialEq, Debug)] +struct Foo { + bar: u8, +} + +#[rstest(input, expected, + case("a163626172182a", Foo { bar: 42 }), + case("a143626172182a", Foo { bar: 42 }), +)] +fn handle_struct_field_names(input: &str, expected: Foo) { + let buf = hex::decode(input).unwrap(); + let read = from_reader(&buf[..]).unwrap(); + assert_eq!(expected, read); +} diff --git a/vendor/ciborium/tests/error.rs b/vendor/ciborium/tests/error.rs new file mode 100644 index 00000000..13bcd5ac --- /dev/null +++ b/vendor/ciborium/tests/error.rs @@ -0,0 +1,59 @@ +// SPDX-License-Identifier: Apache-2.0 + +use ciborium::{ + de::{from_reader, Error}, + ser::into_writer, + value::Value, +}; +use rstest::rstest; + +#[rstest(bytes, error, + // Invalid value + case("1e", Error::Syntax(0)), + + // Indeterminate integers are invalid + case("1f", Error::Syntax(0)), + + // Indeterminate integer in an array + case("83011f03", Error::Syntax(2)), + + // Integer in a string continuation + case("7F616101FF", Error::Syntax(3)), + + // Bytes in a string continuation + case("7F61614101FF", Error::Syntax(3)), + + // Invalid UTF-8 + case("62C328", Error::Syntax(0)), + + // Invalid UTF-8 in a string continuation + case("7F62C328FF", Error::Syntax(1)), +)] +fn test(bytes: &str, error: Error<std::io::Error>) { + let bytes = hex::decode(bytes).unwrap(); + + let correct = match error { + Error::Io(..) 
=> panic!(), + Error::Syntax(x) => ("syntax", Some(x), None), + Error::Semantic(x, y) => ("semantic", x, Some(y)), + Error::RecursionLimitExceeded => panic!(), + }; + + let result: Result<Value, _> = from_reader(&bytes[..]); + let actual = match result.unwrap_err() { + Error::Io(..) => panic!(), + Error::Syntax(x) => ("syntax", Some(x), None), + Error::Semantic(x, y) => ("semantic", x, Some(y)), + Error::RecursionLimitExceeded => panic!(), + }; + + assert_eq!(correct, actual); +} + +#[test] +fn test_long_utf8_deserialization() { + let s = (0..2000).map(|_| 'ボ').collect::<String>(); + let mut v = Vec::new(); + into_writer(&s, &mut v).unwrap(); + let _: String = from_reader(&*v).unwrap(); +} diff --git a/vendor/ciborium/tests/fuzz.rs b/vendor/ciborium/tests/fuzz.rs new file mode 100644 index 00000000..2b9d0ecd --- /dev/null +++ b/vendor/ciborium/tests/fuzz.rs @@ -0,0 +1,64 @@ +// SPDX-License-Identifier: Apache-2.0 + +use std::fs::File; +use std::io::Read; +use std::io::Write; +use std::os::raw::c_int; +use std::os::unix::io::{FromRawFd, RawFd}; + +use ciborium::{de::from_reader, value::Value}; +use rand::Rng; + +const ITERATIONS: usize = 128 * 1024; + +#[allow(non_camel_case_types)] +type pid_t = i32; + +extern "C" { + fn close(fd: RawFd) -> c_int; + fn fork() -> pid_t; + fn pipe(pipefd: &mut [RawFd; 2]) -> c_int; + fn waitpid(pid: pid_t, wstatus: *mut c_int, options: c_int) -> pid_t; +} + +#[test] +fn fuzz() { + let mut fds: [RawFd; 2] = [0; 2]; + assert_eq!(unsafe { pipe(&mut fds) }, 0); + + let pid = unsafe { fork() }; + assert!(pid >= 0); + + match pid { + 0 => { + let mut child = unsafe { File::from_raw_fd(fds[1]) }; + unsafe { close(fds[0]) }; + + let mut rng = rand::thread_rng(); + let mut buffer = [0u8; 32]; + + for _ in 0..ITERATIONS { + let len = rng.gen_range(0..buffer.len()); + rng.fill(&mut buffer[..len]); + + writeln!(child, "{}", hex::encode(&buffer[..len])).unwrap(); + writeln!(child, "{:?}", from_reader::<Value, _>(&buffer[..len])).unwrap(); + } + } + + pid => { + let mut parent = unsafe { File::from_raw_fd(fds[0]) }; + unsafe { close(fds[1]) }; + + let mut string = String::new(); + parent.read_to_string(&mut string).unwrap(); + eprint!("{}", string); + + let mut status = 0; + assert_eq!(pid, unsafe { waitpid(pid, &mut status, 0) }); + + eprintln!("exit status: {:?}", status); + assert_eq!(0, status); + } + } +} diff --git a/vendor/ciborium/tests/macro.rs b/vendor/ciborium/tests/macro.rs new file mode 100644 index 00000000..a73fc849 --- /dev/null +++ b/vendor/ciborium/tests/macro.rs @@ -0,0 +1,358 @@ +// SPDX-License-Identifier: Apache-2.0 + +extern crate alloc; + +use ciborium::{ + cbor, + value::{Integer, Value, Value::Null}, +}; + +use rstest::rstest; +use serde_bytes::Bytes; + +macro_rules! map { + ($($key:expr => $val:expr),* $(,)*) => { + Value::Map(vec![$( + ( + Value::serialized(&$key).unwrap(), + Value::serialized(&$val).unwrap() + ) + ),*]) + }; +} + +macro_rules! 
arr { + ($($val:expr),*) => { + Value::Array(vec![$( + Value::serialized(&$val).unwrap() + ),*]) + }; +} + +#[rstest(answer, question, + // Non-numeric simple types + case(Value::Null, cbor!(null).unwrap()), + case(Value::Bool(true), cbor!(true).unwrap()), + case(Value::Bool(false), cbor!(false).unwrap()), + case(Value::Text("foo".into()), cbor!("foo").unwrap()), + case(Value::Bytes(vec![0, 1, 2]), cbor!(Bytes::new(b"\x00\x01\x02")).unwrap()), + + // Numeric simple types + case(Value::Integer(Integer::from(123)), cbor!(123).unwrap()), + case(Value::Integer(Integer::from(-123)), cbor!(-123).unwrap()), + case(Value::Float(1.23), cbor!(1.23).unwrap()), + case(Value::Float(-1.23), cbor!(-1.23).unwrap()), + case(Value::Float(2.5e+1), cbor!(2.5e+1).unwrap()), + case(Value::Float(-2.5e+1), cbor!(-2.5e+1).unwrap()), + + // Simple array formulations + case(arr![], cbor!([]).unwrap()), + case(arr![Null], cbor!([null]).unwrap()), + case(arr![true], cbor!([true]).unwrap()), + case(arr![false], cbor!([false]).unwrap()), + case(arr!["foo"], cbor!(["foo"]).unwrap()), + case(arr![123], cbor!([123]).unwrap()), + case(arr![-123], cbor!([-123]).unwrap()), + case(arr![1.23], cbor!([1.23]).unwrap()), + case(arr![-1.23], cbor!([-1.23]).unwrap()), + case(arr![2.5e+1], cbor!([2.5e+1]).unwrap()), + case(arr![2.5e+1], cbor!([2.5e+1]).unwrap()), + case(arr![[1, 2]], cbor!([[1, 2]]).unwrap()), + case(arr![map! {1=>2,3=>4}], cbor!([{1=>2,3=>4}]).unwrap()), + + // Two-item array formluations + case(arr![Null, Null], cbor!([null, null]).unwrap()), + case(arr![Null, true], cbor!([null, true]).unwrap()), + case(arr![Null, false], cbor!([null, false]).unwrap()), + case(arr![Null, "foo"], cbor!([null, "foo"]).unwrap()), + case(arr![Null, 123], cbor!([null, 123]).unwrap()), + case(arr![Null, -123], cbor!([null, -123]).unwrap()), + case(arr![Null, 1.23], cbor!([null, 1.23]).unwrap()), + case(arr![Null, -1.23], cbor!([null, -1.23]).unwrap()), + case(arr![Null, 2.5e+1], cbor!([null, 2.5e+1]).unwrap()), + case(arr![Null, 2.5e+1], cbor!([null, 2.5e+1]).unwrap()), + case(arr![Null, [1, 2]], cbor!([null, [1, 2]]).unwrap()), + case(arr![Null, map! {1=>2,3=>4}], cbor!([null, {1=>2,3=>4}]).unwrap()), + case(arr![true, Null], cbor!([true, null]).unwrap()), + case(arr![true, true], cbor!([true, true]).unwrap()), + case(arr![true, false], cbor!([true, false]).unwrap()), + case(arr![true, "foo"], cbor!([true, "foo"]).unwrap()), + case(arr![true, 123], cbor!([true, 123]).unwrap()), + case(arr![true, -123], cbor!([true, -123]).unwrap()), + case(arr![true, 1.23], cbor!([true, 1.23]).unwrap()), + case(arr![true, -1.23], cbor!([true, -1.23]).unwrap()), + case(arr![true, 2.5e+1], cbor!([true, 2.5e+1]).unwrap()), + case(arr![true, 2.5e+1], cbor!([true, 2.5e+1]).unwrap()), + case(arr![true, [1, 2]], cbor!([true, [1, 2]]).unwrap()), + case(arr![true, map! 
{1=>2,3=>4}], cbor!([true, {1=>2,3=>4}]).unwrap()), + case(arr![false, Null], cbor!([false, null]).unwrap()), + case(arr![false, true], cbor!([false, true]).unwrap()), + case(arr![false, false], cbor!([false, false]).unwrap()), + case(arr![false, "foo"], cbor!([false, "foo"]).unwrap()), + case(arr![false, 123], cbor!([false, 123]).unwrap()), + case(arr![false, -123], cbor!([false, -123]).unwrap()), + case(arr![false, 1.23], cbor!([false, 1.23]).unwrap()), + case(arr![false, -1.23], cbor!([false, -1.23]).unwrap()), + case(arr![false, 2.5e+1], cbor!([false, 2.5e+1]).unwrap()), + case(arr![false, 2.5e+1], cbor!([false, 2.5e+1]).unwrap()), + case(arr![false, [1, 2]], cbor!([false, [1, 2]]).unwrap()), + case(arr![false, map! {1=>2,3=>4}], cbor!([false, {1=>2,3=>4}]).unwrap()), + case(arr!["foo", Null], cbor!(["foo", null]).unwrap()), + case(arr!["foo", true], cbor!(["foo", true]).unwrap()), + case(arr!["foo", false], cbor!(["foo", false]).unwrap()), + case(arr!["foo", "foo"], cbor!(["foo", "foo"]).unwrap()), + case(arr!["foo", 123], cbor!(["foo", 123]).unwrap()), + case(arr!["foo", -123], cbor!(["foo", -123]).unwrap()), + case(arr!["foo", 1.23], cbor!(["foo", 1.23]).unwrap()), + case(arr!["foo", -1.23], cbor!(["foo", -1.23]).unwrap()), + case(arr!["foo", 2.5e+1], cbor!(["foo", 2.5e+1]).unwrap()), + case(arr!["foo", 2.5e+1], cbor!(["foo", 2.5e+1]).unwrap()), + case(arr!["foo", [1, 2]], cbor!(["foo", [1, 2]]).unwrap()), + case(arr!["foo", map! {1=>2,3=>4}], cbor!(["foo", {1=>2,3=>4}]).unwrap()), + case(arr![123, Null], cbor!([123, null]).unwrap()), + case(arr![123, true], cbor!([123, true]).unwrap()), + case(arr![123, false], cbor!([123, false]).unwrap()), + case(arr![123, "foo"], cbor!([123, "foo"]).unwrap()), + case(arr![123, 123], cbor!([123, 123]).unwrap()), + case(arr![123, -123], cbor!([123, -123]).unwrap()), + case(arr![123, 1.23], cbor!([123, 1.23]).unwrap()), + case(arr![123, -1.23], cbor!([123, -1.23]).unwrap()), + case(arr![123, 2.5e+1], cbor!([123, 2.5e+1]).unwrap()), + case(arr![123, 2.5e+1], cbor!([123, 2.5e+1]).unwrap()), + case(arr![123, [1, 2]], cbor!([123, [1, 2]]).unwrap()), + case(arr![123, map! {1=>2,3=>4}], cbor!([123, {1=>2,3=>4}]).unwrap()), + case(arr![-123, Null], cbor!([-123, null]).unwrap()), + case(arr![-123, true], cbor!([-123, true]).unwrap()), + case(arr![-123, false], cbor!([-123, false]).unwrap()), + case(arr![-123, "foo"], cbor!([-123, "foo"]).unwrap()), + case(arr![-123, 123], cbor!([-123, 123]).unwrap()), + case(arr![-123, -123], cbor!([-123, -123]).unwrap()), + case(arr![-123, 1.23], cbor!([-123, 1.23]).unwrap()), + case(arr![-123, -1.23], cbor!([-123, -1.23]).unwrap()), + case(arr![-123, 2.5e+1], cbor!([-123, 2.5e+1]).unwrap()), + case(arr![-123, 2.5e+1], cbor!([-123, 2.5e+1]).unwrap()), + case(arr![-123, [1, 2]], cbor!([-123, [1, 2]]).unwrap()), + case(arr![-123, map! 
{1=>2,3=>4}], cbor!([-123, {1=>2,3=>4}]).unwrap()), + case(arr![1.23, Null], cbor!([1.23, null]).unwrap()), + case(arr![1.23, true], cbor!([1.23, true]).unwrap()), + case(arr![1.23, false], cbor!([1.23, false]).unwrap()), + case(arr![1.23, "foo"], cbor!([1.23, "foo"]).unwrap()), + case(arr![1.23, 123], cbor!([1.23, 123]).unwrap()), + case(arr![1.23, -123], cbor!([1.23, -123]).unwrap()), + case(arr![1.23, 1.23], cbor!([1.23, 1.23]).unwrap()), + case(arr![1.23, -1.23], cbor!([1.23, -1.23]).unwrap()), + case(arr![1.23, 2.5e+1], cbor!([1.23, 2.5e+1]).unwrap()), + case(arr![1.23, 2.5e+1], cbor!([1.23, 2.5e+1]).unwrap()), + case(arr![1.23, [1, 2]], cbor!([1.23, [1, 2]]).unwrap()), + case(arr![1.23, map! {1=>2,3=>4}], cbor!([1.23, {1=>2,3=>4}]).unwrap()), + case(arr![-1.23, Null], cbor!([-1.23, null]).unwrap()), + case(arr![-1.23, true], cbor!([-1.23, true]).unwrap()), + case(arr![-1.23, false], cbor!([-1.23, false]).unwrap()), + case(arr![-1.23, "foo"], cbor!([-1.23, "foo"]).unwrap()), + case(arr![-1.23, 123], cbor!([-1.23, 123]).unwrap()), + case(arr![-1.23, -123], cbor!([-1.23, -123]).unwrap()), + case(arr![-1.23, 1.23], cbor!([-1.23, 1.23]).unwrap()), + case(arr![-1.23, -1.23], cbor!([-1.23, -1.23]).unwrap()), + case(arr![-1.23, 2.5e+1], cbor!([-1.23, 2.5e+1]).unwrap()), + case(arr![-1.23, 2.5e+1], cbor!([-1.23, 2.5e+1]).unwrap()), + case(arr![-1.23, [1, 2]], cbor!([-1.23, [1, 2]]).unwrap()), + case(arr![-1.23, map! {1=>2,3=>4}], cbor!([-1.23, {1=>2,3=>4}]).unwrap()), + case(arr![2.5e+1, Null], cbor!([2.5e+1, null]).unwrap()), + case(arr![2.5e+1, true], cbor!([2.5e+1, true]).unwrap()), + case(arr![2.5e+1, false], cbor!([2.5e+1, false]).unwrap()), + case(arr![2.5e+1, "foo"], cbor!([2.5e+1, "foo"]).unwrap()), + case(arr![2.5e+1, 123], cbor!([2.5e+1, 123]).unwrap()), + case(arr![2.5e+1, -123], cbor!([2.5e+1, -123]).unwrap()), + case(arr![2.5e+1, 1.23], cbor!([2.5e+1, 1.23]).unwrap()), + case(arr![2.5e+1, -1.23], cbor!([2.5e+1, -1.23]).unwrap()), + case(arr![2.5e+1, 2.5e+1], cbor!([2.5e+1, 2.5e+1]).unwrap()), + case(arr![2.5e+1, 2.5e+1], cbor!([2.5e+1, 2.5e+1]).unwrap()), + case(arr![2.5e+1, [1, 2]], cbor!([2.5e+1, [1, 2]]).unwrap()), + case(arr![2.5e+1, map! {1=>2,3=>4}], cbor!([2.5e+1, {1=>2,3=>4}]).unwrap()), + case(arr![2.5e+1, Null], cbor!([2.5e+1, null]).unwrap()), + case(arr![2.5e+1, true], cbor!([2.5e+1, true]).unwrap()), + case(arr![2.5e+1, false], cbor!([2.5e+1, false]).unwrap()), + case(arr![2.5e+1, "foo"], cbor!([2.5e+1, "foo"]).unwrap()), + case(arr![2.5e+1, 123], cbor!([2.5e+1, 123]).unwrap()), + case(arr![2.5e+1, -123], cbor!([2.5e+1, -123]).unwrap()), + case(arr![2.5e+1, 1.23], cbor!([2.5e+1, 1.23]).unwrap()), + case(arr![2.5e+1, -1.23], cbor!([2.5e+1, -1.23]).unwrap()), + case(arr![2.5e+1, 2.5e+1], cbor!([2.5e+1, 2.5e+1]).unwrap()), + case(arr![2.5e+1, 2.5e+1], cbor!([2.5e+1, 2.5e+1]).unwrap()), + case(arr![2.5e+1, [1, 2]], cbor!([2.5e+1, [1, 2]]).unwrap()), + case(arr![2.5e+1, map! 
{1=>2,3=>4}], cbor!([2.5e+1, {1=>2,3=>4}]).unwrap()), + case(arr![[1, 2], Null], cbor!([[1, 2], null]).unwrap()), + case(arr![[1, 2], true], cbor!([[1, 2], true]).unwrap()), + case(arr![[1, 2], false], cbor!([[1, 2], false]).unwrap()), + case(arr![[1, 2], "foo"], cbor!([[1, 2], "foo"]).unwrap()), + case(arr![[1, 2], 123], cbor!([[1, 2], 123]).unwrap()), + case(arr![[1, 2], -123], cbor!([[1, 2], -123]).unwrap()), + case(arr![[1, 2], 1.23], cbor!([[1, 2], 1.23]).unwrap()), + case(arr![[1, 2], -1.23], cbor!([[1, 2], -1.23]).unwrap()), + case(arr![[1, 2], 2.5e+1], cbor!([[1, 2], 2.5e+1]).unwrap()), + case(arr![[1, 2], 2.5e+1], cbor!([[1, 2], 2.5e+1]).unwrap()), + case(arr![[1, 2], [1, 2]], cbor!([[1, 2], [1, 2]]).unwrap()), + case(arr![[1, 2], map! {1=>2,3=>4}], cbor!([[1, 2], {1=>2,3=>4}]).unwrap()), + case(arr![map! {1=>2,3=>4}, Null], cbor!([{1=>2,3=>4}, null]).unwrap()), + case(arr![map! {1=>2,3=>4}, true], cbor!([{1=>2,3=>4}, true]).unwrap()), + case(arr![map! {1=>2,3=>4}, false], cbor!([{1=>2,3=>4}, false]).unwrap()), + case(arr![map! {1=>2,3=>4}, "foo"], cbor!([{1=>2,3=>4}, "foo"]).unwrap()), + case(arr![map! {1=>2,3=>4}, 123], cbor!([{1=>2,3=>4}, 123]).unwrap()), + case(arr![map! {1=>2,3=>4}, -123], cbor!([{1=>2,3=>4}, -123]).unwrap()), + case(arr![map! {1=>2,3=>4}, 1.23], cbor!([{1=>2,3=>4}, 1.23]).unwrap()), + case(arr![map! {1=>2,3=>4}, -1.23], cbor!([{1=>2,3=>4}, -1.23]).unwrap()), + case(arr![map! {1=>2,3=>4}, 2.5e+1], cbor!([{1=>2,3=>4}, 2.5e+1]).unwrap()), + case(arr![map! {1=>2,3=>4}, 2.5e+1], cbor!([{1=>2,3=>4}, 2.5e+1]).unwrap()), + case(arr![map! {1=>2,3=>4}, [1, 2]], cbor!([{1=>2,3=>4}, [1, 2]]).unwrap()), + case(arr![map! {1=>2,3=>4}, map! {1=>2,3=>4}], cbor!([{1=>2,3=>4}, {1=>2,3=>4}]).unwrap()), + + // Map formulations + case(map! {}, cbor!({}).unwrap()), + case(map! {Null => Null}, cbor!({ null => null }).unwrap()), + case(map! {Null => true}, cbor!({ null => true }).unwrap()), + case(map! {Null => false}, cbor!({ null => false }).unwrap()), + case(map! {Null => "foo"}, cbor!({ null => "foo" }).unwrap()), + case(map! {Null => 123}, cbor!({ null => 123 }).unwrap()), + case(map! {Null => -123}, cbor!({ null => -123 }).unwrap()), + case(map! {Null => 1.23}, cbor!({ null => 1.23 }).unwrap()), + case(map! {Null => -1.23}, cbor!({ null => -1.23 }).unwrap()), + case(map! {Null => 2.5e+1}, cbor!({ null => 2.5e+1 }).unwrap()), + case(map! {Null => 2.5e+1}, cbor!({ null => 2.5e+1 }).unwrap()), + case(map! {Null => [1, 2]}, cbor!({ null => [1, 2] }).unwrap()), + case(map! {Null => map! {1=>2,3=>4}}, cbor!({ null => {1=>2,3=>4} }).unwrap()), + case(map! {true => Null}, cbor!({ true => null }).unwrap()), + case(map! {true => true}, cbor!({ true => true }).unwrap()), + case(map! {true => false}, cbor!({ true => false }).unwrap()), + case(map! {true => "foo"}, cbor!({ true => "foo" }).unwrap()), + case(map! {true => 123}, cbor!({ true => 123 }).unwrap()), + case(map! {true => -123}, cbor!({ true => -123 }).unwrap()), + case(map! {true => 1.23}, cbor!({ true => 1.23 }).unwrap()), + case(map! {true => -1.23}, cbor!({ true => -1.23 }).unwrap()), + case(map! {true => 2.5e+1}, cbor!({ true => 2.5e+1 }).unwrap()), + case(map! {true => 2.5e+1}, cbor!({ true => 2.5e+1 }).unwrap()), + case(map! {true => [1, 2]}, cbor!({ true => [1, 2] }).unwrap()), + case(map! {true => map! {1=>2,3=>4}}, cbor!({ true => {1=>2,3=>4} }).unwrap()), + case(map! {false => Null}, cbor!({ false => null }).unwrap()), + case(map! {false => true}, cbor!({ false => true }).unwrap()), + case(map! 
{false => false}, cbor!({ false => false }).unwrap()), + case(map! {false => "foo"}, cbor!({ false => "foo" }).unwrap()), + case(map! {false => 123}, cbor!({ false => 123 }).unwrap()), + case(map! {false => -123}, cbor!({ false => -123 }).unwrap()), + case(map! {false => 1.23}, cbor!({ false => 1.23 }).unwrap()), + case(map! {false => -1.23}, cbor!({ false => -1.23 }).unwrap()), + case(map! {false => 2.5e+1}, cbor!({ false => 2.5e+1 }).unwrap()), + case(map! {false => 2.5e+1}, cbor!({ false => 2.5e+1 }).unwrap()), + case(map! {false => [1, 2]}, cbor!({ false => [1, 2] }).unwrap()), + case(map! {false => map! {1=>2,3=>4}}, cbor!({ false => {1=>2,3=>4} }).unwrap()), + case(map! {"foo" => Null}, cbor!({ "foo" => null }).unwrap()), + case(map! {"foo" => true}, cbor!({ "foo" => true }).unwrap()), + case(map! {"foo" => false}, cbor!({ "foo" => false }).unwrap()), + case(map! {"foo" => "foo"}, cbor!({ "foo" => "foo" }).unwrap()), + case(map! {"foo" => 123}, cbor!({ "foo" => 123 }).unwrap()), + case(map! {"foo" => -123}, cbor!({ "foo" => -123 }).unwrap()), + case(map! {"foo" => 1.23}, cbor!({ "foo" => 1.23 }).unwrap()), + case(map! {"foo" => -1.23}, cbor!({ "foo" => -1.23 }).unwrap()), + case(map! {"foo" => 2.5e+1}, cbor!({ "foo" => 2.5e+1 }).unwrap()), + case(map! {"foo" => 2.5e+1}, cbor!({ "foo" => 2.5e+1 }).unwrap()), + case(map! {"foo" => [1, 2]}, cbor!({ "foo" => [1, 2] }).unwrap()), + case(map! {"foo" => map! {1=>2,3=>4}}, cbor!({ "foo" => {1=>2,3=>4} }).unwrap()), + case(map! {123 => Null}, cbor!({ 123 => null }).unwrap()), + case(map! {123 => true}, cbor!({ 123 => true }).unwrap()), + case(map! {123 => false}, cbor!({ 123 => false }).unwrap()), + case(map! {123 => "foo"}, cbor!({ 123 => "foo" }).unwrap()), + case(map! {123 => 123}, cbor!({ 123 => 123 }).unwrap()), + case(map! {123 => -123}, cbor!({ 123 => -123 }).unwrap()), + case(map! {123 => 1.23}, cbor!({ 123 => 1.23 }).unwrap()), + case(map! {123 => -1.23}, cbor!({ 123 => -1.23 }).unwrap()), + case(map! {123 => 2.5e+1}, cbor!({ 123 => 2.5e+1 }).unwrap()), + case(map! {123 => 2.5e+1}, cbor!({ 123 => 2.5e+1 }).unwrap()), + case(map! {123 => [1, 2]}, cbor!({ 123 => [1, 2] }).unwrap()), + case(map! {123 => map! {1=>2,3=>4}}, cbor!({ 123 => {1=>2,3=>4} }).unwrap()), + case(map! {-123 => Null}, cbor!({ -123 => null }).unwrap()), + case(map! {-123 => true}, cbor!({ -123 => true }).unwrap()), + case(map! {-123 => false}, cbor!({ -123 => false }).unwrap()), + case(map! {-123 => "foo"}, cbor!({ -123 => "foo" }).unwrap()), + case(map! {-123 => 123}, cbor!({ -123 => 123 }).unwrap()), + case(map! {-123 => -123}, cbor!({ -123 => -123 }).unwrap()), + case(map! {-123 => 1.23}, cbor!({ -123 => 1.23 }).unwrap()), + case(map! {-123 => -1.23}, cbor!({ -123 => -1.23 }).unwrap()), + case(map! {-123 => 2.5e+1}, cbor!({ -123 => 2.5e+1 }).unwrap()), + case(map! {-123 => 2.5e+1}, cbor!({ -123 => 2.5e+1 }).unwrap()), + case(map! {-123 => [1, 2]}, cbor!({ -123 => [1, 2] }).unwrap()), + case(map! {-123 => map! {1=>2,3=>4}}, cbor!({ -123 => {1=>2,3=>4} }).unwrap()), + case(map! {1.23 => Null}, cbor!({ 1.23 => null }).unwrap()), + case(map! {1.23 => true}, cbor!({ 1.23 => true }).unwrap()), + case(map! {1.23 => false}, cbor!({ 1.23 => false }).unwrap()), + case(map! {1.23 => "foo"}, cbor!({ 1.23 => "foo" }).unwrap()), + case(map! {1.23 => 123}, cbor!({ 1.23 => 123 }).unwrap()), + case(map! {1.23 => -123}, cbor!({ 1.23 => -123 }).unwrap()), + case(map! {1.23 => 1.23}, cbor!({ 1.23 => 1.23 }).unwrap()), + case(map! 
{1.23 => -1.23}, cbor!({ 1.23 => -1.23 }).unwrap()), + case(map! {1.23 => 2.5e+1}, cbor!({ 1.23 => 2.5e+1 }).unwrap()), + case(map! {1.23 => 2.5e+1}, cbor!({ 1.23 => 2.5e+1 }).unwrap()), + case(map! {1.23 => [1, 2]}, cbor!({ 1.23 => [1, 2] }).unwrap()), + case(map! {1.23 => map! {1=>2,3=>4}}, cbor!({ 1.23 => {1=>2,3=>4} }).unwrap()), + case(map! {-1.23 => Null}, cbor!({ -1.23 => null }).unwrap()), + case(map! {-1.23 => true}, cbor!({ -1.23 => true }).unwrap()), + case(map! {-1.23 => false}, cbor!({ -1.23 => false }).unwrap()), + case(map! {-1.23 => "foo"}, cbor!({ -1.23 => "foo" }).unwrap()), + case(map! {-1.23 => 123}, cbor!({ -1.23 => 123 }).unwrap()), + case(map! {-1.23 => -123}, cbor!({ -1.23 => -123 }).unwrap()), + case(map! {-1.23 => 1.23}, cbor!({ -1.23 => 1.23 }).unwrap()), + case(map! {-1.23 => -1.23}, cbor!({ -1.23 => -1.23 }).unwrap()), + case(map! {-1.23 => 2.5e+1}, cbor!({ -1.23 => 2.5e+1 }).unwrap()), + case(map! {-1.23 => 2.5e+1}, cbor!({ -1.23 => 2.5e+1 }).unwrap()), + case(map! {-1.23 => [1, 2]}, cbor!({ -1.23 => [1, 2] }).unwrap()), + case(map! {-1.23 => map! {1=>2,3=>4}}, cbor!({ -1.23 => {1=>2,3=>4} }).unwrap()), + case(map! {2.5e+1 => Null}, cbor!({ 2.5e+1 => null }).unwrap()), + case(map! {2.5e+1 => true}, cbor!({ 2.5e+1 => true }).unwrap()), + case(map! {2.5e+1 => false}, cbor!({ 2.5e+1 => false }).unwrap()), + case(map! {2.5e+1 => "foo"}, cbor!({ 2.5e+1 => "foo" }).unwrap()), + case(map! {2.5e+1 => 123}, cbor!({ 2.5e+1 => 123 }).unwrap()), + case(map! {2.5e+1 => -123}, cbor!({ 2.5e+1 => -123 }).unwrap()), + case(map! {2.5e+1 => 1.23}, cbor!({ 2.5e+1 => 1.23 }).unwrap()), + case(map! {2.5e+1 => -1.23}, cbor!({ 2.5e+1 => -1.23 }).unwrap()), + case(map! {2.5e+1 => 2.5e+1}, cbor!({ 2.5e+1 => 2.5e+1 }).unwrap()), + case(map! {2.5e+1 => 2.5e+1}, cbor!({ 2.5e+1 => 2.5e+1 }).unwrap()), + case(map! {2.5e+1 => [1, 2]}, cbor!({ 2.5e+1 => [1, 2] }).unwrap()), + case(map! {2.5e+1 => map! {1=>2,3=>4}}, cbor!({ 2.5e+1 => {1=>2,3=>4} }).unwrap()), + case(map! {2.5e+1 => Null}, cbor!({ 2.5e+1 => null }).unwrap()), + case(map! {2.5e+1 => true}, cbor!({ 2.5e+1 => true }).unwrap()), + case(map! {2.5e+1 => false}, cbor!({ 2.5e+1 => false }).unwrap()), + case(map! {2.5e+1 => "foo"}, cbor!({ 2.5e+1 => "foo" }).unwrap()), + case(map! {2.5e+1 => 123}, cbor!({ 2.5e+1 => 123 }).unwrap()), + case(map! {2.5e+1 => -123}, cbor!({ 2.5e+1 => -123 }).unwrap()), + case(map! {2.5e+1 => 1.23}, cbor!({ 2.5e+1 => 1.23 }).unwrap()), + case(map! {2.5e+1 => -1.23}, cbor!({ 2.5e+1 => -1.23 }).unwrap()), + case(map! {2.5e+1 => 2.5e+1}, cbor!({ 2.5e+1 => 2.5e+1 }).unwrap()), + case(map! {2.5e+1 => 2.5e+1}, cbor!({ 2.5e+1 => 2.5e+1 }).unwrap()), + case(map! {2.5e+1 => [1, 2]}, cbor!({ 2.5e+1 => [1, 2] }).unwrap()), + case(map! {2.5e+1 => map! {1=>2,3=>4}}, cbor!({ 2.5e+1 => {1=>2,3=>4} }).unwrap()), + case(map! {[1, 2] => Null}, cbor!({ [1, 2] => null }).unwrap()), + case(map! {[1, 2] => true}, cbor!({ [1, 2] => true }).unwrap()), + case(map! {[1, 2] => false}, cbor!({ [1, 2] => false }).unwrap()), + case(map! {[1, 2] => "foo"}, cbor!({ [1, 2] => "foo" }).unwrap()), + case(map! {[1, 2] => 123}, cbor!({ [1, 2] => 123 }).unwrap()), + case(map! {[1, 2] => -123}, cbor!({ [1, 2] => -123 }).unwrap()), + case(map! {[1, 2] => 1.23}, cbor!({ [1, 2] => 1.23 }).unwrap()), + case(map! {[1, 2] => -1.23}, cbor!({ [1, 2] => -1.23 }).unwrap()), + case(map! {[1, 2] => 2.5e+1}, cbor!({ [1, 2] => 2.5e+1 }).unwrap()), + case(map! {[1, 2] => 2.5e+1}, cbor!({ [1, 2] => 2.5e+1 }).unwrap()), + case(map! 
{[1, 2] => [1, 2]}, cbor!({ [1, 2] => [1, 2] }).unwrap()), + case(map! {[1, 2] => map! {1=>2,3=>4}}, cbor!({ [1, 2] => {1=>2,3=>4} }).unwrap()), + case(map! {map! {1=>2,3=>4} => Null}, cbor!({ {1=>2,3=>4} => null }).unwrap()), + case(map! {map! {1=>2,3=>4} => true}, cbor!({ {1=>2,3=>4} => true }).unwrap()), + case(map! {map! {1=>2,3=>4} => false}, cbor!({ {1=>2,3=>4} => false }).unwrap()), + case(map! {map! {1=>2,3=>4} => "foo"}, cbor!({ {1=>2,3=>4} => "foo" }).unwrap()), + case(map! {map! {1=>2,3=>4} => 123}, cbor!({ {1=>2,3=>4} => 123 }).unwrap()), + case(map! {map! {1=>2,3=>4} => -123}, cbor!({ {1=>2,3=>4} => -123 }).unwrap()), + case(map! {map! {1=>2,3=>4} => 1.23}, cbor!({ {1=>2,3=>4} => 1.23 }).unwrap()), + case(map! {map! {1=>2,3=>4} => -1.23}, cbor!({ {1=>2,3=>4} => -1.23 }).unwrap()), + case(map! {map! {1=>2,3=>4} => 2.5e+1}, cbor!({ {1=>2,3=>4} => 2.5e+1 }).unwrap()), + case(map! {map! {1=>2,3=>4} => 2.5e+1}, cbor!({ {1=>2,3=>4} => 2.5e+1 }).unwrap()), + case(map! {map! {1=>2,3=>4} => [1, 2]}, cbor!({ {1=>2,3=>4} => [1, 2] }).unwrap()), + case(map! {map! {1=>2,3=>4} => map! {1=>2,3=>4}}, cbor!({ {1=>2,3=>4} => {1=>2,3=>4} }).unwrap()), +)] +fn test(answer: Value, question: Value) { + assert_eq!(answer, question); +} diff --git a/vendor/ciborium/tests/no_std.rs b/vendor/ciborium/tests/no_std.rs new file mode 100644 index 00000000..6ea008a4 --- /dev/null +++ b/vendor/ciborium/tests/no_std.rs @@ -0,0 +1,39 @@ +// SPDX-License-Identifier: Apache-2.0 + +#![cfg(all(feature = "serde", not(feature = "std")))] +#![no_std] + +extern crate alloc; + +use alloc::vec::Vec; + +use ciborium::{de::from_reader, ser::into_writer}; + +#[test] +fn decode() { + assert_eq!(from_reader::<u8, &[u8]>(&[7u8][..]).unwrap(), 7); +} + +#[test] +fn eof() { + from_reader::<u8, &[u8]>(&[]).unwrap_err(); +} + +#[test] +fn encode_slice() { + let mut buffer = [0u8; 1]; + into_writer(&3u8, &mut buffer[..]).unwrap(); + assert_eq!(buffer[0], 3); +} + +#[test] +fn encode_vec() { + let mut buffer = Vec::with_capacity(1); + into_writer(&3u8, &mut buffer).unwrap(); + assert_eq!(buffer[0], 3); +} + +#[test] +fn oos() { + into_writer(&3u8, &mut [][..]).unwrap_err(); +} diff --git a/vendor/ciborium/tests/recursion.rs b/vendor/ciborium/tests/recursion.rs new file mode 100644 index 00000000..cda1ce23 --- /dev/null +++ b/vendor/ciborium/tests/recursion.rs @@ -0,0 +1,92 @@ +// SPDX-License-Identifier: Apache-2.0 + +//! This test validates that we don't get stack overflows. +//! +//! If container types cause recursion, then a long list of prefixes which +//! indicate nested container types could cause the stack to overflow. We +//! test each of these types here to ensure there is no stack overflow. + +use ciborium::{ + de::{from_reader, from_reader_with_recursion_limit, Error}, + value::Value, +}; + +#[test] +fn array() { + let bytes = [0x9f; 128 * 1024]; + match from_reader::<Value, _>(&bytes[..]).unwrap_err() { + Error::RecursionLimitExceeded => (), + e => panic!("incorrect error: {:?}", e), + } +} + +#[test] +fn map() { + let bytes = [0xbf; 128 * 1024]; + match from_reader::<Value, _>(&bytes[..]).unwrap_err() { + Error::RecursionLimitExceeded => (), + e => panic!("incorrect error: {:?}", e), + } +} + +#[test] +fn bytes() { + let bytes = [0x5f; 128 * 1024]; + match from_reader::<Value, _>(&bytes[..]).unwrap_err() { + Error::Io(..) => (), + e => panic!("incorrect error: {:?}", e), + } +} + +#[test] +fn text() { + let bytes = [0x7f; 128 * 1024]; + match from_reader::<Value, _>(&bytes[..]).unwrap_err() { + Error::Io(..) 
=> (), + e => panic!("incorrect error: {:?}", e), + } +} + +#[test] +fn array_limit() { + let bytes = [0x9f; 128 * 1024]; + for limit in 16..256 { + match from_reader_with_recursion_limit::<Value, _>(&bytes[..], limit).unwrap_err() { + Error::RecursionLimitExceeded => (), + e => panic!("incorrect error with limit {}: {:?}", limit, e), + } + // Data that is nested beyond the limit should fail with `RecursionLimitExceeded` + match from_reader_with_recursion_limit::<Value, _>(&bytes[..limit + 1], limit).unwrap_err() + { + Error::RecursionLimitExceeded => (), + e => panic!("incorrect error with limit {}: {:?}", limit, e), + } + // Data that is nested within the limit fails with a different error. + match from_reader_with_recursion_limit::<Value, _>(&bytes[..limit], limit).unwrap_err() { + Error::Io(..) => (), + e => panic!("incorrect error with limit {}: {:?}", limit, e), + } + } +} + +#[test] +fn map_limit() { + let bytes = [0xbf; 128 * 1024]; + for limit in 16..256 { + match from_reader_with_recursion_limit::<Value, _>(&bytes[..], limit).unwrap_err() { + Error::RecursionLimitExceeded => (), + e => panic!("incorrect error with limit {}: {:?}", limit, e), + } + // Data that is nested beyond the limit should fail with `RecursionLimitExceeded` + match from_reader_with_recursion_limit::<Value, _>(&bytes[..limit + 1], limit).unwrap_err() + { + Error::RecursionLimitExceeded => (), + e => panic!("incorrect error with limit {}: {:?}", limit, e), + } + // Data that is nested within the limit fails with a different error. + match from_reader_with_recursion_limit::<Value, _>(&bytes[..limit], limit).unwrap_err() { + Error::Io(..) => (), + e => panic!("incorrect error with limit {}: {:?}", limit, e), + } + } +} diff --git a/vendor/ciborium/tests/tag.rs b/vendor/ciborium/tests/tag.rs new file mode 100644 index 00000000..c19ee009 --- /dev/null +++ b/vendor/ciborium/tests/tag.rs @@ -0,0 +1,57 @@ +// SPDX-License-Identifier: Apache-2.0 + +extern crate alloc; + +use ciborium::{de::from_reader, ser::into_writer, tag::*, value::Value}; +use rstest::rstest; +use serde::{de::DeserializeOwned, Serialize}; + +use core::fmt::Debug; + +#[rstest(item, bytes, value, encode, success, + case(Captured(Some(6), true), "c6f5", Value::Tag(6, Value::Bool(true).into()), true, true), + case(Captured(None, true), "f5", Value::Bool(true), true, true), + + case(Required::<_, 6>(true), "c6f5", Value::Tag(6, Value::Bool(true).into()), true, true), + case(Required::<_, 6>(true), "c7f5", Value::Tag(7, Value::Bool(true).into()), false, false), + case(Required::<_, 6>(true), "f5", Value::Bool(true), false, false), + + case(Accepted::<_, 6>(true), "c6f5", Value::Tag(6, Value::Bool(true).into()), true, true), + case(Accepted::<_, 6>(true), "c7f5", Value::Tag(7, Value::Bool(true).into()), false, false), + case(Accepted::<_, 6>(true), "f5", Value::Bool(true), false, true), +)] +fn test<T: Serialize + DeserializeOwned + Debug + Eq>( + item: T, + bytes: &str, + value: Value, + encode: bool, + success: bool, +) { + let bytes = hex::decode(bytes).unwrap(); + + if encode { + // Encode into bytes + let mut encoded = Vec::new(); + into_writer(&item, &mut encoded).unwrap(); + assert_eq!(bytes, encoded); + + // Encode into value + assert_eq!(value, Value::serialized(&item).unwrap()); + } + + // Decode from bytes + match from_reader(&bytes[..]) { + Ok(x) if success => assert_eq!(item, x), + Ok(..) => panic!("unexpected success"), + Err(e) if success => Err(e).unwrap(), + Err(..) 
=> (), + } + + // Decode from value + match value.deserialized() { + Ok(x) if success => assert_eq!(item, x), + Ok(..) => panic!("unexpected success"), + Err(e) if success => Err(e).unwrap(), + Err(..) => (), + } +} diff --git a/vendor/crunchy/.cargo-checksum.json b/vendor/crunchy/.cargo-checksum.json new file mode 100644 index 00000000..79d7b596 --- /dev/null +++ b/vendor/crunchy/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{"Cargo.toml":"0ff6097e9ad168cf9fab5ad73cf5db1e563d1ff4f09b2f1215dd08c6dd78389b","src/lib.rs":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"},"package":"460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5"} \ No newline at end of file diff --git a/vendor/crunchy/Cargo.toml b/vendor/crunchy/Cargo.toml new file mode 100644 index 00000000..1ddc7a7f --- /dev/null +++ b/vendor/crunchy/Cargo.toml @@ -0,0 +1,30 @@ +[dependencies] + +[features] +default = ["limit_128"] +limit_1024 = [] +limit_128 = [] +limit_2048 = [] +limit_256 = [] +limit_512 = [] +limit_64 = [] +std = [] + +[lib] +path = "src/lib.rs" + +[package] +authors = ["Eira Fransham <jackefransham@gmail.com>"] +autobenches = false +autobins = false +autoexamples = false +autolib = false +autotests = false +description = "Crunchy unroller: deterministically unroll constant loops" +edition = "2021" +homepage = "https://github.com/eira-fransham/crunchy" +license = "MIT" +name = "crunchy" +readme = "README.md" +repository = "https://github.com/eira-fransham/crunchy" +version = "0.2.4" diff --git a/vendor/crunchy/src/lib.rs b/vendor/crunchy/src/lib.rs new file mode 100644 index 00000000..e69de29b diff --git a/vendor/half/.cargo-checksum.json b/vendor/half/.cargo-checksum.json new file mode 100644 index 00000000..822dfb85 --- /dev/null +++ b/vendor/half/.cargo-checksum.json @@ -0,0 +1 @@ 
+{"files":{".cargo_vcs_info.json":"c6edc1660d23a3319db7917273499c815e8bbdea7d7e8c571343053a0acced82",".reuse/dep5":"d3824f88d34cd8a6eb4b015e7cd209eb0a06e159501bc5760c7b0e465cbb7ec4","CHANGELOG.md":"f5045a49d22c03a1fcf3ebe1009768fa053ab935af059f68ec2151ef2d04cf31","Cargo.lock":"966a5aa6a239acf0a55051fcea9dacb02d8e58a0472bf98e3e2b13745881e3ac","Cargo.toml":"cb0325b14b6e12ea7c027da7084877327b91368efc7a3c9cfac1c19c0fd4061d","Cargo.toml.orig":"f5b027b09e127f8718d4e119c62cdab806f28059dfa3e02cd42b567089fc2027","LICENSE-APACHE":"a6cba85bc92e0cff7a450b1d873c0eaa2e9fc96bf472df0247a26bec77bf3ff9","LICENSE-MIT":"508a77d2e7b51d98adeed32648ad124b7b30241a8e70b2e72c99f92d8e5874d1","Makefile.toml":"9a7c73f926e9a03e24331852fe31c5c0b034215ea18c23de91b5d98e097b9ed9","README.md":"4e94b1b26742b14959c73c70a379e0af63e09a0763e21eb7bdfc5c2b8cd26cf3","benches/convert.rs":"95c905467f132ea37e79b704a8fc3a0843f5f9dc6d61750c4abc57408321b8f6","src/bfloat.rs":"edfc793379e07cf0ffab8e0605608a50facfd41c04b8501a3c878a9e7f05e2c8","src/bfloat/convert.rs":"d05c02225e8f44a2d41132c1babf9ec13ea111e0125ca95a0ea31f8889a18384","src/binary16.rs":"48146947945ff5747b9e6e73b3a176d612b17b944f1b54d802c540d6563ecab3","src/binary16/arch.rs":"6299ef7f0ac8da0891707f765b702ac367b37509624a6a0d9892479b3840a036","src/binary16/arch/aarch64.rs":"c86b1c3d421ba4dffdc5174bc0274fac32a96bff82cfaf95670376ca1b7e1a72","src/binary16/arch/loongarch64.rs":"131232ad564b8b8ebf34ae3a96c52401850580d398411da80e1ef8c57b044517","src/binary16/arch/x86.rs":"1afdb345c288a0b319c5ec9ecd4c7df81c6873547f13e4d2bfcf3221dd6cba57","src/leading_zeros.rs":"37fe7595856f9d0b1ed0a806314ab7a8860bdebf4223fb3747768a077935bea0","src/lib.rs":"a5e2a9b5d2947bb03fba165446f34a5433055a755843560ccc41642cd7daa7cc","src/num_traits.rs":"4ec1ab5a2ace7afabcce4e1265753a5a309c04c28144888e3ce8f181d149d973","src/rand_distr.rs":"4871eb7cab19da019f5d66723a14f39b27d23da53b8e103990ed8969de2e7595","src/slice.rs":"aa71171371c875f6df7aa3400bef3a444f5bb4012ebe7470e5ffd3309d22b6ca","src/vec.rs":"1858a6485b26027ad7160a834977c424b05983ab42de2e618aa3f9d11221b04d"},"package":"6ea2d84b969582b4b1864a92dc5d27cd2b77b622a8d79306834f1be5ba20d84b"} \ No newline at end of file diff --git a/vendor/half/.cargo_vcs_info.json b/vendor/half/.cargo_vcs_info.json new file mode 100644 index 00000000..c4afff83 --- /dev/null +++ b/vendor/half/.cargo_vcs_info.json @@ -0,0 +1,6 @@ +{ + "git": { + "sha1": "8cc891f3e4aad956eca7fa79b1f42f87ecd141ae" + }, + "path_in_vcs": "" +} \ No newline at end of file diff --git a/vendor/half/.reuse/dep5 b/vendor/half/.reuse/dep5 new file mode 100644 index 00000000..9af2e912 --- /dev/null +++ b/vendor/half/.reuse/dep5 @@ -0,0 +1,7 @@ +Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ +Copyright: Kathryn Long <squeeself@gmail.com> +License: MIT OR Apache-2.0 + +Files: * +Copyright: 2021 Kathryn Long <squeeself@gmail.com> +License: MIT OR Apache-2.0 diff --git a/vendor/half/CHANGELOG.md b/vendor/half/CHANGELOG.md new file mode 100644 index 00000000..a289b277 --- /dev/null +++ b/vendor/half/CHANGELOG.md @@ -0,0 +1,452 @@ +# Changelog + +The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) +and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). + +## [Unreleased] + +## [2.7.1] - 2025-10-13 <a name="2.7.1"></a> +### Fixed +- `loongarch64` `lsx` hardware intrinsics for `f16` conversions now enabled only under + `nightly` cargo feature, fixing compile errors on stable Rust. 
+ +## [2.7.0] - 2025-10-08 <a name="2.7.0"></a> +### Changed +- `zerocopy` is now a required dependency. The optional `zerocopy` crate feature is deprecated. + This change is to ensure better code safety and prevent potential unsound behavior. +- Git repository URL has changed due to GitHub user name change. Old URL is redirected. + +### Added +- New `num-traits` implementations: `Signed` for `f16` and `bf16`. By [@djsell]. +- `loongarch64` `lsx` hardware intrinsic support for `f16` conversions. By [@heiher]. +- Implemented `Weight` trait from `rand` crate for `f16` and `bf16` with `rand` optional cargo + feature. By [@majian4work]. + +### Fixed +- `min` and `max` incorrectly propagate `NaN` values when `self` is `NaN`. Fixes [#126], + by [@mgottscho]. +- Suppressed warnings from new `unnecessary_transmutes` lint. + +### Removed +- `doc_auto_cfg` feature has been removed from docs.rs documentation due to removal of rust + feature. + +## [2.6.0] - 2025-04-08 <a name="2.6.0"></a> +### Changed +- Fixed some incorrect minimum supported versions of dependencies that weren't caught due to + improper `Cargo.lock`: + * `num-traits` 0.2.14 -> 0.2.16 + * `zerocopy` 0.8.0 -> 0.8.23 + * `arbitrary` 1.3.2 -> 1.4.1 + +### Added +- `f16` and `bf16` now implement `Immutable` and `KnownLayout` for `zerocopy` crate. By [@usamoi]. + +## [2.5.0] - 2025-03-13 <a name="2.5.0"></a> +### Changed +- Updated optional dependencies to latest major versions: + * `zerocopy` 0.6 -> 0.8 + * `rand` 0.8 -> 0.9 + * `rand_distr` 0.4 -> 0.5 + * `rkyv` 0.7 -> 0.8 + * (dev) `criterion` 0.4 -> 0.5 +- Minimum supported Rust version has been changed to 1.81 due to above dependency updates. +- Minor restructuring of included license file locations to be more consistent with crates ecosystem. + +### Added +- Added support for `arbitrary` crate. Fixes [#110]. By [@FL33TW00D]. +- New `num-traits` implementations: `FromBytes` and `ToBytes` for `f16` and `bf16`. By [@kpreid]. + +### Fixed +- Suppressed unexpected_cfg lint warnings on newer versions of stable Rust. +- Resolved ambiguous rustdoc warnings due to new unstable `f16` primitive in compiler. + +## [2.4.1] - 2024-04-06 <a name="2.4.1"></a> +### Fixed +- Missing macro import causing build failure on `no_std` + `alloc` feature set. Fixes [#107]. +- Clippy warning on nightly rust. + +## [2.4.0] - 2024-02-25 <a name="2.4.0"></a> +### Added +- Optional `rkyv` support. Fixes [#100], by [@comath]. +- New `num-traits` implementations: `AsPrimitive<f16>` for `bf16` and `AsPrimitive<bf16>` for + `f16`, allowing lossy conversions between the two types. By [@charles-r-earp]. +- `Cargo.lock` added to vcs as is now recommended for library crates. +### Fixed +- Remove some unit NaN conversion sign tests due to non-deterministic hardware. Fixes [#103]. +- Redundant import warnings on nightly Rust. + +## [2.3.1] - 2023-06-24 <a name="2.3.1"></a> +### Fixed +- Compile error on x86 (not x86_64) targets. Fixes [#93]. + +## [2.3.0] - 2023-06-24 <a name="2.3.0"></a> +### Added +- Support for Kani Rust Verifier. By [@cameron1024]. +- Support for `rand_distr::Distribution` implementations behind `rand_distr` optional cargo + feature. By [@coreylowman]. +- Floating point formatting options in `Display` and `Debug` implementations. By [@eiz]. + +### Changed +- **Breaking Change** Minimum supported Rust version is now 1.70.
+- **Breaking Change** Minimum supported Rust version policy reverted to original policy of allowing + minimum supported Rust version updates for minor releases instead of only major to avoid + segmentation and allow optimizing hardware implementations without unnecessary major releases. +- Hardware intrinsics/assembly is finally available on stable Rust, including using hardware + feature detection (`std` only), including: + - AArch64 now uses FP16 hardware instructions for conversions and math operations when + available. + - x86/x86-64 now uses F16C hardware instructions for conversions (but no math operations) when + available. Fixes [#54]. + +### Deprecated +- `use-intrinsics` cargo feature no longer used. Hardware support will now always be used whenever + possible. A future version may output deprecation warnings if this feature is enabled. + +### Fixed +- Improve code generation of `leading_zeros` functions by inlining. By [@encounter]. +- `Sum` implementation of `bf16` incorrectly performed product instead of sum. By [@wx-csy]. +- Compile failed when `serde` cargo feature enabled but `std` not enabled. +- Incorrect black boxing of benchmark tests. +- Rustdoc cfg display on docs.rs not getting enabled. + +## [2.2.1] - 2023-01-08 <a name="2.2.1"></a> +### Changed +- Reduced unnecessary bounds checks for SIMD operations on slices. By [@Shnatsel]. +- Further slice conversion optimizations for slices. Resolves [#66]. + +## [2.2.0] - 2022-12-30 <a name="2.2.0"></a> +### Added +- Add `serialize_as_f32` and `serialize_as_string` functions when `serde` cargo feature is enabled. + They allow customizing the serialization by using + `#[serde(serialize_with="f16::serialize_as_f32")]` attribute in serde derive macros. Closes [#60]. +- Deserialize now supports deserializing from `f32`, `f64`, and string values in addition to its + previous default deserialization. Closes [#60]. + +### Changed +- Add `#[inline]` on fallback functions, which improved conversion execution on non-nightly rust + by up to 50%. By [@Shnatsel]. + +## [2.1.0] - 2022-07-18 <a name="2.1.0"></a> +### Added +- Add support for target_arch `spirv`. Some traits and functions are unavailable on this + architecture. By [@charles-r-earp]. +- Add `total_cmp` method to both float types. Closes [#55], by [@joseluis]. + +## [2.0.0] - 2022-06-21 <a name="2.0.0"></a> +### Changed +- **Breaking Change** Minimum supported Rust version is now 1.58. +- **Breaking Change** `std` is now enabled as a default cargo feature. Disable default features to + continue using `no_std` support. +- Migrated to Rust Edition 2021. +- Added `#[must_use]` attributes to functions, as appropriate. + +### Fixed +- Fix a soundness bug with `slice::as_ptr` not correctly using mutable reference. By [@Nilstrieb]. + +### Added +- Added `const` conversion methods to both `f16` and `bf16`. These methods never use hardware + intrinsics, unlike the current conversion methods, which is why they are separated into new + methods. The following `const` methods were added: + - `from_f32_const` + - `from_f64_const` + - `to_f32_const` + - `to_f64_const` +- Added `Neg` trait support for borrowed values `&f16` and `&bf16`. By [@pthariensflame]. +- Added `AsPrimitive` implementations from and to self, `usize`, and `isize`. By [@kali]. + +### Removed +- **Breaking Change** The deprecated `serialize` cargo feature has been removed. Use `serde` cargo + feature instead. +- **Breaking Change** The deprecated `consts` module has been removed.
Use associated constants on + `f16` instead. +- **Breaking Change** The following deprecated functions have been removed: + - `f16::as_bits` + - `slice::from_bits_mut` + - `slice::to_bits_mut` + - `slice::from_bits` + - `slice::to_bits` + - `vec::from_bits` + - `vec::to_bits` + +## [1.8.2] - 2021-10-22 <a name="1.8.2"></a> +### Fixed +- Remove cargo resolver=2 from manifest to resolve errors in older versions of Rust that still + worked with 1.8.0. Going forward, MSRV increases will be major version increases. Fixes [#48]. + +## [1.8.1] - 2021-10-21 - **Yanked** <a name="1.8.1"></a> +### ***Yanked*** +*Not recommended due to introducing compilation error in Rust versions that worked with 1.8.0.* +### Changed +- Now uses cargo resolver version 2 to prevent dev-dependencies from enabling `std` feature on + optional dependencies. + +### Fixed +- Fixed compile failure when `std` feature is not enabled and `num-traits` is enabled under new + resolver. Now properly uses `libm` num-traits feature. + +## [1.8.0] - 2021-10-13 <a name="1.8.0"></a> +### Changed +- Now always implements `Add`, `Div`, `Mul`, `Neg`, `Rem`, and `Sub` traits. + Previously, these were only implemented under the `num-traits` feature. Keep in mind they still + convert to `f32` and back in the implementation. +- Minimum supported Rust version is now 1.51. +- Made crate package [REUSE compliant](https://reuse.software/). +- Docs now use intra-doc links instead of manual (and hard to maintain) links. +- The following methods on both `f16` and `bf16` are now `const`: + - `to_le_bytes` + - `to_be_bytes` + - `to_ne_bytes` + - `from_le_bytes` + - `from_be_bytes` + - `from_ne_bytes` + - `is_normal` + - `classify` + - `signum` + +### Added +- Added optional implementations of `zerocopy` traits `AsBytes` and `FromBytes` + under `zerocopy` cargo feature. By [@samcrow]. +- Implemented the `core::iter::Product` and `core::iter::Sum` traits, with the same caveat as above + about converting to `f32` and back under the hood. +- Added new associated const `NEG_ONE` to both `f16` and `bf16`. +- Added the following new methods on both `f16` and `bf16`: + - `copysign` + - `max` + - `min` + - `clamp` + +### Fixed +- Fixed a number of minor lints discovered due to improved CI. + +## [1.7.1] - 2021-01-17 <a name="1.7.1"></a> +### Fixed +- Docs.rs now generates docs for `bytemuck` and `num-traits` optional features. + +## [1.7.0] - 2021-01-17 <a name="1.7.0"></a> +### Added +- Added optional implementations of `bytemuck` traits `Zeroable` and `Pod` under `bytemuck` cargo + feature. By [@charles-r-earp]. +- Added optional implementations of `num-traits` traits `ToPrimitive` and `FromPrimitive` under + `num-traits` cargo feature. By [@charles-r-earp]. +- Added implementations of `Binary`, `Octal`, `LowerHex`, and `UpperHex` string format traits to + format raw `f16`/`bf16` bytes to string. + +### Changed +- `Debug` trait implementation now formats `f16`/`bf16` as float instead of raw bytes hex. Use newly + implemented formatting traits to format in hex instead of `Debug`. Fixes [#37]. + + +## [1.6.0] - 2020-05-09 <a name="1.6.0"></a> +### Added +- Added `LOG2_10` and `LOG10_2` constants to both `f16` and `bf16`, which were added to `f32` and + `f64` in the standard library in 1.43.0. By [@tspiteri]. +- Added `to_le/be/ne_bytes` and `from_le/be/ne_bytes` to both `f16` and `bf16`, which were added to + the standard library in 1.40.0. By [@bzm3r]. 
+ +## [1.5.0] - 2020-03-03 <a name="1.5.0"></a> +### Added +- Added the `alloc` feature to support the `alloc` crate in `no_std` environments. By [@zserik]. The + `vec` module is now available with either `alloc` or `std` feature. + +## [1.4.1] - 2020-02-10 <a name="1.4.1"></a> +### Fixed +- Added `#[repr(transparent)]` to `f16`/`bf16` to remove undefined behavior. By [@jfrimmel]. + +## [1.4.0] - 2019-10-13 <a name="1.4.0"></a> +### Added +- Added a `bf16` type implementing the alternative + [`bfloat16`](https://en.wikipedia.org/wiki/Bfloat16_floating-point_format) 16-bit floating point + format. By [@tspiteri]. +- `f16::from_bits`, `f16::to_bits`, `f16::is_nan`, `f16::is_infinite`, `f16::is_finite`, + `f16::is_sign_positive`, and `f16::is_sign_negative` are now `const` fns. +- `slice::HalfBitsSliceExt` and `slice::HalfFloatSliceExt` extension traits have been added for + performing efficient reinterpret casts and conversions of slices to and from `[f16]` and + `[bf16]`. These traits will use hardware SIMD conversion instructions when available and the + `use-intrinsics` cargo feature is enabled. +- `vec::HalfBitsVecExt` and `vec::HalfFloatVecExt` extension traits have been added for + performing efficient reinterpret casts to and from `Vec<f16>` and `Vec<bf16>`. These traits + are only available with the `std` cargo feature. +- `prelude` has been added, for easy importing of most common functionality. Currently the + prelude imports `f16`, `bf16`, and the new slice and vec extension traits. +- New associated constants on `f16` type to replace deprecated `consts` module. + +### Fixed +- Software conversion (when not using `use-intrinsics` feature) now matches hardware rounding + by rounding to nearest, ties to even. Fixes [#24], by [@tspiteri]. +- NaN value conversions now behave like `f32` to `f64` conversions, retaining sign. Fixes [#23], + by [@tspiteri]. + +### Changed +- Minimum rustc version bumped to 1.32. +- Runtime target host feature detection is now used if both `std` and `use-intrinsics` features are + enabled and the compile target host does not support required features. +- When `use-intrinsics` feature is enabled, will now always compile and run without error correctly + regardless of compile target options. + +### Deprecated +- `consts` module and all its constants have been deprecated; use the associated constants on `f16` + instead. +- `slice::from_bits` has been deprecated; use `slice::HalfBitsSliceExt::reinterpret_cast` instead. +- `slice::from_bits_mut` has been deprecated; use `slice::HalfBitsSliceExt::reinterpret_cast_mut` + instead. +- `slice::to_bits` has been deprecated; use `slice::HalfFloatSliceExt::reinterpret_cast` instead. +- `slice::to_bits_mut` has been deprecated; use `slice::HalfFloatSliceExt::reinterpret_cast_mut` + instead. +- `vec::from_bits` has been deprecated; use `vec::HalfBitsVecExt::reinterpret_into` instead. +- `vec::to_bits` has been deprecated; use `vec::HalfFloatVecExt::reinterpret_into` instead. + +## [1.3.1] - 2019-10-04 <a name="1.3.1"></a> +### Fixed +- Corrected values of constants `EPSILON`, `MAX_10_EXP`, `MAX_EXP`, `MIN_10_EXP`, and `MIN_EXP` + in `consts` module, as well as setting `consts::NAN` to match value of `f32::NAN` converted to + `f16`. By [@tspiteri]. + +## [1.3.0] - 2018-10-02 <a name="1.3.0"></a> +### Added +- `slice::from_bits_mut` and `slice::to_bits_mut` for conversion between mutable `u16` and `f16` + slices. Fixes [#16], by [@johannesvollmer].
+ +## [1.2.0] - 2018-09-03 <a name="1.2.0"></a> +### Added +- `slice` and optional `vec` (only included with `std` feature) modules for conversions between + `u16` and `f16` buffers. Fixes [#14], by [@johannesvollmer]. +- `to_bits` added to replace `as_bits`. Fixes [#12], by [@tspiteri]. +### Fixed +- `serde` optional dependency no longer uses its default `std` feature. +### Deprecated +- `as_bits` has been deprecated; use `to_bits` instead. +- `serialize` cargo feature is deprecated; use `serde` instead. + +## [1.1.2] - 2018-07-12 <a name="1.1.2"></a> +### Fixed +- Fixed compilation error in 1.1.1 on rustc < 1.27, now compiles again on rustc >= 1.10. Fixes + [#11]. + +## [1.1.1] - 2018-06-24 - **Yanked** <a name="1.1.1"></a> +### ***Yanked*** +*Not recommended due to introducing compilation error on rustc versions prior to 1.27.* +### Fixed +- Fix subnormal float conversions when `use-intrinsics` is not enabled. By [@Moongoodboy-K]. + +## [1.1.0] - 2018-03-17 <a name="1.1.0"></a> +### Added +- Made `to_f32` and `to_f64` public. Fixes [#7], by [@PSeitz]. + +## [1.0.2] - 2018-01-12 <a name="1.0.2"></a> +### Changed +- Update behavior of `is_sign_positive` and `is_sign_negative` to match the IEEE754 conforming + behavior of the standard library since Rust 1.20.0. Fixes [#3], by [@tspiteri]. +- Small optimization on `is_nan` and `is_infinite` from [@tspiteri]. +### Fixed +- Fix comparisons of +0 to -0 and comparisons involving negative numbers. Fixes [#2], by + [@tspiteri]. +- Fix loss of sign when converting `f16` and `f32` to `f16`, and case where `f64` NaN could be + converted to `f16` infinity instead of NaN. Fixes [#5], by [@tspiteri]. + +## [1.0.1] - 2017-08-30 <a name="1.0.1"></a> +### Added +- More README documentation. +- Badges and categories in crate metadata. +### Changed +- `serde` dependency updated to 1.0 stable. +- Writing changelog manually. + +## [1.0.0] - 2017-02-03 <a name="1.0.0"></a> +### Added +- Update to `serde` 0.9 and stable Rust 1.15 for `serialize` feature. + +## [0.1.1] - 2017-01-08 <a name="0.1.1"></a> +### Added +- Add `serde` support under new `serialize` feature. +### Changed +- Use `no_std` for crate by default. + +## 0.1.0 - 2016-03-17 <a name="0.1.0"></a> +### Added +- Initial release of `f16` type. 
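The 1.4.0 notes above introduce the slice extension traits that replaced the deprecated `from_bits`/`to_bits` helpers. Here is a minimal sketch of that API surface, assuming the default `std` feature; the bit patterns are standard binary16 encodings chosen for illustration, and the bulk-conversion calls are the same ones measured in `benches/convert.rs` later in this diff.

```rust
use half::prelude::*; // the prelude re-exports f16, bf16, and the slice/vec extension traits

fn main() {
    // HalfBitsSliceExt::reinterpret_cast: view raw u16 bits as f16 without copying.
    let bits: &[u16] = &[0x3C00, 0xC000]; // binary16 encodings of 1.0 and -2.0
    let halves: &[f16] = bits.reinterpret_cast();
    assert_eq!(halves[0], f16::ONE);

    // HalfFloatSliceExt bulk conversion into a pre-sized f32 buffer.
    let mut singles = [0f32; 2];
    halves.convert_to_f32_slice(&mut singles);
    assert_eq!(singles, [1.0, -2.0]);
}
```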
+ +[#2]: https://github.com/starkat99/half-rs/issues/2 +[#3]: https://github.com/starkat99/half-rs/issues/3 +[#5]: https://github.com/starkat99/half-rs/issues/5 +[#7]: https://github.com/starkat99/half-rs/issues/7 +[#11]: https://github.com/starkat99/half-rs/issues/11 +[#12]: https://github.com/starkat99/half-rs/issues/12 +[#14]: https://github.com/starkat99/half-rs/issues/14 +[#16]: https://github.com/starkat99/half-rs/issues/16 +[#23]: https://github.com/starkat99/half-rs/issues/23 +[#24]: https://github.com/starkat99/half-rs/issues/24 +[#37]: https://github.com/starkat99/half-rs/issues/37 +[#48]: https://github.com/starkat99/half-rs/issues/48 +[#55]: https://github.com/starkat99/half-rs/issues/55 +[#60]: https://github.com/starkat99/half-rs/issues/60 +[#66]: https://github.com/starkat99/half-rs/issues/66 +[#54]: https://github.com/starkat99/half-rs/issues/54 +[#93]: https://github.com/starkat99/half-rs/issues/93 +[#100]: https://github.com/starkat99/half-rs/issues/100 +[#103]: https://github.com/starkat99/half-rs/issues/103 +[#107]: https://github.com/starkat99/half-rs/issues/107 +[#110]: https://github.com/starkat99/half-rs/issues/110 +[#126]: https://github.com/starkat99/half-rs/issues/126 + +[@tspiteri]: https://github.com/tspiteri +[@PSeitz]: https://github.com/PSeitz +[@Moongoodboy-K]: https://github.com/Moongoodboy-K +[@johannesvollmer]: https://github.com/johannesvollmer +[@jfrimmel]: https://github.com/jfrimmel +[@zserik]: https://github.com/zserik +[@bzm3r]: https://github.com/bzm3r +[@charles-r-earp]: https://github.com/charles-r-earp +[@samcrow]: https://github.com/samcrow +[@pthariensflame]: https://github.com/pthariensflame +[@kali]: https://github.com/kali +[@Nilstrieb]: https://github.com/Nilstrieb +[@joseluis]: https://github.com/joseluis +[@Shnatsel]: https://github.com/Shnatsel +[@cameron1024]: https://github.com/cameron1024 +[@encounter]: https://github.com/encounter +[@coreylowman]: https://github.com/coreylowman +[@wx-csy]: https://github.com/wx-csy +[@eiz]: https://github.com/eiz +[@comath]: https://github.com/comath +[@FL33TW00D]: https://github.com/FL33TW00D +[@kpreid]: https://github.com/kpreid +[@usamoi]: https://github.com/usamoi +[@mgottscho]: https://github.com/mgottscho +[@djsell]: https://github.com/djsell +[@heiher]: https://github.com/heiher +[@majian4work]: https://github.com/majian4work + + +[Unreleased]: https://github.com/starkat99/half-rs/compare/v2.7.1...HEAD +[2.7.1]: https://github.com/starkat99/half-rs/compare/v2.7.0...v2.7.1 +[2.7.0]: https://github.com/starkat99/half-rs/compare/v2.6.0...v2.7.0 +[2.6.0]: https://github.com/starkat99/half-rs/compare/v2.5.0...v2.6.0 +[2.5.0]: https://github.com/starkat99/half-rs/compare/v2.4.1...v2.5.0 +[2.4.1]: https://github.com/starkat99/half-rs/compare/v2.4.0...v2.4.1 +[2.4.0]: https://github.com/starkat99/half-rs/compare/v2.3.1...v2.4.0 +[2.3.1]: https://github.com/starkat99/half-rs/compare/v2.3.0...v2.3.1 +[2.3.0]: https://github.com/starkat99/half-rs/compare/v2.2.1...v2.3.0 +[2.2.1]: https://github.com/starkat99/half-rs/compare/v2.2.0...v2.2.1 +[2.2.0]: https://github.com/starkat99/half-rs/compare/v2.1.0...v2.2.0 +[2.1.0]: https://github.com/starkat99/half-rs/compare/v2.0.0...v2.1.0 +[2.0.0]: https://github.com/starkat99/half-rs/compare/v1.8.2...v2.0.0 +[1.8.2]: https://github.com/starkat99/half-rs/compare/v1.8.1...v1.8.2 +[1.8.1]: https://github.com/starkat99/half-rs/compare/v1.8.0...v1.8.1 +[1.8.0]: https://github.com/starkat99/half-rs/compare/v1.7.1...v1.8.0 +[1.7.1]:
https://github.com/starkat99/half-rs/compare/v1.7.0...v1.7.1 +[1.7.0]: https://github.com/starkat99/half-rs/compare/v1.6.0...v1.7.0 +[1.6.0]: https://github.com/starkat99/half-rs/compare/v1.5.0...v1.6.0 +[1.5.0]: https://github.com/starkat99/half-rs/compare/v1.4.1...v1.5.0 +[1.4.1]: https://github.com/starkat99/half-rs/compare/v1.4.0...v1.4.1 +[1.4.0]: https://github.com/starkat99/half-rs/compare/v1.3.1...v1.4.0 +[1.3.1]: https://github.com/starkat99/half-rs/compare/v1.3.0...v1.3.1 +[1.3.0]: https://github.com/starkat99/half-rs/compare/v1.2.0...v1.3.0 +[1.2.0]: https://github.com/starkat99/half-rs/compare/v1.1.2...v1.2.0 +[1.1.2]: https://github.com/starkat99/half-rs/compare/v1.1.1...v1.1.2 +[1.1.1]: https://github.com/starkat99/half-rs/compare/v1.1.0...v1.1.1 +[1.1.0]: https://github.com/starkat99/half-rs/compare/v1.0.2...v1.1.0 +[1.0.2]: https://github.com/starkat99/half-rs/compare/v1.0.1...v1.0.2 +[1.0.1]: https://github.com/starkat99/half-rs/compare/v1.0.0...v1.0.1 +[1.0.0]: https://github.com/starkat99/half-rs/compare/v0.1.1...v1.0.0 +[0.1.1]: https://github.com/starkat99/half-rs/compare/v0.1.0...v0.1.1 diff --git a/vendor/half/Cargo.lock b/vendor/half/Cargo.lock new file mode 100644 index 00000000..ed87645a --- /dev/null +++ b/vendor/half/Cargo.lock @@ -0,0 +1,1020 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 3 + +[[package]] +name = "aho-corasick" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" +dependencies = [ + "memchr", +] + +[[package]] +name = "anes" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" + +[[package]] +name = "anstyle" +version = "1.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" + +[[package]] +name = "arbitrary" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dde20b3d026af13f561bdd0f15edf01fc734f0dafcedbaf42bba506a9517f223" +dependencies = [ + "derive_arbitrary", +] + +[[package]] +name = "autocfg" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" + +[[package]] +name = "bitflags" +version = "2.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c8214115b7bf84099f1309324e63141d4c5d7cc26862f97a0a857dbefe165bd" + +[[package]] +name = "bumpalo" +version = "3.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf" + +[[package]] +name = "bytecheck" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50690fb3370fb9fe3550372746084c46f2ac8c9685c583d2be10eefd89d3d1a3" +dependencies = [ + "bytecheck_derive", + "ptr_meta", + "rancor", + "simdutf8", +] + +[[package]] +name = "bytecheck_derive" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "efb7846e0cb180355c2dec69e721edafa36919850f1a9f52ffba4ebc0393cb71" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.100", +] + +[[package]] +name = "bytemuck" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"41aa2ec95ca3b5c54cf73c91acf06d24f4495d5f1b1c12506ae3483d646177ac" +dependencies = [ + "bytemuck_derive", +] + +[[package]] +name = "bytemuck_derive" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fa76293b4f7bb636ab88fd78228235b5248b4d05cc589aed610f954af5d7c7a" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.100", +] + +[[package]] +name = "bytes" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" + +[[package]] +name = "cast" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "ciborium" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42e69ffd6f0917f5c029256a24d0161db17cea3997d185db0d35926308770f0e" +dependencies = [ + "ciborium-io", + "ciborium-ll", + "serde", +] + +[[package]] +name = "ciborium-io" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05afea1e0a06c9be33d539b876f1ce3692f4afea2cb41f740e7743225ed1c757" + +[[package]] +name = "ciborium-ll" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57663b653d948a338bfb3eeba9bb2fd5fcfaecb9e199e87e1eda4d9e8b240fd9" +dependencies = [ + "ciborium-io", + "half 2.4.1", +] + +[[package]] +name = "clap" +version = "4.5.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6088f3ae8c3608d19260cd7445411865a485688711b78b5be70d78cd96136f83" +dependencies = [ + "clap_builder", +] + +[[package]] +name = "clap_builder" +version = "4.5.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22a7ef7f676155edfb82daa97f99441f3ebf4a58d5e32f295a56259f1b6facc8" +dependencies = [ + "anstyle", + "clap_lex", +] + +[[package]] +name = "clap_lex" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" + +[[package]] +name = "criterion" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2b12d017a929603d80db1831cd3a24082f8137ce19c69e6447f54f5fc8d692f" +dependencies = [ + "anes", + "cast", + "ciborium", + "clap", + "criterion-plot", + "is-terminal", + "itertools", + "num-traits", + "once_cell", + "oorandom", + "plotters", + "rayon", + "regex", + "serde", + "serde_derive", + "serde_json", + "tinytemplate", + "walkdir", +] + +[[package]] +name = "criterion-plot" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1" +dependencies = [ + "cast", + "itertools", +] + +[[package]] +name = "crossbeam-deque" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51" +dependencies = [ + "crossbeam-epoch", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" + +[[package]] +name = "crunchy" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43da5946c66ffcc7745f48db692ffbb10a83bfe0afd96235c5c2a4fb23994929" + +[[package]] +name = "derive_arbitrary" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30542c1ad912e0e3d22a1935c290e12e8a29d704a420177a31faad4a601a0800" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.100", +] + +[[package]] +name = "either" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" + +[[package]] +name = "env_logger" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a19187fea3ac7e84da7dacf48de0c45d63c6a76f9490dae389aead16c243fce3" +dependencies = [ + "log", + "regex", +] + +[[package]] +name = "equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + +[[package]] +name = "getrandom" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" +dependencies = [ + "cfg-if", + "libc", + "wasi 0.11.0+wasi-snapshot-preview1", +] + +[[package]] +name = "getrandom" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43a49c392881ce6d5c3b8cb70f98717b7c07aabbdff06687b9030dbfbe2725f8" +dependencies = [ + "cfg-if", + "libc", + "wasi 0.13.3+wasi-0.2.2", + "windows-targets", +] + +[[package]] +name = "half" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6dd08c532ae367adf81c312a4580bc67f1d0fe8bc9c460520283f4c0ff277888" +dependencies = [ + "cfg-if", + "crunchy", +] + +[[package]] +name = "half" +version = "2.7.1" +dependencies = [ + "arbitrary", + "bytemuck", + "cfg-if", + "criterion", + "crunchy", + "num-traits", + "quickcheck", + "quickcheck_macros", + "rand 0.9.0", + "rand_distr", + "rkyv", + "serde", + "zerocopy", +] + +[[package]] +name = "hashbrown" +version = "0.14.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" + +[[package]] +name = "hashbrown" +version = "0.15.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" + +[[package]] +name = "hermit-abi" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbd780fe5cc30f81464441920d82ac8740e2e46b29a6fad543ddd075229ce37e" + +[[package]] +name = "indexmap" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3954d50fe15b02142bf25d3b8bdadb634ec3948f103d04ffe3031bc8fe9d7058" +dependencies = [ + "equivalent", + "hashbrown 0.15.2", +] + +[[package]] +name = "is-terminal" +version = "0.4.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9" +dependencies = [ + "hermit-abi", + "libc", + "windows-sys", +] + +[[package]] +name = "itertools" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" + +[[package]] +name = "js-sys" +version = "0.3.77" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" +dependencies = [ + "once_cell", + "wasm-bindgen", +] + +[[package]] +name = "libc" +version = "0.2.171" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c19937216e9d3aa9956d9bb8dfc0b0c8beb6058fc4f7a4dc4d850edf86a237d6" + +[[package]] +name = "libm" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8355be11b20d696c8f18f6cc018c4e372165b1fa8126cef092399c9951984ffa" + +[[package]] +name = "log" +version = "0.4.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30bde2b3dc3671ae49d8e2e9f044c7c005836e7a023ee57cffa25ab82764bb9e" + +[[package]] +name = "memchr" +version = "2.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" + +[[package]] +name = "munge" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0091202c98cf06da46c279fdf50cccb6b1c43b4521abdf6a27b4c7e71d5d9d7" +dependencies = [ + "munge_macro", +] + +[[package]] +name = "munge_macro" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "734799cf91479720b2f970c61a22850940dd91e27d4f02b1c6fc792778df2459" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.100", +] + +[[package]] +name = "num-traits" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f30b0abd723be7e2ffca1272140fac1a2f084c77ec3e123c192b66af1ee9e6c2" +dependencies = [ + "autocfg", + "libm", +] + +[[package]] +name = "once_cell" +version = "1.21.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d75b0bedcc4fe52caa0e03d9f1151a323e4aa5e2d78ba3580400cd3c9e2bc4bc" + +[[package]] +name = "oorandom" +version = "11.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6790f58c7ff633d8771f42965289203411a5e5c68388703c06e14f24770b41e" + +[[package]] +name = "plotters" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5aeb6f403d7a4911efb1e33402027fc44f29b5bf6def3effcc22d7bb75f2b747" +dependencies = [ + "num-traits", + "plotters-backend", + "plotters-svg", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "plotters-backend" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df42e13c12958a16b3f7f4386b9ab1f3e7933914ecea48da7139435263a4172a" + +[[package]] +name = "plotters-svg" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51bae2ac328883f7acdfea3d66a7c35751187f870bc81f94563733a154d7a670" +dependencies = [ + "plotters-backend", +] + +[[package]] +name = "ppv-lite86" +version = "0.2.21" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" +dependencies = [ + "zerocopy", +] + +[[package]] +name = "proc-macro2" +version = "1.0.94" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a31971752e70b8b2686d7e46ec17fb38dad4051d94024c88df49b667caea9c84" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "ptr_meta" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe9e76f66d3f9606f44e45598d155cb13ecf09f4a28199e48daf8c8fc937ea90" +dependencies = [ + "ptr_meta_derive", +] + +[[package]] +name = "ptr_meta_derive" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca414edb151b4c8d125c12566ab0d74dc9cdba36fb80eb7b848c15f495fd32d1" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.100", +] + +[[package]] +name = "quickcheck" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "588f6378e4dd99458b60ec275b4477add41ce4fa9f64dcba6f15adccb19b50d6" +dependencies = [ + "env_logger", + "log", + "rand 0.8.5", +] + +[[package]] +name = "quickcheck_macros" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b22a693222d716a9587786f37ac3f6b4faedb5b80c23914e7303ff5a1d8016e9" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "quote" +version = "1.0.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rancor" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "caf5f7161924b9d1cea0e4cabc97c372cea92b5f927fc13c6bca67157a0ad947" +dependencies = [ + "ptr_meta", +] + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "rand_core 0.6.4", +] + +[[package]] +name = "rand" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3779b94aeb87e8bd4e834cee3650289ee9e0d5677f976ecdb6d219e5f4f6cd94" +dependencies = [ + "rand_chacha", + "rand_core 0.9.3", + "zerocopy", +] + +[[package]] +name = "rand_chacha" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" +dependencies = [ + "ppv-lite86", + "rand_core 0.9.3", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom 0.2.15", +] + +[[package]] +name = "rand_core" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38" +dependencies = [ + "getrandom 0.3.1", +] + +[[package]] +name = "rand_distr" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddc3b5afe4c995c44540865b8ca5c52e6a59fa362da96c5d30886930ddc8da1c" +dependencies = [ + "num-traits", + "rand 0.9.0", +] + +[[package]] +name = "rayon" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa" +dependencies = [ + "either", + "rayon-core", +] + +[[package]] +name = "rayon-core" +version = "1.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" +dependencies = [ + "crossbeam-deque", + "crossbeam-utils", +] + +[[package]] +name = "regex" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" + +[[package]] +name = "rend" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a35e8a6bf28cd121053a66aa2e6a2e3eaffad4a60012179f0e864aa5ffeff215" +dependencies = [ + "bytecheck", +] + +[[package]] +name = "rkyv" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d7fa2297190bd08087add407c3dedf28eb3be1d75955ffbd3bc312834325760" +dependencies = [ + "bytecheck", + "bytes", + "hashbrown 0.14.5", + "indexmap", + "munge", + "ptr_meta", + "rancor", + "rend", + "rkyv_derive", + "tinyvec", + "uuid", +] + +[[package]] +name = "rkyv_derive" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4aad510db4f88722adf0e4586ff0dedfca4af57b17c075b2420bac1db446d22c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.100", +] + +[[package]] +name = "rustversion" +version = "1.0.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eded382c5f5f786b989652c49544c4877d9f015cc22e145a5ea8ea66c2921cd2" + +[[package]] +name = "ryu" +version = "1.0.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" + +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "serde" +version = "1.0.219" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.219" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.100", +] + +[[package]] +name = "serde_json" +version = "1.0.140" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373" +dependencies = [ + "itoa", + "memchr", + "ryu", + "serde", +] + +[[package]] +name = "simdutf8" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e3a9fe34e3e7a50316060351f37187a3f546bce95496156754b601a5fa71b76e" + +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b09a44accad81e1ba1cd74a32461ba89dee89095ba17b32f5d03683b1b1fc2a0" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "tinytemplate" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc" +dependencies = [ + "serde", + "serde_json", +] + +[[package]] +name = "tinyvec" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09b3661f17e86524eccd4371ab0429194e0d7c008abb45f7a7495b1719463c71" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "unicode-ident" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" + +[[package]] +name = "uuid" +version = "1.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0f540e3240398cce6128b64ba83fdbdd86129c16a3aa1a3a252efd66eb3d587" + +[[package]] +name = "walkdir" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" +dependencies = [ + "same-file", + "winapi-util", +] + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasi" +version = "0.13.3+wasi-0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26816d2e1a4a36a2940b96c5296ce403917633dff8f3440e9b236ed6f6bacad2" +dependencies = [ + "wit-bindgen-rt", +] + +[[package]] +name = "wasm-bindgen" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" +dependencies = [ + "cfg-if", + "once_cell", + "rustversion", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" +dependencies = [ + "bumpalo", + "log", + "proc-macro2", + "quote", + "syn 2.0.100", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" +dependencies = [ + "proc-macro2", + "quote", 
+ "syn 2.0.100", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "web-sys" +version = "0.3.77" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33b6dd2ef9186f1f2072e409e99cd22a975331a6b3591b12c764e0e55c60d5d2" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "winapi-util" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" +dependencies = [ + "windows-sys", +] + +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_gnullvm", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "wit-bindgen-rt" +version = "0.33.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3268f3d866458b787f390cf61f4bbb563b922d091359f9608842999eaee3943c" +dependencies = [ + "bitflags", +] + +[[package]] +name = "zerocopy" +version = "0.8.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"1039dd0d3c310cf05de012d8a39ff557cb0d23087fd44cad61df08fc31907a2f" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.8.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ecf5b4cc5364572d7f4c329661bcc82724222973f2cab6f050a4e5c22f75181" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.100", +] diff --git a/vendor/half/Cargo.toml b/vendor/half/Cargo.toml new file mode 100644 index 00000000..426a0d34 --- /dev/null +++ b/vendor/half/Cargo.toml @@ -0,0 +1,137 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g., crates.io) dependencies. +# +# If you are reading this file be aware that the original Cargo.toml +# will likely look very different (and much more reasonable). +# See Cargo.toml.orig for the original contents. + +[package] +edition = "2021" +rust-version = "1.81" +name = "half" +version = "2.7.1" +authors = ["Kathryn Long <squeeself@gmail.com>"] +build = false +exclude = [ + ".git*", + ".editorconfig", + ".circleci", +] +autolib = false +autobins = false +autoexamples = false +autotests = false +autobenches = false +description = "Half-precision floating point f16 and bf16 types for Rust implementing the IEEE 754-2008 standard binary16 and bfloat16 types." +readme = "README.md" +keywords = [ + "f16", + "bfloat16", + "no_std", +] +categories = [ + "no-std", + "data-structures", + "encoding", +] +license = "MIT OR Apache-2.0" +repository = "https://github.com/VoidStarKat/half-rs" + +[package.metadata.docs.rs] +all-features = true +rustdoc-args = [ + "--cfg", + "docsrs", +] + +[features] +alloc = [] +default = ["std"] +nightly = [] +rand_distr = [ + "dep:rand", + "dep:rand_distr", +] +std = ["alloc"] +use-intrinsics = [] +zerocopy = [] + +[lib] +name = "half" +path = "src/lib.rs" + +[[bench]] +name = "convert" +path = "benches/convert.rs" +harness = false + +[dependencies.arbitrary] +version = "1.4.1" +features = ["derive"] +optional = true + +[dependencies.bytemuck] +version = "1.4.1" +features = ["derive"] +optional = true +default-features = false + +[dependencies.cfg-if] +version = "1.0.0" + +[dependencies.num-traits] +version = "0.2.16" +features = ["libm"] +optional = true +default-features = false + +[dependencies.rand] +version = "0.9.0" +features = ["thread_rng"] +optional = true +default-features = false + +[dependencies.rand_distr] +version = "0.5.0" +optional = true +default-features = false + +[dependencies.rkyv] +version = "0.8.0" +optional = true + +[dependencies.serde] +version = "1.0" +features = ["derive"] +optional = true +default-features = false + +[dependencies.zerocopy] +version = "0.8.26" +features = [ + "derive", + "simd", +] +default-features = false + +[dev-dependencies.criterion] +version = "0.5" + +[dev-dependencies.crunchy] +version = "0.2.2" + +[dev-dependencies.quickcheck] +version = "1.0" + +[dev-dependencies.quickcheck_macros] +version = "1.0" + +[dev-dependencies.rand] +version = "0.9.0" + +[target.'cfg(target_arch = "spirv")'.dependencies.crunchy] +version = "0.2.2" diff --git a/vendor/half/Cargo.toml.orig b/vendor/half/Cargo.toml.orig new file mode 100644 index 00000000..9c8a3fd8 --- /dev/null +++ b/vendor/half/Cargo.toml.orig @@ -0,0 +1,63 @@ +[package] +name = "half" +# Remember to keep in sync with html_root_url crate attribute +version = "2.7.1" 
+authors = ["Kathryn Long <squeeself@gmail.com>"] +description = "Half-precision floating point f16 and bf16 types for Rust implementing the IEEE 754-2008 standard binary16 and bfloat16 types." +repository = "https://github.com/VoidStarKat/half-rs" +readme = "README.md" +keywords = ["f16", "bfloat16", "no_std"] +license = "MIT OR Apache-2.0" +categories = ["no-std", "data-structures", "encoding"] +edition = "2021" +rust-version = "1.81" +exclude = [".git*", ".editorconfig", ".circleci"] + +[features] +default = ["std"] +std = ["alloc"] +use-intrinsics = [] # Deprecated +alloc = [] +rand_distr = ["dep:rand", "dep:rand_distr"] +zerocopy = [] # Deprecated +nightly = [] + +[dependencies] +cfg-if = "1.0.0" +bytemuck = { version = "1.4.1", default-features = false, features = [ + "derive", +], optional = true } +serde = { version = "1.0", default-features = false, features = [ + "derive", +], optional = true } +num-traits = { version = "0.2.16", default-features = false, features = [ + "libm", +], optional = true } +zerocopy = { version = "0.8.26", default-features = false, features = [ + "derive", + "simd", +] } +rand = { version = "0.9.0", default-features = false, features = [ + "thread_rng", +], optional = true } +rand_distr = { version = "0.5.0", default-features = false, optional = true } +rkyv = { version = "0.8.0", optional = true } +arbitrary = { version = "1.4.1", features = ["derive"], optional = true } + +[target.'cfg(target_arch = "spirv")'.dependencies] +crunchy = "0.2.2" + +[dev-dependencies] +criterion = "0.5" +quickcheck = "1.0" +quickcheck_macros = "1.0" +rand = "0.9.0" +crunchy = "0.2.2" + +[[bench]] +name = "convert" +harness = false + +[package.metadata.docs.rs] +rustdoc-args = ["--cfg", "docsrs"] +all-features = true diff --git a/vendor/half/LICENSE-APACHE b/vendor/half/LICENSE-APACHE new file mode 100644 index 00000000..d9a10c0d --- /dev/null +++ b/vendor/half/LICENSE-APACHE @@ -0,0 +1,176 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS diff --git a/vendor/half/LICENSE-MIT b/vendor/half/LICENSE-MIT new file mode 100644 index 00000000..9cf10627 --- /dev/null +++ b/vendor/half/LICENSE-MIT @@ -0,0 +1,19 @@ +MIT License + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/vendor/half/Makefile.toml b/vendor/half/Makefile.toml new file mode 100644 index 00000000..1479210b --- /dev/null +++ b/vendor/half/Makefile.toml @@ -0,0 +1,78 @@ +[config] +min_version = "0.35.0" + +[env] +CI_CARGO_TEST_FLAGS = { value = "--locked -- --nocapture", condition = { env_true = [ + "CARGO_MAKE_CI", +] } } +CARGO_MAKE_CARGO_ALL_FEATURES = { source = "${CARGO_MAKE_RUST_CHANNEL}", default_value = "--all-features", mapping = { "nightly" = "--all-features" } } +CARGO_MAKE_CLIPPY_ARGS = { value = "${CARGO_MAKE_CLIPPY_ALL_FEATURES_WARN}", condition = { env_true = [ + "CARGO_MAKE_CI", +] } } + +# Override for CI flag additions +[tasks.test] +args = [ + "test", + "@@remove-empty(CARGO_MAKE_CARGO_VERBOSE_FLAGS)", + "@@split(CARGO_MAKE_CARGO_BUILD_TEST_FLAGS, )", + "@@split(CI_CARGO_TEST_FLAGS, )", +] + +# Let clippy run on non-nightly CI +[tasks.clippy-ci-flow] +condition = { env_set = ["CARGO_MAKE_RUN_CLIPPY"] } + +# Let format check run on non-nightly CI +[tasks.check-format-ci-flow] +condition = { env_set = ["CARGO_MAKE_RUN_CHECK_FORMAT"] } + +[tasks.check-docs] +description = "Checks docs for errors." +category = "Documentation" +install_crate = false +env = { RUSTDOCFLAGS = "-D warnings" } +command = "cargo" +args = [ + "doc", + "--workspace", + "--no-deps", + "@@remove-empty(CARGO_MAKE_CARGO_VERBOSE_FLAGS)", + "${CARGO_MAKE_CARGO_ALL_FEATURES}", +] + +# Build & Test with no features enabled +[tasks.post-ci-flow] +run_task = [ + { name = [ + "check-docs", + "build-no-std", + "test-no-std", + "build-no-std-alloc", + "test-no-std-alloc", + ] }, +] + +[tasks.build-no-std] +description = "Build without any features" +category = "Build" +env = { CARGO_MAKE_CARGO_BUILD_TEST_FLAGS = "--no-default-features" } +run_task = "build" + +[tasks.test-no-std] +description = "Run tests without any features" +category = "Test" +env = { CARGO_MAKE_CARGO_BUILD_TEST_FLAGS = "--no-default-features" } +run_task = "test" + +[tasks.build-no-std-alloc] +description = "Build without any features except alloc" +category = "Build" +env = { CARGO_MAKE_CARGO_BUILD_TEST_FLAGS = "--no-default-features --features alloc" } +run_task = "build" + +[tasks.test-no-std-alloc] +description = "Run tests without any features except alloc" +category = "Test" +env = { CARGO_MAKE_CARGO_BUILD_TEST_FLAGS = "--no-default-features --features alloc" } +run_task = "test" diff --git a/vendor/half/README.md b/vendor/half/README.md new file mode 100644 index 00000000..bb6f815f --- /dev/null +++ b/vendor/half/README.md @@ -0,0 +1,90 @@ +# `f16` and `bf16` floating point types for Rust +[![Crates.io](https://img.shields.io/crates/v/half.svg)](https://crates.io/crates/half/) [![Documentation](https://docs.rs/half/badge.svg)](https://docs.rs/half/) ![Crates.io](https://img.shields.io/crates/l/half) [![Build status](https://github.com/VoidStarKat/half-rs/actions/workflows/rust.yml/badge.svg?branch=main&event=push)](https://github.com/VoidStarKat/half-rs/actions/workflows/rust.yml) [![CircleCI](https://dl.circleci.com/status-badge/img/gh/VoidStarKat/half-rs/tree/main.svg?style=svg)](https://dl.circleci.com/status-badge/redirect/gh/VoidStarKat/half-rs/tree/main) + +This crate implements a half-precision floating point `f16` type for Rust implementing the IEEE +754-2008 standard [`binary16`](https://en.wikipedia.org/wiki/Half-precision_floating-point_format) +a.k.a "half" format, as well as a `bf16` type implementing the +[`bfloat16`](https://en.wikipedia.org/wiki/Bfloat16_floating-point_format) format. 
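As a quick orientation to the two formats described above, here is a minimal sketch using the crate's basic conversion API (`from_f32`, `to_f32`, `to_bits`); it assumes the default `std` feature, and the printed values simply reflect rounding to each narrower format.

```rust
use half::{bf16, f16};

fn main() {
    let x = 0.1_f32;

    // binary16: 5 exponent bits, 10 fraction bits (smaller range, more precision than bf16).
    let h = f16::from_f32(x);
    // bfloat16: 8 exponent bits (same range as f32), 7 fraction bits.
    let b = bf16::from_f32(x);

    // Both types are two bytes wide.
    assert_eq!(core::mem::size_of::<f16>(), 2);
    assert_eq!(core::mem::size_of::<bf16>(), 2);

    println!("f16  bits: {:#06x}, round-trips to {}", h.to_bits(), h.to_f32());
    println!("bf16 bits: {:#06x}, round-trips to {}", b.to_bits(), b.to_f32());
}
```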
+ +## Usage + +The `f16` and `bf16` types attempt to match existing Rust floating point type functionality where possible, and provide both conversion operations (such as to/from `f32` and `f64`) and basic +arithmetic operations. Hardware support for these operations is used whenever it is available—either through intrinsics or targeted assembly—although a nightly Rust toolchain may +be required for some hardware. + +This crate provides [`no_std`](https://rust-embedded.github.io/book/intro/no-std.html) support so it can easily be used in embedded code where a smaller float format is most useful. + +*Requires Rust 1.81 or greater.* If you need support for older versions of Rust, use previous +versions of this crate. + +See the [crate documentation](https://docs.rs/half/) for more details. + +### Optional Features + +- **`alloc`** — Enable use of the [`alloc`](https://doc.rust-lang.org/alloc/) crate when not using + the `std` library. + + This enables the `vec` module, which contains zero-copy conversions for the `Vec` type. This + allows fast conversion between raw `Vec<u16>` bits and `Vec<f16>` or `Vec<bf16>` arrays, and vice + versa. + +- **`std`** — Enable features that depend on the Rust `std` library, including everything in the + `alloc` feature. + + Enabling the `std` feature enables runtime CPU feature detection of hardware support. + Without this feature detection, hardware intrinsics are only used when the compile target supports them. + +- **`serde`** - Implement `Serialize` and `Deserialize` traits for `f16` and `bf16`. This adds a + dependency on the [`serde`](https://crates.io/crates/serde) crate. + +- **`num-traits`** — Enable `ToPrimitive`, `FromPrimitive`, `ToBytes`, `FromBytes`, `Num`, `Float`, + `FloatCore`, `Signed`, and `Bounded` trait implementations from the + [`num-traits`](https://crates.io/crates/num-traits) crate. + +- **`bytemuck`** — Enable `Zeroable` and `Pod` trait implementations from the + [`bytemuck`](https://crates.io/crates/bytemuck) crate. + +- **`rand_distr`** — Enable sampling from distributions like `StandardUniform` and `StandardNormal` + from the [`rand_distr`](https://crates.io/crates/rand_distr) crate. + +- **`rkyv`** -- Enable zero-copy deserialization with the [`rkyv`](https://crates.io/crates/rkyv) crate. + +- **`arbitrary`** -- Enable fuzzing support with the [`arbitrary`](https://crates.io/crates/arbitrary) + crate by implementing the `Arbitrary` trait. + +- **`nightly`** -- Enable nightly-only features (currently `loongarch64` intrinsics). + +### Hardware support + +The following table details hardware support for floating point types in this crate. When using the `std` +library, runtime CPU target detection will be used. To get the most performance benefits, compile +for specific CPU features, which avoids the runtime overhead and works in a `no_std` environment. + +| Architecture | CPU Target Feature | Notes | +| ------------ | ------------------ | ------------------------------------------------------------------------------------------------------------------------------ | +| `x86`/`x86_64` | `f16c` | This supports conversion to/from `f16` only (including vector SIMD) and does not support any `bf16` or arithmetic operations. | +| `aarch64` | `fp16` | This supports all operations on `f16` only. | +| `loongarch64` | `lsx` | (`nightly` feature only) This supports conversion to/from `f16` only (including vector SIMD) and does not support any `bf16` or arithmetic operations.
| + +### More Documentation + +- [Crate API Reference](https://docs.rs/half/) +- [Latest Changes](CHANGELOG.md) + +## License + +All files in this library are dual-licensed and distributed under the terms of either of: + +* [MIT License](LICENSE-MIT) + ([http://opensource.org/licenses/MIT](http://opensource.org/licenses/MIT)) +* [Apache License, Version 2.0](LICENSE-APACHE) + ([http://www.apache.org/licenses/LICENSE-2.0](http://www.apache.org/licenses/LICENSE-2.0)) + +at your option. + +### Contributing + +Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in the +work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any +additional terms or conditions. diff --git a/vendor/half/benches/convert.rs b/vendor/half/benches/convert.rs new file mode 100644 index 00000000..e9d4faf5 --- /dev/null +++ b/vendor/half/benches/convert.rs @@ -0,0 +1,343 @@ +use criterion::{black_box, criterion_group, criterion_main, Bencher, BenchmarkId, Criterion}; +use half::prelude::*; +use std::{f32, f64, iter}; + +const SIMD_LARGE_BENCH_SLICE_LEN: usize = 1024; + +fn bench_f32_to_f16(c: &mut Criterion) { + let mut group = c.benchmark_group("Convert f16 From f32"); + for val in &[ + 0., + -0., + 1., + f32::MIN, + f32::MAX, + f32::MIN_POSITIVE, + f32::NEG_INFINITY, + f32::INFINITY, + f32::NAN, + f32::consts::E, + f32::consts::PI, + ] { + group.bench_with_input(BenchmarkId::new("f16::from_f32", val), val, |b, i| { + b.iter(|| f16::from_f32(*i)) + }); + } +} + +fn bench_f64_to_f16(c: &mut Criterion) { + let mut group = c.benchmark_group("Convert f16 From f64"); + for val in &[ + 0., + -0., + 1., + f64::MIN, + f64::MAX, + f64::MIN_POSITIVE, + f64::NEG_INFINITY, + f64::INFINITY, + f64::NAN, + f64::consts::E, + f64::consts::PI, + ] { + group.bench_with_input(BenchmarkId::new("f16::from_f64", val), val, |b, i| { + b.iter(|| f16::from_f64(*i)) + }); + } +} + +fn bench_f16_to_f32(c: &mut Criterion) { + let mut group = c.benchmark_group("Convert f16 to f32"); + for val in &[ + f16::ZERO, + f16::NEG_ZERO, + f16::ONE, + f16::MIN, + f16::MAX, + f16::MIN_POSITIVE, + f16::NEG_INFINITY, + f16::INFINITY, + f16::NAN, + f16::E, + f16::PI, + ] { + group.bench_with_input(BenchmarkId::new("f16::to_f32", val), val, |b, i| { + b.iter(|| i.to_f32()) + }); + } +} + +fn bench_f16_to_f64(c: &mut Criterion) { + let mut group = c.benchmark_group("Convert f16 to f64"); + for val in &[ + f16::ZERO, + f16::NEG_ZERO, + f16::ONE, + f16::MIN, + f16::MAX, + f16::MIN_POSITIVE, + f16::NEG_INFINITY, + f16::INFINITY, + f16::NAN, + f16::E, + f16::PI, + ] { + group.bench_with_input(BenchmarkId::new("f16::to_f64", val), val, |b, i| { + b.iter(|| i.to_f64()) + }); + } +} + +criterion_group!( + f16_sisd, + bench_f32_to_f16, + bench_f64_to_f16, + bench_f16_to_f32, + bench_f16_to_f64 +); + +fn bench_slice_f32_to_f16(c: &mut Criterion) { + let mut constant_buffer = [f16::ZERO; 11]; + let constants = [ + 0., + -0., + 1., + f32::MIN, + f32::MAX, + f32::MIN_POSITIVE, + f32::NEG_INFINITY, + f32::INFINITY, + f32::NAN, + f32::consts::E, + f32::consts::PI, + ]; + c.bench_function( + "HalfFloatSliceExt::convert_from_f32_slice/constants", + |b: &mut Bencher<'_>| { + b.iter(|| black_box(&mut constant_buffer).convert_from_f32_slice(black_box(&constants))) + }, + ); + + let large: Vec<_> = iter::repeat(0) + .enumerate() + .map(|(i, _)| i as f32) + .take(SIMD_LARGE_BENCH_SLICE_LEN) + .collect(); + let mut large_buffer = [f16::ZERO; SIMD_LARGE_BENCH_SLICE_LEN]; + c.bench_function( + 
"HalfFloatSliceExt::convert_from_f32_slice/large", + |b: &mut Bencher<'_>| { + b.iter(|| black_box(&mut large_buffer).convert_from_f32_slice(black_box(&large))) + }, + ); +} + +fn bench_slice_f64_to_f16(c: &mut Criterion) { + let mut constant_buffer = [f16::ZERO; 11]; + let constants = [ + 0., + -0., + 1., + f64::MIN, + f64::MAX, + f64::MIN_POSITIVE, + f64::NEG_INFINITY, + f64::INFINITY, + f64::NAN, + f64::consts::E, + f64::consts::PI, + ]; + c.bench_function( + "HalfFloatSliceExt::convert_from_f64_slice/constants", + |b: &mut Bencher<'_>| { + b.iter(|| black_box(&mut constant_buffer).convert_from_f64_slice(black_box(&constants))) + }, + ); + + let large: Vec<_> = iter::repeat(0) + .enumerate() + .map(|(i, _)| i as f64) + .take(SIMD_LARGE_BENCH_SLICE_LEN) + .collect(); + let mut large_buffer = [f16::ZERO; SIMD_LARGE_BENCH_SLICE_LEN]; + c.bench_function( + "HalfFloatSliceExt::convert_from_f64_slice/large", + |b: &mut Bencher<'_>| { + b.iter(|| black_box(&mut large_buffer).convert_from_f64_slice(black_box(&large))) + }, + ); +} + +fn bench_slice_f16_to_f32(c: &mut Criterion) { + let mut constant_buffer = [0f32; 11]; + let constants = [ + f16::ZERO, + f16::NEG_ZERO, + f16::ONE, + f16::MIN, + f16::MAX, + f16::MIN_POSITIVE, + f16::NEG_INFINITY, + f16::INFINITY, + f16::NAN, + f16::E, + f16::PI, + ]; + c.bench_function( + "HalfFloatSliceExt::convert_to_f32_slice/constants", + |b: &mut Bencher<'_>| { + b.iter(|| black_box(&constants).convert_to_f32_slice(black_box(&mut constant_buffer))) + }, + ); + + let large: Vec<_> = iter::repeat(0) + .enumerate() + .map(|(i, _)| f16::from_f32(i as f32)) + .take(SIMD_LARGE_BENCH_SLICE_LEN) + .collect(); + let mut large_buffer = [0f32; SIMD_LARGE_BENCH_SLICE_LEN]; + c.bench_function( + "HalfFloatSliceExt::convert_to_f32_slice/large", + |b: &mut Bencher<'_>| { + b.iter(|| black_box(&large).convert_to_f32_slice(black_box(&mut large_buffer))) + }, + ); +} + +fn bench_slice_f16_to_f64(c: &mut Criterion) { + let mut constant_buffer = [0f64; 11]; + let constants = [ + f16::ZERO, + f16::NEG_ZERO, + f16::ONE, + f16::MIN, + f16::MAX, + f16::MIN_POSITIVE, + f16::NEG_INFINITY, + f16::INFINITY, + f16::NAN, + f16::E, + f16::PI, + ]; + c.bench_function( + "HalfFloatSliceExt::convert_to_f64_slice/constants", + |b: &mut Bencher<'_>| { + b.iter(|| black_box(&constants).convert_to_f64_slice(black_box(&mut constant_buffer))) + }, + ); + + let large: Vec<_> = iter::repeat(0) + .enumerate() + .map(|(i, _)| f16::from_f64(i as f64)) + .take(SIMD_LARGE_BENCH_SLICE_LEN) + .collect(); + let mut large_buffer = [0f64; SIMD_LARGE_BENCH_SLICE_LEN]; + c.bench_function( + "HalfFloatSliceExt::convert_to_f64_slice/large", + |b: &mut Bencher<'_>| { + b.iter(|| black_box(&large).convert_to_f64_slice(black_box(&mut large_buffer))) + }, + ); +} + +criterion_group!( + f16_simd, + bench_slice_f32_to_f16, + bench_slice_f64_to_f16, + bench_slice_f16_to_f32, + bench_slice_f16_to_f64 +); + +fn bench_f32_to_bf16(c: &mut Criterion) { + let mut group = c.benchmark_group("Convert bf16 From f32"); + for val in &[ + 0., + -0., + 1., + f32::MIN, + f32::MAX, + f32::MIN_POSITIVE, + f32::NEG_INFINITY, + f32::INFINITY, + f32::NAN, + f32::consts::E, + f32::consts::PI, + ] { + group.bench_with_input(BenchmarkId::new("bf16::from_f32", val), val, |b, i| { + b.iter(|| bf16::from_f32(*i)) + }); + } +} + +fn bench_f64_to_bf16(c: &mut Criterion) { + let mut group = c.benchmark_group("Convert bf16 From f64"); + for val in &[ + 0., + -0., + 1., + f64::MIN, + f64::MAX, + f64::MIN_POSITIVE, + f64::NEG_INFINITY, + 
f64::INFINITY, + f64::NAN, + f64::consts::E, + f64::consts::PI, + ] { + group.bench_with_input(BenchmarkId::new("bf16::from_f64", val), val, |b, i| { + b.iter(|| bf16::from_f64(*i)) + }); + } +} + +fn bench_bf16_to_f32(c: &mut Criterion) { + let mut group = c.benchmark_group("Convert bf16 to f32"); + for val in &[ + bf16::ZERO, + bf16::NEG_ZERO, + bf16::ONE, + bf16::MIN, + bf16::MAX, + bf16::MIN_POSITIVE, + bf16::NEG_INFINITY, + bf16::INFINITY, + bf16::NAN, + bf16::E, + bf16::PI, + ] { + group.bench_with_input(BenchmarkId::new("bf16::to_f32", val), val, |b, i| { + b.iter(|| i.to_f32()) + }); + } +} + +fn bench_bf16_to_f64(c: &mut Criterion) { + let mut group = c.benchmark_group("Convert bf16 to f64"); + for val in &[ + bf16::ZERO, + bf16::NEG_ZERO, + bf16::ONE, + bf16::MIN, + bf16::MAX, + bf16::MIN_POSITIVE, + bf16::NEG_INFINITY, + bf16::INFINITY, + bf16::NAN, + bf16::E, + bf16::PI, + ] { + group.bench_with_input(BenchmarkId::new("bf16::to_f64", val), val, |b, i| { + b.iter(|| i.to_f64()) + }); + } +} + +criterion_group!( + bf16_sisd, + bench_f32_to_bf16, + bench_f64_to_bf16, + bench_bf16_to_f32, + bench_bf16_to_f64 +); + +criterion_main!(f16_sisd, bf16_sisd, f16_simd); diff --git a/vendor/half/src/bfloat.rs b/vendor/half/src/bfloat.rs new file mode 100644 index 00000000..4324d61c --- /dev/null +++ b/vendor/half/src/bfloat.rs @@ -0,0 +1,1925 @@ +#[cfg(all(feature = "serde", feature = "alloc"))] +#[allow(unused_imports)] +use alloc::string::ToString; +#[cfg(feature = "bytemuck")] +use bytemuck::{Pod, Zeroable}; +use core::{ + cmp::Ordering, + iter::{Product, Sum}, + num::FpCategory, + ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Rem, RemAssign, Sub, SubAssign}, +}; +#[cfg(not(target_arch = "spirv"))] +use core::{ + fmt::{ + Binary, Debug, Display, Error, Formatter, LowerExp, LowerHex, Octal, UpperExp, UpperHex, + }, + num::ParseFloatError, + str::FromStr, +}; +#[cfg(feature = "serde")] +use serde::{Deserialize, Serialize}; +use zerocopy::{FromBytes, Immutable, IntoBytes, KnownLayout}; + +pub(crate) mod convert; + +/// A 16-bit floating point type implementing the [`bfloat16`] format. +/// +/// The [`bfloat16`] floating point format is a truncated 16-bit version of the IEEE 754 standard +/// `binary32`, a.k.a [`f32`]. [`struct@bf16`] has approximately the same dynamic range as [`f32`] by +/// having a lower precision than [`struct@f16`][crate::f16]. While [`struct@f16`][crate::f16] has a precision of +/// 11 bits, [`struct@bf16`] has a precision of only 8 bits. +/// +/// [`bfloat16`]: https://en.wikipedia.org/wiki/Bfloat16_floating-point_format +#[allow(non_camel_case_types)] +#[derive(Clone, Copy, Default)] +#[repr(transparent)] +#[cfg_attr(feature = "serde", derive(Serialize))] +#[cfg_attr( + feature = "rkyv", + derive(rkyv::Archive, rkyv::Serialize, rkyv::Deserialize) +)] +#[cfg_attr(feature = "rkyv", rkyv(resolver = Bf16Resolver))] +#[cfg_attr(feature = "bytemuck", derive(Zeroable, Pod))] +#[cfg_attr(kani, derive(kani::Arbitrary))] +#[derive(FromBytes, Immutable, IntoBytes, KnownLayout)] +pub struct bf16(u16); + +impl bf16 { + /// Constructs a [`struct@bf16`] value from the raw bits. + #[inline] + #[must_use] + pub const fn from_bits(bits: u16) -> bf16 { + bf16(bits) + } + + /// Constructs a [`struct@bf16`] value from a 32-bit floating point value. + /// + /// This operation is lossy. If the 32-bit value is too large to fit, ±∞ will result. NaN values + /// are preserved. Subnormal values that are too tiny to be represented will result in ±0. 
All + /// other values are truncated and rounded to the nearest representable value. + #[inline] + #[must_use] + pub fn from_f32(value: f32) -> bf16 { + Self::from_f32_const(value) + } + + /// Constructs a [`struct@bf16`] value from a 32-bit floating point value. + /// + /// This function is identical to [`from_f32`][Self::from_f32] except it never uses hardware + /// intrinsics, which allows it to be `const`. [`from_f32`][Self::from_f32] should be preferred + /// in any non-`const` context. + /// + /// This operation is lossy. If the 32-bit value is too large to fit, ±∞ will result. NaN values + /// are preserved. Subnormal values that are too tiny to be represented will result in ±0. All + /// other values are truncated and rounded to the nearest representable value. + #[inline] + #[must_use] + pub const fn from_f32_const(value: f32) -> bf16 { + bf16(convert::f32_to_bf16(value)) + } + + /// Constructs a [`struct@bf16`] value from a 64-bit floating point value. + /// + /// This operation is lossy. If the 64-bit value is too large to fit, ±∞ will result. NaN values + /// are preserved. 64-bit subnormal values are too tiny to be represented and result in ±0. + /// Exponents that underflow the minimum exponent will result in subnormals or ±0. All other + /// values are truncated and rounded to the nearest representable value. + #[inline] + #[must_use] + pub fn from_f64(value: f64) -> bf16 { + Self::from_f64_const(value) + } + + /// Constructs a [`struct@bf16`] value from a 64-bit floating point value. + /// + /// This function is identical to [`from_f64`][Self::from_f64] except it never uses hardware + /// intrinsics, which allows it to be `const`. [`from_f64`][Self::from_f64] should be preferred + /// in any non-`const` context. + /// + /// This operation is lossy. If the 64-bit value is too large to fit, ±∞ will result. NaN values + /// are preserved. 64-bit subnormal values are too tiny to be represented and result in ±0. + /// Exponents that underflow the minimum exponent will result in subnormals or ±0. All other + /// values are truncated and rounded to the nearest representable value. + #[inline] + #[must_use] + pub const fn from_f64_const(value: f64) -> bf16 { + bf16(convert::f64_to_bf16(value)) + } + + /// Converts a [`struct@bf16`] into the underlying bit representation. + #[inline] + #[must_use] + pub const fn to_bits(self) -> u16 { + self.0 + } + + /// Returns the memory representation of the underlying bit representation as a byte array in + /// little-endian byte order. + /// + /// # Examples + /// + /// ```rust + /// # use half::prelude::*; + /// let bytes = bf16::from_f32(12.5).to_le_bytes(); + /// assert_eq!(bytes, [0x48, 0x41]); + /// ``` + #[inline] + #[must_use] + pub const fn to_le_bytes(self) -> [u8; 2] { + self.0.to_le_bytes() + } + + /// Returns the memory representation of the underlying bit representation as a byte array in + /// big-endian (network) byte order. + /// + /// # Examples + /// + /// ```rust + /// # use half::prelude::*; + /// let bytes = bf16::from_f32(12.5).to_be_bytes(); + /// assert_eq!(bytes, [0x41, 0x48]); + /// ``` + #[inline] + #[must_use] + pub const fn to_be_bytes(self) -> [u8; 2] { + self.0.to_be_bytes() + } + + /// Returns the memory representation of the underlying bit representation as a byte array in + /// native byte order. + /// + /// As the target platform's native endianness is used, portable code should use + /// [`to_be_bytes`][bf16::to_be_bytes] or [`to_le_bytes`][bf16::to_le_bytes], as appropriate, + /// instead.
+ /// + /// # Examples + /// + /// ```rust + /// # use half::prelude::*; + /// let bytes = bf16::from_f32(12.5).to_ne_bytes(); + /// assert_eq!(bytes, if cfg!(target_endian = "big") { + /// [0x41, 0x48] + /// } else { + /// [0x48, 0x41] + /// }); + /// ``` + #[inline] + #[must_use] + pub const fn to_ne_bytes(self) -> [u8; 2] { + self.0.to_ne_bytes() + } + + /// Creates a floating point value from its representation as a byte array in little endian. + /// + /// # Examples + /// + /// ```rust + /// # use half::prelude::*; + /// let value = bf16::from_le_bytes([0x48, 0x41]); + /// assert_eq!(value, bf16::from_f32(12.5)); + /// ``` + #[inline] + #[must_use] + pub const fn from_le_bytes(bytes: [u8; 2]) -> bf16 { + bf16::from_bits(u16::from_le_bytes(bytes)) + } + + /// Creates a floating point value from its representation as a byte array in big endian. + /// + /// # Examples + /// + /// ```rust + /// # use half::prelude::*; + /// let value = bf16::from_be_bytes([0x41, 0x48]); + /// assert_eq!(value, bf16::from_f32(12.5)); + /// ``` + #[inline] + #[must_use] + pub const fn from_be_bytes(bytes: [u8; 2]) -> bf16 { + bf16::from_bits(u16::from_be_bytes(bytes)) + } + + /// Creates a floating point value from its representation as a byte array in native endian. + /// + /// As the target platform's native endianness is used, portable code likely wants to use + /// [`from_be_bytes`][bf16::from_be_bytes] or [`from_le_bytes`][bf16::from_le_bytes], as + /// appropriate instead. + /// + /// # Examples + /// + /// ```rust + /// # use half::prelude::*; + /// let value = bf16::from_ne_bytes(if cfg!(target_endian = "big") { + /// [0x41, 0x48] + /// } else { + /// [0x48, 0x41] + /// }); + /// assert_eq!(value, bf16::from_f32(12.5)); + /// ``` + #[inline] + #[must_use] + pub const fn from_ne_bytes(bytes: [u8; 2]) -> bf16 { + bf16::from_bits(u16::from_ne_bytes(bytes)) + } + + /// Converts a [`struct@bf16`] value into an [`f32`] value. + /// + /// This conversion is lossless as all values can be represented exactly in [`f32`]. + #[inline] + #[must_use] + pub fn to_f32(self) -> f32 { + self.to_f32_const() + } + + /// Converts a [`struct@bf16`] value into an [`f32`] value. + /// + /// This function is identical to [`to_f32`][Self::to_f32] except it never uses hardware + /// intrinsics, which allows it to be `const`. [`to_f32`][Self::to_f32] should be preferred + /// in any non-`const` context. + /// + /// This conversion is lossless as all values can be represented exactly in [`f32`]. + #[inline] + #[must_use] + pub const fn to_f32_const(self) -> f32 { + convert::bf16_to_f32(self.0) + } + + /// Converts a [`struct@bf16`] value into an [`f64`] value. + /// + /// This conversion is lossless as all values can be represented exactly in [`f64`]. + #[inline] + #[must_use] + pub fn to_f64(self) -> f64 { + self.to_f64_const() + } + + /// Converts a [`struct@bf16`] value into an [`f64`] value. + /// + /// This function is identical to [`to_f64`][Self::to_f64] except it never uses hardware + /// intrinsics, which allows it to be `const`. [`to_f64`][Self::to_f64] should be preferred + /// in any non-`const` context. + /// + /// This conversion is lossless as all values can be represented exactly in [`f64`]. + #[inline] + #[must_use] + pub const fn to_f64_const(self) -> f64 { + convert::bf16_to_f64(self.0) + } + + /// Returns `true` if this value is NaN and `false` otherwise. 
+ /// + /// # Examples + /// + /// ```rust + /// # use half::prelude::*; + /// + /// let nan = bf16::NAN; + /// let f = bf16::from_f32(7.0_f32); + /// + /// assert!(nan.is_nan()); + /// assert!(!f.is_nan()); + /// ``` + #[inline] + #[must_use] + pub const fn is_nan(self) -> bool { + self.0 & 0x7FFFu16 > 0x7F80u16 + } + + /// Returns `true` if this value is ±∞ and `false` otherwise. + /// + /// # Examples + /// + /// ```rust + /// # use half::prelude::*; + /// + /// let f = bf16::from_f32(7.0f32); + /// let inf = bf16::INFINITY; + /// let neg_inf = bf16::NEG_INFINITY; + /// let nan = bf16::NAN; + /// + /// assert!(!f.is_infinite()); + /// assert!(!nan.is_infinite()); + /// + /// assert!(inf.is_infinite()); + /// assert!(neg_inf.is_infinite()); + /// ``` + #[inline] + #[must_use] + pub const fn is_infinite(self) -> bool { + self.0 & 0x7FFFu16 == 0x7F80u16 + } + + /// Returns `true` if this number is neither infinite nor NaN. + /// + /// # Examples + /// + /// ```rust + /// # use half::prelude::*; + /// + /// let f = bf16::from_f32(7.0f32); + /// let inf = bf16::INFINITY; + /// let neg_inf = bf16::NEG_INFINITY; + /// let nan = bf16::NAN; + /// + /// assert!(f.is_finite()); + /// + /// assert!(!nan.is_finite()); + /// assert!(!inf.is_finite()); + /// assert!(!neg_inf.is_finite()); + /// ``` + #[inline] + #[must_use] + pub const fn is_finite(self) -> bool { + self.0 & 0x7F80u16 != 0x7F80u16 + } + + /// Returns `true` if the number is neither zero, infinite, subnormal, or NaN. + /// + /// # Examples + /// + /// ```rust + /// # use half::prelude::*; + /// + /// let min = bf16::MIN_POSITIVE; + /// let max = bf16::MAX; + /// let lower_than_min = bf16::from_f32(1.0e-39_f32); + /// let zero = bf16::from_f32(0.0_f32); + /// + /// assert!(min.is_normal()); + /// assert!(max.is_normal()); + /// + /// assert!(!zero.is_normal()); + /// assert!(!bf16::NAN.is_normal()); + /// assert!(!bf16::INFINITY.is_normal()); + /// // Values between 0 and `min` are subnormal. + /// assert!(!lower_than_min.is_normal()); + /// ``` + #[inline] + #[must_use] + pub const fn is_normal(self) -> bool { + let exp = self.0 & 0x7F80u16; + exp != 0x7F80u16 && exp != 0 + } + + /// Returns the floating point category of the number. + /// + /// If only one property is going to be tested, it is generally faster to use the specific + /// predicate instead. + /// + /// # Examples + /// + /// ```rust + /// use std::num::FpCategory; + /// # use half::prelude::*; + /// + /// let num = bf16::from_f32(12.4_f32); + /// let inf = bf16::INFINITY; + /// + /// assert_eq!(num.classify(), FpCategory::Normal); + /// assert_eq!(inf.classify(), FpCategory::Infinite); + /// ``` + #[must_use] + pub const fn classify(self) -> FpCategory { + let exp = self.0 & 0x7F80u16; + let man = self.0 & 0x007Fu16; + match (exp, man) { + (0, 0) => FpCategory::Zero, + (0, _) => FpCategory::Subnormal, + (0x7F80u16, 0) => FpCategory::Infinite, + (0x7F80u16, _) => FpCategory::Nan, + _ => FpCategory::Normal, + } + } + + /// Returns a number that represents the sign of `self`. 
+ /// + /// * 1.0 if the number is positive, +0.0 or [`INFINITY`][bf16::INFINITY] + /// * −1.0 if the number is negative, −0.0` or [`NEG_INFINITY`][bf16::NEG_INFINITY] + /// * [`NAN`][bf16::NAN] if the number is NaN + /// + /// # Examples + /// + /// ```rust + /// # use half::prelude::*; + /// + /// let f = bf16::from_f32(3.5_f32); + /// + /// assert_eq!(f.signum(), bf16::from_f32(1.0)); + /// assert_eq!(bf16::NEG_INFINITY.signum(), bf16::from_f32(-1.0)); + /// + /// assert!(bf16::NAN.signum().is_nan()); + /// ``` + #[must_use] + pub const fn signum(self) -> bf16 { + if self.is_nan() { + self + } else if self.0 & 0x8000u16 != 0 { + Self::NEG_ONE + } else { + Self::ONE + } + } + + /// Returns `true` if and only if `self` has a positive sign, including +0.0, NaNs with a + /// positive sign bit and +∞. + /// + /// # Examples + /// + /// ```rust + /// # use half::prelude::*; + /// + /// let nan = bf16::NAN; + /// let f = bf16::from_f32(7.0_f32); + /// let g = bf16::from_f32(-7.0_f32); + /// + /// assert!(f.is_sign_positive()); + /// assert!(!g.is_sign_positive()); + /// // NaN can be either positive or negative + /// assert!(nan.is_sign_positive() != nan.is_sign_negative()); + /// ``` + #[inline] + #[must_use] + pub const fn is_sign_positive(self) -> bool { + self.0 & 0x8000u16 == 0 + } + + /// Returns `true` if and only if `self` has a negative sign, including −0.0, NaNs with a + /// negative sign bit and −∞. + /// + /// # Examples + /// + /// ```rust + /// # use half::prelude::*; + /// + /// let nan = bf16::NAN; + /// let f = bf16::from_f32(7.0f32); + /// let g = bf16::from_f32(-7.0f32); + /// + /// assert!(!f.is_sign_negative()); + /// assert!(g.is_sign_negative()); + /// // NaN can be either positive or negative + /// assert!(nan.is_sign_positive() != nan.is_sign_negative()); + /// ``` + #[inline] + #[must_use] + pub const fn is_sign_negative(self) -> bool { + self.0 & 0x8000u16 != 0 + } + + /// Returns a number composed of the magnitude of `self` and the sign of `sign`. + /// + /// Equal to `self` if the sign of `self` and `sign` are the same, otherwise equal to `-self`. + /// If `self` is NaN, then NaN with the sign of `sign` is returned. + /// + /// # Examples + /// + /// ``` + /// # use half::prelude::*; + /// let f = bf16::from_f32(3.5); + /// + /// assert_eq!(f.copysign(bf16::from_f32(0.42)), bf16::from_f32(3.5)); + /// assert_eq!(f.copysign(bf16::from_f32(-0.42)), bf16::from_f32(-3.5)); + /// assert_eq!((-f).copysign(bf16::from_f32(0.42)), bf16::from_f32(3.5)); + /// assert_eq!((-f).copysign(bf16::from_f32(-0.42)), bf16::from_f32(-3.5)); + /// + /// assert!(bf16::NAN.copysign(bf16::from_f32(1.0)).is_nan()); + /// ``` + #[inline] + #[must_use] + pub const fn copysign(self, sign: bf16) -> bf16 { + bf16((sign.0 & 0x8000u16) | (self.0 & 0x7FFFu16)) + } + + /// Returns the maximum of the two numbers. + /// + /// If one of the arguments is NaN, then the other argument is returned. + /// + /// # Examples + /// + /// ``` + /// # use half::prelude::*; + /// let x = bf16::from_f32(1.0); + /// let y = bf16::from_f32(2.0); + /// + /// assert_eq!(x.max(y), y); + /// ``` + #[inline] + #[must_use] + pub fn max(self, other: bf16) -> bf16 { + if self.is_nan() || other > self { + other + } else { + self + } + } + + /// Returns the minimum of the two numbers. + /// + /// If one of the arguments is NaN, then the other argument is returned. 
+ /// + /// # Examples + /// + /// ``` + /// # use half::prelude::*; + /// let x = bf16::from_f32(1.0); + /// let y = bf16::from_f32(2.0); + /// + /// assert_eq!(x.min(y), x); + /// ``` + #[inline] + #[must_use] + pub fn min(self, other: bf16) -> bf16 { + if self.is_nan() || other < self { + other + } else { + self + } + } + + /// Restrict a value to a certain interval unless it is NaN. + /// + /// Returns `max` if `self` is greater than `max`, and `min` if `self` is less than `min`. + /// Otherwise this returns `self`. + /// + /// Note that this function returns NaN if the initial value was NaN as well. + /// + /// # Panics + /// Panics if `min > max`, `min` is NaN, or `max` is NaN. + /// + /// # Examples + /// + /// ``` + /// # use half::prelude::*; + /// assert!(bf16::from_f32(-3.0).clamp(bf16::from_f32(-2.0), bf16::from_f32(1.0)) == bf16::from_f32(-2.0)); + /// assert!(bf16::from_f32(0.0).clamp(bf16::from_f32(-2.0), bf16::from_f32(1.0)) == bf16::from_f32(0.0)); + /// assert!(bf16::from_f32(2.0).clamp(bf16::from_f32(-2.0), bf16::from_f32(1.0)) == bf16::from_f32(1.0)); + /// assert!(bf16::NAN.clamp(bf16::from_f32(-2.0), bf16::from_f32(1.0)).is_nan()); + /// ``` + #[inline] + #[must_use] + pub fn clamp(self, min: bf16, max: bf16) -> bf16 { + assert!(min <= max); + let mut x = self; + if x < min { + x = min; + } + if x > max { + x = max; + } + x + } + + /// Returns the ordering between `self` and `other`. + /// + /// Unlike the standard partial comparison between floating point numbers, + /// this comparison always produces an ordering in accordance to + /// the `totalOrder` predicate as defined in the IEEE 754 (2008 revision) + /// floating point standard. The values are ordered in the following sequence: + /// + /// - negative quiet NaN + /// - negative signaling NaN + /// - negative infinity + /// - negative numbers + /// - negative subnormal numbers + /// - negative zero + /// - positive zero + /// - positive subnormal numbers + /// - positive numbers + /// - positive infinity + /// - positive signaling NaN + /// - positive quiet NaN. + /// + /// The ordering established by this function does not always agree with the + /// [`PartialOrd`] and [`PartialEq`] implementations of `bf16`. For example, + /// they consider negative and positive zero equal, while `total_cmp` + /// doesn't. + /// + /// The interpretation of the signaling NaN bit follows the definition in + /// the IEEE 754 standard, which may not match the interpretation by some of + /// the older, non-conformant (e.g. MIPS) hardware implementations. 
+ /// + /// # Examples + /// ``` + /// # use half::bf16; + /// let mut v: Vec<bf16> = vec![]; + /// v.push(bf16::ONE); + /// v.push(bf16::INFINITY); + /// v.push(bf16::NEG_INFINITY); + /// v.push(bf16::NAN); + /// v.push(bf16::MAX_SUBNORMAL); + /// v.push(-bf16::MAX_SUBNORMAL); + /// v.push(bf16::ZERO); + /// v.push(bf16::NEG_ZERO); + /// v.push(bf16::NEG_ONE); + /// v.push(bf16::MIN_POSITIVE); + /// + /// v.sort_by(|a, b| a.total_cmp(&b)); + /// + /// assert!(v + /// .into_iter() + /// .zip( + /// [ + /// bf16::NEG_INFINITY, + /// bf16::NEG_ONE, + /// -bf16::MAX_SUBNORMAL, + /// bf16::NEG_ZERO, + /// bf16::ZERO, + /// bf16::MAX_SUBNORMAL, + /// bf16::MIN_POSITIVE, + /// bf16::ONE, + /// bf16::INFINITY, + /// bf16::NAN + /// ] + /// .iter() + /// ) + /// .all(|(a, b)| a.to_bits() == b.to_bits())); + /// ``` + // Implementation based on: https://doc.rust-lang.org/std/primitive.f32.html#method.total_cmp + #[inline] + #[must_use] + pub fn total_cmp(&self, other: &Self) -> Ordering { + let mut left = self.to_bits() as i16; + let mut right = other.to_bits() as i16; + left ^= (((left >> 15) as u16) >> 1) as i16; + right ^= (((right >> 15) as u16) >> 1) as i16; + left.cmp(&right) + } + + /// Alternate serialize adapter for serializing as a float. + /// + /// By default, [`struct@bf16`] serializes as a newtype of [`u16`]. This is an alternate serialize + /// implementation that serializes as an [`f32`] value. It is designed for use with + /// `serialize_with` serde attributes. Deserialization from `f32` values is already supported by + /// the default deserialize implementation. + /// + /// # Examples + /// + /// A demonstration on how to use this adapater: + /// + /// ``` + /// use serde::{Serialize, Deserialize}; + /// use half::bf16; + /// + /// #[derive(Serialize, Deserialize)] + /// struct MyStruct { + /// #[serde(serialize_with = "bf16::serialize_as_f32")] + /// value: bf16 // Will be serialized as f32 instead of u16 + /// } + /// ``` + #[cfg(feature = "serde")] + pub fn serialize_as_f32<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { + serializer.serialize_f32(self.to_f32()) + } + + /// Alternate serialize adapter for serializing as a string. + /// + /// By default, [`struct@bf16`] serializes as a newtype of [`u16`]. This is an alternate serialize + /// implementation that serializes as a string value. It is designed for use with + /// `serialize_with` serde attributes. Deserialization from string values is already supported + /// by the default deserialize implementation. + /// + /// # Examples + /// + /// A demonstration on how to use this adapater: + /// + /// ``` + /// use serde::{Serialize, Deserialize}; + /// use half::bf16; + /// + /// #[derive(Serialize, Deserialize)] + /// struct MyStruct { + /// #[serde(serialize_with = "bf16::serialize_as_string")] + /// value: bf16 // Will be serialized as a string instead of u16 + /// } + /// ``` + #[cfg(all(feature = "serde", feature = "alloc"))] + pub fn serialize_as_string<S: serde::Serializer>( + &self, + serializer: S, + ) -> Result<S::Ok, S::Error> { + serializer.serialize_str(&self.to_string()) + } + + /// Approximate number of [`struct@bf16`] significant digits in base 10 + pub const DIGITS: u32 = 2; + /// [`struct@bf16`] + /// [machine epsilon](https://en.wikipedia.org/wiki/Machine_epsilon) value + /// + /// This is the difference between 1.0 and the next largest representable number. 
+ pub const EPSILON: bf16 = bf16(0x3C00u16); + /// [`struct@bf16`] positive Infinity (+∞) + pub const INFINITY: bf16 = bf16(0x7F80u16); + /// Number of [`struct@bf16`] significant digits in base 2 + pub const MANTISSA_DIGITS: u32 = 8; + /// Largest finite [`struct@bf16`] value + pub const MAX: bf16 = bf16(0x7F7F); + /// Maximum possible [`struct@bf16`] power of 10 exponent + pub const MAX_10_EXP: i32 = 38; + /// Maximum possible [`struct@bf16`] power of 2 exponent + pub const MAX_EXP: i32 = 128; + /// Smallest finite [`struct@bf16`] value + pub const MIN: bf16 = bf16(0xFF7F); + /// Minimum possible normal [`struct@bf16`] power of 10 exponent + pub const MIN_10_EXP: i32 = -37; + /// One greater than the minimum possible normal [`struct@bf16`] power of 2 exponent + pub const MIN_EXP: i32 = -125; + /// Smallest positive normal [`struct@bf16`] value + pub const MIN_POSITIVE: bf16 = bf16(0x0080u16); + /// [`struct@bf16`] Not a Number (NaN) + pub const NAN: bf16 = bf16(0x7FC0u16); + /// [`struct@bf16`] negative infinity (-∞). + pub const NEG_INFINITY: bf16 = bf16(0xFF80u16); + /// The radix or base of the internal representation of [`struct@bf16`] + pub const RADIX: u32 = 2; + + /// Minimum positive subnormal [`struct@bf16`] value + pub const MIN_POSITIVE_SUBNORMAL: bf16 = bf16(0x0001u16); + /// Maximum subnormal [`struct@bf16`] value + pub const MAX_SUBNORMAL: bf16 = bf16(0x007Fu16); + + /// [`struct@bf16`] 1 + pub const ONE: bf16 = bf16(0x3F80u16); + /// [`struct@bf16`] 0 + pub const ZERO: bf16 = bf16(0x0000u16); + /// [`struct@bf16`] -0 + pub const NEG_ZERO: bf16 = bf16(0x8000u16); + /// [`struct@bf16`] -1 + pub const NEG_ONE: bf16 = bf16(0xBF80u16); + + /// [`struct@bf16`] Euler's number (ℯ) + pub const E: bf16 = bf16(0x402Eu16); + /// [`struct@bf16`] Archimedes' constant (π) + pub const PI: bf16 = bf16(0x4049u16); + /// [`struct@bf16`] 1/π + pub const FRAC_1_PI: bf16 = bf16(0x3EA3u16); + /// [`struct@bf16`] 1/√2 + pub const FRAC_1_SQRT_2: bf16 = bf16(0x3F35u16); + /// [`struct@bf16`] 2/π + pub const FRAC_2_PI: bf16 = bf16(0x3F23u16); + /// [`struct@bf16`] 2/√π + pub const FRAC_2_SQRT_PI: bf16 = bf16(0x3F90u16); + /// [`struct@bf16`] π/2 + pub const FRAC_PI_2: bf16 = bf16(0x3FC9u16); + /// [`struct@bf16`] π/3 + pub const FRAC_PI_3: bf16 = bf16(0x3F86u16); + /// [`struct@bf16`] π/4 + pub const FRAC_PI_4: bf16 = bf16(0x3F49u16); + /// [`struct@bf16`] π/6 + pub const FRAC_PI_6: bf16 = bf16(0x3F06u16); + /// [`struct@bf16`] π/8 + pub const FRAC_PI_8: bf16 = bf16(0x3EC9u16); + /// [`struct@bf16`] 𝗅𝗇 10 + pub const LN_10: bf16 = bf16(0x4013u16); + /// [`struct@bf16`] 𝗅𝗇 2 + pub const LN_2: bf16 = bf16(0x3F31u16); + /// [`struct@bf16`] 𝗅𝗈𝗀₁₀ℯ + pub const LOG10_E: bf16 = bf16(0x3EDEu16); + /// [`struct@bf16`] 𝗅𝗈𝗀₁₀2 + pub const LOG10_2: bf16 = bf16(0x3E9Au16); + /// [`struct@bf16`] 𝗅𝗈𝗀₂ℯ + pub const LOG2_E: bf16 = bf16(0x3FB9u16); + /// [`struct@bf16`] 𝗅𝗈𝗀₂10 + pub const LOG2_10: bf16 = bf16(0x4055u16); + /// [`struct@bf16`] √2 + pub const SQRT_2: bf16 = bf16(0x3FB5u16); +} + +impl From<bf16> for f32 { + #[inline] + fn from(x: bf16) -> f32 { + x.to_f32() + } +} + +impl From<bf16> for f64 { + #[inline] + fn from(x: bf16) -> f64 { + x.to_f64() + } +} + +impl From<i8> for bf16 { + #[inline] + fn from(x: i8) -> bf16 { + // Convert to f32, then to bf16 + bf16::from_f32(f32::from(x)) + } +} + +impl From<u8> for bf16 { + #[inline] + fn from(x: u8) -> bf16 { + // Convert to f32, then to f16 + bf16::from_f32(f32::from(x)) + } +} + +impl PartialEq for bf16 { + fn eq(&self, other: &bf16) -> bool { + if 
self.is_nan() || other.is_nan() { + false + } else { + (self.0 == other.0) || ((self.0 | other.0) & 0x7FFFu16 == 0) + } + } +} + +impl PartialOrd for bf16 { + fn partial_cmp(&self, other: &bf16) -> Option<Ordering> { + if self.is_nan() || other.is_nan() { + None + } else { + let neg = self.0 & 0x8000u16 != 0; + let other_neg = other.0 & 0x8000u16 != 0; + match (neg, other_neg) { + (false, false) => Some(self.0.cmp(&other.0)), + (false, true) => { + if (self.0 | other.0) & 0x7FFFu16 == 0 { + Some(Ordering::Equal) + } else { + Some(Ordering::Greater) + } + } + (true, false) => { + if (self.0 | other.0) & 0x7FFFu16 == 0 { + Some(Ordering::Equal) + } else { + Some(Ordering::Less) + } + } + (true, true) => Some(other.0.cmp(&self.0)), + } + } + } + + fn lt(&self, other: &bf16) -> bool { + if self.is_nan() || other.is_nan() { + false + } else { + let neg = self.0 & 0x8000u16 != 0; + let other_neg = other.0 & 0x8000u16 != 0; + match (neg, other_neg) { + (false, false) => self.0 < other.0, + (false, true) => false, + (true, false) => (self.0 | other.0) & 0x7FFFu16 != 0, + (true, true) => self.0 > other.0, + } + } + } + + fn le(&self, other: &bf16) -> bool { + if self.is_nan() || other.is_nan() { + false + } else { + let neg = self.0 & 0x8000u16 != 0; + let other_neg = other.0 & 0x8000u16 != 0; + match (neg, other_neg) { + (false, false) => self.0 <= other.0, + (false, true) => (self.0 | other.0) & 0x7FFFu16 == 0, + (true, false) => true, + (true, true) => self.0 >= other.0, + } + } + } + + fn gt(&self, other: &bf16) -> bool { + if self.is_nan() || other.is_nan() { + false + } else { + let neg = self.0 & 0x8000u16 != 0; + let other_neg = other.0 & 0x8000u16 != 0; + match (neg, other_neg) { + (false, false) => self.0 > other.0, + (false, true) => (self.0 | other.0) & 0x7FFFu16 != 0, + (true, false) => false, + (true, true) => self.0 < other.0, + } + } + } + + fn ge(&self, other: &bf16) -> bool { + if self.is_nan() || other.is_nan() { + false + } else { + let neg = self.0 & 0x8000u16 != 0; + let other_neg = other.0 & 0x8000u16 != 0; + match (neg, other_neg) { + (false, false) => self.0 >= other.0, + (false, true) => true, + (true, false) => (self.0 | other.0) & 0x7FFFu16 == 0, + (true, true) => self.0 <= other.0, + } + } + } +} + +#[cfg(not(target_arch = "spirv"))] +impl FromStr for bf16 { + type Err = ParseFloatError; + fn from_str(src: &str) -> Result<bf16, ParseFloatError> { + f32::from_str(src).map(bf16::from_f32) + } +} + +#[cfg(not(target_arch = "spirv"))] +impl Debug for bf16 { + fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> { + Debug::fmt(&self.to_f32(), f) + } +} + +#[cfg(not(target_arch = "spirv"))] +impl Display for bf16 { + fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> { + Display::fmt(&self.to_f32(), f) + } +} + +#[cfg(not(target_arch = "spirv"))] +impl LowerExp for bf16 { + fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> { + write!(f, "{:e}", self.to_f32()) + } +} + +#[cfg(not(target_arch = "spirv"))] +impl UpperExp for bf16 { + fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> { + write!(f, "{:E}", self.to_f32()) + } +} + +#[cfg(not(target_arch = "spirv"))] +impl Binary for bf16 { + fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> { + write!(f, "{:b}", self.0) + } +} + +#[cfg(not(target_arch = "spirv"))] +impl Octal for bf16 { + fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> { + write!(f, "{:o}", self.0) + } +} + +#[cfg(not(target_arch = "spirv"))] +impl LowerHex for bf16 { + fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), 
Error> { + write!(f, "{:x}", self.0) + } +} + +#[cfg(not(target_arch = "spirv"))] +impl UpperHex for bf16 { + fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> { + write!(f, "{:X}", self.0) + } +} + +impl Neg for bf16 { + type Output = Self; + + fn neg(self) -> Self::Output { + Self(self.0 ^ 0x8000) + } +} + +impl Neg for &bf16 { + type Output = <bf16 as Neg>::Output; + + #[inline] + fn neg(self) -> Self::Output { + Neg::neg(*self) + } +} + +impl Add for bf16 { + type Output = Self; + + fn add(self, rhs: Self) -> Self::Output { + Self::from_f32(Self::to_f32(self) + Self::to_f32(rhs)) + } +} + +impl Add<&bf16> for bf16 { + type Output = <bf16 as Add<bf16>>::Output; + + #[inline] + fn add(self, rhs: &bf16) -> Self::Output { + self.add(*rhs) + } +} + +impl Add<&bf16> for &bf16 { + type Output = <bf16 as Add<bf16>>::Output; + + #[inline] + fn add(self, rhs: &bf16) -> Self::Output { + (*self).add(*rhs) + } +} + +impl Add<bf16> for &bf16 { + type Output = <bf16 as Add<bf16>>::Output; + + #[inline] + fn add(self, rhs: bf16) -> Self::Output { + (*self).add(rhs) + } +} + +impl AddAssign for bf16 { + #[inline] + fn add_assign(&mut self, rhs: Self) { + *self = (*self).add(rhs); + } +} + +impl AddAssign<&bf16> for bf16 { + #[inline] + fn add_assign(&mut self, rhs: &bf16) { + *self = (*self).add(rhs); + } +} + +impl Sub for bf16 { + type Output = Self; + + fn sub(self, rhs: Self) -> Self::Output { + Self::from_f32(Self::to_f32(self) - Self::to_f32(rhs)) + } +} + +impl Sub<&bf16> for bf16 { + type Output = <bf16 as Sub<bf16>>::Output; + + #[inline] + fn sub(self, rhs: &bf16) -> Self::Output { + self.sub(*rhs) + } +} + +impl Sub<&bf16> for &bf16 { + type Output = <bf16 as Sub<bf16>>::Output; + + #[inline] + fn sub(self, rhs: &bf16) -> Self::Output { + (*self).sub(*rhs) + } +} + +impl Sub<bf16> for &bf16 { + type Output = <bf16 as Sub<bf16>>::Output; + + #[inline] + fn sub(self, rhs: bf16) -> Self::Output { + (*self).sub(rhs) + } +} + +impl SubAssign for bf16 { + #[inline] + fn sub_assign(&mut self, rhs: Self) { + *self = (*self).sub(rhs); + } +} + +impl SubAssign<&bf16> for bf16 { + #[inline] + fn sub_assign(&mut self, rhs: &bf16) { + *self = (*self).sub(rhs); + } +} + +impl Mul for bf16 { + type Output = Self; + + fn mul(self, rhs: Self) -> Self::Output { + Self::from_f32(Self::to_f32(self) * Self::to_f32(rhs)) + } +} + +impl Mul<&bf16> for bf16 { + type Output = <bf16 as Mul<bf16>>::Output; + + #[inline] + fn mul(self, rhs: &bf16) -> Self::Output { + self.mul(*rhs) + } +} + +impl Mul<&bf16> for &bf16 { + type Output = <bf16 as Mul<bf16>>::Output; + + #[inline] + fn mul(self, rhs: &bf16) -> Self::Output { + (*self).mul(*rhs) + } +} + +impl Mul<bf16> for &bf16 { + type Output = <bf16 as Mul<bf16>>::Output; + + #[inline] + fn mul(self, rhs: bf16) -> Self::Output { + (*self).mul(rhs) + } +} + +impl MulAssign for bf16 { + #[inline] + fn mul_assign(&mut self, rhs: Self) { + *self = (*self).mul(rhs); + } +} + +impl MulAssign<&bf16> for bf16 { + #[inline] + fn mul_assign(&mut self, rhs: &bf16) { + *self = (*self).mul(rhs); + } +} + +impl Div for bf16 { + type Output = Self; + + fn div(self, rhs: Self) -> Self::Output { + Self::from_f32(Self::to_f32(self) / Self::to_f32(rhs)) + } +} + +impl Div<&bf16> for bf16 { + type Output = <bf16 as Div<bf16>>::Output; + + #[inline] + fn div(self, rhs: &bf16) -> Self::Output { + self.div(*rhs) + } +} + +impl Div<&bf16> for &bf16 { + type Output = <bf16 as Div<bf16>>::Output; + + #[inline] + fn div(self, rhs: &bf16) -> Self::Output { + (*self).div(*rhs) + } +} + 
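+// Illustrative note, not part of the upstream implementation: each arithmetic
+// operator in this file (Add, Sub, Mul, Div, Rem) follows the same pattern of
+// widening both operands to f32, computing in f32, and rounding back to bf16.
+// For example, with exactly representable inputs:
+//
+//     let x = bf16::from_f32(1.5);
+//     let y = bf16::from_f32(0.25);
+//     assert_eq!(x / y, bf16::from_f32(6.0));
+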
+impl Div<bf16> for &bf16 { + type Output = <bf16 as Div<bf16>>::Output; + + #[inline] + fn div(self, rhs: bf16) -> Self::Output { + (*self).div(rhs) + } +} + +impl DivAssign for bf16 { + #[inline] + fn div_assign(&mut self, rhs: Self) { + *self = (*self).div(rhs); + } +} + +impl DivAssign<&bf16> for bf16 { + #[inline] + fn div_assign(&mut self, rhs: &bf16) { + *self = (*self).div(rhs); + } +} + +impl Rem for bf16 { + type Output = Self; + + fn rem(self, rhs: Self) -> Self::Output { + Self::from_f32(Self::to_f32(self) % Self::to_f32(rhs)) + } +} + +impl Rem<&bf16> for bf16 { + type Output = <bf16 as Rem<bf16>>::Output; + + #[inline] + fn rem(self, rhs: &bf16) -> Self::Output { + self.rem(*rhs) + } +} + +impl Rem<&bf16> for &bf16 { + type Output = <bf16 as Rem<bf16>>::Output; + + #[inline] + fn rem(self, rhs: &bf16) -> Self::Output { + (*self).rem(*rhs) + } +} + +impl Rem<bf16> for &bf16 { + type Output = <bf16 as Rem<bf16>>::Output; + + #[inline] + fn rem(self, rhs: bf16) -> Self::Output { + (*self).rem(rhs) + } +} + +impl RemAssign for bf16 { + #[inline] + fn rem_assign(&mut self, rhs: Self) { + *self = (*self).rem(rhs); + } +} + +impl RemAssign<&bf16> for bf16 { + #[inline] + fn rem_assign(&mut self, rhs: &bf16) { + *self = (*self).rem(rhs); + } +} + +impl Product for bf16 { + #[inline] + fn product<I: Iterator<Item = Self>>(iter: I) -> Self { + bf16::from_f32(iter.map(|f| f.to_f32()).product()) + } +} + +impl<'a> Product<&'a bf16> for bf16 { + #[inline] + fn product<I: Iterator<Item = &'a bf16>>(iter: I) -> Self { + bf16::from_f32(iter.map(|f| f.to_f32()).product()) + } +} + +impl Sum for bf16 { + #[inline] + fn sum<I: Iterator<Item = Self>>(iter: I) -> Self { + bf16::from_f32(iter.map(|f| f.to_f32()).sum()) + } +} + +impl<'a> Sum<&'a bf16> for bf16 { + #[inline] + fn sum<I: Iterator<Item = &'a bf16>>(iter: I) -> Self { + bf16::from_f32(iter.map(|f| f.to_f32()).sum()) + } +} + +#[cfg(feature = "serde")] +struct Visitor; + +#[cfg(feature = "serde")] +impl<'de> Deserialize<'de> for bf16 { + fn deserialize<D>(deserializer: D) -> Result<bf16, D::Error> + where + D: serde::de::Deserializer<'de>, + { + deserializer.deserialize_newtype_struct("bf16", Visitor) + } +} + +#[cfg(feature = "serde")] +impl<'de> serde::de::Visitor<'de> for Visitor { + type Value = bf16; + + fn expecting(&self, formatter: &mut core::fmt::Formatter) -> core::fmt::Result { + write!(formatter, "tuple struct bf16") + } + + fn visit_newtype_struct<D>(self, deserializer: D) -> Result<Self::Value, D::Error> + where + D: serde::Deserializer<'de>, + { + Ok(bf16(<u16 as Deserialize>::deserialize(deserializer)?)) + } + + fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> + where + E: serde::de::Error, + { + v.parse().map_err(|_| { + serde::de::Error::invalid_value(serde::de::Unexpected::Str(v), &"a float string") + }) + } + + fn visit_f32<E>(self, v: f32) -> Result<Self::Value, E> + where + E: serde::de::Error, + { + Ok(bf16::from_f32(v)) + } + + fn visit_f64<E>(self, v: f64) -> Result<Self::Value, E> + where + E: serde::de::Error, + { + Ok(bf16::from_f64(v)) + } +} + +#[allow( + clippy::cognitive_complexity, + clippy::float_cmp, + clippy::neg_cmp_op_on_partial_ord +)] +#[cfg(test)] +mod test { + use super::*; + #[allow(unused_imports)] + use core::cmp::Ordering; + #[cfg(feature = "num-traits")] + use num_traits::{AsPrimitive, FromBytes, FromPrimitive, ToBytes, ToPrimitive}; + use quickcheck_macros::quickcheck; + + #[cfg(feature = "num-traits")] + #[test] + fn as_primitive() { + let two = bf16::from_f32(2.0); + 
assert_eq!(<i32 as AsPrimitive<bf16>>::as_(2), two); + assert_eq!(<bf16 as AsPrimitive<i32>>::as_(two), 2); + + assert_eq!(<f32 as AsPrimitive<bf16>>::as_(2.0), two); + assert_eq!(<bf16 as AsPrimitive<f32>>::as_(two), 2.0); + + assert_eq!(<f64 as AsPrimitive<bf16>>::as_(2.0), two); + assert_eq!(<bf16 as AsPrimitive<f64>>::as_(two), 2.0); + } + + #[cfg(feature = "num-traits")] + #[test] + fn to_primitive() { + let two = bf16::from_f32(2.0); + assert_eq!(ToPrimitive::to_i32(&two).unwrap(), 2i32); + assert_eq!(ToPrimitive::to_f32(&two).unwrap(), 2.0f32); + assert_eq!(ToPrimitive::to_f64(&two).unwrap(), 2.0f64); + } + + #[cfg(feature = "num-traits")] + #[test] + fn from_primitive() { + let two = bf16::from_f32(2.0); + assert_eq!(<bf16 as FromPrimitive>::from_i32(2).unwrap(), two); + assert_eq!(<bf16 as FromPrimitive>::from_f32(2.0).unwrap(), two); + assert_eq!(<bf16 as FromPrimitive>::from_f64(2.0).unwrap(), two); + } + + #[cfg(feature = "num-traits")] + #[test] + fn to_and_from_bytes() { + let two = bf16::from_f32(2.0); + assert_eq!(<bf16 as ToBytes>::to_le_bytes(&two), [0, 64]); + assert_eq!(<bf16 as FromBytes>::from_le_bytes(&[0, 64]), two); + assert_eq!(<bf16 as ToBytes>::to_be_bytes(&two), [64, 0]); + assert_eq!(<bf16 as FromBytes>::from_be_bytes(&[64, 0]), two); + } + + #[test] + fn test_bf16_consts_from_f32() { + let one = bf16::from_f32(1.0); + let zero = bf16::from_f32(0.0); + let neg_zero = bf16::from_f32(-0.0); + let neg_one = bf16::from_f32(-1.0); + let inf = bf16::from_f32(core::f32::INFINITY); + let neg_inf = bf16::from_f32(core::f32::NEG_INFINITY); + let nan = bf16::from_f32(core::f32::NAN); + + assert_eq!(bf16::ONE, one); + assert_eq!(bf16::ZERO, zero); + assert!(zero.is_sign_positive()); + assert_eq!(bf16::NEG_ZERO, neg_zero); + assert!(neg_zero.is_sign_negative()); + assert_eq!(bf16::NEG_ONE, neg_one); + assert!(neg_one.is_sign_negative()); + assert_eq!(bf16::INFINITY, inf); + assert_eq!(bf16::NEG_INFINITY, neg_inf); + assert!(nan.is_nan()); + assert!(bf16::NAN.is_nan()); + + let e = bf16::from_f32(core::f32::consts::E); + let pi = bf16::from_f32(core::f32::consts::PI); + let frac_1_pi = bf16::from_f32(core::f32::consts::FRAC_1_PI); + let frac_1_sqrt_2 = bf16::from_f32(core::f32::consts::FRAC_1_SQRT_2); + let frac_2_pi = bf16::from_f32(core::f32::consts::FRAC_2_PI); + let frac_2_sqrt_pi = bf16::from_f32(core::f32::consts::FRAC_2_SQRT_PI); + let frac_pi_2 = bf16::from_f32(core::f32::consts::FRAC_PI_2); + let frac_pi_3 = bf16::from_f32(core::f32::consts::FRAC_PI_3); + let frac_pi_4 = bf16::from_f32(core::f32::consts::FRAC_PI_4); + let frac_pi_6 = bf16::from_f32(core::f32::consts::FRAC_PI_6); + let frac_pi_8 = bf16::from_f32(core::f32::consts::FRAC_PI_8); + let ln_10 = bf16::from_f32(core::f32::consts::LN_10); + let ln_2 = bf16::from_f32(core::f32::consts::LN_2); + let log10_e = bf16::from_f32(core::f32::consts::LOG10_E); + // core::f32::consts::LOG10_2 requires rustc 1.43.0 + let log10_2 = bf16::from_f32(2f32.log10()); + let log2_e = bf16::from_f32(core::f32::consts::LOG2_E); + // core::f32::consts::LOG2_10 requires rustc 1.43.0 + let log2_10 = bf16::from_f32(10f32.log2()); + let sqrt_2 = bf16::from_f32(core::f32::consts::SQRT_2); + + assert_eq!(bf16::E, e); + assert_eq!(bf16::PI, pi); + assert_eq!(bf16::FRAC_1_PI, frac_1_pi); + assert_eq!(bf16::FRAC_1_SQRT_2, frac_1_sqrt_2); + assert_eq!(bf16::FRAC_2_PI, frac_2_pi); + assert_eq!(bf16::FRAC_2_SQRT_PI, frac_2_sqrt_pi); + assert_eq!(bf16::FRAC_PI_2, frac_pi_2); + assert_eq!(bf16::FRAC_PI_3, frac_pi_3); + 
assert_eq!(bf16::FRAC_PI_4, frac_pi_4); + assert_eq!(bf16::FRAC_PI_6, frac_pi_6); + assert_eq!(bf16::FRAC_PI_8, frac_pi_8); + assert_eq!(bf16::LN_10, ln_10); + assert_eq!(bf16::LN_2, ln_2); + assert_eq!(bf16::LOG10_E, log10_e); + assert_eq!(bf16::LOG10_2, log10_2); + assert_eq!(bf16::LOG2_E, log2_e); + assert_eq!(bf16::LOG2_10, log2_10); + assert_eq!(bf16::SQRT_2, sqrt_2); + } + + #[test] + fn test_bf16_consts_from_f64() { + let one = bf16::from_f64(1.0); + let zero = bf16::from_f64(0.0); + let neg_zero = bf16::from_f64(-0.0); + let inf = bf16::from_f64(core::f64::INFINITY); + let neg_inf = bf16::from_f64(core::f64::NEG_INFINITY); + let nan = bf16::from_f64(core::f64::NAN); + + assert_eq!(bf16::ONE, one); + assert_eq!(bf16::ZERO, zero); + assert_eq!(bf16::NEG_ZERO, neg_zero); + assert_eq!(bf16::INFINITY, inf); + assert_eq!(bf16::NEG_INFINITY, neg_inf); + assert!(nan.is_nan()); + assert!(bf16::NAN.is_nan()); + + let e = bf16::from_f64(core::f64::consts::E); + let pi = bf16::from_f64(core::f64::consts::PI); + let frac_1_pi = bf16::from_f64(core::f64::consts::FRAC_1_PI); + let frac_1_sqrt_2 = bf16::from_f64(core::f64::consts::FRAC_1_SQRT_2); + let frac_2_pi = bf16::from_f64(core::f64::consts::FRAC_2_PI); + let frac_2_sqrt_pi = bf16::from_f64(core::f64::consts::FRAC_2_SQRT_PI); + let frac_pi_2 = bf16::from_f64(core::f64::consts::FRAC_PI_2); + let frac_pi_3 = bf16::from_f64(core::f64::consts::FRAC_PI_3); + let frac_pi_4 = bf16::from_f64(core::f64::consts::FRAC_PI_4); + let frac_pi_6 = bf16::from_f64(core::f64::consts::FRAC_PI_6); + let frac_pi_8 = bf16::from_f64(core::f64::consts::FRAC_PI_8); + let ln_10 = bf16::from_f64(core::f64::consts::LN_10); + let ln_2 = bf16::from_f64(core::f64::consts::LN_2); + let log10_e = bf16::from_f64(core::f64::consts::LOG10_E); + // core::f64::consts::LOG10_2 requires rustc 1.43.0 + let log10_2 = bf16::from_f64(2f64.log10()); + let log2_e = bf16::from_f64(core::f64::consts::LOG2_E); + // core::f64::consts::LOG2_10 requires rustc 1.43.0 + let log2_10 = bf16::from_f64(10f64.log2()); + let sqrt_2 = bf16::from_f64(core::f64::consts::SQRT_2); + + assert_eq!(bf16::E, e); + assert_eq!(bf16::PI, pi); + assert_eq!(bf16::FRAC_1_PI, frac_1_pi); + assert_eq!(bf16::FRAC_1_SQRT_2, frac_1_sqrt_2); + assert_eq!(bf16::FRAC_2_PI, frac_2_pi); + assert_eq!(bf16::FRAC_2_SQRT_PI, frac_2_sqrt_pi); + assert_eq!(bf16::FRAC_PI_2, frac_pi_2); + assert_eq!(bf16::FRAC_PI_3, frac_pi_3); + assert_eq!(bf16::FRAC_PI_4, frac_pi_4); + assert_eq!(bf16::FRAC_PI_6, frac_pi_6); + assert_eq!(bf16::FRAC_PI_8, frac_pi_8); + assert_eq!(bf16::LN_10, ln_10); + assert_eq!(bf16::LN_2, ln_2); + assert_eq!(bf16::LOG10_E, log10_e); + assert_eq!(bf16::LOG10_2, log10_2); + assert_eq!(bf16::LOG2_E, log2_e); + assert_eq!(bf16::LOG2_10, log2_10); + assert_eq!(bf16::SQRT_2, sqrt_2); + } + + #[test] + fn test_nan_conversion_to_smaller() { + let nan64 = f64::from_bits(0x7FF0_0000_0000_0001u64); + let neg_nan64 = f64::from_bits(0xFFF0_0000_0000_0001u64); + let nan32 = f32::from_bits(0x7F80_0001u32); + let neg_nan32 = f32::from_bits(0xFF80_0001u32); + let nan32_from_64 = nan64 as f32; + let neg_nan32_from_64 = neg_nan64 as f32; + let nan16_from_64 = bf16::from_f64(nan64); + let neg_nan16_from_64 = bf16::from_f64(neg_nan64); + let nan16_from_32 = bf16::from_f32(nan32); + let neg_nan16_from_32 = bf16::from_f32(neg_nan32); + + assert!(nan64.is_nan() && nan64.is_sign_positive()); + assert!(neg_nan64.is_nan() && neg_nan64.is_sign_negative()); + assert!(nan32.is_nan() && nan32.is_sign_positive()); + assert!(neg_nan32.is_nan() 
&& neg_nan32.is_sign_negative()); + + // f32/f64 NaN conversion sign is non-deterministic: https://github.com/VoidStarKat/half-rs/issues/103 + assert!(neg_nan32_from_64.is_nan()); + assert!(nan32_from_64.is_nan()); + assert!(nan16_from_64.is_nan()); + assert!(neg_nan16_from_64.is_nan()); + assert!(nan16_from_32.is_nan()); + assert!(neg_nan16_from_32.is_nan()); + } + + #[test] + fn test_nan_conversion_to_larger() { + let nan16 = bf16::from_bits(0x7F81u16); + let neg_nan16 = bf16::from_bits(0xFF81u16); + let nan32 = f32::from_bits(0x7F80_0001u32); + let neg_nan32 = f32::from_bits(0xFF80_0001u32); + let nan32_from_16 = f32::from(nan16); + let neg_nan32_from_16 = f32::from(neg_nan16); + let nan64_from_16 = f64::from(nan16); + let neg_nan64_from_16 = f64::from(neg_nan16); + let nan64_from_32 = f64::from(nan32); + let neg_nan64_from_32 = f64::from(neg_nan32); + + assert!(nan16.is_nan() && nan16.is_sign_positive()); + assert!(neg_nan16.is_nan() && neg_nan16.is_sign_negative()); + assert!(nan32.is_nan() && nan32.is_sign_positive()); + assert!(neg_nan32.is_nan() && neg_nan32.is_sign_negative()); + + // // f32/f64 NaN conversion sign is non-deterministic: https://github.com/VoidStarKat/half-rs/issues/103 + assert!(nan32_from_16.is_nan()); + assert!(neg_nan32_from_16.is_nan()); + assert!(nan64_from_16.is_nan()); + assert!(neg_nan64_from_16.is_nan()); + assert!(nan64_from_32.is_nan()); + assert!(neg_nan64_from_32.is_nan()); + } + + #[test] + fn test_bf16_to_f32() { + let f = bf16::from_f32(7.0); + assert_eq!(f.to_f32(), 7.0f32); + + // 7.1 is NOT exactly representable in 16-bit, it's rounded + let f = bf16::from_f32(7.1); + let diff = (f.to_f32() - 7.1f32).abs(); + // diff must be <= 4 * EPSILON, as 7 has two more significant bits than 1 + assert!(diff <= 4.0 * bf16::EPSILON.to_f32()); + + let tiny32 = f32::from_bits(0x0001_0000u32); + assert_eq!(bf16::from_bits(0x0001).to_f32(), tiny32); + assert_eq!(bf16::from_bits(0x0005).to_f32(), 5.0 * tiny32); + + assert_eq!(bf16::from_bits(0x0001), bf16::from_f32(tiny32)); + assert_eq!(bf16::from_bits(0x0005), bf16::from_f32(5.0 * tiny32)); + } + + #[test] + #[cfg_attr(miri, ignore)] + fn test_bf16_to_f64() { + let f = bf16::from_f64(7.0); + assert_eq!(f.to_f64(), 7.0f64); + + // 7.1 is NOT exactly representable in 16-bit, it's rounded + let f = bf16::from_f64(7.1); + let diff = (f.to_f64() - 7.1f64).abs(); + // diff must be <= 4 * EPSILON, as 7 has two more significant bits than 1 + assert!(diff <= 4.0 * bf16::EPSILON.to_f64()); + + let tiny64 = 2.0f64.powi(-133); + assert_eq!(bf16::from_bits(0x0001).to_f64(), tiny64); + assert_eq!(bf16::from_bits(0x0005).to_f64(), 5.0 * tiny64); + + assert_eq!(bf16::from_bits(0x0001), bf16::from_f64(tiny64)); + assert_eq!(bf16::from_bits(0x0005), bf16::from_f64(5.0 * tiny64)); + } + + #[test] + fn test_comparisons() { + let zero = bf16::from_f64(0.0); + let one = bf16::from_f64(1.0); + let neg_zero = bf16::from_f64(-0.0); + let neg_one = bf16::from_f64(-1.0); + + assert_eq!(zero.partial_cmp(&neg_zero), Some(Ordering::Equal)); + assert_eq!(neg_zero.partial_cmp(&zero), Some(Ordering::Equal)); + assert!(zero == neg_zero); + assert!(neg_zero == zero); + assert!(!(zero != neg_zero)); + assert!(!(neg_zero != zero)); + assert!(!(zero < neg_zero)); + assert!(!(neg_zero < zero)); + assert!(zero <= neg_zero); + assert!(neg_zero <= zero); + assert!(!(zero > neg_zero)); + assert!(!(neg_zero > zero)); + assert!(zero >= neg_zero); + assert!(neg_zero >= zero); + + assert_eq!(one.partial_cmp(&neg_zero), Some(Ordering::Greater)); + 
assert_eq!(neg_zero.partial_cmp(&one), Some(Ordering::Less)); + assert!(!(one == neg_zero)); + assert!(!(neg_zero == one)); + assert!(one != neg_zero); + assert!(neg_zero != one); + assert!(!(one < neg_zero)); + assert!(neg_zero < one); + assert!(!(one <= neg_zero)); + assert!(neg_zero <= one); + assert!(one > neg_zero); + assert!(!(neg_zero > one)); + assert!(one >= neg_zero); + assert!(!(neg_zero >= one)); + + assert_eq!(one.partial_cmp(&neg_one), Some(Ordering::Greater)); + assert_eq!(neg_one.partial_cmp(&one), Some(Ordering::Less)); + assert!(!(one == neg_one)); + assert!(!(neg_one == one)); + assert!(one != neg_one); + assert!(neg_one != one); + assert!(!(one < neg_one)); + assert!(neg_one < one); + assert!(!(one <= neg_one)); + assert!(neg_one <= one); + assert!(one > neg_one); + assert!(!(neg_one > one)); + assert!(one >= neg_one); + assert!(!(neg_one >= one)); + } + + #[test] + #[allow(clippy::erasing_op, clippy::identity_op)] + #[cfg_attr(miri, ignore)] + fn round_to_even_f32() { + // smallest positive subnormal = 0b0.0000_001 * 2^-126 = 2^-133 + let min_sub = bf16::from_bits(1); + let min_sub_f = (-133f32).exp2(); + assert_eq!(bf16::from_f32(min_sub_f).to_bits(), min_sub.to_bits()); + assert_eq!(f32::from(min_sub).to_bits(), min_sub_f.to_bits()); + + // 0.0000000_011111 rounded to 0.0000000 (< tie, no rounding) + // 0.0000000_100000 rounded to 0.0000000 (tie and even, remains at even) + // 0.0000000_100001 rounded to 0.0000001 (> tie, rounds up) + assert_eq!( + bf16::from_f32(min_sub_f * 0.49).to_bits(), + min_sub.to_bits() * 0 + ); + assert_eq!( + bf16::from_f32(min_sub_f * 0.50).to_bits(), + min_sub.to_bits() * 0 + ); + assert_eq!( + bf16::from_f32(min_sub_f * 0.51).to_bits(), + min_sub.to_bits() * 1 + ); + + // 0.0000001_011111 rounded to 0.0000001 (< tie, no rounding) + // 0.0000001_100000 rounded to 0.0000010 (tie and odd, rounds up to even) + // 0.0000001_100001 rounded to 0.0000010 (> tie, rounds up) + assert_eq!( + bf16::from_f32(min_sub_f * 1.49).to_bits(), + min_sub.to_bits() * 1 + ); + assert_eq!( + bf16::from_f32(min_sub_f * 1.50).to_bits(), + min_sub.to_bits() * 2 + ); + assert_eq!( + bf16::from_f32(min_sub_f * 1.51).to_bits(), + min_sub.to_bits() * 2 + ); + + // 0.0000010_011111 rounded to 0.0000010 (< tie, no rounding) + // 0.0000010_100000 rounded to 0.0000010 (tie and even, remains at even) + // 0.0000010_100001 rounded to 0.0000011 (> tie, rounds up) + assert_eq!( + bf16::from_f32(min_sub_f * 2.49).to_bits(), + min_sub.to_bits() * 2 + ); + assert_eq!( + bf16::from_f32(min_sub_f * 2.50).to_bits(), + min_sub.to_bits() * 2 + ); + assert_eq!( + bf16::from_f32(min_sub_f * 2.51).to_bits(), + min_sub.to_bits() * 3 + ); + + assert_eq!( + bf16::from_f32(250.49f32).to_bits(), + bf16::from_f32(250.0).to_bits() + ); + assert_eq!( + bf16::from_f32(250.50f32).to_bits(), + bf16::from_f32(250.0).to_bits() + ); + assert_eq!( + bf16::from_f32(250.51f32).to_bits(), + bf16::from_f32(251.0).to_bits() + ); + assert_eq!( + bf16::from_f32(251.49f32).to_bits(), + bf16::from_f32(251.0).to_bits() + ); + assert_eq!( + bf16::from_f32(251.50f32).to_bits(), + bf16::from_f32(252.0).to_bits() + ); + assert_eq!( + bf16::from_f32(251.51f32).to_bits(), + bf16::from_f32(252.0).to_bits() + ); + assert_eq!( + bf16::from_f32(252.49f32).to_bits(), + bf16::from_f32(252.0).to_bits() + ); + assert_eq!( + bf16::from_f32(252.50f32).to_bits(), + bf16::from_f32(252.0).to_bits() + ); + assert_eq!( + bf16::from_f32(252.51f32).to_bits(), + bf16::from_f32(253.0).to_bits() + ); + } + + #[test] + 
#[allow(clippy::erasing_op, clippy::identity_op)] + #[cfg_attr(miri, ignore)] + fn round_to_even_f64() { + // smallest positive subnormal = 0b0.0000_001 * 2^-126 = 2^-133 + let min_sub = bf16::from_bits(1); + let min_sub_f = (-133f64).exp2(); + assert_eq!(bf16::from_f64(min_sub_f).to_bits(), min_sub.to_bits()); + assert_eq!(f64::from(min_sub).to_bits(), min_sub_f.to_bits()); + + // 0.0000000_011111 rounded to 0.0000000 (< tie, no rounding) + // 0.0000000_100000 rounded to 0.0000000 (tie and even, remains at even) + // 0.0000000_100001 rounded to 0.0000001 (> tie, rounds up) + assert_eq!( + bf16::from_f64(min_sub_f * 0.49).to_bits(), + min_sub.to_bits() * 0 + ); + assert_eq!( + bf16::from_f64(min_sub_f * 0.50).to_bits(), + min_sub.to_bits() * 0 + ); + assert_eq!( + bf16::from_f64(min_sub_f * 0.51).to_bits(), + min_sub.to_bits() * 1 + ); + + // 0.0000001_011111 rounded to 0.0000001 (< tie, no rounding) + // 0.0000001_100000 rounded to 0.0000010 (tie and odd, rounds up to even) + // 0.0000001_100001 rounded to 0.0000010 (> tie, rounds up) + assert_eq!( + bf16::from_f64(min_sub_f * 1.49).to_bits(), + min_sub.to_bits() * 1 + ); + assert_eq!( + bf16::from_f64(min_sub_f * 1.50).to_bits(), + min_sub.to_bits() * 2 + ); + assert_eq!( + bf16::from_f64(min_sub_f * 1.51).to_bits(), + min_sub.to_bits() * 2 + ); + + // 0.0000010_011111 rounded to 0.0000010 (< tie, no rounding) + // 0.0000010_100000 rounded to 0.0000010 (tie and even, remains at even) + // 0.0000010_100001 rounded to 0.0000011 (> tie, rounds up) + assert_eq!( + bf16::from_f64(min_sub_f * 2.49).to_bits(), + min_sub.to_bits() * 2 + ); + assert_eq!( + bf16::from_f64(min_sub_f * 2.50).to_bits(), + min_sub.to_bits() * 2 + ); + assert_eq!( + bf16::from_f64(min_sub_f * 2.51).to_bits(), + min_sub.to_bits() * 3 + ); + + assert_eq!( + bf16::from_f64(250.49f64).to_bits(), + bf16::from_f64(250.0).to_bits() + ); + assert_eq!( + bf16::from_f64(250.50f64).to_bits(), + bf16::from_f64(250.0).to_bits() + ); + assert_eq!( + bf16::from_f64(250.51f64).to_bits(), + bf16::from_f64(251.0).to_bits() + ); + assert_eq!( + bf16::from_f64(251.49f64).to_bits(), + bf16::from_f64(251.0).to_bits() + ); + assert_eq!( + bf16::from_f64(251.50f64).to_bits(), + bf16::from_f64(252.0).to_bits() + ); + assert_eq!( + bf16::from_f64(251.51f64).to_bits(), + bf16::from_f64(252.0).to_bits() + ); + assert_eq!( + bf16::from_f64(252.49f64).to_bits(), + bf16::from_f64(252.0).to_bits() + ); + assert_eq!( + bf16::from_f64(252.50f64).to_bits(), + bf16::from_f64(252.0).to_bits() + ); + assert_eq!( + bf16::from_f64(252.51f64).to_bits(), + bf16::from_f64(253.0).to_bits() + ); + } + + #[cfg(feature = "std")] + #[test] + fn formatting() { + let f = bf16::from_f32(0.1152344); + + assert_eq!(format!("{:.3}", f), "0.115"); + assert_eq!(format!("{:.4}", f), "0.1152"); + assert_eq!(format!("{:+.4}", f), "+0.1152"); + assert_eq!(format!("{:>+10.4}", f), " +0.1152"); + + assert_eq!(format!("{:.3?}", f), "0.115"); + assert_eq!(format!("{:.4?}", f), "0.1152"); + assert_eq!(format!("{:+.4?}", f), "+0.1152"); + assert_eq!(format!("{:>+10.4?}", f), " +0.1152"); + } + + impl quickcheck::Arbitrary for bf16 { + fn arbitrary(g: &mut quickcheck::Gen) -> Self { + bf16(u16::arbitrary(g)) + } + } + + #[quickcheck] + fn qc_roundtrip_bf16_f32_is_identity(f: bf16) -> bool { + let roundtrip = bf16::from_f32(f.to_f32()); + if f.is_nan() { + roundtrip.is_nan() && f.is_sign_negative() == roundtrip.is_sign_negative() + } else { + f.0 == roundtrip.0 + } + } + + #[quickcheck] + fn qc_roundtrip_bf16_f64_is_identity(f: bf16) 
-> bool { + let roundtrip = bf16::from_f64(f.to_f64()); + if f.is_nan() { + roundtrip.is_nan() && f.is_sign_negative() == roundtrip.is_sign_negative() + } else { + f.0 == roundtrip.0 + } + } + + #[test] + fn test_max() { + let a = bf16::from_f32(0.0); + let b = bf16::from_f32(42.0); + assert_eq!(a.max(b), b); + + let a = bf16::from_f32(42.0); + let b = bf16::from_f32(0.0); + assert_eq!(a.max(b), a); + + let a = bf16::NAN; + let b = bf16::from_f32(42.0); + assert_eq!(a.max(b), b); + + let a = bf16::from_f32(42.0); + let b = bf16::NAN; + assert_eq!(a.max(b), a); + + let a = bf16::NAN; + let b = bf16::NAN; + assert!(a.max(b).is_nan()); + } + + #[test] + fn test_min() { + let a = bf16::from_f32(0.0); + let b = bf16::from_f32(42.0); + assert_eq!(a.min(b), a); + + let a = bf16::from_f32(42.0); + let b = bf16::from_f32(0.0); + assert_eq!(a.min(b), b); + + let a = bf16::NAN; + let b = bf16::from_f32(42.0); + assert_eq!(a.min(b), b); + + let a = bf16::from_f32(42.0); + let b = bf16::NAN; + assert_eq!(a.min(b), a); + + let a = bf16::NAN; + let b = bf16::NAN; + assert!(a.min(b).is_nan()); + } +} diff --git a/vendor/half/src/bfloat/convert.rs b/vendor/half/src/bfloat/convert.rs new file mode 100644 index 00000000..9de8a016 --- /dev/null +++ b/vendor/half/src/bfloat/convert.rs @@ -0,0 +1,146 @@ +use crate::leading_zeros::leading_zeros_u16; +use zerocopy::transmute; + +#[inline] +pub(crate) const fn f32_to_bf16(value: f32) -> u16 { + // TODO: Replace transmute with to_bits() once to_bits is const-stabilized + // Convert to raw bytes + let x: u32 = transmute!(value); + + // check for NaN + if x & 0x7FFF_FFFFu32 > 0x7F80_0000u32 { + // Keep high part of current mantissa but also set most significiant mantissa bit + return ((x >> 16) | 0x0040u32) as u16; + } + + // round and shift + let round_bit = 0x0000_8000u32; + if (x & round_bit) != 0 && (x & (3 * round_bit - 1)) != 0 { + (x >> 16) as u16 + 1 + } else { + (x >> 16) as u16 + } +} + +#[inline] +pub(crate) const fn f64_to_bf16(value: f64) -> u16 { + // TODO: Replace transmute with to_bits() once to_bits is const-stabilized + // Convert to raw bytes, truncating the last 32-bits of mantissa; that precision will always + // be lost on half-precision. + let val: u64 = transmute!(value); + let x = (val >> 32) as u32; + + // Extract IEEE754 components + let sign = x & 0x8000_0000u32; + let exp = x & 0x7FF0_0000u32; + let man = x & 0x000F_FFFFu32; + + // Check for all exponent bits being set, which is Infinity or NaN + if exp == 0x7FF0_0000u32 { + // Set mantissa MSB for NaN (and also keep shifted mantissa bits). + // We also have to check the last 32 bits. 
+ let nan_bit = if man == 0 && (val as u32 == 0) { + 0 + } else { + 0x0040u32 + }; + return ((sign >> 16) | 0x7F80u32 | nan_bit | (man >> 13)) as u16; + } + + // The number is normalized, start assembling half precision version + let half_sign = sign >> 16; + // Unbias the exponent, then bias for bfloat16 precision + let unbiased_exp = ((exp >> 20) as i64) - 1023; + let half_exp = unbiased_exp + 127; + + // Check for exponent overflow, return +infinity + if half_exp >= 0xFF { + return (half_sign | 0x7F80u32) as u16; + } + + // Check for underflow + if half_exp <= 0 { + // Check mantissa for what we can do + if 7 - half_exp > 21 { + // No rounding possibility, so this is a full underflow, return signed zero + return half_sign as u16; + } + // Don't forget about hidden leading mantissa bit when assembling mantissa + let man = man | 0x0010_0000u32; + let mut half_man = man >> (14 - half_exp); + // Check for rounding + let round_bit = 1 << (13 - half_exp); + if (man & round_bit) != 0 && (man & (3 * round_bit - 1)) != 0 { + half_man += 1; + } + // No exponent for subnormals + return (half_sign | half_man) as u16; + } + + // Rebias the exponent + let half_exp = (half_exp as u32) << 7; + let half_man = man >> 13; + // Check for rounding + let round_bit = 0x0000_1000u32; + if (man & round_bit) != 0 && (man & (3 * round_bit - 1)) != 0 { + // Round it + ((half_sign | half_exp | half_man) + 1) as u16 + } else { + (half_sign | half_exp | half_man) as u16 + } +} + +#[inline] +pub(crate) const fn bf16_to_f32(i: u16) -> f32 { + // TODO: Replace transmute with from_bits() once from_bits is const-stabilized + // If NaN, keep current mantissa but also set most significiant mantissa bit + if i & 0x7FFFu16 > 0x7F80u16 { + transmute!((i as u32 | 0x0040u32) << 16) + } else { + transmute!((i as u32) << 16) + } +} + +#[inline] +pub(crate) const fn bf16_to_f64(i: u16) -> f64 { + // TODO: Replace transmute with from_bits() once from_bits is const-stabilized + // Check for signed zero + if i & 0x7FFFu16 == 0 { + return transmute!((i as u64) << 48); + } + + let half_sign = (i & 0x8000u16) as u64; + let half_exp = (i & 0x7F80u16) as u64; + let half_man = (i & 0x007Fu16) as u64; + + // Check for an infinity or NaN when all exponent bits set + if half_exp == 0x7F80u64 { + // Check for signed infinity if mantissa is zero + if half_man == 0 { + return transmute!((half_sign << 48) | 0x7FF0_0000_0000_0000u64); + } else { + // NaN, keep current mantissa but also set most significiant mantissa bit + return transmute!((half_sign << 48) | 0x7FF8_0000_0000_0000u64 | (half_man << 45)); + } + } + + // Calculate double-precision components with adjusted exponent + let sign = half_sign << 48; + // Unbias exponent + let unbiased_exp = ((half_exp as i64) >> 7) - 127; + + // Check for subnormals, which will be normalized by adjusting exponent + if half_exp == 0 { + // Calculate how much to adjust the exponent by + let e = leading_zeros_u16(half_man as u16) - 9; + + // Rebias and adjust exponent + let exp = ((1023 - 127 - e) as u64) << 52; + let man = (half_man << (46 + e)) & 0xF_FFFF_FFFF_FFFFu64; + return transmute!(sign | exp | man); + } + // Rebias exponent for a normalized normal + let exp = ((unbiased_exp + 1023) as u64) << 52; + let man = (half_man & 0x007Fu64) << 45; + transmute!(sign | exp | man) +} diff --git a/vendor/half/src/binary16.rs b/vendor/half/src/binary16.rs new file mode 100644 index 00000000..0559962f --- /dev/null +++ b/vendor/half/src/binary16.rs @@ -0,0 +1,2011 @@ +#[cfg(all(feature = "serde", feature = 
"alloc"))] +#[allow(unused_imports)] +use alloc::string::ToString; +#[cfg(feature = "bytemuck")] +use bytemuck::{Pod, Zeroable}; +use core::{ + cmp::Ordering, + iter::{Product, Sum}, + num::FpCategory, + ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Rem, RemAssign, Sub, SubAssign}, +}; +#[cfg(not(target_arch = "spirv"))] +use core::{ + fmt::{ + Binary, Debug, Display, Error, Formatter, LowerExp, LowerHex, Octal, UpperExp, UpperHex, + }, + num::ParseFloatError, + str::FromStr, +}; +#[cfg(feature = "serde")] +use serde::{Deserialize, Serialize}; +use zerocopy::{FromBytes, Immutable, IntoBytes, KnownLayout}; + +pub(crate) mod arch; + +/// A 16-bit floating point type implementing the IEEE 754-2008 standard [`binary16`] a.k.a "half" +/// format. +/// +/// This 16-bit floating point type is intended for efficient storage where the full range and +/// precision of a larger floating point value is not required. +/// +/// [`binary16`]: https://en.wikipedia.org/wiki/Half-precision_floating-point_format +#[allow(non_camel_case_types)] +#[derive(Clone, Copy, Default)] +#[repr(transparent)] +#[cfg_attr(feature = "serde", derive(Serialize))] +#[cfg_attr( + feature = "rkyv", + derive(rkyv::Archive, rkyv::Serialize, rkyv::Deserialize) +)] +#[cfg_attr(feature = "rkyv", rkyv(resolver = F16Resolver))] +#[cfg_attr(feature = "bytemuck", derive(Zeroable, Pod))] +#[cfg_attr(kani, derive(kani::Arbitrary))] +#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] +#[derive(FromBytes, Immutable, IntoBytes, KnownLayout)] +pub struct f16(u16); + +impl f16 { + /// Constructs a 16-bit floating point value from the raw bits. + #[inline] + #[must_use] + pub const fn from_bits(bits: u16) -> f16 { + f16(bits) + } + + /// Constructs a 16-bit floating point value from a 32-bit floating point value. + /// + /// This operation is lossy. If the 32-bit value is to large to fit in 16-bits, ±∞ will result. + /// NaN values are preserved. 32-bit subnormal values are too tiny to be represented in 16-bits + /// and result in ±0. Exponents that underflow the minimum 16-bit exponent will result in 16-bit + /// subnormals or ±0. All other values are truncated and rounded to the nearest representable + /// 16-bit value. + #[inline] + #[must_use] + pub fn from_f32(value: f32) -> f16 { + f16(arch::f32_to_f16(value)) + } + + /// Constructs a 16-bit floating point value from a 32-bit floating point value. + /// + /// This function is identical to [`from_f32`][Self::from_f32] except it never uses hardware + /// intrinsics, which allows it to be `const`. [`from_f32`][Self::from_f32] should be preferred + /// in any non-`const` context. + /// + /// This operation is lossy. If the 32-bit value is to large to fit in 16-bits, ±∞ will result. + /// NaN values are preserved. 32-bit subnormal values are too tiny to be represented in 16-bits + /// and result in ±0. Exponents that underflow the minimum 16-bit exponent will result in 16-bit + /// subnormals or ±0. All other values are truncated and rounded to the nearest representable + /// 16-bit value. + #[inline] + #[must_use] + pub const fn from_f32_const(value: f32) -> f16 { + f16(arch::f32_to_f16_fallback(value)) + } + + /// Constructs a 16-bit floating point value from a 64-bit floating point value. + /// + /// This operation is lossy. If the 64-bit value is to large to fit in 16-bits, ±∞ will result. + /// NaN values are preserved. 64-bit subnormal values are too tiny to be represented in 16-bits + /// and result in ±0. 
Exponents that underflow the minimum 16-bit exponent will result in 16-bit + /// subnormals or ±0. All other values are truncated and rounded to the nearest representable + /// 16-bit value. + #[inline] + #[must_use] + pub fn from_f64(value: f64) -> f16 { + f16(arch::f64_to_f16(value)) + } + + /// Constructs a 16-bit floating point value from a 64-bit floating point value. + /// + /// This function is identical to [`from_f64`][Self::from_f64] except it never uses hardware + /// intrinsics, which allows it to be `const`. [`from_f64`][Self::from_f64] should be preferred + /// in any non-`const` context. + /// + /// This operation is lossy. If the 64-bit value is to large to fit in 16-bits, ±∞ will result. + /// NaN values are preserved. 64-bit subnormal values are too tiny to be represented in 16-bits + /// and result in ±0. Exponents that underflow the minimum 16-bit exponent will result in 16-bit + /// subnormals or ±0. All other values are truncated and rounded to the nearest representable + /// 16-bit value. + #[inline] + #[must_use] + pub const fn from_f64_const(value: f64) -> f16 { + f16(arch::f64_to_f16_fallback(value)) + } + + /// Converts a [`struct@f16`] into the underlying bit representation. + #[inline] + #[must_use] + pub const fn to_bits(self) -> u16 { + self.0 + } + + /// Returns the memory representation of the underlying bit representation as a byte array in + /// little-endian byte order. + /// + /// # Examples + /// + /// ```rust + /// # use half::prelude::*; + /// let bytes = f16::from_f32(12.5).to_le_bytes(); + /// assert_eq!(bytes, [0x40, 0x4A]); + /// ``` + #[inline] + #[must_use] + pub const fn to_le_bytes(self) -> [u8; 2] { + self.0.to_le_bytes() + } + + /// Returns the memory representation of the underlying bit representation as a byte array in + /// big-endian (network) byte order. + /// + /// # Examples + /// + /// ```rust + /// # use half::prelude::*; + /// let bytes = f16::from_f32(12.5).to_be_bytes(); + /// assert_eq!(bytes, [0x4A, 0x40]); + /// ``` + #[inline] + #[must_use] + pub const fn to_be_bytes(self) -> [u8; 2] { + self.0.to_be_bytes() + } + + /// Returns the memory representation of the underlying bit representation as a byte array in + /// native byte order. + /// + /// As the target platform's native endianness is used, portable code should use + /// [`to_be_bytes`][Self::to_be_bytes] or [`to_le_bytes`][Self::to_le_bytes], as appropriate, + /// instead. + /// + /// # Examples + /// + /// ```rust + /// # use half::prelude::*; + /// let bytes = f16::from_f32(12.5).to_ne_bytes(); + /// assert_eq!(bytes, if cfg!(target_endian = "big") { + /// [0x4A, 0x40] + /// } else { + /// [0x40, 0x4A] + /// }); + /// ``` + #[inline] + #[must_use] + pub const fn to_ne_bytes(self) -> [u8; 2] { + self.0.to_ne_bytes() + } + + /// Creates a floating point value from its representation as a byte array in little endian. + /// + /// # Examples + /// + /// ```rust + /// # use half::prelude::*; + /// let value = f16::from_le_bytes([0x40, 0x4A]); + /// assert_eq!(value, f16::from_f32(12.5)); + /// ``` + #[inline] + #[must_use] + pub const fn from_le_bytes(bytes: [u8; 2]) -> f16 { + f16::from_bits(u16::from_le_bytes(bytes)) + } + + /// Creates a floating point value from its representation as a byte array in big endian. 
+ /// + /// # Examples + /// + /// ```rust + /// # use half::prelude::*; + /// let value = f16::from_be_bytes([0x4A, 0x40]); + /// assert_eq!(value, f16::from_f32(12.5)); + /// ``` + #[inline] + #[must_use] + pub const fn from_be_bytes(bytes: [u8; 2]) -> f16 { + f16::from_bits(u16::from_be_bytes(bytes)) + } + + /// Creates a floating point value from its representation as a byte array in native endian. + /// + /// As the target platform's native endianness is used, portable code likely wants to use + /// [`from_be_bytes`][Self::from_be_bytes] or [`from_le_bytes`][Self::from_le_bytes], as + /// appropriate instead. + /// + /// # Examples + /// + /// ```rust + /// # use half::prelude::*; + /// let value = f16::from_ne_bytes(if cfg!(target_endian = "big") { + /// [0x4A, 0x40] + /// } else { + /// [0x40, 0x4A] + /// }); + /// assert_eq!(value, f16::from_f32(12.5)); + /// ``` + #[inline] + #[must_use] + pub const fn from_ne_bytes(bytes: [u8; 2]) -> f16 { + f16::from_bits(u16::from_ne_bytes(bytes)) + } + + /// Converts a [`struct@f16`] value into a `f32` value. + /// + /// This conversion is lossless as all 16-bit floating point values can be represented exactly + /// in 32-bit floating point. + #[inline] + #[must_use] + pub fn to_f32(self) -> f32 { + arch::f16_to_f32(self.0) + } + + /// Converts a [`struct@f16`] value into a `f32` value. + /// + /// This function is identical to [`to_f32`][Self::to_f32] except it never uses hardware + /// intrinsics, which allows it to be `const`. [`to_f32`][Self::to_f32] should be preferred + /// in any non-`const` context. + /// + /// This conversion is lossless as all 16-bit floating point values can be represented exactly + /// in 32-bit floating point. + #[inline] + #[must_use] + pub const fn to_f32_const(self) -> f32 { + arch::f16_to_f32_fallback(self.0) + } + + /// Converts a [`struct@f16`] value into a `f64` value. + /// + /// This conversion is lossless as all 16-bit floating point values can be represented exactly + /// in 64-bit floating point. + #[inline] + #[must_use] + pub fn to_f64(self) -> f64 { + arch::f16_to_f64(self.0) + } + + /// Converts a [`struct@f16`] value into a `f64` value. + /// + /// This function is identical to [`to_f64`][Self::to_f64] except it never uses hardware + /// intrinsics, which allows it to be `const`. [`to_f64`][Self::to_f64] should be preferred + /// in any non-`const` context. + /// + /// This conversion is lossless as all 16-bit floating point values can be represented exactly + /// in 64-bit floating point. + #[inline] + #[must_use] + pub const fn to_f64_const(self) -> f64 { + arch::f16_to_f64_fallback(self.0) + } + + /// Returns `true` if this value is `NaN` and `false` otherwise. + /// + /// # Examples + /// + /// ```rust + /// # use half::prelude::*; + /// + /// let nan = f16::NAN; + /// let f = f16::from_f32(7.0_f32); + /// + /// assert!(nan.is_nan()); + /// assert!(!f.is_nan()); + /// ``` + #[inline] + #[must_use] + pub const fn is_nan(self) -> bool { + self.0 & 0x7FFFu16 > 0x7C00u16 + } + + /// Returns `true` if this value is ±∞ and `false`. + /// otherwise. 
+ /// + /// # Examples + /// + /// ```rust + /// # use half::prelude::*; + /// + /// let f = f16::from_f32(7.0f32); + /// let inf = f16::INFINITY; + /// let neg_inf = f16::NEG_INFINITY; + /// let nan = f16::NAN; + /// + /// assert!(!f.is_infinite()); + /// assert!(!nan.is_infinite()); + /// + /// assert!(inf.is_infinite()); + /// assert!(neg_inf.is_infinite()); + /// ``` + #[inline] + #[must_use] + pub const fn is_infinite(self) -> bool { + self.0 & 0x7FFFu16 == 0x7C00u16 + } + + /// Returns `true` if this number is neither infinite nor `NaN`. + /// + /// # Examples + /// + /// ```rust + /// # use half::prelude::*; + /// + /// let f = f16::from_f32(7.0f32); + /// let inf = f16::INFINITY; + /// let neg_inf = f16::NEG_INFINITY; + /// let nan = f16::NAN; + /// + /// assert!(f.is_finite()); + /// + /// assert!(!nan.is_finite()); + /// assert!(!inf.is_finite()); + /// assert!(!neg_inf.is_finite()); + /// ``` + #[inline] + #[must_use] + pub const fn is_finite(self) -> bool { + self.0 & 0x7C00u16 != 0x7C00u16 + } + + /// Returns `true` if the number is neither zero, infinite, subnormal, or `NaN`. + /// + /// # Examples + /// + /// ```rust + /// # use half::prelude::*; + /// + /// let min = f16::MIN_POSITIVE; + /// let max = f16::MAX; + /// let lower_than_min = f16::from_f32(1.0e-10_f32); + /// let zero = f16::from_f32(0.0_f32); + /// + /// assert!(min.is_normal()); + /// assert!(max.is_normal()); + /// + /// assert!(!zero.is_normal()); + /// assert!(!f16::NAN.is_normal()); + /// assert!(!f16::INFINITY.is_normal()); + /// // Values between `0` and `min` are Subnormal. + /// assert!(!lower_than_min.is_normal()); + /// ``` + #[inline] + #[must_use] + pub const fn is_normal(self) -> bool { + let exp = self.0 & 0x7C00u16; + exp != 0x7C00u16 && exp != 0 + } + + /// Returns the floating point category of the number. + /// + /// If only one property is going to be tested, it is generally faster to use the specific + /// predicate instead. + /// + /// # Examples + /// + /// ```rust + /// use std::num::FpCategory; + /// # use half::prelude::*; + /// + /// let num = f16::from_f32(12.4_f32); + /// let inf = f16::INFINITY; + /// + /// assert_eq!(num.classify(), FpCategory::Normal); + /// assert_eq!(inf.classify(), FpCategory::Infinite); + /// ``` + #[must_use] + pub const fn classify(self) -> FpCategory { + let exp = self.0 & 0x7C00u16; + let man = self.0 & 0x03FFu16; + match (exp, man) { + (0, 0) => FpCategory::Zero, + (0, _) => FpCategory::Subnormal, + (0x7C00u16, 0) => FpCategory::Infinite, + (0x7C00u16, _) => FpCategory::Nan, + _ => FpCategory::Normal, + } + } + + /// Returns a number that represents the sign of `self`. + /// + /// * `1.0` if the number is positive, `+0.0` or [`INFINITY`][f16::INFINITY] + /// * `-1.0` if the number is negative, `-0.0` or [`NEG_INFINITY`][f16::NEG_INFINITY] + /// * [`NAN`][f16::NAN] if the number is `NaN` + /// + /// # Examples + /// + /// ```rust + /// # use half::prelude::*; + /// + /// let f = f16::from_f32(3.5_f32); + /// + /// assert_eq!(f.signum(), f16::from_f32(1.0)); + /// assert_eq!(f16::NEG_INFINITY.signum(), f16::from_f32(-1.0)); + /// + /// assert!(f16::NAN.signum().is_nan()); + /// ``` + #[must_use] + pub const fn signum(self) -> f16 { + if self.is_nan() { + self + } else if self.0 & 0x8000u16 != 0 { + Self::NEG_ONE + } else { + Self::ONE + } + } + + /// Returns `true` if and only if `self` has a positive sign, including `+0.0`, `NaNs` with a + /// positive sign bit and +∞. 
+ /// + /// # Examples + /// + /// ```rust + /// # use half::prelude::*; + /// + /// let nan = f16::NAN; + /// let f = f16::from_f32(7.0_f32); + /// let g = f16::from_f32(-7.0_f32); + /// + /// assert!(f.is_sign_positive()); + /// assert!(!g.is_sign_positive()); + /// // `NaN` can be either positive or negative + /// assert!(nan.is_sign_positive() != nan.is_sign_negative()); + /// ``` + #[inline] + #[must_use] + pub const fn is_sign_positive(self) -> bool { + self.0 & 0x8000u16 == 0 + } + + /// Returns `true` if and only if `self` has a negative sign, including `-0.0`, `NaNs` with a + /// negative sign bit and −∞. + /// + /// # Examples + /// + /// ```rust + /// # use half::prelude::*; + /// + /// let nan = f16::NAN; + /// let f = f16::from_f32(7.0f32); + /// let g = f16::from_f32(-7.0f32); + /// + /// assert!(!f.is_sign_negative()); + /// assert!(g.is_sign_negative()); + /// // `NaN` can be either positive or negative + /// assert!(nan.is_sign_positive() != nan.is_sign_negative()); + /// ``` + #[inline] + #[must_use] + pub const fn is_sign_negative(self) -> bool { + self.0 & 0x8000u16 != 0 + } + + /// Returns a number composed of the magnitude of `self` and the sign of `sign`. + /// + /// Equal to `self` if the sign of `self` and `sign` are the same, otherwise equal to `-self`. + /// If `self` is NaN, then NaN with the sign of `sign` is returned. + /// + /// # Examples + /// + /// ``` + /// # use half::prelude::*; + /// let f = f16::from_f32(3.5); + /// + /// assert_eq!(f.copysign(f16::from_f32(0.42)), f16::from_f32(3.5)); + /// assert_eq!(f.copysign(f16::from_f32(-0.42)), f16::from_f32(-3.5)); + /// assert_eq!((-f).copysign(f16::from_f32(0.42)), f16::from_f32(3.5)); + /// assert_eq!((-f).copysign(f16::from_f32(-0.42)), f16::from_f32(-3.5)); + /// + /// assert!(f16::NAN.copysign(f16::from_f32(1.0)).is_nan()); + /// ``` + #[inline] + #[must_use] + pub const fn copysign(self, sign: f16) -> f16 { + f16((sign.0 & 0x8000u16) | (self.0 & 0x7FFFu16)) + } + + /// Returns the maximum of the two numbers. + /// + /// If one of the arguments is NaN, then the other argument is returned. + /// + /// # Examples + /// + /// ``` + /// # use half::prelude::*; + /// let x = f16::from_f32(1.0); + /// let y = f16::from_f32(2.0); + /// + /// assert_eq!(x.max(y), y); + /// ``` + #[inline] + #[must_use] + pub fn max(self, other: f16) -> f16 { + if self.is_nan() || other > self { + other + } else { + self + } + } + + /// Returns the minimum of the two numbers. + /// + /// If one of the arguments is NaN, then the other argument is returned. + /// + /// # Examples + /// + /// ``` + /// # use half::prelude::*; + /// let x = f16::from_f32(1.0); + /// let y = f16::from_f32(2.0); + /// + /// assert_eq!(x.min(y), x); + /// ``` + #[inline] + #[must_use] + pub fn min(self, other: f16) -> f16 { + if self.is_nan() || other < self { + other + } else { + self + } + } + + /// Restrict a value to a certain interval unless it is NaN. + /// + /// Returns `max` if `self` is greater than `max`, and `min` if `self` is less than `min`. + /// Otherwise this returns `self`. + /// + /// Note that this function returns NaN if the initial value was NaN as well. + /// + /// # Panics + /// Panics if `min > max`, `min` is NaN, or `max` is NaN. 
+ /// + /// # Examples + /// + /// ``` + /// # use half::prelude::*; + /// assert!(f16::from_f32(-3.0).clamp(f16::from_f32(-2.0), f16::from_f32(1.0)) == f16::from_f32(-2.0)); + /// assert!(f16::from_f32(0.0).clamp(f16::from_f32(-2.0), f16::from_f32(1.0)) == f16::from_f32(0.0)); + /// assert!(f16::from_f32(2.0).clamp(f16::from_f32(-2.0), f16::from_f32(1.0)) == f16::from_f32(1.0)); + /// assert!(f16::NAN.clamp(f16::from_f32(-2.0), f16::from_f32(1.0)).is_nan()); + /// ``` + #[inline] + #[must_use] + pub fn clamp(self, min: f16, max: f16) -> f16 { + assert!(min <= max); + let mut x = self; + if x < min { + x = min; + } + if x > max { + x = max; + } + x + } + + /// Returns the ordering between `self` and `other`. + /// + /// Unlike the standard partial comparison between floating point numbers, + /// this comparison always produces an ordering in accordance to + /// the `totalOrder` predicate as defined in the IEEE 754 (2008 revision) + /// floating point standard. The values are ordered in the following sequence: + /// + /// - negative quiet NaN + /// - negative signaling NaN + /// - negative infinity + /// - negative numbers + /// - negative subnormal numbers + /// - negative zero + /// - positive zero + /// - positive subnormal numbers + /// - positive numbers + /// - positive infinity + /// - positive signaling NaN + /// - positive quiet NaN. + /// + /// The ordering established by this function does not always agree with the + /// [`PartialOrd`] and [`PartialEq`] implementations of `f16`. For example, + /// they consider negative and positive zero equal, while `total_cmp` + /// doesn't. + /// + /// The interpretation of the signaling NaN bit follows the definition in + /// the IEEE 754 standard, which may not match the interpretation by some of + /// the older, non-conformant (e.g. MIPS) hardware implementations. + /// + /// # Examples + /// ``` + /// # use half::f16; + /// let mut v: Vec<f16> = vec![]; + /// v.push(f16::ONE); + /// v.push(f16::INFINITY); + /// v.push(f16::NEG_INFINITY); + /// v.push(f16::NAN); + /// v.push(f16::MAX_SUBNORMAL); + /// v.push(-f16::MAX_SUBNORMAL); + /// v.push(f16::ZERO); + /// v.push(f16::NEG_ZERO); + /// v.push(f16::NEG_ONE); + /// v.push(f16::MIN_POSITIVE); + /// + /// v.sort_by(|a, b| a.total_cmp(&b)); + /// + /// assert!(v + /// .into_iter() + /// .zip( + /// [ + /// f16::NEG_INFINITY, + /// f16::NEG_ONE, + /// -f16::MAX_SUBNORMAL, + /// f16::NEG_ZERO, + /// f16::ZERO, + /// f16::MAX_SUBNORMAL, + /// f16::MIN_POSITIVE, + /// f16::ONE, + /// f16::INFINITY, + /// f16::NAN + /// ] + /// .iter() + /// ) + /// .all(|(a, b)| a.to_bits() == b.to_bits())); + /// ``` + // Implementation based on: https://doc.rust-lang.org/std/primitive.f32.html#method.total_cmp + #[inline] + #[must_use] + pub fn total_cmp(&self, other: &Self) -> Ordering { + let mut left = self.to_bits() as i16; + let mut right = other.to_bits() as i16; + left ^= (((left >> 15) as u16) >> 1) as i16; + right ^= (((right >> 15) as u16) >> 1) as i16; + left.cmp(&right) + } + + /// Alternate serialize adapter for serializing as a float. + /// + /// By default, [`struct@f16`] serializes as a newtype of [`u16`]. This is an alternate serialize + /// implementation that serializes as an [`f32`] value. It is designed for use with + /// `serialize_with` serde attributes. Deserialization from `f32` values is already supported by + /// the default deserialize implementation. 
+ /// + /// # Examples + /// + /// A demonstration on how to use this adapter: + /// + /// ``` + /// use serde::{Serialize, Deserialize}; + /// use half::f16; + /// + /// #[derive(Serialize, Deserialize)] + /// struct MyStruct { + /// #[serde(serialize_with = "f16::serialize_as_f32")] + /// value: f16 // Will be serialized as f32 instead of u16 + /// } + /// ``` + #[cfg(feature = "serde")] + pub fn serialize_as_f32<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { + serializer.serialize_f32(self.to_f32()) + } + + /// Alternate serialize adapter for serializing as a string. + /// + /// By default, [`struct@f16`] serializes as a newtype of [`u16`]. This is an alternate serialize + /// implementation that serializes as a string value. It is designed for use with + /// `serialize_with` serde attributes. Deserialization from string values is already supported + /// by the default deserialize implementation. + /// + /// # Examples + /// + /// A demonstration on how to use this adapter: + /// + /// ``` + /// use serde::{Serialize, Deserialize}; + /// use half::f16; + /// + /// #[derive(Serialize, Deserialize)] + /// struct MyStruct { + /// #[serde(serialize_with = "f16::serialize_as_string")] + /// value: f16 // Will be serialized as a string instead of u16 + /// } + /// ``` + #[cfg(all(feature = "serde", feature = "alloc"))] + pub fn serialize_as_string<S: serde::Serializer>( + &self, + serializer: S, + ) -> Result<S::Ok, S::Error> { + serializer.serialize_str(&self.to_string()) + } + + /// Approximate number of [`struct@f16`] significant digits in base 10 + pub const DIGITS: u32 = 3; + /// [`struct@f16`] + /// [machine epsilon](https://en.wikipedia.org/wiki/Machine_epsilon) value + /// + /// This is the difference between 1.0 and the next largest representable number. 
+ pub const EPSILON: f16 = f16(0x1400u16); + /// [`struct@f16`] positive Infinity (+∞) + pub const INFINITY: f16 = f16(0x7C00u16); + /// Number of [`struct@f16`] significant digits in base 2 + pub const MANTISSA_DIGITS: u32 = 11; + /// Largest finite [`struct@f16`] value + pub const MAX: f16 = f16(0x7BFF); + /// Maximum possible [`struct@f16`] power of 10 exponent + pub const MAX_10_EXP: i32 = 4; + /// Maximum possible [`struct@f16`] power of 2 exponent + pub const MAX_EXP: i32 = 16; + /// Smallest finite [`struct@f16`] value + pub const MIN: f16 = f16(0xFBFF); + /// Minimum possible normal [`struct@f16`] power of 10 exponent + pub const MIN_10_EXP: i32 = -4; + /// One greater than the minimum possible normal [`struct@f16`] power of 2 exponent + pub const MIN_EXP: i32 = -13; + /// Smallest positive normal [`struct@f16`] value + pub const MIN_POSITIVE: f16 = f16(0x0400u16); + /// [`struct@f16`] Not a Number (NaN) + pub const NAN: f16 = f16(0x7E00u16); + /// [`struct@f16`] negative infinity (-∞) + pub const NEG_INFINITY: f16 = f16(0xFC00u16); + /// The radix or base of the internal representation of [`struct@f16`] + pub const RADIX: u32 = 2; + + /// Minimum positive subnormal [`struct@f16`] value + pub const MIN_POSITIVE_SUBNORMAL: f16 = f16(0x0001u16); + /// Maximum subnormal [`struct@f16`] value + pub const MAX_SUBNORMAL: f16 = f16(0x03FFu16); + + /// [`struct@f16`] 1 + pub const ONE: f16 = f16(0x3C00u16); + /// [`struct@f16`] 0 + pub const ZERO: f16 = f16(0x0000u16); + /// [`struct@f16`] -0 + pub const NEG_ZERO: f16 = f16(0x8000u16); + /// [`struct@f16`] -1 + pub const NEG_ONE: f16 = f16(0xBC00u16); + + /// [`struct@f16`] Euler's number (ℯ) + pub const E: f16 = f16(0x4170u16); + /// [`struct@f16`] Archimedes' constant (π) + pub const PI: f16 = f16(0x4248u16); + /// [`struct@f16`] 1/π + pub const FRAC_1_PI: f16 = f16(0x3518u16); + /// [`struct@f16`] 1/√2 + pub const FRAC_1_SQRT_2: f16 = f16(0x39A8u16); + /// [`struct@f16`] 2/π + pub const FRAC_2_PI: f16 = f16(0x3918u16); + /// [`struct@f16`] 2/√π + pub const FRAC_2_SQRT_PI: f16 = f16(0x3C83u16); + /// [`struct@f16`] π/2 + pub const FRAC_PI_2: f16 = f16(0x3E48u16); + /// [`struct@f16`] π/3 + pub const FRAC_PI_3: f16 = f16(0x3C30u16); + /// [`struct@f16`] π/4 + pub const FRAC_PI_4: f16 = f16(0x3A48u16); + /// [`struct@f16`] π/6 + pub const FRAC_PI_6: f16 = f16(0x3830u16); + /// [`struct@f16`] π/8 + pub const FRAC_PI_8: f16 = f16(0x3648u16); + /// [`struct@f16`] 𝗅𝗇 10 + pub const LN_10: f16 = f16(0x409Bu16); + /// [`struct@f16`] 𝗅𝗇 2 + pub const LN_2: f16 = f16(0x398Cu16); + /// [`struct@f16`] 𝗅𝗈𝗀₁₀ℯ + pub const LOG10_E: f16 = f16(0x36F3u16); + /// [`struct@f16`] 𝗅𝗈𝗀₁₀2 + pub const LOG10_2: f16 = f16(0x34D1u16); + /// [`struct@f16`] 𝗅𝗈𝗀₂ℯ + pub const LOG2_E: f16 = f16(0x3DC5u16); + /// [`struct@f16`] 𝗅𝗈𝗀₂10 + pub const LOG2_10: f16 = f16(0x42A5u16); + /// [`struct@f16`] √2 + pub const SQRT_2: f16 = f16(0x3DA8u16); +} + +impl From<f16> for f32 { + #[inline] + fn from(x: f16) -> f32 { + x.to_f32() + } +} + +impl From<f16> for f64 { + #[inline] + fn from(x: f16) -> f64 { + x.to_f64() + } +} + +impl From<i8> for f16 { + #[inline] + fn from(x: i8) -> f16 { + // Convert to f32, then to f16 + f16::from_f32(f32::from(x)) + } +} + +impl From<u8> for f16 { + #[inline] + fn from(x: u8) -> f16 { + // Convert to f32, then to f16 + f16::from_f32(f32::from(x)) + } +} + +impl PartialEq for f16 { + fn eq(&self, other: &f16) -> bool { + if self.is_nan() || other.is_nan() { + false + } else { + (self.0 == other.0) || ((self.0 | other.0) & 0x7FFFu16 == 0) + } + 
} +} + +impl PartialOrd for f16 { + fn partial_cmp(&self, other: &f16) -> Option<Ordering> { + if self.is_nan() || other.is_nan() { + None + } else { + let neg = self.0 & 0x8000u16 != 0; + let other_neg = other.0 & 0x8000u16 != 0; + match (neg, other_neg) { + (false, false) => Some(self.0.cmp(&other.0)), + (false, true) => { + if (self.0 | other.0) & 0x7FFFu16 == 0 { + Some(Ordering::Equal) + } else { + Some(Ordering::Greater) + } + } + (true, false) => { + if (self.0 | other.0) & 0x7FFFu16 == 0 { + Some(Ordering::Equal) + } else { + Some(Ordering::Less) + } + } + (true, true) => Some(other.0.cmp(&self.0)), + } + } + } + + fn lt(&self, other: &f16) -> bool { + if self.is_nan() || other.is_nan() { + false + } else { + let neg = self.0 & 0x8000u16 != 0; + let other_neg = other.0 & 0x8000u16 != 0; + match (neg, other_neg) { + (false, false) => self.0 < other.0, + (false, true) => false, + (true, false) => (self.0 | other.0) & 0x7FFFu16 != 0, + (true, true) => self.0 > other.0, + } + } + } + + fn le(&self, other: &f16) -> bool { + if self.is_nan() || other.is_nan() { + false + } else { + let neg = self.0 & 0x8000u16 != 0; + let other_neg = other.0 & 0x8000u16 != 0; + match (neg, other_neg) { + (false, false) => self.0 <= other.0, + (false, true) => (self.0 | other.0) & 0x7FFFu16 == 0, + (true, false) => true, + (true, true) => self.0 >= other.0, + } + } + } + + fn gt(&self, other: &f16) -> bool { + if self.is_nan() || other.is_nan() { + false + } else { + let neg = self.0 & 0x8000u16 != 0; + let other_neg = other.0 & 0x8000u16 != 0; + match (neg, other_neg) { + (false, false) => self.0 > other.0, + (false, true) => (self.0 | other.0) & 0x7FFFu16 != 0, + (true, false) => false, + (true, true) => self.0 < other.0, + } + } + } + + fn ge(&self, other: &f16) -> bool { + if self.is_nan() || other.is_nan() { + false + } else { + let neg = self.0 & 0x8000u16 != 0; + let other_neg = other.0 & 0x8000u16 != 0; + match (neg, other_neg) { + (false, false) => self.0 >= other.0, + (false, true) => true, + (true, false) => (self.0 | other.0) & 0x7FFFu16 == 0, + (true, true) => self.0 <= other.0, + } + } + } +} + +#[cfg(not(target_arch = "spirv"))] +impl FromStr for f16 { + type Err = ParseFloatError; + fn from_str(src: &str) -> Result<f16, ParseFloatError> { + f32::from_str(src).map(f16::from_f32) + } +} + +#[cfg(not(target_arch = "spirv"))] +impl Debug for f16 { + fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> { + Debug::fmt(&self.to_f32(), f) + } +} + +#[cfg(not(target_arch = "spirv"))] +impl Display for f16 { + fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> { + Display::fmt(&self.to_f32(), f) + } +} + +#[cfg(not(target_arch = "spirv"))] +impl LowerExp for f16 { + fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> { + write!(f, "{:e}", self.to_f32()) + } +} + +#[cfg(not(target_arch = "spirv"))] +impl UpperExp for f16 { + fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> { + write!(f, "{:E}", self.to_f32()) + } +} + +#[cfg(not(target_arch = "spirv"))] +impl Binary for f16 { + fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> { + write!(f, "{:b}", self.0) + } +} + +#[cfg(not(target_arch = "spirv"))] +impl Octal for f16 { + fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> { + write!(f, "{:o}", self.0) + } +} + +#[cfg(not(target_arch = "spirv"))] +impl LowerHex for f16 { + fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> { + write!(f, "{:x}", self.0) + } +} + +#[cfg(not(target_arch = "spirv"))] +impl UpperHex for f16 { + fn fmt(&self, f: &mut 
Formatter<'_>) -> Result<(), Error> { + write!(f, "{:X}", self.0) + } +} + +impl Neg for f16 { + type Output = Self; + + #[inline] + fn neg(self) -> Self::Output { + Self(self.0 ^ 0x8000) + } +} + +impl Neg for &f16 { + type Output = <f16 as Neg>::Output; + + #[inline] + fn neg(self) -> Self::Output { + Neg::neg(*self) + } +} + +impl Add for f16 { + type Output = Self; + + #[inline] + fn add(self, rhs: Self) -> Self::Output { + f16(arch::add_f16(self.0, rhs.0)) + } +} + +impl Add<&f16> for f16 { + type Output = <f16 as Add<f16>>::Output; + + #[inline] + fn add(self, rhs: &f16) -> Self::Output { + self.add(*rhs) + } +} + +impl Add<&f16> for &f16 { + type Output = <f16 as Add<f16>>::Output; + + #[inline] + fn add(self, rhs: &f16) -> Self::Output { + (*self).add(*rhs) + } +} + +impl Add<f16> for &f16 { + type Output = <f16 as Add<f16>>::Output; + + #[inline] + fn add(self, rhs: f16) -> Self::Output { + (*self).add(rhs) + } +} + +impl AddAssign for f16 { + #[inline] + fn add_assign(&mut self, rhs: Self) { + *self = (*self).add(rhs); + } +} + +impl AddAssign<&f16> for f16 { + #[inline] + fn add_assign(&mut self, rhs: &f16) { + *self = (*self).add(rhs); + } +} + +impl Sub for f16 { + type Output = Self; + + #[inline] + fn sub(self, rhs: Self) -> Self::Output { + f16(arch::subtract_f16(self.0, rhs.0)) + } +} + +impl Sub<&f16> for f16 { + type Output = <f16 as Sub<f16>>::Output; + + #[inline] + fn sub(self, rhs: &f16) -> Self::Output { + self.sub(*rhs) + } +} + +impl Sub<&f16> for &f16 { + type Output = <f16 as Sub<f16>>::Output; + + #[inline] + fn sub(self, rhs: &f16) -> Self::Output { + (*self).sub(*rhs) + } +} + +impl Sub<f16> for &f16 { + type Output = <f16 as Sub<f16>>::Output; + + #[inline] + fn sub(self, rhs: f16) -> Self::Output { + (*self).sub(rhs) + } +} + +impl SubAssign for f16 { + #[inline] + fn sub_assign(&mut self, rhs: Self) { + *self = (*self).sub(rhs); + } +} + +impl SubAssign<&f16> for f16 { + #[inline] + fn sub_assign(&mut self, rhs: &f16) { + *self = (*self).sub(rhs); + } +} + +impl Mul for f16 { + type Output = Self; + + #[inline] + fn mul(self, rhs: Self) -> Self::Output { + f16(arch::multiply_f16(self.0, rhs.0)) + } +} + +impl Mul<&f16> for f16 { + type Output = <f16 as Mul<f16>>::Output; + + #[inline] + fn mul(self, rhs: &f16) -> Self::Output { + self.mul(*rhs) + } +} + +impl Mul<&f16> for &f16 { + type Output = <f16 as Mul<f16>>::Output; + + #[inline] + fn mul(self, rhs: &f16) -> Self::Output { + (*self).mul(*rhs) + } +} + +impl Mul<f16> for &f16 { + type Output = <f16 as Mul<f16>>::Output; + + #[inline] + fn mul(self, rhs: f16) -> Self::Output { + (*self).mul(rhs) + } +} + +impl MulAssign for f16 { + #[inline] + fn mul_assign(&mut self, rhs: Self) { + *self = (*self).mul(rhs); + } +} + +impl MulAssign<&f16> for f16 { + #[inline] + fn mul_assign(&mut self, rhs: &f16) { + *self = (*self).mul(rhs); + } +} + +impl Div for f16 { + type Output = Self; + + #[inline] + fn div(self, rhs: Self) -> Self::Output { + f16(arch::divide_f16(self.0, rhs.0)) + } +} + +impl Div<&f16> for f16 { + type Output = <f16 as Div<f16>>::Output; + + #[inline] + fn div(self, rhs: &f16) -> Self::Output { + self.div(*rhs) + } +} + +impl Div<&f16> for &f16 { + type Output = <f16 as Div<f16>>::Output; + + #[inline] + fn div(self, rhs: &f16) -> Self::Output { + (*self).div(*rhs) + } +} + +impl Div<f16> for &f16 { + type Output = <f16 as Div<f16>>::Output; + + #[inline] + fn div(self, rhs: f16) -> Self::Output { + (*self).div(rhs) + } +} + +impl DivAssign for f16 { + #[inline] + fn div_assign(&mut self, 
rhs: Self) { + *self = (*self).div(rhs); + } +} + +impl DivAssign<&f16> for f16 { + #[inline] + fn div_assign(&mut self, rhs: &f16) { + *self = (*self).div(rhs); + } +} + +impl Rem for f16 { + type Output = Self; + + #[inline] + fn rem(self, rhs: Self) -> Self::Output { + f16(arch::remainder_f16(self.0, rhs.0)) + } +} + +impl Rem<&f16> for f16 { + type Output = <f16 as Rem<f16>>::Output; + + #[inline] + fn rem(self, rhs: &f16) -> Self::Output { + self.rem(*rhs) + } +} + +impl Rem<&f16> for &f16 { + type Output = <f16 as Rem<f16>>::Output; + + #[inline] + fn rem(self, rhs: &f16) -> Self::Output { + (*self).rem(*rhs) + } +} + +impl Rem<f16> for &f16 { + type Output = <f16 as Rem<f16>>::Output; + + #[inline] + fn rem(self, rhs: f16) -> Self::Output { + (*self).rem(rhs) + } +} + +impl RemAssign for f16 { + #[inline] + fn rem_assign(&mut self, rhs: Self) { + *self = (*self).rem(rhs); + } +} + +impl RemAssign<&f16> for f16 { + #[inline] + fn rem_assign(&mut self, rhs: &f16) { + *self = (*self).rem(rhs); + } +} + +impl Product for f16 { + #[inline] + fn product<I: Iterator<Item = Self>>(iter: I) -> Self { + f16(arch::product_f16(iter.map(|f| f.to_bits()))) + } +} + +impl<'a> Product<&'a f16> for f16 { + #[inline] + fn product<I: Iterator<Item = &'a f16>>(iter: I) -> Self { + f16(arch::product_f16(iter.map(|f| f.to_bits()))) + } +} + +impl Sum for f16 { + #[inline] + fn sum<I: Iterator<Item = Self>>(iter: I) -> Self { + f16(arch::sum_f16(iter.map(|f| f.to_bits()))) + } +} + +impl<'a> Sum<&'a f16> for f16 { + #[inline] + fn sum<I: Iterator<Item = &'a f16>>(iter: I) -> Self { + f16(arch::sum_f16(iter.map(|f| f.to_bits()))) + } +} + +#[cfg(feature = "serde")] +struct Visitor; + +#[cfg(feature = "serde")] +impl<'de> Deserialize<'de> for f16 { + fn deserialize<D>(deserializer: D) -> Result<f16, D::Error> + where + D: serde::de::Deserializer<'de>, + { + deserializer.deserialize_newtype_struct("f16", Visitor) + } +} + +#[cfg(feature = "serde")] +impl<'de> serde::de::Visitor<'de> for Visitor { + type Value = f16; + + fn expecting(&self, formatter: &mut core::fmt::Formatter) -> core::fmt::Result { + write!(formatter, "tuple struct f16") + } + + fn visit_newtype_struct<D>(self, deserializer: D) -> Result<Self::Value, D::Error> + where + D: serde::Deserializer<'de>, + { + Ok(f16(<u16 as Deserialize>::deserialize(deserializer)?)) + } + + fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> + where + E: serde::de::Error, + { + v.parse().map_err(|_| { + serde::de::Error::invalid_value(serde::de::Unexpected::Str(v), &"a float string") + }) + } + + fn visit_f32<E>(self, v: f32) -> Result<Self::Value, E> + where + E: serde::de::Error, + { + Ok(f16::from_f32(v)) + } + + fn visit_f64<E>(self, v: f64) -> Result<Self::Value, E> + where + E: serde::de::Error, + { + Ok(f16::from_f64(v)) + } +} + +#[allow( + clippy::cognitive_complexity, + clippy::float_cmp, + clippy::neg_cmp_op_on_partial_ord +)] +#[cfg(test)] +mod test { + use super::*; + #[allow(unused_imports)] + use core::cmp::Ordering; + #[cfg(feature = "num-traits")] + use num_traits::{AsPrimitive, FromBytes, FromPrimitive, ToBytes, ToPrimitive}; + use quickcheck_macros::quickcheck; + + #[cfg(feature = "num-traits")] + #[test] + fn as_primitive() { + let two = f16::from_f32(2.0); + assert_eq!(<i32 as AsPrimitive<f16>>::as_(2), two); + assert_eq!(<f16 as AsPrimitive<i32>>::as_(two), 2); + + assert_eq!(<f32 as AsPrimitive<f16>>::as_(2.0), two); + assert_eq!(<f16 as AsPrimitive<f32>>::as_(two), 2.0); + + assert_eq!(<f64 as AsPrimitive<f16>>::as_(2.0), two); + 
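+ // Editorial sketch (assumes the same num-traits feature as the surrounding test): for a
+ // value that is exactly representable in half precision, the AsPrimitive conversions and the
+ // explicit from_f32/to_f32 path should agree. The local name `exact` is illustrative only.
+ let exact = 0.25f32;
+ assert_eq!(<f32 as AsPrimitive<f16>>::as_(exact), f16::from_f32(exact));
+ assert_eq!(<f16 as AsPrimitive<f32>>::as_(f16::from_f32(exact)), exact);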
assert_eq!(<f16 as AsPrimitive<f64>>::as_(two), 2.0); + } + + #[cfg(feature = "num-traits")] + #[test] + fn to_primitive() { + let two = f16::from_f32(2.0); + assert_eq!(ToPrimitive::to_i32(&two).unwrap(), 2i32); + assert_eq!(ToPrimitive::to_f32(&two).unwrap(), 2.0f32); + assert_eq!(ToPrimitive::to_f64(&two).unwrap(), 2.0f64); + } + + #[cfg(feature = "num-traits")] + #[test] + fn from_primitive() { + let two = f16::from_f32(2.0); + assert_eq!(<f16 as FromPrimitive>::from_i32(2).unwrap(), two); + assert_eq!(<f16 as FromPrimitive>::from_f32(2.0).unwrap(), two); + assert_eq!(<f16 as FromPrimitive>::from_f64(2.0).unwrap(), two); + } + + #[cfg(feature = "num-traits")] + #[test] + fn to_and_from_bytes() { + let two = f16::from_f32(2.0); + assert_eq!(<f16 as ToBytes>::to_le_bytes(&two), [0, 64]); + assert_eq!(<f16 as FromBytes>::from_le_bytes(&[0, 64]), two); + assert_eq!(<f16 as ToBytes>::to_be_bytes(&two), [64, 0]); + assert_eq!(<f16 as FromBytes>::from_be_bytes(&[64, 0]), two); + } + + #[test] + #[cfg_attr(miri, ignore)] + fn test_f16_consts() { + // DIGITS + let digits = ((f16::MANTISSA_DIGITS as f32 - 1.0) * 2f32.log10()).floor() as u32; + assert_eq!(f16::DIGITS, digits); + // sanity check to show test is good + let digits32 = ((core::f32::MANTISSA_DIGITS as f32 - 1.0) * 2f32.log10()).floor() as u32; + assert_eq!(core::f32::DIGITS, digits32); + + // EPSILON + let one = f16::from_f32(1.0); + let one_plus_epsilon = f16::from_bits(one.to_bits() + 1); + let epsilon = f16::from_f32(one_plus_epsilon.to_f32() - 1.0); + assert_eq!(f16::EPSILON, epsilon); + // sanity check to show test is good + let one_plus_epsilon32 = f32::from_bits(1.0f32.to_bits() + 1); + let epsilon32 = one_plus_epsilon32 - 1f32; + assert_eq!(core::f32::EPSILON, epsilon32); + + // MAX, MIN and MIN_POSITIVE + let max = f16::from_bits(f16::INFINITY.to_bits() - 1); + let min = f16::from_bits(f16::NEG_INFINITY.to_bits() - 1); + let min_pos = f16::from_f32(2f32.powi(f16::MIN_EXP - 1)); + assert_eq!(f16::MAX, max); + assert_eq!(f16::MIN, min); + assert_eq!(f16::MIN_POSITIVE, min_pos); + // sanity check to show test is good + let max32 = f32::from_bits(core::f32::INFINITY.to_bits() - 1); + let min32 = f32::from_bits(core::f32::NEG_INFINITY.to_bits() - 1); + let min_pos32 = 2f32.powi(core::f32::MIN_EXP - 1); + assert_eq!(core::f32::MAX, max32); + assert_eq!(core::f32::MIN, min32); + assert_eq!(core::f32::MIN_POSITIVE, min_pos32); + + // MIN_10_EXP and MAX_10_EXP + let ten_to_min = 10f32.powi(f16::MIN_10_EXP); + assert!(ten_to_min / 10.0 < f16::MIN_POSITIVE.to_f32()); + assert!(ten_to_min > f16::MIN_POSITIVE.to_f32()); + let ten_to_max = 10f32.powi(f16::MAX_10_EXP); + assert!(ten_to_max < f16::MAX.to_f32()); + assert!(ten_to_max * 10.0 > f16::MAX.to_f32()); + // sanity check to show test is good + let ten_to_min32 = 10f64.powi(core::f32::MIN_10_EXP); + assert!(ten_to_min32 / 10.0 < f64::from(core::f32::MIN_POSITIVE)); + assert!(ten_to_min32 > f64::from(core::f32::MIN_POSITIVE)); + let ten_to_max32 = 10f64.powi(core::f32::MAX_10_EXP); + assert!(ten_to_max32 < f64::from(core::f32::MAX)); + assert!(ten_to_max32 * 10.0 > f64::from(core::f32::MAX)); + } + + #[test] + fn test_f16_consts_from_f32() { + let one = f16::from_f32(1.0); + let zero = f16::from_f32(0.0); + let neg_zero = f16::from_f32(-0.0); + let neg_one = f16::from_f32(-1.0); + let inf = f16::from_f32(core::f32::INFINITY); + let neg_inf = f16::from_f32(core::f32::NEG_INFINITY); + let nan = f16::from_f32(core::f32::NAN); + + assert_eq!(f16::ONE, one); + assert_eq!(f16::ZERO, zero); + 
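+ // Editorial sketch: ZERO and NEG_ZERO compare equal under PartialEq even though their bit
+ // patterns differ, so the sign checks that follow are what actually distinguish the two
+ // constants.
+ assert_eq!(f16::ZERO, f16::NEG_ZERO);
+ assert_ne!(f16::ZERO.to_bits(), f16::NEG_ZERO.to_bits());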
assert!(zero.is_sign_positive()); + assert_eq!(f16::NEG_ZERO, neg_zero); + assert!(neg_zero.is_sign_negative()); + assert_eq!(f16::NEG_ONE, neg_one); + assert!(neg_one.is_sign_negative()); + assert_eq!(f16::INFINITY, inf); + assert_eq!(f16::NEG_INFINITY, neg_inf); + assert!(nan.is_nan()); + assert!(f16::NAN.is_nan()); + + let e = f16::from_f32(core::f32::consts::E); + let pi = f16::from_f32(core::f32::consts::PI); + let frac_1_pi = f16::from_f32(core::f32::consts::FRAC_1_PI); + let frac_1_sqrt_2 = f16::from_f32(core::f32::consts::FRAC_1_SQRT_2); + let frac_2_pi = f16::from_f32(core::f32::consts::FRAC_2_PI); + let frac_2_sqrt_pi = f16::from_f32(core::f32::consts::FRAC_2_SQRT_PI); + let frac_pi_2 = f16::from_f32(core::f32::consts::FRAC_PI_2); + let frac_pi_3 = f16::from_f32(core::f32::consts::FRAC_PI_3); + let frac_pi_4 = f16::from_f32(core::f32::consts::FRAC_PI_4); + let frac_pi_6 = f16::from_f32(core::f32::consts::FRAC_PI_6); + let frac_pi_8 = f16::from_f32(core::f32::consts::FRAC_PI_8); + let ln_10 = f16::from_f32(core::f32::consts::LN_10); + let ln_2 = f16::from_f32(core::f32::consts::LN_2); + let log10_e = f16::from_f32(core::f32::consts::LOG10_E); + // core::f32::consts::LOG10_2 requires rustc 1.43.0 + let log10_2 = f16::from_f32(2f32.log10()); + let log2_e = f16::from_f32(core::f32::consts::LOG2_E); + // core::f32::consts::LOG2_10 requires rustc 1.43.0 + let log2_10 = f16::from_f32(10f32.log2()); + let sqrt_2 = f16::from_f32(core::f32::consts::SQRT_2); + + assert_eq!(f16::E, e); + assert_eq!(f16::PI, pi); + assert_eq!(f16::FRAC_1_PI, frac_1_pi); + assert_eq!(f16::FRAC_1_SQRT_2, frac_1_sqrt_2); + assert_eq!(f16::FRAC_2_PI, frac_2_pi); + assert_eq!(f16::FRAC_2_SQRT_PI, frac_2_sqrt_pi); + assert_eq!(f16::FRAC_PI_2, frac_pi_2); + assert_eq!(f16::FRAC_PI_3, frac_pi_3); + assert_eq!(f16::FRAC_PI_4, frac_pi_4); + assert_eq!(f16::FRAC_PI_6, frac_pi_6); + assert_eq!(f16::FRAC_PI_8, frac_pi_8); + assert_eq!(f16::LN_10, ln_10); + assert_eq!(f16::LN_2, ln_2); + assert_eq!(f16::LOG10_E, log10_e); + assert_eq!(f16::LOG10_2, log10_2); + assert_eq!(f16::LOG2_E, log2_e); + assert_eq!(f16::LOG2_10, log2_10); + assert_eq!(f16::SQRT_2, sqrt_2); + } + + #[test] + fn test_f16_consts_from_f64() { + let one = f16::from_f64(1.0); + let zero = f16::from_f64(0.0); + let neg_zero = f16::from_f64(-0.0); + let inf = f16::from_f64(core::f64::INFINITY); + let neg_inf = f16::from_f64(core::f64::NEG_INFINITY); + let nan = f16::from_f64(core::f64::NAN); + + assert_eq!(f16::ONE, one); + assert_eq!(f16::ZERO, zero); + assert!(zero.is_sign_positive()); + assert_eq!(f16::NEG_ZERO, neg_zero); + assert!(neg_zero.is_sign_negative()); + assert_eq!(f16::INFINITY, inf); + assert_eq!(f16::NEG_INFINITY, neg_inf); + assert!(nan.is_nan()); + assert!(f16::NAN.is_nan()); + + let e = f16::from_f64(core::f64::consts::E); + let pi = f16::from_f64(core::f64::consts::PI); + let frac_1_pi = f16::from_f64(core::f64::consts::FRAC_1_PI); + let frac_1_sqrt_2 = f16::from_f64(core::f64::consts::FRAC_1_SQRT_2); + let frac_2_pi = f16::from_f64(core::f64::consts::FRAC_2_PI); + let frac_2_sqrt_pi = f16::from_f64(core::f64::consts::FRAC_2_SQRT_PI); + let frac_pi_2 = f16::from_f64(core::f64::consts::FRAC_PI_2); + let frac_pi_3 = f16::from_f64(core::f64::consts::FRAC_PI_3); + let frac_pi_4 = f16::from_f64(core::f64::consts::FRAC_PI_4); + let frac_pi_6 = f16::from_f64(core::f64::consts::FRAC_PI_6); + let frac_pi_8 = f16::from_f64(core::f64::consts::FRAC_PI_8); + let ln_10 = f16::from_f64(core::f64::consts::LN_10); + let ln_2 = 
f16::from_f64(core::f64::consts::LN_2); + let log10_e = f16::from_f64(core::f64::consts::LOG10_E); + // core::f64::consts::LOG10_2 requires rustc 1.43.0 + let log10_2 = f16::from_f64(2f64.log10()); + let log2_e = f16::from_f64(core::f64::consts::LOG2_E); + // core::f64::consts::LOG2_10 requires rustc 1.43.0 + let log2_10 = f16::from_f64(10f64.log2()); + let sqrt_2 = f16::from_f64(core::f64::consts::SQRT_2); + + assert_eq!(f16::E, e); + assert_eq!(f16::PI, pi); + assert_eq!(f16::FRAC_1_PI, frac_1_pi); + assert_eq!(f16::FRAC_1_SQRT_2, frac_1_sqrt_2); + assert_eq!(f16::FRAC_2_PI, frac_2_pi); + assert_eq!(f16::FRAC_2_SQRT_PI, frac_2_sqrt_pi); + assert_eq!(f16::FRAC_PI_2, frac_pi_2); + assert_eq!(f16::FRAC_PI_3, frac_pi_3); + assert_eq!(f16::FRAC_PI_4, frac_pi_4); + assert_eq!(f16::FRAC_PI_6, frac_pi_6); + assert_eq!(f16::FRAC_PI_8, frac_pi_8); + assert_eq!(f16::LN_10, ln_10); + assert_eq!(f16::LN_2, ln_2); + assert_eq!(f16::LOG10_E, log10_e); + assert_eq!(f16::LOG10_2, log10_2); + assert_eq!(f16::LOG2_E, log2_e); + assert_eq!(f16::LOG2_10, log2_10); + assert_eq!(f16::SQRT_2, sqrt_2); + } + + #[test] + fn test_nan_conversion_to_smaller() { + let nan64 = f64::from_bits(0x7FF0_0000_0000_0001u64); + let neg_nan64 = f64::from_bits(0xFFF0_0000_0000_0001u64); + let nan32 = f32::from_bits(0x7F80_0001u32); + let neg_nan32 = f32::from_bits(0xFF80_0001u32); + let nan32_from_64 = nan64 as f32; + let neg_nan32_from_64 = neg_nan64 as f32; + let nan16_from_64 = f16::from_f64(nan64); + let neg_nan16_from_64 = f16::from_f64(neg_nan64); + let nan16_from_32 = f16::from_f32(nan32); + let neg_nan16_from_32 = f16::from_f32(neg_nan32); + + assert!(nan64.is_nan() && nan64.is_sign_positive()); + assert!(neg_nan64.is_nan() && neg_nan64.is_sign_negative()); + assert!(nan32.is_nan() && nan32.is_sign_positive()); + assert!(neg_nan32.is_nan() && neg_nan32.is_sign_negative()); + + // f32/f64 NaN conversion sign is non-deterministic: https://github.com/VoidStarKat/half-rs/issues/103 + assert!(nan32_from_64.is_nan()); + assert!(neg_nan32_from_64.is_nan()); + assert!(nan16_from_64.is_nan()); + assert!(neg_nan16_from_64.is_nan()); + assert!(nan16_from_32.is_nan()); + assert!(neg_nan16_from_32.is_nan()); + } + + #[test] + fn test_nan_conversion_to_larger() { + let nan16 = f16::from_bits(0x7C01u16); + let neg_nan16 = f16::from_bits(0xFC01u16); + let nan32 = f32::from_bits(0x7F80_0001u32); + let neg_nan32 = f32::from_bits(0xFF80_0001u32); + let nan32_from_16 = f32::from(nan16); + let neg_nan32_from_16 = f32::from(neg_nan16); + let nan64_from_16 = f64::from(nan16); + let neg_nan64_from_16 = f64::from(neg_nan16); + let nan64_from_32 = f64::from(nan32); + let neg_nan64_from_32 = f64::from(neg_nan32); + + assert!(nan16.is_nan() && nan16.is_sign_positive()); + assert!(neg_nan16.is_nan() && neg_nan16.is_sign_negative()); + assert!(nan32.is_nan() && nan32.is_sign_positive()); + assert!(neg_nan32.is_nan() && neg_nan32.is_sign_negative()); + + // f32/f64 NaN conversion sign is non-deterministic: https://github.com/VoidStarKat/half-rs/issues/103 + assert!(nan32_from_16.is_nan()); + assert!(neg_nan32_from_16.is_nan()); + assert!(nan64_from_16.is_nan()); + assert!(neg_nan64_from_16.is_nan()); + assert!(nan64_from_32.is_nan()); + assert!(neg_nan64_from_32.is_nan()); + } + + #[test] + #[cfg_attr(miri, ignore)] + fn test_f16_to_f32() { + let f = f16::from_f32(7.0); + assert_eq!(f.to_f32(), 7.0f32); + + // 7.1 is NOT exactly representable in 16-bit, it's rounded + let f = f16::from_f32(7.1); + let diff = (f.to_f32() - 7.1f32).abs(); + // diff 
must be <= 4 * EPSILON, as 7 has two more significant bits than 1 + assert!(diff <= 4.0 * f16::EPSILON.to_f32()); + + assert_eq!(f16::from_bits(0x0000_0001).to_f32(), 2.0f32.powi(-24)); + assert_eq!(f16::from_bits(0x0000_0005).to_f32(), 5.0 * 2.0f32.powi(-24)); + + assert_eq!(f16::from_bits(0x0000_0001), f16::from_f32(2.0f32.powi(-24))); + assert_eq!( + f16::from_bits(0x0000_0005), + f16::from_f32(5.0 * 2.0f32.powi(-24)) + ); + } + + #[test] + #[cfg_attr(miri, ignore)] + fn test_f16_to_f64() { + let f = f16::from_f64(7.0); + assert_eq!(f.to_f64(), 7.0f64); + + // 7.1 is NOT exactly representable in 16-bit, it's rounded + let f = f16::from_f64(7.1); + let diff = (f.to_f64() - 7.1f64).abs(); + // diff must be <= 4 * EPSILON, as 7 has two more significant bits than 1 + assert!(diff <= 4.0 * f16::EPSILON.to_f64()); + + assert_eq!(f16::from_bits(0x0000_0001).to_f64(), 2.0f64.powi(-24)); + assert_eq!(f16::from_bits(0x0000_0005).to_f64(), 5.0 * 2.0f64.powi(-24)); + + assert_eq!(f16::from_bits(0x0000_0001), f16::from_f64(2.0f64.powi(-24))); + assert_eq!( + f16::from_bits(0x0000_0005), + f16::from_f64(5.0 * 2.0f64.powi(-24)) + ); + } + + #[test] + fn test_comparisons() { + let zero = f16::from_f64(0.0); + let one = f16::from_f64(1.0); + let neg_zero = f16::from_f64(-0.0); + let neg_one = f16::from_f64(-1.0); + + assert_eq!(zero.partial_cmp(&neg_zero), Some(Ordering::Equal)); + assert_eq!(neg_zero.partial_cmp(&zero), Some(Ordering::Equal)); + assert!(zero == neg_zero); + assert!(neg_zero == zero); + assert!(!(zero != neg_zero)); + assert!(!(neg_zero != zero)); + assert!(!(zero < neg_zero)); + assert!(!(neg_zero < zero)); + assert!(zero <= neg_zero); + assert!(neg_zero <= zero); + assert!(!(zero > neg_zero)); + assert!(!(neg_zero > zero)); + assert!(zero >= neg_zero); + assert!(neg_zero >= zero); + + assert_eq!(one.partial_cmp(&neg_zero), Some(Ordering::Greater)); + assert_eq!(neg_zero.partial_cmp(&one), Some(Ordering::Less)); + assert!(!(one == neg_zero)); + assert!(!(neg_zero == one)); + assert!(one != neg_zero); + assert!(neg_zero != one); + assert!(!(one < neg_zero)); + assert!(neg_zero < one); + assert!(!(one <= neg_zero)); + assert!(neg_zero <= one); + assert!(one > neg_zero); + assert!(!(neg_zero > one)); + assert!(one >= neg_zero); + assert!(!(neg_zero >= one)); + + assert_eq!(one.partial_cmp(&neg_one), Some(Ordering::Greater)); + assert_eq!(neg_one.partial_cmp(&one), Some(Ordering::Less)); + assert!(!(one == neg_one)); + assert!(!(neg_one == one)); + assert!(one != neg_one); + assert!(neg_one != one); + assert!(!(one < neg_one)); + assert!(neg_one < one); + assert!(!(one <= neg_one)); + assert!(neg_one <= one); + assert!(one > neg_one); + assert!(!(neg_one > one)); + assert!(one >= neg_one); + assert!(!(neg_one >= one)); + } + + #[test] + #[allow(clippy::erasing_op, clippy::identity_op)] + #[cfg_attr(miri, ignore)] + fn round_to_even_f32() { + // smallest positive subnormal = 0b0.0000_0000_01 * 2^-14 = 2^-24 + let min_sub = f16::from_bits(1); + let min_sub_f = (-24f32).exp2(); + assert_eq!(f16::from_f32(min_sub_f).to_bits(), min_sub.to_bits()); + assert_eq!(f32::from(min_sub).to_bits(), min_sub_f.to_bits()); + + // 0.0000000000_011111 rounded to 0.0000000000 (< tie, no rounding) + // 0.0000000000_100000 rounded to 0.0000000000 (tie and even, remains at even) + // 0.0000000000_100001 rounded to 0.0000000001 (> tie, rounds up) + assert_eq!( + f16::from_f32(min_sub_f * 0.49).to_bits(), + min_sub.to_bits() * 0 + ); + assert_eq!( + f16::from_f32(min_sub_f * 0.50).to_bits(), + min_sub.to_bits() * 
0 + ); + assert_eq!( + f16::from_f32(min_sub_f * 0.51).to_bits(), + min_sub.to_bits() * 1 + ); + + // 0.0000000001_011111 rounded to 0.0000000001 (< tie, no rounding) + // 0.0000000001_100000 rounded to 0.0000000010 (tie and odd, rounds up to even) + // 0.0000000001_100001 rounded to 0.0000000010 (> tie, rounds up) + assert_eq!( + f16::from_f32(min_sub_f * 1.49).to_bits(), + min_sub.to_bits() * 1 + ); + assert_eq!( + f16::from_f32(min_sub_f * 1.50).to_bits(), + min_sub.to_bits() * 2 + ); + assert_eq!( + f16::from_f32(min_sub_f * 1.51).to_bits(), + min_sub.to_bits() * 2 + ); + + // 0.0000000010_011111 rounded to 0.0000000010 (< tie, no rounding) + // 0.0000000010_100000 rounded to 0.0000000010 (tie and even, remains at even) + // 0.0000000010_100001 rounded to 0.0000000011 (> tie, rounds up) + assert_eq!( + f16::from_f32(min_sub_f * 2.49).to_bits(), + min_sub.to_bits() * 2 + ); + assert_eq!( + f16::from_f32(min_sub_f * 2.50).to_bits(), + min_sub.to_bits() * 2 + ); + assert_eq!( + f16::from_f32(min_sub_f * 2.51).to_bits(), + min_sub.to_bits() * 3 + ); + + assert_eq!( + f16::from_f32(2000.49f32).to_bits(), + f16::from_f32(2000.0).to_bits() + ); + assert_eq!( + f16::from_f32(2000.50f32).to_bits(), + f16::from_f32(2000.0).to_bits() + ); + assert_eq!( + f16::from_f32(2000.51f32).to_bits(), + f16::from_f32(2001.0).to_bits() + ); + assert_eq!( + f16::from_f32(2001.49f32).to_bits(), + f16::from_f32(2001.0).to_bits() + ); + assert_eq!( + f16::from_f32(2001.50f32).to_bits(), + f16::from_f32(2002.0).to_bits() + ); + assert_eq!( + f16::from_f32(2001.51f32).to_bits(), + f16::from_f32(2002.0).to_bits() + ); + assert_eq!( + f16::from_f32(2002.49f32).to_bits(), + f16::from_f32(2002.0).to_bits() + ); + assert_eq!( + f16::from_f32(2002.50f32).to_bits(), + f16::from_f32(2002.0).to_bits() + ); + assert_eq!( + f16::from_f32(2002.51f32).to_bits(), + f16::from_f32(2003.0).to_bits() + ); + } + + #[test] + #[allow(clippy::erasing_op, clippy::identity_op)] + #[cfg_attr(miri, ignore)] + fn round_to_even_f64() { + // smallest positive subnormal = 0b0.0000_0000_01 * 2^-14 = 2^-24 + let min_sub = f16::from_bits(1); + let min_sub_f = (-24f64).exp2(); + assert_eq!(f16::from_f64(min_sub_f).to_bits(), min_sub.to_bits()); + assert_eq!(f64::from(min_sub).to_bits(), min_sub_f.to_bits()); + + // 0.0000000000_011111 rounded to 0.0000000000 (< tie, no rounding) + // 0.0000000000_100000 rounded to 0.0000000000 (tie and even, remains at even) + // 0.0000000000_100001 rounded to 0.0000000001 (> tie, rounds up) + assert_eq!( + f16::from_f64(min_sub_f * 0.49).to_bits(), + min_sub.to_bits() * 0 + ); + assert_eq!( + f16::from_f64(min_sub_f * 0.50).to_bits(), + min_sub.to_bits() * 0 + ); + assert_eq!( + f16::from_f64(min_sub_f * 0.51).to_bits(), + min_sub.to_bits() * 1 + ); + + // 0.0000000001_011111 rounded to 0.0000000001 (< tie, no rounding) + // 0.0000000001_100000 rounded to 0.0000000010 (tie and odd, rounds up to even) + // 0.0000000001_100001 rounded to 0.0000000010 (> tie, rounds up) + assert_eq!( + f16::from_f64(min_sub_f * 1.49).to_bits(), + min_sub.to_bits() * 1 + ); + assert_eq!( + f16::from_f64(min_sub_f * 1.50).to_bits(), + min_sub.to_bits() * 2 + ); + assert_eq!( + f16::from_f64(min_sub_f * 1.51).to_bits(), + min_sub.to_bits() * 2 + ); + + // 0.0000000010_011111 rounded to 0.0000000010 (< tie, no rounding) + // 0.0000000010_100000 rounded to 0.0000000010 (tie and even, remains at even) + // 0.0000000010_100001 rounded to 0.0000000011 (> tie, rounds up) + assert_eq!( + f16::from_f64(min_sub_f * 2.49).to_bits(), + 
min_sub.to_bits() * 2 + ); + assert_eq!( + f16::from_f64(min_sub_f * 2.50).to_bits(), + min_sub.to_bits() * 2 + ); + assert_eq!( + f16::from_f64(min_sub_f * 2.51).to_bits(), + min_sub.to_bits() * 3 + ); + + assert_eq!( + f16::from_f64(2000.49f64).to_bits(), + f16::from_f64(2000.0).to_bits() + ); + assert_eq!( + f16::from_f64(2000.50f64).to_bits(), + f16::from_f64(2000.0).to_bits() + ); + assert_eq!( + f16::from_f64(2000.51f64).to_bits(), + f16::from_f64(2001.0).to_bits() + ); + assert_eq!( + f16::from_f64(2001.49f64).to_bits(), + f16::from_f64(2001.0).to_bits() + ); + assert_eq!( + f16::from_f64(2001.50f64).to_bits(), + f16::from_f64(2002.0).to_bits() + ); + assert_eq!( + f16::from_f64(2001.51f64).to_bits(), + f16::from_f64(2002.0).to_bits() + ); + assert_eq!( + f16::from_f64(2002.49f64).to_bits(), + f16::from_f64(2002.0).to_bits() + ); + assert_eq!( + f16::from_f64(2002.50f64).to_bits(), + f16::from_f64(2002.0).to_bits() + ); + assert_eq!( + f16::from_f64(2002.51f64).to_bits(), + f16::from_f64(2003.0).to_bits() + ); + } + + #[test] + fn arithmetic() { + assert_eq!(f16::ONE + f16::ONE, f16::from_f32(2.)); + assert_eq!(f16::ONE - f16::ONE, f16::ZERO); + assert_eq!(f16::ONE * f16::ONE, f16::ONE); + assert_eq!(f16::from_f32(2.) * f16::from_f32(2.), f16::from_f32(4.)); + assert_eq!(f16::ONE / f16::ONE, f16::ONE); + assert_eq!(f16::from_f32(4.) / f16::from_f32(2.), f16::from_f32(2.)); + assert_eq!(f16::from_f32(4.) % f16::from_f32(3.), f16::from_f32(1.)); + } + + #[cfg(feature = "std")] + #[test] + fn formatting() { + let f = f16::from_f32(0.1152344); + + assert_eq!(format!("{:.3}", f), "0.115"); + assert_eq!(format!("{:.4}", f), "0.1152"); + assert_eq!(format!("{:+.4}", f), "+0.1152"); + assert_eq!(format!("{:>+10.4}", f), " +0.1152"); + + assert_eq!(format!("{:.3?}", f), "0.115"); + assert_eq!(format!("{:.4?}", f), "0.1152"); + assert_eq!(format!("{:+.4?}", f), "+0.1152"); + assert_eq!(format!("{:>+10.4?}", f), " +0.1152"); + } + + impl quickcheck::Arbitrary for f16 { + fn arbitrary(g: &mut quickcheck::Gen) -> Self { + f16(u16::arbitrary(g)) + } + } + + #[quickcheck] + fn qc_roundtrip_f16_f32_is_identity(f: f16) -> bool { + let roundtrip = f16::from_f32(f.to_f32()); + if f.is_nan() { + roundtrip.is_nan() && f.is_sign_negative() == roundtrip.is_sign_negative() + } else { + f.0 == roundtrip.0 + } + } + + #[quickcheck] + fn qc_roundtrip_f16_f64_is_identity(f: f16) -> bool { + let roundtrip = f16::from_f64(f.to_f64()); + if f.is_nan() { + roundtrip.is_nan() && f.is_sign_negative() == roundtrip.is_sign_negative() + } else { + f.0 == roundtrip.0 + } + } + + #[test] + fn test_max() { + let a = f16::from_f32(0.0); + let b = f16::from_f32(42.0); + assert_eq!(a.max(b), b); + + let a = f16::from_f32(42.0); + let b = f16::from_f32(0.0); + assert_eq!(a.max(b), a); + + let a = f16::NAN; + let b = f16::from_f32(42.0); + assert_eq!(a.max(b), b); + + let a = f16::from_f32(42.0); + let b = f16::NAN; + assert_eq!(a.max(b), a); + + let a = f16::NAN; + let b = f16::NAN; + assert!(a.max(b).is_nan()); + } + + #[test] + fn test_min() { + let a = f16::from_f32(0.0); + let b = f16::from_f32(42.0); + assert_eq!(a.min(b), a); + + let a = f16::from_f32(42.0); + let b = f16::from_f32(0.0); + assert_eq!(a.min(b), b); + + let a = f16::NAN; + let b = f16::from_f32(42.0); + assert_eq!(a.min(b), b); + + let a = f16::from_f32(42.0); + let b = f16::NAN; + assert_eq!(a.min(b), a); + + let a = f16::NAN; + let b = f16::NAN; + assert!(a.min(b).is_nan()); + } +} diff --git a/vendor/half/src/binary16/arch.rs 
b/vendor/half/src/binary16/arch.rs new file mode 100644 index 00000000..3414e325 --- /dev/null +++ b/vendor/half/src/binary16/arch.rs @@ -0,0 +1,921 @@ +#![allow(dead_code, unused_imports)] +use crate::leading_zeros::leading_zeros_u16; +use core::mem; + +#[cfg(any(target_arch = "x86", target_arch = "x86_64"))] +mod x86; + +#[cfg(target_arch = "aarch64")] +mod aarch64; + +#[cfg(all(feature = "nightly", target_arch = "loongarch64"))] +mod loongarch64; + +macro_rules! convert_fn { + (if x86_feature("f16c") { $f16c:expr } + else if aarch64_feature("fp16") { $aarch64:expr } + else if loongarch64_feature("lsx") { $loongarch64:expr } + else { $fallback:expr }) => { + cfg_if::cfg_if! { + // Use intrinsics directly when a compile target or using no_std + if #[cfg(all( + any(target_arch = "x86", target_arch = "x86_64"), + target_feature = "f16c" + ))] { + $f16c + } + else if #[cfg(all( + target_arch = "aarch64", + target_feature = "fp16" + ))] { + $aarch64 + } + else if #[cfg(all( + feature = "nightly", + target_arch = "loongarch64", + target_feature = "lsx" + ))] { + $loongarch64 + } + + // Use CPU feature detection if using std + else if #[cfg(all( + feature = "std", + any(target_arch = "x86", target_arch = "x86_64") + ))] { + use std::arch::is_x86_feature_detected; + if is_x86_feature_detected!("f16c") { + $f16c + } else { + $fallback + } + } + else if #[cfg(all( + feature = "std", + target_arch = "aarch64", + ))] { + use std::arch::is_aarch64_feature_detected; + if is_aarch64_feature_detected!("fp16") { + $aarch64 + } else { + $fallback + } + } + else if #[cfg(all( + feature = "std", + feature = "nightly", + target_arch = "loongarch64", + ))] { + use std::arch::is_loongarch_feature_detected; + if is_loongarch_feature_detected!("lsx") { + $loongarch64 + } else { + $fallback + } + } + + // Fallback to software + else { + $fallback + } + } + }; +} + +#[inline] +pub(crate) fn f32_to_f16(f: f32) -> u16 { + convert_fn! { + if x86_feature("f16c") { + unsafe { x86::f32_to_f16_x86_f16c(f) } + } else if aarch64_feature("fp16") { + unsafe { aarch64::f32_to_f16_fp16(f) } + } else if loongarch64_feature("lsx") { + unsafe { loongarch64::f32_to_f16_lsx(f) } + } else { + f32_to_f16_fallback(f) + } + } +} + +#[inline] +pub(crate) fn f64_to_f16(f: f64) -> u16 { + convert_fn! { + if x86_feature("f16c") { + unsafe { x86::f32_to_f16_x86_f16c(f as f32) } + } else if aarch64_feature("fp16") { + unsafe { aarch64::f64_to_f16_fp16(f) } + } else if loongarch64_feature("lsx") { + f64_to_f16_fallback(f) + } else { + f64_to_f16_fallback(f) + } + } +} + +#[inline] +pub(crate) fn f16_to_f32(i: u16) -> f32 { + convert_fn! { + if x86_feature("f16c") { + unsafe { x86::f16_to_f32_x86_f16c(i) } + } else if aarch64_feature("fp16") { + unsafe { aarch64::f16_to_f32_fp16(i) } + } else if loongarch64_feature("lsx") { + unsafe { loongarch64::f16_to_f32_lsx(i) } + } else { + f16_to_f32_fallback(i) + } + } +} + +#[inline] +pub(crate) fn f16_to_f64(i: u16) -> f64 { + convert_fn! { + if x86_feature("f16c") { + unsafe { x86::f16_to_f32_x86_f16c(i) as f64 } + } else if aarch64_feature("fp16") { + unsafe { aarch64::f16_to_f64_fp16(i) } + } else if loongarch64_feature("lsx") { + unsafe { loongarch64::f16_to_f32_lsx(i) as f64 } + } else { + f16_to_f64_fallback(i) + } + } +} + +#[inline] +pub(crate) fn f32x4_to_f16x4(f: &[f32; 4]) -> [u16; 4] { + convert_fn! 
{ + if x86_feature("f16c") { + unsafe { x86::f32x4_to_f16x4_x86_f16c(f) } + } else if aarch64_feature("fp16") { + unsafe { aarch64::f32x4_to_f16x4_fp16(f) } + } else if loongarch64_feature("lsx") { + unsafe { loongarch64::f32x4_to_f16x4_lsx(f) } + } else { + f32x4_to_f16x4_fallback(f) + } + } +} + +#[inline] +pub(crate) fn f16x4_to_f32x4(i: &[u16; 4]) -> [f32; 4] { + convert_fn! { + if x86_feature("f16c") { + unsafe { x86::f16x4_to_f32x4_x86_f16c(i) } + } else if aarch64_feature("fp16") { + unsafe { aarch64::f16x4_to_f32x4_fp16(i) } + } else if loongarch64_feature("lsx") { + unsafe { loongarch64::f16x4_to_f32x4_lsx(i) } + } else { + f16x4_to_f32x4_fallback(i) + } + } +} + +#[inline] +pub(crate) fn f64x4_to_f16x4(f: &[f64; 4]) -> [u16; 4] { + convert_fn! { + if x86_feature("f16c") { + unsafe { x86::f64x4_to_f16x4_x86_f16c(f) } + } else if aarch64_feature("fp16") { + unsafe { aarch64::f64x4_to_f16x4_fp16(f) } + } else if loongarch64_feature("lsx") { + unsafe { loongarch64::f64x4_to_f16x4_lsx(f) } + } else { + f64x4_to_f16x4_fallback(f) + } + } +} + +#[inline] +pub(crate) fn f16x4_to_f64x4(i: &[u16; 4]) -> [f64; 4] { + convert_fn! { + if x86_feature("f16c") { + unsafe { x86::f16x4_to_f64x4_x86_f16c(i) } + } else if aarch64_feature("fp16") { + unsafe { aarch64::f16x4_to_f64x4_fp16(i) } + } else if loongarch64_feature("lsx") { + unsafe { loongarch64::f16x4_to_f64x4_lsx(i) } + } else { + f16x4_to_f64x4_fallback(i) + } + } +} + +#[inline] +pub(crate) fn f32x8_to_f16x8(f: &[f32; 8]) -> [u16; 8] { + convert_fn! { + if x86_feature("f16c") { + unsafe { x86::f32x8_to_f16x8_x86_f16c(f) } + } else if aarch64_feature("fp16") { + { + let mut result = [0u16; 8]; + convert_chunked_slice_4(f.as_slice(), result.as_mut_slice(), + aarch64::f32x4_to_f16x4_fp16); + result + } + } else if loongarch64_feature("lsx") { + { + let mut result = [0u16; 8]; + convert_chunked_slice_4(f.as_slice(), result.as_mut_slice(), + loongarch64::f32x4_to_f16x4_lsx); + result + } + } else { + f32x8_to_f16x8_fallback(f) + } + } +} + +#[inline] +pub(crate) fn f16x8_to_f32x8(i: &[u16; 8]) -> [f32; 8] { + convert_fn! { + if x86_feature("f16c") { + unsafe { x86::f16x8_to_f32x8_x86_f16c(i) } + } else if aarch64_feature("fp16") { + { + let mut result = [0f32; 8]; + convert_chunked_slice_4(i.as_slice(), result.as_mut_slice(), + aarch64::f16x4_to_f32x4_fp16); + result + } + } else if loongarch64_feature("lsx") { + { + let mut result = [0f32; 8]; + convert_chunked_slice_4(i.as_slice(), result.as_mut_slice(), + loongarch64::f16x4_to_f32x4_lsx); + result + } + } else { + f16x8_to_f32x8_fallback(i) + } + } +} + +#[inline] +pub(crate) fn f64x8_to_f16x8(f: &[f64; 8]) -> [u16; 8] { + convert_fn! { + if x86_feature("f16c") { + unsafe { x86::f64x8_to_f16x8_x86_f16c(f) } + } else if aarch64_feature("fp16") { + { + let mut result = [0u16; 8]; + convert_chunked_slice_4(f.as_slice(), result.as_mut_slice(), + aarch64::f64x4_to_f16x4_fp16); + result + } + } else if loongarch64_feature("lsx") { + { + let mut result = [0u16; 8]; + convert_chunked_slice_4(f.as_slice(), result.as_mut_slice(), + loongarch64::f64x4_to_f16x4_lsx); + result + } + } else { + f64x8_to_f16x8_fallback(f) + } + } +} + +#[inline] +pub(crate) fn f16x8_to_f64x8(i: &[u16; 8]) -> [f64; 8] { + convert_fn! 
{ + if x86_feature("f16c") { + unsafe { x86::f16x8_to_f64x8_x86_f16c(i) } + } else if aarch64_feature("fp16") { + { + let mut result = [0f64; 8]; + convert_chunked_slice_4(i.as_slice(), result.as_mut_slice(), + aarch64::f16x4_to_f64x4_fp16); + result + } + } else if loongarch64_feature("lsx") { + { + let mut result = [0f64; 8]; + convert_chunked_slice_4(i.as_slice(), result.as_mut_slice(), + loongarch64::f16x4_to_f64x4_lsx); + result + } + } else { + f16x8_to_f64x8_fallback(i) + } + } +} + +#[inline] +pub(crate) fn f32_to_f16_slice(src: &[f32], dst: &mut [u16]) { + convert_fn! { + if x86_feature("f16c") { + convert_chunked_slice_8(src, dst, x86::f32x8_to_f16x8_x86_f16c, + x86::f32x4_to_f16x4_x86_f16c) + } else if aarch64_feature("fp16") { + convert_chunked_slice_4(src, dst, aarch64::f32x4_to_f16x4_fp16) + } else if loongarch64_feature("lsx") { + convert_chunked_slice_4(src, dst, loongarch64::f32x4_to_f16x4_lsx) + } else { + slice_fallback(src, dst, f32_to_f16_fallback) + } + } +} + +#[inline] +pub(crate) fn f16_to_f32_slice(src: &[u16], dst: &mut [f32]) { + convert_fn! { + if x86_feature("f16c") { + convert_chunked_slice_8(src, dst, x86::f16x8_to_f32x8_x86_f16c, + x86::f16x4_to_f32x4_x86_f16c) + } else if aarch64_feature("fp16") { + convert_chunked_slice_4(src, dst, aarch64::f16x4_to_f32x4_fp16) + } else if loongarch64_feature("lsx") { + convert_chunked_slice_4(src, dst, loongarch64::f16x4_to_f32x4_lsx) + } else { + slice_fallback(src, dst, f16_to_f32_fallback) + } + } +} + +#[inline] +pub(crate) fn f64_to_f16_slice(src: &[f64], dst: &mut [u16]) { + convert_fn! { + if x86_feature("f16c") { + convert_chunked_slice_8(src, dst, x86::f64x8_to_f16x8_x86_f16c, + x86::f64x4_to_f16x4_x86_f16c) + } else if aarch64_feature("fp16") { + convert_chunked_slice_4(src, dst, aarch64::f64x4_to_f16x4_fp16) + } else if loongarch64_feature("lsx") { + convert_chunked_slice_4(src, dst, loongarch64::f64x4_to_f16x4_lsx) + } else { + slice_fallback(src, dst, f64_to_f16_fallback) + } + } +} + +#[inline] +pub(crate) fn f16_to_f64_slice(src: &[u16], dst: &mut [f64]) { + convert_fn! { + if x86_feature("f16c") { + convert_chunked_slice_8(src, dst, x86::f16x8_to_f64x8_x86_f16c, + x86::f16x4_to_f64x4_x86_f16c) + } else if aarch64_feature("fp16") { + convert_chunked_slice_4(src, dst, aarch64::f16x4_to_f64x4_fp16) + } else if loongarch64_feature("lsx") { + convert_chunked_slice_4(src, dst, loongarch64::f16x4_to_f64x4_lsx) + } else { + slice_fallback(src, dst, f16_to_f64_fallback) + } + } +} + +macro_rules! math_fn { + (if aarch64_feature("fp16") { $aarch64:expr } + else { $fallback:expr }) => { + cfg_if::cfg_if! { + // Use intrinsics directly when a compile target or using no_std + if #[cfg(all( + target_arch = "aarch64", + target_feature = "fp16" + ))] { + $aarch64 + } + + // Use CPU feature detection if using std + else if #[cfg(all( + feature = "std", + target_arch = "aarch64", + not(target_feature = "fp16") + ))] { + use std::arch::is_aarch64_feature_detected; + if is_aarch64_feature_detected!("fp16") { + $aarch64 + } else { + $fallback + } + } + + // Fallback to software + else { + $fallback + } + } + }; +} + +#[inline] +pub(crate) fn add_f16(a: u16, b: u16) -> u16 { + math_fn! { + if aarch64_feature("fp16") { + unsafe { aarch64::add_f16_fp16(a, b) } + } else { + add_f16_fallback(a, b) + } + } +} + +#[inline] +pub(crate) fn subtract_f16(a: u16, b: u16) -> u16 { + math_fn! 
{ + if aarch64_feature("fp16") { + unsafe { aarch64::subtract_f16_fp16(a, b) } + } else { + subtract_f16_fallback(a, b) + } + } +} + +#[inline] +pub(crate) fn multiply_f16(a: u16, b: u16) -> u16 { + math_fn! { + if aarch64_feature("fp16") { + unsafe { aarch64::multiply_f16_fp16(a, b) } + } else { + multiply_f16_fallback(a, b) + } + } +} + +#[inline] +pub(crate) fn divide_f16(a: u16, b: u16) -> u16 { + math_fn! { + if aarch64_feature("fp16") { + unsafe { aarch64::divide_f16_fp16(a, b) } + } else { + divide_f16_fallback(a, b) + } + } +} + +#[inline] +pub(crate) fn remainder_f16(a: u16, b: u16) -> u16 { + remainder_f16_fallback(a, b) +} + +#[inline] +pub(crate) fn product_f16<I: Iterator<Item = u16>>(iter: I) -> u16 { + math_fn! { + if aarch64_feature("fp16") { + iter.fold(0, |acc, x| unsafe { aarch64::multiply_f16_fp16(acc, x) }) + } else { + product_f16_fallback(iter) + } + } +} + +#[inline] +pub(crate) fn sum_f16<I: Iterator<Item = u16>>(iter: I) -> u16 { + math_fn! { + if aarch64_feature("fp16") { + iter.fold(0, |acc, x| unsafe { aarch64::add_f16_fp16(acc, x) }) + } else { + sum_f16_fallback(iter) + } + } +} + +/// Chunks sliced into x8 or x4 arrays +#[inline] +fn convert_chunked_slice_8<S: Copy + Default, D: Copy>( + src: &[S], + dst: &mut [D], + fn8: unsafe fn(&[S; 8]) -> [D; 8], + fn4: unsafe fn(&[S; 4]) -> [D; 4], +) { + assert_eq!(src.len(), dst.len()); + + // TODO: Can be further optimized with array_chunks when it becomes stabilized + + let src_chunks = src.chunks_exact(8); + let mut dst_chunks = dst.chunks_exact_mut(8); + let src_remainder = src_chunks.remainder(); + for (s, d) in src_chunks.zip(&mut dst_chunks) { + let chunk: &[S; 8] = s.try_into().unwrap(); + d.copy_from_slice(unsafe { &fn8(chunk) }); + } + + // Process remainder + if src_remainder.len() > 4 { + let mut buf: [S; 8] = Default::default(); + buf[..src_remainder.len()].copy_from_slice(src_remainder); + let vec = unsafe { fn8(&buf) }; + let dst_remainder = dst_chunks.into_remainder(); + dst_remainder.copy_from_slice(&vec[..dst_remainder.len()]); + } else if !src_remainder.is_empty() { + let mut buf: [S; 4] = Default::default(); + buf[..src_remainder.len()].copy_from_slice(src_remainder); + let vec = unsafe { fn4(&buf) }; + let dst_remainder = dst_chunks.into_remainder(); + dst_remainder.copy_from_slice(&vec[..dst_remainder.len()]); + } +} + +/// Chunks sliced into x4 arrays +#[inline] +fn convert_chunked_slice_4<S: Copy + Default, D: Copy>( + src: &[S], + dst: &mut [D], + f: unsafe fn(&[S; 4]) -> [D; 4], +) { + assert_eq!(src.len(), dst.len()); + + // TODO: Can be further optimized with array_chunks when it becomes stabilized + + let src_chunks = src.chunks_exact(4); + let mut dst_chunks = dst.chunks_exact_mut(4); + let src_remainder = src_chunks.remainder(); + for (s, d) in src_chunks.zip(&mut dst_chunks) { + let chunk: &[S; 4] = s.try_into().unwrap(); + d.copy_from_slice(unsafe { &f(chunk) }); + } + + // Process remainder + if !src_remainder.is_empty() { + let mut buf: [S; 4] = Default::default(); + buf[..src_remainder.len()].copy_from_slice(src_remainder); + let vec = unsafe { f(&buf) }; + let dst_remainder = dst_chunks.into_remainder(); + dst_remainder.copy_from_slice(&vec[..dst_remainder.len()]); + } +} + +/////////////// Fallbacks //////////////// + +// In the below functions, round to nearest, with ties to even. +// Let us call the most significant bit that will be shifted out the round_bit. +// +// Round up if either +// a) Removed part > tie. 
+// (mantissa & round_bit) != 0 && (mantissa & (round_bit - 1)) != 0 +// b) Removed part == tie, and retained part is odd. +// (mantissa & round_bit) != 0 && (mantissa & (2 * round_bit)) != 0 +// (If removed part == tie and retained part is even, do not round up.) +// These two conditions can be combined into one: +// (mantissa & round_bit) != 0 && (mantissa & ((round_bit - 1) | (2 * round_bit))) != 0 +// which can be simplified into +// (mantissa & round_bit) != 0 && (mantissa & (3 * round_bit - 1)) != 0 + +#[inline] +pub(crate) const fn f32_to_f16_fallback(value: f32) -> u16 { + // TODO: Replace mem::transmute with to_bits() once to_bits is const-stabilized + // Convert to raw bytes + let x: u32 = unsafe { mem::transmute::<f32, u32>(value) }; + + // Extract IEEE754 components + let sign = x & 0x8000_0000u32; + let exp = x & 0x7F80_0000u32; + let man = x & 0x007F_FFFFu32; + + // Check for all exponent bits being set, which is Infinity or NaN + if exp == 0x7F80_0000u32 { + // Set mantissa MSB for NaN (and also keep shifted mantissa bits) + let nan_bit = if man == 0 { 0 } else { 0x0200u32 }; + return ((sign >> 16) | 0x7C00u32 | nan_bit | (man >> 13)) as u16; + } + + // The number is normalized, start assembling half precision version + let half_sign = sign >> 16; + // Unbias the exponent, then bias for half precision + let unbiased_exp = ((exp >> 23) as i32) - 127; + let half_exp = unbiased_exp + 15; + + // Check for exponent overflow, return +infinity + if half_exp >= 0x1F { + return (half_sign | 0x7C00u32) as u16; + } + + // Check for underflow + if half_exp <= 0 { + // Check mantissa for what we can do + if 14 - half_exp > 24 { + // No rounding possibility, so this is a full underflow, return signed zero + return half_sign as u16; + } + // Don't forget about hidden leading mantissa bit when assembling mantissa + let man = man | 0x0080_0000u32; + let mut half_man = man >> (14 - half_exp); + // Check for rounding (see comment above functions) + let round_bit = 1 << (13 - half_exp); + if (man & round_bit) != 0 && (man & (3 * round_bit - 1)) != 0 { + half_man += 1; + } + // No exponent for subnormals + return (half_sign | half_man) as u16; + } + + // Rebias the exponent + let half_exp = (half_exp as u32) << 10; + let half_man = man >> 13; + // Check for rounding (see comment above functions) + let round_bit = 0x0000_1000u32; + if (man & round_bit) != 0 && (man & (3 * round_bit - 1)) != 0 { + // Round it + ((half_sign | half_exp | half_man) + 1) as u16 + } else { + (half_sign | half_exp | half_man) as u16 + } +} + +#[inline] +pub(crate) const fn f64_to_f16_fallback(value: f64) -> u16 { + // Convert to raw bytes, truncating the last 32-bits of mantissa; that precision will always + // be lost on half-precision. + // TODO: Replace mem::transmute with to_bits() once to_bits is const-stabilized + let val: u64 = unsafe { mem::transmute::<f64, u64>(value) }; + let x = (val >> 32) as u32; + + // Extract IEEE754 components + let sign = x & 0x8000_0000u32; + let exp = x & 0x7FF0_0000u32; + let man = x & 0x000F_FFFFu32; + + // Check for all exponent bits being set, which is Infinity or NaN + if exp == 0x7FF0_0000u32 { + // Set mantissa MSB for NaN (and also keep shifted mantissa bits). + // We also have to check the last 32 bits. 
+ let nan_bit = if man == 0 && (val as u32 == 0) { + 0 + } else { + 0x0200u32 + }; + return ((sign >> 16) | 0x7C00u32 | nan_bit | (man >> 10)) as u16; + } + + // The number is normalized, start assembling half precision version + let half_sign = sign >> 16; + // Unbias the exponent, then bias for half precision + let unbiased_exp = ((exp >> 20) as i64) - 1023; + let half_exp = unbiased_exp + 15; + + // Check for exponent overflow, return +infinity + if half_exp >= 0x1F { + return (half_sign | 0x7C00u32) as u16; + } + + // Check for underflow + if half_exp <= 0 { + // Check mantissa for what we can do + if 10 - half_exp > 21 { + // No rounding possibility, so this is a full underflow, return signed zero + return half_sign as u16; + } + // Don't forget about hidden leading mantissa bit when assembling mantissa + let man = man | 0x0010_0000u32; + let mut half_man = man >> (11 - half_exp); + // Check for rounding (see comment above functions) + let round_bit = 1 << (10 - half_exp); + if (man & round_bit) != 0 && (man & (3 * round_bit - 1)) != 0 { + half_man += 1; + } + // No exponent for subnormals + return (half_sign | half_man) as u16; + } + + // Rebias the exponent + let half_exp = (half_exp as u32) << 10; + let half_man = man >> 10; + // Check for rounding (see comment above functions) + let round_bit = 0x0000_0200u32; + if (man & round_bit) != 0 && (man & (3 * round_bit - 1)) != 0 { + // Round it + ((half_sign | half_exp | half_man) + 1) as u16 + } else { + (half_sign | half_exp | half_man) as u16 + } +} + +#[inline] +pub(crate) const fn f16_to_f32_fallback(i: u16) -> f32 { + // Check for signed zero + // TODO: Replace mem::transmute with from_bits() once from_bits is const-stabilized + if i & 0x7FFFu16 == 0 { + return unsafe { mem::transmute::<u32, f32>((i as u32) << 16) }; + } + + let half_sign = (i & 0x8000u16) as u32; + let half_exp = (i & 0x7C00u16) as u32; + let half_man = (i & 0x03FFu16) as u32; + + // Check for an infinity or NaN when all exponent bits set + if half_exp == 0x7C00u32 { + // Check for signed infinity if mantissa is zero + if half_man == 0 { + return unsafe { mem::transmute::<u32, f32>((half_sign << 16) | 0x7F80_0000u32) }; + } else { + // NaN, keep current mantissa but also set most significiant mantissa bit + return unsafe { + mem::transmute::<u32, f32>((half_sign << 16) | 0x7FC0_0000u32 | (half_man << 13)) + }; + } + } + + // Calculate single-precision components with adjusted exponent + let sign = half_sign << 16; + // Unbias exponent + let unbiased_exp = ((half_exp as i32) >> 10) - 15; + + // Check for subnormals, which will be normalized by adjusting exponent + if half_exp == 0 { + // Calculate how much to adjust the exponent by + let e = leading_zeros_u16(half_man as u16) - 6; + + // Rebias and adjust exponent + let exp = (127 - 15 - e) << 23; + let man = (half_man << (14 + e)) & 0x7F_FF_FFu32; + return unsafe { mem::transmute::<u32, f32>(sign | exp | man) }; + } + + // Rebias exponent for a normalized normal + let exp = ((unbiased_exp + 127) as u32) << 23; + let man = (half_man & 0x03FFu32) << 13; + unsafe { mem::transmute::<u32, f32>(sign | exp | man) } +} + +#[inline] +pub(crate) const fn f16_to_f64_fallback(i: u16) -> f64 { + // Check for signed zero + // TODO: Replace mem::transmute with from_bits() once from_bits is const-stabilized + if i & 0x7FFFu16 == 0 { + return unsafe { mem::transmute::<u64, f64>((i as u64) << 48) }; + } + + let half_sign = (i & 0x8000u16) as u64; + let half_exp = (i & 0x7C00u16) as u64; + let half_man = (i & 0x03FFu16) as u64; + + 
// Check for an infinity or NaN when all exponent bits set + if half_exp == 0x7C00u64 { + // Check for signed infinity if mantissa is zero + if half_man == 0 { + return unsafe { + mem::transmute::<u64, f64>((half_sign << 48) | 0x7FF0_0000_0000_0000u64) + }; + } else { + // NaN, keep current mantissa but also set most significiant mantissa bit + return unsafe { + mem::transmute::<u64, f64>( + (half_sign << 48) | 0x7FF8_0000_0000_0000u64 | (half_man << 42), + ) + }; + } + } + + // Calculate double-precision components with adjusted exponent + let sign = half_sign << 48; + // Unbias exponent + let unbiased_exp = ((half_exp as i64) >> 10) - 15; + + // Check for subnormals, which will be normalized by adjusting exponent + if half_exp == 0 { + // Calculate how much to adjust the exponent by + let e = leading_zeros_u16(half_man as u16) - 6; + + // Rebias and adjust exponent + let exp = ((1023 - 15 - e) as u64) << 52; + let man = (half_man << (43 + e)) & 0xF_FFFF_FFFF_FFFFu64; + return unsafe { mem::transmute::<u64, f64>(sign | exp | man) }; + } + + // Rebias exponent for a normalized normal + let exp = ((unbiased_exp + 1023) as u64) << 52; + let man = (half_man & 0x03FFu64) << 42; + unsafe { mem::transmute::<u64, f64>(sign | exp | man) } +} + +#[inline] +fn f16x4_to_f32x4_fallback(v: &[u16; 4]) -> [f32; 4] { + [ + f16_to_f32_fallback(v[0]), + f16_to_f32_fallback(v[1]), + f16_to_f32_fallback(v[2]), + f16_to_f32_fallback(v[3]), + ] +} + +#[inline] +fn f32x4_to_f16x4_fallback(v: &[f32; 4]) -> [u16; 4] { + [ + f32_to_f16_fallback(v[0]), + f32_to_f16_fallback(v[1]), + f32_to_f16_fallback(v[2]), + f32_to_f16_fallback(v[3]), + ] +} + +#[inline] +fn f16x4_to_f64x4_fallback(v: &[u16; 4]) -> [f64; 4] { + [ + f16_to_f64_fallback(v[0]), + f16_to_f64_fallback(v[1]), + f16_to_f64_fallback(v[2]), + f16_to_f64_fallback(v[3]), + ] +} + +#[inline] +fn f64x4_to_f16x4_fallback(v: &[f64; 4]) -> [u16; 4] { + [ + f64_to_f16_fallback(v[0]), + f64_to_f16_fallback(v[1]), + f64_to_f16_fallback(v[2]), + f64_to_f16_fallback(v[3]), + ] +} + +#[inline] +fn f16x8_to_f32x8_fallback(v: &[u16; 8]) -> [f32; 8] { + [ + f16_to_f32_fallback(v[0]), + f16_to_f32_fallback(v[1]), + f16_to_f32_fallback(v[2]), + f16_to_f32_fallback(v[3]), + f16_to_f32_fallback(v[4]), + f16_to_f32_fallback(v[5]), + f16_to_f32_fallback(v[6]), + f16_to_f32_fallback(v[7]), + ] +} + +#[inline] +fn f32x8_to_f16x8_fallback(v: &[f32; 8]) -> [u16; 8] { + [ + f32_to_f16_fallback(v[0]), + f32_to_f16_fallback(v[1]), + f32_to_f16_fallback(v[2]), + f32_to_f16_fallback(v[3]), + f32_to_f16_fallback(v[4]), + f32_to_f16_fallback(v[5]), + f32_to_f16_fallback(v[6]), + f32_to_f16_fallback(v[7]), + ] +} + +#[inline] +fn f16x8_to_f64x8_fallback(v: &[u16; 8]) -> [f64; 8] { + [ + f16_to_f64_fallback(v[0]), + f16_to_f64_fallback(v[1]), + f16_to_f64_fallback(v[2]), + f16_to_f64_fallback(v[3]), + f16_to_f64_fallback(v[4]), + f16_to_f64_fallback(v[5]), + f16_to_f64_fallback(v[6]), + f16_to_f64_fallback(v[7]), + ] +} + +#[inline] +fn f64x8_to_f16x8_fallback(v: &[f64; 8]) -> [u16; 8] { + [ + f64_to_f16_fallback(v[0]), + f64_to_f16_fallback(v[1]), + f64_to_f16_fallback(v[2]), + f64_to_f16_fallback(v[3]), + f64_to_f16_fallback(v[4]), + f64_to_f16_fallback(v[5]), + f64_to_f16_fallback(v[6]), + f64_to_f16_fallback(v[7]), + ] +} + +#[inline] +fn slice_fallback<S: Copy, D>(src: &[S], dst: &mut [D], f: fn(S) -> D) { + assert_eq!(src.len(), dst.len()); + for (s, d) in src.iter().copied().zip(dst.iter_mut()) { + *d = f(s); + } +} + +#[inline] +fn add_f16_fallback(a: u16, b: u16) -> u16 { + 
f32_to_f16(f16_to_f32(a) + f16_to_f32(b)) +} + +#[inline] +fn subtract_f16_fallback(a: u16, b: u16) -> u16 { + f32_to_f16(f16_to_f32(a) - f16_to_f32(b)) +} + +#[inline] +fn multiply_f16_fallback(a: u16, b: u16) -> u16 { + f32_to_f16(f16_to_f32(a) * f16_to_f32(b)) +} + +#[inline] +fn divide_f16_fallback(a: u16, b: u16) -> u16 { + f32_to_f16(f16_to_f32(a) / f16_to_f32(b)) +} + +#[inline] +fn remainder_f16_fallback(a: u16, b: u16) -> u16 { + f32_to_f16(f16_to_f32(a) % f16_to_f32(b)) +} + +#[inline] +fn product_f16_fallback<I: Iterator<Item = u16>>(iter: I) -> u16 { + f32_to_f16(iter.map(f16_to_f32).product()) +} + +#[inline] +fn sum_f16_fallback<I: Iterator<Item = u16>>(iter: I) -> u16 { + f32_to_f16(iter.map(f16_to_f32).sum()) +} + +// TODO SIMD arithmetic diff --git a/vendor/half/src/binary16/arch/aarch64.rs b/vendor/half/src/binary16/arch/aarch64.rs new file mode 100644 index 00000000..e25971a4 --- /dev/null +++ b/vendor/half/src/binary16/arch/aarch64.rs @@ -0,0 +1,173 @@ +use core::{ + arch::{ + aarch64::{float32x4_t, float64x2_t, uint16x4_t}, + asm, + }, + mem::MaybeUninit, + ptr, +}; +use zerocopy::transmute; + +#[target_feature(enable = "fp16")] +#[inline] +pub(super) unsafe fn f16_to_f32_fp16(i: u16) -> f32 { + let result: f32; + asm!( + "fcvt {0:s}, {1:h}", + out(vreg) result, + in(vreg) i, + options(pure, nomem, nostack, preserves_flags)); + result +} + +#[target_feature(enable = "fp16")] +#[inline] +pub(super) unsafe fn f16_to_f64_fp16(i: u16) -> f64 { + let result: f64; + asm!( + "fcvt {0:d}, {1:h}", + out(vreg) result, + in(vreg) i, + options(pure, nomem, nostack, preserves_flags)); + result +} + +#[target_feature(enable = "fp16")] +#[inline] +pub(super) unsafe fn f32_to_f16_fp16(f: f32) -> u16 { + let result: u16; + asm!( + "fcvt {0:h}, {1:s}", + out(vreg) result, + in(vreg) f, + options(pure, nomem, nostack, preserves_flags)); + result +} + +#[target_feature(enable = "fp16")] +#[inline] +pub(super) unsafe fn f64_to_f16_fp16(f: f64) -> u16 { + let result: u16; + asm!( + "fcvt {0:h}, {1:d}", + out(vreg) result, + in(vreg) f, + options(pure, nomem, nostack, preserves_flags)); + result +} + +#[target_feature(enable = "fp16")] +#[inline] +pub(super) unsafe fn f16x4_to_f32x4_fp16(v: &[u16; 4]) -> [f32; 4] { + let vec: uint16x4_t = transmute!(*v); + let result: float32x4_t; + asm!( + "fcvtl {0:v}.4s, {1:v}.4h", + out(vreg) result, + in(vreg) vec, + options(pure, nomem, nostack)); + transmute!(result) +} + +#[target_feature(enable = "fp16")] +#[inline] +pub(super) unsafe fn f32x4_to_f16x4_fp16(v: &[f32; 4]) -> [u16; 4] { + let vec: float32x4_t = transmute!(*v); + let result: uint16x4_t; + asm!( + "fcvtn {0:v}.4h, {1:v}.4s", + out(vreg) result, + in(vreg) vec, + options(pure, nomem, nostack)); + transmute!(result) +} + +#[target_feature(enable = "fp16")] +#[inline] +pub(super) unsafe fn f16x4_to_f64x4_fp16(v: &[u16; 4]) -> [f64; 4] { + let vec: uint16x4_t = transmute!(*v); + let low: float64x2_t; + let high: float64x2_t; + asm!( + "fcvtl {2:v}.4s, {3:v}.4h", // Convert to f32 + "fcvtl {0:v}.2d, {2:v}.2s", // Convert low part to f64 + "fcvtl2 {1:v}.2d, {2:v}.4s", // Convert high part to f64 + lateout(vreg) low, + lateout(vreg) high, + out(vreg) _, + in(vreg) vec, + options(pure, nomem, nostack)); + transmute!([low, high]) +} + +#[target_feature(enable = "fp16")] +#[inline] +pub(super) unsafe fn f64x4_to_f16x4_fp16(v: &[f64; 4]) -> [u16; 4] { + let mut low = MaybeUninit::<float64x2_t>::uninit(); + let mut high = MaybeUninit::<float64x2_t>::uninit(); + ptr::copy_nonoverlapping(v.as_ptr(), 
low.as_mut_ptr().cast(), 2); + ptr::copy_nonoverlapping(v[2..].as_ptr(), high.as_mut_ptr().cast(), 2); + let result: uint16x4_t; + asm!( + "fcvtn {1:v}.2s, {2:v}.2d", // Convert low to f32 + "fcvtn2 {1:v}.4s, {3:v}.2d", // Convert high to f32 + "fcvtn {0:v}.4h, {1:v}.4s", // Convert to f16 + lateout(vreg) result, + out(vreg) _, + in(vreg) low.assume_init(), + in(vreg) high.assume_init(), + options(pure, nomem, nostack)); + transmute!(result) +} + +#[target_feature(enable = "fp16")] +#[inline] +pub(super) unsafe fn add_f16_fp16(a: u16, b: u16) -> u16 { + let result: u16; + asm!( + "fadd {0:h}, {1:h}, {2:h}", + out(vreg) result, + in(vreg) a, + in(vreg) b, + options(pure, nomem, nostack)); + result +} + +#[target_feature(enable = "fp16")] +#[inline] +pub(super) unsafe fn subtract_f16_fp16(a: u16, b: u16) -> u16 { + let result: u16; + asm!( + "fsub {0:h}, {1:h}, {2:h}", + out(vreg) result, + in(vreg) a, + in(vreg) b, + options(pure, nomem, nostack)); + result +} + +#[target_feature(enable = "fp16")] +#[inline] +pub(super) unsafe fn multiply_f16_fp16(a: u16, b: u16) -> u16 { + let result: u16; + asm!( + "fmul {0:h}, {1:h}, {2:h}", + out(vreg) result, + in(vreg) a, + in(vreg) b, + options(pure, nomem, nostack)); + result +} + +#[target_feature(enable = "fp16")] +#[inline] +pub(super) unsafe fn divide_f16_fp16(a: u16, b: u16) -> u16 { + let result: u16; + asm!( + "fdiv {0:h}, {1:h}, {2:h}", + out(vreg) result, + in(vreg) a, + in(vreg) b, + options(pure, nomem, nostack)); + result +} diff --git a/vendor/half/src/binary16/arch/loongarch64.rs b/vendor/half/src/binary16/arch/loongarch64.rs new file mode 100644 index 00000000..bf2579e1 --- /dev/null +++ b/vendor/half/src/binary16/arch/loongarch64.rs @@ -0,0 +1,63 @@ +use core::{mem::MaybeUninit, ptr}; + +#[cfg(target_arch = "loongarch64")] +use core::arch::loongarch64::{lsx_vfcvt_h_s, lsx_vfcvtl_s_h, m128, m128i}; + +/////////////// loongarch64 lsx/lasx //////////////// + +#[target_feature(enable = "lsx")] +#[inline] +pub(super) unsafe fn f16_to_f32_lsx(i: u16) -> f32 { + let mut vec = MaybeUninit::<m128i>::zeroed(); + vec.as_mut_ptr().cast::<u16>().write(i); + let retval = lsx_vfcvtl_s_h(vec.assume_init()); + *(&retval as *const m128).cast() +} + +#[target_feature(enable = "lsx")] +#[inline] +pub(super) unsafe fn f32_to_f16_lsx(f: f32) -> u16 { + let mut vec = MaybeUninit::<m128>::zeroed(); + vec.as_mut_ptr().cast::<f32>().write(f); + let retval = lsx_vfcvt_h_s(vec.assume_init(), vec.assume_init()); + *(&retval as *const m128i).cast() +} + +#[target_feature(enable = "lsx")] +#[inline] +pub(super) unsafe fn f16x4_to_f32x4_lsx(v: &[u16; 4]) -> [f32; 4] { + let mut vec = MaybeUninit::<m128i>::zeroed(); + ptr::copy_nonoverlapping(v.as_ptr(), vec.as_mut_ptr().cast(), 4); + let retval = lsx_vfcvtl_s_h(vec.assume_init()); + *(&retval as *const m128).cast() +} + +#[target_feature(enable = "lsx")] +#[inline] +pub(super) unsafe fn f32x4_to_f16x4_lsx(v: &[f32; 4]) -> [u16; 4] { + let mut vec = MaybeUninit::<m128>::uninit(); + ptr::copy_nonoverlapping(v.as_ptr(), vec.as_mut_ptr().cast(), 4); + let retval = lsx_vfcvt_h_s(vec.assume_init(), vec.assume_init()); + *(&retval as *const m128i).cast() +} + +#[target_feature(enable = "lsx")] +#[inline] +pub(super) unsafe fn f16x4_to_f64x4_lsx(v: &[u16; 4]) -> [f64; 4] { + let array = f16x4_to_f32x4_lsx(v); + // Let compiler vectorize this regular cast for now. 
+ [ + array[0] as f64, + array[1] as f64, + array[2] as f64, + array[3] as f64, + ] +} + +#[target_feature(enable = "lsx")] +#[inline] +pub(super) unsafe fn f64x4_to_f16x4_lsx(v: &[f64; 4]) -> [u16; 4] { + // Let compiler vectorize this regular cast for now. + let v = [v[0] as f32, v[1] as f32, v[2] as f32, v[3] as f32]; + f32x4_to_f16x4_lsx(&v) +} diff --git a/vendor/half/src/binary16/arch/x86.rs b/vendor/half/src/binary16/arch/x86.rs new file mode 100644 index 00000000..4bb191ff --- /dev/null +++ b/vendor/half/src/binary16/arch/x86.rs @@ -0,0 +1,127 @@ +use core::{mem::MaybeUninit, ptr}; +use zerocopy::transmute; + +#[cfg(target_arch = "x86")] +use core::arch::x86::{ + __m128, __m128i, __m256, _mm256_cvtph_ps, _mm256_cvtps_ph, _mm_cvtph_ps, + _MM_FROUND_TO_NEAREST_INT, +}; +#[cfg(target_arch = "x86_64")] +use core::arch::x86_64::{ + __m128, __m128i, __m256, _mm256_cvtph_ps, _mm256_cvtps_ph, _mm_cvtph_ps, _mm_cvtps_ph, + _MM_FROUND_TO_NEAREST_INT, +}; + +#[cfg(target_arch = "x86")] +use core::arch::x86::_mm_cvtps_ph; + +use super::convert_chunked_slice_8; + +/////////////// x86/x86_64 f16c //////////////// + +#[target_feature(enable = "f16c")] +#[inline] +pub(super) unsafe fn f16_to_f32_x86_f16c(i: u16) -> f32 { + let vec: __m128i = transmute!([i, 0, 0, 0, 0, 0, 0, 0]); + let retval: [f32; 4] = transmute!(_mm_cvtph_ps(vec)); + retval[0] +} + +#[target_feature(enable = "f16c")] +#[inline] +pub(super) unsafe fn f32_to_f16_x86_f16c(f: f32) -> u16 { + let vec: __m128 = transmute!([f, 0.0, 0.0, 0.0]); + let retval = _mm_cvtps_ph(vec, _MM_FROUND_TO_NEAREST_INT); + let retval: [u16; 8] = transmute!(retval); + retval[0] +} + +#[target_feature(enable = "f16c")] +#[inline] +pub(super) unsafe fn f16x4_to_f32x4_x86_f16c(v: &[u16; 4]) -> [f32; 4] { + let vec: __m128i = transmute!([*v, [0, 0, 0, 0]]); + transmute!(_mm_cvtph_ps(vec)) +} + +#[target_feature(enable = "f16c")] +#[inline] +pub(super) unsafe fn f32x4_to_f16x4_x86_f16c(v: &[f32; 4]) -> [u16; 4] { + let vec: __m128 = zerocopy::transmute!(*v); + let retval = _mm_cvtps_ph(vec, _MM_FROUND_TO_NEAREST_INT); + let retval: [[u16; 4]; 2] = transmute!(retval); + retval[0] +} + +#[target_feature(enable = "f16c")] +#[inline] +pub(super) unsafe fn f16x4_to_f64x4_x86_f16c(v: &[u16; 4]) -> [f64; 4] { + let array = f16x4_to_f32x4_x86_f16c(v); + // Let compiler vectorize this regular cast for now. + // TODO: investigate auto-detecting sse2/avx convert features + [ + array[0] as f64, + array[1] as f64, + array[2] as f64, + array[3] as f64, + ] +} + +#[target_feature(enable = "f16c")] +#[inline] +pub(super) unsafe fn f64x4_to_f16x4_x86_f16c(v: &[f64; 4]) -> [u16; 4] { + // Let compiler vectorize this regular cast for now. + // TODO: investigate auto-detecting sse2/avx convert features + let v = [v[0] as f32, v[1] as f32, v[2] as f32, v[3] as f32]; + f32x4_to_f16x4_x86_f16c(&v) +} + +#[target_feature(enable = "f16c")] +#[inline] +pub(super) unsafe fn f16x8_to_f32x8_x86_f16c(v: &[u16; 8]) -> [f32; 8] { + let vec: __m128i = transmute!(*v); + transmute!(_mm256_cvtph_ps(vec)) +} + +#[target_feature(enable = "f16c")] +#[inline] +pub(super) unsafe fn f32x8_to_f16x8_x86_f16c(v: &[f32; 8]) -> [u16; 8] { + let vec: __m256 = transmute!(*v); + let retval = _mm256_cvtps_ph(vec, _MM_FROUND_TO_NEAREST_INT); + transmute!(retval) +} + +#[target_feature(enable = "f16c")] +#[inline] +pub(super) unsafe fn f16x8_to_f64x8_x86_f16c(v: &[u16; 8]) -> [f64; 8] { + let array = f16x8_to_f32x8_x86_f16c(v); + // Let compiler vectorize this regular cast for now. 
+ // TODO: investigate auto-detecting sse2/avx convert features + [ + array[0] as f64, + array[1] as f64, + array[2] as f64, + array[3] as f64, + array[4] as f64, + array[5] as f64, + array[6] as f64, + array[7] as f64, + ] +} + +#[target_feature(enable = "f16c")] +#[inline] +pub(super) unsafe fn f64x8_to_f16x8_x86_f16c(v: &[f64; 8]) -> [u16; 8] { + // Let compiler vectorize this regular cast for now. + // TODO: investigate auto-detecting sse2/avx convert features + let v = [ + v[0] as f32, + v[1] as f32, + v[2] as f32, + v[3] as f32, + v[4] as f32, + v[5] as f32, + v[6] as f32, + v[7] as f32, + ]; + f32x8_to_f16x8_x86_f16c(&v) +} diff --git a/vendor/half/src/leading_zeros.rs b/vendor/half/src/leading_zeros.rs new file mode 100644 index 00000000..bf2bbaa3 --- /dev/null +++ b/vendor/half/src/leading_zeros.rs @@ -0,0 +1,65 @@ +// https://doc.rust-lang.org/std/primitive.u16.html#method.leading_zeros + +#[cfg(not(any(all( + target_arch = "spirv", + not(all( + target_feature = "IntegerFunctions2INTEL", + target_feature = "SPV_INTEL_shader_integer_functions2" + )) +))))] +#[inline] +pub(crate) const fn leading_zeros_u16(x: u16) -> u32 { + x.leading_zeros() +} + +#[cfg(all( + target_arch = "spirv", + not(all( + target_feature = "IntegerFunctions2INTEL", + target_feature = "SPV_INTEL_shader_integer_functions2" + )) +))] +#[inline] +pub(crate) const fn leading_zeros_u16(x: u16) -> u32 { + leading_zeros_u16_fallback(x) +} + +#[cfg(any( + test, + all( + target_arch = "spirv", + not(all( + target_feature = "IntegerFunctions2INTEL", + target_feature = "SPV_INTEL_shader_integer_functions2" + )) + ) +))] +#[inline] +const fn leading_zeros_u16_fallback(mut x: u16) -> u32 { + use crunchy::unroll; + let mut c = 0; + let msb = 1 << 15; + unroll! { for i in 0 .. 16 { + if x & msb == 0 { + c += 1; + } else { + return c; + } + #[allow(unused_assignments)] + if i < 15 { + x <<= 1; + } + }} + c +} + +#[cfg(test)] +mod test { + + #[test] + fn leading_zeros_u16_fallback() { + for x in [44, 97, 304, 1179, 23571] { + assert_eq!(super::leading_zeros_u16_fallback(x), x.leading_zeros()); + } + } +} diff --git a/vendor/half/src/lib.rs b/vendor/half/src/lib.rs new file mode 100644 index 00000000..06d38a68 --- /dev/null +++ b/vendor/half/src/lib.rs @@ -0,0 +1,283 @@ +//! A crate that provides support for half-precision 16-bit floating point types. +//! +//! This crate provides the [`struct@f16`] type, which is an implementation of the IEEE 754-2008 standard +//! [`binary16`] a.k.a "half" floating point type. This 16-bit floating point type is intended for +//! efficient storage where the full range and precision of a larger floating point value is not +//! required. This is especially useful for image storage formats. +//! +//! This crate also provides a [`struct@bf16`] type, an alternative 16-bit floating point format. The +//! [`bfloat16`] format is a truncated IEEE 754 standard `binary32` float that preserves the +//! exponent to allow the same range as [`f32`] but with only 8 bits of precision (instead of 11 +//! bits for [`struct@f16`]). See the [`struct@bf16`] type for details. +//! +//! Because [`struct@f16`] and [`struct@bf16`] are primarily for efficient storage, floating point operations such +//! as addition, multiplication, etc. are not always implemented by hardware. When hardware does not +//! support these operations, this crate emulates them by converting the value to +//! [`f32`] before performing the operation and then back afterward. +//! +//! 
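The emulation path described in the paragraph above is essentially widen-to-f32, operate, narrow back, which is the same shape as the `add_f16_fallback`/`multiply_f16_fallback` helpers added in `binary16/arch.rs` earlier in this patch. A minimal sketch of what that looks like through the public API, assuming `half` as a dependency; `add_via_f32` is an illustrative helper, not part of the crate:

```rust
use half::f16;

// Illustrative only: mirrors the software fallback (widen to f32, add, narrow
// back with round-to-nearest-even), the same shape as add_f16_fallback above.
fn add_via_f32(a: f16, b: f16) -> f16 {
    f16::from_f32(a.to_f32() + b.to_f32())
}

fn main() {
    let x = f16::from_f32(1.5);
    let y = f16::from_f32(2.25);
    // On targets without fp16/f16c hardware, `x + y` takes essentially this path.
    assert_eq!(x + y, add_via_f32(x, y));
    println!("{}", add_via_f32(x, y)); // 3.75
}
```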
Note that conversions from [`f32`]/[`f64`] to both [`struct@f16`] and [`struct@bf16`] are lossy operations, and +//! just as converting a [`f64`] to [`f32`] is lossy and does not have `Into`/`From` trait +//! implementations, so too do these smaller types not have those trait implementations either. +//! Instead, use the `from_f32`/`from_f64` functions for the types in this crate. If you don't care +//! about lossy conversions and need trait conversions, use the appropriate [`num-traits`] +//! traits that are implemented. +//! +//! This crate also provides a [`slice`][mod@slice] module for zero-copy in-place conversions of +//! [`u16`] slices to both [`struct@f16`] and [`struct@bf16`], as well as efficient vectorized conversions of +//! larger buffers of floating point values to and from these half formats. +//! +//! The crate supports `#[no_std]` when the `std` cargo feature is not enabled, so it can be used in +//! embedded environments without using the Rust [`std`] library. The `std` feature enables support +//! for the standard library and is enabled by default; see the [Cargo Features](#cargo-features) +//! section below. +//! +//! A [`prelude`] module is provided for easy importing of available utility traits. +//! +//! # Serialization +//! +//! When the `serde` feature is enabled, [`struct@f16`] and [`struct@bf16`] will be serialized as a newtype of +//! [`u16`] by default. In binary formats this is ideal, as it will generally use just two bytes for +//! storage. For string formats like JSON, however, this isn't as useful, and due to design +//! limitations of serde, it's not possible for the default `Serialize` implementation to support +//! different serialization for different formats. +//! +//! Instead, it's up to the container type of the floats to control how it is serialized. This can +//! easily be controlled when using the derive macros with `#[serde(serialize_with="")]` +//! attributes. For both [`struct@f16`] and [`struct@bf16`], a `serialize_as_f32` and `serialize_as_string` are +//! provided for use with this attribute. +//! +//! Deserialization of both float types supports deserializing from the default serialization, +//! strings, and `f32`/`f64` values, so no additional work is required. +//! +//! # Hardware support +//! +//! Hardware support for these conversions and arithmetic will be used +//! whenever it is available, either through intrinsics or targeted assembly, although +//! a nightly Rust toolchain may be required for some hardware. When hardware supports it, the +//! functions and traits in the [`slice`][mod@slice] and [`vec`] modules will also use vectorized +//! SIMD instructions for increased efficiency. +//! +//! The following list details hardware support for floating point types in this crate. When using +//! the `std` cargo feature, runtime CPU target detection will be used. To get the most performance +//! benefits, compile for specific CPU features, which avoids the runtime overhead and works in a +//! `no_std` environment. +//! +//! | Architecture | CPU Target Feature | Notes | +//! | ------------ | ------------------ | ----- | +//! | `x86`/`x86_64` | `f16c` | This supports conversion to/from [`struct@f16`] only (including vector SIMD) and does not support any [`struct@bf16`] or arithmetic operations. | +//! | `aarch64` | `fp16` | This supports all operations on [`struct@f16`] only. | +//!
| `loongarch64` | `lsx` | This supports conversion to/from [`struct@f16`] only (including vector SIMD) and does not support any [`struct@bf16`] or arithmetic operations. | +//! +//! # Cargo Features +//! +//! This crate supports a number of optional cargo features. None of these features are enabled by +//! default, even `std`. +//! +//! - **`alloc`** — Enable use of the [`alloc`] crate when not using the `std` library. +//! +//! Among other functions, this enables the [`vec`] module, which contains zero-copy +//! conversions for the [`Vec`] type. This allows fast conversion between raw `Vec<u16>` bits and +//! `Vec<f16>` or `Vec<bf16>` arrays, and vice versa. +//! +//! - **`std`** — Enable features that depend on the Rust [`std`] library. This also enables the +//! `alloc` feature automatically. +//! +//! Enabling the `std` feature enables runtime CPU feature detection of hardware support. +//! Without this feature detection, hardware acceleration is only used when the compile target supports it. +//! +//! - **`serde`** — Adds support for the [`serde`] crate by implementing [`Serialize`] and +//! [`Deserialize`] traits for both [`struct@f16`] and [`struct@bf16`]. +//! +//! - **`num-traits`** — Adds support for the [`num-traits`] crate by implementing [`ToPrimitive`], +//! [`FromPrimitive`], [`ToBytes`], `FromBytes`, [`AsPrimitive`], [`Num`], [`Float`], +//! [`FloatCore`], [`Signed`], and [`Bounded`] traits for both [`struct@f16`] and [`struct@bf16`]. +//! +//! - **`bytemuck`** — Adds support for the [`bytemuck`] crate by implementing [`Zeroable`] and +//! [`Pod`] traits for both [`struct@f16`] and [`struct@bf16`]. +//! +//! - **`rand_distr`** — Adds support for the [`rand_distr`] crate by implementing [`Distribution`] +//! and other traits for both [`struct@f16`] and [`struct@bf16`]. +//! +//! - **`rkyv`** -- Enable zero-copy deserialization with the [`rkyv`] crate. +//! +//! - **`arbitrary`** -- Enable fuzzing support with the [`arbitrary`] crate by implementing the +//! [`Arbitrary`] trait. +//! +//! - **`nightly`** -- Enable nightly-only features. +//! +//! [`alloc`]: https://doc.rust-lang.org/alloc/ +//! [`std`]: https://doc.rust-lang.org/std/ +//! [`binary16`]: https://en.wikipedia.org/wiki/Half-precision_floating-point_format +//! [`bfloat16`]: https://en.wikipedia.org/wiki/Bfloat16_floating-point_format +//! [`serde`]: https://crates.io/crates/serde +//! [`bytemuck`]: https://crates.io/crates/bytemuck +//! [`num-traits`]: https://crates.io/crates/num-traits +//! [`zerocopy`]: https://crates.io/crates/zerocopy +//! [`rand_distr`]: https://crates.io/crates/rand_distr +//! [`rkyv`]: (https://crates.io/crates/rkyv) +//!
[`arbitrary`]: (https://crates.io/crates/arbitrary) +#![cfg_attr( + feature = "alloc", + doc = " +[`vec`]: mod@vec" +)] +#![cfg_attr( + not(feature = "alloc"), + doc = " +[`vec`]: # +[`Vec`]: https://docs.rust-lang.org/stable/alloc/vec/struct.Vec.html" +)] +#![cfg_attr( + feature = "serde", + doc = " +[`Serialize`]: serde::Serialize +[`Deserialize`]: serde::Deserialize" +)] +#![cfg_attr( + not(feature = "serde"), + doc = " +[`Serialize`]: https://docs.rs/serde/*/serde/trait.Serialize.html +[`Deserialize`]: https://docs.rs/serde/*/serde/trait.Deserialize.html" +)] +#![cfg_attr( + feature = "num-traits", + doc = " +[`ToPrimitive`]: ::num_traits::ToPrimitive +[`FromPrimitive`]: ::num_traits::FromPrimitive +[`ToBytes`]: ::num_traits::ToBytes +[`AsPrimitive`]: ::num_traits::AsPrimitive +[`Num`]: ::num_traits::Num +[`Float`]: ::num_traits::Float +[`FloatCore`]: ::num_traits::float::FloatCore +[`Signed`]: ::num_traits::Signed +[`Bounded`]: ::num_traits::Bounded" +)] +#![cfg_attr( + not(feature = "num-traits"), + doc = " +[`ToPrimitive`]: https://docs.rs/num-traits/*/num_traits/cast/trait.ToPrimitive.html +[`FromPrimitive`]: https://docs.rs/num-traits/*/num_traits/cast/trait.FromPrimitive.html +[`ToBytes`]: https://docs.rs/num-traits/*/num_traits/ops/bytes/trait.ToBytes.html +[`AsPrimitive`]: https://docs.rs/num-traits/*/num_traits/cast/trait.AsPrimitive.html +[`Num`]: https://docs.rs/num-traits/*/num_traits/trait.Num.html +[`Float`]: https://docs.rs/num-traits/*/num_traits/float/trait.Float.html +[`FloatCore`]: https://docs.rs/num-traits/*/num_traits/float/trait.FloatCore.html +[`Bounded`]: https://docs.rs/num-traits/*/num_traits/bounds/trait.Bounded.html" +)] +#![cfg_attr( + feature = "bytemuck", + doc = " +[`Zeroable`]: bytemuck::Zeroable +[`Pod`]: bytemuck::Pod" +)] +#![cfg_attr( + not(feature = "bytemuck"), + doc = " +[`Zeroable`]: https://docs.rs/bytemuck/*/bytemuck/trait.Zeroable.html +[`Pod`]: https://docs.rs/bytemuck/*bytemuck/trait.Pod.html" +)] +#![cfg_attr( + feature = "zerocopy", + doc = " +[`IntoBytes`]: zerocopy::IntoBytes +[`FromBytes`]: zerocopy::FromBytes" +)] +#![cfg_attr( + not(feature = "zerocopy"), + doc = " +[`IntoBytes`]: https://docs.rs/zerocopy/*/zerocopy/trait.IntoBytes.html +[`FromBytes`]: https://docs.rs/zerocopy/*/zerocopy/trait.FromBytes.html" +)] +#![cfg_attr( + feature = "rand_distr", + doc = " +[`Distribution`]: rand::distr::Distribution" +)] +#![cfg_attr( + not(feature = "rand_distr"), + doc = " +[`Distribution`]: https://docs.rs/rand/*/rand/distr/trait.Distribution.html" +)] +#![cfg_attr( + feature = "arbitrary", + doc = " +[`Arbitrary`]: arbitrary::Arbitrary" +)] +#![cfg_attr( + not(feature = "arbitrary"), + doc = " +[`Arbitrary`]: https://docs.rs/arbitrary/*/arbitrary/trait.Arbitrary.html" +)] +#![warn( + missing_docs, + missing_copy_implementations, + trivial_numeric_casts, + future_incompatible +)] +#![cfg_attr(not(target_arch = "spirv"), warn(missing_debug_implementations))] +#![cfg_attr( + all(feature = "nightly", target_arch = "loongarch64"), + feature( + stdarch_loongarch, + stdarch_loongarch_feature_detection, + loongarch_target_feature + ) +)] +#![allow(clippy::verbose_bit_mask, clippy::cast_lossless, unexpected_cfgs)] +#![cfg_attr(not(feature = "std"), no_std)] +#![doc(html_root_url = "https://docs.rs/half/2.7.1")] +#![doc(test(attr(deny(warnings), allow(unused))))] +// Until updated to use newly stabilized `from_bits`, disable new lint warning about the transmutes +#![allow(unknown_lints, unnecessary_transmutes)] +#![warn(unknown_lints)] + 
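For orientation, a short usage sketch of the public surface documented above, assuming `half` is pulled in with its default `std` feature; the slice method names (`convert_from_f32_slice`/`convert_to_f32_slice`) are those of recent `half` releases rather than something introduced by this patch:

```rust
use half::prelude::*; // f16, bf16, plus the slice extension traits re-exported below

fn main() {
    // Construction is explicit and lossy; there are deliberately no From/Into impls.
    let a = f16::from_f32(0.1);
    let b = bf16::from_f64(0.1);
    println!("{} {}", a, b);

    // Bulk conversion via the slice module (vectorized when hardware allows).
    let src = [1.0f32, 2.0, 3.0, 4.0];
    let mut half_buf = [f16::ZERO; 4];
    half_buf.convert_from_f32_slice(&src);

    let mut back = [0.0f32; 4];
    half_buf.convert_to_f32_slice(&mut back);
    assert_eq!(src, back); // small integers round-trip exactly
}
```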
+#[cfg(feature = "alloc")] +extern crate alloc; + +mod bfloat; +mod binary16; +mod leading_zeros; +#[cfg(feature = "num-traits")] +mod num_traits; + +#[cfg(not(target_arch = "spirv"))] +pub mod slice; +#[cfg(feature = "alloc")] +pub mod vec; + +pub use bfloat::bf16; +pub use binary16::f16; + +#[cfg(feature = "rand_distr")] +mod rand_distr; + +/// A collection of the most used items and traits in this crate for easy importing. +/// +/// # Examples +/// +/// ```rust +/// use half::prelude::*; +/// ``` +pub mod prelude { + #[doc(no_inline)] + pub use crate::{bf16, f16}; + + #[cfg(not(target_arch = "spirv"))] + #[doc(no_inline)] + pub use crate::slice::{HalfBitsSliceExt, HalfFloatSliceExt}; + + #[cfg(feature = "alloc")] + #[doc(no_inline)] + pub use crate::vec::{HalfBitsVecExt, HalfFloatVecExt}; +} + +// Keep this module private to crate +mod private { + use crate::{bf16, f16}; + use zerocopy::{FromBytes, Immutable, IntoBytes}; + + pub trait SealedHalf: FromBytes + IntoBytes + Immutable {} + + impl SealedHalf for f16 {} + impl SealedHalf for bf16 {} +} diff --git a/vendor/half/src/num_traits.rs b/vendor/half/src/num_traits.rs new file mode 100644 index 00000000..88a9989b --- /dev/null +++ b/vendor/half/src/num_traits.rs @@ -0,0 +1,1584 @@ +use crate::{bf16, f16}; +use core::cmp::Ordering; +use core::{num::FpCategory, ops::Div}; +use num_traits::{ + AsPrimitive, Bounded, FloatConst, FromBytes, FromPrimitive, Num, NumCast, One, ToBytes, + ToPrimitive, Zero, +}; + +impl ToPrimitive for f16 { + #[inline] + fn to_i64(&self) -> Option<i64> { + Self::to_f32(*self).to_i64() + } + #[inline] + fn to_u64(&self) -> Option<u64> { + Self::to_f32(*self).to_u64() + } + #[inline] + fn to_i8(&self) -> Option<i8> { + Self::to_f32(*self).to_i8() + } + #[inline] + fn to_u8(&self) -> Option<u8> { + Self::to_f32(*self).to_u8() + } + #[inline] + fn to_i16(&self) -> Option<i16> { + Self::to_f32(*self).to_i16() + } + #[inline] + fn to_u16(&self) -> Option<u16> { + Self::to_f32(*self).to_u16() + } + #[inline] + fn to_i32(&self) -> Option<i32> { + Self::to_f32(*self).to_i32() + } + #[inline] + fn to_u32(&self) -> Option<u32> { + Self::to_f32(*self).to_u32() + } + #[inline] + fn to_f32(&self) -> Option<f32> { + Some(Self::to_f32(*self)) + } + #[inline] + fn to_f64(&self) -> Option<f64> { + Some(Self::to_f64(*self)) + } +} + +impl FromPrimitive for f16 { + #[inline] + fn from_i64(n: i64) -> Option<Self> { + n.to_f32().map(Self::from_f32) + } + #[inline] + fn from_u64(n: u64) -> Option<Self> { + n.to_f32().map(Self::from_f32) + } + #[inline] + fn from_i8(n: i8) -> Option<Self> { + n.to_f32().map(Self::from_f32) + } + #[inline] + fn from_u8(n: u8) -> Option<Self> { + n.to_f32().map(Self::from_f32) + } + #[inline] + fn from_i16(n: i16) -> Option<Self> { + n.to_f32().map(Self::from_f32) + } + #[inline] + fn from_u16(n: u16) -> Option<Self> { + n.to_f32().map(Self::from_f32) + } + #[inline] + fn from_i32(n: i32) -> Option<Self> { + n.to_f32().map(Self::from_f32) + } + #[inline] + fn from_u32(n: u32) -> Option<Self> { + n.to_f32().map(Self::from_f32) + } + #[inline] + fn from_f32(n: f32) -> Option<Self> { + n.to_f32().map(Self::from_f32) + } + #[inline] + fn from_f64(n: f64) -> Option<Self> { + n.to_f64().map(Self::from_f64) + } +} + +impl Num for f16 { + type FromStrRadixErr = <f32 as Num>::FromStrRadixErr; + + #[inline] + fn from_str_radix(str: &str, radix: u32) -> Result<Self, Self::FromStrRadixErr> { + Ok(Self::from_f32(f32::from_str_radix(str, radix)?)) + } +} + +impl One for f16 { + #[inline] + fn one() -> Self { + 
Self::ONE + } +} + +impl Zero for f16 { + #[inline] + fn zero() -> Self { + Self::ZERO + } + + #[inline] + fn is_zero(&self) -> bool { + *self == Self::ZERO + } +} + +impl NumCast for f16 { + #[inline] + fn from<T: ToPrimitive>(n: T) -> Option<Self> { + n.to_f32().map(Self::from_f32) + } +} + +impl num_traits::float::FloatCore for f16 { + #[inline] + fn infinity() -> Self { + Self::INFINITY + } + + #[inline] + fn neg_infinity() -> Self { + Self::NEG_INFINITY + } + + #[inline] + fn nan() -> Self { + Self::NAN + } + + #[inline] + fn neg_zero() -> Self { + Self::NEG_ZERO + } + + #[inline] + fn min_value() -> Self { + Self::MIN + } + + #[inline] + fn min_positive_value() -> Self { + Self::MIN_POSITIVE + } + + #[inline] + fn epsilon() -> Self { + Self::EPSILON + } + + #[inline] + fn max_value() -> Self { + Self::MAX + } + + #[inline] + fn is_nan(self) -> bool { + self.is_nan() + } + + #[inline] + fn is_infinite(self) -> bool { + self.is_infinite() + } + + #[inline] + fn is_finite(self) -> bool { + self.is_finite() + } + + #[inline] + fn is_normal(self) -> bool { + self.is_normal() + } + + #[inline] + fn classify(self) -> FpCategory { + self.classify() + } + + #[inline] + fn floor(self) -> Self { + Self::from_f32(self.to_f32().floor()) + } + + #[inline] + fn ceil(self) -> Self { + Self::from_f32(self.to_f32().ceil()) + } + + #[inline] + fn round(self) -> Self { + Self::from_f32(self.to_f32().round()) + } + + #[inline] + fn trunc(self) -> Self { + Self::from_f32(self.to_f32().trunc()) + } + + #[inline] + fn fract(self) -> Self { + Self::from_f32(self.to_f32().fract()) + } + + #[inline] + fn abs(self) -> Self { + Self::from_bits(self.to_bits() & 0x7FFF) + } + + #[inline] + fn signum(self) -> Self { + self.signum() + } + + #[inline] + fn is_sign_positive(self) -> bool { + self.is_sign_positive() + } + + #[inline] + fn is_sign_negative(self) -> bool { + self.is_sign_negative() + } + + fn min(self, other: Self) -> Self { + match self.partial_cmp(&other) { + None => { + if self.is_nan() { + other + } else { + self + } + } + Some(Ordering::Greater) | Some(Ordering::Equal) => other, + Some(Ordering::Less) => self, + } + } + + fn max(self, other: Self) -> Self { + match self.partial_cmp(&other) { + None => { + if self.is_nan() { + other + } else { + self + } + } + Some(Ordering::Greater) | Some(Ordering::Equal) => self, + Some(Ordering::Less) => other, + } + } + + #[inline] + fn recip(self) -> Self { + Self::from_f32(self.to_f32().recip()) + } + + #[inline] + fn powi(self, exp: i32) -> Self { + Self::from_f32(self.to_f32().powi(exp)) + } + + #[inline] + fn to_degrees(self) -> Self { + Self::from_f32(self.to_f32().to_degrees()) + } + + #[inline] + fn to_radians(self) -> Self { + Self::from_f32(self.to_f32().to_radians()) + } + + #[inline] + fn integer_decode(self) -> (u64, i16, i8) { + num_traits::float::FloatCore::integer_decode(self.to_f32()) + } +} + +impl num_traits::float::Float for f16 { + #[inline] + fn nan() -> Self { + Self::NAN + } + + #[inline] + fn infinity() -> Self { + Self::INFINITY + } + + #[inline] + fn neg_infinity() -> Self { + Self::NEG_INFINITY + } + + #[inline] + fn neg_zero() -> Self { + Self::NEG_ZERO + } + + #[inline] + fn min_value() -> Self { + Self::MIN + } + + #[inline] + fn min_positive_value() -> Self { + Self::MIN_POSITIVE + } + + #[inline] + fn epsilon() -> Self { + Self::EPSILON + } + + #[inline] + fn max_value() -> Self { + Self::MAX + } + + #[inline] + fn is_nan(self) -> bool { + self.is_nan() + } + + #[inline] + fn is_infinite(self) -> bool { + self.is_infinite() + } + 
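The `num-traits` impls above (`ToPrimitive`, `FromPrimitive`, `Num`, `NumCast`, `FloatCore`) all delegate through `f32`; the payoff is that `f16` can flow through code that is generic over the num-traits hierarchy. A usage sketch, not part of the vendored crate, assuming `half` with the `num-traits` feature and `num-traits` itself as dependencies:

```rust
// Usage sketch (editor's addition, not vendored code). Assumes Cargo.toml has:
//   half = { version = "2", features = ["num-traits"] }
//   num-traits = "0.2"
use half::f16;
use num_traits::float::FloatCore;

/// Clamps any `FloatCore` value into [0, 1]. Works identically for f32, f64 and
/// half::f16, because the impls above route every operation through f32.
fn clamp_unit<F: FloatCore>(x: F) -> F {
    x.max(F::zero()).min(F::one())
}

fn main() {
    assert_eq!(clamp_unit(1.5f32), 1.0f32);
    assert_eq!(clamp_unit(f16::from_f32(1.5)), f16::ONE);
    assert_eq!(clamp_unit(f16::from_f32(-0.25)), f16::ZERO);
    println!("ok");
}
```

`F::zero()` and `F::one()` resolve through `FloatCore`'s `Num` supertrait, so the generic code needs no `half`-specific handling.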
+ #[inline] + fn is_finite(self) -> bool { + self.is_finite() + } + + #[inline] + fn is_normal(self) -> bool { + self.is_normal() + } + + #[inline] + fn classify(self) -> FpCategory { + self.classify() + } + + #[inline] + fn floor(self) -> Self { + Self::from_f32(self.to_f32().floor()) + } + + #[inline] + fn ceil(self) -> Self { + Self::from_f32(self.to_f32().ceil()) + } + + #[inline] + fn round(self) -> Self { + Self::from_f32(self.to_f32().round()) + } + + #[inline] + fn trunc(self) -> Self { + Self::from_f32(self.to_f32().trunc()) + } + + #[inline] + fn fract(self) -> Self { + Self::from_f32(self.to_f32().fract()) + } + + #[inline] + fn abs(self) -> Self { + Self::from_f32(self.to_f32().abs()) + } + + #[inline] + fn signum(self) -> Self { + Self::from_f32(self.to_f32().signum()) + } + + #[inline] + fn is_sign_positive(self) -> bool { + self.is_sign_positive() + } + + #[inline] + fn is_sign_negative(self) -> bool { + self.is_sign_negative() + } + + #[inline] + fn mul_add(self, a: Self, b: Self) -> Self { + Self::from_f32(self.to_f32().mul_add(a.to_f32(), b.to_f32())) + } + + #[inline] + fn recip(self) -> Self { + Self::from_f32(self.to_f32().recip()) + } + + #[inline] + fn powi(self, n: i32) -> Self { + Self::from_f32(self.to_f32().powi(n)) + } + + #[inline] + fn powf(self, n: Self) -> Self { + Self::from_f32(self.to_f32().powf(n.to_f32())) + } + + #[inline] + fn sqrt(self) -> Self { + Self::from_f32(self.to_f32().sqrt()) + } + + #[inline] + fn exp(self) -> Self { + Self::from_f32(self.to_f32().exp()) + } + + #[inline] + fn exp2(self) -> Self { + Self::from_f32(self.to_f32().exp2()) + } + + #[inline] + fn ln(self) -> Self { + Self::from_f32(self.to_f32().ln()) + } + + #[inline] + fn log(self, base: Self) -> Self { + Self::from_f32(self.to_f32().log(base.to_f32())) + } + + #[inline] + fn log2(self) -> Self { + Self::from_f32(self.to_f32().log2()) + } + + #[inline] + fn log10(self) -> Self { + Self::from_f32(self.to_f32().log10()) + } + + #[inline] + fn to_degrees(self) -> Self { + Self::from_f32(self.to_f32().to_degrees()) + } + + #[inline] + fn to_radians(self) -> Self { + Self::from_f32(self.to_f32().to_radians()) + } + + #[inline] + fn max(self, other: Self) -> Self { + self.max(other) + } + + #[inline] + fn min(self, other: Self) -> Self { + self.min(other) + } + + #[inline] + fn abs_sub(self, other: Self) -> Self { + Self::from_f32((self.to_f32() - other.to_f32()).max(0.0)) + } + + #[inline] + fn cbrt(self) -> Self { + Self::from_f32(self.to_f32().cbrt()) + } + + #[inline] + fn hypot(self, other: Self) -> Self { + Self::from_f32(self.to_f32().hypot(other.to_f32())) + } + + #[inline] + fn sin(self) -> Self { + Self::from_f32(self.to_f32().sin()) + } + + #[inline] + fn cos(self) -> Self { + Self::from_f32(self.to_f32().cos()) + } + + #[inline] + fn tan(self) -> Self { + Self::from_f32(self.to_f32().tan()) + } + + #[inline] + fn asin(self) -> Self { + Self::from_f32(self.to_f32().asin()) + } + + #[inline] + fn acos(self) -> Self { + Self::from_f32(self.to_f32().acos()) + } + + #[inline] + fn atan(self) -> Self { + Self::from_f32(self.to_f32().atan()) + } + + #[inline] + fn atan2(self, other: Self) -> Self { + Self::from_f32(self.to_f32().atan2(other.to_f32())) + } + + #[inline] + fn sin_cos(self) -> (Self, Self) { + let (sin, cos) = self.to_f32().sin_cos(); + (Self::from_f32(sin), Self::from_f32(cos)) + } + + #[inline] + fn exp_m1(self) -> Self { + Self::from_f32(self.to_f32().exp_m1()) + } + + #[inline] + fn ln_1p(self) -> Self { + Self::from_f32(self.to_f32().ln_1p()) + } + + 
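Because every `Float` method above is implemented as `f16 -> f32 -> operation -> f16`, each call rounds its result back to half precision, so chaining calls rounds at every intermediate step. A small illustration (editor's sketch, not vendored code), assuming the same `half` + `num-traits` setup as the previous example:

```rust
// Editor's sketch, not vendored code; assumes the same `half` + `num-traits`
// dependencies as the previous example. Every `Float` method above converts to
// f32, computes, and rounds back to f16, so chained calls round at each step.
use half::f16;
use num_traits::Float;

fn main() {
    let x = f16::from_f32(0.1);

    // Two round trips through f32: ln() rounds to f16, then exp() rounds again.
    let stepwise = x.ln().exp();

    // One rounding at the end: do the math in f32, convert once.
    let fused = f16::from_f32(x.to_f32().ln().exp());

    // The two may differ in the last bit; neither is wrong, they simply round
    // at different points.
    println!("stepwise = {stepwise}, fused = {fused}");
}
```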
#[inline] + fn sinh(self) -> Self { + Self::from_f32(self.to_f32().sinh()) + } + + #[inline] + fn cosh(self) -> Self { + Self::from_f32(self.to_f32().cosh()) + } + + #[inline] + fn tanh(self) -> Self { + Self::from_f32(self.to_f32().tanh()) + } + + #[inline] + fn asinh(self) -> Self { + Self::from_f32(self.to_f32().asinh()) + } + + #[inline] + fn acosh(self) -> Self { + Self::from_f32(self.to_f32().acosh()) + } + + #[inline] + fn atanh(self) -> Self { + Self::from_f32(self.to_f32().atanh()) + } + + #[inline] + fn integer_decode(self) -> (u64, i16, i8) { + num_traits::float::Float::integer_decode(self.to_f32()) + } +} + +impl FloatConst for f16 { + #[inline] + fn E() -> Self { + Self::E + } + + #[inline] + fn FRAC_1_PI() -> Self { + Self::FRAC_1_PI + } + + #[inline] + fn FRAC_1_SQRT_2() -> Self { + Self::FRAC_1_SQRT_2 + } + + #[inline] + fn FRAC_2_PI() -> Self { + Self::FRAC_2_PI + } + + #[inline] + fn FRAC_2_SQRT_PI() -> Self { + Self::FRAC_2_SQRT_PI + } + + #[inline] + fn FRAC_PI_2() -> Self { + Self::FRAC_PI_2 + } + + #[inline] + fn FRAC_PI_3() -> Self { + Self::FRAC_PI_3 + } + + #[inline] + fn FRAC_PI_4() -> Self { + Self::FRAC_PI_4 + } + + #[inline] + fn FRAC_PI_6() -> Self { + Self::FRAC_PI_6 + } + + #[inline] + fn FRAC_PI_8() -> Self { + Self::FRAC_PI_8 + } + + #[inline] + fn LN_10() -> Self { + Self::LN_10 + } + + #[inline] + fn LN_2() -> Self { + Self::LN_2 + } + + #[inline] + fn LOG10_E() -> Self { + Self::LOG10_E + } + + #[inline] + fn LOG2_E() -> Self { + Self::LOG2_E + } + + #[inline] + fn PI() -> Self { + Self::PI + } + + fn SQRT_2() -> Self { + Self::SQRT_2 + } + + #[inline] + fn LOG10_2() -> Self + where + Self: Sized + Div<Self, Output = Self>, + { + Self::LOG10_2 + } + + #[inline] + fn LOG2_10() -> Self + where + Self: Sized + Div<Self, Output = Self>, + { + Self::LOG2_10 + } +} + +impl Bounded for f16 { + #[inline] + fn min_value() -> Self { + f16::MIN + } + + #[inline] + fn max_value() -> Self { + f16::MAX + } +} + +macro_rules! impl_as_primitive_to_f16 { + ($ty:ty, $meth:ident) => { + impl AsPrimitive<$ty> for f16 { + #[inline] + fn as_(self) -> $ty { + self.$meth().as_() + } + } + }; +} + +impl AsPrimitive<f16> for f16 { + #[inline] + fn as_(self) -> f16 { + self + } +} + +impl_as_primitive_to_f16!(i64, to_f32); +impl_as_primitive_to_f16!(u64, to_f32); +impl_as_primitive_to_f16!(i8, to_f32); +impl_as_primitive_to_f16!(u8, to_f32); +impl_as_primitive_to_f16!(i16, to_f32); +impl_as_primitive_to_f16!(u16, to_f32); +impl_as_primitive_to_f16!(i32, to_f32); +impl_as_primitive_to_f16!(u32, to_f32); +impl_as_primitive_to_f16!(isize, to_f32); +impl_as_primitive_to_f16!(usize, to_f32); +impl_as_primitive_to_f16!(f32, to_f32); +impl_as_primitive_to_f16!(f64, to_f64); +impl_as_primitive_to_f16!(bf16, to_f32); + +macro_rules! 
impl_as_primitive_f16_from { + ($ty:ty, $meth:ident) => { + impl AsPrimitive<f16> for $ty { + #[inline] + fn as_(self) -> f16 { + f16::$meth(self.as_()) + } + } + }; +} + +impl_as_primitive_f16_from!(i64, from_f32); +impl_as_primitive_f16_from!(u64, from_f32); +impl_as_primitive_f16_from!(i8, from_f32); +impl_as_primitive_f16_from!(u8, from_f32); +impl_as_primitive_f16_from!(i16, from_f32); +impl_as_primitive_f16_from!(u16, from_f32); +impl_as_primitive_f16_from!(i32, from_f32); +impl_as_primitive_f16_from!(u32, from_f32); +impl_as_primitive_f16_from!(isize, from_f32); +impl_as_primitive_f16_from!(usize, from_f32); +impl_as_primitive_f16_from!(f32, from_f32); +impl_as_primitive_f16_from!(f64, from_f64); + +impl ToBytes for f16 { + type Bytes = [u8; 2]; + + fn to_be_bytes(&self) -> Self::Bytes { + Self::to_be_bytes(*self) + } + + fn to_le_bytes(&self) -> Self::Bytes { + Self::to_le_bytes(*self) + } + + fn to_ne_bytes(&self) -> Self::Bytes { + Self::to_ne_bytes(*self) + } +} + +impl FromBytes for f16 { + type Bytes = [u8; 2]; + + fn from_be_bytes(bytes: &Self::Bytes) -> Self { + Self::from_be_bytes(*bytes) + } + + fn from_le_bytes(bytes: &Self::Bytes) -> Self { + Self::from_le_bytes(*bytes) + } + + fn from_ne_bytes(bytes: &Self::Bytes) -> Self { + Self::from_ne_bytes(*bytes) + } +} + +impl ToPrimitive for bf16 { + #[inline] + fn to_i64(&self) -> Option<i64> { + Self::to_f32(*self).to_i64() + } + #[inline] + fn to_u64(&self) -> Option<u64> { + Self::to_f32(*self).to_u64() + } + #[inline] + fn to_i8(&self) -> Option<i8> { + Self::to_f32(*self).to_i8() + } + #[inline] + fn to_u8(&self) -> Option<u8> { + Self::to_f32(*self).to_u8() + } + #[inline] + fn to_i16(&self) -> Option<i16> { + Self::to_f32(*self).to_i16() + } + #[inline] + fn to_u16(&self) -> Option<u16> { + Self::to_f32(*self).to_u16() + } + #[inline] + fn to_i32(&self) -> Option<i32> { + Self::to_f32(*self).to_i32() + } + #[inline] + fn to_u32(&self) -> Option<u32> { + Self::to_f32(*self).to_u32() + } + #[inline] + fn to_f32(&self) -> Option<f32> { + Some(Self::to_f32(*self)) + } + #[inline] + fn to_f64(&self) -> Option<f64> { + Some(Self::to_f64(*self)) + } +} + +impl FromPrimitive for bf16 { + #[inline] + fn from_i64(n: i64) -> Option<Self> { + n.to_f32().map(Self::from_f32) + } + #[inline] + fn from_u64(n: u64) -> Option<Self> { + n.to_f32().map(Self::from_f32) + } + #[inline] + fn from_i8(n: i8) -> Option<Self> { + n.to_f32().map(Self::from_f32) + } + #[inline] + fn from_u8(n: u8) -> Option<Self> { + n.to_f32().map(Self::from_f32) + } + #[inline] + fn from_i16(n: i16) -> Option<Self> { + n.to_f32().map(Self::from_f32) + } + #[inline] + fn from_u16(n: u16) -> Option<Self> { + n.to_f32().map(Self::from_f32) + } + #[inline] + fn from_i32(n: i32) -> Option<Self> { + n.to_f32().map(Self::from_f32) + } + #[inline] + fn from_u32(n: u32) -> Option<Self> { + n.to_f32().map(Self::from_f32) + } + #[inline] + fn from_f32(n: f32) -> Option<Self> { + n.to_f32().map(Self::from_f32) + } + #[inline] + fn from_f64(n: f64) -> Option<Self> { + n.to_f64().map(Self::from_f64) + } +} + +impl Num for bf16 { + type FromStrRadixErr = <f32 as Num>::FromStrRadixErr; + + #[inline] + fn from_str_radix(str: &str, radix: u32) -> Result<Self, Self::FromStrRadixErr> { + Ok(Self::from_f32(f32::from_str_radix(str, radix)?)) + } +} + +impl One for bf16 { + #[inline] + fn one() -> Self { + Self::ONE + } +} + +impl Zero for bf16 { + #[inline] + fn zero() -> Self { + Self::ZERO + } + + #[inline] + fn is_zero(&self) -> bool { + *self == Self::ZERO + } +} + +impl NumCast 
for bf16 { + #[inline] + fn from<T: ToPrimitive>(n: T) -> Option<Self> { + n.to_f32().map(Self::from_f32) + } +} + +impl num_traits::float::FloatCore for bf16 { + #[inline] + fn infinity() -> Self { + Self::INFINITY + } + + #[inline] + fn neg_infinity() -> Self { + Self::NEG_INFINITY + } + + #[inline] + fn nan() -> Self { + Self::NAN + } + + #[inline] + fn neg_zero() -> Self { + Self::NEG_ZERO + } + + #[inline] + fn min_value() -> Self { + Self::MIN + } + + #[inline] + fn min_positive_value() -> Self { + Self::MIN_POSITIVE + } + + #[inline] + fn epsilon() -> Self { + Self::EPSILON + } + + #[inline] + fn max_value() -> Self { + Self::MAX + } + + #[inline] + fn is_nan(self) -> bool { + self.is_nan() + } + + #[inline] + fn is_infinite(self) -> bool { + self.is_infinite() + } + + #[inline] + fn is_finite(self) -> bool { + self.is_finite() + } + + #[inline] + fn is_normal(self) -> bool { + self.is_normal() + } + + #[inline] + fn classify(self) -> FpCategory { + self.classify() + } + + #[inline] + fn floor(self) -> Self { + Self::from_f32(self.to_f32().floor()) + } + + #[inline] + fn ceil(self) -> Self { + Self::from_f32(self.to_f32().ceil()) + } + + #[inline] + fn round(self) -> Self { + Self::from_f32(self.to_f32().round()) + } + + #[inline] + fn trunc(self) -> Self { + Self::from_f32(self.to_f32().trunc()) + } + + #[inline] + fn fract(self) -> Self { + Self::from_f32(self.to_f32().fract()) + } + + #[inline] + fn abs(self) -> Self { + Self::from_bits(self.to_bits() & 0x7FFF) + } + + #[inline] + fn signum(self) -> Self { + self.signum() + } + + #[inline] + fn is_sign_positive(self) -> bool { + self.is_sign_positive() + } + + #[inline] + fn is_sign_negative(self) -> bool { + self.is_sign_negative() + } + + fn min(self, other: Self) -> Self { + match self.partial_cmp(&other) { + None => { + if self.is_nan() { + other + } else { + self + } + } + Some(Ordering::Greater) | Some(Ordering::Equal) => other, + Some(Ordering::Less) => self, + } + } + + fn max(self, other: Self) -> Self { + match self.partial_cmp(&other) { + None => { + if self.is_nan() { + other + } else { + self + } + } + Some(Ordering::Greater) | Some(Ordering::Equal) => self, + Some(Ordering::Less) => other, + } + } + + #[inline] + fn recip(self) -> Self { + Self::from_f32(self.to_f32().recip()) + } + + #[inline] + fn powi(self, exp: i32) -> Self { + Self::from_f32(self.to_f32().powi(exp)) + } + + #[inline] + fn to_degrees(self) -> Self { + Self::from_f32(self.to_f32().to_degrees()) + } + + #[inline] + fn to_radians(self) -> Self { + Self::from_f32(self.to_f32().to_radians()) + } + + #[inline] + fn integer_decode(self) -> (u64, i16, i8) { + num_traits::float::FloatCore::integer_decode(self.to_f32()) + } +} + +impl num_traits::float::Float for bf16 { + #[inline] + fn nan() -> Self { + Self::NAN + } + + #[inline] + fn infinity() -> Self { + Self::INFINITY + } + + #[inline] + fn neg_infinity() -> Self { + Self::NEG_INFINITY + } + + #[inline] + fn neg_zero() -> Self { + Self::NEG_ZERO + } + + #[inline] + fn min_value() -> Self { + Self::MIN + } + + #[inline] + fn min_positive_value() -> Self { + Self::MIN_POSITIVE + } + + #[inline] + fn epsilon() -> Self { + Self::EPSILON + } + + #[inline] + fn max_value() -> Self { + Self::MAX + } + + #[inline] + fn is_nan(self) -> bool { + self.is_nan() + } + + #[inline] + fn is_infinite(self) -> bool { + self.is_infinite() + } + + #[inline] + fn is_finite(self) -> bool { + self.is_finite() + } + + #[inline] + fn is_normal(self) -> bool { + self.is_normal() + } + + #[inline] + fn classify(self) -> 
FpCategory { + self.classify() + } + + #[inline] + fn floor(self) -> Self { + Self::from_f32(self.to_f32().floor()) + } + + #[inline] + fn ceil(self) -> Self { + Self::from_f32(self.to_f32().ceil()) + } + + #[inline] + fn round(self) -> Self { + Self::from_f32(self.to_f32().round()) + } + + #[inline] + fn trunc(self) -> Self { + Self::from_f32(self.to_f32().trunc()) + } + + #[inline] + fn fract(self) -> Self { + Self::from_f32(self.to_f32().fract()) + } + + #[inline] + fn abs(self) -> Self { + Self::from_f32(self.to_f32().abs()) + } + + #[inline] + fn signum(self) -> Self { + Self::from_f32(self.to_f32().signum()) + } + + #[inline] + fn is_sign_positive(self) -> bool { + self.is_sign_positive() + } + + #[inline] + fn is_sign_negative(self) -> bool { + self.is_sign_negative() + } + + #[inline] + fn mul_add(self, a: Self, b: Self) -> Self { + Self::from_f32(self.to_f32().mul_add(a.to_f32(), b.to_f32())) + } + + #[inline] + fn recip(self) -> Self { + Self::from_f32(self.to_f32().recip()) + } + + #[inline] + fn powi(self, n: i32) -> Self { + Self::from_f32(self.to_f32().powi(n)) + } + + #[inline] + fn powf(self, n: Self) -> Self { + Self::from_f32(self.to_f32().powf(n.to_f32())) + } + + #[inline] + fn sqrt(self) -> Self { + Self::from_f32(self.to_f32().sqrt()) + } + + #[inline] + fn exp(self) -> Self { + Self::from_f32(self.to_f32().exp()) + } + + #[inline] + fn exp2(self) -> Self { + Self::from_f32(self.to_f32().exp2()) + } + + #[inline] + fn ln(self) -> Self { + Self::from_f32(self.to_f32().ln()) + } + + #[inline] + fn log(self, base: Self) -> Self { + Self::from_f32(self.to_f32().log(base.to_f32())) + } + + #[inline] + fn log2(self) -> Self { + Self::from_f32(self.to_f32().log2()) + } + + #[inline] + fn log10(self) -> Self { + Self::from_f32(self.to_f32().log10()) + } + + #[inline] + fn to_degrees(self) -> Self { + Self::from_f32(self.to_f32().to_degrees()) + } + + #[inline] + fn to_radians(self) -> Self { + Self::from_f32(self.to_f32().to_radians()) + } + + #[inline] + fn max(self, other: Self) -> Self { + self.max(other) + } + + #[inline] + fn min(self, other: Self) -> Self { + self.min(other) + } + + #[inline] + fn abs_sub(self, other: Self) -> Self { + Self::from_f32((self.to_f32() - other.to_f32()).max(0.0)) + } + + #[inline] + fn cbrt(self) -> Self { + Self::from_f32(self.to_f32().cbrt()) + } + + #[inline] + fn hypot(self, other: Self) -> Self { + Self::from_f32(self.to_f32().hypot(other.to_f32())) + } + + #[inline] + fn sin(self) -> Self { + Self::from_f32(self.to_f32().sin()) + } + + #[inline] + fn cos(self) -> Self { + Self::from_f32(self.to_f32().cos()) + } + + #[inline] + fn tan(self) -> Self { + Self::from_f32(self.to_f32().tan()) + } + + #[inline] + fn asin(self) -> Self { + Self::from_f32(self.to_f32().asin()) + } + + #[inline] + fn acos(self) -> Self { + Self::from_f32(self.to_f32().acos()) + } + + #[inline] + fn atan(self) -> Self { + Self::from_f32(self.to_f32().atan()) + } + + #[inline] + fn atan2(self, other: Self) -> Self { + Self::from_f32(self.to_f32().atan2(other.to_f32())) + } + + #[inline] + fn sin_cos(self) -> (Self, Self) { + let (sin, cos) = self.to_f32().sin_cos(); + (Self::from_f32(sin), Self::from_f32(cos)) + } + + #[inline] + fn exp_m1(self) -> Self { + Self::from_f32(self.to_f32().exp_m1()) + } + + #[inline] + fn ln_1p(self) -> Self { + Self::from_f32(self.to_f32().ln_1p()) + } + + #[inline] + fn sinh(self) -> Self { + Self::from_f32(self.to_f32().sinh()) + } + + #[inline] + fn cosh(self) -> Self { + Self::from_f32(self.to_f32().cosh()) + } + + #[inline] + 
fn tanh(self) -> Self { + Self::from_f32(self.to_f32().tanh()) + } + + #[inline] + fn asinh(self) -> Self { + Self::from_f32(self.to_f32().asinh()) + } + + #[inline] + fn acosh(self) -> Self { + Self::from_f32(self.to_f32().acosh()) + } + + #[inline] + fn atanh(self) -> Self { + Self::from_f32(self.to_f32().atanh()) + } + + #[inline] + fn integer_decode(self) -> (u64, i16, i8) { + num_traits::float::Float::integer_decode(self.to_f32()) + } +} + +impl FloatConst for bf16 { + #[inline] + fn E() -> Self { + Self::E + } + + #[inline] + fn FRAC_1_PI() -> Self { + Self::FRAC_1_PI + } + + #[inline] + fn FRAC_1_SQRT_2() -> Self { + Self::FRAC_1_SQRT_2 + } + + #[inline] + fn FRAC_2_PI() -> Self { + Self::FRAC_2_PI + } + + #[inline] + fn FRAC_2_SQRT_PI() -> Self { + Self::FRAC_2_SQRT_PI + } + + #[inline] + fn FRAC_PI_2() -> Self { + Self::FRAC_PI_2 + } + + #[inline] + fn FRAC_PI_3() -> Self { + Self::FRAC_PI_3 + } + + #[inline] + fn FRAC_PI_4() -> Self { + Self::FRAC_PI_4 + } + + #[inline] + fn FRAC_PI_6() -> Self { + Self::FRAC_PI_6 + } + + #[inline] + fn FRAC_PI_8() -> Self { + Self::FRAC_PI_8 + } + + #[inline] + fn LN_10() -> Self { + Self::LN_10 + } + + #[inline] + fn LN_2() -> Self { + Self::LN_2 + } + + #[inline] + fn LOG10_E() -> Self { + Self::LOG10_E + } + + #[inline] + fn LOG2_E() -> Self { + Self::LOG2_E + } + + #[inline] + fn PI() -> Self { + Self::PI + } + + #[inline] + fn SQRT_2() -> Self { + Self::SQRT_2 + } + + #[inline] + fn LOG10_2() -> Self + where + Self: Sized + Div<Self, Output = Self>, + { + Self::LOG10_2 + } + + #[inline] + fn LOG2_10() -> Self + where + Self: Sized + Div<Self, Output = Self>, + { + Self::LOG2_10 + } +} + +impl Bounded for bf16 { + #[inline] + fn min_value() -> Self { + bf16::MIN + } + + #[inline] + fn max_value() -> Self { + bf16::MAX + } +} + +impl AsPrimitive<bf16> for bf16 { + #[inline] + fn as_(self) -> bf16 { + self + } +} + +macro_rules! impl_as_primitive_to_bf16 { + ($ty:ty, $meth:ident) => { + impl AsPrimitive<$ty> for bf16 { + #[inline] + fn as_(self) -> $ty { + self.$meth().as_() + } + } + }; +} + +impl_as_primitive_to_bf16!(i64, to_f32); +impl_as_primitive_to_bf16!(u64, to_f32); +impl_as_primitive_to_bf16!(i8, to_f32); +impl_as_primitive_to_bf16!(u8, to_f32); +impl_as_primitive_to_bf16!(i16, to_f32); +impl_as_primitive_to_bf16!(u16, to_f32); +impl_as_primitive_to_bf16!(i32, to_f32); +impl_as_primitive_to_bf16!(u32, to_f32); +impl_as_primitive_to_bf16!(isize, to_f32); +impl_as_primitive_to_bf16!(usize, to_f32); +impl_as_primitive_to_bf16!(f32, to_f32); +impl_as_primitive_to_bf16!(f64, to_f64); +impl_as_primitive_to_bf16!(f16, to_f32); + +macro_rules! 
impl_as_primitive_bf16_from { + ($ty:ty, $meth:ident) => { + impl AsPrimitive<bf16> for $ty { + #[inline] + fn as_(self) -> bf16 { + bf16::$meth(self.as_()) + } + } + }; +} + +impl_as_primitive_bf16_from!(i64, from_f32); +impl_as_primitive_bf16_from!(u64, from_f32); +impl_as_primitive_bf16_from!(i8, from_f32); +impl_as_primitive_bf16_from!(u8, from_f32); +impl_as_primitive_bf16_from!(i16, from_f32); +impl_as_primitive_bf16_from!(u16, from_f32); +impl_as_primitive_bf16_from!(i32, from_f32); +impl_as_primitive_bf16_from!(u32, from_f32); +impl_as_primitive_bf16_from!(isize, from_f32); +impl_as_primitive_bf16_from!(usize, from_f32); +impl_as_primitive_bf16_from!(f32, from_f32); +impl_as_primitive_bf16_from!(f64, from_f64); + +impl ToBytes for bf16 { + type Bytes = [u8; 2]; + + fn to_be_bytes(&self) -> Self::Bytes { + Self::to_be_bytes(*self) + } + + fn to_le_bytes(&self) -> Self::Bytes { + Self::to_le_bytes(*self) + } + + fn to_ne_bytes(&self) -> Self::Bytes { + Self::to_ne_bytes(*self) + } +} + +impl FromBytes for bf16 { + type Bytes = [u8; 2]; + + fn from_be_bytes(bytes: &Self::Bytes) -> Self { + Self::from_be_bytes(*bytes) + } + + fn from_le_bytes(bytes: &Self::Bytes) -> Self { + Self::from_le_bytes(*bytes) + } + + fn from_ne_bytes(bytes: &Self::Bytes) -> Self { + Self::from_ne_bytes(*bytes) + } +} + +macro_rules! impl_signed { + ($ty:ty) => { + impl ::num_traits::Signed for $ty { + #[inline] + fn abs(&self) -> Self { + ::num_traits::float::Float::abs(*self) + } + + #[inline] + fn abs_sub(&self, other: &Self) -> Self { + ::num_traits::float::Float::abs_sub(*self, *other) + } + + #[inline] + fn signum(&self) -> Self { + ::num_traits::float::Float::signum(*self) + } + + #[inline] + fn is_positive(&self) -> bool { + ::num_traits::float::Float::is_sign_positive(*self) + } + + #[inline] + fn is_negative(&self) -> bool { + ::num_traits::float::Float::is_sign_negative(*self) + } + } + }; +} + +impl_signed!(f16); +impl_signed!(bf16); diff --git a/vendor/half/src/rand_distr.rs b/vendor/half/src/rand_distr.rs new file mode 100644 index 00000000..99b40d87 --- /dev/null +++ b/vendor/half/src/rand_distr.rs @@ -0,0 +1,145 @@ +use crate::{bf16, f16}; + +use rand::{distr::Distribution, Rng}; +use rand_distr::uniform::UniformFloat; + +macro_rules! 
impl_distribution_via_f32 { + ($Ty:ty, $Distr:ty) => { + impl Distribution<$Ty> for $Distr { + fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> $Ty { + <$Ty>::from_f32(<Self as Distribution<f32>>::sample(self, rng)) + } + } + }; +} + +impl_distribution_via_f32!(f16, rand_distr::StandardUniform); +impl_distribution_via_f32!(f16, rand_distr::StandardNormal); +impl_distribution_via_f32!(f16, rand_distr::Exp1); +impl_distribution_via_f32!(f16, rand_distr::Open01); +impl_distribution_via_f32!(f16, rand_distr::OpenClosed01); + +impl_distribution_via_f32!(bf16, rand_distr::StandardUniform); +impl_distribution_via_f32!(bf16, rand_distr::StandardNormal); +impl_distribution_via_f32!(bf16, rand_distr::Exp1); +impl_distribution_via_f32!(bf16, rand_distr::Open01); +impl_distribution_via_f32!(bf16, rand_distr::OpenClosed01); + +impl rand::distr::weighted::Weight for f16 { + const ZERO: Self = Self::ZERO; + + fn checked_add_assign(&mut self, v: &Self) -> Result<(), ()> { + // Floats have an explicit representation for overflow + *self += v; + Ok(()) + } +} + +impl rand::distr::weighted::Weight for bf16 { + const ZERO: Self = Self::ZERO; + + fn checked_add_assign(&mut self, v: &Self) -> Result<(), ()> { + // Floats have an explicit representation for overflow + *self += v; + Ok(()) + } +} + +#[derive(Debug, Clone, Copy)] +pub struct Float16Sampler(UniformFloat<f32>); + +impl rand_distr::uniform::SampleUniform for f16 { + type Sampler = Float16Sampler; +} + +impl rand_distr::uniform::UniformSampler for Float16Sampler { + type X = f16; + fn new<B1, B2>(low: B1, high: B2) -> Result<Self, rand_distr::uniform::Error> + where + B1: rand_distr::uniform::SampleBorrow<Self::X> + Sized, + B2: rand_distr::uniform::SampleBorrow<Self::X> + Sized, + { + Ok(Self(UniformFloat::new( + low.borrow().to_f32(), + high.borrow().to_f32(), + )?)) + } + fn new_inclusive<B1, B2>(low: B1, high: B2) -> Result<Self, rand_distr::uniform::Error> + where + B1: rand_distr::uniform::SampleBorrow<Self::X> + Sized, + B2: rand_distr::uniform::SampleBorrow<Self::X> + Sized, + { + Ok(Self(UniformFloat::new_inclusive( + low.borrow().to_f32(), + high.borrow().to_f32(), + )?)) + } + fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> Self::X { + f16::from_f32(self.0.sample(rng)) + } +} + +#[derive(Debug, Clone, Copy)] +pub struct BFloat16Sampler(UniformFloat<f32>); + +impl rand_distr::uniform::SampleUniform for bf16 { + type Sampler = BFloat16Sampler; +} + +impl rand_distr::uniform::UniformSampler for BFloat16Sampler { + type X = bf16; + fn new<B1, B2>(low: B1, high: B2) -> Result<Self, rand_distr::uniform::Error> + where + B1: rand_distr::uniform::SampleBorrow<Self::X> + Sized, + B2: rand_distr::uniform::SampleBorrow<Self::X> + Sized, + { + Ok(Self(UniformFloat::new( + low.borrow().to_f32(), + high.borrow().to_f32(), + )?)) + } + fn new_inclusive<B1, B2>(low: B1, high: B2) -> Result<Self, rand_distr::uniform::Error> + where + B1: rand_distr::uniform::SampleBorrow<Self::X> + Sized, + B2: rand_distr::uniform::SampleBorrow<Self::X> + Sized, + { + Ok(Self(UniformFloat::new_inclusive( + low.borrow().to_f32(), + high.borrow().to_f32(), + )?)) + } + fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> Self::X { + bf16::from_f32(self.0.sample(rng)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[allow(unused_imports)] + use rand::{rng, Rng}; + use rand_distr::{StandardNormal, StandardUniform, Uniform}; + + #[test] + fn test_sample_f16() { + let mut rng = rng(); + let _: f16 = rng.sample(StandardUniform); + let _: f16 = rng.sample(StandardNormal); 
+ let _: f16 = rng.sample(Uniform::new(f16::from_f32(0.0), f16::from_f32(1.0)).unwrap()); + #[cfg(feature = "num-traits")] + let _: f16 = + rng.sample(rand_distr::Normal::new(f16::from_f32(0.0), f16::from_f32(1.0)).unwrap()); + } + + #[test] + fn test_sample_bf16() { + let mut rng = rng(); + let _: bf16 = rng.sample(StandardUniform); + let _: bf16 = rng.sample(StandardNormal); + let _: bf16 = rng.sample(Uniform::new(bf16::from_f32(0.0), bf16::from_f32(1.0)).unwrap()); + #[cfg(feature = "num-traits")] + let _: bf16 = + rng.sample(rand_distr::Normal::new(bf16::from_f32(0.0), bf16::from_f32(1.0)).unwrap()); + } +} diff --git a/vendor/half/src/slice.rs b/vendor/half/src/slice.rs new file mode 100644 index 00000000..6b8a587f --- /dev/null +++ b/vendor/half/src/slice.rs @@ -0,0 +1,821 @@ +//! Contains utility functions and traits to convert between slices of [`u16`] bits and [`struct@f16`] or +//! [`struct@bf16`] numbers. +//! +//! The utility [`HalfBitsSliceExt`] sealed extension trait is implemented for `[u16]` slices, +//! while the utility [`HalfFloatSliceExt`] sealed extension trait is implemented for both `[f16]` +//! and `[bf16]` slices. These traits provide efficient conversions and reinterpret casting of +//! larger buffers of floating point values, and are automatically included in the +//! [`prelude`][crate::prelude] module. + +use crate::{bf16, binary16::arch, f16}; +#[cfg(feature = "alloc")] +#[allow(unused_imports)] +use alloc::{vec, vec::Vec}; +use zerocopy::{transmute_mut, transmute_ref}; + +/// Extensions to `[f16]` and `[bf16]` slices to support conversion and reinterpret operations. +/// +/// This trait is sealed and cannot be implemented outside of this crate. +pub trait HalfFloatSliceExt: private::SealedHalfFloatSlice { + /// Reinterprets a slice of [`struct@f16`] or [`struct@bf16`] numbers as a slice of [`u16`] bits. + /// + /// This is a zero-copy operation. The reinterpreted slice has the same lifetime and memory + /// location as `self`. + /// + /// # Examples + /// + /// ```rust + /// # use half::prelude::*; + /// let float_buffer = [f16::from_f32(1.), f16::from_f32(2.), f16::from_f32(3.)]; + /// let int_buffer = float_buffer.reinterpret_cast(); + /// + /// assert_eq!(int_buffer, [float_buffer[0].to_bits(), float_buffer[1].to_bits(), float_buffer[2].to_bits()]); + /// ``` + #[must_use] + fn reinterpret_cast(&self) -> &[u16]; + + /// Reinterprets a mutable slice of [`struct@f16`] or [`struct@bf16`] numbers as a mutable slice of [`u16`]. + /// bits + /// + /// This is a zero-copy operation. The transmuted slice has the same lifetime as the original, + /// which prevents mutating `self` as long as the returned `&mut [u16]` is borrowed. + /// + /// # Examples + /// + /// ```rust + /// # use half::prelude::*; + /// let mut float_buffer = [f16::from_f32(1.), f16::from_f32(2.), f16::from_f32(3.)]; + /// + /// { + /// let int_buffer = float_buffer.reinterpret_cast_mut(); + /// + /// assert_eq!(int_buffer, [f16::from_f32(1.).to_bits(), f16::from_f32(2.).to_bits(), f16::from_f32(3.).to_bits()]); + /// + /// // Mutating the u16 slice will mutating the original + /// int_buffer[0] = 0; + /// } + /// + /// // Note that we need to drop int_buffer before using float_buffer again or we will get a borrow error. + /// assert_eq!(float_buffer, [f16::from_f32(0.), f16::from_f32(2.), f16::from_f32(3.)]); + /// ``` + #[must_use] + fn reinterpret_cast_mut(&mut self) -> &mut [u16]; + + /// Converts all of the elements of a `[f32]` slice into [`struct@f16`] or [`struct@bf16`] values in `self`. 
+ /// + /// The length of `src` must be the same as `self`. + /// + /// The conversion operation is vectorized over the slice, meaning the conversion may be more + /// efficient than converting individual elements on some hardware that supports SIMD + /// conversions. See [crate documentation](crate) for more information on hardware conversion + /// support. + /// + /// # Panics + /// + /// This function will panic if the two slices have different lengths. + /// + /// # Examples + /// ```rust + /// # use half::prelude::*; + /// // Initialize an empty buffer + /// let mut buffer = [0u16; 4]; + /// let buffer = buffer.reinterpret_cast_mut::<f16>(); + /// + /// let float_values = [1., 2., 3., 4.]; + /// + /// // Now convert + /// buffer.convert_from_f32_slice(&float_values); + /// + /// assert_eq!(buffer, [f16::from_f32(1.), f16::from_f32(2.), f16::from_f32(3.), f16::from_f32(4.)]); + /// ``` + fn convert_from_f32_slice(&mut self, src: &[f32]); + + /// Converts all of the elements of a `[f64]` slice into [`struct@f16`] or [`struct@bf16`] values in `self`. + /// + /// The length of `src` must be the same as `self`. + /// + /// The conversion operation is vectorized over the slice, meaning the conversion may be more + /// efficient than converting individual elements on some hardware that supports SIMD + /// conversions. See [crate documentation](crate) for more information on hardware conversion + /// support. + /// + /// # Panics + /// + /// This function will panic if the two slices have different lengths. + /// + /// # Examples + /// ```rust + /// # use half::prelude::*; + /// // Initialize an empty buffer + /// let mut buffer = [0u16; 4]; + /// let buffer = buffer.reinterpret_cast_mut::<f16>(); + /// + /// let float_values = [1., 2., 3., 4.]; + /// + /// // Now convert + /// buffer.convert_from_f64_slice(&float_values); + /// + /// assert_eq!(buffer, [f16::from_f64(1.), f16::from_f64(2.), f16::from_f64(3.), f16::from_f64(4.)]); + /// ``` + fn convert_from_f64_slice(&mut self, src: &[f64]); + + /// Converts all of the [`struct@f16`] or [`struct@bf16`] elements of `self` into [`f32`] values in `dst`. + /// + /// The length of `src` must be the same as `self`. + /// + /// The conversion operation is vectorized over the slice, meaning the conversion may be more + /// efficient than converting individual elements on some hardware that supports SIMD + /// conversions. See [crate documentation](crate) for more information on hardware conversion + /// support. + /// + /// # Panics + /// + /// This function will panic if the two slices have different lengths. + /// + /// # Examples + /// ```rust + /// # use half::prelude::*; + /// // Initialize an empty buffer + /// let mut buffer = [0f32; 4]; + /// + /// let half_values = [f16::from_f32(1.), f16::from_f32(2.), f16::from_f32(3.), f16::from_f32(4.)]; + /// + /// // Now convert + /// half_values.convert_to_f32_slice(&mut buffer); + /// + /// assert_eq!(buffer, [1., 2., 3., 4.]); + /// ``` + fn convert_to_f32_slice(&self, dst: &mut [f32]); + + /// Converts all of the [`struct@f16`] or [`struct@bf16`] elements of `self` into [`f64`] values in `dst`. + /// + /// The length of `src` must be the same as `self`. + /// + /// The conversion operation is vectorized over the slice, meaning the conversion may be more + /// efficient than converting individual elements on some hardware that supports SIMD + /// conversions. See [crate documentation](crate) for more information on hardware conversion + /// support. 
+ /// + /// # Panics + /// + /// This function will panic if the two slices have different lengths. + /// + /// # Examples + /// ```rust + /// # use half::prelude::*; + /// // Initialize an empty buffer + /// let mut buffer = [0f64; 4]; + /// + /// let half_values = [f16::from_f64(1.), f16::from_f64(2.), f16::from_f64(3.), f16::from_f64(4.)]; + /// + /// // Now convert + /// half_values.convert_to_f64_slice(&mut buffer); + /// + /// assert_eq!(buffer, [1., 2., 3., 4.]); + /// ``` + fn convert_to_f64_slice(&self, dst: &mut [f64]); + + // Because trait is sealed, we can get away with different interfaces between features. + + /// Converts all of the [`struct@f16`] or [`struct@bf16`] elements of `self` into [`f32`] values in a new + /// vector + /// + /// The conversion operation is vectorized over the slice, meaning the conversion may be more + /// efficient than converting individual elements on some hardware that supports SIMD + /// conversions. See [crate documentation](crate) for more information on hardware conversion + /// support. + /// + /// This method is only available with the `std` or `alloc` feature. + /// + /// # Examples + /// ```rust + /// # use half::prelude::*; + /// let half_values = [f16::from_f32(1.), f16::from_f32(2.), f16::from_f32(3.), f16::from_f32(4.)]; + /// let vec = half_values.to_f32_vec(); + /// + /// assert_eq!(vec, vec![1., 2., 3., 4.]); + /// ``` + #[cfg(any(feature = "alloc", feature = "std"))] + #[must_use] + fn to_f32_vec(&self) -> Vec<f32>; + + /// Converts all of the [`struct@f16`] or [`struct@bf16`] elements of `self` into [`f64`] values in a new + /// vector. + /// + /// The conversion operation is vectorized over the slice, meaning the conversion may be more + /// efficient than converting individual elements on some hardware that supports SIMD + /// conversions. See [crate documentation](crate) for more information on hardware conversion + /// support. + /// + /// This method is only available with the `std` or `alloc` feature. + /// + /// # Examples + /// ```rust + /// # use half::prelude::*; + /// let half_values = [f16::from_f64(1.), f16::from_f64(2.), f16::from_f64(3.), f16::from_f64(4.)]; + /// let vec = half_values.to_f64_vec(); + /// + /// assert_eq!(vec, vec![1., 2., 3., 4.]); + /// ``` + #[cfg(feature = "alloc")] + #[must_use] + fn to_f64_vec(&self) -> Vec<f64>; +} + +/// Extensions to `[u16]` slices to support reinterpret operations. +/// +/// This trait is sealed and cannot be implemented outside of this crate. +pub trait HalfBitsSliceExt: private::SealedHalfBitsSlice { + /// Reinterprets a slice of [`u16`] bits as a slice of [`struct@f16`] or [`struct@bf16`] numbers. + /// + /// `H` is the type to cast to, and must be either the [`struct@f16`] or [`struct@bf16`] type. + /// + /// This is a zero-copy operation. The reinterpreted slice has the same lifetime and memory + /// location as `self`. + /// + /// # Examples + /// + /// ```rust + /// # use half::prelude::*; + /// let int_buffer = [f16::from_f32(1.).to_bits(), f16::from_f32(2.).to_bits(), f16::from_f32(3.).to_bits()]; + /// let float_buffer: &[f16] = int_buffer.reinterpret_cast(); + /// + /// assert_eq!(float_buffer, [f16::from_f32(1.), f16::from_f32(2.), f16::from_f32(3.)]); + /// + /// // You may have to specify the cast type directly if the compiler can't infer the type. + /// // The following is also valid in Rust. 
+ /// let typed_buffer = int_buffer.reinterpret_cast::<f16>(); + /// ``` + #[must_use] + fn reinterpret_cast<H>(&self) -> &[H] + where + H: crate::private::SealedHalf; + + /// Reinterprets a mutable slice of [`u16`] bits as a mutable slice of [`struct@f16`] or [`struct@bf16`] + /// numbers. + /// + /// `H` is the type to cast to, and must be either the [`struct@f16`] or [`struct@bf16`] type. + /// + /// This is a zero-copy operation. The transmuted slice has the same lifetime as the original, + /// which prevents mutating `self` as long as the returned `&mut [f16]` is borrowed. + /// + /// # Examples + /// + /// ```rust + /// # use half::prelude::*; + /// let mut int_buffer = [f16::from_f32(1.).to_bits(), f16::from_f32(2.).to_bits(), f16::from_f32(3.).to_bits()]; + /// + /// { + /// let float_buffer: &mut [f16] = int_buffer.reinterpret_cast_mut(); + /// + /// assert_eq!(float_buffer, [f16::from_f32(1.), f16::from_f32(2.), f16::from_f32(3.)]); + /// + /// // Mutating the f16 slice will mutating the original + /// float_buffer[0] = f16::from_f32(0.); + /// } + /// + /// // Note that we need to drop float_buffer before using int_buffer again or we will get a borrow error. + /// assert_eq!(int_buffer, [f16::from_f32(0.).to_bits(), f16::from_f32(2.).to_bits(), f16::from_f32(3.).to_bits()]); + /// + /// // You may have to specify the cast type directly if the compiler can't infer the type. + /// // The following is also valid in Rust. + /// let typed_buffer = int_buffer.reinterpret_cast_mut::<f16>(); + /// ``` + #[must_use] + fn reinterpret_cast_mut<H>(&mut self) -> &mut [H] + where + H: crate::private::SealedHalf; +} + +mod private { + use crate::{bf16, f16}; + + pub trait SealedHalfFloatSlice {} + impl SealedHalfFloatSlice for [f16] {} + impl SealedHalfFloatSlice for [bf16] {} + + pub trait SealedHalfBitsSlice {} + impl SealedHalfBitsSlice for [u16] {} +} + +impl HalfFloatSliceExt for [f16] { + #[inline] + fn reinterpret_cast(&self) -> &[u16] { + transmute_ref!(self) + } + + #[inline] + fn reinterpret_cast_mut(&mut self) -> &mut [u16] { + transmute_mut!(self) + } + + #[inline] + fn convert_from_f32_slice(&mut self, src: &[f32]) { + assert_eq!( + self.len(), + src.len(), + "destination and source slices have different lengths" + ); + + arch::f32_to_f16_slice(src, self.reinterpret_cast_mut()) + } + + #[inline] + fn convert_from_f64_slice(&mut self, src: &[f64]) { + assert_eq!( + self.len(), + src.len(), + "destination and source slices have different lengths" + ); + + arch::f64_to_f16_slice(src, self.reinterpret_cast_mut()) + } + + #[inline] + fn convert_to_f32_slice(&self, dst: &mut [f32]) { + assert_eq!( + self.len(), + dst.len(), + "destination and source slices have different lengths" + ); + + arch::f16_to_f32_slice(self.reinterpret_cast(), dst) + } + + #[inline] + fn convert_to_f64_slice(&self, dst: &mut [f64]) { + assert_eq!( + self.len(), + dst.len(), + "destination and source slices have different lengths" + ); + + arch::f16_to_f64_slice(self.reinterpret_cast(), dst) + } + + #[cfg(any(feature = "alloc", feature = "std"))] + #[inline] + #[allow(clippy::uninit_vec)] + fn to_f32_vec(&self) -> Vec<f32> { + let mut vec = vec![0f32; self.len()]; + self.convert_to_f32_slice(&mut vec); + vec + } + + #[cfg(any(feature = "alloc", feature = "std"))] + #[inline] + #[allow(clippy::uninit_vec)] + fn to_f64_vec(&self) -> Vec<f64> { + let mut vec = vec![0f64; self.len()]; + self.convert_to_f64_slice(&mut vec); + vec + } +} + +impl HalfFloatSliceExt for [bf16] { + #[inline] + fn reinterpret_cast(&self) 
-> &[u16] { + transmute_ref!(self) + } + + #[inline] + fn reinterpret_cast_mut(&mut self) -> &mut [u16] { + transmute_mut!(self) + } + + #[inline] + fn convert_from_f32_slice(&mut self, src: &[f32]) { + assert_eq!( + self.len(), + src.len(), + "destination and source slices have different lengths" + ); + + // Just use regular loop here until there's any bf16 SIMD support. + for (i, f) in src.iter().enumerate() { + self[i] = bf16::from_f32(*f); + } + } + + #[inline] + fn convert_from_f64_slice(&mut self, src: &[f64]) { + assert_eq!( + self.len(), + src.len(), + "destination and source slices have different lengths" + ); + + // Just use regular loop here until there's any bf16 SIMD support. + for (i, f) in src.iter().enumerate() { + self[i] = bf16::from_f64(*f); + } + } + + #[inline] + fn convert_to_f32_slice(&self, dst: &mut [f32]) { + assert_eq!( + self.len(), + dst.len(), + "destination and source slices have different lengths" + ); + + // Just use regular loop here until there's any bf16 SIMD support. + for (i, f) in self.iter().enumerate() { + dst[i] = f.to_f32(); + } + } + + #[inline] + fn convert_to_f64_slice(&self, dst: &mut [f64]) { + assert_eq!( + self.len(), + dst.len(), + "destination and source slices have different lengths" + ); + + // Just use regular loop here until there's any bf16 SIMD support. + for (i, f) in self.iter().enumerate() { + dst[i] = f.to_f64(); + } + } + + #[cfg(any(feature = "alloc", feature = "std"))] + #[inline] + #[allow(clippy::uninit_vec)] + fn to_f32_vec(&self) -> Vec<f32> { + let mut vec = vec![0f32; self.len()]; + self.convert_to_f32_slice(&mut vec); + vec + } + + #[cfg(any(feature = "alloc", feature = "std"))] + #[inline] + #[allow(clippy::uninit_vec)] + fn to_f64_vec(&self) -> Vec<f64> { + let mut vec = vec![0f64; self.len()]; + self.convert_to_f64_slice(&mut vec); + vec + } +} + +impl HalfBitsSliceExt for [u16] { + // Since we sealed all the traits involved, these are safe. 
+ #[inline] + fn reinterpret_cast<H>(&self) -> &[H] + where + H: crate::private::SealedHalf, + { + transmute_ref!(self) + } + + #[inline] + fn reinterpret_cast_mut<H>(&mut self) -> &mut [H] + where + H: crate::private::SealedHalf, + { + transmute_mut!(self) + } +} + +#[allow(clippy::float_cmp)] +#[cfg(test)] +mod test { + use super::{HalfBitsSliceExt, HalfFloatSliceExt}; + use crate::{bf16, f16}; + + #[test] + fn test_slice_conversions_f16() { + let bits = &[ + f16::E.to_bits(), + f16::PI.to_bits(), + f16::EPSILON.to_bits(), + f16::FRAC_1_SQRT_2.to_bits(), + ]; + let numbers = &[f16::E, f16::PI, f16::EPSILON, f16::FRAC_1_SQRT_2]; + + // Convert from bits to numbers + let from_bits = bits.reinterpret_cast::<f16>(); + assert_eq!(from_bits, numbers); + + // Convert from numbers back to bits + let to_bits = from_bits.reinterpret_cast(); + assert_eq!(to_bits, bits); + } + + #[test] + fn test_mutablility_f16() { + let mut bits_array = [f16::PI.to_bits()]; + let bits = &mut bits_array[..]; + + { + // would not compile without these braces + let numbers = bits.reinterpret_cast_mut(); + numbers[0] = f16::E; + } + + assert_eq!(bits, &[f16::E.to_bits()]); + + bits[0] = f16::LN_2.to_bits(); + assert_eq!(bits, &[f16::LN_2.to_bits()]); + } + + #[test] + fn test_slice_conversions_bf16() { + let bits = &[ + bf16::E.to_bits(), + bf16::PI.to_bits(), + bf16::EPSILON.to_bits(), + bf16::FRAC_1_SQRT_2.to_bits(), + ]; + let numbers = &[bf16::E, bf16::PI, bf16::EPSILON, bf16::FRAC_1_SQRT_2]; + + // Convert from bits to numbers + let from_bits = bits.reinterpret_cast::<bf16>(); + assert_eq!(from_bits, numbers); + + // Convert from numbers back to bits + let to_bits = from_bits.reinterpret_cast(); + assert_eq!(to_bits, bits); + } + + #[test] + fn test_mutablility_bf16() { + let mut bits_array = [bf16::PI.to_bits()]; + let bits = &mut bits_array[..]; + + { + // would not compile without these braces + let numbers = bits.reinterpret_cast_mut(); + numbers[0] = bf16::E; + } + + assert_eq!(bits, &[bf16::E.to_bits()]); + + bits[0] = bf16::LN_2.to_bits(); + assert_eq!(bits, &[bf16::LN_2.to_bits()]); + } + + #[test] + fn slice_convert_f16_f32() { + // Exact chunks + let vf32 = [1., 2., 3., 4., 5., 6., 7., 8.]; + let vf16 = [ + f16::from_f32(1.), + f16::from_f32(2.), + f16::from_f32(3.), + f16::from_f32(4.), + f16::from_f32(5.), + f16::from_f32(6.), + f16::from_f32(7.), + f16::from_f32(8.), + ]; + let mut buf32 = vf32; + let mut buf16 = vf16; + + vf16.convert_to_f32_slice(&mut buf32); + assert_eq!(&vf32, &buf32); + + buf16.convert_from_f32_slice(&vf32); + assert_eq!(&vf16, &buf16); + + // Partial with chunks + let vf32 = [1., 2., 3., 4., 5., 6., 7., 8., 9.]; + let vf16 = [ + f16::from_f32(1.), + f16::from_f32(2.), + f16::from_f32(3.), + f16::from_f32(4.), + f16::from_f32(5.), + f16::from_f32(6.), + f16::from_f32(7.), + f16::from_f32(8.), + f16::from_f32(9.), + ]; + let mut buf32 = vf32; + let mut buf16 = vf16; + + vf16.convert_to_f32_slice(&mut buf32); + assert_eq!(&vf32, &buf32); + + buf16.convert_from_f32_slice(&vf32); + assert_eq!(&vf16, &buf16); + + // Partial with chunks + let vf32 = [1., 2.]; + let vf16 = [f16::from_f32(1.), f16::from_f32(2.)]; + let mut buf32 = vf32; + let mut buf16 = vf16; + + vf16.convert_to_f32_slice(&mut buf32); + assert_eq!(&vf32, &buf32); + + buf16.convert_from_f32_slice(&vf32); + assert_eq!(&vf16, &buf16); + } + + #[test] + fn slice_convert_bf16_f32() { + // Exact chunks + let vf32 = [1., 2., 3., 4., 5., 6., 7., 8.]; + let vf16 = [ + bf16::from_f32(1.), + bf16::from_f32(2.), + 
bf16::from_f32(3.), + bf16::from_f32(4.), + bf16::from_f32(5.), + bf16::from_f32(6.), + bf16::from_f32(7.), + bf16::from_f32(8.), + ]; + let mut buf32 = vf32; + let mut buf16 = vf16; + + vf16.convert_to_f32_slice(&mut buf32); + assert_eq!(&vf32, &buf32); + + buf16.convert_from_f32_slice(&vf32); + assert_eq!(&vf16, &buf16); + + // Partial with chunks + let vf32 = [1., 2., 3., 4., 5., 6., 7., 8., 9.]; + let vf16 = [ + bf16::from_f32(1.), + bf16::from_f32(2.), + bf16::from_f32(3.), + bf16::from_f32(4.), + bf16::from_f32(5.), + bf16::from_f32(6.), + bf16::from_f32(7.), + bf16::from_f32(8.), + bf16::from_f32(9.), + ]; + let mut buf32 = vf32; + let mut buf16 = vf16; + + vf16.convert_to_f32_slice(&mut buf32); + assert_eq!(&vf32, &buf32); + + buf16.convert_from_f32_slice(&vf32); + assert_eq!(&vf16, &buf16); + + // Partial with chunks + let vf32 = [1., 2.]; + let vf16 = [bf16::from_f32(1.), bf16::from_f32(2.)]; + let mut buf32 = vf32; + let mut buf16 = vf16; + + vf16.convert_to_f32_slice(&mut buf32); + assert_eq!(&vf32, &buf32); + + buf16.convert_from_f32_slice(&vf32); + assert_eq!(&vf16, &buf16); + } + + #[test] + fn slice_convert_f16_f64() { + // Exact chunks + let vf64 = [1., 2., 3., 4., 5., 6., 7., 8.]; + let vf16 = [ + f16::from_f64(1.), + f16::from_f64(2.), + f16::from_f64(3.), + f16::from_f64(4.), + f16::from_f64(5.), + f16::from_f64(6.), + f16::from_f64(7.), + f16::from_f64(8.), + ]; + let mut buf64 = vf64; + let mut buf16 = vf16; + + vf16.convert_to_f64_slice(&mut buf64); + assert_eq!(&vf64, &buf64); + + buf16.convert_from_f64_slice(&vf64); + assert_eq!(&vf16, &buf16); + + // Partial with chunks + let vf64 = [1., 2., 3., 4., 5., 6., 7., 8., 9.]; + let vf16 = [ + f16::from_f64(1.), + f16::from_f64(2.), + f16::from_f64(3.), + f16::from_f64(4.), + f16::from_f64(5.), + f16::from_f64(6.), + f16::from_f64(7.), + f16::from_f64(8.), + f16::from_f64(9.), + ]; + let mut buf64 = vf64; + let mut buf16 = vf16; + + vf16.convert_to_f64_slice(&mut buf64); + assert_eq!(&vf64, &buf64); + + buf16.convert_from_f64_slice(&vf64); + assert_eq!(&vf16, &buf16); + + // Partial with chunks + let vf64 = [1., 2.]; + let vf16 = [f16::from_f64(1.), f16::from_f64(2.)]; + let mut buf64 = vf64; + let mut buf16 = vf16; + + vf16.convert_to_f64_slice(&mut buf64); + assert_eq!(&vf64, &buf64); + + buf16.convert_from_f64_slice(&vf64); + assert_eq!(&vf16, &buf16); + } + + #[test] + fn slice_convert_bf16_f64() { + // Exact chunks + let vf64 = [1., 2., 3., 4., 5., 6., 7., 8.]; + let vf16 = [ + bf16::from_f64(1.), + bf16::from_f64(2.), + bf16::from_f64(3.), + bf16::from_f64(4.), + bf16::from_f64(5.), + bf16::from_f64(6.), + bf16::from_f64(7.), + bf16::from_f64(8.), + ]; + let mut buf64 = vf64; + let mut buf16 = vf16; + + vf16.convert_to_f64_slice(&mut buf64); + assert_eq!(&vf64, &buf64); + + buf16.convert_from_f64_slice(&vf64); + assert_eq!(&vf16, &buf16); + + // Partial with chunks + let vf64 = [1., 2., 3., 4., 5., 6., 7., 8., 9.]; + let vf16 = [ + bf16::from_f64(1.), + bf16::from_f64(2.), + bf16::from_f64(3.), + bf16::from_f64(4.), + bf16::from_f64(5.), + bf16::from_f64(6.), + bf16::from_f64(7.), + bf16::from_f64(8.), + bf16::from_f64(9.), + ]; + let mut buf64 = vf64; + let mut buf16 = vf16; + + vf16.convert_to_f64_slice(&mut buf64); + assert_eq!(&vf64, &buf64); + + buf16.convert_from_f64_slice(&vf64); + assert_eq!(&vf16, &buf16); + + // Partial with chunks + let vf64 = [1., 2.]; + let vf16 = [bf16::from_f64(1.), bf16::from_f64(2.)]; + let mut buf64 = vf64; + let mut buf16 = vf16; + + vf16.convert_to_f64_slice(&mut buf64); + 
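Beyond the round-trip tests here, the slice extension traits are typically used to pack an `f32` buffer into half precision and then view it as raw bits. A usage sketch (editor's addition, not vendored code), assuming `half = "2"` with default features:

```rust
// Usage sketch (editor's addition, not vendored code); assumes `half = "2"`
// with default features (std enabled).
use half::prelude::*;

fn main() {
    let samples: Vec<f32> = vec![0.0, 0.5, 1.0, 1.5];

    // Pre-allocate the destination, then convert in one (possibly SIMD) call.
    let mut packed = vec![f16::ZERO; samples.len()];
    packed.convert_from_f32_slice(&samples);

    // Zero-copy view of the same memory as plain u16 words, e.g. for serialization.
    let bits: &[u16] = packed.reinterpret_cast();
    assert_eq!(bits.len(), samples.len());

    // Round-trip to verify; these particular values are exactly representable in f16.
    assert_eq!(packed.to_f32_vec(), samples);
}
```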
assert_eq!(&vf64, &buf64); + + buf16.convert_from_f64_slice(&vf64); + assert_eq!(&vf16, &buf16); + } + + #[test] + #[should_panic] + fn convert_from_f32_slice_len_mismatch_panics() { + let mut slice1 = [f16::ZERO; 3]; + let slice2 = [0f32; 4]; + slice1.convert_from_f32_slice(&slice2); + } + + #[test] + #[should_panic] + fn convert_from_f64_slice_len_mismatch_panics() { + let mut slice1 = [f16::ZERO; 3]; + let slice2 = [0f64; 4]; + slice1.convert_from_f64_slice(&slice2); + } + + #[test] + #[should_panic] + fn convert_to_f32_slice_len_mismatch_panics() { + let slice1 = [f16::ZERO; 3]; + let mut slice2 = [0f32; 4]; + slice1.convert_to_f32_slice(&mut slice2); + } + + #[test] + #[should_panic] + fn convert_to_f64_slice_len_mismatch_panics() { + let slice1 = [f16::ZERO; 3]; + let mut slice2 = [0f64; 4]; + slice1.convert_to_f64_slice(&mut slice2); + } +} diff --git a/vendor/half/src/vec.rs b/vendor/half/src/vec.rs new file mode 100644 index 00000000..a9d39c41 --- /dev/null +++ b/vendor/half/src/vec.rs @@ -0,0 +1,260 @@ +//! Contains utility functions and traits to convert between vectors of [`u16`] bits and [`struct@f16`] or +//! [`bf16`] vectors. +//! +//! The utility [`HalfBitsVecExt`] sealed extension trait is implemented for [`Vec<u16>`] vectors, +//! while the utility [`HalfFloatVecExt`] sealed extension trait is implemented for both +//! [`Vec<f16>`] and [`Vec<bf16>`] vectors. These traits provide efficient conversions and +//! reinterpret casting of larger buffers of floating point values, and are automatically included +//! in the [`prelude`][crate::prelude] module. +//! +//! This module is only available with the `std` or `alloc` feature. + +use super::{bf16, f16, slice::HalfFloatSliceExt}; +#[cfg(feature = "alloc")] +#[allow(unused_imports)] +use alloc::{vec, vec::Vec}; +use core::mem; + +/// Extensions to [`Vec<f16>`] and [`Vec<bf16>`] to support reinterpret operations. +/// +/// This trait is sealed and cannot be implemented outside of this crate. +pub trait HalfFloatVecExt: private::SealedHalfFloatVec { + /// Reinterprets a vector of [`struct@f16`]or [`bf16`] numbers as a vector of [`u16`] bits. + /// + /// This is a zero-copy operation. The reinterpreted vector has the same memory location as + /// `self`. + /// + /// # Examples + /// + /// ```rust + /// # use half::prelude::*; + /// let float_buffer = vec![f16::from_f32(1.), f16::from_f32(2.), f16::from_f32(3.)]; + /// let int_buffer = float_buffer.reinterpret_into(); + /// + /// assert_eq!(int_buffer, [f16::from_f32(1.).to_bits(), f16::from_f32(2.).to_bits(), f16::from_f32(3.).to_bits()]); + /// ``` + #[must_use] + fn reinterpret_into(self) -> Vec<u16>; + + /// Converts all of the elements of a `[f32]` slice into a new [`struct@f16`] or [`bf16`] vector. + /// + /// The conversion operation is vectorized over the slice, meaning the conversion may be more + /// efficient than converting individual elements on some hardware that supports SIMD + /// conversions. See [crate documentation][crate] for more information on hardware conversion + /// support. + /// + /// # Examples + /// ```rust + /// # use half::prelude::*; + /// let float_values = [1., 2., 3., 4.]; + /// let vec: Vec<f16> = Vec::from_f32_slice(&float_values); + /// + /// assert_eq!(vec, vec![f16::from_f32(1.), f16::from_f32(2.), f16::from_f32(3.), f16::from_f32(4.)]); + /// ``` + #[must_use] + fn from_f32_slice(slice: &[f32]) -> Self; + + /// Converts all of the elements of a `[f64]` slice into a new [`struct@f16`] or [`bf16`] vector. 
+ /// + /// The conversion operation is vectorized over the slice, meaning the conversion may be more + /// efficient than converting individual elements on some hardware that supports SIMD + /// conversions. See [crate documentation][crate] for more information on hardware conversion + /// support. + /// + /// # Examples + /// ```rust + /// # use half::prelude::*; + /// let float_values = [1., 2., 3., 4.]; + /// let vec: Vec<f16> = Vec::from_f64_slice(&float_values); + /// + /// assert_eq!(vec, vec![f16::from_f64(1.), f16::from_f64(2.), f16::from_f64(3.), f16::from_f64(4.)]); + /// ``` + #[must_use] + fn from_f64_slice(slice: &[f64]) -> Self; +} + +/// Extensions to [`Vec<u16>`] to support reinterpret operations. +/// +/// This trait is sealed and cannot be implemented outside of this crate. +pub trait HalfBitsVecExt: private::SealedHalfBitsVec { + /// Reinterprets a vector of [`u16`] bits as a vector of [`struct@f16`] or [`bf16`] numbers. + /// + /// `H` is the type to cast to, and must be either the [`struct@f16`] or [`bf16`] type. + /// + /// This is a zero-copy operation. The reinterpreted vector has the same memory location as + /// `self`. + /// + /// # Examples + /// + /// ```rust + /// # use half::prelude::*; + /// let int_buffer = vec![f16::from_f32(1.).to_bits(), f16::from_f32(2.).to_bits(), f16::from_f32(3.).to_bits()]; + /// let float_buffer = int_buffer.reinterpret_into::<f16>(); + /// + /// assert_eq!(float_buffer, [f16::from_f32(1.), f16::from_f32(2.), f16::from_f32(3.)]); + /// ``` + #[must_use] + fn reinterpret_into<H>(self) -> Vec<H> + where + H: crate::private::SealedHalf; +} + +mod private { + use crate::{bf16, f16}; + #[cfg(feature = "alloc")] + #[allow(unused_imports)] + use alloc::vec::Vec; + + pub trait SealedHalfFloatVec {} + impl SealedHalfFloatVec for Vec<f16> {} + impl SealedHalfFloatVec for Vec<bf16> {} + + pub trait SealedHalfBitsVec {} + impl SealedHalfBitsVec for Vec<u16> {} +} + +impl HalfFloatVecExt for Vec<f16> { + #[inline] + fn reinterpret_into(mut self) -> Vec<u16> { + // An f16 array has same length and capacity as u16 array + let length = self.len(); + let capacity = self.capacity(); + + // Actually reinterpret the contents of the Vec<f16> as u16, + // knowing that structs are represented as only their members in memory, + // which is the u16 part of `f16(u16)` + let pointer = self.as_mut_ptr() as *mut u16; + + // Prevent running a destructor on the old Vec<u16>, so the pointer won't be deleted + mem::forget(self); + + // Finally construct a new Vec<f16> from the raw pointer + // SAFETY: We are reconstructing full length and capacity of original vector, + // using its original pointer, and the size of elements are identical. 
+ unsafe { Vec::from_raw_parts(pointer, length, capacity) } + } + + #[allow(clippy::uninit_vec)] + fn from_f32_slice(slice: &[f32]) -> Self { + let mut vec = vec![f16::from_bits(0); slice.len()]; + vec.convert_from_f32_slice(slice); + vec + } + + #[allow(clippy::uninit_vec)] + fn from_f64_slice(slice: &[f64]) -> Self { + let mut vec = vec![f16::from_bits(0); slice.len()]; + vec.convert_from_f64_slice(slice); + vec + } +} + +impl HalfFloatVecExt for Vec<bf16> { + #[inline] + fn reinterpret_into(mut self) -> Vec<u16> { + // An f16 array has same length and capacity as u16 array + let length = self.len(); + let capacity = self.capacity(); + + // Actually reinterpret the contents of the Vec<f16> as u16, + // knowing that structs are represented as only their members in memory, + // which is the u16 part of `f16(u16)` + let pointer = self.as_mut_ptr() as *mut u16; + + // Prevent running a destructor on the old Vec<u16>, so the pointer won't be deleted + mem::forget(self); + + // Finally construct a new Vec<f16> from the raw pointer + // SAFETY: We are reconstructing full length and capacity of original vector, + // using its original pointer, and the size of elements are identical. + unsafe { Vec::from_raw_parts(pointer, length, capacity) } + } + + #[allow(clippy::uninit_vec)] + fn from_f32_slice(slice: &[f32]) -> Self { + let mut vec = vec![bf16::from_bits(0); slice.len()]; + vec.convert_from_f32_slice(slice); + vec + } + + #[allow(clippy::uninit_vec)] + fn from_f64_slice(slice: &[f64]) -> Self { + let mut vec = vec![bf16::from_bits(0); slice.len()]; + vec.convert_from_f64_slice(slice); + vec + } +} + +impl HalfBitsVecExt for Vec<u16> { + // This is safe because all traits are sealed + #[inline] + fn reinterpret_into<H>(mut self) -> Vec<H> + where + H: crate::private::SealedHalf, + { + // An f16 array has same length and capacity as u16 array + let length = self.len(); + let capacity = self.capacity(); + + // Actually reinterpret the contents of the Vec<u16> as f16, + // knowing that structs are represented as only their members in memory, + // which is the u16 part of `f16(u16)` + let pointer = self.as_mut_ptr() as *mut H; + + // Prevent running a destructor on the old Vec<u16>, so the pointer won't be deleted + mem::forget(self); + + // Finally construct a new Vec<f16> from the raw pointer + // SAFETY: We are reconstructing full length and capacity of original vector, + // using its original pointer, and the size of elements are identical. 
+ unsafe { Vec::from_raw_parts(pointer, length, capacity) } + } +} + +#[cfg(test)] +mod test { + use super::{HalfBitsVecExt, HalfFloatVecExt}; + use crate::{bf16, f16}; + #[cfg(all(feature = "alloc", not(feature = "std")))] + use alloc::vec; + + #[test] + fn test_vec_conversions_f16() { + let numbers = vec![f16::E, f16::PI, f16::EPSILON, f16::FRAC_1_SQRT_2]; + let bits = vec![ + f16::E.to_bits(), + f16::PI.to_bits(), + f16::EPSILON.to_bits(), + f16::FRAC_1_SQRT_2.to_bits(), + ]; + let bits_cloned = bits.clone(); + + // Convert from bits to numbers + let from_bits = bits.reinterpret_into::<f16>(); + assert_eq!(&from_bits[..], &numbers[..]); + + // Convert from numbers back to bits + let to_bits = from_bits.reinterpret_into(); + assert_eq!(&to_bits[..], &bits_cloned[..]); + } + + #[test] + fn test_vec_conversions_bf16() { + let numbers = vec![bf16::E, bf16::PI, bf16::EPSILON, bf16::FRAC_1_SQRT_2]; + let bits = vec![ + bf16::E.to_bits(), + bf16::PI.to_bits(), + bf16::EPSILON.to_bits(), + bf16::FRAC_1_SQRT_2.to_bits(), + ]; + let bits_cloned = bits.clone(); + + // Convert from bits to numbers + let from_bits = bits.reinterpret_into::<bf16>(); + assert_eq!(&from_bits[..], &numbers[..]); + + // Convert from numbers back to bits + let to_bits = from_bits.reinterpret_into(); + assert_eq!(&to_bits[..], &bits_cloned[..]); + } +} diff --git a/vendor/proc-macro2/.cargo-checksum.json b/vendor/proc-macro2/.cargo-checksum.json index 9621da34..9d997fa6 100644 --- a/vendor/proc-macro2/.cargo-checksum.json +++ b/vendor/proc-macro2/.cargo-checksum.json @@ -1 +1 @@ -{"files":{".cargo_vcs_info.json":"6668af556e69812970d749f58ed00bd93f667b7a6e046c6f53d9013a78a89e0c",".github/FUNDING.yml":"b017158736b3c9751a2d21edfce7fe61c8954e2fced8da8dd3013c2f3e295bd9",".github/workflows/ci.yml":"94953f47fb3077accfebfe2feb2421a0d8f85c150382c9ee5aaf8ef643f4cfaa","Cargo.toml":"01a9ffc0f14fe891a6a75927a22429879d06ea732b24ab1229838e6c827d9630","Cargo.toml.orig":"12998eb25a7c88b13cfaf7e57f1467d10fdd3fd85f29f2eed3797b4d88b24efe","LICENSE-APACHE":"62c7a1e35f56406896d7aa7ca52d0cc0d272ac022b5d2796e7d6905db8a3636a","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"c609b6865476d6c35879784e9155367a97a0da496aa5c3c61488440a20f59883","build.rs":"6b0b19a3af5248513b186b9c28c133f5af34a1d11122c0268c68e89724aa40fa","rust-toolchain.toml":"6bbb61302978c736b2da03e4fb40e3beab908f85d533ab46fd541e637b5f3e0f","src/detection.rs":"ed9a5f9a979ab01247d7a68eeb1afa3c13209334c5bfff0f9289cb07e5bb4e8b","src/extra.rs":"d378a9e799e5c49933b067cd38f5364d16a152ef337eef86ce42fdc86005ddf3","src/fallback.rs":"11b005c95258ad50362bcaa9b778aab15a8d49a0c434cd2b42afff98416ae1fe","src/lib.rs":"99a6acbddbe01cd906ac243a3db52a28b9fbb2bdc5f238f992d15fb0cebdbcdc","src/location.rs":"f55d2e61f1bb1af65e14ed04c9e91eb1ddbf8430e8c05f2048d1cd538d27368e","src/marker.rs":"43f5a18f5059f1a16507c047b3b7387afee7f25ac45ba4eb1621ca7fa733eb01","src/parse.rs":"6ceaad0a6375af9a202cf8df6ebe72e1bce9543b1f0db71ea03929ac02c3a8b8","src/rcvec.rs":"1c3c48c4f819927cc445ae15ca3bb06775feff2fd1cb21901ae4c40c7e6b4e82","src/wrapper.rs":"06624150b94f4fd9ada30b2c9ad6936ea695d05c2138ceb14f2304e757133d52","tests/comments.rs":"31115b3a56c83d93eef2fb4c9566bf4543e302560732986161b98aef504785ed","tests/features.rs":"a86deb8644992a4eb64d9fd493eff16f9cf9c5cb6ade3a634ce0c990cf87d559","tests/marker.rs":"3190ee07dae510251f360db701ce257030f94a479b6689c3a9ef804bd5d8d099","tests/test.rs":"7511be57e097b15403cf36feb858b4aabdc832fac7024571059a559a7e2ed2a0","tests/test_fmt.rs":"b7743b612af
65f2c88cbe109d50a093db7aa7e87f9e37bf45b7bbaeb240aa020","tests/test_size.rs":"acf05963c1e62052d769d237b50844a2c59b4182b491231b099a4f74e5456ab0"},"package":"134c189feb4956b20f6f547d2cf727d4c0fe06722b20a0eec87ed445a97f92da"} \ No newline at end of file +{"files":{".cargo_vcs_info.json":"0c0e9279875a3f985b09df73eefab42d9e6f5566f26485c9e3a057e887d219b7",".github/FUNDING.yml":"b017158736b3c9751a2d21edfce7fe61c8954e2fced8da8dd3013c2f3e295bd9",".github/workflows/ci.yml":"b2004c92e8985c58c1338202b2ebef0f25fa50de01e9101fe46a17000ca59962","Cargo.lock":"4afb839b0f3299f791ccdda8213faddff1ee208d64a14e883b4e24ee48957aea","Cargo.toml":"8c059fba2000e51a2d88025b8ebdc7a0b0e26b3f67cb3baa96c222dafb9c31e4","Cargo.toml.orig":"42bf3a4709d2fcc1027a9c68f525054ea542683cedf626ef2c76b6b2ac63a5dc","LICENSE-APACHE":"62c7a1e35f56406896d7aa7ca52d0cc0d272ac022b5d2796e7d6905db8a3636a","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"e60d0a33eb3bfc8583866bb84ca53fbae5e5cb39b67acfbb3c1f35dae41e19a9","build.rs":"baeb20b52f6b536be8657a566591a507bb2e34a45cf8baa42b135510a0c3c729","rust-toolchain.toml":"6bbb61302978c736b2da03e4fb40e3beab908f85d533ab46fd541e637b5f3e0f","src/detection.rs":"ed9a5f9a979ab01247d7a68eeb1afa3c13209334c5bfff0f9289cb07e5bb4e8b","src/extra.rs":"29f094473279a29b71c3cc9f5fa27c2e2c30c670390cf7e4b7cf451486cc857e","src/fallback.rs":"962e1897fefb138101ae3f6fda9c46cecff787550cdfb9133066326379464d90","src/lib.rs":"c07a2ad1ccbda629d0f2018d6d7762f4dcb955e8d0714ffcf9c7f3d5cd0020f2","src/location.rs":"9225c5a55f03b56cce42bc55ceb509e8216a5e0b24c94aa1cd071b04e3d6c15f","src/marker.rs":"c11c5a1be8bdf18be3fcd224393f350a9aae7ce282e19ce583c84910c6903a8f","src/num.rs":"82d625cbcd255965e46231ac3af1b74ab8bff9787c799e8ed1f978de146cb0b5","src/parse.rs":"0c380fdbe8795d41e08a40e3a1e67e505e9aa9398277a6a794af7d96fab06ac6","src/probe.rs":"2b57e8ebf46a7c60ee2762f23f16d24ee9ddb8f1acd0a7faf7a99cf2e4187151","src/probe/proc_macro_span.rs":"f3f9c728438060c9450d4568621beca9125f559eb65faab9574d2e43e9b49643","src/probe/proc_macro_span_file.rs":"a20a1920d121b153ce33c8e2ea203b9370606744847b62e8ffd0c657d2545778","src/probe/proc_macro_span_location.rs":"71a4768f65f8a87e5a3c2bc2e05fb84d2562a0f4733780e9f919563f25ee07dc","src/rcvec.rs":"a159d246cac59aae2d51b899471ce34766f51f3c11c376ac36ee501ee3f12a7a","src/rustc_literal_escaper.rs":"188cbe8fffe7af3899977530cbb1b6c0b1dff51623db0ec115db1e082159e7b6","src/wrapper.rs":"057b7baa778e8205c0af47405c1af077d4fd19318ed4b79bd195ddceb4da0b15","tests/comments.rs":"11520f6baee23b9258db904f4c256fd3877493f754e2b99041f73a330e74a911","tests/features.rs":"7e52c0c801019b271bf11a994c2e1799a1429b0c1a3a34e551a23971797fe412","tests/marker.rs":"f16299460587d6c65603ed809f1a3b81853e4b99d6cb44d0b68bb07259d7e9f8","tests/test.rs":"c590a13e38c2b5d92a6181433652925dd9d19dd404c6839290abc7acbc3cb5a3","tests/test_fmt.rs":"b7743b612af65f2c88cbe109d50a093db7aa7e87f9e37bf45b7bbaeb240aa020","tests/test_size.rs":"62d8373ea46b669b87bc90a9c49b6d02f90ff4c21f9a25acebf60c9926e01fb7"},"package":"5ee95bc4ef87b8d5ba32e8b7714ccc834865276eab0aed5c9958d00ec45f49e8"} \ No newline at end of file diff --git a/vendor/proc-macro2/.cargo_vcs_info.json b/vendor/proc-macro2/.cargo_vcs_info.json index c15c6f1f..bdeb94e1 100644 --- a/vendor/proc-macro2/.cargo_vcs_info.json +++ b/vendor/proc-macro2/.cargo_vcs_info.json @@ -1,6 +1,6 @@ { "git": { - "sha1": "937bbcdcc15ef7bc7ee90813dc72093a2beb30c5" + "sha1": "d1bf13ac1d90c3b65c1b7fc131a26f37a8e2d0db" }, "path_in_vcs": "" } \ No newline at end of file diff --git 
a/vendor/proc-macro2/.github/workflows/ci.yml b/vendor/proc-macro2/.github/workflows/ci.yml index f7d6be80..669a88c9 100644 --- a/vendor/proc-macro2/.github/workflows/ci.yml +++ b/vendor/proc-macro2/.github/workflows/ci.yml @@ -24,10 +24,10 @@ jobs: strategy: fail-fast: false matrix: - rust: [1.56.0, stable, beta] + rust: [1.80.0, stable, beta] timeout-minutes: 45 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - uses: dtolnay/rust-toolchain@master with: toolchain: ${{matrix.rust}} @@ -51,12 +51,15 @@ jobs: runs-on: ubuntu-latest timeout-minutes: 45 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - uses: dtolnay/rust-toolchain@nightly with: components: rust-src - name: Enable type layout randomization - run: echo RUSTFLAGS=${RUSTFLAGS}\ -Zrandomize-layout >> $GITHUB_ENV + run: echo RUSTFLAGS=${RUSTFLAGS}\ -Zrandomize-layout\ --cfg=randomize_layout >> $GITHUB_ENV + - run: cargo check + env: + RUSTFLAGS: --cfg procmacro2_nightly_testing ${{env.RUSTFLAGS}} - run: cargo test - run: cargo test --no-default-features - run: cargo test --no-default-features --test features -- --ignored make_sure_no_proc_macro # run the ignored test to make sure the `proc-macro` feature is disabled @@ -73,7 +76,52 @@ jobs: - name: RUSTFLAGS='-Z allow-features=' cargo test run: cargo test env: - RUSTFLAGS: -Z allow-features= ${{env.RUSTFLAGS}} + RUSTFLAGS: -Z allow-features= --cfg procmacro2_backtrace ${{env.RUSTFLAGS}} + - uses: actions/upload-artifact@v4 + if: always() + with: + name: Cargo.lock + path: Cargo.lock + continue-on-error: true + + layout: + name: Layout + needs: pre_ci + if: needs.pre_ci.outputs.continue + runs-on: ubuntu-latest + timeout-minutes: 45 + steps: + - uses: actions/checkout@v5 + - uses: dtolnay/rust-toolchain@nightly + with: + components: rust-src + - run: cargo test --test test_size + - run: cargo test --test test_size --features span-locations + - run: cargo test --test test_size --no-default-features + - run: cargo test --test test_size --no-default-features --features span-locations + + msrv: + name: Rust 1.60.0 + needs: pre_ci + if: needs.pre_ci.outputs.continue + runs-on: ubuntu-latest + timeout-minutes: 45 + steps: + - uses: actions/checkout@v5 + - uses: dtolnay/rust-toolchain@1.60.0 + with: + components: rust-src + - run: cargo check + - run: cargo check --no-default-features + - run: cargo check --features span-locations + - name: RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo check + run: cargo check + env: + RUSTFLAGS: --cfg procmacro2_semver_exempt ${{env.RUSTFLAGS}} + - name: RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo check --no-default-features + run: cargo check --no-default-features + env: + RUSTFLAGS: --cfg procmacro2_semver_exempt ${{env.RUSTFLAGS}} minimal: name: Minimal versions @@ -82,7 +130,7 @@ jobs: runs-on: ubuntu-latest timeout-minutes: 45 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - uses: dtolnay/rust-toolchain@nightly - run: cargo generate-lockfile -Z minimal-versions - run: cargo check --locked @@ -94,11 +142,13 @@ jobs: runs-on: ubuntu-latest timeout-minutes: 45 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - uses: dtolnay/rust-toolchain@nightly with: target: wasm32-unknown-unknown components: rust-src + - name: Ignore WebAssembly linker warning + run: echo RUSTFLAGS=${RUSTFLAGS}\ -Alinker_messages >> $GITHUB_ENV - run: cargo test --target wasm32-unknown-unknown --no-run fuzz: @@ -108,7 +158,7 @@ jobs: runs-on: ubuntu-latest timeout-minutes: 45 steps: - - uses: 
actions/checkout@v4 + - uses: actions/checkout@v5 - uses: dtolnay/rust-toolchain@nightly with: components: rust-src @@ -117,7 +167,12 @@ jobs: - run: cargo check --no-default-features --features afl working-directory: fuzz - uses: dtolnay/install@honggfuzz - - run: sudo apt-get install binutils-dev libunwind-dev + - name: Run apt install binutils-dev libunwind-dev + run: | + sudo sed -i 's/^update_initramfs=yes$/update_initramfs=no/' /etc/initramfs-tools/update-initramfs.conf + sudo rm -f /var/lib/man-db/auto-update + sudo apt-get update + sudo apt-get install binutils-dev libunwind-dev - run: cargo hfuzz build --no-default-features --features honggfuzz working-directory: fuzz @@ -130,7 +185,7 @@ jobs: env: RUSTDOCFLAGS: -Dwarnings steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - uses: dtolnay/rust-toolchain@nightly with: components: rust-src @@ -143,7 +198,7 @@ jobs: if: github.event_name != 'pull_request' timeout-minutes: 45 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - uses: dtolnay/rust-toolchain@nightly with: components: clippy, rust-src @@ -157,7 +212,7 @@ jobs: runs-on: ubuntu-latest timeout-minutes: 45 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - uses: dtolnay/rust-toolchain@miri - run: cargo miri setup - run: cargo miri test @@ -170,7 +225,8 @@ jobs: if: github.event_name != 'pull_request' timeout-minutes: 45 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 + - uses: dtolnay/rust-toolchain@stable - uses: dtolnay/install@cargo-outdated - run: cargo outdated --workspace --exit-code 1 - run: cargo outdated --manifest-path fuzz/Cargo.toml --exit-code 1 diff --git a/vendor/proc-macro2/Cargo.lock b/vendor/proc-macro2/Cargo.lock new file mode 100644 index 00000000..e37ffdd4 --- /dev/null +++ b/vendor/proc-macro2/Cargo.lock @@ -0,0 +1,326 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 3 + +[[package]] +name = "adler2" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" + +[[package]] +name = "bitflags" +version = "2.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3" + +[[package]] +name = "cfg-if" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" + +[[package]] +name = "crc32fast" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "crossbeam-deque" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51" +dependencies = [ + "crossbeam-epoch", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" + +[[package]] +name = "either" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" + +[[package]] +name = "errno" +version = "0.3.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" +dependencies = [ + "libc", + "windows-sys 0.61.2", +] + +[[package]] +name = "filetime" +version = "0.2.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc0505cd1b6fa6580283f6bdf70a73fcf4aba1184038c90902b92b3dd0df63ed" +dependencies = [ + "cfg-if", + "libc", + "libredox", + "windows-sys 0.60.2", +] + +[[package]] +name = "flate2" +version = "1.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc5a4e564e38c699f2880d3fda590bedc2e69f3f84cd48b457bd892ce61d0aa9" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + +[[package]] +name = "libc" +version = "0.2.177" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2874a2af47a2325c2001a6e6fad9b16a53b802102b528163885171cf92b15976" + +[[package]] +name = "libredox" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "416f7e718bdb06000964960ffa43b4335ad4012ae8b99060261aa4a8088d5ccb" +dependencies = [ + "bitflags", + "libc", + "redox_syscall", +] + +[[package]] +name = "linux-raw-sys" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039" + +[[package]] +name = "miniz_oxide" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" +dependencies = [ + "adler2", + "simd-adler32", +] + +[[package]] +name = "proc-macro2" +version = "1.0.102" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e0f6df8eaa422d97d72edcd152e1451618fed47fabbdbd5a8864167b1d4aff7" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "proc-macro2" +version = "1.0.103" +dependencies = [ + "flate2", + "quote", + "rayon", + "rustversion", + "tar", + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.41" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce25767e7b499d1b604768e7cde645d14cc8584231ea6b295e9c9eb22c02e1d1" +dependencies = [ + "proc-macro2 1.0.102", +] + +[[package]] +name = "rayon" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "368f01d005bf8fd9b1206fb6fa653e6c4a81ceb1466406b81792d87c5677a58f" +dependencies = [ + "either", + "rayon-core", +] + +[[package]] +name = "rayon-core" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22e18b0f0062d30d4230b2e85ff77fdfe4326feb054b9783a3460d8435c8ab91" +dependencies = [ + "crossbeam-deque", + "crossbeam-utils", +] + +[[package]] +name = "redox_syscall" +version = "0.5.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" +dependencies = [ + "bitflags", +] + +[[package]] +name = "rustix" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd15f8a2c5551a84d56efdc1cd049089e409ac19a3072d5037a17fd70719ff3e" +dependencies = [ + "bitflags", + "errno", + "libc", + "linux-raw-sys", + "windows-sys 0.61.2", +] + +[[package]] +name = "rustversion" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" + +[[package]] +name = "simd-adler32" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d66dc143e6b11c1eddc06d5c423cfc97062865baf299914ab64caa38182078fe" + +[[package]] +name = "tar" +version = "0.4.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d863878d212c87a19c1a610eb53bb01fe12951c0501cf5a0d65f724914a667a" +dependencies = [ + "filetime", + "libc", + "xattr", +] + +[[package]] +name = "unicode-ident" +version = "1.0.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "462eeb75aeb73aea900253ce739c8e18a67423fadf006037cd3ff27e82748a06" + +[[package]] +name = "windows-link" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" + +[[package]] +name = "windows-sys" +version = "0.60.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-sys" +version = "0.61.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-targets" +version = "0.53.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3" +dependencies = [ + "windows-link", + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_gnullvm", + "windows_i686_msvc", + "windows_x86_64_gnu", 
+ "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006" + +[[package]] +name = "windows_i686_gnu" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "960e6da069d81e09becb0ca57a65220ddff016ff2d6af6a223cf372a506593a3" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c" + +[[package]] +name = "windows_i686_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" + +[[package]] +name = "xattr" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32e45ad4206f6d2479085147f02bc2ef834ac85886624a23575ae137c8aa8156" +dependencies = [ + "libc", + "rustix", +] diff --git a/vendor/proc-macro2/Cargo.toml b/vendor/proc-macro2/Cargo.toml index 6f4ed46f..3f0173c7 100644 --- a/vendor/proc-macro2/Cargo.toml +++ b/vendor/proc-macro2/Cargo.toml @@ -11,13 +11,18 @@ [package] edition = "2021" -rust-version = "1.56" +rust-version = "1.60" name = "proc-macro2" -version = "1.0.69" +version = "1.0.103" authors = [ "David Tolnay <dtolnay@gmail.com>", "Alex Crichton <alex@alexcrichton.com>", ] +build = "build.rs" +autolib = false +autobins = false +autoexamples = false +autotests = false autobenches = false description = "A substitute implementation of the compiler's `proc_macro` API to decouple token-based libraries from the procedural macro use case." 
documentation = "https://docs.rs/proc-macro2" @@ -31,37 +36,70 @@ license = "MIT OR Apache-2.0" repository = "https://github.com/dtolnay/proc-macro2" [package.metadata.docs.rs] -rustc-args = [ - "--cfg", - "procmacro2_semver_exempt", -] +rustc-args = ["--cfg=procmacro2_semver_exempt"] +targets = ["x86_64-unknown-linux-gnu"] rustdoc-args = [ - "--cfg", - "procmacro2_semver_exempt", - "--cfg", - "doc_cfg", + "--cfg=procmacro2_semver_exempt", "--generate-link-to-definition", + "--generate-macro-expansion", + "--extern-html-root-url=core=https://doc.rust-lang.org", + "--extern-html-root-url=alloc=https://doc.rust-lang.org", + "--extern-html-root-url=std=https://doc.rust-lang.org", + "--extern-html-root-url=proc_macro=https://doc.rust-lang.org", ] -targets = ["x86_64-unknown-linux-gnu"] [package.metadata.playground] features = ["span-locations"] +[features] +default = ["proc-macro"] +nightly = [] +proc-macro = [] +span-locations = [] + [lib] -doc-scrape-examples = false +name = "proc_macro2" +path = "src/lib.rs" + +[[test]] +name = "comments" +path = "tests/comments.rs" + +[[test]] +name = "features" +path = "tests/features.rs" + +[[test]] +name = "marker" +path = "tests/marker.rs" + +[[test]] +name = "test" +path = "tests/test.rs" + +[[test]] +name = "test_fmt" +path = "tests/test_fmt.rs" + +[[test]] +name = "test_size" +path = "tests/test_size.rs" [dependencies.unicode-ident] version = "1.0" +[dev-dependencies.flate2] +version = "1.0" + [dev-dependencies.quote] version = "1.0" -default_features = false +default-features = false + +[dev-dependencies.rayon] +version = "1.0" [dev-dependencies.rustversion] version = "1" -[features] -default = ["proc-macro"] -nightly = [] -proc-macro = [] -span-locations = [] +[dev-dependencies.tar] +version = "0.4" diff --git a/vendor/proc-macro2/Cargo.toml.orig b/vendor/proc-macro2/Cargo.toml.orig index ca5f7d6d..5f42f40b 100644 --- a/vendor/proc-macro2/Cargo.toml.orig +++ b/vendor/proc-macro2/Cargo.toml.orig @@ -1,6 +1,6 @@ [package] name = "proc-macro2" -version = "1.0.69" # remember to update html_root_url +version = "1.0.103" authors = ["David Tolnay <dtolnay@gmail.com>", "Alex Crichton <alex@alexcrichton.com>"] autobenches = false categories = ["development-tools::procedural-macro-helpers"] @@ -10,12 +10,20 @@ edition = "2021" keywords = ["macros", "syn"] license = "MIT OR Apache-2.0" repository = "https://github.com/dtolnay/proc-macro2" -rust-version = "1.56" +rust-version = "1.60" [package.metadata.docs.rs] -rustc-args = ["--cfg", "procmacro2_semver_exempt"] -rustdoc-args = ["--cfg", "procmacro2_semver_exempt", "--cfg", "doc_cfg", "--generate-link-to-definition"] +rustc-args = ["--cfg=procmacro2_semver_exempt"] targets = ["x86_64-unknown-linux-gnu"] +rustdoc-args = [ + "--cfg=procmacro2_semver_exempt", + "--generate-link-to-definition", + "--generate-macro-expansion", + "--extern-html-root-url=core=https://doc.rust-lang.org", + "--extern-html-root-url=alloc=https://doc.rust-lang.org", + "--extern-html-root-url=std=https://doc.rust-lang.org", + "--extern-html-root-url=proc_macro=https://doc.rust-lang.org", +] [package.metadata.playground] features = ["span-locations"] @@ -24,8 +32,11 @@ features = ["span-locations"] unicode-ident = "1.0" [dev-dependencies] -quote = { version = "1.0", default_features = false } +flate2 = "1.0" +quote = { version = "1.0", default-features = false } +rayon = "1.0" rustversion = "1" +tar = "0.4" [features] proc-macro = [] @@ -38,9 +49,6 @@ span-locations = [] # This feature no longer means anything. 
nightly = [] -[lib] -doc-scrape-examples = false - [workspace] members = ["benches/bench-libproc-macro", "tests/ui"] diff --git a/vendor/proc-macro2/README.md b/vendor/proc-macro2/README.md index 3a29ce8b..0b6b490f 100644 --- a/vendor/proc-macro2/README.md +++ b/vendor/proc-macro2/README.md @@ -62,7 +62,7 @@ proc-macro2 by default. To opt into the additional APIs available in the most recent nightly compiler, the `procmacro2_semver_exempt` config flag must be passed to rustc. We will -polyfill those nightly-only APIs back to Rust 1.56.0. As these are unstable APIs +polyfill those nightly-only APIs back to Rust 1.60.0. As these are unstable APIs that track the nightly compiler, minor versions of proc-macro2 may make breaking changes to them at any time. diff --git a/vendor/proc-macro2/build.rs b/vendor/proc-macro2/build.rs index 9f0fb51e..26c3ed1b 100644 --- a/vendor/proc-macro2/build.rs +++ b/vendor/proc-macro2/build.rs @@ -1,133 +1,267 @@ -// rustc-cfg emitted by the build script: -// -// "wrap_proc_macro" -// Wrap types from libproc_macro rather than polyfilling the whole API. -// Enabled on rustc 1.29+ as long as procmacro2_semver_exempt is not set, -// because we can't emulate the unstable API without emulating everything -// else. Also enabled unconditionally on nightly, in which case the -// procmacro2_semver_exempt surface area is implemented by using the -// nightly-only proc_macro API. -// -// "hygiene" -// Enable Span::mixed_site() and non-dummy behavior of Span::resolved_at -// and Span::located_at. Enabled on Rust 1.45+. -// -// "proc_macro_span" -// Enable non-dummy behavior of Span::start and Span::end methods which -// requires an unstable compiler feature. Enabled when building with -// nightly, unless `-Z allow-feature` in RUSTFLAGS disallows unstable -// features. -// -// "super_unstable" -// Implement the semver exempt API in terms of the nightly-only proc_macro -// API. Enabled when using procmacro2_semver_exempt on a nightly compiler. -// -// "span_locations" -// Provide methods Span::start and Span::end which give the line/column -// location of a token. Enabled by procmacro2_semver_exempt or the -// "span-locations" Cargo cfg. This is behind a cfg because tracking -// location inside spans is a performance hit. -// -// "is_available" -// Use proc_macro::is_available() to detect if the proc macro API is -// available or needs to be polyfilled instead of trying to use the proc -// macro API and catching a panic if it isn't available. Enabled on Rust -// 1.57+. 
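The cfg names documented in the removed build-script header above are ordinary compiler cfgs driven by Cargo's build-script protocol ("cargo:rustc-check-cfg=..." declares a cfg, "cargo:rustc-cfg=..." turns it on), not Cargo features; the rewritten build.rs that follows keeps this protocol but swaps the old nightly-version heuristic for compile probes when deciding the span-related cfgs. A minimal sketch of the protocol, with a made-up cfg name and a plain minor-version check standing in for the probe logic:

    // Illustrative build.rs sketch only; not part of this patch.
    use std::env;
    use std::process::Command;

    fn main() {
        // Declare the custom cfg so check-cfg (Rust 1.80+) does not warn about it.
        // `example_span_api` is a hypothetical name, not one of the cfgs listed above.
        println!("cargo:rustc-check-cfg=cfg(example_span_api)");

        // Ask the compiler Cargo hands us for its version, e.g. "rustc 1.88.0 (...)".
        let rustc = env::var_os("RUSTC").expect("RUSTC is set by Cargo");
        let output = Command::new(rustc).arg("--version").output().unwrap();
        let version = String::from_utf8(output.stdout).unwrap();
        let minor: u32 = version
            .split('.')
            .nth(1)
            .and_then(|s| s.parse().ok())
            .unwrap_or(u32::MAX);

        // Enable the cfg only on new-enough toolchains; library code can then
        // gate on it with `#[cfg(example_span_api)]`.
        if minor >= 88 {
            println!("cargo:rustc-cfg=example_span_api");
        }
    }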
+#![allow(unknown_lints)] +#![allow(unexpected_cfgs)] +#![allow(clippy::uninlined_format_args)] use std::env; -use std::process::Command; +use std::ffi::OsString; +use std::fs; +use std::io::ErrorKind; +use std::iter; +use std::path::Path; +use std::process::{self, Command, Stdio}; use std::str; -use std::u32; fn main() { - println!("cargo:rerun-if-changed=build.rs"); + let rustc = rustc_minor_version().unwrap_or(u32::MAX); - let version = rustc_version().unwrap_or(RustcVersion { - minor: u32::MAX, - nightly: false, - }); + if rustc >= 80 { + println!("cargo:rustc-check-cfg=cfg(fuzzing)"); + println!("cargo:rustc-check-cfg=cfg(no_is_available)"); + println!("cargo:rustc-check-cfg=cfg(no_literal_byte_character)"); + println!("cargo:rustc-check-cfg=cfg(no_literal_c_string)"); + println!("cargo:rustc-check-cfg=cfg(no_source_text)"); + println!("cargo:rustc-check-cfg=cfg(proc_macro_span)"); + println!("cargo:rustc-check-cfg=cfg(proc_macro_span_file)"); + println!("cargo:rustc-check-cfg=cfg(proc_macro_span_location)"); + println!("cargo:rustc-check-cfg=cfg(procmacro2_backtrace)"); + println!("cargo:rustc-check-cfg=cfg(procmacro2_build_probe)"); + println!("cargo:rustc-check-cfg=cfg(procmacro2_nightly_testing)"); + println!("cargo:rustc-check-cfg=cfg(procmacro2_semver_exempt)"); + println!("cargo:rustc-check-cfg=cfg(randomize_layout)"); + println!("cargo:rustc-check-cfg=cfg(span_locations)"); + println!("cargo:rustc-check-cfg=cfg(super_unstable)"); + println!("cargo:rustc-check-cfg=cfg(wrap_proc_macro)"); + } - let docs_rs = env::var_os("DOCS_RS").is_some(); - let semver_exempt = cfg!(procmacro2_semver_exempt) || docs_rs; + let semver_exempt = cfg!(procmacro2_semver_exempt); if semver_exempt { // https://github.com/dtolnay/proc-macro2/issues/147 println!("cargo:rustc-cfg=procmacro2_semver_exempt"); } if semver_exempt || cfg!(feature = "span-locations") { + // Provide methods Span::start and Span::end which give the line/column + // location of a token. This is behind a cfg because tracking location + // inside spans is a performance hit. println!("cargo:rustc-cfg=span_locations"); } - if version.minor < 57 { + if rustc < 57 { + // Do not use proc_macro::is_available() to detect whether the proc + // macro API is available vs needs to be polyfilled. Instead, use the + // proc macro API unconditionally and catch the panic that occurs if it + // isn't available. println!("cargo:rustc-cfg=no_is_available"); } - if version.minor < 66 { + if rustc < 66 { + // Do not call libproc_macro's Span::source_text. Always return None. println!("cargo:rustc-cfg=no_source_text"); } + if rustc < 79 { + // Do not call Literal::byte_character nor Literal::c_string. They can + // be emulated by way of Literal::from_str. + println!("cargo:rustc-cfg=no_literal_byte_character"); + println!("cargo:rustc-cfg=no_literal_c_string"); + } + if !cfg!(feature = "proc-macro") { + println!("cargo:rerun-if-changed=build.rs"); return; } - if version.nightly || !semver_exempt { + let proc_macro_span; + let consider_rustc_bootstrap; + if compile_probe_unstable("proc_macro_span", false) { + // This is a nightly or dev compiler, so it supports unstable features + // regardless of RUSTC_BOOTSTRAP. No need to rerun build script if + // RUSTC_BOOTSTRAP is changed. 
+ proc_macro_span = true; + consider_rustc_bootstrap = false; + } else if let Some(rustc_bootstrap) = env::var_os("RUSTC_BOOTSTRAP") { + if compile_probe_unstable("proc_macro_span", true) { + // This is a stable or beta compiler for which the user has set + // RUSTC_BOOTSTRAP to turn on unstable features. Rerun build script + // if they change it. + proc_macro_span = true; + consider_rustc_bootstrap = true; + } else if rustc_bootstrap == "1" { + // This compiler does not support the proc macro Span API in the + // form that proc-macro2 expects. No need to pay attention to + // RUSTC_BOOTSTRAP. + proc_macro_span = false; + consider_rustc_bootstrap = false; + } else { + // This is a stable or beta compiler for which RUSTC_BOOTSTRAP is + // set to restrict the use of unstable features by this crate. + proc_macro_span = false; + consider_rustc_bootstrap = true; + } + } else { + // Without RUSTC_BOOTSTRAP, this compiler does not support the proc + // macro Span API in the form that proc-macro2 expects, but try again if + // the user turns on unstable features. + proc_macro_span = false; + consider_rustc_bootstrap = true; + } + + if proc_macro_span || !semver_exempt { + // Wrap types from libproc_macro rather than polyfilling the whole API. + // Enabled as long as procmacro2_semver_exempt is not set, because we + // can't emulate the unstable API without emulating everything else. + // Also enabled unconditionally on nightly, in which case the + // procmacro2_semver_exempt surface area is implemented by using the + // nightly-only proc_macro API. println!("cargo:rustc-cfg=wrap_proc_macro"); } - if version.nightly && feature_allowed("proc_macro_span") { + if proc_macro_span { + // Enable non-dummy behavior of Span::byte_range and Span::join methods + // which requires an unstable compiler feature. Enabled when building + // with nightly, unless `-Z allow-feature` in RUSTFLAGS disallows + // unstable features. println!("cargo:rustc-cfg=proc_macro_span"); } - if semver_exempt && version.nightly { + if proc_macro_span || (rustc >= 88 && compile_probe_stable("proc_macro_span_location")) { + // Enable non-dummy behavior of Span::start and Span::end methods on + // Rust 1.88+. + println!("cargo:rustc-cfg=proc_macro_span_location"); + } + + if proc_macro_span || (rustc >= 88 && compile_probe_stable("proc_macro_span_file")) { + // Enable non-dummy behavior of Span::file and Span::local_file methods + // on Rust 1.88+. + println!("cargo:rustc-cfg=proc_macro_span_file"); + } + + if semver_exempt && proc_macro_span { + // Implement the semver exempt API in terms of the nightly-only + // proc_macro API. println!("cargo:rustc-cfg=super_unstable"); } + + if consider_rustc_bootstrap { + println!("cargo:rerun-if-env-changed=RUSTC_BOOTSTRAP"); + } +} + +fn compile_probe_unstable(feature: &str, rustc_bootstrap: bool) -> bool { + // RUSTC_STAGE indicates that this crate is being compiled as a dependency + // of a multistage rustc bootstrap. 
This environment uses Cargo in a highly + // non-standard way with issues such as: + // + // https://github.com/rust-lang/cargo/issues/11138 + // https://github.com/rust-lang/rust/issues/114839 + // + env::var_os("RUSTC_STAGE").is_none() && do_compile_probe(feature, rustc_bootstrap) +} + +fn compile_probe_stable(feature: &str) -> bool { + env::var_os("RUSTC_STAGE").is_some() || do_compile_probe(feature, true) } -struct RustcVersion { - minor: u32, - nightly: bool, +fn do_compile_probe(feature: &str, rustc_bootstrap: bool) -> bool { + println!("cargo:rerun-if-changed=src/probe/{}.rs", feature); + + let rustc = cargo_env_var("RUSTC"); + let out_dir = cargo_env_var("OUT_DIR"); + let out_subdir = Path::new(&out_dir).join("probe"); + let probefile = Path::new("src") + .join("probe") + .join(feature) + .with_extension("rs"); + + if let Err(err) = fs::create_dir(&out_subdir) { + if err.kind() != ErrorKind::AlreadyExists { + eprintln!("Failed to create {}: {}", out_subdir.display(), err); + process::exit(1); + } + } + + let rustc_wrapper = env::var_os("RUSTC_WRAPPER").filter(|wrapper| !wrapper.is_empty()); + let rustc_workspace_wrapper = + env::var_os("RUSTC_WORKSPACE_WRAPPER").filter(|wrapper| !wrapper.is_empty()); + let mut rustc = rustc_wrapper + .into_iter() + .chain(rustc_workspace_wrapper) + .chain(iter::once(rustc)); + let mut cmd = Command::new(rustc.next().unwrap()); + cmd.args(rustc); + + if !rustc_bootstrap { + cmd.env_remove("RUSTC_BOOTSTRAP"); + } + + cmd.stderr(Stdio::null()) + .arg("--cfg=procmacro2_build_probe") + .arg("--edition=2021") + .arg("--crate-name=proc_macro2") + .arg("--crate-type=lib") + .arg("--cap-lints=allow") + .arg("--emit=dep-info,metadata") + .arg("--out-dir") + .arg(&out_subdir) + .arg(probefile); + + if let Some(target) = env::var_os("TARGET") { + cmd.arg("--target").arg(target); + } + + // If Cargo wants to set RUSTFLAGS, use that. + if let Ok(rustflags) = env::var("CARGO_ENCODED_RUSTFLAGS") { + if !rustflags.is_empty() { + for arg in rustflags.split('\x1f') { + cmd.arg(arg); + } + } + } + + let success = match cmd.status() { + Ok(status) => status.success(), + Err(_) => false, + }; + + // Clean up to avoid leaving nondeterministic absolute paths in the dep-info + // file in OUT_DIR, which causes nonreproducible builds in build systems + // that treat the entire OUT_DIR as an artifact. + if let Err(err) = fs::remove_dir_all(&out_subdir) { + // libc::ENOTEMPTY + // Some filesystems (NFSv3) have timing issues under load where '.nfs*' + // dummy files can continue to get created for a short period after the + // probe command completes, breaking remove_dir_all. + // To be replaced with ErrorKind::DirectoryNotEmpty (Rust 1.83+). 
+ const ENOTEMPTY: i32 = 39; + + if !(err.kind() == ErrorKind::NotFound + || (cfg!(target_os = "linux") && err.raw_os_error() == Some(ENOTEMPTY))) + { + eprintln!("Failed to clean up {}: {}", out_subdir.display(), err); + process::exit(1); + } + } + + success } -fn rustc_version() -> Option<RustcVersion> { - let rustc = env::var_os("RUSTC")?; +fn rustc_minor_version() -> Option<u32> { + let rustc = cargo_env_var("RUSTC"); let output = Command::new(rustc).arg("--version").output().ok()?; let version = str::from_utf8(&output.stdout).ok()?; - let nightly = version.contains("nightly") || version.contains("dev"); let mut pieces = version.split('.'); if pieces.next() != Some("rustc 1") { return None; } - let minor = pieces.next()?.parse().ok()?; - Some(RustcVersion { minor, nightly }) + pieces.next()?.parse().ok() } -fn feature_allowed(feature: &str) -> bool { - // Recognized formats: - // - // -Z allow-features=feature1,feature2 - // - // -Zallow-features=feature1,feature2 - - let flags_var; - let flags_var_string; - let flags = if let Some(encoded_rustflags) = env::var_os("CARGO_ENCODED_RUSTFLAGS") { - flags_var = encoded_rustflags; - flags_var_string = flags_var.to_string_lossy(); - flags_var_string.split('\x1f') - } else { - return true; - }; - - for mut flag in flags { - if flag.starts_with("-Z") { - flag = &flag["-Z".len()..]; - } - if flag.starts_with("allow-features=") { - flag = &flag["allow-features=".len()..]; - return flag.split(',').any(|allowed| allowed == feature); - } - } - - // No allow-features= flag, allowed by default. - true +fn cargo_env_var(key: &str) -> OsString { + env::var_os(key).unwrap_or_else(|| { + eprintln!( + "Environment variable ${} is not set during execution of build script", + key, + ); + process::exit(1); + }) } diff --git a/vendor/proc-macro2/src/extra.rs b/vendor/proc-macro2/src/extra.rs index 4a69d465..522a90e1 100644 --- a/vendor/proc-macro2/src/extra.rs +++ b/vendor/proc-macro2/src/extra.rs @@ -3,18 +3,85 @@ use crate::fallback; use crate::imp; -use crate::marker::Marker; +use crate::marker::{ProcMacroAutoTraits, MARKER}; use crate::Span; use core::fmt::{self, Debug}; +/// Invalidate any `proc_macro2::Span` that exist on the current thread. +/// +/// The implementation of `Span` uses thread-local data structures and this +/// function clears them. Calling any method on a `Span` on the current thread +/// created prior to the invalidation will return incorrect values or crash. +/// +/// This function is useful for programs that process more than 2<sup>32</sup> +/// bytes of Rust source code on the same thread. Just like rustc, proc-macro2 +/// uses 32-bit source locations, and these wrap around when the total source +/// code processed by the same thread exceeds 2<sup>32</sup> bytes (4 +/// gigabytes). After a wraparound, `Span` methods such as `source_text()` can +/// return wrong data. +/// +/// # Example +/// +/// As of late 2023, there is 200 GB of Rust code published on crates.io. +/// Looking at just the newest version of every crate, it is 16 GB of code. So a +/// workload that involves parsing it all would overflow a 32-bit source +/// location unless spans are being invalidated. 
+/// +/// ``` +/// use flate2::read::GzDecoder; +/// use std::ffi::OsStr; +/// use std::io::{BufReader, Read}; +/// use std::str::FromStr; +/// use tar::Archive; +/// +/// rayon::scope(|s| { +/// for krate in every_version_of_every_crate() { +/// s.spawn(move |_| { +/// proc_macro2::extra::invalidate_current_thread_spans(); +/// +/// let reader = BufReader::new(krate); +/// let tar = GzDecoder::new(reader); +/// let mut archive = Archive::new(tar); +/// for entry in archive.entries().unwrap() { +/// let mut entry = entry.unwrap(); +/// let path = entry.path().unwrap(); +/// if path.extension() != Some(OsStr::new("rs")) { +/// continue; +/// } +/// let mut content = String::new(); +/// entry.read_to_string(&mut content).unwrap(); +/// match proc_macro2::TokenStream::from_str(&content) { +/// Ok(tokens) => {/* ... */}, +/// Err(_) => continue, +/// } +/// } +/// }); +/// } +/// }); +/// # +/// # fn every_version_of_every_crate() -> Vec<std::fs::File> { +/// # Vec::new() +/// # } +/// ``` +/// +/// # Panics +/// +/// This function is not applicable to and will panic if called from a +/// procedural macro. +#[cfg(span_locations)] +#[cfg_attr(docsrs, doc(cfg(feature = "span-locations")))] +pub fn invalidate_current_thread_spans() { + crate::imp::invalidate_current_thread_spans(); +} + /// An object that holds a [`Group`]'s `span_open()` and `span_close()` together -/// (in a more compact representation than holding those 2 spans individually. +/// in a more compact representation than holding those 2 spans individually. /// /// [`Group`]: crate::Group #[derive(Copy, Clone)] pub struct DelimSpan { inner: DelimSpanEnum, - _marker: Marker, + _marker: ProcMacroAutoTraits, } #[derive(Copy, Clone)] @@ -45,7 +112,7 @@ impl DelimSpan { DelimSpan { inner, - _marker: Marker, + _marker: MARKER, } } diff --git a/vendor/proc-macro2/src/fallback.rs b/vendor/proc-macro2/src/fallback.rs index 7f559cfb..61b7b91b 100644 --- a/vendor/proc-macro2/src/fallback.rs +++ b/vendor/proc-macro2/src/fallback.rs @@ -1,3 +1,5 @@ +#[cfg(wrap_proc_macro)] +use crate::imp; #[cfg(span_locations)] use crate::location::LineColumn; use crate::parse::{self, Cursor}; @@ -9,11 +11,21 @@ use alloc::collections::BTreeMap; use core::cell::RefCell; #[cfg(span_locations)] use core::cmp; +#[cfg(all(span_locations, not(fuzzing)))] +use core::cmp::Ordering; use core::fmt::{self, Debug, Display, Write}; use core::mem::ManuallyDrop; +#[cfg(span_locations)] +use core::ops::Range; use core::ops::RangeBounds; use core::ptr; +use core::str; +#[cfg(feature = "proc-macro")] use core::str::FromStr; +use std::ffi::CStr; +#[cfg(wrap_proc_macro)] +use std::panic; +#[cfg(span_locations)] use std::path::PathBuf; /// Force use of proc-macro2's fallback implementation of the API for now, even @@ -45,7 +57,7 @@ impl LexError { self.span } - fn call_site() -> Self { + pub(crate) fn call_site() -> Self { LexError { span: Span::call_site(), } @@ -53,13 +65,31 @@ impl LexError { } impl TokenStream { - pub fn new() -> Self { + pub(crate) fn new() -> Self { TokenStream { inner: RcVecBuilder::new().build(), } } - pub fn is_empty(&self) -> bool { + pub(crate) fn from_str_checked(src: &str) -> Result<Self, LexError> { + // Create a dummy file & add it to the source map + let mut cursor = get_cursor(src); + + // Strip a byte order mark if present + const BYTE_ORDER_MARK: &str = "\u{feff}"; + if cursor.starts_with(BYTE_ORDER_MARK) { + cursor = cursor.advance(BYTE_ORDER_MARK.len()); + } + + parse::token_stream(cursor) + } + + #[cfg(feature = "proc-macro")] + pub(crate) 
fn from_str_unchecked(src: &str) -> Self { + Self::from_str_checked(src).unwrap() + } + + pub(crate) fn is_empty(&self) -> bool { self.inner.len() == 0 } @@ -97,21 +127,32 @@ fn push_token_from_proc_macro(mut vec: RcVecMut<TokenTree>, token: TokenTree) { // Nonrecursive to prevent stack overflow. impl Drop for TokenStream { fn drop(&mut self) { - let mut inner = match self.inner.get_mut() { - Some(inner) => inner, + let mut stack = Vec::new(); + let mut current = match self.inner.get_mut() { + Some(inner) => inner.take().into_iter(), None => return, }; - while let Some(token) = inner.pop() { - let group = match token { - TokenTree::Group(group) => group.inner, - _ => continue, - }; - #[cfg(wrap_proc_macro)] - let group = match group { - crate::imp::Group::Fallback(group) => group, - crate::imp::Group::Compiler(_) => continue, - }; - inner.extend(group.stream.take_inner()); + loop { + while let Some(token) = current.next() { + let group = match token { + TokenTree::Group(group) => group.inner, + _ => continue, + }; + #[cfg(wrap_proc_macro)] + let group = match group { + crate::imp::Group::Fallback(group) => group, + crate::imp::Group::Compiler(_) => continue, + }; + let mut group = group; + if let Some(inner) = group.stream.inner.get_mut() { + stack.push(current); + current = inner.take().into_iter(); + } + } + match stack.pop() { + Some(next) => current = next, + None => return, + } } } } @@ -121,23 +162,23 @@ pub(crate) struct TokenStreamBuilder { } impl TokenStreamBuilder { - pub fn new() -> Self { + pub(crate) fn new() -> Self { TokenStreamBuilder { inner: RcVecBuilder::new(), } } - pub fn with_capacity(cap: usize) -> Self { + pub(crate) fn with_capacity(cap: usize) -> Self { TokenStreamBuilder { inner: RcVecBuilder::with_capacity(cap), } } - pub fn push_token_from_parser(&mut self, tt: TokenTree) { + pub(crate) fn push_token_from_parser(&mut self, tt: TokenTree) { self.inner.push(tt); } - pub fn build(self) -> TokenStream { + pub(crate) fn build(self) -> TokenStream { TokenStream { inner: self.inner.build(), } @@ -151,9 +192,9 @@ fn get_cursor(src: &str) -> Cursor { // Create a dummy file & add it to the source map #[cfg(not(fuzzing))] - SOURCE_MAP.with(|cm| { - let mut cm = cm.borrow_mut(); - let span = cm.add_file(src); + SOURCE_MAP.with(|sm| { + let mut sm = sm.borrow_mut(); + let span = sm.add_file(src); Cursor { rest: src, off: span.lo, @@ -166,23 +207,6 @@ fn get_cursor(src: &str) -> Cursor { Cursor { rest: src } } -impl FromStr for TokenStream { - type Err = LexError; - - fn from_str(src: &str) -> Result<TokenStream, LexError> { - // Create a dummy file & add it to the source map - let mut cursor = get_cursor(src); - - // Strip a byte order mark if present - const BYTE_ORDER_MARK: &str = "\u{feff}"; - if cursor.starts_with(BYTE_ORDER_MARK) { - cursor = cursor.advance(BYTE_ORDER_MARK.len()); - } - - parse::token_stream(cursor) - } -} - impl Display for LexError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str("cannot parse string into token stream") @@ -198,13 +222,13 @@ impl Display for TokenStream { } joint = false; match tt { - TokenTree::Group(tt) => Display::fmt(tt, f), - TokenTree::Ident(tt) => Display::fmt(tt, f), + TokenTree::Group(tt) => write!(f, "{}", tt), + TokenTree::Ident(tt) => write!(f, "{}", tt), TokenTree::Punct(tt) => { joint = tt.spacing() == Spacing::Joint; - Display::fmt(tt, f) + write!(f, "{}", tt) } - TokenTree::Literal(tt) => Display::fmt(tt, f), + TokenTree::Literal(tt) => write!(f, "{}", tt), }?; } @@ -222,20 +246,14 @@ impl Debug 
for TokenStream { #[cfg(feature = "proc-macro")] impl From<proc_macro::TokenStream> for TokenStream { fn from(inner: proc_macro::TokenStream) -> Self { - inner - .to_string() - .parse() - .expect("compiler token stream parse failed") + TokenStream::from_str_unchecked(&inner.to_string()) } } #[cfg(feature = "proc-macro")] impl From<TokenStream> for proc_macro::TokenStream { fn from(inner: TokenStream) -> Self { - inner - .to_string() - .parse() - .expect("failed to parse to compiler tokens") + proc_macro::TokenStream::from_str_unchecked(&inner.to_string()) } } @@ -295,31 +313,6 @@ impl IntoIterator for TokenStream { } } -#[derive(Clone, PartialEq, Eq)] -pub(crate) struct SourceFile { - path: PathBuf, -} - -impl SourceFile { - /// Get the path to this source file as a string. - pub fn path(&self) -> PathBuf { - self.path.clone() - } - - pub fn is_real(&self) -> bool { - false - } -} - -impl Debug for SourceFile { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_struct("SourceFile") - .field("path", &self.path()) - .field("is_real", &self.is_real()) - .finish() - } -} - #[cfg(all(span_locations, not(fuzzing)))] thread_local! { static SOURCE_MAP: RefCell<SourceMap> = RefCell::new(SourceMap { @@ -334,6 +327,12 @@ thread_local! { }); } +#[cfg(span_locations)] +pub(crate) fn invalidate_current_thread_spans() { + #[cfg(not(fuzzing))] + SOURCE_MAP.with(|sm| sm.borrow_mut().files.truncate(1)); +} + #[cfg(all(span_locations, not(fuzzing)))] struct FileInfo { source_text: String, @@ -347,7 +346,7 @@ impl FileInfo { fn offset_line_column(&self, offset: usize) -> LineColumn { assert!(self.span_within(Span { lo: offset as u32, - hi: offset as u32 + hi: offset as u32, })); let offset = offset - self.span.lo as usize; match self.lines.binary_search(&offset) { @@ -366,7 +365,7 @@ impl FileInfo { span.lo >= self.span.lo && span.hi <= self.span.hi } - fn source_text(&mut self, span: Span) -> String { + fn byte_range(&mut self, span: Span) -> Range<usize> { let lo_char = (span.lo - self.span.lo) as usize; // Look up offset of the largest already-computed char index that is @@ -395,11 +394,15 @@ impl FileInfo { let trunc_lo = &self.source_text[lo_byte..]; let char_len = (span.hi - span.lo) as usize; - let source_text = match trunc_lo.char_indices().nth(char_len) { - Some((offset, _ch)) => &trunc_lo[..offset], - None => trunc_lo, - }; - source_text.to_owned() + lo_byte..match trunc_lo.char_indices().nth(char_len) { + Some((offset, _ch)) => lo_byte + offset, + None => self.source_text.len(), + } + } + + fn source_text(&mut self, span: Span) -> String { + let byte_range = self.byte_range(span); + self.source_text[byte_range].to_owned() } } @@ -454,36 +457,39 @@ impl SourceMap { span } - #[cfg(procmacro2_semver_exempt)] - fn filepath(&self, span: Span) -> PathBuf { - for (i, file) in self.files.iter().enumerate() { - if file.span_within(span) { - return PathBuf::from(if i == 0 { - "<unspecified>".to_owned() - } else { - format!("<parsed string {}>", i) - }); + fn find(&self, span: Span) -> usize { + match self.files.binary_search_by(|file| { + if file.span.hi < span.lo { + Ordering::Less + } else if file.span.lo > span.hi { + Ordering::Greater + } else { + assert!(file.span_within(span)); + Ordering::Equal } + }) { + Ok(i) => i, + Err(_) => unreachable!("Invalid span with no related FileInfo!"), } - unreachable!("Invalid span with no related FileInfo!"); } - fn fileinfo(&self, span: Span) -> &FileInfo { - for file in &self.files { - if file.span_within(span) { - return file; - } + fn 
filepath(&self, span: Span) -> String { + let i = self.find(span); + if i == 0 { + "<unspecified>".to_owned() + } else { + format!("<parsed string {}>", i) } - unreachable!("Invalid span with no related FileInfo!"); + } + + fn fileinfo(&self, span: Span) -> &FileInfo { + let i = self.find(span); + &self.files[i] } fn fileinfo_mut(&mut self, span: Span) -> &mut FileInfo { - for file in &mut self.files { - if file.span_within(span) { - return file; - } - } - unreachable!("Invalid span with no related FileInfo!"); + let i = self.find(span); + &mut self.files[i] } } @@ -497,83 +503,100 @@ pub(crate) struct Span { impl Span { #[cfg(not(span_locations))] - pub fn call_site() -> Self { + pub(crate) fn call_site() -> Self { Span {} } #[cfg(span_locations)] - pub fn call_site() -> Self { + pub(crate) fn call_site() -> Self { Span { lo: 0, hi: 0 } } - pub fn mixed_site() -> Self { + pub(crate) fn mixed_site() -> Self { Span::call_site() } #[cfg(procmacro2_semver_exempt)] - pub fn def_site() -> Self { + pub(crate) fn def_site() -> Self { Span::call_site() } - pub fn resolved_at(&self, _other: Span) -> Span { + pub(crate) fn resolved_at(&self, _other: Span) -> Span { // Stable spans consist only of line/column information, so // `resolved_at` and `located_at` only select which span the // caller wants line/column information from. *self } - pub fn located_at(&self, other: Span) -> Span { + pub(crate) fn located_at(&self, other: Span) -> Span { other } - #[cfg(procmacro2_semver_exempt)] - pub fn source_file(&self) -> SourceFile { + #[cfg(span_locations)] + pub(crate) fn byte_range(&self) -> Range<usize> { #[cfg(fuzzing)] - return SourceFile { - path: PathBuf::from("<unspecified>"), - }; + return 0..0; #[cfg(not(fuzzing))] - SOURCE_MAP.with(|cm| { - let cm = cm.borrow(); - let path = cm.filepath(*self); - SourceFile { path } - }) + { + if self.is_call_site() { + 0..0 + } else { + SOURCE_MAP.with(|sm| sm.borrow_mut().fileinfo_mut(*self).byte_range(*self)) + } + } } #[cfg(span_locations)] - pub fn start(&self) -> LineColumn { + pub(crate) fn start(&self) -> LineColumn { #[cfg(fuzzing)] return LineColumn { line: 0, column: 0 }; #[cfg(not(fuzzing))] - SOURCE_MAP.with(|cm| { - let cm = cm.borrow(); - let fi = cm.fileinfo(*self); + SOURCE_MAP.with(|sm| { + let sm = sm.borrow(); + let fi = sm.fileinfo(*self); fi.offset_line_column(self.lo as usize) }) } #[cfg(span_locations)] - pub fn end(&self) -> LineColumn { + pub(crate) fn end(&self) -> LineColumn { #[cfg(fuzzing)] return LineColumn { line: 0, column: 0 }; #[cfg(not(fuzzing))] - SOURCE_MAP.with(|cm| { - let cm = cm.borrow(); - let fi = cm.fileinfo(*self); + SOURCE_MAP.with(|sm| { + let sm = sm.borrow(); + let fi = sm.fileinfo(*self); fi.offset_line_column(self.hi as usize) }) } + #[cfg(span_locations)] + pub(crate) fn file(&self) -> String { + #[cfg(fuzzing)] + return "<unspecified>".to_owned(); + + #[cfg(not(fuzzing))] + SOURCE_MAP.with(|sm| { + let sm = sm.borrow(); + sm.filepath(*self) + }) + } + + #[cfg(span_locations)] + pub(crate) fn local_file(&self) -> Option<PathBuf> { + None + } + #[cfg(not(span_locations))] - pub fn join(&self, _other: Span) -> Option<Span> { + pub(crate) fn join(&self, _other: Span) -> Option<Span> { Some(Span {}) } #[cfg(span_locations)] - pub fn join(&self, other: Span) -> Option<Span> { + pub(crate) fn join(&self, other: Span) -> Option<Span> { #[cfg(fuzzing)] return { let _ = other; @@ -581,10 +604,10 @@ impl Span { }; #[cfg(not(fuzzing))] - SOURCE_MAP.with(|cm| { - let cm = cm.borrow(); + SOURCE_MAP.with(|sm| { + let sm = 
sm.borrow(); // If `other` is not within the same FileInfo as us, return None. - if !cm.fileinfo(*self).span_within(other) { + if !sm.fileinfo(*self).span_within(other) { return None; } Some(Span { @@ -595,12 +618,12 @@ impl Span { } #[cfg(not(span_locations))] - pub fn source_text(&self) -> Option<String> { + pub(crate) fn source_text(&self) -> Option<String> { None } #[cfg(span_locations)] - pub fn source_text(&self) -> Option<String> { + pub(crate) fn source_text(&self) -> Option<String> { #[cfg(fuzzing)] return None; @@ -609,7 +632,7 @@ impl Span { if self.is_call_site() { None } else { - Some(SOURCE_MAP.with(|cm| cm.borrow_mut().fileinfo_mut(*self).source_text(*self))) + Some(SOURCE_MAP.with(|sm| sm.borrow_mut().fileinfo_mut(*self).source_text(*self))) } } } @@ -677,7 +700,7 @@ pub(crate) struct Group { } impl Group { - pub fn new(delimiter: Delimiter, stream: TokenStream) -> Self { + pub(crate) fn new(delimiter: Delimiter, stream: TokenStream) -> Self { Group { delimiter, stream, @@ -685,27 +708,27 @@ impl Group { } } - pub fn delimiter(&self) -> Delimiter { + pub(crate) fn delimiter(&self) -> Delimiter { self.delimiter } - pub fn stream(&self) -> TokenStream { + pub(crate) fn stream(&self) -> TokenStream { self.stream.clone() } - pub fn span(&self) -> Span { + pub(crate) fn span(&self) -> Span { self.span } - pub fn span_open(&self) -> Span { + pub(crate) fn span_open(&self) -> Span { self.span.first_byte() } - pub fn span_close(&self) -> Span { + pub(crate) fn span_close(&self) -> Span { self.span.last_byte() } - pub fn set_span(&mut self, span: Span) { + pub(crate) fn set_span(&mut self, span: Span) { self.span = span; } } @@ -749,35 +772,45 @@ impl Debug for Group { #[derive(Clone)] pub(crate) struct Ident { - sym: String, + sym: Box<str>, span: Span, raw: bool, } impl Ident { - fn _new(string: &str, raw: bool, span: Span) -> Self { - validate_ident(string, raw); + #[track_caller] + pub(crate) fn new_checked(string: &str, span: Span) -> Self { + validate_ident(string); + Ident::new_unchecked(string, span) + } + pub(crate) fn new_unchecked(string: &str, span: Span) -> Self { Ident { - sym: string.to_owned(), + sym: Box::from(string), span, - raw, + raw: false, } } - pub fn new(string: &str, span: Span) -> Self { - Ident::_new(string, false, span) + #[track_caller] + pub(crate) fn new_raw_checked(string: &str, span: Span) -> Self { + validate_ident_raw(string); + Ident::new_raw_unchecked(string, span) } - pub fn new_raw(string: &str, span: Span) -> Self { - Ident::_new(string, true, span) + pub(crate) fn new_raw_unchecked(string: &str, span: Span) -> Self { + Ident { + sym: Box::from(string), + span, + raw: true, + } } - pub fn span(&self) -> Span { + pub(crate) fn span(&self) -> Span { self.span } - pub fn set_span(&mut self, span: Span) { + pub(crate) fn set_span(&mut self, span: Span) { self.span = span; } } @@ -790,7 +823,8 @@ pub(crate) fn is_ident_continue(c: char) -> bool { unicode_ident::is_xid_continue(c) } -fn validate_ident(string: &str, raw: bool) { +#[track_caller] +fn validate_ident(string: &str) { if string.is_empty() { panic!("Ident is not allowed to be empty; use Option<Ident>"); } @@ -816,14 +850,17 @@ fn validate_ident(string: &str, raw: bool) { if !ident_ok(string) { panic!("{:?} is not a valid Ident", string); } +} - if raw { - match string { - "_" | "super" | "self" | "Self" | "crate" => { - panic!("`r#{}` cannot be a raw identifier", string); - } - _ => {} +#[track_caller] +fn validate_ident_raw(string: &str) { + validate_ident(string); + + match string { + 
"_" | "super" | "self" | "Self" | "crate" => { + panic!("`r#{}` cannot be a raw identifier", string); } + _ => {} } } @@ -840,9 +877,9 @@ where fn eq(&self, other: &T) -> bool { let other = other.as_ref(); if self.raw { - other.starts_with("r#") && self.sym == other[2..] + other.starts_with("r#") && *self.sym == other[2..] } else { - self.sym == other + *self.sym == *other } } } @@ -852,7 +889,7 @@ impl Display for Ident { if self.raw { f.write_str("r#")?; } - Display::fmt(&self.sym, f) + f.write_str(&self.sym) } } @@ -881,13 +918,13 @@ impl Debug for Ident { #[derive(Clone)] pub(crate) struct Literal { - repr: String, + pub(crate) repr: String, span: Span, } macro_rules! suffixed_numbers { ($($name:ident => $kind:ident,)*) => ($( - pub fn $name(n: $kind) -> Literal { + pub(crate) fn $name(n: $kind) -> Literal { Literal::_new(format!(concat!("{}", stringify!($kind)), n)) } )*) @@ -895,7 +932,7 @@ macro_rules! suffixed_numbers { macro_rules! unsuffixed_numbers { ($($name:ident => $kind:ident,)*) => ($( - pub fn $name(n: $kind) -> Literal { + pub(crate) fn $name(n: $kind) -> Literal { Literal::_new(n.to_string()) } )*) @@ -909,6 +946,36 @@ impl Literal { } } + pub(crate) fn from_str_checked(repr: &str) -> Result<Self, LexError> { + let mut cursor = get_cursor(repr); + #[cfg(span_locations)] + let lo = cursor.off; + + let negative = cursor.starts_with_char('-'); + if negative { + cursor = cursor.advance(1); + if !cursor.starts_with_fn(|ch| ch.is_ascii_digit()) { + return Err(LexError::call_site()); + } + } + + if let Ok((rest, mut literal)) = parse::literal(cursor) { + if rest.is_empty() { + if negative { + literal.repr.insert(0, '-'); + } + literal.span = Span { + #[cfg(span_locations)] + lo, + #[cfg(span_locations)] + hi: rest.off, + }; + return Ok(literal); + } + } + Err(LexError::call_site()) + } + pub(crate) unsafe fn from_str_unchecked(repr: &str) -> Self { Literal::_new(repr.to_owned()) } @@ -946,7 +1013,7 @@ impl Literal { isize_unsuffixed => isize, } - pub fn f32_unsuffixed(f: f32) -> Literal { + pub(crate) fn f32_unsuffixed(f: f32) -> Literal { let mut s = f.to_string(); if !s.contains('.') { s.push_str(".0"); @@ -954,7 +1021,7 @@ impl Literal { Literal::_new(s) } - pub fn f64_unsuffixed(f: f64) -> Literal { + pub(crate) fn f64_unsuffixed(f: f64) -> Literal { let mut s = f.to_string(); if !s.contains('.') { s.push_str(".0"); @@ -962,82 +1029,109 @@ impl Literal { Literal::_new(s) } - pub fn string(t: &str) -> Literal { - let mut repr = String::with_capacity(t.len() + 2); + pub(crate) fn string(string: &str) -> Literal { + let mut repr = String::with_capacity(string.len() + 2); repr.push('"'); - let mut chars = t.chars(); - while let Some(ch) = chars.next() { - if ch == '\0' { - repr.push_str( - if chars - .as_str() - .starts_with(|next| '0' <= next && next <= '7') - { - // circumvent clippy::octal_escapes lint - "\\x00" - } else { - "\\0" - }, - ); - } else if ch == '\'' { - // escape_debug turns this into "\'" which is unnecessary. - repr.push(ch); - } else { - repr.extend(ch.escape_debug()); - } - } + escape_utf8(string, &mut repr); repr.push('"'); Literal::_new(repr) } - pub fn character(t: char) -> Literal { + pub(crate) fn character(ch: char) -> Literal { let mut repr = String::new(); repr.push('\''); - if t == '"' { + if ch == '"' { // escape_debug turns this into '\"' which is unnecessary. 
- repr.push(t); + repr.push(ch); } else { - repr.extend(t.escape_debug()); + repr.extend(ch.escape_debug()); } repr.push('\''); Literal::_new(repr) } - pub fn byte_string(bytes: &[u8]) -> Literal { - let mut escaped = "b\"".to_string(); + pub(crate) fn byte_character(byte: u8) -> Literal { + let mut repr = "b'".to_string(); + #[allow(clippy::match_overlapping_arm)] + match byte { + b'\0' => repr.push_str(r"\0"), + b'\t' => repr.push_str(r"\t"), + b'\n' => repr.push_str(r"\n"), + b'\r' => repr.push_str(r"\r"), + b'\'' => repr.push_str(r"\'"), + b'\\' => repr.push_str(r"\\"), + b'\x20'..=b'\x7E' => repr.push(byte as char), + _ => { + let _ = write!(repr, r"\x{:02X}", byte); + } + } + repr.push('\''); + Literal::_new(repr) + } + + pub(crate) fn byte_string(bytes: &[u8]) -> Literal { + let mut repr = "b\"".to_string(); let mut bytes = bytes.iter(); while let Some(&b) = bytes.next() { #[allow(clippy::match_overlapping_arm)] match b { - b'\0' => escaped.push_str(match bytes.as_slice().first() { + b'\0' => repr.push_str(match bytes.as_slice().first() { // circumvent clippy::octal_escapes lint Some(b'0'..=b'7') => r"\x00", _ => r"\0", }), - b'\t' => escaped.push_str(r"\t"), - b'\n' => escaped.push_str(r"\n"), - b'\r' => escaped.push_str(r"\r"), - b'"' => escaped.push_str("\\\""), - b'\\' => escaped.push_str("\\\\"), - b'\x20'..=b'\x7E' => escaped.push(b as char), + b'\t' => repr.push_str(r"\t"), + b'\n' => repr.push_str(r"\n"), + b'\r' => repr.push_str(r"\r"), + b'"' => repr.push_str("\\\""), + b'\\' => repr.push_str(r"\\"), + b'\x20'..=b'\x7E' => repr.push(b as char), _ => { - let _ = write!(escaped, "\\x{:02X}", b); + let _ = write!(repr, r"\x{:02X}", b); + } + } + } + repr.push('"'); + Literal::_new(repr) + } + + pub(crate) fn c_string(string: &CStr) -> Literal { + let mut repr = "c\"".to_string(); + let mut bytes = string.to_bytes(); + while !bytes.is_empty() { + let (valid, invalid) = match str::from_utf8(bytes) { + Ok(all_valid) => { + bytes = b""; + (all_valid, bytes) } + Err(utf8_error) => { + let (valid, rest) = bytes.split_at(utf8_error.valid_up_to()); + let valid = str::from_utf8(valid).unwrap(); + let invalid = utf8_error + .error_len() + .map_or(rest, |error_len| &rest[..error_len]); + bytes = &bytes[valid.len() + invalid.len()..]; + (valid, invalid) + } + }; + escape_utf8(valid, &mut repr); + for &byte in invalid { + let _ = write!(repr, r"\x{:02X}", byte); } } - escaped.push('"'); - Literal::_new(escaped) + repr.push('"'); + Literal::_new(repr) } - pub fn span(&self) -> Span { + pub(crate) fn span(&self) -> Span { self.span } - pub fn set_span(&mut self, span: Span) { + pub(crate) fn set_span(&mut self, span: Span) { self.span = span; } - pub fn subspan<R: RangeBounds<usize>>(&self, range: R) -> Option<Span> { + pub(crate) fn subspan<R: RangeBounds<usize>>(&self, range: R) -> Option<Span> { #[cfg(not(span_locations))] { let _ = range; @@ -1079,40 +1173,6 @@ impl Literal { } } -impl FromStr for Literal { - type Err = LexError; - - fn from_str(repr: &str) -> Result<Self, Self::Err> { - let mut cursor = get_cursor(repr); - #[cfg(span_locations)] - let lo = cursor.off; - - let negative = cursor.starts_with_char('-'); - if negative { - cursor = cursor.advance(1); - if !cursor.starts_with_fn(|ch| ch.is_ascii_digit()) { - return Err(LexError::call_site()); - } - } - - if let Ok((rest, mut literal)) = parse::literal(cursor) { - if rest.is_empty() { - if negative { - literal.repr.insert(0, '-'); - } - literal.span = Span { - #[cfg(span_locations)] - lo, - #[cfg(span_locations)] - hi: 
rest.off, - }; - return Ok(literal); - } - } - Err(LexError::call_site()) - } -} - impl Display for Literal { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { Display::fmt(&self.repr, f) @@ -1127,3 +1187,70 @@ impl Debug for Literal { debug.finish() } } + +fn escape_utf8(string: &str, repr: &mut String) { + let mut chars = string.chars(); + while let Some(ch) = chars.next() { + if ch == '\0' { + repr.push_str( + if chars + .as_str() + .starts_with(|next| '0' <= next && next <= '7') + { + // circumvent clippy::octal_escapes lint + r"\x00" + } else { + r"\0" + }, + ); + } else if ch == '\'' { + // escape_debug turns this into "\'" which is unnecessary. + repr.push(ch); + } else { + repr.extend(ch.escape_debug()); + } + } +} + +#[cfg(feature = "proc-macro")] +pub(crate) trait FromStr2: FromStr<Err = proc_macro::LexError> { + #[cfg(wrap_proc_macro)] + fn valid(src: &str) -> bool; + + #[cfg(wrap_proc_macro)] + fn from_str_checked(src: &str) -> Result<Self, imp::LexError> { + // Validate using fallback parser, because rustc is incapable of + // returning a recoverable Err for certain invalid token streams, and + // will instead permanently poison the compilation. + if !Self::valid(src) { + return Err(imp::LexError::CompilerPanic); + } + + // Catch panic to work around https://github.com/rust-lang/rust/issues/58736. + match panic::catch_unwind(|| Self::from_str(src)) { + Ok(Ok(ok)) => Ok(ok), + Ok(Err(lex)) => Err(imp::LexError::Compiler(lex)), + Err(_panic) => Err(imp::LexError::CompilerPanic), + } + } + + fn from_str_unchecked(src: &str) -> Self { + Self::from_str(src).unwrap() + } +} + +#[cfg(feature = "proc-macro")] +impl FromStr2 for proc_macro::TokenStream { + #[cfg(wrap_proc_macro)] + fn valid(src: &str) -> bool { + TokenStream::from_str_checked(src).is_ok() + } +} + +#[cfg(feature = "proc-macro")] +impl FromStr2 for proc_macro::Literal { + #[cfg(wrap_proc_macro)] + fn valid(src: &str) -> bool { + Literal::from_str_checked(src).is_ok() + } +} diff --git a/vendor/proc-macro2/src/lib.rs b/vendor/proc-macro2/src/lib.rs index 56d986b4..7952afaa 100644 --- a/vendor/proc-macro2/src/lib.rs +++ b/vendor/proc-macro2/src/lib.rs @@ -9,8 +9,6 @@ //! A wrapper around the procedural macro API of the compiler's [`proc_macro`] //! crate. This library serves two purposes: //! -//! [`proc_macro`]: https://doc.rust-lang.org/proc_macro/ -//! //! - **Bring proc-macro-like functionality to other contexts like build.rs and //! main.rs.** Types from `proc_macro` are entirely specific to procedural //! macros and cannot ever exist in code outside of a procedural macro. @@ -65,7 +63,7 @@ //! //! To opt into the additional APIs available in the most recent nightly //! compiler, the `procmacro2_semver_exempt` config flag must be passed to -//! rustc. We will polyfill those nightly-only APIs back to Rust 1.56.0. As +//! rustc. We will polyfill those nightly-only APIs back to Rust 1.60.0. As //! these are unstable APIs that track the nightly compiler, minor versions of //! proc-macro2 may make breaking changes to them at any time. //! @@ -86,31 +84,39 @@ //! a different thread. // Proc-macro2 types in rustdoc of other crates get linked to here. 
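// Usage sketch, not part of the patch: FromStr2::from_str_checked above pre-validates with the
// fallback parser and then wraps the compiler's parser in catch_unwind so a panic is reported as
// an error instead of poisoning the compilation. A minimal standalone sketch of that pattern;
// `cheap_validate`, `real_parse` and `parse_checked` are hypothetical stand-ins, not proc-macro2 APIs.
use std::panic;

fn cheap_validate(src: &str) -> bool {
    // Stand-in for the fallback parser, used here only to reject obviously bad input.
    !src.contains('\0')
}

fn real_parse(src: &str) -> Result<usize, String> {
    // Stand-in for a parser that may panic rather than return Err on some inputs.
    if src.is_empty() {
        panic!("parser bug on empty input");
    }
    Ok(src.len())
}

fn parse_checked(src: &str) -> Result<usize, String> {
    if !cheap_validate(src) {
        return Err("rejected by pre-validation".to_owned());
    }
    match panic::catch_unwind(|| real_parse(src)) {
        Ok(result) => result,
        Err(_panic) => Err("underlying parser panicked".to_owned()),
    }
}

fn main() {
    assert_eq!(parse_checked("abc"), Ok(3));
    assert!(parse_checked("").is_err()); // panic surfaced as Err
    assert!(parse_checked("a\0b").is_err()); // rejected before the real parser runs
}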
-#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.69")] +#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.103")] #![cfg_attr(any(proc_macro_span, super_unstable), feature(proc_macro_span))] #![cfg_attr(super_unstable, feature(proc_macro_def_site))] -#![cfg_attr(doc_cfg, feature(doc_cfg))] +#![cfg_attr(docsrs, feature(doc_cfg))] +#![deny(unsafe_op_in_unsafe_fn)] #![allow( clippy::cast_lossless, clippy::cast_possible_truncation, + clippy::checked_conversions, clippy::doc_markdown, + clippy::elidable_lifetime_names, + clippy::incompatible_msrv, clippy::items_after_statements, clippy::iter_without_into_iter, clippy::let_underscore_untyped, clippy::manual_assert, clippy::manual_range_contains, + clippy::missing_panics_doc, clippy::missing_safety_doc, clippy::must_use_candidate, clippy::needless_doctest_main, + clippy::needless_lifetimes, clippy::new_without_default, clippy::return_self_not_must_use, clippy::shadow_unrelated, clippy::trivially_copy_pass_by_ref, + clippy::uninlined_format_args, clippy::unnecessary_wraps, clippy::unused_self, clippy::used_underscore_binding, clippy::vec_init_then_push )] +#![allow(unknown_lints, mismatched_lifetime_syntaxes)] #[cfg(all(procmacro2_semver_exempt, wrap_proc_macro, not(super_unstable)))] compile_error! {"\ @@ -120,6 +126,15 @@ compile_error! {"\ build script as well. "} +#[cfg(all( + procmacro2_nightly_testing, + feature = "proc-macro", + not(proc_macro_span) +))] +compile_error! {"\ + Build script probe failed to compile. +"} + extern crate alloc; #[cfg(feature = "proc-macro")] @@ -127,6 +142,7 @@ extern crate proc_macro; mod marker; mod parse; +mod probe; mod rcvec; #[cfg(wrap_proc_macro)] @@ -148,20 +164,36 @@ mod imp; #[cfg(span_locations)] mod location; +#[cfg(procmacro2_semver_exempt)] +mod num; +#[cfg(procmacro2_semver_exempt)] +#[allow(dead_code)] +mod rustc_literal_escaper; + use crate::extra::DelimSpan; -use crate::marker::Marker; +use crate::marker::{ProcMacroAutoTraits, MARKER}; +#[cfg(procmacro2_semver_exempt)] +use crate::rustc_literal_escaper::MixedUnit; use core::cmp::Ordering; use core::fmt::{self, Debug, Display}; use core::hash::{Hash, Hasher}; +#[cfg(span_locations)] +use core::ops::Range; use core::ops::RangeBounds; use core::str::FromStr; use std::error::Error; -#[cfg(procmacro2_semver_exempt)] +use std::ffi::CStr; +#[cfg(span_locations)] use std::path::PathBuf; #[cfg(span_locations)] +#[cfg_attr(docsrs, doc(cfg(feature = "span-locations")))] pub use crate::location::LineColumn; +#[cfg(procmacro2_semver_exempt)] +#[cfg_attr(docsrs, doc(cfg(procmacro2_semver_exempt)))] +pub use crate::rustc_literal_escaper::EscapeError; + /// An abstract stream of tokens, or more concretely a sequence of token trees. /// /// This type provides interfaces for iterating over token trees and for @@ -172,27 +204,27 @@ pub use crate::location::LineColumn; #[derive(Clone)] pub struct TokenStream { inner: imp::TokenStream, - _marker: Marker, + _marker: ProcMacroAutoTraits, } /// Error returned from `TokenStream::from_str`. 
pub struct LexError { inner: imp::LexError, - _marker: Marker, + _marker: ProcMacroAutoTraits, } impl TokenStream { fn _new(inner: imp::TokenStream) -> Self { TokenStream { inner, - _marker: Marker, + _marker: MARKER, } } fn _new_fallback(inner: fallback::TokenStream) -> Self { TokenStream { - inner: inner.into(), - _marker: Marker, + inner: imp::TokenStream::from(inner), + _marker: MARKER, } } @@ -227,27 +259,29 @@ impl FromStr for TokenStream { type Err = LexError; fn from_str(src: &str) -> Result<TokenStream, LexError> { - let e = src.parse().map_err(|e| LexError { - inner: e, - _marker: Marker, - })?; - Ok(TokenStream::_new(e)) + match imp::TokenStream::from_str_checked(src) { + Ok(tokens) => Ok(TokenStream::_new(tokens)), + Err(lex) => Err(LexError { + inner: lex, + _marker: MARKER, + }), + } } } #[cfg(feature = "proc-macro")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "proc-macro")))] +#[cfg_attr(docsrs, doc(cfg(feature = "proc-macro")))] impl From<proc_macro::TokenStream> for TokenStream { fn from(inner: proc_macro::TokenStream) -> Self { - TokenStream::_new(inner.into()) + TokenStream::_new(imp::TokenStream::from(inner)) } } #[cfg(feature = "proc-macro")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "proc-macro")))] +#[cfg_attr(docsrs, doc(cfg(feature = "proc-macro")))] impl From<TokenStream> for proc_macro::TokenStream { fn from(inner: TokenStream) -> Self { - inner.inner.into() + proc_macro::TokenStream::from(inner.inner) } } @@ -319,76 +353,25 @@ impl Display for LexError { impl Error for LexError {} -/// The source file of a given `Span`. -/// -/// This type is semver exempt and not exposed by default. -#[cfg(all(procmacro2_semver_exempt, any(not(wrap_proc_macro), super_unstable)))] -#[cfg_attr(doc_cfg, doc(cfg(procmacro2_semver_exempt)))] -#[derive(Clone, PartialEq, Eq)] -pub struct SourceFile { - inner: imp::SourceFile, - _marker: Marker, -} - -#[cfg(all(procmacro2_semver_exempt, any(not(wrap_proc_macro), super_unstable)))] -impl SourceFile { - fn _new(inner: imp::SourceFile) -> Self { - SourceFile { - inner, - _marker: Marker, - } - } - - /// Get the path to this source file. - /// - /// ### Note - /// - /// If the code span associated with this `SourceFile` was generated by an - /// external macro, this may not be an actual path on the filesystem. Use - /// [`is_real`] to check. - /// - /// Also note that even if `is_real` returns `true`, if - /// `--remap-path-prefix` was passed on the command line, the path as given - /// may not actually be valid. - /// - /// [`is_real`]: #method.is_real - pub fn path(&self) -> PathBuf { - self.inner.path() - } - - /// Returns `true` if this source file is a real source file, and not - /// generated by an external macro's expansion. - pub fn is_real(&self) -> bool { - self.inner.is_real() - } -} - -#[cfg(all(procmacro2_semver_exempt, any(not(wrap_proc_macro), super_unstable)))] -impl Debug for SourceFile { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - Debug::fmt(&self.inner, f) - } -} - /// A region of source code, along with macro expansion information. #[derive(Copy, Clone)] pub struct Span { inner: imp::Span, - _marker: Marker, + _marker: ProcMacroAutoTraits, } impl Span { fn _new(inner: imp::Span) -> Self { Span { inner, - _marker: Marker, + _marker: MARKER, } } fn _new_fallback(inner: fallback::Span) -> Self { Span { - inner: inner.into(), - _marker: Marker, + inner: imp::Span::from(inner), + _marker: MARKER, } } @@ -412,7 +395,7 @@ impl Span { /// /// This method is semver exempt and not exposed by default. 
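// Usage sketch, not part of the patch: the FromStr impl above now routes through
// from_str_checked, so invalid input comes back as a LexError rather than a panic.
// Assumes a crate that depends on the proc-macro2 version this diff vendors.
use proc_macro2::TokenStream;

fn main() {
    let tokens: TokenStream = "fn answer() -> u32 { 42 }".parse().expect("valid Rust tokens");
    println!("parsed {} top-level token trees", tokens.into_iter().count());

    // Unbalanced delimiters are a lexing error, reported via Err instead of a panic.
    assert!("fn broken( {".parse::<TokenStream>().is_err());
}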
#[cfg(procmacro2_semver_exempt)] - #[cfg_attr(doc_cfg, doc(cfg(procmacro2_semver_exempt)))] + #[cfg_attr(docsrs, doc(cfg(procmacro2_semver_exempt)))] pub fn def_site() -> Self { Span::_new(imp::Span::def_site()) } @@ -451,13 +434,19 @@ impl Span { self.unwrap() } - /// The original source file into which this span points. + /// Returns the span's byte position range in the source file. /// - /// This method is semver exempt and not exposed by default. - #[cfg(all(procmacro2_semver_exempt, any(not(wrap_proc_macro), super_unstable)))] - #[cfg_attr(doc_cfg, doc(cfg(procmacro2_semver_exempt)))] - pub fn source_file(&self) -> SourceFile { - SourceFile::_new(self.inner.source_file()) + /// This method requires the `"span-locations"` feature to be enabled. + /// + /// When executing in a procedural macro context, the returned range is only + /// accurate if compiled with a nightly toolchain. The stable toolchain does + /// not have this information available. When executing outside of a + /// procedural macro, such as main.rs or build.rs, the byte range is always + /// accurate regardless of toolchain. + #[cfg(span_locations)] + #[cfg_attr(docsrs, doc(cfg(feature = "span-locations")))] + pub fn byte_range(&self) -> Range<usize> { + self.inner.byte_range() } /// Get the starting line/column in the source file for this span. @@ -470,7 +459,7 @@ impl Span { /// outside of a procedural macro, such as main.rs or build.rs, the /// line/column are always meaningful regardless of toolchain. #[cfg(span_locations)] - #[cfg_attr(doc_cfg, doc(cfg(feature = "span-locations")))] + #[cfg_attr(docsrs, doc(cfg(feature = "span-locations")))] pub fn start(&self) -> LineColumn { self.inner.start() } @@ -485,11 +474,34 @@ impl Span { /// outside of a procedural macro, such as main.rs or build.rs, the /// line/column are always meaningful regardless of toolchain. #[cfg(span_locations)] - #[cfg_attr(doc_cfg, doc(cfg(feature = "span-locations")))] + #[cfg_attr(docsrs, doc(cfg(feature = "span-locations")))] pub fn end(&self) -> LineColumn { self.inner.end() } + /// The path to the source file in which this span occurs, for display + /// purposes. + /// + /// This might not correspond to a valid file system path. It might be + /// remapped, or might be an artificial path such as `"<macro expansion>"`. + #[cfg(span_locations)] + #[cfg_attr(docsrs, doc(cfg(feature = "span-locations")))] + pub fn file(&self) -> String { + self.inner.file() + } + + /// The path to the source file in which this span occurs on disk. + /// + /// This is the actual path on disk. It is unaffected by path remapping. + /// + /// This path should not be embedded in the output of the macro; prefer + /// `file()` instead. + #[cfg(span_locations)] + #[cfg_attr(docsrs, doc(cfg(feature = "span-locations")))] + pub fn local_file(&self) -> Option<PathBuf> { + self.inner.local_file() + } + /// Create a new span encompassing `self` and `other`. /// /// Returns `None` if `self` and `other` are from different files. @@ -497,8 +509,6 @@ impl Span { /// Warning: the underlying [`proc_macro::Span::join`] method is /// nightly-only. When called from within a procedural macro not using a /// nightly compiler, this method will always return `None`. - /// - /// [`proc_macro::Span::join`]: https://doc.rust-lang.org/proc_macro/struct.Span.html#method.join pub fn join(&self, other: Span) -> Option<Span> { self.inner.join(other.inner).map(Span::_new) } @@ -507,7 +517,7 @@ impl Span { /// /// This method is semver exempt and not exposed by default. 
#[cfg(procmacro2_semver_exempt)] - #[cfg_attr(doc_cfg, doc(cfg(procmacro2_semver_exempt)))] + #[cfg_attr(docsrs, doc(cfg(procmacro2_semver_exempt)))] pub fn eq(&self, other: &Span) -> bool { self.inner.eq(&other.inner) } @@ -647,13 +657,25 @@ pub enum Delimiter { Brace, /// `[ ... ]` Bracket, - /// `Ø ... Ø` + /// `∅ ... ∅` /// - /// An implicit delimiter, that may, for example, appear around tokens + /// An invisible delimiter, that may, for example, appear around tokens /// coming from a "macro variable" `$var`. It is important to preserve /// operator priorities in cases like `$var * 3` where `$var` is `1 + 2`. - /// Implicit delimiters may not survive roundtrip of a token stream through + /// Invisible delimiters may not survive roundtrip of a token stream through /// a string. + /// + /// <div class="warning"> + /// + /// Note: rustc currently can ignore the grouping of tokens delimited by `None` in the output + /// of a proc_macro. Only `None`-delimited groups created by a macro_rules macro in the input + /// of a proc_macro macro are preserved, and only in very specific circumstances. + /// Any `None`-delimited groups (re)created by a proc_macro will therefore not preserve + /// operator priorities as indicated above. The other `Delimiter` variants should be used + /// instead in this context. This is a rustc bug. For details, see + /// [rust-lang/rust#67062](https://github.com/rust-lang/rust/issues/67062). + /// + /// </div> None, } @@ -664,7 +686,7 @@ impl Group { fn _new_fallback(inner: fallback::Group) -> Self { Group { - inner: inner.into(), + inner: imp::Group::from(inner), } } @@ -790,10 +812,16 @@ impl Punct { /// The returned `Punct` will have the default span of `Span::call_site()` /// which can be further configured with the `set_span` method below. pub fn new(ch: char, spacing: Spacing) -> Self { - Punct { - ch, - spacing, - span: Span::call_site(), + if let '!' | '#' | '$' | '%' | '&' | '\'' | '*' | '+' | ',' | '-' | '.' | '/' | ':' | ';' + | '<' | '=' | '>' | '?' | '@' | '^' | '|' | '~' = ch + { + Punct { + ch, + spacing, + span: Span::call_site(), + } + } else { + panic!("unsupported proc macro punctuation character {:?}", ch); } } @@ -907,14 +935,21 @@ impl Debug for Punct { #[derive(Clone)] pub struct Ident { inner: imp::Ident, - _marker: Marker, + _marker: ProcMacroAutoTraits, } impl Ident { fn _new(inner: imp::Ident) -> Self { Ident { inner, - _marker: Marker, + _marker: MARKER, + } + } + + fn _new_fallback(inner: fallback::Ident) -> Self { + Ident { + inner: imp::Ident::from(inner), + _marker: MARKER, } } @@ -949,8 +984,9 @@ impl Ident { /// style="padding-right:0;">syn::parse_str</code></a><code /// style="padding-left:0;">::<Ident></code> /// rather than `Ident::new`. + #[track_caller] pub fn new(string: &str, span: Span) -> Self { - Ident::_new(imp::Ident::new(string, span.inner)) + Ident::_new(imp::Ident::new_checked(string, span.inner)) } /// Same as `Ident::new`, but creates a raw identifier (`r#ident`). The @@ -958,12 +994,9 @@ impl Ident { /// (including keywords, e.g. `fn`). Keywords which are usable in path /// segments (e.g. `self`, `super`) are not supported, and will cause a /// panic. + #[track_caller] pub fn new_raw(string: &str, span: Span) -> Self { - Ident::_new_raw(string, span) - } - - fn _new_raw(string: &str, span: Span) -> Self { - Ident::_new(imp::Ident::new_raw(string, span.inner)) + Ident::_new(imp::Ident::new_raw_checked(string, span.inner)) } /// Returns the span of this `Ident`. 
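// Usage sketch, not part of the patch: the span inspection methods touched above,
// byte_range/start/end and the new file/local_file, run outside a procedural macro.
// Assumes a dependency on proc-macro2 with the "span-locations" feature enabled.
use proc_macro2::TokenStream;

fn main() {
    let tokens: TokenStream = "let answer = 42;".parse().unwrap();
    for tt in tokens {
        let span = tt.span();
        let (start, end) = (span.start(), span.end());
        println!(
            "{:?}: bytes {:?}, {}:{}..{}:{}, file {:?}",
            tt,
            span.byte_range(),
            start.line,
            start.column,
            end.line,
            end.column,
            span.file(),
        );
        // Strings parsed at runtime have no on-disk source, so local_file() is None here.
        assert!(span.local_file().is_none());
    }
}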
@@ -1036,7 +1069,7 @@ impl Debug for Ident { #[derive(Clone)] pub struct Literal { inner: imp::Literal, - _marker: Marker, + _marker: ProcMacroAutoTraits, } macro_rules! suffixed_int_literals { @@ -1083,14 +1116,14 @@ impl Literal { fn _new(inner: imp::Literal) -> Self { Literal { inner, - _marker: Marker, + _marker: MARKER, } } fn _new_fallback(inner: fallback::Literal) -> Self { Literal { - inner: inner.into(), - _marker: Marker, + inner: imp::Literal::from(inner), + _marker: MARKER, } } @@ -1206,9 +1239,19 @@ impl Literal { Literal::_new(imp::Literal::character(ch)) } + /// Byte character literal. + pub fn byte_character(byte: u8) -> Literal { + Literal::_new(imp::Literal::byte_character(byte)) + } + /// Byte string literal. - pub fn byte_string(s: &[u8]) -> Literal { - Literal::_new(imp::Literal::byte_string(s)) + pub fn byte_string(bytes: &[u8]) -> Literal { + Literal::_new(imp::Literal::byte_string(bytes)) + } + + /// C string literal. + pub fn c_string(string: &CStr) -> Literal { + Literal::_new(imp::Literal::c_string(string)) } /// Returns the span encompassing this literal. @@ -1228,19 +1271,123 @@ impl Literal { /// Warning: the underlying [`proc_macro::Literal::subspan`] method is /// nightly-only. When called from within a procedural macro not using a /// nightly compiler, this method will always return `None`. - /// - /// [`proc_macro::Literal::subspan`]: https://doc.rust-lang.org/proc_macro/struct.Literal.html#method.subspan pub fn subspan<R: RangeBounds<usize>>(&self, range: R) -> Option<Span> { self.inner.subspan(range).map(Span::_new) } + /// Returns the unescaped string value if this is a string literal. + #[cfg(procmacro2_semver_exempt)] + pub fn str_value(&self) -> Result<String, ConversionErrorKind> { + let repr = self.to_string(); + + if repr.starts_with('"') && repr[1..].ends_with('"') { + let quoted = &repr[1..repr.len() - 1]; + let mut value = String::with_capacity(quoted.len()); + let mut error = None; + rustc_literal_escaper::unescape_str(quoted, |_range, res| match res { + Ok(ch) => value.push(ch), + Err(err) => { + if err.is_fatal() { + error = Some(ConversionErrorKind::FailedToUnescape(err)); + } + } + }); + return match error { + Some(error) => Err(error), + None => Ok(value), + }; + } + + if repr.starts_with('r') { + if let Some(raw) = get_raw(&repr[1..]) { + return Ok(raw.to_owned()); + } + } + + Err(ConversionErrorKind::InvalidLiteralKind) + } + + /// Returns the unescaped string value (including nul terminator) if this is + /// a c-string literal. 
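// Usage sketch, not part of the patch: the Literal constructors wrapped above, including the
// newly added byte_character and c_string. Assumes a dependency on the proc-macro2 version this
// diff vendors; the expected strings mirror the escaping rules in the fallback code earlier in
// this patch.
use proc_macro2::Literal;
use std::ffi::CStr;

fn main() {
    assert_eq!(Literal::byte_character(b'\n').to_string(), r"b'\n'");
    assert_eq!(Literal::byte_string(b"ab\0cd").to_string(), r#"b"ab\0cd""#);
    let greeting = CStr::from_bytes_with_nul(b"hi\0").unwrap();
    assert_eq!(Literal::c_string(greeting).to_string(), r#"c"hi""#);
    assert_eq!(Literal::character('\t').to_string(), r"'\t'");
}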
+ #[cfg(procmacro2_semver_exempt)] + pub fn cstr_value(&self) -> Result<Vec<u8>, ConversionErrorKind> { + let repr = self.to_string(); + + if repr.starts_with("c\"") && repr[2..].ends_with('"') { + let quoted = &repr[2..repr.len() - 1]; + let mut value = Vec::with_capacity(quoted.len()); + let mut error = None; + rustc_literal_escaper::unescape_c_str(quoted, |_range, res| match res { + Ok(MixedUnit::Char(ch)) => { + value.extend_from_slice(ch.get().encode_utf8(&mut [0; 4]).as_bytes()); + } + Ok(MixedUnit::HighByte(byte)) => value.push(byte.get()), + Err(err) => { + if err.is_fatal() { + error = Some(ConversionErrorKind::FailedToUnescape(err)); + } + } + }); + return match error { + Some(error) => Err(error), + None => { + value.push(b'\0'); + Ok(value) + } + }; + } + + if repr.starts_with("cr") { + if let Some(raw) = get_raw(&repr[2..]) { + let mut value = Vec::with_capacity(raw.len() + 1); + value.extend_from_slice(raw.as_bytes()); + value.push(b'\0'); + return Ok(value); + } + } + + Err(ConversionErrorKind::InvalidLiteralKind) + } + + /// Returns the unescaped string value if this is a byte string literal. + #[cfg(procmacro2_semver_exempt)] + pub fn byte_str_value(&self) -> Result<Vec<u8>, ConversionErrorKind> { + let repr = self.to_string(); + + if repr.starts_with("b\"") && repr[2..].ends_with('"') { + let quoted = &repr[2..repr.len() - 1]; + let mut value = Vec::with_capacity(quoted.len()); + let mut error = None; + rustc_literal_escaper::unescape_byte_str(quoted, |_range, res| match res { + Ok(byte) => value.push(byte), + Err(err) => { + if err.is_fatal() { + error = Some(ConversionErrorKind::FailedToUnescape(err)); + } + } + }); + return match error { + Some(error) => Err(error), + None => Ok(value), + }; + } + + if repr.starts_with("br") { + if let Some(raw) = get_raw(&repr[2..]) { + return Ok(raw.as_bytes().to_owned()); + } + } + + Err(ConversionErrorKind::InvalidLiteralKind) + } + // Intended for the `quote!` macro to use when constructing a proc-macro2 // token out of a macro_rules $:literal token, which is already known to be // a valid literal. This avoids reparsing/validating the literal's string // representation. This is not public API other than for quote. #[doc(hidden)] pub unsafe fn from_str_unchecked(repr: &str) -> Self { - Literal::_new(imp::Literal::from_str_unchecked(repr)) + Literal::_new(unsafe { imp::Literal::from_str_unchecked(repr) }) } } @@ -1248,10 +1395,13 @@ impl FromStr for Literal { type Err = LexError; fn from_str(repr: &str) -> Result<Self, LexError> { - repr.parse().map(Literal::_new).map_err(|inner| LexError { - inner, - _marker: Marker, - }) + match imp::Literal::from_str_checked(repr) { + Ok(lit) => Ok(Literal::_new(lit)), + Err(lex) => Err(LexError { + inner: lex, + _marker: MARKER, + }), + } } } @@ -1267,9 +1417,36 @@ impl Display for Literal { } } +/// Error when retrieving a string literal's unescaped value. +#[cfg(procmacro2_semver_exempt)] +#[derive(Debug, PartialEq, Eq)] +pub enum ConversionErrorKind { + /// The literal is of the right string kind, but its contents are malformed + /// in a way that cannot be unescaped to a value. + FailedToUnescape(EscapeError), + /// The literal is not of the string kind whose value was requested, for + /// example byte string vs UTF-8 string. + InvalidLiteralKind, +} + +// ###"..."### -> ... 
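// Usage sketch, not part of the patch: FromStr for Literal now delegates to the
// from_str_checked constructor shown earlier in this patch, which also accepts a leading '-'
// on numeric literals. Assumes a dependency on proc-macro2.
use proc_macro2::Literal;

fn main() {
    let float: Literal = "1.5e3f64".parse().unwrap();
    assert_eq!(float.to_string(), "1.5e3f64");

    let negative: Literal = "-42i32".parse().unwrap();
    assert_eq!(negative.to_string(), "-42i32");

    // Input that is not exactly one literal is rejected with a LexError.
    assert!("1 + 1".parse::<Literal>().is_err());
    assert!("-foo".parse::<Literal>().is_err());
}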
+#[cfg(procmacro2_semver_exempt)] +fn get_raw(repr: &str) -> Option<&str> { + let pounds = repr.len() - repr.trim_start_matches('#').len(); + if repr.len() >= pounds + 1 + 1 + pounds + && repr[pounds..].starts_with('"') + && repr.trim_end_matches('#').len() + pounds == repr.len() + && repr[..repr.len() - pounds].ends_with('"') + { + Some(&repr[pounds + 1..repr.len() - pounds - 1]) + } else { + None + } +} + /// Public implementation details for the `TokenStream` type, such as iterators. pub mod token_stream { - use crate::marker::Marker; + use crate::marker::{ProcMacroAutoTraits, MARKER}; use crate::{imp, TokenTree}; use core::fmt::{self, Debug}; @@ -1282,7 +1459,7 @@ pub mod token_stream { #[derive(Clone)] pub struct IntoIter { inner: imp::TokenTreeIter, - _marker: Marker, + _marker: ProcMacroAutoTraits, } impl Iterator for IntoIter { @@ -1311,7 +1488,7 @@ pub mod token_stream { fn into_iter(self) -> IntoIter { IntoIter { inner: self.inner.into_iter(), - _marker: Marker, + _marker: MARKER, } } } diff --git a/vendor/proc-macro2/src/location.rs b/vendor/proc-macro2/src/location.rs index 463026c2..7190e2d0 100644 --- a/vendor/proc-macro2/src/location.rs +++ b/vendor/proc-macro2/src/location.rs @@ -3,7 +3,7 @@ use core::cmp::Ordering; /// A line-column pair representing the start or end of a `Span`. /// /// This type is semver exempt and not exposed by default. -#[cfg_attr(doc_cfg, doc(cfg(feature = "span-locations")))] +#[cfg_attr(docsrs, doc(cfg(feature = "span-locations")))] #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] pub struct LineColumn { /// The 1-indexed line in the source file on which the span starts or ends diff --git a/vendor/proc-macro2/src/marker.rs b/vendor/proc-macro2/src/marker.rs index e648dd21..23b94ce6 100644 --- a/vendor/proc-macro2/src/marker.rs +++ b/vendor/proc-macro2/src/marker.rs @@ -4,15 +4,14 @@ use core::panic::{RefUnwindSafe, UnwindSafe}; // Zero sized marker with the correct set of autotrait impls we want all proc // macro types to have. 
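// Usage sketch, not part of the patch: the token_stream::IntoIter shown above is the public way
// to walk a TokenStream; nested streams are reached through Group::stream(). Assumes a
// dependency on proc-macro2.
use proc_macro2::{TokenStream, TokenTree};

fn count_leaves(stream: TokenStream) -> usize {
    let mut count = 0;
    for tree in stream {
        match tree {
            TokenTree::Group(group) => count += count_leaves(group.stream()),
            TokenTree::Ident(_) | TokenTree::Punct(_) | TokenTree::Literal(_) => count += 1,
        }
    }
    count
}

fn main() {
    let tokens: TokenStream = "fn add(a: u32, b: u32) -> u32 { a + b }".parse().unwrap();
    println!("{} leaf tokens", count_leaves(tokens));
}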
-pub(crate) type Marker = PhantomData<ProcMacroAutoTraits>; +#[derive(Copy, Clone)] +#[cfg_attr( + all(procmacro2_semver_exempt, any(not(wrap_proc_macro), super_unstable)), + derive(PartialEq, Eq) +)] +pub(crate) struct ProcMacroAutoTraits(PhantomData<Rc<()>>); -pub(crate) use self::value::*; - -mod value { - pub(crate) use core::marker::PhantomData as Marker; -} - -pub(crate) struct ProcMacroAutoTraits(Rc<()>); +pub(crate) const MARKER: ProcMacroAutoTraits = ProcMacroAutoTraits(PhantomData); impl UnwindSafe for ProcMacroAutoTraits {} impl RefUnwindSafe for ProcMacroAutoTraits {} diff --git a/vendor/proc-macro2/src/num.rs b/vendor/proc-macro2/src/num.rs new file mode 100644 index 00000000..3ac82c86 --- /dev/null +++ b/vendor/proc-macro2/src/num.rs @@ -0,0 +1,17 @@ +// TODO: use NonZero<char> in Rust 1.89+ +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub struct NonZeroChar(char); + +impl NonZeroChar { + pub fn new(ch: char) -> Option<Self> { + if ch == '\0' { + None + } else { + Some(NonZeroChar(ch)) + } + } + + pub fn get(self) -> char { + self.0 + } +} diff --git a/vendor/proc-macro2/src/parse.rs b/vendor/proc-macro2/src/parse.rs index 1430d736..b8be403f 100644 --- a/vendor/proc-macro2/src/parse.rs +++ b/vendor/proc-macro2/src/parse.rs @@ -1,5 +1,5 @@ use crate::fallback::{ - is_ident_continue, is_ident_start, Group, LexError, Literal, Span, TokenStream, + self, is_ident_continue, is_ident_start, Group, Ident, LexError, Literal, Span, TokenStream, TokenStreamBuilder, }; use crate::{Delimiter, Punct, Spacing, TokenTree}; @@ -8,13 +8,13 @@ use core::str::{Bytes, CharIndices, Chars}; #[derive(Copy, Clone, Eq, PartialEq)] pub(crate) struct Cursor<'a> { - pub rest: &'a str, + pub(crate) rest: &'a str, #[cfg(span_locations)] - pub off: u32, + pub(crate) off: u32, } impl<'a> Cursor<'a> { - pub fn advance(&self, bytes: usize) -> Cursor<'a> { + pub(crate) fn advance(&self, bytes: usize) -> Cursor<'a> { let (_front, rest) = self.rest.split_at(bytes); Cursor { rest, @@ -23,22 +23,22 @@ impl<'a> Cursor<'a> { } } - pub fn starts_with(&self, s: &str) -> bool { + pub(crate) fn starts_with(&self, s: &str) -> bool { self.rest.starts_with(s) } - pub fn starts_with_char(&self, ch: char) -> bool { + pub(crate) fn starts_with_char(&self, ch: char) -> bool { self.rest.starts_with(ch) } - pub fn starts_with_fn<Pattern>(&self, f: Pattern) -> bool + pub(crate) fn starts_with_fn<Pattern>(&self, f: Pattern) -> bool where Pattern: FnMut(char) -> bool, { self.rest.starts_with(f) } - pub fn is_empty(&self) -> bool { + pub(crate) fn is_empty(&self) -> bool { self.rest.is_empty() } @@ -300,7 +300,8 @@ fn ident_any(input: Cursor) -> PResult<crate::Ident> { let (rest, sym) = ident_not_raw(rest)?; if !raw { - let ident = crate::Ident::new(sym, crate::Span::call_site()); + let ident = + crate::Ident::_new_fallback(Ident::new_unchecked(sym, fallback::Span::call_site())); return Ok((rest, ident)); } @@ -309,7 +310,8 @@ fn ident_any(input: Cursor) -> PResult<crate::Ident> { _ => {} } - let ident = crate::Ident::_new_raw(sym, crate::Span::call_site()); + let ident = + crate::Ident::_new_fallback(Ident::new_raw_unchecked(sym, fallback::Span::call_site())); Ok((rest, ident)) } @@ -855,7 +857,7 @@ fn digits(mut input: Cursor) -> Result<Cursor, Reject> { continue; } _ => break, - }; + } len += 1; empty = false; } @@ -869,7 +871,10 @@ fn digits(mut input: Cursor) -> Result<Cursor, Reject> { fn punct(input: Cursor) -> PResult<Punct> { let (rest, ch) = punct_char(input)?; if ch == '\'' { - if 
ident_any(rest)?.0.starts_with_char('\'') { + let (after_lifetime, _ident) = ident_any(rest)?; + if after_lifetime.starts_with_char('\'') + || (after_lifetime.starts_with_char('#') && !rest.starts_with("r#")) + { Err(Reject) } else { Ok((rest, Punct::new('\'', Spacing::Joint))) @@ -908,12 +913,13 @@ fn doc_comment<'a>(input: Cursor<'a>, trees: &mut TokenStreamBuilder) -> PResult #[cfg(span_locations)] let lo = input.off; let (rest, (comment, inner)) = doc_comment_contents(input)?; - let span = crate::Span::_new_fallback(Span { + let fallback_span = Span { #[cfg(span_locations)] lo, #[cfg(span_locations)] hi: rest.off, - }); + }; + let span = crate::Span::_new_fallback(fallback_span); let mut scan_for_bare_cr = comment; while let Some(cr) = scan_for_bare_cr.find('\r') { @@ -934,10 +940,10 @@ fn doc_comment<'a>(input: Cursor<'a>, trees: &mut TokenStreamBuilder) -> PResult trees.push_token_from_parser(TokenTree::Punct(bang)); } - let doc_ident = crate::Ident::new("doc", span); + let doc_ident = crate::Ident::_new_fallback(Ident::new_unchecked("doc", fallback_span)); let mut equal = Punct::new('=', Spacing::Alone); equal.set_span(span); - let mut literal = crate::Literal::string(comment); + let mut literal = crate::Literal::_new_fallback(Literal::string(comment)); literal.set_span(span); let mut bracketed = TokenStreamBuilder::with_capacity(3); bracketed.push_token_from_parser(TokenTree::Ident(doc_ident)); diff --git a/vendor/proc-macro2/src/probe.rs b/vendor/proc-macro2/src/probe.rs new file mode 100644 index 00000000..b67f5203 --- /dev/null +++ b/vendor/proc-macro2/src/probe.rs @@ -0,0 +1,10 @@ +#![allow(dead_code)] + +#[cfg(proc_macro_span)] +pub(crate) mod proc_macro_span; + +#[cfg(proc_macro_span_file)] +pub(crate) mod proc_macro_span_file; + +#[cfg(proc_macro_span_location)] +pub(crate) mod proc_macro_span_location; diff --git a/vendor/proc-macro2/src/probe/proc_macro_span.rs b/vendor/proc-macro2/src/probe/proc_macro_span.rs new file mode 100644 index 00000000..2d7d44e0 --- /dev/null +++ b/vendor/proc-macro2/src/probe/proc_macro_span.rs @@ -0,0 +1,51 @@ +// This code exercises the surface area that we expect of Span's unstable API. +// If the current toolchain is able to compile it, then proc-macro2 is able to +// offer these APIs too. + +#![cfg_attr(procmacro2_build_probe, feature(proc_macro_span))] + +extern crate proc_macro; + +use core::ops::{Range, RangeBounds}; +use proc_macro::{Literal, Span}; +use std::path::PathBuf; + +pub fn byte_range(this: &Span) -> Range<usize> { + this.byte_range() +} + +pub fn start(this: &Span) -> Span { + this.start() +} + +pub fn end(this: &Span) -> Span { + this.end() +} + +pub fn line(this: &Span) -> usize { + this.line() +} + +pub fn column(this: &Span) -> usize { + this.column() +} + +pub fn file(this: &Span) -> String { + this.file() +} + +pub fn local_file(this: &Span) -> Option<PathBuf> { + this.local_file() +} + +pub fn join(this: &Span, other: Span) -> Option<Span> { + this.join(other) +} + +pub fn subspan<R: RangeBounds<usize>>(this: &Literal, range: R) -> Option<Span> { + this.subspan(range) +} + +// Include in sccache cache key. +#[cfg(procmacro2_build_probe)] +const _: Option<&str> = option_env!("RUSTC_BOOTSTRAP"); diff --git a/vendor/proc-macro2/src/probe/proc_macro_span_file.rs b/vendor/proc-macro2/src/probe/proc_macro_span_file.rs new file mode 100644 index 00000000..8b76bdf5 --- /dev/null +++ b/vendor/proc-macro2/src/probe/proc_macro_span_file.rs @@ -0,0 +1,14 @@ +// The subset of Span's API stabilized in Rust 1.88. 
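// Usage sketch, not part of the patch: the probe sources above are meant to be compiled by a
// build script; if a probe builds, the matching cfg (e.g. proc_macro_span_location) is enabled
// for the library code. This is a heavily simplified sketch of that idea only — the flags,
// paths and error handling below are illustrative stand-ins, not proc-macro2's actual build.rs.
use std::env;
use std::path::PathBuf;
use std::process::Command;

fn main() {
    let rustc = env::var_os("RUSTC").unwrap_or_else(|| "rustc".into());
    let out_dir = PathBuf::from(env::var_os("OUT_DIR").expect("cargo sets OUT_DIR"));

    let compiles = Command::new(rustc)
        .args(["--edition=2021", "--crate-type=lib", "--emit=metadata"])
        .arg("src/probe/proc_macro_span_location.rs")
        .arg("--out-dir")
        .arg(&out_dir)
        .status()
        .map(|status| status.success())
        .unwrap_or(false);

    if compiles {
        println!("cargo:rustc-cfg=proc_macro_span_location");
    }
}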
+ +extern crate proc_macro; + +use proc_macro::Span; +use std::path::PathBuf; + +pub fn file(this: &Span) -> String { + this.file() +} + +pub fn local_file(this: &Span) -> Option<PathBuf> { + this.local_file() +} diff --git a/vendor/proc-macro2/src/probe/proc_macro_span_location.rs b/vendor/proc-macro2/src/probe/proc_macro_span_location.rs new file mode 100644 index 00000000..79da34af --- /dev/null +++ b/vendor/proc-macro2/src/probe/proc_macro_span_location.rs @@ -0,0 +1,21 @@ +// The subset of Span's API stabilized in Rust 1.88. + +extern crate proc_macro; + +use proc_macro::Span; + +pub fn start(this: &Span) -> Span { + this.start() +} + +pub fn end(this: &Span) -> Span { + this.end() +} + +pub fn line(this: &Span) -> usize { + this.line() +} + +pub fn column(this: &Span) -> usize { + this.column() +} diff --git a/vendor/proc-macro2/src/rcvec.rs b/vendor/proc-macro2/src/rcvec.rs index 37955afb..23edc77d 100644 --- a/vendor/proc-macro2/src/rcvec.rs +++ b/vendor/proc-macro2/src/rcvec.rs @@ -22,19 +22,19 @@ pub(crate) struct RcVecIntoIter<T> { } impl<T> RcVec<T> { - pub fn is_empty(&self) -> bool { + pub(crate) fn is_empty(&self) -> bool { self.inner.is_empty() } - pub fn len(&self) -> usize { + pub(crate) fn len(&self) -> usize { self.inner.len() } - pub fn iter(&self) -> slice::Iter<T> { + pub(crate) fn iter(&self) -> slice::Iter<T> { self.inner.iter() } - pub fn make_mut(&mut self) -> RcVecMut<T> + pub(crate) fn make_mut(&mut self) -> RcVecMut<T> where T: Clone, { @@ -43,12 +43,12 @@ impl<T> RcVec<T> { } } - pub fn get_mut(&mut self) -> Option<RcVecMut<T>> { + pub(crate) fn get_mut(&mut self) -> Option<RcVecMut<T>> { let inner = Rc::get_mut(&mut self.inner)?; Some(RcVecMut { inner }) } - pub fn make_owned(mut self) -> RcVecBuilder<T> + pub(crate) fn make_owned(mut self) -> RcVecBuilder<T> where T: Clone, { @@ -62,31 +62,31 @@ impl<T> RcVec<T> { } impl<T> RcVecBuilder<T> { - pub fn new() -> Self { + pub(crate) fn new() -> Self { RcVecBuilder { inner: Vec::new() } } - pub fn with_capacity(cap: usize) -> Self { + pub(crate) fn with_capacity(cap: usize) -> Self { RcVecBuilder { inner: Vec::with_capacity(cap), } } - pub fn push(&mut self, element: T) { + pub(crate) fn push(&mut self, element: T) { self.inner.push(element); } - pub fn extend(&mut self, iter: impl IntoIterator<Item = T>) { + pub(crate) fn extend(&mut self, iter: impl IntoIterator<Item = T>) { self.inner.extend(iter); } - pub fn as_mut(&mut self) -> RcVecMut<T> { + pub(crate) fn as_mut(&mut self) -> RcVecMut<T> { RcVecMut { inner: &mut self.inner, } } - pub fn build(self) -> RcVec<T> { + pub(crate) fn build(self) -> RcVec<T> { RcVec { inner: Rc::new(self.inner), } @@ -94,20 +94,21 @@ impl<T> RcVecBuilder<T> { } impl<'a, T> RcVecMut<'a, T> { - pub fn push(&mut self, element: T) { + pub(crate) fn push(&mut self, element: T) { self.inner.push(element); } - pub fn extend(&mut self, iter: impl IntoIterator<Item = T>) { + pub(crate) fn extend(&mut self, iter: impl IntoIterator<Item = T>) { self.inner.extend(iter); } - pub fn pop(&mut self) -> Option<T> { - self.inner.pop() + pub(crate) fn as_mut(&mut self) -> RcVecMut<T> { + RcVecMut { inner: self.inner } } - pub fn as_mut(&mut self) -> RcVecMut<T> { - RcVecMut { inner: self.inner } + pub(crate) fn take(self) -> RcVecBuilder<T> { + let vec = mem::take(self.inner); + RcVecBuilder { inner: vec } } } diff --git a/vendor/proc-macro2/src/rustc_literal_escaper.rs b/vendor/proc-macro2/src/rustc_literal_escaper.rs new file mode 100644 index 00000000..8233e5d6 --- /dev/null +++ 
b/vendor/proc-macro2/src/rustc_literal_escaper.rs @@ -0,0 +1,701 @@ +// Vendored from rustc-literal-escaper v0.0.5 +// https://github.com/rust-lang/literal-escaper/tree/v0.0.5 + +//! Utilities for validating (raw) string, char, and byte literals and +//! turning escape sequences into the values they represent. + +use crate::num::NonZeroChar; +use std::ffi::CStr; +use std::num::NonZeroU8; +use std::ops::Range; +use std::str::Chars; + +/// Errors and warnings that can occur during string, char, and byte unescaping. +/// +/// Mostly relating to malformed escape sequences, but also a few other problems. +#[derive(Debug, PartialEq, Eq)] +pub enum EscapeError { + /// Expected 1 char, but 0 were found. + ZeroChars, + /// Expected 1 char, but more than 1 were found. + MoreThanOneChar, + + /// Escaped '\' character without continuation. + LoneSlash, + /// Invalid escape character (e.g. '\z'). + InvalidEscape, + /// Raw '\r' encountered. + BareCarriageReturn, + /// Raw '\r' encountered in raw string. + BareCarriageReturnInRawString, + /// Unescaped character that was expected to be escaped (e.g. raw '\t'). + EscapeOnlyChar, + + /// Numeric character escape is too short (e.g. '\x1'). + TooShortHexEscape, + /// Invalid character in numeric escape (e.g. '\xz') + InvalidCharInHexEscape, + /// Character code in numeric escape is non-ascii (e.g. '\xFF'). + OutOfRangeHexEscape, + + /// '\u' not followed by '{'. + NoBraceInUnicodeEscape, + /// Non-hexadecimal value in '\u{..}'. + InvalidCharInUnicodeEscape, + /// '\u{}' + EmptyUnicodeEscape, + /// No closing brace in '\u{..}', e.g. '\u{12'. + UnclosedUnicodeEscape, + /// '\u{_12}' + LeadingUnderscoreUnicodeEscape, + /// More than 6 characters in '\u{..}', e.g. '\u{10FFFF_FF}' + OverlongUnicodeEscape, + /// Invalid in-bound unicode character code, e.g. '\u{DFFF}'. + LoneSurrogateUnicodeEscape, + /// Out of bounds unicode character code, e.g. '\u{FFFFFF}'. + OutOfRangeUnicodeEscape, + + /// Unicode escape code in byte literal. + UnicodeEscapeInByte, + /// Non-ascii character in byte literal, byte string literal, or raw byte string literal. + NonAsciiCharInByte, + + /// `\0` in a C string literal. + NulInCStr, + + /// After a line ending with '\', the next line contains whitespace + /// characters that are not skipped. + UnskippedWhitespaceWarning, + + /// After a line ending with '\', multiple lines are skipped. + MultipleSkippedLinesWarning, +} + +impl EscapeError { + /// Returns true for actual errors, as opposed to warnings. + pub fn is_fatal(&self) -> bool { + !matches!( + self, + EscapeError::UnskippedWhitespaceWarning | EscapeError::MultipleSkippedLinesWarning + ) + } +} + +/// Check a raw string literal for validity +/// +/// Takes the contents of a raw string literal (without quotes) +/// and produces a sequence of characters or errors, +/// which are returned by invoking `callback`. +/// NOTE: Does no escaping, but produces errors for bare carriage return ('\r'). +pub fn check_raw_str(src: &str, callback: impl FnMut(Range<usize>, Result<char, EscapeError>)) { + str::check_raw(src, callback); +} + +/// Check a raw byte string literal for validity +/// +/// Takes the contents of a raw byte string literal (without quotes) +/// and produces a sequence of bytes or errors, +/// which are returned by invoking `callback`. +/// NOTE: Does no escaping, but produces errors for bare carriage return ('\r'). 
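// Usage sketch, not part of the patch: the vendored module above is crate-private, so this
// drives the same callback-style API through the upstream rustc-literal-escaper crate it was
// copied from, assuming that crate exposes the signatures shown in this file.
use rustc_literal_escaper::{check_raw_str, EscapeError};

fn main() {
    // Raw string contents are not unescaped, but a bare '\r' is still reported.
    let mut problems = Vec::new();
    check_raw_str("line one\rline two", |range, result| {
        if let Err(error) = result {
            problems.push((range, error));
        }
    });
    assert_eq!(problems.len(), 1);
    assert!(matches!(problems[0].1, EscapeError::BareCarriageReturnInRawString));
    assert!(problems[0].1.is_fatal());
}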
+pub fn check_raw_byte_str(src: &str, callback: impl FnMut(Range<usize>, Result<u8, EscapeError>)) { + <[u8]>::check_raw(src, callback); +} + +/// Check a raw C string literal for validity +/// +/// Takes the contents of a raw C string literal (without quotes) +/// and produces a sequence of characters or errors, +/// which are returned by invoking `callback`. +/// NOTE: Does no escaping, but produces errors for bare carriage return ('\r'). +pub fn check_raw_c_str( + src: &str, + callback: impl FnMut(Range<usize>, Result<NonZeroChar, EscapeError>), +) { + CStr::check_raw(src, callback); +} + +/// Trait for checking raw string literals for validity +trait CheckRaw { + /// Unit type of the implementing string type (`char` for string, `u8` for byte string) + type RawUnit; + + /// Converts chars to the unit type of the literal type + fn char2raw_unit(c: char) -> Result<Self::RawUnit, EscapeError>; + + /// Takes the contents of a raw literal (without quotes) + /// and produces a sequence of `Result<Self::RawUnit, EscapeError>` + /// which are returned via `callback`. + /// + /// NOTE: Does no escaping, but produces errors for bare carriage return ('\r'). + fn check_raw( + src: &str, + mut callback: impl FnMut(Range<usize>, Result<Self::RawUnit, EscapeError>), + ) { + let mut chars = src.chars(); + while let Some(c) = chars.next() { + let start = src.len() - chars.as_str().len() - c.len_utf8(); + let res = match c { + '\r' => Err(EscapeError::BareCarriageReturnInRawString), + _ => Self::char2raw_unit(c), + }; + let end = src.len() - chars.as_str().len(); + callback(start..end, res); + } + + // Unfortunately, it is a bit unclear whether the following equivalent code is slower or faster: bug 141855 + // src.char_indices().for_each(|(pos, c)| { + // callback( + // pos..pos + c.len_utf8(), + // if c == '\r' { + // Err(EscapeError::BareCarriageReturnInRawString) + // } else { + // Self::char2raw_unit(c) + // }, + // ); + // }); + } +} + +impl CheckRaw for str { + type RawUnit = char; + + #[inline] + fn char2raw_unit(c: char) -> Result<Self::RawUnit, EscapeError> { + Ok(c) + } +} + +impl CheckRaw for [u8] { + type RawUnit = u8; + + #[inline] + fn char2raw_unit(c: char) -> Result<Self::RawUnit, EscapeError> { + char2byte(c) + } +} + +/// Turn an ascii char into a byte +#[inline] +fn char2byte(c: char) -> Result<u8, EscapeError> { + // do NOT do: c.try_into().ok_or(EscapeError::NonAsciiCharInByte) + if c.is_ascii() { + Ok(c as u8) + } else { + Err(EscapeError::NonAsciiCharInByte) + } +} + +impl CheckRaw for CStr { + type RawUnit = NonZeroChar; + + #[inline] + fn char2raw_unit(c: char) -> Result<Self::RawUnit, EscapeError> { + NonZeroChar::new(c).ok_or(EscapeError::NulInCStr) + } +} + +/// Unescape a char literal +/// +/// Takes the contents of a char literal (without quotes), +/// and returns an unescaped char or an error. +#[inline] +pub fn unescape_char(src: &str) -> Result<char, EscapeError> { + str::unescape_single(&mut src.chars()) +} + +/// Unescape a byte literal +/// +/// Takes the contents of a byte literal (without quotes), +/// and returns an unescaped byte or an error. +#[inline] +pub fn unescape_byte(src: &str) -> Result<u8, EscapeError> { + <[u8]>::unescape_single(&mut src.chars()) +} + +/// Unescape a string literal +/// +/// Takes the contents of a string literal (without quotes) +/// and produces a sequence of escaped characters or errors, +/// which are returned by invoking `callback`. 
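// Usage sketch, not part of the patch: the single-token helpers unescape_char and unescape_byte
// defined above, exercised through the upstream rustc-literal-escaper crate (assumed to expose
// the same functions as this vendored copy). Inputs are the contents of a char/byte literal
// without the surrounding quotes.
use rustc_literal_escaper::{unescape_byte, unescape_char, EscapeError};

fn main() {
    assert_eq!(unescape_char(r"\n"), Ok('\n'));
    assert_eq!(unescape_char(r"\u{1F980}"), Ok('\u{1F980}'));
    assert_eq!(unescape_byte(r"\xFF"), Ok(0xFF));
    // Unicode escapes are rejected inside byte literals.
    assert_eq!(unescape_byte(r"\u{41}"), Err(EscapeError::UnicodeEscapeInByte));
}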
+pub fn unescape_str(src: &str, callback: impl FnMut(Range<usize>, Result<char, EscapeError>)) { + str::unescape(src, callback) +} + +/// Unescape a byte string literal +/// +/// Takes the contents of a byte string literal (without quotes) +/// and produces a sequence of escaped bytes or errors, +/// which are returned by invoking `callback`. +pub fn unescape_byte_str(src: &str, callback: impl FnMut(Range<usize>, Result<u8, EscapeError>)) { + <[u8]>::unescape(src, callback) +} + +/// Unescape a C string literal +/// +/// Takes the contents of a C string literal (without quotes) +/// and produces a sequence of escaped MixedUnits or errors, +/// which are returned by invoking `callback`. +pub fn unescape_c_str( + src: &str, + callback: impl FnMut(Range<usize>, Result<MixedUnit, EscapeError>), +) { + CStr::unescape(src, callback) +} + +/// Enum representing either a char or a byte +/// +/// Used for mixed utf8 string literals, i.e. those that allow both unicode +/// chars and high bytes. +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub enum MixedUnit { + /// Used for ASCII chars (written directly or via `\x00`..`\x7f` escapes) + /// and Unicode chars (written directly or via `\u` escapes). + /// + /// For example, if '¥' appears in a string it is represented here as + /// `MixedUnit::Char('¥')`, and it will be appended to the relevant byte + /// string as the two-byte UTF-8 sequence `[0xc2, 0xa5]` + Char(NonZeroChar), + + /// Used for high bytes (`\x80`..`\xff`). + /// + /// For example, if `\xa5` appears in a string it is represented here as + /// `MixedUnit::HighByte(0xa5)`, and it will be appended to the relevant + /// byte string as the single byte `0xa5`. + HighByte(NonZeroU8), +} + +impl From<NonZeroChar> for MixedUnit { + #[inline] + fn from(c: NonZeroChar) -> Self { + MixedUnit::Char(c) + } +} + +impl From<NonZeroU8> for MixedUnit { + #[inline] + fn from(byte: NonZeroU8) -> Self { + if byte.get().is_ascii() { + MixedUnit::Char(NonZeroChar::new(byte.get() as char).unwrap()) + } else { + MixedUnit::HighByte(byte) + } + } +} + +impl TryFrom<char> for MixedUnit { + type Error = EscapeError; + + #[inline] + fn try_from(c: char) -> Result<Self, EscapeError> { + NonZeroChar::new(c) + .map(MixedUnit::Char) + .ok_or(EscapeError::NulInCStr) + } +} + +impl TryFrom<u8> for MixedUnit { + type Error = EscapeError; + + #[inline] + fn try_from(byte: u8) -> Result<Self, EscapeError> { + NonZeroU8::new(byte) + .map(From::from) + .ok_or(EscapeError::NulInCStr) + } +} + +/// Trait for unescaping escape sequences in strings +trait Unescape { + /// Unit type of the implementing string type (`char` for string, `u8` for byte string) + type Unit; + + /// Result of unescaping the zero char ('\0') + const ZERO_RESULT: Result<Self::Unit, EscapeError>; + + /// Converts non-zero bytes to the unit type + fn nonzero_byte2unit(b: NonZeroU8) -> Self::Unit; + + /// Converts chars to the unit type + fn char2unit(c: char) -> Result<Self::Unit, EscapeError>; + + /// Converts the byte of a hex escape to the unit type + fn hex2unit(b: u8) -> Result<Self::Unit, EscapeError>; + + /// Converts the result of a unicode escape to the unit type + fn unicode2unit(r: Result<char, EscapeError>) -> Result<Self::Unit, EscapeError>; + + /// Unescape a single unit (single quote syntax) + fn unescape_single(chars: &mut Chars<'_>) -> Result<Self::Unit, EscapeError> { + let res = match chars.next().ok_or(EscapeError::ZeroChars)? 
{ + '\\' => Self::unescape_1(chars), + '\n' | '\t' | '\'' => Err(EscapeError::EscapeOnlyChar), + '\r' => Err(EscapeError::BareCarriageReturn), + c => Self::char2unit(c), + }?; + if chars.next().is_some() { + return Err(EscapeError::MoreThanOneChar); + } + Ok(res) + } + + /// Unescape the first unit of a string (double quoted syntax) + fn unescape_1(chars: &mut Chars<'_>) -> Result<Self::Unit, EscapeError> { + // Previous character was '\\', unescape what follows. + let c = chars.next().ok_or(EscapeError::LoneSlash)?; + if c == '0' { + Self::ZERO_RESULT + } else { + simple_escape(c) + .map(|b| Self::nonzero_byte2unit(b)) + .or_else(|c| match c { + 'x' => Self::hex2unit(hex_escape(chars)?), + 'u' => Self::unicode2unit({ + let value = unicode_escape(chars)?; + if value > char::MAX as u32 { + Err(EscapeError::OutOfRangeUnicodeEscape) + } else { + char::from_u32(value).ok_or(EscapeError::LoneSurrogateUnicodeEscape) + } + }), + _ => Err(EscapeError::InvalidEscape), + }) + } + } + + /// Unescape a string literal + /// + /// Takes the contents of a raw string literal (without quotes) + /// and produces a sequence of `Result<Self::Unit, EscapeError>` + /// which are returned via `callback`. + fn unescape( + src: &str, + mut callback: impl FnMut(Range<usize>, Result<Self::Unit, EscapeError>), + ) { + let mut chars = src.chars(); + while let Some(c) = chars.next() { + let start = src.len() - chars.as_str().len() - c.len_utf8(); + let res = match c { + '\\' => { + if let Some(b'\n') = chars.as_str().as_bytes().first() { + let _ = chars.next(); + // skip whitespace for backslash newline, see [Rust language reference] + // (https://doc.rust-lang.org/reference/tokens.html#string-literals). + let callback_err = |range, err| callback(range, Err(err)); + skip_ascii_whitespace(&mut chars, start, callback_err); + continue; + } else { + Self::unescape_1(&mut chars) + } + } + '"' => Err(EscapeError::EscapeOnlyChar), + '\r' => Err(EscapeError::BareCarriageReturn), + c => Self::char2unit(c), + }; + let end = src.len() - chars.as_str().len(); + callback(start..end, res); + } + } +} + +/// Interpret a non-nul ASCII escape +/// +/// Parses the character of an ASCII escape (except nul) without the leading backslash. +#[inline] // single use in Unescape::unescape_1 +fn simple_escape(c: char) -> Result<NonZeroU8, char> { + // Previous character was '\\', unescape what follows. + Ok(NonZeroU8::new(match c { + '"' => b'"', + 'n' => b'\n', + 'r' => b'\r', + 't' => b'\t', + '\\' => b'\\', + '\'' => b'\'', + _ => Err(c)?, + }) + .unwrap()) +} + +/// Interpret a hexadecimal escape +/// +/// Parses the two hexadecimal characters of a hexadecimal escape without the leading r"\x". +#[inline] // single use in Unescape::unescape_1 +fn hex_escape(chars: &mut impl Iterator<Item = char>) -> Result<u8, EscapeError> { + let hi = chars.next().ok_or(EscapeError::TooShortHexEscape)?; + let hi = hi.to_digit(16).ok_or(EscapeError::InvalidCharInHexEscape)?; + + let lo = chars.next().ok_or(EscapeError::TooShortHexEscape)?; + let lo = lo.to_digit(16).ok_or(EscapeError::InvalidCharInHexEscape)?; + + Ok((hi * 16 + lo) as u8) +} + +/// Interpret a unicode escape +/// +/// Parse the braces with hexadecimal characters (and underscores) part of a unicode escape. +/// This r"{...}" normally comes after r"\u" and cannot start with an underscore. 
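// Usage sketch, not part of the patch: unescape_str drives the machinery above over the contents
// of a string literal, reporting each unit through the callback — the same shape Literal::str_value
// uses earlier in this patch. Assumes the upstream rustc-literal-escaper crate with the API shown
// in this vendored copy.
use rustc_literal_escaper::unescape_str;

fn main() {
    let quoted = r"tab:\t crab:\u{1F980}";
    let mut value = String::new();
    let mut saw_fatal_error = false;
    unescape_str(quoted, |_range, result| match result {
        Ok(ch) => value.push(ch),
        Err(error) => saw_fatal_error |= error.is_fatal(),
    });
    assert!(!saw_fatal_error);
    assert_eq!(value, "tab:\t crab:\u{1F980}");
}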
+#[inline] // single use in Unescape::unescape_1 +fn unicode_escape(chars: &mut impl Iterator<Item = char>) -> Result<u32, EscapeError> { + if chars.next() != Some('{') { + return Err(EscapeError::NoBraceInUnicodeEscape); + } + + // First character must be a hexadecimal digit. + let mut value: u32 = match chars.next().ok_or(EscapeError::UnclosedUnicodeEscape)? { + '_' => return Err(EscapeError::LeadingUnderscoreUnicodeEscape), + '}' => return Err(EscapeError::EmptyUnicodeEscape), + c => c + .to_digit(16) + .ok_or(EscapeError::InvalidCharInUnicodeEscape)?, + }; + + // First character is valid, now parse the rest of the number + // and closing brace. + let mut n_digits = 1; + loop { + match chars.next() { + None => return Err(EscapeError::UnclosedUnicodeEscape), + Some('_') => continue, + Some('}') => { + // Incorrect syntax has higher priority for error reporting + // than unallowed value for a literal. + return if n_digits > 6 { + Err(EscapeError::OverlongUnicodeEscape) + } else { + Ok(value) + }; + } + Some(c) => { + let digit: u32 = c + .to_digit(16) + .ok_or(EscapeError::InvalidCharInUnicodeEscape)?; + n_digits += 1; + if n_digits > 6 { + // Stop updating value since we're sure that it's incorrect already. + continue; + } + value = value * 16 + digit; + } + }; + } +} + +/// Interpret a string continuation escape (https://doc.rust-lang.org/reference/expressions/literal-expr.html#string-continuation-escapes) +/// +/// Skip ASCII whitespace, except for the formfeed character +/// (see [this issue](https://github.com/rust-lang/rust/issues/136600)). +/// Warns on unescaped newline and following non-ASCII whitespace. +#[inline] // single use in Unescape::unescape +fn skip_ascii_whitespace( + chars: &mut Chars<'_>, + start: usize, + mut callback: impl FnMut(Range<usize>, EscapeError), +) { + let rest = chars.as_str(); + let first_non_space = rest + .bytes() + .position(|b| b != b' ' && b != b'\t' && b != b'\n' && b != b'\r') + .unwrap_or(rest.len()); + let (space, rest) = rest.split_at(first_non_space); + // backslash newline adds 2 bytes + let end = start + 2 + first_non_space; + if space.contains('\n') { + callback(start..end, EscapeError::MultipleSkippedLinesWarning); + } + *chars = rest.chars(); + if let Some(c) = chars.clone().next() { + if c.is_whitespace() { + // for error reporting, include the character that was not skipped in the span + callback( + start..end + c.len_utf8(), + EscapeError::UnskippedWhitespaceWarning, + ); + } + } +} + +impl Unescape for str { + type Unit = char; + + const ZERO_RESULT: Result<Self::Unit, EscapeError> = Ok('\0'); + + #[inline] + fn nonzero_byte2unit(b: NonZeroU8) -> Self::Unit { + b.get().into() + } + + #[inline] + fn char2unit(c: char) -> Result<Self::Unit, EscapeError> { + Ok(c) + } + + #[inline] + fn hex2unit(b: u8) -> Result<Self::Unit, EscapeError> { + if b.is_ascii() { + Ok(b as char) + } else { + Err(EscapeError::OutOfRangeHexEscape) + } + } + + #[inline] + fn unicode2unit(r: Result<char, EscapeError>) -> Result<Self::Unit, EscapeError> { + r + } +} + +impl Unescape for [u8] { + type Unit = u8; + + const ZERO_RESULT: Result<Self::Unit, EscapeError> = Ok(b'\0'); + + #[inline] + fn nonzero_byte2unit(b: NonZeroU8) -> Self::Unit { + b.get() + } + + #[inline] + fn char2unit(c: char) -> Result<Self::Unit, EscapeError> { + char2byte(c) + } + + #[inline] + fn hex2unit(b: u8) -> Result<Self::Unit, EscapeError> { + Ok(b) + } + + #[inline] + fn unicode2unit(_r: Result<char, EscapeError>) -> Result<Self::Unit, EscapeError> { + 
Err(EscapeError::UnicodeEscapeInByte) + } +} + +impl Unescape for CStr { + type Unit = MixedUnit; + + const ZERO_RESULT: Result<Self::Unit, EscapeError> = Err(EscapeError::NulInCStr); + + #[inline] + fn nonzero_byte2unit(b: NonZeroU8) -> Self::Unit { + b.into() + } + + #[inline] + fn char2unit(c: char) -> Result<Self::Unit, EscapeError> { + c.try_into() + } + + #[inline] + fn hex2unit(byte: u8) -> Result<Self::Unit, EscapeError> { + byte.try_into() + } + + #[inline] + fn unicode2unit(r: Result<char, EscapeError>) -> Result<Self::Unit, EscapeError> { + Self::char2unit(r?) + } +} + +/// Enum of the different kinds of literal +#[derive(Debug, Clone, Copy, PartialEq)] +pub enum Mode { + /// `'a'` + Char, + + /// `b'a'` + Byte, + + /// `"hello"` + Str, + /// `r"hello"` + RawStr, + + /// `b"hello"` + ByteStr, + /// `br"hello"` + RawByteStr, + + /// `c"hello"` + CStr, + /// `cr"hello"` + RawCStr, +} + +impl Mode { + pub fn in_double_quotes(self) -> bool { + match self { + Mode::Str + | Mode::RawStr + | Mode::ByteStr + | Mode::RawByteStr + | Mode::CStr + | Mode::RawCStr => true, + Mode::Char | Mode::Byte => false, + } + } + + pub fn prefix_noraw(self) -> &'static str { + match self { + Mode::Char | Mode::Str | Mode::RawStr => "", + Mode::Byte | Mode::ByteStr | Mode::RawByteStr => "b", + Mode::CStr | Mode::RawCStr => "c", + } + } +} + +/// Check a literal only for errors +/// +/// Takes the contents of a literal (without quotes) +/// and produces a sequence of only errors, +/// which are returned by invoking `error_callback`. +/// +/// NB Does not produce any output other than errors +pub fn check_for_errors( + src: &str, + mode: Mode, + mut error_callback: impl FnMut(Range<usize>, EscapeError), +) { + match mode { + Mode::Char => { + let mut chars = src.chars(); + if let Err(e) = str::unescape_single(&mut chars) { + error_callback(0..(src.len() - chars.as_str().len()), e); + } + } + Mode::Byte => { + let mut chars = src.chars(); + if let Err(e) = <[u8]>::unescape_single(&mut chars) { + error_callback(0..(src.len() - chars.as_str().len()), e); + } + } + Mode::Str => unescape_str(src, |range, res| { + if let Err(e) = res { + error_callback(range, e); + } + }), + Mode::ByteStr => unescape_byte_str(src, |range, res| { + if let Err(e) = res { + error_callback(range, e); + } + }), + Mode::CStr => unescape_c_str(src, |range, res| { + if let Err(e) = res { + error_callback(range, e); + } + }), + Mode::RawStr => check_raw_str(src, |range, res| { + if let Err(e) = res { + error_callback(range, e); + } + }), + Mode::RawByteStr => check_raw_byte_str(src, |range, res| { + if let Err(e) = res { + error_callback(range, e); + } + }), + Mode::RawCStr => check_raw_c_str(src, |range, res| { + if let Err(e) = res { + error_callback(range, e); + } + }), + } +} diff --git a/vendor/proc-macro2/src/wrapper.rs b/vendor/proc-macro2/src/wrapper.rs index 860b6b77..2e3eb5b4 100644 --- a/vendor/proc-macro2/src/wrapper.rs +++ b/vendor/proc-macro2/src/wrapper.rs @@ -1,12 +1,20 @@ use crate::detection::inside_proc_macro; +use crate::fallback::{self, FromStr2 as _}; #[cfg(span_locations)] use crate::location::LineColumn; -use crate::{fallback, Delimiter, Punct, Spacing, TokenTree}; +#[cfg(proc_macro_span)] +use crate::probe::proc_macro_span; +#[cfg(all(span_locations, proc_macro_span_file))] +use crate::probe::proc_macro_span_file; +#[cfg(all(span_locations, proc_macro_span_location))] +use crate::probe::proc_macro_span_location; +use crate::{Delimiter, Punct, Spacing, TokenTree}; use core::fmt::{self, Debug, Display}; 
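// ---- Editor's note: illustrative usage sketch, not part of the vendored patch. ----
// The escaper module above exposes a callback-based API: `unescape_str` reports each
// unescaped `char` (or an `EscapeError`) together with the byte range it came from.
// Assuming that module is in scope under the hypothetical name `escaper`, a caller
// that wants the cooked string plus the spans of any errors might look roughly like:
fn cook_string(body: &str) -> (String, Vec<(std::ops::Range<usize>, escaper::EscapeError)>) {
    let mut out = String::new();
    let mut errors = Vec::new();
    escaper::unescape_str(body, |range, res| match res {
        Ok(ch) => out.push(ch),                // a successfully unescaped character
        Err(err) => errors.push((range, err)), // remember where the literal is malformed
    });
    (out, errors)
}
// e.g. cook_string(r"a\tb") yields ("a\tb".to_string(), vec![]) because `\t` unescapes to a tab.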
+#[cfg(span_locations)] +use core::ops::Range; use core::ops::RangeBounds; -use core::str::FromStr; -use std::panic; -#[cfg(super_unstable)] +use std::ffi::CStr; +#[cfg(span_locations)] use std::path::PathBuf; #[derive(Clone)] @@ -28,18 +36,23 @@ pub(crate) struct DeferredTokenStream { pub(crate) enum LexError { Compiler(proc_macro::LexError), Fallback(fallback::LexError), -} -impl LexError { - fn call_site() -> Self { - LexError::Fallback(fallback::LexError { - span: fallback::Span::call_site(), - }) - } + // Rustc was supposed to return a LexError, but it panicked instead. + // https://github.com/rust-lang/rust/issues/58736 + CompilerPanic, } -fn mismatch() -> ! { - panic!("compiler/fallback mismatch") +#[cold] +fn mismatch(line: u32) -> ! { + #[cfg(procmacro2_backtrace)] + { + let backtrace = std::backtrace::Backtrace::force_capture(); + panic!("compiler/fallback mismatch L{}\n\n{}", line, backtrace) + } + #[cfg(not(procmacro2_backtrace))] + { + panic!("compiler/fallback mismatch L{}", line) + } } impl DeferredTokenStream { @@ -70,7 +83,7 @@ impl DeferredTokenStream { } impl TokenStream { - pub fn new() -> Self { + pub(crate) fn new() -> Self { if inside_proc_macro() { TokenStream::Compiler(DeferredTokenStream::new(proc_macro::TokenStream::new())) } else { @@ -78,7 +91,19 @@ impl TokenStream { } } - pub fn is_empty(&self) -> bool { + pub(crate) fn from_str_checked(src: &str) -> Result<Self, LexError> { + if inside_proc_macro() { + Ok(TokenStream::Compiler(DeferredTokenStream::new( + proc_macro::TokenStream::from_str_checked(src)?, + ))) + } else { + Ok(TokenStream::Fallback( + fallback::TokenStream::from_str_checked(src)?, + )) + } + } + + pub(crate) fn is_empty(&self) -> bool { match self { TokenStream::Compiler(tts) => tts.is_empty(), TokenStream::Fallback(tts) => tts.is_empty(), @@ -88,38 +113,18 @@ impl TokenStream { fn unwrap_nightly(self) -> proc_macro::TokenStream { match self { TokenStream::Compiler(s) => s.into_token_stream(), - TokenStream::Fallback(_) => mismatch(), + TokenStream::Fallback(_) => mismatch(line!()), } } fn unwrap_stable(self) -> fallback::TokenStream { match self { - TokenStream::Compiler(_) => mismatch(), + TokenStream::Compiler(_) => mismatch(line!()), TokenStream::Fallback(s) => s, } } } -impl FromStr for TokenStream { - type Err = LexError; - - fn from_str(src: &str) -> Result<TokenStream, LexError> { - if inside_proc_macro() { - Ok(TokenStream::Compiler(DeferredTokenStream::new( - proc_macro_parse(src)?, - ))) - } else { - Ok(TokenStream::Fallback(src.parse()?)) - } - } -} - -// Work around https://github.com/rust-lang/rust/issues/58736. -fn proc_macro_parse(src: &str) -> Result<proc_macro::TokenStream, LexError> { - let result = panic::catch_unwind(|| src.parse().map_err(LexError::Compiler)); - result.unwrap_or_else(|_| Err(LexError::call_site())) -} - impl Display for TokenStream { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { @@ -139,7 +144,9 @@ impl From<TokenStream> for proc_macro::TokenStream { fn from(inner: TokenStream) -> Self { match inner { TokenStream::Compiler(inner) => inner.into_token_stream(), - TokenStream::Fallback(inner) => inner.to_string().parse().unwrap(), + TokenStream::Fallback(inner) => { + proc_macro::TokenStream::from_str_unchecked(&inner.to_string()) + } } } } @@ -153,7 +160,7 @@ impl From<fallback::TokenStream> for TokenStream { // Assumes inside_proc_macro(). 
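// ---- Editor's note: illustrative sketch of the pattern, not the real proc-macro2 code. ----
// wrapper.rs keeps each type as a two-variant enum: a `Compiler(...)` case backed by the
// real `proc_macro` types (usable only inside a macro invocation) and a `Fallback(...)`
// case backed by proc-macro2's own implementation. Mixing the two is a bug, so the patch
// replaces the bare `mismatch()` panic with `mismatch(line!())`, recording the call site
// (and, behind `procmacro2_backtrace`, a captured backtrace) in the panic message. Roughly:
enum Backend<C, F> {
    Compiler(C),
    Fallback(F),
}

#[cold]
fn mismatch(line: u32) -> ! {
    // `line!()` at each call site tells you *which* combination went wrong.
    panic!("compiler/fallback mismatch L{line}")
}

impl<C, F> Backend<C, F> {
    fn unwrap_compiler(self) -> C {
        match self {
            Backend::Compiler(c) => c,
            Backend::Fallback(_) => mismatch(line!()),
        }
    }
}
// This mirrors `unwrap_nightly`/`unwrap_stable` above, where the line number now
// disambiguates which of the many mismatch sites actually fired.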
fn into_compiler_token(token: TokenTree) -> proc_macro::TokenTree { match token { - TokenTree::Group(tt) => tt.inner.unwrap_nightly().into(), + TokenTree::Group(tt) => proc_macro::TokenTree::Group(tt.inner.unwrap_nightly()), TokenTree::Punct(tt) => { let spacing = match tt.spacing() { Spacing::Joint => proc_macro::Spacing::Joint, @@ -161,19 +168,21 @@ fn into_compiler_token(token: TokenTree) -> proc_macro::TokenTree { }; let mut punct = proc_macro::Punct::new(tt.as_char(), spacing); punct.set_span(tt.span().inner.unwrap_nightly()); - punct.into() + proc_macro::TokenTree::Punct(punct) } - TokenTree::Ident(tt) => tt.inner.unwrap_nightly().into(), - TokenTree::Literal(tt) => tt.inner.unwrap_nightly().into(), + TokenTree::Ident(tt) => proc_macro::TokenTree::Ident(tt.inner.unwrap_nightly()), + TokenTree::Literal(tt) => proc_macro::TokenTree::Literal(tt.inner.unwrap_nightly()), } } impl From<TokenTree> for TokenStream { fn from(token: TokenTree) -> Self { if inside_proc_macro() { - TokenStream::Compiler(DeferredTokenStream::new(into_compiler_token(token).into())) + TokenStream::Compiler(DeferredTokenStream::new(proc_macro::TokenStream::from( + into_compiler_token(token), + ))) } else { - TokenStream::Fallback(token.into()) + TokenStream::Fallback(fallback::TokenStream::from(token)) } } } @@ -198,14 +207,14 @@ impl FromIterator<TokenStream> for TokenStream { first.evaluate_now(); first.stream.extend(streams.map(|s| match s { TokenStream::Compiler(s) => s.into_token_stream(), - TokenStream::Fallback(_) => mismatch(), + TokenStream::Fallback(_) => mismatch(line!()), })); TokenStream::Compiler(first) } Some(TokenStream::Fallback(mut first)) => { first.extend(streams.map(|s| match s { TokenStream::Fallback(s) => s, - TokenStream::Compiler(_) => mismatch(), + TokenStream::Compiler(_) => mismatch(line!()), })); TokenStream::Fallback(first) } @@ -255,7 +264,7 @@ impl Debug for TokenStream { impl LexError { pub(crate) fn span(&self) -> Span { match self { - LexError::Compiler(_) => Span::call_site(), + LexError::Compiler(_) | LexError::CompilerPanic => Span::call_site(), LexError::Fallback(e) => Span::Fallback(e.span()), } } @@ -278,6 +287,10 @@ impl Debug for LexError { match self { LexError::Compiler(e) => Debug::fmt(e, f), LexError::Fallback(e) => Debug::fmt(e, f), + LexError::CompilerPanic => { + let fallback = fallback::LexError::call_site(); + Debug::fmt(&fallback, f) + } } } } @@ -287,6 +300,10 @@ impl Display for LexError { match self { LexError::Compiler(e) => Display::fmt(e, f), LexError::Fallback(e) => Display::fmt(e, f), + LexError::CompilerPanic => { + let fallback = fallback::LexError::call_site(); + Display::fmt(&fallback, f) + } } } } @@ -320,7 +337,9 @@ impl Iterator for TokenTreeIter { TokenTreeIter::Fallback(iter) => return iter.next(), }; Some(match token { - proc_macro::TokenTree::Group(tt) => crate::Group::_new(Group::Compiler(tt)).into(), + proc_macro::TokenTree::Group(tt) => { + TokenTree::Group(crate::Group::_new(Group::Compiler(tt))) + } proc_macro::TokenTree::Punct(tt) => { let spacing = match tt.spacing() { proc_macro::Spacing::Joint => Spacing::Joint, @@ -328,10 +347,14 @@ impl Iterator for TokenTreeIter { }; let mut o = Punct::new(tt.as_char(), spacing); o.set_span(crate::Span::_new(Span::Compiler(tt.span()))); - o.into() + TokenTree::Punct(o) + } + proc_macro::TokenTree::Ident(s) => { + TokenTree::Ident(crate::Ident::_new(Ident::Compiler(s))) + } + proc_macro::TokenTree::Literal(l) => { + TokenTree::Literal(crate::Literal::_new(Literal::Compiler(l))) } - 
proc_macro::TokenTree::Ident(s) => crate::Ident::_new(Ident::Compiler(s)).into(), - proc_macro::TokenTree::Literal(l) => crate::Literal::_new(Literal::Compiler(l)).into(), }) } @@ -343,45 +366,6 @@ impl Iterator for TokenTreeIter { } } -#[derive(Clone, PartialEq, Eq)] -#[cfg(super_unstable)] -pub(crate) enum SourceFile { - Compiler(proc_macro::SourceFile), - Fallback(fallback::SourceFile), -} - -#[cfg(super_unstable)] -impl SourceFile { - fn nightly(sf: proc_macro::SourceFile) -> Self { - SourceFile::Compiler(sf) - } - - /// Get the path to this source file as a string. - pub fn path(&self) -> PathBuf { - match self { - SourceFile::Compiler(a) => a.path(), - SourceFile::Fallback(a) => a.path(), - } - } - - pub fn is_real(&self) -> bool { - match self { - SourceFile::Compiler(a) => a.is_real(), - SourceFile::Fallback(a) => a.is_real(), - } - } -} - -#[cfg(super_unstable)] -impl Debug for SourceFile { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - SourceFile::Compiler(a) => Debug::fmt(a, f), - SourceFile::Fallback(a) => Debug::fmt(a, f), - } - } -} - #[derive(Copy, Clone)] pub(crate) enum Span { Compiler(proc_macro::Span), @@ -389,7 +373,7 @@ pub(crate) enum Span { } impl Span { - pub fn call_site() -> Self { + pub(crate) fn call_site() -> Self { if inside_proc_macro() { Span::Compiler(proc_macro::Span::call_site()) } else { @@ -397,7 +381,7 @@ impl Span { } } - pub fn mixed_site() -> Self { + pub(crate) fn mixed_site() -> Self { if inside_proc_macro() { Span::Compiler(proc_macro::Span::mixed_site()) } else { @@ -406,7 +390,7 @@ impl Span { } #[cfg(super_unstable)] - pub fn def_site() -> Self { + pub(crate) fn def_site() -> Self { if inside_proc_macro() { Span::Compiler(proc_macro::Span::def_site()) } else { @@ -414,57 +398,99 @@ impl Span { } } - pub fn resolved_at(&self, other: Span) -> Span { + pub(crate) fn resolved_at(&self, other: Span) -> Span { match (self, other) { (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.resolved_at(b)), (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.resolved_at(b)), - _ => mismatch(), + (Span::Compiler(_), Span::Fallback(_)) => mismatch(line!()), + (Span::Fallback(_), Span::Compiler(_)) => mismatch(line!()), } } - pub fn located_at(&self, other: Span) -> Span { + pub(crate) fn located_at(&self, other: Span) -> Span { match (self, other) { (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.located_at(b)), (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.located_at(b)), - _ => mismatch(), + (Span::Compiler(_), Span::Fallback(_)) => mismatch(line!()), + (Span::Fallback(_), Span::Compiler(_)) => mismatch(line!()), } } - pub fn unwrap(self) -> proc_macro::Span { + pub(crate) fn unwrap(self) -> proc_macro::Span { match self { Span::Compiler(s) => s, Span::Fallback(_) => panic!("proc_macro::Span is only available in procedural macros"), } } - #[cfg(super_unstable)] - pub fn source_file(&self) -> SourceFile { + #[cfg(span_locations)] + pub(crate) fn byte_range(&self) -> Range<usize> { match self { - Span::Compiler(s) => SourceFile::nightly(s.source_file()), - Span::Fallback(s) => SourceFile::Fallback(s.source_file()), + #[cfg(proc_macro_span)] + Span::Compiler(s) => proc_macro_span::byte_range(s), + #[cfg(not(proc_macro_span))] + Span::Compiler(_) => 0..0, + Span::Fallback(s) => s.byte_range(), } } #[cfg(span_locations)] - pub fn start(&self) -> LineColumn { + pub(crate) fn start(&self) -> LineColumn { match self { + #[cfg(proc_macro_span_location)] + Span::Compiler(s) => LineColumn { + line: 
proc_macro_span_location::line(s), + column: proc_macro_span_location::column(s).saturating_sub(1), + }, + #[cfg(not(proc_macro_span_location))] Span::Compiler(_) => LineColumn { line: 0, column: 0 }, Span::Fallback(s) => s.start(), } } #[cfg(span_locations)] - pub fn end(&self) -> LineColumn { - match self { + pub(crate) fn end(&self) -> LineColumn { + match self { + #[cfg(proc_macro_span_location)] + Span::Compiler(s) => { + let end = proc_macro_span_location::end(s); + LineColumn { + line: proc_macro_span_location::line(&end), + column: proc_macro_span_location::column(&end).saturating_sub(1), + } + } + #[cfg(not(proc_macro_span_location))] Span::Compiler(_) => LineColumn { line: 0, column: 0 }, Span::Fallback(s) => s.end(), } } - pub fn join(&self, other: Span) -> Option<Span> { + #[cfg(span_locations)] + pub(crate) fn file(&self) -> String { + match self { + #[cfg(proc_macro_span_file)] + Span::Compiler(s) => proc_macro_span_file::file(s), + #[cfg(not(proc_macro_span_file))] + Span::Compiler(_) => "<token stream>".to_owned(), + Span::Fallback(s) => s.file(), + } + } + + #[cfg(span_locations)] + pub(crate) fn local_file(&self) -> Option<PathBuf> { + match self { + #[cfg(proc_macro_span_file)] + Span::Compiler(s) => proc_macro_span_file::local_file(s), + #[cfg(not(proc_macro_span_file))] + Span::Compiler(_) => None, + Span::Fallback(s) => s.local_file(), + } + } + + pub(crate) fn join(&self, other: Span) -> Option<Span> { let ret = match (self, other) { #[cfg(proc_macro_span)] - (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.join(b)?), + (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(proc_macro_span::join(a, b)?), (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.join(b)?), _ => return None, }; @@ -472,7 +498,7 @@ impl Span { } #[cfg(super_unstable)] - pub fn eq(&self, other: &Span) -> bool { + pub(crate) fn eq(&self, other: &Span) -> bool { match (self, other) { (Span::Compiler(a), Span::Compiler(b)) => a.eq(b), (Span::Fallback(a), Span::Fallback(b)) => a.eq(b), @@ -480,7 +506,7 @@ impl Span { } } - pub fn source_text(&self) -> Option<String> { + pub(crate) fn source_text(&self) -> Option<String> { match self { #[cfg(not(no_source_text))] Span::Compiler(s) => s.source_text(), @@ -493,7 +519,7 @@ impl Span { fn unwrap_nightly(self) -> proc_macro::Span { match self { Span::Compiler(s) => s, - Span::Fallback(_) => mismatch(), + Span::Fallback(_) => mismatch(line!()), } } } @@ -535,7 +561,7 @@ pub(crate) enum Group { } impl Group { - pub fn new(delimiter: Delimiter, stream: TokenStream) -> Self { + pub(crate) fn new(delimiter: Delimiter, stream: TokenStream) -> Self { match stream { TokenStream::Compiler(tts) => { let delimiter = match delimiter { @@ -552,7 +578,7 @@ impl Group { } } - pub fn delimiter(&self) -> Delimiter { + pub(crate) fn delimiter(&self) -> Delimiter { match self { Group::Compiler(g) => match g.delimiter() { proc_macro::Delimiter::Parenthesis => Delimiter::Parenthesis, @@ -564,46 +590,47 @@ impl Group { } } - pub fn stream(&self) -> TokenStream { + pub(crate) fn stream(&self) -> TokenStream { match self { Group::Compiler(g) => TokenStream::Compiler(DeferredTokenStream::new(g.stream())), Group::Fallback(g) => TokenStream::Fallback(g.stream()), } } - pub fn span(&self) -> Span { + pub(crate) fn span(&self) -> Span { match self { Group::Compiler(g) => Span::Compiler(g.span()), Group::Fallback(g) => Span::Fallback(g.span()), } } - pub fn span_open(&self) -> Span { + pub(crate) fn span_open(&self) -> Span { match self { Group::Compiler(g) => 
Span::Compiler(g.span_open()), Group::Fallback(g) => Span::Fallback(g.span_open()), } } - pub fn span_close(&self) -> Span { + pub(crate) fn span_close(&self) -> Span { match self { Group::Compiler(g) => Span::Compiler(g.span_close()), Group::Fallback(g) => Span::Fallback(g.span_close()), } } - pub fn set_span(&mut self, span: Span) { + pub(crate) fn set_span(&mut self, span: Span) { match (self, span) { (Group::Compiler(g), Span::Compiler(s)) => g.set_span(s), (Group::Fallback(g), Span::Fallback(s)) => g.set_span(s), - _ => mismatch(), + (Group::Compiler(_), Span::Fallback(_)) => mismatch(line!()), + (Group::Fallback(_), Span::Compiler(_)) => mismatch(line!()), } } fn unwrap_nightly(self) -> proc_macro::Group { match self { Group::Compiler(g) => g, - Group::Fallback(_) => mismatch(), + Group::Fallback(_) => mismatch(line!()), } } } @@ -639,49 +666,59 @@ pub(crate) enum Ident { } impl Ident { - pub fn new(string: &str, span: Span) -> Self { + #[track_caller] + pub(crate) fn new_checked(string: &str, span: Span) -> Self { match span { Span::Compiler(s) => Ident::Compiler(proc_macro::Ident::new(string, s)), - Span::Fallback(s) => Ident::Fallback(fallback::Ident::new(string, s)), + Span::Fallback(s) => Ident::Fallback(fallback::Ident::new_checked(string, s)), } } - pub fn new_raw(string: &str, span: Span) -> Self { + #[track_caller] + pub(crate) fn new_raw_checked(string: &str, span: Span) -> Self { match span { Span::Compiler(s) => Ident::Compiler(proc_macro::Ident::new_raw(string, s)), - Span::Fallback(s) => Ident::Fallback(fallback::Ident::new_raw(string, s)), + Span::Fallback(s) => Ident::Fallback(fallback::Ident::new_raw_checked(string, s)), } } - pub fn span(&self) -> Span { + pub(crate) fn span(&self) -> Span { match self { Ident::Compiler(t) => Span::Compiler(t.span()), Ident::Fallback(t) => Span::Fallback(t.span()), } } - pub fn set_span(&mut self, span: Span) { + pub(crate) fn set_span(&mut self, span: Span) { match (self, span) { (Ident::Compiler(t), Span::Compiler(s)) => t.set_span(s), (Ident::Fallback(t), Span::Fallback(s)) => t.set_span(s), - _ => mismatch(), + (Ident::Compiler(_), Span::Fallback(_)) => mismatch(line!()), + (Ident::Fallback(_), Span::Compiler(_)) => mismatch(line!()), } } fn unwrap_nightly(self) -> proc_macro::Ident { match self { Ident::Compiler(s) => s, - Ident::Fallback(_) => mismatch(), + Ident::Fallback(_) => mismatch(line!()), } } } +impl From<fallback::Ident> for Ident { + fn from(inner: fallback::Ident) -> Self { + Ident::Fallback(inner) + } +} + impl PartialEq for Ident { fn eq(&self, other: &Ident) -> bool { match (self, other) { (Ident::Compiler(t), Ident::Compiler(o)) => t.to_string() == o.to_string(), (Ident::Fallback(t), Ident::Fallback(o)) => t == o, - _ => mismatch(), + (Ident::Compiler(_), Ident::Fallback(_)) => mismatch(line!()), + (Ident::Fallback(_), Ident::Compiler(_)) => mismatch(line!()), } } } @@ -725,7 +762,7 @@ pub(crate) enum Literal { macro_rules! suffixed_numbers { ($($name:ident => $kind:ident,)*) => ($( - pub fn $name(n: $kind) -> Literal { + pub(crate) fn $name(n: $kind) -> Literal { if inside_proc_macro() { Literal::Compiler(proc_macro::Literal::$name(n)) } else { @@ -737,7 +774,7 @@ macro_rules! suffixed_numbers { macro_rules! unsuffixed_integers { ($($name:ident => $kind:ident,)*) => ($( - pub fn $name(n: $kind) -> Literal { + pub(crate) fn $name(n: $kind) -> Literal { if inside_proc_macro() { Literal::Compiler(proc_macro::Literal::$name(n)) } else { @@ -748,11 +785,21 @@ macro_rules! 
unsuffixed_integers { } impl Literal { - pub unsafe fn from_str_unchecked(repr: &str) -> Self { + pub(crate) fn from_str_checked(repr: &str) -> Result<Self, LexError> { + if inside_proc_macro() { + let literal = proc_macro::Literal::from_str_checked(repr)?; + Ok(Literal::Compiler(literal)) + } else { + let literal = fallback::Literal::from_str_checked(repr)?; + Ok(Literal::Fallback(literal)) + } + } + + pub(crate) unsafe fn from_str_unchecked(repr: &str) -> Self { if inside_proc_macro() { - Literal::Compiler(proc_macro::Literal::from_str(repr).expect("invalid literal")) + Literal::Compiler(proc_macro::Literal::from_str_unchecked(repr)) } else { - Literal::Fallback(fallback::Literal::from_str_unchecked(repr)) + Literal::Fallback(unsafe { fallback::Literal::from_str_unchecked(repr) }) } } @@ -789,7 +836,7 @@ impl Literal { isize_unsuffixed => isize, } - pub fn f32_unsuffixed(f: f32) -> Literal { + pub(crate) fn f32_unsuffixed(f: f32) -> Literal { if inside_proc_macro() { Literal::Compiler(proc_macro::Literal::f32_unsuffixed(f)) } else { @@ -797,7 +844,7 @@ impl Literal { } } - pub fn f64_unsuffixed(f: f64) -> Literal { + pub(crate) fn f64_unsuffixed(f: f64) -> Literal { if inside_proc_macro() { Literal::Compiler(proc_macro::Literal::f64_unsuffixed(f)) } else { @@ -805,23 +852,42 @@ impl Literal { } } - pub fn string(t: &str) -> Literal { + pub(crate) fn string(string: &str) -> Literal { if inside_proc_macro() { - Literal::Compiler(proc_macro::Literal::string(t)) + Literal::Compiler(proc_macro::Literal::string(string)) } else { - Literal::Fallback(fallback::Literal::string(t)) + Literal::Fallback(fallback::Literal::string(string)) } } - pub fn character(t: char) -> Literal { + pub(crate) fn character(ch: char) -> Literal { if inside_proc_macro() { - Literal::Compiler(proc_macro::Literal::character(t)) + Literal::Compiler(proc_macro::Literal::character(ch)) } else { - Literal::Fallback(fallback::Literal::character(t)) + Literal::Fallback(fallback::Literal::character(ch)) } } - pub fn byte_string(bytes: &[u8]) -> Literal { + pub(crate) fn byte_character(byte: u8) -> Literal { + if inside_proc_macro() { + Literal::Compiler({ + #[cfg(not(no_literal_byte_character))] + { + proc_macro::Literal::byte_character(byte) + } + + #[cfg(no_literal_byte_character)] + { + let fallback = fallback::Literal::byte_character(byte); + proc_macro::Literal::from_str_unchecked(&fallback.repr) + } + }) + } else { + Literal::Fallback(fallback::Literal::byte_character(byte)) + } + } + + pub(crate) fn byte_string(bytes: &[u8]) -> Literal { if inside_proc_macro() { Literal::Compiler(proc_macro::Literal::byte_string(bytes)) } else { @@ -829,25 +895,45 @@ impl Literal { } } - pub fn span(&self) -> Span { + pub(crate) fn c_string(string: &CStr) -> Literal { + if inside_proc_macro() { + Literal::Compiler({ + #[cfg(not(no_literal_c_string))] + { + proc_macro::Literal::c_string(string) + } + + #[cfg(no_literal_c_string)] + { + let fallback = fallback::Literal::c_string(string); + proc_macro::Literal::from_str_unchecked(&fallback.repr) + } + }) + } else { + Literal::Fallback(fallback::Literal::c_string(string)) + } + } + + pub(crate) fn span(&self) -> Span { match self { Literal::Compiler(lit) => Span::Compiler(lit.span()), Literal::Fallback(lit) => Span::Fallback(lit.span()), } } - pub fn set_span(&mut self, span: Span) { + pub(crate) fn set_span(&mut self, span: Span) { match (self, span) { (Literal::Compiler(lit), Span::Compiler(s)) => lit.set_span(s), (Literal::Fallback(lit), Span::Fallback(s)) => lit.set_span(s), - _ => 
mismatch(), + (Literal::Compiler(_), Span::Fallback(_)) => mismatch(line!()), + (Literal::Fallback(_), Span::Compiler(_)) => mismatch(line!()), } } - pub fn subspan<R: RangeBounds<usize>>(&self, range: R) -> Option<Span> { + pub(crate) fn subspan<R: RangeBounds<usize>>(&self, range: R) -> Option<Span> { match self { #[cfg(proc_macro_span)] - Literal::Compiler(lit) => lit.subspan(range).map(Span::Compiler), + Literal::Compiler(lit) => proc_macro_span::subspan(lit, range).map(Span::Compiler), #[cfg(not(proc_macro_span))] Literal::Compiler(_lit) => None, Literal::Fallback(lit) => lit.subspan(range).map(Span::Fallback), @@ -857,7 +943,7 @@ impl Literal { fn unwrap_nightly(self) -> proc_macro::Literal { match self { Literal::Compiler(s) => s, - Literal::Fallback(_) => mismatch(), + Literal::Fallback(_) => mismatch(line!()), } } } @@ -868,20 +954,6 @@ impl From<fallback::Literal> for Literal { } } -impl FromStr for Literal { - type Err = LexError; - - fn from_str(repr: &str) -> Result<Self, Self::Err> { - if inside_proc_macro() { - let literal = proc_macro::Literal::from_str(repr)?; - Ok(Literal::Compiler(literal)) - } else { - let literal = fallback::Literal::from_str(repr)?; - Ok(Literal::Fallback(literal)) - } - } -} - impl Display for Literal { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { @@ -899,3 +971,14 @@ impl Debug for Literal { } } } + +#[cfg(span_locations)] +pub(crate) fn invalidate_current_thread_spans() { + if inside_proc_macro() { + panic!( + "proc_macro2::extra::invalidate_current_thread_spans is not available in procedural macros" + ); + } else { + crate::fallback::invalidate_current_thread_spans(); + } +} diff --git a/vendor/proc-macro2/tests/comments.rs b/vendor/proc-macro2/tests/comments.rs index 4f7236de..34951f7f 100644 --- a/vendor/proc-macro2/tests/comments.rs +++ b/vendor/proc-macro2/tests/comments.rs @@ -1,4 +1,4 @@ -#![allow(clippy::assertions_on_result_states)] +#![allow(clippy::assertions_on_result_states, clippy::uninlined_format_args)] use proc_macro2::{Delimiter, Literal, Spacing, TokenStream, TokenTree}; diff --git a/vendor/proc-macro2/tests/features.rs b/vendor/proc-macro2/tests/features.rs index 073f6e60..ea1704d9 100644 --- a/vendor/proc-macro2/tests/features.rs +++ b/vendor/proc-macro2/tests/features.rs @@ -1,3 +1,5 @@ +#![allow(clippy::assertions_on_constants, clippy::ignore_without_reason)] + #[test] #[ignore] fn make_sure_no_proc_macro() { diff --git a/vendor/proc-macro2/tests/marker.rs b/vendor/proc-macro2/tests/marker.rs index d08fbfc1..af8932a1 100644 --- a/vendor/proc-macro2/tests/marker.rs +++ b/vendor/proc-macro2/tests/marker.rs @@ -21,6 +21,7 @@ macro_rules! assert_impl { $( { // Implemented for types that implement $marker. + #[allow(dead_code)] trait IsNotImplemented { fn assert_not_implemented() {} } @@ -55,19 +56,17 @@ assert_impl!(TokenTree is not Send or Sync); #[cfg(procmacro2_semver_exempt)] mod semver_exempt { - use proc_macro2::{LineColumn, SourceFile}; + use proc_macro2::LineColumn; assert_impl!(LineColumn is Send and Sync); - - assert_impl!(SourceFile is not Send or Sync); } mod unwind_safe { + #[cfg(procmacro2_semver_exempt)] + use proc_macro2::LineColumn; use proc_macro2::{ Delimiter, Group, Ident, LexError, Literal, Punct, Spacing, Span, TokenStream, TokenTree, }; - #[cfg(procmacro2_semver_exempt)] - use proc_macro2::{LineColumn, SourceFile}; use std::panic::{RefUnwindSafe, UnwindSafe}; macro_rules! assert_unwind_safe { @@ -94,6 +93,5 @@ mod unwind_safe { #[cfg(procmacro2_semver_exempt)] assert_unwind_safe! 
{ LineColumn - SourceFile } } diff --git a/vendor/proc-macro2/tests/test.rs b/vendor/proc-macro2/tests/test.rs index b75cd552..a9272716 100644 --- a/vendor/proc-macro2/tests/test.rs +++ b/vendor/proc-macro2/tests/test.rs @@ -1,11 +1,15 @@ #![allow( clippy::assertions_on_result_states, clippy::items_after_statements, + clippy::needless_pass_by_value, + clippy::needless_raw_string_hashes, clippy::non_ascii_literal, - clippy::octal_escapes + clippy::octal_escapes, + clippy::uninlined_format_args )] use proc_macro2::{Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree}; +use std::ffi::CStr; use std::iter; use std::str::{self, FromStr}; @@ -96,12 +100,22 @@ fn lifetime_invalid() { #[test] fn literal_string() { - assert_eq!(Literal::string("foo").to_string(), "\"foo\""); - assert_eq!(Literal::string("\"").to_string(), "\"\\\"\""); - assert_eq!(Literal::string("didn't").to_string(), "\"didn't\""); - assert_eq!( - Literal::string("a\00b\07c\08d\0e\0").to_string(), - "\"a\\x000b\\x007c\\08d\\0e\\0\"", + #[track_caller] + fn assert(literal: Literal, expected: &str) { + assert_eq!(literal.to_string(), expected.trim()); + } + + assert(Literal::string(""), r#" "" "#); + assert(Literal::string("aA"), r#" "aA" "#); + assert(Literal::string("\t"), r#" "\t" "#); + assert(Literal::string("❤"), r#" "❤" "#); + assert(Literal::string("'"), r#" "'" "#); + assert(Literal::string("\""), r#" "\"" "#); + assert(Literal::string("\0"), r#" "\0" "#); + assert(Literal::string("\u{1}"), r#" "\u{1}" "#); + assert( + Literal::string("a\00b\07c\08d\0e\0"), + r#" "a\x000b\x007c\08d\0e\0" "#, ); "\"\\\r\n x\"".parse::<TokenStream>().unwrap(); @@ -132,16 +146,67 @@ fn literal_raw_string() { .unwrap_err(); } +#[cfg(procmacro2_semver_exempt)] +#[test] +fn literal_string_value() { + for string in ["", "...", "...\t...", "...\\...", "...\0...", "...\u{1}..."] { + assert_eq!(string, Literal::string(string).str_value().unwrap()); + assert_eq!( + string, + format!("r\"{string}\"") + .parse::<Literal>() + .unwrap() + .str_value() + .unwrap(), + ); + assert_eq!( + string, + format!("r##\"{string}\"##") + .parse::<Literal>() + .unwrap() + .str_value() + .unwrap(), + ); + } +} + +#[test] +fn literal_byte_character() { + #[track_caller] + fn assert(literal: Literal, expected: &str) { + assert_eq!(literal.to_string(), expected.trim()); + } + + assert(Literal::byte_character(b'a'), r#" b'a' "#); + assert(Literal::byte_character(b'\0'), r#" b'\0' "#); + assert(Literal::byte_character(b'\t'), r#" b'\t' "#); + assert(Literal::byte_character(b'\n'), r#" b'\n' "#); + assert(Literal::byte_character(b'\r'), r#" b'\r' "#); + assert(Literal::byte_character(b'\''), r#" b'\'' "#); + assert(Literal::byte_character(b'\\'), r#" b'\\' "#); + assert(Literal::byte_character(b'\x1f'), r#" b'\x1F' "#); + assert(Literal::byte_character(b'"'), r#" b'"' "#); +} + #[test] fn literal_byte_string() { - assert_eq!(Literal::byte_string(b"").to_string(), "b\"\""); - assert_eq!( - Literal::byte_string(b"\0\t\n\r\"\\2\x10").to_string(), - "b\"\\0\\t\\n\\r\\\"\\\\2\\x10\"", - ); - assert_eq!( - Literal::byte_string(b"a\00b\07c\08d\0e\0").to_string(), - "b\"a\\x000b\\x007c\\08d\\0e\\0\"", + #[track_caller] + fn assert(literal: Literal, expected: &str) { + assert_eq!(literal.to_string(), expected.trim()); + } + + assert(Literal::byte_string(b""), r#" b"" "#); + assert(Literal::byte_string(b"\0"), r#" b"\0" "#); + assert(Literal::byte_string(b"\t"), r#" b"\t" "#); + assert(Literal::byte_string(b"\n"), r#" b"\n" "#); + assert(Literal::byte_string(b"\r"), r#" b"\r" 
"#); + assert(Literal::byte_string(b"\""), r#" b"\"" "#); + assert(Literal::byte_string(b"\\"), r#" b"\\" "#); + assert(Literal::byte_string(b"\x1f"), r#" b"\x1F" "#); + assert(Literal::byte_string(b"'"), r#" b"'" "#); + assert( + Literal::byte_string(b"a\00b\07c\08d\0e\0"), + r#" b"a\x000b\x007c\08d\0e\0" "#, ); "b\"\\\r\n x\"".parse::<TokenStream>().unwrap(); @@ -150,8 +215,79 @@ fn literal_byte_string() { "br\"\u{a0}\"".parse::<TokenStream>().unwrap_err(); } +#[cfg(procmacro2_semver_exempt)] +#[test] +fn literal_byte_string_value() { + for bytestr in [ + &b""[..], + b"...", + b"...\t...", + b"...\\...", + b"...\0...", + b"...\xF0...", + ] { + assert_eq!( + bytestr, + Literal::byte_string(bytestr).byte_str_value().unwrap(), + ); + if let Ok(string) = str::from_utf8(bytestr) { + assert_eq!( + bytestr, + format!("br\"{string}\"") + .parse::<Literal>() + .unwrap() + .byte_str_value() + .unwrap(), + ); + assert_eq!( + bytestr, + format!("br##\"{string}\"##") + .parse::<Literal>() + .unwrap() + .byte_str_value() + .unwrap(), + ); + } + } +} + #[test] fn literal_c_string() { + #[track_caller] + fn assert(literal: Literal, expected: &str) { + assert_eq!(literal.to_string(), expected.trim()); + } + + assert(Literal::c_string(<&CStr>::default()), r#" c"" "#); + assert( + Literal::c_string(CStr::from_bytes_with_nul(b"aA\0").unwrap()), + r#" c"aA" "#, + ); + assert( + Literal::c_string(CStr::from_bytes_with_nul(b"aA\0").unwrap()), + r#" c"aA" "#, + ); + assert( + Literal::c_string(CStr::from_bytes_with_nul(b"\t\0").unwrap()), + r#" c"\t" "#, + ); + assert( + Literal::c_string(CStr::from_bytes_with_nul(b"\xE2\x9D\xA4\0").unwrap()), + r#" c"❤" "#, + ); + assert( + Literal::c_string(CStr::from_bytes_with_nul(b"'\0").unwrap()), + r#" c"'" "#, + ); + assert( + Literal::c_string(CStr::from_bytes_with_nul(b"\"\0").unwrap()), + r#" c"\"" "#, + ); + assert( + Literal::c_string(CStr::from_bytes_with_nul(b"\x7F\xFF\xFE\xCC\xB3\0").unwrap()), + r#" c"\u{7f}\xFF\xFE\u{333}" "#, + ); + let strings = r###" c"hello\x80我叫\u{1F980}" // from the RFC cr"\" @@ -186,51 +322,118 @@ fn literal_c_string() { } } +#[cfg(procmacro2_semver_exempt)] +#[test] +fn literal_c_string_value() { + for cstr in [ + c"", + c"...", + c"...\t...", + c"...\\...", + c"...\u{1}...", + c"...\xF0...", + ] { + assert_eq!( + cstr.to_bytes_with_nul(), + Literal::c_string(cstr).cstr_value().unwrap(), + ); + if let Ok(string) = cstr.to_str() { + assert_eq!( + cstr.to_bytes_with_nul(), + format!("cr\"{string}\"") + .parse::<Literal>() + .unwrap() + .cstr_value() + .unwrap(), + ); + assert_eq!( + cstr.to_bytes_with_nul(), + format!("cr##\"{string}\"##") + .parse::<Literal>() + .unwrap() + .cstr_value() + .unwrap(), + ); + } + } +} + #[test] fn literal_character() { - assert_eq!(Literal::character('x').to_string(), "'x'"); - assert_eq!(Literal::character('\'').to_string(), "'\\''"); - assert_eq!(Literal::character('"').to_string(), "'\"'"); + #[track_caller] + fn assert(literal: Literal, expected: &str) { + assert_eq!(literal.to_string(), expected.trim()); + } + + assert(Literal::character('a'), r#" 'a' "#); + assert(Literal::character('\t'), r#" '\t' "#); + assert(Literal::character('❤'), r#" '❤' "#); + assert(Literal::character('\''), r#" '\'' "#); + assert(Literal::character('"'), r#" '"' "#); + assert(Literal::character('\0'), r#" '\0' "#); + assert(Literal::character('\u{1}'), r#" '\u{1}' "#); } #[test] fn literal_integer() { - assert_eq!(Literal::u8_suffixed(10).to_string(), "10u8"); - assert_eq!(Literal::u16_suffixed(10).to_string(), "10u16"); - 
assert_eq!(Literal::u32_suffixed(10).to_string(), "10u32"); - assert_eq!(Literal::u64_suffixed(10).to_string(), "10u64"); - assert_eq!(Literal::u128_suffixed(10).to_string(), "10u128"); - assert_eq!(Literal::usize_suffixed(10).to_string(), "10usize"); - - assert_eq!(Literal::i8_suffixed(10).to_string(), "10i8"); - assert_eq!(Literal::i16_suffixed(10).to_string(), "10i16"); - assert_eq!(Literal::i32_suffixed(10).to_string(), "10i32"); - assert_eq!(Literal::i64_suffixed(10).to_string(), "10i64"); - assert_eq!(Literal::i128_suffixed(10).to_string(), "10i128"); - assert_eq!(Literal::isize_suffixed(10).to_string(), "10isize"); - - assert_eq!(Literal::u8_unsuffixed(10).to_string(), "10"); - assert_eq!(Literal::u16_unsuffixed(10).to_string(), "10"); - assert_eq!(Literal::u32_unsuffixed(10).to_string(), "10"); - assert_eq!(Literal::u64_unsuffixed(10).to_string(), "10"); - assert_eq!(Literal::u128_unsuffixed(10).to_string(), "10"); - assert_eq!(Literal::usize_unsuffixed(10).to_string(), "10"); - - assert_eq!(Literal::i8_unsuffixed(10).to_string(), "10"); - assert_eq!(Literal::i16_unsuffixed(10).to_string(), "10"); - assert_eq!(Literal::i32_unsuffixed(10).to_string(), "10"); - assert_eq!(Literal::i64_unsuffixed(10).to_string(), "10"); - assert_eq!(Literal::i128_unsuffixed(10).to_string(), "10"); - assert_eq!(Literal::isize_unsuffixed(10).to_string(), "10"); + #[track_caller] + fn assert(literal: Literal, expected: &str) { + assert_eq!(literal.to_string(), expected); + } + + assert(Literal::u8_suffixed(10), "10u8"); + assert(Literal::u16_suffixed(10), "10u16"); + assert(Literal::u32_suffixed(10), "10u32"); + assert(Literal::u64_suffixed(10), "10u64"); + assert(Literal::u128_suffixed(10), "10u128"); + assert(Literal::usize_suffixed(10), "10usize"); + + assert(Literal::i8_suffixed(10), "10i8"); + assert(Literal::i16_suffixed(10), "10i16"); + assert(Literal::i32_suffixed(10), "10i32"); + assert(Literal::i64_suffixed(10), "10i64"); + assert(Literal::i128_suffixed(10), "10i128"); + assert(Literal::isize_suffixed(10), "10isize"); + + assert(Literal::u8_unsuffixed(10), "10"); + assert(Literal::u16_unsuffixed(10), "10"); + assert(Literal::u32_unsuffixed(10), "10"); + assert(Literal::u64_unsuffixed(10), "10"); + assert(Literal::u128_unsuffixed(10), "10"); + assert(Literal::usize_unsuffixed(10), "10"); + + assert(Literal::i8_unsuffixed(10), "10"); + assert(Literal::i16_unsuffixed(10), "10"); + assert(Literal::i32_unsuffixed(10), "10"); + assert(Literal::i64_unsuffixed(10), "10"); + assert(Literal::i128_unsuffixed(10), "10"); + assert(Literal::isize_unsuffixed(10), "10"); + + assert(Literal::i32_suffixed(-10), "-10i32"); + assert(Literal::i32_unsuffixed(-10), "-10"); } #[test] fn literal_float() { - assert_eq!(Literal::f32_suffixed(10.0).to_string(), "10f32"); - assert_eq!(Literal::f64_suffixed(10.0).to_string(), "10f64"); + #[track_caller] + fn assert(literal: Literal, expected: &str) { + assert_eq!(literal.to_string(), expected); + } - assert_eq!(Literal::f32_unsuffixed(10.0).to_string(), "10.0"); - assert_eq!(Literal::f64_unsuffixed(10.0).to_string(), "10.0"); + assert(Literal::f32_suffixed(10.0), "10f32"); + assert(Literal::f32_suffixed(-10.0), "-10f32"); + assert(Literal::f64_suffixed(10.0), "10f64"); + assert(Literal::f64_suffixed(-10.0), "-10f64"); + + assert(Literal::f32_unsuffixed(10.0), "10.0"); + assert(Literal::f32_unsuffixed(-10.0), "-10.0"); + assert(Literal::f64_unsuffixed(10.0), "10.0"); + assert(Literal::f64_unsuffixed(-10.0), "-10.0"); + + assert( + Literal::f64_unsuffixed(1e100), + 
"10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000.0", + ); } #[test] @@ -248,9 +451,13 @@ fn literal_suffix() { assert_eq!(token_count("1._m"), 3); assert_eq!(token_count("\"\"s"), 1); assert_eq!(token_count("r\"\"r"), 1); + assert_eq!(token_count("r#\"\"#r"), 1); assert_eq!(token_count("b\"\"b"), 1); assert_eq!(token_count("br\"\"br"), 1); - assert_eq!(token_count("r#\"\"#r"), 1); + assert_eq!(token_count("br#\"\"#br"), 1); + assert_eq!(token_count("c\"\"c"), 1); + assert_eq!(token_count("cr\"\"cr"), 1); + assert_eq!(token_count("cr#\"\"#cr"), 1); assert_eq!(token_count("'c'c"), 1); assert_eq!(token_count("b'b'b"), 1); assert_eq!(token_count("0E"), 1); @@ -343,6 +550,81 @@ fn source_text() { assert_eq!("a", second.span().source_text().unwrap()); } +#[test] +fn lifetimes() { + let mut tokens = "'a 'static 'struct 'r#gen 'r#prefix#lifetime" + .parse::<TokenStream>() + .unwrap() + .into_iter(); + assert!(match tokens.next() { + Some(TokenTree::Punct(punct)) => { + punct.as_char() == '\'' && punct.spacing() == Spacing::Joint + } + _ => false, + }); + assert!(match tokens.next() { + Some(TokenTree::Ident(ident)) => ident == "a", + _ => false, + }); + assert!(match tokens.next() { + Some(TokenTree::Punct(punct)) => { + punct.as_char() == '\'' && punct.spacing() == Spacing::Joint + } + _ => false, + }); + assert!(match tokens.next() { + Some(TokenTree::Ident(ident)) => ident == "static", + _ => false, + }); + assert!(match tokens.next() { + Some(TokenTree::Punct(punct)) => { + punct.as_char() == '\'' && punct.spacing() == Spacing::Joint + } + _ => false, + }); + assert!(match tokens.next() { + Some(TokenTree::Ident(ident)) => ident == "struct", + _ => false, + }); + assert!(match tokens.next() { + Some(TokenTree::Punct(punct)) => { + punct.as_char() == '\'' && punct.spacing() == Spacing::Joint + } + _ => false, + }); + assert!(match tokens.next() { + Some(TokenTree::Ident(ident)) => ident == "r#gen", + _ => false, + }); + assert!(match tokens.next() { + Some(TokenTree::Punct(punct)) => { + punct.as_char() == '\'' && punct.spacing() == Spacing::Joint + } + _ => false, + }); + assert!(match tokens.next() { + Some(TokenTree::Ident(ident)) => ident == "r#prefix", + _ => false, + }); + assert!(match tokens.next() { + Some(TokenTree::Punct(punct)) => { + punct.as_char() == '#' && punct.spacing() == Spacing::Alone + } + _ => false, + }); + assert!(match tokens.next() { + Some(TokenTree::Ident(ident)) => ident == "lifetime", + _ => false, + }); + + "' a".parse::<TokenStream>().unwrap_err(); + "' r#gen".parse::<TokenStream>().unwrap_err(); + "' prefix#lifetime".parse::<TokenStream>().unwrap_err(); + "'prefix#lifetime".parse::<TokenStream>().unwrap_err(); + "'aa'bb".parse::<TokenStream>().unwrap_err(); + "'r#gen'a".parse::<TokenStream>().unwrap_err(); +} + #[test] fn roundtrip() { fn roundtrip(p: &str) { @@ -378,7 +660,7 @@ fn roundtrip() { roundtrip("'a"); roundtrip("'_"); roundtrip("'static"); - roundtrip("'\\u{10__FFFF}'"); + roundtrip(r"'\u{10__FFFF}'"); roundtrip("\"\\u{10_F0FF__}foo\\u{1_0_0_0__}\""); } @@ -401,6 +683,7 @@ fn fail() { fail("\"\\\r \""); // backslash carriage return fail("'aa'aa"); fail("br##\"\"#"); + fail("cr##\"\"#"); fail("\"\\\n\u{85}\r\""); } @@ -430,7 +713,6 @@ testing 123 } #[cfg(procmacro2_semver_exempt)] -#[cfg(not(nightly))] #[test] fn default_span() { let start = Span::call_site().start(); @@ -439,9 +721,8 @@ fn default_span() { let end = Span::call_site().end(); assert_eq!(end.line, 1); assert_eq!(end.column, 0); - let 
source_file = Span::call_site().source_file(); - assert_eq!(source_file.path().to_string_lossy(), "<unspecified>"); - assert!(!source_file.is_real()); + assert_eq!(Span::call_site().file(), "<unspecified>"); + assert!(Span::call_site().local_file().is_none()); } #[cfg(procmacro2_semver_exempt)] @@ -458,11 +739,8 @@ fn span_join() { .into_iter() .collect::<Vec<_>>(); - assert!(source1[0].span().source_file() != source2[0].span().source_file()); - assert_eq!( - source1[0].span().source_file(), - source1[1].span().source_file() - ); + assert!(source1[0].span().file() != source2[0].span().file()); + assert_eq!(source1[0].span().file(), source1[1].span().file()); let joined1 = source1[0].span().join(source1[1].span()); let joined2 = source1[0].span().join(source2[0].span()); @@ -476,10 +754,7 @@ fn span_join() { assert_eq!(end.line, 2); assert_eq!(end.column, 3); - assert_eq!( - joined1.unwrap().source_file(), - source1[0].span().source_file() - ); + assert_eq!(joined1.unwrap().file(), source1[0].span().file()); } #[test] @@ -527,18 +802,42 @@ fn raw_identifier() { } #[test] -fn test_debug_ident() { +fn test_display_ident() { let ident = Ident::new("proc_macro", Span::call_site()); + assert_eq!(format!("{ident}"), "proc_macro"); + assert_eq!(format!("{ident:-^14}"), "proc_macro"); - #[cfg(not(span_locations))] - let expected = "Ident(proc_macro)"; + let ident = Ident::new_raw("proc_macro", Span::call_site()); + assert_eq!(format!("{ident}"), "r#proc_macro"); + assert_eq!(format!("{ident:-^14}"), "r#proc_macro"); +} - #[cfg(span_locations)] - let expected = "Ident { sym: proc_macro }"; +#[test] +fn test_debug_ident() { + let ident = Ident::new("proc_macro", Span::call_site()); + let expected = if cfg!(span_locations) { + "Ident { sym: proc_macro }" + } else { + "Ident(proc_macro)" + }; + assert_eq!(expected, format!("{:?}", ident)); + let ident = Ident::new_raw("proc_macro", Span::call_site()); + let expected = if cfg!(span_locations) { + "Ident { sym: r#proc_macro }" + } else { + "Ident(r#proc_macro)" + }; assert_eq!(expected, format!("{:?}", ident)); } +#[test] +fn test_display_tokenstream() { + let tts = TokenStream::from_str("[a + 1]").unwrap(); + assert_eq!(format!("{tts}"), "[a + 1]"); + assert_eq!(format!("{tts:-^5}"), "[a + 1]"); +} + #[test] fn test_debug_tokenstream() { let tts = TokenStream::from_str("[a + 1]").unwrap(); @@ -757,3 +1056,39 @@ fn byte_order_mark() { let string = "foo\u{feff}"; string.parse::<TokenStream>().unwrap_err(); } + +#[cfg(span_locations)] +fn create_span() -> proc_macro2::Span { + let tts: TokenStream = "1".parse().unwrap(); + match tts.into_iter().next().unwrap() { + TokenTree::Literal(literal) => literal.span(), + _ => unreachable!(), + } +} + +#[cfg(span_locations)] +#[test] +fn test_invalidate_current_thread_spans() { + let actual = format!("{:#?}", create_span()); + assert_eq!(actual, "bytes(1..2)"); + let actual = format!("{:#?}", create_span()); + assert_eq!(actual, "bytes(3..4)"); + + proc_macro2::extra::invalidate_current_thread_spans(); + + let actual = format!("{:#?}", create_span()); + // Test that span offsets have been reset after the call + // to invalidate_current_thread_spans() + assert_eq!(actual, "bytes(1..2)"); +} + +#[cfg(span_locations)] +#[test] +#[should_panic(expected = "Invalid span with no related FileInfo!")] +fn test_use_span_after_invalidation() { + let span = create_span(); + + proc_macro2::extra::invalidate_current_thread_spans(); + + span.source_text(); +} diff --git a/vendor/proc-macro2/tests/test_size.rs 
b/vendor/proc-macro2/tests/test_size.rs index 46e58db4..8b679151 100644 --- a/vendor/proc-macro2/tests/test_size.rs +++ b/vendor/proc-macro2/tests/test_size.rs @@ -1,42 +1,81 @@ +#![allow(unused_attributes)] + extern crate proc_macro; use std::mem; -#[rustversion::attr(before(1.32), ignore)] +#[rustversion::attr(before(1.64), ignore = "requires Rust 1.64+")] +#[cfg_attr(not(target_pointer_width = "64"), ignore = "only applicable to 64-bit")] +#[cfg_attr(randomize_layout, ignore = "disabled due to randomized layout")] #[test] -fn test_proc_macro_span_size() { +fn test_proc_macro_size() { assert_eq!(mem::size_of::<proc_macro::Span>(), 4); assert_eq!(mem::size_of::<Option<proc_macro::Span>>(), 4); + assert_eq!(mem::size_of::<proc_macro::Group>(), 20); + assert_eq!(mem::size_of::<proc_macro::Ident>(), 12); + assert_eq!(mem::size_of::<proc_macro::Punct>(), 8); + assert_eq!(mem::size_of::<proc_macro::Literal>(), 16); + assert_eq!(mem::size_of::<proc_macro::TokenStream>(), 4); } -#[cfg_attr(not(all(not(wrap_proc_macro), not(span_locations))), ignore)] +#[cfg_attr(not(target_pointer_width = "64"), ignore = "only applicable to 64-bit")] +#[cfg_attr(randomize_layout, ignore = "disabled due to randomized layout")] +#[cfg_attr(wrap_proc_macro, ignore = "wrapper mode")] +#[cfg_attr(span_locations, ignore = "span locations are on")] #[test] -fn test_proc_macro2_fallback_span_size_without_locations() { +fn test_proc_macro2_fallback_size_without_locations() { assert_eq!(mem::size_of::<proc_macro2::Span>(), 0); assert_eq!(mem::size_of::<Option<proc_macro2::Span>>(), 1); + assert_eq!(mem::size_of::<proc_macro2::Group>(), 16); + assert_eq!(mem::size_of::<proc_macro2::Ident>(), 24); + assert_eq!(mem::size_of::<proc_macro2::Punct>(), 8); + assert_eq!(mem::size_of::<proc_macro2::Literal>(), 24); + assert_eq!(mem::size_of::<proc_macro2::TokenStream>(), 8); } -#[cfg_attr(not(all(not(wrap_proc_macro), span_locations)), ignore)] +#[cfg_attr(not(target_pointer_width = "64"), ignore = "only applicable to 64-bit")] +#[cfg_attr(randomize_layout, ignore = "disabled due to randomized layout")] +#[cfg_attr(wrap_proc_macro, ignore = "wrapper mode")] +#[cfg_attr(not(span_locations), ignore = "span locations are off")] #[test] -fn test_proc_macro2_fallback_span_size_with_locations() { +fn test_proc_macro2_fallback_size_with_locations() { assert_eq!(mem::size_of::<proc_macro2::Span>(), 8); assert_eq!(mem::size_of::<Option<proc_macro2::Span>>(), 12); + assert_eq!(mem::size_of::<proc_macro2::Group>(), 24); + assert_eq!(mem::size_of::<proc_macro2::Ident>(), 32); + assert_eq!(mem::size_of::<proc_macro2::Punct>(), 16); + assert_eq!(mem::size_of::<proc_macro2::Literal>(), 32); + assert_eq!(mem::size_of::<proc_macro2::TokenStream>(), 8); } -#[rustversion::attr(before(1.32), ignore)] -#[rustversion::attr( - since(1.32), - cfg_attr(not(all(wrap_proc_macro, not(span_locations))), ignore) -)] +#[rustversion::attr(before(1.71), ignore = "requires Rust 1.71+")] +#[cfg_attr(not(target_pointer_width = "64"), ignore = "only applicable to 64-bit")] +#[cfg_attr(randomize_layout, ignore = "disabled due to randomized layout")] +#[cfg_attr(not(wrap_proc_macro), ignore = "fallback mode")] +#[cfg_attr(span_locations, ignore = "span locations are on")] #[test] -fn test_proc_macro2_wrapper_span_size_without_locations() { +fn test_proc_macro2_wrapper_size_without_locations() { assert_eq!(mem::size_of::<proc_macro2::Span>(), 4); assert_eq!(mem::size_of::<Option<proc_macro2::Span>>(), 8); + assert_eq!(mem::size_of::<proc_macro2::Group>(), 24); + 
assert_eq!(mem::size_of::<proc_macro2::Ident>(), 24); + assert_eq!(mem::size_of::<proc_macro2::Punct>(), 12); + assert_eq!(mem::size_of::<proc_macro2::Literal>(), 24); + assert_eq!(mem::size_of::<proc_macro2::TokenStream>(), 32); } -#[cfg_attr(not(all(wrap_proc_macro, span_locations)), ignore)] +#[rustversion::attr(before(1.65), ignore = "requires Rust 1.65+")] +#[cfg_attr(not(target_pointer_width = "64"), ignore = "only applicable to 64-bit")] +#[cfg_attr(randomize_layout, ignore = "disabled due to randomized layout")] +#[cfg_attr(not(wrap_proc_macro), ignore = "fallback mode")] +#[cfg_attr(not(span_locations), ignore = "span locations are off")] #[test] -fn test_proc_macro2_wrapper_span_size_with_locations() { +fn test_proc_macro2_wrapper_size_with_locations() { assert_eq!(mem::size_of::<proc_macro2::Span>(), 12); assert_eq!(mem::size_of::<Option<proc_macro2::Span>>(), 12); + assert_eq!(mem::size_of::<proc_macro2::Group>(), 32); + assert_eq!(mem::size_of::<proc_macro2::Ident>(), 32); + assert_eq!(mem::size_of::<proc_macro2::Punct>(), 20); + assert_eq!(mem::size_of::<proc_macro2::Literal>(), 32); + assert_eq!(mem::size_of::<proc_macro2::TokenStream>(), 32); } diff --git a/vendor/quote/.cargo-checksum.json b/vendor/quote/.cargo-checksum.json index ac15d4a6..0559d43c 100644 --- a/vendor/quote/.cargo-checksum.json +++ b/vendor/quote/.cargo-checksum.json @@ -1 +1 @@ -{"files":{".cargo_vcs_info.json":"d12c627787b7cc914ab974c246acca0cc3277c14327ec75d26507df749151e9e",".github/FUNDING.yml":"b017158736b3c9751a2d21edfce7fe61c8954e2fced8da8dd3013c2f3e295bd9",".github/workflows/ci.yml":"06e4da77304902f0cfab53c55a2258fe41b4b10f5d06563445c20254c47b531d","Cargo.toml":"1eab00879906063b283945ad8ffd68f41fb2d99b54773fadd9154c86b98b827f","Cargo.toml.orig":"59e7b16a6b1922e1cfc3fb118e30320124d8c8f91563446f0f1ad4efbd28be20","LICENSE-APACHE":"62c7a1e35f56406896d7aa7ca52d0cc0d272ac022b5d2796e7d6905db8a3636a","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"626e7079eab0baacf0fcaf3e244f407b2014ebaeca45905d72e8fb8bed18aaea","rust-toolchain.toml":"6bbb61302978c736b2da03e4fb40e3beab908f85d533ab46fd541e637b5f3e0f","src/ext.rs":"9881576cac3e476a4bf04f9b601cf9a53b79399fb0ca9634e8b861ac91709843","src/format.rs":"c595015418f35e6992e710441b9999f09b2afe4678b138039d670d100c0bdd86","src/ident_fragment.rs":"0b3e6c2129e55910fd2d240e1e7efba6f1796801d24352d1c0bfbceb0e8b678f","src/lib.rs":"2a4bafc9784df4bac935139d72795e0eeb25e05bbf2bc98c134aedbd3f3a2377","src/runtime.rs":"7f37326edaeac2c42ed806b447eeba12e36dd4b1bc25fbf52f8eb23140f3be7a","src/spanned.rs":"3ccf5120593f35787442c0a37d243e802c5262e7f8b35aed503873008ec035c5","src/to_tokens.rs":"1c76311fcc82098e630056d71fd6f3929194ee31b0840e2aa643ed7e78026e3e","tests/compiletest.rs":"022a8e400ef813d7ea1875b944549cee5125f6a995dc33e93b48cba3e1b57bd1","tests/test.rs":"3be80741f84a707376c230d9cf70ce9537caa359691d8d4c34968e28175e4ad7","tests/ui/does-not-have-iter-interpolated-dup.rs":"ad13eea21d4cdd2ab6c082f633392e1ff20fb0d1af5f2177041e0bf7f30da695","tests/ui/does-not-have-iter-interpolated-dup.stderr":"90a4bdb9267535f5d2785940148338d6b7d905548051d2c9c5dcbd58f2c11d8e","tests/ui/does-not-have-iter-interpolated.rs":"83a5b3f240651adcbe4b6e51076d76d653ad439b37442cf4054f1fd3c073f3b7","tests/ui/does-not-have-iter-interpolated.stderr":"ae7c2739554c862b331705e82781aa4687a4375210cef6ae899a4be4a4ec2d97","tests/ui/does-not-have-iter-separated.rs":"fe413c48331d5e3a7ae5fef6a5892a90c72f610d54595879eb49d0a94154ba3f","tests/ui/does-not-have-iter-separated.stderr":"
03fd560979ebcd5aa6f83858bc2c3c01ba6546c16335101275505304895c1ae9","tests/ui/does-not-have-iter.rs":"09dc9499d861b63cebb0848b855b78e2dc9497bfde37ba6339f3625ae009a62f","tests/ui/does-not-have-iter.stderr":"d6da483c29e232ced72059bbdf05d31afb1df9e02954edaa9cfaea1ec6df72dc","tests/ui/not-quotable.rs":"5759d0884943417609f28faadc70254a3e2fd3d9bd6ff7297a3fb70a77fafd8a","tests/ui/not-quotable.stderr":"459bdadbf1e73b9401cf7d5d578dc053774bb4e5aa25ad2abf25d6b0f61aa306","tests/ui/not-repeatable.rs":"a4b115c04e4e41049a05f5b69450503fbffeba031218b4189cb931839f7f9a9c","tests/ui/not-repeatable.stderr":"594249d59d16f039c16816f1aaf9933176994e296fcf81d1b8b24d5b66ae0d0a","tests/ui/wrong-type-span.rs":"6195e35ea844c0c52ba1cff5d790c3a371af6915d137d377834ad984229ef9ea","tests/ui/wrong-type-span.stderr":"cad072e40e0ecc04f375122ae41aede2f0da2a9244492b3fcf70249e59d1b128"},"package":"5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae"} \ No newline at end of file +{"files":{".cargo_vcs_info.json":"31f077cccc677667ae9dbd3ca2a97807c645307199ec9dd6c2620fbf1b80015e",".github/FUNDING.yml":"b017158736b3c9751a2d21edfce7fe61c8954e2fced8da8dd3013c2f3e295bd9",".github/workflows/ci.yml":"a74a11b884e49e64e0af70d7b66a497dfe19f61d1e7375798fb7dcf46d074e30","Cargo.lock":"7f9f3eb56475b19bf94e20384421c6485c217ef1ab136867aa678b2dec7922b3","Cargo.toml":"f98585795e8fb0a2798c24fd5bc39d6de078f96cbe1c4be6532dee2f10ade5ae","Cargo.toml.orig":"8e7c7edea1aa52e0854b58bc77d5da20fb01a76138675757b162f03d2243c1c3","LICENSE-APACHE":"62c7a1e35f56406896d7aa7ca52d0cc0d272ac022b5d2796e7d6905db8a3636a","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"5bc59a97099fbdc7f9f8b69d3f9910e27629184647412b5009b274b5b8bfb6d1","build.rs":"cd6808c02e476b09a520105e2c6f6d325cccb1ecd542cbbcc836a0ae6f6fb0f1","rust-toolchain.toml":"6bbb61302978c736b2da03e4fb40e3beab908f85d533ab46fd541e637b5f3e0f","src/ext.rs":"33e41c8a11743de714c1cab1db37b242ce6df9cdb1dda43927c1f015b33701b3","src/format.rs":"141ee1049cfbe363f0d6e9210996dabc997bd3d1c67eb9695fab1c2a0b100e80","src/ident_fragment.rs":"0b3e6c2129e55910fd2d240e1e7efba6f1796801d24352d1c0bfbceb0e8b678f","src/lib.rs":"1f852ff55a08bc73e37ec76faf862bdd8769a8b825c2f49e5ca97e9b905b28c7","src/runtime.rs":"905008e29cb70a13845c2b334e531569121699b2a23be2acc7ab6070c45221e4","src/spanned.rs":"713678bf5cb3b4bf2f119dcf64d188a63dc59455a724c3d2567ceab83b734d73","src/to_tokens.rs":"5bd52437ed5764ae2b5d84843b23f29497ad0361f3ee3cfda621a4b91c70ef1c","tests/compiletest.rs":"4e381aa8ca3eabb7ac14d1e0c3700b3223e47640547a6988cfa13ad68255f60f","tests/test.rs":"c746974d738a6922b9a25eacb55416d0ef513cc418de3aa5ce5e12cacb7ee94d","tests/ui/does-not-have-iter-interpolated-dup.rs":"ad13eea21d4cdd2ab6c082f633392e1ff20fb0d1af5f2177041e0bf7f30da695","tests/ui/does-not-have-iter-interpolated-dup.stderr":"e5966b716290266591f97f1ab04107a47748d493e10ca99f19675fa76524f205","tests/ui/does-not-have-iter-interpolated.rs":"83a5b3f240651adcbe4b6e51076d76d653ad439b37442cf4054f1fd3c073f3b7","tests/ui/does-not-have-iter-interpolated.stderr":"a20403a06f36b54d45a195e455a11543cca7259e1c9f1bc78f0ce65cc0226347","tests/ui/does-not-have-iter-separated.rs":"fe413c48331d5e3a7ae5fef6a5892a90c72f610d54595879eb49d0a94154ba3f","tests/ui/does-not-have-iter-separated.stderr":"29718da7187e2da98c98bea9bfa405305a6df60af6c2f3c70cc27b7e13deead7","tests/ui/does-not-have-iter.rs":"09dc9499d861b63cebb0848b855b78e2dc9497bfde37ba6339f3625ae009a62f","tests/ui/does-not-have-iter.stderr":"691c985934330d5ba063fd4b172f89702673c710e610e8381e39ab78d729b
0f1","tests/ui/not-quotable.rs":"5759d0884943417609f28faadc70254a3e2fd3d9bd6ff7297a3fb70a77fafd8a","tests/ui/not-quotable.stderr":"433a290bd53070d5cce6d623f9ef6f991756a78de109d3e486b46b699c2ce764","tests/ui/not-repeatable.rs":"a4b115c04e4e41049a05f5b69450503fbffeba031218b4189cb931839f7f9a9c","tests/ui/not-repeatable.stderr":"501ea5e47492b55bea457b02e991e0c624cd0c12601e0b759fff54a731370caf","tests/ui/wrong-type-span.rs":"6195e35ea844c0c52ba1cff5d790c3a371af6915d137d377834ad984229ef9ea","tests/ui/wrong-type-span.stderr":"cad072e40e0ecc04f375122ae41aede2f0da2a9244492b3fcf70249e59d1b128"},"package":"a338cc41d27e6cc6dce6cefc13a0729dfbb81c262b1f519331575dd80ef3067f"} \ No newline at end of file diff --git a/vendor/quote/.cargo_vcs_info.json b/vendor/quote/.cargo_vcs_info.json index 471c0897..43e8425a 100644 --- a/vendor/quote/.cargo_vcs_info.json +++ b/vendor/quote/.cargo_vcs_info.json @@ -1,6 +1,6 @@ { "git": { - "sha1": "8beec4c311a196d0adc9fb70188226d9c1f8bf1c" + "sha1": "bb9e7a46b3105e11c73416bd59b4455a71068949" }, "path_in_vcs": "" } \ No newline at end of file diff --git a/vendor/quote/.github/workflows/ci.yml b/vendor/quote/.github/workflows/ci.yml index c14c2212..9e25479a 100644 --- a/vendor/quote/.github/workflows/ci.yml +++ b/vendor/quote/.github/workflows/ci.yml @@ -24,10 +24,10 @@ jobs: strategy: fail-fast: false matrix: - rust: [nightly, stable, beta, 1.56.0] + rust: [nightly, stable, beta, 1.76.0, 1.68.0] timeout-minutes: 45 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v5 - uses: dtolnay/rust-toolchain@master with: toolchain: ${{matrix.rust}} @@ -35,8 +35,16 @@ jobs: - name: Enable type layout randomization run: echo RUSTFLAGS=${RUSTFLAGS}\ -Zrandomize-layout >> $GITHUB_ENV if: matrix.rust == 'nightly' + - run: cargo check - run: cargo test + if: matrix.rust != '1.68.0' - run: cargo run --manifest-path benches/Cargo.toml + - uses: actions/upload-artifact@v4 + if: matrix.rust == 'nightly' && always() + with: + name: Cargo.lock + path: Cargo.lock + continue-on-error: true minimal: name: Minimal versions @@ -45,18 +53,34 @@ jobs: runs-on: ubuntu-latest timeout-minutes: 45 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v5 - uses: dtolnay/rust-toolchain@nightly - run: cargo generate-lockfile -Z minimal-versions - run: cargo check --locked + doc: + name: Documentation + needs: pre_ci + if: needs.pre_ci.outputs.continue + runs-on: ubuntu-latest + timeout-minutes: 45 + env: + RUSTDOCFLAGS: -Dwarnings + steps: + - uses: actions/checkout@v5 + - uses: dtolnay/rust-toolchain@nightly + with: + components: rust-src + - uses: dtolnay/install@cargo-docs-rs + - run: cargo docs-rs + clippy: name: Clippy runs-on: ubuntu-latest if: github.event_name != 'pull_request' timeout-minutes: 45 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v5 - uses: dtolnay/rust-toolchain@nightly with: components: clippy, rust-src @@ -69,7 +93,7 @@ jobs: runs-on: ubuntu-latest timeout-minutes: 45 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v5 - uses: dtolnay/rust-toolchain@miri - run: cargo miri setup - run: cargo miri test @@ -82,6 +106,7 @@ jobs: if: github.event_name != 'pull_request' timeout-minutes: 45 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v5 + - uses: dtolnay/rust-toolchain@stable - uses: dtolnay/install@cargo-outdated - run: cargo outdated --workspace --exit-code 1 diff --git a/vendor/quote/Cargo.lock b/vendor/quote/Cargo.lock new file mode 100644 index 00000000..038bc1b2 --- /dev/null +++ b/vendor/quote/Cargo.lock @@ 
-0,0 +1,256 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 3 + +[[package]] +name = "dissimilar" +version = "1.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8975ffdaa0ef3661bfe02dbdcc06c9f829dfafe6a3c474de366a8d5e44276921" + +[[package]] +name = "equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + +[[package]] +name = "glob" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280" + +[[package]] +name = "hashbrown" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5419bdc4f6a9207fbeba6d11b604d481addf78ecd10c11ad51e76c2f6482748d" + +[[package]] +name = "indexmap" +version = "2.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6717a8d2a5a929a1a2eb43a12812498ed141a0bcfb7e8f7844fbdbe4303bba9f" +dependencies = [ + "equivalent", + "hashbrown", +] + +[[package]] +name = "itoa" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" + +[[package]] +name = "memchr" +version = "2.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273" + +[[package]] +name = "proc-macro2" +version = "1.0.103" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ee95bc4ef87b8d5ba32e8b7714ccc834865276eab0aed5c9958d00ec45f49e8" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.41" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce25767e7b499d1b604768e7cde645d14cc8584231ea6b295e9c9eb22c02e1d1" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "quote" +version = "1.0.42" +dependencies = [ + "proc-macro2", + "rustversion", + "trybuild", +] + +[[package]] +name = "rustversion" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" + +[[package]] +name = "ryu" +version = "1.0.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" + +[[package]] +name = "serde" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" +dependencies = [ + "serde_core", +] + +[[package]] +name = "serde_core" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" +dependencies = [ + "proc-macro2", + "quote 1.0.41", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.145" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c" +dependencies = [ + "itoa", + "memchr", + "ryu", + 
"serde", + "serde_core", +] + +[[package]] +name = "serde_spanned" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e24345aa0fe688594e73770a5f6d1b216508b4f93484c0026d521acd30134392" +dependencies = [ + "serde_core", +] + +[[package]] +name = "syn" +version = "2.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f17c7e013e88258aa9543dcbe81aca68a667a9ac37cd69c9fbc07858bfe0e2f" +dependencies = [ + "proc-macro2", + "quote 1.0.41", + "unicode-ident", +] + +[[package]] +name = "target-triple" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "591ef38edfb78ca4771ee32cf494cb8771944bee237a9b91fc9c1424ac4b777b" + +[[package]] +name = "termcolor" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "toml" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0dc8b1fb61449e27716ec0e1bdf0f6b8f3e8f6b05391e8497b8b6d7804ea6d8" +dependencies = [ + "indexmap", + "serde_core", + "serde_spanned", + "toml_datetime", + "toml_parser", + "toml_writer", + "winnow", +] + +[[package]] +name = "toml_datetime" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2cdb639ebbc97961c51720f858597f7f24c4fc295327923af55b74c3c724533" +dependencies = [ + "serde_core", +] + +[[package]] +name = "toml_parser" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0cbe268d35bdb4bb5a56a2de88d0ad0eb70af5384a99d648cd4b3d04039800e" +dependencies = [ + "winnow", +] + +[[package]] +name = "toml_writer" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df8b2b54733674ad286d16267dcfc7a71ed5c776e4ac7aa3c3e2561f7c637bf2" + +[[package]] +name = "trybuild" +version = "1.0.113" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "559b6a626c0815c942ac98d434746138b4f89ddd6a1b8cbb168c6845fb3376c5" +dependencies = [ + "dissimilar", + "glob", + "serde", + "serde_derive", + "serde_json", + "target-triple", + "termcolor", + "toml", +] + +[[package]] +name = "unicode-ident" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5" + +[[package]] +name = "winapi-util" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" +dependencies = [ + "windows-sys", +] + +[[package]] +name = "windows-link" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" + +[[package]] +name = "windows-sys" +version = "0.61.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" +dependencies = [ + "windows-link", +] + +[[package]] +name = "winnow" +version = "0.7.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21a0236b59786fed61e2a80582dd500fe61f18b5dca67a4a067d0bc9039339cf" diff --git a/vendor/quote/Cargo.toml b/vendor/quote/Cargo.toml index 4dbf7abe..1b6fed14 100644 --- a/vendor/quote/Cargo.toml +++ 
b/vendor/quote/Cargo.toml @@ -11,10 +11,15 @@ [package] edition = "2018" -rust-version = "1.56" +rust-version = "1.68" name = "quote" -version = "1.0.33" +version = "1.0.42" authors = ["David Tolnay <dtolnay@gmail.com>"] +build = "build.rs" +autolib = false +autobins = false +autoexamples = false +autotests = false autobenches = false description = "Quasi-quoting macro quote!(...)" documentation = "https://docs.rs/quote/" @@ -28,23 +33,38 @@ license = "MIT OR Apache-2.0" repository = "https://github.com/dtolnay/quote" [package.metadata.docs.rs] -rustdoc-args = ["--generate-link-to-definition"] targets = ["x86_64-unknown-linux-gnu"] +rustdoc-args = [ + "--generate-link-to-definition", + "--generate-macro-expansion", + "--extern-html-root-url=core=https://doc.rust-lang.org", + "--extern-html-root-url=alloc=https://doc.rust-lang.org", + "--extern-html-root-url=std=https://doc.rust-lang.org", +] + +[features] +default = ["proc-macro"] +proc-macro = ["proc-macro2/proc-macro"] [lib] -doc-scrape-examples = false +name = "quote" +path = "src/lib.rs" + +[[test]] +name = "compiletest" +path = "tests/compiletest.rs" + +[[test]] +name = "test" +path = "tests/test.rs" [dependencies.proc-macro2] -version = "1.0.66" +version = "1.0.80" default-features = false [dev-dependencies.rustversion] version = "1.0" [dev-dependencies.trybuild] -version = "1.0.66" +version = "1.0.108" features = ["diff"] - -[features] -default = ["proc-macro"] -proc-macro = ["proc-macro2/proc-macro"] diff --git a/vendor/quote/Cargo.toml.orig b/vendor/quote/Cargo.toml.orig index 8c722654..f31eda83 100644 --- a/vendor/quote/Cargo.toml.orig +++ b/vendor/quote/Cargo.toml.orig @@ -1,6 +1,6 @@ [package] name = "quote" -version = "1.0.33" # don't forget to update html_root_url, version in readme for breaking changes +version = "1.0.42" authors = ["David Tolnay <dtolnay@gmail.com>"] autobenches = false categories = ["development-tools::procedural-macro-helpers"] @@ -10,14 +10,14 @@ edition = "2018" keywords = ["macros", "syn"] license = "MIT OR Apache-2.0" repository = "https://github.com/dtolnay/quote" -rust-version = "1.56" +rust-version = "1.68" [dependencies] -proc-macro2 = { version = "1.0.66", default-features = false } +proc-macro2 = { version = "1.0.80", default-features = false } [dev-dependencies] rustversion = "1.0" -trybuild = { version = "1.0.66", features = ["diff"] } +trybuild = { version = "1.0.108", features = ["diff"] } [features] default = ["proc-macro"] @@ -25,12 +25,15 @@ default = ["proc-macro"] # libproc_macro in the rustc compiler. proc-macro = ["proc-macro2/proc-macro"] -[lib] -doc-scrape-examples = false - [workspace] members = ["benches"] [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] -rustdoc-args = ["--generate-link-to-definition"] +rustdoc-args = [ + "--generate-link-to-definition", + "--generate-macro-expansion", + "--extern-html-root-url=core=https://doc.rust-lang.org", + "--extern-html-root-url=alloc=https://doc.rust-lang.org", + "--extern-html-root-url=std=https://doc.rust-lang.org", +] diff --git a/vendor/quote/README.md b/vendor/quote/README.md index bfc91a97..c4316be3 100644 --- a/vendor/quote/README.md +++ b/vendor/quote/README.md @@ -34,7 +34,7 @@ macros. quote = "1.0" ``` -*Version requirement: Quote supports rustc 1.56 and up.*<br> +*Version requirement: Quote supports rustc 1.68 and up.*<br> [*Release notes*](https://github.com/dtolnay/quote/releases) <br> @@ -84,8 +84,7 @@ let tokens = quote! { Repetition is done using `#(...)*` or `#(...),*` similar to `macro_rules!`. 
This iterates through the elements of any variable interpolated within the repetition and inserts a copy of the repetition body for each one. The variables in an -interpolation may be anything that implements `IntoIterator`, including `Vec` or -a pre-existing iterator. +interpolation may be a `Vec`, slice, `BTreeSet`, or any `Iterator`. - `#(#var)*` — no separators - `#(#var),*` — the character before the asterisk is used as a separator diff --git a/vendor/quote/build.rs b/vendor/quote/build.rs new file mode 100644 index 00000000..50f98cb3 --- /dev/null +++ b/vendor/quote/build.rs @@ -0,0 +1,32 @@ +use std::env; +use std::process::Command; +use std::str; + +fn main() { + println!("cargo:rerun-if-changed=build.rs"); + + let Some(minor) = rustc_minor_version() else { + return; + }; + + if minor >= 77 { + println!("cargo:rustc-check-cfg=cfg(no_diagnostic_namespace)"); + } + + // Support for the `#[diagnostic]` tool attribute namespace + // https://blog.rust-lang.org/2024/05/02/Rust-1.78.0.html#diagnostic-attributes + if minor < 78 { + println!("cargo:rustc-cfg=no_diagnostic_namespace"); + } +} + +fn rustc_minor_version() -> Option<u32> { + let rustc = env::var_os("RUSTC")?; + let output = Command::new(rustc).arg("--version").output().ok()?; + let version = str::from_utf8(&output.stdout).ok()?; + let mut pieces = version.split('.'); + if pieces.next() != Some("rustc 1") { + return None; + } + pieces.next()?.parse().ok() +} diff --git a/vendor/quote/src/ext.rs b/vendor/quote/src/ext.rs index 92c2315b..bc983a5d 100644 --- a/vendor/quote/src/ext.rs +++ b/vendor/quote/src/ext.rs @@ -69,8 +69,16 @@ impl TokenStreamExt for TokenStream { I: IntoIterator, I::Item: ToTokens, { - for token in iter { - token.to_tokens(self); + do_append_all(self, iter.into_iter()); + + fn do_append_all<I>(stream: &mut TokenStream, iter: I) + where + I: Iterator, + I::Item: ToTokens, + { + for token in iter { + token.to_tokens(stream); + } } } @@ -80,11 +88,20 @@ impl TokenStreamExt for TokenStream { I::Item: ToTokens, U: ToTokens, { - for (i, token) in iter.into_iter().enumerate() { - if i > 0 { - op.to_tokens(self); + do_append_separated(self, iter.into_iter(), op); + + fn do_append_separated<I, U>(stream: &mut TokenStream, iter: I, op: U) + where + I: Iterator, + I::Item: ToTokens, + U: ToTokens, + { + for (i, token) in iter.into_iter().enumerate() { + if i > 0 { + op.to_tokens(stream); + } + token.to_tokens(stream); } - token.to_tokens(self); } } @@ -94,9 +111,18 @@ impl TokenStreamExt for TokenStream { I::Item: ToTokens, U: ToTokens, { - for token in iter { - token.to_tokens(self); - term.to_tokens(self); + do_append_terminated(self, iter.into_iter(), term); + + fn do_append_terminated<I, U>(stream: &mut TokenStream, iter: I, term: U) + where + I: Iterator, + I::Item: ToTokens, + U: ToTokens, + { + for token in iter { + token.to_tokens(stream); + term.to_tokens(stream); + } } } } diff --git a/vendor/quote/src/format.rs b/vendor/quote/src/format.rs index 3cddbd28..ec0bbf38 100644 --- a/vendor/quote/src/format.rs +++ b/vendor/quote/src/format.rs @@ -26,7 +26,7 @@ /// default. This trait is like `Display`, with a few differences: /// /// * `IdentFragment` is only implemented for a limited set of types, such as -/// unsigned integers and strings. +/// unsigned integers and strings. /// * [`Ident`] arguments will have their `r#` prefixes stripped, if present. 
/// /// [`IdentFragment`]: crate::IdentFragment diff --git a/vendor/quote/src/lib.rs b/vendor/quote/src/lib.rs index 47167d3f..dd2f5b7c 100644 --- a/vendor/quote/src/lib.rs +++ b/vendor/quote/src/lib.rs @@ -9,8 +9,6 @@ //! This crate provides the [`quote!`] macro for turning Rust syntax tree data //! structures into tokens of source code. //! -//! [`quote!`]: macro.quote.html -//! //! Procedural macros in Rust receive a stream of tokens as input, execute //! arbitrary Rust code to determine how to manipulate those tokens, and produce //! a stream of tokens to hand back to the compiler to compile into the caller's @@ -46,7 +44,6 @@ //! implementing hygienic procedural macros. //! //! [a]: https://serde.rs/ -//! [`quote_spanned!`]: macro.quote_spanned.html //! //! ``` //! # use quote::quote; @@ -92,12 +89,15 @@ //! [prettyplease]: https://github.com/dtolnay/prettyplease // Quote types in rustdoc of other crates get linked to here. -#![doc(html_root_url = "https://docs.rs/quote/1.0.33")] +#![doc(html_root_url = "https://docs.rs/quote/1.0.42")] #![allow( clippy::doc_markdown, + clippy::elidable_lifetime_names, + clippy::items_after_statements, clippy::missing_errors_doc, clippy::missing_panics_doc, clippy::module_name_repetitions, + clippy::needless_lifetimes, // false positive https://github.com/rust-lang/rust-clippy/issues/6983 clippy::wrong_self_convention, )] @@ -125,545 +125,556 @@ pub use crate::to_tokens::ToTokens; #[doc(hidden)] pub mod spanned; -/// The whole point. -/// -/// Performs variable interpolation against the input and produces it as -/// [`proc_macro2::TokenStream`]. -/// -/// Note: for returning tokens to the compiler in a procedural macro, use -/// `.into()` on the result to convert to [`proc_macro::TokenStream`]. -/// -/// [`TokenStream`]: https://docs.rs/proc-macro2/1.0/proc_macro2/struct.TokenStream.html -/// -/// <br> -/// -/// # Interpolation -/// -/// Variable interpolation is done with `#var` (similar to `$var` in -/// `macro_rules!` macros). This grabs the `var` variable that is currently in -/// scope and inserts it in that location in the output tokens. Any type -/// implementing the [`ToTokens`] trait can be interpolated. This includes most -/// Rust primitive types as well as most of the syntax tree types from the [Syn] -/// crate. -/// -/// [`ToTokens`]: trait.ToTokens.html -/// [Syn]: https://github.com/dtolnay/syn -/// -/// Repetition is done using `#(...)*` or `#(...),*` again similar to -/// `macro_rules!`. This iterates through the elements of any variable -/// interpolated within the repetition and inserts a copy of the repetition body -/// for each one. The variables in an interpolation may be a `Vec`, slice, -/// `BTreeSet`, or any `Iterator`. -/// -/// - `#(#var)*` — no separators -/// - `#(#var),*` — the character before the asterisk is used as a separator -/// - `#( struct #var; )*` — the repetition can contain other tokens -/// - `#( #k => println!("{}", #v), )*` — even multiple interpolations -/// -/// <br> -/// -/// # Hygiene -/// -/// Any interpolated tokens preserve the `Span` information provided by their -/// `ToTokens` implementation. Tokens that originate within the `quote!` -/// invocation are spanned with [`Span::call_site()`]. -/// -/// [`Span::call_site()`]: https://docs.rs/proc-macro2/1.0/proc_macro2/struct.Span.html#method.call_site -/// -/// A different span can be provided through the [`quote_spanned!`] macro. 
-/// -/// [`quote_spanned!`]: macro.quote_spanned.html -/// -/// <br> -/// -/// # Return type -/// -/// The macro evaluates to an expression of type `proc_macro2::TokenStream`. -/// Meanwhile Rust procedural macros are expected to return the type -/// `proc_macro::TokenStream`. -/// -/// The difference between the two types is that `proc_macro` types are entirely -/// specific to procedural macros and cannot ever exist in code outside of a -/// procedural macro, while `proc_macro2` types may exist anywhere including -/// tests and non-macro code like main.rs and build.rs. This is why even the -/// procedural macro ecosystem is largely built around `proc_macro2`, because -/// that ensures the libraries are unit testable and accessible in non-macro -/// contexts. -/// -/// There is a [`From`]-conversion in both directions so returning the output of -/// `quote!` from a procedural macro usually looks like `tokens.into()` or -/// `proc_macro::TokenStream::from(tokens)`. -/// -/// [`From`]: https://doc.rust-lang.org/std/convert/trait.From.html -/// -/// <br> -/// -/// # Examples -/// -/// ### Procedural macro -/// -/// The structure of a basic procedural macro is as follows. Refer to the [Syn] -/// crate for further useful guidance on using `quote!` as part of a procedural -/// macro. -/// -/// [Syn]: https://github.com/dtolnay/syn -/// -/// ``` -/// # #[cfg(any())] -/// extern crate proc_macro; -/// # extern crate proc_macro2; -/// -/// # #[cfg(any())] -/// use proc_macro::TokenStream; -/// # use proc_macro2::TokenStream; -/// use quote::quote; -/// -/// # const IGNORE_TOKENS: &'static str = stringify! { -/// #[proc_macro_derive(HeapSize)] -/// # }; -/// pub fn derive_heap_size(input: TokenStream) -> TokenStream { -/// // Parse the input and figure out what implementation to generate... -/// # const IGNORE_TOKENS: &'static str = stringify! { -/// let name = /* ... */; -/// let expr = /* ... */; -/// # }; -/// # -/// # let name = 0; -/// # let expr = 0; -/// -/// let expanded = quote! { -/// // The generated impl. -/// impl heapsize::HeapSize for #name { -/// fn heap_size_of_children(&self) -> usize { -/// #expr -/// } -/// } -/// }; -/// -/// // Hand the output tokens back to the compiler. -/// TokenStream::from(expanded) -/// } -/// ``` -/// -/// <p><br></p> -/// -/// ### Combining quoted fragments -/// -/// Usually you don't end up constructing an entire final `TokenStream` in one -/// piece. Different parts may come from different helper functions. The tokens -/// produced by `quote!` themselves implement `ToTokens` and so can be -/// interpolated into later `quote!` invocations to build up a final result. -/// -/// ``` -/// # use quote::quote; -/// # -/// let type_definition = quote! {...}; -/// let methods = quote! {...}; -/// -/// let tokens = quote! { -/// #type_definition -/// #methods -/// }; -/// ``` -/// -/// <p><br></p> -/// -/// ### Constructing identifiers -/// -/// Suppose we have an identifier `ident` which came from somewhere in a macro -/// input and we need to modify it in some way for the macro output. Let's -/// consider prepending the identifier with an underscore. -/// -/// Simply interpolating the identifier next to an underscore will not have the -/// behavior of concatenating them. The underscore and the identifier will -/// continue to be two separate tokens as if you had written `_ x`. 
-/// -/// ``` -/// # use proc_macro2::{self as syn, Span}; -/// # use quote::quote; -/// # -/// # let ident = syn::Ident::new("i", Span::call_site()); -/// # -/// // incorrect -/// quote! { -/// let mut _#ident = 0; -/// } -/// # ; -/// ``` -/// -/// The solution is to build a new identifier token with the correct value. As -/// this is such a common case, the [`format_ident!`] macro provides a -/// convenient utility for doing so correctly. -/// -/// ``` -/// # use proc_macro2::{Ident, Span}; -/// # use quote::{format_ident, quote}; -/// # -/// # let ident = Ident::new("i", Span::call_site()); -/// # -/// let varname = format_ident!("_{}", ident); -/// quote! { -/// let mut #varname = 0; -/// } -/// # ; -/// ``` -/// -/// Alternatively, the APIs provided by Syn and proc-macro2 can be used to -/// directly build the identifier. This is roughly equivalent to the above, but -/// will not handle `ident` being a raw identifier. -/// -/// ``` -/// # use proc_macro2::{self as syn, Span}; -/// # use quote::quote; -/// # -/// # let ident = syn::Ident::new("i", Span::call_site()); -/// # -/// let concatenated = format!("_{}", ident); -/// let varname = syn::Ident::new(&concatenated, ident.span()); -/// quote! { -/// let mut #varname = 0; -/// } -/// # ; -/// ``` -/// -/// <p><br></p> -/// -/// ### Making method calls -/// -/// Let's say our macro requires some type specified in the macro input to have -/// a constructor called `new`. We have the type in a variable called -/// `field_type` of type `syn::Type` and want to invoke the constructor. -/// -/// ``` -/// # use quote::quote; -/// # -/// # let field_type = quote!(...); -/// # -/// // incorrect -/// quote! { -/// let value = #field_type::new(); -/// } -/// # ; -/// ``` -/// -/// This works only sometimes. If `field_type` is `String`, the expanded code -/// contains `String::new()` which is fine. But if `field_type` is something -/// like `Vec<i32>` then the expanded code is `Vec<i32>::new()` which is invalid -/// syntax. Ordinarily in handwritten Rust we would write `Vec::<i32>::new()` -/// but for macros often the following is more convenient. -/// -/// ``` -/// # use quote::quote; -/// # -/// # let field_type = quote!(...); -/// # -/// quote! { -/// let value = <#field_type>::new(); -/// } -/// # ; -/// ``` -/// -/// This expands to `<Vec<i32>>::new()` which behaves correctly. -/// -/// A similar pattern is appropriate for trait methods. -/// -/// ``` -/// # use quote::quote; -/// # -/// # let field_type = quote!(...); -/// # -/// quote! { -/// let value = <#field_type as core::default::Default>::default(); -/// } -/// # ; -/// ``` -/// -/// <p><br></p> -/// -/// ### Interpolating text inside of doc comments -/// -/// Neither doc comments nor string literals get interpolation behavior in -/// quote: -/// -/// ```compile_fail -/// quote! { -/// /// try to interpolate: #ident -/// /// -/// /// ... -/// } -/// ``` -/// -/// ```compile_fail -/// quote! { -/// #[doc = "try to interpolate: #ident"] -/// } -/// ``` -/// -/// Instead the best way to build doc comments that involve variables is by -/// formatting the doc string literal outside of quote. -/// -/// ```rust -/// # use proc_macro2::{Ident, Span}; -/// # use quote::quote; -/// # -/// # const IGNORE: &str = stringify! { -/// let msg = format!(...); -/// # }; -/// # -/// # let ident = Ident::new("var", Span::call_site()); -/// # let msg = format!("try to interpolate: {}", ident); -/// quote! { -/// #[doc = #msg] -/// /// -/// /// ... 
-/// } -/// # ; -/// ``` -/// -/// <p><br></p> -/// -/// ### Indexing into a tuple struct -/// -/// When interpolating indices of a tuple or tuple struct, we need them not to -/// appears suffixed as integer literals by interpolating them as [`syn::Index`] -/// instead. -/// -/// [`syn::Index`]: https://docs.rs/syn/1.0/syn/struct.Index.html -/// -/// ```compile_fail -/// let i = 0usize..self.fields.len(); -/// -/// // expands to 0 + self.0usize.heap_size() + self.1usize.heap_size() + ... -/// // which is not valid syntax -/// quote! { -/// 0 #( + self.#i.heap_size() )* -/// } -/// ``` -/// -/// ``` -/// # use proc_macro2::{Ident, TokenStream}; -/// # use quote::quote; -/// # -/// # mod syn { -/// # use proc_macro2::{Literal, TokenStream}; -/// # use quote::{ToTokens, TokenStreamExt}; -/// # -/// # pub struct Index(usize); -/// # -/// # impl From<usize> for Index { -/// # fn from(i: usize) -> Self { -/// # Index(i) -/// # } -/// # } -/// # -/// # impl ToTokens for Index { -/// # fn to_tokens(&self, tokens: &mut TokenStream) { -/// # tokens.append(Literal::usize_unsuffixed(self.0)); -/// # } -/// # } -/// # } -/// # -/// # struct Struct { -/// # fields: Vec<Ident>, -/// # } -/// # -/// # impl Struct { -/// # fn example(&self) -> TokenStream { -/// let i = (0..self.fields.len()).map(syn::Index::from); -/// -/// // expands to 0 + self.0.heap_size() + self.1.heap_size() + ... -/// quote! { -/// 0 #( + self.#i.heap_size() )* -/// } -/// # } -/// # } -/// ``` -#[cfg(doc)] -#[macro_export] -macro_rules! quote { - ($($tt:tt)*) => { - ... +macro_rules! __quote { + ($quote:item) => { + /// The whole point. + /// + /// Performs variable interpolation against the input and produces it as + /// [`proc_macro2::TokenStream`]. + /// + /// Note: for returning tokens to the compiler in a procedural macro, use + /// `.into()` on the result to convert to [`proc_macro::TokenStream`]. + /// + /// <br> + /// + /// # Interpolation + /// + /// Variable interpolation is done with `#var` (similar to `$var` in + /// `macro_rules!` macros). This grabs the `var` variable that is currently in + /// scope and inserts it in that location in the output tokens. Any type + /// implementing the [`ToTokens`] trait can be interpolated. This includes most + /// Rust primitive types as well as most of the syntax tree types from the [Syn] + /// crate. + /// + /// [Syn]: https://github.com/dtolnay/syn + /// + /// Repetition is done using `#(...)*` or `#(...),*` again similar to + /// `macro_rules!`. This iterates through the elements of any variable + /// interpolated within the repetition and inserts a copy of the repetition body + /// for each one. The variables in an interpolation may be a `Vec`, slice, + /// `BTreeSet`, or any `Iterator`. + /// + /// - `#(#var)*` — no separators + /// - `#(#var),*` — the character before the asterisk is used as a separator + /// - `#( struct #var; )*` — the repetition can contain other tokens + /// - `#( #k => println!("{}", #v), )*` — even multiple interpolations + /// + /// <br> + /// + /// # Hygiene + /// + /// Any interpolated tokens preserve the `Span` information provided by their + /// `ToTokens` implementation. Tokens that originate within the `quote!` + /// invocation are spanned with [`Span::call_site()`]. + /// + /// [`Span::call_site()`]: proc_macro2::Span::call_site + /// + /// A different span can be provided through the [`quote_spanned!`] macro. + /// + /// <br> + /// + /// # Return type + /// + /// The macro evaluates to an expression of type `proc_macro2::TokenStream`. 
+ /// Meanwhile Rust procedural macros are expected to return the type + /// `proc_macro::TokenStream`. + /// + /// The difference between the two types is that `proc_macro` types are entirely + /// specific to procedural macros and cannot ever exist in code outside of a + /// procedural macro, while `proc_macro2` types may exist anywhere including + /// tests and non-macro code like main.rs and build.rs. This is why even the + /// procedural macro ecosystem is largely built around `proc_macro2`, because + /// that ensures the libraries are unit testable and accessible in non-macro + /// contexts. + /// + /// There is a [`From`]-conversion in both directions so returning the output of + /// `quote!` from a procedural macro usually looks like `tokens.into()` or + /// `proc_macro::TokenStream::from(tokens)`. + /// + /// <br> + /// + /// # Examples + /// + /// ### Procedural macro + /// + /// The structure of a basic procedural macro is as follows. Refer to the [Syn] + /// crate for further useful guidance on using `quote!` as part of a procedural + /// macro. + /// + /// [Syn]: https://github.com/dtolnay/syn + /// + /// ``` + /// # #[cfg(any())] + /// extern crate proc_macro; + /// # extern crate proc_macro2; + /// + /// # #[cfg(any())] + /// use proc_macro::TokenStream; + /// # use proc_macro2::TokenStream; + /// use quote::quote; + /// + /// # const IGNORE_TOKENS: &'static str = stringify! { + /// #[proc_macro_derive(HeapSize)] + /// # }; + /// pub fn derive_heap_size(input: TokenStream) -> TokenStream { + /// // Parse the input and figure out what implementation to generate... + /// # const IGNORE_TOKENS: &'static str = stringify! { + /// let name = /* ... */; + /// let expr = /* ... */; + /// # }; + /// # + /// # let name = 0; + /// # let expr = 0; + /// + /// let expanded = quote! { + /// // The generated impl. + /// impl heapsize::HeapSize for #name { + /// fn heap_size_of_children(&self) -> usize { + /// #expr + /// } + /// } + /// }; + /// + /// // Hand the output tokens back to the compiler. + /// TokenStream::from(expanded) + /// } + /// ``` + /// + /// <p><br></p> + /// + /// ### Combining quoted fragments + /// + /// Usually you don't end up constructing an entire final `TokenStream` in one + /// piece. Different parts may come from different helper functions. The tokens + /// produced by `quote!` themselves implement `ToTokens` and so can be + /// interpolated into later `quote!` invocations to build up a final result. + /// + /// ``` + /// # use quote::quote; + /// # + /// let type_definition = quote! {...}; + /// let methods = quote! {...}; + /// + /// let tokens = quote! { + /// #type_definition + /// #methods + /// }; + /// ``` + /// + /// <p><br></p> + /// + /// ### Constructing identifiers + /// + /// Suppose we have an identifier `ident` which came from somewhere in a macro + /// input and we need to modify it in some way for the macro output. Let's + /// consider prepending the identifier with an underscore. + /// + /// Simply interpolating the identifier next to an underscore will not have the + /// behavior of concatenating them. The underscore and the identifier will + /// continue to be two separate tokens as if you had written `_ x`. + /// + /// ``` + /// # use proc_macro2::{self as syn, Span}; + /// # use quote::quote; + /// # + /// # let ident = syn::Ident::new("i", Span::call_site()); + /// # + /// // incorrect + /// quote! { + /// let mut _#ident = 0; + /// } + /// # ; + /// ``` + /// + /// The solution is to build a new identifier token with the correct value. 
As + /// this is such a common case, the [`format_ident!`] macro provides a + /// convenient utility for doing so correctly. + /// + /// ``` + /// # use proc_macro2::{Ident, Span}; + /// # use quote::{format_ident, quote}; + /// # + /// # let ident = Ident::new("i", Span::call_site()); + /// # + /// let varname = format_ident!("_{}", ident); + /// quote! { + /// let mut #varname = 0; + /// } + /// # ; + /// ``` + /// + /// Alternatively, the APIs provided by Syn and proc-macro2 can be used to + /// directly build the identifier. This is roughly equivalent to the above, but + /// will not handle `ident` being a raw identifier. + /// + /// ``` + /// # use proc_macro2::{self as syn, Span}; + /// # use quote::quote; + /// # + /// # let ident = syn::Ident::new("i", Span::call_site()); + /// # + /// let concatenated = format!("_{}", ident); + /// let varname = syn::Ident::new(&concatenated, ident.span()); + /// quote! { + /// let mut #varname = 0; + /// } + /// # ; + /// ``` + /// + /// <p><br></p> + /// + /// ### Making method calls + /// + /// Let's say our macro requires some type specified in the macro input to have + /// a constructor called `new`. We have the type in a variable called + /// `field_type` of type `syn::Type` and want to invoke the constructor. + /// + /// ``` + /// # use quote::quote; + /// # + /// # let field_type = quote!(...); + /// # + /// // incorrect + /// quote! { + /// let value = #field_type::new(); + /// } + /// # ; + /// ``` + /// + /// This works only sometimes. If `field_type` is `String`, the expanded code + /// contains `String::new()` which is fine. But if `field_type` is something + /// like `Vec<i32>` then the expanded code is `Vec<i32>::new()` which is invalid + /// syntax. Ordinarily in handwritten Rust we would write `Vec::<i32>::new()` + /// but for macros often the following is more convenient. + /// + /// ``` + /// # use quote::quote; + /// # + /// # let field_type = quote!(...); + /// # + /// quote! { + /// let value = <#field_type>::new(); + /// } + /// # ; + /// ``` + /// + /// This expands to `<Vec<i32>>::new()` which behaves correctly. + /// + /// A similar pattern is appropriate for trait methods. + /// + /// ``` + /// # use quote::quote; + /// # + /// # let field_type = quote!(...); + /// # + /// quote! { + /// let value = <#field_type as core::default::Default>::default(); + /// } + /// # ; + /// ``` + /// + /// <p><br></p> + /// + /// ### Interpolating text inside of doc comments + /// + /// Neither doc comments nor string literals get interpolation behavior in + /// quote: + /// + /// ```compile_fail + /// quote! { + /// /// try to interpolate: #ident + /// /// + /// /// ... + /// } + /// ``` + /// + /// ```compile_fail + /// quote! { + /// #[doc = "try to interpolate: #ident"] + /// } + /// ``` + /// + /// Instead the best way to build doc comments that involve variables is by + /// formatting the doc string literal outside of quote. + /// + /// ```rust + /// # use proc_macro2::{Ident, Span}; + /// # use quote::quote; + /// # + /// # const IGNORE: &str = stringify! { + /// let msg = format!(...); + /// # }; + /// # + /// # let ident = Ident::new("var", Span::call_site()); + /// # let msg = format!("try to interpolate: {}", ident); + /// quote! { + /// #[doc = #msg] + /// /// + /// /// ... 
+ /// } + /// # ; + /// ``` + /// + /// <p><br></p> + /// + /// ### Indexing into a tuple struct + /// + /// When interpolating indices of a tuple or tuple struct, we need them not to + /// appears suffixed as integer literals by interpolating them as [`syn::Index`] + /// instead. + /// + /// [`syn::Index`]: https://docs.rs/syn/2.0/syn/struct.Index.html + /// + /// ```compile_fail + /// let i = 0usize..self.fields.len(); + /// + /// // expands to 0 + self.0usize.heap_size() + self.1usize.heap_size() + ... + /// // which is not valid syntax + /// quote! { + /// 0 #( + self.#i.heap_size() )* + /// } + /// ``` + /// + /// ``` + /// # use proc_macro2::{Ident, TokenStream}; + /// # use quote::quote; + /// # + /// # mod syn { + /// # use proc_macro2::{Literal, TokenStream}; + /// # use quote::{ToTokens, TokenStreamExt}; + /// # + /// # pub struct Index(usize); + /// # + /// # impl From<usize> for Index { + /// # fn from(i: usize) -> Self { + /// # Index(i) + /// # } + /// # } + /// # + /// # impl ToTokens for Index { + /// # fn to_tokens(&self, tokens: &mut TokenStream) { + /// # tokens.append(Literal::usize_unsuffixed(self.0)); + /// # } + /// # } + /// # } + /// # + /// # struct Struct { + /// # fields: Vec<Ident>, + /// # } + /// # + /// # impl Struct { + /// # fn example(&self) -> TokenStream { + /// let i = (0..self.fields.len()).map(syn::Index::from); + /// + /// // expands to 0 + self.0.heap_size() + self.1.heap_size() + ... + /// quote! { + /// 0 #( + self.#i.heap_size() )* + /// } + /// # } + /// # } + /// ``` + $quote }; } -#[cfg(not(doc))] -#[macro_export] -macro_rules! quote { - () => { - $crate::__private::TokenStream::new() - }; - - // Special case rule for a single tt, for performance. - ($tt:tt) => {{ - let mut _s = $crate::__private::TokenStream::new(); - $crate::quote_token!{$tt _s} - _s - }}; +#[cfg(doc)] +__quote![ + #[macro_export] + macro_rules! quote { + ($($tt:tt)*) => { + ... + }; + } +]; - // Special case rules for two tts, for performance. - (# $var:ident) => {{ - let mut _s = $crate::__private::TokenStream::new(); - $crate::ToTokens::to_tokens(&$var, &mut _s); - _s - }}; - ($tt1:tt $tt2:tt) => {{ - let mut _s = $crate::__private::TokenStream::new(); - $crate::quote_token!{$tt1 _s} - $crate::quote_token!{$tt2 _s} - _s - }}; +#[cfg(not(doc))] +__quote![ + #[macro_export] + macro_rules! quote { + () => { + $crate::__private::TokenStream::new() + }; - // Rule for any other number of tokens. - ($($tt:tt)*) => {{ - let mut _s = $crate::__private::TokenStream::new(); - $crate::quote_each_token!{_s $($tt)*} - _s - }}; + // Special case rule for a single tt, for performance. + ($tt:tt) => {{ + let mut _s = $crate::__private::TokenStream::new(); + $crate::quote_token!{$tt _s} + _s + }}; + + // Special case rules for two tts, for performance. + (# $var:ident) => {{ + let mut _s = $crate::__private::TokenStream::new(); + $crate::ToTokens::to_tokens(&$var, &mut _s); + _s + }}; + ($tt1:tt $tt2:tt) => {{ + let mut _s = $crate::__private::TokenStream::new(); + $crate::quote_token!{$tt1 _s} + $crate::quote_token!{$tt2 _s} + _s + }}; + + // Rule for any other number of tokens. + ($($tt:tt)*) => {{ + let mut _s = $crate::__private::TokenStream::new(); + $crate::quote_each_token!{_s $($tt)*} + _s + }}; + } +]; + +macro_rules! __quote_spanned { + ($quote_spanned:item) => { + /// Same as `quote!`, but applies a given span to all tokens originating within + /// the macro invocation. 
+ /// + /// <br> + /// + /// # Syntax + /// + /// A span expression of type [`Span`], followed by `=>`, followed by the tokens + /// to quote. The span expression should be brief — use a variable for + /// anything more than a few characters. There should be no space before the + /// `=>` token. + /// + /// [`Span`]: proc_macro2::Span + /// + /// ``` + /// # use proc_macro2::Span; + /// # use quote::quote_spanned; + /// # + /// # const IGNORE_TOKENS: &'static str = stringify! { + /// let span = /* ... */; + /// # }; + /// # let span = Span::call_site(); + /// # let init = 0; + /// + /// // On one line, use parentheses. + /// let tokens = quote_spanned!(span=> Box::into_raw(Box::new(#init))); + /// + /// // On multiple lines, place the span at the top and use braces. + /// let tokens = quote_spanned! {span=> + /// Box::into_raw(Box::new(#init)) + /// }; + /// ``` + /// + /// The lack of space before the `=>` should look jarring to Rust programmers + /// and this is intentional. The formatting is designed to be visibly + /// off-balance and draw the eye a particular way, due to the span expression + /// being evaluated in the context of the procedural macro and the remaining + /// tokens being evaluated in the generated code. + /// + /// <br> + /// + /// # Hygiene + /// + /// Any interpolated tokens preserve the `Span` information provided by their + /// `ToTokens` implementation. Tokens that originate within the `quote_spanned!` + /// invocation are spanned with the given span argument. + /// + /// <br> + /// + /// # Example + /// + /// The following procedural macro code uses `quote_spanned!` to assert that a + /// particular Rust type implements the [`Sync`] trait so that references can be + /// safely shared between threads. + /// + /// ``` + /// # use quote::{quote_spanned, TokenStreamExt, ToTokens}; + /// # use proc_macro2::{Span, TokenStream}; + /// # + /// # struct Type; + /// # + /// # impl Type { + /// # fn span(&self) -> Span { + /// # Span::call_site() + /// # } + /// # } + /// # + /// # impl ToTokens for Type { + /// # fn to_tokens(&self, _tokens: &mut TokenStream) {} + /// # } + /// # + /// # let ty = Type; + /// # let call_site = Span::call_site(); + /// # + /// let ty_span = ty.span(); + /// let assert_sync = quote_spanned! {ty_span=> + /// struct _AssertSync where #ty: Sync; + /// }; + /// ``` + /// + /// If the assertion fails, the user will see an error like the following. The + /// input span of their type is highlighted in the error. + /// + /// ```text + /// error[E0277]: the trait bound `*const (): std::marker::Sync` is not satisfied + /// --> src/main.rs:10:21 + /// | + /// 10 | static ref PTR: *const () = &(); + /// | ^^^^^^^^^ `*const ()` cannot be shared between threads safely + /// ``` + /// + /// In this example it is important for the where-clause to be spanned with the + /// line/column information of the user's input type so that error messages are + /// placed appropriately by the compiler. + $quote_spanned + }; } -/// Same as `quote!`, but applies a given span to all tokens originating within -/// the macro invocation. -/// -/// <br> -/// -/// # Syntax -/// -/// A span expression of type [`Span`], followed by `=>`, followed by the tokens -/// to quote. The span expression should be brief — use a variable for -/// anything more than a few characters. There should be no space before the -/// `=>` token. 
-/// -/// [`Span`]: https://docs.rs/proc-macro2/1.0/proc_macro2/struct.Span.html -/// -/// ``` -/// # use proc_macro2::Span; -/// # use quote::quote_spanned; -/// # -/// # const IGNORE_TOKENS: &'static str = stringify! { -/// let span = /* ... */; -/// # }; -/// # let span = Span::call_site(); -/// # let init = 0; -/// -/// // On one line, use parentheses. -/// let tokens = quote_spanned!(span=> Box::into_raw(Box::new(#init))); -/// -/// // On multiple lines, place the span at the top and use braces. -/// let tokens = quote_spanned! {span=> -/// Box::into_raw(Box::new(#init)) -/// }; -/// ``` -/// -/// The lack of space before the `=>` should look jarring to Rust programmers -/// and this is intentional. The formatting is designed to be visibly -/// off-balance and draw the eye a particular way, due to the span expression -/// being evaluated in the context of the procedural macro and the remaining -/// tokens being evaluated in the generated code. -/// -/// <br> -/// -/// # Hygiene -/// -/// Any interpolated tokens preserve the `Span` information provided by their -/// `ToTokens` implementation. Tokens that originate within the `quote_spanned!` -/// invocation are spanned with the given span argument. -/// -/// <br> -/// -/// # Example -/// -/// The following procedural macro code uses `quote_spanned!` to assert that a -/// particular Rust type implements the [`Sync`] trait so that references can be -/// safely shared between threads. -/// -/// [`Sync`]: https://doc.rust-lang.org/std/marker/trait.Sync.html -/// -/// ``` -/// # use quote::{quote_spanned, TokenStreamExt, ToTokens}; -/// # use proc_macro2::{Span, TokenStream}; -/// # -/// # struct Type; -/// # -/// # impl Type { -/// # fn span(&self) -> Span { -/// # Span::call_site() -/// # } -/// # } -/// # -/// # impl ToTokens for Type { -/// # fn to_tokens(&self, _tokens: &mut TokenStream) {} -/// # } -/// # -/// # let ty = Type; -/// # let call_site = Span::call_site(); -/// # -/// let ty_span = ty.span(); -/// let assert_sync = quote_spanned! {ty_span=> -/// struct _AssertSync where #ty: Sync; -/// }; -/// ``` -/// -/// If the assertion fails, the user will see an error like the following. The -/// input span of their type is highlighted in the error. -/// -/// ```text -/// error[E0277]: the trait bound `*const (): std::marker::Sync` is not satisfied -/// --> src/main.rs:10:21 -/// | -/// 10 | static ref PTR: *const () = &(); -/// | ^^^^^^^^^ `*const ()` cannot be shared between threads safely -/// ``` -/// -/// In this example it is important for the where-clause to be spanned with the -/// line/column information of the user's input type so that error messages are -/// placed appropriately by the compiler. #[cfg(doc)] -#[macro_export] -macro_rules! quote_spanned { - ($span:expr=> $($tt:tt)*) => { - ... - }; -} +__quote_spanned![ + #[macro_export] + macro_rules! quote_spanned { + ($span:expr=> $($tt:tt)*) => { + ... + }; + } +]; #[cfg(not(doc))] -#[macro_export] -macro_rules! quote_spanned { - ($span:expr=>) => {{ - let _: $crate::__private::Span = $crate::__private::get_span($span).__into_span(); - $crate::__private::TokenStream::new() - }}; - - // Special case rule for a single tt, for performance. - ($span:expr=> $tt:tt) => {{ - let mut _s = $crate::__private::TokenStream::new(); - let _span: $crate::__private::Span = $crate::__private::get_span($span).__into_span(); - $crate::quote_token_spanned!{$tt _s _span} - _s - }}; - - // Special case rules for two tts, for performance. 
- ($span:expr=> # $var:ident) => {{ - let mut _s = $crate::__private::TokenStream::new(); - let _: $crate::__private::Span = $crate::__private::get_span($span).__into_span(); - $crate::ToTokens::to_tokens(&$var, &mut _s); - _s - }}; - ($span:expr=> $tt1:tt $tt2:tt) => {{ - let mut _s = $crate::__private::TokenStream::new(); - let _span: $crate::__private::Span = $crate::__private::get_span($span).__into_span(); - $crate::quote_token_spanned!{$tt1 _s _span} - $crate::quote_token_spanned!{$tt2 _s _span} - _s - }}; - - // Rule for any other number of tokens. - ($span:expr=> $($tt:tt)*) => {{ - let mut _s = $crate::__private::TokenStream::new(); - let _span: $crate::__private::Span = $crate::__private::get_span($span).__into_span(); - $crate::quote_each_token_spanned!{_s _span $($tt)*} - _s - }}; -} +__quote_spanned![ + #[macro_export] + macro_rules! quote_spanned { + ($span:expr=>) => {{ + let _: $crate::__private::Span = $crate::__private::get_span($span).__into_span(); + $crate::__private::TokenStream::new() + }}; + + // Special case rule for a single tt, for performance. + ($span:expr=> $tt:tt) => {{ + let mut _s = $crate::__private::TokenStream::new(); + let _span: $crate::__private::Span = $crate::__private::get_span($span).__into_span(); + $crate::quote_token_spanned!{$tt _s _span} + _s + }}; + + // Special case rules for two tts, for performance. + ($span:expr=> # $var:ident) => {{ + let mut _s = $crate::__private::TokenStream::new(); + let _: $crate::__private::Span = $crate::__private::get_span($span).__into_span(); + $crate::ToTokens::to_tokens(&$var, &mut _s); + _s + }}; + ($span:expr=> $tt1:tt $tt2:tt) => {{ + let mut _s = $crate::__private::TokenStream::new(); + let _span: $crate::__private::Span = $crate::__private::get_span($span).__into_span(); + $crate::quote_token_spanned!{$tt1 _s _span} + $crate::quote_token_spanned!{$tt2 _s _span} + _s + }}; + + // Rule for any other number of tokens. + ($span:expr=> $($tt:tt)*) => {{ + let mut _s = $crate::__private::TokenStream::new(); + let _span: $crate::__private::Span = $crate::__private::get_span($span).__into_span(); + $crate::quote_each_token_spanned!{_s _span $($tt)*} + _s + }}; + } +]; // Extract the names of all #metavariables and pass them to the $call macro. // @@ -878,9 +889,9 @@ macro_rules! quote_token_with_context { // A repetition with no separator. ($tokens:ident $b3:tt $b2:tt $b1:tt (#) ( $($inner:tt)* ) * $a3:tt) => {{ use $crate::__private::ext::*; - let has_iter = $crate::__private::ThereIsNoIteratorInRepetition; + let has_iter = $crate::__private::HasIterator::<false>; $crate::pounded_var_names!{quote_bind_into_iter!(has_iter) () $($inner)*} - let _: $crate::__private::HasIterator = has_iter; + <_ as $crate::__private::CheckHasIterator<true>>::check(has_iter); // This is `while true` instead of `loop` because if there are no // iterators used inside of this repetition then the body would not // contain any `break`, so the compiler would emit unreachable code @@ -901,9 +912,9 @@ macro_rules! 
quote_token_with_context { ($tokens:ident $b3:tt $b2:tt $b1:tt (#) ( $($inner:tt)* ) $sep:tt *) => {{ use $crate::__private::ext::*; let mut _i = 0usize; - let has_iter = $crate::__private::ThereIsNoIteratorInRepetition; + let has_iter = $crate::__private::HasIterator::<false>; $crate::pounded_var_names!{quote_bind_into_iter!(has_iter) () $($inner)*} - let _: $crate::__private::HasIterator = has_iter; + <_ as $crate::__private::CheckHasIterator<true>>::check(has_iter); while true { $crate::pounded_var_names!{quote_bind_next_or_break!() () $($inner)*} if _i > 0 { @@ -948,9 +959,9 @@ macro_rules! quote_token_with_context_spanned { ($tokens:ident $span:ident $b3:tt $b2:tt $b1:tt (#) ( $($inner:tt)* ) * $a3:tt) => {{ use $crate::__private::ext::*; - let has_iter = $crate::__private::ThereIsNoIteratorInRepetition; + let has_iter = $crate::__private::HasIterator::<false>; $crate::pounded_var_names!{quote_bind_into_iter!(has_iter) () $($inner)*} - let _: $crate::__private::HasIterator = has_iter; + <_ as $crate::__private::CheckHasIterator<true>>::check(has_iter); while true { $crate::pounded_var_names!{quote_bind_next_or_break!() () $($inner)*} $crate::quote_each_token_spanned!{$tokens $span $($inner)*} @@ -962,9 +973,9 @@ macro_rules! quote_token_with_context_spanned { ($tokens:ident $span:ident $b3:tt $b2:tt $b1:tt (#) ( $($inner:tt)* ) $sep:tt *) => {{ use $crate::__private::ext::*; let mut _i = 0usize; - let has_iter = $crate::__private::ThereIsNoIteratorInRepetition; + let has_iter = $crate::__private::HasIterator::<false>; $crate::pounded_var_names!{quote_bind_into_iter!(has_iter) () $($inner)*} - let _: $crate::__private::HasIterator = has_iter; + <_ as $crate::__private::CheckHasIterator<true>>::check(has_iter); while true { $crate::pounded_var_names!{quote_bind_next_or_break!() () $($inner)*} if _i > 0 { diff --git a/vendor/quote/src/runtime.rs b/vendor/quote/src/runtime.rs index eff044a9..28fb60c7 100644 --- a/vendor/quote/src/runtime.rs +++ b/vendor/quote/src/runtime.rs @@ -18,38 +18,50 @@ pub type Span = proc_macro2::Span; pub type TokenStream = proc_macro2::TokenStream; #[doc(hidden)] -pub struct HasIterator; // True -#[doc(hidden)] -pub struct ThereIsNoIteratorInRepetition; // False +pub struct HasIterator<const B: bool>; -impl BitOr<ThereIsNoIteratorInRepetition> for ThereIsNoIteratorInRepetition { - type Output = ThereIsNoIteratorInRepetition; - fn bitor(self, _rhs: ThereIsNoIteratorInRepetition) -> ThereIsNoIteratorInRepetition { - ThereIsNoIteratorInRepetition +impl BitOr<HasIterator<false>> for HasIterator<false> { + type Output = HasIterator<false>; + fn bitor(self, _rhs: HasIterator<false>) -> HasIterator<false> { + HasIterator::<false> } } -impl BitOr<ThereIsNoIteratorInRepetition> for HasIterator { - type Output = HasIterator; - fn bitor(self, _rhs: ThereIsNoIteratorInRepetition) -> HasIterator { - HasIterator +impl BitOr<HasIterator<false>> for HasIterator<true> { + type Output = HasIterator<true>; + fn bitor(self, _rhs: HasIterator<false>) -> HasIterator<true> { + HasIterator::<true> } } -impl BitOr<HasIterator> for ThereIsNoIteratorInRepetition { - type Output = HasIterator; - fn bitor(self, _rhs: HasIterator) -> HasIterator { - HasIterator +impl BitOr<HasIterator<true>> for HasIterator<false> { + type Output = HasIterator<true>; + fn bitor(self, _rhs: HasIterator<true>) -> HasIterator<true> { + HasIterator::<true> } } -impl BitOr<HasIterator> for HasIterator { - type Output = HasIterator; - fn bitor(self, _rhs: HasIterator) -> HasIterator { - HasIterator +impl 
BitOr<HasIterator<true>> for HasIterator<true> { + type Output = HasIterator<true>; + fn bitor(self, _rhs: HasIterator<true>) -> HasIterator<true> { + HasIterator::<true> } } +#[doc(hidden)] +#[cfg_attr( + not(no_diagnostic_namespace), + diagnostic::on_unimplemented( + message = "repetition contains no interpolated value that is an iterator", + label = "none of the values interpolated inside this repetition are iterable" + ) +)] +pub trait CheckHasIterator<const B: bool>: Sized { + fn check(self) {} +} + +impl CheckHasIterator<true> for HasIterator<true> {} + /// Extension traits used by the implementation of `quote!`. These are defined /// in separate traits, rather than as a single trait due to ambiguity issues. /// @@ -58,8 +70,7 @@ impl BitOr<HasIterator> for HasIterator { /// the returned value should be idempotent. #[doc(hidden)] pub mod ext { - use super::RepInterp; - use super::{HasIterator as HasIter, ThereIsNoIteratorInRepetition as DoesNotHaveIter}; + use super::{HasIterator, RepInterp}; use crate::ToTokens; use alloc::collections::btree_set::{self, BTreeSet}; use core::slice; @@ -67,8 +78,8 @@ pub mod ext { /// Extension trait providing the `quote_into_iter` method on iterators. #[doc(hidden)] pub trait RepIteratorExt: Iterator + Sized { - fn quote_into_iter(self) -> (Self, HasIter) { - (self, HasIter) + fn quote_into_iter(self) -> (Self, HasIterator<true>) { + (self, HasIterator::<true>) } } @@ -81,13 +92,13 @@ pub mod ext { pub trait RepToTokensExt { /// Pretend to be an iterator for the purposes of `quote_into_iter`. /// This allows repeated calls to `quote_into_iter` to continue - /// correctly returning DoesNotHaveIter. + /// correctly returning HasIterator<false>. fn next(&self) -> Option<&Self> { Some(self) } - fn quote_into_iter(&self) -> (&Self, DoesNotHaveIter) { - (self, DoesNotHaveIter) + fn quote_into_iter(&self) -> (&Self, HasIterator<false>) { + (self, HasIterator::<false>) } } @@ -99,21 +110,21 @@ pub mod ext { pub trait RepAsIteratorExt<'q> { type Iter: Iterator; - fn quote_into_iter(&'q self) -> (Self::Iter, HasIter); + fn quote_into_iter(&'q self) -> (Self::Iter, HasIterator<true>); } - impl<'q, 'a, T: RepAsIteratorExt<'q> + ?Sized> RepAsIteratorExt<'q> for &'a T { + impl<'q, T: RepAsIteratorExt<'q> + ?Sized> RepAsIteratorExt<'q> for &T { type Iter = T::Iter; - fn quote_into_iter(&'q self) -> (Self::Iter, HasIter) { + fn quote_into_iter(&'q self) -> (Self::Iter, HasIterator<true>) { <T as RepAsIteratorExt>::quote_into_iter(*self) } } - impl<'q, 'a, T: RepAsIteratorExt<'q> + ?Sized> RepAsIteratorExt<'q> for &'a mut T { + impl<'q, T: RepAsIteratorExt<'q> + ?Sized> RepAsIteratorExt<'q> for &mut T { type Iter = T::Iter; - fn quote_into_iter(&'q self) -> (Self::Iter, HasIter) { + fn quote_into_iter(&'q self) -> (Self::Iter, HasIterator<true>) { <T as RepAsIteratorExt>::quote_into_iter(*self) } } @@ -121,31 +132,39 @@ pub mod ext { impl<'q, T: 'q> RepAsIteratorExt<'q> for [T] { type Iter = slice::Iter<'q, T>; - fn quote_into_iter(&'q self) -> (Self::Iter, HasIter) { - (self.iter(), HasIter) + fn quote_into_iter(&'q self) -> (Self::Iter, HasIterator<true>) { + (self.iter(), HasIterator::<true>) + } + } + + impl<'q, T: 'q, const N: usize> RepAsIteratorExt<'q> for [T; N] { + type Iter = slice::Iter<'q, T>; + + fn quote_into_iter(&'q self) -> (Self::Iter, HasIterator<true>) { + (self.iter(), HasIterator::<true>) } } impl<'q, T: 'q> RepAsIteratorExt<'q> for Vec<T> { type Iter = slice::Iter<'q, T>; - fn quote_into_iter(&'q self) -> (Self::Iter, HasIter) { - (self.iter(), 
HasIter) + fn quote_into_iter(&'q self) -> (Self::Iter, HasIterator<true>) { + (self.iter(), HasIterator::<true>) } } impl<'q, T: 'q> RepAsIteratorExt<'q> for BTreeSet<T> { type Iter = btree_set::Iter<'q, T>; - fn quote_into_iter(&'q self) -> (Self::Iter, HasIter) { - (self.iter(), HasIter) + fn quote_into_iter(&'q self) -> (Self::Iter, HasIterator<true>) { + (self.iter(), HasIterator::<true>) } } impl<'q, T: RepAsIteratorExt<'q>> RepAsIteratorExt<'q> for RepInterp<T> { type Iter = T::Iter; - fn quote_into_iter(&'q self) -> (Self::Iter, HasIter) { + fn quote_into_iter(&'q self) -> (Self::Iter, HasIterator<true>) { self.0.quote_into_iter() } } @@ -264,19 +283,20 @@ pub fn parse(tokens: &mut TokenStream, s: &str) { #[doc(hidden)] pub fn parse_spanned(tokens: &mut TokenStream, span: Span, s: &str) { let s: TokenStream = s.parse().expect("invalid token stream"); - tokens.extend(s.into_iter().map(|t| respan_token_tree(t, span))); + for token in s { + tokens.append(respan_token_tree(token, span)); + } } // Token tree with every span replaced by the given one. fn respan_token_tree(mut token: TokenTree, span: Span) -> TokenTree { match &mut token { TokenTree::Group(g) => { - let stream = g - .stream() - .into_iter() - .map(|token| respan_token_tree(token, span)) - .collect(); - *g = Group::new(g.delimiter(), stream); + let mut tokens = TokenStream::new(); + for token in g.stream() { + tokens.append(respan_token_tree(token, span)); + } + *g = Group::new(g.delimiter(), tokens); g.set_span(span); } other => other.set_span(span), @@ -297,68 +317,21 @@ pub fn push_ident_spanned(tokens: &mut TokenStream, span: Span, s: &str) { #[doc(hidden)] pub fn push_lifetime(tokens: &mut TokenStream, lifetime: &str) { - struct Lifetime<'a> { - name: &'a str, - state: u8, - } - - impl<'a> Iterator for Lifetime<'a> { - type Item = TokenTree; - - fn next(&mut self) -> Option<Self::Item> { - match self.state { - 0 => { - self.state = 1; - Some(TokenTree::Punct(Punct::new('\'', Spacing::Joint))) - } - 1 => { - self.state = 2; - Some(TokenTree::Ident(Ident::new(self.name, Span::call_site()))) - } - _ => None, - } - } - } - - tokens.extend(Lifetime { - name: &lifetime[1..], - state: 0, - }); + tokens.append(TokenTree::Punct(Punct::new('\'', Spacing::Joint))); + tokens.append(TokenTree::Ident(Ident::new( + &lifetime[1..], + Span::call_site(), + ))); } #[doc(hidden)] pub fn push_lifetime_spanned(tokens: &mut TokenStream, span: Span, lifetime: &str) { - struct Lifetime<'a> { - name: &'a str, - span: Span, - state: u8, - } - - impl<'a> Iterator for Lifetime<'a> { - type Item = TokenTree; - - fn next(&mut self) -> Option<Self::Item> { - match self.state { - 0 => { - self.state = 1; - let mut apostrophe = Punct::new('\'', Spacing::Joint); - apostrophe.set_span(self.span); - Some(TokenTree::Punct(apostrophe)) - } - 1 => { - self.state = 2; - Some(TokenTree::Ident(Ident::new(self.name, self.span))) - } - _ => None, - } - } - } - - tokens.extend(Lifetime { - name: &lifetime[1..], - span, - state: 0, - }); + tokens.append(TokenTree::Punct({ + let mut apostrophe = Punct::new('\'', Spacing::Joint); + apostrophe.set_span(span); + apostrophe + })); + tokens.append(TokenTree::Ident(Ident::new(&lifetime[1..], span))); } macro_rules! 
push_punct { diff --git a/vendor/quote/src/spanned.rs b/vendor/quote/src/spanned.rs index 6eba6444..6afc6b30 100644 --- a/vendor/quote/src/spanned.rs +++ b/vendor/quote/src/spanned.rs @@ -28,9 +28,8 @@ impl<T: ?Sized + ToTokens> Spanned for T { fn join_spans(tokens: TokenStream) -> Span { let mut iter = tokens.into_iter().map(|tt| tt.span()); - let first = match iter.next() { - Some(span) => span, - None => return Span::call_site(), + let Some(first) = iter.next() else { + return Span::call_site(); }; iter.fold(None, |_prev, next| Some(next)) diff --git a/vendor/quote/src/to_tokens.rs b/vendor/quote/src/to_tokens.rs index 23b6ec2c..f373092b 100644 --- a/vendor/quote/src/to_tokens.rs +++ b/vendor/quote/src/to_tokens.rs @@ -3,18 +3,15 @@ use alloc::borrow::Cow; use alloc::rc::Rc; use core::iter; use proc_macro2::{Group, Ident, Literal, Punct, Span, TokenStream, TokenTree}; +use std::ffi::{CStr, CString}; /// Types that can be interpolated inside a `quote!` invocation. -/// -/// [`quote!`]: macro.quote.html pub trait ToTokens { /// Write `self` to the given `TokenStream`. /// /// The token append methods provided by the [`TokenStreamExt`] extension /// trait may be useful for implementing `ToTokens`. /// - /// [`TokenStreamExt`]: trait.TokenStreamExt.html - /// /// # Example /// /// Example implementation for a struct representing Rust paths like @@ -74,13 +71,13 @@ pub trait ToTokens { } } -impl<'a, T: ?Sized + ToTokens> ToTokens for &'a T { +impl<T: ?Sized + ToTokens> ToTokens for &T { fn to_tokens(&self, tokens: &mut TokenStream) { (**self).to_tokens(tokens); } } -impl<'a, T: ?Sized + ToTokens> ToTokens for &'a mut T { +impl<T: ?Sized + ToTokens> ToTokens for &mut T { fn to_tokens(&self, tokens: &mut TokenStream) { (**self).to_tokens(tokens); } @@ -106,7 +103,7 @@ impl<T: ?Sized + ToTokens> ToTokens for Rc<T> { impl<T: ToTokens> ToTokens for Option<T> { fn to_tokens(&self, tokens: &mut TokenStream) { - if let Some(ref t) = *self { + if let Some(t) = self { t.to_tokens(tokens); } } @@ -124,35 +121,88 @@ impl ToTokens for String { } } -macro_rules! 
primitive { - ($($t:ident => $name:ident)*) => { - $( - impl ToTokens for $t { - fn to_tokens(&self, tokens: &mut TokenStream) { - tokens.append(Literal::$name(*self)); - } - } - )* - }; +impl ToTokens for i8 { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append(Literal::i8_suffixed(*self)); + } +} + +impl ToTokens for i16 { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append(Literal::i16_suffixed(*self)); + } +} + +impl ToTokens for i32 { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append(Literal::i32_suffixed(*self)); + } +} + +impl ToTokens for i64 { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append(Literal::i64_suffixed(*self)); + } +} + +impl ToTokens for i128 { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append(Literal::i128_suffixed(*self)); + } +} + +impl ToTokens for isize { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append(Literal::isize_suffixed(*self)); + } +} + +impl ToTokens for u8 { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append(Literal::u8_suffixed(*self)); + } +} + +impl ToTokens for u16 { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append(Literal::u16_suffixed(*self)); + } +} + +impl ToTokens for u32 { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append(Literal::u32_suffixed(*self)); + } +} + +impl ToTokens for u64 { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append(Literal::u64_suffixed(*self)); + } } -primitive! { - i8 => i8_suffixed - i16 => i16_suffixed - i32 => i32_suffixed - i64 => i64_suffixed - i128 => i128_suffixed - isize => isize_suffixed +impl ToTokens for u128 { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append(Literal::u128_suffixed(*self)); + } +} - u8 => u8_suffixed - u16 => u16_suffixed - u32 => u32_suffixed - u64 => u64_suffixed - u128 => u128_suffixed - usize => usize_suffixed +impl ToTokens for usize { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append(Literal::usize_suffixed(*self)); + } +} - f32 => f32_suffixed - f64 => f64_suffixed +impl ToTokens for f32 { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append(Literal::f32_suffixed(*self)); + } +} + +impl ToTokens for f64 { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append(Literal::f64_suffixed(*self)); + } } impl ToTokens for char { @@ -168,6 +218,18 @@ impl ToTokens for bool { } } +impl ToTokens for CStr { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append(Literal::c_string(self)); + } +} + +impl ToTokens for CString { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append(Literal::c_string(self)); + } +} + impl ToTokens for Group { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append(self.clone()); @@ -193,14 +255,14 @@ impl ToTokens for Literal { } impl ToTokens for TokenTree { - fn to_tokens(&self, dst: &mut TokenStream) { - dst.append(self.clone()); + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append(self.clone()); } } impl ToTokens for TokenStream { - fn to_tokens(&self, dst: &mut TokenStream) { - dst.extend(iter::once(self.clone())); + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.extend(iter::once(self.clone())); } fn into_token_stream(self) -> TokenStream { diff --git a/vendor/quote/tests/compiletest.rs b/vendor/quote/tests/compiletest.rs index 7974a624..23a6a065 100644 --- a/vendor/quote/tests/compiletest.rs +++ b/vendor/quote/tests/compiletest.rs @@ -1,5 +1,5 @@ -#[rustversion::attr(not(nightly), ignore)] 
-#[cfg_attr(miri, ignore)] +#[rustversion::attr(not(nightly), ignore = "requires nightly")] +#[cfg_attr(miri, ignore = "incompatible with miri")] #[test] fn ui() { let t = trybuild::TestCases::new(); diff --git a/vendor/quote/tests/test.rs b/vendor/quote/tests/test.rs index eab4f55a..e096780e 100644 --- a/vendor/quote/tests/test.rs +++ b/vendor/quote/tests/test.rs @@ -8,11 +8,11 @@ extern crate proc_macro; -use std::borrow::Cow; -use std::collections::BTreeSet; - use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream}; use quote::{format_ident, quote, quote_spanned, TokenStreamExt}; +use std::borrow::Cow; +use std::collections::BTreeSet; +use std::ffi::{CStr, CString}; struct X; @@ -105,6 +105,9 @@ fn test_array() { let ref_slice: &[X] = &[X, X]; let _ = quote!(#(#ref_slice #ref_slice)*); + + let array_of_array: [[u8; 2]; 2] = [[0; 2]; 2]; + let _ = quote!(#(#(#array_of_array)*)*); } #[test] @@ -197,7 +200,7 @@ fn test_floating() { #e32 #e64 }; - let expected = concat!("2.345f32 2.345f64"); + let expected = "2.345f32 2.345f64"; assert_eq!(expected, tokens.to_string()); } @@ -233,6 +236,22 @@ fn test_string() { assert_eq!(expected, tokens.to_string()); } +#[test] +fn test_c_str() { + let s = CStr::from_bytes_with_nul(b"\x01 a 'b \" c\0").unwrap(); + let tokens = quote!(#s); + let expected = "c\"\\u{1} a 'b \\\" c\""; + assert_eq!(expected, tokens.to_string()); +} + +#[test] +fn test_c_string() { + let s = CString::new(&b"\x01 a 'b \" c"[..]).unwrap(); + let tokens = quote!(#s); + let expected = "c\"\\u{1} a 'b \\\" c\""; + assert_eq!(expected, tokens.to_string()); +} + #[test] fn test_interpolated_literal() { macro_rules! m { diff --git a/vendor/quote/tests/ui/does-not-have-iter-interpolated-dup.stderr b/vendor/quote/tests/ui/does-not-have-iter-interpolated-dup.stderr index 99c20a56..96af8163 100644 --- a/vendor/quote/tests/ui/does-not-have-iter-interpolated-dup.stderr +++ b/vendor/quote/tests/ui/does-not-have-iter-interpolated-dup.stderr @@ -1,11 +1,13 @@ -error[E0308]: mismatched types +error[E0277]: repetition contains no interpolated value that is an iterator --> tests/ui/does-not-have-iter-interpolated-dup.rs:8:5 | 8 | quote!(#(#nonrep #nonrep)*); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | | - | expected `HasIterator`, found `ThereIsNoIteratorInRepetition` - | expected due to this - | here the type of `has_iter` is inferred to be `ThereIsNoIteratorInRepetition` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ none of the values interpolated inside this repetition are iterable | +help: the trait `CheckHasIterator<true>` is not implemented for `HasIterator<false>` + but it is implemented for `HasIterator<true>` + --> src/runtime.rs + | + | impl CheckHasIterator<true> for HasIterator<true> {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ = note: this error originates in the macro `$crate::quote_token_with_context` which comes from the expansion of the macro `quote` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/quote/tests/ui/does-not-have-iter-interpolated.stderr b/vendor/quote/tests/ui/does-not-have-iter-interpolated.stderr index ef908131..0c0572c9 100644 --- a/vendor/quote/tests/ui/does-not-have-iter-interpolated.stderr +++ b/vendor/quote/tests/ui/does-not-have-iter-interpolated.stderr @@ -1,11 +1,13 @@ -error[E0308]: mismatched types +error[E0277]: repetition contains no interpolated value that is an iterator --> tests/ui/does-not-have-iter-interpolated.rs:8:5 | 8 | quote!(#(#nonrep)*); - | ^^^^^^^^^^^^^^^^^^^ - | | - | expected `HasIterator`, found 
`ThereIsNoIteratorInRepetition` - | expected due to this - | here the type of `has_iter` is inferred to be `ThereIsNoIteratorInRepetition` + | ^^^^^^^^^^^^^^^^^^^ none of the values interpolated inside this repetition are iterable | +help: the trait `CheckHasIterator<true>` is not implemented for `HasIterator<false>` + but it is implemented for `HasIterator<true>` + --> src/runtime.rs + | + | impl CheckHasIterator<true> for HasIterator<true> {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ = note: this error originates in the macro `$crate::quote_token_with_context` which comes from the expansion of the macro `quote` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/quote/tests/ui/does-not-have-iter-separated.stderr b/vendor/quote/tests/ui/does-not-have-iter-separated.stderr index 7c6e30f2..e899fb48 100644 --- a/vendor/quote/tests/ui/does-not-have-iter-separated.stderr +++ b/vendor/quote/tests/ui/does-not-have-iter-separated.stderr @@ -1,10 +1,13 @@ -error[E0308]: mismatched types +error[E0277]: repetition contains no interpolated value that is an iterator --> tests/ui/does-not-have-iter-separated.rs:4:5 | 4 | quote!(#(a b),*); - | ^^^^^^^^^^^^^^^^ - | | - | expected `HasIterator`, found `ThereIsNoIteratorInRepetition` - | expected due to this + | ^^^^^^^^^^^^^^^^ none of the values interpolated inside this repetition are iterable | +help: the trait `CheckHasIterator<true>` is not implemented for `HasIterator<false>` + but it is implemented for `HasIterator<true>` + --> src/runtime.rs + | + | impl CheckHasIterator<true> for HasIterator<true> {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ = note: this error originates in the macro `$crate::quote_token_with_context` which comes from the expansion of the macro `quote` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/quote/tests/ui/does-not-have-iter.stderr b/vendor/quote/tests/ui/does-not-have-iter.stderr index 0b13e5cb..348071cc 100644 --- a/vendor/quote/tests/ui/does-not-have-iter.stderr +++ b/vendor/quote/tests/ui/does-not-have-iter.stderr @@ -1,10 +1,13 @@ -error[E0308]: mismatched types +error[E0277]: repetition contains no interpolated value that is an iterator --> tests/ui/does-not-have-iter.rs:4:5 | 4 | quote!(#(a b)*); - | ^^^^^^^^^^^^^^^ - | | - | expected `HasIterator`, found `ThereIsNoIteratorInRepetition` - | expected due to this + | ^^^^^^^^^^^^^^^ none of the values interpolated inside this repetition are iterable | +help: the trait `CheckHasIterator<true>` is not implemented for `HasIterator<false>` + but it is implemented for `HasIterator<true>` + --> src/runtime.rs + | + | impl CheckHasIterator<true> for HasIterator<true> {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ = note: this error originates in the macro `$crate::quote_token_with_context` which comes from the expansion of the macro `quote` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/quote/tests/ui/not-quotable.stderr b/vendor/quote/tests/ui/not-quotable.stderr index 35cb6f2b..15492463 100644 --- a/vendor/quote/tests/ui/not-quotable.stderr +++ b/vendor/quote/tests/ui/not-quotable.stderr @@ -8,13 +8,13 @@ error[E0277]: the trait bound `Ipv4Addr: ToTokens` is not satisfied | required by a bound introduced by this call | = help: the following other types implement trait `ToTokens`: - bool - char - isize - i8 - i16 - i32 - i64 - i128 + &T + &mut T + Box<T> + CStr + CString + Cow<'a, T> + Option<T> + Rc<T> and $N others = note: this error 
originates in the macro `quote` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/quote/tests/ui/not-repeatable.stderr b/vendor/quote/tests/ui/not-repeatable.stderr index 2ed1da04..d5e13b04 100644 --- a/vendor/quote/tests/ui/not-repeatable.stderr +++ b/vendor/quote/tests/ui/not-repeatable.stderr @@ -2,13 +2,7 @@ error[E0599]: the method `quote_into_iter` exists for struct `Ipv4Addr`, but its --> tests/ui/not-repeatable.rs:7:13 | 3 | struct Ipv4Addr; - | --------------- - | | - | method `quote_into_iter` not found for this struct - | doesn't satisfy `Ipv4Addr: Iterator` - | doesn't satisfy `Ipv4Addr: ToTokens` - | doesn't satisfy `Ipv4Addr: ext::RepIteratorExt` - | doesn't satisfy `Ipv4Addr: ext::RepToTokensExt` + | --------------- method `quote_into_iter` not found for this struct because it doesn't satisfy `Ipv4Addr: Iterator`, `Ipv4Addr: ToTokens`, `Ipv4Addr: ext::RepIteratorExt` or `Ipv4Addr: ext::RepToTokensExt` ... 7 | let _ = quote! { #(#ip)* }; | ^^^^^^^^^^^^^^^^^^ method cannot be called on `Ipv4Addr` due to unsatisfied trait bounds @@ -22,14 +16,27 @@ error[E0599]: the method `quote_into_iter` exists for struct `Ipv4Addr`, but its which is required by `Ipv4Addr: ext::RepToTokensExt` `&mut Ipv4Addr: Iterator` which is required by `&mut Ipv4Addr: ext::RepIteratorExt` -note: the traits `ToTokens` and `Iterator` must be implemented - --> src/to_tokens.rs +note: the traits `Iterator` and `ToTokens` must be implemented + --> $RUST/core/src/iter/traits/iterator.rs | - | pub trait ToTokens { + | pub trait Iterator { | ^^^^^^^^^^^^^^^^^^ | - ::: $RUST/core/src/iter/traits/iterator.rs + ::: src/to_tokens.rs | - | pub trait Iterator { + | pub trait ToTokens { | ^^^^^^^^^^^^^^^^^^ + = help: items from traits can only be used if the trait is implemented and in scope + = note: the following traits define an item `quote_into_iter`, perhaps you need to implement one of them: + candidate #1: `ext::RepAsIteratorExt` + candidate #2: `ext::RepIteratorExt` + candidate #3: `ext::RepToTokensExt` = note: this error originates in the macro `$crate::quote_bind_into_iter` which comes from the expansion of the macro `quote` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0282]: type annotations needed + --> tests/ui/not-repeatable.rs:7:13 + | +7 | let _ = quote! 
{ #(#ip)* }; + | ^^^^^^^^^^^^^^^^^^ cannot infer type + | + = note: this error originates in the macro `$crate::quote_bind_next_or_break` which comes from the expansion of the macro `quote` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/syn/.cargo-checksum.json b/vendor/syn/.cargo-checksum.json index 1e391f59..b6c607dd 100644 --- a/vendor/syn/.cargo-checksum.json +++ b/vendor/syn/.cargo-checksum.json @@ -1 +1 @@ -{"files":{".cargo_vcs_info.json":"1d94974df1794207ff46a2db2909866a6cd9fd3395c0c49a2a23d903fa72f321","Cargo.toml":"f56cb0ef622eddc84e2f801e2123e0e3728847d581d8597a6d5707339162e96b","Cargo.toml.orig":"e2d7209bb57033e33a9bbccad1b52b49f5f72e861c015df3a3c4004f43868e20","LICENSE-APACHE":"62c7a1e35f56406896d7aa7ca52d0cc0d272ac022b5d2796e7d6905db8a3636a","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"f6904878f9082d7d267b6d0d737ef211ff165cfd039a4d45ad88e9861f3e217f","benches/file.rs":"0a0527c78d849148cbb6118b4d36f72da7d4add865ba1a410e0a1be9e8dbfe0e","benches/rust.rs":"c006f01aacf95f61e5d6f4064be4040896a37a9632fb3dcfd9bc2a7ab171741d","src/attr.rs":"bd5ffae18a363162f7d9c12a1b6c1d023070cbf1b060c98ebc38ef79f1de9c67","src/bigint.rs":"0299829b2f7a1a798fe2f7bc1680e4a10f9b6f4a852d09af4da2deab466c4242","src/buffer.rs":"3ef1c3302acc4f9976484fd11c28981a5ff245cf4f3b8b888b7f065c8614881e","src/custom_keyword.rs":"f2704d15a337def453bdb337359fffdd014d62ae875cd6dbd442691c932f89c9","src/custom_punctuation.rs":"2e688f6a7c3c0405f209a6aaa85a43fc34807fc280e4a71235642fc9333492dd","src/data.rs":"09ce487d27f7dd79031b390c8344fc6361d64b05064ee25dba1489cf3aa56128","src/derive.rs":"3132e7f064725c7ca43f26daee93ec78037d46a935c6b0758af905cff450c15c","src/discouraged.rs":"482970b03bdee3cbc30c034f644e3293b25387db46300da5d8d8efd97dad8507","src/drops.rs":"013385f1dd95663f1afab41abc1e2eea04181998644828935ca564c74d6462ae","src/error.rs":"8dbb17978f688e12bcce58232f05c0ca9ed9de1fe511440793fb958da2fc93b9","src/export.rs":"6b519b1c17967cdd1519f52f981022435fcf940834fc8a705923426b65f21eac","src/expr.rs":"388348e9404a53331fe84d670bc0da889e1e9471461a8dc0a2e010a377103ecf","src/ext.rs":"3cf2d869812e5be894aa1c48bf074da262143fb2df1c9ac1b5ee965bf2a96a1c","src/file.rs":"a4d510dd0e2756bd54983dfa747601918c801e987cbf92deab44cdca6a201aeb","src/gen/clone.rs":"46540509dc99bb849014948a0c5b02ea372d5feceae5ea391c29f226f06516eb","src/gen/debug.rs":"32b2076b755f021428a0fb268a94057e1bcb1cd400feb895946703d7919b843a","src/gen/eq.rs":"aa5455b2cc0d9846d119ce001e821872df911f65133b993e3801a42e8f635f2a","src/gen/fold.rs":"305fe1db2b5b7039e79104a6e8ead11b1ee3c009f31f2a2bbebca10e494188cd","src/gen/hash.rs":"4ca8239c681ea5fd7b16bb61bff9034bff09680c088f5a16e90e99013e55742f","src/gen/visit.rs":"cbd0ebe18da62614c66a7900ef67350a5efee261bcdfff6cffc1ea6c4e754e34","src/gen/visit_mut.rs":"1b2a260ab3a6465eed6164f6d2c0cec977f9c3ca912f6bed81f8599b4d8f5cb0","src/gen_helper.rs":"750caab67ba0ba11a95ea28cd38026485227bb4aa114cdb497472386f60fdb35","src/generics.rs":"cb1a0374c523a469d48b1db7316166559034d437fb4f6cc5661511edb8aba32c","src/group.rs":"f5911e9cf2dc2dffab546590167c48de30409cb7708aa3307d22be143df720e4","src/ident.rs":"1b2d86d2e380b1fa83aee6ed113c47f0b482a229c25be428dac921deec0b4852","src/item.rs":"00ae729a00854f15203dbcbf2251910658eb052130de243144dcbec68be37919","src/lib.rs":"723c0555b398e4efd39c3198b4fc264b2bab6051e6f81ea3af81865784fdf4ec","src/lifetime.rs":"64abfd72af5dd271b2de1a8304ca0899b90da7750df9dec2ee8102bceb08a426","src/lit.rs":"3a70ebbc81563b967d2cbf589b6980a5b202ee3379d5d401e01a
4c2af124d7ae","src/lookahead.rs":"376092f91a1c32e1b277db0a6790fdda151c9ec51bd971fe6a6545b5b9e73b5d","src/mac.rs":"b1cf73f34a27a8f1429125e726623a524fb5dce875eb68ead3beaffa976442c3","src/macros.rs":"64fce3fda990306cfae5f0409918946ff7c9e98ecad963932e6c1436c0b0d4c2","src/meta.rs":"9df61ebaa405ef743ba1629b39ee5e806f9ead77694a1027f192da860270c83d","src/op.rs":"fe5db7c3373b956234ea8a1a7d129a06e5aef5db77c44c1c2fedb4aaa667ac56","src/parse.rs":"e57637899f8533da26e16d41575c3489fd1bf3560cea427ba57b3c966d891d8e","src/parse_macro_input.rs":"4a753b2a6dbfefd6dc93852d66b4f6d73ebd6b8b9be74019fc476f429b9a892d","src/parse_quote.rs":"08c8f4eaedeffaac73170b59921d140f63f4a01c243993dff050b4b848d0ac1f","src/pat.rs":"4d99c5ed6a08e6adfd0f6c31438befd3f03e48982a36bb2544962d9db7805a4a","src/path.rs":"8079943ec924ad5338e4bfa16b15b8ea0d6d27092396b62667eadcd0b98f20db","src/print.rs":"22910bf0521ab868ebd7c62601c55912d12cfb400c65723e08e5cfa3a2d111c0","src/punctuated.rs":"dffaab15b9215c70d7db416e8d4c44b78c43ba2f255485e6211363f0f0fea063","src/restriction.rs":"62efbc127d7e7316dd1070c0e976872de6238b2602bba1fb35df18511b4e7199","src/sealed.rs":"6ece3b3dcb30f6bb98b93d83759ca7712ee8592bef9c0511141039c38765db0e","src/span.rs":"0a48e375e5c9768f6f64174a91ba6a255f4b021e2fb3548d8494e617f142601b","src/spanned.rs":"1bba75d73dd4dc5be6c4e11fdd72686d340fb25b5808830bd603ddc840beabdc","src/stmt.rs":"321d445f681c46ac30644504df2a8afc333a1dde0371159e9e077a17eed16548","src/thread.rs":"1f1deb1272525ab2af9a36aac4bce8f65b0e315adb1656641fd7075662f49222","src/token.rs":"42c9c9d8a7145255abf0cb1af1f2fe56a0b428a8a5ea8ff08e58f894fb519fd5","src/tt.rs":"32490509abcc4a5a3c7eb5628337172b3b49d30697d2f7b7df4d8045255c13da","src/ty.rs":"1d11c614298f25a8fa42165a01d6545fc699a446d7f3f8630162c7f478b26886","src/verbatim.rs":"87cbe82a90f48efb57ffd09141042698b3e011a21d0d5412154d80324b0a5ef0","src/whitespace.rs":"718a80c12cdd145358e2690f0f68ff7779a91ec17ce9fde9bb755f635fce69ad","tests/common/eq.rs":"e51d7f102ec29eef1336e0aad451378d0191845f9b1617744b79d47ff557e6df","tests/common/mod.rs":"432ad35577f836a20b517d8c26ed994ac25fe73ef2f461c67688b61b99762015","tests/common/parse.rs":"246ddf1d303a9dbbc380e8d0689bd851cef3c3146d09d2627175deb9203b003d","tests/debug/gen.rs":"bc638d0fcb4a007d658535b60eec09cf4dc2c4907062832cb94fe6960a0d7e01","tests/debug/mod.rs":"dd87563bbd359401790a9c4185178539929ff9fa35a6998657af82a85731fe4c","tests/macros/mod.rs":"4c84bd9e82df255258671b6a57b0f2a3e4bef2127a2e8b842a4b6f3037b7fc5c","tests/regression.rs":"e9565ea0efecb4136f099164ffcfa26e1996b0a27fb9c6659e90ad9bdd42e7b6","tests/regression/issue1108.rs":"f32db35244a674e22ff824ca9e5bbec2184e287b59f022db68c418b5878a2edc","tests/regression/issue1235.rs":"a2266b10c3f7c7af5734817ab0a3e8b309b51e7d177b63f26e67e6b744d280b0","tests/repo/mod.rs":"ca9ca1d192aa25ffcfc12161edf4998f4c4e3160f8834f8f7259023a28981e5c","tests/repo/progress.rs":"c08d0314a7f3ecf760d471f27da3cd2a500aeb9f1c8331bffb2aa648f9fabf3f","tests/test_asyncness.rs":"3868181f25f7470476077f80a442a7804b6b9b371ad5917f4fd18b1002714c64","tests/test_attribute.rs":"b35550a43bbd187bb330997ba36f90c65d8fc489135b1d32ef4547f145cb7612","tests/test_derive_input.rs":"c215245c4d09052661ac5b65b34e950ea47622847bdffe648d380470f12db8f2","tests/test_expr.rs":"2de81f6171bcde3db8d36ce90b9c77ce672f93dcb1e03b6a44290b039246132b","tests/test_generics.rs":"b77741aa38e6ac7e1a9082faf168e7b7b92fbabf9f3fd07306676339a67394df","tests/test_grouping.rs":"ecbe3324878b2e2be42640a3dec198620cff18731fcb95ee7e94eacd11d2fec1","tests/test_ident.rs":"9eb53d1e21edf23e7c9e14dc74dcc2b2538e9221e19dbcc0a4
4e3acc2e90f3f6","tests/test_item.rs":"7f0255b61d0a6921313c09aaba470beefc55f1d4e66d1e24cfac7a3f63b035d8","tests/test_iterators.rs":"f4dacb5f3a8e0473dfb0d27f05270d41e79eddb4759b1fad3e88e379b4731e17","tests/test_lit.rs":"8e30c2d7837673a742d77aef01212788bbd099182dd5c1d10ee474cfeb786c39","tests/test_meta.rs":"3e1bb60b4bd56adb1e04b0e2d867404f0d81f7bf69caf7d8a70fc7090e079e84","tests/test_parse_buffer.rs":"3ed83ea2e50f84b80c0b543aac4bfbd379610d0911c0baa1eb94bb925bda7341","tests/test_parse_stream.rs":"a7e186272c89a239cae03053b5a039cdc073cdb46fad64b178fe76fde98405d5","tests/test_pat.rs":"fe94e084ee478d41cccea4eeb3e975386a70d36ff7cbb902ba0c767d536aab6e","tests/test_path.rs":"0033e1082b576bb3217ebd4546423d6f86fde7ee7ba3aba8c57bf137d2b42f47","tests/test_precedence.rs":"d3c34f2e5ffe22f7bdacff7ab6af71b24ba8150fdcc640f3c3e28db3491a556a","tests/test_receiver.rs":"af64117acd66fbf42edc476f731ecd20c88009d9cb641dbd7a1d6384ae99ae73","tests/test_round_trip.rs":"61183de56bf70c628659b9529f794b9f138904959f919f08f3b8176ba62c76ef","tests/test_shebang.rs":"06d3acabed004767d8b3a0389bde7485a6719cad6a0d0b4ac2c7439b03586651","tests/test_should_parse.rs":"1d3535698a446e2755bfc360676bdb161841a1f454cdef6e7556c6d06a95c89d","tests/test_size.rs":"78c14995718c2f8d5a7296c8f524601c30f91a5586e1402c0775977a4f814406","tests/test_stmt.rs":"42a3707056da0ce3a01f8fb13e8b7631f9be6066627ac376e1874742419ad2cc","tests/test_token_trees.rs":"d012da9c3c861073711b006bf6ffdc073821fb9fb0a08733628cdae57124d1f5","tests/test_ty.rs":"f7f21f76e9e798669f09a95c380e26ae5959ee8ac5f3b765b1a799cc9505d075","tests/test_visibility.rs":"cf4c93997cd88821ef7f8e2dd5d1586175cce4614407cd3bdf371ecc4d8abc44","tests/zzz_stable.rs":"2a862e59cb446235ed99aec0e6ada8e16d3ecc30229b29d825b7c0bbc2602989"},"package":"e96b79aaa137db8f61e26363a0c9b47d8b4ec75da28b7d1d614c2303e232408b"} \ No newline at end of file 
+{"files":{".cargo_vcs_info.json":"a292e51393a4de0e47290e8b89762dae18dd11abd22616373d1a94455a13dac0","Cargo.lock":"fa1135ef27fa16cbbdbdeac78f79bf9ee6350cbab0e3304c961056483d9206d9","Cargo.toml":"a46aa45b686f88cb3e28c8f6ea0f6d71aff4fa051d7342274bb69fecadf114d1","Cargo.toml.orig":"ddfd969ce2c69912aa9a6bde81cc0142741a0c7682f070933c0008011e4f318f","LICENSE-APACHE":"62c7a1e35f56406896d7aa7ca52d0cc0d272ac022b5d2796e7d6905db8a3636a","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"84d8f027ce3c19ae9cf91e68207b0bc737b71aa25706ba575bef7d4cba6fc937","benches/file.rs":"6f2ae7748d1576cff48e4ef55d4e87e2f5bb7898a36aa99ea6dd6ed0f72a4e3c","benches/rust.rs":"53cb8accfda73e59a3efc78081e7c58a1010ae60c23ef8c43bb240550daa3e96","src/attr.rs":"f44ff3cb9d3bc34d7de9e3f1aba62ddb1c8973881d9db981623b112005d4ed53","src/bigint.rs":"0299829b2f7a1a798fe2f7bc1680e4a10f9b6f4a852d09af4da2deab466c4242","src/buffer.rs":"7622b646e0d2399068868c41cb6b8abec39afa925c220a8e9f19c2a424911fd4","src/classify.rs":"3c796df4c891034abc3008196d34ad62c22fcb9525a067890731a5a6bbc7b5fb","src/custom_keyword.rs":"322114e36ae43a2f8605506fb4568efdbc2986853e2fee74bd10a4ca0fb60c69","src/custom_punctuation.rs":"26b28164f0b2e5e80e7cf36a3ba6d2577d27442cce5e00a72f685b5ee6f51ecd","src/data.rs":"fa04dce757ca3dd1e350aaa84bbcf8b743c13a00b0983b980bf2919f91a22078","src/derive.rs":"f54f8cf9386a2d45186ff3c86ade5dae59e0e337b0198532449190ae8520cff8","src/discouraged.rs":"653c5d9e6c4e3c2359817dc343f145569a0c9562a707f4949c374c242955ce12","src/drops.rs":"e98da4aaafb5afc75919f9e2914326ad09bf16094a9407c60a05515a2e01dd00","src/error.rs":"cbf06fb7b000f2e6444fa224a062c493911a8f9fc5d386be6e52dadbb7658f34","src/export.rs":"b260cc49da1da3489e7755832bc8015cfad79e84f6c74e237f65ae25a2385e56","src/expr.rs":"fa766ce749ea31367e178f45a2dc8f8545b9467f7fc51e7a1fe72bbb0b9738dc","src/ext.rs":"57577c7e6b7b65cd27ac5aad66d47e63693762d8880cde088b20975ec845244d","src/file.rs":"9d04206da5eff88e92698b3f78c51082d615300cb11d93768250a3e97c40b078","src/fixup.rs":"7647cde30efdce96b1488ae805788c168d4499b464b7d421abc17ea8ffde66f2","src/gen/clone.rs":"7af00b0a240453d7aac178be1b6cdf9df3b33f1281da35e02f66ba7ba55d060c","src/gen/debug.rs":"59bc259fa9dc0c7ffe094df7ad8afc9c4e79b6d73a8f0fae8a2e435905866e7a","src/gen/eq.rs":"d7428672d82c1931fdefb8bda0425a25ebbe20e5e2736b18cfd1752b64e99f78","src/gen/fold.rs":"39b0a26cfdf0accaff6da108a2b6d8f93e83c63d0bf6a8d7af0900fc0f71b55b","src/gen/hash.rs":"6808bb0e47e7346a14fbec5f55430906efa172f46417c83c2f7c76ce8c9ceab6","src/gen/token.css":"3a5882d0b3aefbf56ca5d08c28c117e25ac2327eadf7242202d906b2ddb2232e","src/gen/visit.rs":"fe1443aa7953eaca10d6bf982396e627e31ce6b8aea8eb7cf949e0adeea5badb","src/gen/visit_mut.rs":"9948f0f07aefd8133dcc958e744c49f1da625d199f7707363b79f0373b2dcd6b","src/generics.rs":"6170b4a9d82ba27a3b4471057a5206e45d4b379813855b67d06aa1fc7f978ccc","src/group.rs":"61e067094aa930b6550dc3717a5aaeef7dabfdcf1a177f83fb636598fa7e4e72","src/ident.rs":"d6061030fadae9c7dc847e1ee46178d9657d782aad108c7197e8cafe765b3eaa","src/item.rs":"ad2d5f4621426420ba4dc0c1a82626b7b0979cb67c06fbcb16ee6abb025e7c80","src/lib.rs":"c41b90d4dd52377a0aa13e2b8963a5c414ee36aa728a0facc36f2c7f841e8d3d","src/lifetime.rs":"ec748fdbdedeb75c4dbc4460653cf97fcf113207eea5b12fea9e1f6e1198beca","src/lit.rs":"69ef534be9ba43de0da9a65d75de36f3d14d83f5bd1666ea72419c9302095408","src/lookahead.rs":"b2837d80fa4466bb430b65d32b54d1bad5de9bb851550f916658347145c281b4","src/mac.rs":"fdce8291f71adef3f69975f229156dca2309ca232ed943061afaf96220908ab8","src/macros.rs":"2a6e895d
fe1c3a9a7237b5e23358ca5d8967e2beae6d094dda68d3659f9a5c84","src/meta.rs":"969d8ccbdbc6ea2e4928a21831b791c57447b231e1373149e4c63b46f3951801","src/op.rs":"a61757370f802e44efa3c4a1057ae2cd26e64e273f7d76c06d5ffb49602319e2","src/parse.rs":"bbe69237d50ce5f9b5c029e851607c54ca6232cad0790551c2f5bb29e2f9657d","src/parse_macro_input.rs":"e4e22b63d0496d06a4ca17742a22467ed93f08a739081324773828bad63175ee","src/parse_quote.rs":"80eec7ce54c38f3bbd23acb70cd8a6649d7e1523c3977e3bf12849fd8c5cf16d","src/pat.rs":"b6c8c04c330a76dbe9cd35949026724fc3aeacf98e8c0a259cf2e16caff99071","src/path.rs":"2146bdf5e0eb6991232c8a09de3a30440727f439ab792a34f5313057c091a724","src/precedence.rs":"58420a5015003ecd4d7a4a0c87c168caa4c696e646355523d9eaae81fc5e1d54","src/print.rs":"22910bf0521ab868ebd7c62601c55912d12cfb400c65723e08e5cfa3a2d111c0","src/punctuated.rs":"711c1f9122f560530d40bdccbd8784b6c2c54067f0d753cce282a4d6ca933a37","src/restriction.rs":"a7152ec5a4ee4f55446019aa2b4d84f2238776f0e6ffc0c22adf3374b517fe56","src/scan_expr.rs":"e199c35e8bbf3e2c70901e1175df8dd446f4cb67b60100647f478f2dc31f6f12","src/sealed.rs":"6ece3b3dcb30f6bb98b93d83759ca7712ee8592bef9c0511141039c38765db0e","src/span.rs":"0a48e375e5c9768f6f64174a91ba6a255f4b021e2fb3548d8494e617f142601b","src/spanned.rs":"4b9bd65f60ab81922adfd0be8f03b6d50e98da3a5f525f242f9639aec4beac79","src/stmt.rs":"7a594d08cbedef4c6c0ed6ca9c331f4f087bd631a12938240180f7c53ada44e9","src/thread.rs":"1f1deb1272525ab2af9a36aac4bce8f65b0e315adb1656641fd7075662f49222","src/token.rs":"55f1ad3ba0edc43ae7b65a6fa6dc13fc1a99053d6300187a4cc48572b8f451f3","src/tt.rs":"ad478bef531007fac0e4af7ecae81f8fe66a5ce44532288156b7e3d4bfc45950","src/ty.rs":"b7daaf57dd96fc09448e45fc92f55b00f3b7ba99a00f3f2eb8a11f35e302af3c","src/verbatim.rs":"4aa06d0ce2f6b6c6aa657bc349ccc85005d2eb05494dfa1ac1fe9012916dcc3e","src/whitespace.rs":"9cdcbfe9045b259046329a795bc1105ab5a871471a6d3f7318d275ee53f7a825","tests/common/eq.rs":"134ac97e3a5ff4ce2c505f0008b816f3b2280621f9dfa6e6a2b890469d01575e","tests/common/mod.rs":"b752aa8f1faf8c6abf1286a12fb50b6c257ec1889d81bcdb3dc3257134695a89","tests/common/parse.rs":"f226bfa84803429c4ef203a09b30372db01298e14443089fb60c11e2112212db","tests/common/visit.rs":"a260ecd2ce7853cd3644e19aba08e8d358a656fd3fb0f1287cea40c59c9e62c9","tests/debug/gen.rs":"cdd89f1bf91fe215e06868fc93423d2f1872c812c3bfec93dc920bc105e20c09","tests/debug/mod.rs":"1259df940bbcaa968a837e402d6853f2efa38d2260e306d42f17f9e8ef74fae5","tests/macros/mod.rs":"d2294a79e341c623ae671dd363e99965d78dda7f340b0cc038267207adfacae2","tests/regression.rs":"e9565ea0efecb4136f099164ffcfa26e1996b0a27fb9c6659e90ad9bdd42e7b6","tests/regression/issue1108.rs":"f32db35244a674e22ff824ca9e5bbec2184e287b59f022db68c418b5878a2edc","tests/regression/issue1235.rs":"a2266b10c3f7c7af5734817ab0a3e8b309b51e7d177b63f26e67e6b744d280b0","tests/repo/mod.rs":"4e2d370876192fc0514962e1eeb9e1e4a96e3805b1f87257ba4d1eeda8b1db73","tests/repo/progress.rs":"c08d0314a7f3ecf760d471f27da3cd2a500aeb9f1c8331bffb2aa648f9fabf3f","tests/snapshot/mod.rs":"4a101272c5abe6ca9f3501e0cacacee9a0ccf7ca773348a239e5b046d0316a7e","tests/test_asyncness.rs":"971d560d927d5a8494eaa7fce8f0d062d6971c17c4c464fcfc31570572b7d3d7","tests/test_attribute.rs":"8a4429b7cfe2360bb73beae54a62ae3255ebbd5181467a8608d6f858c2711728","tests/test_derive_input.rs":"c8f5dbac6482dadd0fab30d0b1fe3254869256c48ea68ea484cad7f7406c8568","tests/test_expr.rs":"055cb9b33a5bb6ed5dc67491e6f1ae794a53a5a091245debd464ef57144f5edb","tests/test_generics.rs":"0d79a25b75e45779185c2adefd3d88a9e49d0f333d885265551df1402d50abaf","tests/test_gro
uping.rs":"fe3de6e8824f0722ab6450c6dfc374f6e0f8fe75c87c4dd56b2cb00a2197ed58","tests/test_ident.rs":"d5850e817720e774cd397a46dbc5298c57933823c18e20805e84503fc9387e8f","tests/test_item.rs":"f4119000784af2d65d5fd097830368a391c05b249f3df8c60613a98b16a322ca","tests/test_lit.rs":"4130efa425d14ed3ad9a1c2a00ef4b29782c9d1cf9e29ff9dddd3b23b2e3ddee","tests/test_meta.rs":"5b0fdee0decbd07476c9673403a662de385901b4bf60600c26ac879893f5bf9c","tests/test_parse_buffer.rs":"0de6af13ba0345986b18d495063f9b75a1018e8569c34b277f9522c63a6c0941","tests/test_parse_quote.rs":"85d90d2d51b82aab7c30159dd884f26c592ddb28ed31ef2baf371ee31349694c","tests/test_parse_stream.rs":"b6b533432173123d6d01d8d2cb33714bc50b30b16ffbb6116f93937221ad4594","tests/test_pat.rs":"dafa3e1f51812e8c852dc5210640a4adf6fff7cd0a0790ee17d2c4c115321846","tests/test_path.rs":"7a6763a262c41a9522068887702fe7cd4ff72b07da5253ac47761d73315b021d","tests/test_precedence.rs":"ed27331fe3bc4496970e677df0d2f66e4516e6eea975d4a31029338ad23c79c0","tests/test_punctuated.rs":"efed2c281b6965d71b065c7606631ba1989af6e7b5f5d1ca1033f8b968dc076c","tests/test_receiver.rs":"2053028236f95f3cb508ebf2eb606df43cae4f9f4dd27823661459ff6c54a39c","tests/test_round_trip.rs":"8b2ed3c4164247577953e3108cca67eed97761c90b9c0df31cbd50097ed1a047","tests/test_shebang.rs":"9bc24b1ee2947b06a279d2ed40039cb45bba6caf7cd40530d93f7e2355de53c6","tests/test_size.rs":"03efaf829b80b7db1f831474c1d3ce268914fc499d0e2a7eea03cad04a482974","tests/test_stmt.rs":"b3c120059d7b56388963b85234feb2e4d379e32a0bf7f29b6683eca000dd3919","tests/test_token_trees.rs":"c30b921a96739c9334ec2bdd06552729891e0251b9d8fbdf0b8f5cc897babee5","tests/test_ty.rs":"9bb5f632941451ca6b200100310b55e62a9956190df3efe28b80d42843e75362","tests/test_unparenthesize.rs":"e5c047819afd5f70fde1bdd095467b1291d0854641f21e8183e50919986d8ce7","tests/test_visibility.rs":"7d05f05b0782976369d21477ac9f4d35a7c7f36faa42127e3a9c12ada270baf8","tests/zzz_stable.rs":"2a862e59cb446235ed99aec0e6ada8e16d3ecc30229b29d825b7c0bbc2602989"},"package":"390cc9a294ab71bdb1aa2e99d13be9c753cd2d7bd6560c77118597410c4d2e87"} \ No newline at end of file diff --git a/vendor/syn/.cargo_vcs_info.json b/vendor/syn/.cargo_vcs_info.json index e68aaa00..0c81a225 100644 --- a/vendor/syn/.cargo_vcs_info.json +++ b/vendor/syn/.cargo_vcs_info.json @@ -1,6 +1,6 @@ { "git": { - "sha1": "43632bfb6c78ee1f952645a268ab1ac4af162977" + "sha1": "4e508677213155cf73b4072f7a82f73a26cad3f5" }, "path_in_vcs": "" } \ No newline at end of file diff --git a/vendor/syn/Cargo.lock b/vendor/syn/Cargo.lock new file mode 100644 index 00000000..22ef04a0 --- /dev/null +++ b/vendor/syn/Cargo.lock @@ -0,0 +1,1813 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 3 + +[[package]] +name = "adler2" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" + +[[package]] +name = "anyhow" +version = "1.0.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61" + +[[package]] +name = "atomic-waker" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" + +[[package]] +name = "automod" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebb4bd301db2e2ca1f5be131c24eb8ebf2d9559bc3744419e93baf8ddea7e670" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.110", +] + +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + +[[package]] +name = "bitflags" +version = "2.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3" + +[[package]] +name = "bumpalo" +version = "3.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" + +[[package]] +name = "bytes" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b35204fbdc0b3f4446b89fc1ac2cf84a8a68971995d0bf2e925ec7cd960f9cb3" + +[[package]] +name = "cc" +version = "1.2.47" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd405d82c84ff7f35739f175f67d8b9fb7687a0e84ccdc78bd3568839827cf07" +dependencies = [ + "find-msvc-tools", + "shlex", +] + +[[package]] +name = "cfg-if" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" + +[[package]] +name = "console" +version = "0.15.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "054ccb5b10f9f2cbf51eb355ca1d05c2d279ce1804688d0db74b4733a5aeafd8" +dependencies = [ + "encode_unicode", + "libc", + "once_cell", + "windows-sys 0.59.0", +] + +[[package]] +name = "core-foundation" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" + +[[package]] +name = "crc32fast" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "crossbeam-deque" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51" +dependencies = [ + "crossbeam-epoch", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" + +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.110", +] + +[[package]] +name = "either" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" + +[[package]] +name = "encode_unicode" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" + +[[package]] +name = "encoding_rs" +version = "0.8.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + +[[package]] +name = "errno" +version = "0.3.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" +dependencies = [ + "libc", + "windows-sys 0.61.2", +] + +[[package]] +name = "fastrand" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" + +[[package]] +name = "filetime" +version = "0.2.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc0505cd1b6fa6580283f6bdf70a73fcf4aba1184038c90902b92b3dd0df63ed" +dependencies = [ + "cfg-if", + "libc", + "libredox", + "windows-sys 0.60.2", +] + +[[package]] +name = "find-msvc-tools" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a3076410a55c90011c298b04d0cfa770b00fa04e1e3c97d3f6c9de105a03844" + +[[package]] +name = "flate2" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfe33edd8e85a12a67454e37f8c75e730830d83e313556ab9ebf9ee7fbeb3bfb" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + +[[package]] +name = "form_urlencoded" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "futures-channel" +version = 
"0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" + +[[package]] +name = "futures-io" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" + +[[package]] +name = "futures-sink" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" + +[[package]] +name = "futures-task" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" + +[[package]] +name = "futures-util" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" +dependencies = [ + "futures-core", + "futures-io", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + +[[package]] +name = "getrandom" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "getrandom" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd" +dependencies = [ + "cfg-if", + "libc", + "r-efi", + "wasip2", +] + +[[package]] +name = "h2" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3c0b69cfcb4e1b9f1bf2f53f95f766e4661169728ec61cd3fe5a0166f2d1386" +dependencies = [ + "atomic-waker", + "bytes", + "fnv", + "futures-core", + "futures-sink", + "http", + "indexmap", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "hashbrown" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" + +[[package]] +name = "http" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4a85d31aea989eead29a3aaf9e1115a180df8282431156e533de47660892565" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" +dependencies = [ + "bytes", + "http", +] + +[[package]] +name = "http-body-util" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" +dependencies = [ + "bytes", + "futures-core", + "http", + "http-body", + "pin-project-lite", +] + +[[package]] +name = "httparse" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" + +[[package]] +name = "hyper" +version = "1.8.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "2ab2d4f250c3d7b1c9fcdff1cece94ea4e2dfbec68614f7b87cb205f24ca9d11" +dependencies = [ + "atomic-waker", + "bytes", + "futures-channel", + "futures-core", + "h2", + "http", + "http-body", + "httparse", + "itoa", + "pin-project-lite", + "pin-utils", + "smallvec", + "tokio", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.27.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58" +dependencies = [ + "http", + "hyper", + "hyper-util", + "rustls", + "rustls-pki-types", + "tokio", + "tokio-rustls", + "tower-service", +] + +[[package]] +name = "hyper-tls" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" +dependencies = [ + "bytes", + "http-body-util", + "hyper", + "hyper-util", + "native-tls", + "tokio", + "tokio-native-tls", + "tower-service", +] + +[[package]] +name = "hyper-util" +version = "0.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52e9a2a24dc5c6821e71a7030e1e14b7b632acac55c40e9d2e082c621261bb56" +dependencies = [ + "base64", + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "http", + "http-body", + "hyper", + "ipnet", + "libc", + "percent-encoding", + "pin-project-lite", + "socket2", + "system-configuration", + "tokio", + "tower-service", + "tracing", + "windows-registry", +] + +[[package]] +name = "icu_collections" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43" +dependencies = [ + "displaydoc", + "potential_utf", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locale_core" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_normalizer" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599" +dependencies = [ + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a" + +[[package]] +name = "icu_properties" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e93fcd3157766c0c8da2f8cff6ce651a31f0810eaa1c51ec363ef790bbb5fb99" +dependencies = [ + "icu_collections", + "icu_locale_core", + "icu_properties_data", + "icu_provider", + "zerotrie", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02845b3647bb045f1100ecd6480ff52f34c35f82d9880e029d329c21d1054899" + +[[package]] +name = "icu_provider" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614" +dependencies = [ + "displaydoc", + "icu_locale_core", + "writeable", + "yoke", + "zerofrom", + "zerotrie", + 
"zerovec", +] + +[[package]] +name = "idna" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" +dependencies = [ + "icu_normalizer", + "icu_properties", +] + +[[package]] +name = "indexmap" +version = "2.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ad4bb2b565bca0645f4d68c5c9af97fba094e9791da685bf83cb5f3ce74acf2" +dependencies = [ + "equivalent", + "hashbrown", +] + +[[package]] +name = "insta" +version = "1.44.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8732d3774162a0851e3f2b150eb98f31a9885dd75985099421d393385a01dfd" +dependencies = [ + "console", + "once_cell", + "similar", +] + +[[package]] +name = "ipnet" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" + +[[package]] +name = "iri-string" +version = "0.7.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4f867b9d1d896b67beb18518eda36fdb77a32ea590de864f1325b294a6d14397" +dependencies = [ + "memchr", + "serde", +] + +[[package]] +name = "itoa" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" + +[[package]] +name = "js-sys" +version = "0.3.82" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b011eec8cc36da2aab2d5cff675ec18454fad408585853910a202391cf9f8e65" +dependencies = [ + "once_cell", + "wasm-bindgen", +] + +[[package]] +name = "libc" +version = "0.2.177" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2874a2af47a2325c2001a6e6fad9b16a53b802102b528163885171cf92b15976" + +[[package]] +name = "libredox" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "416f7e718bdb06000964960ffa43b4335ad4012ae8b99060261aa4a8088d5ccb" +dependencies = [ + "bitflags", + "libc", + "redox_syscall", +] + +[[package]] +name = "linux-raw-sys" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039" + +[[package]] +name = "litemap" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77" + +[[package]] +name = "log" +version = "0.4.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34080505efa8e45a4b816c349525ebe327ceaa8559756f0356cba97ef3bf7432" + +[[package]] +name = "memchr" +version = "2.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273" + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "miniz_oxide" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" +dependencies = [ + "adler2", + "simd-adler32", +] + +[[package]] +name = "mio" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69d83b0086dc8ecf3ce9ae2874b2d1290252e2a30720bea58a5c6639b0092873" +dependencies = [ + "libc", + "wasi", + "windows-sys 0.61.2", +] + +[[package]] +name = "native-tls" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87de3442987e9dbec73158d5c715e7ad9072fda936bb03d19d7fa10e00520f0e" +dependencies = [ + "libc", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework", + "security-framework-sys", + "tempfile", +] + +[[package]] +name = "once_cell" +version = "1.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" + +[[package]] +name = "openssl" +version = "0.10.75" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08838db121398ad17ab8531ce9de97b244589089e290a384c900cb9ff7434328" +dependencies = [ + "bitflags", + "cfg-if", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.110", +] + +[[package]] +name = "openssl-probe" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" + +[[package]] +name = "openssl-sys" +version = "0.9.111" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82cab2d520aa75e3c58898289429321eb788c3106963d0dc886ec7a5f4adc321" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "percent-encoding" +version = "2.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" + +[[package]] +name = "pin-project-lite" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "pkg-config" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" + +[[package]] +name = "potential_utf" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77" +dependencies = [ + "zerovec", +] + +[[package]] +name = "proc-macro2" +version = "1.0.103" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ee95bc4ef87b8d5ba32e8b7714ccc834865276eab0aed5c9958d00ec45f49e8" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a338cc41d27e6cc6dce6cefc13a0729dfbb81c262b1f519331575dd80ef3067f" +dependencies = [ + "proc-macro2", +] + 
+[[package]] +name = "r-efi" +version = "5.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" + +[[package]] +name = "rayon" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "368f01d005bf8fd9b1206fb6fa653e6c4a81ceb1466406b81792d87c5677a58f" +dependencies = [ + "either", + "rayon-core", +] + +[[package]] +name = "rayon-core" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22e18b0f0062d30d4230b2e85ff77fdfe4326feb054b9783a3460d8435c8ab91" +dependencies = [ + "crossbeam-deque", + "crossbeam-utils", +] + +[[package]] +name = "redox_syscall" +version = "0.5.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" +dependencies = [ + "bitflags", +] + +[[package]] +name = "ref-cast" +version = "1.0.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f354300ae66f76f1c85c5f84693f0ce81d747e2c3f21a45fef496d89c960bf7d" +dependencies = [ + "ref-cast-impl", +] + +[[package]] +name = "ref-cast-impl" +version = "1.0.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7186006dcb21920990093f30e3dea63b7d6e977bf1256be20c3563a5db070da" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.110", +] + +[[package]] +name = "reqwest" +version = "0.12.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d0946410b9f7b082a427e4ef5c8ff541a88b357bc6c637c40db3a68ac70a36f" +dependencies = [ + "base64", + "bytes", + "encoding_rs", + "futures-channel", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-rustls", + "hyper-tls", + "hyper-util", + "js-sys", + "log", + "mime", + "native-tls", + "percent-encoding", + "pin-project-lite", + "rustls-pki-types", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper", + "tokio", + "tokio-native-tls", + "tower", + "tower-http", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "ring" +version = "0.17.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" +dependencies = [ + "cc", + "cfg-if", + "getrandom 0.2.16", + "libc", + "untrusted", + "windows-sys 0.52.0", +] + +[[package]] +name = "rustix" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd15f8a2c5551a84d56efdc1cd049089e409ac19a3072d5037a17fd70719ff3e" +dependencies = [ + "bitflags", + "errno", + "libc", + "linux-raw-sys", + "windows-sys 0.61.2", +] + +[[package]] +name = "rustls" +version = "0.23.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "533f54bc6a7d4f647e46ad909549eda97bf5afc1585190ef692b4286b198bd8f" +dependencies = [ + "once_cell", + "rustls-pki-types", + "rustls-webpki", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls-pki-types" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94182ad936a0c91c324cd46c6511b9510ed16af436d7b5bab34beab0afd55f7a" +dependencies = [ + "zeroize", +] + +[[package]] +name = "rustls-webpki" +version = "0.103.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2ffdfa2f5286e2247234e03f680868ac2815974dc39e00ea15adc445d0aafe52" 
+dependencies = [ + "ring", + "rustls-pki-types", + "untrusted", +] + +[[package]] +name = "rustversion" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" + +[[package]] +name = "ryu" +version = "1.0.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" + +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "schannel" +version = "0.1.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "891d81b926048e76efe18581bf793546b4c0eaf8448d72be8de2bbee5fd166e1" +dependencies = [ + "windows-sys 0.61.2", +] + +[[package]] +name = "security-framework" +version = "2.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" +dependencies = [ + "bitflags", + "core-foundation", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc1f0cbffaac4852523ce30d8bd3c5cdc873501d96ff467ca09b6767bb8cd5c0" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "serde" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" +dependencies = [ + "serde_core", + "serde_derive", +] + +[[package]] +name = "serde_core" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.110", +] + +[[package]] +name = "serde_json" +version = "1.0.145" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c" +dependencies = [ + "itoa", + "memchr", + "ryu", + "serde", + "serde_core", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + +[[package]] +name = "simd-adler32" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d66dc143e6b11c1eddc06d5c423cfc97062865baf299914ab64caa38182078fe" + +[[package]] +name = "similar" +version = "2.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbbb5d9659141646ae647b42fe094daf6c6192d1620870b449d9557f748b2daa" + +[[package]] +name = "slab" +version = "0.4.11" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589" + +[[package]] +name = "smallvec" +version = "1.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" + +[[package]] +name = "socket2" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17129e116933cf371d018bb80ae557e889637989d8638274fb25622827b03881" +dependencies = [ + "libc", + "windows-sys 0.60.2", +] + +[[package]] +name = "stable_deref_trait" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" + +[[package]] +name = "subtle" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" + +[[package]] +name = "syn" +version = "2.0.110" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a99801b5bd34ede4cf3fc688c5919368fea4e4814a4664359503e6015b280aea" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.111" +dependencies = [ + "anyhow", + "automod", + "flate2", + "insta", + "proc-macro2", + "quote", + "rayon", + "ref-cast", + "reqwest", + "rustversion", + "syn-test-suite", + "tar", + "termcolor", + "unicode-ident", + "walkdir", +] + +[[package]] +name = "syn-test-suite" +version = "0.0.0+test" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0d661992f60e67c8bdd9a7d6360d30d1301f5783abf7d59933844f656762eb5" + +[[package]] +name = "sync_wrapper" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" +dependencies = [ + "futures-core", +] + +[[package]] +name = "synstructure" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.110", +] + +[[package]] +name = "system-configuration" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" +dependencies = [ + "bitflags", + "core-foundation", + "system-configuration-sys", +] + +[[package]] +name = "system-configuration-sys" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e1d1b10ced5ca923a1fcb8d03e96b8d3268065d724548c0211415ff6ac6bac4" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "tar" +version = "0.4.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d863878d212c87a19c1a610eb53bb01fe12951c0501cf5a0d65f724914a667a" +dependencies = [ + "filetime", + "libc", + "xattr", +] + +[[package]] +name = "tempfile" +version = "3.23.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d31c77bdf42a745371d260a26ca7163f1e0924b64afa0b688e61b5a9fa02f16" +dependencies = [ + "fastrand", + "getrandom 0.3.4", + "once_cell", + "rustix", + "windows-sys 0.61.2", +] + +[[package]] +name = "termcolor" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "tinystr" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869" +dependencies = [ + "displaydoc", + "zerovec", +] + +[[package]] +name = "tokio" +version = "1.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff360e02eab121e0bc37a2d3b4d4dc622e6eda3a8e5253d5435ecf5bd4c68408" +dependencies = [ + "bytes", + "libc", + "mio", + "pin-project-lite", + "socket2", + "windows-sys 0.61.2", +] + +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + "native-tls", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.26.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1729aa945f29d91ba541258c8df89027d5792d85a8841fb65e8bf0f4ede4ef61" +dependencies = [ + "rustls", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.7.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2efa149fe76073d6e8fd97ef4f4eca7b67f599660115591483572e406e165594" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tower" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" +dependencies = [ + "futures-core", + "futures-util", + "pin-project-lite", + "sync_wrapper", + "tokio", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-http" +version = "0.6.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2" +dependencies = [ + "bitflags", + "bytes", + "futures-util", + "http", + "http-body", + "iri-string", + "pin-project-lite", + "tower", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-layer" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" + +[[package]] +name = "tower-service" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" + +[[package]] +name = "tracing" +version = "0.1.41" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" +dependencies = [ + "pin-project-lite", + "tracing-core", +] + +[[package]] +name = "tracing-core" +version = "0.1.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678" +dependencies = [ + "once_cell", +] + +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "unicode-ident" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5" + +[[package]] +name = "untrusted" +version = "0.9.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + +[[package]] +name = "url" +version = "2.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08bc136a29a3d1758e07a9cca267be308aeebf5cfd5a10f3f67ab2097683ef5b" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", + "serde", +] + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "walkdir" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" +dependencies = [ + "same-file", + "winapi-util", +] + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.11.1+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" + +[[package]] +name = "wasip2" +version = "1.0.1+wasi-0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7" +dependencies = [ + "wit-bindgen", +] + +[[package]] +name = "wasm-bindgen" +version = "0.2.105" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da95793dfc411fbbd93f5be7715b0578ec61fe87cb1a42b12eb625caa5c5ea60" +dependencies = [ + "cfg-if", + "once_cell", + "rustversion", + "wasm-bindgen-macro", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.55" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "551f88106c6d5e7ccc7cd9a16f312dd3b5d36ea8b4954304657d5dfba115d4a0" +dependencies = [ + "cfg-if", + "js-sys", + "once_cell", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.105" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04264334509e04a7bf8690f2384ef5265f05143a4bff3889ab7a3269adab59c2" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.105" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "420bc339d9f322e562942d52e115d57e950d12d88983a14c79b86859ee6c7ebc" +dependencies = [ + "bumpalo", + "proc-macro2", + "quote", + "syn 2.0.110", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.105" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76f218a38c84bcb33c25ec7059b07847d465ce0e0a76b995e134a45adcb6af76" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "web-sys" +version = "0.3.82" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a1f95c0d03a47f4ae1f7a64643a6bb97465d9b740f0fa8f90ea33915c99a9a1" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "winapi-util" +version = "0.1.11" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" +dependencies = [ + "windows-sys 0.61.2", +] + +[[package]] +name = "windows-link" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" + +[[package]] +name = "windows-registry" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02752bf7fbdcce7f2a27a742f798510f3e5ad88dbe84871e5168e2120c3d5720" +dependencies = [ + "windows-link", + "windows-result", + "windows-strings", +] + +[[package]] +name = "windows-result" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-strings" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.60.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" +dependencies = [ + "windows-targets 0.53.5", +] + +[[package]] +name = "windows-sys" +version = "0.61.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm 0.52.6", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", +] + +[[package]] +name = "windows-targets" +version = "0.53.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3" +dependencies = [ + "windows-link", + "windows_aarch64_gnullvm 0.53.1", + "windows_aarch64_msvc 0.53.1", + "windows_i686_gnu 0.53.1", + "windows_i686_gnullvm 0.53.1", + "windows_i686_msvc 0.53.1", + "windows_x86_64_gnu 0.53.1", + "windows_x86_64_gnullvm 0.53.1", + "windows_x86_64_msvc 0.53.1", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnu" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "960e6da069d81e09becb0ca57a65220ddff016ff2d6af6a223cf372a506593a3" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_i686_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" + +[[package]] +name = "wit-bindgen" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59" + +[[package]] +name = "writeable" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9" + +[[package]] +name = "xattr" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"32e45ad4206f6d2479085147f02bc2ef834ac85886624a23575ae137c8aa8156" +dependencies = [ + "libc", + "rustix", +] + +[[package]] +name = "yoke" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954" +dependencies = [ + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.110", + "synstructure", +] + +[[package]] +name = "zerofrom" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.110", + "synstructure", +] + +[[package]] +name = "zeroize" +version = "1.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" + +[[package]] +name = "zerotrie" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", +] + +[[package]] +name = "zerovec" +version = "0.11.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.110", +] diff --git a/vendor/syn/Cargo.toml b/vendor/syn/Cargo.toml index a0e56c52..72e7c3aa 100644 --- a/vendor/syn/Cargo.toml +++ b/vendor/syn/Cargo.toml @@ -11,10 +11,11 @@ [package] edition = "2021" -rust-version = "1.56" +rust-version = "1.68" name = "syn" -version = "2.0.38" +version = "2.0.111" authors = ["David Tolnay <dtolnay@gmail.com>"] +build = false include = [ "/benches/**", "/Cargo.toml", @@ -24,6 +25,11 @@ include = [ "/src/**", "/tests/**", ] +autolib = false +autobins = false +autoexamples = false +autotests = false +autobenches = false description = "Parser for Rust source code" documentation = "https://docs.rs/syn" readme = "README.md" @@ -40,12 +46,16 @@ repository = "https://github.com/dtolnay/syn" [package.metadata.docs.rs] all-features = true +targets = ["x86_64-unknown-linux-gnu"] rustdoc-args = [ - "--cfg", - "doc_cfg", "--generate-link-to-definition", + "--generate-macro-expansion", + "--extend-css=src/gen/token.css", + "--extern-html-root-url=core=https://doc.rust-lang.org", + "--extern-html-root-url=alloc=https://doc.rust-lang.org", + "--extern-html-root-url=std=https://doc.rust-lang.org", + "--extern-html-root-url=proc_macro=https://doc.rust-lang.org", ] -targets = ["x86_64-unknown-linux-gnu"] [package.metadata.playground] features = [ @@ -56,30 +66,168 @@ features = [ "extra-traits", ] +[features] +clone-impls = [] +default = 
[ + "derive", + "parsing", + "printing", + "clone-impls", + "proc-macro", +] +derive = [] +extra-traits = [] +fold = [] +full = [] +parsing = [] +printing = ["dep:quote"] +proc-macro = [ + "proc-macro2/proc-macro", + "quote?/proc-macro", +] +test = ["syn-test-suite/all-features"] +visit = [] +visit-mut = [] + [lib] -doc-scrape-examples = false +name = "syn" +path = "src/lib.rs" + +[[test]] +name = "regression" +path = "tests/regression.rs" + +[[test]] +name = "test_asyncness" +path = "tests/test_asyncness.rs" + +[[test]] +name = "test_attribute" +path = "tests/test_attribute.rs" + +[[test]] +name = "test_derive_input" +path = "tests/test_derive_input.rs" + +[[test]] +name = "test_expr" +path = "tests/test_expr.rs" + +[[test]] +name = "test_generics" +path = "tests/test_generics.rs" + +[[test]] +name = "test_grouping" +path = "tests/test_grouping.rs" + +[[test]] +name = "test_ident" +path = "tests/test_ident.rs" + +[[test]] +name = "test_item" +path = "tests/test_item.rs" + +[[test]] +name = "test_lit" +path = "tests/test_lit.rs" + +[[test]] +name = "test_meta" +path = "tests/test_meta.rs" + +[[test]] +name = "test_parse_buffer" +path = "tests/test_parse_buffer.rs" + +[[test]] +name = "test_parse_quote" +path = "tests/test_parse_quote.rs" + +[[test]] +name = "test_parse_stream" +path = "tests/test_parse_stream.rs" + +[[test]] +name = "test_pat" +path = "tests/test_pat.rs" + +[[test]] +name = "test_path" +path = "tests/test_path.rs" + +[[test]] +name = "test_precedence" +path = "tests/test_precedence.rs" + +[[test]] +name = "test_punctuated" +path = "tests/test_punctuated.rs" + +[[test]] +name = "test_receiver" +path = "tests/test_receiver.rs" + +[[test]] +name = "test_round_trip" +path = "tests/test_round_trip.rs" + +[[test]] +name = "test_shebang" +path = "tests/test_shebang.rs" + +[[test]] +name = "test_size" +path = "tests/test_size.rs" + +[[test]] +name = "test_stmt" +path = "tests/test_stmt.rs" + +[[test]] +name = "test_token_trees" +path = "tests/test_token_trees.rs" + +[[test]] +name = "test_ty" +path = "tests/test_ty.rs" + +[[test]] +name = "test_unparenthesize" +path = "tests/test_unparenthesize.rs" + +[[test]] +name = "test_visibility" +path = "tests/test_visibility.rs" + +[[test]] +name = "zzz_stable" +path = "tests/zzz_stable.rs" [[bench]] -name = "rust" -harness = false +name = "file" +path = "benches/file.rs" required-features = [ "full", "parsing", ] [[bench]] -name = "file" +name = "rust" +path = "benches/rust.rs" +harness = false required-features = [ "full", "parsing", ] [dependencies.proc-macro2] -version = "1.0.67" +version = "1.0.91" default-features = false [dependencies.quote] -version = "1.0.28" +version = "1.0.35" optional = true default-features = false @@ -92,59 +240,33 @@ version = "1" [dev-dependencies.automod] version = "1" -[dev-dependencies.flate2] -version = "1" - [dev-dependencies.insta] version = "1" -[dev-dependencies.rayon] -version = "1" - [dev-dependencies.ref-cast] version = "1" -[dev-dependencies.regex] -version = "1" - -[dev-dependencies.reqwest] -version = "0.11" -features = ["blocking"] - [dev-dependencies.rustversion] version = "1" [dev-dependencies.syn-test-suite] version = "0" -[dev-dependencies.tar] -version = "0.4.16" - [dev-dependencies.termcolor] version = "1" -[dev-dependencies.walkdir] -version = "2.3.2" +[target."cfg(not(miri))".dev-dependencies.flate2] +version = "1" -[features] -clone-impls = [] -default = [ - "derive", - "parsing", - "printing", - "clone-impls", - "proc-macro", -] -derive = [] -extra-traits = [] -fold = [] -full = [] 
-parsing = [] -printing = ["quote"] -proc-macro = [ - "proc-macro2/proc-macro", - "quote/proc-macro", -] -test = ["syn-test-suite/all-features"] -visit = [] -visit-mut = [] +[target."cfg(not(miri))".dev-dependencies.rayon] +version = "1" + +[target."cfg(not(miri))".dev-dependencies.reqwest] +version = "0.12" +features = ["blocking"] + +[target."cfg(not(miri))".dev-dependencies.tar] +version = "0.4.16" + +[target."cfg(not(miri))".dev-dependencies.walkdir] +version = "2.3.2" diff --git a/vendor/syn/Cargo.toml.orig b/vendor/syn/Cargo.toml.orig index 1d4231c5..da3dca98 100644 --- a/vendor/syn/Cargo.toml.orig +++ b/vendor/syn/Cargo.toml.orig @@ -1,6 +1,6 @@ [package] name = "syn" -version = "2.0.38" # don't forget to update html_root_url and syn.json +version = "2.0.111" authors = ["David Tolnay <dtolnay@gmail.com>"] categories = ["development-tools::procedural-macro-helpers", "parser-implementations"] description = "Parser for Rust source code" @@ -18,44 +18,42 @@ include = [ keywords = ["macros", "syn"] license = "MIT OR Apache-2.0" repository = "https://github.com/dtolnay/syn" -rust-version = "1.56" +rust-version = "1.68" [features] default = ["derive", "parsing", "printing", "clone-impls", "proc-macro"] derive = [] full = [] parsing = [] -printing = ["quote"] +printing = ["dep:quote"] visit = [] visit-mut = [] fold = [] clone-impls = [] extra-traits = [] -proc-macro = ["proc-macro2/proc-macro", "quote/proc-macro"] +proc-macro = ["proc-macro2/proc-macro", "quote?/proc-macro"] test = ["syn-test-suite/all-features"] [dependencies] -proc-macro2 = { version = "1.0.67", default-features = false } -quote = { version = "1.0.28", optional = true, default-features = false } +proc-macro2 = { version = "1.0.91", default-features = false } +quote = { version = "1.0.35", optional = true, default-features = false } unicode-ident = "1" [dev-dependencies] anyhow = "1" automod = "1" -flate2 = "1" insta = "1" -rayon = "1" ref-cast = "1" -regex = "1" -reqwest = { version = "0.11", features = ["blocking"] } rustversion = "1" syn-test-suite = { version = "0", path = "tests/features" } -tar = "0.4.16" termcolor = "1" -walkdir = "2.3.2" -[lib] -doc-scrape-examples = false +[target.'cfg(not(miri))'.dev-dependencies] +flate2 = "1" +rayon = "1" +reqwest = { version = "0.12", features = ["blocking"] } +tar = "0.4.16" +walkdir = "2.3.2" [[bench]] name = "rust" @@ -69,7 +67,15 @@ required-features = ["full", "parsing"] [package.metadata.docs.rs] all-features = true targets = ["x86_64-unknown-linux-gnu"] -rustdoc-args = ["--cfg", "doc_cfg", "--generate-link-to-definition"] +rustdoc-args = [ + "--generate-link-to-definition", + "--generate-macro-expansion", + "--extend-css=src/gen/token.css", + "--extern-html-root-url=core=https://doc.rust-lang.org", + "--extern-html-root-url=alloc=https://doc.rust-lang.org", + "--extern-html-root-url=std=https://doc.rust-lang.org", + "--extern-html-root-url=proc_macro=https://doc.rust-lang.org", +] [package.metadata.playground] features = ["full", "visit", "visit-mut", "fold", "extra-traits"] diff --git a/vendor/syn/README.md b/vendor/syn/README.md index e8d99abc..9ae472ea 100644 --- a/vendor/syn/README.md +++ b/vendor/syn/README.md @@ -46,8 +46,6 @@ contains some APIs that may be useful more generally. 
[`syn::DeriveInput`]: https://docs.rs/syn/2.0/syn/struct.DeriveInput.html [parser functions]: https://docs.rs/syn/2.0/syn/parse/index.html -*Version requirement: Syn supports rustc 1.56 and up.* - [*Release notes*](https://github.com/dtolnay/syn/releases) <br> diff --git a/vendor/syn/benches/file.rs b/vendor/syn/benches/file.rs index b4247239..6167488c 100644 --- a/vendor/syn/benches/file.rs +++ b/vendor/syn/benches/file.rs @@ -3,11 +3,13 @@ #![feature(rustc_private, test)] #![recursion_limit = "1024"] #![allow( + clippy::elidable_lifetime_names, clippy::items_after_statements, clippy::manual_let_else, clippy::match_like_matches_macro, clippy::missing_panics_doc, clippy::must_use_candidate, + clippy::needless_lifetimes, clippy::uninlined_format_args )] diff --git a/vendor/syn/benches/rust.rs b/vendor/syn/benches/rust.rs index 64397618..ecb9c56f 100644 --- a/vendor/syn/benches/rust.rs +++ b/vendor/syn/benches/rust.rs @@ -6,10 +6,13 @@ #![cfg_attr(not(syn_only), feature(rustc_private))] #![recursion_limit = "1024"] #![allow( + clippy::arc_with_non_send_sync, clippy::cast_lossless, + clippy::elidable_lifetime_names, clippy::let_underscore_untyped, clippy::manual_let_else, clippy::match_like_matches_macro, + clippy::needless_lifetimes, clippy::uninlined_format_args, clippy::unnecessary_wraps )] @@ -23,20 +26,24 @@ mod macros; mod repo; use std::fs; +use std::path::Path; use std::time::{Duration, Instant}; #[cfg(not(syn_only))] mod tokenstream_parse { use proc_macro2::TokenStream; + use std::path::Path; use std::str::FromStr; - pub fn bench(content: &str) -> Result<(), ()> { + pub fn bench(_path: &Path, content: &str) -> Result<(), ()> { TokenStream::from_str(content).map(drop).map_err(drop) } } mod syn_parse { - pub fn bench(content: &str) -> Result<(), ()> { + use std::path::Path; + + pub fn bench(_path: &Path, content: &str) -> Result<(), ()> { syn::parse_file(content).map(drop).map_err(drop) } } @@ -51,45 +58,49 @@ mod librustc_parse { extern crate rustc_session; extern crate rustc_span; - use rustc_data_structures::sync::Lrc; - use rustc_error_messages::FluentBundle; - use rustc_errors::{emitter::Emitter, translation::Translate, Diagnostic, Handler}; + use crate::repo; + use rustc_errors::emitter::Emitter; + use rustc_errors::registry::Registry; + use rustc_errors::translation::Translator; + use rustc_errors::{DiagCtxt, DiagInner}; + use rustc_parse::lexer::StripTokens; use rustc_session::parse::ParseSess; use rustc_span::source_map::{FilePathMapping, SourceMap}; - use rustc_span::{edition::Edition, FileName}; + use rustc_span::FileName; + use std::path::Path; + use std::sync::Arc; - pub fn bench(content: &str) -> Result<(), ()> { + pub fn bench(path: &Path, content: &str) -> Result<(), ()> { struct SilentEmitter; impl Emitter for SilentEmitter { - fn emit_diagnostic(&mut self, _diag: &Diagnostic) {} - fn source_map(&self) -> Option<&Lrc<SourceMap>> { - None - } - } - - impl Translate for SilentEmitter { - fn fluent_bundle(&self) -> Option<&Lrc<FluentBundle>> { + fn emit_diagnostic(&mut self, _diag: DiagInner, _registry: &Registry) {} + fn source_map(&self) -> Option<&SourceMap> { None } - fn fallback_fluent_bundle(&self) -> &FluentBundle { + fn translator(&self) -> &Translator { panic!("silent emitter attempted to translate a diagnostic"); } } - rustc_span::create_session_if_not_set_then(Edition::Edition2018, |_| { - let cm = Lrc::new(SourceMap::new(FilePathMapping::empty())); + let edition = repo::edition(path).parse().unwrap(); + rustc_span::create_session_if_not_set_then(edition, |_| { 
+ let source_map = Arc::new(SourceMap::new(FilePathMapping::empty())); let emitter = Box::new(SilentEmitter); - let handler = Handler::with_emitter(emitter); - let sess = ParseSess::with_span_handler(handler, cm); - if let Err(diagnostic) = rustc_parse::parse_crate_from_source_str( - FileName::Custom("bench".to_owned()), - content.to_owned(), + let handler = DiagCtxt::new(emitter); + let sess = ParseSess::with_dcx(handler, source_map); + let name = FileName::Custom("bench".to_owned()); + let mut parser = rustc_parse::new_parser_from_source_str( &sess, - ) { + name, + content.to_owned(), + StripTokens::ShebangAndFrontmatter, + ) + .unwrap(); + if let Err(diagnostic) = parser.parse_crate_mod() { diagnostic.cancel(); return Err(()); - }; + } Ok(()) }) } @@ -97,13 +108,15 @@ mod librustc_parse { #[cfg(not(syn_only))] mod read_from_disk { - pub fn bench(content: &str) -> Result<(), ()> { + use std::path::Path; + + pub fn bench(_path: &Path, content: &str) -> Result<(), ()> { let _ = content; Ok(()) } } -fn exec(mut codepath: impl FnMut(&str) -> Result<(), ()>) -> Duration { +fn exec(mut codepath: impl FnMut(&Path, &str) -> Result<(), ()>) -> Duration { let begin = Instant::now(); let mut success = 0; let mut total = 0; @@ -122,7 +135,7 @@ fn exec(mut codepath: impl FnMut(&str) -> Result<(), ()>) -> Duration { return; } let content = fs::read_to_string(path).unwrap(); - let ok = codepath(&content).is_ok(); + let ok = codepath(path, &content).is_ok(); success += ok as usize; total += 1; if !ok { @@ -142,7 +155,7 @@ fn main() { [ $( $(#[$cfg])* - (stringify!($name), $name::bench as fn(&str) -> Result<(), ()>), + (stringify!($name), $name::bench as fn(&Path, &str) -> Result<(), ()>), )* ] }; @@ -152,7 +165,7 @@ fn main() { { let mut lines = 0; let mut files = 0; - exec(|content| { + exec(|_path, content| { lines += content.lines().count(); files += 1; Ok(()) diff --git a/vendor/syn/src/attr.rs b/vendor/syn/src/attr.rs index 34d5515a..a543af55 100644 --- a/vendor/syn/src/attr.rs +++ b/vendor/syn/src/attr.rs @@ -1,12 +1,20 @@ -use super::*; -use proc_macro2::TokenStream; -use std::iter; -use std::slice; - +#[cfg(feature = "parsing")] +use crate::error::Error; +#[cfg(feature = "parsing")] +use crate::error::Result; +use crate::expr::Expr; +use crate::mac::MacroDelimiter; #[cfg(feature = "parsing")] use crate::meta::{self, ParseNestedMeta}; #[cfg(feature = "parsing")] -use crate::parse::{Parse, ParseStream, Parser, Result}; +use crate::parse::{Parse, ParseStream, Parser}; +use crate::path::Path; +use crate::token; +use proc_macro2::TokenStream; +#[cfg(feature = "printing")] +use std::iter; +#[cfg(feature = "printing")] +use std::slice; ast_struct! { /// An attribute, like `#[repr(transparent)]`. @@ -77,9 +85,9 @@ ast_struct! { /// [`Attribute::parse_outer`] or [`Attribute::parse_inner`] depending on /// which you intend to parse. /// - /// [`Parse`]: parse::Parse - /// [`ParseStream::parse`]: parse::ParseBuffer::parse - /// [`ParseStream::call`]: parse::ParseBuffer::call + /// [`Parse`]: crate::parse::Parse + /// [`ParseStream::parse`]: crate::parse::ParseBuffer::parse + /// [`ParseStream::call`]: crate::parse::ParseBuffer::call /// /// ``` /// use syn::{Attribute, Ident, Result, Token}; @@ -161,7 +169,7 @@ ast_struct! 
{ /// }; /// assert_eq!(doc, attr); /// ``` - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct Attribute { pub pound_token: Token![#], pub style: AttrStyle, @@ -210,7 +218,7 @@ impl Attribute { /// # anyhow::Ok(()) /// ``` #[cfg(feature = "parsing")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] pub fn parse_args<T: Parse>(&self) -> Result<T> { self.parse_args_with(T::parse) } @@ -233,7 +241,7 @@ impl Attribute { /// # anyhow::Ok(()) /// ``` #[cfg(feature = "parsing")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] pub fn parse_args_with<F: Parser>(&self, parser: F) -> Result<F::Output> { match &self.meta { Meta::Path(path) => Err(crate::error::new2( @@ -379,7 +387,7 @@ impl Attribute { /// # Ok(()) /// ``` #[cfg(feature = "parsing")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] pub fn parse_nested_meta( &self, logic: impl FnMut(ParseNestedMeta) -> Result<()>, @@ -394,7 +402,7 @@ impl Attribute { /// See /// [*Parsing from tokens to Attribute*](#parsing-from-tokens-to-attribute). #[cfg(feature = "parsing")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] pub fn parse_outer(input: ParseStream) -> Result<Vec<Self>> { let mut attrs = Vec::new(); while input.peek(Token![#]) { @@ -410,7 +418,7 @@ impl Attribute { /// See /// [*Parsing from tokens to Attribute*](#parsing-from-tokens-to-attribute). #[cfg(feature = "parsing")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] pub fn parse_inner(input: ParseStream) -> Result<Vec<Self>> { let mut attrs = Vec::new(); parsing::parse_inner(input, &mut attrs)?; @@ -433,14 +441,14 @@ ast_enum! { /// - `#![feature(proc_macro)]` /// - `//! # Example` /// - `/*! Please file an issue */` - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub enum AttrStyle { Outer, Inner(Token![!]), } } -ast_enum_of_structs! { +ast_enum! { /// Content of a compile-time structured attribute. /// /// ## Path @@ -460,8 +468,8 @@ ast_enum_of_structs! { /// /// This type is a [syntax tree enum]. /// - /// [syntax tree enum]: Expr#syntax-tree-enums - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + /// [syntax tree enum]: crate::expr::Expr#syntax-tree-enums + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub enum Meta { Path(Path), @@ -475,7 +483,7 @@ ast_enum_of_structs! { ast_struct! { /// A structured list within an attribute, like `derive(Copy, Clone)`. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct MetaList { pub path: Path, pub delimiter: MacroDelimiter, @@ -485,7 +493,7 @@ ast_struct! { ast_struct! { /// A name-value pair within an attribute, like `feature = "nightly"`. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct MetaNameValue { pub path: Path, pub eq_token: Token![=], @@ -508,7 +516,7 @@ impl Meta { /// Error if this is a `Meta::List` or `Meta::NameValue`. 
#[cfg(feature = "parsing")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] pub fn require_path_only(&self) -> Result<&Path> { let error_span = match self { Meta::Path(path) => return Ok(path), @@ -520,7 +528,7 @@ impl Meta { /// Error if this is a `Meta::Path` or `Meta::NameValue`. #[cfg(feature = "parsing")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] pub fn require_list(&self) -> Result<&MetaList> { match self { Meta::List(meta) => Ok(meta), @@ -538,7 +546,7 @@ impl Meta { /// Error if this is a `Meta::Path` or `Meta::List`. #[cfg(feature = "parsing")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] pub fn require_name_value(&self) -> Result<&MetaNameValue> { match self { Meta::NameValue(meta) => Ok(meta), @@ -558,14 +566,14 @@ impl Meta { impl MetaList { /// See [`Attribute::parse_args`]. #[cfg(feature = "parsing")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] pub fn parse_args<T: Parse>(&self) -> Result<T> { self.parse_args_with(T::parse) } /// See [`Attribute::parse_args_with`]. #[cfg(feature = "parsing")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] pub fn parse_args_with<F: Parser>(&self, parser: F) -> Result<F::Output> { let scope = self.delimiter.span().close(); crate::parse::parse_scoped(parser, scope, self.tokens.clone()) @@ -573,7 +581,7 @@ impl MetaList { /// See [`Attribute::parse_nested_meta`]. #[cfg(feature = "parsing")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] pub fn parse_nested_meta( &self, logic: impl FnMut(ParseNestedMeta) -> Result<()>, @@ -582,13 +590,16 @@ impl MetaList { } } +#[cfg(feature = "printing")] pub(crate) trait FilterAttrs<'a> { type Ret: Iterator<Item = &'a Attribute>; fn outer(self) -> Self::Ret; + #[cfg(feature = "full")] fn inner(self) -> Self::Ret; } +#[cfg(feature = "printing")] impl<'a> FilterAttrs<'a> for &'a [Attribute] { type Ret = iter::Filter<slice::Iter<'a, Attribute>, fn(&&Attribute) -> bool>; @@ -602,6 +613,7 @@ impl<'a> FilterAttrs<'a> for &'a [Attribute] { self.iter().filter(is_outer) } + #[cfg(feature = "full")] fn inner(self) -> Self::Ret { fn is_inner(attr: &&Attribute) -> bool { match attr.style { @@ -613,16 +625,40 @@ impl<'a> FilterAttrs<'a> for &'a [Attribute] { } } +impl From<Path> for Meta { + fn from(meta: Path) -> Meta { + Meta::Path(meta) + } +} + +impl From<MetaList> for Meta { + fn from(meta: MetaList) -> Meta { + Meta::List(meta) + } +} + +impl From<MetaNameValue> for Meta { + fn from(meta: MetaNameValue) -> Meta { + Meta::NameValue(meta) + } +} + #[cfg(feature = "parsing")] pub(crate) mod parsing { - use super::*; - use crate::parse::discouraged::Speculative; - use crate::parse::{Parse, ParseStream, Result}; + use crate::attr::{AttrStyle, Attribute, Meta, MetaList, MetaNameValue}; + use crate::error::Result; + use crate::expr::{Expr, ExprLit}; + use crate::lit::Lit; + use crate::parse::discouraged::Speculative as _; + use crate::parse::{Parse, ParseStream}; + use crate::path::Path; + use crate::{mac, token}; + use proc_macro2::Ident; use std::fmt::{self, Display}; pub(crate) fn parse_inner(input: ParseStream, attrs: &mut Vec<Attribute>) -> Result<()> { while input.peek(Token![#]) && input.peek2(Token![!]) { - 
attrs.push(input.call(parsing::single_parse_inner)?); + attrs.push(input.call(single_parse_inner)?); } Ok(()) } @@ -647,34 +683,45 @@ pub(crate) mod parsing { }) } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for Meta { fn parse(input: ParseStream) -> Result<Self> { - let path = input.call(Path::parse_mod_style)?; + let path = parse_outermost_meta_path(input)?; parse_meta_after_path(path, input) } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for MetaList { fn parse(input: ParseStream) -> Result<Self> { - let path = input.call(Path::parse_mod_style)?; + let path = parse_outermost_meta_path(input)?; parse_meta_list_after_path(path, input) } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for MetaNameValue { fn parse(input: ParseStream) -> Result<Self> { - let path = input.call(Path::parse_mod_style)?; + let path = parse_outermost_meta_path(input)?; parse_meta_name_value_after_path(path, input) } } + // Unlike meta::parse_meta_path which accepts arbitrary keywords in the path, + // only the `unsafe` keyword is accepted as an attribute's outermost path. + fn parse_outermost_meta_path(input: ParseStream) -> Result<Path> { + if input.peek(Token![unsafe]) { + let unsafe_token: Token![unsafe] = input.parse()?; + Ok(Path::from(Ident::new("unsafe", unsafe_token.span))) + } else { + Path::parse_mod_style(input) + } + } + pub(crate) fn parse_meta_after_path(path: Path, input: ParseStream) -> Result<Meta> { if input.peek(token::Paren) || input.peek(token::Bracket) || input.peek(token::Brace) { parse_meta_list_after_path(path, input).map(Meta::List) - } else if input.peek(Token![=]) { + } else if input.peek(Token![=]) && !input.peek(Token![==]) && !input.peek(Token![=>]) { parse_meta_name_value_after_path(path, input).map(Meta::NameValue) } else { Ok(Meta::Path(path)) @@ -740,11 +787,13 @@ pub(crate) mod parsing { #[cfg(feature = "printing")] mod printing { - use super::*; + use crate::attr::{AttrStyle, Attribute, Meta, MetaList, MetaNameValue}; + use crate::path; + use crate::path::printing::PathStyle; use proc_macro2::TokenStream; use quote::ToTokens; - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for Attribute { fn to_tokens(&self, tokens: &mut TokenStream) { self.pound_token.to_tokens(tokens); @@ -757,18 +806,29 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] + impl ToTokens for Meta { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + Meta::Path(path) => path::printing::print_path(tokens, path, PathStyle::Mod), + Meta::List(meta_list) => meta_list.to_tokens(tokens), + Meta::NameValue(meta_name_value) => meta_name_value.to_tokens(tokens), + } + } + } + + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for MetaList { fn to_tokens(&self, tokens: &mut TokenStream) { - self.path.to_tokens(tokens); + path::printing::print_path(tokens, &self.path, PathStyle::Mod); self.delimiter.surround(tokens, self.tokens.clone()); } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for MetaNameValue { fn to_tokens(&self, tokens: &mut TokenStream) { - self.path.to_tokens(tokens); + path::printing::print_path(tokens, &self.path, 
PathStyle::Mod); self.eq_token.to_tokens(tokens); self.value.to_tokens(tokens); } diff --git a/vendor/syn/src/buffer.rs b/vendor/syn/src/buffer.rs index 564ccc75..b0f31484 100644 --- a/vendor/syn/src/buffer.rs +++ b/vendor/syn/src/buffer.rs @@ -5,11 +5,13 @@ // Syn, and caution should be used when editing it. The public-facing interface // is 100% safe but the implementation is fragile internally. +use crate::ext::TokenStreamExt as _; use crate::Lifetime; use proc_macro2::extra::DelimSpan; use proc_macro2::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree}; use std::cmp::Ordering; use std::marker::PhantomData; +use std::ptr; /// Internal type which is used instead of `TokenTree` to represent a token tree /// within a `TokenBuffer`. @@ -20,8 +22,9 @@ enum Entry { Ident(Ident), Punct(Punct), Literal(Literal), - // End entries contain the offset (negative) to the start of the buffer. - End(isize), + // End entries contain the offset (negative) to the start of the buffer, and + // offset (negative) to the matching Group entry. + End(isize, isize), } /// A buffer that can be efficiently traversed multiple times, unlike @@ -42,12 +45,15 @@ impl TokenBuffer { TokenTree::Literal(literal) => entries.push(Entry::Literal(literal)), TokenTree::Group(group) => { let group_start_index = entries.len(); - entries.push(Entry::End(0)); // we replace this below + entries.push(Entry::End(0, 0)); // we replace this below Self::recursive_new(entries, group.stream()); let group_end_index = entries.len(); - entries.push(Entry::End(-(group_end_index as isize))); - let group_end_offset = group_end_index - group_start_index; - entries[group_start_index] = Entry::Group(group, group_end_offset); + let group_offset = group_end_index - group_start_index; + entries.push(Entry::End( + -(group_end_index as isize), + -(group_offset as isize), + )); + entries[group_start_index] = Entry::Group(group, group_offset); } } } @@ -56,7 +62,7 @@ impl TokenBuffer { /// Creates a `TokenBuffer` containing all the tokens from the input /// `proc_macro::TokenStream`. #[cfg(feature = "proc-macro")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "proc-macro")))] + #[cfg_attr(docsrs, doc(cfg(feature = "proc-macro")))] pub fn new(stream: proc_macro::TokenStream) -> Self { Self::new2(stream.into()) } @@ -66,7 +72,7 @@ impl TokenBuffer { pub fn new2(stream: TokenStream) -> Self { let mut entries = Vec::new(); Self::recursive_new(&mut entries, stream); - entries.push(Entry::End(-(entries.len() as isize))); + entries.push(Entry::End(-(entries.len() as isize), 0)); Self { entries: entries.into_boxed_slice(), } @@ -111,7 +117,7 @@ impl<'a> Cursor<'a> { // object in global storage. struct UnsafeSyncEntry(Entry); unsafe impl Sync for UnsafeSyncEntry {} - static EMPTY_ENTRY: UnsafeSyncEntry = UnsafeSyncEntry(Entry::End(0)); + static EMPTY_ENTRY: UnsafeSyncEntry = UnsafeSyncEntry(Entry::End(0, 0)); Cursor { ptr: &EMPTY_ENTRY.0, @@ -128,11 +134,11 @@ impl<'a> Cursor<'a> { // past it, unless `ptr == scope`, which means that we're at the edge of // our cursor's scope. We should only have `ptr != scope` at the exit // from None-delimited groups entered with `ignore_none`. - while let Entry::End(_) = *ptr { - if ptr == scope { + while let Entry::End(..) 
= unsafe { &*ptr } { + if ptr::eq(ptr, scope) { break; } - ptr = ptr.add(1); + ptr = unsafe { ptr.add(1) }; } Cursor { @@ -154,7 +160,7 @@ impl<'a> Cursor<'a> { /// If the cursor is looking at an `Entry::Group`, the bumped cursor will /// point at the first token in the group (with the same scope end). unsafe fn bump_ignore_group(self) -> Cursor<'a> { - Cursor::create(self.ptr.offset(1), self.scope) + unsafe { Cursor::create(self.ptr.offset(1), self.scope) } } /// While the cursor is looking at a `None`-delimited group, move it to look @@ -176,53 +182,7 @@ impl<'a> Cursor<'a> { /// scope. pub fn eof(self) -> bool { // We're at eof if we're at the end of our scope. - self.ptr == self.scope - } - - /// If the cursor is pointing at a `Group` with the given delimiter, returns - /// a cursor into that group and one pointing to the next `TokenTree`. - pub fn group(mut self, delim: Delimiter) -> Option<(Cursor<'a>, DelimSpan, Cursor<'a>)> { - // If we're not trying to enter a none-delimited group, we want to - // ignore them. We have to make sure to _not_ ignore them when we want - // to enter them, of course. For obvious reasons. - if delim != Delimiter::None { - self.ignore_none(); - } - - if let Entry::Group(group, end_offset) = self.entry() { - if group.delimiter() == delim { - let span = group.delim_span(); - let end_of_group = unsafe { self.ptr.add(*end_offset) }; - let inside_of_group = unsafe { Cursor::create(self.ptr.add(1), end_of_group) }; - let after_group = unsafe { Cursor::create(end_of_group, self.scope) }; - return Some((inside_of_group, span, after_group)); - } - } - - None - } - - pub(crate) fn any_group(self) -> Option<(Cursor<'a>, Delimiter, DelimSpan, Cursor<'a>)> { - if let Entry::Group(group, end_offset) = self.entry() { - let delimiter = group.delimiter(); - let span = group.delim_span(); - let end_of_group = unsafe { self.ptr.add(*end_offset) }; - let inside_of_group = unsafe { Cursor::create(self.ptr.add(1), end_of_group) }; - let after_group = unsafe { Cursor::create(end_of_group, self.scope) }; - return Some((inside_of_group, delimiter, span, after_group)); - } - - None - } - - pub(crate) fn any_group_token(self) -> Option<(Group, Cursor<'a>)> { - if let Entry::Group(group, end_offset) = self.entry() { - let end_of_group = unsafe { self.ptr.add(*end_offset) }; - let after_group = unsafe { Cursor::create(end_of_group, self.scope) }; - return Some((group.clone(), after_group)); - } - - None + ptr::eq(self.ptr, self.scope) } /// If the cursor is pointing at a `Ident`, returns it along with a cursor @@ -275,16 +235,64 @@ impl<'a> Cursor<'a> { } } + /// If the cursor is pointing at a `Group` with the given delimiter, returns + /// a cursor into that group and one pointing to the next `TokenTree`. + pub fn group(mut self, delim: Delimiter) -> Option<(Cursor<'a>, DelimSpan, Cursor<'a>)> { + // If we're not trying to enter a none-delimited group, we want to + // ignore them. We have to make sure to _not_ ignore them when we want + // to enter them, of course. For obvious reasons. 
+ if delim != Delimiter::None { + self.ignore_none(); + } + + if let Entry::Group(group, end_offset) = self.entry() { + if group.delimiter() == delim { + let span = group.delim_span(); + let end_of_group = unsafe { self.ptr.add(*end_offset) }; + let inside_of_group = unsafe { Cursor::create(self.ptr.add(1), end_of_group) }; + let after_group = unsafe { Cursor::create(end_of_group, self.scope) }; + return Some((inside_of_group, span, after_group)); + } + } + + None + } + + /// If the cursor is pointing at a `Group`, returns a cursor into the group + /// and one pointing to the next `TokenTree`. + pub fn any_group(self) -> Option<(Cursor<'a>, Delimiter, DelimSpan, Cursor<'a>)> { + if let Entry::Group(group, end_offset) = self.entry() { + let delimiter = group.delimiter(); + let span = group.delim_span(); + let end_of_group = unsafe { self.ptr.add(*end_offset) }; + let inside_of_group = unsafe { Cursor::create(self.ptr.add(1), end_of_group) }; + let after_group = unsafe { Cursor::create(end_of_group, self.scope) }; + return Some((inside_of_group, delimiter, span, after_group)); + } + + None + } + + pub(crate) fn any_group_token(self) -> Option<(Group, Cursor<'a>)> { + if let Entry::Group(group, end_offset) = self.entry() { + let end_of_group = unsafe { self.ptr.add(*end_offset) }; + let after_group = unsafe { Cursor::create(end_of_group, self.scope) }; + return Some((group.clone(), after_group)); + } + + None + } + /// Copies all remaining tokens visible from this cursor into a /// `TokenStream`. pub fn token_stream(self) -> TokenStream { - let mut tts = Vec::new(); + let mut tokens = TokenStream::new(); let mut cursor = self; while let Some((tt, rest)) = cursor.token_tree() { - tts.push(tt); + tokens.append(tt); cursor = rest; } - tts.into_iter().collect() + tokens } /// If the cursor is pointing at a `TokenTree`, returns it along with a @@ -300,7 +308,7 @@ impl<'a> Cursor<'a> { Entry::Literal(literal) => (literal.clone().into(), 1), Entry::Ident(ident) => (ident.clone().into(), 1), Entry::Punct(punct) => (punct.clone().into(), 1), - Entry::End(_) => return None, + Entry::End(..) => return None, }; let rest = unsafe { Cursor::create(self.ptr.add(len), self.scope) }; @@ -309,13 +317,20 @@ impl<'a> Cursor<'a> { /// Returns the `Span` of the current token, or `Span::call_site()` if this /// cursor points to eof. - pub fn span(self) -> Span { + pub fn span(mut self) -> Span { match self.entry() { Entry::Group(group, _) => group.span(), Entry::Literal(literal) => literal.span(), Entry::Ident(ident) => ident.span(), Entry::Punct(punct) => punct.span(), - Entry::End(_) => Span::call_site(), + Entry::End(_, offset) => { + self.ptr = unsafe { self.ptr.offset(*offset) }; + if let Entry::Group(group, _) = self.entry() { + group.span_close() + } else { + Span::call_site() + } + } } } @@ -325,34 +340,19 @@ impl<'a> Cursor<'a> { pub(crate) fn prev_span(mut self) -> Span { if start_of_buffer(self) < self.ptr { self.ptr = unsafe { self.ptr.offset(-1) }; - if let Entry::End(_) = self.entry() { - // Locate the matching Group begin token. - let mut depth = 1; - loop { - self.ptr = unsafe { self.ptr.offset(-1) }; - match self.entry() { - Entry::Group(group, _) => { - depth -= 1; - if depth == 0 { - return group.span(); - } - } - Entry::End(_) => depth += 1, - Entry::Literal(_) | Entry::Ident(_) | Entry::Punct(_) => {} - } - } - } } self.span() } - /// Skip over the next token without cloning it. Returns `None` if this - /// cursor points to eof. 
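The `Entry::End` change above gives every end marker two back-offsets: one to the start of the buffer (used by `start_of_buffer`) and one to the matching `Group` entry (used by the new `scope_delimiter` and the `span()` fallback), so a cursor sitting at a group's end no longer has to scan backwards while tracking nesting depth. A minimal, index-based sketch of that bookkeeping, with made-up entry types rather than syn's raw-pointer layout:

```rust
// Hypothetical flattened token buffer; syn's real Entry stores proc-macro2
// tokens and negative isize offsets applied to raw pointers.
enum Entry {
    Ident(&'static str),
    // (delimiter, forward offset to the matching End entry)
    Group(char, usize),
    // (back offset to the buffer start, back offset to the matching Group)
    End(usize, usize),
}

// Same idea as Cursor::scope_delimiter: from an End entry, hop straight to
// its Group in O(1) and read the delimiter.
fn delimiter_of_scope(entries: &[Entry], end_index: usize) -> Option<char> {
    match &entries[end_index] {
        Entry::End(_, back_to_group) => match &entries[end_index - *back_to_group] {
            Entry::Group(delim, _) => Some(*delim),
            _ => None,
        },
        _ => None,
    }
}

fn main() {
    // `( a b )` flattened as Group, Ident, Ident, End; both back-offsets of
    // the End entry happen to be 3 here (from index 3 back to index 0).
    let buffer = vec![
        Entry::Group('(', 3),
        Entry::Ident("a"),
        Entry::Ident("b"),
        Entry::End(3, 3),
    ];
    assert_eq!(delimiter_of_scope(&buffer, 3), Some('('));
}
```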
+ /// Skip over the next token that is not a None-delimited group, without + /// cloning it. Returns `None` if this cursor points to eof. /// /// This method treats `'lifetimes` as a single token. - pub(crate) fn skip(self) -> Option<Cursor<'a>> { + pub(crate) fn skip(mut self) -> Option<Cursor<'a>> { + self.ignore_none(); + let len = match self.entry() { - Entry::End(_) => return None, + Entry::End(..) => return None, // Treat lifetimes as a single tt for the purposes of 'skip'. Entry::Punct(punct) if punct.as_char() == '\'' && punct.spacing() == Spacing::Joint => { @@ -368,6 +368,16 @@ impl<'a> Cursor<'a> { Some(unsafe { Cursor::create(self.ptr.add(len), self.scope) }) } + + pub(crate) fn scope_delimiter(self) -> Delimiter { + match unsafe { &*self.scope } { + Entry::End(_, offset) => match unsafe { &*self.scope.offset(*offset) } { + Entry::Group(group, _) => group.delimiter(), + _ => Delimiter::None, + }, + _ => unreachable!(), + } + } } impl<'a> Copy for Cursor<'a> {} @@ -382,14 +392,14 @@ impl<'a> Eq for Cursor<'a> {} impl<'a> PartialEq for Cursor<'a> { fn eq(&self, other: &Self) -> bool { - self.ptr == other.ptr + ptr::eq(self.ptr, other.ptr) } } impl<'a> PartialOrd for Cursor<'a> { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { if same_buffer(*self, *other) { - Some(self.ptr.cmp(&other.ptr)) + Some(cmp_assuming_same_buffer(*self, *other)) } else { None } @@ -397,23 +407,22 @@ impl<'a> PartialOrd for Cursor<'a> { } pub(crate) fn same_scope(a: Cursor, b: Cursor) -> bool { - a.scope == b.scope + ptr::eq(a.scope, b.scope) } pub(crate) fn same_buffer(a: Cursor, b: Cursor) -> bool { - start_of_buffer(a) == start_of_buffer(b) + ptr::eq(start_of_buffer(a), start_of_buffer(b)) } fn start_of_buffer(cursor: Cursor) -> *const Entry { unsafe { match &*cursor.scope { - Entry::End(offset) => cursor.scope.offset(*offset), + Entry::End(offset, _) => cursor.scope.offset(*offset), _ => unreachable!(), } } } -#[cfg(any(feature = "full", feature = "derive"))] pub(crate) fn cmp_assuming_same_buffer(a: Cursor, b: Cursor) -> Ordering { a.ptr.cmp(&b.ptr) } @@ -424,10 +433,3 @@ pub(crate) fn open_span_of_group(cursor: Cursor) -> Span { _ => cursor.span(), } } - -pub(crate) fn close_span_of_group(cursor: Cursor) -> Span { - match cursor.entry() { - Entry::Group(group, _) => group.span_close(), - _ => cursor.span(), - } -} diff --git a/vendor/syn/src/classify.rs b/vendor/syn/src/classify.rs new file mode 100644 index 00000000..8eab19db --- /dev/null +++ b/vendor/syn/src/classify.rs @@ -0,0 +1,311 @@ +#[cfg(feature = "full")] +use crate::expr::Expr; +#[cfg(any(feature = "printing", feature = "full"))] +use crate::generics::TypeParamBound; +#[cfg(any(feature = "printing", feature = "full"))] +use crate::path::{Path, PathArguments}; +#[cfg(any(feature = "printing", feature = "full"))] +use crate::punctuated::Punctuated; +#[cfg(any(feature = "printing", feature = "full"))] +use crate::ty::{ReturnType, Type}; +#[cfg(feature = "full")] +use proc_macro2::{Delimiter, TokenStream, TokenTree}; +#[cfg(any(feature = "printing", feature = "full"))] +use std::ops::ControlFlow; + +#[cfg(feature = "full")] +pub(crate) fn requires_semi_to_be_stmt(expr: &Expr) -> bool { + match expr { + Expr::Macro(expr) => !expr.mac.delimiter.is_brace(), + _ => requires_comma_to_be_match_arm(expr), + } +} + +#[cfg(feature = "full")] +pub(crate) fn requires_comma_to_be_match_arm(expr: &Expr) -> bool { + match expr { + Expr::If(_) + | Expr::Match(_) + | Expr::Block(_) | Expr::Unsafe(_) // both under ExprKind::Block in rustc + | 
Expr::While(_) + | Expr::Loop(_) + | Expr::ForLoop(_) + | Expr::TryBlock(_) + | Expr::Const(_) => false, + + Expr::Array(_) + | Expr::Assign(_) + | Expr::Async(_) + | Expr::Await(_) + | Expr::Binary(_) + | Expr::Break(_) + | Expr::Call(_) + | Expr::Cast(_) + | Expr::Closure(_) + | Expr::Continue(_) + | Expr::Field(_) + | Expr::Group(_) + | Expr::Index(_) + | Expr::Infer(_) + | Expr::Let(_) + | Expr::Lit(_) + | Expr::Macro(_) + | Expr::MethodCall(_) + | Expr::Paren(_) + | Expr::Path(_) + | Expr::Range(_) + | Expr::RawAddr(_) + | Expr::Reference(_) + | Expr::Repeat(_) + | Expr::Return(_) + | Expr::Struct(_) + | Expr::Try(_) + | Expr::Tuple(_) + | Expr::Unary(_) + | Expr::Yield(_) + | Expr::Verbatim(_) => true, + } +} + +#[cfg(feature = "printing")] +pub(crate) fn trailing_unparameterized_path(mut ty: &Type) -> bool { + loop { + match ty { + Type::BareFn(t) => match &t.output { + ReturnType::Default => return false, + ReturnType::Type(_, ret) => ty = ret, + }, + Type::ImplTrait(t) => match last_type_in_bounds(&t.bounds) { + ControlFlow::Break(trailing_path) => return trailing_path, + ControlFlow::Continue(t) => ty = t, + }, + Type::Path(t) => match last_type_in_path(&t.path) { + ControlFlow::Break(trailing_path) => return trailing_path, + ControlFlow::Continue(t) => ty = t, + }, + Type::Ptr(t) => ty = &t.elem, + Type::Reference(t) => ty = &t.elem, + Type::TraitObject(t) => match last_type_in_bounds(&t.bounds) { + ControlFlow::Break(trailing_path) => return trailing_path, + ControlFlow::Continue(t) => ty = t, + }, + + Type::Array(_) + | Type::Group(_) + | Type::Infer(_) + | Type::Macro(_) + | Type::Never(_) + | Type::Paren(_) + | Type::Slice(_) + | Type::Tuple(_) + | Type::Verbatim(_) => return false, + } + } + + fn last_type_in_path(path: &Path) -> ControlFlow<bool, &Type> { + match &path.segments.last().unwrap().arguments { + PathArguments::None => ControlFlow::Break(true), + PathArguments::AngleBracketed(_) => ControlFlow::Break(false), + PathArguments::Parenthesized(arg) => match &arg.output { + ReturnType::Default => ControlFlow::Break(false), + ReturnType::Type(_, ret) => ControlFlow::Continue(ret), + }, + } + } + + fn last_type_in_bounds( + bounds: &Punctuated<TypeParamBound, Token![+]>, + ) -> ControlFlow<bool, &Type> { + match bounds.last().unwrap() { + TypeParamBound::Trait(t) => last_type_in_path(&t.path), + TypeParamBound::Lifetime(_) + | TypeParamBound::PreciseCapture(_) + | TypeParamBound::Verbatim(_) => ControlFlow::Break(false), + } + } +} + +/// Whether the expression's first token is the label of a loop/block. 
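`requires_semi_to_be_stmt` and `requires_comma_to_be_match_arm` above encode a plain language rule: expressions that end in their own block (if, match, loops, blocks) terminate a statement or match arm by themselves, while value expressions need the trailing semicolon or comma. A small illustration in ordinary Rust of the rule being classified:

```rust
// Arms whose body is block-like may omit the trailing comma; value arms may not.
fn classify(n: i32) -> &'static str {
    match n {
        // Block-like arm body (an if/else): no comma needed after the brace.
        0 => if n >= 0 { "zero" } else { "unreachable" }
        // Value arm bodies: the comma is required before the next arm.
        1 => "one",
        _ => "many",
    }
}

fn main() {
    assert_eq!(classify(0), "zero");
    assert_eq!(classify(7), "many");
}
```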
+#[cfg(all(feature = "printing", feature = "full"))] +pub(crate) fn expr_leading_label(mut expr: &Expr) -> bool { + loop { + match expr { + Expr::Block(e) => return e.label.is_some(), + Expr::ForLoop(e) => return e.label.is_some(), + Expr::Loop(e) => return e.label.is_some(), + Expr::While(e) => return e.label.is_some(), + + Expr::Assign(e) => expr = &e.left, + Expr::Await(e) => expr = &e.base, + Expr::Binary(e) => expr = &e.left, + Expr::Call(e) => expr = &e.func, + Expr::Cast(e) => expr = &e.expr, + Expr::Field(e) => expr = &e.base, + Expr::Index(e) => expr = &e.expr, + Expr::MethodCall(e) => expr = &e.receiver, + Expr::Range(e) => match &e.start { + Some(start) => expr = start, + None => return false, + }, + Expr::Try(e) => expr = &e.expr, + + Expr::Array(_) + | Expr::Async(_) + | Expr::Break(_) + | Expr::Closure(_) + | Expr::Const(_) + | Expr::Continue(_) + | Expr::Group(_) + | Expr::If(_) + | Expr::Infer(_) + | Expr::Let(_) + | Expr::Lit(_) + | Expr::Macro(_) + | Expr::Match(_) + | Expr::Paren(_) + | Expr::Path(_) + | Expr::RawAddr(_) + | Expr::Reference(_) + | Expr::Repeat(_) + | Expr::Return(_) + | Expr::Struct(_) + | Expr::TryBlock(_) + | Expr::Tuple(_) + | Expr::Unary(_) + | Expr::Unsafe(_) + | Expr::Verbatim(_) + | Expr::Yield(_) => return false, + } + } +} + +/// Whether the expression's last token is `}`. +#[cfg(feature = "full")] +pub(crate) fn expr_trailing_brace(mut expr: &Expr) -> bool { + loop { + match expr { + Expr::Async(_) + | Expr::Block(_) + | Expr::Const(_) + | Expr::ForLoop(_) + | Expr::If(_) + | Expr::Loop(_) + | Expr::Match(_) + | Expr::Struct(_) + | Expr::TryBlock(_) + | Expr::Unsafe(_) + | Expr::While(_) => return true, + + Expr::Assign(e) => expr = &e.right, + Expr::Binary(e) => expr = &e.right, + Expr::Break(e) => match &e.expr { + Some(e) => expr = e, + None => return false, + }, + Expr::Cast(e) => return type_trailing_brace(&e.ty), + Expr::Closure(e) => expr = &e.body, + Expr::Let(e) => expr = &e.expr, + Expr::Macro(e) => return e.mac.delimiter.is_brace(), + Expr::Range(e) => match &e.end { + Some(end) => expr = end, + None => return false, + }, + Expr::RawAddr(e) => expr = &e.expr, + Expr::Reference(e) => expr = &e.expr, + Expr::Return(e) => match &e.expr { + Some(e) => expr = e, + None => return false, + }, + Expr::Unary(e) => expr = &e.expr, + Expr::Verbatim(e) => return tokens_trailing_brace(e), + Expr::Yield(e) => match &e.expr { + Some(e) => expr = e, + None => return false, + }, + + Expr::Array(_) + | Expr::Await(_) + | Expr::Call(_) + | Expr::Continue(_) + | Expr::Field(_) + | Expr::Group(_) + | Expr::Index(_) + | Expr::Infer(_) + | Expr::Lit(_) + | Expr::MethodCall(_) + | Expr::Paren(_) + | Expr::Path(_) + | Expr::Repeat(_) + | Expr::Try(_) + | Expr::Tuple(_) => return false, + } + } + + fn type_trailing_brace(mut ty: &Type) -> bool { + loop { + match ty { + Type::BareFn(t) => match &t.output { + ReturnType::Default => return false, + ReturnType::Type(_, ret) => ty = ret, + }, + Type::ImplTrait(t) => match last_type_in_bounds(&t.bounds) { + ControlFlow::Break(trailing_brace) => return trailing_brace, + ControlFlow::Continue(t) => ty = t, + }, + Type::Macro(t) => return t.mac.delimiter.is_brace(), + Type::Path(t) => match last_type_in_path(&t.path) { + Some(t) => ty = t, + None => return false, + }, + Type::Ptr(t) => ty = &t.elem, + Type::Reference(t) => ty = &t.elem, + Type::TraitObject(t) => match last_type_in_bounds(&t.bounds) { + ControlFlow::Break(trailing_brace) => return trailing_brace, + ControlFlow::Continue(t) => ty = t, + }, + 
Type::Verbatim(t) => return tokens_trailing_brace(t), + + Type::Array(_) + | Type::Group(_) + | Type::Infer(_) + | Type::Never(_) + | Type::Paren(_) + | Type::Slice(_) + | Type::Tuple(_) => return false, + } + } + } + + fn last_type_in_path(path: &Path) -> Option<&Type> { + match &path.segments.last().unwrap().arguments { + PathArguments::None | PathArguments::AngleBracketed(_) => None, + PathArguments::Parenthesized(arg) => match &arg.output { + ReturnType::Default => None, + ReturnType::Type(_, ret) => Some(ret), + }, + } + } + + fn last_type_in_bounds( + bounds: &Punctuated<TypeParamBound, Token![+]>, + ) -> ControlFlow<bool, &Type> { + match bounds.last().unwrap() { + TypeParamBound::Trait(t) => match last_type_in_path(&t.path) { + Some(t) => ControlFlow::Continue(t), + None => ControlFlow::Break(false), + }, + TypeParamBound::Lifetime(_) | TypeParamBound::PreciseCapture(_) => { + ControlFlow::Break(false) + } + TypeParamBound::Verbatim(t) => ControlFlow::Break(tokens_trailing_brace(t)), + } + } + + fn tokens_trailing_brace(tokens: &TokenStream) -> bool { + if let Some(TokenTree::Group(last)) = tokens.clone().into_iter().last() { + last.delimiter() == Delimiter::Brace + } else { + false + } + } +} diff --git a/vendor/syn/src/custom_keyword.rs b/vendor/syn/src/custom_keyword.rs index 9f3ad870..cc4f632c 100644 --- a/vendor/syn/src/custom_keyword.rs +++ b/vendor/syn/src/custom_keyword.rs @@ -91,6 +91,7 @@ macro_rules! custom_keyword { ($ident:ident) => { #[allow(non_camel_case_types)] pub struct $ident { + #[allow(dead_code)] pub span: $crate::__private::Span, } @@ -128,7 +129,7 @@ macro_rules! custom_keyword { macro_rules! impl_parse_for_custom_keyword { ($ident:ident) => { // For peek. - impl $crate::token::CustomToken for $ident { + impl $crate::__private::CustomToken for $ident { fn peek(cursor: $crate::buffer::Cursor) -> $crate::__private::bool { if let $crate::__private::Some((ident, _rest)) = cursor.ident() { ident == $crate::__private::stringify!($ident) diff --git a/vendor/syn/src/custom_punctuation.rs b/vendor/syn/src/custom_punctuation.rs index 062fe516..568bc5d9 100644 --- a/vendor/syn/src/custom_punctuation.rs +++ b/vendor/syn/src/custom_punctuation.rs @@ -31,6 +31,7 @@ /// /// ``` /// use proc_macro2::{TokenStream, TokenTree}; +/// use std::iter; /// use syn::parse::{Parse, ParseStream, Peek, Result}; /// use syn::punctuated::Punctuated; /// use syn::Expr; @@ -64,7 +65,7 @@ /// let mut tokens = TokenStream::new(); /// while !input.is_empty() && !input.peek(end) { /// let next: TokenTree = input.parse()?; -/// tokens.extend(Some(next)); +/// tokens.extend(iter::once(next)); /// } /// Ok(tokens) /// } @@ -78,6 +79,7 @@ macro_rules! custom_punctuation { ($ident:ident, $($tt:tt)+) => { pub struct $ident { + #[allow(dead_code)] pub spans: $crate::custom_punctuation_repr!($($tt)+), } @@ -113,7 +115,7 @@ macro_rules! custom_punctuation { #[macro_export] macro_rules! impl_parse_for_custom_punctuation { ($ident:ident, $($tt:tt)+) => { - impl $crate::token::CustomToken for $ident { + impl $crate::__private::CustomToken for $ident { fn peek(cursor: $crate::buffer::Cursor) -> $crate::__private::bool { $crate::__private::peek_punct(cursor, $crate::stringify_punct!($($tt)+)) } @@ -236,50 +238,51 @@ macro_rules! custom_punctuation_repr { #[macro_export] #[rustfmt::skip] macro_rules! 
custom_punctuation_len { - ($mode:ident, +) => { 1 }; - ($mode:ident, +=) => { 2 }; ($mode:ident, &) => { 1 }; ($mode:ident, &&) => { 2 }; ($mode:ident, &=) => { 2 }; ($mode:ident, @) => { 1 }; - ($mode:ident, !) => { 1 }; ($mode:ident, ^) => { 1 }; ($mode:ident, ^=) => { 2 }; ($mode:ident, :) => { 1 }; - ($mode:ident, ::) => { 2 }; ($mode:ident, ,) => { 1 }; - ($mode:ident, /) => { 1 }; - ($mode:ident, /=) => { 2 }; + ($mode:ident, $) => { 1 }; ($mode:ident, .) => { 1 }; ($mode:ident, ..) => { 2 }; ($mode:ident, ...) => { 3 }; ($mode:ident, ..=) => { 3 }; ($mode:ident, =) => { 1 }; ($mode:ident, ==) => { 2 }; + ($mode:ident, =>) => { 2 }; ($mode:ident, >=) => { 2 }; ($mode:ident, >) => { 1 }; + ($mode:ident, <-) => { 2 }; ($mode:ident, <=) => { 2 }; ($mode:ident, <) => { 1 }; - ($mode:ident, *=) => { 2 }; + ($mode:ident, -) => { 1 }; + ($mode:ident, -=) => { 2 }; ($mode:ident, !=) => { 2 }; + ($mode:ident, !) => { 1 }; ($mode:ident, |) => { 1 }; ($mode:ident, |=) => { 2 }; ($mode:ident, ||) => { 2 }; + ($mode:ident, ::) => { 2 }; + ($mode:ident, %) => { 1 }; + ($mode:ident, %=) => { 2 }; + ($mode:ident, +) => { 1 }; + ($mode:ident, +=) => { 2 }; ($mode:ident, #) => { 1 }; ($mode:ident, ?) => { 1 }; ($mode:ident, ->) => { 2 }; - ($mode:ident, <-) => { 2 }; - ($mode:ident, %) => { 1 }; - ($mode:ident, %=) => { 2 }; - ($mode:ident, =>) => { 2 }; ($mode:ident, ;) => { 1 }; ($mode:ident, <<) => { 2 }; ($mode:ident, <<=) => { 3 }; ($mode:ident, >>) => { 2 }; ($mode:ident, >>=) => { 3 }; + ($mode:ident, /) => { 1 }; + ($mode:ident, /=) => { 2 }; ($mode:ident, *) => { 1 }; - ($mode:ident, -) => { 1 }; - ($mode:ident, -=) => { 2 }; + ($mode:ident, *=) => { 2 }; ($mode:ident, ~) => { 1 }; (lenient, $tt:tt) => { 0 }; (strict, $tt:tt) => {{ $crate::custom_punctuation_unexpected!($tt); 0 }}; diff --git a/vendor/syn/src/data.rs b/vendor/syn/src/data.rs index 431c0857..f973004d 100644 --- a/vendor/syn/src/data.rs +++ b/vendor/syn/src/data.rs @@ -1,9 +1,14 @@ -use super::*; -use crate::punctuated::Punctuated; +use crate::attr::Attribute; +use crate::expr::{Expr, Index, Member}; +use crate::ident::Ident; +use crate::punctuated::{self, Punctuated}; +use crate::restriction::{FieldMutability, Visibility}; +use crate::token; +use crate::ty::Type; ast_struct! { /// An enum variant. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct Variant { pub attrs: Vec<Attribute>, @@ -25,8 +30,8 @@ ast_enum_of_structs! { /// /// This type is a [syntax tree enum]. /// - /// [syntax tree enum]: Expr#syntax-tree-enums - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + /// [syntax tree enum]: crate::expr::Expr#syntax-tree-enums + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub enum Fields { /// Named fields of a struct or struct variant such as `Point { x: f64, /// y: f64 }`. @@ -43,7 +48,7 @@ ast_enum_of_structs! { ast_struct! { /// Named fields of a struct or struct variant such as `Point { x: f64, /// y: f64 }`. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct FieldsNamed { pub brace_token: token::Brace, pub named: Punctuated<Field, Token![,]>, @@ -52,7 +57,7 @@ ast_struct! { ast_struct! { /// Unnamed fields of a tuple struct or tuple variant such as `Some(T)`. 
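The `custom_keyword!` / `custom_punctuation!` hunks (the `#[allow(dead_code)]` on the generated field and the switch to naming `CustomToken` through `$crate::__private`) only touch macro-generated code, so downstream usage stays the same. A sketch of the usual pattern, using a hypothetical `opaque` keyword and assuming syn's default features:

```rust
use syn::parse::{Parse, ParseStream};
use syn::{Result, Type};

// Hypothetical DSL fragment: an optional `opaque` marker in front of a type,
// e.g. `opaque std::fs::File` or just `std::fs::File`.
mod kw {
    syn::custom_keyword!(opaque);
}

struct MaybeOpaqueType {
    opaque_token: Option<kw::opaque>,
    ty: Type,
}

impl Parse for MaybeOpaqueType {
    fn parse(input: ParseStream) -> Result<Self> {
        Ok(MaybeOpaqueType {
            // peek/parse go through the macro-generated impls shown above.
            opaque_token: if input.peek(kw::opaque) {
                Some(input.parse()?)
            } else {
                None
            },
            ty: input.parse()?,
        })
    }
}

fn main() -> Result<()> {
    let parsed: MaybeOpaqueType = syn::parse_str("opaque std::fs::File")?;
    assert!(parsed.opaque_token.is_some());
    let _ = parsed.ty;
    Ok(())
}
```

The `__private::CustomToken` re-export added in the export.rs hunk further down is what lets the generated `peek` impl resolve that trait path.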
- #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct FieldsUnnamed { pub paren_token: token::Paren, pub unnamed: Punctuated<Field, Token![,]>, @@ -99,6 +104,47 @@ impl Fields { Fields::Unnamed(f) => f.unnamed.is_empty(), } } + + return_impl_trait! { + /// Get an iterator over the fields of a struct or variant as [`Member`]s. + /// This iterator can be used to iterate over a named or unnamed struct or + /// variant's fields uniformly. + /// + /// # Example + /// + /// The following is a simplistic [`Clone`] derive for structs. (A more + /// complete implementation would additionally want to infer trait bounds on + /// the generic type parameters.) + /// + /// ``` + /// # use quote::quote; + /// # + /// fn derive_clone(input: &syn::ItemStruct) -> proc_macro2::TokenStream { + /// let ident = &input.ident; + /// let members = input.fields.members(); + /// let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); + /// quote! { + /// impl #impl_generics Clone for #ident #ty_generics #where_clause { + /// fn clone(&self) -> Self { + /// Self { + /// #(#members: self.#members.clone()),* + /// } + /// } + /// } + /// } + /// } + /// ``` + /// + /// For structs with named fields, it produces an expression like `Self { a: + /// self.a.clone() }`. For structs with unnamed fields, `Self { 0: + /// self.0.clone() }`. And for unit structs, `Self {}`. + pub fn members(&self) -> impl Iterator<Item = Member> + Clone + '_ [Members] { + Members { + fields: self.iter(), + index: 0, + } + } + } } impl IntoIterator for Fields { @@ -134,7 +180,7 @@ impl<'a> IntoIterator for &'a mut Fields { ast_struct! { /// A field of a struct or enum variant. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct Field { pub attrs: Vec<Attribute>, @@ -153,13 +199,62 @@ ast_struct! 
{ } } +pub struct Members<'a> { + fields: punctuated::Iter<'a, Field>, + index: u32, +} + +impl<'a> Iterator for Members<'a> { + type Item = Member; + + fn next(&mut self) -> Option<Self::Item> { + let field = self.fields.next()?; + let member = match &field.ident { + Some(ident) => Member::Named(ident.clone()), + None => { + #[cfg(all(feature = "parsing", feature = "printing"))] + let span = crate::spanned::Spanned::span(&field.ty); + #[cfg(not(all(feature = "parsing", feature = "printing")))] + let span = proc_macro2::Span::call_site(); + Member::Unnamed(Index { + index: self.index, + span, + }) + } + }; + self.index += 1; + Some(member) + } +} + +impl<'a> Clone for Members<'a> { + fn clone(&self) -> Self { + Members { + fields: self.fields.clone(), + index: self.index, + } + } +} + #[cfg(feature = "parsing")] pub(crate) mod parsing { - use super::*; - use crate::ext::IdentExt; - use crate::parse::{Parse, ParseStream, Result}; - - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + use crate::attr::Attribute; + use crate::data::{Field, Fields, FieldsNamed, FieldsUnnamed, Variant}; + use crate::error::Result; + use crate::expr::Expr; + use crate::ext::IdentExt as _; + use crate::ident::Ident; + #[cfg(not(feature = "full"))] + use crate::parse::discouraged::Speculative as _; + use crate::parse::{Parse, ParseStream}; + use crate::restriction::{FieldMutability, Visibility}; + #[cfg(not(feature = "full"))] + use crate::scan_expr::scan_expr; + use crate::token; + use crate::ty::Type; + use crate::verbatim; + + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for Variant { fn parse(input: ParseStream) -> Result<Self> { let attrs = input.call(Attribute::parse_outer)?; @@ -174,7 +269,20 @@ pub(crate) mod parsing { }; let discriminant = if input.peek(Token![=]) { let eq_token: Token![=] = input.parse()?; + #[cfg(feature = "full")] let discriminant: Expr = input.parse()?; + #[cfg(not(feature = "full"))] + let discriminant = { + let begin = input.fork(); + let ahead = input.fork(); + let mut discriminant: Result<Expr> = ahead.parse(); + if discriminant.is_ok() { + input.advance_to(&ahead); + } else if scan_expr(input).is_ok() { + discriminant = Ok(Expr::Verbatim(verbatim::between(&begin, input))); + } + discriminant? + }; Some((eq_token, discriminant)) } else { None @@ -188,7 +296,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for FieldsNamed { fn parse(input: ParseStream) -> Result<Self> { let content; @@ -199,7 +307,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for FieldsUnnamed { fn parse(input: ParseStream) -> Result<Self> { let content; @@ -212,7 +320,7 @@ pub(crate) mod parsing { impl Field { /// Parses a named (braced struct) field. - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] pub fn parse_named(input: ParseStream) -> Result<Self> { let attrs = input.call(Attribute::parse_outer)?; let vis: Visibility = input.parse()?; @@ -249,7 +357,7 @@ pub(crate) mod parsing { } /// Parses an unnamed (tuple struct) field. 
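The reworked `FieldsNamed` parser above uses `content.parse_terminated(Field::parse_named, Token![,])`; the same pair works in hand-written parsers that want a struct-like field list. A minimal sketch around a hypothetical `FieldBlock` type, assuming syn's default features:

```rust
use syn::parse::{Parse, ParseStream};
use syn::punctuated::Punctuated;
use syn::{braced, Field, Result, Token};

// A custom body that reuses syn's named-field grammar: `{ a: u8, b: String }`.
struct FieldBlock {
    brace_token: syn::token::Brace,
    fields: Punctuated<Field, Token![,]>,
}

impl Parse for FieldBlock {
    fn parse(input: ParseStream) -> Result<Self> {
        let content;
        Ok(FieldBlock {
            brace_token: braced!(content in input),
            fields: content.parse_terminated(Field::parse_named, Token![,])?,
        })
    }
}

fn main() -> Result<()> {
    let block: FieldBlock = syn::parse_str("{ a: u8, b: String }")?;
    assert_eq!(block.fields.len(), 2);
    let _ = block.brace_token;
    Ok(())
}
```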
- #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] pub fn parse_unnamed(input: ParseStream) -> Result<Self> { Ok(Field { attrs: input.call(Attribute::parse_outer)?, @@ -265,12 +373,12 @@ pub(crate) mod parsing { #[cfg(feature = "printing")] mod printing { - use super::*; + use crate::data::{Field, FieldsNamed, FieldsUnnamed, Variant}; use crate::print::TokensOrDefault; use proc_macro2::TokenStream; - use quote::{ToTokens, TokenStreamExt}; + use quote::{ToTokens, TokenStreamExt as _}; - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for Variant { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(&self.attrs); @@ -283,7 +391,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for FieldsNamed { fn to_tokens(&self, tokens: &mut TokenStream) { self.brace_token.surround(tokens, |tokens| { @@ -292,7 +400,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for FieldsUnnamed { fn to_tokens(&self, tokens: &mut TokenStream) { self.paren_token.surround(tokens, |tokens| { @@ -301,7 +409,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for Field { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(&self.attrs); diff --git a/vendor/syn/src/derive.rs b/vendor/syn/src/derive.rs index 25fa4c91..3443ecfc 100644 --- a/vendor/syn/src/derive.rs +++ b/vendor/syn/src/derive.rs @@ -1,9 +1,14 @@ -use super::*; +use crate::attr::Attribute; +use crate::data::{Fields, FieldsNamed, Variant}; +use crate::generics::Generics; +use crate::ident::Ident; use crate::punctuated::Punctuated; +use crate::restriction::Visibility; +use crate::token; ast_struct! { /// Data structure sent to a `proc_macro_derive` macro. - #[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))] + #[cfg_attr(docsrs, doc(cfg(feature = "derive")))] pub struct DeriveInput { pub attrs: Vec<Attribute>, pub vis: Visibility, @@ -20,8 +25,8 @@ ast_enum! { /// /// This type is a [syntax tree enum]. /// - /// [syntax tree enum]: Expr#syntax-tree-enums - #[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))] + /// [syntax tree enum]: crate::expr::Expr#syntax-tree-enums + #[cfg_attr(docsrs, doc(cfg(feature = "derive")))] pub enum Data { Struct(DataStruct), Enum(DataEnum), @@ -31,7 +36,7 @@ ast_enum! { ast_struct! { /// A struct input to a `proc_macro_derive` macro. - #[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))] + #[cfg_attr(docsrs, doc(cfg(feature = "derive")))] pub struct DataStruct { pub struct_token: Token![struct], pub fields: Fields, @@ -41,7 +46,7 @@ ast_struct! { ast_struct! { /// An enum input to a `proc_macro_derive` macro. - #[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))] + #[cfg_attr(docsrs, doc(cfg(feature = "derive")))] pub struct DataEnum { pub enum_token: Token![enum], pub brace_token: token::Brace, @@ -51,7 +56,7 @@ ast_struct! { ast_struct! { /// An untagged union input to a `proc_macro_derive` macro. - #[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))] + #[cfg_attr(docsrs, doc(cfg(feature = "derive")))] pub struct DataUnion { pub union_token: Token![union], pub fields: FieldsNamed, @@ -60,10 +65,18 @@ ast_struct! 
{ #[cfg(feature = "parsing")] pub(crate) mod parsing { - use super::*; - use crate::parse::{Parse, ParseStream, Result}; + use crate::attr::Attribute; + use crate::data::{Fields, FieldsNamed, Variant}; + use crate::derive::{Data, DataEnum, DataStruct, DataUnion, DeriveInput}; + use crate::error::Result; + use crate::generics::{Generics, WhereClause}; + use crate::ident::Ident; + use crate::parse::{Parse, ParseStream}; + use crate::punctuated::Punctuated; + use crate::restriction::Visibility; + use crate::token; - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for DeriveInput { fn parse(input: ParseStream) -> Result<Self> { let attrs = input.call(Attribute::parse_outer)?; @@ -193,13 +206,14 @@ pub(crate) mod parsing { #[cfg(feature = "printing")] mod printing { - use super::*; use crate::attr::FilterAttrs; + use crate::data::Fields; + use crate::derive::{Data, DeriveInput}; use crate::print::TokensOrDefault; use proc_macro2::TokenStream; use quote::ToTokens; - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for DeriveInput { fn to_tokens(&self, tokens: &mut TokenStream) { for attr in self.attrs.outer() { diff --git a/vendor/syn/src/discouraged.rs b/vendor/syn/src/discouraged.rs index fb98d633..c8d6bfe8 100644 --- a/vendor/syn/src/discouraged.rs +++ b/vendor/syn/src/discouraged.rs @@ -1,7 +1,13 @@ //! Extensions to the parsing API with niche applicability. -use super::*; +use crate::buffer::Cursor; +use crate::error::Result; +use crate::parse::{inner_unexpected, ParseBuffer, Unexpected}; use proc_macro2::extra::DelimSpan; +use proc_macro2::Delimiter; +use std::cell::Cell; +use std::mem; +use std::rc::Rc; /// Extensions to the `ParseStream` API to support speculative parsing. pub trait Speculative { @@ -161,7 +167,7 @@ pub trait Speculative { impl<'a> Speculative for ParseBuffer<'a> { fn advance_to(&self, fork: &Self) { if !crate::buffer::same_scope(self.cursor(), fork.cursor()) { - panic!("Fork was not derived from the advancing parse stream"); + panic!("fork was not derived from the advancing parse stream"); } let (self_unexp, self_sp) = inner_unexpected(self); @@ -169,17 +175,17 @@ impl<'a> Speculative for ParseBuffer<'a> { if !Rc::ptr_eq(&self_unexp, &fork_unexp) { match (fork_sp, self_sp) { // Unexpected set on the fork, but not on `self`, copy it over. - (Some(span), None) => { - self_unexp.set(Unexpected::Some(span)); + (Some((span, delimiter)), None) => { + self_unexp.set(Unexpected::Some(span, delimiter)); } // Unexpected unset. Use chain to propagate errors from fork. (None, None) => { fork_unexp.set(Unexpected::Chain(self_unexp)); // Ensure toplevel 'unexpected' tokens from the fork don't - // bubble up the chain by replacing the root `unexpected` + // propagate up the chain by replacing the root `unexpected` // pointer, only 'unexpected' tokens from existing group - // parsers should bubble. + // parsers should propagate. 
fork.unexpected .set(Some(Rc::new(Cell::new(Unexpected::None)))); } @@ -206,7 +212,7 @@ impl<'a> AnyDelimiter for ParseBuffer<'a> { fn parse_any_delimiter(&self) -> Result<(Delimiter, DelimSpan, ParseBuffer)> { self.step(|cursor| { if let Some((content, delimiter, span, rest)) = cursor.any_group() { - let scope = crate::buffer::close_span_of_group(*cursor); + let scope = span.close(); let nested = crate::parse::advance_step_cursor(cursor, content); let unexpected = crate::parse::get_unexpected(self); let content = crate::parse::new_parse_buffer(scope, nested, unexpected); diff --git a/vendor/syn/src/drops.rs b/vendor/syn/src/drops.rs index 89b42d82..c54308f0 100644 --- a/vendor/syn/src/drops.rs +++ b/vendor/syn/src/drops.rs @@ -32,10 +32,10 @@ impl<T: ?Sized> DerefMut for NoDrop<T> { pub(crate) trait TrivialDrop {} impl<T> TrivialDrop for iter::Empty<T> {} -impl<'a, T> TrivialDrop for slice::Iter<'a, T> {} -impl<'a, T> TrivialDrop for slice::IterMut<'a, T> {} -impl<'a, T> TrivialDrop for option::IntoIter<&'a T> {} -impl<'a, T> TrivialDrop for option::IntoIter<&'a mut T> {} +impl<T> TrivialDrop for slice::Iter<'_, T> {} +impl<T> TrivialDrop for slice::IterMut<'_, T> {} +impl<T> TrivialDrop for option::IntoIter<&T> {} +impl<T> TrivialDrop for option::IntoIter<&mut T> {} #[test] fn test_needs_drop() { diff --git a/vendor/syn/src/error.rs b/vendor/syn/src/error.rs index 3fe31d5c..f89278c2 100644 --- a/vendor/syn/src/error.rs +++ b/vendor/syn/src/error.rs @@ -1,5 +1,6 @@ #[cfg(feature = "parsing")] use crate::buffer::Cursor; +use crate::ext::{PunctExt as _, TokenStreamExt as _}; use crate::thread::ThreadBound; use proc_macro2::{ Delimiter, Group, Ident, LexError, Literal, Punct, Spacing, Span, TokenStream, TokenTree, @@ -185,6 +186,7 @@ impl Error { /// When in doubt it's recommended to stick to `Error::new` (or /// `ParseStream::error`)! #[cfg(feature = "printing")] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] pub fn new_spanned<T: ToTokens, U: Display>(tokens: T, message: U) -> Self { return new_spanned(tokens.into_token_stream(), message.to_string()); @@ -222,10 +224,11 @@ impl Error { /// [`compile_error!`]: std::compile_error! /// [`parse_macro_input!`]: crate::parse_macro_input! pub fn to_compile_error(&self) -> TokenStream { - self.messages - .iter() - .map(ErrorMessage::to_compile_error) - .collect() + let mut tokens = TokenStream::new(); + for msg in &self.messages { + ErrorMessage::to_compile_error(msg, &mut tokens); + } + tokens } /// Render the error as an invocation of [`compile_error!`]. 
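`ErrorMessage::to_compile_error` now appends into one `TokenStream` instead of collecting a `Vec<TokenTree>`, but the public behaviour of `Error::to_compile_error` is unchanged. A sketch of the usual call site in a derive-style expansion (the `expand` function is hypothetical; assumes `quote` and `proc-macro2` alongside syn):

```rust
use proc_macro2::TokenStream;
use quote::quote;
use syn::{Data, DeriveInput, Error};

// Report unsupported input with a spanned error instead of panicking.
fn expand(input: &DeriveInput) -> TokenStream {
    match &input.data {
        Data::Struct(_) => quote! { /* generated impl would go here */ },
        _ => Error::new_spanned(&input.ident, "this derive only supports structs")
            .to_compile_error(),
    }
}

fn main() -> syn::Result<()> {
    let input: DeriveInput = syn::parse_str("enum E { A }")?;
    let tokens = expand(&input);
    // The emitted tokens are a `::core::compile_error!` invocation.
    assert!(tokens.to_string().contains("compile_error"));
    Ok(())
}
```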
@@ -272,53 +275,52 @@ impl Error { } impl ErrorMessage { - fn to_compile_error(&self) -> TokenStream { + fn to_compile_error(&self, tokens: &mut TokenStream) { let (start, end) = match self.span.get() { Some(range) => (range.start, range.end), None => (Span::call_site(), Span::call_site()), }; // ::core::compile_error!($message) - TokenStream::from_iter(vec![ - TokenTree::Punct({ - let mut punct = Punct::new(':', Spacing::Joint); - punct.set_span(start); - punct - }), - TokenTree::Punct({ - let mut punct = Punct::new(':', Spacing::Alone); - punct.set_span(start); - punct - }), - TokenTree::Ident(Ident::new("core", start)), - TokenTree::Punct({ - let mut punct = Punct::new(':', Spacing::Joint); - punct.set_span(start); - punct - }), - TokenTree::Punct({ - let mut punct = Punct::new(':', Spacing::Alone); - punct.set_span(start); - punct - }), - TokenTree::Ident(Ident::new("compile_error", start)), - TokenTree::Punct({ - let mut punct = Punct::new('!', Spacing::Alone); - punct.set_span(start); - punct - }), - TokenTree::Group({ - let mut group = Group::new(Delimiter::Brace, { - TokenStream::from_iter(vec![TokenTree::Literal({ - let mut string = Literal::string(&self.message); - string.set_span(end); - string - })]) - }); - group.set_span(end); - group - }), - ]) + tokens.append(TokenTree::Punct(Punct::new_spanned( + ':', + Spacing::Joint, + start, + ))); + tokens.append(TokenTree::Punct(Punct::new_spanned( + ':', + Spacing::Alone, + start, + ))); + tokens.append(TokenTree::Ident(Ident::new("core", start))); + tokens.append(TokenTree::Punct(Punct::new_spanned( + ':', + Spacing::Joint, + start, + ))); + tokens.append(TokenTree::Punct(Punct::new_spanned( + ':', + Spacing::Alone, + start, + ))); + tokens.append(TokenTree::Ident(Ident::new("compile_error", start))); + tokens.append(TokenTree::Punct(Punct::new_spanned( + '!', + Spacing::Alone, + start, + ))); + tokens.append(TokenTree::Group({ + let mut group = Group::new( + Delimiter::Brace, + TokenStream::from({ + let mut string = Literal::string(&self.message); + string.set_span(end); + TokenTree::Literal(string) + }), + ); + group.set_span(end); + group + })); } } @@ -403,7 +405,7 @@ impl std::error::Error for Error {} impl From<LexError> for Error { fn from(err: LexError) -> Self { - Error::new(err.span(), "lex error") + Error::new(err.span(), err) } } diff --git a/vendor/syn/src/export.rs b/vendor/syn/src/export.rs index febd322e..b9ea5c74 100644 --- a/vendor/syn/src/export.rs +++ b/vendor/syn/src/export.rs @@ -57,6 +57,10 @@ pub use crate::token::parsing::{peek_punct, punct as parse_punct}; #[doc(hidden)] pub use crate::token::printing::punct as print_punct; +#[cfg(feature = "parsing")] +#[doc(hidden)] +pub use crate::token::private::CustomToken; + #[cfg(feature = "proc-macro")] #[doc(hidden)] pub type TokenStream = proc_macro::TokenStream; diff --git a/vendor/syn/src/expr.rs b/vendor/syn/src/expr.rs index ae723242..b1b16465 100644 --- a/vendor/syn/src/expr.rs +++ b/vendor/syn/src/expr.rs @@ -1,12 +1,35 @@ -use super::*; +use crate::attr::Attribute; +#[cfg(all(feature = "parsing", feature = "full"))] +use crate::error::Result; +#[cfg(feature = "parsing")] +use crate::ext::IdentExt as _; +#[cfg(feature = "full")] +use crate::generics::BoundLifetimes; +use crate::ident::Ident; +#[cfg(any(feature = "parsing", feature = "full"))] +use crate::lifetime::Lifetime; +use crate::lit::Lit; +use crate::mac::Macro; +use crate::op::{BinOp, UnOp}; +#[cfg(feature = "parsing")] +use crate::parse::ParseStream; +#[cfg(feature = "full")] +use crate::pat::Pat; 
+use crate::path::{AngleBracketedGenericArguments, Path, QSelf}; use crate::punctuated::Punctuated; +#[cfg(feature = "full")] +use crate::stmt::Block; +use crate::token; +#[cfg(feature = "full")] +use crate::ty::ReturnType; +use crate::ty::Type; use proc_macro2::{Span, TokenStream}; #[cfg(feature = "printing")] use quote::IdentFragment; #[cfg(feature = "printing")] use std::fmt::{self, Display}; use std::hash::{Hash, Hasher}; -#[cfg(feature = "parsing")] +#[cfg(all(feature = "parsing", feature = "full"))] use std::mem; ast_enum_of_structs! { @@ -84,7 +107,7 @@ ast_enum_of_structs! { /// A sign that you may not be choosing the right variable names is if you /// see names getting repeated in your code, like accessing /// `receiver.receiver` or `pat.pat` or `cond.cond`. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] #[non_exhaustive] pub enum Expr { /// A slice literal expression: `[a, b, c, d]`. @@ -181,6 +204,9 @@ ast_enum_of_structs! { /// A range expression: `1..2`, `1..`, `..2`, `1..=2`, `..=2`. Range(ExprRange), + /// Address-of operation: `&raw const place` or `&raw mut place`. + RawAddr(ExprRawAddr), + /// A referencing operation: `&a` or `&mut a`. Reference(ExprReference), @@ -223,12 +249,13 @@ ast_enum_of_structs! { // For testing exhaustiveness in downstream code, use the following idiom: // // match expr { + // #![cfg_attr(test, deny(non_exhaustive_omitted_patterns))] + // // Expr::Array(expr) => {...} // Expr::Assign(expr) => {...} // ... // Expr::Yield(expr) => {...} // - // #[cfg_attr(test, deny(non_exhaustive_omitted_patterns))] // _ => { /* some sane fallback */ } // } // @@ -241,7 +268,7 @@ ast_enum_of_structs! { ast_struct! { /// A slice literal expression: `[a, b, c, d]`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ExprArray #full { pub attrs: Vec<Attribute>, pub bracket_token: token::Bracket, @@ -251,7 +278,7 @@ ast_struct! { ast_struct! { /// An assignment expression: `a = compute()`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ExprAssign #full { pub attrs: Vec<Attribute>, pub left: Box<Expr>, @@ -262,7 +289,7 @@ ast_struct! { ast_struct! { /// An async block: `async { ... }`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ExprAsync #full { pub attrs: Vec<Attribute>, pub async_token: Token![async], @@ -273,7 +300,7 @@ ast_struct! { ast_struct! { /// An await expression: `fut.await`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ExprAwait #full { pub attrs: Vec<Attribute>, pub base: Box<Expr>, @@ -284,7 +311,7 @@ ast_struct! { ast_struct! { /// A binary operation: `a + b`, `a += b`. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct ExprBinary { pub attrs: Vec<Attribute>, pub left: Box<Expr>, @@ -295,7 +322,7 @@ ast_struct! { ast_struct! { /// A blocked scope: `{ ... }`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ExprBlock #full { pub attrs: Vec<Attribute>, pub label: Option<Label>, @@ -306,7 +333,7 @@ ast_struct! { ast_struct! { /// A `break`, with an optional label to break and an optional /// expression. 
- #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ExprBreak #full { pub attrs: Vec<Attribute>, pub break_token: Token![break], @@ -317,7 +344,7 @@ ast_struct! { ast_struct! { /// A function call expression: `invoke(a, b)`. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct ExprCall { pub attrs: Vec<Attribute>, pub func: Box<Expr>, @@ -328,7 +355,7 @@ ast_struct! { ast_struct! { /// A cast expression: `foo as f64`. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct ExprCast { pub attrs: Vec<Attribute>, pub expr: Box<Expr>, @@ -339,7 +366,7 @@ ast_struct! { ast_struct! { /// A closure expression: `|a, b| a + b`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ExprClosure #full { pub attrs: Vec<Attribute>, pub lifetimes: Option<BoundLifetimes>, @@ -357,6 +384,7 @@ ast_struct! { ast_struct! { /// A const block: `const { ... }`. + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ExprConst #full { pub attrs: Vec<Attribute>, pub const_token: Token![const], @@ -366,7 +394,7 @@ ast_struct! { ast_struct! { /// A `continue`, with an optional label. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ExprContinue #full { pub attrs: Vec<Attribute>, pub continue_token: Token![continue], @@ -377,7 +405,7 @@ ast_struct! { ast_struct! { /// Access of a named struct field (`obj.k`) or unnamed tuple struct /// field (`obj.0`). - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct ExprField { pub attrs: Vec<Attribute>, pub base: Box<Expr>, @@ -388,7 +416,7 @@ ast_struct! { ast_struct! { /// A for loop: `for pat in expr { ... }`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ExprForLoop #full { pub attrs: Vec<Attribute>, pub label: Option<Label>, @@ -406,7 +434,7 @@ ast_struct! { /// This variant is important for faithfully representing the precedence /// of expressions and is related to `None`-delimited spans in a /// `TokenStream`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ExprGroup { pub attrs: Vec<Attribute>, pub group_token: token::Group, @@ -420,7 +448,7 @@ ast_struct! { /// /// The `else` branch expression may only be an `If` or `Block` /// expression, not any of the other types of expression. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ExprIf #full { pub attrs: Vec<Attribute>, pub if_token: Token![if], @@ -432,7 +460,7 @@ ast_struct! { ast_struct! { /// A square bracketed indexing expression: `vector[2]`. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct ExprIndex { pub attrs: Vec<Attribute>, pub expr: Box<Expr>, @@ -443,6 +471,7 @@ ast_struct! { ast_struct! { /// The inferred value of a const generic argument, denoted `_`. 
+ #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ExprInfer #full { pub attrs: Vec<Attribute>, pub underscore_token: Token![_], @@ -451,7 +480,7 @@ ast_struct! { ast_struct! { /// A `let` guard: `let Some(x) = opt`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ExprLet #full { pub attrs: Vec<Attribute>, pub let_token: Token![let], @@ -463,7 +492,7 @@ ast_struct! { ast_struct! { /// A literal in place of an expression: `1`, `"foo"`. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct ExprLit { pub attrs: Vec<Attribute>, pub lit: Lit, @@ -472,7 +501,7 @@ ast_struct! { ast_struct! { /// Conditionless loop: `loop { ... }`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ExprLoop #full { pub attrs: Vec<Attribute>, pub label: Option<Label>, @@ -483,6 +512,7 @@ ast_struct! { ast_struct! { /// A macro invocation expression: `format!("{}", q)`. + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct ExprMacro { pub attrs: Vec<Attribute>, pub mac: Macro, @@ -491,7 +521,7 @@ ast_struct! { ast_struct! { /// A `match` expression: `match n { Some(n) => {}, None => {} }`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ExprMatch #full { pub attrs: Vec<Attribute>, pub match_token: Token![match], @@ -503,8 +533,8 @@ ast_struct! { ast_struct! { /// A method call expression: `x.foo::<T>(a, b)`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] - pub struct ExprMethodCall #full { + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct ExprMethodCall { pub attrs: Vec<Attribute>, pub receiver: Box<Expr>, pub dot_token: Token![.], @@ -517,7 +547,7 @@ ast_struct! { ast_struct! { /// A parenthesized expression: `(a + b)`. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct ExprParen { pub attrs: Vec<Attribute>, pub paren_token: token::Paren, @@ -530,7 +560,7 @@ ast_struct! { /// parameters and a qualified self-type. /// /// A plain identifier like `x` is a path of length 1. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct ExprPath { pub attrs: Vec<Attribute>, pub qself: Option<QSelf>, @@ -540,7 +570,7 @@ ast_struct! { ast_struct! { /// A range expression: `1..2`, `1..`, `..2`, `1..=2`, `..=2`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ExprRange #full { pub attrs: Vec<Attribute>, pub start: Option<Box<Expr>>, @@ -549,10 +579,22 @@ ast_struct! { } } +ast_struct! { + /// Address-of operation: `&raw const place` or `&raw mut place`. + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + pub struct ExprRawAddr #full { + pub attrs: Vec<Attribute>, + pub and_token: Token![&], + pub raw: Token![raw], + pub mutability: PointerMutability, + pub expr: Box<Expr>, + } +} + ast_struct! { /// A referencing operation: `&a` or `&mut a`. 
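The new `Expr::RawAddr` variant above gives `&raw const place` / `&raw mut place` a dedicated node carrying a `PointerMutability`, where previously there was no dedicated variant for raw borrows. A quick check of the expected parse, assuming syn's "full" feature:

```rust
use syn::Expr;

fn main() -> syn::Result<()> {
    // `&raw const place` should now land in Expr::RawAddr rather than
    // being treated as an ordinary reference expression.
    let e: Expr = syn::parse_str("&raw const place")?;
    assert!(matches!(e, Expr::RawAddr(_)));
    Ok(())
}
```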
- #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] - pub struct ExprReference #full { + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct ExprReference { pub attrs: Vec<Attribute>, pub and_token: Token![&], pub mutability: Option<Token![mut]>, @@ -562,7 +604,7 @@ ast_struct! { ast_struct! { /// An array literal constructed from one repeated element: `[0u8; N]`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ExprRepeat #full { pub attrs: Vec<Attribute>, pub bracket_token: token::Bracket, @@ -574,7 +616,7 @@ ast_struct! { ast_struct! { /// A `return`, with an optional value to be returned. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ExprReturn #full { pub attrs: Vec<Attribute>, pub return_token: Token![return], @@ -587,8 +629,8 @@ ast_struct! { /// /// The `rest` provides the value of the remaining fields as in `S { a: /// 1, b: 1, ..rest }`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] - pub struct ExprStruct #full { + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct ExprStruct { pub attrs: Vec<Attribute>, pub qself: Option<QSelf>, pub path: Path, @@ -601,7 +643,7 @@ ast_struct! { ast_struct! { /// A try-expression: `expr?`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ExprTry #full { pub attrs: Vec<Attribute>, pub expr: Box<Expr>, @@ -611,7 +653,7 @@ ast_struct! { ast_struct! { /// A try block: `try { ... }`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ExprTryBlock #full { pub attrs: Vec<Attribute>, pub try_token: Token![try], @@ -621,8 +663,8 @@ ast_struct! { ast_struct! { /// A tuple expression: `(a, b, c, d)`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] - pub struct ExprTuple #full { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + pub struct ExprTuple { pub attrs: Vec<Attribute>, pub paren_token: token::Paren, pub elems: Punctuated<Expr, Token![,]>, @@ -631,7 +673,7 @@ ast_struct! { ast_struct! { /// A unary operation: `!x`, `*x`. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct ExprUnary { pub attrs: Vec<Attribute>, pub op: UnOp, @@ -641,7 +683,7 @@ ast_struct! { ast_struct! { /// An unsafe block: `unsafe { ... }`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ExprUnsafe #full { pub attrs: Vec<Attribute>, pub unsafe_token: Token![unsafe], @@ -651,7 +693,7 @@ ast_struct! { ast_struct! { /// A while loop: `while expr { ... }`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ExprWhile #full { pub attrs: Vec<Attribute>, pub label: Option<Label>, @@ -663,7 +705,7 @@ ast_struct! { ast_struct! { /// A yield expression: `yield expr`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ExprYield #full { pub attrs: Vec<Attribute>, pub yield_token: Token![yield], @@ -672,8 +714,26 @@ ast_struct! { } impl Expr { - #[cfg(feature = "parsing")] - const DUMMY: Self = Expr::Path(ExprPath { + /// An unspecified invalid expression. 
+ /// + /// ``` + /// use quote::ToTokens; + /// use std::mem; + /// use syn::{parse_quote, Expr}; + /// + /// fn unparenthesize(e: &mut Expr) { + /// while let Expr::Paren(paren) = e { + /// *e = mem::replace(&mut *paren.expr, Expr::PLACEHOLDER); + /// } + /// } + /// + /// fn main() { + /// let mut e: Expr = parse_quote! { ((1 + 1)) }; + /// unparenthesize(&mut e); + /// assert_eq!("1 + 1", e.to_token_stream().to_string()); + /// } + /// ``` + pub const PLACEHOLDER: Self = Expr::Path(ExprPath { attrs: Vec::new(), qself: None, path: Path { @@ -682,6 +742,185 @@ impl Expr { }, }); + /// An alternative to the primary `Expr::parse` parser (from the [`Parse`] + /// trait) for ambiguous syntactic positions in which a trailing brace + /// should not be taken as part of the expression. + /// + /// [`Parse`]: crate::parse::Parse + /// + /// Rust grammar has an ambiguity where braces sometimes turn a path + /// expression into a struct initialization and sometimes do not. In the + /// following code, the expression `S {}` is one expression. Presumably + /// there is an empty struct `struct S {}` defined somewhere which it is + /// instantiating. + /// + /// ``` + /// # struct S; + /// # impl std::ops::Deref for S { + /// # type Target = bool; + /// # fn deref(&self) -> &Self::Target { + /// # &true + /// # } + /// # } + /// let _ = *S {}; + /// + /// // parsed by rustc as: `*(S {})` + /// ``` + /// + /// We would want to parse the above using `Expr::parse` after the `=` + /// token. + /// + /// But in the following, `S {}` is *not* a struct init expression. + /// + /// ``` + /// # const S: &bool = &true; + /// if *S {} {} + /// + /// // parsed by rustc as: + /// // + /// // if (*S) { + /// // /* empty block */ + /// // } + /// // { + /// // /* another empty block */ + /// // } + /// ``` + /// + /// For that reason we would want to parse if-conditions using + /// `Expr::parse_without_eager_brace` after the `if` token. Same for similar + /// syntactic positions such as the condition expr after a `while` token or + /// the expr at the top of a `match`. + /// + /// The Rust grammar's choices around which way this ambiguity is resolved + /// at various syntactic positions is fairly arbitrary. Really either parse + /// behavior could work in most positions, and language designers just + /// decide each case based on which is more likely to be what the programmer + /// had in mind most of the time. + /// + /// ``` + /// # struct S; + /// # fn doc() -> S { + /// if return S {} {} + /// # unreachable!() + /// # } + /// + /// // parsed by rustc as: + /// // + /// // if (return (S {})) { + /// // } + /// // + /// // but could equally well have been this other arbitrary choice: + /// // + /// // if (return S) { + /// // } + /// // {} + /// ``` + /// + /// Note the grammar ambiguity on trailing braces is distinct from + /// precedence and is not captured by assigning a precedence level to the + /// braced struct init expr in relation to other operators. This can be + /// illustrated by `return 0..S {}` vs `match 0..S {}`. The former parses as + /// `return (0..(S {}))` implying tighter precedence for struct init than + /// `..`, while the latter parses as `match (0..S) {}` implying tighter + /// precedence for `..` than struct init, a contradiction. 
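The long doc comment above is easiest to read next to a concrete parser, so here is a minimal sketch of the syntactic position it describes (a made-up `IfHead` type; assumes the "full" and "parsing" features):

```rust
use syn::parse::{Parse, ParseStream};
use syn::{Block, Expr, Result, Token};

// Parse `if <cond> <block>` the way rustc does: the condition uses
// parse_without_eager_brace so a trailing `{ ... }` becomes the body
// rather than being swallowed as a struct literal.
struct IfHead {
    if_token: Token![if],
    cond: Expr,
    body: Block,
}

impl Parse for IfHead {
    fn parse(input: ParseStream) -> Result<Self> {
        Ok(IfHead {
            if_token: input.parse()?,
            cond: Expr::parse_without_eager_brace(input)?,
            body: input.parse()?,
        })
    }
}

fn main() -> Result<()> {
    let head: IfHead = syn::parse_str("if *S {}")?;
    // With the eager-brace parser, `*S {}` would have been read as `*(S {})`
    // and no body block would remain; here the condition stays `*S`.
    assert!(matches!(head.cond, Expr::Unary(_)));
    assert!(head.body.stmts.is_empty());
    let _ = head.if_token;
    Ok(())
}
```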
+ #[cfg(all(feature = "full", feature = "parsing"))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "full", feature = "parsing"))))] + pub fn parse_without_eager_brace(input: ParseStream) -> Result<Expr> { + parsing::ambiguous_expr(input, parsing::AllowStruct(false)) + } + + /// An alternative to the primary `Expr::parse` parser (from the [`Parse`] + /// trait) for syntactic positions in which expression boundaries are placed + /// more eagerly than done by the typical expression grammar. This includes + /// expressions at the head of a statement or in the right-hand side of a + /// `match` arm. + /// + /// [`Parse`]: crate::parse::Parse + /// + /// Compare the following cases: + /// + /// 1. + /// ``` + /// # let result = (); + /// # let guard = false; + /// # let cond = true; + /// # let f = true; + /// # let g = f; + /// # + /// let _ = match result { + /// () if guard => if cond { f } else { g } + /// () => false, + /// }; + /// ``` + /// + /// 2. + /// ``` + /// # let cond = true; + /// # let f = (); + /// # let g = f; + /// # + /// let _ = || { + /// if cond { f } else { g } + /// () + /// }; + /// ``` + /// + /// 3. + /// ``` + /// # let cond = true; + /// # let f = || (); + /// # let g = f; + /// # + /// let _ = [if cond { f } else { g } ()]; + /// ``` + /// + /// The same sequence of tokens `if cond { f } else { g } ()` appears in + /// expression position 3 times. The first two syntactic positions use eager + /// placement of expression boundaries, and parse as `Expr::If`, with the + /// adjacent `()` becoming `Pat::Tuple` or `Expr::Tuple`. In contrast, the + /// third case uses standard expression boundaries and parses as + /// `Expr::Call`. + /// + /// As with [`parse_without_eager_brace`], this ambiguity in the Rust + /// grammar is independent of precedence. + /// + /// [`parse_without_eager_brace`]: Self::parse_without_eager_brace + #[cfg(all(feature = "full", feature = "parsing"))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "full", feature = "parsing"))))] + pub fn parse_with_earlier_boundary_rule(input: ParseStream) -> Result<Expr> { + parsing::parse_with_earlier_boundary_rule(input) + } + + /// Returns whether the next token in the parse stream is one that might + /// possibly form the beginning of an expr. + /// + /// This classification is a load-bearing part of the grammar of some Rust + /// expressions, notably `return` and `break`. For example `return < …` will + /// never parse `<` as a binary operator regardless of what comes after, + /// because `<` is a legal starting token for an expression and so it's + /// required to be continued as a return value, such as `return <Struct as + /// Trait>::CONST`. Meanwhile `return > …` treats the `>` as a binary + /// operator because it cannot be a starting token for any Rust expression. 
+ #[cfg(feature = "parsing")] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] + pub fn peek(input: ParseStream) -> bool { + input.peek(Ident::peek_any) && !input.peek(Token![as]) // value name or keyword + || input.peek(token::Paren) // tuple + || input.peek(token::Bracket) // array + || input.peek(token::Brace) // block + || input.peek(Lit) // literal + || input.peek(Token![!]) && !input.peek(Token![!=]) // operator not + || input.peek(Token![-]) && !input.peek(Token![-=]) && !input.peek(Token![->]) // unary minus + || input.peek(Token![*]) && !input.peek(Token![*=]) // dereference + || input.peek(Token![|]) && !input.peek(Token![|=]) // closure + || input.peek(Token![&]) && !input.peek(Token![&=]) // reference + || input.peek(Token![..]) // range + || input.peek(Token![<]) && !input.peek(Token![<=]) && !input.peek(Token![<<=]) // associated path + || input.peek(Token![::]) // absolute path + || input.peek(Lifetime) // labeled loop + || input.peek(Token![#]) // expression attributes + } + #[cfg(all(feature = "parsing", feature = "full"))] pub(crate) fn replace_attrs(&mut self, new: Vec<Attribute>) -> Vec<Attribute> { match self { @@ -712,6 +951,7 @@ impl Expr { | Expr::Paren(ExprParen { attrs, .. }) | Expr::Path(ExprPath { attrs, .. }) | Expr::Range(ExprRange { attrs, .. }) + | Expr::RawAddr(ExprRawAddr { attrs, .. }) | Expr::Reference(ExprReference { attrs, .. }) | Expr::Repeat(ExprRepeat { attrs, .. }) | Expr::Return(ExprReturn { attrs, .. }) @@ -731,7 +971,7 @@ impl Expr { ast_enum! { /// A struct or tuple struct field accessed in a struct literal or field /// expression. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub enum Member { /// A named field like `self.x`. Named(Ident), @@ -796,9 +1036,19 @@ impl IdentFragment for Member { } } +#[cfg(any(feature = "parsing", feature = "printing"))] +impl Member { + pub(crate) fn is_named(&self) -> bool { + match self { + Member::Named(_) => true, + Member::Unnamed(_) => false, + } + } +} + ast_struct! { /// The index of an unnamed tuple struct field. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct Index { pub index: u32, pub span: Span, @@ -807,7 +1057,7 @@ ast_struct! { impl From<usize> for Index { fn from(index: usize) -> Index { - assert!(index < u32::max_value() as usize); + assert!(index < u32::MAX as usize); Index { index: index as u32, span: Span::call_site(), @@ -840,10 +1090,9 @@ impl IdentFragment for Index { } } -#[cfg(feature = "full")] ast_struct! { /// A field-value pair in a struct literal. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct FieldValue { pub attrs: Vec<Attribute>, pub member: Member, @@ -859,7 +1108,7 @@ ast_struct! { #[cfg(feature = "full")] ast_struct! { /// A lifetime labeling a `for`, `while`, or `loop`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct Label { pub name: Lifetime, pub colon_token: Token![:], @@ -885,7 +1134,7 @@ ast_struct! { /// # false /// # } /// ``` - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct Arm { pub attrs: Vec<Attribute>, pub pat: Pat, @@ -899,7 +1148,7 @@ ast_struct! { #[cfg(feature = "full")] ast_enum! 
{ /// Limit types of a range, inclusive or exclusive. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub enum RangeLimits { /// Inclusive at the beginning, exclusive at the end. HalfOpen(Token![..]), @@ -908,66 +1157,64 @@ ast_enum! { } } -#[cfg(any(feature = "parsing", feature = "printing"))] #[cfg(feature = "full")] -pub(crate) fn requires_terminator(expr: &Expr) -> bool { - // see https://github.com/rust-lang/rust/blob/9a19e7604/compiler/rustc_ast/src/util/classify.rs#L7-L26 - match expr { - Expr::If(_) - | Expr::Match(_) - | Expr::Block(_) | Expr::Unsafe(_) // both under ExprKind::Block in rustc - | Expr::While(_) - | Expr::Loop(_) - | Expr::ForLoop(_) - | Expr::TryBlock(_) - | Expr::Const(_) => false, - Expr::Array(_) - | Expr::Assign(_) - | Expr::Async(_) - | Expr::Await(_) - | Expr::Binary(_) - | Expr::Break(_) - | Expr::Call(_) - | Expr::Cast(_) - | Expr::Closure(_) - | Expr::Continue(_) - | Expr::Field(_) - | Expr::Group(_) - | Expr::Index(_) - | Expr::Infer(_) - | Expr::Let(_) - | Expr::Lit(_) - | Expr::Macro(_) - | Expr::MethodCall(_) - | Expr::Paren(_) - | Expr::Path(_) - | Expr::Range(_) - | Expr::Reference(_) - | Expr::Repeat(_) - | Expr::Return(_) - | Expr::Struct(_) - | Expr::Try(_) - | Expr::Tuple(_) - | Expr::Unary(_) - | Expr::Yield(_) - | Expr::Verbatim(_) => true +ast_enum! { + /// Mutability of a raw pointer (`*const T`, `*mut T`), in which non-mutable + /// isn't the implicit default. + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + pub enum PointerMutability { + Const(Token![const]), + Mut(Token![mut]), } } #[cfg(feature = "parsing")] pub(crate) mod parsing { - use super::*; - use crate::parse::discouraged::Speculative; + #[cfg(feature = "full")] + use crate::attr; + use crate::attr::Attribute; + #[cfg(feature = "full")] + use crate::classify; + use crate::error::{Error, Result}; + #[cfg(feature = "full")] + use crate::expr::{ + Arm, ExprArray, ExprAssign, ExprAsync, ExprAwait, ExprBlock, ExprBreak, ExprClosure, + ExprConst, ExprContinue, ExprForLoop, ExprIf, ExprInfer, ExprLet, ExprLoop, ExprMatch, + ExprRange, ExprRawAddr, ExprRepeat, ExprReturn, ExprTry, ExprTryBlock, ExprUnsafe, + ExprWhile, ExprYield, Label, PointerMutability, RangeLimits, + }; + use crate::expr::{ + Expr, ExprBinary, ExprCall, ExprCast, ExprField, ExprGroup, ExprIndex, ExprLit, ExprMacro, + ExprMethodCall, ExprParen, ExprPath, ExprReference, ExprStruct, ExprTuple, ExprUnary, + FieldValue, Index, Member, + }; + #[cfg(feature = "full")] + use crate::generics::{self, BoundLifetimes}; + use crate::ident::Ident; + #[cfg(feature = "full")] + use crate::lifetime::Lifetime; + use crate::lit::{Lit, LitFloat, LitInt}; + use crate::mac::{self, Macro}; + use crate::op::BinOp; + use crate::parse::discouraged::Speculative as _; #[cfg(feature = "full")] use crate::parse::ParseBuffer; - use crate::parse::{Parse, ParseStream, Result}; - use crate::path; - use std::cmp::Ordering; - - mod kw { - crate::custom_keyword!(builtin); - crate::custom_keyword!(raw); - } + use crate::parse::{Parse, ParseStream}; + #[cfg(feature = "full")] + use crate::pat::{Pat, PatType}; + use crate::path::{self, AngleBracketedGenericArguments, Path, QSelf}; + use crate::precedence::Precedence; + use crate::punctuated::Punctuated; + #[cfg(feature = "full")] + use crate::stmt::Block; + use crate::token; + use crate::ty; + #[cfg(feature = "full")] + use crate::ty::{ReturnType, Type}; + use crate::verbatim; + #[cfg(feature = "full")] + use proc_macro2::{Span, TokenStream}; + use 
std::mem; // When we're parsing expressions which occur before blocks, like in an if // statement's condition, we cannot parse a struct literal. @@ -975,56 +1222,9 @@ pub(crate) mod parsing { // Struct literals are ambiguous in certain positions // https://github.com/rust-lang/rfcs/pull/92 #[cfg(feature = "full")] - pub(crate) struct AllowStruct(bool); - - enum Precedence { - Any, - Assign, - Range, - Or, - And, - Compare, - BitOr, - BitXor, - BitAnd, - Shift, - Arithmetic, - Term, - Cast, - } - - impl Precedence { - fn of(op: &BinOp) -> Self { - match op { - BinOp::Add(_) | BinOp::Sub(_) => Precedence::Arithmetic, - BinOp::Mul(_) | BinOp::Div(_) | BinOp::Rem(_) => Precedence::Term, - BinOp::And(_) => Precedence::And, - BinOp::Or(_) => Precedence::Or, - BinOp::BitXor(_) => Precedence::BitXor, - BinOp::BitAnd(_) => Precedence::BitAnd, - BinOp::BitOr(_) => Precedence::BitOr, - BinOp::Shl(_) | BinOp::Shr(_) => Precedence::Shift, - BinOp::Eq(_) - | BinOp::Lt(_) - | BinOp::Le(_) - | BinOp::Ne(_) - | BinOp::Ge(_) - | BinOp::Gt(_) => Precedence::Compare, - BinOp::AddAssign(_) - | BinOp::SubAssign(_) - | BinOp::MulAssign(_) - | BinOp::DivAssign(_) - | BinOp::RemAssign(_) - | BinOp::BitXorAssign(_) - | BinOp::BitAndAssign(_) - | BinOp::BitOrAssign(_) - | BinOp::ShlAssign(_) - | BinOp::ShrAssign(_) => Precedence::Assign, - } - } - } + pub(super) struct AllowStruct(pub bool); - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for Expr { fn parse(input: ParseStream) -> Result<Self> { ambiguous_expr( @@ -1035,92 +1235,65 @@ pub(crate) mod parsing { } } - impl Expr { - /// An alternative to the primary `Expr::parse` parser (from the - /// [`Parse`] trait) for ambiguous syntactic positions in which a - /// trailing brace should not be taken as part of the expression. - /// - /// Rust grammar has an ambiguity where braces sometimes turn a path - /// expression into a struct initialization and sometimes do not. In the - /// following code, the expression `S {}` is one expression. Presumably - /// there is an empty struct `struct S {}` defined somewhere which it is - /// instantiating. - /// - /// ``` - /// # struct S; - /// # impl std::ops::Deref for S { - /// # type Target = bool; - /// # fn deref(&self) -> &Self::Target { - /// # &true - /// # } - /// # } - /// let _ = *S {}; - /// - /// // parsed by rustc as: `*(S {})` - /// ``` - /// - /// We would want to parse the above using `Expr::parse` after the `=` - /// token. - /// - /// But in the following, `S {}` is *not* a struct init expression. - /// - /// ``` - /// # const S: &bool = &true; - /// if *S {} {} - /// - /// // parsed by rustc as: - /// // - /// // if (*S) { - /// // /* empty block */ - /// // } - /// // { - /// // /* another empty block */ - /// // } - /// ``` - /// - /// For that reason we would want to parse if-conditions using - /// `Expr::parse_without_eager_brace` after the `if` token. Same for - /// similar syntactic positions such as the condition expr after a - /// `while` token or the expr at the top of a `match`. - /// - /// The Rust grammar's choices around which way this ambiguity is - /// resolved at various syntactic positions is fairly arbitrary. Really - /// either parse behavior could work in most positions, and language - /// designers just decide each case based on which is more likely to be - /// what the programmer had in mind most of the time. 
- /// - /// ``` - /// # struct S; - /// # fn doc() -> S { - /// if return S {} {} - /// # unreachable!() - /// # } - /// - /// // parsed by rustc as: - /// // - /// // if (return (S {})) { - /// // } - /// // - /// // but could equally well have been this other arbitrary choice: - /// // - /// // if (return S) { - /// // } - /// // {} - /// ``` - /// - /// Note the grammar ambiguity on trailing braces is distinct from - /// precedence and is not captured by assigning a precedence level to - /// the braced struct init expr in relation to other operators. This can - /// be illustrated by `return 0..S {}` vs `match 0..S {}`. The former - /// parses as `return (0..(S {}))` implying tighter precedence for - /// struct init than `..`, while the latter parses as `match (0..S) {}` - /// implying tighter precedence for `..` than struct init, a - /// contradiction. - #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(all(feature = "full", feature = "parsing"))))] - pub fn parse_without_eager_brace(input: ParseStream) -> Result<Expr> { - ambiguous_expr(input, AllowStruct(false)) + #[cfg(feature = "full")] + pub(super) fn parse_with_earlier_boundary_rule(input: ParseStream) -> Result<Expr> { + let mut attrs = input.call(expr_attrs)?; + let mut expr = if input.peek(token::Group) { + let allow_struct = AllowStruct(true); + let atom = expr_group(input, allow_struct)?; + if continue_parsing_early(&atom) { + trailer_helper(input, atom)? + } else { + atom + } + } else if input.peek(Token![if]) { + Expr::If(input.parse()?) + } else if input.peek(Token![while]) { + Expr::While(input.parse()?) + } else if input.peek(Token![for]) + && !generics::parsing::choose_generics_over_qpath_after_keyword(input) + { + Expr::ForLoop(input.parse()?) + } else if input.peek(Token![loop]) { + Expr::Loop(input.parse()?) + } else if input.peek(Token![match]) { + Expr::Match(input.parse()?) + } else if input.peek(Token![try]) && input.peek2(token::Brace) { + Expr::TryBlock(input.parse()?) + } else if input.peek(Token![unsafe]) { + Expr::Unsafe(input.parse()?) + } else if input.peek(Token![const]) && input.peek2(token::Brace) { + Expr::Const(input.parse()?) + } else if input.peek(token::Brace) { + Expr::Block(input.parse()?) + } else if input.peek(Lifetime) { + atom_labeled(input)? + } else { + let allow_struct = AllowStruct(true); + unary_expr(input, allow_struct)? 
+ }; + + if continue_parsing_early(&expr) { + attrs.extend(expr.replace_attrs(Vec::new())); + expr.replace_attrs(attrs); + + let allow_struct = AllowStruct(true); + return parse_expr(input, expr, allow_struct, Precedence::MIN); + } + + if input.peek(Token![.]) && !input.peek(Token![..]) || input.peek(Token![?]) { + expr = trailer_helper(input, expr)?; + + attrs.extend(expr.replace_attrs(Vec::new())); + expr.replace_attrs(attrs); + + let allow_struct = AllowStruct(true); + return parse_expr(input, expr, allow_struct, Precedence::MIN); } + + attrs.extend(expr.replace_attrs(Vec::new())); + expr.replace_attrs(attrs); + Ok(expr) } #[cfg(feature = "full")] @@ -1133,28 +1306,6 @@ pub(crate) mod parsing { } } - impl Copy for Precedence {} - - impl Clone for Precedence { - fn clone(&self) -> Self { - *self - } - } - - impl PartialEq for Precedence { - fn eq(&self, other: &Self) -> bool { - *self as u8 == *other as u8 - } - } - - impl PartialOrd for Precedence { - fn partial_cmp(&self, other: &Self) -> Option<Ordering> { - let this = *self as u8; - let other = *other as u8; - Some(this.cmp(&other)) - } - } - #[cfg(feature = "full")] fn parse_expr( input: ParseStream, @@ -1164,75 +1315,58 @@ pub(crate) mod parsing { ) -> Result<Expr> { loop { let ahead = input.fork(); - if let Some(op) = match ahead.parse::<BinOp>() { - Ok(op) if Precedence::of(&op) >= base => Some(op), - _ => None, - } { - input.advance_to(&ahead); - let precedence = Precedence::of(&op); - let mut rhs = unary_expr(input, allow_struct)?; - loop { - let next = peek_precedence(input); - if next > precedence || next == precedence && precedence == Precedence::Assign { - rhs = parse_expr(input, rhs, allow_struct, next)?; - } else { + if let Expr::Range(_) = lhs { + // A range cannot be the left-hand side of another binary operator. 
+ break; + } else if let Ok(op) = ahead.parse::<BinOp>() { + let precedence = Precedence::of_binop(&op); + if precedence < base { + break; + } + if precedence == Precedence::Assign { + if let Expr::Range(_) = lhs { break; } } + if precedence == Precedence::Compare { + if let Expr::Binary(lhs) = &lhs { + if Precedence::of_binop(&lhs.op) == Precedence::Compare { + return Err(input.error("comparison operators cannot be chained")); + } + } + } + input.advance_to(&ahead); + let right = parse_binop_rhs(input, allow_struct, precedence)?; lhs = Expr::Binary(ExprBinary { attrs: Vec::new(), left: Box::new(lhs), op, - right: Box::new(rhs), + right, }); } else if Precedence::Assign >= base && input.peek(Token![=]) - && !input.peek(Token![==]) && !input.peek(Token![=>]) + && match lhs { + Expr::Range(_) => false, + _ => true, + } { let eq_token: Token![=] = input.parse()?; - let mut rhs = unary_expr(input, allow_struct)?; - loop { - let next = peek_precedence(input); - if next >= Precedence::Assign { - rhs = parse_expr(input, rhs, allow_struct, next)?; - } else { - break; - } - } + let right = parse_binop_rhs(input, allow_struct, Precedence::Assign)?; lhs = Expr::Assign(ExprAssign { attrs: Vec::new(), left: Box::new(lhs), eq_token, - right: Box::new(rhs), + right, }); } else if Precedence::Range >= base && input.peek(Token![..]) { let limits: RangeLimits = input.parse()?; - let rhs = if matches!(limits, RangeLimits::HalfOpen(_)) - && (input.is_empty() - || input.peek(Token![,]) - || input.peek(Token![;]) - || input.peek(Token![.]) && !input.peek(Token![..]) - || !allow_struct.0 && input.peek(token::Brace)) - { - None - } else { - let mut rhs = unary_expr(input, allow_struct)?; - loop { - let next = peek_precedence(input); - if next > Precedence::Range { - rhs = parse_expr(input, rhs, allow_struct, next)?; - } else { - break; - } - } - Some(rhs) - }; + let end = parse_range_end(input, &limits, allow_struct)?; lhs = Expr::Range(ExprRange { attrs: Vec::new(), start: Some(Box::new(lhs)), limits, - end: rhs.map(Box::new), + end, }); } else if Precedence::Cast >= base && input.peek(Token![as]) { let as_token: Token![as] = input.parse()?; @@ -1257,26 +1391,25 @@ pub(crate) mod parsing { fn parse_expr(input: ParseStream, mut lhs: Expr, base: Precedence) -> Result<Expr> { loop { let ahead = input.fork(); - if let Some(op) = match ahead.parse::<BinOp>() { - Ok(op) if Precedence::of(&op) >= base => Some(op), - _ => None, - } { - input.advance_to(&ahead); - let precedence = Precedence::of(&op); - let mut rhs = unary_expr(input)?; - loop { - let next = peek_precedence(input); - if next > precedence || next == precedence && precedence == Precedence::Assign { - rhs = parse_expr(input, rhs, next)?; - } else { - break; + if let Ok(op) = ahead.parse::<BinOp>() { + let precedence = Precedence::of_binop(&op); + if precedence < base { + break; + } + if precedence == Precedence::Compare { + if let Expr::Binary(lhs) = &lhs { + if Precedence::of_binop(&lhs.op) == Precedence::Compare { + return Err(input.error("comparison operators cannot be chained")); + } } } + input.advance_to(&ahead); + let right = parse_binop_rhs(input, precedence)?; lhs = Expr::Binary(ExprBinary { attrs: Vec::new(), left: Box::new(lhs), op, - right: Box::new(rhs), + right, }); } else if Precedence::Cast >= base && input.peek(Token![as]) { let as_token: Token![as] = input.parse()?; @@ -1297,9 +1430,44 @@ pub(crate) mod parsing { Ok(lhs) } + fn parse_binop_rhs( + input: ParseStream, + #[cfg(feature = "full")] allow_struct: AllowStruct, + precedence: 
Precedence, + ) -> Result<Box<Expr>> { + let mut rhs = unary_expr( + input, + #[cfg(feature = "full")] + allow_struct, + )?; + loop { + let next = peek_precedence(input); + if next > precedence || next == precedence && precedence == Precedence::Assign { + let cursor = input.cursor(); + rhs = parse_expr( + input, + rhs, + #[cfg(feature = "full")] + allow_struct, + next, + )?; + if cursor == input.cursor() { + // Bespoke grammar restrictions separate from precedence can + // cause parsing to not advance, such as `..a` being + // disallowed in the left-hand side of binary operators, + // even ones that have lower precedence than `..`. + break; + } + } else { + break; + } + } + Ok(Box::new(rhs)) + } + fn peek_precedence(input: ParseStream) -> Precedence { if let Ok(op) = input.fork().parse() { - Precedence::of(&op) + Precedence::of_binop(&op) } else if input.peek(Token![=]) && !input.peek(Token![=>]) { Precedence::Assign } else if input.peek(Token![..]) { @@ -1307,12 +1475,12 @@ pub(crate) mod parsing { } else if input.peek(Token![as]) { Precedence::Cast } else { - Precedence::Any + Precedence::MIN } } // Parse an arbitrary expression. - fn ambiguous_expr( + pub(super) fn ambiguous_expr( input: ParseStream, #[cfg(feature = "full")] allow_struct: AllowStruct, ) -> Result<Expr> { @@ -1326,30 +1494,15 @@ pub(crate) mod parsing { lhs, #[cfg(feature = "full")] allow_struct, - Precedence::Any, + Precedence::MIN, ) } #[cfg(feature = "full")] fn expr_attrs(input: ParseStream) -> Result<Vec<Attribute>> { let mut attrs = Vec::new(); - loop { - if input.peek(token::Group) { - let ahead = input.fork(); - let group = crate::group::parse_group(&ahead)?; - if !group.content.peek(Token![#]) || group.content.peek2(Token![!]) { - break; - } - let attr = group.content.call(attr::parsing::single_parse_outer)?; - if !group.content.is_empty() { - break; - } - attrs.push(attr); - } else if input.peek(Token![#]) { - attrs.push(input.call(attr::parsing::single_parse_outer)?); - } else { - break; - } + while !input.peek(token::Group) && input.peek(Token![#]) { + attrs.push(input.call(attr::parsing::single_parse_outer)?); } Ok(attrs) } @@ -1362,9 +1515,13 @@ pub(crate) mod parsing { fn unary_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> { let begin = input.fork(); let attrs = input.call(expr_attrs)?; + if input.peek(token::Group) { + return trailer_expr(begin, attrs, input, allow_struct); + } + if input.peek(Token![&]) { let and_token: Token![&] = input.parse()?; - let raw: Option<kw::raw> = if input.peek(kw::raw) + let raw: Option<Token![raw]> = if input.peek(Token![raw]) && (input.peek2(Token![mut]) || input.peek2(Token![const])) { Some(input.parse()?) @@ -1372,12 +1529,23 @@ pub(crate) mod parsing { None }; let mutability: Option<Token![mut]> = input.parse()?; - if raw.is_some() && mutability.is_none() { - input.parse::<Token![const]>()?; - } + let const_token: Option<Token![const]> = if raw.is_some() && mutability.is_none() { + Some(input.parse()?) 
+ } else { + None + }; let expr = Box::new(unary_expr(input, allow_struct)?); - if raw.is_some() { - Ok(Expr::Verbatim(verbatim::between(&begin, input))) + if let Some(raw) = raw { + Ok(Expr::RawAddr(ExprRawAddr { + attrs, + and_token, + raw, + mutability: match mutability { + Some(mut_token) => PointerMutability::Mut(mut_token), + None => PointerMutability::Const(const_token.unwrap()), + }, + expr, + })) } else { Ok(Expr::Reference(ExprReference { attrs, @@ -1395,7 +1563,14 @@ pub(crate) mod parsing { #[cfg(not(feature = "full"))] fn unary_expr(input: ParseStream) -> Result<Expr> { - if input.peek(Token![*]) || input.peek(Token![!]) || input.peek(Token![-]) { + if input.peek(Token![&]) { + Ok(Expr::Reference(ExprReference { + attrs: Vec::new(), + and_token: input.parse()?, + mutability: input.parse()?, + expr: Box::new(unary_expr(input)?), + })) + } else if input.peek(Token![*]) || input.peek(Token![!]) || input.peek(Token![-]) { Ok(Expr::Unary(ExprUnary { attrs: Vec::new(), op: input.parse()?, @@ -1424,7 +1599,18 @@ pub(crate) mod parsing { if let Expr::Verbatim(tokens) = &mut e { *tokens = verbatim::between(&begin, input); - } else { + } else if !attrs.is_empty() { + if let Expr::Range(range) = e { + let spans: &[Span] = match &range.limits { + RangeLimits::HalfOpen(limits) => &limits.spans, + RangeLimits::Closed(limits) => &limits.spans, + }; + return Err(crate::error::new2( + spans[0], + *spans.last().unwrap(), + "attributes are not allowed on range expressions starting with `..`", + )); + } let inner_attrs = e.replace_attrs(Vec::new()); attrs.extend(inner_attrs); e.replace_attrs(attrs); @@ -1508,7 +1694,12 @@ pub(crate) mod parsing { bracket_token: bracketed!(content in input), index: content.parse()?, }); - } else if input.peek(Token![?]) { + } else if input.peek(Token![?]) + && match e { + Expr::Range(_) => false, + _ => true, + } + { e = Expr::Try(ExprTry { attrs: Vec::new(), expr: Box::new(e), @@ -1539,44 +1730,68 @@ pub(crate) mod parsing { && !input.peek2(Token![await]) { let mut dot_token: Token![.] = input.parse()?; + let float_token: Option<LitFloat> = input.parse()?; if let Some(float_token) = float_token { if multi_index(&mut e, &mut dot_token, float_token)? { continue; } } - e = Expr::Field(ExprField { - attrs: Vec::new(), - base: Box::new(e), - dot_token, - member: input.parse()?, - }); - } else if input.peek(token::Bracket) { - let content; - e = Expr::Index(ExprIndex { - attrs: Vec::new(), - expr: Box::new(e), - bracket_token: bracketed!(content in input), - index: content.parse()?, - }); - } else { - break; - } - } - - Ok(e) - } - // Parse all atomic expressions which don't have to worry about precedence - // interactions, as they are fully contained. 
+ let member: Member = input.parse()?; + let turbofish = if member.is_named() && input.peek(Token![::]) { + let colon2_token: Token![::] = input.parse()?; + let turbofish = + AngleBracketedGenericArguments::do_parse(Some(colon2_token), input)?; + Some(turbofish) + } else { + None + }; + + if turbofish.is_some() || input.peek(token::Paren) { + if let Member::Named(method) = member { + let content; + e = Expr::MethodCall(ExprMethodCall { + attrs: Vec::new(), + receiver: Box::new(e), + dot_token, + method, + turbofish, + paren_token: parenthesized!(content in input), + args: content.parse_terminated(Expr::parse, Token![,])?, + }); + continue; + } + } + + e = Expr::Field(ExprField { + attrs: Vec::new(), + base: Box::new(e), + dot_token, + member, + }); + } else if input.peek(token::Bracket) { + let content; + e = Expr::Index(ExprIndex { + attrs: Vec::new(), + expr: Box::new(e), + bracket_token: bracketed!(content in input), + index: content.parse()?, + }); + } else { + break; + } + } + + Ok(e) + } + + // Parse all atomic expressions which don't have to worry about precedence + // interactions, as they are fully contained. #[cfg(feature = "full")] fn atom_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> { - if input.peek(token::Group) - && !input.peek2(Token![::]) - && !input.peek2(Token![!]) - && !input.peek2(token::Brace) - { - input.call(expr_group).map(Expr::Group) + if input.peek(token::Group) { + expr_group(input, allow_struct) } else if input.peek(Lit) { input.parse().map(Expr::Lit) } else if input.peek(Token![async]) @@ -1588,14 +1803,14 @@ pub(crate) mod parsing { } else if input.peek(Token![|]) || input.peek(Token![move]) || input.peek(Token![for]) - && input.peek2(Token![<]) - && (input.peek3(Lifetime) || input.peek3(Token![>])) + && generics::parsing::choose_generics_over_qpath_after_keyword(input) || input.peek(Token![const]) && !input.peek2(token::Brace) || input.peek(Token![static]) || input.peek(Token![async]) && (input.peek2(Token![|]) || input.peek2(Token![move])) { expr_closure(input, allow_struct).map(Expr::Closure) - } else if input.peek(kw::builtin) && input.peek2(Token![#]) { + } else if token::parsing::peek_keyword(input.cursor(), "builtin") && input.peek2(Token![#]) + { expr_builtin(input) } else if input.peek(Ident) || input.peek(Token![::]) @@ -1614,11 +1829,13 @@ pub(crate) mod parsing { } else if input.peek(Token![continue]) { input.parse().map(Expr::Continue) } else if input.peek(Token![return]) { - expr_ret(input, allow_struct).map(Expr::Return) + input.parse().map(Expr::Return) + } else if input.peek(Token![become]) { + expr_become(input) } else if input.peek(token::Bracket) { array_or_repeat(input) } else if input.peek(Token![let]) { - input.parse().map(Expr::Let) + expr_let(input, allow_struct).map(Expr::Let) } else if input.peek(Token![if]) { input.parse().map(Expr::If) } else if input.peek(Token![while]) { @@ -1642,43 +1859,44 @@ pub(crate) mod parsing { } else if input.peek(Token![_]) { input.parse().map(Expr::Infer) } else if input.peek(Lifetime) { - let the_label: Label = input.parse()?; - let mut expr = if input.peek(Token![while]) { - Expr::While(input.parse()?) - } else if input.peek(Token![for]) { - Expr::ForLoop(input.parse()?) - } else if input.peek(Token![loop]) { - Expr::Loop(input.parse()?) - } else if input.peek(token::Brace) { - Expr::Block(input.parse()?) - } else { - return Err(input.error("expected loop or block expression")); - }; - match &mut expr { - Expr::While(ExprWhile { label, .. 
}) - | Expr::ForLoop(ExprForLoop { label, .. }) - | Expr::Loop(ExprLoop { label, .. }) - | Expr::Block(ExprBlock { label, .. }) => *label = Some(the_label), - _ => unreachable!(), - } - Ok(expr) + atom_labeled(input) } else { Err(input.error("expected an expression")) } } + #[cfg(feature = "full")] + fn atom_labeled(input: ParseStream) -> Result<Expr> { + let the_label: Label = input.parse()?; + let mut expr = if input.peek(Token![while]) { + Expr::While(input.parse()?) + } else if input.peek(Token![for]) { + Expr::ForLoop(input.parse()?) + } else if input.peek(Token![loop]) { + Expr::Loop(input.parse()?) + } else if input.peek(token::Brace) { + Expr::Block(input.parse()?) + } else { + return Err(input.error("expected loop or block expression")); + }; + match &mut expr { + Expr::While(ExprWhile { label, .. }) + | Expr::ForLoop(ExprForLoop { label, .. }) + | Expr::Loop(ExprLoop { label, .. }) + | Expr::Block(ExprBlock { label, .. }) => *label = Some(the_label), + _ => unreachable!(), + } + Ok(expr) + } + #[cfg(not(feature = "full"))] fn atom_expr(input: ParseStream) -> Result<Expr> { - if input.peek(token::Group) - && !input.peek2(Token![::]) - && !input.peek2(Token![!]) - && !input.peek2(token::Brace) - { - input.call(expr_group).map(Expr::Group) + if input.peek(token::Group) { + expr_group(input) } else if input.peek(Lit) { input.parse().map(Expr::Lit) } else if input.peek(token::Paren) { - input.call(expr_paren).map(Expr::Paren) + paren_or_tuple(input) } else if input.peek(Ident) || input.peek(Token![::]) || input.peek(Token![<]) @@ -1709,7 +1927,7 @@ pub(crate) mod parsing { fn expr_builtin(input: ParseStream) -> Result<Expr> { let begin = input.fork(); - input.parse::<kw::builtin>()?; + token::parsing::keyword(input, "builtin")?; input.parse::<Token![#]>()?; input.parse::<Ident>()?; @@ -1724,8 +1942,23 @@ pub(crate) mod parsing { input: ParseStream, #[cfg(feature = "full")] allow_struct: AllowStruct, ) -> Result<Expr> { - let (qself, path) = path::parsing::qpath(input, true)?; + let expr_style = true; + let (qself, path) = path::parsing::qpath(input, expr_style)?; + rest_of_path_or_macro_or_struct( + qself, + path, + input, + #[cfg(feature = "full")] + allow_struct, + ) + } + fn rest_of_path_or_macro_or_struct( + qself: Option<QSelf>, + path: Path, + input: ParseStream, + #[cfg(feature = "full")] allow_struct: AllowStruct, + ) -> Result<Expr> { if qself.is_none() && input.peek(Token![!]) && !input.peek(Token![!=]) @@ -1744,7 +1977,8 @@ pub(crate) mod parsing { })); } - #[cfg(feature = "full")] + #[cfg(not(feature = "full"))] + let allow_struct = (true,); if allow_struct.0 && input.peek(token::Brace) { return expr_struct_helper(input, qself, path).map(Expr::Struct); } @@ -1756,7 +1990,7 @@ pub(crate) mod parsing { })) } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ExprMacro { fn parse(input: ParseStream) -> Result<Self> { Ok(ExprMacro { @@ -1766,7 +2000,6 @@ pub(crate) mod parsing { } } - #[cfg(feature = "full")] fn paren_or_tuple(input: ParseStream) -> Result<Expr> { let content; let paren_token = parenthesized!(content in input); @@ -1851,7 +2084,7 @@ pub(crate) mod parsing { } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ExprArray { fn parse(input: ParseStream) -> Result<Self> { let content; @@ -1877,7 +2110,7 @@ pub(crate) mod parsing { } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = 
"parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ExprRepeat { fn parse(input: ParseStream) -> Result<Self> { let content; @@ -1892,54 +2125,25 @@ pub(crate) mod parsing { } #[cfg(feature = "full")] - pub(crate) fn expr_early(input: ParseStream) -> Result<Expr> { - let mut attrs = input.call(expr_attrs)?; - let mut expr = if input.peek(Token![if]) { - Expr::If(input.parse()?) - } else if input.peek(Token![while]) { - Expr::While(input.parse()?) - } else if input.peek(Token![for]) - && !(input.peek2(Token![<]) && (input.peek3(Lifetime) || input.peek3(Token![>]))) - { - Expr::ForLoop(input.parse()?) - } else if input.peek(Token![loop]) { - Expr::Loop(input.parse()?) - } else if input.peek(Token![match]) { - Expr::Match(input.parse()?) - } else if input.peek(Token![try]) && input.peek2(token::Brace) { - Expr::TryBlock(input.parse()?) - } else if input.peek(Token![unsafe]) { - Expr::Unsafe(input.parse()?) - } else if input.peek(Token![const]) && input.peek2(token::Brace) { - Expr::Const(input.parse()?) - } else if input.peek(token::Brace) { - Expr::Block(input.parse()?) - } else { - let allow_struct = AllowStruct(true); - let mut expr = unary_expr(input, allow_struct)?; - - attrs.extend(expr.replace_attrs(Vec::new())); - expr.replace_attrs(attrs); - - return parse_expr(input, expr, allow_struct, Precedence::Any); - }; - - if input.peek(Token![.]) && !input.peek(Token![..]) || input.peek(Token![?]) { - expr = trailer_helper(input, expr)?; - - attrs.extend(expr.replace_attrs(Vec::new())); - expr.replace_attrs(attrs); - - let allow_struct = AllowStruct(true); - return parse_expr(input, expr, allow_struct, Precedence::Any); + fn continue_parsing_early(mut expr: &Expr) -> bool { + while let Expr::Group(group) = expr { + expr = &group.expr; + } + match expr { + Expr::If(_) + | Expr::While(_) + | Expr::ForLoop(_) + | Expr::Loop(_) + | Expr::Match(_) + | Expr::TryBlock(_) + | Expr::Unsafe(_) + | Expr::Const(_) + | Expr::Block(_) => false, + _ => true, } - - attrs.extend(expr.replace_attrs(Vec::new())); - expr.replace_attrs(attrs); - Ok(expr) } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ExprLit { fn parse(input: ParseStream) -> Result<Self> { Ok(ExprLit { @@ -1949,93 +2153,132 @@ pub(crate) mod parsing { } } - fn expr_group(input: ParseStream) -> Result<ExprGroup> { + fn expr_group( + input: ParseStream, + #[cfg(feature = "full")] allow_struct: AllowStruct, + ) -> Result<Expr> { let group = crate::group::parse_group(input)?; - Ok(ExprGroup { + let mut inner: Expr = group.content.parse()?; + + match inner { + Expr::Path(mut expr) if expr.attrs.is_empty() => { + let grouped_len = expr.path.segments.len(); + Path::parse_rest(input, &mut expr.path, true)?; + match rest_of_path_or_macro_or_struct( + expr.qself, + expr.path, + input, + #[cfg(feature = "full")] + allow_struct, + )? 
{ + Expr::Path(expr) if expr.path.segments.len() == grouped_len => { + inner = Expr::Path(expr); + } + extended => return Ok(extended), + } + } + _ => {} + } + + Ok(Expr::Group(ExprGroup { attrs: Vec::new(), group_token: group.token, - expr: group.content.parse()?, - }) + expr: Box::new(inner), + })) } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ExprParen { fn parse(input: ParseStream) -> Result<Self> { - expr_paren(input) + let content; + Ok(ExprParen { + attrs: Vec::new(), + paren_token: parenthesized!(content in input), + expr: content.parse()?, + }) } } - fn expr_paren(input: ParseStream) -> Result<ExprParen> { - let content; - Ok(ExprParen { - attrs: Vec::new(), - paren_token: parenthesized!(content in input), - expr: content.parse()?, - }) - } - #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ExprLet { fn parse(input: ParseStream) -> Result<Self> { - Ok(ExprLet { - attrs: Vec::new(), - let_token: input.parse()?, - pat: Box::new(Pat::parse_multi_with_leading_vert(input)?), - eq_token: input.parse()?, - expr: Box::new({ - let allow_struct = AllowStruct(false); - let lhs = unary_expr(input, allow_struct)?; - parse_expr(input, lhs, allow_struct, Precedence::Compare)? - }), - }) + let allow_struct = AllowStruct(true); + expr_let(input, allow_struct) } } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + fn expr_let(input: ParseStream, allow_struct: AllowStruct) -> Result<ExprLet> { + Ok(ExprLet { + attrs: Vec::new(), + let_token: input.parse()?, + pat: Box::new(Pat::parse_multi_with_leading_vert(input)?), + eq_token: input.parse()?, + expr: Box::new({ + let lhs = unary_expr(input, allow_struct)?; + parse_expr(input, lhs, allow_struct, Precedence::Compare)? + }), + }) + } + + #[cfg(feature = "full")] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ExprIf { fn parse(input: ParseStream) -> Result<Self> { let attrs = input.call(Attribute::parse_outer)?; - Ok(ExprIf { - attrs, - if_token: input.parse()?, - cond: Box::new(input.call(Expr::parse_without_eager_brace)?), - then_branch: input.parse()?, - else_branch: { - if input.peek(Token![else]) { - Some(input.call(else_block)?) - } else { - None - } - }, - }) - } - } - #[cfg(feature = "full")] - fn else_block(input: ParseStream) -> Result<(Token![else], Box<Expr>)> { - let else_token: Token![else] = input.parse()?; + let mut clauses = Vec::new(); + let mut expr; + loop { + let if_token: Token![if] = input.parse()?; + let cond = input.call(Expr::parse_without_eager_brace)?; + let then_branch: Block = input.parse()?; - let lookahead = input.lookahead1(); - let else_branch = if input.peek(Token![if]) { - input.parse().map(Expr::If)? 
- } else if input.peek(token::Brace) { - Expr::Block(ExprBlock { - attrs: Vec::new(), - label: None, - block: input.parse()?, - }) - } else { - return Err(lookahead.error()); - }; + expr = ExprIf { + attrs: Vec::new(), + if_token, + cond: Box::new(cond), + then_branch, + else_branch: None, + }; - Ok((else_token, Box::new(else_branch))) + if !input.peek(Token![else]) { + break; + } + + let else_token: Token![else] = input.parse()?; + let lookahead = input.lookahead1(); + if lookahead.peek(Token![if]) { + expr.else_branch = Some((else_token, Box::new(Expr::PLACEHOLDER))); + clauses.push(expr); + } else if lookahead.peek(token::Brace) { + expr.else_branch = Some(( + else_token, + Box::new(Expr::Block(ExprBlock { + attrs: Vec::new(), + label: None, + block: input.parse()?, + })), + )); + break; + } else { + return Err(lookahead.error()); + } + } + + while let Some(mut prev) = clauses.pop() { + *prev.else_branch.as_mut().unwrap().1 = Expr::If(expr); + expr = prev; + } + expr.attrs = attrs; + Ok(expr) + } } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ExprInfer { fn parse(input: ParseStream) -> Result<Self> { Ok(ExprInfer { @@ -2046,7 +2289,7 @@ pub(crate) mod parsing { } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ExprForLoop { fn parse(input: ParseStream) -> Result<Self> { let mut attrs = input.call(Attribute::parse_outer)?; @@ -2076,7 +2319,7 @@ pub(crate) mod parsing { } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ExprLoop { fn parse(input: ParseStream) -> Result<Self> { let mut attrs = input.call(Attribute::parse_outer)?; @@ -2098,7 +2341,7 @@ pub(crate) mod parsing { } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ExprMatch { fn parse(input: ParseStream) -> Result<Self> { let mut attrs = input.call(Attribute::parse_outer)?; @@ -2109,10 +2352,7 @@ pub(crate) mod parsing { let brace_token = braced!(content in input); attr::parsing::parse_inner(&content, &mut attrs)?; - let mut arms = Vec::new(); - while !content.is_empty() { - arms.push(content.call(Arm::parse)?); - } + let arms = Arm::parse_multiple(&content)?; Ok(ExprMatch { attrs, @@ -2132,7 +2372,7 @@ pub(crate) mod parsing { ) => { $( #[cfg(all(feature = "full", feature = "printing"))] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for $expr_type { fn parse(input: ParseStream) -> Result<Self> { let mut expr: Expr = input.parse()?; @@ -2164,7 +2404,7 @@ pub(crate) mod parsing { } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ExprUnary { fn parse(input: ParseStream) -> Result<Self> { let attrs = Vec::new(); @@ -2187,7 +2427,7 @@ pub(crate) mod parsing { } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ExprClosure { fn parse(input: ParseStream) -> Result<Self> { let allow_struct = AllowStruct(true); @@ -2196,7 +2436,22 @@ pub(crate) mod parsing { } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = 
"parsing")))] + impl Parse for ExprRawAddr { + fn parse(input: ParseStream) -> Result<Self> { + let allow_struct = AllowStruct(true); + Ok(ExprRawAddr { + attrs: Vec::new(), + and_token: input.parse()?, + raw: input.parse()?, + mutability: input.parse()?, + expr: Box::new(unary_expr(input, allow_struct)?), + }) + } + } + + #[cfg(feature = "full")] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ExprReference { fn parse(input: ParseStream) -> Result<Self> { let allow_struct = AllowStruct(true); @@ -2210,7 +2465,7 @@ pub(crate) mod parsing { } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ExprBreak { fn parse(input: ParseStream) -> Result<Self> { let allow_struct = AllowStruct(true); @@ -2219,16 +2474,33 @@ pub(crate) mod parsing { } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ExprReturn { fn parse(input: ParseStream) -> Result<Self> { - let allow_struct = AllowStruct(true); - expr_ret(input, allow_struct) + Ok(ExprReturn { + attrs: Vec::new(), + return_token: input.parse()?, + expr: { + if Expr::peek(input) { + Some(input.parse()?) + } else { + None + } + }, + }) } } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + fn expr_become(input: ParseStream) -> Result<Expr> { + let begin = input.fork(); + input.parse::<Token![become]>()?; + input.parse::<Expr>()?; + Ok(Expr::Verbatim(verbatim::between(&begin, input))) + } + + #[cfg(feature = "full")] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ExprTryBlock { fn parse(input: ParseStream) -> Result<Self> { Ok(ExprTryBlock { @@ -2240,14 +2512,14 @@ pub(crate) mod parsing { } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ExprYield { fn parse(input: ParseStream) -> Result<Self> { Ok(ExprYield { attrs: Vec::new(), yield_token: input.parse()?, expr: { - if !input.is_empty() && !input.peek(Token![,]) && !input.peek(Token![;]) { + if Expr::peek(input) { Some(input.parse()?) 
} else { None @@ -2314,7 +2586,7 @@ pub(crate) mod parsing { } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ExprAsync { fn parse(input: ParseStream) -> Result<Self> { Ok(ExprAsync { @@ -2363,7 +2635,7 @@ pub(crate) mod parsing { } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ExprWhile { fn parse(input: ParseStream) -> Result<Self> { let mut attrs = input.call(Attribute::parse_outer)?; @@ -2387,7 +2659,7 @@ pub(crate) mod parsing { } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ExprConst { fn parse(input: ParseStream) -> Result<Self> { let const_token: Token![const] = input.parse()?; @@ -2406,7 +2678,7 @@ pub(crate) mod parsing { } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for Label { fn parse(input: ParseStream) -> Result<Self> { Ok(Label { @@ -2417,7 +2689,7 @@ pub(crate) mod parsing { } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for Option<Label> { fn parse(input: ParseStream) -> Result<Self> { if input.peek(Lifetime) { @@ -2429,7 +2701,7 @@ pub(crate) mod parsing { } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ExprContinue { fn parse(input: ParseStream) -> Result<Self> { Ok(ExprContinue { @@ -2442,48 +2714,39 @@ pub(crate) mod parsing { #[cfg(feature = "full")] fn expr_break(input: ParseStream, allow_struct: AllowStruct) -> Result<ExprBreak> { - Ok(ExprBreak { - attrs: Vec::new(), - break_token: input.parse()?, - label: input.parse()?, - expr: { - if input.is_empty() - || input.peek(Token![,]) - || input.peek(Token![;]) - || !allow_struct.0 && input.peek(token::Brace) - { - None - } else { - let expr = ambiguous_expr(input, allow_struct)?; - Some(Box::new(expr)) - } - }, - }) - } + let break_token: Token![break] = input.parse()?; + + let ahead = input.fork(); + let label: Option<Lifetime> = ahead.parse()?; + if label.is_some() && ahead.peek(Token![:]) { + // Not allowed: `break 'label: loop {...}` + // Parentheses are required. `break ('label: loop {...})` + let _: Expr = input.parse()?; + let start_span = label.unwrap().apostrophe; + let end_span = input.cursor().prev_span(); + return Err(crate::error::new2( + start_span, + end_span, + "parentheses required", + )); + } - #[cfg(feature = "full")] - fn expr_ret(input: ParseStream, allow_struct: AllowStruct) -> Result<ExprReturn> { - Ok(ExprReturn { + input.advance_to(&ahead); + let expr = if Expr::peek(input) && (allow_struct.0 || !input.peek(token::Brace)) { + Some(input.parse()?) + } else { + None + }; + + Ok(ExprBreak { attrs: Vec::new(), - return_token: input.parse()?, - expr: { - if input.is_empty() || input.peek(Token![,]) || input.peek(Token![;]) { - None - } else { - // NOTE: return is greedy and eats blocks after it even when in a - // position where structs are not allowed, such as in if statement - // conditions. 
For example: - // - // if return { println!("A") } {} // Prints "A" - let expr = ambiguous_expr(input, allow_struct)?; - Some(Box::new(expr)) - } - }, + break_token, + label, + expr, }) } - #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for FieldValue { fn parse(input: ParseStream) -> Result<Self> { let attrs = input.call(Attribute::parse_outer)?; @@ -2512,16 +2775,15 @@ pub(crate) mod parsing { } } - #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ExprStruct { fn parse(input: ParseStream) -> Result<Self> { - let (qself, path) = path::parsing::qpath(input, true)?; + let expr_style = true; + let (qself, path) = path::parsing::qpath(input, expr_style)?; expr_struct_helper(input, qself, path) } } - #[cfg(feature = "full")] fn expr_struct_helper( input: ParseStream, qself: Option<QSelf>, @@ -2568,7 +2830,7 @@ pub(crate) mod parsing { } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ExprUnsafe { fn parse(input: ParseStream) -> Result<Self> { let unsafe_token: Token![unsafe] = input.parse()?; @@ -2587,7 +2849,7 @@ pub(crate) mod parsing { } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ExprBlock { fn parse(input: ParseStream) -> Result<Self> { let mut attrs = input.call(Attribute::parse_outer)?; @@ -2609,18 +2871,7 @@ pub(crate) mod parsing { #[cfg(feature = "full")] fn expr_range(input: ParseStream, allow_struct: AllowStruct) -> Result<ExprRange> { let limits: RangeLimits = input.parse()?; - let end = if matches!(limits, RangeLimits::HalfOpen(_)) - && (input.is_empty() - || input.peek(Token![,]) - || input.peek(Token![;]) - || input.peek(Token![.]) && !input.peek(Token![..]) - || !allow_struct.0 && input.peek(token::Brace)) - { - None - } else { - let to = ambiguous_expr(input, allow_struct)?; - Some(Box::new(to)) - }; + let end = parse_range_end(input, &limits, allow_struct)?; Ok(ExprRange { attrs: Vec::new(), start: None, @@ -2630,7 +2881,43 @@ pub(crate) mod parsing { } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + fn parse_range_end( + input: ParseStream, + limits: &RangeLimits, + allow_struct: AllowStruct, + ) -> Result<Option<Box<Expr>>> { + if matches!(limits, RangeLimits::HalfOpen(_)) + && (input.is_empty() + || input.peek(Token![,]) + || input.peek(Token![;]) + || input.peek(Token![.]) && !input.peek(Token![..]) + || input.peek(Token![?]) + || input.peek(Token![=>]) + || !allow_struct.0 && input.peek(token::Brace) + || input.peek(Token![=]) + || input.peek(Token![+]) + || input.peek(Token![/]) + || input.peek(Token![%]) + || input.peek(Token![^]) + || input.peek(Token![>]) + || input.peek(Token![<=]) + || input.peek(Token![!=]) + || input.peek(Token![-=]) + || input.peek(Token![*=]) + || input.peek(Token![&=]) + || input.peek(Token![|=]) + || input.peek(Token![<<=]) + || input.peek(Token![as])) + { + Ok(None) + } else { + let end = parse_binop_rhs(input, allow_struct, Precedence::Range)?; + Ok(Some(end)) + } + } + + #[cfg(feature = "full")] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for RangeLimits { fn parse(input: ParseStream) -> Result<Self> { let lookahead = input.lookahead1(); @@ -2667,7 +2954,7 @@ pub(crate) mod parsing { } } - 
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ExprPath { fn parse(input: ParseStream) -> Result<Self> { #[cfg(not(feature = "full"))] @@ -2675,13 +2962,14 @@ pub(crate) mod parsing { #[cfg(feature = "full")] let attrs = input.call(Attribute::parse_outer)?; - let (qself, path) = path::parsing::qpath(input, true)?; + let expr_style = true; + let (qself, path) = path::parsing::qpath(input, expr_style)?; Ok(ExprPath { attrs, qself, path }) } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for Member { fn parse(input: ParseStream) -> Result<Self> { if input.peek(Ident) { @@ -2695,7 +2983,18 @@ pub(crate) mod parsing { } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Arm { + pub(crate) fn parse_multiple(input: ParseStream) -> Result<Vec<Self>> { + let mut arms = Vec::new(); + while !input.is_empty() { + arms.push(input.call(Arm::parse)?); + } + Ok(arms) + } + } + + #[cfg(feature = "full")] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for Arm { fn parse(input: ParseStream) -> Result<Arm> { let requires_comma; @@ -2713,8 +3012,8 @@ pub(crate) mod parsing { }, fat_arrow_token: input.parse()?, body: { - let body = input.call(expr_early)?; - requires_comma = requires_terminator(&body); + let body = Expr::parse_with_earlier_boundary_rule(input)?; + requires_comma = classify::requires_comma_to_be_match_arm(&body); Box::new(body) }, comma: { @@ -2728,7 +3027,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for Index { fn parse(input: ParseStream) -> Result<Self> { let lit: LitInt = input.parse()?; @@ -2762,7 +3061,7 @@ pub(crate) mod parsing { let part_end = offset + part.len(); index.span = float_token.subspan(offset..part_end).unwrap_or(float_span); - let base = mem::replace(e, Expr::DUMMY); + let base = mem::replace(e, Expr::PLACEHOLDER); *e = Expr::Field(ExprField { attrs: Vec::new(), base: Box::new(base), @@ -2781,11 +3080,16 @@ pub(crate) mod parsing { } #[cfg(feature = "full")] - impl Member { - fn is_named(&self) -> bool { - match self { - Member::Named(_) => true, - Member::Unnamed(_) => false, + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] + impl Parse for PointerMutability { + fn parse(input: ParseStream) -> Result<Self> { + let lookahead = input.lookahead1(); + if lookahead.peek(Token![const]) { + Ok(PointerMutability::Const(input.parse()?)) + } else if lookahead.peek(Token![mut]) { + Ok(PointerMutability::Mut(input.parse()?)) + } else { + Err(lookahead.error()) } } } @@ -2815,24 +3119,33 @@ pub(crate) mod parsing { #[cfg(feature = "printing")] pub(crate) mod printing { - use super::*; + use crate::attr::Attribute; #[cfg(feature = "full")] use crate::attr::FilterAttrs; - use proc_macro2::{Literal, TokenStream}; - use quote::{ToTokens, TokenStreamExt}; - - // If the given expression is a bare `ExprStruct`, wraps it in parenthesis - // before appending it to `TokenStream`. 
#[cfg(feature = "full")] - fn wrap_bare_struct(tokens: &mut TokenStream, e: &Expr) { - if let Expr::Struct(_) = *e { - token::Paren::default().surround(tokens, |tokens| { - e.to_tokens(tokens); - }); - } else { - e.to_tokens(tokens); - } - } + use crate::classify; + #[cfg(feature = "full")] + use crate::expr::{ + Arm, ExprArray, ExprAssign, ExprAsync, ExprAwait, ExprBlock, ExprBreak, ExprClosure, + ExprConst, ExprContinue, ExprForLoop, ExprIf, ExprInfer, ExprLet, ExprLoop, ExprMatch, + ExprRange, ExprRawAddr, ExprRepeat, ExprReturn, ExprTry, ExprTryBlock, ExprUnsafe, + ExprWhile, ExprYield, Label, PointerMutability, RangeLimits, + }; + use crate::expr::{ + Expr, ExprBinary, ExprCall, ExprCast, ExprField, ExprGroup, ExprIndex, ExprLit, ExprMacro, + ExprMethodCall, ExprParen, ExprPath, ExprReference, ExprStruct, ExprTuple, ExprUnary, + FieldValue, Index, Member, + }; + use crate::fixup::FixupContext; + use crate::op::BinOp; + use crate::path; + use crate::path::printing::PathStyle; + use crate::precedence::Precedence; + use crate::token; + #[cfg(feature = "full")] + use crate::ty::ReturnType; + use proc_macro2::{Literal, Span, TokenStream}; + use quote::{ToTokens, TokenStreamExt as _}; #[cfg(feature = "full")] pub(crate) fn outer_attrs_to_tokens(attrs: &[Attribute], tokens: &mut TokenStream) { @@ -2847,8 +3160,125 @@ pub(crate) mod printing { #[cfg(not(feature = "full"))] pub(crate) fn outer_attrs_to_tokens(_attrs: &[Attribute], _tokens: &mut TokenStream) {} + pub(crate) fn print_subexpression( + expr: &Expr, + needs_group: bool, + tokens: &mut TokenStream, + mut fixup: FixupContext, + ) { + if needs_group { + // If we are surrounding the whole cond in parentheses, such as: + // + // if (return Struct {}) {} + // + // then there is no need for parenthesizing the individual struct + // expressions within. On the other hand if the whole cond is not + // parenthesized, then print_expr must parenthesize exterior struct + // literals. 
+ // + // if x == (Struct {}) {} + // + fixup = FixupContext::NONE; + } + + let do_print_expr = |tokens: &mut TokenStream| print_expr(expr, tokens, fixup); + + if needs_group { + token::Paren::default().surround(tokens, do_print_expr); + } else { + do_print_expr(tokens); + } + } + + pub(crate) fn print_expr(expr: &Expr, tokens: &mut TokenStream, mut fixup: FixupContext) { + #[cfg(feature = "full")] + let needs_group = fixup.parenthesize(expr); + #[cfg(not(feature = "full"))] + let needs_group = false; + + if needs_group { + fixup = FixupContext::NONE; + } + + let do_print_expr = |tokens: &mut TokenStream| match expr { + #[cfg(feature = "full")] + Expr::Array(e) => e.to_tokens(tokens), + #[cfg(feature = "full")] + Expr::Assign(e) => print_expr_assign(e, tokens, fixup), + #[cfg(feature = "full")] + Expr::Async(e) => e.to_tokens(tokens), + #[cfg(feature = "full")] + Expr::Await(e) => print_expr_await(e, tokens, fixup), + Expr::Binary(e) => print_expr_binary(e, tokens, fixup), + #[cfg(feature = "full")] + Expr::Block(e) => e.to_tokens(tokens), + #[cfg(feature = "full")] + Expr::Break(e) => print_expr_break(e, tokens, fixup), + Expr::Call(e) => print_expr_call(e, tokens, fixup), + Expr::Cast(e) => print_expr_cast(e, tokens, fixup), + #[cfg(feature = "full")] + Expr::Closure(e) => print_expr_closure(e, tokens, fixup), + #[cfg(feature = "full")] + Expr::Const(e) => e.to_tokens(tokens), + #[cfg(feature = "full")] + Expr::Continue(e) => e.to_tokens(tokens), + Expr::Field(e) => print_expr_field(e, tokens, fixup), + #[cfg(feature = "full")] + Expr::ForLoop(e) => e.to_tokens(tokens), + Expr::Group(e) => e.to_tokens(tokens), + #[cfg(feature = "full")] + Expr::If(e) => e.to_tokens(tokens), + Expr::Index(e) => print_expr_index(e, tokens, fixup), + #[cfg(feature = "full")] + Expr::Infer(e) => e.to_tokens(tokens), + #[cfg(feature = "full")] + Expr::Let(e) => print_expr_let(e, tokens, fixup), + Expr::Lit(e) => e.to_tokens(tokens), + #[cfg(feature = "full")] + Expr::Loop(e) => e.to_tokens(tokens), + Expr::Macro(e) => e.to_tokens(tokens), + #[cfg(feature = "full")] + Expr::Match(e) => e.to_tokens(tokens), + Expr::MethodCall(e) => print_expr_method_call(e, tokens, fixup), + Expr::Paren(e) => e.to_tokens(tokens), + Expr::Path(e) => e.to_tokens(tokens), + #[cfg(feature = "full")] + Expr::Range(e) => print_expr_range(e, tokens, fixup), + #[cfg(feature = "full")] + Expr::RawAddr(e) => print_expr_raw_addr(e, tokens, fixup), + Expr::Reference(e) => print_expr_reference(e, tokens, fixup), + #[cfg(feature = "full")] + Expr::Repeat(e) => e.to_tokens(tokens), + #[cfg(feature = "full")] + Expr::Return(e) => print_expr_return(e, tokens, fixup), + Expr::Struct(e) => e.to_tokens(tokens), + #[cfg(feature = "full")] + Expr::Try(e) => print_expr_try(e, tokens, fixup), + #[cfg(feature = "full")] + Expr::TryBlock(e) => e.to_tokens(tokens), + Expr::Tuple(e) => e.to_tokens(tokens), + Expr::Unary(e) => print_expr_unary(e, tokens, fixup), + #[cfg(feature = "full")] + Expr::Unsafe(e) => e.to_tokens(tokens), + Expr::Verbatim(e) => e.to_tokens(tokens), + #[cfg(feature = "full")] + Expr::While(e) => e.to_tokens(tokens), + #[cfg(feature = "full")] + Expr::Yield(e) => print_expr_yield(e, tokens, fixup), + + #[cfg(not(feature = "full"))] + _ => unreachable!(), + }; + + if needs_group { + token::Paren::default().surround(tokens, do_print_expr); + } else { + do_print_expr(tokens); + } + } + #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for 
ExprArray { fn to_tokens(&self, tokens: &mut TokenStream) { outer_attrs_to_tokens(&self.attrs, tokens); @@ -2859,18 +3289,47 @@ pub(crate) mod printing { } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprAssign { fn to_tokens(&self, tokens: &mut TokenStream) { - outer_attrs_to_tokens(&self.attrs, tokens); - self.left.to_tokens(tokens); - self.eq_token.to_tokens(tokens); - self.right.to_tokens(tokens); + print_expr_assign(self, tokens, FixupContext::NONE); } } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + fn print_expr_assign(e: &ExprAssign, tokens: &mut TokenStream, mut fixup: FixupContext) { + outer_attrs_to_tokens(&e.attrs, tokens); + + let needs_group = !e.attrs.is_empty(); + if needs_group { + fixup = FixupContext::NONE; + } + + let do_print_expr = |tokens: &mut TokenStream| { + let (left_prec, left_fixup) = fixup.leftmost_subexpression_with_operator( + &e.left, + false, + false, + Precedence::Assign, + ); + print_subexpression(&e.left, left_prec <= Precedence::Range, tokens, left_fixup); + e.eq_token.to_tokens(tokens); + print_expr( + &e.right, + tokens, + fixup.rightmost_subexpression_fixup(false, false, Precedence::Assign), + ); + }; + + if needs_group { + token::Paren::default().surround(tokens, do_print_expr); + } else { + do_print_expr(tokens); + } + } + + #[cfg(feature = "full")] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprAsync { fn to_tokens(&self, tokens: &mut TokenStream) { outer_attrs_to_tokens(&self.attrs, tokens); @@ -2881,28 +3340,96 @@ pub(crate) mod printing { } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprAwait { fn to_tokens(&self, tokens: &mut TokenStream) { - outer_attrs_to_tokens(&self.attrs, tokens); - self.base.to_tokens(tokens); - self.dot_token.to_tokens(tokens); - self.await_token.to_tokens(tokens); + print_expr_await(self, tokens, FixupContext::NONE); } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg(feature = "full")] + fn print_expr_await(e: &ExprAwait, tokens: &mut TokenStream, fixup: FixupContext) { + outer_attrs_to_tokens(&e.attrs, tokens); + let (left_prec, left_fixup) = fixup.leftmost_subexpression_with_dot(&e.base); + print_subexpression( + &e.base, + left_prec < Precedence::Unambiguous, + tokens, + left_fixup, + ); + e.dot_token.to_tokens(tokens); + e.await_token.to_tokens(tokens); + } + + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprBinary { fn to_tokens(&self, tokens: &mut TokenStream) { - outer_attrs_to_tokens(&self.attrs, tokens); - self.left.to_tokens(tokens); - self.op.to_tokens(tokens); - self.right.to_tokens(tokens); + print_expr_binary(self, tokens, FixupContext::NONE); + } + } + + fn print_expr_binary(e: &ExprBinary, tokens: &mut TokenStream, mut fixup: FixupContext) { + outer_attrs_to_tokens(&e.attrs, tokens); + + let needs_group = !e.attrs.is_empty(); + if needs_group { + fixup = FixupContext::NONE; + } + + let do_print_expr = |tokens: &mut TokenStream| { + let binop_prec = Precedence::of_binop(&e.op); + let (left_prec, left_fixup) = fixup.leftmost_subexpression_with_operator( + &e.left, + #[cfg(feature = "full")] + match &e.op { + BinOp::Sub(_) + | BinOp::Mul(_) + | BinOp::And(_) + | BinOp::Or(_) + | BinOp::BitAnd(_) + | BinOp::BitOr(_) + | BinOp::Shl(_) + | BinOp::Lt(_) => true, + _ => false, + }, + match 
&e.op { + BinOp::Shl(_) | BinOp::Lt(_) => true, + _ => false, + }, + #[cfg(feature = "full")] + binop_prec, + ); + let left_needs_group = match binop_prec { + Precedence::Assign => left_prec <= Precedence::Range, + Precedence::Compare => left_prec <= binop_prec, + _ => left_prec < binop_prec, + }; + + let right_fixup = fixup.rightmost_subexpression_fixup( + #[cfg(feature = "full")] + false, + #[cfg(feature = "full")] + false, + #[cfg(feature = "full")] + binop_prec, + ); + let right_needs_group = binop_prec != Precedence::Assign + && right_fixup.rightmost_subexpression_precedence(&e.right) <= binop_prec; + + print_subexpression(&e.left, left_needs_group, tokens, left_fixup); + e.op.to_tokens(tokens); + print_subexpression(&e.right, right_needs_group, tokens, right_fixup); + }; + + if needs_group { + token::Paren::default().surround(tokens, do_print_expr); + } else { + do_print_expr(tokens); } } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprBlock { fn to_tokens(&self, tokens: &mut TokenStream) { outer_attrs_to_tokens(&self.attrs, tokens); @@ -2915,57 +3442,133 @@ pub(crate) mod printing { } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprBreak { fn to_tokens(&self, tokens: &mut TokenStream) { - outer_attrs_to_tokens(&self.attrs, tokens); - self.break_token.to_tokens(tokens); - self.label.to_tokens(tokens); - self.expr.to_tokens(tokens); + print_expr_break(self, tokens, FixupContext::NONE); } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg(feature = "full")] + fn print_expr_break(e: &ExprBreak, tokens: &mut TokenStream, fixup: FixupContext) { + outer_attrs_to_tokens(&e.attrs, tokens); + e.break_token.to_tokens(tokens); + e.label.to_tokens(tokens); + if let Some(value) = &e.expr { + print_subexpression( + value, + // Parenthesize `break 'inner: loop { break 'inner 1 } + 1` + // ^---------------------------------^ + e.label.is_none() && classify::expr_leading_label(value), + tokens, + fixup.rightmost_subexpression_fixup(true, true, Precedence::Jump), + ); + } + } + + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprCall { fn to_tokens(&self, tokens: &mut TokenStream) { - outer_attrs_to_tokens(&self.attrs, tokens); - self.func.to_tokens(tokens); - self.paren_token.surround(tokens, |tokens| { - self.args.to_tokens(tokens); - }); + print_expr_call(self, tokens, FixupContext::NONE); } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + fn print_expr_call(e: &ExprCall, tokens: &mut TokenStream, fixup: FixupContext) { + outer_attrs_to_tokens(&e.attrs, tokens); + + let (left_prec, left_fixup) = fixup.leftmost_subexpression_with_operator( + &e.func, + #[cfg(feature = "full")] + true, + false, + #[cfg(feature = "full")] + Precedence::Unambiguous, + ); + let needs_group = if let Expr::Field(func) = &*e.func { + func.member.is_named() + } else { + left_prec < Precedence::Unambiguous + }; + print_subexpression(&e.func, needs_group, tokens, left_fixup); + + e.paren_token.surround(tokens, |tokens| { + e.args.to_tokens(tokens); + }); + } + + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprCast { fn to_tokens(&self, tokens: &mut TokenStream) { - outer_attrs_to_tokens(&self.attrs, tokens); - self.expr.to_tokens(tokens); - self.as_token.to_tokens(tokens); - self.ty.to_tokens(tokens); + print_expr_cast(self, tokens, 
FixupContext::NONE); + } + } + + fn print_expr_cast(e: &ExprCast, tokens: &mut TokenStream, mut fixup: FixupContext) { + outer_attrs_to_tokens(&e.attrs, tokens); + + let needs_group = !e.attrs.is_empty(); + if needs_group { + fixup = FixupContext::NONE; + } + + let do_print_expr = |tokens: &mut TokenStream| { + let (left_prec, left_fixup) = fixup.leftmost_subexpression_with_operator( + &e.expr, + #[cfg(feature = "full")] + false, + false, + #[cfg(feature = "full")] + Precedence::Cast, + ); + print_subexpression(&e.expr, left_prec < Precedence::Cast, tokens, left_fixup); + e.as_token.to_tokens(tokens); + e.ty.to_tokens(tokens); + }; + + if needs_group { + token::Paren::default().surround(tokens, do_print_expr); + } else { + do_print_expr(tokens); } } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprClosure { fn to_tokens(&self, tokens: &mut TokenStream) { - outer_attrs_to_tokens(&self.attrs, tokens); - self.lifetimes.to_tokens(tokens); - self.constness.to_tokens(tokens); - self.movability.to_tokens(tokens); - self.asyncness.to_tokens(tokens); - self.capture.to_tokens(tokens); - self.or1_token.to_tokens(tokens); - self.inputs.to_tokens(tokens); - self.or2_token.to_tokens(tokens); - self.output.to_tokens(tokens); - self.body.to_tokens(tokens); + print_expr_closure(self, tokens, FixupContext::NONE); } } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + fn print_expr_closure(e: &ExprClosure, tokens: &mut TokenStream, fixup: FixupContext) { + outer_attrs_to_tokens(&e.attrs, tokens); + e.lifetimes.to_tokens(tokens); + e.constness.to_tokens(tokens); + e.movability.to_tokens(tokens); + e.asyncness.to_tokens(tokens); + e.capture.to_tokens(tokens); + e.or1_token.to_tokens(tokens); + e.inputs.to_tokens(tokens); + e.or2_token.to_tokens(tokens); + e.output.to_tokens(tokens); + if matches!(e.output, ReturnType::Default) + || matches!(&*e.body, Expr::Block(body) if body.attrs.is_empty() && body.label.is_none()) + { + print_expr( + &e.body, + tokens, + fixup.rightmost_subexpression_fixup(false, false, Precedence::Jump), + ); + } else { + token::Brace::default().surround(tokens, |tokens| { + print_expr(&e.body, tokens, FixupContext::new_stmt()); + }); + } + } + + #[cfg(feature = "full")] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprConst { fn to_tokens(&self, tokens: &mut TokenStream) { outer_attrs_to_tokens(&self.attrs, tokens); @@ -2978,7 +3581,7 @@ pub(crate) mod printing { } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprContinue { fn to_tokens(&self, tokens: &mut TokenStream) { outer_attrs_to_tokens(&self.attrs, tokens); @@ -2987,18 +3590,28 @@ pub(crate) mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprField { fn to_tokens(&self, tokens: &mut TokenStream) { - outer_attrs_to_tokens(&self.attrs, tokens); - self.base.to_tokens(tokens); - self.dot_token.to_tokens(tokens); - self.member.to_tokens(tokens); + print_expr_field(self, tokens, FixupContext::NONE); } } + fn print_expr_field(e: &ExprField, tokens: &mut TokenStream, fixup: FixupContext) { + outer_attrs_to_tokens(&e.attrs, tokens); + let (left_prec, left_fixup) = fixup.leftmost_subexpression_with_dot(&e.base); + print_subexpression( + &e.base, + left_prec < 
Precedence::Unambiguous, + tokens, + left_fixup, + ); + e.dot_token.to_tokens(tokens); + e.member.to_tokens(tokens); + } + #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprForLoop { fn to_tokens(&self, tokens: &mut TokenStream) { outer_attrs_to_tokens(&self.attrs, tokens); @@ -3006,7 +3619,7 @@ pub(crate) mod printing { self.for_token.to_tokens(tokens); self.pat.to_tokens(tokens); self.in_token.to_tokens(tokens); - wrap_bare_struct(tokens, &self.expr); + print_expr(&self.expr, tokens, FixupContext::new_condition()); self.body.brace_token.surround(tokens, |tokens| { inner_attrs_to_tokens(&self.attrs, tokens); tokens.append_all(&self.body.stmts); @@ -3014,7 +3627,7 @@ pub(crate) mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprGroup { fn to_tokens(&self, tokens: &mut TokenStream) { outer_attrs_to_tokens(&self.attrs, tokens); @@ -3025,38 +3638,74 @@ pub(crate) mod printing { } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprIf { fn to_tokens(&self, tokens: &mut TokenStream) { outer_attrs_to_tokens(&self.attrs, tokens); - self.if_token.to_tokens(tokens); - wrap_bare_struct(tokens, &self.cond); - self.then_branch.to_tokens(tokens); - if let Some((else_token, else_)) = &self.else_branch { + + let mut expr = self; + loop { + expr.if_token.to_tokens(tokens); + print_expr(&expr.cond, tokens, FixupContext::new_condition()); + expr.then_branch.to_tokens(tokens); + + let (else_token, else_) = match &expr.else_branch { + Some(else_branch) => else_branch, + None => break, + }; + else_token.to_tokens(tokens); - // If we are not one of the valid expressions to exist in an else - // clause, wrap ourselves in a block. - match **else_ { - Expr::If(_) | Expr::Block(_) => else_.to_tokens(tokens), - _ => token::Brace::default().surround(tokens, |tokens| else_.to_tokens(tokens)), + match &**else_ { + Expr::If(next) => { + expr = next; + } + Expr::Block(last) => { + last.to_tokens(tokens); + break; + } + // If this is not one of the valid expressions to exist in + // an else clause, wrap it in a block. 
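As a side note on the else-branch handling above: a minimal standalone sketch, assuming syn with the "full" feature plus the quote crate (the identifiers `a` and `b` and the literals are placeholders), of why a bare non-`if`, non-block else operand has to be wrapped in braces when printed:

use quote::ToTokens;
use syn::{parse_quote, Expr};

fn main() {
    // Start from an `if` that parses normally, then swap in an else-branch
    // that surface syntax cannot express directly: `else` must be followed
    // by a block or another `if`, never a bare `match`.
    let mut e: Expr = parse_quote!(if a { 1 } else { 0 });
    if let Expr::If(expr_if) = &mut e {
        expr_if.else_branch.as_mut().unwrap().1 = Box::new(parse_quote!(match b { _ => 2 }));
    }
    // Printing should re-insert the braces, roughly
    // `if a { 1 } else { match b { _ => 2 } }`, so the tokens parse again.
    println!("{}", e.to_token_stream());
}

That brace insertion is exactly what the `other =>` arm above performs with `token::Brace::default().surround`.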
+ other => { + token::Brace::default().surround(tokens, |tokens| { + print_expr(other, tokens, FixupContext::new_stmt()); + }); + break; + } } } } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprIndex { fn to_tokens(&self, tokens: &mut TokenStream) { - outer_attrs_to_tokens(&self.attrs, tokens); - self.expr.to_tokens(tokens); - self.bracket_token.surround(tokens, |tokens| { - self.index.to_tokens(tokens); - }); + print_expr_index(self, tokens, FixupContext::NONE); } } + fn print_expr_index(e: &ExprIndex, tokens: &mut TokenStream, fixup: FixupContext) { + outer_attrs_to_tokens(&e.attrs, tokens); + let (left_prec, left_fixup) = fixup.leftmost_subexpression_with_operator( + &e.expr, + #[cfg(feature = "full")] + true, + false, + #[cfg(feature = "full")] + Precedence::Unambiguous, + ); + print_subexpression( + &e.expr, + left_prec < Precedence::Unambiguous, + tokens, + left_fixup, + ); + e.bracket_token.surround(tokens, |tokens| { + e.index.to_tokens(tokens); + }); + } + #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprInfer { fn to_tokens(&self, tokens: &mut TokenStream) { outer_attrs_to_tokens(&self.attrs, tokens); @@ -3065,18 +3714,24 @@ pub(crate) mod printing { } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprLet { fn to_tokens(&self, tokens: &mut TokenStream) { - outer_attrs_to_tokens(&self.attrs, tokens); - self.let_token.to_tokens(tokens); - self.pat.to_tokens(tokens); - self.eq_token.to_tokens(tokens); - wrap_bare_struct(tokens, &self.expr); + print_expr_let(self, tokens, FixupContext::NONE); } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg(feature = "full")] + fn print_expr_let(e: &ExprLet, tokens: &mut TokenStream, fixup: FixupContext) { + outer_attrs_to_tokens(&e.attrs, tokens); + e.let_token.to_tokens(tokens); + e.pat.to_tokens(tokens); + e.eq_token.to_tokens(tokens); + let (right_prec, right_fixup) = fixup.rightmost_subexpression(&e.expr, Precedence::Let); + print_subexpression(&e.expr, right_prec < Precedence::Let, tokens, right_fixup); + } + + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprLit { fn to_tokens(&self, tokens: &mut TokenStream) { outer_attrs_to_tokens(&self.attrs, tokens); @@ -3085,7 +3740,7 @@ pub(crate) mod printing { } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprLoop { fn to_tokens(&self, tokens: &mut TokenStream) { outer_attrs_to_tokens(&self.attrs, tokens); @@ -3098,7 +3753,7 @@ pub(crate) mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprMacro { fn to_tokens(&self, tokens: &mut TokenStream) { outer_attrs_to_tokens(&self.attrs, tokens); @@ -3107,12 +3762,12 @@ pub(crate) mod printing { } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprMatch { fn to_tokens(&self, tokens: &mut TokenStream) { outer_attrs_to_tokens(&self.attrs, tokens); self.match_token.to_tokens(tokens); - wrap_bare_struct(tokens, &self.expr); + print_expr(&self.expr, tokens, FixupContext::new_condition()); self.brace_token.surround(tokens, 
|tokens| { inner_attrs_to_tokens(&self.attrs, tokens); for (i, arm) in self.arms.iter().enumerate() { @@ -3120,7 +3775,10 @@ pub(crate) mod printing { // Ensure that we have a comma after a non-block arm, except // for the last one. let is_last = i == self.arms.len() - 1; - if !is_last && requires_terminator(&arm.body) && arm.comma.is_none() { + if !is_last + && classify::requires_comma_to_be_match_arm(&arm.body) + && arm.comma.is_none() + { <Token![,]>::default().to_tokens(tokens); } } @@ -3128,22 +3786,37 @@ pub(crate) mod printing { } } - #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprMethodCall { fn to_tokens(&self, tokens: &mut TokenStream) { - outer_attrs_to_tokens(&self.attrs, tokens); - self.receiver.to_tokens(tokens); - self.dot_token.to_tokens(tokens); - self.method.to_tokens(tokens); - self.turbofish.to_tokens(tokens); - self.paren_token.surround(tokens, |tokens| { - self.args.to_tokens(tokens); - }); + print_expr_method_call(self, tokens, FixupContext::NONE); } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + fn print_expr_method_call(e: &ExprMethodCall, tokens: &mut TokenStream, fixup: FixupContext) { + outer_attrs_to_tokens(&e.attrs, tokens); + let (left_prec, left_fixup) = fixup.leftmost_subexpression_with_dot(&e.receiver); + print_subexpression( + &e.receiver, + left_prec < Precedence::Unambiguous, + tokens, + left_fixup, + ); + e.dot_token.to_tokens(tokens); + e.method.to_tokens(tokens); + if let Some(turbofish) = &e.turbofish { + path::printing::print_angle_bracketed_generic_arguments( + tokens, + turbofish, + PathStyle::Expr, + ); + } + e.paren_token.surround(tokens, |tokens| { + e.args.to_tokens(tokens); + }); + } + + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprParen { fn to_tokens(&self, tokens: &mut TokenStream) { outer_attrs_to_tokens(&self.attrs, tokens); @@ -3153,38 +3826,106 @@ pub(crate) mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprPath { fn to_tokens(&self, tokens: &mut TokenStream) { outer_attrs_to_tokens(&self.attrs, tokens); - path::printing::print_path(tokens, &self.qself, &self.path); + path::printing::print_qpath(tokens, &self.qself, &self.path, PathStyle::Expr); } } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprRange { fn to_tokens(&self, tokens: &mut TokenStream) { - outer_attrs_to_tokens(&self.attrs, tokens); - self.start.to_tokens(tokens); - self.limits.to_tokens(tokens); - self.end.to_tokens(tokens); + print_expr_range(self, tokens, FixupContext::NONE); } } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + fn print_expr_range(e: &ExprRange, tokens: &mut TokenStream, mut fixup: FixupContext) { + outer_attrs_to_tokens(&e.attrs, tokens); + + let needs_group = !e.attrs.is_empty(); + if needs_group { + fixup = FixupContext::NONE; + } + + let do_print_expr = |tokens: &mut TokenStream| { + if let Some(start) = &e.start { + let (left_prec, left_fixup) = fixup.leftmost_subexpression_with_operator( + start, + true, + false, + Precedence::Range, + ); + print_subexpression(start, left_prec <= Precedence::Range, tokens, left_fixup); + } + e.limits.to_tokens(tokens); + if let Some(end) = &e.end { + let right_fixup = + fixup.rightmost_subexpression_fixup(false, true, 
Precedence::Range); + let right_prec = right_fixup.rightmost_subexpression_precedence(end); + print_subexpression(end, right_prec <= Precedence::Range, tokens, right_fixup); + } + }; + + if needs_group { + token::Paren::default().surround(tokens, do_print_expr); + } else { + do_print_expr(tokens); + } + } + + #[cfg(feature = "full")] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] + impl ToTokens for ExprRawAddr { + fn to_tokens(&self, tokens: &mut TokenStream) { + print_expr_raw_addr(self, tokens, FixupContext::NONE); + } + } + + #[cfg(feature = "full")] + fn print_expr_raw_addr(e: &ExprRawAddr, tokens: &mut TokenStream, fixup: FixupContext) { + outer_attrs_to_tokens(&e.attrs, tokens); + e.and_token.to_tokens(tokens); + e.raw.to_tokens(tokens); + e.mutability.to_tokens(tokens); + let (right_prec, right_fixup) = fixup.rightmost_subexpression(&e.expr, Precedence::Prefix); + print_subexpression( + &e.expr, + right_prec < Precedence::Prefix, + tokens, + right_fixup, + ); + } + + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprReference { fn to_tokens(&self, tokens: &mut TokenStream) { - outer_attrs_to_tokens(&self.attrs, tokens); - self.and_token.to_tokens(tokens); - self.mutability.to_tokens(tokens); - self.expr.to_tokens(tokens); + print_expr_reference(self, tokens, FixupContext::NONE); } } + fn print_expr_reference(e: &ExprReference, tokens: &mut TokenStream, fixup: FixupContext) { + outer_attrs_to_tokens(&e.attrs, tokens); + e.and_token.to_tokens(tokens); + e.mutability.to_tokens(tokens); + let (right_prec, right_fixup) = fixup.rightmost_subexpression( + &e.expr, + #[cfg(feature = "full")] + Precedence::Prefix, + ); + print_subexpression( + &e.expr, + right_prec < Precedence::Prefix, + tokens, + right_fixup, + ); + } + #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprRepeat { fn to_tokens(&self, tokens: &mut TokenStream) { outer_attrs_to_tokens(&self.attrs, tokens); @@ -3197,21 +3938,31 @@ pub(crate) mod printing { } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprReturn { fn to_tokens(&self, tokens: &mut TokenStream) { - outer_attrs_to_tokens(&self.attrs, tokens); - self.return_token.to_tokens(tokens); - self.expr.to_tokens(tokens); + print_expr_return(self, tokens, FixupContext::NONE); } } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + fn print_expr_return(e: &ExprReturn, tokens: &mut TokenStream, fixup: FixupContext) { + outer_attrs_to_tokens(&e.attrs, tokens); + e.return_token.to_tokens(tokens); + if let Some(expr) = &e.expr { + print_expr( + expr, + tokens, + fixup.rightmost_subexpression_fixup(true, false, Precedence::Jump), + ); + } + } + + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprStruct { fn to_tokens(&self, tokens: &mut TokenStream) { outer_attrs_to_tokens(&self.attrs, tokens); - path::printing::print_path(tokens, &self.qself, &self.path); + path::printing::print_qpath(tokens, &self.qself, &self.path, PathStyle::Expr); self.brace_token.surround(tokens, |tokens| { self.fields.to_tokens(tokens); if let Some(dot2_token) = &self.dot2_token { @@ -3225,17 +3976,28 @@ pub(crate) mod printing { } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprTry { fn to_tokens(&self, 
tokens: &mut TokenStream) { - outer_attrs_to_tokens(&self.attrs, tokens); - self.expr.to_tokens(tokens); - self.question_token.to_tokens(tokens); + print_expr_try(self, tokens, FixupContext::NONE); } } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + fn print_expr_try(e: &ExprTry, tokens: &mut TokenStream, fixup: FixupContext) { + outer_attrs_to_tokens(&e.attrs, tokens); + let (left_prec, left_fixup) = fixup.leftmost_subexpression_with_dot(&e.expr); + print_subexpression( + &e.expr, + left_prec < Precedence::Unambiguous, + tokens, + left_fixup, + ); + e.question_token.to_tokens(tokens); + } + + #[cfg(feature = "full")] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprTryBlock { fn to_tokens(&self, tokens: &mut TokenStream) { outer_attrs_to_tokens(&self.attrs, tokens); @@ -3244,8 +4006,7 @@ pub(crate) mod printing { } } - #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprTuple { fn to_tokens(&self, tokens: &mut TokenStream) { outer_attrs_to_tokens(&self.attrs, tokens); @@ -3260,17 +4021,31 @@ pub(crate) mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprUnary { fn to_tokens(&self, tokens: &mut TokenStream) { - outer_attrs_to_tokens(&self.attrs, tokens); - self.op.to_tokens(tokens); - self.expr.to_tokens(tokens); + print_expr_unary(self, tokens, FixupContext::NONE); } } + fn print_expr_unary(e: &ExprUnary, tokens: &mut TokenStream, fixup: FixupContext) { + outer_attrs_to_tokens(&e.attrs, tokens); + e.op.to_tokens(tokens); + let (right_prec, right_fixup) = fixup.rightmost_subexpression( + &e.expr, + #[cfg(feature = "full")] + Precedence::Prefix, + ); + print_subexpression( + &e.expr, + right_prec < Precedence::Prefix, + tokens, + right_fixup, + ); + } + #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprUnsafe { fn to_tokens(&self, tokens: &mut TokenStream) { outer_attrs_to_tokens(&self.attrs, tokens); @@ -3283,13 +4058,13 @@ pub(crate) mod printing { } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprWhile { fn to_tokens(&self, tokens: &mut TokenStream) { outer_attrs_to_tokens(&self.attrs, tokens); self.label.to_tokens(tokens); self.while_token.to_tokens(tokens); - wrap_bare_struct(tokens, &self.cond); + print_expr(&self.cond, tokens, FixupContext::new_condition()); self.body.brace_token.surround(tokens, |tokens| { inner_attrs_to_tokens(&self.attrs, tokens); tokens.append_all(&self.body.stmts); @@ -3298,17 +4073,28 @@ pub(crate) mod printing { } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprYield { fn to_tokens(&self, tokens: &mut TokenStream) { - outer_attrs_to_tokens(&self.attrs, tokens); - self.yield_token.to_tokens(tokens); - self.expr.to_tokens(tokens); + print_expr_yield(self, tokens, FixupContext::NONE); } } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + fn print_expr_yield(e: &ExprYield, tokens: &mut TokenStream, fixup: FixupContext) { + outer_attrs_to_tokens(&e.attrs, tokens); + e.yield_token.to_tokens(tokens); + if let Some(expr) = &e.expr { + print_expr( + expr, + 
tokens, + fixup.rightmost_subexpression_fixup(true, false, Precedence::Jump), + ); + } + } + + #[cfg(feature = "full")] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for Arm { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(&self.attrs); @@ -3318,13 +4104,12 @@ pub(crate) mod printing { guard.to_tokens(tokens); } self.fat_arrow_token.to_tokens(tokens); - self.body.to_tokens(tokens); + print_expr(&self.body, tokens, FixupContext::new_match_arm()); self.comma.to_tokens(tokens); } } - #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for FieldValue { fn to_tokens(&self, tokens: &mut TokenStream) { outer_attrs_to_tokens(&self.attrs, tokens); @@ -3336,7 +4121,7 @@ pub(crate) mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for Index { fn to_tokens(&self, tokens: &mut TokenStream) { let mut lit = Literal::i64_unsuffixed(i64::from(self.index)); @@ -3346,7 +4131,7 @@ pub(crate) mod printing { } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for Label { fn to_tokens(&self, tokens: &mut TokenStream) { self.name.to_tokens(tokens); @@ -3354,7 +4139,7 @@ pub(crate) mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for Member { fn to_tokens(&self, tokens: &mut TokenStream) { match self { @@ -3365,7 +4150,7 @@ pub(crate) mod printing { } #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for RangeLimits { fn to_tokens(&self, tokens: &mut TokenStream) { match self { @@ -3374,4 +4159,15 @@ pub(crate) mod printing { } } } + + #[cfg(feature = "full")] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] + impl ToTokens for PointerMutability { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + PointerMutability::Const(const_token) => const_token.to_tokens(tokens), + PointerMutability::Mut(mut_token) => mut_token.to_tokens(tokens), + } + } + } } diff --git a/vendor/syn/src/ext.rs b/vendor/syn/src/ext.rs index 9ee56725..7cf62bd4 100644 --- a/vendor/syn/src/ext.rs +++ b/vendor/syn/src/ext.rs @@ -1,11 +1,19 @@ //! Extension traits to provide parsing methods on foreign types. +#[cfg(feature = "parsing")] use crate::buffer::Cursor; +#[cfg(feature = "parsing")] +use crate::error::Result; +#[cfg(feature = "parsing")] +use crate::parse::ParseStream; +#[cfg(feature = "parsing")] use crate::parse::Peek; -use crate::parse::{ParseStream, Result}; +#[cfg(feature = "parsing")] use crate::sealed::lookahead; +#[cfg(feature = "parsing")] use crate::token::CustomToken; -use proc_macro2::Ident; +use proc_macro2::{Ident, Punct, Spacing, Span, TokenStream, TokenTree}; +use std::iter; /// Additional methods for `Ident` not provided by proc-macro2 or libproc_macro. /// @@ -42,6 +50,8 @@ pub trait IdentExt: Sized + private::Sealed { /// Ok(name) /// } /// ``` + #[cfg(feature = "parsing")] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] fn parse_any(input: ParseStream) -> Result<Self>; /// Peeks any identifier including keywords. 
Usage: @@ -49,6 +59,8 @@ pub trait IdentExt: Sized + private::Sealed { /// /// This is different from `input.peek(Ident)` which only returns true in /// the case of an ident which is not a Rust keyword. + #[cfg(feature = "parsing")] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] #[allow(non_upper_case_globals)] const peek_any: private::PeekFn = private::PeekFn; @@ -83,6 +95,7 @@ pub trait IdentExt: Sized + private::Sealed { } impl IdentExt for Ident { + #[cfg(feature = "parsing")] fn parse_any(input: ParseStream) -> Result<Self> { input.step(|cursor| match cursor.ident() { Some((ident, rest)) => Ok((ident, rest)), @@ -100,10 +113,12 @@ impl IdentExt for Ident { } } +#[cfg(feature = "parsing")] impl Peek for private::PeekFn { type Token = private::IdentAny; } +#[cfg(feature = "parsing")] impl CustomToken for private::IdentAny { fn peek(cursor: Cursor) -> bool { cursor.ident().is_some() @@ -114,8 +129,31 @@ impl CustomToken for private::IdentAny { } } +#[cfg(feature = "parsing")] impl lookahead::Sealed for private::PeekFn {} +pub(crate) trait TokenStreamExt { + fn append(&mut self, token: TokenTree); +} + +impl TokenStreamExt for TokenStream { + fn append(&mut self, token: TokenTree) { + self.extend(iter::once(token)); + } +} + +pub(crate) trait PunctExt { + fn new_spanned(ch: char, spacing: Spacing, span: Span) -> Self; +} + +impl PunctExt for Punct { + fn new_spanned(ch: char, spacing: Spacing, span: Span) -> Self { + let mut punct = Punct::new(ch, spacing); + punct.set_span(span); + punct + } +} + mod private { use proc_macro2::Ident; @@ -123,10 +161,16 @@ mod private { impl Sealed for Ident {} + #[cfg(feature = "parsing")] pub struct PeekFn; + + #[cfg(feature = "parsing")] pub struct IdentAny; + #[cfg(feature = "parsing")] impl Copy for PeekFn {} + + #[cfg(feature = "parsing")] impl Clone for PeekFn { fn clone(&self) -> Self { *self diff --git a/vendor/syn/src/file.rs b/vendor/syn/src/file.rs index 2d9f298c..066f97b1 100644 --- a/vendor/syn/src/file.rs +++ b/vendor/syn/src/file.rs @@ -1,10 +1,13 @@ -use super::*; +use crate::attr::Attribute; +use crate::item::Item; ast_struct! { /// A complete file of Rust source code. /// /// Typically `File` objects are created with [`parse_file`]. /// + /// [`parse_file`]: crate::parse_file + /// /// # Example /// /// Parse a Rust source file into a `syn::File` and print out a debug @@ -12,8 +15,7 @@ ast_struct! { /// /// ``` /// use std::env; - /// use std::fs::File; - /// use std::io::Read; + /// use std::fs; /// use std::process; /// /// fn main() { @@ -31,12 +33,8 @@ ast_struct! { /// } /// }; /// - /// let mut file = File::open(&filename).expect("Unable to open file"); - /// - /// let mut src = String::new(); - /// file.read_to_string(&mut src).expect("Unable to read file"); - /// - /// let syntax = syn::parse_file(&src).expect("Unable to parse file"); + /// let src = fs::read_to_string(&filename).expect("unable to read file"); + /// let syntax = syn::parse_file(&src).expect("unable to parse file"); /// /// // Debug impl is available if Syn is built with "extra-traits" feature. /// println!("{:#?}", syntax); @@ -77,7 +75,7 @@ ast_struct! { /// ), /// ... /// ``` - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct File { pub shebang: Option<String>, pub attrs: Vec<Attribute>, @@ -87,10 +85,12 @@ ast_struct! 
{ #[cfg(feature = "parsing")] pub(crate) mod parsing { - use super::*; - use crate::parse::{Parse, ParseStream, Result}; + use crate::attr::Attribute; + use crate::error::Result; + use crate::file::File; + use crate::parse::{Parse, ParseStream}; - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for File { fn parse(input: ParseStream) -> Result<Self> { Ok(File { @@ -110,12 +110,12 @@ pub(crate) mod parsing { #[cfg(feature = "printing")] mod printing { - use super::*; use crate::attr::FilterAttrs; + use crate::file::File; use proc_macro2::TokenStream; - use quote::{ToTokens, TokenStreamExt}; + use quote::{ToTokens, TokenStreamExt as _}; - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for File { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.inner()); diff --git a/vendor/syn/src/fixup.rs b/vendor/syn/src/fixup.rs new file mode 100644 index 00000000..6d2c3092 --- /dev/null +++ b/vendor/syn/src/fixup.rs @@ -0,0 +1,773 @@ +use crate::classify; +use crate::expr::Expr; +#[cfg(feature = "full")] +use crate::expr::{ + ExprBreak, ExprRange, ExprRawAddr, ExprReference, ExprReturn, ExprUnary, ExprYield, +}; +use crate::precedence::Precedence; +#[cfg(feature = "full")] +use crate::ty::ReturnType; + +pub(crate) struct FixupContext { + #[cfg(feature = "full")] + previous_operator: Precedence, + #[cfg(feature = "full")] + next_operator: Precedence, + + // Print expression such that it can be parsed back as a statement + // consisting of the original expression. + // + // The effect of this is for binary operators in statement position to set + // `leftmost_subexpression_in_stmt` when printing their left-hand operand. + // + // (match x {}) - 1; // match needs parens when LHS of binary operator + // + // match x {}; // not when its own statement + // + #[cfg(feature = "full")] + stmt: bool, + + // This is the difference between: + // + // (match x {}) - 1; // subexpression needs parens + // + // let _ = match x {} - 1; // no parens + // + // There are 3 distinguishable contexts in which `print_expr` might be + // called with the expression `$match` as its argument, where `$match` + // represents an expression of kind `ExprKind::Match`: + // + // - stmt=false leftmost_subexpression_in_stmt=false + // + // Example: `let _ = $match - 1;` + // + // No parentheses required. + // + // - stmt=false leftmost_subexpression_in_stmt=true + // + // Example: `$match - 1;` + // + // Must parenthesize `($match)`, otherwise parsing back the output as a + // statement would terminate the statement after the closing brace of + // the match, parsing `-1;` as a separate statement. + // + // - stmt=true leftmost_subexpression_in_stmt=false + // + // Example: `$match;` + // + // No parentheses required. + #[cfg(feature = "full")] + leftmost_subexpression_in_stmt: bool, + + // Print expression such that it can be parsed as a match arm. + // + // This is almost equivalent to `stmt`, but the grammar diverges a tiny bit + // between statements and match arms when it comes to braced macro calls. + // Macro calls with brace delimiter terminate a statement without a + // semicolon, but do not terminate a match-arm without comma. + // + // m! {} - 1; // two statements: a macro call followed by -1 literal + // + // match () { + // _ => m! 
{} - 1, // binary subtraction operator + // } + // + #[cfg(feature = "full")] + match_arm: bool, + + // This is almost equivalent to `leftmost_subexpression_in_stmt`, other than + // for braced macro calls. + // + // If we have `m! {} - 1` as an expression, the leftmost subexpression + // `m! {}` will need to be parenthesized in the statement case but not the + // match-arm case. + // + // (m! {}) - 1; // subexpression needs parens + // + // match () { + // _ => m! {} - 1, // no parens + // } + // + #[cfg(feature = "full")] + leftmost_subexpression_in_match_arm: bool, + + // This is the difference between: + // + // if let _ = (Struct {}) {} // needs parens + // + // match () { + // () if let _ = Struct {} => {} // no parens + // } + // + #[cfg(feature = "full")] + condition: bool, + + // This is the difference between: + // + // if break Struct {} == (break) {} // needs parens + // + // if break break == Struct {} {} // no parens + // + #[cfg(feature = "full")] + rightmost_subexpression_in_condition: bool, + + // This is the difference between: + // + // if break ({ x }).field + 1 {} needs parens + // + // if break 1 + { x }.field {} // no parens + // + #[cfg(feature = "full")] + leftmost_subexpression_in_optional_operand: bool, + + // This is the difference between: + // + // let _ = (return) - 1; // without paren, this would return -1 + // + // let _ = return + 1; // no paren because '+' cannot begin expr + // + #[cfg(feature = "full")] + next_operator_can_begin_expr: bool, + + // This is the difference between: + // + // let _ = 1 + return 1; // no parens if rightmost subexpression + // + // let _ = 1 + (return 1) + 1; // needs parens + // + #[cfg(feature = "full")] + next_operator_can_continue_expr: bool, + + // This is the difference between: + // + // let _ = x as u8 + T; + // + // let _ = (x as u8) < T; + // + // Without parens, the latter would want to parse `u8<T...` as a type. + next_operator_can_begin_generics: bool, +} + +impl FixupContext { + /// The default amount of fixing is minimal fixing. Fixups should be turned + /// on in a targeted fashion where needed. + pub const NONE: Self = FixupContext { + #[cfg(feature = "full")] + previous_operator: Precedence::MIN, + #[cfg(feature = "full")] + next_operator: Precedence::MIN, + #[cfg(feature = "full")] + stmt: false, + #[cfg(feature = "full")] + leftmost_subexpression_in_stmt: false, + #[cfg(feature = "full")] + match_arm: false, + #[cfg(feature = "full")] + leftmost_subexpression_in_match_arm: false, + #[cfg(feature = "full")] + condition: false, + #[cfg(feature = "full")] + rightmost_subexpression_in_condition: false, + #[cfg(feature = "full")] + leftmost_subexpression_in_optional_operand: false, + #[cfg(feature = "full")] + next_operator_can_begin_expr: false, + #[cfg(feature = "full")] + next_operator_can_continue_expr: false, + next_operator_can_begin_generics: false, + }; + + /// Create the initial fixup for printing an expression in statement + /// position. + #[cfg(feature = "full")] + pub fn new_stmt() -> Self { + FixupContext { + stmt: true, + ..FixupContext::NONE + } + } + + /// Create the initial fixup for printing an expression as the right-hand + /// side of a match arm. + #[cfg(feature = "full")] + pub fn new_match_arm() -> Self { + FixupContext { + match_arm: true, + ..FixupContext::NONE + } + } + + /// Create the initial fixup for printing an expression as the "condition" + /// of an `if` or `while`. 
There are a few other positions which are + /// grammatically equivalent and also use this, such as the iterator + /// expression in `for` and the scrutinee in `match`. + #[cfg(feature = "full")] + pub fn new_condition() -> Self { + FixupContext { + condition: true, + rightmost_subexpression_in_condition: true, + ..FixupContext::NONE + } + } + + /// Transform this fixup into the one that should apply when printing the + /// leftmost subexpression of the current expression. + /// + /// The leftmost subexpression is any subexpression that has the same first + /// token as the current expression, but has a different last token. + /// + /// For example in `$a + $b` and `$a.method()`, the subexpression `$a` is a + /// leftmost subexpression. + /// + /// Not every expression has a leftmost subexpression. For example neither + /// `-$a` nor `[$a]` have one. + pub fn leftmost_subexpression_with_operator( + self, + expr: &Expr, + #[cfg(feature = "full")] next_operator_can_begin_expr: bool, + next_operator_can_begin_generics: bool, + #[cfg(feature = "full")] precedence: Precedence, + ) -> (Precedence, Self) { + let fixup = FixupContext { + #[cfg(feature = "full")] + next_operator: precedence, + #[cfg(feature = "full")] + stmt: false, + #[cfg(feature = "full")] + leftmost_subexpression_in_stmt: self.stmt || self.leftmost_subexpression_in_stmt, + #[cfg(feature = "full")] + match_arm: false, + #[cfg(feature = "full")] + leftmost_subexpression_in_match_arm: self.match_arm + || self.leftmost_subexpression_in_match_arm, + #[cfg(feature = "full")] + rightmost_subexpression_in_condition: false, + #[cfg(feature = "full")] + next_operator_can_begin_expr, + #[cfg(feature = "full")] + next_operator_can_continue_expr: true, + next_operator_can_begin_generics, + ..self + }; + + (fixup.leftmost_subexpression_precedence(expr), fixup) + } + + /// Transform this fixup into the one that should apply when printing a + /// leftmost subexpression followed by a `.` or `?` token, which confer + /// different statement boundary rules compared to other leftmost + /// subexpressions. + pub fn leftmost_subexpression_with_dot(self, expr: &Expr) -> (Precedence, Self) { + let fixup = FixupContext { + #[cfg(feature = "full")] + next_operator: Precedence::Unambiguous, + #[cfg(feature = "full")] + stmt: self.stmt || self.leftmost_subexpression_in_stmt, + #[cfg(feature = "full")] + leftmost_subexpression_in_stmt: false, + #[cfg(feature = "full")] + match_arm: self.match_arm || self.leftmost_subexpression_in_match_arm, + #[cfg(feature = "full")] + leftmost_subexpression_in_match_arm: false, + #[cfg(feature = "full")] + rightmost_subexpression_in_condition: false, + #[cfg(feature = "full")] + next_operator_can_begin_expr: false, + #[cfg(feature = "full")] + next_operator_can_continue_expr: true, + next_operator_can_begin_generics: false, + ..self + }; + + (fixup.leftmost_subexpression_precedence(expr), fixup) + } + + fn leftmost_subexpression_precedence(self, expr: &Expr) -> Precedence { + #[cfg(feature = "full")] + if !self.next_operator_can_begin_expr || self.next_operator == Precedence::Range { + if let Scan::Bailout = scan_right(expr, self, Precedence::MIN, 0, 0) { + if scan_left(expr, self) { + return Precedence::Unambiguous; + } + } + } + + self.precedence(expr) + } + + /// Transform this fixup into the one that should apply when printing the + /// rightmost subexpression of the current expression. 
+ /// + /// The rightmost subexpression is any subexpression that has a different + /// first token than the current expression, but has the same last token. + /// + /// For example in `$a + $b` and `-$b`, the subexpression `$b` is a + /// rightmost subexpression. + /// + /// Not every expression has a rightmost subexpression. For example neither + /// `[$b]` nor `$a.f($b)` have one. + pub fn rightmost_subexpression( + self, + expr: &Expr, + #[cfg(feature = "full")] precedence: Precedence, + ) -> (Precedence, Self) { + let fixup = self.rightmost_subexpression_fixup( + #[cfg(feature = "full")] + false, + #[cfg(feature = "full")] + false, + #[cfg(feature = "full")] + precedence, + ); + (fixup.rightmost_subexpression_precedence(expr), fixup) + } + + pub fn rightmost_subexpression_fixup( + self, + #[cfg(feature = "full")] reset_allow_struct: bool, + #[cfg(feature = "full")] optional_operand: bool, + #[cfg(feature = "full")] precedence: Precedence, + ) -> Self { + FixupContext { + #[cfg(feature = "full")] + previous_operator: precedence, + #[cfg(feature = "full")] + stmt: false, + #[cfg(feature = "full")] + leftmost_subexpression_in_stmt: false, + #[cfg(feature = "full")] + match_arm: false, + #[cfg(feature = "full")] + leftmost_subexpression_in_match_arm: false, + #[cfg(feature = "full")] + condition: self.condition && !reset_allow_struct, + #[cfg(feature = "full")] + leftmost_subexpression_in_optional_operand: self.condition && optional_operand, + ..self + } + } + + pub fn rightmost_subexpression_precedence(self, expr: &Expr) -> Precedence { + let default_prec = self.precedence(expr); + + #[cfg(feature = "full")] + if match self.previous_operator { + Precedence::Assign | Precedence::Let | Precedence::Prefix => { + default_prec < self.previous_operator + } + _ => default_prec <= self.previous_operator, + } && match self.next_operator { + Precedence::Range | Precedence::Or | Precedence::And => true, + _ => !self.next_operator_can_begin_expr, + } { + if let Scan::Bailout | Scan::Fail = scan_right(expr, self, self.previous_operator, 1, 0) + { + if scan_left(expr, self) { + return Precedence::Prefix; + } + } + } + + default_prec + } + + /// Determine whether parentheses are needed around the given expression to + /// head off the early termination of a statement or condition. + #[cfg(feature = "full")] + pub fn parenthesize(self, expr: &Expr) -> bool { + (self.leftmost_subexpression_in_stmt && !classify::requires_semi_to_be_stmt(expr)) + || ((self.stmt || self.leftmost_subexpression_in_stmt) && matches!(expr, Expr::Let(_))) + || (self.leftmost_subexpression_in_match_arm + && !classify::requires_comma_to_be_match_arm(expr)) + || (self.condition && matches!(expr, Expr::Struct(_))) + || (self.rightmost_subexpression_in_condition + && matches!( + expr, + Expr::Return(ExprReturn { expr: None, .. }) + | Expr::Yield(ExprYield { expr: None, .. }) + )) + || (self.rightmost_subexpression_in_condition + && !self.condition + && matches!( + expr, + Expr::Break(ExprBreak { expr: None, .. }) + | Expr::Path(_) + | Expr::Range(ExprRange { end: None, .. }) + )) + || (self.leftmost_subexpression_in_optional_operand + && matches!(expr, Expr::Block(expr) if expr.attrs.is_empty() && expr.label.is_none())) + } + + /// Determines the effective precedence of a subexpression. Some expressions + /// have higher or lower precedence when adjacent to particular operators. 
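To make the statement-position examples from the comments in this module concrete, a small compiling sketch in plain Rust (nothing syn-specific; `x` is a placeholder) of the ambiguity that the `stmt` / `leftmost_subexpression_in_stmt` flags exist to resolve:

fn demo(x: Option<i32>) -> i32 {
    // In statement position, the parentheses are what make this one
    // subtraction statement; without them `match x { ... }` would end the
    // statement and `- 1;` would parse as a separate negation statement.
    (match x { Some(n) => n, None => 0 }) - 1;

    // In value position the same expression needs no parentheses at all.
    match x { Some(n) => n, None => 0 } - 1
}

fn main() {
    assert_eq!(demo(Some(3)), 2);
}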
+ fn precedence(self, expr: &Expr) -> Precedence { + #[cfg(feature = "full")] + if self.next_operator_can_begin_expr { + // Decrease precedence of value-less jumps when followed by an + // operator that would otherwise get interpreted as beginning a + // value for the jump. + if let Expr::Break(ExprBreak { expr: None, .. }) + | Expr::Return(ExprReturn { expr: None, .. }) + | Expr::Yield(ExprYield { expr: None, .. }) = expr + { + return Precedence::Jump; + } + } + + #[cfg(feature = "full")] + if !self.next_operator_can_continue_expr { + match expr { + // Increase precedence of expressions that extend to the end of + // current statement or group. + Expr::Break(_) + | Expr::Closure(_) + | Expr::Let(_) + | Expr::Return(_) + | Expr::Yield(_) => { + return Precedence::Prefix; + } + Expr::Range(e) if e.start.is_none() => return Precedence::Prefix, + _ => {} + } + } + + if self.next_operator_can_begin_generics { + if let Expr::Cast(cast) = expr { + if classify::trailing_unparameterized_path(&cast.ty) { + return Precedence::MIN; + } + } + } + + Precedence::of(expr) + } +} + +impl Copy for FixupContext {} + +impl Clone for FixupContext { + fn clone(&self) -> Self { + *self + } +} + +#[cfg(feature = "full")] +enum Scan { + Fail, + Bailout, + Consume, +} + +#[cfg(feature = "full")] +impl Copy for Scan {} + +#[cfg(feature = "full")] +impl Clone for Scan { + fn clone(&self) -> Self { + *self + } +} + +#[cfg(feature = "full")] +impl PartialEq for Scan { + fn eq(&self, other: &Self) -> bool { + *self as u8 == *other as u8 + } +} + +#[cfg(feature = "full")] +fn scan_left(expr: &Expr, fixup: FixupContext) -> bool { + match expr { + Expr::Assign(_) => fixup.previous_operator <= Precedence::Assign, + Expr::Binary(e) => match Precedence::of_binop(&e.op) { + Precedence::Assign => fixup.previous_operator <= Precedence::Assign, + binop_prec => fixup.previous_operator < binop_prec, + }, + Expr::Cast(_) => fixup.previous_operator < Precedence::Cast, + Expr::Range(e) => e.start.is_none() || fixup.previous_operator < Precedence::Assign, + _ => true, + } +} + +#[cfg(feature = "full")] +fn scan_right( + expr: &Expr, + fixup: FixupContext, + precedence: Precedence, + fail_offset: u8, + bailout_offset: u8, +) -> Scan { + let consume_by_precedence = if match precedence { + Precedence::Assign | Precedence::Compare => precedence <= fixup.next_operator, + _ => precedence < fixup.next_operator, + } || fixup.next_operator == Precedence::MIN + { + Scan::Consume + } else { + Scan::Bailout + }; + if fixup.parenthesize(expr) { + return consume_by_precedence; + } + match expr { + Expr::Assign(e) if e.attrs.is_empty() => { + if match fixup.next_operator { + Precedence::Unambiguous => fail_offset >= 2, + _ => bailout_offset >= 1, + } { + return Scan::Consume; + } + let right_fixup = fixup.rightmost_subexpression_fixup(false, false, Precedence::Assign); + let scan = scan_right( + &e.right, + right_fixup, + Precedence::Assign, + match fixup.next_operator { + Precedence::Unambiguous => fail_offset, + _ => 1, + }, + 1, + ); + if let Scan::Bailout | Scan::Consume = scan { + Scan::Consume + } else if let Precedence::Unambiguous = fixup.next_operator { + Scan::Fail + } else { + Scan::Bailout + } + } + Expr::Binary(e) if e.attrs.is_empty() => { + if match fixup.next_operator { + Precedence::Unambiguous => { + fail_offset >= 2 + && (consume_by_precedence == Scan::Consume || bailout_offset >= 1) + } + _ => bailout_offset >= 1, + } { + return Scan::Consume; + } + let binop_prec = Precedence::of_binop(&e.op); + if binop_prec == 
Precedence::Compare && fixup.next_operator == Precedence::Compare { + return Scan::Consume; + } + let right_fixup = fixup.rightmost_subexpression_fixup(false, false, binop_prec); + let scan = scan_right( + &e.right, + right_fixup, + binop_prec, + match fixup.next_operator { + Precedence::Unambiguous => fail_offset, + _ => 1, + }, + consume_by_precedence as u8 - Scan::Bailout as u8, + ); + match scan { + Scan::Fail => {} + Scan::Bailout => return consume_by_precedence, + Scan::Consume => return Scan::Consume, + } + let right_needs_group = binop_prec != Precedence::Assign + && right_fixup.rightmost_subexpression_precedence(&e.right) <= binop_prec; + if right_needs_group { + consume_by_precedence + } else if let (Scan::Fail, Precedence::Unambiguous) = (scan, fixup.next_operator) { + Scan::Fail + } else { + Scan::Bailout + } + } + Expr::RawAddr(ExprRawAddr { expr, .. }) + | Expr::Reference(ExprReference { expr, .. }) + | Expr::Unary(ExprUnary { expr, .. }) => { + if match fixup.next_operator { + Precedence::Unambiguous => { + fail_offset >= 2 + && (consume_by_precedence == Scan::Consume || bailout_offset >= 1) + } + _ => bailout_offset >= 1, + } { + return Scan::Consume; + } + let right_fixup = fixup.rightmost_subexpression_fixup(false, false, Precedence::Prefix); + let scan = scan_right( + expr, + right_fixup, + precedence, + match fixup.next_operator { + Precedence::Unambiguous => fail_offset, + _ => 1, + }, + consume_by_precedence as u8 - Scan::Bailout as u8, + ); + match scan { + Scan::Fail => {} + Scan::Bailout => return consume_by_precedence, + Scan::Consume => return Scan::Consume, + } + if right_fixup.rightmost_subexpression_precedence(expr) < Precedence::Prefix { + consume_by_precedence + } else if let (Scan::Fail, Precedence::Unambiguous) = (scan, fixup.next_operator) { + Scan::Fail + } else { + Scan::Bailout + } + } + Expr::Range(e) if e.attrs.is_empty() => match &e.end { + Some(end) => { + if fail_offset >= 2 { + return Scan::Consume; + } + let right_fixup = + fixup.rightmost_subexpression_fixup(false, true, Precedence::Range); + let scan = scan_right( + end, + right_fixup, + Precedence::Range, + fail_offset, + match fixup.next_operator { + Precedence::Assign | Precedence::Range => 0, + _ => 1, + }, + ); + if match (scan, fixup.next_operator) { + (Scan::Fail, _) => false, + (Scan::Bailout, Precedence::Assign | Precedence::Range) => false, + (Scan::Bailout | Scan::Consume, _) => true, + } { + return Scan::Consume; + } + if right_fixup.rightmost_subexpression_precedence(end) <= Precedence::Range { + Scan::Consume + } else { + Scan::Fail + } + } + None => { + if fixup.next_operator_can_begin_expr { + Scan::Consume + } else { + Scan::Fail + } + } + }, + Expr::Break(e) => match &e.expr { + Some(value) => { + if bailout_offset >= 1 || e.label.is_none() && classify::expr_leading_label(value) { + return Scan::Consume; + } + let right_fixup = fixup.rightmost_subexpression_fixup(true, true, Precedence::Jump); + match scan_right(value, right_fixup, Precedence::Jump, 1, 1) { + Scan::Fail => Scan::Bailout, + Scan::Bailout | Scan::Consume => Scan::Consume, + } + } + None => match fixup.next_operator { + Precedence::Assign if precedence > Precedence::Assign => Scan::Fail, + _ => Scan::Consume, + }, + }, + Expr::Return(ExprReturn { expr, .. }) | Expr::Yield(ExprYield { expr, .. 
}) => match expr { + Some(e) => { + if bailout_offset >= 1 { + return Scan::Consume; + } + let right_fixup = + fixup.rightmost_subexpression_fixup(true, false, Precedence::Jump); + match scan_right(e, right_fixup, Precedence::Jump, 1, 1) { + Scan::Fail => Scan::Bailout, + Scan::Bailout | Scan::Consume => Scan::Consume, + } + } + None => match fixup.next_operator { + Precedence::Assign if precedence > Precedence::Assign => Scan::Fail, + _ => Scan::Consume, + }, + }, + Expr::Closure(e) => { + if matches!(e.output, ReturnType::Default) + || matches!(&*e.body, Expr::Block(body) if body.attrs.is_empty() && body.label.is_none()) + { + if bailout_offset >= 1 { + return Scan::Consume; + } + let right_fixup = + fixup.rightmost_subexpression_fixup(false, false, Precedence::Jump); + match scan_right(&e.body, right_fixup, Precedence::Jump, 1, 1) { + Scan::Fail => Scan::Bailout, + Scan::Bailout | Scan::Consume => Scan::Consume, + } + } else { + Scan::Consume + } + } + Expr::Let(e) => { + if bailout_offset >= 1 { + return Scan::Consume; + } + let right_fixup = fixup.rightmost_subexpression_fixup(false, false, Precedence::Let); + let scan = scan_right( + &e.expr, + right_fixup, + Precedence::Let, + 1, + if fixup.next_operator < Precedence::Let { + 0 + } else { + 1 + }, + ); + match scan { + Scan::Fail | Scan::Bailout if fixup.next_operator < Precedence::Let => { + return Scan::Bailout; + } + Scan::Consume => return Scan::Consume, + _ => {} + } + if right_fixup.rightmost_subexpression_precedence(&e.expr) < Precedence::Let { + Scan::Consume + } else if let Scan::Fail = scan { + Scan::Bailout + } else { + Scan::Consume + } + } + Expr::Array(_) + | Expr::Assign(_) + | Expr::Async(_) + | Expr::Await(_) + | Expr::Binary(_) + | Expr::Block(_) + | Expr::Call(_) + | Expr::Cast(_) + | Expr::Const(_) + | Expr::Continue(_) + | Expr::Field(_) + | Expr::ForLoop(_) + | Expr::Group(_) + | Expr::If(_) + | Expr::Index(_) + | Expr::Infer(_) + | Expr::Lit(_) + | Expr::Loop(_) + | Expr::Macro(_) + | Expr::Match(_) + | Expr::MethodCall(_) + | Expr::Paren(_) + | Expr::Path(_) + | Expr::Range(_) + | Expr::Repeat(_) + | Expr::Struct(_) + | Expr::Try(_) + | Expr::TryBlock(_) + | Expr::Tuple(_) + | Expr::Unsafe(_) + | Expr::Verbatim(_) + | Expr::While(_) => match fixup.next_operator { + Precedence::Assign | Precedence::Range if precedence == Precedence::Range => Scan::Fail, + _ if precedence == Precedence::Let && fixup.next_operator < Precedence::Let => { + Scan::Fail + } + _ => consume_by_precedence, + }, + } +} diff --git a/vendor/syn/src/gen/clone.rs b/vendor/syn/src/gen/clone.rs index d275f511..be2b6984 100644 --- a/vendor/syn/src/gen/clone.rs +++ b/vendor/syn/src/gen/clone.rs @@ -2,22 +2,21 @@ // It is not intended for manual editing. 
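The `doc_cfg` -> `docsrs` cfg rename that runs through these generated impls (and the hand-written files above) is consistent with the usual docs.rs convention; a hedged sketch of the pattern, using a hypothetical crate with a declared "printing" feature and a made-up function name:

// Crate root: enable the nightly doc_cfg machinery only when the `docsrs`
// cfg is set, e.g. by building docs with RUSTDOCFLAGS="--cfg docsrs".
#![cfg_attr(docsrs, feature(doc_cfg))]

/// Hypothetical feature-gated item; the "available on crate feature
/// `printing` only" banner is rendered only in docs built with that cfg.
#[cfg(feature = "printing")]
#[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
pub fn printing_only_example() {}

The rename itself changes no behavior; it only swaps the cfg name the documentation attributes key off.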
#![allow(clippy::clone_on_copy, clippy::expl_impl_clone_on_copy)] -use crate::*; #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for Abi { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::Abi { fn clone(&self) -> Self { - Abi { + crate::Abi { extern_token: self.extern_token.clone(), name: self.name.clone(), } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for AngleBracketedGenericArguments { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::AngleBracketedGenericArguments { fn clone(&self) -> Self { - AngleBracketedGenericArguments { + crate::AngleBracketedGenericArguments { colon2_token: self.colon2_token.clone(), lt_token: self.lt_token.clone(), args: self.args.clone(), @@ -26,10 +25,10 @@ impl Clone for AngleBracketedGenericArguments { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for Arm { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::Arm { fn clone(&self) -> Self { - Arm { + crate::Arm { attrs: self.attrs.clone(), pat: self.pat.clone(), guard: self.guard.clone(), @@ -40,10 +39,10 @@ impl Clone for Arm { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for AssocConst { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::AssocConst { fn clone(&self) -> Self { - AssocConst { + crate::AssocConst { ident: self.ident.clone(), generics: self.generics.clone(), eq_token: self.eq_token.clone(), @@ -52,10 +51,10 @@ impl Clone for AssocConst { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for AssocType { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::AssocType { fn clone(&self) -> Self { - AssocType { + crate::AssocType { ident: self.ident.clone(), generics: self.generics.clone(), eq_token: self.eq_token.clone(), @@ -64,20 +63,20 @@ impl Clone for AssocType { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Copy for AttrStyle {} +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Copy for crate::AttrStyle {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for AttrStyle { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::AttrStyle { fn clone(&self) -> Self { *self } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for Attribute { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::Attribute { fn clone(&self) -> Self { - Attribute { + crate::Attribute { pound_token: self.pound_token.clone(), style: self.style.clone(), bracket_token: self.bracket_token.clone(), @@ -86,10 +85,10 @@ impl Clone for Attribute { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for BareFnArg { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::BareFnArg { fn clone(&self) -> Self { - BareFnArg { + crate::BareFnArg { attrs: self.attrs.clone(), name: self.name.clone(), ty: self.ty.clone(), @@ -97,10 +96,10 @@ impl Clone for BareFnArg { } } #[cfg(any(feature = 
"derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for BareVariadic { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::BareVariadic { fn clone(&self) -> Self { - BareVariadic { + crate::BareVariadic { attrs: self.attrs.clone(), name: self.name.clone(), dots: self.dots.clone(), @@ -109,30 +108,30 @@ impl Clone for BareVariadic { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Copy for BinOp {} +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Copy for crate::BinOp {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for BinOp { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::BinOp { fn clone(&self) -> Self { *self } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for Block { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::Block { fn clone(&self) -> Self { - Block { + crate::Block { brace_token: self.brace_token.clone(), stmts: self.stmts.clone(), } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for BoundLifetimes { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::BoundLifetimes { fn clone(&self) -> Self { - BoundLifetimes { + crate::BoundLifetimes { for_token: self.for_token.clone(), lt_token: self.lt_token.clone(), lifetimes: self.lifetimes.clone(), @@ -140,11 +139,23 @@ impl Clone for BoundLifetimes { } } } +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::CapturedParam { + fn clone(&self) -> Self { + match self { + crate::CapturedParam::Lifetime(v0) => { + crate::CapturedParam::Lifetime(v0.clone()) + } + crate::CapturedParam::Ident(v0) => crate::CapturedParam::Ident(v0.clone()), + } + } +} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ConstParam { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ConstParam { fn clone(&self) -> Self { - ConstParam { + crate::ConstParam { attrs: self.attrs.clone(), const_token: self.const_token.clone(), ident: self.ident.clone(), @@ -156,10 +167,10 @@ impl Clone for ConstParam { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for Constraint { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::Constraint { fn clone(&self) -> Self { - Constraint { + crate::Constraint { ident: self.ident.clone(), generics: self.generics.clone(), colon_token: self.colon_token.clone(), @@ -168,21 +179,21 @@ impl Clone for Constraint { } } #[cfg(feature = "derive")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for Data { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::Data { fn clone(&self) -> Self { match self { - Data::Struct(v0) => Data::Struct(v0.clone()), - Data::Enum(v0) => Data::Enum(v0.clone()), - Data::Union(v0) => Data::Union(v0.clone()), + crate::Data::Struct(v0) => crate::Data::Struct(v0.clone()), + crate::Data::Enum(v0) => crate::Data::Enum(v0.clone()), + crate::Data::Union(v0) => crate::Data::Union(v0.clone()), } } } #[cfg(feature = "derive")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for 
DataEnum { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::DataEnum { fn clone(&self) -> Self { - DataEnum { + crate::DataEnum { enum_token: self.enum_token.clone(), brace_token: self.brace_token.clone(), variants: self.variants.clone(), @@ -190,10 +201,10 @@ impl Clone for DataEnum { } } #[cfg(feature = "derive")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for DataStruct { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::DataStruct { fn clone(&self) -> Self { - DataStruct { + crate::DataStruct { struct_token: self.struct_token.clone(), fields: self.fields.clone(), semi_token: self.semi_token.clone(), @@ -201,20 +212,20 @@ impl Clone for DataStruct { } } #[cfg(feature = "derive")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for DataUnion { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::DataUnion { fn clone(&self) -> Self { - DataUnion { + crate::DataUnion { union_token: self.union_token.clone(), fields: self.fields.clone(), } } } #[cfg(feature = "derive")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for DeriveInput { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::DeriveInput { fn clone(&self) -> Self { - DeriveInput { + crate::DeriveInput { attrs: self.attrs.clone(), vis: self.vis.clone(), ident: self.ident.clone(), @@ -224,86 +235,84 @@ impl Clone for DeriveInput { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for Expr { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::Expr { fn clone(&self) -> Self { match self { #[cfg(feature = "full")] - Expr::Array(v0) => Expr::Array(v0.clone()), - #[cfg(feature = "full")] - Expr::Assign(v0) => Expr::Assign(v0.clone()), + crate::Expr::Array(v0) => crate::Expr::Array(v0.clone()), #[cfg(feature = "full")] - Expr::Async(v0) => Expr::Async(v0.clone()), + crate::Expr::Assign(v0) => crate::Expr::Assign(v0.clone()), #[cfg(feature = "full")] - Expr::Await(v0) => Expr::Await(v0.clone()), - Expr::Binary(v0) => Expr::Binary(v0.clone()), + crate::Expr::Async(v0) => crate::Expr::Async(v0.clone()), #[cfg(feature = "full")] - Expr::Block(v0) => Expr::Block(v0.clone()), + crate::Expr::Await(v0) => crate::Expr::Await(v0.clone()), + crate::Expr::Binary(v0) => crate::Expr::Binary(v0.clone()), #[cfg(feature = "full")] - Expr::Break(v0) => Expr::Break(v0.clone()), - Expr::Call(v0) => Expr::Call(v0.clone()), - Expr::Cast(v0) => Expr::Cast(v0.clone()), + crate::Expr::Block(v0) => crate::Expr::Block(v0.clone()), #[cfg(feature = "full")] - Expr::Closure(v0) => Expr::Closure(v0.clone()), + crate::Expr::Break(v0) => crate::Expr::Break(v0.clone()), + crate::Expr::Call(v0) => crate::Expr::Call(v0.clone()), + crate::Expr::Cast(v0) => crate::Expr::Cast(v0.clone()), #[cfg(feature = "full")] - Expr::Const(v0) => Expr::Const(v0.clone()), + crate::Expr::Closure(v0) => crate::Expr::Closure(v0.clone()), #[cfg(feature = "full")] - Expr::Continue(v0) => Expr::Continue(v0.clone()), - Expr::Field(v0) => Expr::Field(v0.clone()), + crate::Expr::Const(v0) => crate::Expr::Const(v0.clone()), #[cfg(feature = "full")] - Expr::ForLoop(v0) => Expr::ForLoop(v0.clone()), - Expr::Group(v0) => Expr::Group(v0.clone()), + crate::Expr::Continue(v0) => crate::Expr::Continue(v0.clone()), + crate::Expr::Field(v0) => crate::Expr::Field(v0.clone()), #[cfg(feature = "full")] - Expr::If(v0) => 
Expr::If(v0.clone()), - Expr::Index(v0) => Expr::Index(v0.clone()), + crate::Expr::ForLoop(v0) => crate::Expr::ForLoop(v0.clone()), + crate::Expr::Group(v0) => crate::Expr::Group(v0.clone()), #[cfg(feature = "full")] - Expr::Infer(v0) => Expr::Infer(v0.clone()), + crate::Expr::If(v0) => crate::Expr::If(v0.clone()), + crate::Expr::Index(v0) => crate::Expr::Index(v0.clone()), #[cfg(feature = "full")] - Expr::Let(v0) => Expr::Let(v0.clone()), - Expr::Lit(v0) => Expr::Lit(v0.clone()), + crate::Expr::Infer(v0) => crate::Expr::Infer(v0.clone()), #[cfg(feature = "full")] - Expr::Loop(v0) => Expr::Loop(v0.clone()), - Expr::Macro(v0) => Expr::Macro(v0.clone()), + crate::Expr::Let(v0) => crate::Expr::Let(v0.clone()), + crate::Expr::Lit(v0) => crate::Expr::Lit(v0.clone()), #[cfg(feature = "full")] - Expr::Match(v0) => Expr::Match(v0.clone()), + crate::Expr::Loop(v0) => crate::Expr::Loop(v0.clone()), + crate::Expr::Macro(v0) => crate::Expr::Macro(v0.clone()), #[cfg(feature = "full")] - Expr::MethodCall(v0) => Expr::MethodCall(v0.clone()), - Expr::Paren(v0) => Expr::Paren(v0.clone()), - Expr::Path(v0) => Expr::Path(v0.clone()), + crate::Expr::Match(v0) => crate::Expr::Match(v0.clone()), + crate::Expr::MethodCall(v0) => crate::Expr::MethodCall(v0.clone()), + crate::Expr::Paren(v0) => crate::Expr::Paren(v0.clone()), + crate::Expr::Path(v0) => crate::Expr::Path(v0.clone()), #[cfg(feature = "full")] - Expr::Range(v0) => Expr::Range(v0.clone()), + crate::Expr::Range(v0) => crate::Expr::Range(v0.clone()), #[cfg(feature = "full")] - Expr::Reference(v0) => Expr::Reference(v0.clone()), + crate::Expr::RawAddr(v0) => crate::Expr::RawAddr(v0.clone()), + crate::Expr::Reference(v0) => crate::Expr::Reference(v0.clone()), #[cfg(feature = "full")] - Expr::Repeat(v0) => Expr::Repeat(v0.clone()), + crate::Expr::Repeat(v0) => crate::Expr::Repeat(v0.clone()), #[cfg(feature = "full")] - Expr::Return(v0) => Expr::Return(v0.clone()), + crate::Expr::Return(v0) => crate::Expr::Return(v0.clone()), + crate::Expr::Struct(v0) => crate::Expr::Struct(v0.clone()), #[cfg(feature = "full")] - Expr::Struct(v0) => Expr::Struct(v0.clone()), + crate::Expr::Try(v0) => crate::Expr::Try(v0.clone()), #[cfg(feature = "full")] - Expr::Try(v0) => Expr::Try(v0.clone()), + crate::Expr::TryBlock(v0) => crate::Expr::TryBlock(v0.clone()), + crate::Expr::Tuple(v0) => crate::Expr::Tuple(v0.clone()), + crate::Expr::Unary(v0) => crate::Expr::Unary(v0.clone()), #[cfg(feature = "full")] - Expr::TryBlock(v0) => Expr::TryBlock(v0.clone()), + crate::Expr::Unsafe(v0) => crate::Expr::Unsafe(v0.clone()), + crate::Expr::Verbatim(v0) => crate::Expr::Verbatim(v0.clone()), #[cfg(feature = "full")] - Expr::Tuple(v0) => Expr::Tuple(v0.clone()), - Expr::Unary(v0) => Expr::Unary(v0.clone()), + crate::Expr::While(v0) => crate::Expr::While(v0.clone()), #[cfg(feature = "full")] - Expr::Unsafe(v0) => Expr::Unsafe(v0.clone()), - Expr::Verbatim(v0) => Expr::Verbatim(v0.clone()), - #[cfg(feature = "full")] - Expr::While(v0) => Expr::While(v0.clone()), - #[cfg(feature = "full")] - Expr::Yield(v0) => Expr::Yield(v0.clone()), + crate::Expr::Yield(v0) => crate::Expr::Yield(v0.clone()), #[cfg(not(feature = "full"))] _ => unreachable!(), } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ExprArray { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ExprArray { fn clone(&self) -> Self { - ExprArray { + crate::ExprArray { attrs: self.attrs.clone(), bracket_token: self.bracket_token.clone(), elems: 
self.elems.clone(), @@ -311,10 +320,10 @@ impl Clone for ExprArray { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ExprAssign { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ExprAssign { fn clone(&self) -> Self { - ExprAssign { + crate::ExprAssign { attrs: self.attrs.clone(), left: self.left.clone(), eq_token: self.eq_token.clone(), @@ -323,10 +332,10 @@ impl Clone for ExprAssign { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ExprAsync { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ExprAsync { fn clone(&self) -> Self { - ExprAsync { + crate::ExprAsync { attrs: self.attrs.clone(), async_token: self.async_token.clone(), capture: self.capture.clone(), @@ -335,10 +344,10 @@ impl Clone for ExprAsync { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ExprAwait { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ExprAwait { fn clone(&self) -> Self { - ExprAwait { + crate::ExprAwait { attrs: self.attrs.clone(), base: self.base.clone(), dot_token: self.dot_token.clone(), @@ -347,10 +356,10 @@ impl Clone for ExprAwait { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ExprBinary { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ExprBinary { fn clone(&self) -> Self { - ExprBinary { + crate::ExprBinary { attrs: self.attrs.clone(), left: self.left.clone(), op: self.op.clone(), @@ -359,10 +368,10 @@ impl Clone for ExprBinary { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ExprBlock { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ExprBlock { fn clone(&self) -> Self { - ExprBlock { + crate::ExprBlock { attrs: self.attrs.clone(), label: self.label.clone(), block: self.block.clone(), @@ -370,10 +379,10 @@ impl Clone for ExprBlock { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ExprBreak { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ExprBreak { fn clone(&self) -> Self { - ExprBreak { + crate::ExprBreak { attrs: self.attrs.clone(), break_token: self.break_token.clone(), label: self.label.clone(), @@ -382,10 +391,10 @@ impl Clone for ExprBreak { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ExprCall { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ExprCall { fn clone(&self) -> Self { - ExprCall { + crate::ExprCall { attrs: self.attrs.clone(), func: self.func.clone(), paren_token: self.paren_token.clone(), @@ -394,10 +403,10 @@ impl Clone for ExprCall { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ExprCast { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ExprCast { fn clone(&self) -> Self { - ExprCast { + crate::ExprCast { attrs: self.attrs.clone(), expr: self.expr.clone(), as_token: self.as_token.clone(), @@ -406,10 +415,10 @@ impl Clone for ExprCast { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ExprClosure { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for 
crate::ExprClosure { fn clone(&self) -> Self { - ExprClosure { + crate::ExprClosure { attrs: self.attrs.clone(), lifetimes: self.lifetimes.clone(), constness: self.constness.clone(), @@ -425,10 +434,10 @@ impl Clone for ExprClosure { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ExprConst { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ExprConst { fn clone(&self) -> Self { - ExprConst { + crate::ExprConst { attrs: self.attrs.clone(), const_token: self.const_token.clone(), block: self.block.clone(), @@ -436,10 +445,10 @@ impl Clone for ExprConst { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ExprContinue { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ExprContinue { fn clone(&self) -> Self { - ExprContinue { + crate::ExprContinue { attrs: self.attrs.clone(), continue_token: self.continue_token.clone(), label: self.label.clone(), @@ -447,10 +456,10 @@ impl Clone for ExprContinue { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ExprField { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ExprField { fn clone(&self) -> Self { - ExprField { + crate::ExprField { attrs: self.attrs.clone(), base: self.base.clone(), dot_token: self.dot_token.clone(), @@ -459,10 +468,10 @@ impl Clone for ExprField { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ExprForLoop { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ExprForLoop { fn clone(&self) -> Self { - ExprForLoop { + crate::ExprForLoop { attrs: self.attrs.clone(), label: self.label.clone(), for_token: self.for_token.clone(), @@ -474,10 +483,10 @@ impl Clone for ExprForLoop { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ExprGroup { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ExprGroup { fn clone(&self) -> Self { - ExprGroup { + crate::ExprGroup { attrs: self.attrs.clone(), group_token: self.group_token.clone(), expr: self.expr.clone(), @@ -485,10 +494,10 @@ impl Clone for ExprGroup { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ExprIf { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ExprIf { fn clone(&self) -> Self { - ExprIf { + crate::ExprIf { attrs: self.attrs.clone(), if_token: self.if_token.clone(), cond: self.cond.clone(), @@ -498,10 +507,10 @@ impl Clone for ExprIf { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ExprIndex { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ExprIndex { fn clone(&self) -> Self { - ExprIndex { + crate::ExprIndex { attrs: self.attrs.clone(), expr: self.expr.clone(), bracket_token: self.bracket_token.clone(), @@ -510,20 +519,20 @@ impl Clone for ExprIndex { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ExprInfer { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ExprInfer { fn clone(&self) -> Self { - ExprInfer { + crate::ExprInfer { attrs: self.attrs.clone(), underscore_token: self.underscore_token.clone(), } } } #[cfg(feature = "full")] 
-#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ExprLet { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ExprLet { fn clone(&self) -> Self { - ExprLet { + crate::ExprLet { attrs: self.attrs.clone(), let_token: self.let_token.clone(), pat: self.pat.clone(), @@ -533,20 +542,20 @@ impl Clone for ExprLet { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ExprLit { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ExprLit { fn clone(&self) -> Self { - ExprLit { + crate::ExprLit { attrs: self.attrs.clone(), lit: self.lit.clone(), } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ExprLoop { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ExprLoop { fn clone(&self) -> Self { - ExprLoop { + crate::ExprLoop { attrs: self.attrs.clone(), label: self.label.clone(), loop_token: self.loop_token.clone(), @@ -555,20 +564,20 @@ impl Clone for ExprLoop { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ExprMacro { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ExprMacro { fn clone(&self) -> Self { - ExprMacro { + crate::ExprMacro { attrs: self.attrs.clone(), mac: self.mac.clone(), } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ExprMatch { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ExprMatch { fn clone(&self) -> Self { - ExprMatch { + crate::ExprMatch { attrs: self.attrs.clone(), match_token: self.match_token.clone(), expr: self.expr.clone(), @@ -577,11 +586,11 @@ impl Clone for ExprMatch { } } } -#[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ExprMethodCall { +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ExprMethodCall { fn clone(&self) -> Self { - ExprMethodCall { + crate::ExprMethodCall { attrs: self.attrs.clone(), receiver: self.receiver.clone(), dot_token: self.dot_token.clone(), @@ -593,10 +602,10 @@ impl Clone for ExprMethodCall { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ExprParen { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ExprParen { fn clone(&self) -> Self { - ExprParen { + crate::ExprParen { attrs: self.attrs.clone(), paren_token: self.paren_token.clone(), expr: self.expr.clone(), @@ -604,10 +613,10 @@ impl Clone for ExprParen { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ExprPath { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ExprPath { fn clone(&self) -> Self { - ExprPath { + crate::ExprPath { attrs: self.attrs.clone(), qself: self.qself.clone(), path: self.path.clone(), @@ -615,10 +624,10 @@ impl Clone for ExprPath { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ExprRange { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ExprRange { fn clone(&self) -> Self { - ExprRange { + crate::ExprRange { attrs: self.attrs.clone(), start: self.start.clone(), limits: self.limits.clone(), @@ -627,10 +636,23 @@ 
impl Clone for ExprRange { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ExprReference { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ExprRawAddr { + fn clone(&self) -> Self { + crate::ExprRawAddr { + attrs: self.attrs.clone(), + and_token: self.and_token.clone(), + raw: self.raw.clone(), + mutability: self.mutability.clone(), + expr: self.expr.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ExprReference { fn clone(&self) -> Self { - ExprReference { + crate::ExprReference { attrs: self.attrs.clone(), and_token: self.and_token.clone(), mutability: self.mutability.clone(), @@ -639,10 +661,10 @@ impl Clone for ExprReference { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ExprRepeat { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ExprRepeat { fn clone(&self) -> Self { - ExprRepeat { + crate::ExprRepeat { attrs: self.attrs.clone(), bracket_token: self.bracket_token.clone(), expr: self.expr.clone(), @@ -652,21 +674,21 @@ impl Clone for ExprRepeat { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ExprReturn { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ExprReturn { fn clone(&self) -> Self { - ExprReturn { + crate::ExprReturn { attrs: self.attrs.clone(), return_token: self.return_token.clone(), expr: self.expr.clone(), } } } -#[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ExprStruct { +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ExprStruct { fn clone(&self) -> Self { - ExprStruct { + crate::ExprStruct { attrs: self.attrs.clone(), qself: self.qself.clone(), path: self.path.clone(), @@ -678,10 +700,10 @@ impl Clone for ExprStruct { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ExprTry { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ExprTry { fn clone(&self) -> Self { - ExprTry { + crate::ExprTry { attrs: self.attrs.clone(), expr: self.expr.clone(), question_token: self.question_token.clone(), @@ -689,21 +711,21 @@ impl Clone for ExprTry { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ExprTryBlock { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ExprTryBlock { fn clone(&self) -> Self { - ExprTryBlock { + crate::ExprTryBlock { attrs: self.attrs.clone(), try_token: self.try_token.clone(), block: self.block.clone(), } } } -#[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ExprTuple { +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ExprTuple { fn clone(&self) -> Self { - ExprTuple { + crate::ExprTuple { attrs: self.attrs.clone(), paren_token: self.paren_token.clone(), elems: self.elems.clone(), @@ -711,10 +733,10 @@ impl Clone for ExprTuple { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ExprUnary { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ExprUnary { fn clone(&self) -> Self { - ExprUnary 
{ + crate::ExprUnary { attrs: self.attrs.clone(), op: self.op.clone(), expr: self.expr.clone(), @@ -722,10 +744,10 @@ impl Clone for ExprUnary { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ExprUnsafe { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ExprUnsafe { fn clone(&self) -> Self { - ExprUnsafe { + crate::ExprUnsafe { attrs: self.attrs.clone(), unsafe_token: self.unsafe_token.clone(), block: self.block.clone(), @@ -733,10 +755,10 @@ impl Clone for ExprUnsafe { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ExprWhile { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ExprWhile { fn clone(&self) -> Self { - ExprWhile { + crate::ExprWhile { attrs: self.attrs.clone(), label: self.label.clone(), while_token: self.while_token.clone(), @@ -746,10 +768,10 @@ impl Clone for ExprWhile { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ExprYield { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ExprYield { fn clone(&self) -> Self { - ExprYield { + crate::ExprYield { attrs: self.attrs.clone(), yield_token: self.yield_token.clone(), expr: self.expr.clone(), @@ -757,10 +779,10 @@ impl Clone for ExprYield { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for Field { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::Field { fn clone(&self) -> Self { - Field { + crate::Field { attrs: self.attrs.clone(), vis: self.vis.clone(), mutability: self.mutability.clone(), @@ -771,19 +793,19 @@ impl Clone for Field { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for FieldMutability { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::FieldMutability { fn clone(&self) -> Self { match self { - FieldMutability::None => FieldMutability::None, + crate::FieldMutability::None => crate::FieldMutability::None, } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for FieldPat { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::FieldPat { fn clone(&self) -> Self { - FieldPat { + crate::FieldPat { attrs: self.attrs.clone(), member: self.member.clone(), colon_token: self.colon_token.clone(), @@ -791,11 +813,11 @@ impl Clone for FieldPat { } } } -#[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for FieldValue { +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::FieldValue { fn clone(&self) -> Self { - FieldValue { + crate::FieldValue { attrs: self.attrs.clone(), member: self.member.clone(), colon_token: self.colon_token.clone(), @@ -804,41 +826,41 @@ impl Clone for FieldValue { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for Fields { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::Fields { fn clone(&self) -> Self { match self { - Fields::Named(v0) => Fields::Named(v0.clone()), - Fields::Unnamed(v0) => Fields::Unnamed(v0.clone()), - Fields::Unit => Fields::Unit, + crate::Fields::Named(v0) => crate::Fields::Named(v0.clone()), + crate::Fields::Unnamed(v0) => 
crate::Fields::Unnamed(v0.clone()), + crate::Fields::Unit => crate::Fields::Unit, } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for FieldsNamed { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::FieldsNamed { fn clone(&self) -> Self { - FieldsNamed { + crate::FieldsNamed { brace_token: self.brace_token.clone(), named: self.named.clone(), } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for FieldsUnnamed { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::FieldsUnnamed { fn clone(&self) -> Self { - FieldsUnnamed { + crate::FieldsUnnamed { paren_token: self.paren_token.clone(), unnamed: self.unnamed.clone(), } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for File { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::File { fn clone(&self) -> Self { - File { + crate::File { shebang: self.shebang.clone(), attrs: self.attrs.clone(), items: self.items.clone(), @@ -846,33 +868,33 @@ impl Clone for File { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for FnArg { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::FnArg { fn clone(&self) -> Self { match self { - FnArg::Receiver(v0) => FnArg::Receiver(v0.clone()), - FnArg::Typed(v0) => FnArg::Typed(v0.clone()), + crate::FnArg::Receiver(v0) => crate::FnArg::Receiver(v0.clone()), + crate::FnArg::Typed(v0) => crate::FnArg::Typed(v0.clone()), } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ForeignItem { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ForeignItem { fn clone(&self) -> Self { match self { - ForeignItem::Fn(v0) => ForeignItem::Fn(v0.clone()), - ForeignItem::Static(v0) => ForeignItem::Static(v0.clone()), - ForeignItem::Type(v0) => ForeignItem::Type(v0.clone()), - ForeignItem::Macro(v0) => ForeignItem::Macro(v0.clone()), - ForeignItem::Verbatim(v0) => ForeignItem::Verbatim(v0.clone()), + crate::ForeignItem::Fn(v0) => crate::ForeignItem::Fn(v0.clone()), + crate::ForeignItem::Static(v0) => crate::ForeignItem::Static(v0.clone()), + crate::ForeignItem::Type(v0) => crate::ForeignItem::Type(v0.clone()), + crate::ForeignItem::Macro(v0) => crate::ForeignItem::Macro(v0.clone()), + crate::ForeignItem::Verbatim(v0) => crate::ForeignItem::Verbatim(v0.clone()), } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ForeignItemFn { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ForeignItemFn { fn clone(&self) -> Self { - ForeignItemFn { + crate::ForeignItemFn { attrs: self.attrs.clone(), vis: self.vis.clone(), sig: self.sig.clone(), @@ -881,10 +903,10 @@ impl Clone for ForeignItemFn { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ForeignItemMacro { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ForeignItemMacro { fn clone(&self) -> Self { - ForeignItemMacro { + crate::ForeignItemMacro { attrs: self.attrs.clone(), mac: self.mac.clone(), semi_token: self.semi_token.clone(), @@ -892,10 +914,10 @@ impl Clone for ForeignItemMacro { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for 
ForeignItemStatic { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ForeignItemStatic { fn clone(&self) -> Self { - ForeignItemStatic { + crate::ForeignItemStatic { attrs: self.attrs.clone(), vis: self.vis.clone(), static_token: self.static_token.clone(), @@ -908,10 +930,10 @@ impl Clone for ForeignItemStatic { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ForeignItemType { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ForeignItemType { fn clone(&self) -> Self { - ForeignItemType { + crate::ForeignItemType { attrs: self.attrs.clone(), vis: self.vis.clone(), type_token: self.type_token.clone(), @@ -922,35 +944,47 @@ impl Clone for ForeignItemType { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for GenericArgument { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::GenericArgument { fn clone(&self) -> Self { match self { - GenericArgument::Lifetime(v0) => GenericArgument::Lifetime(v0.clone()), - GenericArgument::Type(v0) => GenericArgument::Type(v0.clone()), - GenericArgument::Const(v0) => GenericArgument::Const(v0.clone()), - GenericArgument::AssocType(v0) => GenericArgument::AssocType(v0.clone()), - GenericArgument::AssocConst(v0) => GenericArgument::AssocConst(v0.clone()), - GenericArgument::Constraint(v0) => GenericArgument::Constraint(v0.clone()), + crate::GenericArgument::Lifetime(v0) => { + crate::GenericArgument::Lifetime(v0.clone()) + } + crate::GenericArgument::Type(v0) => crate::GenericArgument::Type(v0.clone()), + crate::GenericArgument::Const(v0) => { + crate::GenericArgument::Const(v0.clone()) + } + crate::GenericArgument::AssocType(v0) => { + crate::GenericArgument::AssocType(v0.clone()) + } + crate::GenericArgument::AssocConst(v0) => { + crate::GenericArgument::AssocConst(v0.clone()) + } + crate::GenericArgument::Constraint(v0) => { + crate::GenericArgument::Constraint(v0.clone()) + } } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for GenericParam { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::GenericParam { fn clone(&self) -> Self { match self { - GenericParam::Lifetime(v0) => GenericParam::Lifetime(v0.clone()), - GenericParam::Type(v0) => GenericParam::Type(v0.clone()), - GenericParam::Const(v0) => GenericParam::Const(v0.clone()), + crate::GenericParam::Lifetime(v0) => { + crate::GenericParam::Lifetime(v0.clone()) + } + crate::GenericParam::Type(v0) => crate::GenericParam::Type(v0.clone()), + crate::GenericParam::Const(v0) => crate::GenericParam::Const(v0.clone()), } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for Generics { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::Generics { fn clone(&self) -> Self { - Generics { + crate::Generics { lt_token: self.lt_token.clone(), params: self.params.clone(), gt_token: self.gt_token.clone(), @@ -959,23 +993,23 @@ impl Clone for Generics { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ImplItem { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ImplItem { fn clone(&self) -> Self { match self { - ImplItem::Const(v0) => ImplItem::Const(v0.clone()), - ImplItem::Fn(v0) => ImplItem::Fn(v0.clone()), - 
ImplItem::Type(v0) => ImplItem::Type(v0.clone()), - ImplItem::Macro(v0) => ImplItem::Macro(v0.clone()), - ImplItem::Verbatim(v0) => ImplItem::Verbatim(v0.clone()), + crate::ImplItem::Const(v0) => crate::ImplItem::Const(v0.clone()), + crate::ImplItem::Fn(v0) => crate::ImplItem::Fn(v0.clone()), + crate::ImplItem::Type(v0) => crate::ImplItem::Type(v0.clone()), + crate::ImplItem::Macro(v0) => crate::ImplItem::Macro(v0.clone()), + crate::ImplItem::Verbatim(v0) => crate::ImplItem::Verbatim(v0.clone()), } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ImplItemConst { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ImplItemConst { fn clone(&self) -> Self { - ImplItemConst { + crate::ImplItemConst { attrs: self.attrs.clone(), vis: self.vis.clone(), defaultness: self.defaultness.clone(), @@ -991,10 +1025,10 @@ impl Clone for ImplItemConst { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ImplItemFn { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ImplItemFn { fn clone(&self) -> Self { - ImplItemFn { + crate::ImplItemFn { attrs: self.attrs.clone(), vis: self.vis.clone(), defaultness: self.defaultness.clone(), @@ -1004,10 +1038,10 @@ impl Clone for ImplItemFn { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ImplItemMacro { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ImplItemMacro { fn clone(&self) -> Self { - ImplItemMacro { + crate::ImplItemMacro { attrs: self.attrs.clone(), mac: self.mac.clone(), semi_token: self.semi_token.clone(), @@ -1015,10 +1049,10 @@ impl Clone for ImplItemMacro { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ImplItemType { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ImplItemType { fn clone(&self) -> Self { - ImplItemType { + crate::ImplItemType { attrs: self.attrs.clone(), vis: self.vis.clone(), defaultness: self.defaultness.clone(), @@ -1032,51 +1066,51 @@ impl Clone for ImplItemType { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ImplRestriction { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ImplRestriction { fn clone(&self) -> Self { match *self {} } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for Index { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::Index { fn clone(&self) -> Self { - Index { + crate::Index { index: self.index.clone(), span: self.span.clone(), } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for Item { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::Item { fn clone(&self) -> Self { match self { - Item::Const(v0) => Item::Const(v0.clone()), - Item::Enum(v0) => Item::Enum(v0.clone()), - Item::ExternCrate(v0) => Item::ExternCrate(v0.clone()), - Item::Fn(v0) => Item::Fn(v0.clone()), - Item::ForeignMod(v0) => Item::ForeignMod(v0.clone()), - Item::Impl(v0) => Item::Impl(v0.clone()), - Item::Macro(v0) => Item::Macro(v0.clone()), - Item::Mod(v0) => Item::Mod(v0.clone()), - Item::Static(v0) => Item::Static(v0.clone()), - Item::Struct(v0) => Item::Struct(v0.clone()), - Item::Trait(v0) => Item::Trait(v0.clone()), - Item::TraitAlias(v0) => 
Item::TraitAlias(v0.clone()), - Item::Type(v0) => Item::Type(v0.clone()), - Item::Union(v0) => Item::Union(v0.clone()), - Item::Use(v0) => Item::Use(v0.clone()), - Item::Verbatim(v0) => Item::Verbatim(v0.clone()), + crate::Item::Const(v0) => crate::Item::Const(v0.clone()), + crate::Item::Enum(v0) => crate::Item::Enum(v0.clone()), + crate::Item::ExternCrate(v0) => crate::Item::ExternCrate(v0.clone()), + crate::Item::Fn(v0) => crate::Item::Fn(v0.clone()), + crate::Item::ForeignMod(v0) => crate::Item::ForeignMod(v0.clone()), + crate::Item::Impl(v0) => crate::Item::Impl(v0.clone()), + crate::Item::Macro(v0) => crate::Item::Macro(v0.clone()), + crate::Item::Mod(v0) => crate::Item::Mod(v0.clone()), + crate::Item::Static(v0) => crate::Item::Static(v0.clone()), + crate::Item::Struct(v0) => crate::Item::Struct(v0.clone()), + crate::Item::Trait(v0) => crate::Item::Trait(v0.clone()), + crate::Item::TraitAlias(v0) => crate::Item::TraitAlias(v0.clone()), + crate::Item::Type(v0) => crate::Item::Type(v0.clone()), + crate::Item::Union(v0) => crate::Item::Union(v0.clone()), + crate::Item::Use(v0) => crate::Item::Use(v0.clone()), + crate::Item::Verbatim(v0) => crate::Item::Verbatim(v0.clone()), } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ItemConst { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ItemConst { fn clone(&self) -> Self { - ItemConst { + crate::ItemConst { attrs: self.attrs.clone(), vis: self.vis.clone(), const_token: self.const_token.clone(), @@ -1091,10 +1125,10 @@ impl Clone for ItemConst { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ItemEnum { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ItemEnum { fn clone(&self) -> Self { - ItemEnum { + crate::ItemEnum { attrs: self.attrs.clone(), vis: self.vis.clone(), enum_token: self.enum_token.clone(), @@ -1106,10 +1140,10 @@ impl Clone for ItemEnum { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ItemExternCrate { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ItemExternCrate { fn clone(&self) -> Self { - ItemExternCrate { + crate::ItemExternCrate { attrs: self.attrs.clone(), vis: self.vis.clone(), extern_token: self.extern_token.clone(), @@ -1121,10 +1155,10 @@ impl Clone for ItemExternCrate { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ItemFn { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ItemFn { fn clone(&self) -> Self { - ItemFn { + crate::ItemFn { attrs: self.attrs.clone(), vis: self.vis.clone(), sig: self.sig.clone(), @@ -1133,10 +1167,10 @@ impl Clone for ItemFn { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ItemForeignMod { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ItemForeignMod { fn clone(&self) -> Self { - ItemForeignMod { + crate::ItemForeignMod { attrs: self.attrs.clone(), unsafety: self.unsafety.clone(), abi: self.abi.clone(), @@ -1146,10 +1180,10 @@ impl Clone for ItemForeignMod { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ItemImpl { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ItemImpl { fn clone(&self) -> Self { - ItemImpl { + crate::ItemImpl { attrs: self.attrs.clone(), defaultness: 
self.defaultness.clone(), unsafety: self.unsafety.clone(), @@ -1163,10 +1197,10 @@ impl Clone for ItemImpl { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ItemMacro { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ItemMacro { fn clone(&self) -> Self { - ItemMacro { + crate::ItemMacro { attrs: self.attrs.clone(), ident: self.ident.clone(), mac: self.mac.clone(), @@ -1175,10 +1209,10 @@ impl Clone for ItemMacro { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ItemMod { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ItemMod { fn clone(&self) -> Self { - ItemMod { + crate::ItemMod { attrs: self.attrs.clone(), vis: self.vis.clone(), unsafety: self.unsafety.clone(), @@ -1190,10 +1224,10 @@ impl Clone for ItemMod { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ItemStatic { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ItemStatic { fn clone(&self) -> Self { - ItemStatic { + crate::ItemStatic { attrs: self.attrs.clone(), vis: self.vis.clone(), static_token: self.static_token.clone(), @@ -1208,10 +1242,10 @@ impl Clone for ItemStatic { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ItemStruct { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ItemStruct { fn clone(&self) -> Self { - ItemStruct { + crate::ItemStruct { attrs: self.attrs.clone(), vis: self.vis.clone(), struct_token: self.struct_token.clone(), @@ -1223,10 +1257,10 @@ impl Clone for ItemStruct { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ItemTrait { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ItemTrait { fn clone(&self) -> Self { - ItemTrait { + crate::ItemTrait { attrs: self.attrs.clone(), vis: self.vis.clone(), unsafety: self.unsafety.clone(), @@ -1243,10 +1277,10 @@ impl Clone for ItemTrait { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ItemTraitAlias { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ItemTraitAlias { fn clone(&self) -> Self { - ItemTraitAlias { + crate::ItemTraitAlias { attrs: self.attrs.clone(), vis: self.vis.clone(), trait_token: self.trait_token.clone(), @@ -1259,10 +1293,10 @@ impl Clone for ItemTraitAlias { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ItemType { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ItemType { fn clone(&self) -> Self { - ItemType { + crate::ItemType { attrs: self.attrs.clone(), vis: self.vis.clone(), type_token: self.type_token.clone(), @@ -1275,10 +1309,10 @@ impl Clone for ItemType { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ItemUnion { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ItemUnion { fn clone(&self) -> Self { - ItemUnion { + crate::ItemUnion { attrs: self.attrs.clone(), vis: self.vis.clone(), union_token: self.union_token.clone(), @@ -1289,10 +1323,10 @@ impl Clone for ItemUnion { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ItemUse { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ItemUse 
{ fn clone(&self) -> Self { - ItemUse { + crate::ItemUse { attrs: self.attrs.clone(), vis: self.vis.clone(), use_token: self.use_token.clone(), @@ -1303,20 +1337,20 @@ impl Clone for ItemUse { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for Label { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::Label { fn clone(&self) -> Self { - Label { + crate::Label { name: self.name.clone(), colon_token: self.colon_token.clone(), } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for LifetimeParam { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::LifetimeParam { fn clone(&self) -> Self { - LifetimeParam { + crate::LifetimeParam { attrs: self.attrs.clone(), lifetime: self.lifetime.clone(), colon_token: self.colon_token.clone(), @@ -1324,35 +1358,36 @@ impl Clone for LifetimeParam { } } } -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for Lit { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::Lit { fn clone(&self) -> Self { match self { - Lit::Str(v0) => Lit::Str(v0.clone()), - Lit::ByteStr(v0) => Lit::ByteStr(v0.clone()), - Lit::Byte(v0) => Lit::Byte(v0.clone()), - Lit::Char(v0) => Lit::Char(v0.clone()), - Lit::Int(v0) => Lit::Int(v0.clone()), - Lit::Float(v0) => Lit::Float(v0.clone()), - Lit::Bool(v0) => Lit::Bool(v0.clone()), - Lit::Verbatim(v0) => Lit::Verbatim(v0.clone()), + crate::Lit::Str(v0) => crate::Lit::Str(v0.clone()), + crate::Lit::ByteStr(v0) => crate::Lit::ByteStr(v0.clone()), + crate::Lit::CStr(v0) => crate::Lit::CStr(v0.clone()), + crate::Lit::Byte(v0) => crate::Lit::Byte(v0.clone()), + crate::Lit::Char(v0) => crate::Lit::Char(v0.clone()), + crate::Lit::Int(v0) => crate::Lit::Int(v0.clone()), + crate::Lit::Float(v0) => crate::Lit::Float(v0.clone()), + crate::Lit::Bool(v0) => crate::Lit::Bool(v0.clone()), + crate::Lit::Verbatim(v0) => crate::Lit::Verbatim(v0.clone()), } } } -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for LitBool { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::LitBool { fn clone(&self) -> Self { - LitBool { + crate::LitBool { value: self.value.clone(), span: self.span.clone(), } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for Local { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::Local { fn clone(&self) -> Self { - Local { + crate::Local { attrs: self.attrs.clone(), let_token: self.let_token.clone(), pat: self.pat.clone(), @@ -1362,10 +1397,10 @@ impl Clone for Local { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for LocalInit { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::LocalInit { fn clone(&self) -> Self { - LocalInit { + crate::LocalInit { eq_token: self.eq_token.clone(), expr: self.expr.clone(), diverge: self.diverge.clone(), @@ -1373,10 +1408,10 @@ impl Clone for LocalInit { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for Macro { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::Macro { fn clone(&self) -> Self { - Macro { + crate::Macro { path: self.path.clone(), bang_token: self.bang_token.clone(), delimiter: self.delimiter.clone(), @@ -1385,42 +1420,44 @@ impl Clone for Macro { } } 
#[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for MacroDelimiter { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::MacroDelimiter { fn clone(&self) -> Self { match self { - MacroDelimiter::Paren(v0) => MacroDelimiter::Paren(v0.clone()), - MacroDelimiter::Brace(v0) => MacroDelimiter::Brace(v0.clone()), - MacroDelimiter::Bracket(v0) => MacroDelimiter::Bracket(v0.clone()), + crate::MacroDelimiter::Paren(v0) => crate::MacroDelimiter::Paren(v0.clone()), + crate::MacroDelimiter::Brace(v0) => crate::MacroDelimiter::Brace(v0.clone()), + crate::MacroDelimiter::Bracket(v0) => { + crate::MacroDelimiter::Bracket(v0.clone()) + } } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for Member { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::Member { fn clone(&self) -> Self { match self { - Member::Named(v0) => Member::Named(v0.clone()), - Member::Unnamed(v0) => Member::Unnamed(v0.clone()), + crate::Member::Named(v0) => crate::Member::Named(v0.clone()), + crate::Member::Unnamed(v0) => crate::Member::Unnamed(v0.clone()), } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for Meta { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::Meta { fn clone(&self) -> Self { match self { - Meta::Path(v0) => Meta::Path(v0.clone()), - Meta::List(v0) => Meta::List(v0.clone()), - Meta::NameValue(v0) => Meta::NameValue(v0.clone()), + crate::Meta::Path(v0) => crate::Meta::Path(v0.clone()), + crate::Meta::List(v0) => crate::Meta::List(v0.clone()), + crate::Meta::NameValue(v0) => crate::Meta::NameValue(v0.clone()), } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for MetaList { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::MetaList { fn clone(&self) -> Self { - MetaList { + crate::MetaList { path: self.path.clone(), delimiter: self.delimiter.clone(), tokens: self.tokens.clone(), @@ -1428,10 +1465,10 @@ impl Clone for MetaList { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for MetaNameValue { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::MetaNameValue { fn clone(&self) -> Self { - MetaNameValue { + crate::MetaNameValue { path: self.path.clone(), eq_token: self.eq_token.clone(), value: self.value.clone(), @@ -1439,10 +1476,10 @@ impl Clone for MetaNameValue { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ParenthesizedGenericArguments { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ParenthesizedGenericArguments { fn clone(&self) -> Self { - ParenthesizedGenericArguments { + crate::ParenthesizedGenericArguments { paren_token: self.paren_token.clone(), inputs: self.inputs.clone(), output: self.output.clone(), @@ -1450,35 +1487,35 @@ impl Clone for ParenthesizedGenericArguments { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for Pat { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::Pat { fn clone(&self) -> Self { match self { - Pat::Const(v0) => Pat::Const(v0.clone()), - Pat::Ident(v0) => Pat::Ident(v0.clone()), - 
Pat::Lit(v0) => Pat::Lit(v0.clone()), - Pat::Macro(v0) => Pat::Macro(v0.clone()), - Pat::Or(v0) => Pat::Or(v0.clone()), - Pat::Paren(v0) => Pat::Paren(v0.clone()), - Pat::Path(v0) => Pat::Path(v0.clone()), - Pat::Range(v0) => Pat::Range(v0.clone()), - Pat::Reference(v0) => Pat::Reference(v0.clone()), - Pat::Rest(v0) => Pat::Rest(v0.clone()), - Pat::Slice(v0) => Pat::Slice(v0.clone()), - Pat::Struct(v0) => Pat::Struct(v0.clone()), - Pat::Tuple(v0) => Pat::Tuple(v0.clone()), - Pat::TupleStruct(v0) => Pat::TupleStruct(v0.clone()), - Pat::Type(v0) => Pat::Type(v0.clone()), - Pat::Verbatim(v0) => Pat::Verbatim(v0.clone()), - Pat::Wild(v0) => Pat::Wild(v0.clone()), + crate::Pat::Const(v0) => crate::Pat::Const(v0.clone()), + crate::Pat::Ident(v0) => crate::Pat::Ident(v0.clone()), + crate::Pat::Lit(v0) => crate::Pat::Lit(v0.clone()), + crate::Pat::Macro(v0) => crate::Pat::Macro(v0.clone()), + crate::Pat::Or(v0) => crate::Pat::Or(v0.clone()), + crate::Pat::Paren(v0) => crate::Pat::Paren(v0.clone()), + crate::Pat::Path(v0) => crate::Pat::Path(v0.clone()), + crate::Pat::Range(v0) => crate::Pat::Range(v0.clone()), + crate::Pat::Reference(v0) => crate::Pat::Reference(v0.clone()), + crate::Pat::Rest(v0) => crate::Pat::Rest(v0.clone()), + crate::Pat::Slice(v0) => crate::Pat::Slice(v0.clone()), + crate::Pat::Struct(v0) => crate::Pat::Struct(v0.clone()), + crate::Pat::Tuple(v0) => crate::Pat::Tuple(v0.clone()), + crate::Pat::TupleStruct(v0) => crate::Pat::TupleStruct(v0.clone()), + crate::Pat::Type(v0) => crate::Pat::Type(v0.clone()), + crate::Pat::Verbatim(v0) => crate::Pat::Verbatim(v0.clone()), + crate::Pat::Wild(v0) => crate::Pat::Wild(v0.clone()), } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for PatIdent { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::PatIdent { fn clone(&self) -> Self { - PatIdent { + crate::PatIdent { attrs: self.attrs.clone(), by_ref: self.by_ref.clone(), mutability: self.mutability.clone(), @@ -1488,10 +1525,10 @@ impl Clone for PatIdent { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for PatOr { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::PatOr { fn clone(&self) -> Self { - PatOr { + crate::PatOr { attrs: self.attrs.clone(), leading_vert: self.leading_vert.clone(), cases: self.cases.clone(), @@ -1499,10 +1536,10 @@ impl Clone for PatOr { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for PatParen { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::PatParen { fn clone(&self) -> Self { - PatParen { + crate::PatParen { attrs: self.attrs.clone(), paren_token: self.paren_token.clone(), pat: self.pat.clone(), @@ -1510,10 +1547,10 @@ impl Clone for PatParen { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for PatReference { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::PatReference { fn clone(&self) -> Self { - PatReference { + crate::PatReference { attrs: self.attrs.clone(), and_token: self.and_token.clone(), mutability: self.mutability.clone(), @@ -1522,20 +1559,20 @@ impl Clone for PatReference { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for PatRest { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::PatRest { fn clone(&self) -> Self { - PatRest { + crate::PatRest { 
attrs: self.attrs.clone(), dot2_token: self.dot2_token.clone(), } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for PatSlice { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::PatSlice { fn clone(&self) -> Self { - PatSlice { + crate::PatSlice { attrs: self.attrs.clone(), bracket_token: self.bracket_token.clone(), elems: self.elems.clone(), @@ -1543,10 +1580,10 @@ impl Clone for PatSlice { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for PatStruct { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::PatStruct { fn clone(&self) -> Self { - PatStruct { + crate::PatStruct { attrs: self.attrs.clone(), qself: self.qself.clone(), path: self.path.clone(), @@ -1557,10 +1594,10 @@ impl Clone for PatStruct { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for PatTuple { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::PatTuple { fn clone(&self) -> Self { - PatTuple { + crate::PatTuple { attrs: self.attrs.clone(), paren_token: self.paren_token.clone(), elems: self.elems.clone(), @@ -1568,10 +1605,10 @@ impl Clone for PatTuple { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for PatTupleStruct { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::PatTupleStruct { fn clone(&self) -> Self { - PatTupleStruct { + crate::PatTupleStruct { attrs: self.attrs.clone(), qself: self.qself.clone(), path: self.path.clone(), @@ -1581,10 +1618,10 @@ impl Clone for PatTupleStruct { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for PatType { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::PatType { fn clone(&self) -> Self { - PatType { + crate::PatType { attrs: self.attrs.clone(), pat: self.pat.clone(), colon_token: self.colon_token.clone(), @@ -1593,53 +1630,81 @@ impl Clone for PatType { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for PatWild { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::PatWild { fn clone(&self) -> Self { - PatWild { + crate::PatWild { attrs: self.attrs.clone(), underscore_token: self.underscore_token.clone(), } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for Path { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::Path { fn clone(&self) -> Self { - Path { + crate::Path { leading_colon: self.leading_colon.clone(), segments: self.segments.clone(), } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for PathArguments { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::PathArguments { fn clone(&self) -> Self { match self { - PathArguments::None => PathArguments::None, - PathArguments::AngleBracketed(v0) => { - PathArguments::AngleBracketed(v0.clone()) + crate::PathArguments::None => crate::PathArguments::None, + crate::PathArguments::AngleBracketed(v0) => { + crate::PathArguments::AngleBracketed(v0.clone()) + } + crate::PathArguments::Parenthesized(v0) => { + crate::PathArguments::Parenthesized(v0.clone()) } - PathArguments::Parenthesized(v0) => PathArguments::Parenthesized(v0.clone()), } } } #[cfg(any(feature = 
"derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for PathSegment { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::PathSegment { fn clone(&self) -> Self { - PathSegment { + crate::PathSegment { ident: self.ident.clone(), arguments: self.arguments.clone(), } } } +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::PointerMutability { + fn clone(&self) -> Self { + match self { + crate::PointerMutability::Const(v0) => { + crate::PointerMutability::Const(v0.clone()) + } + crate::PointerMutability::Mut(v0) => { + crate::PointerMutability::Mut(v0.clone()) + } + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::PreciseCapture { + fn clone(&self) -> Self { + crate::PreciseCapture { + use_token: self.use_token.clone(), + lt_token: self.lt_token.clone(), + params: self.params.clone(), + gt_token: self.gt_token.clone(), + } + } +} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for PredicateLifetime { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::PredicateLifetime { fn clone(&self) -> Self { - PredicateLifetime { + crate::PredicateLifetime { lifetime: self.lifetime.clone(), colon_token: self.colon_token.clone(), bounds: self.bounds.clone(), @@ -1647,10 +1712,10 @@ impl Clone for PredicateLifetime { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for PredicateType { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::PredicateType { fn clone(&self) -> Self { - PredicateType { + crate::PredicateType { lifetimes: self.lifetimes.clone(), bounded_ty: self.bounded_ty.clone(), colon_token: self.colon_token.clone(), @@ -1659,10 +1724,10 @@ impl Clone for PredicateType { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for QSelf { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::QSelf { fn clone(&self) -> Self { - QSelf { + crate::QSelf { lt_token: self.lt_token.clone(), ty: self.ty.clone(), position: self.position.clone(), @@ -1672,20 +1737,20 @@ impl Clone for QSelf { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Copy for RangeLimits {} +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Copy for crate::RangeLimits {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for RangeLimits { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::RangeLimits { fn clone(&self) -> Self { *self } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for Receiver { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::Receiver { fn clone(&self) -> Self { - Receiver { + crate::Receiver { attrs: self.attrs.clone(), reference: self.reference.clone(), mutability: self.mutability.clone(), @@ -1696,20 +1761,22 @@ impl Clone for Receiver { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for ReturnType { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::ReturnType { fn clone(&self) -> Self { match self { - ReturnType::Default => 
ReturnType::Default, - ReturnType::Type(v0, v1) => ReturnType::Type(v0.clone(), v1.clone()), + crate::ReturnType::Default => crate::ReturnType::Default, + crate::ReturnType::Type(v0, v1) => { + crate::ReturnType::Type(v0.clone(), v1.clone()) + } } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for Signature { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::Signature { fn clone(&self) -> Self { - Signature { + crate::Signature { constness: self.constness.clone(), asyncness: self.asyncness.clone(), unsafety: self.unsafety.clone(), @@ -1725,32 +1792,32 @@ impl Clone for Signature { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for StaticMutability { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::StaticMutability { fn clone(&self) -> Self { match self { - StaticMutability::Mut(v0) => StaticMutability::Mut(v0.clone()), - StaticMutability::None => StaticMutability::None, + crate::StaticMutability::Mut(v0) => crate::StaticMutability::Mut(v0.clone()), + crate::StaticMutability::None => crate::StaticMutability::None, } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for Stmt { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::Stmt { fn clone(&self) -> Self { match self { - Stmt::Local(v0) => Stmt::Local(v0.clone()), - Stmt::Item(v0) => Stmt::Item(v0.clone()), - Stmt::Expr(v0, v1) => Stmt::Expr(v0.clone(), v1.clone()), - Stmt::Macro(v0) => Stmt::Macro(v0.clone()), + crate::Stmt::Local(v0) => crate::Stmt::Local(v0.clone()), + crate::Stmt::Item(v0) => crate::Stmt::Item(v0.clone()), + crate::Stmt::Expr(v0, v1) => crate::Stmt::Expr(v0.clone(), v1.clone()), + crate::Stmt::Macro(v0) => crate::Stmt::Macro(v0.clone()), } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for StmtMacro { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::StmtMacro { fn clone(&self) -> Self { - StmtMacro { + crate::StmtMacro { attrs: self.attrs.clone(), mac: self.mac.clone(), semi_token: self.semi_token.clone(), @@ -1758,10 +1825,10 @@ impl Clone for StmtMacro { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for TraitBound { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::TraitBound { fn clone(&self) -> Self { - TraitBound { + crate::TraitBound { paren_token: self.paren_token.clone(), modifier: self.modifier.clone(), lifetimes: self.lifetimes.clone(), @@ -1770,33 +1837,33 @@ impl Clone for TraitBound { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Copy for TraitBoundModifier {} +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Copy for crate::TraitBoundModifier {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for TraitBoundModifier { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::TraitBoundModifier { fn clone(&self) -> Self { *self } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for TraitItem { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::TraitItem { fn clone(&self) -> Self { match self { - TraitItem::Const(v0) => 
TraitItem::Const(v0.clone()), - TraitItem::Fn(v0) => TraitItem::Fn(v0.clone()), - TraitItem::Type(v0) => TraitItem::Type(v0.clone()), - TraitItem::Macro(v0) => TraitItem::Macro(v0.clone()), - TraitItem::Verbatim(v0) => TraitItem::Verbatim(v0.clone()), + crate::TraitItem::Const(v0) => crate::TraitItem::Const(v0.clone()), + crate::TraitItem::Fn(v0) => crate::TraitItem::Fn(v0.clone()), + crate::TraitItem::Type(v0) => crate::TraitItem::Type(v0.clone()), + crate::TraitItem::Macro(v0) => crate::TraitItem::Macro(v0.clone()), + crate::TraitItem::Verbatim(v0) => crate::TraitItem::Verbatim(v0.clone()), } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for TraitItemConst { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::TraitItemConst { fn clone(&self) -> Self { - TraitItemConst { + crate::TraitItemConst { attrs: self.attrs.clone(), const_token: self.const_token.clone(), ident: self.ident.clone(), @@ -1809,10 +1876,10 @@ impl Clone for TraitItemConst { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for TraitItemFn { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::TraitItemFn { fn clone(&self) -> Self { - TraitItemFn { + crate::TraitItemFn { attrs: self.attrs.clone(), sig: self.sig.clone(), default: self.default.clone(), @@ -1821,10 +1888,10 @@ impl Clone for TraitItemFn { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for TraitItemMacro { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::TraitItemMacro { fn clone(&self) -> Self { - TraitItemMacro { + crate::TraitItemMacro { attrs: self.attrs.clone(), mac: self.mac.clone(), semi_token: self.semi_token.clone(), @@ -1832,10 +1899,10 @@ impl Clone for TraitItemMacro { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for TraitItemType { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::TraitItemType { fn clone(&self) -> Self { - TraitItemType { + crate::TraitItemType { attrs: self.attrs.clone(), type_token: self.type_token.clone(), ident: self.ident.clone(), @@ -1848,33 +1915,33 @@ impl Clone for TraitItemType { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for Type { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::Type { fn clone(&self) -> Self { match self { - Type::Array(v0) => Type::Array(v0.clone()), - Type::BareFn(v0) => Type::BareFn(v0.clone()), - Type::Group(v0) => Type::Group(v0.clone()), - Type::ImplTrait(v0) => Type::ImplTrait(v0.clone()), - Type::Infer(v0) => Type::Infer(v0.clone()), - Type::Macro(v0) => Type::Macro(v0.clone()), - Type::Never(v0) => Type::Never(v0.clone()), - Type::Paren(v0) => Type::Paren(v0.clone()), - Type::Path(v0) => Type::Path(v0.clone()), - Type::Ptr(v0) => Type::Ptr(v0.clone()), - Type::Reference(v0) => Type::Reference(v0.clone()), - Type::Slice(v0) => Type::Slice(v0.clone()), - Type::TraitObject(v0) => Type::TraitObject(v0.clone()), - Type::Tuple(v0) => Type::Tuple(v0.clone()), - Type::Verbatim(v0) => Type::Verbatim(v0.clone()), + crate::Type::Array(v0) => crate::Type::Array(v0.clone()), + crate::Type::BareFn(v0) => crate::Type::BareFn(v0.clone()), + crate::Type::Group(v0) => crate::Type::Group(v0.clone()), + crate::Type::ImplTrait(v0) => crate::Type::ImplTrait(v0.clone()), + 
crate::Type::Infer(v0) => crate::Type::Infer(v0.clone()), + crate::Type::Macro(v0) => crate::Type::Macro(v0.clone()), + crate::Type::Never(v0) => crate::Type::Never(v0.clone()), + crate::Type::Paren(v0) => crate::Type::Paren(v0.clone()), + crate::Type::Path(v0) => crate::Type::Path(v0.clone()), + crate::Type::Ptr(v0) => crate::Type::Ptr(v0.clone()), + crate::Type::Reference(v0) => crate::Type::Reference(v0.clone()), + crate::Type::Slice(v0) => crate::Type::Slice(v0.clone()), + crate::Type::TraitObject(v0) => crate::Type::TraitObject(v0.clone()), + crate::Type::Tuple(v0) => crate::Type::Tuple(v0.clone()), + crate::Type::Verbatim(v0) => crate::Type::Verbatim(v0.clone()), } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for TypeArray { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::TypeArray { fn clone(&self) -> Self { - TypeArray { + crate::TypeArray { bracket_token: self.bracket_token.clone(), elem: self.elem.clone(), semi_token: self.semi_token.clone(), @@ -1883,10 +1950,10 @@ impl Clone for TypeArray { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for TypeBareFn { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::TypeBareFn { fn clone(&self) -> Self { - TypeBareFn { + crate::TypeBareFn { lifetimes: self.lifetimes.clone(), unsafety: self.unsafety.clone(), abi: self.abi.clone(), @@ -1899,55 +1966,57 @@ impl Clone for TypeBareFn { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for TypeGroup { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::TypeGroup { fn clone(&self) -> Self { - TypeGroup { + crate::TypeGroup { group_token: self.group_token.clone(), elem: self.elem.clone(), } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for TypeImplTrait { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::TypeImplTrait { fn clone(&self) -> Self { - TypeImplTrait { + crate::TypeImplTrait { impl_token: self.impl_token.clone(), bounds: self.bounds.clone(), } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for TypeInfer { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::TypeInfer { fn clone(&self) -> Self { - TypeInfer { + crate::TypeInfer { underscore_token: self.underscore_token.clone(), } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for TypeMacro { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::TypeMacro { fn clone(&self) -> Self { - TypeMacro { mac: self.mac.clone() } + crate::TypeMacro { + mac: self.mac.clone(), + } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for TypeNever { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::TypeNever { fn clone(&self) -> Self { - TypeNever { + crate::TypeNever { bang_token: self.bang_token.clone(), } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for TypeParam { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::TypeParam { fn 
clone(&self) -> Self { - TypeParam { + crate::TypeParam { attrs: self.attrs.clone(), ident: self.ident.clone(), colon_token: self.colon_token.clone(), @@ -1958,41 +2027,51 @@ impl Clone for TypeParam { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for TypeParamBound { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::TypeParamBound { fn clone(&self) -> Self { match self { - TypeParamBound::Trait(v0) => TypeParamBound::Trait(v0.clone()), - TypeParamBound::Lifetime(v0) => TypeParamBound::Lifetime(v0.clone()), - TypeParamBound::Verbatim(v0) => TypeParamBound::Verbatim(v0.clone()), + crate::TypeParamBound::Trait(v0) => crate::TypeParamBound::Trait(v0.clone()), + crate::TypeParamBound::Lifetime(v0) => { + crate::TypeParamBound::Lifetime(v0.clone()) + } + #[cfg(feature = "full")] + crate::TypeParamBound::PreciseCapture(v0) => { + crate::TypeParamBound::PreciseCapture(v0.clone()) + } + crate::TypeParamBound::Verbatim(v0) => { + crate::TypeParamBound::Verbatim(v0.clone()) + } + #[cfg(not(feature = "full"))] + _ => unreachable!(), } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for TypeParen { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::TypeParen { fn clone(&self) -> Self { - TypeParen { + crate::TypeParen { paren_token: self.paren_token.clone(), elem: self.elem.clone(), } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for TypePath { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::TypePath { fn clone(&self) -> Self { - TypePath { + crate::TypePath { qself: self.qself.clone(), path: self.path.clone(), } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for TypePtr { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::TypePtr { fn clone(&self) -> Self { - TypePtr { + crate::TypePtr { star_token: self.star_token.clone(), const_token: self.const_token.clone(), mutability: self.mutability.clone(), @@ -2001,10 +2080,10 @@ impl Clone for TypePtr { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for TypeReference { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::TypeReference { fn clone(&self) -> Self { - TypeReference { + crate::TypeReference { and_token: self.and_token.clone(), lifetime: self.lifetime.clone(), mutability: self.mutability.clone(), @@ -2013,78 +2092,78 @@ impl Clone for TypeReference { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for TypeSlice { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::TypeSlice { fn clone(&self) -> Self { - TypeSlice { + crate::TypeSlice { bracket_token: self.bracket_token.clone(), elem: self.elem.clone(), } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for TypeTraitObject { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::TypeTraitObject { fn clone(&self) -> Self { - TypeTraitObject { + crate::TypeTraitObject { dyn_token: self.dyn_token.clone(), bounds: self.bounds.clone(), } } } #[cfg(any(feature = "derive", feature = "full"))] 
-#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for TypeTuple { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::TypeTuple { fn clone(&self) -> Self { - TypeTuple { + crate::TypeTuple { paren_token: self.paren_token.clone(), elems: self.elems.clone(), } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Copy for UnOp {} +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Copy for crate::UnOp {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for UnOp { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::UnOp { fn clone(&self) -> Self { *self } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for UseGlob { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::UseGlob { fn clone(&self) -> Self { - UseGlob { + crate::UseGlob { star_token: self.star_token.clone(), } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for UseGroup { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::UseGroup { fn clone(&self) -> Self { - UseGroup { + crate::UseGroup { brace_token: self.brace_token.clone(), items: self.items.clone(), } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for UseName { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::UseName { fn clone(&self) -> Self { - UseName { + crate::UseName { ident: self.ident.clone(), } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for UsePath { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::UsePath { fn clone(&self) -> Self { - UsePath { + crate::UsePath { ident: self.ident.clone(), colon2_token: self.colon2_token.clone(), tree: self.tree.clone(), @@ -2092,10 +2171,10 @@ impl Clone for UsePath { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for UseRename { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::UseRename { fn clone(&self) -> Self { - UseRename { + crate::UseRename { ident: self.ident.clone(), as_token: self.as_token.clone(), rename: self.rename.clone(), @@ -2103,23 +2182,23 @@ impl Clone for UseRename { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for UseTree { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::UseTree { fn clone(&self) -> Self { match self { - UseTree::Path(v0) => UseTree::Path(v0.clone()), - UseTree::Name(v0) => UseTree::Name(v0.clone()), - UseTree::Rename(v0) => UseTree::Rename(v0.clone()), - UseTree::Glob(v0) => UseTree::Glob(v0.clone()), - UseTree::Group(v0) => UseTree::Group(v0.clone()), + crate::UseTree::Path(v0) => crate::UseTree::Path(v0.clone()), + crate::UseTree::Name(v0) => crate::UseTree::Name(v0.clone()), + crate::UseTree::Rename(v0) => crate::UseTree::Rename(v0.clone()), + crate::UseTree::Glob(v0) => crate::UseTree::Glob(v0.clone()), + crate::UseTree::Group(v0) => crate::UseTree::Group(v0.clone()), } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for Variadic { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::Variadic { fn clone(&self) -> Self { - 
Variadic { + crate::Variadic { attrs: self.attrs.clone(), pat: self.pat.clone(), dots: self.dots.clone(), @@ -2128,10 +2207,10 @@ impl Clone for Variadic { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for Variant { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::Variant { fn clone(&self) -> Self { - Variant { + crate::Variant { attrs: self.attrs.clone(), ident: self.ident.clone(), fields: self.fields.clone(), @@ -2140,10 +2219,10 @@ impl Clone for Variant { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for VisRestricted { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::VisRestricted { fn clone(&self) -> Self { - VisRestricted { + crate::VisRestricted { pub_token: self.pub_token.clone(), paren_token: self.paren_token.clone(), in_token: self.in_token.clone(), @@ -2152,33 +2231,37 @@ impl Clone for VisRestricted { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for Visibility { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::Visibility { fn clone(&self) -> Self { match self { - Visibility::Public(v0) => Visibility::Public(v0.clone()), - Visibility::Restricted(v0) => Visibility::Restricted(v0.clone()), - Visibility::Inherited => Visibility::Inherited, + crate::Visibility::Public(v0) => crate::Visibility::Public(v0.clone()), + crate::Visibility::Restricted(v0) => { + crate::Visibility::Restricted(v0.clone()) + } + crate::Visibility::Inherited => crate::Visibility::Inherited, } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for WhereClause { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::WhereClause { fn clone(&self) -> Self { - WhereClause { + crate::WhereClause { where_token: self.where_token.clone(), predicates: self.predicates.clone(), } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] -impl Clone for WherePredicate { +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for crate::WherePredicate { fn clone(&self) -> Self { match self { - WherePredicate::Lifetime(v0) => WherePredicate::Lifetime(v0.clone()), - WherePredicate::Type(v0) => WherePredicate::Type(v0.clone()), + crate::WherePredicate::Lifetime(v0) => { + crate::WherePredicate::Lifetime(v0.clone()) + } + crate::WherePredicate::Type(v0) => crate::WherePredicate::Type(v0.clone()), } } } diff --git a/vendor/syn/src/gen/debug.rs b/vendor/syn/src/gen/debug.rs index 837fe99f..aa42e32c 100644 --- a/vendor/syn/src/gen/debug.rs +++ b/vendor/syn/src/gen/debug.rs @@ -1,11 +1,11 @@ // This file is @generated by syn-internal-codegen. // It is not intended for manual editing. 
-use crate::*; +#![allow(unknown_lints, non_local_definitions)] use std::fmt::{self, Debug}; #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for Abi { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::Abi { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let mut formatter = formatter.debug_struct("Abi"); formatter.field("extern_token", &self.extern_token); @@ -14,25 +14,26 @@ impl Debug for Abi { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for AngleBracketedGenericArguments { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::AngleBracketedGenericArguments { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl AngleBracketedGenericArguments { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("colon2_token", &self.colon2_token); - formatter.field("lt_token", &self.lt_token); - formatter.field("args", &self.args); - formatter.field("gt_token", &self.gt_token); - formatter.finish() - } - } self.debug(formatter, "AngleBracketedGenericArguments") } } +#[cfg(any(feature = "derive", feature = "full"))] +impl crate::AngleBracketedGenericArguments { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("colon2_token", &self.colon2_token); + formatter.field("lt_token", &self.lt_token); + formatter.field("args", &self.args); + formatter.field("gt_token", &self.gt_token); + formatter.finish() + } +} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for Arm { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::Arm { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let mut formatter = formatter.debug_struct("Arm"); formatter.field("attrs", &self.attrs); @@ -45,8 +46,8 @@ impl Debug for Arm { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for AssocConst { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::AssocConst { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let mut formatter = formatter.debug_struct("AssocConst"); formatter.field("ident", &self.ident); @@ -57,8 +58,8 @@ impl Debug for AssocConst { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for AssocType { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::AssocType { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let mut formatter = formatter.debug_struct("AssocType"); formatter.field("ident", &self.ident); @@ -69,13 +70,13 @@ impl Debug for AssocType { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for AttrStyle { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::AttrStyle { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("AttrStyle::")?; match self { - AttrStyle::Outer => formatter.write_str("Outer"), - AttrStyle::Inner(v0) => { + crate::AttrStyle::Outer => formatter.write_str("Outer"), + crate::AttrStyle::Inner(v0) => { let mut formatter = 
formatter.debug_tuple("Inner"); formatter.field(v0); formatter.finish() @@ -84,8 +85,8 @@ impl Debug for AttrStyle { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for Attribute { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::Attribute { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let mut formatter = formatter.debug_struct("Attribute"); formatter.field("pound_token", &self.pound_token); @@ -96,8 +97,8 @@ impl Debug for Attribute { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for BareFnArg { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::BareFnArg { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let mut formatter = formatter.debug_struct("BareFnArg"); formatter.field("attrs", &self.attrs); @@ -107,8 +108,8 @@ impl Debug for BareFnArg { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for BareVariadic { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::BareVariadic { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let mut formatter = formatter.debug_struct("BareVariadic"); formatter.field("attrs", &self.attrs); @@ -119,147 +120,147 @@ impl Debug for BareVariadic { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for BinOp { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::BinOp { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("BinOp::")?; match self { - BinOp::Add(v0) => { + crate::BinOp::Add(v0) => { let mut formatter = formatter.debug_tuple("Add"); formatter.field(v0); formatter.finish() } - BinOp::Sub(v0) => { + crate::BinOp::Sub(v0) => { let mut formatter = formatter.debug_tuple("Sub"); formatter.field(v0); formatter.finish() } - BinOp::Mul(v0) => { + crate::BinOp::Mul(v0) => { let mut formatter = formatter.debug_tuple("Mul"); formatter.field(v0); formatter.finish() } - BinOp::Div(v0) => { + crate::BinOp::Div(v0) => { let mut formatter = formatter.debug_tuple("Div"); formatter.field(v0); formatter.finish() } - BinOp::Rem(v0) => { + crate::BinOp::Rem(v0) => { let mut formatter = formatter.debug_tuple("Rem"); formatter.field(v0); formatter.finish() } - BinOp::And(v0) => { + crate::BinOp::And(v0) => { let mut formatter = formatter.debug_tuple("And"); formatter.field(v0); formatter.finish() } - BinOp::Or(v0) => { + crate::BinOp::Or(v0) => { let mut formatter = formatter.debug_tuple("Or"); formatter.field(v0); formatter.finish() } - BinOp::BitXor(v0) => { + crate::BinOp::BitXor(v0) => { let mut formatter = formatter.debug_tuple("BitXor"); formatter.field(v0); formatter.finish() } - BinOp::BitAnd(v0) => { + crate::BinOp::BitAnd(v0) => { let mut formatter = formatter.debug_tuple("BitAnd"); formatter.field(v0); formatter.finish() } - BinOp::BitOr(v0) => { + crate::BinOp::BitOr(v0) => { let mut formatter = formatter.debug_tuple("BitOr"); formatter.field(v0); formatter.finish() } - BinOp::Shl(v0) => { + crate::BinOp::Shl(v0) => { let mut formatter = formatter.debug_tuple("Shl"); formatter.field(v0); formatter.finish() } - BinOp::Shr(v0) => { + crate::BinOp::Shr(v0) => { let mut formatter = formatter.debug_tuple("Shr"); formatter.field(v0); formatter.finish() } - BinOp::Eq(v0) => { + 
crate::BinOp::Eq(v0) => { let mut formatter = formatter.debug_tuple("Eq"); formatter.field(v0); formatter.finish() } - BinOp::Lt(v0) => { + crate::BinOp::Lt(v0) => { let mut formatter = formatter.debug_tuple("Lt"); formatter.field(v0); formatter.finish() } - BinOp::Le(v0) => { + crate::BinOp::Le(v0) => { let mut formatter = formatter.debug_tuple("Le"); formatter.field(v0); formatter.finish() } - BinOp::Ne(v0) => { + crate::BinOp::Ne(v0) => { let mut formatter = formatter.debug_tuple("Ne"); formatter.field(v0); formatter.finish() } - BinOp::Ge(v0) => { + crate::BinOp::Ge(v0) => { let mut formatter = formatter.debug_tuple("Ge"); formatter.field(v0); formatter.finish() } - BinOp::Gt(v0) => { + crate::BinOp::Gt(v0) => { let mut formatter = formatter.debug_tuple("Gt"); formatter.field(v0); formatter.finish() } - BinOp::AddAssign(v0) => { + crate::BinOp::AddAssign(v0) => { let mut formatter = formatter.debug_tuple("AddAssign"); formatter.field(v0); formatter.finish() } - BinOp::SubAssign(v0) => { + crate::BinOp::SubAssign(v0) => { let mut formatter = formatter.debug_tuple("SubAssign"); formatter.field(v0); formatter.finish() } - BinOp::MulAssign(v0) => { + crate::BinOp::MulAssign(v0) => { let mut formatter = formatter.debug_tuple("MulAssign"); formatter.field(v0); formatter.finish() } - BinOp::DivAssign(v0) => { + crate::BinOp::DivAssign(v0) => { let mut formatter = formatter.debug_tuple("DivAssign"); formatter.field(v0); formatter.finish() } - BinOp::RemAssign(v0) => { + crate::BinOp::RemAssign(v0) => { let mut formatter = formatter.debug_tuple("RemAssign"); formatter.field(v0); formatter.finish() } - BinOp::BitXorAssign(v0) => { + crate::BinOp::BitXorAssign(v0) => { let mut formatter = formatter.debug_tuple("BitXorAssign"); formatter.field(v0); formatter.finish() } - BinOp::BitAndAssign(v0) => { + crate::BinOp::BitAndAssign(v0) => { let mut formatter = formatter.debug_tuple("BitAndAssign"); formatter.field(v0); formatter.finish() } - BinOp::BitOrAssign(v0) => { + crate::BinOp::BitOrAssign(v0) => { let mut formatter = formatter.debug_tuple("BitOrAssign"); formatter.field(v0); formatter.finish() } - BinOp::ShlAssign(v0) => { + crate::BinOp::ShlAssign(v0) => { let mut formatter = formatter.debug_tuple("ShlAssign"); formatter.field(v0); formatter.finish() } - BinOp::ShrAssign(v0) => { + crate::BinOp::ShrAssign(v0) => { let mut formatter = formatter.debug_tuple("ShrAssign"); formatter.field(v0); formatter.finish() @@ -268,8 +269,8 @@ impl Debug for BinOp { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for Block { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::Block { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let mut formatter = formatter.debug_struct("Block"); formatter.field("brace_token", &self.brace_token); @@ -278,8 +279,8 @@ impl Debug for Block { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for BoundLifetimes { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::BoundLifetimes { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let mut formatter = formatter.debug_struct("BoundLifetimes"); formatter.field("for_token", &self.for_token); @@ -289,9 +290,28 @@ impl Debug for BoundLifetimes { formatter.finish() } } +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::CapturedParam { + fn fmt(&self, formatter: &mut 
fmt::Formatter) -> fmt::Result { + formatter.write_str("CapturedParam::")?; + match self { + crate::CapturedParam::Lifetime(v0) => { + let mut formatter = formatter.debug_tuple("Lifetime"); + formatter.field(v0); + formatter.finish() + } + crate::CapturedParam::Ident(v0) => { + let mut formatter = formatter.debug_tuple("Ident"); + formatter.field(v0); + formatter.finish() + } + } + } +} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ConstParam { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ConstParam { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let mut formatter = formatter.debug_struct("ConstParam"); formatter.field("attrs", &self.attrs); @@ -305,8 +325,8 @@ impl Debug for ConstParam { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for Constraint { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::Constraint { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let mut formatter = formatter.debug_struct("Constraint"); formatter.field("ident", &self.ident); @@ -317,67 +337,70 @@ impl Debug for Constraint { } } #[cfg(feature = "derive")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for Data { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::Data { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("Data::")?; match self { - Data::Struct(v0) => v0.debug(formatter, "Struct"), - Data::Enum(v0) => v0.debug(formatter, "Enum"), - Data::Union(v0) => v0.debug(formatter, "Union"), + crate::Data::Struct(v0) => v0.debug(formatter, "Struct"), + crate::Data::Enum(v0) => v0.debug(formatter, "Enum"), + crate::Data::Union(v0) => v0.debug(formatter, "Union"), } } } #[cfg(feature = "derive")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for DataEnum { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::DataEnum { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl DataEnum { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("enum_token", &self.enum_token); - formatter.field("brace_token", &self.brace_token); - formatter.field("variants", &self.variants); - formatter.finish() - } - } self.debug(formatter, "DataEnum") } } #[cfg(feature = "derive")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for DataStruct { +impl crate::DataEnum { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("enum_token", &self.enum_token); + formatter.field("brace_token", &self.brace_token); + formatter.field("variants", &self.variants); + formatter.finish() + } +} +#[cfg(feature = "derive")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::DataStruct { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl DataStruct { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("struct_token", &self.struct_token); - formatter.field("fields", &self.fields); - formatter.field("semi_token", &self.semi_token); - formatter.finish() - } - } self.debug(formatter, "DataStruct") } } 
#[cfg(feature = "derive")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for DataUnion { +impl crate::DataStruct { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("struct_token", &self.struct_token); + formatter.field("fields", &self.fields); + formatter.field("semi_token", &self.semi_token); + formatter.finish() + } +} +#[cfg(feature = "derive")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::DataUnion { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl DataUnion { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("union_token", &self.union_token); - formatter.field("fields", &self.fields); - formatter.finish() - } - } self.debug(formatter, "DataUnion") } } #[cfg(feature = "derive")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for DeriveInput { +impl crate::DataUnion { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("union_token", &self.union_token); + formatter.field("fields", &self.fields); + formatter.finish() + } +} +#[cfg(feature = "derive")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::DeriveInput { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let mut formatter = formatter.debug_struct("DeriveInput"); formatter.field("attrs", &self.attrs); @@ -389,736 +412,791 @@ impl Debug for DeriveInput { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for Expr { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::Expr { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("Expr::")?; match self { #[cfg(feature = "full")] - Expr::Array(v0) => v0.debug(formatter, "Array"), - #[cfg(feature = "full")] - Expr::Assign(v0) => v0.debug(formatter, "Assign"), + crate::Expr::Array(v0) => v0.debug(formatter, "Array"), #[cfg(feature = "full")] - Expr::Async(v0) => v0.debug(formatter, "Async"), + crate::Expr::Assign(v0) => v0.debug(formatter, "Assign"), #[cfg(feature = "full")] - Expr::Await(v0) => v0.debug(formatter, "Await"), - Expr::Binary(v0) => v0.debug(formatter, "Binary"), + crate::Expr::Async(v0) => v0.debug(formatter, "Async"), #[cfg(feature = "full")] - Expr::Block(v0) => v0.debug(formatter, "Block"), + crate::Expr::Await(v0) => v0.debug(formatter, "Await"), + crate::Expr::Binary(v0) => v0.debug(formatter, "Binary"), #[cfg(feature = "full")] - Expr::Break(v0) => v0.debug(formatter, "Break"), - Expr::Call(v0) => v0.debug(formatter, "Call"), - Expr::Cast(v0) => v0.debug(formatter, "Cast"), + crate::Expr::Block(v0) => v0.debug(formatter, "Block"), #[cfg(feature = "full")] - Expr::Closure(v0) => v0.debug(formatter, "Closure"), + crate::Expr::Break(v0) => v0.debug(formatter, "Break"), + crate::Expr::Call(v0) => v0.debug(formatter, "Call"), + crate::Expr::Cast(v0) => v0.debug(formatter, "Cast"), #[cfg(feature = "full")] - Expr::Const(v0) => v0.debug(formatter, "Const"), + crate::Expr::Closure(v0) => v0.debug(formatter, "Closure"), #[cfg(feature = "full")] - Expr::Continue(v0) => v0.debug(formatter, "Continue"), - Expr::Field(v0) => v0.debug(formatter, "Field"), + crate::Expr::Const(v0) => v0.debug(formatter, "Const"), 
#[cfg(feature = "full")] - Expr::ForLoop(v0) => v0.debug(formatter, "ForLoop"), - Expr::Group(v0) => v0.debug(formatter, "Group"), + crate::Expr::Continue(v0) => v0.debug(formatter, "Continue"), + crate::Expr::Field(v0) => v0.debug(formatter, "Field"), #[cfg(feature = "full")] - Expr::If(v0) => v0.debug(formatter, "If"), - Expr::Index(v0) => v0.debug(formatter, "Index"), + crate::Expr::ForLoop(v0) => v0.debug(formatter, "ForLoop"), + crate::Expr::Group(v0) => v0.debug(formatter, "Group"), #[cfg(feature = "full")] - Expr::Infer(v0) => v0.debug(formatter, "Infer"), + crate::Expr::If(v0) => v0.debug(formatter, "If"), + crate::Expr::Index(v0) => v0.debug(formatter, "Index"), #[cfg(feature = "full")] - Expr::Let(v0) => v0.debug(formatter, "Let"), - Expr::Lit(v0) => v0.debug(formatter, "Lit"), + crate::Expr::Infer(v0) => v0.debug(formatter, "Infer"), #[cfg(feature = "full")] - Expr::Loop(v0) => v0.debug(formatter, "Loop"), - Expr::Macro(v0) => v0.debug(formatter, "Macro"), + crate::Expr::Let(v0) => v0.debug(formatter, "Let"), + crate::Expr::Lit(v0) => v0.debug(formatter, "Lit"), #[cfg(feature = "full")] - Expr::Match(v0) => v0.debug(formatter, "Match"), + crate::Expr::Loop(v0) => v0.debug(formatter, "Loop"), + crate::Expr::Macro(v0) => v0.debug(formatter, "Macro"), #[cfg(feature = "full")] - Expr::MethodCall(v0) => v0.debug(formatter, "MethodCall"), - Expr::Paren(v0) => v0.debug(formatter, "Paren"), - Expr::Path(v0) => v0.debug(formatter, "Path"), + crate::Expr::Match(v0) => v0.debug(formatter, "Match"), + crate::Expr::MethodCall(v0) => v0.debug(formatter, "MethodCall"), + crate::Expr::Paren(v0) => v0.debug(formatter, "Paren"), + crate::Expr::Path(v0) => v0.debug(formatter, "Path"), #[cfg(feature = "full")] - Expr::Range(v0) => v0.debug(formatter, "Range"), + crate::Expr::Range(v0) => v0.debug(formatter, "Range"), #[cfg(feature = "full")] - Expr::Reference(v0) => v0.debug(formatter, "Reference"), + crate::Expr::RawAddr(v0) => v0.debug(formatter, "RawAddr"), + crate::Expr::Reference(v0) => v0.debug(formatter, "Reference"), #[cfg(feature = "full")] - Expr::Repeat(v0) => v0.debug(formatter, "Repeat"), + crate::Expr::Repeat(v0) => v0.debug(formatter, "Repeat"), #[cfg(feature = "full")] - Expr::Return(v0) => v0.debug(formatter, "Return"), + crate::Expr::Return(v0) => v0.debug(formatter, "Return"), + crate::Expr::Struct(v0) => v0.debug(formatter, "Struct"), #[cfg(feature = "full")] - Expr::Struct(v0) => v0.debug(formatter, "Struct"), + crate::Expr::Try(v0) => v0.debug(formatter, "Try"), #[cfg(feature = "full")] - Expr::Try(v0) => v0.debug(formatter, "Try"), + crate::Expr::TryBlock(v0) => v0.debug(formatter, "TryBlock"), + crate::Expr::Tuple(v0) => v0.debug(formatter, "Tuple"), + crate::Expr::Unary(v0) => v0.debug(formatter, "Unary"), #[cfg(feature = "full")] - Expr::TryBlock(v0) => v0.debug(formatter, "TryBlock"), - #[cfg(feature = "full")] - Expr::Tuple(v0) => v0.debug(formatter, "Tuple"), - Expr::Unary(v0) => v0.debug(formatter, "Unary"), - #[cfg(feature = "full")] - Expr::Unsafe(v0) => v0.debug(formatter, "Unsafe"), - Expr::Verbatim(v0) => { + crate::Expr::Unsafe(v0) => v0.debug(formatter, "Unsafe"), + crate::Expr::Verbatim(v0) => { let mut formatter = formatter.debug_tuple("Verbatim"); formatter.field(v0); formatter.finish() } #[cfg(feature = "full")] - Expr::While(v0) => v0.debug(formatter, "While"), + crate::Expr::While(v0) => v0.debug(formatter, "While"), #[cfg(feature = "full")] - Expr::Yield(v0) => v0.debug(formatter, "Yield"), + crate::Expr::Yield(v0) => v0.debug(formatter, "Yield"), 
#[cfg(not(feature = "full"))] _ => unreachable!(), } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ExprArray { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ExprArray { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ExprArray { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("bracket_token", &self.bracket_token); - formatter.field("elems", &self.elems); - formatter.finish() - } - } self.debug(formatter, "ExprArray") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ExprAssign { +impl crate::ExprArray { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("bracket_token", &self.bracket_token); + formatter.field("elems", &self.elems); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ExprAssign { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ExprAssign { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("left", &self.left); - formatter.field("eq_token", &self.eq_token); - formatter.field("right", &self.right); - formatter.finish() - } - } self.debug(formatter, "ExprAssign") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ExprAsync { +impl crate::ExprAssign { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("left", &self.left); + formatter.field("eq_token", &self.eq_token); + formatter.field("right", &self.right); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ExprAsync { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ExprAsync { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("async_token", &self.async_token); - formatter.field("capture", &self.capture); - formatter.field("block", &self.block); - formatter.finish() - } - } self.debug(formatter, "ExprAsync") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ExprAwait { +impl crate::ExprAsync { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("async_token", &self.async_token); + formatter.field("capture", &self.capture); + formatter.field("block", &self.block); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ExprAwait { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ExprAwait { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - 
formatter.field("attrs", &self.attrs); - formatter.field("base", &self.base); - formatter.field("dot_token", &self.dot_token); - formatter.field("await_token", &self.await_token); - formatter.finish() - } - } self.debug(formatter, "ExprAwait") } } +#[cfg(feature = "full")] +impl crate::ExprAwait { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("base", &self.base); + formatter.field("dot_token", &self.dot_token); + formatter.field("await_token", &self.await_token); + formatter.finish() + } +} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ExprBinary { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ExprBinary { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ExprBinary { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("left", &self.left); - formatter.field("op", &self.op); - formatter.field("right", &self.right); - formatter.finish() - } - } self.debug(formatter, "ExprBinary") } } +#[cfg(any(feature = "derive", feature = "full"))] +impl crate::ExprBinary { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("left", &self.left); + formatter.field("op", &self.op); + formatter.field("right", &self.right); + formatter.finish() + } +} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ExprBlock { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ExprBlock { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ExprBlock { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("label", &self.label); - formatter.field("block", &self.block); - formatter.finish() - } - } self.debug(formatter, "ExprBlock") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ExprBreak { +impl crate::ExprBlock { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("label", &self.label); + formatter.field("block", &self.block); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ExprBreak { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ExprBreak { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("break_token", &self.break_token); - formatter.field("label", &self.label); - formatter.field("expr", &self.expr); - formatter.finish() - } - } self.debug(formatter, "ExprBreak") } } +#[cfg(feature = "full")] +impl crate::ExprBreak { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("break_token", 
&self.break_token); + formatter.field("label", &self.label); + formatter.field("expr", &self.expr); + formatter.finish() + } +} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ExprCall { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ExprCall { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ExprCall { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("func", &self.func); - formatter.field("paren_token", &self.paren_token); - formatter.field("args", &self.args); - formatter.finish() - } - } self.debug(formatter, "ExprCall") } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ExprCast { +impl crate::ExprCall { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("func", &self.func); + formatter.field("paren_token", &self.paren_token); + formatter.field("args", &self.args); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ExprCast { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ExprCast { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("expr", &self.expr); - formatter.field("as_token", &self.as_token); - formatter.field("ty", &self.ty); - formatter.finish() - } - } self.debug(formatter, "ExprCast") } } +#[cfg(any(feature = "derive", feature = "full"))] +impl crate::ExprCast { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("expr", &self.expr); + formatter.field("as_token", &self.as_token); + formatter.field("ty", &self.ty); + formatter.finish() + } +} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ExprClosure { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ExprClosure { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ExprClosure { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("lifetimes", &self.lifetimes); - formatter.field("constness", &self.constness); - formatter.field("movability", &self.movability); - formatter.field("asyncness", &self.asyncness); - formatter.field("capture", &self.capture); - formatter.field("or1_token", &self.or1_token); - formatter.field("inputs", &self.inputs); - formatter.field("or2_token", &self.or2_token); - formatter.field("output", &self.output); - formatter.field("body", &self.body); - formatter.finish() - } - } self.debug(formatter, "ExprClosure") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ExprConst { +impl crate::ExprClosure { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + 
formatter.field("attrs", &self.attrs); + formatter.field("lifetimes", &self.lifetimes); + formatter.field("constness", &self.constness); + formatter.field("movability", &self.movability); + formatter.field("asyncness", &self.asyncness); + formatter.field("capture", &self.capture); + formatter.field("or1_token", &self.or1_token); + formatter.field("inputs", &self.inputs); + formatter.field("or2_token", &self.or2_token); + formatter.field("output", &self.output); + formatter.field("body", &self.body); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ExprConst { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ExprConst { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("const_token", &self.const_token); - formatter.field("block", &self.block); - formatter.finish() - } - } self.debug(formatter, "ExprConst") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ExprContinue { +impl crate::ExprConst { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("const_token", &self.const_token); + formatter.field("block", &self.block); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ExprContinue { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ExprContinue { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("continue_token", &self.continue_token); - formatter.field("label", &self.label); - formatter.finish() - } - } self.debug(formatter, "ExprContinue") } } +#[cfg(feature = "full")] +impl crate::ExprContinue { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("continue_token", &self.continue_token); + formatter.field("label", &self.label); + formatter.finish() + } +} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ExprField { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ExprField { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ExprField { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("base", &self.base); - formatter.field("dot_token", &self.dot_token); - formatter.field("member", &self.member); - formatter.finish() - } - } self.debug(formatter, "ExprField") } } +#[cfg(any(feature = "derive", feature = "full"))] +impl crate::ExprField { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("base", &self.base); + formatter.field("dot_token", &self.dot_token); + formatter.field("member", &self.member); + formatter.finish() + } +} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = 
"extra-traits")))] -impl Debug for ExprForLoop { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ExprForLoop { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ExprForLoop { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("label", &self.label); - formatter.field("for_token", &self.for_token); - formatter.field("pat", &self.pat); - formatter.field("in_token", &self.in_token); - formatter.field("expr", &self.expr); - formatter.field("body", &self.body); - formatter.finish() - } - } self.debug(formatter, "ExprForLoop") } } +#[cfg(feature = "full")] +impl crate::ExprForLoop { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("label", &self.label); + formatter.field("for_token", &self.for_token); + formatter.field("pat", &self.pat); + formatter.field("in_token", &self.in_token); + formatter.field("expr", &self.expr); + formatter.field("body", &self.body); + formatter.finish() + } +} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ExprGroup { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ExprGroup { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ExprGroup { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("group_token", &self.group_token); - formatter.field("expr", &self.expr); - formatter.finish() - } - } self.debug(formatter, "ExprGroup") } } +#[cfg(any(feature = "derive", feature = "full"))] +impl crate::ExprGroup { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("group_token", &self.group_token); + formatter.field("expr", &self.expr); + formatter.finish() + } +} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ExprIf { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ExprIf { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ExprIf { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("if_token", &self.if_token); - formatter.field("cond", &self.cond); - formatter.field("then_branch", &self.then_branch); - formatter.field("else_branch", &self.else_branch); - formatter.finish() - } - } self.debug(formatter, "ExprIf") } } +#[cfg(feature = "full")] +impl crate::ExprIf { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("if_token", &self.if_token); + formatter.field("cond", &self.cond); + formatter.field("then_branch", &self.then_branch); + formatter.field("else_branch", &self.else_branch); + formatter.finish() + } +} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ExprIndex { +#[cfg_attr(docsrs, doc(cfg(feature = 
"extra-traits")))] +impl Debug for crate::ExprIndex { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ExprIndex { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("expr", &self.expr); - formatter.field("bracket_token", &self.bracket_token); - formatter.field("index", &self.index); - formatter.finish() - } - } self.debug(formatter, "ExprIndex") } } +#[cfg(any(feature = "derive", feature = "full"))] +impl crate::ExprIndex { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("expr", &self.expr); + formatter.field("bracket_token", &self.bracket_token); + formatter.field("index", &self.index); + formatter.finish() + } +} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ExprInfer { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ExprInfer { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ExprInfer { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("underscore_token", &self.underscore_token); - formatter.finish() - } - } self.debug(formatter, "ExprInfer") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ExprLet { +impl crate::ExprInfer { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("underscore_token", &self.underscore_token); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ExprLet { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ExprLet { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("let_token", &self.let_token); - formatter.field("pat", &self.pat); - formatter.field("eq_token", &self.eq_token); - formatter.field("expr", &self.expr); - formatter.finish() - } - } self.debug(formatter, "ExprLet") } } +#[cfg(feature = "full")] +impl crate::ExprLet { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("let_token", &self.let_token); + formatter.field("pat", &self.pat); + formatter.field("eq_token", &self.eq_token); + formatter.field("expr", &self.expr); + formatter.finish() + } +} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ExprLit { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ExprLit { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ExprLit { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("lit", &self.lit); - formatter.finish() - } - } self.debug(formatter, "ExprLit") } } +#[cfg(any(feature = "derive", 
feature = "full"))] +impl crate::ExprLit { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("lit", &self.lit); + formatter.finish() + } +} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ExprLoop { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ExprLoop { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ExprLoop { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("label", &self.label); - formatter.field("loop_token", &self.loop_token); - formatter.field("body", &self.body); - formatter.finish() - } - } self.debug(formatter, "ExprLoop") } } +#[cfg(feature = "full")] +impl crate::ExprLoop { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("label", &self.label); + formatter.field("loop_token", &self.loop_token); + formatter.field("body", &self.body); + formatter.finish() + } +} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ExprMacro { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ExprMacro { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ExprMacro { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("mac", &self.mac); - formatter.finish() - } - } self.debug(formatter, "ExprMacro") } } +#[cfg(any(feature = "derive", feature = "full"))] +impl crate::ExprMacro { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("mac", &self.mac); + formatter.finish() + } +} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ExprMatch { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ExprMatch { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ExprMatch { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("match_token", &self.match_token); - formatter.field("expr", &self.expr); - formatter.field("brace_token", &self.brace_token); - formatter.field("arms", &self.arms); - formatter.finish() - } - } self.debug(formatter, "ExprMatch") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ExprMethodCall { +impl crate::ExprMatch { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("match_token", &self.match_token); + formatter.field("expr", &self.expr); + formatter.field("brace_token", &self.brace_token); + formatter.field("arms", &self.arms); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] 
+impl Debug for crate::ExprMethodCall { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ExprMethodCall { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("receiver", &self.receiver); - formatter.field("dot_token", &self.dot_token); - formatter.field("method", &self.method); - formatter.field("turbofish", &self.turbofish); - formatter.field("paren_token", &self.paren_token); - formatter.field("args", &self.args); - formatter.finish() - } - } self.debug(formatter, "ExprMethodCall") } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ExprParen { +impl crate::ExprMethodCall { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("receiver", &self.receiver); + formatter.field("dot_token", &self.dot_token); + formatter.field("method", &self.method); + formatter.field("turbofish", &self.turbofish); + formatter.field("paren_token", &self.paren_token); + formatter.field("args", &self.args); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ExprParen { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ExprParen { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("paren_token", &self.paren_token); - formatter.field("expr", &self.expr); - formatter.finish() - } - } self.debug(formatter, "ExprParen") } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ExprPath { +impl crate::ExprParen { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("paren_token", &self.paren_token); + formatter.field("expr", &self.expr); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ExprPath { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ExprPath { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("qself", &self.qself); - formatter.field("path", &self.path); - formatter.finish() - } - } self.debug(formatter, "ExprPath") } } +#[cfg(any(feature = "derive", feature = "full"))] +impl crate::ExprPath { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("qself", &self.qself); + formatter.field("path", &self.path); + formatter.finish() + } +} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ExprRange { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ExprRange { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ExprRange { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> 
fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("start", &self.start); - formatter.field("limits", &self.limits); - formatter.field("end", &self.end); - formatter.finish() - } - } self.debug(formatter, "ExprRange") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ExprReference { +impl crate::ExprRange { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("start", &self.start); + formatter.field("limits", &self.limits); + formatter.field("end", &self.end); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ExprRawAddr { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + self.debug(formatter, "ExprRawAddr") + } +} +#[cfg(feature = "full")] +impl crate::ExprRawAddr { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("and_token", &self.and_token); + formatter.field("raw", &self.raw); + formatter.field("mutability", &self.mutability); + formatter.field("expr", &self.expr); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ExprReference { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ExprReference { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("and_token", &self.and_token); - formatter.field("mutability", &self.mutability); - formatter.field("expr", &self.expr); - formatter.finish() - } - } self.debug(formatter, "ExprReference") } } +#[cfg(any(feature = "derive", feature = "full"))] +impl crate::ExprReference { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("and_token", &self.and_token); + formatter.field("mutability", &self.mutability); + formatter.field("expr", &self.expr); + formatter.finish() + } +} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ExprRepeat { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ExprRepeat { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ExprRepeat { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("bracket_token", &self.bracket_token); - formatter.field("expr", &self.expr); - formatter.field("semi_token", &self.semi_token); - formatter.field("len", &self.len); - formatter.finish() - } - } self.debug(formatter, "ExprRepeat") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ExprReturn { +impl crate::ExprRepeat { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("bracket_token", &self.bracket_token); + 
formatter.field("expr", &self.expr); + formatter.field("semi_token", &self.semi_token); + formatter.field("len", &self.len); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ExprReturn { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ExprReturn { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("return_token", &self.return_token); - formatter.field("expr", &self.expr); - formatter.finish() - } - } self.debug(formatter, "ExprReturn") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ExprStruct { +impl crate::ExprReturn { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("return_token", &self.return_token); + formatter.field("expr", &self.expr); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ExprStruct { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ExprStruct { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("qself", &self.qself); - formatter.field("path", &self.path); - formatter.field("brace_token", &self.brace_token); - formatter.field("fields", &self.fields); - formatter.field("dot2_token", &self.dot2_token); - formatter.field("rest", &self.rest); - formatter.finish() - } - } self.debug(formatter, "ExprStruct") } } +#[cfg(any(feature = "derive", feature = "full"))] +impl crate::ExprStruct { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("qself", &self.qself); + formatter.field("path", &self.path); + formatter.field("brace_token", &self.brace_token); + formatter.field("fields", &self.fields); + formatter.field("dot2_token", &self.dot2_token); + formatter.field("rest", &self.rest); + formatter.finish() + } +} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ExprTry { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ExprTry { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ExprTry { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("expr", &self.expr); - formatter.field("question_token", &self.question_token); - formatter.finish() - } - } self.debug(formatter, "ExprTry") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ExprTryBlock { +impl crate::ExprTry { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("expr", &self.expr); + formatter.field("question_token", &self.question_token); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for 
crate::ExprTryBlock { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ExprTryBlock { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("try_token", &self.try_token); - formatter.field("block", &self.block); - formatter.finish() - } - } self.debug(formatter, "ExprTryBlock") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ExprTuple { +impl crate::ExprTryBlock { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("try_token", &self.try_token); + formatter.field("block", &self.block); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ExprTuple { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ExprTuple { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("paren_token", &self.paren_token); - formatter.field("elems", &self.elems); - formatter.finish() - } - } self.debug(formatter, "ExprTuple") } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ExprUnary { +impl crate::ExprTuple { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("paren_token", &self.paren_token); + formatter.field("elems", &self.elems); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ExprUnary { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ExprUnary { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("op", &self.op); - formatter.field("expr", &self.expr); - formatter.finish() - } - } self.debug(formatter, "ExprUnary") } } +#[cfg(any(feature = "derive", feature = "full"))] +impl crate::ExprUnary { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("op", &self.op); + formatter.field("expr", &self.expr); + formatter.finish() + } +} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ExprUnsafe { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ExprUnsafe { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ExprUnsafe { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("unsafe_token", &self.unsafe_token); - formatter.field("block", &self.block); - formatter.finish() - } - } self.debug(formatter, "ExprUnsafe") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ExprWhile { +impl crate::ExprUnsafe { + fn debug(&self, 
formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("unsafe_token", &self.unsafe_token); + formatter.field("block", &self.block); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ExprWhile { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ExprWhile { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("label", &self.label); - formatter.field("while_token", &self.while_token); - formatter.field("cond", &self.cond); - formatter.field("body", &self.body); - formatter.finish() - } - } self.debug(formatter, "ExprWhile") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ExprYield { +impl crate::ExprWhile { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("label", &self.label); + formatter.field("while_token", &self.while_token); + formatter.field("cond", &self.cond); + formatter.field("body", &self.body); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ExprYield { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ExprYield { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("yield_token", &self.yield_token); - formatter.field("expr", &self.expr); - formatter.finish() - } - } self.debug(formatter, "ExprYield") } } +#[cfg(feature = "full")] +impl crate::ExprYield { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("yield_token", &self.yield_token); + formatter.field("expr", &self.expr); + formatter.finish() + } +} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for Field { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::Field { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let mut formatter = formatter.debug_struct("Field"); formatter.field("attrs", &self.attrs); @@ -1131,18 +1209,18 @@ impl Debug for Field { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for FieldMutability { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::FieldMutability { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("FieldMutability::")?; match self { - FieldMutability::None => formatter.write_str("None"), + crate::FieldMutability::None => formatter.write_str("None"), } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for FieldPat { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::FieldPat { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let mut formatter = formatter.debug_struct("FieldPat"); formatter.field("attrs", &self.attrs); @@ -1152,9 
+1230,9 @@ impl Debug for FieldPat { formatter.finish() } } -#[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for FieldValue { +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::FieldValue { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let mut formatter = formatter.debug_struct("FieldValue"); formatter.field("attrs", &self.attrs); @@ -1165,50 +1243,52 @@ impl Debug for FieldValue { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for Fields { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::Fields { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("Fields::")?; match self { - Fields::Named(v0) => v0.debug(formatter, "Named"), - Fields::Unnamed(v0) => v0.debug(formatter, "Unnamed"), - Fields::Unit => formatter.write_str("Unit"), + crate::Fields::Named(v0) => v0.debug(formatter, "Named"), + crate::Fields::Unnamed(v0) => v0.debug(formatter, "Unnamed"), + crate::Fields::Unit => formatter.write_str("Unit"), } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for FieldsNamed { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::FieldsNamed { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl FieldsNamed { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("brace_token", &self.brace_token); - formatter.field("named", &self.named); - formatter.finish() - } - } self.debug(formatter, "FieldsNamed") } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for FieldsUnnamed { +impl crate::FieldsNamed { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("brace_token", &self.brace_token); + formatter.field("named", &self.named); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::FieldsUnnamed { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl FieldsUnnamed { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("paren_token", &self.paren_token); - formatter.field("unnamed", &self.unnamed); - formatter.finish() - } - } self.debug(formatter, "FieldsUnnamed") } } +#[cfg(any(feature = "derive", feature = "full"))] +impl crate::FieldsUnnamed { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("paren_token", &self.paren_token); + formatter.field("unnamed", &self.unnamed); + formatter.finish() + } +} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for File { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::File { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let mut formatter = formatter.debug_struct("File"); formatter.field("shebang", &self.shebang); @@ -1218,17 +1298,17 @@ impl Debug for File { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, 
doc(cfg(feature = "extra-traits")))] -impl Debug for FnArg { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::FnArg { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("FnArg::")?; match self { - FnArg::Receiver(v0) => { + crate::FnArg::Receiver(v0) => { let mut formatter = formatter.debug_tuple("Receiver"); formatter.field(v0); formatter.finish() } - FnArg::Typed(v0) => { + crate::FnArg::Typed(v0) => { let mut formatter = formatter.debug_tuple("Typed"); formatter.field(v0); formatter.finish() @@ -1237,16 +1317,16 @@ impl Debug for FnArg { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ForeignItem { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ForeignItem { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("ForeignItem::")?; match self { - ForeignItem::Fn(v0) => v0.debug(formatter, "Fn"), - ForeignItem::Static(v0) => v0.debug(formatter, "Static"), - ForeignItem::Type(v0) => v0.debug(formatter, "Type"), - ForeignItem::Macro(v0) => v0.debug(formatter, "Macro"), - ForeignItem::Verbatim(v0) => { + crate::ForeignItem::Fn(v0) => v0.debug(formatter, "Fn"), + crate::ForeignItem::Static(v0) => v0.debug(formatter, "Static"), + crate::ForeignItem::Type(v0) => v0.debug(formatter, "Type"), + crate::ForeignItem::Macro(v0) => v0.debug(formatter, "Macro"), + crate::ForeignItem::Verbatim(v0) => { let mut formatter = formatter.debug_tuple("Verbatim"); formatter.field(v0); formatter.finish() @@ -1255,110 +1335,114 @@ impl Debug for ForeignItem { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ForeignItemFn { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ForeignItemFn { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ForeignItemFn { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("vis", &self.vis); - formatter.field("sig", &self.sig); - formatter.field("semi_token", &self.semi_token); - formatter.finish() - } - } self.debug(formatter, "ForeignItemFn") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ForeignItemMacro { +impl crate::ForeignItemFn { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("vis", &self.vis); + formatter.field("sig", &self.sig); + formatter.field("semi_token", &self.semi_token); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ForeignItemMacro { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ForeignItemMacro { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("mac", &self.mac); - formatter.field("semi_token", &self.semi_token); - formatter.finish() - } - } self.debug(formatter, "ForeignItemMacro") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ForeignItemStatic { +impl crate::ForeignItemMacro { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> 
fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("mac", &self.mac); + formatter.field("semi_token", &self.semi_token); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ForeignItemStatic { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ForeignItemStatic { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("vis", &self.vis); - formatter.field("static_token", &self.static_token); - formatter.field("mutability", &self.mutability); - formatter.field("ident", &self.ident); - formatter.field("colon_token", &self.colon_token); - formatter.field("ty", &self.ty); - formatter.field("semi_token", &self.semi_token); - formatter.finish() - } - } self.debug(formatter, "ForeignItemStatic") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ForeignItemType { +impl crate::ForeignItemStatic { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("vis", &self.vis); + formatter.field("static_token", &self.static_token); + formatter.field("mutability", &self.mutability); + formatter.field("ident", &self.ident); + formatter.field("colon_token", &self.colon_token); + formatter.field("ty", &self.ty); + formatter.field("semi_token", &self.semi_token); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ForeignItemType { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ForeignItemType { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("vis", &self.vis); - formatter.field("type_token", &self.type_token); - formatter.field("ident", &self.ident); - formatter.field("generics", &self.generics); - formatter.field("semi_token", &self.semi_token); - formatter.finish() - } - } self.debug(formatter, "ForeignItemType") } } +#[cfg(feature = "full")] +impl crate::ForeignItemType { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("vis", &self.vis); + formatter.field("type_token", &self.type_token); + formatter.field("ident", &self.ident); + formatter.field("generics", &self.generics); + formatter.field("semi_token", &self.semi_token); + formatter.finish() + } +} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for GenericArgument { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::GenericArgument { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("GenericArgument::")?; match self { - GenericArgument::Lifetime(v0) => { + crate::GenericArgument::Lifetime(v0) => { let mut formatter = formatter.debug_tuple("Lifetime"); formatter.field(v0); formatter.finish() } - GenericArgument::Type(v0) => { + crate::GenericArgument::Type(v0) => { let mut formatter = formatter.debug_tuple("Type"); formatter.field(v0); formatter.finish() } 
- GenericArgument::Const(v0) => { + crate::GenericArgument::Const(v0) => { let mut formatter = formatter.debug_tuple("Const"); formatter.field(v0); formatter.finish() } - GenericArgument::AssocType(v0) => { + crate::GenericArgument::AssocType(v0) => { let mut formatter = formatter.debug_tuple("AssocType"); formatter.field(v0); formatter.finish() } - GenericArgument::AssocConst(v0) => { + crate::GenericArgument::AssocConst(v0) => { let mut formatter = formatter.debug_tuple("AssocConst"); formatter.field(v0); formatter.finish() } - GenericArgument::Constraint(v0) => { + crate::GenericArgument::Constraint(v0) => { let mut formatter = formatter.debug_tuple("Constraint"); formatter.field(v0); formatter.finish() @@ -1367,22 +1451,22 @@ impl Debug for GenericArgument { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for GenericParam { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::GenericParam { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("GenericParam::")?; match self { - GenericParam::Lifetime(v0) => { + crate::GenericParam::Lifetime(v0) => { let mut formatter = formatter.debug_tuple("Lifetime"); formatter.field(v0); formatter.finish() } - GenericParam::Type(v0) => { + crate::GenericParam::Type(v0) => { let mut formatter = formatter.debug_tuple("Type"); formatter.field(v0); formatter.finish() } - GenericParam::Const(v0) => { + crate::GenericParam::Const(v0) => { let mut formatter = formatter.debug_tuple("Const"); formatter.field(v0); formatter.finish() @@ -1391,8 +1475,8 @@ impl Debug for GenericParam { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for Generics { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::Generics { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let mut formatter = formatter.debug_struct("Generics"); formatter.field("lt_token", &self.lt_token); @@ -1403,16 +1487,16 @@ impl Debug for Generics { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ImplItem { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ImplItem { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("ImplItem::")?; match self { - ImplItem::Const(v0) => v0.debug(formatter, "Const"), - ImplItem::Fn(v0) => v0.debug(formatter, "Fn"), - ImplItem::Type(v0) => v0.debug(formatter, "Type"), - ImplItem::Macro(v0) => v0.debug(formatter, "Macro"), - ImplItem::Verbatim(v0) => { + crate::ImplItem::Const(v0) => v0.debug(formatter, "Const"), + crate::ImplItem::Fn(v0) => v0.debug(formatter, "Fn"), + crate::ImplItem::Type(v0) => v0.debug(formatter, "Type"), + crate::ImplItem::Macro(v0) => v0.debug(formatter, "Macro"), + crate::ImplItem::Verbatim(v0) => { let mut formatter = formatter.debug_tuple("Verbatim"); formatter.field(v0); formatter.finish() @@ -1421,124 +1505,128 @@ impl Debug for ImplItem { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ImplItemConst { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ImplItemConst { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ImplItemConst { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - 
formatter.field("attrs", &self.attrs); - formatter.field("vis", &self.vis); - formatter.field("defaultness", &self.defaultness); - formatter.field("const_token", &self.const_token); - formatter.field("ident", &self.ident); - formatter.field("generics", &self.generics); - formatter.field("colon_token", &self.colon_token); - formatter.field("ty", &self.ty); - formatter.field("eq_token", &self.eq_token); - formatter.field("expr", &self.expr); - formatter.field("semi_token", &self.semi_token); - formatter.finish() - } - } self.debug(formatter, "ImplItemConst") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ImplItemFn { - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ImplItemFn { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("vis", &self.vis); - formatter.field("defaultness", &self.defaultness); - formatter.field("sig", &self.sig); - formatter.field("block", &self.block); - formatter.finish() - } - } - self.debug(formatter, "ImplItemFn") +impl crate::ImplItemConst { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("vis", &self.vis); + formatter.field("defaultness", &self.defaultness); + formatter.field("const_token", &self.const_token); + formatter.field("ident", &self.ident); + formatter.field("generics", &self.generics); + formatter.field("colon_token", &self.colon_token); + formatter.field("ty", &self.ty); + formatter.field("eq_token", &self.eq_token); + formatter.field("expr", &self.expr); + formatter.field("semi_token", &self.semi_token); + formatter.finish() } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ImplItemMacro { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ImplItemFn { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ImplItemMacro { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("mac", &self.mac); - formatter.field("semi_token", &self.semi_token); - formatter.finish() - } - } - self.debug(formatter, "ImplItemMacro") + self.debug(formatter, "ImplItemFn") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ImplItemType { - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ImplItemType { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("vis", &self.vis); - formatter.field("defaultness", &self.defaultness); - formatter.field("type_token", &self.type_token); - formatter.field("ident", &self.ident); - formatter.field("generics", &self.generics); - formatter.field("eq_token", &self.eq_token); - formatter.field("ty", &self.ty); - formatter.field("semi_token", &self.semi_token); - formatter.finish() - } - } - self.debug(formatter, "ImplItemType") +impl crate::ImplItemFn { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("vis", 
&self.vis); + formatter.field("defaultness", &self.defaultness); + formatter.field("sig", &self.sig); + formatter.field("block", &self.block); + formatter.finish() } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ImplRestriction { - fn fmt(&self, _formatter: &mut fmt::Formatter) -> fmt::Result { - match *self {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ImplItemMacro { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + self.debug(formatter, "ImplItemMacro") } } -#[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for Index { - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - let mut formatter = formatter.debug_struct("Index"); - formatter.field("index", &self.index); - formatter.field("span", &self.span); +#[cfg(feature = "full")] +impl crate::ImplItemMacro { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("mac", &self.mac); + formatter.field("semi_token", &self.semi_token); formatter.finish() } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for Item { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ImplItemType { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + self.debug(formatter, "ImplItemType") + } +} +#[cfg(feature = "full")] +impl crate::ImplItemType { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("vis", &self.vis); + formatter.field("defaultness", &self.defaultness); + formatter.field("type_token", &self.type_token); + formatter.field("ident", &self.ident); + formatter.field("generics", &self.generics); + formatter.field("eq_token", &self.eq_token); + formatter.field("ty", &self.ty); + formatter.field("semi_token", &self.semi_token); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ImplRestriction { + fn fmt(&self, _formatter: &mut fmt::Formatter) -> fmt::Result { + match *self {} + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::Index { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("Index"); + formatter.field("index", &self.index); + formatter.field("span", &self.span); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::Item { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("Item::")?; match self { - Item::Const(v0) => v0.debug(formatter, "Const"), - Item::Enum(v0) => v0.debug(formatter, "Enum"), - Item::ExternCrate(v0) => v0.debug(formatter, "ExternCrate"), - Item::Fn(v0) => v0.debug(formatter, "Fn"), - Item::ForeignMod(v0) => v0.debug(formatter, "ForeignMod"), - Item::Impl(v0) => v0.debug(formatter, "Impl"), - Item::Macro(v0) => v0.debug(formatter, "Macro"), - Item::Mod(v0) => v0.debug(formatter, "Mod"), - Item::Static(v0) => v0.debug(formatter, "Static"), - Item::Struct(v0) => v0.debug(formatter, "Struct"), - Item::Trait(v0) => v0.debug(formatter, "Trait"), - 
Item::TraitAlias(v0) => v0.debug(formatter, "TraitAlias"), - Item::Type(v0) => v0.debug(formatter, "Type"), - Item::Union(v0) => v0.debug(formatter, "Union"), - Item::Use(v0) => v0.debug(formatter, "Use"), - Item::Verbatim(v0) => { + crate::Item::Const(v0) => v0.debug(formatter, "Const"), + crate::Item::Enum(v0) => v0.debug(formatter, "Enum"), + crate::Item::ExternCrate(v0) => v0.debug(formatter, "ExternCrate"), + crate::Item::Fn(v0) => v0.debug(formatter, "Fn"), + crate::Item::ForeignMod(v0) => v0.debug(formatter, "ForeignMod"), + crate::Item::Impl(v0) => v0.debug(formatter, "Impl"), + crate::Item::Macro(v0) => v0.debug(formatter, "Macro"), + crate::Item::Mod(v0) => v0.debug(formatter, "Mod"), + crate::Item::Static(v0) => v0.debug(formatter, "Static"), + crate::Item::Struct(v0) => v0.debug(formatter, "Struct"), + crate::Item::Trait(v0) => v0.debug(formatter, "Trait"), + crate::Item::TraitAlias(v0) => v0.debug(formatter, "TraitAlias"), + crate::Item::Type(v0) => v0.debug(formatter, "Type"), + crate::Item::Union(v0) => v0.debug(formatter, "Union"), + crate::Item::Use(v0) => v0.debug(formatter, "Use"), + crate::Item::Verbatim(v0) => { let mut formatter = formatter.debug_tuple("Verbatim"); formatter.field(v0); formatter.finish() @@ -1547,313 +1635,328 @@ impl Debug for Item { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ItemConst { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ItemConst { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ItemConst { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("vis", &self.vis); - formatter.field("const_token", &self.const_token); - formatter.field("ident", &self.ident); - formatter.field("generics", &self.generics); - formatter.field("colon_token", &self.colon_token); - formatter.field("ty", &self.ty); - formatter.field("eq_token", &self.eq_token); - formatter.field("expr", &self.expr); - formatter.field("semi_token", &self.semi_token); - formatter.finish() - } - } self.debug(formatter, "ItemConst") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ItemEnum { +impl crate::ItemConst { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("vis", &self.vis); + formatter.field("const_token", &self.const_token); + formatter.field("ident", &self.ident); + formatter.field("generics", &self.generics); + formatter.field("colon_token", &self.colon_token); + formatter.field("ty", &self.ty); + formatter.field("eq_token", &self.eq_token); + formatter.field("expr", &self.expr); + formatter.field("semi_token", &self.semi_token); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ItemEnum { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ItemEnum { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("vis", &self.vis); - formatter.field("enum_token", &self.enum_token); - formatter.field("ident", &self.ident); - formatter.field("generics", &self.generics); - formatter.field("brace_token", 
&self.brace_token); - formatter.field("variants", &self.variants); - formatter.finish() - } - } self.debug(formatter, "ItemEnum") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ItemExternCrate { +impl crate::ItemEnum { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("vis", &self.vis); + formatter.field("enum_token", &self.enum_token); + formatter.field("ident", &self.ident); + formatter.field("generics", &self.generics); + formatter.field("brace_token", &self.brace_token); + formatter.field("variants", &self.variants); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ItemExternCrate { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ItemExternCrate { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("vis", &self.vis); - formatter.field("extern_token", &self.extern_token); - formatter.field("crate_token", &self.crate_token); - formatter.field("ident", &self.ident); - formatter.field("rename", &self.rename); - formatter.field("semi_token", &self.semi_token); - formatter.finish() - } - } self.debug(formatter, "ItemExternCrate") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ItemFn { +impl crate::ItemExternCrate { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("vis", &self.vis); + formatter.field("extern_token", &self.extern_token); + formatter.field("crate_token", &self.crate_token); + formatter.field("ident", &self.ident); + formatter.field("rename", &self.rename); + formatter.field("semi_token", &self.semi_token); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ItemFn { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ItemFn { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("vis", &self.vis); - formatter.field("sig", &self.sig); - formatter.field("block", &self.block); - formatter.finish() - } - } self.debug(formatter, "ItemFn") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ItemForeignMod { +impl crate::ItemFn { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("vis", &self.vis); + formatter.field("sig", &self.sig); + formatter.field("block", &self.block); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ItemForeignMod { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ItemForeignMod { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("unsafety", &self.unsafety); - 
formatter.field("abi", &self.abi); - formatter.field("brace_token", &self.brace_token); - formatter.field("items", &self.items); - formatter.finish() - } - } self.debug(formatter, "ItemForeignMod") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ItemImpl { +impl crate::ItemForeignMod { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("unsafety", &self.unsafety); + formatter.field("abi", &self.abi); + formatter.field("brace_token", &self.brace_token); + formatter.field("items", &self.items); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ItemImpl { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ItemImpl { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("defaultness", &self.defaultness); - formatter.field("unsafety", &self.unsafety); - formatter.field("impl_token", &self.impl_token); - formatter.field("generics", &self.generics); - formatter.field("trait_", &self.trait_); - formatter.field("self_ty", &self.self_ty); - formatter.field("brace_token", &self.brace_token); - formatter.field("items", &self.items); - formatter.finish() - } - } self.debug(formatter, "ItemImpl") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ItemMacro { +impl crate::ItemImpl { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("defaultness", &self.defaultness); + formatter.field("unsafety", &self.unsafety); + formatter.field("impl_token", &self.impl_token); + formatter.field("generics", &self.generics); + formatter.field("trait_", &self.trait_); + formatter.field("self_ty", &self.self_ty); + formatter.field("brace_token", &self.brace_token); + formatter.field("items", &self.items); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ItemMacro { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ItemMacro { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("ident", &self.ident); - formatter.field("mac", &self.mac); - formatter.field("semi_token", &self.semi_token); - formatter.finish() - } - } self.debug(formatter, "ItemMacro") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ItemMod { +impl crate::ItemMacro { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("ident", &self.ident); + formatter.field("mac", &self.mac); + formatter.field("semi_token", &self.semi_token); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ItemMod { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ItemMod { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> 
fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("vis", &self.vis); - formatter.field("unsafety", &self.unsafety); - formatter.field("mod_token", &self.mod_token); - formatter.field("ident", &self.ident); - formatter.field("content", &self.content); - formatter.field("semi", &self.semi); - formatter.finish() - } - } self.debug(formatter, "ItemMod") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ItemStatic { +impl crate::ItemMod { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("vis", &self.vis); + formatter.field("unsafety", &self.unsafety); + formatter.field("mod_token", &self.mod_token); + formatter.field("ident", &self.ident); + formatter.field("content", &self.content); + formatter.field("semi", &self.semi); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ItemStatic { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ItemStatic { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("vis", &self.vis); - formatter.field("static_token", &self.static_token); - formatter.field("mutability", &self.mutability); - formatter.field("ident", &self.ident); - formatter.field("colon_token", &self.colon_token); - formatter.field("ty", &self.ty); - formatter.field("eq_token", &self.eq_token); - formatter.field("expr", &self.expr); - formatter.field("semi_token", &self.semi_token); - formatter.finish() - } - } self.debug(formatter, "ItemStatic") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ItemStruct { +impl crate::ItemStatic { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("vis", &self.vis); + formatter.field("static_token", &self.static_token); + formatter.field("mutability", &self.mutability); + formatter.field("ident", &self.ident); + formatter.field("colon_token", &self.colon_token); + formatter.field("ty", &self.ty); + formatter.field("eq_token", &self.eq_token); + formatter.field("expr", &self.expr); + formatter.field("semi_token", &self.semi_token); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ItemStruct { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ItemStruct { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("vis", &self.vis); - formatter.field("struct_token", &self.struct_token); - formatter.field("ident", &self.ident); - formatter.field("generics", &self.generics); - formatter.field("fields", &self.fields); - formatter.field("semi_token", &self.semi_token); - formatter.finish() - } - } self.debug(formatter, "ItemStruct") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ItemTrait { +impl crate::ItemStruct { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result 
{ + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("vis", &self.vis); + formatter.field("struct_token", &self.struct_token); + formatter.field("ident", &self.ident); + formatter.field("generics", &self.generics); + formatter.field("fields", &self.fields); + formatter.field("semi_token", &self.semi_token); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ItemTrait { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ItemTrait { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("vis", &self.vis); - formatter.field("unsafety", &self.unsafety); - formatter.field("auto_token", &self.auto_token); - formatter.field("restriction", &self.restriction); - formatter.field("trait_token", &self.trait_token); - formatter.field("ident", &self.ident); - formatter.field("generics", &self.generics); - formatter.field("colon_token", &self.colon_token); - formatter.field("supertraits", &self.supertraits); - formatter.field("brace_token", &self.brace_token); - formatter.field("items", &self.items); - formatter.finish() - } - } self.debug(formatter, "ItemTrait") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ItemTraitAlias { +impl crate::ItemTrait { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("vis", &self.vis); + formatter.field("unsafety", &self.unsafety); + formatter.field("auto_token", &self.auto_token); + formatter.field("restriction", &self.restriction); + formatter.field("trait_token", &self.trait_token); + formatter.field("ident", &self.ident); + formatter.field("generics", &self.generics); + formatter.field("colon_token", &self.colon_token); + formatter.field("supertraits", &self.supertraits); + formatter.field("brace_token", &self.brace_token); + formatter.field("items", &self.items); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ItemTraitAlias { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ItemTraitAlias { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("vis", &self.vis); - formatter.field("trait_token", &self.trait_token); - formatter.field("ident", &self.ident); - formatter.field("generics", &self.generics); - formatter.field("eq_token", &self.eq_token); - formatter.field("bounds", &self.bounds); - formatter.field("semi_token", &self.semi_token); - formatter.finish() - } - } self.debug(formatter, "ItemTraitAlias") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ItemType { +impl crate::ItemTraitAlias { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("vis", &self.vis); + formatter.field("trait_token", &self.trait_token); + formatter.field("ident", &self.ident); + formatter.field("generics", &self.generics); + formatter.field("eq_token", &self.eq_token); + 
formatter.field("bounds", &self.bounds); + formatter.field("semi_token", &self.semi_token); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ItemType { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ItemType { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("vis", &self.vis); - formatter.field("type_token", &self.type_token); - formatter.field("ident", &self.ident); - formatter.field("generics", &self.generics); - formatter.field("eq_token", &self.eq_token); - formatter.field("ty", &self.ty); - formatter.field("semi_token", &self.semi_token); - formatter.finish() - } - } self.debug(formatter, "ItemType") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ItemUnion { +impl crate::ItemType { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("vis", &self.vis); + formatter.field("type_token", &self.type_token); + formatter.field("ident", &self.ident); + formatter.field("generics", &self.generics); + formatter.field("eq_token", &self.eq_token); + formatter.field("ty", &self.ty); + formatter.field("semi_token", &self.semi_token); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ItemUnion { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ItemUnion { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("vis", &self.vis); - formatter.field("union_token", &self.union_token); - formatter.field("ident", &self.ident); - formatter.field("generics", &self.generics); - formatter.field("fields", &self.fields); - formatter.finish() - } - } self.debug(formatter, "ItemUnion") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ItemUse { +impl crate::ItemUnion { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("vis", &self.vis); + formatter.field("union_token", &self.union_token); + formatter.field("ident", &self.ident); + formatter.field("generics", &self.generics); + formatter.field("fields", &self.fields); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ItemUse { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ItemUse { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("vis", &self.vis); - formatter.field("use_token", &self.use_token); - formatter.field("leading_colon", &self.leading_colon); - formatter.field("tree", &self.tree); - formatter.field("semi_token", &self.semi_token); - formatter.finish() - } - } self.debug(formatter, "ItemUse") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for Label { +impl crate::ItemUse { + fn debug(&self, formatter: 
&mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("vis", &self.vis); + formatter.field("use_token", &self.use_token); + formatter.field("leading_colon", &self.leading_colon); + formatter.field("tree", &self.tree); + formatter.field("semi_token", &self.semi_token); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::Label { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let mut formatter = formatter.debug_struct("Label"); formatter.field("name", &self.name); @@ -1861,23 +1964,23 @@ impl Debug for Label { formatter.finish() } } -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for Lifetime { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::Lifetime { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl Lifetime { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("apostrophe", &self.apostrophe); - formatter.field("ident", &self.ident); - formatter.finish() - } - } self.debug(formatter, "Lifetime") } } +impl crate::Lifetime { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("apostrophe", &self.apostrophe); + formatter.field("ident", &self.ident); + formatter.finish() + } +} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for LifetimeParam { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::LifetimeParam { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let mut formatter = formatter.debug_struct("LifetimeParam"); formatter.field("attrs", &self.attrs); @@ -1887,19 +1990,20 @@ impl Debug for LifetimeParam { formatter.finish() } } -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for Lit { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::Lit { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("Lit::")?; match self { - Lit::Str(v0) => v0.debug(formatter, "Str"), - Lit::ByteStr(v0) => v0.debug(formatter, "ByteStr"), - Lit::Byte(v0) => v0.debug(formatter, "Byte"), - Lit::Char(v0) => v0.debug(formatter, "Char"), - Lit::Int(v0) => v0.debug(formatter, "Int"), - Lit::Float(v0) => v0.debug(formatter, "Float"), - Lit::Bool(v0) => v0.debug(formatter, "Bool"), - Lit::Verbatim(v0) => { + crate::Lit::Str(v0) => v0.debug(formatter, "Str"), + crate::Lit::ByteStr(v0) => v0.debug(formatter, "ByteStr"), + crate::Lit::CStr(v0) => v0.debug(formatter, "CStr"), + crate::Lit::Byte(v0) => v0.debug(formatter, "Byte"), + crate::Lit::Char(v0) => v0.debug(formatter, "Char"), + crate::Lit::Int(v0) => v0.debug(formatter, "Int"), + crate::Lit::Float(v0) => v0.debug(formatter, "Float"), + crate::Lit::Bool(v0) => v0.debug(formatter, "Bool"), + crate::Lit::Verbatim(v0) => { let mut formatter = formatter.debug_tuple("Verbatim"); formatter.field(v0); formatter.finish() @@ -1908,26 +2012,27 @@ impl Debug for Lit { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for Local { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::Local { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result 
{ - impl Local { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("let_token", &self.let_token); - formatter.field("pat", &self.pat); - formatter.field("init", &self.init); - formatter.field("semi_token", &self.semi_token); - formatter.finish() - } - } self.debug(formatter, "Local") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for LocalInit { +impl crate::Local { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("let_token", &self.let_token); + formatter.field("pat", &self.pat); + formatter.field("init", &self.init); + formatter.field("semi_token", &self.semi_token); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::LocalInit { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let mut formatter = formatter.debug_struct("LocalInit"); formatter.field("eq_token", &self.eq_token); @@ -1937,8 +2042,8 @@ impl Debug for LocalInit { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for Macro { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::Macro { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let mut formatter = formatter.debug_struct("Macro"); formatter.field("path", &self.path); @@ -1949,22 +2054,22 @@ impl Debug for Macro { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for MacroDelimiter { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::MacroDelimiter { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("MacroDelimiter::")?; match self { - MacroDelimiter::Paren(v0) => { + crate::MacroDelimiter::Paren(v0) => { let mut formatter = formatter.debug_tuple("Paren"); formatter.field(v0); formatter.finish() } - MacroDelimiter::Brace(v0) => { + crate::MacroDelimiter::Brace(v0) => { let mut formatter = formatter.debug_tuple("Brace"); formatter.field(v0); formatter.finish() } - MacroDelimiter::Bracket(v0) => { + crate::MacroDelimiter::Bracket(v0) => { let mut formatter = formatter.debug_tuple("Bracket"); formatter.field(v0); formatter.finish() @@ -1973,17 +2078,17 @@ impl Debug for MacroDelimiter { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for Member { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::Member { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("Member::")?; match self { - Member::Named(v0) => { + crate::Member::Named(v0) => { let mut formatter = formatter.debug_tuple("Named"); formatter.field(v0); formatter.finish() } - Member::Unnamed(v0) => { + crate::Member::Unnamed(v0) => { let mut formatter = formatter.debug_tuple("Unnamed"); formatter.field(v0); formatter.finish() @@ -1992,308 +2097,327 @@ impl Debug for Member { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for Meta { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::Meta { fn fmt(&self, formatter: 
&mut fmt::Formatter) -> fmt::Result { formatter.write_str("Meta::")?; match self { - Meta::Path(v0) => v0.debug(formatter, "Path"), - Meta::List(v0) => v0.debug(formatter, "List"), - Meta::NameValue(v0) => v0.debug(formatter, "NameValue"), + crate::Meta::Path(v0) => v0.debug(formatter, "Path"), + crate::Meta::List(v0) => v0.debug(formatter, "List"), + crate::Meta::NameValue(v0) => v0.debug(formatter, "NameValue"), } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for MetaList { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::MetaList { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl MetaList { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("path", &self.path); - formatter.field("delimiter", &self.delimiter); - formatter.field("tokens", &self.tokens); - formatter.finish() - } - } self.debug(formatter, "MetaList") } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for MetaNameValue { +impl crate::MetaList { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("path", &self.path); + formatter.field("delimiter", &self.delimiter); + formatter.field("tokens", &self.tokens); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::MetaNameValue { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl MetaNameValue { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("path", &self.path); - formatter.field("eq_token", &self.eq_token); - formatter.field("value", &self.value); - formatter.finish() - } - } self.debug(formatter, "MetaNameValue") } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ParenthesizedGenericArguments { +impl crate::MetaNameValue { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("path", &self.path); + formatter.field("eq_token", &self.eq_token); + formatter.field("value", &self.value); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ParenthesizedGenericArguments { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl ParenthesizedGenericArguments { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("paren_token", &self.paren_token); - formatter.field("inputs", &self.inputs); - formatter.field("output", &self.output); - formatter.finish() - } - } self.debug(formatter, "ParenthesizedGenericArguments") } } +#[cfg(any(feature = "derive", feature = "full"))] +impl crate::ParenthesizedGenericArguments { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("paren_token", &self.paren_token); + formatter.field("inputs", &self.inputs); + formatter.field("output", &self.output); + 
formatter.finish() + } +} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for Pat { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::Pat { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("Pat::")?; match self { - Pat::Const(v0) => v0.debug(formatter, "Const"), - Pat::Ident(v0) => v0.debug(formatter, "Ident"), - Pat::Lit(v0) => v0.debug(formatter, "Lit"), - Pat::Macro(v0) => v0.debug(formatter, "Macro"), - Pat::Or(v0) => v0.debug(formatter, "Or"), - Pat::Paren(v0) => v0.debug(formatter, "Paren"), - Pat::Path(v0) => v0.debug(formatter, "Path"), - Pat::Range(v0) => v0.debug(formatter, "Range"), - Pat::Reference(v0) => v0.debug(formatter, "Reference"), - Pat::Rest(v0) => v0.debug(formatter, "Rest"), - Pat::Slice(v0) => v0.debug(formatter, "Slice"), - Pat::Struct(v0) => v0.debug(formatter, "Struct"), - Pat::Tuple(v0) => v0.debug(formatter, "Tuple"), - Pat::TupleStruct(v0) => v0.debug(formatter, "TupleStruct"), - Pat::Type(v0) => v0.debug(formatter, "Type"), - Pat::Verbatim(v0) => { + crate::Pat::Const(v0) => v0.debug(formatter, "Const"), + crate::Pat::Ident(v0) => v0.debug(formatter, "Ident"), + crate::Pat::Lit(v0) => v0.debug(formatter, "Lit"), + crate::Pat::Macro(v0) => v0.debug(formatter, "Macro"), + crate::Pat::Or(v0) => v0.debug(formatter, "Or"), + crate::Pat::Paren(v0) => v0.debug(formatter, "Paren"), + crate::Pat::Path(v0) => v0.debug(formatter, "Path"), + crate::Pat::Range(v0) => v0.debug(formatter, "Range"), + crate::Pat::Reference(v0) => v0.debug(formatter, "Reference"), + crate::Pat::Rest(v0) => v0.debug(formatter, "Rest"), + crate::Pat::Slice(v0) => v0.debug(formatter, "Slice"), + crate::Pat::Struct(v0) => v0.debug(formatter, "Struct"), + crate::Pat::Tuple(v0) => v0.debug(formatter, "Tuple"), + crate::Pat::TupleStruct(v0) => v0.debug(formatter, "TupleStruct"), + crate::Pat::Type(v0) => v0.debug(formatter, "Type"), + crate::Pat::Verbatim(v0) => { let mut formatter = formatter.debug_tuple("Verbatim"); formatter.field(v0); formatter.finish() } - Pat::Wild(v0) => v0.debug(formatter, "Wild"), + crate::Pat::Wild(v0) => v0.debug(formatter, "Wild"), } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for PatIdent { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::PatIdent { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl PatIdent { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("by_ref", &self.by_ref); - formatter.field("mutability", &self.mutability); - formatter.field("ident", &self.ident); - formatter.field("subpat", &self.subpat); - formatter.finish() - } - } self.debug(formatter, "PatIdent") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for PatOr { +impl crate::PatIdent { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("by_ref", &self.by_ref); + formatter.field("mutability", &self.mutability); + formatter.field("ident", &self.ident); + formatter.field("subpat", &self.subpat); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::PatOr { fn fmt(&self, formatter: 
&mut fmt::Formatter) -> fmt::Result { - impl PatOr { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("leading_vert", &self.leading_vert); - formatter.field("cases", &self.cases); - formatter.finish() - } - } self.debug(formatter, "PatOr") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for PatParen { +impl crate::PatOr { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("leading_vert", &self.leading_vert); + formatter.field("cases", &self.cases); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::PatParen { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl PatParen { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("paren_token", &self.paren_token); - formatter.field("pat", &self.pat); - formatter.finish() - } - } self.debug(formatter, "PatParen") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for PatReference { +impl crate::PatParen { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("paren_token", &self.paren_token); + formatter.field("pat", &self.pat); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::PatReference { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl PatReference { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("and_token", &self.and_token); - formatter.field("mutability", &self.mutability); - formatter.field("pat", &self.pat); - formatter.finish() - } - } self.debug(formatter, "PatReference") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for PatRest { +impl crate::PatReference { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("and_token", &self.and_token); + formatter.field("mutability", &self.mutability); + formatter.field("pat", &self.pat); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::PatRest { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl PatRest { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("dot2_token", &self.dot2_token); - formatter.finish() - } - } self.debug(formatter, "PatRest") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for PatSlice { +impl crate::PatRest { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = 
formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("dot2_token", &self.dot2_token); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::PatSlice { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl PatSlice { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("bracket_token", &self.bracket_token); - formatter.field("elems", &self.elems); - formatter.finish() - } - } self.debug(formatter, "PatSlice") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for PatStruct { +impl crate::PatSlice { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("bracket_token", &self.bracket_token); + formatter.field("elems", &self.elems); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::PatStruct { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl PatStruct { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("qself", &self.qself); - formatter.field("path", &self.path); - formatter.field("brace_token", &self.brace_token); - formatter.field("fields", &self.fields); - formatter.field("rest", &self.rest); - formatter.finish() - } - } self.debug(formatter, "PatStruct") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for PatTuple { +impl crate::PatStruct { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("qself", &self.qself); + formatter.field("path", &self.path); + formatter.field("brace_token", &self.brace_token); + formatter.field("fields", &self.fields); + formatter.field("rest", &self.rest); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::PatTuple { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl PatTuple { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("paren_token", &self.paren_token); - formatter.field("elems", &self.elems); - formatter.finish() - } - } self.debug(formatter, "PatTuple") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for PatTupleStruct { +impl crate::PatTuple { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("paren_token", &self.paren_token); + formatter.field("elems", &self.elems); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::PatTupleStruct { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl PatTupleStruct { - fn debug(&self, formatter: &mut fmt::Formatter, name: 
&str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("qself", &self.qself); - formatter.field("path", &self.path); - formatter.field("paren_token", &self.paren_token); - formatter.field("elems", &self.elems); - formatter.finish() - } - } self.debug(formatter, "PatTupleStruct") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for PatType { +impl crate::PatTupleStruct { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("qself", &self.qself); + formatter.field("path", &self.path); + formatter.field("paren_token", &self.paren_token); + formatter.field("elems", &self.elems); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::PatType { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl PatType { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("pat", &self.pat); - formatter.field("colon_token", &self.colon_token); - formatter.field("ty", &self.ty); - formatter.finish() - } - } self.debug(formatter, "PatType") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for PatWild { +impl crate::PatType { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("pat", &self.pat); + formatter.field("colon_token", &self.colon_token); + formatter.field("ty", &self.ty); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::PatWild { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl PatWild { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("underscore_token", &self.underscore_token); - formatter.finish() - } - } self.debug(formatter, "PatWild") } } +#[cfg(feature = "full")] +impl crate::PatWild { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("underscore_token", &self.underscore_token); + formatter.finish() + } +} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for Path { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::Path { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl Path { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("leading_colon", &self.leading_colon); - formatter.field("segments", &self.segments); - formatter.finish() - } - } self.debug(formatter, "Path") } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for PathArguments { +impl crate::Path { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = 
formatter.debug_struct(name); + formatter.field("leading_colon", &self.leading_colon); + formatter.field("segments", &self.segments); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::PathArguments { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("PathArguments::")?; match self { - PathArguments::None => formatter.write_str("None"), - PathArguments::AngleBracketed(v0) => v0.debug(formatter, "AngleBracketed"), - PathArguments::Parenthesized(v0) => v0.debug(formatter, "Parenthesized"), + crate::PathArguments::None => formatter.write_str("None"), + crate::PathArguments::AngleBracketed(v0) => { + v0.debug(formatter, "AngleBracketed") + } + crate::PathArguments::Parenthesized(v0) => { + v0.debug(formatter, "Parenthesized") + } } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for PathSegment { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::PathSegment { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let mut formatter = formatter.debug_struct("PathSegment"); formatter.field("ident", &self.ident); @@ -2301,9 +2425,40 @@ impl Debug for PathSegment { formatter.finish() } } +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::PointerMutability { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("PointerMutability::")?; + match self { + crate::PointerMutability::Const(v0) => { + let mut formatter = formatter.debug_tuple("Const"); + formatter.field(v0); + formatter.finish() + } + crate::PointerMutability::Mut(v0) => { + let mut formatter = formatter.debug_tuple("Mut"); + formatter.field(v0); + formatter.finish() + } + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::PreciseCapture { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("PreciseCapture"); + formatter.field("use_token", &self.use_token); + formatter.field("lt_token", &self.lt_token); + formatter.field("params", &self.params); + formatter.field("gt_token", &self.gt_token); + formatter.finish() + } +} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for PredicateLifetime { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::PredicateLifetime { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let mut formatter = formatter.debug_struct("PredicateLifetime"); formatter.field("lifetime", &self.lifetime); @@ -2313,8 +2468,8 @@ impl Debug for PredicateLifetime { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for PredicateType { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::PredicateType { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let mut formatter = formatter.debug_struct("PredicateType"); formatter.field("lifetimes", &self.lifetimes); @@ -2325,8 +2480,8 @@ impl Debug for PredicateType { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for QSelf { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::QSelf { fn fmt(&self, formatter: 
&mut fmt::Formatter) -> fmt::Result { let mut formatter = formatter.debug_struct("QSelf"); formatter.field("lt_token", &self.lt_token); @@ -2338,17 +2493,17 @@ impl Debug for QSelf { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for RangeLimits { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::RangeLimits { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("RangeLimits::")?; match self { - RangeLimits::HalfOpen(v0) => { + crate::RangeLimits::HalfOpen(v0) => { let mut formatter = formatter.debug_tuple("HalfOpen"); formatter.field(v0); formatter.finish() } - RangeLimits::Closed(v0) => { + crate::RangeLimits::Closed(v0) => { let mut formatter = formatter.debug_tuple("Closed"); formatter.field(v0); formatter.finish() @@ -2357,8 +2512,8 @@ impl Debug for RangeLimits { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for Receiver { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::Receiver { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let mut formatter = formatter.debug_struct("Receiver"); formatter.field("attrs", &self.attrs); @@ -2371,13 +2526,13 @@ impl Debug for Receiver { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for ReturnType { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::ReturnType { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("ReturnType::")?; match self { - ReturnType::Default => formatter.write_str("Default"), - ReturnType::Type(v0, v1) => { + crate::ReturnType::Default => formatter.write_str("Default"), + crate::ReturnType::Type(v0, v1) => { let mut formatter = formatter.debug_tuple("Type"); formatter.field(v0); formatter.field(v1); @@ -2387,8 +2542,8 @@ impl Debug for ReturnType { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for Signature { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::Signature { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let mut formatter = formatter.debug_struct("Signature"); formatter.field("constness", &self.constness); @@ -2406,61 +2561,62 @@ impl Debug for Signature { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for StaticMutability { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::StaticMutability { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("StaticMutability::")?; match self { - StaticMutability::Mut(v0) => { + crate::StaticMutability::Mut(v0) => { let mut formatter = formatter.debug_tuple("Mut"); formatter.field(v0); formatter.finish() } - StaticMutability::None => formatter.write_str("None"), + crate::StaticMutability::None => formatter.write_str("None"), } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for Stmt { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::Stmt { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("Stmt::")?; match self { - Stmt::Local(v0) => v0.debug(formatter, "Local"), - Stmt::Item(v0) => { + crate::Stmt::Local(v0) => v0.debug(formatter, "Local"), + crate::Stmt::Item(v0) => { let mut formatter = 
formatter.debug_tuple("Item"); formatter.field(v0); formatter.finish() } - Stmt::Expr(v0, v1) => { + crate::Stmt::Expr(v0, v1) => { let mut formatter = formatter.debug_tuple("Expr"); formatter.field(v0); formatter.field(v1); formatter.finish() } - Stmt::Macro(v0) => v0.debug(formatter, "Macro"), + crate::Stmt::Macro(v0) => v0.debug(formatter, "Macro"), } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for StmtMacro { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::StmtMacro { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl StmtMacro { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("mac", &self.mac); - formatter.field("semi_token", &self.semi_token); - formatter.finish() - } - } self.debug(formatter, "StmtMacro") } } +#[cfg(feature = "full")] +impl crate::StmtMacro { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("mac", &self.mac); + formatter.field("semi_token", &self.semi_token); + formatter.finish() + } +} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for TraitBound { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::TraitBound { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let mut formatter = formatter.debug_struct("TraitBound"); formatter.field("paren_token", &self.paren_token); @@ -2471,13 +2627,13 @@ impl Debug for TraitBound { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for TraitBoundModifier { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::TraitBoundModifier { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("TraitBoundModifier::")?; match self { - TraitBoundModifier::None => formatter.write_str("None"), - TraitBoundModifier::Maybe(v0) => { + crate::TraitBoundModifier::None => formatter.write_str("None"), + crate::TraitBoundModifier::Maybe(v0) => { let mut formatter = formatter.debug_tuple("Maybe"); formatter.field(v0); formatter.finish() @@ -2486,16 +2642,16 @@ impl Debug for TraitBoundModifier { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for TraitItem { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::TraitItem { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("TraitItem::")?; match self { - TraitItem::Const(v0) => v0.debug(formatter, "Const"), - TraitItem::Fn(v0) => v0.debug(formatter, "Fn"), - TraitItem::Type(v0) => v0.debug(formatter, "Type"), - TraitItem::Macro(v0) => v0.debug(formatter, "Macro"), - TraitItem::Verbatim(v0) => { + crate::TraitItem::Const(v0) => v0.debug(formatter, "Const"), + crate::TraitItem::Fn(v0) => v0.debug(formatter, "Fn"), + crate::TraitItem::Type(v0) => v0.debug(formatter, "Type"), + crate::TraitItem::Macro(v0) => v0.debug(formatter, "Macro"), + crate::TraitItem::Verbatim(v0) => { let mut formatter = formatter.debug_tuple("Verbatim"); formatter.field(v0); formatter.finish() @@ -2504,101 +2660,105 @@ impl Debug for TraitItem { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, 
doc(cfg(feature = "extra-traits")))] -impl Debug for TraitItemConst { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::TraitItemConst { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl TraitItemConst { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("const_token", &self.const_token); - formatter.field("ident", &self.ident); - formatter.field("generics", &self.generics); - formatter.field("colon_token", &self.colon_token); - formatter.field("ty", &self.ty); - formatter.field("default", &self.default); - formatter.field("semi_token", &self.semi_token); - formatter.finish() - } - } self.debug(formatter, "TraitItemConst") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for TraitItemFn { +impl crate::TraitItemConst { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("const_token", &self.const_token); + formatter.field("ident", &self.ident); + formatter.field("generics", &self.generics); + formatter.field("colon_token", &self.colon_token); + formatter.field("ty", &self.ty); + formatter.field("default", &self.default); + formatter.field("semi_token", &self.semi_token); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::TraitItemFn { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl TraitItemFn { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("sig", &self.sig); - formatter.field("default", &self.default); - formatter.field("semi_token", &self.semi_token); - formatter.finish() - } - } self.debug(formatter, "TraitItemFn") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for TraitItemMacro { +impl crate::TraitItemFn { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("sig", &self.sig); + formatter.field("default", &self.default); + formatter.field("semi_token", &self.semi_token); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::TraitItemMacro { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl TraitItemMacro { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("mac", &self.mac); - formatter.field("semi_token", &self.semi_token); - formatter.finish() - } - } self.debug(formatter, "TraitItemMacro") } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for TraitItemType { +impl crate::TraitItemMacro { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("mac", &self.mac); + formatter.field("semi_token", &self.semi_token); + formatter.finish() + } +} +#[cfg(feature = "full")] 
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::TraitItemType { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl TraitItemType { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("attrs", &self.attrs); - formatter.field("type_token", &self.type_token); - formatter.field("ident", &self.ident); - formatter.field("generics", &self.generics); - formatter.field("colon_token", &self.colon_token); - formatter.field("bounds", &self.bounds); - formatter.field("default", &self.default); - formatter.field("semi_token", &self.semi_token); - formatter.finish() - } - } self.debug(formatter, "TraitItemType") } } +#[cfg(feature = "full")] +impl crate::TraitItemType { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("attrs", &self.attrs); + formatter.field("type_token", &self.type_token); + formatter.field("ident", &self.ident); + formatter.field("generics", &self.generics); + formatter.field("colon_token", &self.colon_token); + formatter.field("bounds", &self.bounds); + formatter.field("default", &self.default); + formatter.field("semi_token", &self.semi_token); + formatter.finish() + } +} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for Type { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::Type { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("Type::")?; match self { - Type::Array(v0) => v0.debug(formatter, "Array"), - Type::BareFn(v0) => v0.debug(formatter, "BareFn"), - Type::Group(v0) => v0.debug(formatter, "Group"), - Type::ImplTrait(v0) => v0.debug(formatter, "ImplTrait"), - Type::Infer(v0) => v0.debug(formatter, "Infer"), - Type::Macro(v0) => v0.debug(formatter, "Macro"), - Type::Never(v0) => v0.debug(formatter, "Never"), - Type::Paren(v0) => v0.debug(formatter, "Paren"), - Type::Path(v0) => v0.debug(formatter, "Path"), - Type::Ptr(v0) => v0.debug(formatter, "Ptr"), - Type::Reference(v0) => v0.debug(formatter, "Reference"), - Type::Slice(v0) => v0.debug(formatter, "Slice"), - Type::TraitObject(v0) => v0.debug(formatter, "TraitObject"), - Type::Tuple(v0) => v0.debug(formatter, "Tuple"), - Type::Verbatim(v0) => { + crate::Type::Array(v0) => v0.debug(formatter, "Array"), + crate::Type::BareFn(v0) => v0.debug(formatter, "BareFn"), + crate::Type::Group(v0) => v0.debug(formatter, "Group"), + crate::Type::ImplTrait(v0) => v0.debug(formatter, "ImplTrait"), + crate::Type::Infer(v0) => v0.debug(formatter, "Infer"), + crate::Type::Macro(v0) => v0.debug(formatter, "Macro"), + crate::Type::Never(v0) => v0.debug(formatter, "Never"), + crate::Type::Paren(v0) => v0.debug(formatter, "Paren"), + crate::Type::Path(v0) => v0.debug(formatter, "Path"), + crate::Type::Ptr(v0) => v0.debug(formatter, "Ptr"), + crate::Type::Reference(v0) => v0.debug(formatter, "Reference"), + crate::Type::Slice(v0) => v0.debug(formatter, "Slice"), + crate::Type::TraitObject(v0) => v0.debug(formatter, "TraitObject"), + crate::Type::Tuple(v0) => v0.debug(formatter, "Tuple"), + crate::Type::Verbatim(v0) => { let mut formatter = formatter.debug_tuple("Verbatim"); formatter.field(v0); formatter.finish() @@ -2607,118 +2767,125 @@ impl Debug for Type { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = 
"extra-traits")))] -impl Debug for TypeArray { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::TypeArray { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl TypeArray { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("bracket_token", &self.bracket_token); - formatter.field("elem", &self.elem); - formatter.field("semi_token", &self.semi_token); - formatter.field("len", &self.len); - formatter.finish() - } - } self.debug(formatter, "TypeArray") } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for TypeBareFn { +impl crate::TypeArray { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("bracket_token", &self.bracket_token); + formatter.field("elem", &self.elem); + formatter.field("semi_token", &self.semi_token); + formatter.field("len", &self.len); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::TypeBareFn { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl TypeBareFn { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("lifetimes", &self.lifetimes); - formatter.field("unsafety", &self.unsafety); - formatter.field("abi", &self.abi); - formatter.field("fn_token", &self.fn_token); - formatter.field("paren_token", &self.paren_token); - formatter.field("inputs", &self.inputs); - formatter.field("variadic", &self.variadic); - formatter.field("output", &self.output); - formatter.finish() - } - } self.debug(formatter, "TypeBareFn") } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for TypeGroup { +impl crate::TypeBareFn { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("lifetimes", &self.lifetimes); + formatter.field("unsafety", &self.unsafety); + formatter.field("abi", &self.abi); + formatter.field("fn_token", &self.fn_token); + formatter.field("paren_token", &self.paren_token); + formatter.field("inputs", &self.inputs); + formatter.field("variadic", &self.variadic); + formatter.field("output", &self.output); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::TypeGroup { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl TypeGroup { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("group_token", &self.group_token); - formatter.field("elem", &self.elem); - formatter.finish() - } - } self.debug(formatter, "TypeGroup") } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for TypeImplTrait { +impl crate::TypeGroup { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("group_token", &self.group_token); + formatter.field("elem", &self.elem); + formatter.finish() + } +} +#[cfg(any(feature = "derive", 
feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::TypeImplTrait { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl TypeImplTrait { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("impl_token", &self.impl_token); - formatter.field("bounds", &self.bounds); - formatter.finish() - } - } self.debug(formatter, "TypeImplTrait") } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for TypeInfer { +impl crate::TypeImplTrait { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("impl_token", &self.impl_token); + formatter.field("bounds", &self.bounds); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::TypeInfer { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl TypeInfer { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("underscore_token", &self.underscore_token); - formatter.finish() - } - } self.debug(formatter, "TypeInfer") } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for TypeMacro { +impl crate::TypeInfer { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("underscore_token", &self.underscore_token); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::TypeMacro { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl TypeMacro { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("mac", &self.mac); - formatter.finish() - } - } self.debug(formatter, "TypeMacro") } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for TypeNever { +impl crate::TypeMacro { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("mac", &self.mac); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::TypeNever { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl TypeNever { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("bang_token", &self.bang_token); - formatter.finish() - } - } self.debug(formatter, "TypeNever") } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for TypeParam { +impl crate::TypeNever { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("bang_token", &self.bang_token); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = 
"extra-traits")))] +impl Debug for crate::TypeParam { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let mut formatter = formatter.debug_struct("TypeParam"); formatter.field("attrs", &self.attrs); @@ -2731,151 +2898,166 @@ impl Debug for TypeParam { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for TypeParamBound { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::TypeParamBound { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("TypeParamBound::")?; match self { - TypeParamBound::Trait(v0) => { + crate::TypeParamBound::Trait(v0) => { let mut formatter = formatter.debug_tuple("Trait"); formatter.field(v0); formatter.finish() } - TypeParamBound::Lifetime(v0) => v0.debug(formatter, "Lifetime"), - TypeParamBound::Verbatim(v0) => { + crate::TypeParamBound::Lifetime(v0) => v0.debug(formatter, "Lifetime"), + #[cfg(feature = "full")] + crate::TypeParamBound::PreciseCapture(v0) => { + let mut formatter = formatter.debug_tuple("PreciseCapture"); + formatter.field(v0); + formatter.finish() + } + crate::TypeParamBound::Verbatim(v0) => { let mut formatter = formatter.debug_tuple("Verbatim"); formatter.field(v0); formatter.finish() } + #[cfg(not(feature = "full"))] + _ => unreachable!(), } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for TypeParen { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::TypeParen { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl TypeParen { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("paren_token", &self.paren_token); - formatter.field("elem", &self.elem); - formatter.finish() - } - } self.debug(formatter, "TypeParen") } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for TypePath { +impl crate::TypeParen { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("paren_token", &self.paren_token); + formatter.field("elem", &self.elem); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::TypePath { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl TypePath { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("qself", &self.qself); - formatter.field("path", &self.path); - formatter.finish() - } - } self.debug(formatter, "TypePath") } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for TypePtr { +impl crate::TypePath { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("qself", &self.qself); + formatter.field("path", &self.path); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::TypePtr { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl TypePtr { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> 
fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("star_token", &self.star_token); - formatter.field("const_token", &self.const_token); - formatter.field("mutability", &self.mutability); - formatter.field("elem", &self.elem); - formatter.finish() - } - } self.debug(formatter, "TypePtr") } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for TypeReference { +impl crate::TypePtr { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("star_token", &self.star_token); + formatter.field("const_token", &self.const_token); + formatter.field("mutability", &self.mutability); + formatter.field("elem", &self.elem); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::TypeReference { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl TypeReference { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("and_token", &self.and_token); - formatter.field("lifetime", &self.lifetime); - formatter.field("mutability", &self.mutability); - formatter.field("elem", &self.elem); - formatter.finish() - } - } self.debug(formatter, "TypeReference") } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for TypeSlice { +impl crate::TypeReference { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("and_token", &self.and_token); + formatter.field("lifetime", &self.lifetime); + formatter.field("mutability", &self.mutability); + formatter.field("elem", &self.elem); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::TypeSlice { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl TypeSlice { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("bracket_token", &self.bracket_token); - formatter.field("elem", &self.elem); - formatter.finish() - } - } self.debug(formatter, "TypeSlice") } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for TypeTraitObject { +impl crate::TypeSlice { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("bracket_token", &self.bracket_token); + formatter.field("elem", &self.elem); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::TypeTraitObject { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl TypeTraitObject { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("dyn_token", &self.dyn_token); - formatter.field("bounds", &self.bounds); - formatter.finish() - } - } self.debug(formatter, "TypeTraitObject") } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = 
"extra-traits")))] -impl Debug for TypeTuple { +impl crate::TypeTraitObject { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("dyn_token", &self.dyn_token); + formatter.field("bounds", &self.bounds); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::TypeTuple { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl TypeTuple { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("paren_token", &self.paren_token); - formatter.field("elems", &self.elems); - formatter.finish() - } - } self.debug(formatter, "TypeTuple") } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for UnOp { +impl crate::TypeTuple { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("paren_token", &self.paren_token); + formatter.field("elems", &self.elems); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::UnOp { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("UnOp::")?; match self { - UnOp::Deref(v0) => { + crate::UnOp::Deref(v0) => { let mut formatter = formatter.debug_tuple("Deref"); formatter.field(v0); formatter.finish() } - UnOp::Not(v0) => { + crate::UnOp::Not(v0) => { let mut formatter = formatter.debug_tuple("Not"); formatter.field(v0); formatter.finish() } - UnOp::Neg(v0) => { + crate::UnOp::Neg(v0) => { let mut formatter = formatter.debug_tuple("Neg"); formatter.field(v0); formatter.finish() @@ -2884,8 +3066,8 @@ impl Debug for UnOp { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for UseGlob { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::UseGlob { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let mut formatter = formatter.debug_struct("UseGlob"); formatter.field("star_token", &self.star_token); @@ -2893,8 +3075,8 @@ impl Debug for UseGlob { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for UseGroup { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::UseGroup { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let mut formatter = formatter.debug_struct("UseGroup"); formatter.field("brace_token", &self.brace_token); @@ -2903,8 +3085,8 @@ impl Debug for UseGroup { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for UseName { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::UseName { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let mut formatter = formatter.debug_struct("UseName"); formatter.field("ident", &self.ident); @@ -2912,8 +3094,8 @@ impl Debug for UseName { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for UsePath { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::UsePath { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let mut formatter = formatter.debug_struct("UsePath"); 
formatter.field("ident", &self.ident); @@ -2923,8 +3105,8 @@ impl Debug for UsePath { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for UseRename { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::UseRename { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let mut formatter = formatter.debug_struct("UseRename"); formatter.field("ident", &self.ident); @@ -2934,32 +3116,32 @@ impl Debug for UseRename { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for UseTree { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::UseTree { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("UseTree::")?; match self { - UseTree::Path(v0) => { + crate::UseTree::Path(v0) => { let mut formatter = formatter.debug_tuple("Path"); formatter.field(v0); formatter.finish() } - UseTree::Name(v0) => { + crate::UseTree::Name(v0) => { let mut formatter = formatter.debug_tuple("Name"); formatter.field(v0); formatter.finish() } - UseTree::Rename(v0) => { + crate::UseTree::Rename(v0) => { let mut formatter = formatter.debug_tuple("Rename"); formatter.field(v0); formatter.finish() } - UseTree::Glob(v0) => { + crate::UseTree::Glob(v0) => { let mut formatter = formatter.debug_tuple("Glob"); formatter.field(v0); formatter.finish() } - UseTree::Group(v0) => { + crate::UseTree::Group(v0) => { let mut formatter = formatter.debug_tuple("Group"); formatter.field(v0); formatter.finish() @@ -2968,8 +3150,8 @@ impl Debug for UseTree { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for Variadic { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::Variadic { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let mut formatter = formatter.debug_struct("Variadic"); formatter.field("attrs", &self.attrs); @@ -2980,8 +3162,8 @@ impl Debug for Variadic { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for Variant { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::Variant { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let mut formatter = formatter.debug_struct("Variant"); formatter.field("attrs", &self.attrs); @@ -2992,41 +3174,42 @@ impl Debug for Variant { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for VisRestricted { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::VisRestricted { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl VisRestricted { - fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { - let mut formatter = formatter.debug_struct(name); - formatter.field("pub_token", &self.pub_token); - formatter.field("paren_token", &self.paren_token); - formatter.field("in_token", &self.in_token); - formatter.field("path", &self.path); - formatter.finish() - } - } self.debug(formatter, "VisRestricted") } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for Visibility { +impl crate::VisRestricted { + fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + let mut formatter = formatter.debug_struct(name); + formatter.field("pub_token", &self.pub_token); + 
formatter.field("paren_token", &self.paren_token); + formatter.field("in_token", &self.in_token); + formatter.field("path", &self.path); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::Visibility { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("Visibility::")?; match self { - Visibility::Public(v0) => { + crate::Visibility::Public(v0) => { let mut formatter = formatter.debug_tuple("Public"); formatter.field(v0); formatter.finish() } - Visibility::Restricted(v0) => v0.debug(formatter, "Restricted"), - Visibility::Inherited => formatter.write_str("Inherited"), + crate::Visibility::Restricted(v0) => v0.debug(formatter, "Restricted"), + crate::Visibility::Inherited => formatter.write_str("Inherited"), } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for WhereClause { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::WhereClause { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let mut formatter = formatter.debug_struct("WhereClause"); formatter.field("where_token", &self.where_token); @@ -3035,17 +3218,17 @@ impl Debug for WhereClause { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Debug for WherePredicate { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Debug for crate::WherePredicate { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("WherePredicate::")?; match self { - WherePredicate::Lifetime(v0) => { + crate::WherePredicate::Lifetime(v0) => { let mut formatter = formatter.debug_tuple("Lifetime"); formatter.field(v0); formatter.finish() } - WherePredicate::Type(v0) => { + crate::WherePredicate::Type(v0) => { let mut formatter = formatter.debug_tuple("Type"); formatter.field(v0); formatter.finish() diff --git a/vendor/syn/src/gen/eq.rs b/vendor/syn/src/gen/eq.rs index a7479c30..128e8991 100644 --- a/vendor/syn/src/gen/eq.rs +++ b/vendor/syn/src/gen/eq.rs @@ -3,421 +3,451 @@ #[cfg(any(feature = "derive", feature = "full"))] use crate::tt::TokenStreamHelper; -use crate::*; #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for Abi {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::Abi {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for Abi { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::Abi { fn eq(&self, other: &Self) -> bool { self.name == other.name } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for AngleBracketedGenericArguments {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::AngleBracketedGenericArguments {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for AngleBracketedGenericArguments { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::AngleBracketedGenericArguments { fn eq(&self, other: &Self) -> bool { self.colon2_token == other.colon2_token && self.args == other.args } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for Arm {} 
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::Arm {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for Arm { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::Arm { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.pat == other.pat && self.guard == other.guard && self.body == other.body && self.comma == other.comma } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for AssocConst {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::AssocConst {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for AssocConst { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::AssocConst { fn eq(&self, other: &Self) -> bool { self.ident == other.ident && self.generics == other.generics && self.value == other.value } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for AssocType {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::AssocType {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for AssocType { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::AssocType { fn eq(&self, other: &Self) -> bool { self.ident == other.ident && self.generics == other.generics && self.ty == other.ty } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for AttrStyle {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::AttrStyle {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for AttrStyle { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::AttrStyle { fn eq(&self, other: &Self) -> bool { match (self, other) { - (AttrStyle::Outer, AttrStyle::Outer) => true, - (AttrStyle::Inner(_), AttrStyle::Inner(_)) => true, + (crate::AttrStyle::Outer, crate::AttrStyle::Outer) => true, + (crate::AttrStyle::Inner(_), crate::AttrStyle::Inner(_)) => true, _ => false, } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for Attribute {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::Attribute {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for Attribute { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::Attribute { fn eq(&self, other: &Self) -> bool { self.style == other.style && self.meta == other.meta } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for BareFnArg {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::BareFnArg {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for BareFnArg { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::BareFnArg { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.name == other.name && self.ty == other.ty } } 
#[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for BareVariadic {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::BareVariadic {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for BareVariadic { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::BareVariadic { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.name == other.name && self.comma == other.comma } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for BinOp {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::BinOp {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for BinOp { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::BinOp { fn eq(&self, other: &Self) -> bool { match (self, other) { - (BinOp::Add(_), BinOp::Add(_)) => true, - (BinOp::Sub(_), BinOp::Sub(_)) => true, - (BinOp::Mul(_), BinOp::Mul(_)) => true, - (BinOp::Div(_), BinOp::Div(_)) => true, - (BinOp::Rem(_), BinOp::Rem(_)) => true, - (BinOp::And(_), BinOp::And(_)) => true, - (BinOp::Or(_), BinOp::Or(_)) => true, - (BinOp::BitXor(_), BinOp::BitXor(_)) => true, - (BinOp::BitAnd(_), BinOp::BitAnd(_)) => true, - (BinOp::BitOr(_), BinOp::BitOr(_)) => true, - (BinOp::Shl(_), BinOp::Shl(_)) => true, - (BinOp::Shr(_), BinOp::Shr(_)) => true, - (BinOp::Eq(_), BinOp::Eq(_)) => true, - (BinOp::Lt(_), BinOp::Lt(_)) => true, - (BinOp::Le(_), BinOp::Le(_)) => true, - (BinOp::Ne(_), BinOp::Ne(_)) => true, - (BinOp::Ge(_), BinOp::Ge(_)) => true, - (BinOp::Gt(_), BinOp::Gt(_)) => true, - (BinOp::AddAssign(_), BinOp::AddAssign(_)) => true, - (BinOp::SubAssign(_), BinOp::SubAssign(_)) => true, - (BinOp::MulAssign(_), BinOp::MulAssign(_)) => true, - (BinOp::DivAssign(_), BinOp::DivAssign(_)) => true, - (BinOp::RemAssign(_), BinOp::RemAssign(_)) => true, - (BinOp::BitXorAssign(_), BinOp::BitXorAssign(_)) => true, - (BinOp::BitAndAssign(_), BinOp::BitAndAssign(_)) => true, - (BinOp::BitOrAssign(_), BinOp::BitOrAssign(_)) => true, - (BinOp::ShlAssign(_), BinOp::ShlAssign(_)) => true, - (BinOp::ShrAssign(_), BinOp::ShrAssign(_)) => true, + (crate::BinOp::Add(_), crate::BinOp::Add(_)) => true, + (crate::BinOp::Sub(_), crate::BinOp::Sub(_)) => true, + (crate::BinOp::Mul(_), crate::BinOp::Mul(_)) => true, + (crate::BinOp::Div(_), crate::BinOp::Div(_)) => true, + (crate::BinOp::Rem(_), crate::BinOp::Rem(_)) => true, + (crate::BinOp::And(_), crate::BinOp::And(_)) => true, + (crate::BinOp::Or(_), crate::BinOp::Or(_)) => true, + (crate::BinOp::BitXor(_), crate::BinOp::BitXor(_)) => true, + (crate::BinOp::BitAnd(_), crate::BinOp::BitAnd(_)) => true, + (crate::BinOp::BitOr(_), crate::BinOp::BitOr(_)) => true, + (crate::BinOp::Shl(_), crate::BinOp::Shl(_)) => true, + (crate::BinOp::Shr(_), crate::BinOp::Shr(_)) => true, + (crate::BinOp::Eq(_), crate::BinOp::Eq(_)) => true, + (crate::BinOp::Lt(_), crate::BinOp::Lt(_)) => true, + (crate::BinOp::Le(_), crate::BinOp::Le(_)) => true, + (crate::BinOp::Ne(_), crate::BinOp::Ne(_)) => true, + (crate::BinOp::Ge(_), crate::BinOp::Ge(_)) => true, + (crate::BinOp::Gt(_), crate::BinOp::Gt(_)) => true, + (crate::BinOp::AddAssign(_), crate::BinOp::AddAssign(_)) => true, + (crate::BinOp::SubAssign(_), crate::BinOp::SubAssign(_)) => 
true, + (crate::BinOp::MulAssign(_), crate::BinOp::MulAssign(_)) => true, + (crate::BinOp::DivAssign(_), crate::BinOp::DivAssign(_)) => true, + (crate::BinOp::RemAssign(_), crate::BinOp::RemAssign(_)) => true, + (crate::BinOp::BitXorAssign(_), crate::BinOp::BitXorAssign(_)) => true, + (crate::BinOp::BitAndAssign(_), crate::BinOp::BitAndAssign(_)) => true, + (crate::BinOp::BitOrAssign(_), crate::BinOp::BitOrAssign(_)) => true, + (crate::BinOp::ShlAssign(_), crate::BinOp::ShlAssign(_)) => true, + (crate::BinOp::ShrAssign(_), crate::BinOp::ShrAssign(_)) => true, _ => false, } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for Block {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::Block {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for Block { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::Block { fn eq(&self, other: &Self) -> bool { self.stmts == other.stmts } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for BoundLifetimes {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::BoundLifetimes {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for BoundLifetimes { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::BoundLifetimes { fn eq(&self, other: &Self) -> bool { self.lifetimes == other.lifetimes } } +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::CapturedParam {} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::CapturedParam { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + ( + crate::CapturedParam::Lifetime(self0), + crate::CapturedParam::Lifetime(other0), + ) => self0 == other0, + (crate::CapturedParam::Ident(self0), crate::CapturedParam::Ident(other0)) => { + self0 == other0 + } + _ => false, + } + } +} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ConstParam {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ConstParam {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ConstParam { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ConstParam { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.ident == other.ident && self.ty == other.ty && self.eq_token == other.eq_token && self.default == other.default } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for Constraint {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::Constraint {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for Constraint { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::Constraint { fn eq(&self, other: &Self) -> bool { self.ident == other.ident && self.generics == other.generics && self.bounds == other.bounds } } #[cfg(feature = "derive")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for Data {} +#[cfg_attr(docsrs, doc(cfg(feature = 
"extra-traits")))] +impl Eq for crate::Data {} #[cfg(feature = "derive")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for Data { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::Data { fn eq(&self, other: &Self) -> bool { match (self, other) { - (Data::Struct(self0), Data::Struct(other0)) => self0 == other0, - (Data::Enum(self0), Data::Enum(other0)) => self0 == other0, - (Data::Union(self0), Data::Union(other0)) => self0 == other0, + (crate::Data::Struct(self0), crate::Data::Struct(other0)) => self0 == other0, + (crate::Data::Enum(self0), crate::Data::Enum(other0)) => self0 == other0, + (crate::Data::Union(self0), crate::Data::Union(other0)) => self0 == other0, _ => false, } } } #[cfg(feature = "derive")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for DataEnum {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::DataEnum {} #[cfg(feature = "derive")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for DataEnum { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::DataEnum { fn eq(&self, other: &Self) -> bool { self.variants == other.variants } } #[cfg(feature = "derive")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for DataStruct {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::DataStruct {} #[cfg(feature = "derive")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for DataStruct { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::DataStruct { fn eq(&self, other: &Self) -> bool { self.fields == other.fields && self.semi_token == other.semi_token } } #[cfg(feature = "derive")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for DataUnion {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::DataUnion {} #[cfg(feature = "derive")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for DataUnion { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::DataUnion { fn eq(&self, other: &Self) -> bool { self.fields == other.fields } } #[cfg(feature = "derive")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for DeriveInput {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::DeriveInput {} #[cfg(feature = "derive")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for DeriveInput { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::DeriveInput { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.vis == other.vis && self.ident == other.ident && self.generics == other.generics && self.data == other.data } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for Expr {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::Expr {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for Expr { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::Expr { fn eq(&self, other: &Self) -> bool { match (self, other) { #[cfg(feature = "full")] - (Expr::Array(self0), Expr::Array(other0)) => self0 == other0, - #[cfg(feature = "full")] - (Expr::Assign(self0), Expr::Assign(other0)) => self0 == other0, - #[cfg(feature = 
"full")] - (Expr::Async(self0), Expr::Async(other0)) => self0 == other0, + (crate::Expr::Array(self0), crate::Expr::Array(other0)) => self0 == other0, #[cfg(feature = "full")] - (Expr::Await(self0), Expr::Await(other0)) => self0 == other0, - (Expr::Binary(self0), Expr::Binary(other0)) => self0 == other0, + (crate::Expr::Assign(self0), crate::Expr::Assign(other0)) => self0 == other0, #[cfg(feature = "full")] - (Expr::Block(self0), Expr::Block(other0)) => self0 == other0, + (crate::Expr::Async(self0), crate::Expr::Async(other0)) => self0 == other0, #[cfg(feature = "full")] - (Expr::Break(self0), Expr::Break(other0)) => self0 == other0, - (Expr::Call(self0), Expr::Call(other0)) => self0 == other0, - (Expr::Cast(self0), Expr::Cast(other0)) => self0 == other0, + (crate::Expr::Await(self0), crate::Expr::Await(other0)) => self0 == other0, + (crate::Expr::Binary(self0), crate::Expr::Binary(other0)) => self0 == other0, #[cfg(feature = "full")] - (Expr::Closure(self0), Expr::Closure(other0)) => self0 == other0, + (crate::Expr::Block(self0), crate::Expr::Block(other0)) => self0 == other0, #[cfg(feature = "full")] - (Expr::Const(self0), Expr::Const(other0)) => self0 == other0, + (crate::Expr::Break(self0), crate::Expr::Break(other0)) => self0 == other0, + (crate::Expr::Call(self0), crate::Expr::Call(other0)) => self0 == other0, + (crate::Expr::Cast(self0), crate::Expr::Cast(other0)) => self0 == other0, #[cfg(feature = "full")] - (Expr::Continue(self0), Expr::Continue(other0)) => self0 == other0, - (Expr::Field(self0), Expr::Field(other0)) => self0 == other0, - #[cfg(feature = "full")] - (Expr::ForLoop(self0), Expr::ForLoop(other0)) => self0 == other0, - (Expr::Group(self0), Expr::Group(other0)) => self0 == other0, + (crate::Expr::Closure(self0), crate::Expr::Closure(other0)) => { + self0 == other0 + } #[cfg(feature = "full")] - (Expr::If(self0), Expr::If(other0)) => self0 == other0, - (Expr::Index(self0), Expr::Index(other0)) => self0 == other0, + (crate::Expr::Const(self0), crate::Expr::Const(other0)) => self0 == other0, #[cfg(feature = "full")] - (Expr::Infer(self0), Expr::Infer(other0)) => self0 == other0, + (crate::Expr::Continue(self0), crate::Expr::Continue(other0)) => { + self0 == other0 + } + (crate::Expr::Field(self0), crate::Expr::Field(other0)) => self0 == other0, #[cfg(feature = "full")] - (Expr::Let(self0), Expr::Let(other0)) => self0 == other0, - (Expr::Lit(self0), Expr::Lit(other0)) => self0 == other0, + (crate::Expr::ForLoop(self0), crate::Expr::ForLoop(other0)) => { + self0 == other0 + } + (crate::Expr::Group(self0), crate::Expr::Group(other0)) => self0 == other0, #[cfg(feature = "full")] - (Expr::Loop(self0), Expr::Loop(other0)) => self0 == other0, - (Expr::Macro(self0), Expr::Macro(other0)) => self0 == other0, + (crate::Expr::If(self0), crate::Expr::If(other0)) => self0 == other0, + (crate::Expr::Index(self0), crate::Expr::Index(other0)) => self0 == other0, #[cfg(feature = "full")] - (Expr::Match(self0), Expr::Match(other0)) => self0 == other0, + (crate::Expr::Infer(self0), crate::Expr::Infer(other0)) => self0 == other0, #[cfg(feature = "full")] - (Expr::MethodCall(self0), Expr::MethodCall(other0)) => self0 == other0, - (Expr::Paren(self0), Expr::Paren(other0)) => self0 == other0, - (Expr::Path(self0), Expr::Path(other0)) => self0 == other0, + (crate::Expr::Let(self0), crate::Expr::Let(other0)) => self0 == other0, + (crate::Expr::Lit(self0), crate::Expr::Lit(other0)) => self0 == other0, #[cfg(feature = "full")] - (Expr::Range(self0), Expr::Range(other0)) => self0 == other0, + 
(crate::Expr::Loop(self0), crate::Expr::Loop(other0)) => self0 == other0, + (crate::Expr::Macro(self0), crate::Expr::Macro(other0)) => self0 == other0, #[cfg(feature = "full")] - (Expr::Reference(self0), Expr::Reference(other0)) => self0 == other0, + (crate::Expr::Match(self0), crate::Expr::Match(other0)) => self0 == other0, + (crate::Expr::MethodCall(self0), crate::Expr::MethodCall(other0)) => { + self0 == other0 + } + (crate::Expr::Paren(self0), crate::Expr::Paren(other0)) => self0 == other0, + (crate::Expr::Path(self0), crate::Expr::Path(other0)) => self0 == other0, #[cfg(feature = "full")] - (Expr::Repeat(self0), Expr::Repeat(other0)) => self0 == other0, + (crate::Expr::Range(self0), crate::Expr::Range(other0)) => self0 == other0, #[cfg(feature = "full")] - (Expr::Return(self0), Expr::Return(other0)) => self0 == other0, + (crate::Expr::RawAddr(self0), crate::Expr::RawAddr(other0)) => { + self0 == other0 + } + (crate::Expr::Reference(self0), crate::Expr::Reference(other0)) => { + self0 == other0 + } #[cfg(feature = "full")] - (Expr::Struct(self0), Expr::Struct(other0)) => self0 == other0, + (crate::Expr::Repeat(self0), crate::Expr::Repeat(other0)) => self0 == other0, #[cfg(feature = "full")] - (Expr::Try(self0), Expr::Try(other0)) => self0 == other0, + (crate::Expr::Return(self0), crate::Expr::Return(other0)) => self0 == other0, + (crate::Expr::Struct(self0), crate::Expr::Struct(other0)) => self0 == other0, #[cfg(feature = "full")] - (Expr::TryBlock(self0), Expr::TryBlock(other0)) => self0 == other0, + (crate::Expr::Try(self0), crate::Expr::Try(other0)) => self0 == other0, #[cfg(feature = "full")] - (Expr::Tuple(self0), Expr::Tuple(other0)) => self0 == other0, - (Expr::Unary(self0), Expr::Unary(other0)) => self0 == other0, + (crate::Expr::TryBlock(self0), crate::Expr::TryBlock(other0)) => { + self0 == other0 + } + (crate::Expr::Tuple(self0), crate::Expr::Tuple(other0)) => self0 == other0, + (crate::Expr::Unary(self0), crate::Expr::Unary(other0)) => self0 == other0, #[cfg(feature = "full")] - (Expr::Unsafe(self0), Expr::Unsafe(other0)) => self0 == other0, - (Expr::Verbatim(self0), Expr::Verbatim(other0)) => { + (crate::Expr::Unsafe(self0), crate::Expr::Unsafe(other0)) => self0 == other0, + (crate::Expr::Verbatim(self0), crate::Expr::Verbatim(other0)) => { TokenStreamHelper(self0) == TokenStreamHelper(other0) } #[cfg(feature = "full")] - (Expr::While(self0), Expr::While(other0)) => self0 == other0, + (crate::Expr::While(self0), crate::Expr::While(other0)) => self0 == other0, #[cfg(feature = "full")] - (Expr::Yield(self0), Expr::Yield(other0)) => self0 == other0, + (crate::Expr::Yield(self0), crate::Expr::Yield(other0)) => self0 == other0, _ => false, } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ExprArray {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ExprArray {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ExprArray { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ExprArray { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.elems == other.elems } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ExprAssign {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ExprAssign {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ExprAssign { +#[cfg_attr(docsrs, 
doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ExprAssign { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.left == other.left && self.right == other.right } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ExprAsync {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ExprAsync {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ExprAsync { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ExprAsync { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.capture == other.capture && self.block == other.block } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ExprAwait {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ExprAwait {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ExprAwait { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ExprAwait { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.base == other.base } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ExprBinary {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ExprBinary {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ExprBinary { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ExprBinary { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.left == other.left && self.op == other.op && self.right == other.right } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ExprBlock {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ExprBlock {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ExprBlock { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ExprBlock { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.label == other.label && self.block == other.block } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ExprBreak {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ExprBreak {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ExprBreak { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ExprBreak { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.label == other.label && self.expr == other.expr } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ExprCall {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ExprCall {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ExprCall { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ExprCall { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.func == other.func && self.args == other.args } } #[cfg(any(feature = "derive", feature = "full"))] 
-#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ExprCast {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ExprCast {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ExprCast { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ExprCast { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.expr == other.expr && self.ty == other.ty } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ExprClosure {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ExprClosure {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ExprClosure { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ExprClosure { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.lifetimes == other.lifetimes && self.constness == other.constness && self.movability == other.movability @@ -427,63 +457,63 @@ impl PartialEq for ExprClosure { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ExprConst {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ExprConst {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ExprConst { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ExprConst { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.block == other.block } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ExprContinue {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ExprContinue {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ExprContinue { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ExprContinue { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.label == other.label } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ExprField {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ExprField {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ExprField { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ExprField { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.base == other.base && self.member == other.member } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ExprForLoop {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ExprForLoop {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ExprForLoop { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ExprForLoop { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.label == other.label && self.pat == other.pat && self.expr == other.expr && self.body == other.body } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ExprGroup {} +#[cfg_attr(docsrs, doc(cfg(feature = 
"extra-traits")))] +impl Eq for crate::ExprGroup {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ExprGroup { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ExprGroup { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.expr == other.expr } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ExprIf {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ExprIf {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ExprIf { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ExprIf { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.cond == other.cond && self.then_branch == other.then_branch @@ -491,81 +521,81 @@ impl PartialEq for ExprIf { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ExprIndex {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ExprIndex {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ExprIndex { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ExprIndex { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.expr == other.expr && self.index == other.index } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ExprInfer {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ExprInfer {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ExprInfer { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ExprInfer { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ExprLet {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ExprLet {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ExprLet { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ExprLet { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.pat == other.pat && self.expr == other.expr } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ExprLit {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ExprLit {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ExprLit { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ExprLit { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.lit == other.lit } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ExprLoop {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ExprLoop {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ExprLoop { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ExprLoop { fn eq(&self, other: &Self) -> bool { self.attrs == 
other.attrs && self.label == other.label && self.body == other.body } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ExprMacro {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ExprMacro {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ExprMacro { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ExprMacro { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.mac == other.mac } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ExprMatch {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ExprMatch {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ExprMatch { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ExprMatch { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.expr == other.expr && self.arms == other.arms } } -#[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ExprMethodCall {} -#[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ExprMethodCall { +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ExprMethodCall {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ExprMethodCall { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.receiver == other.receiver && self.method == other.method && self.turbofish == other.turbofish @@ -573,73 +603,84 @@ impl PartialEq for ExprMethodCall { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ExprParen {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ExprParen {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ExprParen { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ExprParen { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.expr == other.expr } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ExprPath {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ExprPath {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ExprPath { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ExprPath { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.qself == other.qself && self.path == other.path } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ExprRange {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ExprRange {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ExprRange { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ExprRange { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.start == other.start && 
self.limits == other.limits && self.end == other.end } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ExprReference {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ExprRawAddr {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ExprReference { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ExprRawAddr { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.mutability == other.mutability + && self.expr == other.expr + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ExprReference {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ExprReference { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.mutability == other.mutability && self.expr == other.expr } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ExprRepeat {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ExprRepeat {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ExprRepeat { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ExprRepeat { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.expr == other.expr && self.len == other.len } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ExprReturn {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ExprReturn {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ExprReturn { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ExprReturn { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.expr == other.expr } } -#[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ExprStruct {} -#[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ExprStruct { +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ExprStruct {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ExprStruct { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.qself == other.qself && self.path == other.path && self.fields == other.fields && self.dot2_token == other.dot2_token @@ -647,82 +688,82 @@ impl PartialEq for ExprStruct { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ExprTry {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ExprTry {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ExprTry { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ExprTry { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.expr == other.expr } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ExprTryBlock {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ExprTryBlock 
{} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ExprTryBlock { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ExprTryBlock { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.block == other.block } } -#[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ExprTuple {} -#[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ExprTuple { +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ExprTuple {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ExprTuple { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.elems == other.elems } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ExprUnary {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ExprUnary {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ExprUnary { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ExprUnary { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.op == other.op && self.expr == other.expr } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ExprUnsafe {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ExprUnsafe {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ExprUnsafe { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ExprUnsafe { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.block == other.block } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ExprWhile {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ExprWhile {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ExprWhile { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ExprWhile { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.label == other.label && self.cond == other.cond && self.body == other.body } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ExprYield {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ExprYield {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ExprYield { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ExprYield { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.expr == other.expr } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for Field {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::Field {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for Field { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::Field { fn eq(&self, other: &Self) -> 
bool { self.attrs == other.attrs && self.vis == other.vis && self.mutability == other.mutability && self.ident == other.ident @@ -730,145 +771,160 @@ impl PartialEq for Field { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for FieldMutability {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::FieldMutability {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for FieldMutability { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::FieldMutability { fn eq(&self, other: &Self) -> bool { match (self, other) { - (FieldMutability::None, FieldMutability::None) => true, + (crate::FieldMutability::None, crate::FieldMutability::None) => true, } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for FieldPat {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::FieldPat {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for FieldPat { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::FieldPat { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.member == other.member && self.colon_token == other.colon_token && self.pat == other.pat } } -#[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for FieldValue {} -#[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for FieldValue { +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::FieldValue {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::FieldValue { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.member == other.member && self.colon_token == other.colon_token && self.expr == other.expr } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for Fields {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::Fields {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for Fields { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::Fields { fn eq(&self, other: &Self) -> bool { match (self, other) { - (Fields::Named(self0), Fields::Named(other0)) => self0 == other0, - (Fields::Unnamed(self0), Fields::Unnamed(other0)) => self0 == other0, - (Fields::Unit, Fields::Unit) => true, + (crate::Fields::Named(self0), crate::Fields::Named(other0)) => { + self0 == other0 + } + (crate::Fields::Unnamed(self0), crate::Fields::Unnamed(other0)) => { + self0 == other0 + } + (crate::Fields::Unit, crate::Fields::Unit) => true, _ => false, } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for FieldsNamed {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::FieldsNamed {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for FieldsNamed { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::FieldsNamed { fn eq(&self, other: &Self) -> 
bool { self.named == other.named } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for FieldsUnnamed {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::FieldsUnnamed {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for FieldsUnnamed { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::FieldsUnnamed { fn eq(&self, other: &Self) -> bool { self.unnamed == other.unnamed } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for File {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::File {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for File { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::File { fn eq(&self, other: &Self) -> bool { self.shebang == other.shebang && self.attrs == other.attrs && self.items == other.items } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for FnArg {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::FnArg {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for FnArg { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::FnArg { fn eq(&self, other: &Self) -> bool { match (self, other) { - (FnArg::Receiver(self0), FnArg::Receiver(other0)) => self0 == other0, - (FnArg::Typed(self0), FnArg::Typed(other0)) => self0 == other0, + (crate::FnArg::Receiver(self0), crate::FnArg::Receiver(other0)) => { + self0 == other0 + } + (crate::FnArg::Typed(self0), crate::FnArg::Typed(other0)) => self0 == other0, _ => false, } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ForeignItem {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ForeignItem {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ForeignItem { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ForeignItem { fn eq(&self, other: &Self) -> bool { match (self, other) { - (ForeignItem::Fn(self0), ForeignItem::Fn(other0)) => self0 == other0, - (ForeignItem::Static(self0), ForeignItem::Static(other0)) => self0 == other0, - (ForeignItem::Type(self0), ForeignItem::Type(other0)) => self0 == other0, - (ForeignItem::Macro(self0), ForeignItem::Macro(other0)) => self0 == other0, - (ForeignItem::Verbatim(self0), ForeignItem::Verbatim(other0)) => { - TokenStreamHelper(self0) == TokenStreamHelper(other0) + (crate::ForeignItem::Fn(self0), crate::ForeignItem::Fn(other0)) => { + self0 == other0 + } + (crate::ForeignItem::Static(self0), crate::ForeignItem::Static(other0)) => { + self0 == other0 + } + (crate::ForeignItem::Type(self0), crate::ForeignItem::Type(other0)) => { + self0 == other0 + } + (crate::ForeignItem::Macro(self0), crate::ForeignItem::Macro(other0)) => { + self0 == other0 } + ( + crate::ForeignItem::Verbatim(self0), + crate::ForeignItem::Verbatim(other0), + ) => TokenStreamHelper(self0) == TokenStreamHelper(other0), _ => false, } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ForeignItemFn {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ForeignItemFn {} 
#[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ForeignItemFn { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ForeignItemFn { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.vis == other.vis && self.sig == other.sig } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ForeignItemMacro {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ForeignItemMacro {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ForeignItemMacro { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ForeignItemMacro { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.mac == other.mac && self.semi_token == other.semi_token } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ForeignItemStatic {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ForeignItemStatic {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ForeignItemStatic { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ForeignItemStatic { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.vis == other.vis && self.mutability == other.mutability && self.ident == other.ident @@ -876,87 +932,104 @@ impl PartialEq for ForeignItemStatic { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ForeignItemType {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ForeignItemType {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ForeignItemType { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ForeignItemType { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.vis == other.vis && self.ident == other.ident && self.generics == other.generics } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for GenericArgument {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::GenericArgument {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for GenericArgument { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::GenericArgument { fn eq(&self, other: &Self) -> bool { match (self, other) { - (GenericArgument::Lifetime(self0), GenericArgument::Lifetime(other0)) => { - self0 == other0 - } - (GenericArgument::Type(self0), GenericArgument::Type(other0)) => { - self0 == other0 - } - (GenericArgument::Const(self0), GenericArgument::Const(other0)) => { - self0 == other0 - } - (GenericArgument::AssocType(self0), GenericArgument::AssocType(other0)) => { - self0 == other0 - } - (GenericArgument::AssocConst(self0), GenericArgument::AssocConst(other0)) => { - self0 == other0 - } - (GenericArgument::Constraint(self0), GenericArgument::Constraint(other0)) => { - self0 == other0 - } + ( + crate::GenericArgument::Lifetime(self0), + crate::GenericArgument::Lifetime(other0), + ) => self0 == other0, + ( + crate::GenericArgument::Type(self0), + crate::GenericArgument::Type(other0), + ) => self0 == other0, + ( + 
crate::GenericArgument::Const(self0), + crate::GenericArgument::Const(other0), + ) => self0 == other0, + ( + crate::GenericArgument::AssocType(self0), + crate::GenericArgument::AssocType(other0), + ) => self0 == other0, + ( + crate::GenericArgument::AssocConst(self0), + crate::GenericArgument::AssocConst(other0), + ) => self0 == other0, + ( + crate::GenericArgument::Constraint(self0), + crate::GenericArgument::Constraint(other0), + ) => self0 == other0, _ => false, } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for GenericParam {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::GenericParam {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for GenericParam { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::GenericParam { fn eq(&self, other: &Self) -> bool { match (self, other) { - (GenericParam::Lifetime(self0), GenericParam::Lifetime(other0)) => { + ( + crate::GenericParam::Lifetime(self0), + crate::GenericParam::Lifetime(other0), + ) => self0 == other0, + (crate::GenericParam::Type(self0), crate::GenericParam::Type(other0)) => { + self0 == other0 + } + (crate::GenericParam::Const(self0), crate::GenericParam::Const(other0)) => { self0 == other0 } - (GenericParam::Type(self0), GenericParam::Type(other0)) => self0 == other0, - (GenericParam::Const(self0), GenericParam::Const(other0)) => self0 == other0, _ => false, } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for Generics {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::Generics {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for Generics { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::Generics { fn eq(&self, other: &Self) -> bool { self.lt_token == other.lt_token && self.params == other.params && self.gt_token == other.gt_token && self.where_clause == other.where_clause } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ImplItem {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ImplItem {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ImplItem { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ImplItem { fn eq(&self, other: &Self) -> bool { match (self, other) { - (ImplItem::Const(self0), ImplItem::Const(other0)) => self0 == other0, - (ImplItem::Fn(self0), ImplItem::Fn(other0)) => self0 == other0, - (ImplItem::Type(self0), ImplItem::Type(other0)) => self0 == other0, - (ImplItem::Macro(self0), ImplItem::Macro(other0)) => self0 == other0, - (ImplItem::Verbatim(self0), ImplItem::Verbatim(other0)) => { + (crate::ImplItem::Const(self0), crate::ImplItem::Const(other0)) => { + self0 == other0 + } + (crate::ImplItem::Fn(self0), crate::ImplItem::Fn(other0)) => self0 == other0, + (crate::ImplItem::Type(self0), crate::ImplItem::Type(other0)) => { + self0 == other0 + } + (crate::ImplItem::Macro(self0), crate::ImplItem::Macro(other0)) => { + self0 == other0 + } + (crate::ImplItem::Verbatim(self0), crate::ImplItem::Verbatim(other0)) => { TokenStreamHelper(self0) == TokenStreamHelper(other0) } _ => false, @@ -964,11 +1037,11 @@ impl PartialEq for 
ImplItem { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ImplItemConst {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ImplItemConst {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ImplItemConst { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ImplItemConst { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.vis == other.vis && self.defaultness == other.defaultness && self.ident == other.ident @@ -977,11 +1050,11 @@ impl PartialEq for ImplItemConst { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ImplItemFn {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ImplItemFn {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ImplItemFn { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ImplItemFn { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.vis == other.vis && self.defaultness == other.defaultness && self.sig == other.sig @@ -989,22 +1062,22 @@ impl PartialEq for ImplItemFn { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ImplItemMacro {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ImplItemMacro {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ImplItemMacro { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ImplItemMacro { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.mac == other.mac && self.semi_token == other.semi_token } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ImplItemType {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ImplItemType {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ImplItemType { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ImplItemType { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.vis == other.vis && self.defaultness == other.defaultness && self.ident == other.ident @@ -1012,39 +1085,45 @@ impl PartialEq for ImplItemType { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ImplRestriction {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ImplRestriction {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ImplRestriction { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ImplRestriction { fn eq(&self, _other: &Self) -> bool { match *self {} } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for Item {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::Item {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for Item { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::Item { fn eq(&self, other: &Self) -> bool { match (self, other) { - (Item::Const(self0), Item::Const(other0)) => self0 == other0, - (Item::Enum(self0), Item::Enum(other0)) => self0 == other0, - 
(Item::ExternCrate(self0), Item::ExternCrate(other0)) => self0 == other0, - (Item::Fn(self0), Item::Fn(other0)) => self0 == other0, - (Item::ForeignMod(self0), Item::ForeignMod(other0)) => self0 == other0, - (Item::Impl(self0), Item::Impl(other0)) => self0 == other0, - (Item::Macro(self0), Item::Macro(other0)) => self0 == other0, - (Item::Mod(self0), Item::Mod(other0)) => self0 == other0, - (Item::Static(self0), Item::Static(other0)) => self0 == other0, - (Item::Struct(self0), Item::Struct(other0)) => self0 == other0, - (Item::Trait(self0), Item::Trait(other0)) => self0 == other0, - (Item::TraitAlias(self0), Item::TraitAlias(other0)) => self0 == other0, - (Item::Type(self0), Item::Type(other0)) => self0 == other0, - (Item::Union(self0), Item::Union(other0)) => self0 == other0, - (Item::Use(self0), Item::Use(other0)) => self0 == other0, - (Item::Verbatim(self0), Item::Verbatim(other0)) => { + (crate::Item::Const(self0), crate::Item::Const(other0)) => self0 == other0, + (crate::Item::Enum(self0), crate::Item::Enum(other0)) => self0 == other0, + (crate::Item::ExternCrate(self0), crate::Item::ExternCrate(other0)) => { + self0 == other0 + } + (crate::Item::Fn(self0), crate::Item::Fn(other0)) => self0 == other0, + (crate::Item::ForeignMod(self0), crate::Item::ForeignMod(other0)) => { + self0 == other0 + } + (crate::Item::Impl(self0), crate::Item::Impl(other0)) => self0 == other0, + (crate::Item::Macro(self0), crate::Item::Macro(other0)) => self0 == other0, + (crate::Item::Mod(self0), crate::Item::Mod(other0)) => self0 == other0, + (crate::Item::Static(self0), crate::Item::Static(other0)) => self0 == other0, + (crate::Item::Struct(self0), crate::Item::Struct(other0)) => self0 == other0, + (crate::Item::Trait(self0), crate::Item::Trait(other0)) => self0 == other0, + (crate::Item::TraitAlias(self0), crate::Item::TraitAlias(other0)) => { + self0 == other0 + } + (crate::Item::Type(self0), crate::Item::Type(other0)) => self0 == other0, + (crate::Item::Union(self0), crate::Item::Union(other0)) => self0 == other0, + (crate::Item::Use(self0), crate::Item::Use(other0)) => self0 == other0, + (crate::Item::Verbatim(self0), crate::Item::Verbatim(other0)) => { TokenStreamHelper(self0) == TokenStreamHelper(other0) } _ => false, @@ -1052,11 +1131,11 @@ impl PartialEq for Item { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ItemConst {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ItemConst {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ItemConst { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ItemConst { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.vis == other.vis && self.ident == other.ident && self.generics == other.generics && self.ty == other.ty @@ -1064,55 +1143,55 @@ impl PartialEq for ItemConst { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ItemEnum {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ItemEnum {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ItemEnum { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ItemEnum { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.vis == other.vis && self.ident == other.ident && self.generics == other.generics && self.variants == other.variants } } 
#[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ItemExternCrate {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ItemExternCrate {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ItemExternCrate { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ItemExternCrate { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.vis == other.vis && self.ident == other.ident && self.rename == other.rename } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ItemFn {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ItemFn {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ItemFn { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ItemFn { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.vis == other.vis && self.sig == other.sig && self.block == other.block } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ItemForeignMod {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ItemForeignMod {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ItemForeignMod { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ItemForeignMod { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.unsafety == other.unsafety && self.abi == other.abi && self.items == other.items } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ItemImpl {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ItemImpl {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ItemImpl { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ItemImpl { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.defaultness == other.defaultness && self.unsafety == other.unsafety && self.generics == other.generics @@ -1121,22 +1200,22 @@ impl PartialEq for ItemImpl { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ItemMacro {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ItemMacro {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ItemMacro { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ItemMacro { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.ident == other.ident && self.mac == other.mac && self.semi_token == other.semi_token } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ItemMod {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ItemMod {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ItemMod { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ItemMod { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.vis == other.vis && self.unsafety == other.unsafety && self.ident == other.ident @@ -1144,11 +1223,11 @@ impl PartialEq for ItemMod { } } #[cfg(feature = 
"full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ItemStatic {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ItemStatic {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ItemStatic { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ItemStatic { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.vis == other.vis && self.mutability == other.mutability && self.ident == other.ident @@ -1156,11 +1235,11 @@ impl PartialEq for ItemStatic { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ItemStruct {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ItemStruct {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ItemStruct { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ItemStruct { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.vis == other.vis && self.ident == other.ident && self.generics == other.generics && self.fields == other.fields @@ -1168,11 +1247,11 @@ impl PartialEq for ItemStruct { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ItemTrait {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ItemTrait {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ItemTrait { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ItemTrait { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.vis == other.vis && self.unsafety == other.unsafety && self.auto_token == other.auto_token @@ -1182,239 +1261,250 @@ impl PartialEq for ItemTrait { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ItemTraitAlias {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ItemTraitAlias {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ItemTraitAlias { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ItemTraitAlias { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.vis == other.vis && self.ident == other.ident && self.generics == other.generics && self.bounds == other.bounds } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ItemType {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ItemType {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ItemType { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ItemType { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.vis == other.vis && self.ident == other.ident && self.generics == other.generics && self.ty == other.ty } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ItemUnion {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ItemUnion {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ItemUnion { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ItemUnion { fn eq(&self, other: &Self) -> bool { 
self.attrs == other.attrs && self.vis == other.vis && self.ident == other.ident && self.generics == other.generics && self.fields == other.fields } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ItemUse {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ItemUse {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ItemUse { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ItemUse { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.vis == other.vis && self.leading_colon == other.leading_colon && self.tree == other.tree } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for Label {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::Label {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for Label { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::Label { fn eq(&self, other: &Self) -> bool { self.name == other.name } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for LifetimeParam {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::LifetimeParam {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for LifetimeParam { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::LifetimeParam { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.lifetime == other.lifetime && self.colon_token == other.colon_token && self.bounds == other.bounds } } -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for Lit {} -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for Lit { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::Lit {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::Lit { fn eq(&self, other: &Self) -> bool { match (self, other) { - (Lit::Str(self0), Lit::Str(other0)) => self0 == other0, - (Lit::ByteStr(self0), Lit::ByteStr(other0)) => self0 == other0, - (Lit::Byte(self0), Lit::Byte(other0)) => self0 == other0, - (Lit::Char(self0), Lit::Char(other0)) => self0 == other0, - (Lit::Int(self0), Lit::Int(other0)) => self0 == other0, - (Lit::Float(self0), Lit::Float(other0)) => self0 == other0, - (Lit::Bool(self0), Lit::Bool(other0)) => self0 == other0, - (Lit::Verbatim(self0), Lit::Verbatim(other0)) => { + (crate::Lit::Str(self0), crate::Lit::Str(other0)) => self0 == other0, + (crate::Lit::ByteStr(self0), crate::Lit::ByteStr(other0)) => self0 == other0, + (crate::Lit::CStr(self0), crate::Lit::CStr(other0)) => self0 == other0, + (crate::Lit::Byte(self0), crate::Lit::Byte(other0)) => self0 == other0, + (crate::Lit::Char(self0), crate::Lit::Char(other0)) => self0 == other0, + (crate::Lit::Int(self0), crate::Lit::Int(other0)) => self0 == other0, + (crate::Lit::Float(self0), crate::Lit::Float(other0)) => self0 == other0, + (crate::Lit::Bool(self0), crate::Lit::Bool(other0)) => self0 == other0, + (crate::Lit::Verbatim(self0), crate::Lit::Verbatim(other0)) => { self0.to_string() == other0.to_string() } _ => false, } } } -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for LitBool {} -#[cfg_attr(doc_cfg, doc(cfg(feature = 
"extra-traits")))] -impl PartialEq for LitBool { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::LitBool {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::LitBool { fn eq(&self, other: &Self) -> bool { self.value == other.value } } -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for LitByte {} -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for LitByteStr {} -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for LitChar {} -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for LitFloat {} -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for LitInt {} -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for LitStr {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::LitByte {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::LitByteStr {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::LitCStr {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::LitChar {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::LitFloat {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::LitInt {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::LitStr {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for Local {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::Local {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for Local { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::Local { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.pat == other.pat && self.init == other.init } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for LocalInit {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::LocalInit {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for LocalInit { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::LocalInit { fn eq(&self, other: &Self) -> bool { self.expr == other.expr && self.diverge == other.diverge } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for Macro {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::Macro {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for Macro { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::Macro { fn eq(&self, other: &Self) -> bool { self.path == other.path && self.delimiter == other.delimiter && TokenStreamHelper(&self.tokens) == TokenStreamHelper(&other.tokens) } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for MacroDelimiter {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::MacroDelimiter {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for MacroDelimiter { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::MacroDelimiter { fn eq(&self, other: &Self) 
-> bool { match (self, other) { - (MacroDelimiter::Paren(_), MacroDelimiter::Paren(_)) => true, - (MacroDelimiter::Brace(_), MacroDelimiter::Brace(_)) => true, - (MacroDelimiter::Bracket(_), MacroDelimiter::Bracket(_)) => true, + (crate::MacroDelimiter::Paren(_), crate::MacroDelimiter::Paren(_)) => true, + (crate::MacroDelimiter::Brace(_), crate::MacroDelimiter::Brace(_)) => true, + (crate::MacroDelimiter::Bracket(_), crate::MacroDelimiter::Bracket(_)) => { + true + } _ => false, } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for Meta {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::Meta {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for Meta { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::Meta { fn eq(&self, other: &Self) -> bool { match (self, other) { - (Meta::Path(self0), Meta::Path(other0)) => self0 == other0, - (Meta::List(self0), Meta::List(other0)) => self0 == other0, - (Meta::NameValue(self0), Meta::NameValue(other0)) => self0 == other0, + (crate::Meta::Path(self0), crate::Meta::Path(other0)) => self0 == other0, + (crate::Meta::List(self0), crate::Meta::List(other0)) => self0 == other0, + (crate::Meta::NameValue(self0), crate::Meta::NameValue(other0)) => { + self0 == other0 + } _ => false, } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for MetaList {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::MetaList {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for MetaList { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::MetaList { fn eq(&self, other: &Self) -> bool { self.path == other.path && self.delimiter == other.delimiter && TokenStreamHelper(&self.tokens) == TokenStreamHelper(&other.tokens) } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for MetaNameValue {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::MetaNameValue {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for MetaNameValue { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::MetaNameValue { fn eq(&self, other: &Self) -> bool { self.path == other.path && self.value == other.value } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ParenthesizedGenericArguments {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ParenthesizedGenericArguments {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ParenthesizedGenericArguments { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ParenthesizedGenericArguments { fn eq(&self, other: &Self) -> bool { self.inputs == other.inputs && self.output == other.output } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for Pat {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::Pat {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = 
"extra-traits")))] -impl PartialEq for Pat { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::Pat { fn eq(&self, other: &Self) -> bool { match (self, other) { - (Pat::Const(self0), Pat::Const(other0)) => self0 == other0, - (Pat::Ident(self0), Pat::Ident(other0)) => self0 == other0, - (Pat::Lit(self0), Pat::Lit(other0)) => self0 == other0, - (Pat::Macro(self0), Pat::Macro(other0)) => self0 == other0, - (Pat::Or(self0), Pat::Or(other0)) => self0 == other0, - (Pat::Paren(self0), Pat::Paren(other0)) => self0 == other0, - (Pat::Path(self0), Pat::Path(other0)) => self0 == other0, - (Pat::Range(self0), Pat::Range(other0)) => self0 == other0, - (Pat::Reference(self0), Pat::Reference(other0)) => self0 == other0, - (Pat::Rest(self0), Pat::Rest(other0)) => self0 == other0, - (Pat::Slice(self0), Pat::Slice(other0)) => self0 == other0, - (Pat::Struct(self0), Pat::Struct(other0)) => self0 == other0, - (Pat::Tuple(self0), Pat::Tuple(other0)) => self0 == other0, - (Pat::TupleStruct(self0), Pat::TupleStruct(other0)) => self0 == other0, - (Pat::Type(self0), Pat::Type(other0)) => self0 == other0, - (Pat::Verbatim(self0), Pat::Verbatim(other0)) => { + (crate::Pat::Const(self0), crate::Pat::Const(other0)) => self0 == other0, + (crate::Pat::Ident(self0), crate::Pat::Ident(other0)) => self0 == other0, + (crate::Pat::Lit(self0), crate::Pat::Lit(other0)) => self0 == other0, + (crate::Pat::Macro(self0), crate::Pat::Macro(other0)) => self0 == other0, + (crate::Pat::Or(self0), crate::Pat::Or(other0)) => self0 == other0, + (crate::Pat::Paren(self0), crate::Pat::Paren(other0)) => self0 == other0, + (crate::Pat::Path(self0), crate::Pat::Path(other0)) => self0 == other0, + (crate::Pat::Range(self0), crate::Pat::Range(other0)) => self0 == other0, + (crate::Pat::Reference(self0), crate::Pat::Reference(other0)) => { + self0 == other0 + } + (crate::Pat::Rest(self0), crate::Pat::Rest(other0)) => self0 == other0, + (crate::Pat::Slice(self0), crate::Pat::Slice(other0)) => self0 == other0, + (crate::Pat::Struct(self0), crate::Pat::Struct(other0)) => self0 == other0, + (crate::Pat::Tuple(self0), crate::Pat::Tuple(other0)) => self0 == other0, + (crate::Pat::TupleStruct(self0), crate::Pat::TupleStruct(other0)) => { + self0 == other0 + } + (crate::Pat::Type(self0), crate::Pat::Type(other0)) => self0 == other0, + (crate::Pat::Verbatim(self0), crate::Pat::Verbatim(other0)) => { TokenStreamHelper(self0) == TokenStreamHelper(other0) } - (Pat::Wild(self0), Pat::Wild(other0)) => self0 == other0, + (crate::Pat::Wild(self0), crate::Pat::Wild(other0)) => self0 == other0, _ => false, } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for PatIdent {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::PatIdent {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for PatIdent { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::PatIdent { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.by_ref == other.by_ref && self.mutability == other.mutability && self.ident == other.ident @@ -1422,202 +1512,228 @@ impl PartialEq for PatIdent { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for PatOr {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::PatOr {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for PatOr { 
+#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::PatOr { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.leading_vert == other.leading_vert && self.cases == other.cases } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for PatParen {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::PatParen {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for PatParen { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::PatParen { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.pat == other.pat } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for PatReference {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::PatReference {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for PatReference { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::PatReference { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.mutability == other.mutability && self.pat == other.pat } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for PatRest {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::PatRest {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for PatRest { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::PatRest { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for PatSlice {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::PatSlice {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for PatSlice { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::PatSlice { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.elems == other.elems } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for PatStruct {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::PatStruct {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for PatStruct { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::PatStruct { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.qself == other.qself && self.path == other.path && self.fields == other.fields && self.rest == other.rest } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for PatTuple {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::PatTuple {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for PatTuple { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::PatTuple { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.elems == other.elems } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for PatTupleStruct {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::PatTupleStruct {} 
#[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for PatTupleStruct { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::PatTupleStruct { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.qself == other.qself && self.path == other.path && self.elems == other.elems } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for PatType {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::PatType {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for PatType { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::PatType { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.pat == other.pat && self.ty == other.ty } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for PatWild {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::PatWild {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for PatWild { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::PatWild { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for Path {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::Path {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for Path { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::Path { fn eq(&self, other: &Self) -> bool { self.leading_colon == other.leading_colon && self.segments == other.segments } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for PathArguments {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::PathArguments {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for PathArguments { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::PathArguments { fn eq(&self, other: &Self) -> bool { match (self, other) { - (PathArguments::None, PathArguments::None) => true, + (crate::PathArguments::None, crate::PathArguments::None) => true, ( - PathArguments::AngleBracketed(self0), - PathArguments::AngleBracketed(other0), + crate::PathArguments::AngleBracketed(self0), + crate::PathArguments::AngleBracketed(other0), ) => self0 == other0, ( - PathArguments::Parenthesized(self0), - PathArguments::Parenthesized(other0), + crate::PathArguments::Parenthesized(self0), + crate::PathArguments::Parenthesized(other0), ) => self0 == other0, _ => false, } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for PathSegment {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::PathSegment {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for PathSegment { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::PathSegment { fn eq(&self, other: &Self) -> bool { self.ident == other.ident && self.arguments == 
other.arguments } } +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::PointerMutability {} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::PointerMutability { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (crate::PointerMutability::Const(_), crate::PointerMutability::Const(_)) => { + true + } + (crate::PointerMutability::Mut(_), crate::PointerMutability::Mut(_)) => true, + _ => false, + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::PreciseCapture {} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::PreciseCapture { + fn eq(&self, other: &Self) -> bool { + self.params == other.params + } +} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for PredicateLifetime {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::PredicateLifetime {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for PredicateLifetime { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::PredicateLifetime { fn eq(&self, other: &Self) -> bool { self.lifetime == other.lifetime && self.bounds == other.bounds } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for PredicateType {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::PredicateType {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for PredicateType { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::PredicateType { fn eq(&self, other: &Self) -> bool { self.lifetimes == other.lifetimes && self.bounded_ty == other.bounded_ty && self.bounds == other.bounds } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for QSelf {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::QSelf {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for QSelf { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::QSelf { fn eq(&self, other: &Self) -> bool { self.ty == other.ty && self.position == other.position && self.as_token == other.as_token } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for RangeLimits {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::RangeLimits {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for RangeLimits { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::RangeLimits { fn eq(&self, other: &Self) -> bool { match (self, other) { - (RangeLimits::HalfOpen(_), RangeLimits::HalfOpen(_)) => true, - (RangeLimits::Closed(_), RangeLimits::Closed(_)) => true, + (crate::RangeLimits::HalfOpen(_), crate::RangeLimits::HalfOpen(_)) => true, + (crate::RangeLimits::Closed(_), crate::RangeLimits::Closed(_)) => true, _ => false, } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for Receiver {} +#[cfg_attr(docsrs, 
doc(cfg(feature = "extra-traits")))] +impl Eq for crate::Receiver {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for Receiver { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::Receiver { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.reference == other.reference && self.mutability == other.mutability @@ -1625,25 +1741,27 @@ impl PartialEq for Receiver { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for ReturnType {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::ReturnType {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for ReturnType { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::ReturnType { fn eq(&self, other: &Self) -> bool { match (self, other) { - (ReturnType::Default, ReturnType::Default) => true, - (ReturnType::Type(_, self1), ReturnType::Type(_, other1)) => self1 == other1, + (crate::ReturnType::Default, crate::ReturnType::Default) => true, + (crate::ReturnType::Type(_, self1), crate::ReturnType::Type(_, other1)) => { + self1 == other1 + } _ => false, } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for Signature {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::Signature {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for Signature { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::Signature { fn eq(&self, other: &Self) -> bool { self.constness == other.constness && self.asyncness == other.asyncness && self.unsafety == other.unsafety && self.abi == other.abi @@ -1653,86 +1771,97 @@ impl PartialEq for Signature { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for StaticMutability {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::StaticMutability {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for StaticMutability { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::StaticMutability { fn eq(&self, other: &Self) -> bool { match (self, other) { - (StaticMutability::Mut(_), StaticMutability::Mut(_)) => true, - (StaticMutability::None, StaticMutability::None) => true, + (crate::StaticMutability::Mut(_), crate::StaticMutability::Mut(_)) => true, + (crate::StaticMutability::None, crate::StaticMutability::None) => true, _ => false, } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for Stmt {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::Stmt {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for Stmt { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::Stmt { fn eq(&self, other: &Self) -> bool { match (self, other) { - (Stmt::Local(self0), Stmt::Local(other0)) => self0 == other0, - (Stmt::Item(self0), Stmt::Item(other0)) => self0 == other0, - (Stmt::Expr(self0, self1), Stmt::Expr(other0, other1)) => { + (crate::Stmt::Local(self0), crate::Stmt::Local(other0)) => self0 == other0, + (crate::Stmt::Item(self0), crate::Stmt::Item(other0)) => self0 == other0, + 
(crate::Stmt::Expr(self0, self1), crate::Stmt::Expr(other0, other1)) => { self0 == other0 && self1 == other1 } - (Stmt::Macro(self0), Stmt::Macro(other0)) => self0 == other0, + (crate::Stmt::Macro(self0), crate::Stmt::Macro(other0)) => self0 == other0, _ => false, } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for StmtMacro {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::StmtMacro {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for StmtMacro { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::StmtMacro { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.mac == other.mac && self.semi_token == other.semi_token } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for TraitBound {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::TraitBound {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for TraitBound { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::TraitBound { fn eq(&self, other: &Self) -> bool { self.paren_token == other.paren_token && self.modifier == other.modifier && self.lifetimes == other.lifetimes && self.path == other.path } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for TraitBoundModifier {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::TraitBoundModifier {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for TraitBoundModifier { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::TraitBoundModifier { fn eq(&self, other: &Self) -> bool { match (self, other) { - (TraitBoundModifier::None, TraitBoundModifier::None) => true, - (TraitBoundModifier::Maybe(_), TraitBoundModifier::Maybe(_)) => true, + (crate::TraitBoundModifier::None, crate::TraitBoundModifier::None) => true, + ( + crate::TraitBoundModifier::Maybe(_), + crate::TraitBoundModifier::Maybe(_), + ) => true, _ => false, } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for TraitItem {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::TraitItem {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for TraitItem { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::TraitItem { fn eq(&self, other: &Self) -> bool { match (self, other) { - (TraitItem::Const(self0), TraitItem::Const(other0)) => self0 == other0, - (TraitItem::Fn(self0), TraitItem::Fn(other0)) => self0 == other0, - (TraitItem::Type(self0), TraitItem::Type(other0)) => self0 == other0, - (TraitItem::Macro(self0), TraitItem::Macro(other0)) => self0 == other0, - (TraitItem::Verbatim(self0), TraitItem::Verbatim(other0)) => { + (crate::TraitItem::Const(self0), crate::TraitItem::Const(other0)) => { + self0 == other0 + } + (crate::TraitItem::Fn(self0), crate::TraitItem::Fn(other0)) => { + self0 == other0 + } + (crate::TraitItem::Type(self0), crate::TraitItem::Type(other0)) => { + self0 == other0 + } + (crate::TraitItem::Macro(self0), crate::TraitItem::Macro(other0)) => { + self0 == other0 + } + 
(crate::TraitItem::Verbatim(self0), crate::TraitItem::Verbatim(other0)) => { TokenStreamHelper(self0) == TokenStreamHelper(other0) } _ => false, @@ -1740,11 +1869,11 @@ impl PartialEq for TraitItem { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for TraitItemConst {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::TraitItemConst {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for TraitItemConst { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::TraitItemConst { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.ident == other.ident && self.generics == other.generics && self.ty == other.ty @@ -1752,33 +1881,33 @@ impl PartialEq for TraitItemConst { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for TraitItemFn {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::TraitItemFn {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for TraitItemFn { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::TraitItemFn { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.sig == other.sig && self.default == other.default && self.semi_token == other.semi_token } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for TraitItemMacro {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::TraitItemMacro {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for TraitItemMacro { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::TraitItemMacro { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.mac == other.mac && self.semi_token == other.semi_token } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for TraitItemType {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::TraitItemType {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for TraitItemType { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::TraitItemType { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.ident == other.ident && self.generics == other.generics && self.colon_token == other.colon_token @@ -1786,28 +1915,34 @@ impl PartialEq for TraitItemType { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for Type {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::Type {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for Type { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::Type { fn eq(&self, other: &Self) -> bool { match (self, other) { - (Type::Array(self0), Type::Array(other0)) => self0 == other0, - (Type::BareFn(self0), Type::BareFn(other0)) => self0 == other0, - (Type::Group(self0), Type::Group(other0)) => self0 == other0, - (Type::ImplTrait(self0), Type::ImplTrait(other0)) => self0 == other0, - (Type::Infer(self0), Type::Infer(other0)) => self0 == other0, - (Type::Macro(self0), Type::Macro(other0)) => self0 == other0, - 
(Type::Never(self0), Type::Never(other0)) => self0 == other0, - (Type::Paren(self0), Type::Paren(other0)) => self0 == other0, - (Type::Path(self0), Type::Path(other0)) => self0 == other0, - (Type::Ptr(self0), Type::Ptr(other0)) => self0 == other0, - (Type::Reference(self0), Type::Reference(other0)) => self0 == other0, - (Type::Slice(self0), Type::Slice(other0)) => self0 == other0, - (Type::TraitObject(self0), Type::TraitObject(other0)) => self0 == other0, - (Type::Tuple(self0), Type::Tuple(other0)) => self0 == other0, - (Type::Verbatim(self0), Type::Verbatim(other0)) => { + (crate::Type::Array(self0), crate::Type::Array(other0)) => self0 == other0, + (crate::Type::BareFn(self0), crate::Type::BareFn(other0)) => self0 == other0, + (crate::Type::Group(self0), crate::Type::Group(other0)) => self0 == other0, + (crate::Type::ImplTrait(self0), crate::Type::ImplTrait(other0)) => { + self0 == other0 + } + (crate::Type::Infer(self0), crate::Type::Infer(other0)) => self0 == other0, + (crate::Type::Macro(self0), crate::Type::Macro(other0)) => self0 == other0, + (crate::Type::Never(self0), crate::Type::Never(other0)) => self0 == other0, + (crate::Type::Paren(self0), crate::Type::Paren(other0)) => self0 == other0, + (crate::Type::Path(self0), crate::Type::Path(other0)) => self0 == other0, + (crate::Type::Ptr(self0), crate::Type::Ptr(other0)) => self0 == other0, + (crate::Type::Reference(self0), crate::Type::Reference(other0)) => { + self0 == other0 + } + (crate::Type::Slice(self0), crate::Type::Slice(other0)) => self0 == other0, + (crate::Type::TraitObject(self0), crate::Type::TraitObject(other0)) => { + self0 == other0 + } + (crate::Type::Tuple(self0), crate::Type::Tuple(other0)) => self0 == other0, + (crate::Type::Verbatim(self0), crate::Type::Verbatim(other0)) => { TokenStreamHelper(self0) == TokenStreamHelper(other0) } _ => false, @@ -1815,21 +1950,21 @@ impl PartialEq for Type { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for TypeArray {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::TypeArray {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for TypeArray { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::TypeArray { fn eq(&self, other: &Self) -> bool { self.elem == other.elem && self.len == other.len } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for TypeBareFn {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::TypeBareFn {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for TypeBareFn { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::TypeBareFn { fn eq(&self, other: &Self) -> bool { self.lifetimes == other.lifetimes && self.unsafety == other.unsafety && self.abi == other.abi && self.inputs == other.inputs @@ -1837,61 +1972,61 @@ impl PartialEq for TypeBareFn { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for TypeGroup {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::TypeGroup {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for TypeGroup { +#[cfg_attr(docsrs, doc(cfg(feature = 
"extra-traits")))] +impl PartialEq for crate::TypeGroup { fn eq(&self, other: &Self) -> bool { self.elem == other.elem } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for TypeImplTrait {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::TypeImplTrait {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for TypeImplTrait { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::TypeImplTrait { fn eq(&self, other: &Self) -> bool { self.bounds == other.bounds } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for TypeInfer {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::TypeInfer {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for TypeInfer { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::TypeInfer { fn eq(&self, _other: &Self) -> bool { true } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for TypeMacro {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::TypeMacro {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for TypeMacro { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::TypeMacro { fn eq(&self, other: &Self) -> bool { self.mac == other.mac } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for TypeNever {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::TypeNever {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for TypeNever { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::TypeNever { fn eq(&self, _other: &Self) -> bool { true } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for TypeParam {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::TypeParam {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for TypeParam { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::TypeParam { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.ident == other.ident && self.colon_token == other.colon_token && self.bounds == other.bounds @@ -1899,250 +2034,270 @@ impl PartialEq for TypeParam { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for TypeParamBound {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::TypeParamBound {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for TypeParamBound { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::TypeParamBound { fn eq(&self, other: &Self) -> bool { match (self, other) { - (TypeParamBound::Trait(self0), TypeParamBound::Trait(other0)) => { - self0 == other0 - } - 
(TypeParamBound::Lifetime(self0), TypeParamBound::Lifetime(other0)) => { - self0 == other0 - } - (TypeParamBound::Verbatim(self0), TypeParamBound::Verbatim(other0)) => { - TokenStreamHelper(self0) == TokenStreamHelper(other0) - } + ( + crate::TypeParamBound::Trait(self0), + crate::TypeParamBound::Trait(other0), + ) => self0 == other0, + ( + crate::TypeParamBound::Lifetime(self0), + crate::TypeParamBound::Lifetime(other0), + ) => self0 == other0, + #[cfg(feature = "full")] + ( + crate::TypeParamBound::PreciseCapture(self0), + crate::TypeParamBound::PreciseCapture(other0), + ) => self0 == other0, + ( + crate::TypeParamBound::Verbatim(self0), + crate::TypeParamBound::Verbatim(other0), + ) => TokenStreamHelper(self0) == TokenStreamHelper(other0), _ => false, } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for TypeParen {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::TypeParen {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for TypeParen { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::TypeParen { fn eq(&self, other: &Self) -> bool { self.elem == other.elem } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for TypePath {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::TypePath {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for TypePath { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::TypePath { fn eq(&self, other: &Self) -> bool { self.qself == other.qself && self.path == other.path } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for TypePtr {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::TypePtr {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for TypePtr { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::TypePtr { fn eq(&self, other: &Self) -> bool { self.const_token == other.const_token && self.mutability == other.mutability && self.elem == other.elem } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for TypeReference {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::TypeReference {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for TypeReference { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::TypeReference { fn eq(&self, other: &Self) -> bool { self.lifetime == other.lifetime && self.mutability == other.mutability && self.elem == other.elem } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for TypeSlice {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::TypeSlice {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for TypeSlice { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::TypeSlice { fn eq(&self, other: &Self) -> bool { 
self.elem == other.elem } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for TypeTraitObject {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::TypeTraitObject {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for TypeTraitObject { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::TypeTraitObject { fn eq(&self, other: &Self) -> bool { self.dyn_token == other.dyn_token && self.bounds == other.bounds } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for TypeTuple {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::TypeTuple {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for TypeTuple { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::TypeTuple { fn eq(&self, other: &Self) -> bool { self.elems == other.elems } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for UnOp {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::UnOp {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for UnOp { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::UnOp { fn eq(&self, other: &Self) -> bool { match (self, other) { - (UnOp::Deref(_), UnOp::Deref(_)) => true, - (UnOp::Not(_), UnOp::Not(_)) => true, - (UnOp::Neg(_), UnOp::Neg(_)) => true, + (crate::UnOp::Deref(_), crate::UnOp::Deref(_)) => true, + (crate::UnOp::Not(_), crate::UnOp::Not(_)) => true, + (crate::UnOp::Neg(_), crate::UnOp::Neg(_)) => true, _ => false, } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for UseGlob {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::UseGlob {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for UseGlob { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::UseGlob { fn eq(&self, _other: &Self) -> bool { true } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for UseGroup {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::UseGroup {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for UseGroup { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::UseGroup { fn eq(&self, other: &Self) -> bool { self.items == other.items } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for UseName {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::UseName {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for UseName { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::UseName { fn eq(&self, other: &Self) -> bool { self.ident == other.ident } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for UsePath {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::UsePath {} #[cfg(feature = 
"full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for UsePath { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::UsePath { fn eq(&self, other: &Self) -> bool { self.ident == other.ident && self.tree == other.tree } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for UseRename {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::UseRename {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for UseRename { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::UseRename { fn eq(&self, other: &Self) -> bool { self.ident == other.ident && self.rename == other.rename } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for UseTree {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::UseTree {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for UseTree { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::UseTree { fn eq(&self, other: &Self) -> bool { match (self, other) { - (UseTree::Path(self0), UseTree::Path(other0)) => self0 == other0, - (UseTree::Name(self0), UseTree::Name(other0)) => self0 == other0, - (UseTree::Rename(self0), UseTree::Rename(other0)) => self0 == other0, - (UseTree::Glob(self0), UseTree::Glob(other0)) => self0 == other0, - (UseTree::Group(self0), UseTree::Group(other0)) => self0 == other0, + (crate::UseTree::Path(self0), crate::UseTree::Path(other0)) => { + self0 == other0 + } + (crate::UseTree::Name(self0), crate::UseTree::Name(other0)) => { + self0 == other0 + } + (crate::UseTree::Rename(self0), crate::UseTree::Rename(other0)) => { + self0 == other0 + } + (crate::UseTree::Glob(self0), crate::UseTree::Glob(other0)) => { + self0 == other0 + } + (crate::UseTree::Group(self0), crate::UseTree::Group(other0)) => { + self0 == other0 + } _ => false, } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for Variadic {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::Variadic {} #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for Variadic { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::Variadic { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.pat == other.pat && self.comma == other.comma } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for Variant {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::Variant {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for Variant { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::Variant { fn eq(&self, other: &Self) -> bool { self.attrs == other.attrs && self.ident == other.ident && self.fields == other.fields && self.discriminant == other.discriminant } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for VisRestricted {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::VisRestricted {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = 
"extra-traits")))] -impl PartialEq for VisRestricted { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::VisRestricted { fn eq(&self, other: &Self) -> bool { self.in_token == other.in_token && self.path == other.path } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for Visibility {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::Visibility {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for Visibility { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::Visibility { fn eq(&self, other: &Self) -> bool { match (self, other) { - (Visibility::Public(_), Visibility::Public(_)) => true, - (Visibility::Restricted(self0), Visibility::Restricted(other0)) => { - self0 == other0 - } - (Visibility::Inherited, Visibility::Inherited) => true, + (crate::Visibility::Public(_), crate::Visibility::Public(_)) => true, + ( + crate::Visibility::Restricted(self0), + crate::Visibility::Restricted(other0), + ) => self0 == other0, + (crate::Visibility::Inherited, crate::Visibility::Inherited) => true, _ => false, } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for WhereClause {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::WhereClause {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for WhereClause { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::WhereClause { fn eq(&self, other: &Self) -> bool { self.predicates == other.predicates } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Eq for WherePredicate {} +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Eq for crate::WherePredicate {} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl PartialEq for WherePredicate { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl PartialEq for crate::WherePredicate { fn eq(&self, other: &Self) -> bool { match (self, other) { - (WherePredicate::Lifetime(self0), WherePredicate::Lifetime(other0)) => { - self0 == other0 - } - (WherePredicate::Type(self0), WherePredicate::Type(other0)) => { + ( + crate::WherePredicate::Lifetime(self0), + crate::WherePredicate::Lifetime(other0), + ) => self0 == other0, + (crate::WherePredicate::Type(self0), crate::WherePredicate::Type(other0)) => { self0 == other0 } _ => false, diff --git a/vendor/syn/src/gen/fold.rs b/vendor/syn/src/gen/fold.rs index 8ea6c75f..1f0afd31 100644 --- a/vendor/syn/src/gen/fold.rs +++ b/vendor/syn/src/gen/fold.rs @@ -7,10 +7,6 @@ clippy::needless_match, clippy::needless_pass_by_ref_mut, )] -#[cfg(any(feature = "full", feature = "derive"))] -use crate::gen::helper::fold::*; -use crate::*; -use proc_macro2::Span; #[cfg(feature = "full")] macro_rules! full { ($e:expr) => { @@ -30,764 +26,1038 @@ macro_rules! 
full { /// [module documentation]: self pub trait Fold { #[cfg(any(feature = "derive", feature = "full"))] - fn fold_abi(&mut self, i: Abi) -> Abi { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_abi(&mut self, i: crate::Abi) -> crate::Abi { fold_abi(self, i) } #[cfg(any(feature = "derive", feature = "full"))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] fn fold_angle_bracketed_generic_arguments( &mut self, - i: AngleBracketedGenericArguments, - ) -> AngleBracketedGenericArguments { + i: crate::AngleBracketedGenericArguments, + ) -> crate::AngleBracketedGenericArguments { fold_angle_bracketed_generic_arguments(self, i) } #[cfg(feature = "full")] - fn fold_arm(&mut self, i: Arm) -> Arm { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_arm(&mut self, i: crate::Arm) -> crate::Arm { fold_arm(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_assoc_const(&mut self, i: AssocConst) -> AssocConst { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_assoc_const(&mut self, i: crate::AssocConst) -> crate::AssocConst { fold_assoc_const(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_assoc_type(&mut self, i: AssocType) -> AssocType { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_assoc_type(&mut self, i: crate::AssocType) -> crate::AssocType { fold_assoc_type(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_attr_style(&mut self, i: AttrStyle) -> AttrStyle { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_attr_style(&mut self, i: crate::AttrStyle) -> crate::AttrStyle { fold_attr_style(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_attribute(&mut self, i: Attribute) -> Attribute { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_attribute(&mut self, i: crate::Attribute) -> crate::Attribute { fold_attribute(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_bare_fn_arg(&mut self, i: BareFnArg) -> BareFnArg { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_attributes(&mut self, i: Vec<crate::Attribute>) -> Vec<crate::Attribute> { + fold_vec(i, self, Self::fold_attribute) + } + #[cfg(any(feature = "derive", feature = "full"))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_bare_fn_arg(&mut self, i: crate::BareFnArg) -> crate::BareFnArg { fold_bare_fn_arg(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_bare_variadic(&mut self, i: BareVariadic) -> BareVariadic { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_bare_variadic(&mut self, i: crate::BareVariadic) -> crate::BareVariadic { fold_bare_variadic(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_bin_op(&mut self, i: BinOp) -> BinOp { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_bin_op(&mut self, i: crate::BinOp) -> crate::BinOp { fold_bin_op(self, i) } #[cfg(feature = "full")] - fn fold_block(&mut self, i: Block) -> Block { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_block(&mut self, i: crate::Block) -> crate::Block { fold_block(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_bound_lifetimes(&mut self, i: BoundLifetimes) -> BoundLifetimes { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", 
feature = "full"))))] + fn fold_bound_lifetimes( + &mut self, + i: crate::BoundLifetimes, + ) -> crate::BoundLifetimes { fold_bound_lifetimes(self, i) } + #[cfg(feature = "full")] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_captured_param(&mut self, i: crate::CapturedParam) -> crate::CapturedParam { + fold_captured_param(self, i) + } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_const_param(&mut self, i: ConstParam) -> ConstParam { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_const_param(&mut self, i: crate::ConstParam) -> crate::ConstParam { fold_const_param(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_constraint(&mut self, i: Constraint) -> Constraint { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_constraint(&mut self, i: crate::Constraint) -> crate::Constraint { fold_constraint(self, i) } #[cfg(feature = "derive")] - fn fold_data(&mut self, i: Data) -> Data { + #[cfg_attr(docsrs, doc(cfg(feature = "derive")))] + fn fold_data(&mut self, i: crate::Data) -> crate::Data { fold_data(self, i) } #[cfg(feature = "derive")] - fn fold_data_enum(&mut self, i: DataEnum) -> DataEnum { + #[cfg_attr(docsrs, doc(cfg(feature = "derive")))] + fn fold_data_enum(&mut self, i: crate::DataEnum) -> crate::DataEnum { fold_data_enum(self, i) } #[cfg(feature = "derive")] - fn fold_data_struct(&mut self, i: DataStruct) -> DataStruct { + #[cfg_attr(docsrs, doc(cfg(feature = "derive")))] + fn fold_data_struct(&mut self, i: crate::DataStruct) -> crate::DataStruct { fold_data_struct(self, i) } #[cfg(feature = "derive")] - fn fold_data_union(&mut self, i: DataUnion) -> DataUnion { + #[cfg_attr(docsrs, doc(cfg(feature = "derive")))] + fn fold_data_union(&mut self, i: crate::DataUnion) -> crate::DataUnion { fold_data_union(self, i) } #[cfg(feature = "derive")] - fn fold_derive_input(&mut self, i: DeriveInput) -> DeriveInput { + #[cfg_attr(docsrs, doc(cfg(feature = "derive")))] + fn fold_derive_input(&mut self, i: crate::DeriveInput) -> crate::DeriveInput { fold_derive_input(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_expr(&mut self, i: Expr) -> Expr { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_expr(&mut self, i: crate::Expr) -> crate::Expr { fold_expr(self, i) } #[cfg(feature = "full")] - fn fold_expr_array(&mut self, i: ExprArray) -> ExprArray { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_expr_array(&mut self, i: crate::ExprArray) -> crate::ExprArray { fold_expr_array(self, i) } #[cfg(feature = "full")] - fn fold_expr_assign(&mut self, i: ExprAssign) -> ExprAssign { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_expr_assign(&mut self, i: crate::ExprAssign) -> crate::ExprAssign { fold_expr_assign(self, i) } #[cfg(feature = "full")] - fn fold_expr_async(&mut self, i: ExprAsync) -> ExprAsync { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_expr_async(&mut self, i: crate::ExprAsync) -> crate::ExprAsync { fold_expr_async(self, i) } #[cfg(feature = "full")] - fn fold_expr_await(&mut self, i: ExprAwait) -> ExprAwait { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_expr_await(&mut self, i: crate::ExprAwait) -> crate::ExprAwait { fold_expr_await(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_expr_binary(&mut self, i: ExprBinary) -> ExprBinary { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_expr_binary(&mut 
self, i: crate::ExprBinary) -> crate::ExprBinary { fold_expr_binary(self, i) } #[cfg(feature = "full")] - fn fold_expr_block(&mut self, i: ExprBlock) -> ExprBlock { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_expr_block(&mut self, i: crate::ExprBlock) -> crate::ExprBlock { fold_expr_block(self, i) } #[cfg(feature = "full")] - fn fold_expr_break(&mut self, i: ExprBreak) -> ExprBreak { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_expr_break(&mut self, i: crate::ExprBreak) -> crate::ExprBreak { fold_expr_break(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_expr_call(&mut self, i: ExprCall) -> ExprCall { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_expr_call(&mut self, i: crate::ExprCall) -> crate::ExprCall { fold_expr_call(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_expr_cast(&mut self, i: ExprCast) -> ExprCast { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_expr_cast(&mut self, i: crate::ExprCast) -> crate::ExprCast { fold_expr_cast(self, i) } #[cfg(feature = "full")] - fn fold_expr_closure(&mut self, i: ExprClosure) -> ExprClosure { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_expr_closure(&mut self, i: crate::ExprClosure) -> crate::ExprClosure { fold_expr_closure(self, i) } #[cfg(feature = "full")] - fn fold_expr_const(&mut self, i: ExprConst) -> ExprConst { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_expr_const(&mut self, i: crate::ExprConst) -> crate::ExprConst { fold_expr_const(self, i) } #[cfg(feature = "full")] - fn fold_expr_continue(&mut self, i: ExprContinue) -> ExprContinue { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_expr_continue(&mut self, i: crate::ExprContinue) -> crate::ExprContinue { fold_expr_continue(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_expr_field(&mut self, i: ExprField) -> ExprField { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_expr_field(&mut self, i: crate::ExprField) -> crate::ExprField { fold_expr_field(self, i) } #[cfg(feature = "full")] - fn fold_expr_for_loop(&mut self, i: ExprForLoop) -> ExprForLoop { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_expr_for_loop(&mut self, i: crate::ExprForLoop) -> crate::ExprForLoop { fold_expr_for_loop(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_expr_group(&mut self, i: ExprGroup) -> ExprGroup { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_expr_group(&mut self, i: crate::ExprGroup) -> crate::ExprGroup { fold_expr_group(self, i) } #[cfg(feature = "full")] - fn fold_expr_if(&mut self, i: ExprIf) -> ExprIf { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_expr_if(&mut self, i: crate::ExprIf) -> crate::ExprIf { fold_expr_if(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_expr_index(&mut self, i: ExprIndex) -> ExprIndex { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_expr_index(&mut self, i: crate::ExprIndex) -> crate::ExprIndex { fold_expr_index(self, i) } #[cfg(feature = "full")] - fn fold_expr_infer(&mut self, i: ExprInfer) -> ExprInfer { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_expr_infer(&mut self, i: crate::ExprInfer) -> crate::ExprInfer { fold_expr_infer(self, i) } #[cfg(feature = "full")] - fn fold_expr_let(&mut self, i: ExprLet) -> ExprLet { + #[cfg_attr(docsrs, 
doc(cfg(feature = "full")))] + fn fold_expr_let(&mut self, i: crate::ExprLet) -> crate::ExprLet { fold_expr_let(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_expr_lit(&mut self, i: ExprLit) -> ExprLit { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_expr_lit(&mut self, i: crate::ExprLit) -> crate::ExprLit { fold_expr_lit(self, i) } #[cfg(feature = "full")] - fn fold_expr_loop(&mut self, i: ExprLoop) -> ExprLoop { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_expr_loop(&mut self, i: crate::ExprLoop) -> crate::ExprLoop { fold_expr_loop(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_expr_macro(&mut self, i: ExprMacro) -> ExprMacro { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_expr_macro(&mut self, i: crate::ExprMacro) -> crate::ExprMacro { fold_expr_macro(self, i) } #[cfg(feature = "full")] - fn fold_expr_match(&mut self, i: ExprMatch) -> ExprMatch { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_expr_match(&mut self, i: crate::ExprMatch) -> crate::ExprMatch { fold_expr_match(self, i) } - #[cfg(feature = "full")] - fn fold_expr_method_call(&mut self, i: ExprMethodCall) -> ExprMethodCall { + #[cfg(any(feature = "derive", feature = "full"))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_expr_method_call( + &mut self, + i: crate::ExprMethodCall, + ) -> crate::ExprMethodCall { fold_expr_method_call(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_expr_paren(&mut self, i: ExprParen) -> ExprParen { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_expr_paren(&mut self, i: crate::ExprParen) -> crate::ExprParen { fold_expr_paren(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_expr_path(&mut self, i: ExprPath) -> ExprPath { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_expr_path(&mut self, i: crate::ExprPath) -> crate::ExprPath { fold_expr_path(self, i) } #[cfg(feature = "full")] - fn fold_expr_range(&mut self, i: ExprRange) -> ExprRange { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_expr_range(&mut self, i: crate::ExprRange) -> crate::ExprRange { fold_expr_range(self, i) } #[cfg(feature = "full")] - fn fold_expr_reference(&mut self, i: ExprReference) -> ExprReference { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_expr_raw_addr(&mut self, i: crate::ExprRawAddr) -> crate::ExprRawAddr { + fold_expr_raw_addr(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_expr_reference(&mut self, i: crate::ExprReference) -> crate::ExprReference { fold_expr_reference(self, i) } #[cfg(feature = "full")] - fn fold_expr_repeat(&mut self, i: ExprRepeat) -> ExprRepeat { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_expr_repeat(&mut self, i: crate::ExprRepeat) -> crate::ExprRepeat { fold_expr_repeat(self, i) } #[cfg(feature = "full")] - fn fold_expr_return(&mut self, i: ExprReturn) -> ExprReturn { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_expr_return(&mut self, i: crate::ExprReturn) -> crate::ExprReturn { fold_expr_return(self, i) } - #[cfg(feature = "full")] - fn fold_expr_struct(&mut self, i: ExprStruct) -> ExprStruct { + #[cfg(any(feature = "derive", feature = "full"))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn 
fold_expr_struct(&mut self, i: crate::ExprStruct) -> crate::ExprStruct { fold_expr_struct(self, i) } #[cfg(feature = "full")] - fn fold_expr_try(&mut self, i: ExprTry) -> ExprTry { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_expr_try(&mut self, i: crate::ExprTry) -> crate::ExprTry { fold_expr_try(self, i) } #[cfg(feature = "full")] - fn fold_expr_try_block(&mut self, i: ExprTryBlock) -> ExprTryBlock { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_expr_try_block(&mut self, i: crate::ExprTryBlock) -> crate::ExprTryBlock { fold_expr_try_block(self, i) } - #[cfg(feature = "full")] - fn fold_expr_tuple(&mut self, i: ExprTuple) -> ExprTuple { + #[cfg(any(feature = "derive", feature = "full"))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_expr_tuple(&mut self, i: crate::ExprTuple) -> crate::ExprTuple { fold_expr_tuple(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_expr_unary(&mut self, i: ExprUnary) -> ExprUnary { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_expr_unary(&mut self, i: crate::ExprUnary) -> crate::ExprUnary { fold_expr_unary(self, i) } #[cfg(feature = "full")] - fn fold_expr_unsafe(&mut self, i: ExprUnsafe) -> ExprUnsafe { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_expr_unsafe(&mut self, i: crate::ExprUnsafe) -> crate::ExprUnsafe { fold_expr_unsafe(self, i) } #[cfg(feature = "full")] - fn fold_expr_while(&mut self, i: ExprWhile) -> ExprWhile { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_expr_while(&mut self, i: crate::ExprWhile) -> crate::ExprWhile { fold_expr_while(self, i) } #[cfg(feature = "full")] - fn fold_expr_yield(&mut self, i: ExprYield) -> ExprYield { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_expr_yield(&mut self, i: crate::ExprYield) -> crate::ExprYield { fold_expr_yield(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_field(&mut self, i: Field) -> Field { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_field(&mut self, i: crate::Field) -> crate::Field { fold_field(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_field_mutability(&mut self, i: FieldMutability) -> FieldMutability { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_field_mutability( + &mut self, + i: crate::FieldMutability, + ) -> crate::FieldMutability { fold_field_mutability(self, i) } #[cfg(feature = "full")] - fn fold_field_pat(&mut self, i: FieldPat) -> FieldPat { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_field_pat(&mut self, i: crate::FieldPat) -> crate::FieldPat { fold_field_pat(self, i) } - #[cfg(feature = "full")] - fn fold_field_value(&mut self, i: FieldValue) -> FieldValue { + #[cfg(any(feature = "derive", feature = "full"))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_field_value(&mut self, i: crate::FieldValue) -> crate::FieldValue { fold_field_value(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_fields(&mut self, i: Fields) -> Fields { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_fields(&mut self, i: crate::Fields) -> crate::Fields { fold_fields(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_fields_named(&mut self, i: FieldsNamed) -> FieldsNamed { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_fields_named(&mut self, 
i: crate::FieldsNamed) -> crate::FieldsNamed { fold_fields_named(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_fields_unnamed(&mut self, i: FieldsUnnamed) -> FieldsUnnamed { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_fields_unnamed(&mut self, i: crate::FieldsUnnamed) -> crate::FieldsUnnamed { fold_fields_unnamed(self, i) } #[cfg(feature = "full")] - fn fold_file(&mut self, i: File) -> File { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_file(&mut self, i: crate::File) -> crate::File { fold_file(self, i) } #[cfg(feature = "full")] - fn fold_fn_arg(&mut self, i: FnArg) -> FnArg { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_fn_arg(&mut self, i: crate::FnArg) -> crate::FnArg { fold_fn_arg(self, i) } #[cfg(feature = "full")] - fn fold_foreign_item(&mut self, i: ForeignItem) -> ForeignItem { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_foreign_item(&mut self, i: crate::ForeignItem) -> crate::ForeignItem { fold_foreign_item(self, i) } #[cfg(feature = "full")] - fn fold_foreign_item_fn(&mut self, i: ForeignItemFn) -> ForeignItemFn { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_foreign_item_fn(&mut self, i: crate::ForeignItemFn) -> crate::ForeignItemFn { fold_foreign_item_fn(self, i) } #[cfg(feature = "full")] - fn fold_foreign_item_macro(&mut self, i: ForeignItemMacro) -> ForeignItemMacro { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_foreign_item_macro( + &mut self, + i: crate::ForeignItemMacro, + ) -> crate::ForeignItemMacro { fold_foreign_item_macro(self, i) } #[cfg(feature = "full")] - fn fold_foreign_item_static(&mut self, i: ForeignItemStatic) -> ForeignItemStatic { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_foreign_item_static( + &mut self, + i: crate::ForeignItemStatic, + ) -> crate::ForeignItemStatic { fold_foreign_item_static(self, i) } #[cfg(feature = "full")] - fn fold_foreign_item_type(&mut self, i: ForeignItemType) -> ForeignItemType { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_foreign_item_type( + &mut self, + i: crate::ForeignItemType, + ) -> crate::ForeignItemType { fold_foreign_item_type(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_generic_argument(&mut self, i: GenericArgument) -> GenericArgument { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_generic_argument( + &mut self, + i: crate::GenericArgument, + ) -> crate::GenericArgument { fold_generic_argument(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_generic_param(&mut self, i: GenericParam) -> GenericParam { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_generic_param(&mut self, i: crate::GenericParam) -> crate::GenericParam { fold_generic_param(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_generics(&mut self, i: Generics) -> Generics { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_generics(&mut self, i: crate::Generics) -> crate::Generics { fold_generics(self, i) } - fn fold_ident(&mut self, i: Ident) -> Ident { + fn fold_ident(&mut self, i: proc_macro2::Ident) -> proc_macro2::Ident { fold_ident(self, i) } #[cfg(feature = "full")] - fn fold_impl_item(&mut self, i: ImplItem) -> ImplItem { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_impl_item(&mut self, i: crate::ImplItem) -> crate::ImplItem { fold_impl_item(self, i) } #[cfg(feature = "full")] - fn 
fold_impl_item_const(&mut self, i: ImplItemConst) -> ImplItemConst { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_impl_item_const(&mut self, i: crate::ImplItemConst) -> crate::ImplItemConst { fold_impl_item_const(self, i) } #[cfg(feature = "full")] - fn fold_impl_item_fn(&mut self, i: ImplItemFn) -> ImplItemFn { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_impl_item_fn(&mut self, i: crate::ImplItemFn) -> crate::ImplItemFn { fold_impl_item_fn(self, i) } #[cfg(feature = "full")] - fn fold_impl_item_macro(&mut self, i: ImplItemMacro) -> ImplItemMacro { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_impl_item_macro(&mut self, i: crate::ImplItemMacro) -> crate::ImplItemMacro { fold_impl_item_macro(self, i) } #[cfg(feature = "full")] - fn fold_impl_item_type(&mut self, i: ImplItemType) -> ImplItemType { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_impl_item_type(&mut self, i: crate::ImplItemType) -> crate::ImplItemType { fold_impl_item_type(self, i) } #[cfg(feature = "full")] - fn fold_impl_restriction(&mut self, i: ImplRestriction) -> ImplRestriction { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_impl_restriction( + &mut self, + i: crate::ImplRestriction, + ) -> crate::ImplRestriction { fold_impl_restriction(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_index(&mut self, i: Index) -> Index { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_index(&mut self, i: crate::Index) -> crate::Index { fold_index(self, i) } #[cfg(feature = "full")] - fn fold_item(&mut self, i: Item) -> Item { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_item(&mut self, i: crate::Item) -> crate::Item { fold_item(self, i) } #[cfg(feature = "full")] - fn fold_item_const(&mut self, i: ItemConst) -> ItemConst { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_item_const(&mut self, i: crate::ItemConst) -> crate::ItemConst { fold_item_const(self, i) } #[cfg(feature = "full")] - fn fold_item_enum(&mut self, i: ItemEnum) -> ItemEnum { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_item_enum(&mut self, i: crate::ItemEnum) -> crate::ItemEnum { fold_item_enum(self, i) } #[cfg(feature = "full")] - fn fold_item_extern_crate(&mut self, i: ItemExternCrate) -> ItemExternCrate { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_item_extern_crate( + &mut self, + i: crate::ItemExternCrate, + ) -> crate::ItemExternCrate { fold_item_extern_crate(self, i) } #[cfg(feature = "full")] - fn fold_item_fn(&mut self, i: ItemFn) -> ItemFn { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_item_fn(&mut self, i: crate::ItemFn) -> crate::ItemFn { fold_item_fn(self, i) } #[cfg(feature = "full")] - fn fold_item_foreign_mod(&mut self, i: ItemForeignMod) -> ItemForeignMod { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_item_foreign_mod( + &mut self, + i: crate::ItemForeignMod, + ) -> crate::ItemForeignMod { fold_item_foreign_mod(self, i) } #[cfg(feature = "full")] - fn fold_item_impl(&mut self, i: ItemImpl) -> ItemImpl { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_item_impl(&mut self, i: crate::ItemImpl) -> crate::ItemImpl { fold_item_impl(self, i) } #[cfg(feature = "full")] - fn fold_item_macro(&mut self, i: ItemMacro) -> ItemMacro { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_item_macro(&mut self, i: crate::ItemMacro) -> crate::ItemMacro { fold_item_macro(self, i) } #[cfg(feature = "full")] - fn fold_item_mod(&mut self, i: 
ItemMod) -> ItemMod { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_item_mod(&mut self, i: crate::ItemMod) -> crate::ItemMod { fold_item_mod(self, i) } #[cfg(feature = "full")] - fn fold_item_static(&mut self, i: ItemStatic) -> ItemStatic { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_item_static(&mut self, i: crate::ItemStatic) -> crate::ItemStatic { fold_item_static(self, i) } #[cfg(feature = "full")] - fn fold_item_struct(&mut self, i: ItemStruct) -> ItemStruct { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_item_struct(&mut self, i: crate::ItemStruct) -> crate::ItemStruct { fold_item_struct(self, i) } #[cfg(feature = "full")] - fn fold_item_trait(&mut self, i: ItemTrait) -> ItemTrait { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_item_trait(&mut self, i: crate::ItemTrait) -> crate::ItemTrait { fold_item_trait(self, i) } #[cfg(feature = "full")] - fn fold_item_trait_alias(&mut self, i: ItemTraitAlias) -> ItemTraitAlias { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_item_trait_alias( + &mut self, + i: crate::ItemTraitAlias, + ) -> crate::ItemTraitAlias { fold_item_trait_alias(self, i) } #[cfg(feature = "full")] - fn fold_item_type(&mut self, i: ItemType) -> ItemType { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_item_type(&mut self, i: crate::ItemType) -> crate::ItemType { fold_item_type(self, i) } #[cfg(feature = "full")] - fn fold_item_union(&mut self, i: ItemUnion) -> ItemUnion { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_item_union(&mut self, i: crate::ItemUnion) -> crate::ItemUnion { fold_item_union(self, i) } #[cfg(feature = "full")] - fn fold_item_use(&mut self, i: ItemUse) -> ItemUse { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_item_use(&mut self, i: crate::ItemUse) -> crate::ItemUse { fold_item_use(self, i) } #[cfg(feature = "full")] - fn fold_label(&mut self, i: Label) -> Label { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_label(&mut self, i: crate::Label) -> crate::Label { fold_label(self, i) } - fn fold_lifetime(&mut self, i: Lifetime) -> Lifetime { + fn fold_lifetime(&mut self, i: crate::Lifetime) -> crate::Lifetime { fold_lifetime(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_lifetime_param(&mut self, i: LifetimeParam) -> LifetimeParam { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_lifetime_param(&mut self, i: crate::LifetimeParam) -> crate::LifetimeParam { fold_lifetime_param(self, i) } - fn fold_lit(&mut self, i: Lit) -> Lit { + fn fold_lit(&mut self, i: crate::Lit) -> crate::Lit { fold_lit(self, i) } - fn fold_lit_bool(&mut self, i: LitBool) -> LitBool { + fn fold_lit_bool(&mut self, i: crate::LitBool) -> crate::LitBool { fold_lit_bool(self, i) } - fn fold_lit_byte(&mut self, i: LitByte) -> LitByte { + fn fold_lit_byte(&mut self, i: crate::LitByte) -> crate::LitByte { fold_lit_byte(self, i) } - fn fold_lit_byte_str(&mut self, i: LitByteStr) -> LitByteStr { + fn fold_lit_byte_str(&mut self, i: crate::LitByteStr) -> crate::LitByteStr { fold_lit_byte_str(self, i) } - fn fold_lit_char(&mut self, i: LitChar) -> LitChar { + fn fold_lit_cstr(&mut self, i: crate::LitCStr) -> crate::LitCStr { + fold_lit_cstr(self, i) + } + fn fold_lit_char(&mut self, i: crate::LitChar) -> crate::LitChar { fold_lit_char(self, i) } - fn fold_lit_float(&mut self, i: LitFloat) -> LitFloat { + fn fold_lit_float(&mut self, i: crate::LitFloat) -> crate::LitFloat { fold_lit_float(self, i) } - fn 
fold_lit_int(&mut self, i: LitInt) -> LitInt {
+    fn fold_lit_int(&mut self, i: crate::LitInt) -> crate::LitInt {
         fold_lit_int(self, i)
     }
-    fn fold_lit_str(&mut self, i: LitStr) -> LitStr {
+    fn fold_lit_str(&mut self, i: crate::LitStr) -> crate::LitStr {
         fold_lit_str(self, i)
     }
     #[cfg(feature = "full")]
-    fn fold_local(&mut self, i: Local) -> Local {
+    #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+    fn fold_local(&mut self, i: crate::Local) -> crate::Local {
         fold_local(self, i)
     }
     #[cfg(feature = "full")]
-    fn fold_local_init(&mut self, i: LocalInit) -> LocalInit {
+    #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+    fn fold_local_init(&mut self, i: crate::LocalInit) -> crate::LocalInit {
         fold_local_init(self, i)
     }
     #[cfg(any(feature = "derive", feature = "full"))]
-    fn fold_macro(&mut self, i: Macro) -> Macro {
+    #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+    fn fold_macro(&mut self, i: crate::Macro) -> crate::Macro {
         fold_macro(self, i)
     }
     #[cfg(any(feature = "derive", feature = "full"))]
-    fn fold_macro_delimiter(&mut self, i: MacroDelimiter) -> MacroDelimiter {
+    #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+    fn fold_macro_delimiter(
+        &mut self,
+        i: crate::MacroDelimiter,
+    ) -> crate::MacroDelimiter {
         fold_macro_delimiter(self, i)
     }
     #[cfg(any(feature = "derive", feature = "full"))]
-    fn fold_member(&mut self, i: Member) -> Member {
+    #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+    fn fold_member(&mut self, i: crate::Member) -> crate::Member {
         fold_member(self, i)
     }
     #[cfg(any(feature = "derive", feature = "full"))]
-    fn fold_meta(&mut self, i: Meta) -> Meta {
+    #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+    fn fold_meta(&mut self, i: crate::Meta) -> crate::Meta {
         fold_meta(self, i)
     }
     #[cfg(any(feature = "derive", feature = "full"))]
-    fn fold_meta_list(&mut self, i: MetaList) -> MetaList {
+    #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+    fn fold_meta_list(&mut self, i: crate::MetaList) -> crate::MetaList {
         fold_meta_list(self, i)
     }
     #[cfg(any(feature = "derive", feature = "full"))]
-    fn fold_meta_name_value(&mut self, i: MetaNameValue) -> MetaNameValue {
+    #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+    fn fold_meta_name_value(&mut self, i: crate::MetaNameValue) -> crate::MetaNameValue {
         fold_meta_name_value(self, i)
     }
     #[cfg(any(feature = "derive", feature = "full"))]
+    #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
     fn fold_parenthesized_generic_arguments(
         &mut self,
-        i: ParenthesizedGenericArguments,
-    ) -> ParenthesizedGenericArguments {
+        i: crate::ParenthesizedGenericArguments,
+    ) -> crate::ParenthesizedGenericArguments {
         fold_parenthesized_generic_arguments(self, i)
     }
     #[cfg(feature = "full")]
-    fn fold_pat(&mut self, i: Pat) -> Pat {
+    #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+    fn fold_pat(&mut self, i: crate::Pat) -> crate::Pat {
         fold_pat(self, i)
     }
     #[cfg(feature = "full")]
-    fn fold_pat_ident(&mut self, i: PatIdent) -> PatIdent {
+    #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+    fn fold_pat_ident(&mut self, i: crate::PatIdent) -> crate::PatIdent {
         fold_pat_ident(self, i)
     }
     #[cfg(feature = "full")]
-    fn fold_pat_or(&mut self, i: PatOr) -> PatOr {
+    #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+    fn fold_pat_or(&mut self, i: crate::PatOr) -> crate::PatOr {
         fold_pat_or(self, i)
     }
     #[cfg(feature = "full")]
-    fn fold_pat_paren(&mut self, i: PatParen) -> PatParen {
+    #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+    fn fold_pat_paren(&mut self, i: crate::PatParen) -> crate::PatParen {
         fold_pat_paren(self, i)
     }
     #[cfg(feature = "full")]
-    fn fold_pat_reference(&mut self, i: PatReference) -> PatReference {
+    #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+    fn fold_pat_reference(&mut self, i: crate::PatReference) -> crate::PatReference {
         fold_pat_reference(self, i)
     }
     #[cfg(feature = "full")]
-    fn fold_pat_rest(&mut self, i: PatRest) -> PatRest {
+    #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+    fn fold_pat_rest(&mut self, i: crate::PatRest) -> crate::PatRest {
         fold_pat_rest(self, i)
     }
     #[cfg(feature = "full")]
-    fn fold_pat_slice(&mut self, i: PatSlice) -> PatSlice {
+    #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+    fn fold_pat_slice(&mut self, i: crate::PatSlice) -> crate::PatSlice {
         fold_pat_slice(self, i)
     }
     #[cfg(feature = "full")]
-    fn fold_pat_struct(&mut self, i: PatStruct) -> PatStruct {
+    #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+    fn fold_pat_struct(&mut self, i: crate::PatStruct) -> crate::PatStruct {
         fold_pat_struct(self, i)
     }
     #[cfg(feature = "full")]
-    fn fold_pat_tuple(&mut self, i: PatTuple) -> PatTuple {
+    #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+    fn fold_pat_tuple(&mut self, i: crate::PatTuple) -> crate::PatTuple {
         fold_pat_tuple(self, i)
     }
     #[cfg(feature = "full")]
-    fn fold_pat_tuple_struct(&mut self, i: PatTupleStruct) -> PatTupleStruct {
+    #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+    fn fold_pat_tuple_struct(
+        &mut self,
+        i: crate::PatTupleStruct,
+    ) -> crate::PatTupleStruct {
         fold_pat_tuple_struct(self, i)
     }
     #[cfg(feature = "full")]
-    fn fold_pat_type(&mut self, i: PatType) -> PatType {
+    #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+    fn fold_pat_type(&mut self, i: crate::PatType) -> crate::PatType {
         fold_pat_type(self, i)
     }
     #[cfg(feature = "full")]
-    fn fold_pat_wild(&mut self, i: PatWild) -> PatWild {
+    #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+    fn fold_pat_wild(&mut self, i: crate::PatWild) -> crate::PatWild {
         fold_pat_wild(self, i)
     }
     #[cfg(any(feature = "derive", feature = "full"))]
-    fn fold_path(&mut self, i: Path) -> Path {
+    #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+    fn fold_path(&mut self, i: crate::Path) -> crate::Path {
         fold_path(self, i)
     }
     #[cfg(any(feature = "derive", feature = "full"))]
-    fn fold_path_arguments(&mut self, i: PathArguments) -> PathArguments {
+    #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+    fn fold_path_arguments(&mut self, i: crate::PathArguments) -> crate::PathArguments {
         fold_path_arguments(self, i)
     }
     #[cfg(any(feature = "derive", feature = "full"))]
-    fn fold_path_segment(&mut self, i: PathSegment) -> PathSegment {
+    #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+    fn fold_path_segment(&mut self, i: crate::PathSegment) -> crate::PathSegment {
         fold_path_segment(self, i)
     }
+    #[cfg(feature = "full")]
+    #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+    fn fold_pointer_mutability(
+        &mut self,
+        i: crate::PointerMutability,
+    ) -> crate::PointerMutability {
+        fold_pointer_mutability(self, i)
+    }
+    #[cfg(feature = "full")]
+    #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+    fn fold_precise_capture(
+        &mut self,
+        i: crate::PreciseCapture,
+    ) -> crate::PreciseCapture {
+        fold_precise_capture(self, i)
+    }
     #[cfg(any(feature = "derive", feature = "full"))]
-    fn fold_predicate_lifetime(&mut self, i: PredicateLifetime) -> PredicateLifetime {
+    #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+    fn fold_predicate_lifetime(
+        &mut self,
+        i: crate::PredicateLifetime,
+    ) -> crate::PredicateLifetime {
         fold_predicate_lifetime(self, i)
     }
     #[cfg(any(feature = "derive", feature = "full"))]
-    fn fold_predicate_type(&mut self, i: PredicateType) -> PredicateType {
+    #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+    fn fold_predicate_type(&mut self, i: crate::PredicateType) -> crate::PredicateType {
         fold_predicate_type(self, i)
     }
     #[cfg(any(feature = "derive", feature = "full"))]
-    fn fold_qself(&mut self, i: QSelf) -> QSelf {
+    #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+    fn fold_qself(&mut self, i: crate::QSelf) -> crate::QSelf {
         fold_qself(self, i)
     }
     #[cfg(feature = "full")]
-    fn fold_range_limits(&mut self, i: RangeLimits) -> RangeLimits {
+    #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+    fn fold_range_limits(&mut self, i: crate::RangeLimits) -> crate::RangeLimits {
         fold_range_limits(self, i)
     }
     #[cfg(feature = "full")]
-    fn fold_receiver(&mut self, i: Receiver) -> Receiver {
+    #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+    fn fold_receiver(&mut self, i: crate::Receiver) -> crate::Receiver {
         fold_receiver(self, i)
     }
     #[cfg(any(feature = "derive", feature = "full"))]
-    fn fold_return_type(&mut self, i: ReturnType) -> ReturnType {
+    #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+    fn fold_return_type(&mut self, i: crate::ReturnType) -> crate::ReturnType {
         fold_return_type(self, i)
     }
     #[cfg(feature = "full")]
-    fn fold_signature(&mut self, i: Signature) -> Signature {
+    #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+    fn fold_signature(&mut self, i: crate::Signature) -> crate::Signature {
         fold_signature(self, i)
     }
-    fn fold_span(&mut self, i: Span) -> Span {
-        fold_span(self, i)
+    fn fold_span(&mut self, i: proc_macro2::Span) -> proc_macro2::Span {
+        i
     }
     #[cfg(feature = "full")]
-    fn fold_static_mutability(&mut self, i: StaticMutability) -> StaticMutability {
+    #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+    fn fold_static_mutability(
+        &mut self,
+        i: crate::StaticMutability,
+    ) -> crate::StaticMutability {
         fold_static_mutability(self, i)
     }
     #[cfg(feature = "full")]
-    fn fold_stmt(&mut self, i: Stmt) -> Stmt {
+    #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+    fn fold_stmt(&mut self, i: crate::Stmt) -> crate::Stmt {
         fold_stmt(self, i)
     }
     #[cfg(feature = "full")]
-    fn fold_stmt_macro(&mut self, i: StmtMacro) -> StmtMacro {
+    #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+    fn fold_stmt_macro(&mut self, i: crate::StmtMacro) -> crate::StmtMacro {
         fold_stmt_macro(self, i)
     }
+    fn fold_token_stream(
+        &mut self,
+        i: proc_macro2::TokenStream,
+    ) -> proc_macro2::TokenStream {
+        i
+    }
     #[cfg(any(feature = "derive", feature = "full"))]
-    fn fold_trait_bound(&mut self, i: TraitBound) -> TraitBound {
+    #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
+    fn fold_trait_bound(&mut self, i: crate::TraitBound) -> crate::TraitBound {
         fold_trait_bound(self, i)
     }
     #[cfg(any(feature = "derive", feature = "full"))]
+    #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))]
     fn fold_trait_bound_modifier(
         &mut self,
-        i: TraitBoundModifier,
-    ) -> TraitBoundModifier {
+        i: crate::TraitBoundModifier,
+    ) -> crate::TraitBoundModifier {
         fold_trait_bound_modifier(self, i)
     }
     #[cfg(feature = "full")]
-    fn fold_trait_item(&mut self, i: TraitItem) -> TraitItem {
+    #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
+    fn fold_trait_item(&mut self, i: crate::TraitItem) -> crate::TraitItem {
fold_trait_item(self, i) } #[cfg(feature = "full")] - fn fold_trait_item_const(&mut self, i: TraitItemConst) -> TraitItemConst { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_trait_item_const( + &mut self, + i: crate::TraitItemConst, + ) -> crate::TraitItemConst { fold_trait_item_const(self, i) } #[cfg(feature = "full")] - fn fold_trait_item_fn(&mut self, i: TraitItemFn) -> TraitItemFn { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_trait_item_fn(&mut self, i: crate::TraitItemFn) -> crate::TraitItemFn { fold_trait_item_fn(self, i) } #[cfg(feature = "full")] - fn fold_trait_item_macro(&mut self, i: TraitItemMacro) -> TraitItemMacro { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_trait_item_macro( + &mut self, + i: crate::TraitItemMacro, + ) -> crate::TraitItemMacro { fold_trait_item_macro(self, i) } #[cfg(feature = "full")] - fn fold_trait_item_type(&mut self, i: TraitItemType) -> TraitItemType { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_trait_item_type(&mut self, i: crate::TraitItemType) -> crate::TraitItemType { fold_trait_item_type(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_type(&mut self, i: Type) -> Type { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_type(&mut self, i: crate::Type) -> crate::Type { fold_type(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_type_array(&mut self, i: TypeArray) -> TypeArray { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_type_array(&mut self, i: crate::TypeArray) -> crate::TypeArray { fold_type_array(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_type_bare_fn(&mut self, i: TypeBareFn) -> TypeBareFn { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_type_bare_fn(&mut self, i: crate::TypeBareFn) -> crate::TypeBareFn { fold_type_bare_fn(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_type_group(&mut self, i: TypeGroup) -> TypeGroup { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_type_group(&mut self, i: crate::TypeGroup) -> crate::TypeGroup { fold_type_group(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_type_impl_trait(&mut self, i: TypeImplTrait) -> TypeImplTrait { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_type_impl_trait(&mut self, i: crate::TypeImplTrait) -> crate::TypeImplTrait { fold_type_impl_trait(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_type_infer(&mut self, i: TypeInfer) -> TypeInfer { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_type_infer(&mut self, i: crate::TypeInfer) -> crate::TypeInfer { fold_type_infer(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_type_macro(&mut self, i: TypeMacro) -> TypeMacro { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_type_macro(&mut self, i: crate::TypeMacro) -> crate::TypeMacro { fold_type_macro(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_type_never(&mut self, i: TypeNever) -> TypeNever { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_type_never(&mut self, i: crate::TypeNever) -> crate::TypeNever { fold_type_never(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_type_param(&mut self, i: TypeParam) -> TypeParam { + 
#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_type_param(&mut self, i: crate::TypeParam) -> crate::TypeParam { fold_type_param(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_type_param_bound(&mut self, i: TypeParamBound) -> TypeParamBound { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_type_param_bound( + &mut self, + i: crate::TypeParamBound, + ) -> crate::TypeParamBound { fold_type_param_bound(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_type_paren(&mut self, i: TypeParen) -> TypeParen { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_type_paren(&mut self, i: crate::TypeParen) -> crate::TypeParen { fold_type_paren(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_type_path(&mut self, i: TypePath) -> TypePath { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_type_path(&mut self, i: crate::TypePath) -> crate::TypePath { fold_type_path(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_type_ptr(&mut self, i: TypePtr) -> TypePtr { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_type_ptr(&mut self, i: crate::TypePtr) -> crate::TypePtr { fold_type_ptr(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_type_reference(&mut self, i: TypeReference) -> TypeReference { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_type_reference(&mut self, i: crate::TypeReference) -> crate::TypeReference { fold_type_reference(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_type_slice(&mut self, i: TypeSlice) -> TypeSlice { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_type_slice(&mut self, i: crate::TypeSlice) -> crate::TypeSlice { fold_type_slice(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_type_trait_object(&mut self, i: TypeTraitObject) -> TypeTraitObject { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_type_trait_object( + &mut self, + i: crate::TypeTraitObject, + ) -> crate::TypeTraitObject { fold_type_trait_object(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_type_tuple(&mut self, i: TypeTuple) -> TypeTuple { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_type_tuple(&mut self, i: crate::TypeTuple) -> crate::TypeTuple { fold_type_tuple(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_un_op(&mut self, i: UnOp) -> UnOp { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_un_op(&mut self, i: crate::UnOp) -> crate::UnOp { fold_un_op(self, i) } #[cfg(feature = "full")] - fn fold_use_glob(&mut self, i: UseGlob) -> UseGlob { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_use_glob(&mut self, i: crate::UseGlob) -> crate::UseGlob { fold_use_glob(self, i) } #[cfg(feature = "full")] - fn fold_use_group(&mut self, i: UseGroup) -> UseGroup { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_use_group(&mut self, i: crate::UseGroup) -> crate::UseGroup { fold_use_group(self, i) } #[cfg(feature = "full")] - fn fold_use_name(&mut self, i: UseName) -> UseName { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_use_name(&mut self, i: crate::UseName) -> crate::UseName { fold_use_name(self, i) } #[cfg(feature = "full")] - fn 
fold_use_path(&mut self, i: UsePath) -> UsePath { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_use_path(&mut self, i: crate::UsePath) -> crate::UsePath { fold_use_path(self, i) } #[cfg(feature = "full")] - fn fold_use_rename(&mut self, i: UseRename) -> UseRename { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_use_rename(&mut self, i: crate::UseRename) -> crate::UseRename { fold_use_rename(self, i) } #[cfg(feature = "full")] - fn fold_use_tree(&mut self, i: UseTree) -> UseTree { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_use_tree(&mut self, i: crate::UseTree) -> crate::UseTree { fold_use_tree(self, i) } #[cfg(feature = "full")] - fn fold_variadic(&mut self, i: Variadic) -> Variadic { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn fold_variadic(&mut self, i: crate::Variadic) -> crate::Variadic { fold_variadic(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_variant(&mut self, i: Variant) -> Variant { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_variant(&mut self, i: crate::Variant) -> crate::Variant { fold_variant(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_vis_restricted(&mut self, i: VisRestricted) -> VisRestricted { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_vis_restricted(&mut self, i: crate::VisRestricted) -> crate::VisRestricted { fold_vis_restricted(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_visibility(&mut self, i: Visibility) -> Visibility { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_visibility(&mut self, i: crate::Visibility) -> crate::Visibility { fold_visibility(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_where_clause(&mut self, i: WhereClause) -> WhereClause { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_where_clause(&mut self, i: crate::WhereClause) -> crate::WhereClause { fold_where_clause(self, i) } #[cfg(any(feature = "derive", feature = "full"))] - fn fold_where_predicate(&mut self, i: WherePredicate) -> WherePredicate { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn fold_where_predicate( + &mut self, + i: crate::WherePredicate, + ) -> crate::WherePredicate { fold_where_predicate(self, i) } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_abi<F>(f: &mut F, node: Abi) -> Abi +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_abi<F>(f: &mut F, node: crate::Abi) -> crate::Abi where F: Fold + ?Sized, { - Abi { + crate::Abi { extern_token: node.extern_token, name: (node.name).map(|it| f.fold_lit_str(it)), } } #[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] pub fn fold_angle_bracketed_generic_arguments<F>( f: &mut F, - node: AngleBracketedGenericArguments, -) -> AngleBracketedGenericArguments + node: crate::AngleBracketedGenericArguments, +) -> crate::AngleBracketedGenericArguments where F: Fold + ?Sized, { - AngleBracketedGenericArguments { + crate::AngleBracketedGenericArguments { colon2_token: node.colon2_token, lt_token: node.lt_token, - args: FoldHelper::lift(node.args, |it| f.fold_generic_argument(it)), + args: crate::punctuated::fold(node.args, f, F::fold_generic_argument), gt_token: node.gt_token, } } #[cfg(feature = "full")] -pub fn fold_arm<F>(f: &mut F, node: Arm) -> Arm +#[cfg_attr(docsrs, 
doc(cfg(feature = "full")))] +pub fn fold_arm<F>(f: &mut F, node: crate::Arm) -> crate::Arm where F: Fold + ?Sized, { - Arm { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::Arm { + attrs: f.fold_attributes(node.attrs), pat: f.fold_pat(node.pat), guard: (node.guard).map(|it| ((it).0, Box::new(f.fold_expr(*(it).1)))), fat_arrow_token: node.fat_arrow_token, @@ -796,11 +1066,12 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_assoc_const<F>(f: &mut F, node: AssocConst) -> AssocConst +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_assoc_const<F>(f: &mut F, node: crate::AssocConst) -> crate::AssocConst where F: Fold + ?Sized, { - AssocConst { + crate::AssocConst { ident: f.fold_ident(node.ident), generics: (node.generics).map(|it| f.fold_angle_bracketed_generic_arguments(it)), eq_token: node.eq_token, @@ -808,11 +1079,12 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_assoc_type<F>(f: &mut F, node: AssocType) -> AssocType +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_assoc_type<F>(f: &mut F, node: crate::AssocType) -> crate::AssocType where F: Fold + ?Sized, { - AssocType { + crate::AssocType { ident: f.fold_ident(node.ident), generics: (node.generics).map(|it| f.fold_angle_bracketed_generic_arguments(it)), eq_token: node.eq_token, @@ -820,21 +1092,23 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_attr_style<F>(f: &mut F, node: AttrStyle) -> AttrStyle +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_attr_style<F>(f: &mut F, node: crate::AttrStyle) -> crate::AttrStyle where F: Fold + ?Sized, { match node { - AttrStyle::Outer => AttrStyle::Outer, - AttrStyle::Inner(_binding_0) => AttrStyle::Inner(_binding_0), + crate::AttrStyle::Outer => crate::AttrStyle::Outer, + crate::AttrStyle::Inner(_binding_0) => crate::AttrStyle::Inner(_binding_0), } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_attribute<F>(f: &mut F, node: Attribute) -> Attribute +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_attribute<F>(f: &mut F, node: crate::Attribute) -> crate::Attribute where F: Fold + ?Sized, { - Attribute { + crate::Attribute { pound_token: node.pound_token, style: f.fold_attr_style(node.style), bracket_token: node.bracket_token, @@ -842,93 +1116,120 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_bare_fn_arg<F>(f: &mut F, node: BareFnArg) -> BareFnArg +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_bare_fn_arg<F>(f: &mut F, node: crate::BareFnArg) -> crate::BareFnArg where F: Fold + ?Sized, { - BareFnArg { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::BareFnArg { + attrs: f.fold_attributes(node.attrs), name: (node.name).map(|it| (f.fold_ident((it).0), (it).1)), ty: f.fold_type(node.ty), } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_bare_variadic<F>(f: &mut F, node: BareVariadic) -> BareVariadic +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_bare_variadic<F>(f: &mut F, node: crate::BareVariadic) -> crate::BareVariadic where F: Fold + ?Sized, { - BareVariadic { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::BareVariadic { + attrs: f.fold_attributes(node.attrs), name: (node.name).map(|it| (f.fold_ident((it).0), (it).1)), dots: node.dots, comma: 
node.comma, } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_bin_op<F>(f: &mut F, node: BinOp) -> BinOp +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_bin_op<F>(f: &mut F, node: crate::BinOp) -> crate::BinOp where F: Fold + ?Sized, { match node { - BinOp::Add(_binding_0) => BinOp::Add(_binding_0), - BinOp::Sub(_binding_0) => BinOp::Sub(_binding_0), - BinOp::Mul(_binding_0) => BinOp::Mul(_binding_0), - BinOp::Div(_binding_0) => BinOp::Div(_binding_0), - BinOp::Rem(_binding_0) => BinOp::Rem(_binding_0), - BinOp::And(_binding_0) => BinOp::And(_binding_0), - BinOp::Or(_binding_0) => BinOp::Or(_binding_0), - BinOp::BitXor(_binding_0) => BinOp::BitXor(_binding_0), - BinOp::BitAnd(_binding_0) => BinOp::BitAnd(_binding_0), - BinOp::BitOr(_binding_0) => BinOp::BitOr(_binding_0), - BinOp::Shl(_binding_0) => BinOp::Shl(_binding_0), - BinOp::Shr(_binding_0) => BinOp::Shr(_binding_0), - BinOp::Eq(_binding_0) => BinOp::Eq(_binding_0), - BinOp::Lt(_binding_0) => BinOp::Lt(_binding_0), - BinOp::Le(_binding_0) => BinOp::Le(_binding_0), - BinOp::Ne(_binding_0) => BinOp::Ne(_binding_0), - BinOp::Ge(_binding_0) => BinOp::Ge(_binding_0), - BinOp::Gt(_binding_0) => BinOp::Gt(_binding_0), - BinOp::AddAssign(_binding_0) => BinOp::AddAssign(_binding_0), - BinOp::SubAssign(_binding_0) => BinOp::SubAssign(_binding_0), - BinOp::MulAssign(_binding_0) => BinOp::MulAssign(_binding_0), - BinOp::DivAssign(_binding_0) => BinOp::DivAssign(_binding_0), - BinOp::RemAssign(_binding_0) => BinOp::RemAssign(_binding_0), - BinOp::BitXorAssign(_binding_0) => BinOp::BitXorAssign(_binding_0), - BinOp::BitAndAssign(_binding_0) => BinOp::BitAndAssign(_binding_0), - BinOp::BitOrAssign(_binding_0) => BinOp::BitOrAssign(_binding_0), - BinOp::ShlAssign(_binding_0) => BinOp::ShlAssign(_binding_0), - BinOp::ShrAssign(_binding_0) => BinOp::ShrAssign(_binding_0), - } -} -#[cfg(feature = "full")] -pub fn fold_block<F>(f: &mut F, node: Block) -> Block -where - F: Fold + ?Sized, -{ - Block { + crate::BinOp::Add(_binding_0) => crate::BinOp::Add(_binding_0), + crate::BinOp::Sub(_binding_0) => crate::BinOp::Sub(_binding_0), + crate::BinOp::Mul(_binding_0) => crate::BinOp::Mul(_binding_0), + crate::BinOp::Div(_binding_0) => crate::BinOp::Div(_binding_0), + crate::BinOp::Rem(_binding_0) => crate::BinOp::Rem(_binding_0), + crate::BinOp::And(_binding_0) => crate::BinOp::And(_binding_0), + crate::BinOp::Or(_binding_0) => crate::BinOp::Or(_binding_0), + crate::BinOp::BitXor(_binding_0) => crate::BinOp::BitXor(_binding_0), + crate::BinOp::BitAnd(_binding_0) => crate::BinOp::BitAnd(_binding_0), + crate::BinOp::BitOr(_binding_0) => crate::BinOp::BitOr(_binding_0), + crate::BinOp::Shl(_binding_0) => crate::BinOp::Shl(_binding_0), + crate::BinOp::Shr(_binding_0) => crate::BinOp::Shr(_binding_0), + crate::BinOp::Eq(_binding_0) => crate::BinOp::Eq(_binding_0), + crate::BinOp::Lt(_binding_0) => crate::BinOp::Lt(_binding_0), + crate::BinOp::Le(_binding_0) => crate::BinOp::Le(_binding_0), + crate::BinOp::Ne(_binding_0) => crate::BinOp::Ne(_binding_0), + crate::BinOp::Ge(_binding_0) => crate::BinOp::Ge(_binding_0), + crate::BinOp::Gt(_binding_0) => crate::BinOp::Gt(_binding_0), + crate::BinOp::AddAssign(_binding_0) => crate::BinOp::AddAssign(_binding_0), + crate::BinOp::SubAssign(_binding_0) => crate::BinOp::SubAssign(_binding_0), + crate::BinOp::MulAssign(_binding_0) => crate::BinOp::MulAssign(_binding_0), + crate::BinOp::DivAssign(_binding_0) => crate::BinOp::DivAssign(_binding_0), + 
crate::BinOp::RemAssign(_binding_0) => crate::BinOp::RemAssign(_binding_0), + crate::BinOp::BitXorAssign(_binding_0) => crate::BinOp::BitXorAssign(_binding_0), + crate::BinOp::BitAndAssign(_binding_0) => crate::BinOp::BitAndAssign(_binding_0), + crate::BinOp::BitOrAssign(_binding_0) => crate::BinOp::BitOrAssign(_binding_0), + crate::BinOp::ShlAssign(_binding_0) => crate::BinOp::ShlAssign(_binding_0), + crate::BinOp::ShrAssign(_binding_0) => crate::BinOp::ShrAssign(_binding_0), + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_block<F>(f: &mut F, node: crate::Block) -> crate::Block +where + F: Fold + ?Sized, +{ + crate::Block { brace_token: node.brace_token, - stmts: FoldHelper::lift(node.stmts, |it| f.fold_stmt(it)), + stmts: fold_vec(node.stmts, f, F::fold_stmt), } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_bound_lifetimes<F>(f: &mut F, node: BoundLifetimes) -> BoundLifetimes +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_bound_lifetimes<F>( + f: &mut F, + node: crate::BoundLifetimes, +) -> crate::BoundLifetimes where F: Fold + ?Sized, { - BoundLifetimes { + crate::BoundLifetimes { for_token: node.for_token, lt_token: node.lt_token, - lifetimes: FoldHelper::lift(node.lifetimes, |it| f.fold_generic_param(it)), + lifetimes: crate::punctuated::fold(node.lifetimes, f, F::fold_generic_param), gt_token: node.gt_token, } } +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_captured_param<F>( + f: &mut F, + node: crate::CapturedParam, +) -> crate::CapturedParam +where + F: Fold + ?Sized, +{ + match node { + crate::CapturedParam::Lifetime(_binding_0) => { + crate::CapturedParam::Lifetime(f.fold_lifetime(_binding_0)) + } + crate::CapturedParam::Ident(_binding_0) => { + crate::CapturedParam::Ident(f.fold_ident(_binding_0)) + } + } +} #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_const_param<F>(f: &mut F, node: ConstParam) -> ConstParam +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_const_param<F>(f: &mut F, node: crate::ConstParam) -> crate::ConstParam where F: Fold + ?Sized, { - ConstParam { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ConstParam { + attrs: f.fold_attributes(node.attrs), const_token: node.const_token, ident: f.fold_ident(node.ident), colon_token: node.colon_token, @@ -938,67 +1239,77 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_constraint<F>(f: &mut F, node: Constraint) -> Constraint +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_constraint<F>(f: &mut F, node: crate::Constraint) -> crate::Constraint where F: Fold + ?Sized, { - Constraint { + crate::Constraint { ident: f.fold_ident(node.ident), generics: (node.generics).map(|it| f.fold_angle_bracketed_generic_arguments(it)), colon_token: node.colon_token, - bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)), + bounds: crate::punctuated::fold(node.bounds, f, F::fold_type_param_bound), } } #[cfg(feature = "derive")] -pub fn fold_data<F>(f: &mut F, node: Data) -> Data +#[cfg_attr(docsrs, doc(cfg(feature = "derive")))] +pub fn fold_data<F>(f: &mut F, node: crate::Data) -> crate::Data where F: Fold + ?Sized, { match node { - Data::Struct(_binding_0) => Data::Struct(f.fold_data_struct(_binding_0)), - Data::Enum(_binding_0) => Data::Enum(f.fold_data_enum(_binding_0)), - Data::Union(_binding_0) => 
Data::Union(f.fold_data_union(_binding_0)), + crate::Data::Struct(_binding_0) => { + crate::Data::Struct(f.fold_data_struct(_binding_0)) + } + crate::Data::Enum(_binding_0) => crate::Data::Enum(f.fold_data_enum(_binding_0)), + crate::Data::Union(_binding_0) => { + crate::Data::Union(f.fold_data_union(_binding_0)) + } } } #[cfg(feature = "derive")] -pub fn fold_data_enum<F>(f: &mut F, node: DataEnum) -> DataEnum +#[cfg_attr(docsrs, doc(cfg(feature = "derive")))] +pub fn fold_data_enum<F>(f: &mut F, node: crate::DataEnum) -> crate::DataEnum where F: Fold + ?Sized, { - DataEnum { + crate::DataEnum { enum_token: node.enum_token, brace_token: node.brace_token, - variants: FoldHelper::lift(node.variants, |it| f.fold_variant(it)), + variants: crate::punctuated::fold(node.variants, f, F::fold_variant), } } #[cfg(feature = "derive")] -pub fn fold_data_struct<F>(f: &mut F, node: DataStruct) -> DataStruct +#[cfg_attr(docsrs, doc(cfg(feature = "derive")))] +pub fn fold_data_struct<F>(f: &mut F, node: crate::DataStruct) -> crate::DataStruct where F: Fold + ?Sized, { - DataStruct { + crate::DataStruct { struct_token: node.struct_token, fields: f.fold_fields(node.fields), semi_token: node.semi_token, } } #[cfg(feature = "derive")] -pub fn fold_data_union<F>(f: &mut F, node: DataUnion) -> DataUnion +#[cfg_attr(docsrs, doc(cfg(feature = "derive")))] +pub fn fold_data_union<F>(f: &mut F, node: crate::DataUnion) -> crate::DataUnion where F: Fold + ?Sized, { - DataUnion { + crate::DataUnion { union_token: node.union_token, fields: f.fold_fields_named(node.fields), } } #[cfg(feature = "derive")] -pub fn fold_derive_input<F>(f: &mut F, node: DeriveInput) -> DeriveInput +#[cfg_attr(docsrs, doc(cfg(feature = "derive")))] +pub fn fold_derive_input<F>(f: &mut F, node: crate::DeriveInput) -> crate::DeriveInput where F: Fold + ?Sized, { - DeriveInput { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::DeriveInput { + attrs: f.fold_attributes(node.attrs), vis: f.fold_visibility(node.vis), ident: f.fold_ident(node.ident), generics: f.fold_generics(node.generics), @@ -1006,230 +1317,304 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_expr<F>(f: &mut F, node: Expr) -> Expr +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_expr<F>(f: &mut F, node: crate::Expr) -> crate::Expr where F: Fold + ?Sized, { match node { - Expr::Array(_binding_0) => Expr::Array(full!(f.fold_expr_array(_binding_0))), - Expr::Assign(_binding_0) => Expr::Assign(full!(f.fold_expr_assign(_binding_0))), - Expr::Async(_binding_0) => Expr::Async(full!(f.fold_expr_async(_binding_0))), - Expr::Await(_binding_0) => Expr::Await(full!(f.fold_expr_await(_binding_0))), - Expr::Binary(_binding_0) => Expr::Binary(f.fold_expr_binary(_binding_0)), - Expr::Block(_binding_0) => Expr::Block(full!(f.fold_expr_block(_binding_0))), - Expr::Break(_binding_0) => Expr::Break(full!(f.fold_expr_break(_binding_0))), - Expr::Call(_binding_0) => Expr::Call(f.fold_expr_call(_binding_0)), - Expr::Cast(_binding_0) => Expr::Cast(f.fold_expr_cast(_binding_0)), - Expr::Closure(_binding_0) => { - Expr::Closure(full!(f.fold_expr_closure(_binding_0))) - } - Expr::Const(_binding_0) => Expr::Const(full!(f.fold_expr_const(_binding_0))), - Expr::Continue(_binding_0) => { - Expr::Continue(full!(f.fold_expr_continue(_binding_0))) - } - Expr::Field(_binding_0) => Expr::Field(f.fold_expr_field(_binding_0)), - Expr::ForLoop(_binding_0) => { - Expr::ForLoop(full!(f.fold_expr_for_loop(_binding_0))) - } - 
Expr::Group(_binding_0) => Expr::Group(f.fold_expr_group(_binding_0)), - Expr::If(_binding_0) => Expr::If(full!(f.fold_expr_if(_binding_0))), - Expr::Index(_binding_0) => Expr::Index(f.fold_expr_index(_binding_0)), - Expr::Infer(_binding_0) => Expr::Infer(full!(f.fold_expr_infer(_binding_0))), - Expr::Let(_binding_0) => Expr::Let(full!(f.fold_expr_let(_binding_0))), - Expr::Lit(_binding_0) => Expr::Lit(f.fold_expr_lit(_binding_0)), - Expr::Loop(_binding_0) => Expr::Loop(full!(f.fold_expr_loop(_binding_0))), - Expr::Macro(_binding_0) => Expr::Macro(f.fold_expr_macro(_binding_0)), - Expr::Match(_binding_0) => Expr::Match(full!(f.fold_expr_match(_binding_0))), - Expr::MethodCall(_binding_0) => { - Expr::MethodCall(full!(f.fold_expr_method_call(_binding_0))) - } - Expr::Paren(_binding_0) => Expr::Paren(f.fold_expr_paren(_binding_0)), - Expr::Path(_binding_0) => Expr::Path(f.fold_expr_path(_binding_0)), - Expr::Range(_binding_0) => Expr::Range(full!(f.fold_expr_range(_binding_0))), - Expr::Reference(_binding_0) => { - Expr::Reference(full!(f.fold_expr_reference(_binding_0))) - } - Expr::Repeat(_binding_0) => Expr::Repeat(full!(f.fold_expr_repeat(_binding_0))), - Expr::Return(_binding_0) => Expr::Return(full!(f.fold_expr_return(_binding_0))), - Expr::Struct(_binding_0) => Expr::Struct(full!(f.fold_expr_struct(_binding_0))), - Expr::Try(_binding_0) => Expr::Try(full!(f.fold_expr_try(_binding_0))), - Expr::TryBlock(_binding_0) => { - Expr::TryBlock(full!(f.fold_expr_try_block(_binding_0))) - } - Expr::Tuple(_binding_0) => Expr::Tuple(full!(f.fold_expr_tuple(_binding_0))), - Expr::Unary(_binding_0) => Expr::Unary(f.fold_expr_unary(_binding_0)), - Expr::Unsafe(_binding_0) => Expr::Unsafe(full!(f.fold_expr_unsafe(_binding_0))), - Expr::Verbatim(_binding_0) => Expr::Verbatim(_binding_0), - Expr::While(_binding_0) => Expr::While(full!(f.fold_expr_while(_binding_0))), - Expr::Yield(_binding_0) => Expr::Yield(full!(f.fold_expr_yield(_binding_0))), - } -} -#[cfg(feature = "full")] -pub fn fold_expr_array<F>(f: &mut F, node: ExprArray) -> ExprArray -where - F: Fold + ?Sized, -{ - ExprArray { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::Expr::Array(_binding_0) => { + crate::Expr::Array(full!(f.fold_expr_array(_binding_0))) + } + crate::Expr::Assign(_binding_0) => { + crate::Expr::Assign(full!(f.fold_expr_assign(_binding_0))) + } + crate::Expr::Async(_binding_0) => { + crate::Expr::Async(full!(f.fold_expr_async(_binding_0))) + } + crate::Expr::Await(_binding_0) => { + crate::Expr::Await(full!(f.fold_expr_await(_binding_0))) + } + crate::Expr::Binary(_binding_0) => { + crate::Expr::Binary(f.fold_expr_binary(_binding_0)) + } + crate::Expr::Block(_binding_0) => { + crate::Expr::Block(full!(f.fold_expr_block(_binding_0))) + } + crate::Expr::Break(_binding_0) => { + crate::Expr::Break(full!(f.fold_expr_break(_binding_0))) + } + crate::Expr::Call(_binding_0) => crate::Expr::Call(f.fold_expr_call(_binding_0)), + crate::Expr::Cast(_binding_0) => crate::Expr::Cast(f.fold_expr_cast(_binding_0)), + crate::Expr::Closure(_binding_0) => { + crate::Expr::Closure(full!(f.fold_expr_closure(_binding_0))) + } + crate::Expr::Const(_binding_0) => { + crate::Expr::Const(full!(f.fold_expr_const(_binding_0))) + } + crate::Expr::Continue(_binding_0) => { + crate::Expr::Continue(full!(f.fold_expr_continue(_binding_0))) + } + crate::Expr::Field(_binding_0) => { + crate::Expr::Field(f.fold_expr_field(_binding_0)) + } + crate::Expr::ForLoop(_binding_0) => { + 
crate::Expr::ForLoop(full!(f.fold_expr_for_loop(_binding_0))) + } + crate::Expr::Group(_binding_0) => { + crate::Expr::Group(f.fold_expr_group(_binding_0)) + } + crate::Expr::If(_binding_0) => crate::Expr::If(full!(f.fold_expr_if(_binding_0))), + crate::Expr::Index(_binding_0) => { + crate::Expr::Index(f.fold_expr_index(_binding_0)) + } + crate::Expr::Infer(_binding_0) => { + crate::Expr::Infer(full!(f.fold_expr_infer(_binding_0))) + } + crate::Expr::Let(_binding_0) => { + crate::Expr::Let(full!(f.fold_expr_let(_binding_0))) + } + crate::Expr::Lit(_binding_0) => crate::Expr::Lit(f.fold_expr_lit(_binding_0)), + crate::Expr::Loop(_binding_0) => { + crate::Expr::Loop(full!(f.fold_expr_loop(_binding_0))) + } + crate::Expr::Macro(_binding_0) => { + crate::Expr::Macro(f.fold_expr_macro(_binding_0)) + } + crate::Expr::Match(_binding_0) => { + crate::Expr::Match(full!(f.fold_expr_match(_binding_0))) + } + crate::Expr::MethodCall(_binding_0) => { + crate::Expr::MethodCall(f.fold_expr_method_call(_binding_0)) + } + crate::Expr::Paren(_binding_0) => { + crate::Expr::Paren(f.fold_expr_paren(_binding_0)) + } + crate::Expr::Path(_binding_0) => crate::Expr::Path(f.fold_expr_path(_binding_0)), + crate::Expr::Range(_binding_0) => { + crate::Expr::Range(full!(f.fold_expr_range(_binding_0))) + } + crate::Expr::RawAddr(_binding_0) => { + crate::Expr::RawAddr(full!(f.fold_expr_raw_addr(_binding_0))) + } + crate::Expr::Reference(_binding_0) => { + crate::Expr::Reference(f.fold_expr_reference(_binding_0)) + } + crate::Expr::Repeat(_binding_0) => { + crate::Expr::Repeat(full!(f.fold_expr_repeat(_binding_0))) + } + crate::Expr::Return(_binding_0) => { + crate::Expr::Return(full!(f.fold_expr_return(_binding_0))) + } + crate::Expr::Struct(_binding_0) => { + crate::Expr::Struct(f.fold_expr_struct(_binding_0)) + } + crate::Expr::Try(_binding_0) => { + crate::Expr::Try(full!(f.fold_expr_try(_binding_0))) + } + crate::Expr::TryBlock(_binding_0) => { + crate::Expr::TryBlock(full!(f.fold_expr_try_block(_binding_0))) + } + crate::Expr::Tuple(_binding_0) => { + crate::Expr::Tuple(f.fold_expr_tuple(_binding_0)) + } + crate::Expr::Unary(_binding_0) => { + crate::Expr::Unary(f.fold_expr_unary(_binding_0)) + } + crate::Expr::Unsafe(_binding_0) => { + crate::Expr::Unsafe(full!(f.fold_expr_unsafe(_binding_0))) + } + crate::Expr::Verbatim(_binding_0) => { + crate::Expr::Verbatim(f.fold_token_stream(_binding_0)) + } + crate::Expr::While(_binding_0) => { + crate::Expr::While(full!(f.fold_expr_while(_binding_0))) + } + crate::Expr::Yield(_binding_0) => { + crate::Expr::Yield(full!(f.fold_expr_yield(_binding_0))) + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_expr_array<F>(f: &mut F, node: crate::ExprArray) -> crate::ExprArray +where + F: Fold + ?Sized, +{ + crate::ExprArray { + attrs: f.fold_attributes(node.attrs), bracket_token: node.bracket_token, - elems: FoldHelper::lift(node.elems, |it| f.fold_expr(it)), + elems: crate::punctuated::fold(node.elems, f, F::fold_expr), } } #[cfg(feature = "full")] -pub fn fold_expr_assign<F>(f: &mut F, node: ExprAssign) -> ExprAssign +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_expr_assign<F>(f: &mut F, node: crate::ExprAssign) -> crate::ExprAssign where F: Fold + ?Sized, { - ExprAssign { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ExprAssign { + attrs: f.fold_attributes(node.attrs), left: Box::new(f.fold_expr(*node.left)), eq_token: node.eq_token, right: Box::new(f.fold_expr(*node.right)), } } 
#[cfg(feature = "full")] -pub fn fold_expr_async<F>(f: &mut F, node: ExprAsync) -> ExprAsync +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_expr_async<F>(f: &mut F, node: crate::ExprAsync) -> crate::ExprAsync where F: Fold + ?Sized, { - ExprAsync { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ExprAsync { + attrs: f.fold_attributes(node.attrs), async_token: node.async_token, capture: node.capture, block: f.fold_block(node.block), } } #[cfg(feature = "full")] -pub fn fold_expr_await<F>(f: &mut F, node: ExprAwait) -> ExprAwait +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_expr_await<F>(f: &mut F, node: crate::ExprAwait) -> crate::ExprAwait where F: Fold + ?Sized, { - ExprAwait { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ExprAwait { + attrs: f.fold_attributes(node.attrs), base: Box::new(f.fold_expr(*node.base)), dot_token: node.dot_token, await_token: node.await_token, } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_expr_binary<F>(f: &mut F, node: ExprBinary) -> ExprBinary +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_expr_binary<F>(f: &mut F, node: crate::ExprBinary) -> crate::ExprBinary where F: Fold + ?Sized, { - ExprBinary { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ExprBinary { + attrs: f.fold_attributes(node.attrs), left: Box::new(f.fold_expr(*node.left)), op: f.fold_bin_op(node.op), right: Box::new(f.fold_expr(*node.right)), } } #[cfg(feature = "full")] -pub fn fold_expr_block<F>(f: &mut F, node: ExprBlock) -> ExprBlock +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_expr_block<F>(f: &mut F, node: crate::ExprBlock) -> crate::ExprBlock where F: Fold + ?Sized, { - ExprBlock { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ExprBlock { + attrs: f.fold_attributes(node.attrs), label: (node.label).map(|it| f.fold_label(it)), block: f.fold_block(node.block), } } #[cfg(feature = "full")] -pub fn fold_expr_break<F>(f: &mut F, node: ExprBreak) -> ExprBreak +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_expr_break<F>(f: &mut F, node: crate::ExprBreak) -> crate::ExprBreak where F: Fold + ?Sized, { - ExprBreak { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ExprBreak { + attrs: f.fold_attributes(node.attrs), break_token: node.break_token, label: (node.label).map(|it| f.fold_lifetime(it)), expr: (node.expr).map(|it| Box::new(f.fold_expr(*it))), } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_expr_call<F>(f: &mut F, node: ExprCall) -> ExprCall +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_expr_call<F>(f: &mut F, node: crate::ExprCall) -> crate::ExprCall where F: Fold + ?Sized, { - ExprCall { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ExprCall { + attrs: f.fold_attributes(node.attrs), func: Box::new(f.fold_expr(*node.func)), paren_token: node.paren_token, - args: FoldHelper::lift(node.args, |it| f.fold_expr(it)), + args: crate::punctuated::fold(node.args, f, F::fold_expr), } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_expr_cast<F>(f: &mut F, node: ExprCast) -> ExprCast +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_expr_cast<F>(f: &mut F, node: crate::ExprCast) -> crate::ExprCast where F: Fold + ?Sized, { - ExprCast { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + 
crate::ExprCast { + attrs: f.fold_attributes(node.attrs), expr: Box::new(f.fold_expr(*node.expr)), as_token: node.as_token, ty: Box::new(f.fold_type(*node.ty)), } } #[cfg(feature = "full")] -pub fn fold_expr_closure<F>(f: &mut F, node: ExprClosure) -> ExprClosure +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_expr_closure<F>(f: &mut F, node: crate::ExprClosure) -> crate::ExprClosure where F: Fold + ?Sized, { - ExprClosure { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ExprClosure { + attrs: f.fold_attributes(node.attrs), lifetimes: (node.lifetimes).map(|it| f.fold_bound_lifetimes(it)), constness: node.constness, movability: node.movability, asyncness: node.asyncness, capture: node.capture, or1_token: node.or1_token, - inputs: FoldHelper::lift(node.inputs, |it| f.fold_pat(it)), + inputs: crate::punctuated::fold(node.inputs, f, F::fold_pat), or2_token: node.or2_token, output: f.fold_return_type(node.output), body: Box::new(f.fold_expr(*node.body)), } } #[cfg(feature = "full")] -pub fn fold_expr_const<F>(f: &mut F, node: ExprConst) -> ExprConst +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_expr_const<F>(f: &mut F, node: crate::ExprConst) -> crate::ExprConst where F: Fold + ?Sized, { - ExprConst { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ExprConst { + attrs: f.fold_attributes(node.attrs), const_token: node.const_token, block: f.fold_block(node.block), } } #[cfg(feature = "full")] -pub fn fold_expr_continue<F>(f: &mut F, node: ExprContinue) -> ExprContinue +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_expr_continue<F>(f: &mut F, node: crate::ExprContinue) -> crate::ExprContinue where F: Fold + ?Sized, { - ExprContinue { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ExprContinue { + attrs: f.fold_attributes(node.attrs), continue_token: node.continue_token, label: (node.label).map(|it| f.fold_lifetime(it)), } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_expr_field<F>(f: &mut F, node: ExprField) -> ExprField +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_expr_field<F>(f: &mut F, node: crate::ExprField) -> crate::ExprField where F: Fold + ?Sized, { - ExprField { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ExprField { + attrs: f.fold_attributes(node.attrs), base: Box::new(f.fold_expr(*node.base)), dot_token: node.dot_token, member: f.fold_member(node.member), } } #[cfg(feature = "full")] -pub fn fold_expr_for_loop<F>(f: &mut F, node: ExprForLoop) -> ExprForLoop +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_expr_for_loop<F>(f: &mut F, node: crate::ExprForLoop) -> crate::ExprForLoop where F: Fold + ?Sized, { - ExprForLoop { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ExprForLoop { + attrs: f.fold_attributes(node.attrs), label: (node.label).map(|it| f.fold_label(it)), for_token: node.for_token, pat: Box::new(f.fold_pat(*node.pat)), @@ -1239,23 +1624,25 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_expr_group<F>(f: &mut F, node: ExprGroup) -> ExprGroup +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_expr_group<F>(f: &mut F, node: crate::ExprGroup) -> crate::ExprGroup where F: Fold + ?Sized, { - ExprGroup { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ExprGroup { + attrs: f.fold_attributes(node.attrs), group_token: node.group_token, 
expr: Box::new(f.fold_expr(*node.expr)), } } #[cfg(feature = "full")] -pub fn fold_expr_if<F>(f: &mut F, node: ExprIf) -> ExprIf +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_expr_if<F>(f: &mut F, node: crate::ExprIf) -> crate::ExprIf where F: Fold + ?Sized, { - ExprIf { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ExprIf { + attrs: f.fold_attributes(node.attrs), if_token: node.if_token, cond: Box::new(f.fold_expr(*node.cond)), then_branch: f.fold_block(node.then_branch), @@ -1264,34 +1651,37 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_expr_index<F>(f: &mut F, node: ExprIndex) -> ExprIndex +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_expr_index<F>(f: &mut F, node: crate::ExprIndex) -> crate::ExprIndex where F: Fold + ?Sized, { - ExprIndex { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ExprIndex { + attrs: f.fold_attributes(node.attrs), expr: Box::new(f.fold_expr(*node.expr)), bracket_token: node.bracket_token, index: Box::new(f.fold_expr(*node.index)), } } #[cfg(feature = "full")] -pub fn fold_expr_infer<F>(f: &mut F, node: ExprInfer) -> ExprInfer +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_expr_infer<F>(f: &mut F, node: crate::ExprInfer) -> crate::ExprInfer where F: Fold + ?Sized, { - ExprInfer { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ExprInfer { + attrs: f.fold_attributes(node.attrs), underscore_token: node.underscore_token, } } #[cfg(feature = "full")] -pub fn fold_expr_let<F>(f: &mut F, node: ExprLet) -> ExprLet +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_expr_let<F>(f: &mut F, node: crate::ExprLet) -> crate::ExprLet where F: Fold + ?Sized, { - ExprLet { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ExprLet { + attrs: f.fold_attributes(node.attrs), let_token: node.let_token, pat: Box::new(f.fold_pat(*node.pat)), eq_token: node.eq_token, @@ -1299,119 +1689,149 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_expr_lit<F>(f: &mut F, node: ExprLit) -> ExprLit +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_expr_lit<F>(f: &mut F, node: crate::ExprLit) -> crate::ExprLit where F: Fold + ?Sized, { - ExprLit { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ExprLit { + attrs: f.fold_attributes(node.attrs), lit: f.fold_lit(node.lit), } } #[cfg(feature = "full")] -pub fn fold_expr_loop<F>(f: &mut F, node: ExprLoop) -> ExprLoop +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_expr_loop<F>(f: &mut F, node: crate::ExprLoop) -> crate::ExprLoop where F: Fold + ?Sized, { - ExprLoop { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ExprLoop { + attrs: f.fold_attributes(node.attrs), label: (node.label).map(|it| f.fold_label(it)), loop_token: node.loop_token, body: f.fold_block(node.body), } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_expr_macro<F>(f: &mut F, node: ExprMacro) -> ExprMacro +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_expr_macro<F>(f: &mut F, node: crate::ExprMacro) -> crate::ExprMacro where F: Fold + ?Sized, { - ExprMacro { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ExprMacro { + attrs: f.fold_attributes(node.attrs), mac: f.fold_macro(node.mac), } } #[cfg(feature = "full")] -pub fn fold_expr_match<F>(f: &mut F, node: 
ExprMatch) -> ExprMatch +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_expr_match<F>(f: &mut F, node: crate::ExprMatch) -> crate::ExprMatch where F: Fold + ?Sized, { - ExprMatch { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ExprMatch { + attrs: f.fold_attributes(node.attrs), match_token: node.match_token, expr: Box::new(f.fold_expr(*node.expr)), brace_token: node.brace_token, - arms: FoldHelper::lift(node.arms, |it| f.fold_arm(it)), + arms: fold_vec(node.arms, f, F::fold_arm), } } -#[cfg(feature = "full")] -pub fn fold_expr_method_call<F>(f: &mut F, node: ExprMethodCall) -> ExprMethodCall +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_expr_method_call<F>( + f: &mut F, + node: crate::ExprMethodCall, +) -> crate::ExprMethodCall where F: Fold + ?Sized, { - ExprMethodCall { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ExprMethodCall { + attrs: f.fold_attributes(node.attrs), receiver: Box::new(f.fold_expr(*node.receiver)), dot_token: node.dot_token, method: f.fold_ident(node.method), turbofish: (node.turbofish) .map(|it| f.fold_angle_bracketed_generic_arguments(it)), paren_token: node.paren_token, - args: FoldHelper::lift(node.args, |it| f.fold_expr(it)), + args: crate::punctuated::fold(node.args, f, F::fold_expr), } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_expr_paren<F>(f: &mut F, node: ExprParen) -> ExprParen +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_expr_paren<F>(f: &mut F, node: crate::ExprParen) -> crate::ExprParen where F: Fold + ?Sized, { - ExprParen { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ExprParen { + attrs: f.fold_attributes(node.attrs), paren_token: node.paren_token, expr: Box::new(f.fold_expr(*node.expr)), } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_expr_path<F>(f: &mut F, node: ExprPath) -> ExprPath +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_expr_path<F>(f: &mut F, node: crate::ExprPath) -> crate::ExprPath where F: Fold + ?Sized, { - ExprPath { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ExprPath { + attrs: f.fold_attributes(node.attrs), qself: (node.qself).map(|it| f.fold_qself(it)), path: f.fold_path(node.path), } } #[cfg(feature = "full")] -pub fn fold_expr_range<F>(f: &mut F, node: ExprRange) -> ExprRange +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_expr_range<F>(f: &mut F, node: crate::ExprRange) -> crate::ExprRange where F: Fold + ?Sized, { - ExprRange { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ExprRange { + attrs: f.fold_attributes(node.attrs), start: (node.start).map(|it| Box::new(f.fold_expr(*it))), limits: f.fold_range_limits(node.limits), end: (node.end).map(|it| Box::new(f.fold_expr(*it))), } } #[cfg(feature = "full")] -pub fn fold_expr_reference<F>(f: &mut F, node: ExprReference) -> ExprReference +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_expr_raw_addr<F>(f: &mut F, node: crate::ExprRawAddr) -> crate::ExprRawAddr +where + F: Fold + ?Sized, +{ + crate::ExprRawAddr { + attrs: f.fold_attributes(node.attrs), + and_token: node.and_token, + raw: node.raw, + mutability: f.fold_pointer_mutability(node.mutability), + expr: Box::new(f.fold_expr(*node.expr)), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, 
doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_expr_reference<F>( + f: &mut F, + node: crate::ExprReference, +) -> crate::ExprReference where F: Fold + ?Sized, { - ExprReference { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ExprReference { + attrs: f.fold_attributes(node.attrs), and_token: node.and_token, mutability: node.mutability, expr: Box::new(f.fold_expr(*node.expr)), } } #[cfg(feature = "full")] -pub fn fold_expr_repeat<F>(f: &mut F, node: ExprRepeat) -> ExprRepeat +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_expr_repeat<F>(f: &mut F, node: crate::ExprRepeat) -> crate::ExprRepeat where F: Fold + ?Sized, { - ExprRepeat { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ExprRepeat { + attrs: f.fold_attributes(node.attrs), bracket_token: node.bracket_token, expr: Box::new(f.fold_expr(*node.expr)), semi_token: node.semi_token, @@ -1419,93 +1839,104 @@ where } } #[cfg(feature = "full")] -pub fn fold_expr_return<F>(f: &mut F, node: ExprReturn) -> ExprReturn +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_expr_return<F>(f: &mut F, node: crate::ExprReturn) -> crate::ExprReturn where F: Fold + ?Sized, { - ExprReturn { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ExprReturn { + attrs: f.fold_attributes(node.attrs), return_token: node.return_token, expr: (node.expr).map(|it| Box::new(f.fold_expr(*it))), } } -#[cfg(feature = "full")] -pub fn fold_expr_struct<F>(f: &mut F, node: ExprStruct) -> ExprStruct +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_expr_struct<F>(f: &mut F, node: crate::ExprStruct) -> crate::ExprStruct where F: Fold + ?Sized, { - ExprStruct { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ExprStruct { + attrs: f.fold_attributes(node.attrs), qself: (node.qself).map(|it| f.fold_qself(it)), path: f.fold_path(node.path), brace_token: node.brace_token, - fields: FoldHelper::lift(node.fields, |it| f.fold_field_value(it)), + fields: crate::punctuated::fold(node.fields, f, F::fold_field_value), dot2_token: node.dot2_token, rest: (node.rest).map(|it| Box::new(f.fold_expr(*it))), } } #[cfg(feature = "full")] -pub fn fold_expr_try<F>(f: &mut F, node: ExprTry) -> ExprTry +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_expr_try<F>(f: &mut F, node: crate::ExprTry) -> crate::ExprTry where F: Fold + ?Sized, { - ExprTry { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ExprTry { + attrs: f.fold_attributes(node.attrs), expr: Box::new(f.fold_expr(*node.expr)), question_token: node.question_token, } } #[cfg(feature = "full")] -pub fn fold_expr_try_block<F>(f: &mut F, node: ExprTryBlock) -> ExprTryBlock +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_expr_try_block<F>( + f: &mut F, + node: crate::ExprTryBlock, +) -> crate::ExprTryBlock where F: Fold + ?Sized, { - ExprTryBlock { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ExprTryBlock { + attrs: f.fold_attributes(node.attrs), try_token: node.try_token, block: f.fold_block(node.block), } } -#[cfg(feature = "full")] -pub fn fold_expr_tuple<F>(f: &mut F, node: ExprTuple) -> ExprTuple +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_expr_tuple<F>(f: &mut F, node: crate::ExprTuple) -> crate::ExprTuple where F: Fold + ?Sized, { - 
ExprTuple { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ExprTuple { + attrs: f.fold_attributes(node.attrs), paren_token: node.paren_token, - elems: FoldHelper::lift(node.elems, |it| f.fold_expr(it)), + elems: crate::punctuated::fold(node.elems, f, F::fold_expr), } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_expr_unary<F>(f: &mut F, node: ExprUnary) -> ExprUnary +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_expr_unary<F>(f: &mut F, node: crate::ExprUnary) -> crate::ExprUnary where F: Fold + ?Sized, { - ExprUnary { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ExprUnary { + attrs: f.fold_attributes(node.attrs), op: f.fold_un_op(node.op), expr: Box::new(f.fold_expr(*node.expr)), } } #[cfg(feature = "full")] -pub fn fold_expr_unsafe<F>(f: &mut F, node: ExprUnsafe) -> ExprUnsafe +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_expr_unsafe<F>(f: &mut F, node: crate::ExprUnsafe) -> crate::ExprUnsafe where F: Fold + ?Sized, { - ExprUnsafe { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ExprUnsafe { + attrs: f.fold_attributes(node.attrs), unsafe_token: node.unsafe_token, block: f.fold_block(node.block), } } #[cfg(feature = "full")] -pub fn fold_expr_while<F>(f: &mut F, node: ExprWhile) -> ExprWhile +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_expr_while<F>(f: &mut F, node: crate::ExprWhile) -> crate::ExprWhile where F: Fold + ?Sized, { - ExprWhile { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ExprWhile { + attrs: f.fold_attributes(node.attrs), label: (node.label).map(|it| f.fold_label(it)), while_token: node.while_token, cond: Box::new(f.fold_expr(*node.cond)), @@ -1513,23 +1944,25 @@ where } } #[cfg(feature = "full")] -pub fn fold_expr_yield<F>(f: &mut F, node: ExprYield) -> ExprYield +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_expr_yield<F>(f: &mut F, node: crate::ExprYield) -> crate::ExprYield where F: Fold + ?Sized, { - ExprYield { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ExprYield { + attrs: f.fold_attributes(node.attrs), yield_token: node.yield_token, expr: (node.expr).map(|it| Box::new(f.fold_expr(*it))), } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_field<F>(f: &mut F, node: Field) -> Field +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_field<F>(f: &mut F, node: crate::Field) -> crate::Field where F: Fold + ?Sized, { - Field { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::Field { + attrs: f.fold_attributes(node.attrs), vis: f.fold_visibility(node.vis), mutability: f.fold_field_mutability(node.mutability), ident: (node.ident).map(|it| f.fold_ident(it)), @@ -1538,144 +1971,178 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_field_mutability<F>(f: &mut F, node: FieldMutability) -> FieldMutability +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_field_mutability<F>( + f: &mut F, + node: crate::FieldMutability, +) -> crate::FieldMutability where F: Fold + ?Sized, { match node { - FieldMutability::None => FieldMutability::None, + crate::FieldMutability::None => crate::FieldMutability::None, } } #[cfg(feature = "full")] -pub fn fold_field_pat<F>(f: &mut F, node: FieldPat) -> FieldPat +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_field_pat<F>(f: &mut F, node: 
crate::FieldPat) -> crate::FieldPat where F: Fold + ?Sized, { - FieldPat { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::FieldPat { + attrs: f.fold_attributes(node.attrs), member: f.fold_member(node.member), colon_token: node.colon_token, pat: Box::new(f.fold_pat(*node.pat)), } } -#[cfg(feature = "full")] -pub fn fold_field_value<F>(f: &mut F, node: FieldValue) -> FieldValue +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_field_value<F>(f: &mut F, node: crate::FieldValue) -> crate::FieldValue where F: Fold + ?Sized, { - FieldValue { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::FieldValue { + attrs: f.fold_attributes(node.attrs), member: f.fold_member(node.member), colon_token: node.colon_token, expr: f.fold_expr(node.expr), } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_fields<F>(f: &mut F, node: Fields) -> Fields +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_fields<F>(f: &mut F, node: crate::Fields) -> crate::Fields where F: Fold + ?Sized, { match node { - Fields::Named(_binding_0) => Fields::Named(f.fold_fields_named(_binding_0)), - Fields::Unnamed(_binding_0) => Fields::Unnamed(f.fold_fields_unnamed(_binding_0)), - Fields::Unit => Fields::Unit, + crate::Fields::Named(_binding_0) => { + crate::Fields::Named(f.fold_fields_named(_binding_0)) + } + crate::Fields::Unnamed(_binding_0) => { + crate::Fields::Unnamed(f.fold_fields_unnamed(_binding_0)) + } + crate::Fields::Unit => crate::Fields::Unit, } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_fields_named<F>(f: &mut F, node: FieldsNamed) -> FieldsNamed +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_fields_named<F>(f: &mut F, node: crate::FieldsNamed) -> crate::FieldsNamed where F: Fold + ?Sized, { - FieldsNamed { + crate::FieldsNamed { brace_token: node.brace_token, - named: FoldHelper::lift(node.named, |it| f.fold_field(it)), + named: crate::punctuated::fold(node.named, f, F::fold_field), } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_fields_unnamed<F>(f: &mut F, node: FieldsUnnamed) -> FieldsUnnamed +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_fields_unnamed<F>( + f: &mut F, + node: crate::FieldsUnnamed, +) -> crate::FieldsUnnamed where F: Fold + ?Sized, { - FieldsUnnamed { + crate::FieldsUnnamed { paren_token: node.paren_token, - unnamed: FoldHelper::lift(node.unnamed, |it| f.fold_field(it)), + unnamed: crate::punctuated::fold(node.unnamed, f, F::fold_field), } } #[cfg(feature = "full")] -pub fn fold_file<F>(f: &mut F, node: File) -> File +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_file<F>(f: &mut F, node: crate::File) -> crate::File where F: Fold + ?Sized, { - File { + crate::File { shebang: node.shebang, - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), - items: FoldHelper::lift(node.items, |it| f.fold_item(it)), + attrs: f.fold_attributes(node.attrs), + items: fold_vec(node.items, f, F::fold_item), } } #[cfg(feature = "full")] -pub fn fold_fn_arg<F>(f: &mut F, node: FnArg) -> FnArg +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_fn_arg<F>(f: &mut F, node: crate::FnArg) -> crate::FnArg where F: Fold + ?Sized, { match node { - FnArg::Receiver(_binding_0) => FnArg::Receiver(f.fold_receiver(_binding_0)), - FnArg::Typed(_binding_0) => 
FnArg::Typed(f.fold_pat_type(_binding_0)), + crate::FnArg::Receiver(_binding_0) => { + crate::FnArg::Receiver(f.fold_receiver(_binding_0)) + } + crate::FnArg::Typed(_binding_0) => { + crate::FnArg::Typed(f.fold_pat_type(_binding_0)) + } } } #[cfg(feature = "full")] -pub fn fold_foreign_item<F>(f: &mut F, node: ForeignItem) -> ForeignItem +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_foreign_item<F>(f: &mut F, node: crate::ForeignItem) -> crate::ForeignItem where F: Fold + ?Sized, { match node { - ForeignItem::Fn(_binding_0) => { - ForeignItem::Fn(f.fold_foreign_item_fn(_binding_0)) + crate::ForeignItem::Fn(_binding_0) => { + crate::ForeignItem::Fn(f.fold_foreign_item_fn(_binding_0)) + } + crate::ForeignItem::Static(_binding_0) => { + crate::ForeignItem::Static(f.fold_foreign_item_static(_binding_0)) } - ForeignItem::Static(_binding_0) => { - ForeignItem::Static(f.fold_foreign_item_static(_binding_0)) + crate::ForeignItem::Type(_binding_0) => { + crate::ForeignItem::Type(f.fold_foreign_item_type(_binding_0)) } - ForeignItem::Type(_binding_0) => { - ForeignItem::Type(f.fold_foreign_item_type(_binding_0)) + crate::ForeignItem::Macro(_binding_0) => { + crate::ForeignItem::Macro(f.fold_foreign_item_macro(_binding_0)) } - ForeignItem::Macro(_binding_0) => { - ForeignItem::Macro(f.fold_foreign_item_macro(_binding_0)) + crate::ForeignItem::Verbatim(_binding_0) => { + crate::ForeignItem::Verbatim(f.fold_token_stream(_binding_0)) } - ForeignItem::Verbatim(_binding_0) => ForeignItem::Verbatim(_binding_0), } } #[cfg(feature = "full")] -pub fn fold_foreign_item_fn<F>(f: &mut F, node: ForeignItemFn) -> ForeignItemFn +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_foreign_item_fn<F>( + f: &mut F, + node: crate::ForeignItemFn, +) -> crate::ForeignItemFn where F: Fold + ?Sized, { - ForeignItemFn { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ForeignItemFn { + attrs: f.fold_attributes(node.attrs), vis: f.fold_visibility(node.vis), sig: f.fold_signature(node.sig), semi_token: node.semi_token, } } #[cfg(feature = "full")] -pub fn fold_foreign_item_macro<F>(f: &mut F, node: ForeignItemMacro) -> ForeignItemMacro +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_foreign_item_macro<F>( + f: &mut F, + node: crate::ForeignItemMacro, +) -> crate::ForeignItemMacro where F: Fold + ?Sized, { - ForeignItemMacro { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ForeignItemMacro { + attrs: f.fold_attributes(node.attrs), mac: f.fold_macro(node.mac), semi_token: node.semi_token, } } #[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub fn fold_foreign_item_static<F>( f: &mut F, - node: ForeignItemStatic, -) -> ForeignItemStatic + node: crate::ForeignItemStatic, +) -> crate::ForeignItemStatic where F: Fold + ?Sized, { - ForeignItemStatic { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ForeignItemStatic { + attrs: f.fold_attributes(node.attrs), vis: f.fold_visibility(node.vis), static_token: node.static_token, mutability: f.fold_static_mutability(node.mutability), @@ -1686,12 +2153,16 @@ where } } #[cfg(feature = "full")] -pub fn fold_foreign_item_type<F>(f: &mut F, node: ForeignItemType) -> ForeignItemType +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_foreign_item_type<F>( + f: &mut F, + node: crate::ForeignItemType, +) -> crate::ForeignItemType where F: Fold + ?Sized, { - ForeignItemType { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + 
crate::ForeignItemType { + attrs: f.fold_attributes(node.attrs), vis: f.fold_visibility(node.vis), type_token: node.type_token, ident: f.fold_ident(node.ident), @@ -1700,61 +2171,67 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_generic_argument<F>(f: &mut F, node: GenericArgument) -> GenericArgument +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_generic_argument<F>( + f: &mut F, + node: crate::GenericArgument, +) -> crate::GenericArgument where F: Fold + ?Sized, { match node { - GenericArgument::Lifetime(_binding_0) => { - GenericArgument::Lifetime(f.fold_lifetime(_binding_0)) + crate::GenericArgument::Lifetime(_binding_0) => { + crate::GenericArgument::Lifetime(f.fold_lifetime(_binding_0)) } - GenericArgument::Type(_binding_0) => { - GenericArgument::Type(f.fold_type(_binding_0)) + crate::GenericArgument::Type(_binding_0) => { + crate::GenericArgument::Type(f.fold_type(_binding_0)) } - GenericArgument::Const(_binding_0) => { - GenericArgument::Const(f.fold_expr(_binding_0)) + crate::GenericArgument::Const(_binding_0) => { + crate::GenericArgument::Const(f.fold_expr(_binding_0)) } - GenericArgument::AssocType(_binding_0) => { - GenericArgument::AssocType(f.fold_assoc_type(_binding_0)) + crate::GenericArgument::AssocType(_binding_0) => { + crate::GenericArgument::AssocType(f.fold_assoc_type(_binding_0)) } - GenericArgument::AssocConst(_binding_0) => { - GenericArgument::AssocConst(f.fold_assoc_const(_binding_0)) + crate::GenericArgument::AssocConst(_binding_0) => { + crate::GenericArgument::AssocConst(f.fold_assoc_const(_binding_0)) } - GenericArgument::Constraint(_binding_0) => { - GenericArgument::Constraint(f.fold_constraint(_binding_0)) + crate::GenericArgument::Constraint(_binding_0) => { + crate::GenericArgument::Constraint(f.fold_constraint(_binding_0)) } } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_generic_param<F>(f: &mut F, node: GenericParam) -> GenericParam +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_generic_param<F>(f: &mut F, node: crate::GenericParam) -> crate::GenericParam where F: Fold + ?Sized, { match node { - GenericParam::Lifetime(_binding_0) => { - GenericParam::Lifetime(f.fold_lifetime_param(_binding_0)) + crate::GenericParam::Lifetime(_binding_0) => { + crate::GenericParam::Lifetime(f.fold_lifetime_param(_binding_0)) } - GenericParam::Type(_binding_0) => { - GenericParam::Type(f.fold_type_param(_binding_0)) + crate::GenericParam::Type(_binding_0) => { + crate::GenericParam::Type(f.fold_type_param(_binding_0)) } - GenericParam::Const(_binding_0) => { - GenericParam::Const(f.fold_const_param(_binding_0)) + crate::GenericParam::Const(_binding_0) => { + crate::GenericParam::Const(f.fold_const_param(_binding_0)) } } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_generics<F>(f: &mut F, node: Generics) -> Generics +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_generics<F>(f: &mut F, node: crate::Generics) -> crate::Generics where F: Fold + ?Sized, { - Generics { + crate::Generics { lt_token: node.lt_token, - params: FoldHelper::lift(node.params, |it| f.fold_generic_param(it)), + params: crate::punctuated::fold(node.params, f, F::fold_generic_param), gt_token: node.gt_token, where_clause: (node.where_clause).map(|it| f.fold_where_clause(it)), } } -pub fn fold_ident<F>(f: &mut F, node: Ident) -> Ident +pub fn fold_ident<F>(f: &mut F, node: proc_macro2::Ident) -> proc_macro2::Ident where 
F: Fold + ?Sized, { @@ -1764,29 +2241,40 @@ where node } #[cfg(feature = "full")] -pub fn fold_impl_item<F>(f: &mut F, node: ImplItem) -> ImplItem +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_impl_item<F>(f: &mut F, node: crate::ImplItem) -> crate::ImplItem where F: Fold + ?Sized, { match node { - ImplItem::Const(_binding_0) => { - ImplItem::Const(f.fold_impl_item_const(_binding_0)) + crate::ImplItem::Const(_binding_0) => { + crate::ImplItem::Const(f.fold_impl_item_const(_binding_0)) + } + crate::ImplItem::Fn(_binding_0) => { + crate::ImplItem::Fn(f.fold_impl_item_fn(_binding_0)) } - ImplItem::Fn(_binding_0) => ImplItem::Fn(f.fold_impl_item_fn(_binding_0)), - ImplItem::Type(_binding_0) => ImplItem::Type(f.fold_impl_item_type(_binding_0)), - ImplItem::Macro(_binding_0) => { - ImplItem::Macro(f.fold_impl_item_macro(_binding_0)) + crate::ImplItem::Type(_binding_0) => { + crate::ImplItem::Type(f.fold_impl_item_type(_binding_0)) + } + crate::ImplItem::Macro(_binding_0) => { + crate::ImplItem::Macro(f.fold_impl_item_macro(_binding_0)) + } + crate::ImplItem::Verbatim(_binding_0) => { + crate::ImplItem::Verbatim(f.fold_token_stream(_binding_0)) } - ImplItem::Verbatim(_binding_0) => ImplItem::Verbatim(_binding_0), } } #[cfg(feature = "full")] -pub fn fold_impl_item_const<F>(f: &mut F, node: ImplItemConst) -> ImplItemConst +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_impl_item_const<F>( + f: &mut F, + node: crate::ImplItemConst, +) -> crate::ImplItemConst where F: Fold + ?Sized, { - ImplItemConst { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ImplItemConst { + attrs: f.fold_attributes(node.attrs), vis: f.fold_visibility(node.vis), defaultness: node.defaultness, const_token: node.const_token, @@ -1800,12 +2288,13 @@ where } } #[cfg(feature = "full")] -pub fn fold_impl_item_fn<F>(f: &mut F, node: ImplItemFn) -> ImplItemFn +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_impl_item_fn<F>(f: &mut F, node: crate::ImplItemFn) -> crate::ImplItemFn where F: Fold + ?Sized, { - ImplItemFn { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ImplItemFn { + attrs: f.fold_attributes(node.attrs), vis: f.fold_visibility(node.vis), defaultness: node.defaultness, sig: f.fold_signature(node.sig), @@ -1813,23 +2302,31 @@ where } } #[cfg(feature = "full")] -pub fn fold_impl_item_macro<F>(f: &mut F, node: ImplItemMacro) -> ImplItemMacro +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_impl_item_macro<F>( + f: &mut F, + node: crate::ImplItemMacro, +) -> crate::ImplItemMacro where F: Fold + ?Sized, { - ImplItemMacro { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ImplItemMacro { + attrs: f.fold_attributes(node.attrs), mac: f.fold_macro(node.mac), semi_token: node.semi_token, } } #[cfg(feature = "full")] -pub fn fold_impl_item_type<F>(f: &mut F, node: ImplItemType) -> ImplItemType +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_impl_item_type<F>( + f: &mut F, + node: crate::ImplItemType, +) -> crate::ImplItemType where F: Fold + ?Sized, { - ImplItemType { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ImplItemType { + attrs: f.fold_attributes(node.attrs), vis: f.fold_visibility(node.vis), defaultness: node.defaultness, type_token: node.type_token, @@ -1841,59 +2338,80 @@ where } } #[cfg(feature = "full")] -pub fn fold_impl_restriction<F>(f: &mut F, node: ImplRestriction) -> ImplRestriction +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub 
fn fold_impl_restriction<F>( + f: &mut F, + node: crate::ImplRestriction, +) -> crate::ImplRestriction where F: Fold + ?Sized, { match node {} } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_index<F>(f: &mut F, node: Index) -> Index +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_index<F>(f: &mut F, node: crate::Index) -> crate::Index where F: Fold + ?Sized, { - Index { + crate::Index { index: node.index, span: f.fold_span(node.span), } } #[cfg(feature = "full")] -pub fn fold_item<F>(f: &mut F, node: Item) -> Item +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_item<F>(f: &mut F, node: crate::Item) -> crate::Item where F: Fold + ?Sized, { match node { - Item::Const(_binding_0) => Item::Const(f.fold_item_const(_binding_0)), - Item::Enum(_binding_0) => Item::Enum(f.fold_item_enum(_binding_0)), - Item::ExternCrate(_binding_0) => { - Item::ExternCrate(f.fold_item_extern_crate(_binding_0)) + crate::Item::Const(_binding_0) => { + crate::Item::Const(f.fold_item_const(_binding_0)) + } + crate::Item::Enum(_binding_0) => crate::Item::Enum(f.fold_item_enum(_binding_0)), + crate::Item::ExternCrate(_binding_0) => { + crate::Item::ExternCrate(f.fold_item_extern_crate(_binding_0)) } - Item::Fn(_binding_0) => Item::Fn(f.fold_item_fn(_binding_0)), - Item::ForeignMod(_binding_0) => { - Item::ForeignMod(f.fold_item_foreign_mod(_binding_0)) + crate::Item::Fn(_binding_0) => crate::Item::Fn(f.fold_item_fn(_binding_0)), + crate::Item::ForeignMod(_binding_0) => { + crate::Item::ForeignMod(f.fold_item_foreign_mod(_binding_0)) } - Item::Impl(_binding_0) => Item::Impl(f.fold_item_impl(_binding_0)), - Item::Macro(_binding_0) => Item::Macro(f.fold_item_macro(_binding_0)), - Item::Mod(_binding_0) => Item::Mod(f.fold_item_mod(_binding_0)), - Item::Static(_binding_0) => Item::Static(f.fold_item_static(_binding_0)), - Item::Struct(_binding_0) => Item::Struct(f.fold_item_struct(_binding_0)), - Item::Trait(_binding_0) => Item::Trait(f.fold_item_trait(_binding_0)), - Item::TraitAlias(_binding_0) => { - Item::TraitAlias(f.fold_item_trait_alias(_binding_0)) + crate::Item::Impl(_binding_0) => crate::Item::Impl(f.fold_item_impl(_binding_0)), + crate::Item::Macro(_binding_0) => { + crate::Item::Macro(f.fold_item_macro(_binding_0)) + } + crate::Item::Mod(_binding_0) => crate::Item::Mod(f.fold_item_mod(_binding_0)), + crate::Item::Static(_binding_0) => { + crate::Item::Static(f.fold_item_static(_binding_0)) + } + crate::Item::Struct(_binding_0) => { + crate::Item::Struct(f.fold_item_struct(_binding_0)) + } + crate::Item::Trait(_binding_0) => { + crate::Item::Trait(f.fold_item_trait(_binding_0)) + } + crate::Item::TraitAlias(_binding_0) => { + crate::Item::TraitAlias(f.fold_item_trait_alias(_binding_0)) + } + crate::Item::Type(_binding_0) => crate::Item::Type(f.fold_item_type(_binding_0)), + crate::Item::Union(_binding_0) => { + crate::Item::Union(f.fold_item_union(_binding_0)) + } + crate::Item::Use(_binding_0) => crate::Item::Use(f.fold_item_use(_binding_0)), + crate::Item::Verbatim(_binding_0) => { + crate::Item::Verbatim(f.fold_token_stream(_binding_0)) } - Item::Type(_binding_0) => Item::Type(f.fold_item_type(_binding_0)), - Item::Union(_binding_0) => Item::Union(f.fold_item_union(_binding_0)), - Item::Use(_binding_0) => Item::Use(f.fold_item_use(_binding_0)), - Item::Verbatim(_binding_0) => Item::Verbatim(_binding_0), } } #[cfg(feature = "full")] -pub fn fold_item_const<F>(f: &mut F, node: ItemConst) -> ItemConst +#[cfg_attr(docsrs, doc(cfg(feature = 
"full")))] +pub fn fold_item_const<F>(f: &mut F, node: crate::ItemConst) -> crate::ItemConst where F: Fold + ?Sized, { - ItemConst { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ItemConst { + attrs: f.fold_attributes(node.attrs), vis: f.fold_visibility(node.vis), const_token: node.const_token, ident: f.fold_ident(node.ident), @@ -1906,27 +2424,32 @@ where } } #[cfg(feature = "full")] -pub fn fold_item_enum<F>(f: &mut F, node: ItemEnum) -> ItemEnum +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_item_enum<F>(f: &mut F, node: crate::ItemEnum) -> crate::ItemEnum where F: Fold + ?Sized, { - ItemEnum { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ItemEnum { + attrs: f.fold_attributes(node.attrs), vis: f.fold_visibility(node.vis), enum_token: node.enum_token, ident: f.fold_ident(node.ident), generics: f.fold_generics(node.generics), brace_token: node.brace_token, - variants: FoldHelper::lift(node.variants, |it| f.fold_variant(it)), + variants: crate::punctuated::fold(node.variants, f, F::fold_variant), } } #[cfg(feature = "full")] -pub fn fold_item_extern_crate<F>(f: &mut F, node: ItemExternCrate) -> ItemExternCrate +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_item_extern_crate<F>( + f: &mut F, + node: crate::ItemExternCrate, +) -> crate::ItemExternCrate where F: Fold + ?Sized, { - ItemExternCrate { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ItemExternCrate { + attrs: f.fold_attributes(node.attrs), vis: f.fold_visibility(node.vis), extern_token: node.extern_token, crate_token: node.crate_token, @@ -1936,37 +2459,43 @@ where } } #[cfg(feature = "full")] -pub fn fold_item_fn<F>(f: &mut F, node: ItemFn) -> ItemFn +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_item_fn<F>(f: &mut F, node: crate::ItemFn) -> crate::ItemFn where F: Fold + ?Sized, { - ItemFn { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ItemFn { + attrs: f.fold_attributes(node.attrs), vis: f.fold_visibility(node.vis), sig: f.fold_signature(node.sig), block: Box::new(f.fold_block(*node.block)), } } #[cfg(feature = "full")] -pub fn fold_item_foreign_mod<F>(f: &mut F, node: ItemForeignMod) -> ItemForeignMod +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_item_foreign_mod<F>( + f: &mut F, + node: crate::ItemForeignMod, +) -> crate::ItemForeignMod where F: Fold + ?Sized, { - ItemForeignMod { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ItemForeignMod { + attrs: f.fold_attributes(node.attrs), unsafety: node.unsafety, abi: f.fold_abi(node.abi), brace_token: node.brace_token, - items: FoldHelper::lift(node.items, |it| f.fold_foreign_item(it)), + items: fold_vec(node.items, f, F::fold_foreign_item), } } #[cfg(feature = "full")] -pub fn fold_item_impl<F>(f: &mut F, node: ItemImpl) -> ItemImpl +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_item_impl<F>(f: &mut F, node: crate::ItemImpl) -> crate::ItemImpl where F: Fold + ?Sized, { - ItemImpl { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ItemImpl { + attrs: f.fold_attributes(node.attrs), defaultness: node.defaultness, unsafety: node.unsafety, impl_token: node.impl_token, @@ -1974,44 +2503,46 @@ where trait_: (node.trait_).map(|it| ((it).0, f.fold_path((it).1), (it).2)), self_ty: Box::new(f.fold_type(*node.self_ty)), brace_token: node.brace_token, - items: FoldHelper::lift(node.items, |it| f.fold_impl_item(it)), + items: fold_vec(node.items, f, 
F::fold_impl_item), } } #[cfg(feature = "full")] -pub fn fold_item_macro<F>(f: &mut F, node: ItemMacro) -> ItemMacro +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_item_macro<F>(f: &mut F, node: crate::ItemMacro) -> crate::ItemMacro where F: Fold + ?Sized, { - ItemMacro { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ItemMacro { + attrs: f.fold_attributes(node.attrs), ident: (node.ident).map(|it| f.fold_ident(it)), mac: f.fold_macro(node.mac), semi_token: node.semi_token, } } #[cfg(feature = "full")] -pub fn fold_item_mod<F>(f: &mut F, node: ItemMod) -> ItemMod +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_item_mod<F>(f: &mut F, node: crate::ItemMod) -> crate::ItemMod where F: Fold + ?Sized, { - ItemMod { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ItemMod { + attrs: f.fold_attributes(node.attrs), vis: f.fold_visibility(node.vis), unsafety: node.unsafety, mod_token: node.mod_token, ident: f.fold_ident(node.ident), - content: (node.content) - .map(|it| ((it).0, FoldHelper::lift((it).1, |it| f.fold_item(it)))), + content: (node.content).map(|it| ((it).0, fold_vec((it).1, f, F::fold_item))), semi: node.semi, } } #[cfg(feature = "full")] -pub fn fold_item_static<F>(f: &mut F, node: ItemStatic) -> ItemStatic +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_item_static<F>(f: &mut F, node: crate::ItemStatic) -> crate::ItemStatic where F: Fold + ?Sized, { - ItemStatic { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ItemStatic { + attrs: f.fold_attributes(node.attrs), vis: f.fold_visibility(node.vis), static_token: node.static_token, mutability: f.fold_static_mutability(node.mutability), @@ -2024,12 +2555,13 @@ where } } #[cfg(feature = "full")] -pub fn fold_item_struct<F>(f: &mut F, node: ItemStruct) -> ItemStruct +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_item_struct<F>(f: &mut F, node: crate::ItemStruct) -> crate::ItemStruct where F: Fold + ?Sized, { - ItemStruct { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ItemStruct { + attrs: f.fold_attributes(node.attrs), vis: f.fold_visibility(node.vis), struct_token: node.struct_token, ident: f.fold_ident(node.ident), @@ -2039,12 +2571,13 @@ where } } #[cfg(feature = "full")] -pub fn fold_item_trait<F>(f: &mut F, node: ItemTrait) -> ItemTrait +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_item_trait<F>(f: &mut F, node: crate::ItemTrait) -> crate::ItemTrait where F: Fold + ?Sized, { - ItemTrait { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ItemTrait { + attrs: f.fold_attributes(node.attrs), vis: f.fold_visibility(node.vis), unsafety: node.unsafety, auto_token: node.auto_token, @@ -2053,37 +2586,43 @@ where ident: f.fold_ident(node.ident), generics: f.fold_generics(node.generics), colon_token: node.colon_token, - supertraits: FoldHelper::lift( + supertraits: crate::punctuated::fold( node.supertraits, - |it| f.fold_type_param_bound(it), + f, + F::fold_type_param_bound, ), brace_token: node.brace_token, - items: FoldHelper::lift(node.items, |it| f.fold_trait_item(it)), + items: fold_vec(node.items, f, F::fold_trait_item), } } #[cfg(feature = "full")] -pub fn fold_item_trait_alias<F>(f: &mut F, node: ItemTraitAlias) -> ItemTraitAlias +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_item_trait_alias<F>( + f: &mut F, + node: crate::ItemTraitAlias, +) -> crate::ItemTraitAlias where F: Fold + ?Sized, { - ItemTraitAlias { - 
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ItemTraitAlias { + attrs: f.fold_attributes(node.attrs), vis: f.fold_visibility(node.vis), trait_token: node.trait_token, ident: f.fold_ident(node.ident), generics: f.fold_generics(node.generics), eq_token: node.eq_token, - bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)), + bounds: crate::punctuated::fold(node.bounds, f, F::fold_type_param_bound), semi_token: node.semi_token, } } #[cfg(feature = "full")] -pub fn fold_item_type<F>(f: &mut F, node: ItemType) -> ItemType +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_item_type<F>(f: &mut F, node: crate::ItemType) -> crate::ItemType where F: Fold + ?Sized, { - ItemType { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ItemType { + attrs: f.fold_attributes(node.attrs), vis: f.fold_visibility(node.vis), type_token: node.type_token, ident: f.fold_ident(node.ident), @@ -2094,12 +2633,13 @@ where } } #[cfg(feature = "full")] -pub fn fold_item_union<F>(f: &mut F, node: ItemUnion) -> ItemUnion +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_item_union<F>(f: &mut F, node: crate::ItemUnion) -> crate::ItemUnion where F: Fold + ?Sized, { - ItemUnion { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ItemUnion { + attrs: f.fold_attributes(node.attrs), vis: f.fold_visibility(node.vis), union_token: node.union_token, ident: f.fold_ident(node.ident), @@ -2108,12 +2648,13 @@ where } } #[cfg(feature = "full")] -pub fn fold_item_use<F>(f: &mut F, node: ItemUse) -> ItemUse +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_item_use<F>(f: &mut F, node: crate::ItemUse) -> crate::ItemUse where F: Fold + ?Sized, { - ItemUse { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::ItemUse { + attrs: f.fold_attributes(node.attrs), vis: f.fold_visibility(node.vis), use_token: node.use_token, leading_colon: node.leading_colon, @@ -2122,61 +2663,78 @@ where } } #[cfg(feature = "full")] -pub fn fold_label<F>(f: &mut F, node: Label) -> Label +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_label<F>(f: &mut F, node: crate::Label) -> crate::Label where F: Fold + ?Sized, { - Label { + crate::Label { name: f.fold_lifetime(node.name), colon_token: node.colon_token, } } -pub fn fold_lifetime<F>(f: &mut F, node: Lifetime) -> Lifetime +pub fn fold_lifetime<F>(f: &mut F, node: crate::Lifetime) -> crate::Lifetime where F: Fold + ?Sized, { - Lifetime { + crate::Lifetime { apostrophe: f.fold_span(node.apostrophe), ident: f.fold_ident(node.ident), } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_lifetime_param<F>(f: &mut F, node: LifetimeParam) -> LifetimeParam +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_lifetime_param<F>( + f: &mut F, + node: crate::LifetimeParam, +) -> crate::LifetimeParam where F: Fold + ?Sized, { - LifetimeParam { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::LifetimeParam { + attrs: f.fold_attributes(node.attrs), lifetime: f.fold_lifetime(node.lifetime), colon_token: node.colon_token, - bounds: FoldHelper::lift(node.bounds, |it| f.fold_lifetime(it)), + bounds: crate::punctuated::fold(node.bounds, f, F::fold_lifetime), } } -pub fn fold_lit<F>(f: &mut F, node: Lit) -> Lit +pub fn fold_lit<F>(f: &mut F, node: crate::Lit) -> crate::Lit where F: Fold + ?Sized, { match node { - Lit::Str(_binding_0) => Lit::Str(f.fold_lit_str(_binding_0)), - 
Lit::ByteStr(_binding_0) => Lit::ByteStr(f.fold_lit_byte_str(_binding_0)), - Lit::Byte(_binding_0) => Lit::Byte(f.fold_lit_byte(_binding_0)), - Lit::Char(_binding_0) => Lit::Char(f.fold_lit_char(_binding_0)), - Lit::Int(_binding_0) => Lit::Int(f.fold_lit_int(_binding_0)), - Lit::Float(_binding_0) => Lit::Float(f.fold_lit_float(_binding_0)), - Lit::Bool(_binding_0) => Lit::Bool(f.fold_lit_bool(_binding_0)), - Lit::Verbatim(_binding_0) => Lit::Verbatim(_binding_0), + crate::Lit::Str(_binding_0) => crate::Lit::Str(f.fold_lit_str(_binding_0)), + crate::Lit::ByteStr(_binding_0) => { + crate::Lit::ByteStr(f.fold_lit_byte_str(_binding_0)) + } + crate::Lit::CStr(_binding_0) => crate::Lit::CStr(f.fold_lit_cstr(_binding_0)), + crate::Lit::Byte(_binding_0) => crate::Lit::Byte(f.fold_lit_byte(_binding_0)), + crate::Lit::Char(_binding_0) => crate::Lit::Char(f.fold_lit_char(_binding_0)), + crate::Lit::Int(_binding_0) => crate::Lit::Int(f.fold_lit_int(_binding_0)), + crate::Lit::Float(_binding_0) => crate::Lit::Float(f.fold_lit_float(_binding_0)), + crate::Lit::Bool(_binding_0) => crate::Lit::Bool(f.fold_lit_bool(_binding_0)), + crate::Lit::Verbatim(_binding_0) => crate::Lit::Verbatim(_binding_0), } } -pub fn fold_lit_bool<F>(f: &mut F, node: LitBool) -> LitBool +pub fn fold_lit_bool<F>(f: &mut F, node: crate::LitBool) -> crate::LitBool where F: Fold + ?Sized, { - LitBool { + crate::LitBool { value: node.value, span: f.fold_span(node.span), } } -pub fn fold_lit_byte<F>(f: &mut F, node: LitByte) -> LitByte +pub fn fold_lit_byte<F>(f: &mut F, node: crate::LitByte) -> crate::LitByte +where + F: Fold + ?Sized, +{ + let span = f.fold_span(node.span()); + let mut node = node; + node.set_span(span); + node +} +pub fn fold_lit_byte_str<F>(f: &mut F, node: crate::LitByteStr) -> crate::LitByteStr where F: Fold + ?Sized, { @@ -2185,7 +2743,7 @@ where node.set_span(span); node } -pub fn fold_lit_byte_str<F>(f: &mut F, node: LitByteStr) -> LitByteStr +pub fn fold_lit_cstr<F>(f: &mut F, node: crate::LitCStr) -> crate::LitCStr where F: Fold + ?Sized, { @@ -2194,7 +2752,7 @@ where node.set_span(span); node } -pub fn fold_lit_char<F>(f: &mut F, node: LitChar) -> LitChar +pub fn fold_lit_char<F>(f: &mut F, node: crate::LitChar) -> crate::LitChar where F: Fold + ?Sized, { @@ -2203,7 +2761,7 @@ where node.set_span(span); node } -pub fn fold_lit_float<F>(f: &mut F, node: LitFloat) -> LitFloat +pub fn fold_lit_float<F>(f: &mut F, node: crate::LitFloat) -> crate::LitFloat where F: Fold + ?Sized, { @@ -2212,7 +2770,7 @@ where node.set_span(span); node } -pub fn fold_lit_int<F>(f: &mut F, node: LitInt) -> LitInt +pub fn fold_lit_int<F>(f: &mut F, node: crate::LitInt) -> crate::LitInt where F: Fold + ?Sized, { @@ -2221,7 +2779,7 @@ where node.set_span(span); node } -pub fn fold_lit_str<F>(f: &mut F, node: LitStr) -> LitStr +pub fn fold_lit_str<F>(f: &mut F, node: crate::LitStr) -> crate::LitStr where F: Fold + ?Sized, { @@ -2231,12 +2789,13 @@ where node } #[cfg(feature = "full")] -pub fn fold_local<F>(f: &mut F, node: Local) -> Local +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_local<F>(f: &mut F, node: crate::Local) -> crate::Local where F: Fold + ?Sized, { - Local { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::Local { + attrs: f.fold_attributes(node.attrs), let_token: node.let_token, pat: f.fold_pat(node.pat), init: (node.init).map(|it| f.fold_local_init(it)), @@ -2244,132 +2803,164 @@ where } } #[cfg(feature = "full")] -pub fn fold_local_init<F>(f: &mut F, node: LocalInit) -> 
LocalInit +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_local_init<F>(f: &mut F, node: crate::LocalInit) -> crate::LocalInit where F: Fold + ?Sized, { - LocalInit { + crate::LocalInit { eq_token: node.eq_token, expr: Box::new(f.fold_expr(*node.expr)), diverge: (node.diverge).map(|it| ((it).0, Box::new(f.fold_expr(*(it).1)))), } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_macro<F>(f: &mut F, node: Macro) -> Macro +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_macro<F>(f: &mut F, node: crate::Macro) -> crate::Macro where F: Fold + ?Sized, { - Macro { + crate::Macro { path: f.fold_path(node.path), bang_token: node.bang_token, delimiter: f.fold_macro_delimiter(node.delimiter), - tokens: node.tokens, + tokens: f.fold_token_stream(node.tokens), } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_macro_delimiter<F>(f: &mut F, node: MacroDelimiter) -> MacroDelimiter +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_macro_delimiter<F>( + f: &mut F, + node: crate::MacroDelimiter, +) -> crate::MacroDelimiter where F: Fold + ?Sized, { match node { - MacroDelimiter::Paren(_binding_0) => MacroDelimiter::Paren(_binding_0), - MacroDelimiter::Brace(_binding_0) => MacroDelimiter::Brace(_binding_0), - MacroDelimiter::Bracket(_binding_0) => MacroDelimiter::Bracket(_binding_0), + crate::MacroDelimiter::Paren(_binding_0) => { + crate::MacroDelimiter::Paren(_binding_0) + } + crate::MacroDelimiter::Brace(_binding_0) => { + crate::MacroDelimiter::Brace(_binding_0) + } + crate::MacroDelimiter::Bracket(_binding_0) => { + crate::MacroDelimiter::Bracket(_binding_0) + } } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_member<F>(f: &mut F, node: Member) -> Member +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_member<F>(f: &mut F, node: crate::Member) -> crate::Member where F: Fold + ?Sized, { match node { - Member::Named(_binding_0) => Member::Named(f.fold_ident(_binding_0)), - Member::Unnamed(_binding_0) => Member::Unnamed(f.fold_index(_binding_0)), + crate::Member::Named(_binding_0) => { + crate::Member::Named(f.fold_ident(_binding_0)) + } + crate::Member::Unnamed(_binding_0) => { + crate::Member::Unnamed(f.fold_index(_binding_0)) + } } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_meta<F>(f: &mut F, node: Meta) -> Meta +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_meta<F>(f: &mut F, node: crate::Meta) -> crate::Meta where F: Fold + ?Sized, { match node { - Meta::Path(_binding_0) => Meta::Path(f.fold_path(_binding_0)), - Meta::List(_binding_0) => Meta::List(f.fold_meta_list(_binding_0)), - Meta::NameValue(_binding_0) => { - Meta::NameValue(f.fold_meta_name_value(_binding_0)) + crate::Meta::Path(_binding_0) => crate::Meta::Path(f.fold_path(_binding_0)), + crate::Meta::List(_binding_0) => crate::Meta::List(f.fold_meta_list(_binding_0)), + crate::Meta::NameValue(_binding_0) => { + crate::Meta::NameValue(f.fold_meta_name_value(_binding_0)) } } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_meta_list<F>(f: &mut F, node: MetaList) -> MetaList +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_meta_list<F>(f: &mut F, node: crate::MetaList) -> crate::MetaList where F: Fold + ?Sized, { - MetaList { + crate::MetaList { path: f.fold_path(node.path), delimiter: f.fold_macro_delimiter(node.delimiter), - tokens: node.tokens, + tokens: 
f.fold_token_stream(node.tokens), } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_meta_name_value<F>(f: &mut F, node: MetaNameValue) -> MetaNameValue +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_meta_name_value<F>( + f: &mut F, + node: crate::MetaNameValue, +) -> crate::MetaNameValue where F: Fold + ?Sized, { - MetaNameValue { + crate::MetaNameValue { path: f.fold_path(node.path), eq_token: node.eq_token, value: f.fold_expr(node.value), } } #[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] pub fn fold_parenthesized_generic_arguments<F>( f: &mut F, - node: ParenthesizedGenericArguments, -) -> ParenthesizedGenericArguments + node: crate::ParenthesizedGenericArguments, +) -> crate::ParenthesizedGenericArguments where F: Fold + ?Sized, { - ParenthesizedGenericArguments { + crate::ParenthesizedGenericArguments { paren_token: node.paren_token, - inputs: FoldHelper::lift(node.inputs, |it| f.fold_type(it)), + inputs: crate::punctuated::fold(node.inputs, f, F::fold_type), output: f.fold_return_type(node.output), } } #[cfg(feature = "full")] -pub fn fold_pat<F>(f: &mut F, node: Pat) -> Pat +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_pat<F>(f: &mut F, node: crate::Pat) -> crate::Pat where F: Fold + ?Sized, { match node { - Pat::Const(_binding_0) => Pat::Const(f.fold_expr_const(_binding_0)), - Pat::Ident(_binding_0) => Pat::Ident(f.fold_pat_ident(_binding_0)), - Pat::Lit(_binding_0) => Pat::Lit(f.fold_expr_lit(_binding_0)), - Pat::Macro(_binding_0) => Pat::Macro(f.fold_expr_macro(_binding_0)), - Pat::Or(_binding_0) => Pat::Or(f.fold_pat_or(_binding_0)), - Pat::Paren(_binding_0) => Pat::Paren(f.fold_pat_paren(_binding_0)), - Pat::Path(_binding_0) => Pat::Path(f.fold_expr_path(_binding_0)), - Pat::Range(_binding_0) => Pat::Range(f.fold_expr_range(_binding_0)), - Pat::Reference(_binding_0) => Pat::Reference(f.fold_pat_reference(_binding_0)), - Pat::Rest(_binding_0) => Pat::Rest(f.fold_pat_rest(_binding_0)), - Pat::Slice(_binding_0) => Pat::Slice(f.fold_pat_slice(_binding_0)), - Pat::Struct(_binding_0) => Pat::Struct(f.fold_pat_struct(_binding_0)), - Pat::Tuple(_binding_0) => Pat::Tuple(f.fold_pat_tuple(_binding_0)), - Pat::TupleStruct(_binding_0) => { - Pat::TupleStruct(f.fold_pat_tuple_struct(_binding_0)) + crate::Pat::Const(_binding_0) => crate::Pat::Const(f.fold_expr_const(_binding_0)), + crate::Pat::Ident(_binding_0) => crate::Pat::Ident(f.fold_pat_ident(_binding_0)), + crate::Pat::Lit(_binding_0) => crate::Pat::Lit(f.fold_expr_lit(_binding_0)), + crate::Pat::Macro(_binding_0) => crate::Pat::Macro(f.fold_expr_macro(_binding_0)), + crate::Pat::Or(_binding_0) => crate::Pat::Or(f.fold_pat_or(_binding_0)), + crate::Pat::Paren(_binding_0) => crate::Pat::Paren(f.fold_pat_paren(_binding_0)), + crate::Pat::Path(_binding_0) => crate::Pat::Path(f.fold_expr_path(_binding_0)), + crate::Pat::Range(_binding_0) => crate::Pat::Range(f.fold_expr_range(_binding_0)), + crate::Pat::Reference(_binding_0) => { + crate::Pat::Reference(f.fold_pat_reference(_binding_0)) + } + crate::Pat::Rest(_binding_0) => crate::Pat::Rest(f.fold_pat_rest(_binding_0)), + crate::Pat::Slice(_binding_0) => crate::Pat::Slice(f.fold_pat_slice(_binding_0)), + crate::Pat::Struct(_binding_0) => { + crate::Pat::Struct(f.fold_pat_struct(_binding_0)) + } + crate::Pat::Tuple(_binding_0) => crate::Pat::Tuple(f.fold_pat_tuple(_binding_0)), + crate::Pat::TupleStruct(_binding_0) => { + 
crate::Pat::TupleStruct(f.fold_pat_tuple_struct(_binding_0)) } - Pat::Type(_binding_0) => Pat::Type(f.fold_pat_type(_binding_0)), - Pat::Verbatim(_binding_0) => Pat::Verbatim(_binding_0), - Pat::Wild(_binding_0) => Pat::Wild(f.fold_pat_wild(_binding_0)), + crate::Pat::Type(_binding_0) => crate::Pat::Type(f.fold_pat_type(_binding_0)), + crate::Pat::Verbatim(_binding_0) => { + crate::Pat::Verbatim(f.fold_token_stream(_binding_0)) + } + crate::Pat::Wild(_binding_0) => crate::Pat::Wild(f.fold_pat_wild(_binding_0)), } } #[cfg(feature = "full")] -pub fn fold_pat_ident<F>(f: &mut F, node: PatIdent) -> PatIdent +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_pat_ident<F>(f: &mut F, node: crate::PatIdent) -> crate::PatIdent where F: Fold + ?Sized, { - PatIdent { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::PatIdent { + attrs: f.fold_attributes(node.attrs), by_ref: node.by_ref, mutability: node.mutability, ident: f.fold_ident(node.ident), @@ -2377,191 +2968,250 @@ where } } #[cfg(feature = "full")] -pub fn fold_pat_or<F>(f: &mut F, node: PatOr) -> PatOr +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_pat_or<F>(f: &mut F, node: crate::PatOr) -> crate::PatOr where F: Fold + ?Sized, { - PatOr { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::PatOr { + attrs: f.fold_attributes(node.attrs), leading_vert: node.leading_vert, - cases: FoldHelper::lift(node.cases, |it| f.fold_pat(it)), + cases: crate::punctuated::fold(node.cases, f, F::fold_pat), } } #[cfg(feature = "full")] -pub fn fold_pat_paren<F>(f: &mut F, node: PatParen) -> PatParen +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_pat_paren<F>(f: &mut F, node: crate::PatParen) -> crate::PatParen where F: Fold + ?Sized, { - PatParen { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::PatParen { + attrs: f.fold_attributes(node.attrs), paren_token: node.paren_token, pat: Box::new(f.fold_pat(*node.pat)), } } #[cfg(feature = "full")] -pub fn fold_pat_reference<F>(f: &mut F, node: PatReference) -> PatReference +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_pat_reference<F>(f: &mut F, node: crate::PatReference) -> crate::PatReference where F: Fold + ?Sized, { - PatReference { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::PatReference { + attrs: f.fold_attributes(node.attrs), and_token: node.and_token, mutability: node.mutability, pat: Box::new(f.fold_pat(*node.pat)), } } #[cfg(feature = "full")] -pub fn fold_pat_rest<F>(f: &mut F, node: PatRest) -> PatRest +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_pat_rest<F>(f: &mut F, node: crate::PatRest) -> crate::PatRest where F: Fold + ?Sized, { - PatRest { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::PatRest { + attrs: f.fold_attributes(node.attrs), dot2_token: node.dot2_token, } } #[cfg(feature = "full")] -pub fn fold_pat_slice<F>(f: &mut F, node: PatSlice) -> PatSlice +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_pat_slice<F>(f: &mut F, node: crate::PatSlice) -> crate::PatSlice where F: Fold + ?Sized, { - PatSlice { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::PatSlice { + attrs: f.fold_attributes(node.attrs), bracket_token: node.bracket_token, - elems: FoldHelper::lift(node.elems, |it| f.fold_pat(it)), + elems: crate::punctuated::fold(node.elems, f, F::fold_pat), } } #[cfg(feature = "full")] -pub fn fold_pat_struct<F>(f: &mut F, node: PatStruct) -> PatStruct 
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_pat_struct<F>(f: &mut F, node: crate::PatStruct) -> crate::PatStruct where F: Fold + ?Sized, { - PatStruct { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::PatStruct { + attrs: f.fold_attributes(node.attrs), qself: (node.qself).map(|it| f.fold_qself(it)), path: f.fold_path(node.path), brace_token: node.brace_token, - fields: FoldHelper::lift(node.fields, |it| f.fold_field_pat(it)), + fields: crate::punctuated::fold(node.fields, f, F::fold_field_pat), rest: (node.rest).map(|it| f.fold_pat_rest(it)), } } #[cfg(feature = "full")] -pub fn fold_pat_tuple<F>(f: &mut F, node: PatTuple) -> PatTuple +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_pat_tuple<F>(f: &mut F, node: crate::PatTuple) -> crate::PatTuple where F: Fold + ?Sized, { - PatTuple { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::PatTuple { + attrs: f.fold_attributes(node.attrs), paren_token: node.paren_token, - elems: FoldHelper::lift(node.elems, |it| f.fold_pat(it)), + elems: crate::punctuated::fold(node.elems, f, F::fold_pat), } } #[cfg(feature = "full")] -pub fn fold_pat_tuple_struct<F>(f: &mut F, node: PatTupleStruct) -> PatTupleStruct +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_pat_tuple_struct<F>( + f: &mut F, + node: crate::PatTupleStruct, +) -> crate::PatTupleStruct where F: Fold + ?Sized, { - PatTupleStruct { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::PatTupleStruct { + attrs: f.fold_attributes(node.attrs), qself: (node.qself).map(|it| f.fold_qself(it)), path: f.fold_path(node.path), paren_token: node.paren_token, - elems: FoldHelper::lift(node.elems, |it| f.fold_pat(it)), + elems: crate::punctuated::fold(node.elems, f, F::fold_pat), } } #[cfg(feature = "full")] -pub fn fold_pat_type<F>(f: &mut F, node: PatType) -> PatType +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_pat_type<F>(f: &mut F, node: crate::PatType) -> crate::PatType where F: Fold + ?Sized, { - PatType { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::PatType { + attrs: f.fold_attributes(node.attrs), pat: Box::new(f.fold_pat(*node.pat)), colon_token: node.colon_token, ty: Box::new(f.fold_type(*node.ty)), } } #[cfg(feature = "full")] -pub fn fold_pat_wild<F>(f: &mut F, node: PatWild) -> PatWild +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_pat_wild<F>(f: &mut F, node: crate::PatWild) -> crate::PatWild where F: Fold + ?Sized, { - PatWild { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::PatWild { + attrs: f.fold_attributes(node.attrs), underscore_token: node.underscore_token, } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_path<F>(f: &mut F, node: Path) -> Path +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_path<F>(f: &mut F, node: crate::Path) -> crate::Path where F: Fold + ?Sized, { - Path { + crate::Path { leading_colon: node.leading_colon, - segments: FoldHelper::lift(node.segments, |it| f.fold_path_segment(it)), + segments: crate::punctuated::fold(node.segments, f, F::fold_path_segment), } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_path_arguments<F>(f: &mut F, node: PathArguments) -> PathArguments +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_path_arguments<F>( + f: &mut F, + node: crate::PathArguments, +) -> crate::PathArguments where F: Fold + ?Sized, { match node { - 
PathArguments::None => PathArguments::None, - PathArguments::AngleBracketed(_binding_0) => { - PathArguments::AngleBracketed( + crate::PathArguments::None => crate::PathArguments::None, + crate::PathArguments::AngleBracketed(_binding_0) => { + crate::PathArguments::AngleBracketed( f.fold_angle_bracketed_generic_arguments(_binding_0), ) } - PathArguments::Parenthesized(_binding_0) => { - PathArguments::Parenthesized( + crate::PathArguments::Parenthesized(_binding_0) => { + crate::PathArguments::Parenthesized( f.fold_parenthesized_generic_arguments(_binding_0), ) } } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_path_segment<F>(f: &mut F, node: PathSegment) -> PathSegment +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_path_segment<F>(f: &mut F, node: crate::PathSegment) -> crate::PathSegment where F: Fold + ?Sized, { - PathSegment { + crate::PathSegment { ident: f.fold_ident(node.ident), arguments: f.fold_path_arguments(node.arguments), } } +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_pointer_mutability<F>( + f: &mut F, + node: crate::PointerMutability, +) -> crate::PointerMutability +where + F: Fold + ?Sized, +{ + match node { + crate::PointerMutability::Const(_binding_0) => { + crate::PointerMutability::Const(_binding_0) + } + crate::PointerMutability::Mut(_binding_0) => { + crate::PointerMutability::Mut(_binding_0) + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_precise_capture<F>( + f: &mut F, + node: crate::PreciseCapture, +) -> crate::PreciseCapture +where + F: Fold + ?Sized, +{ + crate::PreciseCapture { + use_token: node.use_token, + lt_token: node.lt_token, + params: crate::punctuated::fold(node.params, f, F::fold_captured_param), + gt_token: node.gt_token, + } +} #[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] pub fn fold_predicate_lifetime<F>( f: &mut F, - node: PredicateLifetime, -) -> PredicateLifetime + node: crate::PredicateLifetime, +) -> crate::PredicateLifetime where F: Fold + ?Sized, { - PredicateLifetime { + crate::PredicateLifetime { lifetime: f.fold_lifetime(node.lifetime), colon_token: node.colon_token, - bounds: FoldHelper::lift(node.bounds, |it| f.fold_lifetime(it)), + bounds: crate::punctuated::fold(node.bounds, f, F::fold_lifetime), } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_predicate_type<F>(f: &mut F, node: PredicateType) -> PredicateType +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_predicate_type<F>( + f: &mut F, + node: crate::PredicateType, +) -> crate::PredicateType where F: Fold + ?Sized, { - PredicateType { + crate::PredicateType { lifetimes: (node.lifetimes).map(|it| f.fold_bound_lifetimes(it)), bounded_ty: f.fold_type(node.bounded_ty), colon_token: node.colon_token, - bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)), + bounds: crate::punctuated::fold(node.bounds, f, F::fold_type_param_bound), } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_qself<F>(f: &mut F, node: QSelf) -> QSelf +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_qself<F>(f: &mut F, node: crate::QSelf) -> crate::QSelf where F: Fold + ?Sized, { - QSelf { + crate::QSelf { lt_token: node.lt_token, ty: Box::new(f.fold_type(*node.ty)), position: node.position, @@ -2570,22 +3220,26 @@ where } } #[cfg(feature = "full")] -pub fn 
fold_range_limits<F>(f: &mut F, node: RangeLimits) -> RangeLimits +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_range_limits<F>(f: &mut F, node: crate::RangeLimits) -> crate::RangeLimits where F: Fold + ?Sized, { match node { - RangeLimits::HalfOpen(_binding_0) => RangeLimits::HalfOpen(_binding_0), - RangeLimits::Closed(_binding_0) => RangeLimits::Closed(_binding_0), + crate::RangeLimits::HalfOpen(_binding_0) => { + crate::RangeLimits::HalfOpen(_binding_0) + } + crate::RangeLimits::Closed(_binding_0) => crate::RangeLimits::Closed(_binding_0), } } #[cfg(feature = "full")] -pub fn fold_receiver<F>(f: &mut F, node: Receiver) -> Receiver +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_receiver<F>(f: &mut F, node: crate::Receiver) -> crate::Receiver where F: Fold + ?Sized, { - Receiver { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::Receiver { + attrs: f.fold_attributes(node.attrs), reference: (node.reference) .map(|it| ((it).0, ((it).1).map(|it| f.fold_lifetime(it)))), mutability: node.mutability, @@ -2595,23 +3249,25 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_return_type<F>(f: &mut F, node: ReturnType) -> ReturnType +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_return_type<F>(f: &mut F, node: crate::ReturnType) -> crate::ReturnType where F: Fold + ?Sized, { match node { - ReturnType::Default => ReturnType::Default, - ReturnType::Type(_binding_0, _binding_1) => { - ReturnType::Type(_binding_0, Box::new(f.fold_type(*_binding_1))) + crate::ReturnType::Default => crate::ReturnType::Default, + crate::ReturnType::Type(_binding_0, _binding_1) => { + crate::ReturnType::Type(_binding_0, Box::new(f.fold_type(*_binding_1))) } } } #[cfg(feature = "full")] -pub fn fold_signature<F>(f: &mut F, node: Signature) -> Signature +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_signature<F>(f: &mut F, node: crate::Signature) -> crate::Signature where F: Fold + ?Sized, { - Signature { + crate::Signature { constness: node.constness, asyncness: node.asyncness, unsafety: node.unsafety, @@ -2620,58 +3276,69 @@ where ident: f.fold_ident(node.ident), generics: f.fold_generics(node.generics), paren_token: node.paren_token, - inputs: FoldHelper::lift(node.inputs, |it| f.fold_fn_arg(it)), + inputs: crate::punctuated::fold(node.inputs, f, F::fold_fn_arg), variadic: (node.variadic).map(|it| f.fold_variadic(it)), output: f.fold_return_type(node.output), } } -pub fn fold_span<F>(f: &mut F, node: Span) -> Span +pub fn fold_span<F>(f: &mut F, node: proc_macro2::Span) -> proc_macro2::Span where F: Fold + ?Sized, { node } #[cfg(feature = "full")] -pub fn fold_static_mutability<F>(f: &mut F, node: StaticMutability) -> StaticMutability +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_static_mutability<F>( + f: &mut F, + node: crate::StaticMutability, +) -> crate::StaticMutability where F: Fold + ?Sized, { match node { - StaticMutability::Mut(_binding_0) => StaticMutability::Mut(_binding_0), - StaticMutability::None => StaticMutability::None, + crate::StaticMutability::Mut(_binding_0) => { + crate::StaticMutability::Mut(_binding_0) + } + crate::StaticMutability::None => crate::StaticMutability::None, } } #[cfg(feature = "full")] -pub fn fold_stmt<F>(f: &mut F, node: Stmt) -> Stmt +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_stmt<F>(f: &mut F, node: crate::Stmt) -> crate::Stmt where F: Fold + ?Sized, { match node { - Stmt::Local(_binding_0) => 
Stmt::Local(f.fold_local(_binding_0)), - Stmt::Item(_binding_0) => Stmt::Item(f.fold_item(_binding_0)), - Stmt::Expr(_binding_0, _binding_1) => { - Stmt::Expr(f.fold_expr(_binding_0), _binding_1) + crate::Stmt::Local(_binding_0) => crate::Stmt::Local(f.fold_local(_binding_0)), + crate::Stmt::Item(_binding_0) => crate::Stmt::Item(f.fold_item(_binding_0)), + crate::Stmt::Expr(_binding_0, _binding_1) => { + crate::Stmt::Expr(f.fold_expr(_binding_0), _binding_1) + } + crate::Stmt::Macro(_binding_0) => { + crate::Stmt::Macro(f.fold_stmt_macro(_binding_0)) } - Stmt::Macro(_binding_0) => Stmt::Macro(f.fold_stmt_macro(_binding_0)), } } #[cfg(feature = "full")] -pub fn fold_stmt_macro<F>(f: &mut F, node: StmtMacro) -> StmtMacro +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_stmt_macro<F>(f: &mut F, node: crate::StmtMacro) -> crate::StmtMacro where F: Fold + ?Sized, { - StmtMacro { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::StmtMacro { + attrs: f.fold_attributes(node.attrs), mac: f.fold_macro(node.mac), semi_token: node.semi_token, } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_trait_bound<F>(f: &mut F, node: TraitBound) -> TraitBound +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_trait_bound<F>(f: &mut F, node: crate::TraitBound) -> crate::TraitBound where F: Fold + ?Sized, { - TraitBound { + crate::TraitBound { paren_token: node.paren_token, modifier: f.fold_trait_bound_modifier(node.modifier), lifetimes: (node.lifetimes).map(|it| f.fold_bound_lifetimes(it)), @@ -2679,44 +3346,56 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] pub fn fold_trait_bound_modifier<F>( f: &mut F, - node: TraitBoundModifier, -) -> TraitBoundModifier + node: crate::TraitBoundModifier, +) -> crate::TraitBoundModifier where F: Fold + ?Sized, { match node { - TraitBoundModifier::None => TraitBoundModifier::None, - TraitBoundModifier::Maybe(_binding_0) => TraitBoundModifier::Maybe(_binding_0), + crate::TraitBoundModifier::None => crate::TraitBoundModifier::None, + crate::TraitBoundModifier::Maybe(_binding_0) => { + crate::TraitBoundModifier::Maybe(_binding_0) + } } } #[cfg(feature = "full")] -pub fn fold_trait_item<F>(f: &mut F, node: TraitItem) -> TraitItem +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_trait_item<F>(f: &mut F, node: crate::TraitItem) -> crate::TraitItem where F: Fold + ?Sized, { match node { - TraitItem::Const(_binding_0) => { - TraitItem::Const(f.fold_trait_item_const(_binding_0)) + crate::TraitItem::Const(_binding_0) => { + crate::TraitItem::Const(f.fold_trait_item_const(_binding_0)) + } + crate::TraitItem::Fn(_binding_0) => { + crate::TraitItem::Fn(f.fold_trait_item_fn(_binding_0)) } - TraitItem::Fn(_binding_0) => TraitItem::Fn(f.fold_trait_item_fn(_binding_0)), - TraitItem::Type(_binding_0) => { - TraitItem::Type(f.fold_trait_item_type(_binding_0)) + crate::TraitItem::Type(_binding_0) => { + crate::TraitItem::Type(f.fold_trait_item_type(_binding_0)) } - TraitItem::Macro(_binding_0) => { - TraitItem::Macro(f.fold_trait_item_macro(_binding_0)) + crate::TraitItem::Macro(_binding_0) => { + crate::TraitItem::Macro(f.fold_trait_item_macro(_binding_0)) + } + crate::TraitItem::Verbatim(_binding_0) => { + crate::TraitItem::Verbatim(f.fold_token_stream(_binding_0)) } - TraitItem::Verbatim(_binding_0) => TraitItem::Verbatim(_binding_0), } } #[cfg(feature = "full")] -pub fn fold_trait_item_const<F>(f: &mut 
F, node: TraitItemConst) -> TraitItemConst +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_trait_item_const<F>( + f: &mut F, + node: crate::TraitItemConst, +) -> crate::TraitItemConst where F: Fold + ?Sized, { - TraitItemConst { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::TraitItemConst { + attrs: f.fold_attributes(node.attrs), const_token: node.const_token, ident: f.fold_ident(node.ident), generics: f.fold_generics(node.generics), @@ -2727,77 +3406,110 @@ where } } #[cfg(feature = "full")] -pub fn fold_trait_item_fn<F>(f: &mut F, node: TraitItemFn) -> TraitItemFn +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_trait_item_fn<F>(f: &mut F, node: crate::TraitItemFn) -> crate::TraitItemFn where F: Fold + ?Sized, { - TraitItemFn { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::TraitItemFn { + attrs: f.fold_attributes(node.attrs), sig: f.fold_signature(node.sig), default: (node.default).map(|it| f.fold_block(it)), semi_token: node.semi_token, } } #[cfg(feature = "full")] -pub fn fold_trait_item_macro<F>(f: &mut F, node: TraitItemMacro) -> TraitItemMacro +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_trait_item_macro<F>( + f: &mut F, + node: crate::TraitItemMacro, +) -> crate::TraitItemMacro where F: Fold + ?Sized, { - TraitItemMacro { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::TraitItemMacro { + attrs: f.fold_attributes(node.attrs), mac: f.fold_macro(node.mac), semi_token: node.semi_token, } } #[cfg(feature = "full")] -pub fn fold_trait_item_type<F>(f: &mut F, node: TraitItemType) -> TraitItemType +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_trait_item_type<F>( + f: &mut F, + node: crate::TraitItemType, +) -> crate::TraitItemType where F: Fold + ?Sized, { - TraitItemType { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::TraitItemType { + attrs: f.fold_attributes(node.attrs), type_token: node.type_token, ident: f.fold_ident(node.ident), generics: f.fold_generics(node.generics), colon_token: node.colon_token, - bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)), + bounds: crate::punctuated::fold(node.bounds, f, F::fold_type_param_bound), default: (node.default).map(|it| ((it).0, f.fold_type((it).1))), semi_token: node.semi_token, } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_type<F>(f: &mut F, node: Type) -> Type +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_type<F>(f: &mut F, node: crate::Type) -> crate::Type where F: Fold + ?Sized, { match node { - Type::Array(_binding_0) => Type::Array(f.fold_type_array(_binding_0)), - Type::BareFn(_binding_0) => Type::BareFn(f.fold_type_bare_fn(_binding_0)), - Type::Group(_binding_0) => Type::Group(f.fold_type_group(_binding_0)), - Type::ImplTrait(_binding_0) => { - Type::ImplTrait(f.fold_type_impl_trait(_binding_0)) + crate::Type::Array(_binding_0) => { + crate::Type::Array(f.fold_type_array(_binding_0)) + } + crate::Type::BareFn(_binding_0) => { + crate::Type::BareFn(f.fold_type_bare_fn(_binding_0)) } - Type::Infer(_binding_0) => Type::Infer(f.fold_type_infer(_binding_0)), - Type::Macro(_binding_0) => Type::Macro(f.fold_type_macro(_binding_0)), - Type::Never(_binding_0) => Type::Never(f.fold_type_never(_binding_0)), - Type::Paren(_binding_0) => Type::Paren(f.fold_type_paren(_binding_0)), - Type::Path(_binding_0) => Type::Path(f.fold_type_path(_binding_0)), - Type::Ptr(_binding_0) => 
Type::Ptr(f.fold_type_ptr(_binding_0)), - Type::Reference(_binding_0) => Type::Reference(f.fold_type_reference(_binding_0)), - Type::Slice(_binding_0) => Type::Slice(f.fold_type_slice(_binding_0)), - Type::TraitObject(_binding_0) => { - Type::TraitObject(f.fold_type_trait_object(_binding_0)) + crate::Type::Group(_binding_0) => { + crate::Type::Group(f.fold_type_group(_binding_0)) + } + crate::Type::ImplTrait(_binding_0) => { + crate::Type::ImplTrait(f.fold_type_impl_trait(_binding_0)) + } + crate::Type::Infer(_binding_0) => { + crate::Type::Infer(f.fold_type_infer(_binding_0)) + } + crate::Type::Macro(_binding_0) => { + crate::Type::Macro(f.fold_type_macro(_binding_0)) + } + crate::Type::Never(_binding_0) => { + crate::Type::Never(f.fold_type_never(_binding_0)) + } + crate::Type::Paren(_binding_0) => { + crate::Type::Paren(f.fold_type_paren(_binding_0)) + } + crate::Type::Path(_binding_0) => crate::Type::Path(f.fold_type_path(_binding_0)), + crate::Type::Ptr(_binding_0) => crate::Type::Ptr(f.fold_type_ptr(_binding_0)), + crate::Type::Reference(_binding_0) => { + crate::Type::Reference(f.fold_type_reference(_binding_0)) + } + crate::Type::Slice(_binding_0) => { + crate::Type::Slice(f.fold_type_slice(_binding_0)) + } + crate::Type::TraitObject(_binding_0) => { + crate::Type::TraitObject(f.fold_type_trait_object(_binding_0)) + } + crate::Type::Tuple(_binding_0) => { + crate::Type::Tuple(f.fold_type_tuple(_binding_0)) + } + crate::Type::Verbatim(_binding_0) => { + crate::Type::Verbatim(f.fold_token_stream(_binding_0)) } - Type::Tuple(_binding_0) => Type::Tuple(f.fold_type_tuple(_binding_0)), - Type::Verbatim(_binding_0) => Type::Verbatim(_binding_0), } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_type_array<F>(f: &mut F, node: TypeArray) -> TypeArray +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_type_array<F>(f: &mut F, node: crate::TypeArray) -> crate::TypeArray where F: Fold + ?Sized, { - TypeArray { + crate::TypeArray { bracket_token: node.bracket_token, elem: Box::new(f.fold_type(*node.elem)), semi_token: node.semi_token, @@ -2805,123 +3517,147 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_type_bare_fn<F>(f: &mut F, node: TypeBareFn) -> TypeBareFn +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_type_bare_fn<F>(f: &mut F, node: crate::TypeBareFn) -> crate::TypeBareFn where F: Fold + ?Sized, { - TypeBareFn { + crate::TypeBareFn { lifetimes: (node.lifetimes).map(|it| f.fold_bound_lifetimes(it)), unsafety: node.unsafety, abi: (node.abi).map(|it| f.fold_abi(it)), fn_token: node.fn_token, paren_token: node.paren_token, - inputs: FoldHelper::lift(node.inputs, |it| f.fold_bare_fn_arg(it)), + inputs: crate::punctuated::fold(node.inputs, f, F::fold_bare_fn_arg), variadic: (node.variadic).map(|it| f.fold_bare_variadic(it)), output: f.fold_return_type(node.output), } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_type_group<F>(f: &mut F, node: TypeGroup) -> TypeGroup +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_type_group<F>(f: &mut F, node: crate::TypeGroup) -> crate::TypeGroup where F: Fold + ?Sized, { - TypeGroup { + crate::TypeGroup { group_token: node.group_token, elem: Box::new(f.fold_type(*node.elem)), } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_type_impl_trait<F>(f: &mut F, node: TypeImplTrait) -> TypeImplTrait +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] 
+pub fn fold_type_impl_trait<F>( + f: &mut F, + node: crate::TypeImplTrait, +) -> crate::TypeImplTrait where F: Fold + ?Sized, { - TypeImplTrait { + crate::TypeImplTrait { impl_token: node.impl_token, - bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)), + bounds: crate::punctuated::fold(node.bounds, f, F::fold_type_param_bound), } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_type_infer<F>(f: &mut F, node: TypeInfer) -> TypeInfer +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_type_infer<F>(f: &mut F, node: crate::TypeInfer) -> crate::TypeInfer where F: Fold + ?Sized, { - TypeInfer { + crate::TypeInfer { underscore_token: node.underscore_token, } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_type_macro<F>(f: &mut F, node: TypeMacro) -> TypeMacro +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_type_macro<F>(f: &mut F, node: crate::TypeMacro) -> crate::TypeMacro where F: Fold + ?Sized, { - TypeMacro { + crate::TypeMacro { mac: f.fold_macro(node.mac), } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_type_never<F>(f: &mut F, node: TypeNever) -> TypeNever +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_type_never<F>(f: &mut F, node: crate::TypeNever) -> crate::TypeNever where F: Fold + ?Sized, { - TypeNever { + crate::TypeNever { bang_token: node.bang_token, } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_type_param<F>(f: &mut F, node: TypeParam) -> TypeParam +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_type_param<F>(f: &mut F, node: crate::TypeParam) -> crate::TypeParam where F: Fold + ?Sized, { - TypeParam { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::TypeParam { + attrs: f.fold_attributes(node.attrs), ident: f.fold_ident(node.ident), colon_token: node.colon_token, - bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)), + bounds: crate::punctuated::fold(node.bounds, f, F::fold_type_param_bound), eq_token: node.eq_token, default: (node.default).map(|it| f.fold_type(it)), } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_type_param_bound<F>(f: &mut F, node: TypeParamBound) -> TypeParamBound +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_type_param_bound<F>( + f: &mut F, + node: crate::TypeParamBound, +) -> crate::TypeParamBound where F: Fold + ?Sized, { match node { - TypeParamBound::Trait(_binding_0) => { - TypeParamBound::Trait(f.fold_trait_bound(_binding_0)) + crate::TypeParamBound::Trait(_binding_0) => { + crate::TypeParamBound::Trait(f.fold_trait_bound(_binding_0)) + } + crate::TypeParamBound::Lifetime(_binding_0) => { + crate::TypeParamBound::Lifetime(f.fold_lifetime(_binding_0)) + } + crate::TypeParamBound::PreciseCapture(_binding_0) => { + crate::TypeParamBound::PreciseCapture( + full!(f.fold_precise_capture(_binding_0)), + ) } - TypeParamBound::Lifetime(_binding_0) => { - TypeParamBound::Lifetime(f.fold_lifetime(_binding_0)) + crate::TypeParamBound::Verbatim(_binding_0) => { + crate::TypeParamBound::Verbatim(f.fold_token_stream(_binding_0)) } - TypeParamBound::Verbatim(_binding_0) => TypeParamBound::Verbatim(_binding_0), } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_type_paren<F>(f: &mut F, node: TypeParen) -> TypeParen +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn 
fold_type_paren<F>(f: &mut F, node: crate::TypeParen) -> crate::TypeParen where F: Fold + ?Sized, { - TypeParen { + crate::TypeParen { paren_token: node.paren_token, elem: Box::new(f.fold_type(*node.elem)), } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_type_path<F>(f: &mut F, node: TypePath) -> TypePath +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_type_path<F>(f: &mut F, node: crate::TypePath) -> crate::TypePath where F: Fold + ?Sized, { - TypePath { + crate::TypePath { qself: (node.qself).map(|it| f.fold_qself(it)), path: f.fold_path(node.path), } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_type_ptr<F>(f: &mut F, node: TypePtr) -> TypePtr +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_type_ptr<F>(f: &mut F, node: crate::TypePtr) -> crate::TypePtr where F: Fold + ?Sized, { - TypePtr { + crate::TypePtr { star_token: node.star_token, const_token: node.const_token, mutability: node.mutability, @@ -2929,11 +3665,15 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_type_reference<F>(f: &mut F, node: TypeReference) -> TypeReference +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_type_reference<F>( + f: &mut F, + node: crate::TypeReference, +) -> crate::TypeReference where F: Fold + ?Sized, { - TypeReference { + crate::TypeReference { and_token: node.and_token, lifetime: (node.lifetime).map(|it| f.fold_lifetime(it)), mutability: node.mutability, @@ -2941,139 +3681,168 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_type_slice<F>(f: &mut F, node: TypeSlice) -> TypeSlice +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_type_slice<F>(f: &mut F, node: crate::TypeSlice) -> crate::TypeSlice where F: Fold + ?Sized, { - TypeSlice { + crate::TypeSlice { bracket_token: node.bracket_token, elem: Box::new(f.fold_type(*node.elem)), } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_type_trait_object<F>(f: &mut F, node: TypeTraitObject) -> TypeTraitObject +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_type_trait_object<F>( + f: &mut F, + node: crate::TypeTraitObject, +) -> crate::TypeTraitObject where F: Fold + ?Sized, { - TypeTraitObject { + crate::TypeTraitObject { dyn_token: node.dyn_token, - bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)), + bounds: crate::punctuated::fold(node.bounds, f, F::fold_type_param_bound), } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_type_tuple<F>(f: &mut F, node: TypeTuple) -> TypeTuple +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_type_tuple<F>(f: &mut F, node: crate::TypeTuple) -> crate::TypeTuple where F: Fold + ?Sized, { - TypeTuple { + crate::TypeTuple { paren_token: node.paren_token, - elems: FoldHelper::lift(node.elems, |it| f.fold_type(it)), + elems: crate::punctuated::fold(node.elems, f, F::fold_type), } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_un_op<F>(f: &mut F, node: UnOp) -> UnOp +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_un_op<F>(f: &mut F, node: crate::UnOp) -> crate::UnOp where F: Fold + ?Sized, { match node { - UnOp::Deref(_binding_0) => UnOp::Deref(_binding_0), - UnOp::Not(_binding_0) => UnOp::Not(_binding_0), - UnOp::Neg(_binding_0) => UnOp::Neg(_binding_0), + crate::UnOp::Deref(_binding_0) => 
crate::UnOp::Deref(_binding_0), + crate::UnOp::Not(_binding_0) => crate::UnOp::Not(_binding_0), + crate::UnOp::Neg(_binding_0) => crate::UnOp::Neg(_binding_0), } } #[cfg(feature = "full")] -pub fn fold_use_glob<F>(f: &mut F, node: UseGlob) -> UseGlob +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_use_glob<F>(f: &mut F, node: crate::UseGlob) -> crate::UseGlob where F: Fold + ?Sized, { - UseGlob { + crate::UseGlob { star_token: node.star_token, } } #[cfg(feature = "full")] -pub fn fold_use_group<F>(f: &mut F, node: UseGroup) -> UseGroup +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_use_group<F>(f: &mut F, node: crate::UseGroup) -> crate::UseGroup where F: Fold + ?Sized, { - UseGroup { + crate::UseGroup { brace_token: node.brace_token, - items: FoldHelper::lift(node.items, |it| f.fold_use_tree(it)), + items: crate::punctuated::fold(node.items, f, F::fold_use_tree), } } #[cfg(feature = "full")] -pub fn fold_use_name<F>(f: &mut F, node: UseName) -> UseName +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_use_name<F>(f: &mut F, node: crate::UseName) -> crate::UseName where F: Fold + ?Sized, { - UseName { + crate::UseName { ident: f.fold_ident(node.ident), } } #[cfg(feature = "full")] -pub fn fold_use_path<F>(f: &mut F, node: UsePath) -> UsePath +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_use_path<F>(f: &mut F, node: crate::UsePath) -> crate::UsePath where F: Fold + ?Sized, { - UsePath { + crate::UsePath { ident: f.fold_ident(node.ident), colon2_token: node.colon2_token, tree: Box::new(f.fold_use_tree(*node.tree)), } } #[cfg(feature = "full")] -pub fn fold_use_rename<F>(f: &mut F, node: UseRename) -> UseRename +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_use_rename<F>(f: &mut F, node: crate::UseRename) -> crate::UseRename where F: Fold + ?Sized, { - UseRename { + crate::UseRename { ident: f.fold_ident(node.ident), as_token: node.as_token, rename: f.fold_ident(node.rename), } } #[cfg(feature = "full")] -pub fn fold_use_tree<F>(f: &mut F, node: UseTree) -> UseTree +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_use_tree<F>(f: &mut F, node: crate::UseTree) -> crate::UseTree where F: Fold + ?Sized, { match node { - UseTree::Path(_binding_0) => UseTree::Path(f.fold_use_path(_binding_0)), - UseTree::Name(_binding_0) => UseTree::Name(f.fold_use_name(_binding_0)), - UseTree::Rename(_binding_0) => UseTree::Rename(f.fold_use_rename(_binding_0)), - UseTree::Glob(_binding_0) => UseTree::Glob(f.fold_use_glob(_binding_0)), - UseTree::Group(_binding_0) => UseTree::Group(f.fold_use_group(_binding_0)), + crate::UseTree::Path(_binding_0) => { + crate::UseTree::Path(f.fold_use_path(_binding_0)) + } + crate::UseTree::Name(_binding_0) => { + crate::UseTree::Name(f.fold_use_name(_binding_0)) + } + crate::UseTree::Rename(_binding_0) => { + crate::UseTree::Rename(f.fold_use_rename(_binding_0)) + } + crate::UseTree::Glob(_binding_0) => { + crate::UseTree::Glob(f.fold_use_glob(_binding_0)) + } + crate::UseTree::Group(_binding_0) => { + crate::UseTree::Group(f.fold_use_group(_binding_0)) + } } } #[cfg(feature = "full")] -pub fn fold_variadic<F>(f: &mut F, node: Variadic) -> Variadic +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn fold_variadic<F>(f: &mut F, node: crate::Variadic) -> crate::Variadic where F: Fold + ?Sized, { - Variadic { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::Variadic { + attrs: f.fold_attributes(node.attrs), pat: (node.pat).map(|it| (Box::new(f.fold_pat(*(it).0)), 
(it).1)), dots: node.dots, comma: node.comma, } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_variant<F>(f: &mut F, node: Variant) -> Variant +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_variant<F>(f: &mut F, node: crate::Variant) -> crate::Variant where F: Fold + ?Sized, { - Variant { - attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + crate::Variant { + attrs: f.fold_attributes(node.attrs), ident: f.fold_ident(node.ident), fields: f.fold_fields(node.fields), discriminant: (node.discriminant).map(|it| ((it).0, f.fold_expr((it).1))), } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_vis_restricted<F>(f: &mut F, node: VisRestricted) -> VisRestricted +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_vis_restricted<F>( + f: &mut F, + node: crate::VisRestricted, +) -> crate::VisRestricted where F: Fold + ?Sized, { - VisRestricted { + crate::VisRestricted { pub_token: node.pub_token, paren_token: node.paren_token, in_token: node.in_token, @@ -3081,39 +3850,53 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_visibility<F>(f: &mut F, node: Visibility) -> Visibility +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_visibility<F>(f: &mut F, node: crate::Visibility) -> crate::Visibility where F: Fold + ?Sized, { match node { - Visibility::Public(_binding_0) => Visibility::Public(_binding_0), - Visibility::Restricted(_binding_0) => { - Visibility::Restricted(f.fold_vis_restricted(_binding_0)) + crate::Visibility::Public(_binding_0) => crate::Visibility::Public(_binding_0), + crate::Visibility::Restricted(_binding_0) => { + crate::Visibility::Restricted(f.fold_vis_restricted(_binding_0)) } - Visibility::Inherited => Visibility::Inherited, + crate::Visibility::Inherited => crate::Visibility::Inherited, } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_where_clause<F>(f: &mut F, node: WhereClause) -> WhereClause +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_where_clause<F>(f: &mut F, node: crate::WhereClause) -> crate::WhereClause where F: Fold + ?Sized, { - WhereClause { + crate::WhereClause { where_token: node.where_token, - predicates: FoldHelper::lift(node.predicates, |it| f.fold_where_predicate(it)), + predicates: crate::punctuated::fold(node.predicates, f, F::fold_where_predicate), } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn fold_where_predicate<F>(f: &mut F, node: WherePredicate) -> WherePredicate +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn fold_where_predicate<F>( + f: &mut F, + node: crate::WherePredicate, +) -> crate::WherePredicate where F: Fold + ?Sized, { match node { - WherePredicate::Lifetime(_binding_0) => { - WherePredicate::Lifetime(f.fold_predicate_lifetime(_binding_0)) + crate::WherePredicate::Lifetime(_binding_0) => { + crate::WherePredicate::Lifetime(f.fold_predicate_lifetime(_binding_0)) } - WherePredicate::Type(_binding_0) => { - WherePredicate::Type(f.fold_predicate_type(_binding_0)) + crate::WherePredicate::Type(_binding_0) => { + crate::WherePredicate::Type(f.fold_predicate_type(_binding_0)) } } } +#[cfg(any(feature = "derive", feature = "full"))] +fn fold_vec<T, V, F>(vec: Vec<T>, fold: &mut V, mut f: F) -> Vec<T> +where + V: ?Sized, + F: FnMut(&mut V, T) -> T, +{ + vec.into_iter().map(|it| f(fold, it)).collect() +} diff --git a/vendor/syn/src/gen/hash.rs 
b/vendor/syn/src/gen/hash.rs index 40dfc57f..04f23453 100644 --- a/vendor/syn/src/gen/hash.rs +++ b/vendor/syn/src/gen/hash.rs @@ -3,11 +3,10 @@ #[cfg(any(feature = "derive", feature = "full"))] use crate::tt::TokenStreamHelper; -use crate::*; use std::hash::{Hash, Hasher}; #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for Abi { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::Abi { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -16,8 +15,8 @@ impl Hash for Abi { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for AngleBracketedGenericArguments { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::AngleBracketedGenericArguments { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -27,8 +26,8 @@ impl Hash for AngleBracketedGenericArguments { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for Arm { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::Arm { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -41,8 +40,8 @@ impl Hash for Arm { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for AssocConst { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::AssocConst { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -53,8 +52,8 @@ impl Hash for AssocConst { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for AssocType { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::AssocType { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -65,25 +64,25 @@ impl Hash for AssocType { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for AttrStyle { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::AttrStyle { fn hash<H>(&self, state: &mut H) where H: Hasher, { match self { - AttrStyle::Outer => { + crate::AttrStyle::Outer => { state.write_u8(0u8); } - AttrStyle::Inner(_) => { + crate::AttrStyle::Inner(_) => { state.write_u8(1u8); } } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for Attribute { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::Attribute { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -93,8 +92,8 @@ impl Hash for Attribute { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for BareFnArg { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::BareFnArg { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -105,8 +104,8 @@ impl Hash for BareFnArg { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for BareVariadic { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::BareVariadic { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -117,103 +116,103 @@ impl Hash for BareVariadic { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for BinOp { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for 
crate::BinOp { fn hash<H>(&self, state: &mut H) where H: Hasher, { match self { - BinOp::Add(_) => { + crate::BinOp::Add(_) => { state.write_u8(0u8); } - BinOp::Sub(_) => { + crate::BinOp::Sub(_) => { state.write_u8(1u8); } - BinOp::Mul(_) => { + crate::BinOp::Mul(_) => { state.write_u8(2u8); } - BinOp::Div(_) => { + crate::BinOp::Div(_) => { state.write_u8(3u8); } - BinOp::Rem(_) => { + crate::BinOp::Rem(_) => { state.write_u8(4u8); } - BinOp::And(_) => { + crate::BinOp::And(_) => { state.write_u8(5u8); } - BinOp::Or(_) => { + crate::BinOp::Or(_) => { state.write_u8(6u8); } - BinOp::BitXor(_) => { + crate::BinOp::BitXor(_) => { state.write_u8(7u8); } - BinOp::BitAnd(_) => { + crate::BinOp::BitAnd(_) => { state.write_u8(8u8); } - BinOp::BitOr(_) => { + crate::BinOp::BitOr(_) => { state.write_u8(9u8); } - BinOp::Shl(_) => { + crate::BinOp::Shl(_) => { state.write_u8(10u8); } - BinOp::Shr(_) => { + crate::BinOp::Shr(_) => { state.write_u8(11u8); } - BinOp::Eq(_) => { + crate::BinOp::Eq(_) => { state.write_u8(12u8); } - BinOp::Lt(_) => { + crate::BinOp::Lt(_) => { state.write_u8(13u8); } - BinOp::Le(_) => { + crate::BinOp::Le(_) => { state.write_u8(14u8); } - BinOp::Ne(_) => { + crate::BinOp::Ne(_) => { state.write_u8(15u8); } - BinOp::Ge(_) => { + crate::BinOp::Ge(_) => { state.write_u8(16u8); } - BinOp::Gt(_) => { + crate::BinOp::Gt(_) => { state.write_u8(17u8); } - BinOp::AddAssign(_) => { + crate::BinOp::AddAssign(_) => { state.write_u8(18u8); } - BinOp::SubAssign(_) => { + crate::BinOp::SubAssign(_) => { state.write_u8(19u8); } - BinOp::MulAssign(_) => { + crate::BinOp::MulAssign(_) => { state.write_u8(20u8); } - BinOp::DivAssign(_) => { + crate::BinOp::DivAssign(_) => { state.write_u8(21u8); } - BinOp::RemAssign(_) => { + crate::BinOp::RemAssign(_) => { state.write_u8(22u8); } - BinOp::BitXorAssign(_) => { + crate::BinOp::BitXorAssign(_) => { state.write_u8(23u8); } - BinOp::BitAndAssign(_) => { + crate::BinOp::BitAndAssign(_) => { state.write_u8(24u8); } - BinOp::BitOrAssign(_) => { + crate::BinOp::BitOrAssign(_) => { state.write_u8(25u8); } - BinOp::ShlAssign(_) => { + crate::BinOp::ShlAssign(_) => { state.write_u8(26u8); } - BinOp::ShrAssign(_) => { + crate::BinOp::ShrAssign(_) => { state.write_u8(27u8); } } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for Block { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::Block { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -222,8 +221,8 @@ impl Hash for Block { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for BoundLifetimes { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::BoundLifetimes { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -231,9 +230,28 @@ impl Hash for BoundLifetimes { self.lifetimes.hash(state); } } +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::CapturedParam { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + match self { + crate::CapturedParam::Lifetime(v0) => { + state.write_u8(0u8); + v0.hash(state); + } + crate::CapturedParam::Ident(v0) => { + state.write_u8(1u8); + v0.hash(state); + } + } + } +} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ConstParam { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ConstParam { fn hash<H>(&self, state: &mut 
H) where H: Hasher, @@ -246,8 +264,8 @@ impl Hash for ConstParam { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for Constraint { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::Constraint { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -258,22 +276,22 @@ impl Hash for Constraint { } } #[cfg(feature = "derive")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for Data { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::Data { fn hash<H>(&self, state: &mut H) where H: Hasher, { match self { - Data::Struct(v0) => { + crate::Data::Struct(v0) => { state.write_u8(0u8); v0.hash(state); } - Data::Enum(v0) => { + crate::Data::Enum(v0) => { state.write_u8(1u8); v0.hash(state); } - Data::Union(v0) => { + crate::Data::Union(v0) => { state.write_u8(2u8); v0.hash(state); } @@ -281,8 +299,8 @@ impl Hash for Data { } } #[cfg(feature = "derive")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for DataEnum { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::DataEnum { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -291,8 +309,8 @@ impl Hash for DataEnum { } } #[cfg(feature = "derive")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for DataStruct { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::DataStruct { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -302,8 +320,8 @@ impl Hash for DataStruct { } } #[cfg(feature = "derive")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for DataUnion { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::DataUnion { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -312,8 +330,8 @@ impl Hash for DataUnion { } } #[cfg(feature = "derive")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for DeriveInput { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::DeriveInput { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -326,194 +344,195 @@ impl Hash for DeriveInput { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for Expr { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::Expr { fn hash<H>(&self, state: &mut H) where H: Hasher, { match self { #[cfg(feature = "full")] - Expr::Array(v0) => { + crate::Expr::Array(v0) => { state.write_u8(0u8); v0.hash(state); } #[cfg(feature = "full")] - Expr::Assign(v0) => { + crate::Expr::Assign(v0) => { state.write_u8(1u8); v0.hash(state); } #[cfg(feature = "full")] - Expr::Async(v0) => { + crate::Expr::Async(v0) => { state.write_u8(2u8); v0.hash(state); } #[cfg(feature = "full")] - Expr::Await(v0) => { + crate::Expr::Await(v0) => { state.write_u8(3u8); v0.hash(state); } - Expr::Binary(v0) => { + crate::Expr::Binary(v0) => { state.write_u8(4u8); v0.hash(state); } #[cfg(feature = "full")] - Expr::Block(v0) => { + crate::Expr::Block(v0) => { state.write_u8(5u8); v0.hash(state); } #[cfg(feature = "full")] - Expr::Break(v0) => { + crate::Expr::Break(v0) => { state.write_u8(6u8); v0.hash(state); } - Expr::Call(v0) => { + crate::Expr::Call(v0) => { state.write_u8(7u8); v0.hash(state); } - Expr::Cast(v0) => { + crate::Expr::Cast(v0) => { state.write_u8(8u8); v0.hash(state); } #[cfg(feature = "full")] - Expr::Closure(v0) => { + crate::Expr::Closure(v0) => { 
state.write_u8(9u8); v0.hash(state); } #[cfg(feature = "full")] - Expr::Const(v0) => { + crate::Expr::Const(v0) => { state.write_u8(10u8); v0.hash(state); } #[cfg(feature = "full")] - Expr::Continue(v0) => { + crate::Expr::Continue(v0) => { state.write_u8(11u8); v0.hash(state); } - Expr::Field(v0) => { + crate::Expr::Field(v0) => { state.write_u8(12u8); v0.hash(state); } #[cfg(feature = "full")] - Expr::ForLoop(v0) => { + crate::Expr::ForLoop(v0) => { state.write_u8(13u8); v0.hash(state); } - Expr::Group(v0) => { + crate::Expr::Group(v0) => { state.write_u8(14u8); v0.hash(state); } #[cfg(feature = "full")] - Expr::If(v0) => { + crate::Expr::If(v0) => { state.write_u8(15u8); v0.hash(state); } - Expr::Index(v0) => { + crate::Expr::Index(v0) => { state.write_u8(16u8); v0.hash(state); } #[cfg(feature = "full")] - Expr::Infer(v0) => { + crate::Expr::Infer(v0) => { state.write_u8(17u8); v0.hash(state); } #[cfg(feature = "full")] - Expr::Let(v0) => { + crate::Expr::Let(v0) => { state.write_u8(18u8); v0.hash(state); } - Expr::Lit(v0) => { + crate::Expr::Lit(v0) => { state.write_u8(19u8); v0.hash(state); } #[cfg(feature = "full")] - Expr::Loop(v0) => { + crate::Expr::Loop(v0) => { state.write_u8(20u8); v0.hash(state); } - Expr::Macro(v0) => { + crate::Expr::Macro(v0) => { state.write_u8(21u8); v0.hash(state); } #[cfg(feature = "full")] - Expr::Match(v0) => { + crate::Expr::Match(v0) => { state.write_u8(22u8); v0.hash(state); } - #[cfg(feature = "full")] - Expr::MethodCall(v0) => { + crate::Expr::MethodCall(v0) => { state.write_u8(23u8); v0.hash(state); } - Expr::Paren(v0) => { + crate::Expr::Paren(v0) => { state.write_u8(24u8); v0.hash(state); } - Expr::Path(v0) => { + crate::Expr::Path(v0) => { state.write_u8(25u8); v0.hash(state); } #[cfg(feature = "full")] - Expr::Range(v0) => { + crate::Expr::Range(v0) => { state.write_u8(26u8); v0.hash(state); } #[cfg(feature = "full")] - Expr::Reference(v0) => { + crate::Expr::RawAddr(v0) => { state.write_u8(27u8); v0.hash(state); } - #[cfg(feature = "full")] - Expr::Repeat(v0) => { + crate::Expr::Reference(v0) => { state.write_u8(28u8); v0.hash(state); } #[cfg(feature = "full")] - Expr::Return(v0) => { + crate::Expr::Repeat(v0) => { state.write_u8(29u8); v0.hash(state); } #[cfg(feature = "full")] - Expr::Struct(v0) => { + crate::Expr::Return(v0) => { state.write_u8(30u8); v0.hash(state); } - #[cfg(feature = "full")] - Expr::Try(v0) => { + crate::Expr::Struct(v0) => { state.write_u8(31u8); v0.hash(state); } #[cfg(feature = "full")] - Expr::TryBlock(v0) => { + crate::Expr::Try(v0) => { state.write_u8(32u8); v0.hash(state); } #[cfg(feature = "full")] - Expr::Tuple(v0) => { + crate::Expr::TryBlock(v0) => { state.write_u8(33u8); v0.hash(state); } - Expr::Unary(v0) => { + crate::Expr::Tuple(v0) => { state.write_u8(34u8); v0.hash(state); } - #[cfg(feature = "full")] - Expr::Unsafe(v0) => { + crate::Expr::Unary(v0) => { state.write_u8(35u8); v0.hash(state); } - Expr::Verbatim(v0) => { + #[cfg(feature = "full")] + crate::Expr::Unsafe(v0) => { state.write_u8(36u8); + v0.hash(state); + } + crate::Expr::Verbatim(v0) => { + state.write_u8(37u8); TokenStreamHelper(v0).hash(state); } #[cfg(feature = "full")] - Expr::While(v0) => { - state.write_u8(37u8); + crate::Expr::While(v0) => { + state.write_u8(38u8); v0.hash(state); } #[cfg(feature = "full")] - Expr::Yield(v0) => { - state.write_u8(38u8); + crate::Expr::Yield(v0) => { + state.write_u8(39u8); v0.hash(state); } #[cfg(not(feature = "full"))] @@ -522,8 +541,8 @@ impl Hash for Expr { } } #[cfg(feature = "full")] 
-#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ExprArray { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ExprArray { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -533,8 +552,8 @@ impl Hash for ExprArray { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ExprAssign { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ExprAssign { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -545,8 +564,8 @@ impl Hash for ExprAssign { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ExprAsync { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ExprAsync { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -557,8 +576,8 @@ impl Hash for ExprAsync { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ExprAwait { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ExprAwait { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -568,8 +587,8 @@ impl Hash for ExprAwait { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ExprBinary { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ExprBinary { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -581,8 +600,8 @@ impl Hash for ExprBinary { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ExprBlock { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ExprBlock { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -593,8 +612,8 @@ impl Hash for ExprBlock { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ExprBreak { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ExprBreak { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -605,8 +624,8 @@ impl Hash for ExprBreak { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ExprCall { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ExprCall { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -617,8 +636,8 @@ impl Hash for ExprCall { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ExprCast { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ExprCast { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -629,8 +648,8 @@ impl Hash for ExprCast { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ExprClosure { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ExprClosure { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -647,8 +666,8 @@ impl Hash for ExprClosure { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ExprConst { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ExprConst { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -658,8 +677,8 @@ impl Hash for ExprConst { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ExprContinue { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for 
crate::ExprContinue { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -669,8 +688,8 @@ impl Hash for ExprContinue { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ExprField { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ExprField { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -681,8 +700,8 @@ impl Hash for ExprField { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ExprForLoop { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ExprForLoop { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -695,8 +714,8 @@ impl Hash for ExprForLoop { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ExprGroup { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ExprGroup { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -706,8 +725,8 @@ impl Hash for ExprGroup { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ExprIf { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ExprIf { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -719,8 +738,8 @@ impl Hash for ExprIf { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ExprIndex { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ExprIndex { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -731,8 +750,8 @@ impl Hash for ExprIndex { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ExprInfer { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ExprInfer { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -741,8 +760,8 @@ impl Hash for ExprInfer { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ExprLet { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ExprLet { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -753,8 +772,8 @@ impl Hash for ExprLet { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ExprLit { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ExprLit { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -764,8 +783,8 @@ impl Hash for ExprLit { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ExprLoop { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ExprLoop { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -776,8 +795,8 @@ impl Hash for ExprLoop { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ExprMacro { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ExprMacro { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -787,8 +806,8 @@ impl Hash for ExprMacro { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ExprMatch { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ExprMatch { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -798,9 +817,9 @@ impl Hash for ExprMatch { 
self.arms.hash(state); } } -#[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ExprMethodCall { +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ExprMethodCall { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -813,8 +832,8 @@ impl Hash for ExprMethodCall { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ExprParen { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ExprParen { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -824,8 +843,8 @@ impl Hash for ExprParen { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ExprPath { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ExprPath { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -836,8 +855,8 @@ impl Hash for ExprPath { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ExprRange { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ExprRange { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -849,8 +868,20 @@ impl Hash for ExprRange { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ExprReference { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ExprRawAddr { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.mutability.hash(state); + self.expr.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ExprReference { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -861,8 +892,8 @@ impl Hash for ExprReference { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ExprRepeat { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ExprRepeat { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -873,8 +904,8 @@ impl Hash for ExprRepeat { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ExprReturn { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ExprReturn { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -883,9 +914,9 @@ impl Hash for ExprReturn { self.expr.hash(state); } } -#[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ExprStruct { +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ExprStruct { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -899,8 +930,8 @@ impl Hash for ExprStruct { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ExprTry { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ExprTry { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -910,8 +941,8 @@ impl Hash for ExprTry { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ExprTryBlock { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ExprTryBlock { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -920,9 +951,9 @@ impl Hash for ExprTryBlock { 
self.block.hash(state); } } -#[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ExprTuple { +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ExprTuple { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -932,8 +963,8 @@ impl Hash for ExprTuple { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ExprUnary { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ExprUnary { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -944,8 +975,8 @@ impl Hash for ExprUnary { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ExprUnsafe { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ExprUnsafe { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -955,8 +986,8 @@ impl Hash for ExprUnsafe { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ExprWhile { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ExprWhile { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -968,8 +999,8 @@ impl Hash for ExprWhile { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ExprYield { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ExprYield { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -979,8 +1010,8 @@ impl Hash for ExprYield { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for Field { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::Field { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -994,22 +1025,22 @@ impl Hash for Field { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for FieldMutability { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::FieldMutability { fn hash<H>(&self, state: &mut H) where H: Hasher, { match self { - FieldMutability::None => { + crate::FieldMutability::None => { state.write_u8(0u8); } } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for FieldPat { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::FieldPat { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -1020,9 +1051,9 @@ impl Hash for FieldPat { self.pat.hash(state); } } -#[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for FieldValue { +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::FieldValue { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -1034,30 +1065,30 @@ impl Hash for FieldValue { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for Fields { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::Fields { fn hash<H>(&self, state: &mut H) where H: Hasher, { match self { - Fields::Named(v0) => { + crate::Fields::Named(v0) => { state.write_u8(0u8); v0.hash(state); } - Fields::Unnamed(v0) => { + crate::Fields::Unnamed(v0) => { state.write_u8(1u8); v0.hash(state); } - Fields::Unit => { + crate::Fields::Unit => { 
state.write_u8(2u8); } } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for FieldsNamed { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::FieldsNamed { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -1066,8 +1097,8 @@ impl Hash for FieldsNamed { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for FieldsUnnamed { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::FieldsUnnamed { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -1076,8 +1107,8 @@ impl Hash for FieldsUnnamed { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for File { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::File { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -1088,18 +1119,18 @@ impl Hash for File { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for FnArg { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::FnArg { fn hash<H>(&self, state: &mut H) where H: Hasher, { match self { - FnArg::Receiver(v0) => { + crate::FnArg::Receiver(v0) => { state.write_u8(0u8); v0.hash(state); } - FnArg::Typed(v0) => { + crate::FnArg::Typed(v0) => { state.write_u8(1u8); v0.hash(state); } @@ -1107,30 +1138,30 @@ impl Hash for FnArg { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ForeignItem { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ForeignItem { fn hash<H>(&self, state: &mut H) where H: Hasher, { match self { - ForeignItem::Fn(v0) => { + crate::ForeignItem::Fn(v0) => { state.write_u8(0u8); v0.hash(state); } - ForeignItem::Static(v0) => { + crate::ForeignItem::Static(v0) => { state.write_u8(1u8); v0.hash(state); } - ForeignItem::Type(v0) => { + crate::ForeignItem::Type(v0) => { state.write_u8(2u8); v0.hash(state); } - ForeignItem::Macro(v0) => { + crate::ForeignItem::Macro(v0) => { state.write_u8(3u8); v0.hash(state); } - ForeignItem::Verbatim(v0) => { + crate::ForeignItem::Verbatim(v0) => { state.write_u8(4u8); TokenStreamHelper(v0).hash(state); } @@ -1138,8 +1169,8 @@ impl Hash for ForeignItem { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ForeignItemFn { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ForeignItemFn { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -1150,8 +1181,8 @@ impl Hash for ForeignItemFn { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ForeignItemMacro { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ForeignItemMacro { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -1162,8 +1193,8 @@ impl Hash for ForeignItemMacro { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ForeignItemStatic { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ForeignItemStatic { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -1176,8 +1207,8 @@ impl Hash for ForeignItemStatic { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ForeignItemType { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ForeignItemType { fn 
hash<H>(&self, state: &mut H) where H: Hasher, @@ -1189,34 +1220,34 @@ impl Hash for ForeignItemType { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for GenericArgument { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::GenericArgument { fn hash<H>(&self, state: &mut H) where H: Hasher, { match self { - GenericArgument::Lifetime(v0) => { + crate::GenericArgument::Lifetime(v0) => { state.write_u8(0u8); v0.hash(state); } - GenericArgument::Type(v0) => { + crate::GenericArgument::Type(v0) => { state.write_u8(1u8); v0.hash(state); } - GenericArgument::Const(v0) => { + crate::GenericArgument::Const(v0) => { state.write_u8(2u8); v0.hash(state); } - GenericArgument::AssocType(v0) => { + crate::GenericArgument::AssocType(v0) => { state.write_u8(3u8); v0.hash(state); } - GenericArgument::AssocConst(v0) => { + crate::GenericArgument::AssocConst(v0) => { state.write_u8(4u8); v0.hash(state); } - GenericArgument::Constraint(v0) => { + crate::GenericArgument::Constraint(v0) => { state.write_u8(5u8); v0.hash(state); } @@ -1224,22 +1255,22 @@ impl Hash for GenericArgument { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for GenericParam { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::GenericParam { fn hash<H>(&self, state: &mut H) where H: Hasher, { match self { - GenericParam::Lifetime(v0) => { + crate::GenericParam::Lifetime(v0) => { state.write_u8(0u8); v0.hash(state); } - GenericParam::Type(v0) => { + crate::GenericParam::Type(v0) => { state.write_u8(1u8); v0.hash(state); } - GenericParam::Const(v0) => { + crate::GenericParam::Const(v0) => { state.write_u8(2u8); v0.hash(state); } @@ -1247,8 +1278,8 @@ impl Hash for GenericParam { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for Generics { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::Generics { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -1260,30 +1291,30 @@ impl Hash for Generics { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ImplItem { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ImplItem { fn hash<H>(&self, state: &mut H) where H: Hasher, { match self { - ImplItem::Const(v0) => { + crate::ImplItem::Const(v0) => { state.write_u8(0u8); v0.hash(state); } - ImplItem::Fn(v0) => { + crate::ImplItem::Fn(v0) => { state.write_u8(1u8); v0.hash(state); } - ImplItem::Type(v0) => { + crate::ImplItem::Type(v0) => { state.write_u8(2u8); v0.hash(state); } - ImplItem::Macro(v0) => { + crate::ImplItem::Macro(v0) => { state.write_u8(3u8); v0.hash(state); } - ImplItem::Verbatim(v0) => { + crate::ImplItem::Verbatim(v0) => { state.write_u8(4u8); TokenStreamHelper(v0).hash(state); } @@ -1291,8 +1322,8 @@ impl Hash for ImplItem { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ImplItemConst { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ImplItemConst { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -1307,8 +1338,8 @@ impl Hash for ImplItemConst { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ImplItemFn { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ImplItemFn { fn 
hash<H>(&self, state: &mut H) where H: Hasher, @@ -1321,8 +1352,8 @@ impl Hash for ImplItemFn { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ImplItemMacro { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ImplItemMacro { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -1333,8 +1364,8 @@ impl Hash for ImplItemMacro { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ImplItemType { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ImplItemType { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -1348,8 +1379,8 @@ impl Hash for ImplItemType { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ImplRestriction { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ImplRestriction { fn hash<H>(&self, _state: &mut H) where H: Hasher, @@ -1358,74 +1389,74 @@ impl Hash for ImplRestriction { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for Item { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::Item { fn hash<H>(&self, state: &mut H) where H: Hasher, { match self { - Item::Const(v0) => { + crate::Item::Const(v0) => { state.write_u8(0u8); v0.hash(state); } - Item::Enum(v0) => { + crate::Item::Enum(v0) => { state.write_u8(1u8); v0.hash(state); } - Item::ExternCrate(v0) => { + crate::Item::ExternCrate(v0) => { state.write_u8(2u8); v0.hash(state); } - Item::Fn(v0) => { + crate::Item::Fn(v0) => { state.write_u8(3u8); v0.hash(state); } - Item::ForeignMod(v0) => { + crate::Item::ForeignMod(v0) => { state.write_u8(4u8); v0.hash(state); } - Item::Impl(v0) => { + crate::Item::Impl(v0) => { state.write_u8(5u8); v0.hash(state); } - Item::Macro(v0) => { + crate::Item::Macro(v0) => { state.write_u8(6u8); v0.hash(state); } - Item::Mod(v0) => { + crate::Item::Mod(v0) => { state.write_u8(7u8); v0.hash(state); } - Item::Static(v0) => { + crate::Item::Static(v0) => { state.write_u8(8u8); v0.hash(state); } - Item::Struct(v0) => { + crate::Item::Struct(v0) => { state.write_u8(9u8); v0.hash(state); } - Item::Trait(v0) => { + crate::Item::Trait(v0) => { state.write_u8(10u8); v0.hash(state); } - Item::TraitAlias(v0) => { + crate::Item::TraitAlias(v0) => { state.write_u8(11u8); v0.hash(state); } - Item::Type(v0) => { + crate::Item::Type(v0) => { state.write_u8(12u8); v0.hash(state); } - Item::Union(v0) => { + crate::Item::Union(v0) => { state.write_u8(13u8); v0.hash(state); } - Item::Use(v0) => { + crate::Item::Use(v0) => { state.write_u8(14u8); v0.hash(state); } - Item::Verbatim(v0) => { + crate::Item::Verbatim(v0) => { state.write_u8(15u8); TokenStreamHelper(v0).hash(state); } @@ -1433,8 +1464,8 @@ impl Hash for Item { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ItemConst { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ItemConst { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -1448,8 +1479,8 @@ impl Hash for ItemConst { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ItemEnum { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ItemEnum { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -1462,8 +1493,8 @@ impl Hash for ItemEnum { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = 
"extra-traits")))] -impl Hash for ItemExternCrate { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ItemExternCrate { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -1475,8 +1506,8 @@ impl Hash for ItemExternCrate { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ItemFn { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ItemFn { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -1488,8 +1519,8 @@ impl Hash for ItemFn { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ItemForeignMod { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ItemForeignMod { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -1501,8 +1532,8 @@ impl Hash for ItemForeignMod { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ItemImpl { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ItemImpl { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -1517,8 +1548,8 @@ impl Hash for ItemImpl { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ItemMacro { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ItemMacro { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -1530,8 +1561,8 @@ impl Hash for ItemMacro { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ItemMod { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ItemMod { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -1545,8 +1576,8 @@ impl Hash for ItemMod { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ItemStatic { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ItemStatic { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -1560,8 +1591,8 @@ impl Hash for ItemStatic { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ItemStruct { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ItemStruct { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -1575,8 +1606,8 @@ impl Hash for ItemStruct { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ItemTrait { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ItemTrait { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -1594,8 +1625,8 @@ impl Hash for ItemTrait { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ItemTraitAlias { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ItemTraitAlias { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -1608,8 +1639,8 @@ impl Hash for ItemTraitAlias { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ItemType { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ItemType { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -1622,8 +1653,8 @@ impl Hash for ItemType { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ItemUnion { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ItemUnion { fn hash<H>(&self, state: &mut H) where H: Hasher, 
@@ -1636,8 +1667,8 @@ impl Hash for ItemUnion { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ItemUse { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ItemUse { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -1649,8 +1680,8 @@ impl Hash for ItemUse { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for Label { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::Label { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -1659,8 +1690,8 @@ impl Hash for Label { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for LifetimeParam { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::LifetimeParam { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -1671,50 +1702,54 @@ impl Hash for LifetimeParam { self.bounds.hash(state); } } -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for Lit { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::Lit { fn hash<H>(&self, state: &mut H) where H: Hasher, { match self { - Lit::Str(v0) => { + crate::Lit::Str(v0) => { state.write_u8(0u8); v0.hash(state); } - Lit::ByteStr(v0) => { + crate::Lit::ByteStr(v0) => { state.write_u8(1u8); v0.hash(state); } - Lit::Byte(v0) => { + crate::Lit::CStr(v0) => { state.write_u8(2u8); v0.hash(state); } - Lit::Char(v0) => { + crate::Lit::Byte(v0) => { state.write_u8(3u8); v0.hash(state); } - Lit::Int(v0) => { + crate::Lit::Char(v0) => { state.write_u8(4u8); v0.hash(state); } - Lit::Float(v0) => { + crate::Lit::Int(v0) => { state.write_u8(5u8); v0.hash(state); } - Lit::Bool(v0) => { + crate::Lit::Float(v0) => { state.write_u8(6u8); v0.hash(state); } - Lit::Verbatim(v0) => { + crate::Lit::Bool(v0) => { state.write_u8(7u8); + v0.hash(state); + } + crate::Lit::Verbatim(v0) => { + state.write_u8(8u8); v0.to_string().hash(state); } } } } -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for LitBool { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::LitBool { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -1723,8 +1758,8 @@ impl Hash for LitBool { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for Local { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::Local { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -1735,8 +1770,8 @@ impl Hash for Local { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for LocalInit { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::LocalInit { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -1746,8 +1781,8 @@ impl Hash for LocalInit { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for Macro { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::Macro { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -1758,42 +1793,42 @@ impl Hash for Macro { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for MacroDelimiter { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::MacroDelimiter { fn hash<H>(&self, state: &mut H) where H: Hasher, { match self { - MacroDelimiter::Paren(_) 
=> { + crate::MacroDelimiter::Paren(_) => { state.write_u8(0u8); } - MacroDelimiter::Brace(_) => { + crate::MacroDelimiter::Brace(_) => { state.write_u8(1u8); } - MacroDelimiter::Bracket(_) => { + crate::MacroDelimiter::Bracket(_) => { state.write_u8(2u8); } } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for Meta { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::Meta { fn hash<H>(&self, state: &mut H) where H: Hasher, { match self { - Meta::Path(v0) => { + crate::Meta::Path(v0) => { state.write_u8(0u8); v0.hash(state); } - Meta::List(v0) => { + crate::Meta::List(v0) => { state.write_u8(1u8); v0.hash(state); } - Meta::NameValue(v0) => { + crate::Meta::NameValue(v0) => { state.write_u8(2u8); v0.hash(state); } @@ -1801,8 +1836,8 @@ impl Hash for Meta { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for MetaList { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::MetaList { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -1813,8 +1848,8 @@ impl Hash for MetaList { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for MetaNameValue { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::MetaNameValue { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -1824,8 +1859,8 @@ impl Hash for MetaNameValue { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ParenthesizedGenericArguments { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ParenthesizedGenericArguments { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -1835,78 +1870,78 @@ impl Hash for ParenthesizedGenericArguments { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for Pat { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::Pat { fn hash<H>(&self, state: &mut H) where H: Hasher, { match self { - Pat::Const(v0) => { + crate::Pat::Const(v0) => { state.write_u8(0u8); v0.hash(state); } - Pat::Ident(v0) => { + crate::Pat::Ident(v0) => { state.write_u8(1u8); v0.hash(state); } - Pat::Lit(v0) => { + crate::Pat::Lit(v0) => { state.write_u8(2u8); v0.hash(state); } - Pat::Macro(v0) => { + crate::Pat::Macro(v0) => { state.write_u8(3u8); v0.hash(state); } - Pat::Or(v0) => { + crate::Pat::Or(v0) => { state.write_u8(4u8); v0.hash(state); } - Pat::Paren(v0) => { + crate::Pat::Paren(v0) => { state.write_u8(5u8); v0.hash(state); } - Pat::Path(v0) => { + crate::Pat::Path(v0) => { state.write_u8(6u8); v0.hash(state); } - Pat::Range(v0) => { + crate::Pat::Range(v0) => { state.write_u8(7u8); v0.hash(state); } - Pat::Reference(v0) => { + crate::Pat::Reference(v0) => { state.write_u8(8u8); v0.hash(state); } - Pat::Rest(v0) => { + crate::Pat::Rest(v0) => { state.write_u8(9u8); v0.hash(state); } - Pat::Slice(v0) => { + crate::Pat::Slice(v0) => { state.write_u8(10u8); v0.hash(state); } - Pat::Struct(v0) => { + crate::Pat::Struct(v0) => { state.write_u8(11u8); v0.hash(state); } - Pat::Tuple(v0) => { + crate::Pat::Tuple(v0) => { state.write_u8(12u8); v0.hash(state); } - Pat::TupleStruct(v0) => { + crate::Pat::TupleStruct(v0) => { state.write_u8(13u8); v0.hash(state); } - Pat::Type(v0) => { + crate::Pat::Type(v0) => { state.write_u8(14u8); v0.hash(state); } - 
Pat::Verbatim(v0) => { + crate::Pat::Verbatim(v0) => { state.write_u8(15u8); TokenStreamHelper(v0).hash(state); } - Pat::Wild(v0) => { + crate::Pat::Wild(v0) => { state.write_u8(16u8); v0.hash(state); } @@ -1914,8 +1949,8 @@ impl Hash for Pat { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for PatIdent { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::PatIdent { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -1928,8 +1963,8 @@ impl Hash for PatIdent { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for PatOr { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::PatOr { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -1940,8 +1975,8 @@ impl Hash for PatOr { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for PatParen { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::PatParen { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -1951,8 +1986,8 @@ impl Hash for PatParen { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for PatReference { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::PatReference { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -1963,8 +1998,8 @@ impl Hash for PatReference { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for PatRest { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::PatRest { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -1973,8 +2008,8 @@ impl Hash for PatRest { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for PatSlice { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::PatSlice { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -1984,8 +2019,8 @@ impl Hash for PatSlice { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for PatStruct { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::PatStruct { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -1998,8 +2033,8 @@ impl Hash for PatStruct { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for PatTuple { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::PatTuple { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -2009,8 +2044,8 @@ impl Hash for PatTuple { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for PatTupleStruct { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::PatTupleStruct { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -2022,8 +2057,8 @@ impl Hash for PatTupleStruct { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for PatType { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::PatType { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -2034,8 +2069,8 @@ impl Hash for PatType { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for PatWild { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::PatWild { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -2044,8 +2079,8 @@ impl Hash for 
PatWild { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for Path { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::Path { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -2055,21 +2090,21 @@ impl Hash for Path { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for PathArguments { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::PathArguments { fn hash<H>(&self, state: &mut H) where H: Hasher, { match self { - PathArguments::None => { + crate::PathArguments::None => { state.write_u8(0u8); } - PathArguments::AngleBracketed(v0) => { + crate::PathArguments::AngleBracketed(v0) => { state.write_u8(1u8); v0.hash(state); } - PathArguments::Parenthesized(v0) => { + crate::PathArguments::Parenthesized(v0) => { state.write_u8(2u8); v0.hash(state); } @@ -2077,8 +2112,8 @@ impl Hash for PathArguments { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for PathSegment { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::PathSegment { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -2087,9 +2122,36 @@ impl Hash for PathSegment { self.arguments.hash(state); } } +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::PointerMutability { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + match self { + crate::PointerMutability::Const(_) => { + state.write_u8(0u8); + } + crate::PointerMutability::Mut(_) => { + state.write_u8(1u8); + } + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::PreciseCapture { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.params.hash(state); + } +} #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for PredicateLifetime { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::PredicateLifetime { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -2099,8 +2161,8 @@ impl Hash for PredicateLifetime { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for PredicateType { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::PredicateType { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -2111,8 +2173,8 @@ impl Hash for PredicateType { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for QSelf { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::QSelf { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -2123,25 +2185,25 @@ impl Hash for QSelf { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for RangeLimits { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::RangeLimits { fn hash<H>(&self, state: &mut H) where H: Hasher, { match self { - RangeLimits::HalfOpen(_) => { + crate::RangeLimits::HalfOpen(_) => { state.write_u8(0u8); } - RangeLimits::Closed(_) => { + crate::RangeLimits::Closed(_) => { state.write_u8(1u8); } } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for Receiver { +#[cfg_attr(docsrs, 
doc(cfg(feature = "extra-traits")))] +impl Hash for crate::Receiver { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -2154,17 +2216,17 @@ impl Hash for Receiver { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for ReturnType { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::ReturnType { fn hash<H>(&self, state: &mut H) where H: Hasher, { match self { - ReturnType::Default => { + crate::ReturnType::Default => { state.write_u8(0u8); } - ReturnType::Type(_, v1) => { + crate::ReturnType::Type(_, v1) => { state.write_u8(1u8); v1.hash(state); } @@ -2172,8 +2234,8 @@ impl Hash for ReturnType { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for Signature { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::Signature { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -2190,44 +2252,44 @@ impl Hash for Signature { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for StaticMutability { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::StaticMutability { fn hash<H>(&self, state: &mut H) where H: Hasher, { match self { - StaticMutability::Mut(_) => { + crate::StaticMutability::Mut(_) => { state.write_u8(0u8); } - StaticMutability::None => { + crate::StaticMutability::None => { state.write_u8(1u8); } } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for Stmt { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::Stmt { fn hash<H>(&self, state: &mut H) where H: Hasher, { match self { - Stmt::Local(v0) => { + crate::Stmt::Local(v0) => { state.write_u8(0u8); v0.hash(state); } - Stmt::Item(v0) => { + crate::Stmt::Item(v0) => { state.write_u8(1u8); v0.hash(state); } - Stmt::Expr(v0, v1) => { + crate::Stmt::Expr(v0, v1) => { state.write_u8(2u8); v0.hash(state); v1.hash(state); } - Stmt::Macro(v0) => { + crate::Stmt::Macro(v0) => { state.write_u8(3u8); v0.hash(state); } @@ -2235,8 +2297,8 @@ impl Hash for Stmt { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for StmtMacro { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::StmtMacro { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -2247,8 +2309,8 @@ impl Hash for StmtMacro { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for TraitBound { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::TraitBound { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -2260,47 +2322,47 @@ impl Hash for TraitBound { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for TraitBoundModifier { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::TraitBoundModifier { fn hash<H>(&self, state: &mut H) where H: Hasher, { match self { - TraitBoundModifier::None => { + crate::TraitBoundModifier::None => { state.write_u8(0u8); } - TraitBoundModifier::Maybe(_) => { + crate::TraitBoundModifier::Maybe(_) => { state.write_u8(1u8); } } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for TraitItem { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::TraitItem { fn hash<H>(&self, state: &mut 
H) where H: Hasher, { match self { - TraitItem::Const(v0) => { + crate::TraitItem::Const(v0) => { state.write_u8(0u8); v0.hash(state); } - TraitItem::Fn(v0) => { + crate::TraitItem::Fn(v0) => { state.write_u8(1u8); v0.hash(state); } - TraitItem::Type(v0) => { + crate::TraitItem::Type(v0) => { state.write_u8(2u8); v0.hash(state); } - TraitItem::Macro(v0) => { + crate::TraitItem::Macro(v0) => { state.write_u8(3u8); v0.hash(state); } - TraitItem::Verbatim(v0) => { + crate::TraitItem::Verbatim(v0) => { state.write_u8(4u8); TokenStreamHelper(v0).hash(state); } @@ -2308,8 +2370,8 @@ impl Hash for TraitItem { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for TraitItemConst { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::TraitItemConst { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -2322,8 +2384,8 @@ impl Hash for TraitItemConst { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for TraitItemFn { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::TraitItemFn { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -2335,8 +2397,8 @@ impl Hash for TraitItemFn { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for TraitItemMacro { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::TraitItemMacro { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -2347,8 +2409,8 @@ impl Hash for TraitItemMacro { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for TraitItemType { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::TraitItemType { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -2362,70 +2424,70 @@ impl Hash for TraitItemType { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for Type { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::Type { fn hash<H>(&self, state: &mut H) where H: Hasher, { match self { - Type::Array(v0) => { + crate::Type::Array(v0) => { state.write_u8(0u8); v0.hash(state); } - Type::BareFn(v0) => { + crate::Type::BareFn(v0) => { state.write_u8(1u8); v0.hash(state); } - Type::Group(v0) => { + crate::Type::Group(v0) => { state.write_u8(2u8); v0.hash(state); } - Type::ImplTrait(v0) => { + crate::Type::ImplTrait(v0) => { state.write_u8(3u8); v0.hash(state); } - Type::Infer(v0) => { + crate::Type::Infer(v0) => { state.write_u8(4u8); v0.hash(state); } - Type::Macro(v0) => { + crate::Type::Macro(v0) => { state.write_u8(5u8); v0.hash(state); } - Type::Never(v0) => { + crate::Type::Never(v0) => { state.write_u8(6u8); v0.hash(state); } - Type::Paren(v0) => { + crate::Type::Paren(v0) => { state.write_u8(7u8); v0.hash(state); } - Type::Path(v0) => { + crate::Type::Path(v0) => { state.write_u8(8u8); v0.hash(state); } - Type::Ptr(v0) => { + crate::Type::Ptr(v0) => { state.write_u8(9u8); v0.hash(state); } - Type::Reference(v0) => { + crate::Type::Reference(v0) => { state.write_u8(10u8); v0.hash(state); } - Type::Slice(v0) => { + crate::Type::Slice(v0) => { state.write_u8(11u8); v0.hash(state); } - Type::TraitObject(v0) => { + crate::Type::TraitObject(v0) => { state.write_u8(12u8); v0.hash(state); } - Type::Tuple(v0) => { + crate::Type::Tuple(v0) => { state.write_u8(13u8); v0.hash(state); } - Type::Verbatim(v0) => { + crate::Type::Verbatim(v0) => { state.write_u8(14u8); 
TokenStreamHelper(v0).hash(state); } @@ -2433,8 +2495,8 @@ impl Hash for Type { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for TypeArray { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::TypeArray { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -2444,8 +2506,8 @@ impl Hash for TypeArray { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for TypeBareFn { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::TypeBareFn { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -2459,8 +2521,8 @@ impl Hash for TypeBareFn { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for TypeGroup { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::TypeGroup { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -2469,8 +2531,8 @@ impl Hash for TypeGroup { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for TypeImplTrait { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::TypeImplTrait { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -2479,16 +2541,16 @@ impl Hash for TypeImplTrait { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for TypeInfer { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::TypeInfer { fn hash<H>(&self, _state: &mut H) where H: Hasher, {} } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for TypeMacro { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::TypeMacro { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -2497,16 +2559,16 @@ impl Hash for TypeMacro { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for TypeNever { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::TypeNever { fn hash<H>(&self, _state: &mut H) where H: Hasher, {} } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for TypeParam { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::TypeParam { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -2520,31 +2582,38 @@ impl Hash for TypeParam { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for TypeParamBound { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::TypeParamBound { fn hash<H>(&self, state: &mut H) where H: Hasher, { match self { - TypeParamBound::Trait(v0) => { + crate::TypeParamBound::Trait(v0) => { state.write_u8(0u8); v0.hash(state); } - TypeParamBound::Lifetime(v0) => { + crate::TypeParamBound::Lifetime(v0) => { state.write_u8(1u8); v0.hash(state); } - TypeParamBound::Verbatim(v0) => { + #[cfg(feature = "full")] + crate::TypeParamBound::PreciseCapture(v0) => { state.write_u8(2u8); + v0.hash(state); + } + crate::TypeParamBound::Verbatim(v0) => { + state.write_u8(3u8); TokenStreamHelper(v0).hash(state); } + #[cfg(not(feature = "full"))] + _ => unreachable!(), } } } #[cfg(any(feature = "derive", feature = "full"))] 
-#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for TypeParen { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::TypeParen { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -2553,8 +2622,8 @@ impl Hash for TypeParen { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for TypePath { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::TypePath { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -2564,8 +2633,8 @@ impl Hash for TypePath { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for TypePtr { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::TypePtr { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -2576,8 +2645,8 @@ impl Hash for TypePtr { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for TypeReference { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::TypeReference { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -2588,8 +2657,8 @@ impl Hash for TypeReference { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for TypeSlice { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::TypeSlice { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -2598,8 +2667,8 @@ impl Hash for TypeSlice { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for TypeTraitObject { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::TypeTraitObject { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -2609,8 +2678,8 @@ impl Hash for TypeTraitObject { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for TypeTuple { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::TypeTuple { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -2619,36 +2688,36 @@ impl Hash for TypeTuple { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for UnOp { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::UnOp { fn hash<H>(&self, state: &mut H) where H: Hasher, { match self { - UnOp::Deref(_) => { + crate::UnOp::Deref(_) => { state.write_u8(0u8); } - UnOp::Not(_) => { + crate::UnOp::Not(_) => { state.write_u8(1u8); } - UnOp::Neg(_) => { + crate::UnOp::Neg(_) => { state.write_u8(2u8); } } } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for UseGlob { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::UseGlob { fn hash<H>(&self, _state: &mut H) where H: Hasher, {} } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for UseGroup { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::UseGroup { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -2657,8 +2726,8 @@ impl Hash for UseGroup { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for UseName { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::UseName { fn hash<H>(&self, state: &mut H) 
where H: Hasher, @@ -2667,8 +2736,8 @@ impl Hash for UseName { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for UsePath { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::UsePath { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -2678,8 +2747,8 @@ impl Hash for UsePath { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for UseRename { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::UseRename { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -2689,30 +2758,30 @@ impl Hash for UseRename { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for UseTree { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::UseTree { fn hash<H>(&self, state: &mut H) where H: Hasher, { match self { - UseTree::Path(v0) => { + crate::UseTree::Path(v0) => { state.write_u8(0u8); v0.hash(state); } - UseTree::Name(v0) => { + crate::UseTree::Name(v0) => { state.write_u8(1u8); v0.hash(state); } - UseTree::Rename(v0) => { + crate::UseTree::Rename(v0) => { state.write_u8(2u8); v0.hash(state); } - UseTree::Glob(v0) => { + crate::UseTree::Glob(v0) => { state.write_u8(3u8); v0.hash(state); } - UseTree::Group(v0) => { + crate::UseTree::Group(v0) => { state.write_u8(4u8); v0.hash(state); } @@ -2720,8 +2789,8 @@ impl Hash for UseTree { } } #[cfg(feature = "full")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for Variadic { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::Variadic { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -2732,8 +2801,8 @@ impl Hash for Variadic { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for Variant { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::Variant { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -2745,8 +2814,8 @@ impl Hash for Variant { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for VisRestricted { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::VisRestricted { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -2756,29 +2825,29 @@ impl Hash for VisRestricted { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for Visibility { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::Visibility { fn hash<H>(&self, state: &mut H) where H: Hasher, { match self { - Visibility::Public(_) => { + crate::Visibility::Public(_) => { state.write_u8(0u8); } - Visibility::Restricted(v0) => { + crate::Visibility::Restricted(v0) => { state.write_u8(1u8); v0.hash(state); } - Visibility::Inherited => { + crate::Visibility::Inherited => { state.write_u8(2u8); } } } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for WhereClause { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] +impl Hash for crate::WhereClause { fn hash<H>(&self, state: &mut H) where H: Hasher, @@ -2787,18 +2856,18 @@ impl Hash for WhereClause { } } #[cfg(any(feature = "derive", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] -impl Hash for WherePredicate { +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] 
+impl Hash for crate::WherePredicate { fn hash<H>(&self, state: &mut H) where H: Hasher, { match self { - WherePredicate::Lifetime(v0) => { + crate::WherePredicate::Lifetime(v0) => { state.write_u8(0u8); v0.hash(state); } - WherePredicate::Type(v0) => { + crate::WherePredicate::Type(v0) => { state.write_u8(1u8); v0.hash(state); } diff --git a/vendor/syn/src/gen/token.css b/vendor/syn/src/gen/token.css new file mode 100644 index 00000000..ed82ae15 --- /dev/null +++ b/vendor/syn/src/gen/token.css @@ -0,0 +1,737 @@ +a.struct[title="struct syn::token::Abstract"], +a.struct[title="struct syn::token::And"], +a.struct[title="struct syn::token::AndAnd"], +a.struct[title="struct syn::token::AndEq"], +a.struct[title="struct syn::token::As"], +a.struct[title="struct syn::token::Async"], +a.struct[title="struct syn::token::At"], +a.struct[title="struct syn::token::Auto"], +a.struct[title="struct syn::token::Await"], +a.struct[title="struct syn::token::Become"], +a.struct[title="struct syn::token::Box"], +a.struct[title="struct syn::token::Break"], +a.struct[title="struct syn::token::Caret"], +a.struct[title="struct syn::token::CaretEq"], +a.struct[title="struct syn::token::Colon"], +a.struct[title="struct syn::token::Comma"], +a.struct[title="struct syn::token::Const"], +a.struct[title="struct syn::token::Continue"], +a.struct[title="struct syn::token::Crate"], +a.struct[title="struct syn::token::Default"], +a.struct[title="struct syn::token::Do"], +a.struct[title="struct syn::token::Dollar"], +a.struct[title="struct syn::token::Dot"], +a.struct[title="struct syn::token::DotDot"], +a.struct[title="struct syn::token::DotDotDot"], +a.struct[title="struct syn::token::DotDotEq"], +a.struct[title="struct syn::token::Dyn"], +a.struct[title="struct syn::token::Else"], +a.struct[title="struct syn::token::Enum"], +a.struct[title="struct syn::token::Eq"], +a.struct[title="struct syn::token::EqEq"], +a.struct[title="struct syn::token::Extern"], +a.struct[title="struct syn::token::FatArrow"], +a.struct[title="struct syn::token::Final"], +a.struct[title="struct syn::token::Fn"], +a.struct[title="struct syn::token::For"], +a.struct[title="struct syn::token::Ge"], +a.struct[title="struct syn::token::Gt"], +a.struct[title="struct syn::token::If"], +a.struct[title="struct syn::token::Impl"], +a.struct[title="struct syn::token::In"], +a.struct[title="struct syn::token::LArrow"], +a.struct[title="struct syn::token::Le"], +a.struct[title="struct syn::token::Let"], +a.struct[title="struct syn::token::Loop"], +a.struct[title="struct syn::token::Lt"], +a.struct[title="struct syn::token::Macro"], +a.struct[title="struct syn::token::Match"], +a.struct[title="struct syn::token::Minus"], +a.struct[title="struct syn::token::MinusEq"], +a.struct[title="struct syn::token::Mod"], +a.struct[title="struct syn::token::Move"], +a.struct[title="struct syn::token::Mut"], +a.struct[title="struct syn::token::Ne"], +a.struct[title="struct syn::token::Not"], +a.struct[title="struct syn::token::Or"], +a.struct[title="struct syn::token::OrEq"], +a.struct[title="struct syn::token::OrOr"], +a.struct[title="struct syn::token::Override"], +a.struct[title="struct syn::token::PathSep"], +a.struct[title="struct syn::token::Percent"], +a.struct[title="struct syn::token::PercentEq"], +a.struct[title="struct syn::token::Plus"], +a.struct[title="struct syn::token::PlusEq"], +a.struct[title="struct syn::token::Pound"], +a.struct[title="struct syn::token::Priv"], +a.struct[title="struct syn::token::Pub"], +a.struct[title="struct syn::token::Question"], 
+a.struct[title="struct syn::token::RArrow"], +a.struct[title="struct syn::token::Raw"], +a.struct[title="struct syn::token::Ref"], +a.struct[title="struct syn::token::Return"], +a.struct[title="struct syn::token::SelfType"], +a.struct[title="struct syn::token::SelfValue"], +a.struct[title="struct syn::token::Semi"], +a.struct[title="struct syn::token::Shl"], +a.struct[title="struct syn::token::ShlEq"], +a.struct[title="struct syn::token::Shr"], +a.struct[title="struct syn::token::ShrEq"], +a.struct[title="struct syn::token::Slash"], +a.struct[title="struct syn::token::SlashEq"], +a.struct[title="struct syn::token::Star"], +a.struct[title="struct syn::token::StarEq"], +a.struct[title="struct syn::token::Static"], +a.struct[title="struct syn::token::Struct"], +a.struct[title="struct syn::token::Super"], +a.struct[title="struct syn::token::Tilde"], +a.struct[title="struct syn::token::Trait"], +a.struct[title="struct syn::token::Try"], +a.struct[title="struct syn::token::Type"], +a.struct[title="struct syn::token::Typeof"], +a.struct[title="struct syn::token::Underscore"], +a.struct[title="struct syn::token::Union"], +a.struct[title="struct syn::token::Unsafe"], +a.struct[title="struct syn::token::Unsized"], +a.struct[title="struct syn::token::Use"], +a.struct[title="struct syn::token::Virtual"], +a.struct[title="struct syn::token::Where"], +a.struct[title="struct syn::token::While"], +a.struct[title="struct syn::token::Yield"] { + display: inline-block; + color: transparent; + white-space: nowrap; +} + +a.struct[title="struct syn::token::Abstract"]::before, +a.struct[title="struct syn::token::And"]::before, +a.struct[title="struct syn::token::AndAnd"]::before, +a.struct[title="struct syn::token::AndEq"]::before, +a.struct[title="struct syn::token::As"]::before, +a.struct[title="struct syn::token::Async"]::before, +a.struct[title="struct syn::token::At"]::before, +a.struct[title="struct syn::token::Auto"]::before, +a.struct[title="struct syn::token::Await"]::before, +a.struct[title="struct syn::token::Become"]::before, +a.struct[title="struct syn::token::Box"]::before, +a.struct[title="struct syn::token::Break"]::before, +a.struct[title="struct syn::token::Caret"]::before, +a.struct[title="struct syn::token::CaretEq"]::before, +a.struct[title="struct syn::token::Colon"]::before, +a.struct[title="struct syn::token::Comma"]::before, +a.struct[title="struct syn::token::Const"]::before, +a.struct[title="struct syn::token::Continue"]::before, +a.struct[title="struct syn::token::Crate"]::before, +a.struct[title="struct syn::token::Default"]::before, +a.struct[title="struct syn::token::Do"]::before, +a.struct[title="struct syn::token::Dollar"]::before, +a.struct[title="struct syn::token::Dot"]::before, +a.struct[title="struct syn::token::DotDot"]::before, +a.struct[title="struct syn::token::DotDotDot"]::before, +a.struct[title="struct syn::token::DotDotEq"]::before, +a.struct[title="struct syn::token::Dyn"]::before, +a.struct[title="struct syn::token::Else"]::before, +a.struct[title="struct syn::token::Enum"]::before, +a.struct[title="struct syn::token::Eq"]::before, +a.struct[title="struct syn::token::EqEq"]::before, +a.struct[title="struct syn::token::Extern"]::before, +a.struct[title="struct syn::token::FatArrow"]::before, +a.struct[title="struct syn::token::Final"]::before, +a.struct[title="struct syn::token::Fn"]::before, +a.struct[title="struct syn::token::For"]::before, +a.struct[title="struct syn::token::Ge"]::before, +a.struct[title="struct syn::token::Gt"]::before, +a.struct[title="struct 
syn::token::If"]::before, +a.struct[title="struct syn::token::Impl"]::before, +a.struct[title="struct syn::token::In"]::before, +a.struct[title="struct syn::token::LArrow"]::before, +a.struct[title="struct syn::token::Le"]::before, +a.struct[title="struct syn::token::Let"]::before, +a.struct[title="struct syn::token::Loop"]::before, +a.struct[title="struct syn::token::Lt"]::before, +a.struct[title="struct syn::token::Macro"]::before, +a.struct[title="struct syn::token::Match"]::before, +a.struct[title="struct syn::token::Minus"]::before, +a.struct[title="struct syn::token::MinusEq"]::before, +a.struct[title="struct syn::token::Mod"]::before, +a.struct[title="struct syn::token::Move"]::before, +a.struct[title="struct syn::token::Mut"]::before, +a.struct[title="struct syn::token::Ne"]::before, +a.struct[title="struct syn::token::Not"]::before, +a.struct[title="struct syn::token::Or"]::before, +a.struct[title="struct syn::token::OrEq"]::before, +a.struct[title="struct syn::token::OrOr"]::before, +a.struct[title="struct syn::token::Override"]::before, +a.struct[title="struct syn::token::PathSep"]::before, +a.struct[title="struct syn::token::Percent"]::before, +a.struct[title="struct syn::token::PercentEq"]::before, +a.struct[title="struct syn::token::Plus"]::before, +a.struct[title="struct syn::token::PlusEq"]::before, +a.struct[title="struct syn::token::Pound"]::before, +a.struct[title="struct syn::token::Priv"]::before, +a.struct[title="struct syn::token::Pub"]::before, +a.struct[title="struct syn::token::Question"]::before, +a.struct[title="struct syn::token::RArrow"]::before, +a.struct[title="struct syn::token::Raw"]::before, +a.struct[title="struct syn::token::Ref"]::before, +a.struct[title="struct syn::token::Return"]::before, +a.struct[title="struct syn::token::SelfType"]::before, +a.struct[title="struct syn::token::SelfValue"]::before, +a.struct[title="struct syn::token::Semi"]::before, +a.struct[title="struct syn::token::Shl"]::before, +a.struct[title="struct syn::token::ShlEq"]::before, +a.struct[title="struct syn::token::Shr"]::before, +a.struct[title="struct syn::token::ShrEq"]::before, +a.struct[title="struct syn::token::Slash"]::before, +a.struct[title="struct syn::token::SlashEq"]::before, +a.struct[title="struct syn::token::Star"]::before, +a.struct[title="struct syn::token::StarEq"]::before, +a.struct[title="struct syn::token::Static"]::before, +a.struct[title="struct syn::token::Struct"]::before, +a.struct[title="struct syn::token::Super"]::before, +a.struct[title="struct syn::token::Tilde"]::before, +a.struct[title="struct syn::token::Trait"]::before, +a.struct[title="struct syn::token::Try"]::before, +a.struct[title="struct syn::token::Type"]::before, +a.struct[title="struct syn::token::Typeof"]::before, +a.struct[title="struct syn::token::Underscore"]::before, +a.struct[title="struct syn::token::Union"]::before, +a.struct[title="struct syn::token::Unsafe"]::before, +a.struct[title="struct syn::token::Unsized"]::before, +a.struct[title="struct syn::token::Use"]::before, +a.struct[title="struct syn::token::Virtual"]::before, +a.struct[title="struct syn::token::Where"]::before, +a.struct[title="struct syn::token::While"]::before, +a.struct[title="struct syn::token::Yield"]::before { + display: inline-block; + color: var(--type-link-color); + width: 0; +} + +a.struct[title="struct syn::token::Abstract"]::before { + content: "Token![abstract]"; +} + +a.struct[title="struct syn::token::And"]::before { + content: "Token![&]"; +} + +a.struct[title="struct 
syn::token::AndAnd"]::before { + content: "Token![&&]"; +} + +a.struct[title="struct syn::token::AndEq"]::before { + content: "Token![&=]"; +} + +a.struct[title="struct syn::token::As"]::before { + content: "Token![as]"; +} + +a.struct[title="struct syn::token::Async"]::before { + content: "Token![async]"; +} + +a.struct[title="struct syn::token::At"]::before { + content: "Token![@]"; +} + +a.struct[title="struct syn::token::Auto"]::before { + content: "Token![auto]"; +} + +a.struct[title="struct syn::token::Await"]::before { + content: "Token![await]"; +} + +a.struct[title="struct syn::token::Become"]::before { + content: "Token![become]"; +} + +a.struct[title="struct syn::token::Box"]::before { + content: "Token![box]"; +} + +a.struct[title="struct syn::token::Break"]::before { + content: "Token![break]"; +} + +a.struct[title="struct syn::token::Caret"]::before { + content: "Token![^]"; +} + +a.struct[title="struct syn::token::CaretEq"]::before { + content: "Token![^=]"; +} + +a.struct[title="struct syn::token::Colon"]::before { + content: "Token![:]"; +} + +a.struct[title="struct syn::token::Comma"]::before { + content: "Token![,]"; +} + +a.struct[title="struct syn::token::Const"]::before { + content: "Token![const]"; +} + +a.struct[title="struct syn::token::Continue"]::before { + content: "Token![continue]"; +} + +a.struct[title="struct syn::token::Crate"]::before { + content: "Token![crate]"; +} + +a.struct[title="struct syn::token::Default"]::before { + content: "Token![default]"; +} + +a.struct[title="struct syn::token::Do"]::before { + content: "Token![do]"; +} + +a.struct[title="struct syn::token::Dollar"]::before { + content: "Token![$]"; +} + +a.struct[title="struct syn::token::Dot"]::before { + content: "Token![.]"; +} + +a.struct[title="struct syn::token::DotDot"]::before { + content: "Token![..]"; +} + +a.struct[title="struct syn::token::DotDotDot"]::before { + content: "Token![...]"; +} + +a.struct[title="struct syn::token::DotDotEq"]::before { + content: "Token![..=]"; +} + +a.struct[title="struct syn::token::Dyn"]::before { + content: "Token![dyn]"; +} + +a.struct[title="struct syn::token::Else"]::before { + content: "Token![else]"; +} + +a.struct[title="struct syn::token::Enum"]::before { + content: "Token![enum]"; +} + +a.struct[title="struct syn::token::Eq"]::before { + content: "Token![=]"; +} + +a.struct[title="struct syn::token::EqEq"]::before { + content: "Token![==]"; +} + +a.struct[title="struct syn::token::Extern"]::before { + content: "Token![extern]"; +} + +a.struct[title="struct syn::token::FatArrow"]::before { + content: "Token![=>]"; +} + +a.struct[title="struct syn::token::Final"]::before { + content: "Token![final]"; +} + +a.struct[title="struct syn::token::Fn"]::before { + content: "Token![fn]"; +} + +a.struct[title="struct syn::token::For"]::before { + content: "Token![for]"; +} + +a.struct[title="struct syn::token::Ge"]::before { + content: "Token![>=]"; +} + +a.struct[title="struct syn::token::Gt"]::before { + content: "Token![>]"; +} + +a.struct[title="struct syn::token::If"]::before { + content: "Token![if]"; +} + +a.struct[title="struct syn::token::Impl"]::before { + content: "Token![impl]"; +} + +a.struct[title="struct syn::token::In"]::before { + content: "Token![in]"; +} + +a.struct[title="struct syn::token::LArrow"]::before { + content: "Token![<-]"; +} + +a.struct[title="struct syn::token::Le"]::before { + content: "Token![<=]"; +} + +a.struct[title="struct syn::token::Let"]::before { + content: "Token![let]"; +} + +a.struct[title="struct 
syn::token::Loop"]::before { + content: "Token![loop]"; +} + +a.struct[title="struct syn::token::Lt"]::before { + content: "Token![<]"; +} + +a.struct[title="struct syn::token::Macro"]::before { + content: "Token![macro]"; +} + +a.struct[title="struct syn::token::Match"]::before { + content: "Token![match]"; +} + +a.struct[title="struct syn::token::Minus"]::before { + content: "Token![-]"; +} + +a.struct[title="struct syn::token::MinusEq"]::before { + content: "Token![-=]"; +} + +a.struct[title="struct syn::token::Mod"]::before { + content: "Token![mod]"; +} + +a.struct[title="struct syn::token::Move"]::before { + content: "Token![move]"; +} + +a.struct[title="struct syn::token::Mut"]::before { + content: "Token![mut]"; +} + +a.struct[title="struct syn::token::Ne"]::before { + content: "Token![!=]"; +} + +a.struct[title="struct syn::token::Not"]::before { + content: "Token![!]"; +} + +a.struct[title="struct syn::token::Or"]::before { + content: "Token![|]"; +} + +a.struct[title="struct syn::token::OrEq"]::before { + content: "Token![|=]"; +} + +a.struct[title="struct syn::token::OrOr"]::before { + content: "Token![||]"; +} + +a.struct[title="struct syn::token::Override"]::before { + content: "Token![override]"; +} + +a.struct[title="struct syn::token::PathSep"]::before { + content: "Token![::]"; +} + +a.struct[title="struct syn::token::Percent"]::before { + content: "Token![%]"; +} + +a.struct[title="struct syn::token::PercentEq"]::before { + content: "Token![%=]"; +} + +a.struct[title="struct syn::token::Plus"]::before { + content: "Token![+]"; +} + +a.struct[title="struct syn::token::PlusEq"]::before { + content: "Token![+=]"; +} + +a.struct[title="struct syn::token::Pound"]::before { + content: "Token![#]"; +} + +a.struct[title="struct syn::token::Priv"]::before { + content: "Token![priv]"; +} + +a.struct[title="struct syn::token::Pub"]::before { + content: "Token![pub]"; +} + +a.struct[title="struct syn::token::Question"]::before { + content: "Token![?]"; +} + +a.struct[title="struct syn::token::RArrow"]::before { + content: "Token![->]"; +} + +a.struct[title="struct syn::token::Raw"]::before { + content: "Token![raw]"; +} + +a.struct[title="struct syn::token::Ref"]::before { + content: "Token![ref]"; +} + +a.struct[title="struct syn::token::Return"]::before { + content: "Token![return]"; +} + +a.struct[title="struct syn::token::SelfType"]::before { + content: "Token![Self]"; +} + +a.struct[title="struct syn::token::SelfValue"]::before { + content: "Token![self]"; +} + +a.struct[title="struct syn::token::Semi"]::before { + content: "Token![;]"; +} + +a.struct[title="struct syn::token::Shl"]::before { + content: "Token![<<]"; +} + +a.struct[title="struct syn::token::ShlEq"]::before { + content: "Token![<<=]"; +} + +a.struct[title="struct syn::token::Shr"]::before { + content: "Token![>>]"; +} + +a.struct[title="struct syn::token::ShrEq"]::before { + content: "Token![>>=]"; +} + +a.struct[title="struct syn::token::Slash"]::before { + content: "Token![/]"; +} + +a.struct[title="struct syn::token::SlashEq"]::before { + content: "Token![/=]"; +} + +a.struct[title="struct syn::token::Star"]::before { + content: "Token![*]"; +} + +a.struct[title="struct syn::token::StarEq"]::before { + content: "Token![*=]"; +} + +a.struct[title="struct syn::token::Static"]::before { + content: "Token![static]"; +} + +a.struct[title="struct syn::token::Struct"]::before { + content: "Token![struct]"; +} + +a.struct[title="struct syn::token::Super"]::before { + content: "Token![super]"; +} + 
+a.struct[title="struct syn::token::Tilde"]::before { + content: "Token![~]"; +} + +a.struct[title="struct syn::token::Trait"]::before { + content: "Token![trait]"; +} + +a.struct[title="struct syn::token::Try"]::before { + content: "Token![try]"; +} + +a.struct[title="struct syn::token::Type"]::before { + content: "Token![type]"; +} + +a.struct[title="struct syn::token::Typeof"]::before { + content: "Token![typeof]"; +} + +a.struct[title="struct syn::token::Underscore"]::before { + content: "Token![_]"; + font-size: calc(100% * 10 / 9); +} + +a.struct[title="struct syn::token::Union"]::before { + content: "Token![union]"; +} + +a.struct[title="struct syn::token::Unsafe"]::before { + content: "Token![unsafe]"; +} + +a.struct[title="struct syn::token::Unsized"]::before { + content: "Token![unsized]"; +} + +a.struct[title="struct syn::token::Use"]::before { + content: "Token![use]"; +} + +a.struct[title="struct syn::token::Virtual"]::before { + content: "Token![virtual]"; +} + +a.struct[title="struct syn::token::Where"]::before { + content: "Token![where]"; +} + +a.struct[title="struct syn::token::While"]::before { + content: "Token![while]"; +} + +a.struct[title="struct syn::token::Yield"]::before { + content: "Token![yield]"; +} + +a.struct[title="struct syn::token::Underscore"] { + font-size: calc(100% * 9 / 10); +} + +a.struct[title="struct syn::token::PercentEq"]::after, +a.struct[title="struct syn::token::Question"]::after { + content: "."; +} + +a.struct[title="struct syn::token::DotDotDot"]::after, +a.struct[title="struct syn::token::FatArrow"]::after, +a.struct[title="struct syn::token::Percent"]::after { + content: ".."; +} + +a.struct[title="struct syn::token::CaretEq"]::after, +a.struct[title="struct syn::token::Dollar"]::after, +a.struct[title="struct syn::token::DotDotEq"]::after, +a.struct[title="struct syn::token::MinusEq"]::after, +a.struct[title="struct syn::token::PathSep"]::after, +a.struct[title="struct syn::token::SelfValue"]::after, +a.struct[title="struct syn::token::SlashEq"]::after { + content: "..."; +} + +a.struct[title="struct syn::token::AndAnd"]::after, +a.struct[title="struct syn::token::Caret"]::after, +a.struct[title="struct syn::token::Colon"]::after, +a.struct[title="struct syn::token::Comma"]::after, +a.struct[title="struct syn::token::DotDot"]::after, +a.struct[title="struct syn::token::LArrow"]::after, +a.struct[title="struct syn::token::Minus"]::after, +a.struct[title="struct syn::token::PlusEq"]::after, +a.struct[title="struct syn::token::Pound"]::after, +a.struct[title="struct syn::token::RArrow"]::after, +a.struct[title="struct syn::token::SelfType"]::after, +a.struct[title="struct syn::token::Slash"]::after, +a.struct[title="struct syn::token::StarEq"]::after, +a.struct[title="struct syn::token::Tilde"]::after { + content: "...."; +} + +a.struct[title="struct syn::token::AndEq"]::after, +a.struct[title="struct syn::token::Plus"]::after, +a.struct[title="struct syn::token::Semi"]::after, +a.struct[title="struct syn::token::Star"]::after { + content: "....."; +} + +a.struct[title="struct syn::token::And"]::after, +a.struct[title="struct syn::token::Dot"]::after, +a.struct[title="struct syn::token::EqEq"]::after, +a.struct[title="struct syn::token::Not"]::after, +a.struct[title="struct syn::token::OrEq"]::after, +a.struct[title="struct syn::token::OrOr"]::after, +a.struct[title="struct syn::token::ShlEq"]::after, +a.struct[title="struct syn::token::ShrEq"]::after { + content: "......"; +} + +a.struct[title="struct syn::token::At"]::after, 
+a.struct[title="struct syn::token::Eq"]::after, +a.struct[title="struct syn::token::Gt"]::after, +a.struct[title="struct syn::token::Lt"]::after, +a.struct[title="struct syn::token::Or"]::after, +a.struct[title="struct syn::token::Shl"]::after, +a.struct[title="struct syn::token::Shr"]::after { + content: "......."; +} + +a.struct[title="struct syn::token::Abstract"]::after, +a.struct[title="struct syn::token::As"]::after, +a.struct[title="struct syn::token::Async"]::after, +a.struct[title="struct syn::token::Auto"]::after, +a.struct[title="struct syn::token::Await"]::after, +a.struct[title="struct syn::token::Become"]::after, +a.struct[title="struct syn::token::Box"]::after, +a.struct[title="struct syn::token::Break"]::after, +a.struct[title="struct syn::token::Const"]::after, +a.struct[title="struct syn::token::Continue"]::after, +a.struct[title="struct syn::token::Crate"]::after, +a.struct[title="struct syn::token::Default"]::after, +a.struct[title="struct syn::token::Do"]::after, +a.struct[title="struct syn::token::Dyn"]::after, +a.struct[title="struct syn::token::Else"]::after, +a.struct[title="struct syn::token::Enum"]::after, +a.struct[title="struct syn::token::Extern"]::after, +a.struct[title="struct syn::token::Final"]::after, +a.struct[title="struct syn::token::Fn"]::after, +a.struct[title="struct syn::token::For"]::after, +a.struct[title="struct syn::token::Ge"]::after, +a.struct[title="struct syn::token::If"]::after, +a.struct[title="struct syn::token::Impl"]::after, +a.struct[title="struct syn::token::In"]::after, +a.struct[title="struct syn::token::Le"]::after, +a.struct[title="struct syn::token::Let"]::after, +a.struct[title="struct syn::token::Loop"]::after, +a.struct[title="struct syn::token::Macro"]::after, +a.struct[title="struct syn::token::Match"]::after, +a.struct[title="struct syn::token::Mod"]::after, +a.struct[title="struct syn::token::Move"]::after, +a.struct[title="struct syn::token::Mut"]::after, +a.struct[title="struct syn::token::Ne"]::after, +a.struct[title="struct syn::token::Override"]::after, +a.struct[title="struct syn::token::Priv"]::after, +a.struct[title="struct syn::token::Pub"]::after, +a.struct[title="struct syn::token::Raw"]::after, +a.struct[title="struct syn::token::Ref"]::after, +a.struct[title="struct syn::token::Return"]::after, +a.struct[title="struct syn::token::Static"]::after, +a.struct[title="struct syn::token::Struct"]::after, +a.struct[title="struct syn::token::Super"]::after, +a.struct[title="struct syn::token::Trait"]::after, +a.struct[title="struct syn::token::Try"]::after, +a.struct[title="struct syn::token::Type"]::after, +a.struct[title="struct syn::token::Typeof"]::after, +a.struct[title="struct syn::token::Union"]::after, +a.struct[title="struct syn::token::Unsafe"]::after, +a.struct[title="struct syn::token::Unsized"]::after, +a.struct[title="struct syn::token::Use"]::after, +a.struct[title="struct syn::token::Virtual"]::after, +a.struct[title="struct syn::token::Where"]::after, +a.struct[title="struct syn::token::While"]::after, +a.struct[title="struct syn::token::Yield"]::after { + content: "........"; +} diff --git a/vendor/syn/src/gen/visit.rs b/vendor/syn/src/gen/visit.rs index fe81fb63..cd258fcd 100644 --- a/vendor/syn/src/gen/visit.rs +++ b/vendor/syn/src/gen/visit.rs @@ -5,8 +5,6 @@ #![allow(clippy::needless_pass_by_ref_mut)] #[cfg(any(feature = "full", feature = "derive"))] use crate::punctuated::Punctuated; -use crate::*; -use proc_macro2::Span; #[cfg(feature = "full")] macro_rules! 
full { ($e:expr) => { @@ -29,731 +27,925 @@ macro_rules! skip { /// [module documentation]: self pub trait Visit<'ast> { #[cfg(any(feature = "derive", feature = "full"))] - fn visit_abi(&mut self, i: &'ast Abi) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_abi(&mut self, i: &'ast crate::Abi) { visit_abi(self, i); } #[cfg(any(feature = "derive", feature = "full"))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] fn visit_angle_bracketed_generic_arguments( &mut self, - i: &'ast AngleBracketedGenericArguments, + i: &'ast crate::AngleBracketedGenericArguments, ) { visit_angle_bracketed_generic_arguments(self, i); } #[cfg(feature = "full")] - fn visit_arm(&mut self, i: &'ast Arm) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_arm(&mut self, i: &'ast crate::Arm) { visit_arm(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_assoc_const(&mut self, i: &'ast AssocConst) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_assoc_const(&mut self, i: &'ast crate::AssocConst) { visit_assoc_const(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_assoc_type(&mut self, i: &'ast AssocType) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_assoc_type(&mut self, i: &'ast crate::AssocType) { visit_assoc_type(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_attr_style(&mut self, i: &'ast AttrStyle) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_attr_style(&mut self, i: &'ast crate::AttrStyle) { visit_attr_style(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_attribute(&mut self, i: &'ast Attribute) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_attribute(&mut self, i: &'ast crate::Attribute) { visit_attribute(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_bare_fn_arg(&mut self, i: &'ast BareFnArg) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_bare_fn_arg(&mut self, i: &'ast crate::BareFnArg) { visit_bare_fn_arg(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_bare_variadic(&mut self, i: &'ast BareVariadic) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_bare_variadic(&mut self, i: &'ast crate::BareVariadic) { visit_bare_variadic(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_bin_op(&mut self, i: &'ast BinOp) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_bin_op(&mut self, i: &'ast crate::BinOp) { visit_bin_op(self, i); } #[cfg(feature = "full")] - fn visit_block(&mut self, i: &'ast Block) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_block(&mut self, i: &'ast crate::Block) { visit_block(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_bound_lifetimes(&mut self, i: &'ast BoundLifetimes) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_bound_lifetimes(&mut self, i: &'ast crate::BoundLifetimes) { visit_bound_lifetimes(self, i); } + #[cfg(feature = "full")] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_captured_param(&mut self, i: &'ast crate::CapturedParam) { + visit_captured_param(self, i); + } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_const_param(&mut self, i: &'ast 
ConstParam) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_const_param(&mut self, i: &'ast crate::ConstParam) { visit_const_param(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_constraint(&mut self, i: &'ast Constraint) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_constraint(&mut self, i: &'ast crate::Constraint) { visit_constraint(self, i); } #[cfg(feature = "derive")] - fn visit_data(&mut self, i: &'ast Data) { + #[cfg_attr(docsrs, doc(cfg(feature = "derive")))] + fn visit_data(&mut self, i: &'ast crate::Data) { visit_data(self, i); } #[cfg(feature = "derive")] - fn visit_data_enum(&mut self, i: &'ast DataEnum) { + #[cfg_attr(docsrs, doc(cfg(feature = "derive")))] + fn visit_data_enum(&mut self, i: &'ast crate::DataEnum) { visit_data_enum(self, i); } #[cfg(feature = "derive")] - fn visit_data_struct(&mut self, i: &'ast DataStruct) { + #[cfg_attr(docsrs, doc(cfg(feature = "derive")))] + fn visit_data_struct(&mut self, i: &'ast crate::DataStruct) { visit_data_struct(self, i); } #[cfg(feature = "derive")] - fn visit_data_union(&mut self, i: &'ast DataUnion) { + #[cfg_attr(docsrs, doc(cfg(feature = "derive")))] + fn visit_data_union(&mut self, i: &'ast crate::DataUnion) { visit_data_union(self, i); } #[cfg(feature = "derive")] - fn visit_derive_input(&mut self, i: &'ast DeriveInput) { + #[cfg_attr(docsrs, doc(cfg(feature = "derive")))] + fn visit_derive_input(&mut self, i: &'ast crate::DeriveInput) { visit_derive_input(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_expr(&mut self, i: &'ast Expr) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_expr(&mut self, i: &'ast crate::Expr) { visit_expr(self, i); } #[cfg(feature = "full")] - fn visit_expr_array(&mut self, i: &'ast ExprArray) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_array(&mut self, i: &'ast crate::ExprArray) { visit_expr_array(self, i); } #[cfg(feature = "full")] - fn visit_expr_assign(&mut self, i: &'ast ExprAssign) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_assign(&mut self, i: &'ast crate::ExprAssign) { visit_expr_assign(self, i); } #[cfg(feature = "full")] - fn visit_expr_async(&mut self, i: &'ast ExprAsync) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_async(&mut self, i: &'ast crate::ExprAsync) { visit_expr_async(self, i); } #[cfg(feature = "full")] - fn visit_expr_await(&mut self, i: &'ast ExprAwait) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_await(&mut self, i: &'ast crate::ExprAwait) { visit_expr_await(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_expr_binary(&mut self, i: &'ast ExprBinary) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_expr_binary(&mut self, i: &'ast crate::ExprBinary) { visit_expr_binary(self, i); } #[cfg(feature = "full")] - fn visit_expr_block(&mut self, i: &'ast ExprBlock) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_block(&mut self, i: &'ast crate::ExprBlock) { visit_expr_block(self, i); } #[cfg(feature = "full")] - fn visit_expr_break(&mut self, i: &'ast ExprBreak) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_break(&mut self, i: &'ast crate::ExprBreak) { visit_expr_break(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_expr_call(&mut self, i: &'ast ExprCall) { + #[cfg_attr(docsrs, 
doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_expr_call(&mut self, i: &'ast crate::ExprCall) { visit_expr_call(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_expr_cast(&mut self, i: &'ast ExprCast) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_expr_cast(&mut self, i: &'ast crate::ExprCast) { visit_expr_cast(self, i); } #[cfg(feature = "full")] - fn visit_expr_closure(&mut self, i: &'ast ExprClosure) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_closure(&mut self, i: &'ast crate::ExprClosure) { visit_expr_closure(self, i); } #[cfg(feature = "full")] - fn visit_expr_const(&mut self, i: &'ast ExprConst) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_const(&mut self, i: &'ast crate::ExprConst) { visit_expr_const(self, i); } #[cfg(feature = "full")] - fn visit_expr_continue(&mut self, i: &'ast ExprContinue) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_continue(&mut self, i: &'ast crate::ExprContinue) { visit_expr_continue(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_expr_field(&mut self, i: &'ast ExprField) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_expr_field(&mut self, i: &'ast crate::ExprField) { visit_expr_field(self, i); } #[cfg(feature = "full")] - fn visit_expr_for_loop(&mut self, i: &'ast ExprForLoop) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_for_loop(&mut self, i: &'ast crate::ExprForLoop) { visit_expr_for_loop(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_expr_group(&mut self, i: &'ast ExprGroup) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_expr_group(&mut self, i: &'ast crate::ExprGroup) { visit_expr_group(self, i); } #[cfg(feature = "full")] - fn visit_expr_if(&mut self, i: &'ast ExprIf) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_if(&mut self, i: &'ast crate::ExprIf) { visit_expr_if(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_expr_index(&mut self, i: &'ast ExprIndex) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_expr_index(&mut self, i: &'ast crate::ExprIndex) { visit_expr_index(self, i); } #[cfg(feature = "full")] - fn visit_expr_infer(&mut self, i: &'ast ExprInfer) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_infer(&mut self, i: &'ast crate::ExprInfer) { visit_expr_infer(self, i); } #[cfg(feature = "full")] - fn visit_expr_let(&mut self, i: &'ast ExprLet) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_let(&mut self, i: &'ast crate::ExprLet) { visit_expr_let(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_expr_lit(&mut self, i: &'ast ExprLit) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_expr_lit(&mut self, i: &'ast crate::ExprLit) { visit_expr_lit(self, i); } #[cfg(feature = "full")] - fn visit_expr_loop(&mut self, i: &'ast ExprLoop) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_loop(&mut self, i: &'ast crate::ExprLoop) { visit_expr_loop(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_expr_macro(&mut self, i: &'ast ExprMacro) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_expr_macro(&mut self, i: &'ast crate::ExprMacro) { visit_expr_macro(self, i); } #[cfg(feature = "full")] 
- fn visit_expr_match(&mut self, i: &'ast ExprMatch) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_match(&mut self, i: &'ast crate::ExprMatch) { visit_expr_match(self, i); } - #[cfg(feature = "full")] - fn visit_expr_method_call(&mut self, i: &'ast ExprMethodCall) { + #[cfg(any(feature = "derive", feature = "full"))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_expr_method_call(&mut self, i: &'ast crate::ExprMethodCall) { visit_expr_method_call(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_expr_paren(&mut self, i: &'ast ExprParen) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_expr_paren(&mut self, i: &'ast crate::ExprParen) { visit_expr_paren(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_expr_path(&mut self, i: &'ast ExprPath) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_expr_path(&mut self, i: &'ast crate::ExprPath) { visit_expr_path(self, i); } #[cfg(feature = "full")] - fn visit_expr_range(&mut self, i: &'ast ExprRange) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_range(&mut self, i: &'ast crate::ExprRange) { visit_expr_range(self, i); } #[cfg(feature = "full")] - fn visit_expr_reference(&mut self, i: &'ast ExprReference) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_raw_addr(&mut self, i: &'ast crate::ExprRawAddr) { + visit_expr_raw_addr(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_expr_reference(&mut self, i: &'ast crate::ExprReference) { visit_expr_reference(self, i); } #[cfg(feature = "full")] - fn visit_expr_repeat(&mut self, i: &'ast ExprRepeat) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_repeat(&mut self, i: &'ast crate::ExprRepeat) { visit_expr_repeat(self, i); } #[cfg(feature = "full")] - fn visit_expr_return(&mut self, i: &'ast ExprReturn) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_return(&mut self, i: &'ast crate::ExprReturn) { visit_expr_return(self, i); } - #[cfg(feature = "full")] - fn visit_expr_struct(&mut self, i: &'ast ExprStruct) { + #[cfg(any(feature = "derive", feature = "full"))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_expr_struct(&mut self, i: &'ast crate::ExprStruct) { visit_expr_struct(self, i); } #[cfg(feature = "full")] - fn visit_expr_try(&mut self, i: &'ast ExprTry) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_try(&mut self, i: &'ast crate::ExprTry) { visit_expr_try(self, i); } #[cfg(feature = "full")] - fn visit_expr_try_block(&mut self, i: &'ast ExprTryBlock) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_try_block(&mut self, i: &'ast crate::ExprTryBlock) { visit_expr_try_block(self, i); } - #[cfg(feature = "full")] - fn visit_expr_tuple(&mut self, i: &'ast ExprTuple) { + #[cfg(any(feature = "derive", feature = "full"))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_expr_tuple(&mut self, i: &'ast crate::ExprTuple) { visit_expr_tuple(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_expr_unary(&mut self, i: &'ast ExprUnary) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_expr_unary(&mut self, i: &'ast crate::ExprUnary) { visit_expr_unary(self, i); } #[cfg(feature = 
"full")] - fn visit_expr_unsafe(&mut self, i: &'ast ExprUnsafe) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_unsafe(&mut self, i: &'ast crate::ExprUnsafe) { visit_expr_unsafe(self, i); } #[cfg(feature = "full")] - fn visit_expr_while(&mut self, i: &'ast ExprWhile) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_while(&mut self, i: &'ast crate::ExprWhile) { visit_expr_while(self, i); } #[cfg(feature = "full")] - fn visit_expr_yield(&mut self, i: &'ast ExprYield) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_yield(&mut self, i: &'ast crate::ExprYield) { visit_expr_yield(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_field(&mut self, i: &'ast Field) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_field(&mut self, i: &'ast crate::Field) { visit_field(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_field_mutability(&mut self, i: &'ast FieldMutability) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_field_mutability(&mut self, i: &'ast crate::FieldMutability) { visit_field_mutability(self, i); } #[cfg(feature = "full")] - fn visit_field_pat(&mut self, i: &'ast FieldPat) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_field_pat(&mut self, i: &'ast crate::FieldPat) { visit_field_pat(self, i); } - #[cfg(feature = "full")] - fn visit_field_value(&mut self, i: &'ast FieldValue) { + #[cfg(any(feature = "derive", feature = "full"))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_field_value(&mut self, i: &'ast crate::FieldValue) { visit_field_value(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_fields(&mut self, i: &'ast Fields) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_fields(&mut self, i: &'ast crate::Fields) { visit_fields(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_fields_named(&mut self, i: &'ast FieldsNamed) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_fields_named(&mut self, i: &'ast crate::FieldsNamed) { visit_fields_named(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_fields_unnamed(&mut self, i: &'ast FieldsUnnamed) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_fields_unnamed(&mut self, i: &'ast crate::FieldsUnnamed) { visit_fields_unnamed(self, i); } #[cfg(feature = "full")] - fn visit_file(&mut self, i: &'ast File) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_file(&mut self, i: &'ast crate::File) { visit_file(self, i); } #[cfg(feature = "full")] - fn visit_fn_arg(&mut self, i: &'ast FnArg) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_fn_arg(&mut self, i: &'ast crate::FnArg) { visit_fn_arg(self, i); } #[cfg(feature = "full")] - fn visit_foreign_item(&mut self, i: &'ast ForeignItem) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_foreign_item(&mut self, i: &'ast crate::ForeignItem) { visit_foreign_item(self, i); } #[cfg(feature = "full")] - fn visit_foreign_item_fn(&mut self, i: &'ast ForeignItemFn) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_foreign_item_fn(&mut self, i: &'ast crate::ForeignItemFn) { visit_foreign_item_fn(self, i); } #[cfg(feature = "full")] - fn visit_foreign_item_macro(&mut self, i: &'ast ForeignItemMacro) { + #[cfg_attr(docsrs, doc(cfg(feature = 
"full")))] + fn visit_foreign_item_macro(&mut self, i: &'ast crate::ForeignItemMacro) { visit_foreign_item_macro(self, i); } #[cfg(feature = "full")] - fn visit_foreign_item_static(&mut self, i: &'ast ForeignItemStatic) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_foreign_item_static(&mut self, i: &'ast crate::ForeignItemStatic) { visit_foreign_item_static(self, i); } #[cfg(feature = "full")] - fn visit_foreign_item_type(&mut self, i: &'ast ForeignItemType) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_foreign_item_type(&mut self, i: &'ast crate::ForeignItemType) { visit_foreign_item_type(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_generic_argument(&mut self, i: &'ast GenericArgument) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_generic_argument(&mut self, i: &'ast crate::GenericArgument) { visit_generic_argument(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_generic_param(&mut self, i: &'ast GenericParam) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_generic_param(&mut self, i: &'ast crate::GenericParam) { visit_generic_param(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_generics(&mut self, i: &'ast Generics) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_generics(&mut self, i: &'ast crate::Generics) { visit_generics(self, i); } - fn visit_ident(&mut self, i: &'ast Ident) { + fn visit_ident(&mut self, i: &'ast proc_macro2::Ident) { visit_ident(self, i); } #[cfg(feature = "full")] - fn visit_impl_item(&mut self, i: &'ast ImplItem) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_impl_item(&mut self, i: &'ast crate::ImplItem) { visit_impl_item(self, i); } #[cfg(feature = "full")] - fn visit_impl_item_const(&mut self, i: &'ast ImplItemConst) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_impl_item_const(&mut self, i: &'ast crate::ImplItemConst) { visit_impl_item_const(self, i); } #[cfg(feature = "full")] - fn visit_impl_item_fn(&mut self, i: &'ast ImplItemFn) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_impl_item_fn(&mut self, i: &'ast crate::ImplItemFn) { visit_impl_item_fn(self, i); } #[cfg(feature = "full")] - fn visit_impl_item_macro(&mut self, i: &'ast ImplItemMacro) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_impl_item_macro(&mut self, i: &'ast crate::ImplItemMacro) { visit_impl_item_macro(self, i); } #[cfg(feature = "full")] - fn visit_impl_item_type(&mut self, i: &'ast ImplItemType) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_impl_item_type(&mut self, i: &'ast crate::ImplItemType) { visit_impl_item_type(self, i); } #[cfg(feature = "full")] - fn visit_impl_restriction(&mut self, i: &'ast ImplRestriction) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_impl_restriction(&mut self, i: &'ast crate::ImplRestriction) { visit_impl_restriction(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_index(&mut self, i: &'ast Index) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_index(&mut self, i: &'ast crate::Index) { visit_index(self, i); } #[cfg(feature = "full")] - fn visit_item(&mut self, i: &'ast Item) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_item(&mut self, i: &'ast crate::Item) { visit_item(self, i); } #[cfg(feature = "full")] - fn visit_item_const(&mut self, i: &'ast 
ItemConst) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_item_const(&mut self, i: &'ast crate::ItemConst) { visit_item_const(self, i); } #[cfg(feature = "full")] - fn visit_item_enum(&mut self, i: &'ast ItemEnum) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_item_enum(&mut self, i: &'ast crate::ItemEnum) { visit_item_enum(self, i); } #[cfg(feature = "full")] - fn visit_item_extern_crate(&mut self, i: &'ast ItemExternCrate) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_item_extern_crate(&mut self, i: &'ast crate::ItemExternCrate) { visit_item_extern_crate(self, i); } #[cfg(feature = "full")] - fn visit_item_fn(&mut self, i: &'ast ItemFn) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_item_fn(&mut self, i: &'ast crate::ItemFn) { visit_item_fn(self, i); } #[cfg(feature = "full")] - fn visit_item_foreign_mod(&mut self, i: &'ast ItemForeignMod) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_item_foreign_mod(&mut self, i: &'ast crate::ItemForeignMod) { visit_item_foreign_mod(self, i); } #[cfg(feature = "full")] - fn visit_item_impl(&mut self, i: &'ast ItemImpl) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_item_impl(&mut self, i: &'ast crate::ItemImpl) { visit_item_impl(self, i); } #[cfg(feature = "full")] - fn visit_item_macro(&mut self, i: &'ast ItemMacro) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_item_macro(&mut self, i: &'ast crate::ItemMacro) { visit_item_macro(self, i); } #[cfg(feature = "full")] - fn visit_item_mod(&mut self, i: &'ast ItemMod) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_item_mod(&mut self, i: &'ast crate::ItemMod) { visit_item_mod(self, i); } #[cfg(feature = "full")] - fn visit_item_static(&mut self, i: &'ast ItemStatic) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_item_static(&mut self, i: &'ast crate::ItemStatic) { visit_item_static(self, i); } #[cfg(feature = "full")] - fn visit_item_struct(&mut self, i: &'ast ItemStruct) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_item_struct(&mut self, i: &'ast crate::ItemStruct) { visit_item_struct(self, i); } #[cfg(feature = "full")] - fn visit_item_trait(&mut self, i: &'ast ItemTrait) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_item_trait(&mut self, i: &'ast crate::ItemTrait) { visit_item_trait(self, i); } #[cfg(feature = "full")] - fn visit_item_trait_alias(&mut self, i: &'ast ItemTraitAlias) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_item_trait_alias(&mut self, i: &'ast crate::ItemTraitAlias) { visit_item_trait_alias(self, i); } #[cfg(feature = "full")] - fn visit_item_type(&mut self, i: &'ast ItemType) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_item_type(&mut self, i: &'ast crate::ItemType) { visit_item_type(self, i); } #[cfg(feature = "full")] - fn visit_item_union(&mut self, i: &'ast ItemUnion) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_item_union(&mut self, i: &'ast crate::ItemUnion) { visit_item_union(self, i); } #[cfg(feature = "full")] - fn visit_item_use(&mut self, i: &'ast ItemUse) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_item_use(&mut self, i: &'ast crate::ItemUse) { visit_item_use(self, i); } #[cfg(feature = "full")] - fn visit_label(&mut self, i: &'ast Label) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_label(&mut self, i: &'ast crate::Label) { visit_label(self, i); } - fn visit_lifetime(&mut self, i: &'ast Lifetime) { + fn 
visit_lifetime(&mut self, i: &'ast crate::Lifetime) { visit_lifetime(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_lifetime_param(&mut self, i: &'ast LifetimeParam) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_lifetime_param(&mut self, i: &'ast crate::LifetimeParam) { visit_lifetime_param(self, i); } - fn visit_lit(&mut self, i: &'ast Lit) { + fn visit_lit(&mut self, i: &'ast crate::Lit) { visit_lit(self, i); } - fn visit_lit_bool(&mut self, i: &'ast LitBool) { + fn visit_lit_bool(&mut self, i: &'ast crate::LitBool) { visit_lit_bool(self, i); } - fn visit_lit_byte(&mut self, i: &'ast LitByte) { + fn visit_lit_byte(&mut self, i: &'ast crate::LitByte) { visit_lit_byte(self, i); } - fn visit_lit_byte_str(&mut self, i: &'ast LitByteStr) { + fn visit_lit_byte_str(&mut self, i: &'ast crate::LitByteStr) { visit_lit_byte_str(self, i); } - fn visit_lit_char(&mut self, i: &'ast LitChar) { + fn visit_lit_cstr(&mut self, i: &'ast crate::LitCStr) { + visit_lit_cstr(self, i); + } + fn visit_lit_char(&mut self, i: &'ast crate::LitChar) { visit_lit_char(self, i); } - fn visit_lit_float(&mut self, i: &'ast LitFloat) { + fn visit_lit_float(&mut self, i: &'ast crate::LitFloat) { visit_lit_float(self, i); } - fn visit_lit_int(&mut self, i: &'ast LitInt) { + fn visit_lit_int(&mut self, i: &'ast crate::LitInt) { visit_lit_int(self, i); } - fn visit_lit_str(&mut self, i: &'ast LitStr) { + fn visit_lit_str(&mut self, i: &'ast crate::LitStr) { visit_lit_str(self, i); } #[cfg(feature = "full")] - fn visit_local(&mut self, i: &'ast Local) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_local(&mut self, i: &'ast crate::Local) { visit_local(self, i); } #[cfg(feature = "full")] - fn visit_local_init(&mut self, i: &'ast LocalInit) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_local_init(&mut self, i: &'ast crate::LocalInit) { visit_local_init(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_macro(&mut self, i: &'ast Macro) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_macro(&mut self, i: &'ast crate::Macro) { visit_macro(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_macro_delimiter(&mut self, i: &'ast MacroDelimiter) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_macro_delimiter(&mut self, i: &'ast crate::MacroDelimiter) { visit_macro_delimiter(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_member(&mut self, i: &'ast Member) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_member(&mut self, i: &'ast crate::Member) { visit_member(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_meta(&mut self, i: &'ast Meta) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_meta(&mut self, i: &'ast crate::Meta) { visit_meta(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_meta_list(&mut self, i: &'ast MetaList) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_meta_list(&mut self, i: &'ast crate::MetaList) { visit_meta_list(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_meta_name_value(&mut self, i: &'ast MetaNameValue) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_meta_name_value(&mut self, i: &'ast crate::MetaNameValue) { visit_meta_name_value(self, 
i); } #[cfg(any(feature = "derive", feature = "full"))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] fn visit_parenthesized_generic_arguments( &mut self, - i: &'ast ParenthesizedGenericArguments, + i: &'ast crate::ParenthesizedGenericArguments, ) { visit_parenthesized_generic_arguments(self, i); } #[cfg(feature = "full")] - fn visit_pat(&mut self, i: &'ast Pat) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_pat(&mut self, i: &'ast crate::Pat) { visit_pat(self, i); } #[cfg(feature = "full")] - fn visit_pat_ident(&mut self, i: &'ast PatIdent) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_pat_ident(&mut self, i: &'ast crate::PatIdent) { visit_pat_ident(self, i); } #[cfg(feature = "full")] - fn visit_pat_or(&mut self, i: &'ast PatOr) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_pat_or(&mut self, i: &'ast crate::PatOr) { visit_pat_or(self, i); } #[cfg(feature = "full")] - fn visit_pat_paren(&mut self, i: &'ast PatParen) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_pat_paren(&mut self, i: &'ast crate::PatParen) { visit_pat_paren(self, i); } #[cfg(feature = "full")] - fn visit_pat_reference(&mut self, i: &'ast PatReference) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_pat_reference(&mut self, i: &'ast crate::PatReference) { visit_pat_reference(self, i); } #[cfg(feature = "full")] - fn visit_pat_rest(&mut self, i: &'ast PatRest) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_pat_rest(&mut self, i: &'ast crate::PatRest) { visit_pat_rest(self, i); } #[cfg(feature = "full")] - fn visit_pat_slice(&mut self, i: &'ast PatSlice) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_pat_slice(&mut self, i: &'ast crate::PatSlice) { visit_pat_slice(self, i); } #[cfg(feature = "full")] - fn visit_pat_struct(&mut self, i: &'ast PatStruct) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_pat_struct(&mut self, i: &'ast crate::PatStruct) { visit_pat_struct(self, i); } #[cfg(feature = "full")] - fn visit_pat_tuple(&mut self, i: &'ast PatTuple) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_pat_tuple(&mut self, i: &'ast crate::PatTuple) { visit_pat_tuple(self, i); } #[cfg(feature = "full")] - fn visit_pat_tuple_struct(&mut self, i: &'ast PatTupleStruct) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_pat_tuple_struct(&mut self, i: &'ast crate::PatTupleStruct) { visit_pat_tuple_struct(self, i); } #[cfg(feature = "full")] - fn visit_pat_type(&mut self, i: &'ast PatType) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_pat_type(&mut self, i: &'ast crate::PatType) { visit_pat_type(self, i); } #[cfg(feature = "full")] - fn visit_pat_wild(&mut self, i: &'ast PatWild) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_pat_wild(&mut self, i: &'ast crate::PatWild) { visit_pat_wild(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_path(&mut self, i: &'ast Path) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_path(&mut self, i: &'ast crate::Path) { visit_path(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_path_arguments(&mut self, i: &'ast PathArguments) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_path_arguments(&mut self, i: &'ast crate::PathArguments) { visit_path_arguments(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_path_segment(&mut self, i: &'ast 
PathSegment) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_path_segment(&mut self, i: &'ast crate::PathSegment) { visit_path_segment(self, i); } + #[cfg(feature = "full")] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_pointer_mutability(&mut self, i: &'ast crate::PointerMutability) { + visit_pointer_mutability(self, i); + } + #[cfg(feature = "full")] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_precise_capture(&mut self, i: &'ast crate::PreciseCapture) { + visit_precise_capture(self, i); + } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_predicate_lifetime(&mut self, i: &'ast PredicateLifetime) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_predicate_lifetime(&mut self, i: &'ast crate::PredicateLifetime) { visit_predicate_lifetime(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_predicate_type(&mut self, i: &'ast PredicateType) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_predicate_type(&mut self, i: &'ast crate::PredicateType) { visit_predicate_type(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_qself(&mut self, i: &'ast QSelf) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_qself(&mut self, i: &'ast crate::QSelf) { visit_qself(self, i); } #[cfg(feature = "full")] - fn visit_range_limits(&mut self, i: &'ast RangeLimits) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_range_limits(&mut self, i: &'ast crate::RangeLimits) { visit_range_limits(self, i); } #[cfg(feature = "full")] - fn visit_receiver(&mut self, i: &'ast Receiver) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_receiver(&mut self, i: &'ast crate::Receiver) { visit_receiver(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_return_type(&mut self, i: &'ast ReturnType) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_return_type(&mut self, i: &'ast crate::ReturnType) { visit_return_type(self, i); } #[cfg(feature = "full")] - fn visit_signature(&mut self, i: &'ast Signature) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_signature(&mut self, i: &'ast crate::Signature) { visit_signature(self, i); } - fn visit_span(&mut self, i: &Span) { - visit_span(self, i); - } + fn visit_span(&mut self, i: &proc_macro2::Span) {} #[cfg(feature = "full")] - fn visit_static_mutability(&mut self, i: &'ast StaticMutability) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_static_mutability(&mut self, i: &'ast crate::StaticMutability) { visit_static_mutability(self, i); } #[cfg(feature = "full")] - fn visit_stmt(&mut self, i: &'ast Stmt) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_stmt(&mut self, i: &'ast crate::Stmt) { visit_stmt(self, i); } #[cfg(feature = "full")] - fn visit_stmt_macro(&mut self, i: &'ast StmtMacro) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_stmt_macro(&mut self, i: &'ast crate::StmtMacro) { visit_stmt_macro(self, i); } + fn visit_token_stream(&mut self, i: &'ast proc_macro2::TokenStream) {} #[cfg(any(feature = "derive", feature = "full"))] - fn visit_trait_bound(&mut self, i: &'ast TraitBound) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_trait_bound(&mut self, i: &'ast crate::TraitBound) { visit_trait_bound(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn 
visit_trait_bound_modifier(&mut self, i: &'ast TraitBoundModifier) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_trait_bound_modifier(&mut self, i: &'ast crate::TraitBoundModifier) { visit_trait_bound_modifier(self, i); } #[cfg(feature = "full")] - fn visit_trait_item(&mut self, i: &'ast TraitItem) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_trait_item(&mut self, i: &'ast crate::TraitItem) { visit_trait_item(self, i); } #[cfg(feature = "full")] - fn visit_trait_item_const(&mut self, i: &'ast TraitItemConst) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_trait_item_const(&mut self, i: &'ast crate::TraitItemConst) { visit_trait_item_const(self, i); } #[cfg(feature = "full")] - fn visit_trait_item_fn(&mut self, i: &'ast TraitItemFn) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_trait_item_fn(&mut self, i: &'ast crate::TraitItemFn) { visit_trait_item_fn(self, i); } #[cfg(feature = "full")] - fn visit_trait_item_macro(&mut self, i: &'ast TraitItemMacro) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_trait_item_macro(&mut self, i: &'ast crate::TraitItemMacro) { visit_trait_item_macro(self, i); } #[cfg(feature = "full")] - fn visit_trait_item_type(&mut self, i: &'ast TraitItemType) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_trait_item_type(&mut self, i: &'ast crate::TraitItemType) { visit_trait_item_type(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_type(&mut self, i: &'ast Type) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_type(&mut self, i: &'ast crate::Type) { visit_type(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_type_array(&mut self, i: &'ast TypeArray) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_type_array(&mut self, i: &'ast crate::TypeArray) { visit_type_array(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_type_bare_fn(&mut self, i: &'ast TypeBareFn) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_type_bare_fn(&mut self, i: &'ast crate::TypeBareFn) { visit_type_bare_fn(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_type_group(&mut self, i: &'ast TypeGroup) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_type_group(&mut self, i: &'ast crate::TypeGroup) { visit_type_group(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_type_impl_trait(&mut self, i: &'ast TypeImplTrait) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_type_impl_trait(&mut self, i: &'ast crate::TypeImplTrait) { visit_type_impl_trait(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_type_infer(&mut self, i: &'ast TypeInfer) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_type_infer(&mut self, i: &'ast crate::TypeInfer) { visit_type_infer(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_type_macro(&mut self, i: &'ast TypeMacro) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_type_macro(&mut self, i: &'ast crate::TypeMacro) { visit_type_macro(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_type_never(&mut self, i: &'ast TypeNever) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn 
visit_type_never(&mut self, i: &'ast crate::TypeNever) { visit_type_never(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_type_param(&mut self, i: &'ast TypeParam) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_type_param(&mut self, i: &'ast crate::TypeParam) { visit_type_param(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_type_param_bound(&mut self, i: &'ast TypeParamBound) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_type_param_bound(&mut self, i: &'ast crate::TypeParamBound) { visit_type_param_bound(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_type_paren(&mut self, i: &'ast TypeParen) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_type_paren(&mut self, i: &'ast crate::TypeParen) { visit_type_paren(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_type_path(&mut self, i: &'ast TypePath) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_type_path(&mut self, i: &'ast crate::TypePath) { visit_type_path(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_type_ptr(&mut self, i: &'ast TypePtr) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_type_ptr(&mut self, i: &'ast crate::TypePtr) { visit_type_ptr(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_type_reference(&mut self, i: &'ast TypeReference) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_type_reference(&mut self, i: &'ast crate::TypeReference) { visit_type_reference(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_type_slice(&mut self, i: &'ast TypeSlice) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_type_slice(&mut self, i: &'ast crate::TypeSlice) { visit_type_slice(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_type_trait_object(&mut self, i: &'ast TypeTraitObject) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_type_trait_object(&mut self, i: &'ast crate::TypeTraitObject) { visit_type_trait_object(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_type_tuple(&mut self, i: &'ast TypeTuple) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_type_tuple(&mut self, i: &'ast crate::TypeTuple) { visit_type_tuple(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_un_op(&mut self, i: &'ast UnOp) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_un_op(&mut self, i: &'ast crate::UnOp) { visit_un_op(self, i); } #[cfg(feature = "full")] - fn visit_use_glob(&mut self, i: &'ast UseGlob) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_use_glob(&mut self, i: &'ast crate::UseGlob) { visit_use_glob(self, i); } #[cfg(feature = "full")] - fn visit_use_group(&mut self, i: &'ast UseGroup) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_use_group(&mut self, i: &'ast crate::UseGroup) { visit_use_group(self, i); } #[cfg(feature = "full")] - fn visit_use_name(&mut self, i: &'ast UseName) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_use_name(&mut self, i: &'ast crate::UseName) { visit_use_name(self, i); } #[cfg(feature = "full")] - fn visit_use_path(&mut self, i: &'ast UsePath) { + 
#[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_use_path(&mut self, i: &'ast crate::UsePath) { visit_use_path(self, i); } #[cfg(feature = "full")] - fn visit_use_rename(&mut self, i: &'ast UseRename) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_use_rename(&mut self, i: &'ast crate::UseRename) { visit_use_rename(self, i); } #[cfg(feature = "full")] - fn visit_use_tree(&mut self, i: &'ast UseTree) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_use_tree(&mut self, i: &'ast crate::UseTree) { visit_use_tree(self, i); } #[cfg(feature = "full")] - fn visit_variadic(&mut self, i: &'ast Variadic) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_variadic(&mut self, i: &'ast crate::Variadic) { visit_variadic(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_variant(&mut self, i: &'ast Variant) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_variant(&mut self, i: &'ast crate::Variant) { visit_variant(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_vis_restricted(&mut self, i: &'ast VisRestricted) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_vis_restricted(&mut self, i: &'ast crate::VisRestricted) { visit_vis_restricted(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_visibility(&mut self, i: &'ast Visibility) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_visibility(&mut self, i: &'ast crate::Visibility) { visit_visibility(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_where_clause(&mut self, i: &'ast WhereClause) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_where_clause(&mut self, i: &'ast crate::WhereClause) { visit_where_clause(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_where_predicate(&mut self, i: &'ast WherePredicate) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_where_predicate(&mut self, i: &'ast crate::WherePredicate) { visit_where_predicate(self, i); } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_abi<'ast, V>(v: &mut V, node: &'ast Abi) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_abi<'ast, V>(v: &mut V, node: &'ast crate::Abi) where V: Visit<'ast> + ?Sized, { @@ -763,9 +955,10 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] pub fn visit_angle_bracketed_generic_arguments<'ast, V>( v: &mut V, - node: &'ast AngleBracketedGenericArguments, + node: &'ast crate::AngleBracketedGenericArguments, ) where V: Visit<'ast> + ?Sized, @@ -779,7 +972,8 @@ where skip!(node.gt_token); } #[cfg(feature = "full")] -pub fn visit_arm<'ast, V>(v: &mut V, node: &'ast Arm) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_arm<'ast, V>(v: &mut V, node: &'ast crate::Arm) where V: Visit<'ast> + ?Sized, { @@ -796,7 +990,8 @@ where skip!(node.comma); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_assoc_const<'ast, V>(v: &mut V, node: &'ast AssocConst) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_assoc_const<'ast, V>(v: &mut V, node: &'ast crate::AssocConst) where V: Visit<'ast> + ?Sized, { @@ -808,7 +1003,8 @@ where v.visit_expr(&node.value); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn 
visit_assoc_type<'ast, V>(v: &mut V, node: &'ast AssocType) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_assoc_type<'ast, V>(v: &mut V, node: &'ast crate::AssocType) where V: Visit<'ast> + ?Sized, { @@ -820,19 +1016,21 @@ where v.visit_type(&node.ty); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_attr_style<'ast, V>(v: &mut V, node: &'ast AttrStyle) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_attr_style<'ast, V>(v: &mut V, node: &'ast crate::AttrStyle) where V: Visit<'ast> + ?Sized, { match node { - AttrStyle::Outer => {} - AttrStyle::Inner(_binding_0) => { + crate::AttrStyle::Outer => {} + crate::AttrStyle::Inner(_binding_0) => { skip!(_binding_0); } } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_attribute<'ast, V>(v: &mut V, node: &'ast Attribute) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_attribute<'ast, V>(v: &mut V, node: &'ast crate::Attribute) where V: Visit<'ast> + ?Sized, { @@ -842,7 +1040,8 @@ where v.visit_meta(&node.meta); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_bare_fn_arg<'ast, V>(v: &mut V, node: &'ast BareFnArg) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_bare_fn_arg<'ast, V>(v: &mut V, node: &'ast crate::BareFnArg) where V: Visit<'ast> + ?Sized, { @@ -856,7 +1055,8 @@ where v.visit_type(&node.ty); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_bare_variadic<'ast, V>(v: &mut V, node: &'ast BareVariadic) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_bare_variadic<'ast, V>(v: &mut V, node: &'ast crate::BareVariadic) where V: Visit<'ast> + ?Sized, { @@ -871,99 +1071,101 @@ where skip!(node.comma); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_bin_op<'ast, V>(v: &mut V, node: &'ast BinOp) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_bin_op<'ast, V>(v: &mut V, node: &'ast crate::BinOp) where V: Visit<'ast> + ?Sized, { match node { - BinOp::Add(_binding_0) => { + crate::BinOp::Add(_binding_0) => { skip!(_binding_0); } - BinOp::Sub(_binding_0) => { + crate::BinOp::Sub(_binding_0) => { skip!(_binding_0); } - BinOp::Mul(_binding_0) => { + crate::BinOp::Mul(_binding_0) => { skip!(_binding_0); } - BinOp::Div(_binding_0) => { + crate::BinOp::Div(_binding_0) => { skip!(_binding_0); } - BinOp::Rem(_binding_0) => { + crate::BinOp::Rem(_binding_0) => { skip!(_binding_0); } - BinOp::And(_binding_0) => { + crate::BinOp::And(_binding_0) => { skip!(_binding_0); } - BinOp::Or(_binding_0) => { + crate::BinOp::Or(_binding_0) => { skip!(_binding_0); } - BinOp::BitXor(_binding_0) => { + crate::BinOp::BitXor(_binding_0) => { skip!(_binding_0); } - BinOp::BitAnd(_binding_0) => { + crate::BinOp::BitAnd(_binding_0) => { skip!(_binding_0); } - BinOp::BitOr(_binding_0) => { + crate::BinOp::BitOr(_binding_0) => { skip!(_binding_0); } - BinOp::Shl(_binding_0) => { + crate::BinOp::Shl(_binding_0) => { skip!(_binding_0); } - BinOp::Shr(_binding_0) => { + crate::BinOp::Shr(_binding_0) => { skip!(_binding_0); } - BinOp::Eq(_binding_0) => { + crate::BinOp::Eq(_binding_0) => { skip!(_binding_0); } - BinOp::Lt(_binding_0) => { + crate::BinOp::Lt(_binding_0) => { skip!(_binding_0); } - BinOp::Le(_binding_0) => { + crate::BinOp::Le(_binding_0) => { skip!(_binding_0); } - BinOp::Ne(_binding_0) => { + crate::BinOp::Ne(_binding_0) => { skip!(_binding_0); } - 
BinOp::Ge(_binding_0) => { + crate::BinOp::Ge(_binding_0) => { skip!(_binding_0); } - BinOp::Gt(_binding_0) => { + crate::BinOp::Gt(_binding_0) => { skip!(_binding_0); } - BinOp::AddAssign(_binding_0) => { + crate::BinOp::AddAssign(_binding_0) => { skip!(_binding_0); } - BinOp::SubAssign(_binding_0) => { + crate::BinOp::SubAssign(_binding_0) => { skip!(_binding_0); } - BinOp::MulAssign(_binding_0) => { + crate::BinOp::MulAssign(_binding_0) => { skip!(_binding_0); } - BinOp::DivAssign(_binding_0) => { + crate::BinOp::DivAssign(_binding_0) => { skip!(_binding_0); } - BinOp::RemAssign(_binding_0) => { + crate::BinOp::RemAssign(_binding_0) => { skip!(_binding_0); } - BinOp::BitXorAssign(_binding_0) => { + crate::BinOp::BitXorAssign(_binding_0) => { skip!(_binding_0); } - BinOp::BitAndAssign(_binding_0) => { + crate::BinOp::BitAndAssign(_binding_0) => { skip!(_binding_0); } - BinOp::BitOrAssign(_binding_0) => { + crate::BinOp::BitOrAssign(_binding_0) => { skip!(_binding_0); } - BinOp::ShlAssign(_binding_0) => { + crate::BinOp::ShlAssign(_binding_0) => { skip!(_binding_0); } - BinOp::ShrAssign(_binding_0) => { + crate::BinOp::ShrAssign(_binding_0) => { skip!(_binding_0); } } } #[cfg(feature = "full")] -pub fn visit_block<'ast, V>(v: &mut V, node: &'ast Block) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_block<'ast, V>(v: &mut V, node: &'ast crate::Block) where V: Visit<'ast> + ?Sized, { @@ -973,7 +1175,8 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_bound_lifetimes<'ast, V>(v: &mut V, node: &'ast BoundLifetimes) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_bound_lifetimes<'ast, V>(v: &mut V, node: &'ast crate::BoundLifetimes) where V: Visit<'ast> + ?Sized, { @@ -985,8 +1188,24 @@ where } skip!(node.gt_token); } +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_captured_param<'ast, V>(v: &mut V, node: &'ast crate::CapturedParam) +where + V: Visit<'ast> + ?Sized, +{ + match node { + crate::CapturedParam::Lifetime(_binding_0) => { + v.visit_lifetime(_binding_0); + } + crate::CapturedParam::Ident(_binding_0) => { + v.visit_ident(_binding_0); + } + } +} #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_const_param<'ast, V>(v: &mut V, node: &'ast ConstParam) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_const_param<'ast, V>(v: &mut V, node: &'ast crate::ConstParam) where V: Visit<'ast> + ?Sized, { @@ -1003,7 +1222,8 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_constraint<'ast, V>(v: &mut V, node: &'ast Constraint) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_constraint<'ast, V>(v: &mut V, node: &'ast crate::Constraint) where V: Visit<'ast> + ?Sized, { @@ -1018,24 +1238,26 @@ where } } #[cfg(feature = "derive")] -pub fn visit_data<'ast, V>(v: &mut V, node: &'ast Data) +#[cfg_attr(docsrs, doc(cfg(feature = "derive")))] +pub fn visit_data<'ast, V>(v: &mut V, node: &'ast crate::Data) where V: Visit<'ast> + ?Sized, { match node { - Data::Struct(_binding_0) => { + crate::Data::Struct(_binding_0) => { v.visit_data_struct(_binding_0); } - Data::Enum(_binding_0) => { + crate::Data::Enum(_binding_0) => { v.visit_data_enum(_binding_0); } - Data::Union(_binding_0) => { + crate::Data::Union(_binding_0) => { v.visit_data_union(_binding_0); } } } #[cfg(feature = "derive")] -pub fn visit_data_enum<'ast, V>(v: &mut V, node: &'ast DataEnum) +#[cfg_attr(docsrs, 
doc(cfg(feature = "derive")))] +pub fn visit_data_enum<'ast, V>(v: &mut V, node: &'ast crate::DataEnum) where V: Visit<'ast> + ?Sized, { @@ -1047,7 +1269,8 @@ where } } #[cfg(feature = "derive")] -pub fn visit_data_struct<'ast, V>(v: &mut V, node: &'ast DataStruct) +#[cfg_attr(docsrs, doc(cfg(feature = "derive")))] +pub fn visit_data_struct<'ast, V>(v: &mut V, node: &'ast crate::DataStruct) where V: Visit<'ast> + ?Sized, { @@ -1056,7 +1279,8 @@ where skip!(node.semi_token); } #[cfg(feature = "derive")] -pub fn visit_data_union<'ast, V>(v: &mut V, node: &'ast DataUnion) +#[cfg_attr(docsrs, doc(cfg(feature = "derive")))] +pub fn visit_data_union<'ast, V>(v: &mut V, node: &'ast crate::DataUnion) where V: Visit<'ast> + ?Sized, { @@ -1064,7 +1288,8 @@ where v.visit_fields_named(&node.fields); } #[cfg(feature = "derive")] -pub fn visit_derive_input<'ast, V>(v: &mut V, node: &'ast DeriveInput) +#[cfg_attr(docsrs, doc(cfg(feature = "derive")))] +pub fn visit_derive_input<'ast, V>(v: &mut V, node: &'ast crate::DeriveInput) where V: Visit<'ast> + ?Sized, { @@ -1077,132 +1302,137 @@ where v.visit_data(&node.data); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_expr<'ast, V>(v: &mut V, node: &'ast Expr) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_expr<'ast, V>(v: &mut V, node: &'ast crate::Expr) where V: Visit<'ast> + ?Sized, { match node { - Expr::Array(_binding_0) => { + crate::Expr::Array(_binding_0) => { full!(v.visit_expr_array(_binding_0)); } - Expr::Assign(_binding_0) => { + crate::Expr::Assign(_binding_0) => { full!(v.visit_expr_assign(_binding_0)); } - Expr::Async(_binding_0) => { + crate::Expr::Async(_binding_0) => { full!(v.visit_expr_async(_binding_0)); } - Expr::Await(_binding_0) => { + crate::Expr::Await(_binding_0) => { full!(v.visit_expr_await(_binding_0)); } - Expr::Binary(_binding_0) => { + crate::Expr::Binary(_binding_0) => { v.visit_expr_binary(_binding_0); } - Expr::Block(_binding_0) => { + crate::Expr::Block(_binding_0) => { full!(v.visit_expr_block(_binding_0)); } - Expr::Break(_binding_0) => { + crate::Expr::Break(_binding_0) => { full!(v.visit_expr_break(_binding_0)); } - Expr::Call(_binding_0) => { + crate::Expr::Call(_binding_0) => { v.visit_expr_call(_binding_0); } - Expr::Cast(_binding_0) => { + crate::Expr::Cast(_binding_0) => { v.visit_expr_cast(_binding_0); } - Expr::Closure(_binding_0) => { + crate::Expr::Closure(_binding_0) => { full!(v.visit_expr_closure(_binding_0)); } - Expr::Const(_binding_0) => { + crate::Expr::Const(_binding_0) => { full!(v.visit_expr_const(_binding_0)); } - Expr::Continue(_binding_0) => { + crate::Expr::Continue(_binding_0) => { full!(v.visit_expr_continue(_binding_0)); } - Expr::Field(_binding_0) => { + crate::Expr::Field(_binding_0) => { v.visit_expr_field(_binding_0); } - Expr::ForLoop(_binding_0) => { + crate::Expr::ForLoop(_binding_0) => { full!(v.visit_expr_for_loop(_binding_0)); } - Expr::Group(_binding_0) => { + crate::Expr::Group(_binding_0) => { v.visit_expr_group(_binding_0); } - Expr::If(_binding_0) => { + crate::Expr::If(_binding_0) => { full!(v.visit_expr_if(_binding_0)); } - Expr::Index(_binding_0) => { + crate::Expr::Index(_binding_0) => { v.visit_expr_index(_binding_0); } - Expr::Infer(_binding_0) => { + crate::Expr::Infer(_binding_0) => { full!(v.visit_expr_infer(_binding_0)); } - Expr::Let(_binding_0) => { + crate::Expr::Let(_binding_0) => { full!(v.visit_expr_let(_binding_0)); } - Expr::Lit(_binding_0) => { + crate::Expr::Lit(_binding_0) => { 
v.visit_expr_lit(_binding_0); } - Expr::Loop(_binding_0) => { + crate::Expr::Loop(_binding_0) => { full!(v.visit_expr_loop(_binding_0)); } - Expr::Macro(_binding_0) => { + crate::Expr::Macro(_binding_0) => { v.visit_expr_macro(_binding_0); } - Expr::Match(_binding_0) => { + crate::Expr::Match(_binding_0) => { full!(v.visit_expr_match(_binding_0)); } - Expr::MethodCall(_binding_0) => { - full!(v.visit_expr_method_call(_binding_0)); + crate::Expr::MethodCall(_binding_0) => { + v.visit_expr_method_call(_binding_0); } - Expr::Paren(_binding_0) => { + crate::Expr::Paren(_binding_0) => { v.visit_expr_paren(_binding_0); } - Expr::Path(_binding_0) => { + crate::Expr::Path(_binding_0) => { v.visit_expr_path(_binding_0); } - Expr::Range(_binding_0) => { + crate::Expr::Range(_binding_0) => { full!(v.visit_expr_range(_binding_0)); } - Expr::Reference(_binding_0) => { - full!(v.visit_expr_reference(_binding_0)); + crate::Expr::RawAddr(_binding_0) => { + full!(v.visit_expr_raw_addr(_binding_0)); + } + crate::Expr::Reference(_binding_0) => { + v.visit_expr_reference(_binding_0); } - Expr::Repeat(_binding_0) => { + crate::Expr::Repeat(_binding_0) => { full!(v.visit_expr_repeat(_binding_0)); } - Expr::Return(_binding_0) => { + crate::Expr::Return(_binding_0) => { full!(v.visit_expr_return(_binding_0)); } - Expr::Struct(_binding_0) => { - full!(v.visit_expr_struct(_binding_0)); + crate::Expr::Struct(_binding_0) => { + v.visit_expr_struct(_binding_0); } - Expr::Try(_binding_0) => { + crate::Expr::Try(_binding_0) => { full!(v.visit_expr_try(_binding_0)); } - Expr::TryBlock(_binding_0) => { + crate::Expr::TryBlock(_binding_0) => { full!(v.visit_expr_try_block(_binding_0)); } - Expr::Tuple(_binding_0) => { - full!(v.visit_expr_tuple(_binding_0)); + crate::Expr::Tuple(_binding_0) => { + v.visit_expr_tuple(_binding_0); } - Expr::Unary(_binding_0) => { + crate::Expr::Unary(_binding_0) => { v.visit_expr_unary(_binding_0); } - Expr::Unsafe(_binding_0) => { + crate::Expr::Unsafe(_binding_0) => { full!(v.visit_expr_unsafe(_binding_0)); } - Expr::Verbatim(_binding_0) => { - skip!(_binding_0); + crate::Expr::Verbatim(_binding_0) => { + v.visit_token_stream(_binding_0); } - Expr::While(_binding_0) => { + crate::Expr::While(_binding_0) => { full!(v.visit_expr_while(_binding_0)); } - Expr::Yield(_binding_0) => { + crate::Expr::Yield(_binding_0) => { full!(v.visit_expr_yield(_binding_0)); } } } #[cfg(feature = "full")] -pub fn visit_expr_array<'ast, V>(v: &mut V, node: &'ast ExprArray) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_array<'ast, V>(v: &mut V, node: &'ast crate::ExprArray) where V: Visit<'ast> + ?Sized, { @@ -1216,7 +1446,8 @@ where } } #[cfg(feature = "full")] -pub fn visit_expr_assign<'ast, V>(v: &mut V, node: &'ast ExprAssign) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_assign<'ast, V>(v: &mut V, node: &'ast crate::ExprAssign) where V: Visit<'ast> + ?Sized, { @@ -1228,7 +1459,8 @@ where v.visit_expr(&*node.right); } #[cfg(feature = "full")] -pub fn visit_expr_async<'ast, V>(v: &mut V, node: &'ast ExprAsync) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_async<'ast, V>(v: &mut V, node: &'ast crate::ExprAsync) where V: Visit<'ast> + ?Sized, { @@ -1240,7 +1472,8 @@ where v.visit_block(&node.block); } #[cfg(feature = "full")] -pub fn visit_expr_await<'ast, V>(v: &mut V, node: &'ast ExprAwait) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_await<'ast, V>(v: &mut V, node: &'ast crate::ExprAwait) where V: Visit<'ast> + ?Sized, { 
@@ -1252,7 +1485,8 @@ where skip!(node.await_token); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_expr_binary<'ast, V>(v: &mut V, node: &'ast ExprBinary) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_expr_binary<'ast, V>(v: &mut V, node: &'ast crate::ExprBinary) where V: Visit<'ast> + ?Sized, { @@ -1264,7 +1498,8 @@ where v.visit_expr(&*node.right); } #[cfg(feature = "full")] -pub fn visit_expr_block<'ast, V>(v: &mut V, node: &'ast ExprBlock) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_block<'ast, V>(v: &mut V, node: &'ast crate::ExprBlock) where V: Visit<'ast> + ?Sized, { @@ -1277,7 +1512,8 @@ where v.visit_block(&node.block); } #[cfg(feature = "full")] -pub fn visit_expr_break<'ast, V>(v: &mut V, node: &'ast ExprBreak) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_break<'ast, V>(v: &mut V, node: &'ast crate::ExprBreak) where V: Visit<'ast> + ?Sized, { @@ -1293,7 +1529,8 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_expr_call<'ast, V>(v: &mut V, node: &'ast ExprCall) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_expr_call<'ast, V>(v: &mut V, node: &'ast crate::ExprCall) where V: Visit<'ast> + ?Sized, { @@ -1308,7 +1545,8 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_expr_cast<'ast, V>(v: &mut V, node: &'ast ExprCast) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_expr_cast<'ast, V>(v: &mut V, node: &'ast crate::ExprCast) where V: Visit<'ast> + ?Sized, { @@ -1320,7 +1558,8 @@ where v.visit_type(&*node.ty); } #[cfg(feature = "full")] -pub fn visit_expr_closure<'ast, V>(v: &mut V, node: &'ast ExprClosure) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_closure<'ast, V>(v: &mut V, node: &'ast crate::ExprClosure) where V: Visit<'ast> + ?Sized, { @@ -1344,7 +1583,8 @@ where v.visit_expr(&*node.body); } #[cfg(feature = "full")] -pub fn visit_expr_const<'ast, V>(v: &mut V, node: &'ast ExprConst) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_const<'ast, V>(v: &mut V, node: &'ast crate::ExprConst) where V: Visit<'ast> + ?Sized, { @@ -1355,7 +1595,8 @@ where v.visit_block(&node.block); } #[cfg(feature = "full")] -pub fn visit_expr_continue<'ast, V>(v: &mut V, node: &'ast ExprContinue) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_continue<'ast, V>(v: &mut V, node: &'ast crate::ExprContinue) where V: Visit<'ast> + ?Sized, { @@ -1368,7 +1609,8 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_expr_field<'ast, V>(v: &mut V, node: &'ast ExprField) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_expr_field<'ast, V>(v: &mut V, node: &'ast crate::ExprField) where V: Visit<'ast> + ?Sized, { @@ -1380,7 +1622,8 @@ where v.visit_member(&node.member); } #[cfg(feature = "full")] -pub fn visit_expr_for_loop<'ast, V>(v: &mut V, node: &'ast ExprForLoop) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_for_loop<'ast, V>(v: &mut V, node: &'ast crate::ExprForLoop) where V: Visit<'ast> + ?Sized, { @@ -1397,7 +1640,8 @@ where v.visit_block(&node.body); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_expr_group<'ast, V>(v: &mut V, node: &'ast ExprGroup) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_expr_group<'ast, V>(v: &mut V, node: &'ast crate::ExprGroup) where 
V: Visit<'ast> + ?Sized, { @@ -1408,7 +1652,8 @@ where v.visit_expr(&*node.expr); } #[cfg(feature = "full")] -pub fn visit_expr_if<'ast, V>(v: &mut V, node: &'ast ExprIf) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_if<'ast, V>(v: &mut V, node: &'ast crate::ExprIf) where V: Visit<'ast> + ?Sized, { @@ -1424,7 +1669,8 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_expr_index<'ast, V>(v: &mut V, node: &'ast ExprIndex) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_expr_index<'ast, V>(v: &mut V, node: &'ast crate::ExprIndex) where V: Visit<'ast> + ?Sized, { @@ -1436,7 +1682,8 @@ where v.visit_expr(&*node.index); } #[cfg(feature = "full")] -pub fn visit_expr_infer<'ast, V>(v: &mut V, node: &'ast ExprInfer) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_infer<'ast, V>(v: &mut V, node: &'ast crate::ExprInfer) where V: Visit<'ast> + ?Sized, { @@ -1446,7 +1693,8 @@ where skip!(node.underscore_token); } #[cfg(feature = "full")] -pub fn visit_expr_let<'ast, V>(v: &mut V, node: &'ast ExprLet) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_let<'ast, V>(v: &mut V, node: &'ast crate::ExprLet) where V: Visit<'ast> + ?Sized, { @@ -1459,7 +1707,8 @@ where v.visit_expr(&*node.expr); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_expr_lit<'ast, V>(v: &mut V, node: &'ast ExprLit) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_expr_lit<'ast, V>(v: &mut V, node: &'ast crate::ExprLit) where V: Visit<'ast> + ?Sized, { @@ -1469,7 +1718,8 @@ where v.visit_lit(&node.lit); } #[cfg(feature = "full")] -pub fn visit_expr_loop<'ast, V>(v: &mut V, node: &'ast ExprLoop) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_loop<'ast, V>(v: &mut V, node: &'ast crate::ExprLoop) where V: Visit<'ast> + ?Sized, { @@ -1483,7 +1733,8 @@ where v.visit_block(&node.body); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_expr_macro<'ast, V>(v: &mut V, node: &'ast ExprMacro) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_expr_macro<'ast, V>(v: &mut V, node: &'ast crate::ExprMacro) where V: Visit<'ast> + ?Sized, { @@ -1493,7 +1744,8 @@ where v.visit_macro(&node.mac); } #[cfg(feature = "full")] -pub fn visit_expr_match<'ast, V>(v: &mut V, node: &'ast ExprMatch) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_match<'ast, V>(v: &mut V, node: &'ast crate::ExprMatch) where V: Visit<'ast> + ?Sized, { @@ -1507,8 +1759,9 @@ where v.visit_arm(it); } } -#[cfg(feature = "full")] -pub fn visit_expr_method_call<'ast, V>(v: &mut V, node: &'ast ExprMethodCall) +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_expr_method_call<'ast, V>(v: &mut V, node: &'ast crate::ExprMethodCall) where V: Visit<'ast> + ?Sized, { @@ -1528,7 +1781,8 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_expr_paren<'ast, V>(v: &mut V, node: &'ast ExprParen) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_expr_paren<'ast, V>(v: &mut V, node: &'ast crate::ExprParen) where V: Visit<'ast> + ?Sized, { @@ -1539,7 +1793,8 @@ where v.visit_expr(&*node.expr); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_expr_path<'ast, V>(v: &mut V, node: &'ast ExprPath) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] 
+pub fn visit_expr_path<'ast, V>(v: &mut V, node: &'ast crate::ExprPath) where V: Visit<'ast> + ?Sized, { @@ -1552,7 +1807,8 @@ where v.visit_path(&node.path); } #[cfg(feature = "full")] -pub fn visit_expr_range<'ast, V>(v: &mut V, node: &'ast ExprRange) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_range<'ast, V>(v: &mut V, node: &'ast crate::ExprRange) where V: Visit<'ast> + ?Sized, { @@ -1568,7 +1824,22 @@ where } } #[cfg(feature = "full")] -pub fn visit_expr_reference<'ast, V>(v: &mut V, node: &'ast ExprReference) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_raw_addr<'ast, V>(v: &mut V, node: &'ast crate::ExprRawAddr) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + skip!(node.and_token); + skip!(node.raw); + v.visit_pointer_mutability(&node.mutability); + v.visit_expr(&*node.expr); +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_expr_reference<'ast, V>(v: &mut V, node: &'ast crate::ExprReference) where V: Visit<'ast> + ?Sized, { @@ -1580,7 +1851,8 @@ where v.visit_expr(&*node.expr); } #[cfg(feature = "full")] -pub fn visit_expr_repeat<'ast, V>(v: &mut V, node: &'ast ExprRepeat) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_repeat<'ast, V>(v: &mut V, node: &'ast crate::ExprRepeat) where V: Visit<'ast> + ?Sized, { @@ -1593,7 +1865,8 @@ where v.visit_expr(&*node.len); } #[cfg(feature = "full")] -pub fn visit_expr_return<'ast, V>(v: &mut V, node: &'ast ExprReturn) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_return<'ast, V>(v: &mut V, node: &'ast crate::ExprReturn) where V: Visit<'ast> + ?Sized, { @@ -1605,8 +1878,9 @@ where v.visit_expr(&**it); } } -#[cfg(feature = "full")] -pub fn visit_expr_struct<'ast, V>(v: &mut V, node: &'ast ExprStruct) +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_expr_struct<'ast, V>(v: &mut V, node: &'ast crate::ExprStruct) where V: Visit<'ast> + ?Sized, { @@ -1628,7 +1902,8 @@ where } } #[cfg(feature = "full")] -pub fn visit_expr_try<'ast, V>(v: &mut V, node: &'ast ExprTry) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_try<'ast, V>(v: &mut V, node: &'ast crate::ExprTry) where V: Visit<'ast> + ?Sized, { @@ -1639,7 +1914,8 @@ where skip!(node.question_token); } #[cfg(feature = "full")] -pub fn visit_expr_try_block<'ast, V>(v: &mut V, node: &'ast ExprTryBlock) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_try_block<'ast, V>(v: &mut V, node: &'ast crate::ExprTryBlock) where V: Visit<'ast> + ?Sized, { @@ -1649,8 +1925,9 @@ where skip!(node.try_token); v.visit_block(&node.block); } -#[cfg(feature = "full")] -pub fn visit_expr_tuple<'ast, V>(v: &mut V, node: &'ast ExprTuple) +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_expr_tuple<'ast, V>(v: &mut V, node: &'ast crate::ExprTuple) where V: Visit<'ast> + ?Sized, { @@ -1664,7 +1941,8 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_expr_unary<'ast, V>(v: &mut V, node: &'ast ExprUnary) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_expr_unary<'ast, V>(v: &mut V, node: &'ast crate::ExprUnary) where V: Visit<'ast> + ?Sized, { @@ -1675,7 +1953,8 @@ where v.visit_expr(&*node.expr); } #[cfg(feature = "full")] 
-pub fn visit_expr_unsafe<'ast, V>(v: &mut V, node: &'ast ExprUnsafe) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_unsafe<'ast, V>(v: &mut V, node: &'ast crate::ExprUnsafe) where V: Visit<'ast> + ?Sized, { @@ -1686,7 +1965,8 @@ where v.visit_block(&node.block); } #[cfg(feature = "full")] -pub fn visit_expr_while<'ast, V>(v: &mut V, node: &'ast ExprWhile) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_while<'ast, V>(v: &mut V, node: &'ast crate::ExprWhile) where V: Visit<'ast> + ?Sized, { @@ -1701,7 +1981,8 @@ where v.visit_block(&node.body); } #[cfg(feature = "full")] -pub fn visit_expr_yield<'ast, V>(v: &mut V, node: &'ast ExprYield) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_yield<'ast, V>(v: &mut V, node: &'ast crate::ExprYield) where V: Visit<'ast> + ?Sized, { @@ -1714,7 +1995,8 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_field<'ast, V>(v: &mut V, node: &'ast Field) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_field<'ast, V>(v: &mut V, node: &'ast crate::Field) where V: Visit<'ast> + ?Sized, { @@ -1730,16 +2012,18 @@ where v.visit_type(&node.ty); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_field_mutability<'ast, V>(v: &mut V, node: &'ast FieldMutability) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_field_mutability<'ast, V>(v: &mut V, node: &'ast crate::FieldMutability) where V: Visit<'ast> + ?Sized, { match node { - FieldMutability::None => {} + crate::FieldMutability::None => {} } } #[cfg(feature = "full")] -pub fn visit_field_pat<'ast, V>(v: &mut V, node: &'ast FieldPat) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_field_pat<'ast, V>(v: &mut V, node: &'ast crate::FieldPat) where V: Visit<'ast> + ?Sized, { @@ -1750,8 +2034,9 @@ where skip!(node.colon_token); v.visit_pat(&*node.pat); } -#[cfg(feature = "full")] -pub fn visit_field_value<'ast, V>(v: &mut V, node: &'ast FieldValue) +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_field_value<'ast, V>(v: &mut V, node: &'ast crate::FieldValue) where V: Visit<'ast> + ?Sized, { @@ -1763,22 +2048,24 @@ where v.visit_expr(&node.expr); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_fields<'ast, V>(v: &mut V, node: &'ast Fields) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_fields<'ast, V>(v: &mut V, node: &'ast crate::Fields) where V: Visit<'ast> + ?Sized, { match node { - Fields::Named(_binding_0) => { + crate::Fields::Named(_binding_0) => { v.visit_fields_named(_binding_0); } - Fields::Unnamed(_binding_0) => { + crate::Fields::Unnamed(_binding_0) => { v.visit_fields_unnamed(_binding_0); } - Fields::Unit => {} + crate::Fields::Unit => {} } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_fields_named<'ast, V>(v: &mut V, node: &'ast FieldsNamed) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_fields_named<'ast, V>(v: &mut V, node: &'ast crate::FieldsNamed) where V: Visit<'ast> + ?Sized, { @@ -1789,7 +2076,8 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_fields_unnamed<'ast, V>(v: &mut V, node: &'ast FieldsUnnamed) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_fields_unnamed<'ast, V>(v: &mut V, node: &'ast crate::FieldsUnnamed) where V: 
Visit<'ast> + ?Sized, { @@ -1800,7 +2088,8 @@ where } } #[cfg(feature = "full")] -pub fn visit_file<'ast, V>(v: &mut V, node: &'ast File) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_file<'ast, V>(v: &mut V, node: &'ast crate::File) where V: Visit<'ast> + ?Sized, { @@ -1813,44 +2102,47 @@ where } } #[cfg(feature = "full")] -pub fn visit_fn_arg<'ast, V>(v: &mut V, node: &'ast FnArg) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_fn_arg<'ast, V>(v: &mut V, node: &'ast crate::FnArg) where V: Visit<'ast> + ?Sized, { match node { - FnArg::Receiver(_binding_0) => { + crate::FnArg::Receiver(_binding_0) => { v.visit_receiver(_binding_0); } - FnArg::Typed(_binding_0) => { + crate::FnArg::Typed(_binding_0) => { v.visit_pat_type(_binding_0); } } } #[cfg(feature = "full")] -pub fn visit_foreign_item<'ast, V>(v: &mut V, node: &'ast ForeignItem) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_foreign_item<'ast, V>(v: &mut V, node: &'ast crate::ForeignItem) where V: Visit<'ast> + ?Sized, { match node { - ForeignItem::Fn(_binding_0) => { + crate::ForeignItem::Fn(_binding_0) => { v.visit_foreign_item_fn(_binding_0); } - ForeignItem::Static(_binding_0) => { + crate::ForeignItem::Static(_binding_0) => { v.visit_foreign_item_static(_binding_0); } - ForeignItem::Type(_binding_0) => { + crate::ForeignItem::Type(_binding_0) => { v.visit_foreign_item_type(_binding_0); } - ForeignItem::Macro(_binding_0) => { + crate::ForeignItem::Macro(_binding_0) => { v.visit_foreign_item_macro(_binding_0); } - ForeignItem::Verbatim(_binding_0) => { - skip!(_binding_0); + crate::ForeignItem::Verbatim(_binding_0) => { + v.visit_token_stream(_binding_0); } } } #[cfg(feature = "full")] -pub fn visit_foreign_item_fn<'ast, V>(v: &mut V, node: &'ast ForeignItemFn) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_foreign_item_fn<'ast, V>(v: &mut V, node: &'ast crate::ForeignItemFn) where V: Visit<'ast> + ?Sized, { @@ -1862,7 +2154,8 @@ where skip!(node.semi_token); } #[cfg(feature = "full")] -pub fn visit_foreign_item_macro<'ast, V>(v: &mut V, node: &'ast ForeignItemMacro) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_foreign_item_macro<'ast, V>(v: &mut V, node: &'ast crate::ForeignItemMacro) where V: Visit<'ast> + ?Sized, { @@ -1873,7 +2166,11 @@ where skip!(node.semi_token); } #[cfg(feature = "full")] -pub fn visit_foreign_item_static<'ast, V>(v: &mut V, node: &'ast ForeignItemStatic) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_foreign_item_static<'ast, V>( + v: &mut V, + node: &'ast crate::ForeignItemStatic, +) where V: Visit<'ast> + ?Sized, { @@ -1889,7 +2186,8 @@ where skip!(node.semi_token); } #[cfg(feature = "full")] -pub fn visit_foreign_item_type<'ast, V>(v: &mut V, node: &'ast ForeignItemType) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_foreign_item_type<'ast, V>(v: &mut V, node: &'ast crate::ForeignItemType) where V: Visit<'ast> + ?Sized, { @@ -1903,50 +2201,53 @@ where skip!(node.semi_token); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_generic_argument<'ast, V>(v: &mut V, node: &'ast GenericArgument) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_generic_argument<'ast, V>(v: &mut V, node: &'ast crate::GenericArgument) where V: Visit<'ast> + ?Sized, { match node { - GenericArgument::Lifetime(_binding_0) => { + crate::GenericArgument::Lifetime(_binding_0) => { v.visit_lifetime(_binding_0); } - GenericArgument::Type(_binding_0) => { + 
crate::GenericArgument::Type(_binding_0) => { v.visit_type(_binding_0); } - GenericArgument::Const(_binding_0) => { + crate::GenericArgument::Const(_binding_0) => { v.visit_expr(_binding_0); } - GenericArgument::AssocType(_binding_0) => { + crate::GenericArgument::AssocType(_binding_0) => { v.visit_assoc_type(_binding_0); } - GenericArgument::AssocConst(_binding_0) => { + crate::GenericArgument::AssocConst(_binding_0) => { v.visit_assoc_const(_binding_0); } - GenericArgument::Constraint(_binding_0) => { + crate::GenericArgument::Constraint(_binding_0) => { v.visit_constraint(_binding_0); } } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_generic_param<'ast, V>(v: &mut V, node: &'ast GenericParam) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_generic_param<'ast, V>(v: &mut V, node: &'ast crate::GenericParam) where V: Visit<'ast> + ?Sized, { match node { - GenericParam::Lifetime(_binding_0) => { + crate::GenericParam::Lifetime(_binding_0) => { v.visit_lifetime_param(_binding_0); } - GenericParam::Type(_binding_0) => { + crate::GenericParam::Type(_binding_0) => { v.visit_type_param(_binding_0); } - GenericParam::Const(_binding_0) => { + crate::GenericParam::Const(_binding_0) => { v.visit_const_param(_binding_0); } } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_generics<'ast, V>(v: &mut V, node: &'ast Generics) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_generics<'ast, V>(v: &mut V, node: &'ast crate::Generics) where V: Visit<'ast> + ?Sized, { @@ -1960,37 +2261,39 @@ where v.visit_where_clause(it); } } -pub fn visit_ident<'ast, V>(v: &mut V, node: &'ast Ident) +pub fn visit_ident<'ast, V>(v: &mut V, node: &'ast proc_macro2::Ident) where V: Visit<'ast> + ?Sized, { v.visit_span(&node.span()); } #[cfg(feature = "full")] -pub fn visit_impl_item<'ast, V>(v: &mut V, node: &'ast ImplItem) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_impl_item<'ast, V>(v: &mut V, node: &'ast crate::ImplItem) where V: Visit<'ast> + ?Sized, { match node { - ImplItem::Const(_binding_0) => { + crate::ImplItem::Const(_binding_0) => { v.visit_impl_item_const(_binding_0); } - ImplItem::Fn(_binding_0) => { + crate::ImplItem::Fn(_binding_0) => { v.visit_impl_item_fn(_binding_0); } - ImplItem::Type(_binding_0) => { + crate::ImplItem::Type(_binding_0) => { v.visit_impl_item_type(_binding_0); } - ImplItem::Macro(_binding_0) => { + crate::ImplItem::Macro(_binding_0) => { v.visit_impl_item_macro(_binding_0); } - ImplItem::Verbatim(_binding_0) => { - skip!(_binding_0); + crate::ImplItem::Verbatim(_binding_0) => { + v.visit_token_stream(_binding_0); } } } #[cfg(feature = "full")] -pub fn visit_impl_item_const<'ast, V>(v: &mut V, node: &'ast ImplItemConst) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_impl_item_const<'ast, V>(v: &mut V, node: &'ast crate::ImplItemConst) where V: Visit<'ast> + ?Sized, { @@ -2009,7 +2312,8 @@ where skip!(node.semi_token); } #[cfg(feature = "full")] -pub fn visit_impl_item_fn<'ast, V>(v: &mut V, node: &'ast ImplItemFn) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_impl_item_fn<'ast, V>(v: &mut V, node: &'ast crate::ImplItemFn) where V: Visit<'ast> + ?Sized, { @@ -2022,7 +2326,8 @@ where v.visit_block(&node.block); } #[cfg(feature = "full")] -pub fn visit_impl_item_macro<'ast, V>(v: &mut V, node: &'ast ImplItemMacro) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_impl_item_macro<'ast, V>(v: &mut V, node: &'ast 
crate::ImplItemMacro) where V: Visit<'ast> + ?Sized, { @@ -2033,7 +2338,8 @@ where skip!(node.semi_token); } #[cfg(feature = "full")] -pub fn visit_impl_item_type<'ast, V>(v: &mut V, node: &'ast ImplItemType) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_impl_item_type<'ast, V>(v: &mut V, node: &'ast crate::ImplItemType) where V: Visit<'ast> + ?Sized, { @@ -2050,14 +2356,16 @@ where skip!(node.semi_token); } #[cfg(feature = "full")] -pub fn visit_impl_restriction<'ast, V>(v: &mut V, node: &'ast ImplRestriction) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_impl_restriction<'ast, V>(v: &mut V, node: &'ast crate::ImplRestriction) where V: Visit<'ast> + ?Sized, { match *node {} } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_index<'ast, V>(v: &mut V, node: &'ast Index) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_index<'ast, V>(v: &mut V, node: &'ast crate::Index) where V: Visit<'ast> + ?Sized, { @@ -2065,63 +2373,65 @@ where v.visit_span(&node.span); } #[cfg(feature = "full")] -pub fn visit_item<'ast, V>(v: &mut V, node: &'ast Item) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_item<'ast, V>(v: &mut V, node: &'ast crate::Item) where V: Visit<'ast> + ?Sized, { match node { - Item::Const(_binding_0) => { + crate::Item::Const(_binding_0) => { v.visit_item_const(_binding_0); } - Item::Enum(_binding_0) => { + crate::Item::Enum(_binding_0) => { v.visit_item_enum(_binding_0); } - Item::ExternCrate(_binding_0) => { + crate::Item::ExternCrate(_binding_0) => { v.visit_item_extern_crate(_binding_0); } - Item::Fn(_binding_0) => { + crate::Item::Fn(_binding_0) => { v.visit_item_fn(_binding_0); } - Item::ForeignMod(_binding_0) => { + crate::Item::ForeignMod(_binding_0) => { v.visit_item_foreign_mod(_binding_0); } - Item::Impl(_binding_0) => { + crate::Item::Impl(_binding_0) => { v.visit_item_impl(_binding_0); } - Item::Macro(_binding_0) => { + crate::Item::Macro(_binding_0) => { v.visit_item_macro(_binding_0); } - Item::Mod(_binding_0) => { + crate::Item::Mod(_binding_0) => { v.visit_item_mod(_binding_0); } - Item::Static(_binding_0) => { + crate::Item::Static(_binding_0) => { v.visit_item_static(_binding_0); } - Item::Struct(_binding_0) => { + crate::Item::Struct(_binding_0) => { v.visit_item_struct(_binding_0); } - Item::Trait(_binding_0) => { + crate::Item::Trait(_binding_0) => { v.visit_item_trait(_binding_0); } - Item::TraitAlias(_binding_0) => { + crate::Item::TraitAlias(_binding_0) => { v.visit_item_trait_alias(_binding_0); } - Item::Type(_binding_0) => { + crate::Item::Type(_binding_0) => { v.visit_item_type(_binding_0); } - Item::Union(_binding_0) => { + crate::Item::Union(_binding_0) => { v.visit_item_union(_binding_0); } - Item::Use(_binding_0) => { + crate::Item::Use(_binding_0) => { v.visit_item_use(_binding_0); } - Item::Verbatim(_binding_0) => { - skip!(_binding_0); + crate::Item::Verbatim(_binding_0) => { + v.visit_token_stream(_binding_0); } } } #[cfg(feature = "full")] -pub fn visit_item_const<'ast, V>(v: &mut V, node: &'ast ItemConst) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_item_const<'ast, V>(v: &mut V, node: &'ast crate::ItemConst) where V: Visit<'ast> + ?Sized, { @@ -2139,7 +2449,8 @@ where skip!(node.semi_token); } #[cfg(feature = "full")] -pub fn visit_item_enum<'ast, V>(v: &mut V, node: &'ast ItemEnum) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_item_enum<'ast, V>(v: &mut V, node: &'ast crate::ItemEnum) where V: Visit<'ast> + 
?Sized, { @@ -2157,7 +2468,8 @@ where } } #[cfg(feature = "full")] -pub fn visit_item_extern_crate<'ast, V>(v: &mut V, node: &'ast ItemExternCrate) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_item_extern_crate<'ast, V>(v: &mut V, node: &'ast crate::ItemExternCrate) where V: Visit<'ast> + ?Sized, { @@ -2175,7 +2487,8 @@ where skip!(node.semi_token); } #[cfg(feature = "full")] -pub fn visit_item_fn<'ast, V>(v: &mut V, node: &'ast ItemFn) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_item_fn<'ast, V>(v: &mut V, node: &'ast crate::ItemFn) where V: Visit<'ast> + ?Sized, { @@ -2187,7 +2500,8 @@ where v.visit_block(&*node.block); } #[cfg(feature = "full")] -pub fn visit_item_foreign_mod<'ast, V>(v: &mut V, node: &'ast ItemForeignMod) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_item_foreign_mod<'ast, V>(v: &mut V, node: &'ast crate::ItemForeignMod) where V: Visit<'ast> + ?Sized, { @@ -2202,7 +2516,8 @@ where } } #[cfg(feature = "full")] -pub fn visit_item_impl<'ast, V>(v: &mut V, node: &'ast ItemImpl) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_item_impl<'ast, V>(v: &mut V, node: &'ast crate::ItemImpl) where V: Visit<'ast> + ?Sized, { @@ -2225,7 +2540,8 @@ where } } #[cfg(feature = "full")] -pub fn visit_item_macro<'ast, V>(v: &mut V, node: &'ast ItemMacro) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_item_macro<'ast, V>(v: &mut V, node: &'ast crate::ItemMacro) where V: Visit<'ast> + ?Sized, { @@ -2239,7 +2555,8 @@ where skip!(node.semi_token); } #[cfg(feature = "full")] -pub fn visit_item_mod<'ast, V>(v: &mut V, node: &'ast ItemMod) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_item_mod<'ast, V>(v: &mut V, node: &'ast crate::ItemMod) where V: Visit<'ast> + ?Sized, { @@ -2259,7 +2576,8 @@ where skip!(node.semi); } #[cfg(feature = "full")] -pub fn visit_item_static<'ast, V>(v: &mut V, node: &'ast ItemStatic) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_item_static<'ast, V>(v: &mut V, node: &'ast crate::ItemStatic) where V: Visit<'ast> + ?Sized, { @@ -2277,7 +2595,8 @@ where skip!(node.semi_token); } #[cfg(feature = "full")] -pub fn visit_item_struct<'ast, V>(v: &mut V, node: &'ast ItemStruct) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_item_struct<'ast, V>(v: &mut V, node: &'ast crate::ItemStruct) where V: Visit<'ast> + ?Sized, { @@ -2292,7 +2611,8 @@ where skip!(node.semi_token); } #[cfg(feature = "full")] -pub fn visit_item_trait<'ast, V>(v: &mut V, node: &'ast ItemTrait) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_item_trait<'ast, V>(v: &mut V, node: &'ast crate::ItemTrait) where V: Visit<'ast> + ?Sized, { @@ -2319,7 +2639,8 @@ where } } #[cfg(feature = "full")] -pub fn visit_item_trait_alias<'ast, V>(v: &mut V, node: &'ast ItemTraitAlias) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_item_trait_alias<'ast, V>(v: &mut V, node: &'ast crate::ItemTraitAlias) where V: Visit<'ast> + ?Sized, { @@ -2338,7 +2659,8 @@ where skip!(node.semi_token); } #[cfg(feature = "full")] -pub fn visit_item_type<'ast, V>(v: &mut V, node: &'ast ItemType) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_item_type<'ast, V>(v: &mut V, node: &'ast crate::ItemType) where V: Visit<'ast> + ?Sized, { @@ -2354,7 +2676,8 @@ where skip!(node.semi_token); } #[cfg(feature = "full")] -pub fn visit_item_union<'ast, V>(v: &mut V, node: &'ast ItemUnion) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_item_union<'ast, V>(v: &mut 
V, node: &'ast crate::ItemUnion) where V: Visit<'ast> + ?Sized, { @@ -2368,7 +2691,8 @@ where v.visit_fields_named(&node.fields); } #[cfg(feature = "full")] -pub fn visit_item_use<'ast, V>(v: &mut V, node: &'ast ItemUse) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_item_use<'ast, V>(v: &mut V, node: &'ast crate::ItemUse) where V: Visit<'ast> + ?Sized, { @@ -2382,14 +2706,15 @@ where skip!(node.semi_token); } #[cfg(feature = "full")] -pub fn visit_label<'ast, V>(v: &mut V, node: &'ast Label) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_label<'ast, V>(v: &mut V, node: &'ast crate::Label) where V: Visit<'ast> + ?Sized, { v.visit_lifetime(&node.name); skip!(node.colon_token); } -pub fn visit_lifetime<'ast, V>(v: &mut V, node: &'ast Lifetime) +pub fn visit_lifetime<'ast, V>(v: &mut V, node: &'ast crate::Lifetime) where V: Visit<'ast> + ?Sized, { @@ -2397,7 +2722,8 @@ where v.visit_ident(&node.ident); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_lifetime_param<'ast, V>(v: &mut V, node: &'ast LifetimeParam) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_lifetime_param<'ast, V>(v: &mut V, node: &'ast crate::LifetimeParam) where V: Visit<'ast> + ?Sized, { @@ -2411,70 +2737,78 @@ where v.visit_lifetime(it); } } -pub fn visit_lit<'ast, V>(v: &mut V, node: &'ast Lit) +pub fn visit_lit<'ast, V>(v: &mut V, node: &'ast crate::Lit) where V: Visit<'ast> + ?Sized, { match node { - Lit::Str(_binding_0) => { + crate::Lit::Str(_binding_0) => { v.visit_lit_str(_binding_0); } - Lit::ByteStr(_binding_0) => { + crate::Lit::ByteStr(_binding_0) => { v.visit_lit_byte_str(_binding_0); } - Lit::Byte(_binding_0) => { + crate::Lit::CStr(_binding_0) => { + v.visit_lit_cstr(_binding_0); + } + crate::Lit::Byte(_binding_0) => { v.visit_lit_byte(_binding_0); } - Lit::Char(_binding_0) => { + crate::Lit::Char(_binding_0) => { v.visit_lit_char(_binding_0); } - Lit::Int(_binding_0) => { + crate::Lit::Int(_binding_0) => { v.visit_lit_int(_binding_0); } - Lit::Float(_binding_0) => { + crate::Lit::Float(_binding_0) => { v.visit_lit_float(_binding_0); } - Lit::Bool(_binding_0) => { + crate::Lit::Bool(_binding_0) => { v.visit_lit_bool(_binding_0); } - Lit::Verbatim(_binding_0) => { + crate::Lit::Verbatim(_binding_0) => { skip!(_binding_0); } } } -pub fn visit_lit_bool<'ast, V>(v: &mut V, node: &'ast LitBool) +pub fn visit_lit_bool<'ast, V>(v: &mut V, node: &'ast crate::LitBool) where V: Visit<'ast> + ?Sized, { skip!(node.value); v.visit_span(&node.span); } -pub fn visit_lit_byte<'ast, V>(v: &mut V, node: &'ast LitByte) +pub fn visit_lit_byte<'ast, V>(v: &mut V, node: &'ast crate::LitByte) where V: Visit<'ast> + ?Sized, {} -pub fn visit_lit_byte_str<'ast, V>(v: &mut V, node: &'ast LitByteStr) +pub fn visit_lit_byte_str<'ast, V>(v: &mut V, node: &'ast crate::LitByteStr) where V: Visit<'ast> + ?Sized, {} -pub fn visit_lit_char<'ast, V>(v: &mut V, node: &'ast LitChar) +pub fn visit_lit_cstr<'ast, V>(v: &mut V, node: &'ast crate::LitCStr) where V: Visit<'ast> + ?Sized, {} -pub fn visit_lit_float<'ast, V>(v: &mut V, node: &'ast LitFloat) +pub fn visit_lit_char<'ast, V>(v: &mut V, node: &'ast crate::LitChar) where V: Visit<'ast> + ?Sized, {} -pub fn visit_lit_int<'ast, V>(v: &mut V, node: &'ast LitInt) +pub fn visit_lit_float<'ast, V>(v: &mut V, node: &'ast crate::LitFloat) where V: Visit<'ast> + ?Sized, {} -pub fn visit_lit_str<'ast, V>(v: &mut V, node: &'ast LitStr) +pub fn visit_lit_int<'ast, V>(v: &mut V, node: &'ast crate::LitInt) +where + 
V: Visit<'ast> + ?Sized, +{} +pub fn visit_lit_str<'ast, V>(v: &mut V, node: &'ast crate::LitStr) where V: Visit<'ast> + ?Sized, {} #[cfg(feature = "full")] -pub fn visit_local<'ast, V>(v: &mut V, node: &'ast Local) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_local<'ast, V>(v: &mut V, node: &'ast crate::Local) where V: Visit<'ast> + ?Sized, { @@ -2489,7 +2823,8 @@ where skip!(node.semi_token); } #[cfg(feature = "full")] -pub fn visit_local_init<'ast, V>(v: &mut V, node: &'ast LocalInit) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_local_init<'ast, V>(v: &mut V, node: &'ast crate::LocalInit) where V: Visit<'ast> + ?Sized, { @@ -2501,74 +2836,80 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_macro<'ast, V>(v: &mut V, node: &'ast Macro) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_macro<'ast, V>(v: &mut V, node: &'ast crate::Macro) where V: Visit<'ast> + ?Sized, { v.visit_path(&node.path); skip!(node.bang_token); v.visit_macro_delimiter(&node.delimiter); - skip!(node.tokens); + v.visit_token_stream(&node.tokens); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_macro_delimiter<'ast, V>(v: &mut V, node: &'ast MacroDelimiter) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_macro_delimiter<'ast, V>(v: &mut V, node: &'ast crate::MacroDelimiter) where V: Visit<'ast> + ?Sized, { match node { - MacroDelimiter::Paren(_binding_0) => { + crate::MacroDelimiter::Paren(_binding_0) => { skip!(_binding_0); } - MacroDelimiter::Brace(_binding_0) => { + crate::MacroDelimiter::Brace(_binding_0) => { skip!(_binding_0); } - MacroDelimiter::Bracket(_binding_0) => { + crate::MacroDelimiter::Bracket(_binding_0) => { skip!(_binding_0); } } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_member<'ast, V>(v: &mut V, node: &'ast Member) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_member<'ast, V>(v: &mut V, node: &'ast crate::Member) where V: Visit<'ast> + ?Sized, { match node { - Member::Named(_binding_0) => { + crate::Member::Named(_binding_0) => { v.visit_ident(_binding_0); } - Member::Unnamed(_binding_0) => { + crate::Member::Unnamed(_binding_0) => { v.visit_index(_binding_0); } } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_meta<'ast, V>(v: &mut V, node: &'ast Meta) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_meta<'ast, V>(v: &mut V, node: &'ast crate::Meta) where V: Visit<'ast> + ?Sized, { match node { - Meta::Path(_binding_0) => { + crate::Meta::Path(_binding_0) => { v.visit_path(_binding_0); } - Meta::List(_binding_0) => { + crate::Meta::List(_binding_0) => { v.visit_meta_list(_binding_0); } - Meta::NameValue(_binding_0) => { + crate::Meta::NameValue(_binding_0) => { v.visit_meta_name_value(_binding_0); } } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_meta_list<'ast, V>(v: &mut V, node: &'ast MetaList) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_meta_list<'ast, V>(v: &mut V, node: &'ast crate::MetaList) where V: Visit<'ast> + ?Sized, { v.visit_path(&node.path); v.visit_macro_delimiter(&node.delimiter); - skip!(node.tokens); + v.visit_token_stream(&node.tokens); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_meta_name_value<'ast, V>(v: &mut V, node: &'ast MetaNameValue) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] 
+pub fn visit_meta_name_value<'ast, V>(v: &mut V, node: &'ast crate::MetaNameValue) where V: Visit<'ast> + ?Sized, { @@ -2577,9 +2918,10 @@ where v.visit_expr(&node.value); } #[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] pub fn visit_parenthesized_generic_arguments<'ast, V>( v: &mut V, - node: &'ast ParenthesizedGenericArguments, + node: &'ast crate::ParenthesizedGenericArguments, ) where V: Visit<'ast> + ?Sized, @@ -2592,66 +2934,68 @@ where v.visit_return_type(&node.output); } #[cfg(feature = "full")] -pub fn visit_pat<'ast, V>(v: &mut V, node: &'ast Pat) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_pat<'ast, V>(v: &mut V, node: &'ast crate::Pat) where V: Visit<'ast> + ?Sized, { match node { - Pat::Const(_binding_0) => { + crate::Pat::Const(_binding_0) => { v.visit_expr_const(_binding_0); } - Pat::Ident(_binding_0) => { + crate::Pat::Ident(_binding_0) => { v.visit_pat_ident(_binding_0); } - Pat::Lit(_binding_0) => { + crate::Pat::Lit(_binding_0) => { v.visit_expr_lit(_binding_0); } - Pat::Macro(_binding_0) => { + crate::Pat::Macro(_binding_0) => { v.visit_expr_macro(_binding_0); } - Pat::Or(_binding_0) => { + crate::Pat::Or(_binding_0) => { v.visit_pat_or(_binding_0); } - Pat::Paren(_binding_0) => { + crate::Pat::Paren(_binding_0) => { v.visit_pat_paren(_binding_0); } - Pat::Path(_binding_0) => { + crate::Pat::Path(_binding_0) => { v.visit_expr_path(_binding_0); } - Pat::Range(_binding_0) => { + crate::Pat::Range(_binding_0) => { v.visit_expr_range(_binding_0); } - Pat::Reference(_binding_0) => { + crate::Pat::Reference(_binding_0) => { v.visit_pat_reference(_binding_0); } - Pat::Rest(_binding_0) => { + crate::Pat::Rest(_binding_0) => { v.visit_pat_rest(_binding_0); } - Pat::Slice(_binding_0) => { + crate::Pat::Slice(_binding_0) => { v.visit_pat_slice(_binding_0); } - Pat::Struct(_binding_0) => { + crate::Pat::Struct(_binding_0) => { v.visit_pat_struct(_binding_0); } - Pat::Tuple(_binding_0) => { + crate::Pat::Tuple(_binding_0) => { v.visit_pat_tuple(_binding_0); } - Pat::TupleStruct(_binding_0) => { + crate::Pat::TupleStruct(_binding_0) => { v.visit_pat_tuple_struct(_binding_0); } - Pat::Type(_binding_0) => { + crate::Pat::Type(_binding_0) => { v.visit_pat_type(_binding_0); } - Pat::Verbatim(_binding_0) => { - skip!(_binding_0); + crate::Pat::Verbatim(_binding_0) => { + v.visit_token_stream(_binding_0); } - Pat::Wild(_binding_0) => { + crate::Pat::Wild(_binding_0) => { v.visit_pat_wild(_binding_0); } } } #[cfg(feature = "full")] -pub fn visit_pat_ident<'ast, V>(v: &mut V, node: &'ast PatIdent) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_pat_ident<'ast, V>(v: &mut V, node: &'ast crate::PatIdent) where V: Visit<'ast> + ?Sized, { @@ -2667,7 +3011,8 @@ where } } #[cfg(feature = "full")] -pub fn visit_pat_or<'ast, V>(v: &mut V, node: &'ast PatOr) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_pat_or<'ast, V>(v: &mut V, node: &'ast crate::PatOr) where V: Visit<'ast> + ?Sized, { @@ -2681,7 +3026,8 @@ where } } #[cfg(feature = "full")] -pub fn visit_pat_paren<'ast, V>(v: &mut V, node: &'ast PatParen) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_pat_paren<'ast, V>(v: &mut V, node: &'ast crate::PatParen) where V: Visit<'ast> + ?Sized, { @@ -2692,7 +3038,8 @@ where v.visit_pat(&*node.pat); } #[cfg(feature = "full")] -pub fn visit_pat_reference<'ast, V>(v: &mut V, node: &'ast PatReference) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn 
visit_pat_reference<'ast, V>(v: &mut V, node: &'ast crate::PatReference) where V: Visit<'ast> + ?Sized, { @@ -2704,7 +3051,8 @@ where v.visit_pat(&*node.pat); } #[cfg(feature = "full")] -pub fn visit_pat_rest<'ast, V>(v: &mut V, node: &'ast PatRest) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_pat_rest<'ast, V>(v: &mut V, node: &'ast crate::PatRest) where V: Visit<'ast> + ?Sized, { @@ -2714,7 +3062,8 @@ where skip!(node.dot2_token); } #[cfg(feature = "full")] -pub fn visit_pat_slice<'ast, V>(v: &mut V, node: &'ast PatSlice) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_pat_slice<'ast, V>(v: &mut V, node: &'ast crate::PatSlice) where V: Visit<'ast> + ?Sized, { @@ -2728,7 +3077,8 @@ where } } #[cfg(feature = "full")] -pub fn visit_pat_struct<'ast, V>(v: &mut V, node: &'ast PatStruct) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_pat_struct<'ast, V>(v: &mut V, node: &'ast crate::PatStruct) where V: Visit<'ast> + ?Sized, { @@ -2749,7 +3099,8 @@ where } } #[cfg(feature = "full")] -pub fn visit_pat_tuple<'ast, V>(v: &mut V, node: &'ast PatTuple) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_pat_tuple<'ast, V>(v: &mut V, node: &'ast crate::PatTuple) where V: Visit<'ast> + ?Sized, { @@ -2763,7 +3114,8 @@ where } } #[cfg(feature = "full")] -pub fn visit_pat_tuple_struct<'ast, V>(v: &mut V, node: &'ast PatTupleStruct) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_pat_tuple_struct<'ast, V>(v: &mut V, node: &'ast crate::PatTupleStruct) where V: Visit<'ast> + ?Sized, { @@ -2781,7 +3133,8 @@ where } } #[cfg(feature = "full")] -pub fn visit_pat_type<'ast, V>(v: &mut V, node: &'ast PatType) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_pat_type<'ast, V>(v: &mut V, node: &'ast crate::PatType) where V: Visit<'ast> + ?Sized, { @@ -2793,7 +3146,8 @@ where v.visit_type(&*node.ty); } #[cfg(feature = "full")] -pub fn visit_pat_wild<'ast, V>(v: &mut V, node: &'ast PatWild) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_pat_wild<'ast, V>(v: &mut V, node: &'ast crate::PatWild) where V: Visit<'ast> + ?Sized, { @@ -2803,7 +3157,8 @@ where skip!(node.underscore_token); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_path<'ast, V>(v: &mut V, node: &'ast Path) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_path<'ast, V>(v: &mut V, node: &'ast crate::Path) where V: Visit<'ast> + ?Sized, { @@ -2814,30 +3169,62 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_path_arguments<'ast, V>(v: &mut V, node: &'ast PathArguments) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_path_arguments<'ast, V>(v: &mut V, node: &'ast crate::PathArguments) where V: Visit<'ast> + ?Sized, { match node { - PathArguments::None => {} - PathArguments::AngleBracketed(_binding_0) => { + crate::PathArguments::None => {} + crate::PathArguments::AngleBracketed(_binding_0) => { v.visit_angle_bracketed_generic_arguments(_binding_0); } - PathArguments::Parenthesized(_binding_0) => { + crate::PathArguments::Parenthesized(_binding_0) => { v.visit_parenthesized_generic_arguments(_binding_0); } } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_path_segment<'ast, V>(v: &mut V, node: &'ast PathSegment) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_path_segment<'ast, V>(v: &mut V, node: &'ast crate::PathSegment) where V: Visit<'ast> + ?Sized, { 
v.visit_ident(&node.ident); v.visit_path_arguments(&node.arguments); } +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_pointer_mutability<'ast, V>(v: &mut V, node: &'ast crate::PointerMutability) +where + V: Visit<'ast> + ?Sized, +{ + match node { + crate::PointerMutability::Const(_binding_0) => { + skip!(_binding_0); + } + crate::PointerMutability::Mut(_binding_0) => { + skip!(_binding_0); + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_precise_capture<'ast, V>(v: &mut V, node: &'ast crate::PreciseCapture) +where + V: Visit<'ast> + ?Sized, +{ + skip!(node.use_token); + skip!(node.lt_token); + for el in Punctuated::pairs(&node.params) { + let it = el.value(); + v.visit_captured_param(it); + } + skip!(node.gt_token); +} #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_predicate_lifetime<'ast, V>(v: &mut V, node: &'ast PredicateLifetime) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_predicate_lifetime<'ast, V>(v: &mut V, node: &'ast crate::PredicateLifetime) where V: Visit<'ast> + ?Sized, { @@ -2849,7 +3236,8 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_predicate_type<'ast, V>(v: &mut V, node: &'ast PredicateType) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_predicate_type<'ast, V>(v: &mut V, node: &'ast crate::PredicateType) where V: Visit<'ast> + ?Sized, { @@ -2864,7 +3252,8 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_qself<'ast, V>(v: &mut V, node: &'ast QSelf) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_qself<'ast, V>(v: &mut V, node: &'ast crate::QSelf) where V: Visit<'ast> + ?Sized, { @@ -2875,21 +3264,23 @@ where skip!(node.gt_token); } #[cfg(feature = "full")] -pub fn visit_range_limits<'ast, V>(v: &mut V, node: &'ast RangeLimits) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_range_limits<'ast, V>(v: &mut V, node: &'ast crate::RangeLimits) where V: Visit<'ast> + ?Sized, { match node { - RangeLimits::HalfOpen(_binding_0) => { + crate::RangeLimits::HalfOpen(_binding_0) => { skip!(_binding_0); } - RangeLimits::Closed(_binding_0) => { + crate::RangeLimits::Closed(_binding_0) => { skip!(_binding_0); } } } #[cfg(feature = "full")] -pub fn visit_receiver<'ast, V>(v: &mut V, node: &'ast Receiver) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_receiver<'ast, V>(v: &mut V, node: &'ast crate::Receiver) where V: Visit<'ast> + ?Sized, { @@ -2908,20 +3299,22 @@ where v.visit_type(&*node.ty); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_return_type<'ast, V>(v: &mut V, node: &'ast ReturnType) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_return_type<'ast, V>(v: &mut V, node: &'ast crate::ReturnType) where V: Visit<'ast> + ?Sized, { match node { - ReturnType::Default => {} - ReturnType::Type(_binding_0, _binding_1) => { + crate::ReturnType::Default => {} + crate::ReturnType::Type(_binding_0, _binding_1) => { skip!(_binding_0); v.visit_type(&**_binding_1); } } } #[cfg(feature = "full")] -pub fn visit_signature<'ast, V>(v: &mut V, node: &'ast Signature) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_signature<'ast, V>(v: &mut V, node: &'ast crate::Signature) where V: Visit<'ast> + ?Sized, { @@ -2944,45 +3337,48 @@ where } v.visit_return_type(&node.output); } -pub fn visit_span<'ast, V>(v: &mut V, 
node: &Span) +pub fn visit_span<'ast, V>(v: &mut V, node: &proc_macro2::Span) where V: Visit<'ast> + ?Sized, {} #[cfg(feature = "full")] -pub fn visit_static_mutability<'ast, V>(v: &mut V, node: &'ast StaticMutability) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_static_mutability<'ast, V>(v: &mut V, node: &'ast crate::StaticMutability) where V: Visit<'ast> + ?Sized, { match node { - StaticMutability::Mut(_binding_0) => { + crate::StaticMutability::Mut(_binding_0) => { skip!(_binding_0); } - StaticMutability::None => {} + crate::StaticMutability::None => {} } } #[cfg(feature = "full")] -pub fn visit_stmt<'ast, V>(v: &mut V, node: &'ast Stmt) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_stmt<'ast, V>(v: &mut V, node: &'ast crate::Stmt) where V: Visit<'ast> + ?Sized, { match node { - Stmt::Local(_binding_0) => { + crate::Stmt::Local(_binding_0) => { v.visit_local(_binding_0); } - Stmt::Item(_binding_0) => { + crate::Stmt::Item(_binding_0) => { v.visit_item(_binding_0); } - Stmt::Expr(_binding_0, _binding_1) => { + crate::Stmt::Expr(_binding_0, _binding_1) => { v.visit_expr(_binding_0); skip!(_binding_1); } - Stmt::Macro(_binding_0) => { + crate::Stmt::Macro(_binding_0) => { v.visit_stmt_macro(_binding_0); } } } #[cfg(feature = "full")] -pub fn visit_stmt_macro<'ast, V>(v: &mut V, node: &'ast StmtMacro) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_stmt_macro<'ast, V>(v: &mut V, node: &'ast crate::StmtMacro) where V: Visit<'ast> + ?Sized, { @@ -2993,7 +3389,8 @@ where skip!(node.semi_token); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_trait_bound<'ast, V>(v: &mut V, node: &'ast TraitBound) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_trait_bound<'ast, V>(v: &mut V, node: &'ast crate::TraitBound) where V: Visit<'ast> + ?Sized, { @@ -3005,42 +3402,48 @@ where v.visit_path(&node.path); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_trait_bound_modifier<'ast, V>(v: &mut V, node: &'ast TraitBoundModifier) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_trait_bound_modifier<'ast, V>( + v: &mut V, + node: &'ast crate::TraitBoundModifier, +) where V: Visit<'ast> + ?Sized, { match node { - TraitBoundModifier::None => {} - TraitBoundModifier::Maybe(_binding_0) => { + crate::TraitBoundModifier::None => {} + crate::TraitBoundModifier::Maybe(_binding_0) => { skip!(_binding_0); } } } #[cfg(feature = "full")] -pub fn visit_trait_item<'ast, V>(v: &mut V, node: &'ast TraitItem) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_trait_item<'ast, V>(v: &mut V, node: &'ast crate::TraitItem) where V: Visit<'ast> + ?Sized, { match node { - TraitItem::Const(_binding_0) => { + crate::TraitItem::Const(_binding_0) => { v.visit_trait_item_const(_binding_0); } - TraitItem::Fn(_binding_0) => { + crate::TraitItem::Fn(_binding_0) => { v.visit_trait_item_fn(_binding_0); } - TraitItem::Type(_binding_0) => { + crate::TraitItem::Type(_binding_0) => { v.visit_trait_item_type(_binding_0); } - TraitItem::Macro(_binding_0) => { + crate::TraitItem::Macro(_binding_0) => { v.visit_trait_item_macro(_binding_0); } - TraitItem::Verbatim(_binding_0) => { - skip!(_binding_0); + crate::TraitItem::Verbatim(_binding_0) => { + v.visit_token_stream(_binding_0); } } } #[cfg(feature = "full")] -pub fn visit_trait_item_const<'ast, V>(v: &mut V, node: &'ast TraitItemConst) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_trait_item_const<'ast, 
V>(v: &mut V, node: &'ast crate::TraitItemConst) where V: Visit<'ast> + ?Sized, { @@ -3059,7 +3462,8 @@ where skip!(node.semi_token); } #[cfg(feature = "full")] -pub fn visit_trait_item_fn<'ast, V>(v: &mut V, node: &'ast TraitItemFn) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_trait_item_fn<'ast, V>(v: &mut V, node: &'ast crate::TraitItemFn) where V: Visit<'ast> + ?Sized, { @@ -3073,7 +3477,8 @@ where skip!(node.semi_token); } #[cfg(feature = "full")] -pub fn visit_trait_item_macro<'ast, V>(v: &mut V, node: &'ast TraitItemMacro) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_trait_item_macro<'ast, V>(v: &mut V, node: &'ast crate::TraitItemMacro) where V: Visit<'ast> + ?Sized, { @@ -3084,7 +3489,8 @@ where skip!(node.semi_token); } #[cfg(feature = "full")] -pub fn visit_trait_item_type<'ast, V>(v: &mut V, node: &'ast TraitItemType) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_trait_item_type<'ast, V>(v: &mut V, node: &'ast crate::TraitItemType) where V: Visit<'ast> + ?Sized, { @@ -3106,60 +3512,62 @@ where skip!(node.semi_token); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_type<'ast, V>(v: &mut V, node: &'ast Type) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_type<'ast, V>(v: &mut V, node: &'ast crate::Type) where V: Visit<'ast> + ?Sized, { match node { - Type::Array(_binding_0) => { + crate::Type::Array(_binding_0) => { v.visit_type_array(_binding_0); } - Type::BareFn(_binding_0) => { + crate::Type::BareFn(_binding_0) => { v.visit_type_bare_fn(_binding_0); } - Type::Group(_binding_0) => { + crate::Type::Group(_binding_0) => { v.visit_type_group(_binding_0); } - Type::ImplTrait(_binding_0) => { + crate::Type::ImplTrait(_binding_0) => { v.visit_type_impl_trait(_binding_0); } - Type::Infer(_binding_0) => { + crate::Type::Infer(_binding_0) => { v.visit_type_infer(_binding_0); } - Type::Macro(_binding_0) => { + crate::Type::Macro(_binding_0) => { v.visit_type_macro(_binding_0); } - Type::Never(_binding_0) => { + crate::Type::Never(_binding_0) => { v.visit_type_never(_binding_0); } - Type::Paren(_binding_0) => { + crate::Type::Paren(_binding_0) => { v.visit_type_paren(_binding_0); } - Type::Path(_binding_0) => { + crate::Type::Path(_binding_0) => { v.visit_type_path(_binding_0); } - Type::Ptr(_binding_0) => { + crate::Type::Ptr(_binding_0) => { v.visit_type_ptr(_binding_0); } - Type::Reference(_binding_0) => { + crate::Type::Reference(_binding_0) => { v.visit_type_reference(_binding_0); } - Type::Slice(_binding_0) => { + crate::Type::Slice(_binding_0) => { v.visit_type_slice(_binding_0); } - Type::TraitObject(_binding_0) => { + crate::Type::TraitObject(_binding_0) => { v.visit_type_trait_object(_binding_0); } - Type::Tuple(_binding_0) => { + crate::Type::Tuple(_binding_0) => { v.visit_type_tuple(_binding_0); } - Type::Verbatim(_binding_0) => { - skip!(_binding_0); + crate::Type::Verbatim(_binding_0) => { + v.visit_token_stream(_binding_0); } } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_type_array<'ast, V>(v: &mut V, node: &'ast TypeArray) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_type_array<'ast, V>(v: &mut V, node: &'ast crate::TypeArray) where V: Visit<'ast> + ?Sized, { @@ -3169,7 +3577,8 @@ where v.visit_expr(&node.len); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_type_bare_fn<'ast, V>(v: &mut V, node: &'ast TypeBareFn) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = 
"full"))))] +pub fn visit_type_bare_fn<'ast, V>(v: &mut V, node: &'ast crate::TypeBareFn) where V: Visit<'ast> + ?Sized, { @@ -3192,7 +3601,8 @@ where v.visit_return_type(&node.output); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_type_group<'ast, V>(v: &mut V, node: &'ast TypeGroup) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_type_group<'ast, V>(v: &mut V, node: &'ast crate::TypeGroup) where V: Visit<'ast> + ?Sized, { @@ -3200,7 +3610,8 @@ where v.visit_type(&*node.elem); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_type_impl_trait<'ast, V>(v: &mut V, node: &'ast TypeImplTrait) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_type_impl_trait<'ast, V>(v: &mut V, node: &'ast crate::TypeImplTrait) where V: Visit<'ast> + ?Sized, { @@ -3211,28 +3622,32 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_type_infer<'ast, V>(v: &mut V, node: &'ast TypeInfer) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_type_infer<'ast, V>(v: &mut V, node: &'ast crate::TypeInfer) where V: Visit<'ast> + ?Sized, { skip!(node.underscore_token); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_type_macro<'ast, V>(v: &mut V, node: &'ast TypeMacro) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_type_macro<'ast, V>(v: &mut V, node: &'ast crate::TypeMacro) where V: Visit<'ast> + ?Sized, { v.visit_macro(&node.mac); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_type_never<'ast, V>(v: &mut V, node: &'ast TypeNever) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_type_never<'ast, V>(v: &mut V, node: &'ast crate::TypeNever) where V: Visit<'ast> + ?Sized, { skip!(node.bang_token); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_type_param<'ast, V>(v: &mut V, node: &'ast TypeParam) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_type_param<'ast, V>(v: &mut V, node: &'ast crate::TypeParam) where V: Visit<'ast> + ?Sized, { @@ -3251,24 +3666,29 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_type_param_bound<'ast, V>(v: &mut V, node: &'ast TypeParamBound) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_type_param_bound<'ast, V>(v: &mut V, node: &'ast crate::TypeParamBound) where V: Visit<'ast> + ?Sized, { match node { - TypeParamBound::Trait(_binding_0) => { + crate::TypeParamBound::Trait(_binding_0) => { v.visit_trait_bound(_binding_0); } - TypeParamBound::Lifetime(_binding_0) => { + crate::TypeParamBound::Lifetime(_binding_0) => { v.visit_lifetime(_binding_0); } - TypeParamBound::Verbatim(_binding_0) => { - skip!(_binding_0); + crate::TypeParamBound::PreciseCapture(_binding_0) => { + full!(v.visit_precise_capture(_binding_0)); + } + crate::TypeParamBound::Verbatim(_binding_0) => { + v.visit_token_stream(_binding_0); } } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_type_paren<'ast, V>(v: &mut V, node: &'ast TypeParen) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_type_paren<'ast, V>(v: &mut V, node: &'ast crate::TypeParen) where V: Visit<'ast> + ?Sized, { @@ -3276,7 +3696,8 @@ where v.visit_type(&*node.elem); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_type_path<'ast, V>(v: &mut V, node: &'ast TypePath) 
+#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_type_path<'ast, V>(v: &mut V, node: &'ast crate::TypePath) where V: Visit<'ast> + ?Sized, { @@ -3286,7 +3707,8 @@ where v.visit_path(&node.path); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_type_ptr<'ast, V>(v: &mut V, node: &'ast TypePtr) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_type_ptr<'ast, V>(v: &mut V, node: &'ast crate::TypePtr) where V: Visit<'ast> + ?Sized, { @@ -3296,7 +3718,8 @@ where v.visit_type(&*node.elem); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_type_reference<'ast, V>(v: &mut V, node: &'ast TypeReference) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_type_reference<'ast, V>(v: &mut V, node: &'ast crate::TypeReference) where V: Visit<'ast> + ?Sized, { @@ -3308,7 +3731,8 @@ where v.visit_type(&*node.elem); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_type_slice<'ast, V>(v: &mut V, node: &'ast TypeSlice) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_type_slice<'ast, V>(v: &mut V, node: &'ast crate::TypeSlice) where V: Visit<'ast> + ?Sized, { @@ -3316,7 +3740,8 @@ where v.visit_type(&*node.elem); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_type_trait_object<'ast, V>(v: &mut V, node: &'ast TypeTraitObject) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_type_trait_object<'ast, V>(v: &mut V, node: &'ast crate::TypeTraitObject) where V: Visit<'ast> + ?Sized, { @@ -3327,7 +3752,8 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_type_tuple<'ast, V>(v: &mut V, node: &'ast TypeTuple) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_type_tuple<'ast, V>(v: &mut V, node: &'ast crate::TypeTuple) where V: Visit<'ast> + ?Sized, { @@ -3338,31 +3764,34 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_un_op<'ast, V>(v: &mut V, node: &'ast UnOp) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_un_op<'ast, V>(v: &mut V, node: &'ast crate::UnOp) where V: Visit<'ast> + ?Sized, { match node { - UnOp::Deref(_binding_0) => { + crate::UnOp::Deref(_binding_0) => { skip!(_binding_0); } - UnOp::Not(_binding_0) => { + crate::UnOp::Not(_binding_0) => { skip!(_binding_0); } - UnOp::Neg(_binding_0) => { + crate::UnOp::Neg(_binding_0) => { skip!(_binding_0); } } } #[cfg(feature = "full")] -pub fn visit_use_glob<'ast, V>(v: &mut V, node: &'ast UseGlob) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_use_glob<'ast, V>(v: &mut V, node: &'ast crate::UseGlob) where V: Visit<'ast> + ?Sized, { skip!(node.star_token); } #[cfg(feature = "full")] -pub fn visit_use_group<'ast, V>(v: &mut V, node: &'ast UseGroup) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_use_group<'ast, V>(v: &mut V, node: &'ast crate::UseGroup) where V: Visit<'ast> + ?Sized, { @@ -3373,14 +3802,16 @@ where } } #[cfg(feature = "full")] -pub fn visit_use_name<'ast, V>(v: &mut V, node: &'ast UseName) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_use_name<'ast, V>(v: &mut V, node: &'ast crate::UseName) where V: Visit<'ast> + ?Sized, { v.visit_ident(&node.ident); } #[cfg(feature = "full")] -pub fn visit_use_path<'ast, V>(v: &mut V, node: &'ast UsePath) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_use_path<'ast, 
V>(v: &mut V, node: &'ast crate::UsePath) where V: Visit<'ast> + ?Sized, { @@ -3389,7 +3820,8 @@ where v.visit_use_tree(&*node.tree); } #[cfg(feature = "full")] -pub fn visit_use_rename<'ast, V>(v: &mut V, node: &'ast UseRename) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_use_rename<'ast, V>(v: &mut V, node: &'ast crate::UseRename) where V: Visit<'ast> + ?Sized, { @@ -3398,30 +3830,32 @@ where v.visit_ident(&node.rename); } #[cfg(feature = "full")] -pub fn visit_use_tree<'ast, V>(v: &mut V, node: &'ast UseTree) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_use_tree<'ast, V>(v: &mut V, node: &'ast crate::UseTree) where V: Visit<'ast> + ?Sized, { match node { - UseTree::Path(_binding_0) => { + crate::UseTree::Path(_binding_0) => { v.visit_use_path(_binding_0); } - UseTree::Name(_binding_0) => { + crate::UseTree::Name(_binding_0) => { v.visit_use_name(_binding_0); } - UseTree::Rename(_binding_0) => { + crate::UseTree::Rename(_binding_0) => { v.visit_use_rename(_binding_0); } - UseTree::Glob(_binding_0) => { + crate::UseTree::Glob(_binding_0) => { v.visit_use_glob(_binding_0); } - UseTree::Group(_binding_0) => { + crate::UseTree::Group(_binding_0) => { v.visit_use_group(_binding_0); } } } #[cfg(feature = "full")] -pub fn visit_variadic<'ast, V>(v: &mut V, node: &'ast Variadic) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_variadic<'ast, V>(v: &mut V, node: &'ast crate::Variadic) where V: Visit<'ast> + ?Sized, { @@ -3436,7 +3870,8 @@ where skip!(node.comma); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_variant<'ast, V>(v: &mut V, node: &'ast Variant) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_variant<'ast, V>(v: &mut V, node: &'ast crate::Variant) where V: Visit<'ast> + ?Sized, { @@ -3451,7 +3886,8 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_vis_restricted<'ast, V>(v: &mut V, node: &'ast VisRestricted) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_vis_restricted<'ast, V>(v: &mut V, node: &'ast crate::VisRestricted) where V: Visit<'ast> + ?Sized, { @@ -3461,22 +3897,24 @@ where v.visit_path(&*node.path); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_visibility<'ast, V>(v: &mut V, node: &'ast Visibility) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_visibility<'ast, V>(v: &mut V, node: &'ast crate::Visibility) where V: Visit<'ast> + ?Sized, { match node { - Visibility::Public(_binding_0) => { + crate::Visibility::Public(_binding_0) => { skip!(_binding_0); } - Visibility::Restricted(_binding_0) => { + crate::Visibility::Restricted(_binding_0) => { v.visit_vis_restricted(_binding_0); } - Visibility::Inherited => {} + crate::Visibility::Inherited => {} } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_where_clause<'ast, V>(v: &mut V, node: &'ast WhereClause) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_where_clause<'ast, V>(v: &mut V, node: &'ast crate::WhereClause) where V: Visit<'ast> + ?Sized, { @@ -3487,15 +3925,16 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_where_predicate<'ast, V>(v: &mut V, node: &'ast WherePredicate) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_where_predicate<'ast, V>(v: &mut V, node: &'ast crate::WherePredicate) where V: Visit<'ast> + ?Sized, { match node { - 
WherePredicate::Lifetime(_binding_0) => { + crate::WherePredicate::Lifetime(_binding_0) => { v.visit_predicate_lifetime(_binding_0); } - WherePredicate::Type(_binding_0) => { + crate::WherePredicate::Type(_binding_0) => { v.visit_predicate_type(_binding_0); } } diff --git a/vendor/syn/src/gen/visit_mut.rs b/vendor/syn/src/gen/visit_mut.rs index 9e7d16ff..2bbd6895 100644 --- a/vendor/syn/src/gen/visit_mut.rs +++ b/vendor/syn/src/gen/visit_mut.rs @@ -5,8 +5,6 @@ #![allow(clippy::needless_pass_by_ref_mut)] #[cfg(any(feature = "full", feature = "derive"))] use crate::punctuated::Punctuated; -use crate::*; -use proc_macro2::Span; #[cfg(feature = "full")] macro_rules! full { ($e:expr) => { @@ -30,731 +28,932 @@ macro_rules! skip { /// [module documentation]: self pub trait VisitMut { #[cfg(any(feature = "derive", feature = "full"))] - fn visit_abi_mut(&mut self, i: &mut Abi) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_abi_mut(&mut self, i: &mut crate::Abi) { visit_abi_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] fn visit_angle_bracketed_generic_arguments_mut( &mut self, - i: &mut AngleBracketedGenericArguments, + i: &mut crate::AngleBracketedGenericArguments, ) { visit_angle_bracketed_generic_arguments_mut(self, i); } #[cfg(feature = "full")] - fn visit_arm_mut(&mut self, i: &mut Arm) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_arm_mut(&mut self, i: &mut crate::Arm) { visit_arm_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_assoc_const_mut(&mut self, i: &mut AssocConst) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_assoc_const_mut(&mut self, i: &mut crate::AssocConst) { visit_assoc_const_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_assoc_type_mut(&mut self, i: &mut AssocType) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_assoc_type_mut(&mut self, i: &mut crate::AssocType) { visit_assoc_type_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_attr_style_mut(&mut self, i: &mut AttrStyle) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_attr_style_mut(&mut self, i: &mut crate::AttrStyle) { visit_attr_style_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_attribute_mut(&mut self, i: &mut Attribute) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_attribute_mut(&mut self, i: &mut crate::Attribute) { visit_attribute_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_bare_fn_arg_mut(&mut self, i: &mut BareFnArg) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_attributes_mut(&mut self, i: &mut Vec<crate::Attribute>) { + for attr in i { + self.visit_attribute_mut(attr); + } + } + #[cfg(any(feature = "derive", feature = "full"))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_bare_fn_arg_mut(&mut self, i: &mut crate::BareFnArg) { visit_bare_fn_arg_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_bare_variadic_mut(&mut self, i: &mut BareVariadic) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_bare_variadic_mut(&mut self, i: &mut crate::BareVariadic) { visit_bare_variadic_mut(self, i); } #[cfg(any(feature = 
"derive", feature = "full"))] - fn visit_bin_op_mut(&mut self, i: &mut BinOp) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_bin_op_mut(&mut self, i: &mut crate::BinOp) { visit_bin_op_mut(self, i); } #[cfg(feature = "full")] - fn visit_block_mut(&mut self, i: &mut Block) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_block_mut(&mut self, i: &mut crate::Block) { visit_block_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_bound_lifetimes_mut(&mut self, i: &mut BoundLifetimes) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_bound_lifetimes_mut(&mut self, i: &mut crate::BoundLifetimes) { visit_bound_lifetimes_mut(self, i); } + #[cfg(feature = "full")] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_captured_param_mut(&mut self, i: &mut crate::CapturedParam) { + visit_captured_param_mut(self, i); + } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_const_param_mut(&mut self, i: &mut ConstParam) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_const_param_mut(&mut self, i: &mut crate::ConstParam) { visit_const_param_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_constraint_mut(&mut self, i: &mut Constraint) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_constraint_mut(&mut self, i: &mut crate::Constraint) { visit_constraint_mut(self, i); } #[cfg(feature = "derive")] - fn visit_data_mut(&mut self, i: &mut Data) { + #[cfg_attr(docsrs, doc(cfg(feature = "derive")))] + fn visit_data_mut(&mut self, i: &mut crate::Data) { visit_data_mut(self, i); } #[cfg(feature = "derive")] - fn visit_data_enum_mut(&mut self, i: &mut DataEnum) { + #[cfg_attr(docsrs, doc(cfg(feature = "derive")))] + fn visit_data_enum_mut(&mut self, i: &mut crate::DataEnum) { visit_data_enum_mut(self, i); } #[cfg(feature = "derive")] - fn visit_data_struct_mut(&mut self, i: &mut DataStruct) { + #[cfg_attr(docsrs, doc(cfg(feature = "derive")))] + fn visit_data_struct_mut(&mut self, i: &mut crate::DataStruct) { visit_data_struct_mut(self, i); } #[cfg(feature = "derive")] - fn visit_data_union_mut(&mut self, i: &mut DataUnion) { + #[cfg_attr(docsrs, doc(cfg(feature = "derive")))] + fn visit_data_union_mut(&mut self, i: &mut crate::DataUnion) { visit_data_union_mut(self, i); } #[cfg(feature = "derive")] - fn visit_derive_input_mut(&mut self, i: &mut DeriveInput) { + #[cfg_attr(docsrs, doc(cfg(feature = "derive")))] + fn visit_derive_input_mut(&mut self, i: &mut crate::DeriveInput) { visit_derive_input_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_expr_mut(&mut self, i: &mut Expr) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_expr_mut(&mut self, i: &mut crate::Expr) { visit_expr_mut(self, i); } #[cfg(feature = "full")] - fn visit_expr_array_mut(&mut self, i: &mut ExprArray) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_array_mut(&mut self, i: &mut crate::ExprArray) { visit_expr_array_mut(self, i); } #[cfg(feature = "full")] - fn visit_expr_assign_mut(&mut self, i: &mut ExprAssign) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_assign_mut(&mut self, i: &mut crate::ExprAssign) { visit_expr_assign_mut(self, i); } #[cfg(feature = "full")] - fn visit_expr_async_mut(&mut self, i: &mut ExprAsync) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn 
visit_expr_async_mut(&mut self, i: &mut crate::ExprAsync) { visit_expr_async_mut(self, i); } #[cfg(feature = "full")] - fn visit_expr_await_mut(&mut self, i: &mut ExprAwait) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_await_mut(&mut self, i: &mut crate::ExprAwait) { visit_expr_await_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_expr_binary_mut(&mut self, i: &mut ExprBinary) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_expr_binary_mut(&mut self, i: &mut crate::ExprBinary) { visit_expr_binary_mut(self, i); } #[cfg(feature = "full")] - fn visit_expr_block_mut(&mut self, i: &mut ExprBlock) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_block_mut(&mut self, i: &mut crate::ExprBlock) { visit_expr_block_mut(self, i); } #[cfg(feature = "full")] - fn visit_expr_break_mut(&mut self, i: &mut ExprBreak) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_break_mut(&mut self, i: &mut crate::ExprBreak) { visit_expr_break_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_expr_call_mut(&mut self, i: &mut ExprCall) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_expr_call_mut(&mut self, i: &mut crate::ExprCall) { visit_expr_call_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_expr_cast_mut(&mut self, i: &mut ExprCast) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_expr_cast_mut(&mut self, i: &mut crate::ExprCast) { visit_expr_cast_mut(self, i); } #[cfg(feature = "full")] - fn visit_expr_closure_mut(&mut self, i: &mut ExprClosure) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_closure_mut(&mut self, i: &mut crate::ExprClosure) { visit_expr_closure_mut(self, i); } #[cfg(feature = "full")] - fn visit_expr_const_mut(&mut self, i: &mut ExprConst) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_const_mut(&mut self, i: &mut crate::ExprConst) { visit_expr_const_mut(self, i); } #[cfg(feature = "full")] - fn visit_expr_continue_mut(&mut self, i: &mut ExprContinue) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_continue_mut(&mut self, i: &mut crate::ExprContinue) { visit_expr_continue_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_expr_field_mut(&mut self, i: &mut ExprField) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_expr_field_mut(&mut self, i: &mut crate::ExprField) { visit_expr_field_mut(self, i); } #[cfg(feature = "full")] - fn visit_expr_for_loop_mut(&mut self, i: &mut ExprForLoop) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_for_loop_mut(&mut self, i: &mut crate::ExprForLoop) { visit_expr_for_loop_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_expr_group_mut(&mut self, i: &mut ExprGroup) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_expr_group_mut(&mut self, i: &mut crate::ExprGroup) { visit_expr_group_mut(self, i); } #[cfg(feature = "full")] - fn visit_expr_if_mut(&mut self, i: &mut ExprIf) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_if_mut(&mut self, i: &mut crate::ExprIf) { visit_expr_if_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_expr_index_mut(&mut self, i: &mut ExprIndex) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn 
visit_expr_index_mut(&mut self, i: &mut crate::ExprIndex) { visit_expr_index_mut(self, i); } #[cfg(feature = "full")] - fn visit_expr_infer_mut(&mut self, i: &mut ExprInfer) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_infer_mut(&mut self, i: &mut crate::ExprInfer) { visit_expr_infer_mut(self, i); } #[cfg(feature = "full")] - fn visit_expr_let_mut(&mut self, i: &mut ExprLet) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_let_mut(&mut self, i: &mut crate::ExprLet) { visit_expr_let_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_expr_lit_mut(&mut self, i: &mut ExprLit) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_expr_lit_mut(&mut self, i: &mut crate::ExprLit) { visit_expr_lit_mut(self, i); } #[cfg(feature = "full")] - fn visit_expr_loop_mut(&mut self, i: &mut ExprLoop) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_loop_mut(&mut self, i: &mut crate::ExprLoop) { visit_expr_loop_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_expr_macro_mut(&mut self, i: &mut ExprMacro) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_expr_macro_mut(&mut self, i: &mut crate::ExprMacro) { visit_expr_macro_mut(self, i); } #[cfg(feature = "full")] - fn visit_expr_match_mut(&mut self, i: &mut ExprMatch) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_match_mut(&mut self, i: &mut crate::ExprMatch) { visit_expr_match_mut(self, i); } - #[cfg(feature = "full")] - fn visit_expr_method_call_mut(&mut self, i: &mut ExprMethodCall) { + #[cfg(any(feature = "derive", feature = "full"))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_expr_method_call_mut(&mut self, i: &mut crate::ExprMethodCall) { visit_expr_method_call_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_expr_paren_mut(&mut self, i: &mut ExprParen) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_expr_paren_mut(&mut self, i: &mut crate::ExprParen) { visit_expr_paren_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_expr_path_mut(&mut self, i: &mut ExprPath) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_expr_path_mut(&mut self, i: &mut crate::ExprPath) { visit_expr_path_mut(self, i); } #[cfg(feature = "full")] - fn visit_expr_range_mut(&mut self, i: &mut ExprRange) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_range_mut(&mut self, i: &mut crate::ExprRange) { visit_expr_range_mut(self, i); } #[cfg(feature = "full")] - fn visit_expr_reference_mut(&mut self, i: &mut ExprReference) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_raw_addr_mut(&mut self, i: &mut crate::ExprRawAddr) { + visit_expr_raw_addr_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_expr_reference_mut(&mut self, i: &mut crate::ExprReference) { visit_expr_reference_mut(self, i); } #[cfg(feature = "full")] - fn visit_expr_repeat_mut(&mut self, i: &mut ExprRepeat) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_repeat_mut(&mut self, i: &mut crate::ExprRepeat) { visit_expr_repeat_mut(self, i); } #[cfg(feature = "full")] - fn visit_expr_return_mut(&mut self, i: &mut ExprReturn) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn 
visit_expr_return_mut(&mut self, i: &mut crate::ExprReturn) { visit_expr_return_mut(self, i); } - #[cfg(feature = "full")] - fn visit_expr_struct_mut(&mut self, i: &mut ExprStruct) { + #[cfg(any(feature = "derive", feature = "full"))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_expr_struct_mut(&mut self, i: &mut crate::ExprStruct) { visit_expr_struct_mut(self, i); } #[cfg(feature = "full")] - fn visit_expr_try_mut(&mut self, i: &mut ExprTry) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_try_mut(&mut self, i: &mut crate::ExprTry) { visit_expr_try_mut(self, i); } #[cfg(feature = "full")] - fn visit_expr_try_block_mut(&mut self, i: &mut ExprTryBlock) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_try_block_mut(&mut self, i: &mut crate::ExprTryBlock) { visit_expr_try_block_mut(self, i); } - #[cfg(feature = "full")] - fn visit_expr_tuple_mut(&mut self, i: &mut ExprTuple) { + #[cfg(any(feature = "derive", feature = "full"))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_expr_tuple_mut(&mut self, i: &mut crate::ExprTuple) { visit_expr_tuple_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_expr_unary_mut(&mut self, i: &mut ExprUnary) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_expr_unary_mut(&mut self, i: &mut crate::ExprUnary) { visit_expr_unary_mut(self, i); } #[cfg(feature = "full")] - fn visit_expr_unsafe_mut(&mut self, i: &mut ExprUnsafe) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_unsafe_mut(&mut self, i: &mut crate::ExprUnsafe) { visit_expr_unsafe_mut(self, i); } #[cfg(feature = "full")] - fn visit_expr_while_mut(&mut self, i: &mut ExprWhile) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_while_mut(&mut self, i: &mut crate::ExprWhile) { visit_expr_while_mut(self, i); } #[cfg(feature = "full")] - fn visit_expr_yield_mut(&mut self, i: &mut ExprYield) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_expr_yield_mut(&mut self, i: &mut crate::ExprYield) { visit_expr_yield_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_field_mut(&mut self, i: &mut Field) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_field_mut(&mut self, i: &mut crate::Field) { visit_field_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_field_mutability_mut(&mut self, i: &mut FieldMutability) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_field_mutability_mut(&mut self, i: &mut crate::FieldMutability) { visit_field_mutability_mut(self, i); } #[cfg(feature = "full")] - fn visit_field_pat_mut(&mut self, i: &mut FieldPat) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_field_pat_mut(&mut self, i: &mut crate::FieldPat) { visit_field_pat_mut(self, i); } - #[cfg(feature = "full")] - fn visit_field_value_mut(&mut self, i: &mut FieldValue) { + #[cfg(any(feature = "derive", feature = "full"))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_field_value_mut(&mut self, i: &mut crate::FieldValue) { visit_field_value_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_fields_mut(&mut self, i: &mut Fields) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_fields_mut(&mut self, i: &mut crate::Fields) { visit_fields_mut(self, i); } 
#[cfg(any(feature = "derive", feature = "full"))] - fn visit_fields_named_mut(&mut self, i: &mut FieldsNamed) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_fields_named_mut(&mut self, i: &mut crate::FieldsNamed) { visit_fields_named_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_fields_unnamed_mut(&mut self, i: &mut FieldsUnnamed) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_fields_unnamed_mut(&mut self, i: &mut crate::FieldsUnnamed) { visit_fields_unnamed_mut(self, i); } #[cfg(feature = "full")] - fn visit_file_mut(&mut self, i: &mut File) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_file_mut(&mut self, i: &mut crate::File) { visit_file_mut(self, i); } #[cfg(feature = "full")] - fn visit_fn_arg_mut(&mut self, i: &mut FnArg) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_fn_arg_mut(&mut self, i: &mut crate::FnArg) { visit_fn_arg_mut(self, i); } #[cfg(feature = "full")] - fn visit_foreign_item_mut(&mut self, i: &mut ForeignItem) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_foreign_item_mut(&mut self, i: &mut crate::ForeignItem) { visit_foreign_item_mut(self, i); } #[cfg(feature = "full")] - fn visit_foreign_item_fn_mut(&mut self, i: &mut ForeignItemFn) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_foreign_item_fn_mut(&mut self, i: &mut crate::ForeignItemFn) { visit_foreign_item_fn_mut(self, i); } #[cfg(feature = "full")] - fn visit_foreign_item_macro_mut(&mut self, i: &mut ForeignItemMacro) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_foreign_item_macro_mut(&mut self, i: &mut crate::ForeignItemMacro) { visit_foreign_item_macro_mut(self, i); } #[cfg(feature = "full")] - fn visit_foreign_item_static_mut(&mut self, i: &mut ForeignItemStatic) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_foreign_item_static_mut(&mut self, i: &mut crate::ForeignItemStatic) { visit_foreign_item_static_mut(self, i); } #[cfg(feature = "full")] - fn visit_foreign_item_type_mut(&mut self, i: &mut ForeignItemType) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_foreign_item_type_mut(&mut self, i: &mut crate::ForeignItemType) { visit_foreign_item_type_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_generic_argument_mut(&mut self, i: &mut GenericArgument) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_generic_argument_mut(&mut self, i: &mut crate::GenericArgument) { visit_generic_argument_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_generic_param_mut(&mut self, i: &mut GenericParam) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_generic_param_mut(&mut self, i: &mut crate::GenericParam) { visit_generic_param_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_generics_mut(&mut self, i: &mut Generics) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_generics_mut(&mut self, i: &mut crate::Generics) { visit_generics_mut(self, i); } - fn visit_ident_mut(&mut self, i: &mut Ident) { + fn visit_ident_mut(&mut self, i: &mut proc_macro2::Ident) { visit_ident_mut(self, i); } #[cfg(feature = "full")] - fn visit_impl_item_mut(&mut self, i: &mut ImplItem) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_impl_item_mut(&mut self, i: &mut crate::ImplItem) { visit_impl_item_mut(self, i); } 
#[cfg(feature = "full")] - fn visit_impl_item_const_mut(&mut self, i: &mut ImplItemConst) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_impl_item_const_mut(&mut self, i: &mut crate::ImplItemConst) { visit_impl_item_const_mut(self, i); } #[cfg(feature = "full")] - fn visit_impl_item_fn_mut(&mut self, i: &mut ImplItemFn) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_impl_item_fn_mut(&mut self, i: &mut crate::ImplItemFn) { visit_impl_item_fn_mut(self, i); } #[cfg(feature = "full")] - fn visit_impl_item_macro_mut(&mut self, i: &mut ImplItemMacro) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_impl_item_macro_mut(&mut self, i: &mut crate::ImplItemMacro) { visit_impl_item_macro_mut(self, i); } #[cfg(feature = "full")] - fn visit_impl_item_type_mut(&mut self, i: &mut ImplItemType) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_impl_item_type_mut(&mut self, i: &mut crate::ImplItemType) { visit_impl_item_type_mut(self, i); } #[cfg(feature = "full")] - fn visit_impl_restriction_mut(&mut self, i: &mut ImplRestriction) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_impl_restriction_mut(&mut self, i: &mut crate::ImplRestriction) { visit_impl_restriction_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_index_mut(&mut self, i: &mut Index) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_index_mut(&mut self, i: &mut crate::Index) { visit_index_mut(self, i); } #[cfg(feature = "full")] - fn visit_item_mut(&mut self, i: &mut Item) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_item_mut(&mut self, i: &mut crate::Item) { visit_item_mut(self, i); } #[cfg(feature = "full")] - fn visit_item_const_mut(&mut self, i: &mut ItemConst) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_item_const_mut(&mut self, i: &mut crate::ItemConst) { visit_item_const_mut(self, i); } #[cfg(feature = "full")] - fn visit_item_enum_mut(&mut self, i: &mut ItemEnum) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_item_enum_mut(&mut self, i: &mut crate::ItemEnum) { visit_item_enum_mut(self, i); } #[cfg(feature = "full")] - fn visit_item_extern_crate_mut(&mut self, i: &mut ItemExternCrate) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_item_extern_crate_mut(&mut self, i: &mut crate::ItemExternCrate) { visit_item_extern_crate_mut(self, i); } #[cfg(feature = "full")] - fn visit_item_fn_mut(&mut self, i: &mut ItemFn) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_item_fn_mut(&mut self, i: &mut crate::ItemFn) { visit_item_fn_mut(self, i); } #[cfg(feature = "full")] - fn visit_item_foreign_mod_mut(&mut self, i: &mut ItemForeignMod) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_item_foreign_mod_mut(&mut self, i: &mut crate::ItemForeignMod) { visit_item_foreign_mod_mut(self, i); } #[cfg(feature = "full")] - fn visit_item_impl_mut(&mut self, i: &mut ItemImpl) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_item_impl_mut(&mut self, i: &mut crate::ItemImpl) { visit_item_impl_mut(self, i); } #[cfg(feature = "full")] - fn visit_item_macro_mut(&mut self, i: &mut ItemMacro) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_item_macro_mut(&mut self, i: &mut crate::ItemMacro) { visit_item_macro_mut(self, i); } #[cfg(feature = "full")] - fn visit_item_mod_mut(&mut self, i: &mut ItemMod) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_item_mod_mut(&mut self, i: &mut 
crate::ItemMod) { visit_item_mod_mut(self, i); } #[cfg(feature = "full")] - fn visit_item_static_mut(&mut self, i: &mut ItemStatic) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_item_static_mut(&mut self, i: &mut crate::ItemStatic) { visit_item_static_mut(self, i); } #[cfg(feature = "full")] - fn visit_item_struct_mut(&mut self, i: &mut ItemStruct) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_item_struct_mut(&mut self, i: &mut crate::ItemStruct) { visit_item_struct_mut(self, i); } #[cfg(feature = "full")] - fn visit_item_trait_mut(&mut self, i: &mut ItemTrait) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_item_trait_mut(&mut self, i: &mut crate::ItemTrait) { visit_item_trait_mut(self, i); } #[cfg(feature = "full")] - fn visit_item_trait_alias_mut(&mut self, i: &mut ItemTraitAlias) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_item_trait_alias_mut(&mut self, i: &mut crate::ItemTraitAlias) { visit_item_trait_alias_mut(self, i); } #[cfg(feature = "full")] - fn visit_item_type_mut(&mut self, i: &mut ItemType) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_item_type_mut(&mut self, i: &mut crate::ItemType) { visit_item_type_mut(self, i); } #[cfg(feature = "full")] - fn visit_item_union_mut(&mut self, i: &mut ItemUnion) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_item_union_mut(&mut self, i: &mut crate::ItemUnion) { visit_item_union_mut(self, i); } #[cfg(feature = "full")] - fn visit_item_use_mut(&mut self, i: &mut ItemUse) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_item_use_mut(&mut self, i: &mut crate::ItemUse) { visit_item_use_mut(self, i); } #[cfg(feature = "full")] - fn visit_label_mut(&mut self, i: &mut Label) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_label_mut(&mut self, i: &mut crate::Label) { visit_label_mut(self, i); } - fn visit_lifetime_mut(&mut self, i: &mut Lifetime) { + fn visit_lifetime_mut(&mut self, i: &mut crate::Lifetime) { visit_lifetime_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_lifetime_param_mut(&mut self, i: &mut LifetimeParam) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_lifetime_param_mut(&mut self, i: &mut crate::LifetimeParam) { visit_lifetime_param_mut(self, i); } - fn visit_lit_mut(&mut self, i: &mut Lit) { + fn visit_lit_mut(&mut self, i: &mut crate::Lit) { visit_lit_mut(self, i); } - fn visit_lit_bool_mut(&mut self, i: &mut LitBool) { + fn visit_lit_bool_mut(&mut self, i: &mut crate::LitBool) { visit_lit_bool_mut(self, i); } - fn visit_lit_byte_mut(&mut self, i: &mut LitByte) { + fn visit_lit_byte_mut(&mut self, i: &mut crate::LitByte) { visit_lit_byte_mut(self, i); } - fn visit_lit_byte_str_mut(&mut self, i: &mut LitByteStr) { + fn visit_lit_byte_str_mut(&mut self, i: &mut crate::LitByteStr) { visit_lit_byte_str_mut(self, i); } - fn visit_lit_char_mut(&mut self, i: &mut LitChar) { + fn visit_lit_cstr_mut(&mut self, i: &mut crate::LitCStr) { + visit_lit_cstr_mut(self, i); + } + fn visit_lit_char_mut(&mut self, i: &mut crate::LitChar) { visit_lit_char_mut(self, i); } - fn visit_lit_float_mut(&mut self, i: &mut LitFloat) { + fn visit_lit_float_mut(&mut self, i: &mut crate::LitFloat) { visit_lit_float_mut(self, i); } - fn visit_lit_int_mut(&mut self, i: &mut LitInt) { + fn visit_lit_int_mut(&mut self, i: &mut crate::LitInt) { visit_lit_int_mut(self, i); } - fn visit_lit_str_mut(&mut self, i: &mut LitStr) { + fn visit_lit_str_mut(&mut self, i: &mut 
crate::LitStr) { visit_lit_str_mut(self, i); } #[cfg(feature = "full")] - fn visit_local_mut(&mut self, i: &mut Local) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_local_mut(&mut self, i: &mut crate::Local) { visit_local_mut(self, i); } #[cfg(feature = "full")] - fn visit_local_init_mut(&mut self, i: &mut LocalInit) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_local_init_mut(&mut self, i: &mut crate::LocalInit) { visit_local_init_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_macro_mut(&mut self, i: &mut Macro) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_macro_mut(&mut self, i: &mut crate::Macro) { visit_macro_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_macro_delimiter_mut(&mut self, i: &mut MacroDelimiter) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_macro_delimiter_mut(&mut self, i: &mut crate::MacroDelimiter) { visit_macro_delimiter_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_member_mut(&mut self, i: &mut Member) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_member_mut(&mut self, i: &mut crate::Member) { visit_member_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_meta_mut(&mut self, i: &mut Meta) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_meta_mut(&mut self, i: &mut crate::Meta) { visit_meta_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_meta_list_mut(&mut self, i: &mut MetaList) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_meta_list_mut(&mut self, i: &mut crate::MetaList) { visit_meta_list_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_meta_name_value_mut(&mut self, i: &mut MetaNameValue) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_meta_name_value_mut(&mut self, i: &mut crate::MetaNameValue) { visit_meta_name_value_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] fn visit_parenthesized_generic_arguments_mut( &mut self, - i: &mut ParenthesizedGenericArguments, + i: &mut crate::ParenthesizedGenericArguments, ) { visit_parenthesized_generic_arguments_mut(self, i); } #[cfg(feature = "full")] - fn visit_pat_mut(&mut self, i: &mut Pat) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_pat_mut(&mut self, i: &mut crate::Pat) { visit_pat_mut(self, i); } #[cfg(feature = "full")] - fn visit_pat_ident_mut(&mut self, i: &mut PatIdent) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_pat_ident_mut(&mut self, i: &mut crate::PatIdent) { visit_pat_ident_mut(self, i); } #[cfg(feature = "full")] - fn visit_pat_or_mut(&mut self, i: &mut PatOr) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_pat_or_mut(&mut self, i: &mut crate::PatOr) { visit_pat_or_mut(self, i); } #[cfg(feature = "full")] - fn visit_pat_paren_mut(&mut self, i: &mut PatParen) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_pat_paren_mut(&mut self, i: &mut crate::PatParen) { visit_pat_paren_mut(self, i); } #[cfg(feature = "full")] - fn visit_pat_reference_mut(&mut self, i: &mut PatReference) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_pat_reference_mut(&mut self, i: &mut crate::PatReference) { 
visit_pat_reference_mut(self, i); } #[cfg(feature = "full")] - fn visit_pat_rest_mut(&mut self, i: &mut PatRest) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_pat_rest_mut(&mut self, i: &mut crate::PatRest) { visit_pat_rest_mut(self, i); } #[cfg(feature = "full")] - fn visit_pat_slice_mut(&mut self, i: &mut PatSlice) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_pat_slice_mut(&mut self, i: &mut crate::PatSlice) { visit_pat_slice_mut(self, i); } #[cfg(feature = "full")] - fn visit_pat_struct_mut(&mut self, i: &mut PatStruct) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_pat_struct_mut(&mut self, i: &mut crate::PatStruct) { visit_pat_struct_mut(self, i); } #[cfg(feature = "full")] - fn visit_pat_tuple_mut(&mut self, i: &mut PatTuple) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_pat_tuple_mut(&mut self, i: &mut crate::PatTuple) { visit_pat_tuple_mut(self, i); } #[cfg(feature = "full")] - fn visit_pat_tuple_struct_mut(&mut self, i: &mut PatTupleStruct) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_pat_tuple_struct_mut(&mut self, i: &mut crate::PatTupleStruct) { visit_pat_tuple_struct_mut(self, i); } #[cfg(feature = "full")] - fn visit_pat_type_mut(&mut self, i: &mut PatType) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_pat_type_mut(&mut self, i: &mut crate::PatType) { visit_pat_type_mut(self, i); } #[cfg(feature = "full")] - fn visit_pat_wild_mut(&mut self, i: &mut PatWild) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_pat_wild_mut(&mut self, i: &mut crate::PatWild) { visit_pat_wild_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_path_mut(&mut self, i: &mut Path) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_path_mut(&mut self, i: &mut crate::Path) { visit_path_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_path_arguments_mut(&mut self, i: &mut PathArguments) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_path_arguments_mut(&mut self, i: &mut crate::PathArguments) { visit_path_arguments_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_path_segment_mut(&mut self, i: &mut PathSegment) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_path_segment_mut(&mut self, i: &mut crate::PathSegment) { visit_path_segment_mut(self, i); } + #[cfg(feature = "full")] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_pointer_mutability_mut(&mut self, i: &mut crate::PointerMutability) { + visit_pointer_mutability_mut(self, i); + } + #[cfg(feature = "full")] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_precise_capture_mut(&mut self, i: &mut crate::PreciseCapture) { + visit_precise_capture_mut(self, i); + } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_predicate_lifetime_mut(&mut self, i: &mut PredicateLifetime) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_predicate_lifetime_mut(&mut self, i: &mut crate::PredicateLifetime) { visit_predicate_lifetime_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_predicate_type_mut(&mut self, i: &mut PredicateType) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_predicate_type_mut(&mut self, i: &mut crate::PredicateType) { visit_predicate_type_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] 
- fn visit_qself_mut(&mut self, i: &mut QSelf) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_qself_mut(&mut self, i: &mut crate::QSelf) { visit_qself_mut(self, i); } #[cfg(feature = "full")] - fn visit_range_limits_mut(&mut self, i: &mut RangeLimits) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_range_limits_mut(&mut self, i: &mut crate::RangeLimits) { visit_range_limits_mut(self, i); } #[cfg(feature = "full")] - fn visit_receiver_mut(&mut self, i: &mut Receiver) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_receiver_mut(&mut self, i: &mut crate::Receiver) { visit_receiver_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_return_type_mut(&mut self, i: &mut ReturnType) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_return_type_mut(&mut self, i: &mut crate::ReturnType) { visit_return_type_mut(self, i); } #[cfg(feature = "full")] - fn visit_signature_mut(&mut self, i: &mut Signature) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_signature_mut(&mut self, i: &mut crate::Signature) { visit_signature_mut(self, i); } - fn visit_span_mut(&mut self, i: &mut Span) { - visit_span_mut(self, i); - } + fn visit_span_mut(&mut self, i: &mut proc_macro2::Span) {} #[cfg(feature = "full")] - fn visit_static_mutability_mut(&mut self, i: &mut StaticMutability) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_static_mutability_mut(&mut self, i: &mut crate::StaticMutability) { visit_static_mutability_mut(self, i); } #[cfg(feature = "full")] - fn visit_stmt_mut(&mut self, i: &mut Stmt) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_stmt_mut(&mut self, i: &mut crate::Stmt) { visit_stmt_mut(self, i); } #[cfg(feature = "full")] - fn visit_stmt_macro_mut(&mut self, i: &mut StmtMacro) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_stmt_macro_mut(&mut self, i: &mut crate::StmtMacro) { visit_stmt_macro_mut(self, i); } + fn visit_token_stream_mut(&mut self, i: &mut proc_macro2::TokenStream) {} #[cfg(any(feature = "derive", feature = "full"))] - fn visit_trait_bound_mut(&mut self, i: &mut TraitBound) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_trait_bound_mut(&mut self, i: &mut crate::TraitBound) { visit_trait_bound_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_trait_bound_modifier_mut(&mut self, i: &mut TraitBoundModifier) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_trait_bound_modifier_mut(&mut self, i: &mut crate::TraitBoundModifier) { visit_trait_bound_modifier_mut(self, i); } #[cfg(feature = "full")] - fn visit_trait_item_mut(&mut self, i: &mut TraitItem) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_trait_item_mut(&mut self, i: &mut crate::TraitItem) { visit_trait_item_mut(self, i); } #[cfg(feature = "full")] - fn visit_trait_item_const_mut(&mut self, i: &mut TraitItemConst) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_trait_item_const_mut(&mut self, i: &mut crate::TraitItemConst) { visit_trait_item_const_mut(self, i); } #[cfg(feature = "full")] - fn visit_trait_item_fn_mut(&mut self, i: &mut TraitItemFn) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_trait_item_fn_mut(&mut self, i: &mut crate::TraitItemFn) { visit_trait_item_fn_mut(self, i); } #[cfg(feature = "full")] - fn visit_trait_item_macro_mut(&mut self, i: &mut TraitItemMacro) { + 
#[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_trait_item_macro_mut(&mut self, i: &mut crate::TraitItemMacro) { visit_trait_item_macro_mut(self, i); } #[cfg(feature = "full")] - fn visit_trait_item_type_mut(&mut self, i: &mut TraitItemType) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_trait_item_type_mut(&mut self, i: &mut crate::TraitItemType) { visit_trait_item_type_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_type_mut(&mut self, i: &mut Type) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_type_mut(&mut self, i: &mut crate::Type) { visit_type_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_type_array_mut(&mut self, i: &mut TypeArray) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_type_array_mut(&mut self, i: &mut crate::TypeArray) { visit_type_array_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_type_bare_fn_mut(&mut self, i: &mut TypeBareFn) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_type_bare_fn_mut(&mut self, i: &mut crate::TypeBareFn) { visit_type_bare_fn_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_type_group_mut(&mut self, i: &mut TypeGroup) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_type_group_mut(&mut self, i: &mut crate::TypeGroup) { visit_type_group_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_type_impl_trait_mut(&mut self, i: &mut TypeImplTrait) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_type_impl_trait_mut(&mut self, i: &mut crate::TypeImplTrait) { visit_type_impl_trait_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_type_infer_mut(&mut self, i: &mut TypeInfer) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_type_infer_mut(&mut self, i: &mut crate::TypeInfer) { visit_type_infer_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_type_macro_mut(&mut self, i: &mut TypeMacro) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_type_macro_mut(&mut self, i: &mut crate::TypeMacro) { visit_type_macro_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_type_never_mut(&mut self, i: &mut TypeNever) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_type_never_mut(&mut self, i: &mut crate::TypeNever) { visit_type_never_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_type_param_mut(&mut self, i: &mut TypeParam) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_type_param_mut(&mut self, i: &mut crate::TypeParam) { visit_type_param_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_type_param_bound_mut(&mut self, i: &mut TypeParamBound) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_type_param_bound_mut(&mut self, i: &mut crate::TypeParamBound) { visit_type_param_bound_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_type_paren_mut(&mut self, i: &mut TypeParen) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_type_paren_mut(&mut self, i: &mut crate::TypeParen) { visit_type_paren_mut(self, i); } 
#[cfg(any(feature = "derive", feature = "full"))] - fn visit_type_path_mut(&mut self, i: &mut TypePath) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_type_path_mut(&mut self, i: &mut crate::TypePath) { visit_type_path_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_type_ptr_mut(&mut self, i: &mut TypePtr) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_type_ptr_mut(&mut self, i: &mut crate::TypePtr) { visit_type_ptr_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_type_reference_mut(&mut self, i: &mut TypeReference) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_type_reference_mut(&mut self, i: &mut crate::TypeReference) { visit_type_reference_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_type_slice_mut(&mut self, i: &mut TypeSlice) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_type_slice_mut(&mut self, i: &mut crate::TypeSlice) { visit_type_slice_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_type_trait_object_mut(&mut self, i: &mut TypeTraitObject) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_type_trait_object_mut(&mut self, i: &mut crate::TypeTraitObject) { visit_type_trait_object_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_type_tuple_mut(&mut self, i: &mut TypeTuple) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_type_tuple_mut(&mut self, i: &mut crate::TypeTuple) { visit_type_tuple_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_un_op_mut(&mut self, i: &mut UnOp) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_un_op_mut(&mut self, i: &mut crate::UnOp) { visit_un_op_mut(self, i); } #[cfg(feature = "full")] - fn visit_use_glob_mut(&mut self, i: &mut UseGlob) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_use_glob_mut(&mut self, i: &mut crate::UseGlob) { visit_use_glob_mut(self, i); } #[cfg(feature = "full")] - fn visit_use_group_mut(&mut self, i: &mut UseGroup) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_use_group_mut(&mut self, i: &mut crate::UseGroup) { visit_use_group_mut(self, i); } #[cfg(feature = "full")] - fn visit_use_name_mut(&mut self, i: &mut UseName) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_use_name_mut(&mut self, i: &mut crate::UseName) { visit_use_name_mut(self, i); } #[cfg(feature = "full")] - fn visit_use_path_mut(&mut self, i: &mut UsePath) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_use_path_mut(&mut self, i: &mut crate::UsePath) { visit_use_path_mut(self, i); } #[cfg(feature = "full")] - fn visit_use_rename_mut(&mut self, i: &mut UseRename) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_use_rename_mut(&mut self, i: &mut crate::UseRename) { visit_use_rename_mut(self, i); } #[cfg(feature = "full")] - fn visit_use_tree_mut(&mut self, i: &mut UseTree) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_use_tree_mut(&mut self, i: &mut crate::UseTree) { visit_use_tree_mut(self, i); } #[cfg(feature = "full")] - fn visit_variadic_mut(&mut self, i: &mut Variadic) { + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + fn visit_variadic_mut(&mut self, i: &mut crate::Variadic) { visit_variadic_mut(self, i); } 
#[cfg(any(feature = "derive", feature = "full"))] - fn visit_variant_mut(&mut self, i: &mut Variant) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_variant_mut(&mut self, i: &mut crate::Variant) { visit_variant_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_vis_restricted_mut(&mut self, i: &mut VisRestricted) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_vis_restricted_mut(&mut self, i: &mut crate::VisRestricted) { visit_vis_restricted_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_visibility_mut(&mut self, i: &mut Visibility) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_visibility_mut(&mut self, i: &mut crate::Visibility) { visit_visibility_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_where_clause_mut(&mut self, i: &mut WhereClause) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_where_clause_mut(&mut self, i: &mut crate::WhereClause) { visit_where_clause_mut(self, i); } #[cfg(any(feature = "derive", feature = "full"))] - fn visit_where_predicate_mut(&mut self, i: &mut WherePredicate) { + #[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] + fn visit_where_predicate_mut(&mut self, i: &mut crate::WherePredicate) { visit_where_predicate_mut(self, i); } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_abi_mut<V>(v: &mut V, node: &mut Abi) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_abi_mut<V>(v: &mut V, node: &mut crate::Abi) where V: VisitMut + ?Sized, { @@ -764,9 +963,10 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] pub fn visit_angle_bracketed_generic_arguments_mut<V>( v: &mut V, - node: &mut AngleBracketedGenericArguments, + node: &mut crate::AngleBracketedGenericArguments, ) where V: VisitMut + ?Sized, @@ -780,13 +980,12 @@ where skip!(node.gt_token); } #[cfg(feature = "full")] -pub fn visit_arm_mut<V>(v: &mut V, node: &mut Arm) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_arm_mut<V>(v: &mut V, node: &mut crate::Arm) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); v.visit_pat_mut(&mut node.pat); if let Some(it) = &mut node.guard { skip!((it).0); @@ -797,7 +996,8 @@ where skip!(node.comma); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_assoc_const_mut<V>(v: &mut V, node: &mut AssocConst) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_assoc_const_mut<V>(v: &mut V, node: &mut crate::AssocConst) where V: VisitMut + ?Sized, { @@ -809,7 +1009,8 @@ where v.visit_expr_mut(&mut node.value); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_assoc_type_mut<V>(v: &mut V, node: &mut AssocType) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_assoc_type_mut<V>(v: &mut V, node: &mut crate::AssocType) where V: VisitMut + ?Sized, { @@ -821,19 +1022,21 @@ where v.visit_type_mut(&mut node.ty); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_attr_style_mut<V>(v: &mut V, node: &mut AttrStyle) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_attr_style_mut<V>(v: &mut V, node: &mut crate::AttrStyle) where V: 
VisitMut + ?Sized, { match node { - AttrStyle::Outer => {} - AttrStyle::Inner(_binding_0) => { + crate::AttrStyle::Outer => {} + crate::AttrStyle::Inner(_binding_0) => { skip!(_binding_0); } } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_attribute_mut<V>(v: &mut V, node: &mut Attribute) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_attribute_mut<V>(v: &mut V, node: &mut crate::Attribute) where V: VisitMut + ?Sized, { @@ -843,13 +1046,12 @@ where v.visit_meta_mut(&mut node.meta); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_bare_fn_arg_mut<V>(v: &mut V, node: &mut BareFnArg) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_bare_fn_arg_mut<V>(v: &mut V, node: &mut crate::BareFnArg) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); if let Some(it) = &mut node.name { v.visit_ident_mut(&mut (it).0); skip!((it).1); @@ -857,13 +1059,12 @@ where v.visit_type_mut(&mut node.ty); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_bare_variadic_mut<V>(v: &mut V, node: &mut BareVariadic) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_bare_variadic_mut<V>(v: &mut V, node: &mut crate::BareVariadic) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); if let Some(it) = &mut node.name { v.visit_ident_mut(&mut (it).0); skip!((it).1); @@ -872,99 +1073,101 @@ where skip!(node.comma); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_bin_op_mut<V>(v: &mut V, node: &mut BinOp) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_bin_op_mut<V>(v: &mut V, node: &mut crate::BinOp) where V: VisitMut + ?Sized, { match node { - BinOp::Add(_binding_0) => { + crate::BinOp::Add(_binding_0) => { skip!(_binding_0); } - BinOp::Sub(_binding_0) => { + crate::BinOp::Sub(_binding_0) => { skip!(_binding_0); } - BinOp::Mul(_binding_0) => { + crate::BinOp::Mul(_binding_0) => { skip!(_binding_0); } - BinOp::Div(_binding_0) => { + crate::BinOp::Div(_binding_0) => { skip!(_binding_0); } - BinOp::Rem(_binding_0) => { + crate::BinOp::Rem(_binding_0) => { skip!(_binding_0); } - BinOp::And(_binding_0) => { + crate::BinOp::And(_binding_0) => { skip!(_binding_0); } - BinOp::Or(_binding_0) => { + crate::BinOp::Or(_binding_0) => { skip!(_binding_0); } - BinOp::BitXor(_binding_0) => { + crate::BinOp::BitXor(_binding_0) => { skip!(_binding_0); } - BinOp::BitAnd(_binding_0) => { + crate::BinOp::BitAnd(_binding_0) => { skip!(_binding_0); } - BinOp::BitOr(_binding_0) => { + crate::BinOp::BitOr(_binding_0) => { skip!(_binding_0); } - BinOp::Shl(_binding_0) => { + crate::BinOp::Shl(_binding_0) => { skip!(_binding_0); } - BinOp::Shr(_binding_0) => { + crate::BinOp::Shr(_binding_0) => { skip!(_binding_0); } - BinOp::Eq(_binding_0) => { + crate::BinOp::Eq(_binding_0) => { skip!(_binding_0); } - BinOp::Lt(_binding_0) => { + crate::BinOp::Lt(_binding_0) => { skip!(_binding_0); } - BinOp::Le(_binding_0) => { + crate::BinOp::Le(_binding_0) => { skip!(_binding_0); } - BinOp::Ne(_binding_0) => { + crate::BinOp::Ne(_binding_0) => { skip!(_binding_0); } - BinOp::Ge(_binding_0) => { + crate::BinOp::Ge(_binding_0) => { skip!(_binding_0); } - BinOp::Gt(_binding_0) => { + crate::BinOp::Gt(_binding_0) => { skip!(_binding_0); } - BinOp::AddAssign(_binding_0) => { + 
crate::BinOp::AddAssign(_binding_0) => { skip!(_binding_0); } - BinOp::SubAssign(_binding_0) => { + crate::BinOp::SubAssign(_binding_0) => { skip!(_binding_0); } - BinOp::MulAssign(_binding_0) => { + crate::BinOp::MulAssign(_binding_0) => { skip!(_binding_0); } - BinOp::DivAssign(_binding_0) => { + crate::BinOp::DivAssign(_binding_0) => { skip!(_binding_0); } - BinOp::RemAssign(_binding_0) => { + crate::BinOp::RemAssign(_binding_0) => { skip!(_binding_0); } - BinOp::BitXorAssign(_binding_0) => { + crate::BinOp::BitXorAssign(_binding_0) => { skip!(_binding_0); } - BinOp::BitAndAssign(_binding_0) => { + crate::BinOp::BitAndAssign(_binding_0) => { skip!(_binding_0); } - BinOp::BitOrAssign(_binding_0) => { + crate::BinOp::BitOrAssign(_binding_0) => { skip!(_binding_0); } - BinOp::ShlAssign(_binding_0) => { + crate::BinOp::ShlAssign(_binding_0) => { skip!(_binding_0); } - BinOp::ShrAssign(_binding_0) => { + crate::BinOp::ShrAssign(_binding_0) => { skip!(_binding_0); } } } #[cfg(feature = "full")] -pub fn visit_block_mut<V>(v: &mut V, node: &mut Block) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_block_mut<V>(v: &mut V, node: &mut crate::Block) where V: VisitMut + ?Sized, { @@ -974,7 +1177,8 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_bound_lifetimes_mut<V>(v: &mut V, node: &mut BoundLifetimes) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_bound_lifetimes_mut<V>(v: &mut V, node: &mut crate::BoundLifetimes) where V: VisitMut + ?Sized, { @@ -986,14 +1190,28 @@ where } skip!(node.gt_token); } -#[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_const_param_mut<V>(v: &mut V, node: &mut ConstParam) +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_captured_param_mut<V>(v: &mut V, node: &mut crate::CapturedParam) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); + match node { + crate::CapturedParam::Lifetime(_binding_0) => { + v.visit_lifetime_mut(_binding_0); + } + crate::CapturedParam::Ident(_binding_0) => { + v.visit_ident_mut(_binding_0); + } } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_const_param_mut<V>(v: &mut V, node: &mut crate::ConstParam) +where + V: VisitMut + ?Sized, +{ + v.visit_attributes_mut(&mut node.attrs); skip!(node.const_token); v.visit_ident_mut(&mut node.ident); skip!(node.colon_token); @@ -1004,7 +1222,8 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_constraint_mut<V>(v: &mut V, node: &mut Constraint) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_constraint_mut<V>(v: &mut V, node: &mut crate::Constraint) where V: VisitMut + ?Sized, { @@ -1019,24 +1238,26 @@ where } } #[cfg(feature = "derive")] -pub fn visit_data_mut<V>(v: &mut V, node: &mut Data) +#[cfg_attr(docsrs, doc(cfg(feature = "derive")))] +pub fn visit_data_mut<V>(v: &mut V, node: &mut crate::Data) where V: VisitMut + ?Sized, { match node { - Data::Struct(_binding_0) => { + crate::Data::Struct(_binding_0) => { v.visit_data_struct_mut(_binding_0); } - Data::Enum(_binding_0) => { + crate::Data::Enum(_binding_0) => { v.visit_data_enum_mut(_binding_0); } - Data::Union(_binding_0) => { + crate::Data::Union(_binding_0) => { v.visit_data_union_mut(_binding_0); } } } #[cfg(feature = "derive")] -pub fn visit_data_enum_mut<V>(v: &mut V, node: &mut DataEnum) +#[cfg_attr(docsrs, 
doc(cfg(feature = "derive")))] +pub fn visit_data_enum_mut<V>(v: &mut V, node: &mut crate::DataEnum) where V: VisitMut + ?Sized, { @@ -1048,7 +1269,8 @@ where } } #[cfg(feature = "derive")] -pub fn visit_data_struct_mut<V>(v: &mut V, node: &mut DataStruct) +#[cfg_attr(docsrs, doc(cfg(feature = "derive")))] +pub fn visit_data_struct_mut<V>(v: &mut V, node: &mut crate::DataStruct) where V: VisitMut + ?Sized, { @@ -1057,7 +1279,8 @@ where skip!(node.semi_token); } #[cfg(feature = "derive")] -pub fn visit_data_union_mut<V>(v: &mut V, node: &mut DataUnion) +#[cfg_attr(docsrs, doc(cfg(feature = "derive")))] +pub fn visit_data_union_mut<V>(v: &mut V, node: &mut crate::DataUnion) where V: VisitMut + ?Sized, { @@ -1065,151 +1288,153 @@ where v.visit_fields_named_mut(&mut node.fields); } #[cfg(feature = "derive")] -pub fn visit_derive_input_mut<V>(v: &mut V, node: &mut DeriveInput) +#[cfg_attr(docsrs, doc(cfg(feature = "derive")))] +pub fn visit_derive_input_mut<V>(v: &mut V, node: &mut crate::DeriveInput) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); v.visit_visibility_mut(&mut node.vis); v.visit_ident_mut(&mut node.ident); v.visit_generics_mut(&mut node.generics); v.visit_data_mut(&mut node.data); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_expr_mut<V>(v: &mut V, node: &mut Expr) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_expr_mut<V>(v: &mut V, node: &mut crate::Expr) where V: VisitMut + ?Sized, { match node { - Expr::Array(_binding_0) => { + crate::Expr::Array(_binding_0) => { full!(v.visit_expr_array_mut(_binding_0)); } - Expr::Assign(_binding_0) => { + crate::Expr::Assign(_binding_0) => { full!(v.visit_expr_assign_mut(_binding_0)); } - Expr::Async(_binding_0) => { + crate::Expr::Async(_binding_0) => { full!(v.visit_expr_async_mut(_binding_0)); } - Expr::Await(_binding_0) => { + crate::Expr::Await(_binding_0) => { full!(v.visit_expr_await_mut(_binding_0)); } - Expr::Binary(_binding_0) => { + crate::Expr::Binary(_binding_0) => { v.visit_expr_binary_mut(_binding_0); } - Expr::Block(_binding_0) => { + crate::Expr::Block(_binding_0) => { full!(v.visit_expr_block_mut(_binding_0)); } - Expr::Break(_binding_0) => { + crate::Expr::Break(_binding_0) => { full!(v.visit_expr_break_mut(_binding_0)); } - Expr::Call(_binding_0) => { + crate::Expr::Call(_binding_0) => { v.visit_expr_call_mut(_binding_0); } - Expr::Cast(_binding_0) => { + crate::Expr::Cast(_binding_0) => { v.visit_expr_cast_mut(_binding_0); } - Expr::Closure(_binding_0) => { + crate::Expr::Closure(_binding_0) => { full!(v.visit_expr_closure_mut(_binding_0)); } - Expr::Const(_binding_0) => { + crate::Expr::Const(_binding_0) => { full!(v.visit_expr_const_mut(_binding_0)); } - Expr::Continue(_binding_0) => { + crate::Expr::Continue(_binding_0) => { full!(v.visit_expr_continue_mut(_binding_0)); } - Expr::Field(_binding_0) => { + crate::Expr::Field(_binding_0) => { v.visit_expr_field_mut(_binding_0); } - Expr::ForLoop(_binding_0) => { + crate::Expr::ForLoop(_binding_0) => { full!(v.visit_expr_for_loop_mut(_binding_0)); } - Expr::Group(_binding_0) => { + crate::Expr::Group(_binding_0) => { v.visit_expr_group_mut(_binding_0); } - Expr::If(_binding_0) => { + crate::Expr::If(_binding_0) => { full!(v.visit_expr_if_mut(_binding_0)); } - Expr::Index(_binding_0) => { + crate::Expr::Index(_binding_0) => { v.visit_expr_index_mut(_binding_0); } - Expr::Infer(_binding_0) => { + 
crate::Expr::Infer(_binding_0) => { full!(v.visit_expr_infer_mut(_binding_0)); } - Expr::Let(_binding_0) => { + crate::Expr::Let(_binding_0) => { full!(v.visit_expr_let_mut(_binding_0)); } - Expr::Lit(_binding_0) => { + crate::Expr::Lit(_binding_0) => { v.visit_expr_lit_mut(_binding_0); } - Expr::Loop(_binding_0) => { + crate::Expr::Loop(_binding_0) => { full!(v.visit_expr_loop_mut(_binding_0)); } - Expr::Macro(_binding_0) => { + crate::Expr::Macro(_binding_0) => { v.visit_expr_macro_mut(_binding_0); } - Expr::Match(_binding_0) => { + crate::Expr::Match(_binding_0) => { full!(v.visit_expr_match_mut(_binding_0)); } - Expr::MethodCall(_binding_0) => { - full!(v.visit_expr_method_call_mut(_binding_0)); + crate::Expr::MethodCall(_binding_0) => { + v.visit_expr_method_call_mut(_binding_0); } - Expr::Paren(_binding_0) => { + crate::Expr::Paren(_binding_0) => { v.visit_expr_paren_mut(_binding_0); } - Expr::Path(_binding_0) => { + crate::Expr::Path(_binding_0) => { v.visit_expr_path_mut(_binding_0); } - Expr::Range(_binding_0) => { + crate::Expr::Range(_binding_0) => { full!(v.visit_expr_range_mut(_binding_0)); } - Expr::Reference(_binding_0) => { - full!(v.visit_expr_reference_mut(_binding_0)); + crate::Expr::RawAddr(_binding_0) => { + full!(v.visit_expr_raw_addr_mut(_binding_0)); + } + crate::Expr::Reference(_binding_0) => { + v.visit_expr_reference_mut(_binding_0); } - Expr::Repeat(_binding_0) => { + crate::Expr::Repeat(_binding_0) => { full!(v.visit_expr_repeat_mut(_binding_0)); } - Expr::Return(_binding_0) => { + crate::Expr::Return(_binding_0) => { full!(v.visit_expr_return_mut(_binding_0)); } - Expr::Struct(_binding_0) => { - full!(v.visit_expr_struct_mut(_binding_0)); + crate::Expr::Struct(_binding_0) => { + v.visit_expr_struct_mut(_binding_0); } - Expr::Try(_binding_0) => { + crate::Expr::Try(_binding_0) => { full!(v.visit_expr_try_mut(_binding_0)); } - Expr::TryBlock(_binding_0) => { + crate::Expr::TryBlock(_binding_0) => { full!(v.visit_expr_try_block_mut(_binding_0)); } - Expr::Tuple(_binding_0) => { - full!(v.visit_expr_tuple_mut(_binding_0)); + crate::Expr::Tuple(_binding_0) => { + v.visit_expr_tuple_mut(_binding_0); } - Expr::Unary(_binding_0) => { + crate::Expr::Unary(_binding_0) => { v.visit_expr_unary_mut(_binding_0); } - Expr::Unsafe(_binding_0) => { + crate::Expr::Unsafe(_binding_0) => { full!(v.visit_expr_unsafe_mut(_binding_0)); } - Expr::Verbatim(_binding_0) => { - skip!(_binding_0); + crate::Expr::Verbatim(_binding_0) => { + v.visit_token_stream_mut(_binding_0); } - Expr::While(_binding_0) => { + crate::Expr::While(_binding_0) => { full!(v.visit_expr_while_mut(_binding_0)); } - Expr::Yield(_binding_0) => { + crate::Expr::Yield(_binding_0) => { full!(v.visit_expr_yield_mut(_binding_0)); } } } #[cfg(feature = "full")] -pub fn visit_expr_array_mut<V>(v: &mut V, node: &mut ExprArray) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_array_mut<V>(v: &mut V, node: &mut crate::ExprArray) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); skip!(node.bracket_token); for mut el in Punctuated::pairs_mut(&mut node.elems) { let it = el.value_mut(); @@ -1217,74 +1442,68 @@ where } } #[cfg(feature = "full")] -pub fn visit_expr_assign_mut<V>(v: &mut V, node: &mut ExprAssign) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_assign_mut<V>(v: &mut V, node: &mut crate::ExprAssign) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + 
v.visit_attributes_mut(&mut node.attrs); v.visit_expr_mut(&mut *node.left); skip!(node.eq_token); v.visit_expr_mut(&mut *node.right); } #[cfg(feature = "full")] -pub fn visit_expr_async_mut<V>(v: &mut V, node: &mut ExprAsync) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_async_mut<V>(v: &mut V, node: &mut crate::ExprAsync) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); skip!(node.async_token); skip!(node.capture); v.visit_block_mut(&mut node.block); } #[cfg(feature = "full")] -pub fn visit_expr_await_mut<V>(v: &mut V, node: &mut ExprAwait) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_await_mut<V>(v: &mut V, node: &mut crate::ExprAwait) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); v.visit_expr_mut(&mut *node.base); skip!(node.dot_token); skip!(node.await_token); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_expr_binary_mut<V>(v: &mut V, node: &mut ExprBinary) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_expr_binary_mut<V>(v: &mut V, node: &mut crate::ExprBinary) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); v.visit_expr_mut(&mut *node.left); v.visit_bin_op_mut(&mut node.op); v.visit_expr_mut(&mut *node.right); } #[cfg(feature = "full")] -pub fn visit_expr_block_mut<V>(v: &mut V, node: &mut ExprBlock) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_block_mut<V>(v: &mut V, node: &mut crate::ExprBlock) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); if let Some(it) = &mut node.label { v.visit_label_mut(it); } v.visit_block_mut(&mut node.block); } #[cfg(feature = "full")] -pub fn visit_expr_break_mut<V>(v: &mut V, node: &mut ExprBreak) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_break_mut<V>(v: &mut V, node: &mut crate::ExprBreak) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); skip!(node.break_token); if let Some(it) = &mut node.label { v.visit_lifetime_mut(it); @@ -1294,13 +1513,12 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_expr_call_mut<V>(v: &mut V, node: &mut ExprCall) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_expr_call_mut<V>(v: &mut V, node: &mut crate::ExprCall) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); v.visit_expr_mut(&mut *node.func); skip!(node.paren_token); for mut el in Punctuated::pairs_mut(&mut node.args) { @@ -1309,25 +1527,23 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_expr_cast_mut<V>(v: &mut V, node: &mut ExprCast) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_expr_cast_mut<V>(v: &mut V, node: &mut crate::ExprCast) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); v.visit_expr_mut(&mut *node.expr); skip!(node.as_token); v.visit_type_mut(&mut *node.ty); } #[cfg(feature = "full")] -pub fn visit_expr_closure_mut<V>(v: &mut V, node: &mut ExprClosure) 
+#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_closure_mut<V>(v: &mut V, node: &mut crate::ExprClosure) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); if let Some(it) = &mut node.lifetimes { v.visit_bound_lifetimes_mut(it); } @@ -1345,49 +1561,45 @@ where v.visit_expr_mut(&mut *node.body); } #[cfg(feature = "full")] -pub fn visit_expr_const_mut<V>(v: &mut V, node: &mut ExprConst) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_const_mut<V>(v: &mut V, node: &mut crate::ExprConst) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); skip!(node.const_token); v.visit_block_mut(&mut node.block); } #[cfg(feature = "full")] -pub fn visit_expr_continue_mut<V>(v: &mut V, node: &mut ExprContinue) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_continue_mut<V>(v: &mut V, node: &mut crate::ExprContinue) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); skip!(node.continue_token); if let Some(it) = &mut node.label { v.visit_lifetime_mut(it); } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_expr_field_mut<V>(v: &mut V, node: &mut ExprField) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_expr_field_mut<V>(v: &mut V, node: &mut crate::ExprField) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); v.visit_expr_mut(&mut *node.base); skip!(node.dot_token); v.visit_member_mut(&mut node.member); } #[cfg(feature = "full")] -pub fn visit_expr_for_loop_mut<V>(v: &mut V, node: &mut ExprForLoop) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_for_loop_mut<V>(v: &mut V, node: &mut crate::ExprForLoop) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); if let Some(it) = &mut node.label { v.visit_label_mut(it); } @@ -1398,24 +1610,22 @@ where v.visit_block_mut(&mut node.body); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_expr_group_mut<V>(v: &mut V, node: &mut ExprGroup) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_expr_group_mut<V>(v: &mut V, node: &mut crate::ExprGroup) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); skip!(node.group_token); v.visit_expr_mut(&mut *node.expr); } #[cfg(feature = "full")] -pub fn visit_expr_if_mut<V>(v: &mut V, node: &mut ExprIf) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_if_mut<V>(v: &mut V, node: &mut crate::ExprIf) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); skip!(node.if_token); v.visit_expr_mut(&mut *node.cond); v.visit_block_mut(&mut node.then_branch); @@ -1425,58 +1635,53 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_expr_index_mut<V>(v: &mut V, node: &mut ExprIndex) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_expr_index_mut<V>(v: &mut V, node: &mut crate::ExprIndex) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + 
v.visit_attributes_mut(&mut node.attrs); v.visit_expr_mut(&mut *node.expr); skip!(node.bracket_token); v.visit_expr_mut(&mut *node.index); } #[cfg(feature = "full")] -pub fn visit_expr_infer_mut<V>(v: &mut V, node: &mut ExprInfer) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_infer_mut<V>(v: &mut V, node: &mut crate::ExprInfer) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); skip!(node.underscore_token); } #[cfg(feature = "full")] -pub fn visit_expr_let_mut<V>(v: &mut V, node: &mut ExprLet) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_let_mut<V>(v: &mut V, node: &mut crate::ExprLet) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); skip!(node.let_token); v.visit_pat_mut(&mut *node.pat); skip!(node.eq_token); v.visit_expr_mut(&mut *node.expr); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_expr_lit_mut<V>(v: &mut V, node: &mut ExprLit) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_expr_lit_mut<V>(v: &mut V, node: &mut crate::ExprLit) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); v.visit_lit_mut(&mut node.lit); } #[cfg(feature = "full")] -pub fn visit_expr_loop_mut<V>(v: &mut V, node: &mut ExprLoop) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_loop_mut<V>(v: &mut V, node: &mut crate::ExprLoop) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); if let Some(it) = &mut node.label { v.visit_label_mut(it); } @@ -1484,23 +1689,21 @@ where v.visit_block_mut(&mut node.body); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_expr_macro_mut<V>(v: &mut V, node: &mut ExprMacro) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_expr_macro_mut<V>(v: &mut V, node: &mut crate::ExprMacro) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); v.visit_macro_mut(&mut node.mac); } #[cfg(feature = "full")] -pub fn visit_expr_match_mut<V>(v: &mut V, node: &mut ExprMatch) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_match_mut<V>(v: &mut V, node: &mut crate::ExprMatch) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); skip!(node.match_token); v.visit_expr_mut(&mut *node.expr); skip!(node.brace_token); @@ -1508,14 +1711,13 @@ where v.visit_arm_mut(it); } } -#[cfg(feature = "full")] -pub fn visit_expr_method_call_mut<V>(v: &mut V, node: &mut ExprMethodCall) +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_expr_method_call_mut<V>(v: &mut V, node: &mut crate::ExprMethodCall) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); v.visit_expr_mut(&mut *node.receiver); skip!(node.dot_token); v.visit_ident_mut(&mut node.method); @@ -1529,37 +1731,34 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_expr_paren_mut<V>(v: &mut V, node: &mut ExprParen) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = 
"full"))))] +pub fn visit_expr_paren_mut<V>(v: &mut V, node: &mut crate::ExprParen) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); skip!(node.paren_token); v.visit_expr_mut(&mut *node.expr); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_expr_path_mut<V>(v: &mut V, node: &mut ExprPath) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_expr_path_mut<V>(v: &mut V, node: &mut crate::ExprPath) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); if let Some(it) = &mut node.qself { v.visit_qself_mut(it); } v.visit_path_mut(&mut node.path); } #[cfg(feature = "full")] -pub fn visit_expr_range_mut<V>(v: &mut V, node: &mut ExprRange) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_range_mut<V>(v: &mut V, node: &mut crate::ExprRange) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); if let Some(it) = &mut node.start { v.visit_expr_mut(&mut **it); } @@ -1569,51 +1768,59 @@ where } } #[cfg(feature = "full")] -pub fn visit_expr_reference_mut<V>(v: &mut V, node: &mut ExprReference) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_raw_addr_mut<V>(v: &mut V, node: &mut crate::ExprRawAddr) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); + skip!(node.and_token); + skip!(node.raw); + v.visit_pointer_mutability_mut(&mut node.mutability); + v.visit_expr_mut(&mut *node.expr); +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_expr_reference_mut<V>(v: &mut V, node: &mut crate::ExprReference) +where + V: VisitMut + ?Sized, +{ + v.visit_attributes_mut(&mut node.attrs); skip!(node.and_token); skip!(node.mutability); v.visit_expr_mut(&mut *node.expr); } #[cfg(feature = "full")] -pub fn visit_expr_repeat_mut<V>(v: &mut V, node: &mut ExprRepeat) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_repeat_mut<V>(v: &mut V, node: &mut crate::ExprRepeat) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); skip!(node.bracket_token); v.visit_expr_mut(&mut *node.expr); skip!(node.semi_token); v.visit_expr_mut(&mut *node.len); } #[cfg(feature = "full")] -pub fn visit_expr_return_mut<V>(v: &mut V, node: &mut ExprReturn) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_return_mut<V>(v: &mut V, node: &mut crate::ExprReturn) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); skip!(node.return_token); if let Some(it) = &mut node.expr { v.visit_expr_mut(&mut **it); } } -#[cfg(feature = "full")] -pub fn visit_expr_struct_mut<V>(v: &mut V, node: &mut ExprStruct) +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_expr_struct_mut<V>(v: &mut V, node: &mut crate::ExprStruct) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); if let Some(it) = &mut node.qself { v.visit_qself_mut(it); } @@ -1629,35 +1836,32 @@ where } } #[cfg(feature = "full")] 
-pub fn visit_expr_try_mut<V>(v: &mut V, node: &mut ExprTry) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_try_mut<V>(v: &mut V, node: &mut crate::ExprTry) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); v.visit_expr_mut(&mut *node.expr); skip!(node.question_token); } #[cfg(feature = "full")] -pub fn visit_expr_try_block_mut<V>(v: &mut V, node: &mut ExprTryBlock) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_try_block_mut<V>(v: &mut V, node: &mut crate::ExprTryBlock) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); skip!(node.try_token); v.visit_block_mut(&mut node.block); } -#[cfg(feature = "full")] -pub fn visit_expr_tuple_mut<V>(v: &mut V, node: &mut ExprTuple) +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_expr_tuple_mut<V>(v: &mut V, node: &mut crate::ExprTuple) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); skip!(node.paren_token); for mut el in Punctuated::pairs_mut(&mut node.elems) { let it = el.value_mut(); @@ -1665,35 +1869,32 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_expr_unary_mut<V>(v: &mut V, node: &mut ExprUnary) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_expr_unary_mut<V>(v: &mut V, node: &mut crate::ExprUnary) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); v.visit_un_op_mut(&mut node.op); v.visit_expr_mut(&mut *node.expr); } #[cfg(feature = "full")] -pub fn visit_expr_unsafe_mut<V>(v: &mut V, node: &mut ExprUnsafe) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_unsafe_mut<V>(v: &mut V, node: &mut crate::ExprUnsafe) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); skip!(node.unsafe_token); v.visit_block_mut(&mut node.block); } #[cfg(feature = "full")] -pub fn visit_expr_while_mut<V>(v: &mut V, node: &mut ExprWhile) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_while_mut<V>(v: &mut V, node: &mut crate::ExprWhile) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); if let Some(it) = &mut node.label { v.visit_label_mut(it); } @@ -1702,26 +1903,24 @@ where v.visit_block_mut(&mut node.body); } #[cfg(feature = "full")] -pub fn visit_expr_yield_mut<V>(v: &mut V, node: &mut ExprYield) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_expr_yield_mut<V>(v: &mut V, node: &mut crate::ExprYield) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); skip!(node.yield_token); if let Some(it) = &mut node.expr { v.visit_expr_mut(&mut **it); } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_field_mut<V>(v: &mut V, node: &mut Field) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_field_mut<V>(v: &mut V, node: &mut crate::Field) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); 
v.visit_visibility_mut(&mut node.vis); v.visit_field_mutability_mut(&mut node.mutability); if let Some(it) = &mut node.ident { @@ -1731,55 +1930,56 @@ where v.visit_type_mut(&mut node.ty); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_field_mutability_mut<V>(v: &mut V, node: &mut FieldMutability) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_field_mutability_mut<V>(v: &mut V, node: &mut crate::FieldMutability) where V: VisitMut + ?Sized, { match node { - FieldMutability::None => {} + crate::FieldMutability::None => {} } } #[cfg(feature = "full")] -pub fn visit_field_pat_mut<V>(v: &mut V, node: &mut FieldPat) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_field_pat_mut<V>(v: &mut V, node: &mut crate::FieldPat) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); v.visit_member_mut(&mut node.member); skip!(node.colon_token); v.visit_pat_mut(&mut *node.pat); } -#[cfg(feature = "full")] -pub fn visit_field_value_mut<V>(v: &mut V, node: &mut FieldValue) +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_field_value_mut<V>(v: &mut V, node: &mut crate::FieldValue) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); v.visit_member_mut(&mut node.member); skip!(node.colon_token); v.visit_expr_mut(&mut node.expr); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_fields_mut<V>(v: &mut V, node: &mut Fields) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_fields_mut<V>(v: &mut V, node: &mut crate::Fields) where V: VisitMut + ?Sized, { match node { - Fields::Named(_binding_0) => { + crate::Fields::Named(_binding_0) => { v.visit_fields_named_mut(_binding_0); } - Fields::Unnamed(_binding_0) => { + crate::Fields::Unnamed(_binding_0) => { v.visit_fields_unnamed_mut(_binding_0); } - Fields::Unit => {} + crate::Fields::Unit => {} } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_fields_named_mut<V>(v: &mut V, node: &mut FieldsNamed) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_fields_named_mut<V>(v: &mut V, node: &mut crate::FieldsNamed) where V: VisitMut + ?Sized, { @@ -1790,7 +1990,8 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_fields_unnamed_mut<V>(v: &mut V, node: &mut FieldsUnnamed) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_fields_unnamed_mut<V>(v: &mut V, node: &mut crate::FieldsUnnamed) where V: VisitMut + ?Sized, { @@ -1801,86 +2002,84 @@ where } } #[cfg(feature = "full")] -pub fn visit_file_mut<V>(v: &mut V, node: &mut File) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_file_mut<V>(v: &mut V, node: &mut crate::File) where V: VisitMut + ?Sized, { skip!(node.shebang); - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); for it in &mut node.items { v.visit_item_mut(it); } } #[cfg(feature = "full")] -pub fn visit_fn_arg_mut<V>(v: &mut V, node: &mut FnArg) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_fn_arg_mut<V>(v: &mut V, node: &mut crate::FnArg) where V: VisitMut + ?Sized, { match node { - FnArg::Receiver(_binding_0) => { + crate::FnArg::Receiver(_binding_0) => { v.visit_receiver_mut(_binding_0); 
} - FnArg::Typed(_binding_0) => { + crate::FnArg::Typed(_binding_0) => { v.visit_pat_type_mut(_binding_0); } } } #[cfg(feature = "full")] -pub fn visit_foreign_item_mut<V>(v: &mut V, node: &mut ForeignItem) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_foreign_item_mut<V>(v: &mut V, node: &mut crate::ForeignItem) where V: VisitMut + ?Sized, { match node { - ForeignItem::Fn(_binding_0) => { + crate::ForeignItem::Fn(_binding_0) => { v.visit_foreign_item_fn_mut(_binding_0); } - ForeignItem::Static(_binding_0) => { + crate::ForeignItem::Static(_binding_0) => { v.visit_foreign_item_static_mut(_binding_0); } - ForeignItem::Type(_binding_0) => { + crate::ForeignItem::Type(_binding_0) => { v.visit_foreign_item_type_mut(_binding_0); } - ForeignItem::Macro(_binding_0) => { + crate::ForeignItem::Macro(_binding_0) => { v.visit_foreign_item_macro_mut(_binding_0); } - ForeignItem::Verbatim(_binding_0) => { - skip!(_binding_0); + crate::ForeignItem::Verbatim(_binding_0) => { + v.visit_token_stream_mut(_binding_0); } } } #[cfg(feature = "full")] -pub fn visit_foreign_item_fn_mut<V>(v: &mut V, node: &mut ForeignItemFn) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_foreign_item_fn_mut<V>(v: &mut V, node: &mut crate::ForeignItemFn) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); v.visit_visibility_mut(&mut node.vis); v.visit_signature_mut(&mut node.sig); skip!(node.semi_token); } #[cfg(feature = "full")] -pub fn visit_foreign_item_macro_mut<V>(v: &mut V, node: &mut ForeignItemMacro) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_foreign_item_macro_mut<V>(v: &mut V, node: &mut crate::ForeignItemMacro) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); v.visit_macro_mut(&mut node.mac); skip!(node.semi_token); } #[cfg(feature = "full")] -pub fn visit_foreign_item_static_mut<V>(v: &mut V, node: &mut ForeignItemStatic) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_foreign_item_static_mut<V>(v: &mut V, node: &mut crate::ForeignItemStatic) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); v.visit_visibility_mut(&mut node.vis); skip!(node.static_token); v.visit_static_mutability_mut(&mut node.mutability); @@ -1890,13 +2089,12 @@ where skip!(node.semi_token); } #[cfg(feature = "full")] -pub fn visit_foreign_item_type_mut<V>(v: &mut V, node: &mut ForeignItemType) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_foreign_item_type_mut<V>(v: &mut V, node: &mut crate::ForeignItemType) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); v.visit_visibility_mut(&mut node.vis); skip!(node.type_token); v.visit_ident_mut(&mut node.ident); @@ -1904,50 +2102,53 @@ where skip!(node.semi_token); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_generic_argument_mut<V>(v: &mut V, node: &mut GenericArgument) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_generic_argument_mut<V>(v: &mut V, node: &mut crate::GenericArgument) where V: VisitMut + ?Sized, { match node { - GenericArgument::Lifetime(_binding_0) => { + crate::GenericArgument::Lifetime(_binding_0) => { v.visit_lifetime_mut(_binding_0); } - GenericArgument::Type(_binding_0) => { + 
crate::GenericArgument::Type(_binding_0) => { v.visit_type_mut(_binding_0); } - GenericArgument::Const(_binding_0) => { + crate::GenericArgument::Const(_binding_0) => { v.visit_expr_mut(_binding_0); } - GenericArgument::AssocType(_binding_0) => { + crate::GenericArgument::AssocType(_binding_0) => { v.visit_assoc_type_mut(_binding_0); } - GenericArgument::AssocConst(_binding_0) => { + crate::GenericArgument::AssocConst(_binding_0) => { v.visit_assoc_const_mut(_binding_0); } - GenericArgument::Constraint(_binding_0) => { + crate::GenericArgument::Constraint(_binding_0) => { v.visit_constraint_mut(_binding_0); } } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_generic_param_mut<V>(v: &mut V, node: &mut GenericParam) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_generic_param_mut<V>(v: &mut V, node: &mut crate::GenericParam) where V: VisitMut + ?Sized, { match node { - GenericParam::Lifetime(_binding_0) => { + crate::GenericParam::Lifetime(_binding_0) => { v.visit_lifetime_param_mut(_binding_0); } - GenericParam::Type(_binding_0) => { + crate::GenericParam::Type(_binding_0) => { v.visit_type_param_mut(_binding_0); } - GenericParam::Const(_binding_0) => { + crate::GenericParam::Const(_binding_0) => { v.visit_const_param_mut(_binding_0); } } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_generics_mut<V>(v: &mut V, node: &mut Generics) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_generics_mut<V>(v: &mut V, node: &mut crate::Generics) where V: VisitMut + ?Sized, { @@ -1961,7 +2162,7 @@ where v.visit_where_clause_mut(it); } } -pub fn visit_ident_mut<V>(v: &mut V, node: &mut Ident) +pub fn visit_ident_mut<V>(v: &mut V, node: &mut proc_macro2::Ident) where V: VisitMut + ?Sized, { @@ -1970,36 +2171,36 @@ where node.set_span(span); } #[cfg(feature = "full")] -pub fn visit_impl_item_mut<V>(v: &mut V, node: &mut ImplItem) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_impl_item_mut<V>(v: &mut V, node: &mut crate::ImplItem) where V: VisitMut + ?Sized, { match node { - ImplItem::Const(_binding_0) => { + crate::ImplItem::Const(_binding_0) => { v.visit_impl_item_const_mut(_binding_0); } - ImplItem::Fn(_binding_0) => { + crate::ImplItem::Fn(_binding_0) => { v.visit_impl_item_fn_mut(_binding_0); } - ImplItem::Type(_binding_0) => { + crate::ImplItem::Type(_binding_0) => { v.visit_impl_item_type_mut(_binding_0); } - ImplItem::Macro(_binding_0) => { + crate::ImplItem::Macro(_binding_0) => { v.visit_impl_item_macro_mut(_binding_0); } - ImplItem::Verbatim(_binding_0) => { - skip!(_binding_0); + crate::ImplItem::Verbatim(_binding_0) => { + v.visit_token_stream_mut(_binding_0); } } } #[cfg(feature = "full")] -pub fn visit_impl_item_const_mut<V>(v: &mut V, node: &mut ImplItemConst) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_impl_item_const_mut<V>(v: &mut V, node: &mut crate::ImplItemConst) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); v.visit_visibility_mut(&mut node.vis); skip!(node.defaultness); skip!(node.const_token); @@ -2012,37 +2213,34 @@ where skip!(node.semi_token); } #[cfg(feature = "full")] -pub fn visit_impl_item_fn_mut<V>(v: &mut V, node: &mut ImplItemFn) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_impl_item_fn_mut<V>(v: &mut V, node: &mut crate::ImplItemFn) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - 
v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); v.visit_visibility_mut(&mut node.vis); skip!(node.defaultness); v.visit_signature_mut(&mut node.sig); v.visit_block_mut(&mut node.block); } #[cfg(feature = "full")] -pub fn visit_impl_item_macro_mut<V>(v: &mut V, node: &mut ImplItemMacro) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_impl_item_macro_mut<V>(v: &mut V, node: &mut crate::ImplItemMacro) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); v.visit_macro_mut(&mut node.mac); skip!(node.semi_token); } #[cfg(feature = "full")] -pub fn visit_impl_item_type_mut<V>(v: &mut V, node: &mut ImplItemType) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_impl_item_type_mut<V>(v: &mut V, node: &mut crate::ImplItemType) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); v.visit_visibility_mut(&mut node.vis); skip!(node.defaultness); skip!(node.type_token); @@ -2053,14 +2251,16 @@ where skip!(node.semi_token); } #[cfg(feature = "full")] -pub fn visit_impl_restriction_mut<V>(v: &mut V, node: &mut ImplRestriction) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_impl_restriction_mut<V>(v: &mut V, node: &mut crate::ImplRestriction) where V: VisitMut + ?Sized, { match *node {} } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_index_mut<V>(v: &mut V, node: &mut Index) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_index_mut<V>(v: &mut V, node: &mut crate::Index) where V: VisitMut + ?Sized, { @@ -2068,69 +2268,69 @@ where v.visit_span_mut(&mut node.span); } #[cfg(feature = "full")] -pub fn visit_item_mut<V>(v: &mut V, node: &mut Item) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_item_mut<V>(v: &mut V, node: &mut crate::Item) where V: VisitMut + ?Sized, { match node { - Item::Const(_binding_0) => { + crate::Item::Const(_binding_0) => { v.visit_item_const_mut(_binding_0); } - Item::Enum(_binding_0) => { + crate::Item::Enum(_binding_0) => { v.visit_item_enum_mut(_binding_0); } - Item::ExternCrate(_binding_0) => { + crate::Item::ExternCrate(_binding_0) => { v.visit_item_extern_crate_mut(_binding_0); } - Item::Fn(_binding_0) => { + crate::Item::Fn(_binding_0) => { v.visit_item_fn_mut(_binding_0); } - Item::ForeignMod(_binding_0) => { + crate::Item::ForeignMod(_binding_0) => { v.visit_item_foreign_mod_mut(_binding_0); } - Item::Impl(_binding_0) => { + crate::Item::Impl(_binding_0) => { v.visit_item_impl_mut(_binding_0); } - Item::Macro(_binding_0) => { + crate::Item::Macro(_binding_0) => { v.visit_item_macro_mut(_binding_0); } - Item::Mod(_binding_0) => { + crate::Item::Mod(_binding_0) => { v.visit_item_mod_mut(_binding_0); } - Item::Static(_binding_0) => { + crate::Item::Static(_binding_0) => { v.visit_item_static_mut(_binding_0); } - Item::Struct(_binding_0) => { + crate::Item::Struct(_binding_0) => { v.visit_item_struct_mut(_binding_0); } - Item::Trait(_binding_0) => { + crate::Item::Trait(_binding_0) => { v.visit_item_trait_mut(_binding_0); } - Item::TraitAlias(_binding_0) => { + crate::Item::TraitAlias(_binding_0) => { v.visit_item_trait_alias_mut(_binding_0); } - Item::Type(_binding_0) => { + crate::Item::Type(_binding_0) => { v.visit_item_type_mut(_binding_0); } - Item::Union(_binding_0) => { + crate::Item::Union(_binding_0) => { v.visit_item_union_mut(_binding_0); } - Item::Use(_binding_0) => 
{ + crate::Item::Use(_binding_0) => { v.visit_item_use_mut(_binding_0); } - Item::Verbatim(_binding_0) => { - skip!(_binding_0); + crate::Item::Verbatim(_binding_0) => { + v.visit_token_stream_mut(_binding_0); } } } #[cfg(feature = "full")] -pub fn visit_item_const_mut<V>(v: &mut V, node: &mut ItemConst) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_item_const_mut<V>(v: &mut V, node: &mut crate::ItemConst) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); v.visit_visibility_mut(&mut node.vis); skip!(node.const_token); v.visit_ident_mut(&mut node.ident); @@ -2142,13 +2342,12 @@ where skip!(node.semi_token); } #[cfg(feature = "full")] -pub fn visit_item_enum_mut<V>(v: &mut V, node: &mut ItemEnum) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_item_enum_mut<V>(v: &mut V, node: &mut crate::ItemEnum) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); v.visit_visibility_mut(&mut node.vis); skip!(node.enum_token); v.visit_ident_mut(&mut node.ident); @@ -2160,13 +2359,12 @@ where } } #[cfg(feature = "full")] -pub fn visit_item_extern_crate_mut<V>(v: &mut V, node: &mut ItemExternCrate) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_item_extern_crate_mut<V>(v: &mut V, node: &mut crate::ItemExternCrate) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); v.visit_visibility_mut(&mut node.vis); skip!(node.extern_token); skip!(node.crate_token); @@ -2178,25 +2376,23 @@ where skip!(node.semi_token); } #[cfg(feature = "full")] -pub fn visit_item_fn_mut<V>(v: &mut V, node: &mut ItemFn) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_item_fn_mut<V>(v: &mut V, node: &mut crate::ItemFn) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); v.visit_visibility_mut(&mut node.vis); v.visit_signature_mut(&mut node.sig); v.visit_block_mut(&mut *node.block); } #[cfg(feature = "full")] -pub fn visit_item_foreign_mod_mut<V>(v: &mut V, node: &mut ItemForeignMod) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_item_foreign_mod_mut<V>(v: &mut V, node: &mut crate::ItemForeignMod) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); skip!(node.unsafety); v.visit_abi_mut(&mut node.abi); skip!(node.brace_token); @@ -2205,13 +2401,12 @@ where } } #[cfg(feature = "full")] -pub fn visit_item_impl_mut<V>(v: &mut V, node: &mut ItemImpl) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_item_impl_mut<V>(v: &mut V, node: &mut crate::ItemImpl) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); skip!(node.defaultness); skip!(node.unsafety); skip!(node.impl_token); @@ -2228,13 +2423,12 @@ where } } #[cfg(feature = "full")] -pub fn visit_item_macro_mut<V>(v: &mut V, node: &mut ItemMacro) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_item_macro_mut<V>(v: &mut V, node: &mut crate::ItemMacro) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); if let Some(it) = &mut node.ident { v.visit_ident_mut(it); } @@ -2242,13 +2436,12 @@ where 
skip!(node.semi_token); } #[cfg(feature = "full")] -pub fn visit_item_mod_mut<V>(v: &mut V, node: &mut ItemMod) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_item_mod_mut<V>(v: &mut V, node: &mut crate::ItemMod) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); v.visit_visibility_mut(&mut node.vis); skip!(node.unsafety); skip!(node.mod_token); @@ -2262,13 +2455,12 @@ where skip!(node.semi); } #[cfg(feature = "full")] -pub fn visit_item_static_mut<V>(v: &mut V, node: &mut ItemStatic) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_item_static_mut<V>(v: &mut V, node: &mut crate::ItemStatic) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); v.visit_visibility_mut(&mut node.vis); skip!(node.static_token); v.visit_static_mutability_mut(&mut node.mutability); @@ -2280,13 +2472,12 @@ where skip!(node.semi_token); } #[cfg(feature = "full")] -pub fn visit_item_struct_mut<V>(v: &mut V, node: &mut ItemStruct) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_item_struct_mut<V>(v: &mut V, node: &mut crate::ItemStruct) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); v.visit_visibility_mut(&mut node.vis); skip!(node.struct_token); v.visit_ident_mut(&mut node.ident); @@ -2295,13 +2486,12 @@ where skip!(node.semi_token); } #[cfg(feature = "full")] -pub fn visit_item_trait_mut<V>(v: &mut V, node: &mut ItemTrait) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_item_trait_mut<V>(v: &mut V, node: &mut crate::ItemTrait) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); v.visit_visibility_mut(&mut node.vis); skip!(node.unsafety); skip!(node.auto_token); @@ -2322,13 +2512,12 @@ where } } #[cfg(feature = "full")] -pub fn visit_item_trait_alias_mut<V>(v: &mut V, node: &mut ItemTraitAlias) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_item_trait_alias_mut<V>(v: &mut V, node: &mut crate::ItemTraitAlias) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); v.visit_visibility_mut(&mut node.vis); skip!(node.trait_token); v.visit_ident_mut(&mut node.ident); @@ -2341,13 +2530,12 @@ where skip!(node.semi_token); } #[cfg(feature = "full")] -pub fn visit_item_type_mut<V>(v: &mut V, node: &mut ItemType) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_item_type_mut<V>(v: &mut V, node: &mut crate::ItemType) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); v.visit_visibility_mut(&mut node.vis); skip!(node.type_token); v.visit_ident_mut(&mut node.ident); @@ -2357,13 +2545,12 @@ where skip!(node.semi_token); } #[cfg(feature = "full")] -pub fn visit_item_union_mut<V>(v: &mut V, node: &mut ItemUnion) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_item_union_mut<V>(v: &mut V, node: &mut crate::ItemUnion) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); v.visit_visibility_mut(&mut node.vis); skip!(node.union_token); v.visit_ident_mut(&mut node.ident); @@ -2371,13 +2558,12 @@ where v.visit_fields_named_mut(&mut node.fields); } 
#[cfg(feature = "full")] -pub fn visit_item_use_mut<V>(v: &mut V, node: &mut ItemUse) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_item_use_mut<V>(v: &mut V, node: &mut crate::ItemUse) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); v.visit_visibility_mut(&mut node.vis); skip!(node.use_token); skip!(node.leading_colon); @@ -2385,14 +2571,15 @@ where skip!(node.semi_token); } #[cfg(feature = "full")] -pub fn visit_label_mut<V>(v: &mut V, node: &mut Label) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_label_mut<V>(v: &mut V, node: &mut crate::Label) where V: VisitMut + ?Sized, { v.visit_lifetime_mut(&mut node.name); skip!(node.colon_token); } -pub fn visit_lifetime_mut<V>(v: &mut V, node: &mut Lifetime) +pub fn visit_lifetime_mut<V>(v: &mut V, node: &mut crate::Lifetime) where V: VisitMut + ?Sized, { @@ -2400,13 +2587,12 @@ where v.visit_ident_mut(&mut node.ident); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_lifetime_param_mut<V>(v: &mut V, node: &mut LifetimeParam) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_lifetime_param_mut<V>(v: &mut V, node: &mut crate::LifetimeParam) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); v.visit_lifetime_mut(&mut node.lifetime); skip!(node.colon_token); for mut el in Punctuated::pairs_mut(&mut node.bounds) { @@ -2414,76 +2600,82 @@ where v.visit_lifetime_mut(it); } } -pub fn visit_lit_mut<V>(v: &mut V, node: &mut Lit) +pub fn visit_lit_mut<V>(v: &mut V, node: &mut crate::Lit) where V: VisitMut + ?Sized, { match node { - Lit::Str(_binding_0) => { + crate::Lit::Str(_binding_0) => { v.visit_lit_str_mut(_binding_0); } - Lit::ByteStr(_binding_0) => { + crate::Lit::ByteStr(_binding_0) => { v.visit_lit_byte_str_mut(_binding_0); } - Lit::Byte(_binding_0) => { + crate::Lit::CStr(_binding_0) => { + v.visit_lit_cstr_mut(_binding_0); + } + crate::Lit::Byte(_binding_0) => { v.visit_lit_byte_mut(_binding_0); } - Lit::Char(_binding_0) => { + crate::Lit::Char(_binding_0) => { v.visit_lit_char_mut(_binding_0); } - Lit::Int(_binding_0) => { + crate::Lit::Int(_binding_0) => { v.visit_lit_int_mut(_binding_0); } - Lit::Float(_binding_0) => { + crate::Lit::Float(_binding_0) => { v.visit_lit_float_mut(_binding_0); } - Lit::Bool(_binding_0) => { + crate::Lit::Bool(_binding_0) => { v.visit_lit_bool_mut(_binding_0); } - Lit::Verbatim(_binding_0) => { + crate::Lit::Verbatim(_binding_0) => { skip!(_binding_0); } } } -pub fn visit_lit_bool_mut<V>(v: &mut V, node: &mut LitBool) +pub fn visit_lit_bool_mut<V>(v: &mut V, node: &mut crate::LitBool) where V: VisitMut + ?Sized, { skip!(node.value); v.visit_span_mut(&mut node.span); } -pub fn visit_lit_byte_mut<V>(v: &mut V, node: &mut LitByte) +pub fn visit_lit_byte_mut<V>(v: &mut V, node: &mut crate::LitByte) +where + V: VisitMut + ?Sized, +{} +pub fn visit_lit_byte_str_mut<V>(v: &mut V, node: &mut crate::LitByteStr) where V: VisitMut + ?Sized, {} -pub fn visit_lit_byte_str_mut<V>(v: &mut V, node: &mut LitByteStr) +pub fn visit_lit_cstr_mut<V>(v: &mut V, node: &mut crate::LitCStr) where V: VisitMut + ?Sized, {} -pub fn visit_lit_char_mut<V>(v: &mut V, node: &mut LitChar) +pub fn visit_lit_char_mut<V>(v: &mut V, node: &mut crate::LitChar) where V: VisitMut + ?Sized, {} -pub fn visit_lit_float_mut<V>(v: &mut V, node: &mut LitFloat) +pub fn visit_lit_float_mut<V>(v: &mut V, 
node: &mut crate::LitFloat) where V: VisitMut + ?Sized, {} -pub fn visit_lit_int_mut<V>(v: &mut V, node: &mut LitInt) +pub fn visit_lit_int_mut<V>(v: &mut V, node: &mut crate::LitInt) where V: VisitMut + ?Sized, {} -pub fn visit_lit_str_mut<V>(v: &mut V, node: &mut LitStr) +pub fn visit_lit_str_mut<V>(v: &mut V, node: &mut crate::LitStr) where V: VisitMut + ?Sized, {} #[cfg(feature = "full")] -pub fn visit_local_mut<V>(v: &mut V, node: &mut Local) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_local_mut<V>(v: &mut V, node: &mut crate::Local) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); skip!(node.let_token); v.visit_pat_mut(&mut node.pat); if let Some(it) = &mut node.init { @@ -2492,7 +2684,8 @@ where skip!(node.semi_token); } #[cfg(feature = "full")] -pub fn visit_local_init_mut<V>(v: &mut V, node: &mut LocalInit) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_local_init_mut<V>(v: &mut V, node: &mut crate::LocalInit) where V: VisitMut + ?Sized, { @@ -2504,74 +2697,80 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_macro_mut<V>(v: &mut V, node: &mut Macro) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_macro_mut<V>(v: &mut V, node: &mut crate::Macro) where V: VisitMut + ?Sized, { v.visit_path_mut(&mut node.path); skip!(node.bang_token); v.visit_macro_delimiter_mut(&mut node.delimiter); - skip!(node.tokens); + v.visit_token_stream_mut(&mut node.tokens); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_macro_delimiter_mut<V>(v: &mut V, node: &mut MacroDelimiter) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_macro_delimiter_mut<V>(v: &mut V, node: &mut crate::MacroDelimiter) where V: VisitMut + ?Sized, { match node { - MacroDelimiter::Paren(_binding_0) => { + crate::MacroDelimiter::Paren(_binding_0) => { skip!(_binding_0); } - MacroDelimiter::Brace(_binding_0) => { + crate::MacroDelimiter::Brace(_binding_0) => { skip!(_binding_0); } - MacroDelimiter::Bracket(_binding_0) => { + crate::MacroDelimiter::Bracket(_binding_0) => { skip!(_binding_0); } } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_member_mut<V>(v: &mut V, node: &mut Member) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_member_mut<V>(v: &mut V, node: &mut crate::Member) where V: VisitMut + ?Sized, { match node { - Member::Named(_binding_0) => { + crate::Member::Named(_binding_0) => { v.visit_ident_mut(_binding_0); } - Member::Unnamed(_binding_0) => { + crate::Member::Unnamed(_binding_0) => { v.visit_index_mut(_binding_0); } } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_meta_mut<V>(v: &mut V, node: &mut Meta) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_meta_mut<V>(v: &mut V, node: &mut crate::Meta) where V: VisitMut + ?Sized, { match node { - Meta::Path(_binding_0) => { + crate::Meta::Path(_binding_0) => { v.visit_path_mut(_binding_0); } - Meta::List(_binding_0) => { + crate::Meta::List(_binding_0) => { v.visit_meta_list_mut(_binding_0); } - Meta::NameValue(_binding_0) => { + crate::Meta::NameValue(_binding_0) => { v.visit_meta_name_value_mut(_binding_0); } } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_meta_list_mut<V>(v: &mut V, node: &mut MetaList) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn 
visit_meta_list_mut<V>(v: &mut V, node: &mut crate::MetaList) where V: VisitMut + ?Sized, { v.visit_path_mut(&mut node.path); v.visit_macro_delimiter_mut(&mut node.delimiter); - skip!(node.tokens); + v.visit_token_stream_mut(&mut node.tokens); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_meta_name_value_mut<V>(v: &mut V, node: &mut MetaNameValue) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_meta_name_value_mut<V>(v: &mut V, node: &mut crate::MetaNameValue) where V: VisitMut + ?Sized, { @@ -2580,9 +2779,10 @@ where v.visit_expr_mut(&mut node.value); } #[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] pub fn visit_parenthesized_generic_arguments_mut<V>( v: &mut V, - node: &mut ParenthesizedGenericArguments, + node: &mut crate::ParenthesizedGenericArguments, ) where V: VisitMut + ?Sized, @@ -2595,72 +2795,72 @@ where v.visit_return_type_mut(&mut node.output); } #[cfg(feature = "full")] -pub fn visit_pat_mut<V>(v: &mut V, node: &mut Pat) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_pat_mut<V>(v: &mut V, node: &mut crate::Pat) where V: VisitMut + ?Sized, { match node { - Pat::Const(_binding_0) => { + crate::Pat::Const(_binding_0) => { v.visit_expr_const_mut(_binding_0); } - Pat::Ident(_binding_0) => { + crate::Pat::Ident(_binding_0) => { v.visit_pat_ident_mut(_binding_0); } - Pat::Lit(_binding_0) => { + crate::Pat::Lit(_binding_0) => { v.visit_expr_lit_mut(_binding_0); } - Pat::Macro(_binding_0) => { + crate::Pat::Macro(_binding_0) => { v.visit_expr_macro_mut(_binding_0); } - Pat::Or(_binding_0) => { + crate::Pat::Or(_binding_0) => { v.visit_pat_or_mut(_binding_0); } - Pat::Paren(_binding_0) => { + crate::Pat::Paren(_binding_0) => { v.visit_pat_paren_mut(_binding_0); } - Pat::Path(_binding_0) => { + crate::Pat::Path(_binding_0) => { v.visit_expr_path_mut(_binding_0); } - Pat::Range(_binding_0) => { + crate::Pat::Range(_binding_0) => { v.visit_expr_range_mut(_binding_0); } - Pat::Reference(_binding_0) => { + crate::Pat::Reference(_binding_0) => { v.visit_pat_reference_mut(_binding_0); } - Pat::Rest(_binding_0) => { + crate::Pat::Rest(_binding_0) => { v.visit_pat_rest_mut(_binding_0); } - Pat::Slice(_binding_0) => { + crate::Pat::Slice(_binding_0) => { v.visit_pat_slice_mut(_binding_0); } - Pat::Struct(_binding_0) => { + crate::Pat::Struct(_binding_0) => { v.visit_pat_struct_mut(_binding_0); } - Pat::Tuple(_binding_0) => { + crate::Pat::Tuple(_binding_0) => { v.visit_pat_tuple_mut(_binding_0); } - Pat::TupleStruct(_binding_0) => { + crate::Pat::TupleStruct(_binding_0) => { v.visit_pat_tuple_struct_mut(_binding_0); } - Pat::Type(_binding_0) => { + crate::Pat::Type(_binding_0) => { v.visit_pat_type_mut(_binding_0); } - Pat::Verbatim(_binding_0) => { - skip!(_binding_0); + crate::Pat::Verbatim(_binding_0) => { + v.visit_token_stream_mut(_binding_0); } - Pat::Wild(_binding_0) => { + crate::Pat::Wild(_binding_0) => { v.visit_pat_wild_mut(_binding_0); } } } #[cfg(feature = "full")] -pub fn visit_pat_ident_mut<V>(v: &mut V, node: &mut PatIdent) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_pat_ident_mut<V>(v: &mut V, node: &mut crate::PatIdent) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); skip!(node.by_ref); skip!(node.mutability); v.visit_ident_mut(&mut node.ident); @@ -2670,13 +2870,12 @@ where } } #[cfg(feature = "full")] -pub fn 
visit_pat_or_mut<V>(v: &mut V, node: &mut PatOr) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_pat_or_mut<V>(v: &mut V, node: &mut crate::PatOr) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); skip!(node.leading_vert); for mut el in Punctuated::pairs_mut(&mut node.cases) { let it = el.value_mut(); @@ -2684,46 +2883,42 @@ where } } #[cfg(feature = "full")] -pub fn visit_pat_paren_mut<V>(v: &mut V, node: &mut PatParen) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_pat_paren_mut<V>(v: &mut V, node: &mut crate::PatParen) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); skip!(node.paren_token); v.visit_pat_mut(&mut *node.pat); } #[cfg(feature = "full")] -pub fn visit_pat_reference_mut<V>(v: &mut V, node: &mut PatReference) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_pat_reference_mut<V>(v: &mut V, node: &mut crate::PatReference) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); skip!(node.and_token); skip!(node.mutability); v.visit_pat_mut(&mut *node.pat); } #[cfg(feature = "full")] -pub fn visit_pat_rest_mut<V>(v: &mut V, node: &mut PatRest) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_pat_rest_mut<V>(v: &mut V, node: &mut crate::PatRest) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); skip!(node.dot2_token); } #[cfg(feature = "full")] -pub fn visit_pat_slice_mut<V>(v: &mut V, node: &mut PatSlice) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_pat_slice_mut<V>(v: &mut V, node: &mut crate::PatSlice) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); skip!(node.bracket_token); for mut el in Punctuated::pairs_mut(&mut node.elems) { let it = el.value_mut(); @@ -2731,13 +2926,12 @@ where } } #[cfg(feature = "full")] -pub fn visit_pat_struct_mut<V>(v: &mut V, node: &mut PatStruct) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_pat_struct_mut<V>(v: &mut V, node: &mut crate::PatStruct) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); if let Some(it) = &mut node.qself { v.visit_qself_mut(it); } @@ -2752,13 +2946,12 @@ where } } #[cfg(feature = "full")] -pub fn visit_pat_tuple_mut<V>(v: &mut V, node: &mut PatTuple) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_pat_tuple_mut<V>(v: &mut V, node: &mut crate::PatTuple) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); skip!(node.paren_token); for mut el in Punctuated::pairs_mut(&mut node.elems) { let it = el.value_mut(); @@ -2766,13 +2959,12 @@ where } } #[cfg(feature = "full")] -pub fn visit_pat_tuple_struct_mut<V>(v: &mut V, node: &mut PatTupleStruct) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_pat_tuple_struct_mut<V>(v: &mut V, node: &mut crate::PatTupleStruct) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); if let Some(it) = &mut node.qself { v.visit_qself_mut(it); } @@ -2784,29 +2976,28 @@ where } } #[cfg(feature = "full")] -pub fn 
visit_pat_type_mut<V>(v: &mut V, node: &mut PatType) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_pat_type_mut<V>(v: &mut V, node: &mut crate::PatType) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); v.visit_pat_mut(&mut *node.pat); skip!(node.colon_token); v.visit_type_mut(&mut *node.ty); } #[cfg(feature = "full")] -pub fn visit_pat_wild_mut<V>(v: &mut V, node: &mut PatWild) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_pat_wild_mut<V>(v: &mut V, node: &mut crate::PatWild) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); skip!(node.underscore_token); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_path_mut<V>(v: &mut V, node: &mut Path) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_path_mut<V>(v: &mut V, node: &mut crate::Path) where V: VisitMut + ?Sized, { @@ -2817,30 +3008,62 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_path_arguments_mut<V>(v: &mut V, node: &mut PathArguments) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_path_arguments_mut<V>(v: &mut V, node: &mut crate::PathArguments) where V: VisitMut + ?Sized, { match node { - PathArguments::None => {} - PathArguments::AngleBracketed(_binding_0) => { + crate::PathArguments::None => {} + crate::PathArguments::AngleBracketed(_binding_0) => { v.visit_angle_bracketed_generic_arguments_mut(_binding_0); } - PathArguments::Parenthesized(_binding_0) => { + crate::PathArguments::Parenthesized(_binding_0) => { v.visit_parenthesized_generic_arguments_mut(_binding_0); } } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_path_segment_mut<V>(v: &mut V, node: &mut PathSegment) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_path_segment_mut<V>(v: &mut V, node: &mut crate::PathSegment) where V: VisitMut + ?Sized, { v.visit_ident_mut(&mut node.ident); v.visit_path_arguments_mut(&mut node.arguments); } +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_pointer_mutability_mut<V>(v: &mut V, node: &mut crate::PointerMutability) +where + V: VisitMut + ?Sized, +{ + match node { + crate::PointerMutability::Const(_binding_0) => { + skip!(_binding_0); + } + crate::PointerMutability::Mut(_binding_0) => { + skip!(_binding_0); + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_precise_capture_mut<V>(v: &mut V, node: &mut crate::PreciseCapture) +where + V: VisitMut + ?Sized, +{ + skip!(node.use_token); + skip!(node.lt_token); + for mut el in Punctuated::pairs_mut(&mut node.params) { + let it = el.value_mut(); + v.visit_captured_param_mut(it); + } + skip!(node.gt_token); +} #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_predicate_lifetime_mut<V>(v: &mut V, node: &mut PredicateLifetime) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_predicate_lifetime_mut<V>(v: &mut V, node: &mut crate::PredicateLifetime) where V: VisitMut + ?Sized, { @@ -2852,7 +3075,8 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_predicate_type_mut<V>(v: &mut V, node: &mut PredicateType) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_predicate_type_mut<V>(v: &mut V, node: &mut 
crate::PredicateType) where V: VisitMut + ?Sized, { @@ -2867,7 +3091,8 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_qself_mut<V>(v: &mut V, node: &mut QSelf) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_qself_mut<V>(v: &mut V, node: &mut crate::QSelf) where V: VisitMut + ?Sized, { @@ -2878,27 +3103,27 @@ where skip!(node.gt_token); } #[cfg(feature = "full")] -pub fn visit_range_limits_mut<V>(v: &mut V, node: &mut RangeLimits) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_range_limits_mut<V>(v: &mut V, node: &mut crate::RangeLimits) where V: VisitMut + ?Sized, { match node { - RangeLimits::HalfOpen(_binding_0) => { + crate::RangeLimits::HalfOpen(_binding_0) => { skip!(_binding_0); } - RangeLimits::Closed(_binding_0) => { + crate::RangeLimits::Closed(_binding_0) => { skip!(_binding_0); } } } #[cfg(feature = "full")] -pub fn visit_receiver_mut<V>(v: &mut V, node: &mut Receiver) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_receiver_mut<V>(v: &mut V, node: &mut crate::Receiver) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); if let Some(it) = &mut node.reference { skip!((it).0); if let Some(it) = &mut (it).1 { @@ -2911,20 +3136,22 @@ where v.visit_type_mut(&mut *node.ty); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_return_type_mut<V>(v: &mut V, node: &mut ReturnType) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_return_type_mut<V>(v: &mut V, node: &mut crate::ReturnType) where V: VisitMut + ?Sized, { match node { - ReturnType::Default => {} - ReturnType::Type(_binding_0, _binding_1) => { + crate::ReturnType::Default => {} + crate::ReturnType::Type(_binding_0, _binding_1) => { skip!(_binding_0); v.visit_type_mut(&mut **_binding_1); } } } #[cfg(feature = "full")] -pub fn visit_signature_mut<V>(v: &mut V, node: &mut Signature) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_signature_mut<V>(v: &mut V, node: &mut crate::Signature) where V: VisitMut + ?Sized, { @@ -2947,56 +3174,58 @@ where } v.visit_return_type_mut(&mut node.output); } -pub fn visit_span_mut<V>(v: &mut V, node: &mut Span) +pub fn visit_span_mut<V>(v: &mut V, node: &mut proc_macro2::Span) where V: VisitMut + ?Sized, {} #[cfg(feature = "full")] -pub fn visit_static_mutability_mut<V>(v: &mut V, node: &mut StaticMutability) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_static_mutability_mut<V>(v: &mut V, node: &mut crate::StaticMutability) where V: VisitMut + ?Sized, { match node { - StaticMutability::Mut(_binding_0) => { + crate::StaticMutability::Mut(_binding_0) => { skip!(_binding_0); } - StaticMutability::None => {} + crate::StaticMutability::None => {} } } #[cfg(feature = "full")] -pub fn visit_stmt_mut<V>(v: &mut V, node: &mut Stmt) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_stmt_mut<V>(v: &mut V, node: &mut crate::Stmt) where V: VisitMut + ?Sized, { match node { - Stmt::Local(_binding_0) => { + crate::Stmt::Local(_binding_0) => { v.visit_local_mut(_binding_0); } - Stmt::Item(_binding_0) => { + crate::Stmt::Item(_binding_0) => { v.visit_item_mut(_binding_0); } - Stmt::Expr(_binding_0, _binding_1) => { + crate::Stmt::Expr(_binding_0, _binding_1) => { v.visit_expr_mut(_binding_0); skip!(_binding_1); } - Stmt::Macro(_binding_0) => { + crate::Stmt::Macro(_binding_0) => { v.visit_stmt_macro_mut(_binding_0); } } } #[cfg(feature 
= "full")] -pub fn visit_stmt_macro_mut<V>(v: &mut V, node: &mut StmtMacro) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_stmt_macro_mut<V>(v: &mut V, node: &mut crate::StmtMacro) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); v.visit_macro_mut(&mut node.mac); skip!(node.semi_token); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_trait_bound_mut<V>(v: &mut V, node: &mut TraitBound) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_trait_bound_mut<V>(v: &mut V, node: &mut crate::TraitBound) where V: VisitMut + ?Sized, { @@ -3008,48 +3237,49 @@ where v.visit_path_mut(&mut node.path); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_trait_bound_modifier_mut<V>(v: &mut V, node: &mut TraitBoundModifier) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_trait_bound_modifier_mut<V>(v: &mut V, node: &mut crate::TraitBoundModifier) where V: VisitMut + ?Sized, { match node { - TraitBoundModifier::None => {} - TraitBoundModifier::Maybe(_binding_0) => { + crate::TraitBoundModifier::None => {} + crate::TraitBoundModifier::Maybe(_binding_0) => { skip!(_binding_0); } } } #[cfg(feature = "full")] -pub fn visit_trait_item_mut<V>(v: &mut V, node: &mut TraitItem) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_trait_item_mut<V>(v: &mut V, node: &mut crate::TraitItem) where V: VisitMut + ?Sized, { match node { - TraitItem::Const(_binding_0) => { + crate::TraitItem::Const(_binding_0) => { v.visit_trait_item_const_mut(_binding_0); } - TraitItem::Fn(_binding_0) => { + crate::TraitItem::Fn(_binding_0) => { v.visit_trait_item_fn_mut(_binding_0); } - TraitItem::Type(_binding_0) => { + crate::TraitItem::Type(_binding_0) => { v.visit_trait_item_type_mut(_binding_0); } - TraitItem::Macro(_binding_0) => { + crate::TraitItem::Macro(_binding_0) => { v.visit_trait_item_macro_mut(_binding_0); } - TraitItem::Verbatim(_binding_0) => { - skip!(_binding_0); + crate::TraitItem::Verbatim(_binding_0) => { + v.visit_token_stream_mut(_binding_0); } } } #[cfg(feature = "full")] -pub fn visit_trait_item_const_mut<V>(v: &mut V, node: &mut TraitItemConst) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_trait_item_const_mut<V>(v: &mut V, node: &mut crate::TraitItemConst) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); skip!(node.const_token); v.visit_ident_mut(&mut node.ident); v.visit_generics_mut(&mut node.generics); @@ -3062,13 +3292,12 @@ where skip!(node.semi_token); } #[cfg(feature = "full")] -pub fn visit_trait_item_fn_mut<V>(v: &mut V, node: &mut TraitItemFn) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_trait_item_fn_mut<V>(v: &mut V, node: &mut crate::TraitItemFn) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); v.visit_signature_mut(&mut node.sig); if let Some(it) = &mut node.default { v.visit_block_mut(it); @@ -3076,24 +3305,22 @@ where skip!(node.semi_token); } #[cfg(feature = "full")] -pub fn visit_trait_item_macro_mut<V>(v: &mut V, node: &mut TraitItemMacro) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_trait_item_macro_mut<V>(v: &mut V, node: &mut crate::TraitItemMacro) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + 
v.visit_attributes_mut(&mut node.attrs); v.visit_macro_mut(&mut node.mac); skip!(node.semi_token); } #[cfg(feature = "full")] -pub fn visit_trait_item_type_mut<V>(v: &mut V, node: &mut TraitItemType) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_trait_item_type_mut<V>(v: &mut V, node: &mut crate::TraitItemType) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); skip!(node.type_token); v.visit_ident_mut(&mut node.ident); v.visit_generics_mut(&mut node.generics); @@ -3109,60 +3336,62 @@ where skip!(node.semi_token); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_type_mut<V>(v: &mut V, node: &mut Type) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_type_mut<V>(v: &mut V, node: &mut crate::Type) where V: VisitMut + ?Sized, { match node { - Type::Array(_binding_0) => { + crate::Type::Array(_binding_0) => { v.visit_type_array_mut(_binding_0); } - Type::BareFn(_binding_0) => { + crate::Type::BareFn(_binding_0) => { v.visit_type_bare_fn_mut(_binding_0); } - Type::Group(_binding_0) => { + crate::Type::Group(_binding_0) => { v.visit_type_group_mut(_binding_0); } - Type::ImplTrait(_binding_0) => { + crate::Type::ImplTrait(_binding_0) => { v.visit_type_impl_trait_mut(_binding_0); } - Type::Infer(_binding_0) => { + crate::Type::Infer(_binding_0) => { v.visit_type_infer_mut(_binding_0); } - Type::Macro(_binding_0) => { + crate::Type::Macro(_binding_0) => { v.visit_type_macro_mut(_binding_0); } - Type::Never(_binding_0) => { + crate::Type::Never(_binding_0) => { v.visit_type_never_mut(_binding_0); } - Type::Paren(_binding_0) => { + crate::Type::Paren(_binding_0) => { v.visit_type_paren_mut(_binding_0); } - Type::Path(_binding_0) => { + crate::Type::Path(_binding_0) => { v.visit_type_path_mut(_binding_0); } - Type::Ptr(_binding_0) => { + crate::Type::Ptr(_binding_0) => { v.visit_type_ptr_mut(_binding_0); } - Type::Reference(_binding_0) => { + crate::Type::Reference(_binding_0) => { v.visit_type_reference_mut(_binding_0); } - Type::Slice(_binding_0) => { + crate::Type::Slice(_binding_0) => { v.visit_type_slice_mut(_binding_0); } - Type::TraitObject(_binding_0) => { + crate::Type::TraitObject(_binding_0) => { v.visit_type_trait_object_mut(_binding_0); } - Type::Tuple(_binding_0) => { + crate::Type::Tuple(_binding_0) => { v.visit_type_tuple_mut(_binding_0); } - Type::Verbatim(_binding_0) => { - skip!(_binding_0); + crate::Type::Verbatim(_binding_0) => { + v.visit_token_stream_mut(_binding_0); } } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_type_array_mut<V>(v: &mut V, node: &mut TypeArray) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_type_array_mut<V>(v: &mut V, node: &mut crate::TypeArray) where V: VisitMut + ?Sized, { @@ -3172,7 +3401,8 @@ where v.visit_expr_mut(&mut node.len); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_type_bare_fn_mut<V>(v: &mut V, node: &mut TypeBareFn) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_type_bare_fn_mut<V>(v: &mut V, node: &mut crate::TypeBareFn) where V: VisitMut + ?Sized, { @@ -3195,7 +3425,8 @@ where v.visit_return_type_mut(&mut node.output); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_type_group_mut<V>(v: &mut V, node: &mut TypeGroup) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_type_group_mut<V>(v: &mut V, node: &mut 
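// A minimal sketch of how a downstream `VisitMut` implementation might use the
// hooks relied on in the hunks above: attribute lists are now funneled through a
// single `visit_attributes_mut` call, and `Verbatim` variants are routed through
// `visit_token_stream_mut` instead of being skipped. The method signatures below
// are inferred from the calls in this file and should be treated as assumptions;
// `StripDocs` is a made-up type used only for illustration.
use proc_macro2::TokenStream;
use syn::visit_mut::VisitMut;

struct StripDocs {
    verbatim_nodes_seen: usize,
}

impl VisitMut for StripDocs {
    // Called once per attribute list (e.g. `node.attrs`) rather than once per attribute.
    fn visit_attributes_mut(&mut self, attrs: &mut Vec<syn::Attribute>) {
        // Drop doc comments, then delegate to the per-attribute hook.
        attrs.retain(|attr| !attr.path().is_ident("doc"));
        for attr in attrs {
            self.visit_attribute_mut(attr);
        }
    }

    // Called for `Type::Verbatim`, `TraitItem::Verbatim`, etc., which the previous
    // vendored version silently skipped.
    fn visit_token_stream_mut(&mut self, _tokens: &mut TokenStream) {
        self.verbatim_nodes_seen += 1;
    }
}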
crate::TypeGroup) where V: VisitMut + ?Sized, { @@ -3203,7 +3434,8 @@ where v.visit_type_mut(&mut *node.elem); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_type_impl_trait_mut<V>(v: &mut V, node: &mut TypeImplTrait) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_type_impl_trait_mut<V>(v: &mut V, node: &mut crate::TypeImplTrait) where V: VisitMut + ?Sized, { @@ -3214,34 +3446,36 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_type_infer_mut<V>(v: &mut V, node: &mut TypeInfer) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_type_infer_mut<V>(v: &mut V, node: &mut crate::TypeInfer) where V: VisitMut + ?Sized, { skip!(node.underscore_token); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_type_macro_mut<V>(v: &mut V, node: &mut TypeMacro) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_type_macro_mut<V>(v: &mut V, node: &mut crate::TypeMacro) where V: VisitMut + ?Sized, { v.visit_macro_mut(&mut node.mac); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_type_never_mut<V>(v: &mut V, node: &mut TypeNever) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_type_never_mut<V>(v: &mut V, node: &mut crate::TypeNever) where V: VisitMut + ?Sized, { skip!(node.bang_token); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_type_param_mut<V>(v: &mut V, node: &mut TypeParam) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_type_param_mut<V>(v: &mut V, node: &mut crate::TypeParam) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); v.visit_ident_mut(&mut node.ident); skip!(node.colon_token); for mut el in Punctuated::pairs_mut(&mut node.bounds) { @@ -3254,24 +3488,29 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_type_param_bound_mut<V>(v: &mut V, node: &mut TypeParamBound) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_type_param_bound_mut<V>(v: &mut V, node: &mut crate::TypeParamBound) where V: VisitMut + ?Sized, { match node { - TypeParamBound::Trait(_binding_0) => { + crate::TypeParamBound::Trait(_binding_0) => { v.visit_trait_bound_mut(_binding_0); } - TypeParamBound::Lifetime(_binding_0) => { + crate::TypeParamBound::Lifetime(_binding_0) => { v.visit_lifetime_mut(_binding_0); } - TypeParamBound::Verbatim(_binding_0) => { - skip!(_binding_0); + crate::TypeParamBound::PreciseCapture(_binding_0) => { + full!(v.visit_precise_capture_mut(_binding_0)); + } + crate::TypeParamBound::Verbatim(_binding_0) => { + v.visit_token_stream_mut(_binding_0); } } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_type_paren_mut<V>(v: &mut V, node: &mut TypeParen) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_type_paren_mut<V>(v: &mut V, node: &mut crate::TypeParen) where V: VisitMut + ?Sized, { @@ -3279,7 +3518,8 @@ where v.visit_type_mut(&mut *node.elem); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_type_path_mut<V>(v: &mut V, node: &mut TypePath) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_type_path_mut<V>(v: &mut V, node: &mut crate::TypePath) where V: VisitMut + ?Sized, { @@ -3289,7 +3529,8 @@ where v.visit_path_mut(&mut node.path); } #[cfg(any(feature 
= "derive", feature = "full"))] -pub fn visit_type_ptr_mut<V>(v: &mut V, node: &mut TypePtr) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_type_ptr_mut<V>(v: &mut V, node: &mut crate::TypePtr) where V: VisitMut + ?Sized, { @@ -3299,7 +3540,8 @@ where v.visit_type_mut(&mut *node.elem); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_type_reference_mut<V>(v: &mut V, node: &mut TypeReference) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_type_reference_mut<V>(v: &mut V, node: &mut crate::TypeReference) where V: VisitMut + ?Sized, { @@ -3311,7 +3553,8 @@ where v.visit_type_mut(&mut *node.elem); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_type_slice_mut<V>(v: &mut V, node: &mut TypeSlice) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_type_slice_mut<V>(v: &mut V, node: &mut crate::TypeSlice) where V: VisitMut + ?Sized, { @@ -3319,7 +3562,8 @@ where v.visit_type_mut(&mut *node.elem); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_type_trait_object_mut<V>(v: &mut V, node: &mut TypeTraitObject) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_type_trait_object_mut<V>(v: &mut V, node: &mut crate::TypeTraitObject) where V: VisitMut + ?Sized, { @@ -3330,7 +3574,8 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_type_tuple_mut<V>(v: &mut V, node: &mut TypeTuple) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_type_tuple_mut<V>(v: &mut V, node: &mut crate::TypeTuple) where V: VisitMut + ?Sized, { @@ -3341,31 +3586,34 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_un_op_mut<V>(v: &mut V, node: &mut UnOp) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_un_op_mut<V>(v: &mut V, node: &mut crate::UnOp) where V: VisitMut + ?Sized, { match node { - UnOp::Deref(_binding_0) => { + crate::UnOp::Deref(_binding_0) => { skip!(_binding_0); } - UnOp::Not(_binding_0) => { + crate::UnOp::Not(_binding_0) => { skip!(_binding_0); } - UnOp::Neg(_binding_0) => { + crate::UnOp::Neg(_binding_0) => { skip!(_binding_0); } } } #[cfg(feature = "full")] -pub fn visit_use_glob_mut<V>(v: &mut V, node: &mut UseGlob) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_use_glob_mut<V>(v: &mut V, node: &mut crate::UseGlob) where V: VisitMut + ?Sized, { skip!(node.star_token); } #[cfg(feature = "full")] -pub fn visit_use_group_mut<V>(v: &mut V, node: &mut UseGroup) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_use_group_mut<V>(v: &mut V, node: &mut crate::UseGroup) where V: VisitMut + ?Sized, { @@ -3376,14 +3624,16 @@ where } } #[cfg(feature = "full")] -pub fn visit_use_name_mut<V>(v: &mut V, node: &mut UseName) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_use_name_mut<V>(v: &mut V, node: &mut crate::UseName) where V: VisitMut + ?Sized, { v.visit_ident_mut(&mut node.ident); } #[cfg(feature = "full")] -pub fn visit_use_path_mut<V>(v: &mut V, node: &mut UsePath) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_use_path_mut<V>(v: &mut V, node: &mut crate::UsePath) where V: VisitMut + ?Sized, { @@ -3392,7 +3642,8 @@ where v.visit_use_tree_mut(&mut *node.tree); } #[cfg(feature = "full")] -pub fn visit_use_rename_mut<V>(v: &mut V, node: &mut UseRename) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn 
visit_use_rename_mut<V>(v: &mut V, node: &mut crate::UseRename) where V: VisitMut + ?Sized, { @@ -3401,36 +3652,36 @@ where v.visit_ident_mut(&mut node.rename); } #[cfg(feature = "full")] -pub fn visit_use_tree_mut<V>(v: &mut V, node: &mut UseTree) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_use_tree_mut<V>(v: &mut V, node: &mut crate::UseTree) where V: VisitMut + ?Sized, { match node { - UseTree::Path(_binding_0) => { + crate::UseTree::Path(_binding_0) => { v.visit_use_path_mut(_binding_0); } - UseTree::Name(_binding_0) => { + crate::UseTree::Name(_binding_0) => { v.visit_use_name_mut(_binding_0); } - UseTree::Rename(_binding_0) => { + crate::UseTree::Rename(_binding_0) => { v.visit_use_rename_mut(_binding_0); } - UseTree::Glob(_binding_0) => { + crate::UseTree::Glob(_binding_0) => { v.visit_use_glob_mut(_binding_0); } - UseTree::Group(_binding_0) => { + crate::UseTree::Group(_binding_0) => { v.visit_use_group_mut(_binding_0); } } } #[cfg(feature = "full")] -pub fn visit_variadic_mut<V>(v: &mut V, node: &mut Variadic) +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub fn visit_variadic_mut<V>(v: &mut V, node: &mut crate::Variadic) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); if let Some(it) = &mut node.pat { v.visit_pat_mut(&mut *(it).0); skip!((it).1); @@ -3439,13 +3690,12 @@ where skip!(node.comma); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_variant_mut<V>(v: &mut V, node: &mut Variant) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_variant_mut<V>(v: &mut V, node: &mut crate::Variant) where V: VisitMut + ?Sized, { - for it in &mut node.attrs { - v.visit_attribute_mut(it); - } + v.visit_attributes_mut(&mut node.attrs); v.visit_ident_mut(&mut node.ident); v.visit_fields_mut(&mut node.fields); if let Some(it) = &mut node.discriminant { @@ -3454,7 +3704,8 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_vis_restricted_mut<V>(v: &mut V, node: &mut VisRestricted) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_vis_restricted_mut<V>(v: &mut V, node: &mut crate::VisRestricted) where V: VisitMut + ?Sized, { @@ -3464,22 +3715,24 @@ where v.visit_path_mut(&mut *node.path); } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_visibility_mut<V>(v: &mut V, node: &mut Visibility) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_visibility_mut<V>(v: &mut V, node: &mut crate::Visibility) where V: VisitMut + ?Sized, { match node { - Visibility::Public(_binding_0) => { + crate::Visibility::Public(_binding_0) => { skip!(_binding_0); } - Visibility::Restricted(_binding_0) => { + crate::Visibility::Restricted(_binding_0) => { v.visit_vis_restricted_mut(_binding_0); } - Visibility::Inherited => {} + crate::Visibility::Inherited => {} } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_where_clause_mut<V>(v: &mut V, node: &mut WhereClause) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_where_clause_mut<V>(v: &mut V, node: &mut crate::WhereClause) where V: VisitMut + ?Sized, { @@ -3490,15 +3743,16 @@ where } } #[cfg(any(feature = "derive", feature = "full"))] -pub fn visit_where_predicate_mut<V>(v: &mut V, node: &mut WherePredicate) +#[cfg_attr(docsrs, doc(cfg(any(feature = "derive", feature = "full"))))] +pub fn visit_where_predicate_mut<V>(v: &mut V, node: &mut 
crate::WherePredicate) where V: VisitMut + ?Sized, { match node { - WherePredicate::Lifetime(_binding_0) => { + crate::WherePredicate::Lifetime(_binding_0) => { v.visit_predicate_lifetime_mut(_binding_0); } - WherePredicate::Type(_binding_0) => { + crate::WherePredicate::Type(_binding_0) => { v.visit_predicate_type_mut(_binding_0); } } diff --git a/vendor/syn/src/gen_helper.rs b/vendor/syn/src/gen_helper.rs deleted file mode 100644 index e433bac3..00000000 --- a/vendor/syn/src/gen_helper.rs +++ /dev/null @@ -1,34 +0,0 @@ -#[cfg(feature = "fold")] -pub(crate) mod fold { - use crate::punctuated::{Pair, Punctuated}; - - pub(crate) trait FoldHelper { - type Item; - fn lift<F>(self, f: F) -> Self - where - F: FnMut(Self::Item) -> Self::Item; - } - - impl<T> FoldHelper for Vec<T> { - type Item = T; - fn lift<F>(self, f: F) -> Self - where - F: FnMut(Self::Item) -> Self::Item, - { - self.into_iter().map(f).collect() - } - } - - impl<T, U> FoldHelper for Punctuated<T, U> { - type Item = T; - fn lift<F>(self, mut f: F) -> Self - where - F: FnMut(Self::Item) -> Self::Item, - { - self.into_pairs() - .map(Pair::into_tuple) - .map(|(t, u)| Pair::new(f(t), u)) - .collect() - } - } -} diff --git a/vendor/syn/src/generics.rs b/vendor/syn/src/generics.rs index 2ad913d1..de8e0915 100644 --- a/vendor/syn/src/generics.rs +++ b/vendor/syn/src/generics.rs @@ -1,5 +1,11 @@ -use super::*; +use crate::attr::Attribute; +use crate::expr::Expr; +use crate::ident::Ident; +use crate::lifetime::Lifetime; +use crate::path::Path; use crate::punctuated::{Iter, IterMut, Punctuated}; +use crate::token; +use crate::ty::Type; use proc_macro2::TokenStream; #[cfg(all(feature = "printing", feature = "extra-traits"))] use std::fmt::{self, Debug}; @@ -16,7 +22,7 @@ ast_struct! { /// /// [generic parameters]: https://doc.rust-lang.org/stable/reference/items/generics.html#generic-parameters /// [where clause]: https://doc.rust-lang.org/stable/reference/items/generics.html#where-clauses - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct Generics { pub lt_token: Option<Token![<]>, pub params: Punctuated<GenericParam, Token![,]>, @@ -33,8 +39,8 @@ ast_enum_of_structs! { /// /// This type is a [syntax tree enum]. /// - /// [syntax tree enum]: Expr#syntax-tree-enums - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + /// [syntax tree enum]: crate::expr::Expr#syntax-tree-enums + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub enum GenericParam { /// A lifetime parameter: `'a: 'b + 'c + 'd`. Lifetime(LifetimeParam), @@ -49,7 +55,7 @@ ast_enum_of_structs! { ast_struct! { /// A lifetime definition: `'a: 'b + 'c + 'd`. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct LifetimeParam { pub attrs: Vec<Attribute>, pub lifetime: Lifetime, @@ -60,7 +66,7 @@ ast_struct! { ast_struct! { /// A generic type parameter: `T: Into<String>`. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct TypeParam { pub attrs: Vec<Attribute>, pub ident: Ident, @@ -73,7 +79,7 @@ ast_struct! { ast_struct! { /// A const generic parameter: `const LENGTH: usize`. 
- #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct ConstParam { pub attrs: Vec<Attribute>, pub const_token: Token![const], @@ -97,70 +103,46 @@ impl Default for Generics { } impl Generics { - /// Returns an - /// <code - /// style="padding-right:0;">Iterator<Item = &</code><a - /// href="struct.LifetimeParam.html"><code - /// style="padding-left:0;padding-right:0;">LifetimeParam</code></a><code - /// style="padding-left:0;">></code> - /// over the lifetime parameters in `self.params`. - pub fn lifetimes(&self) -> Lifetimes { - Lifetimes(self.params.iter()) - } - - /// Returns an - /// <code - /// style="padding-right:0;">Iterator<Item = &mut </code><a - /// href="struct.LifetimeParam.html"><code - /// style="padding-left:0;padding-right:0;">LifetimeParam</code></a><code - /// style="padding-left:0;">></code> - /// over the lifetime parameters in `self.params`. - pub fn lifetimes_mut(&mut self) -> LifetimesMut { - LifetimesMut(self.params.iter_mut()) - } - - /// Returns an - /// <code - /// style="padding-right:0;">Iterator<Item = &</code><a - /// href="struct.TypeParam.html"><code - /// style="padding-left:0;padding-right:0;">TypeParam</code></a><code - /// style="padding-left:0;">></code> - /// over the type parameters in `self.params`. - pub fn type_params(&self) -> TypeParams { - TypeParams(self.params.iter()) - } - - /// Returns an - /// <code - /// style="padding-right:0;">Iterator<Item = &mut </code><a - /// href="struct.TypeParam.html"><code - /// style="padding-left:0;padding-right:0;">TypeParam</code></a><code - /// style="padding-left:0;">></code> - /// over the type parameters in `self.params`. - pub fn type_params_mut(&mut self) -> TypeParamsMut { - TypeParamsMut(self.params.iter_mut()) - } - - /// Returns an - /// <code - /// style="padding-right:0;">Iterator<Item = &</code><a - /// href="struct.ConstParam.html"><code - /// style="padding-left:0;padding-right:0;">ConstParam</code></a><code - /// style="padding-left:0;">></code> - /// over the constant parameters in `self.params`. - pub fn const_params(&self) -> ConstParams { - ConstParams(self.params.iter()) - } - - /// Returns an - /// <code - /// style="padding-right:0;">Iterator<Item = &mut </code><a - /// href="struct.ConstParam.html"><code - /// style="padding-left:0;padding-right:0;">ConstParam</code></a><code - /// style="padding-left:0;">></code> - /// over the constant parameters in `self.params`. - pub fn const_params_mut(&mut self) -> ConstParamsMut { - ConstParamsMut(self.params.iter_mut()) + return_impl_trait! { + /// Iterator over the lifetime parameters in `self.params`. + pub fn lifetimes(&self) -> impl Iterator<Item = &LifetimeParam> [Lifetimes] { + Lifetimes(self.params.iter()) + } + } + + return_impl_trait! { + /// Iterator over the lifetime parameters in `self.params`. + pub fn lifetimes_mut(&mut self) -> impl Iterator<Item = &mut LifetimeParam> [LifetimesMut] { + LifetimesMut(self.params.iter_mut()) + } + } + + return_impl_trait! { + /// Iterator over the type parameters in `self.params`. + pub fn type_params(&self) -> impl Iterator<Item = &TypeParam> [TypeParams] { + TypeParams(self.params.iter()) + } + } + + return_impl_trait! { + /// Iterator over the type parameters in `self.params`. + pub fn type_params_mut(&mut self) -> impl Iterator<Item = &mut TypeParam> [TypeParamsMut] { + TypeParamsMut(self.params.iter_mut()) + } + } + + return_impl_trait! 
{ + /// Iterator over the constant parameters in `self.params`. + pub fn const_params(&self) -> impl Iterator<Item = &ConstParam> [ConstParams] { + ConstParams(self.params.iter()) + } + } + + return_impl_trait! { + /// Iterator over the constant parameters in `self.params`. + pub fn const_params_mut(&mut self) -> impl Iterator<Item = &mut ConstParam> [ConstParamsMut] { + ConstParamsMut(self.params.iter_mut()) + } } /// Initializes an empty `where`-clause if there is not one present already. @@ -170,6 +152,34 @@ impl Generics { predicates: Punctuated::new(), }) } + + /// Split a type's generics into the pieces required for impl'ing a trait + /// for that type. + /// + /// ``` + /// # use proc_macro2::{Span, Ident}; + /// # use quote::quote; + /// # + /// # let generics: syn::Generics = Default::default(); + /// # let name = Ident::new("MyType", Span::call_site()); + /// # + /// let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); + /// quote! { + /// impl #impl_generics MyTrait for #name #ty_generics #where_clause { + /// // ... + /// } + /// } + /// # ; + /// ``` + #[cfg(feature = "printing")] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] + pub fn split_for_impl(&self) -> (ImplGenerics, TypeGenerics, Option<&WhereClause>) { + ( + ImplGenerics(self), + TypeGenerics(self), + self.where_clause.as_ref(), + ) + } } pub struct Lifetimes<'a>(Iter<'a, GenericParam>); @@ -178,11 +188,7 @@ impl<'a> Iterator for Lifetimes<'a> { type Item = &'a LifetimeParam; fn next(&mut self) -> Option<Self::Item> { - let next = match self.0.next() { - Some(item) => item, - None => return None, - }; - if let GenericParam::Lifetime(lifetime) = next { + if let GenericParam::Lifetime(lifetime) = self.0.next()? { Some(lifetime) } else { self.next() @@ -196,11 +202,7 @@ impl<'a> Iterator for LifetimesMut<'a> { type Item = &'a mut LifetimeParam; fn next(&mut self) -> Option<Self::Item> { - let next = match self.0.next() { - Some(item) => item, - None => return None, - }; - if let GenericParam::Lifetime(lifetime) = next { + if let GenericParam::Lifetime(lifetime) = self.0.next()? { Some(lifetime) } else { self.next() @@ -214,11 +216,7 @@ impl<'a> Iterator for TypeParams<'a> { type Item = &'a TypeParam; fn next(&mut self) -> Option<Self::Item> { - let next = match self.0.next() { - Some(item) => item, - None => return None, - }; - if let GenericParam::Type(type_param) = next { + if let GenericParam::Type(type_param) = self.0.next()? { Some(type_param) } else { self.next() @@ -232,11 +230,7 @@ impl<'a> Iterator for TypeParamsMut<'a> { type Item = &'a mut TypeParam; fn next(&mut self) -> Option<Self::Item> { - let next = match self.0.next() { - Some(item) => item, - None => return None, - }; - if let GenericParam::Type(type_param) = next { + if let GenericParam::Type(type_param) = self.0.next()? { Some(type_param) } else { self.next() @@ -250,11 +244,7 @@ impl<'a> Iterator for ConstParams<'a> { type Item = &'a ConstParam; fn next(&mut self) -> Option<Self::Item> { - let next = match self.0.next() { - Some(item) => item, - None => return None, - }; - if let GenericParam::Const(const_param) = next { + if let GenericParam::Const(const_param) = self.0.next()? 
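// A small usage sketch for the accessors being re-expressed above through
// `return_impl_trait!`: from a caller's perspective they remain plain iterators
// over the matching `GenericParam` variants. `type_param_names` is a made-up
// helper name, not something introduced by this patch.
fn type_param_names(generics: &syn::Generics) -> Vec<syn::Ident> {
    generics.type_params().map(|param| param.ident.clone()).collect()
}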
{ Some(const_param) } else { self.next() @@ -268,11 +258,7 @@ impl<'a> Iterator for ConstParamsMut<'a> { type Item = &'a mut ConstParam; fn next(&mut self) -> Option<Self::Item> { - let next = match self.0.next() { - Some(item) => item, - None => return None, - }; - if let GenericParam::Const(const_param) = next { + if let GenericParam::Const(const_param) = self.0.next()? { Some(const_param) } else { self.next() @@ -283,7 +269,7 @@ impl<'a> Iterator for ConstParamsMut<'a> { /// Returned by `Generics::split_for_impl`. #[cfg(feature = "printing")] #[cfg_attr( - doc_cfg, + docsrs, doc(cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))) )] pub struct ImplGenerics<'a>(&'a Generics); @@ -291,7 +277,7 @@ pub struct ImplGenerics<'a>(&'a Generics); /// Returned by `Generics::split_for_impl`. #[cfg(feature = "printing")] #[cfg_attr( - doc_cfg, + docsrs, doc(cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))) )] pub struct TypeGenerics<'a>(&'a Generics); @@ -299,49 +285,16 @@ pub struct TypeGenerics<'a>(&'a Generics); /// Returned by `TypeGenerics::as_turbofish`. #[cfg(feature = "printing")] #[cfg_attr( - doc_cfg, + docsrs, doc(cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))) )] pub struct Turbofish<'a>(&'a Generics); -#[cfg(feature = "printing")] -impl Generics { - /// Split a type's generics into the pieces required for impl'ing a trait - /// for that type. - /// - /// ``` - /// # use proc_macro2::{Span, Ident}; - /// # use quote::quote; - /// # - /// # let generics: syn::Generics = Default::default(); - /// # let name = Ident::new("MyType", Span::call_site()); - /// # - /// let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); - /// quote! { - /// impl #impl_generics MyTrait for #name #ty_generics #where_clause { - /// // ... - /// } - /// } - /// # ; - /// ``` - #[cfg_attr( - doc_cfg, - doc(cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))) - )] - pub fn split_for_impl(&self) -> (ImplGenerics, TypeGenerics, Option<&WhereClause>) { - ( - ImplGenerics(self), - TypeGenerics(self), - self.where_clause.as_ref(), - ) - } -} - #[cfg(feature = "printing")] macro_rules! generics_wrapper_impls { ($ty:ident) => { #[cfg(feature = "clone-impls")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] + #[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] impl<'a> Clone for $ty<'a> { fn clone(&self) -> Self { $ty(self.0) @@ -349,7 +302,7 @@ macro_rules! generics_wrapper_impls { } #[cfg(feature = "extra-traits")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] impl<'a> Debug for $ty<'a> { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter @@ -360,11 +313,11 @@ macro_rules! generics_wrapper_impls { } #[cfg(feature = "extra-traits")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] impl<'a> Eq for $ty<'a> {} #[cfg(feature = "extra-traits")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] impl<'a> PartialEq for $ty<'a> { fn eq(&self, other: &Self) -> bool { self.0 == other.0 @@ -372,7 +325,7 @@ macro_rules! 
generics_wrapper_impls { } #[cfg(feature = "extra-traits")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] impl<'a> Hash for $ty<'a> { fn hash<H: Hasher>(&self, state: &mut H) { self.0.hash(state); @@ -391,14 +344,14 @@ generics_wrapper_impls!(Turbofish); #[cfg(feature = "printing")] impl<'a> TypeGenerics<'a> { /// Turn a type's generics like `<X, Y>` into a turbofish like `::<X, Y>`. - pub fn as_turbofish(&self) -> Turbofish { + pub fn as_turbofish(&self) -> Turbofish<'a> { Turbofish(self.0) } } ast_struct! { /// A set of bound lifetimes: `for<'a, 'b, 'c>`. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct BoundLifetimes { pub for_token: Token![for], pub lt_token: Token![<], @@ -444,18 +397,19 @@ impl From<Ident> for TypeParam { ast_enum_of_structs! { /// A trait or lifetime used as a bound on a type parameter. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] #[non_exhaustive] pub enum TypeParamBound { Trait(TraitBound), Lifetime(Lifetime), + PreciseCapture(PreciseCapture), Verbatim(TokenStream), } } ast_struct! { /// A trait used as a bound on a type parameter. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct TraitBound { pub paren_token: Option<token::Paren>, pub modifier: TraitBoundModifier, @@ -469,17 +423,45 @@ ast_struct! { ast_enum! { /// A modifier on a trait bound, currently only used for the `?` in /// `?Sized`. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub enum TraitBoundModifier { None, Maybe(Token![?]), } } +ast_struct! { + /// Precise capturing bound: the 'use<…>' in `impl Trait + + /// use<'a, T>`. + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + pub struct PreciseCapture #full { + pub use_token: Token![use], + pub lt_token: Token![<], + pub params: Punctuated<CapturedParam, Token![,]>, + pub gt_token: Token![>], + } +} + +#[cfg(feature = "full")] +ast_enum! { + /// Single parameter in a precise capturing bound. + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] + #[non_exhaustive] + pub enum CapturedParam { + /// A lifetime parameter in precise capturing bound: `fn f<'a>() -> impl + /// Trait + use<'a>`. + Lifetime(Lifetime), + /// A type parameter or const generic parameter in precise capturing + /// bound: `fn f<T>() -> impl Trait + use<T>` or `fn f<const K: T>() -> + /// impl Trait + use<K>`. + Ident(Ident), + } +} + ast_struct! { /// A `where` clause in a definition: `where T: Deserialize<'de>, D: /// 'static`. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct WhereClause { pub where_token: Token![where], pub predicates: Punctuated<WherePredicate, Token![,]>, @@ -493,8 +475,8 @@ ast_enum_of_structs! { /// /// This type is a [syntax tree enum]. 
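// An illustrative sketch of the new precise-capturing nodes defined above:
// `PreciseCapture` and `CapturedParam` model the `use<'a, T>` bound that can
// appear in `impl Trait + use<'a, T>`. Assumes the vendored syn's "full" and
// "parsing" features; the parsed string and `count_captured_params` are made-up
// examples, not taken from the patch.
fn count_captured_params(src: &str) -> syn::Result<usize> {
    let ty: syn::Type = syn::parse_str(src)?;
    let mut count = 0;
    if let syn::Type::ImplTrait(impl_trait) = ty {
        for bound in &impl_trait.bounds {
            if let syn::TypeParamBound::PreciseCapture(capture) = bound {
                count += capture.params.len();
            }
        }
    }
    Ok(count)
}
// E.g. count_captured_params("impl Iterator<Item = &'a T> + use<'a, T>") should
// report two captured parameters (`'a` and `T`).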
/// - /// [syntax tree enum]: Expr#syntax-tree-enums - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + /// [syntax tree enum]: crate::expr::Expr#syntax-tree-enums + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] #[non_exhaustive] pub enum WherePredicate { /// A lifetime predicate in a `where` clause: `'a: 'b + 'c`. @@ -507,7 +489,7 @@ ast_enum_of_structs! { ast_struct! { /// A lifetime predicate in a `where` clause: `'a: 'b + 'c`. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct PredicateLifetime { pub lifetime: Lifetime, pub colon_token: Token![:], @@ -517,7 +499,7 @@ ast_struct! { ast_struct! { /// A type predicate in a `where` clause: `for<'c> Foo<'c>: Trait<'c>`. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct PredicateType { /// Any lifetimes from a `for` binding pub lifetimes: Option<BoundLifetimes>, @@ -531,11 +513,28 @@ ast_struct! { #[cfg(feature = "parsing")] pub(crate) mod parsing { - use super::*; - use crate::ext::IdentExt; - use crate::parse::{Parse, ParseStream, Result}; - - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + use crate::attr::Attribute; + #[cfg(feature = "full")] + use crate::error; + use crate::error::{Error, Result}; + use crate::ext::IdentExt as _; + use crate::generics::{ + BoundLifetimes, ConstParam, GenericParam, Generics, LifetimeParam, PredicateLifetime, + PredicateType, TraitBound, TraitBoundModifier, TypeParam, TypeParamBound, WhereClause, + WherePredicate, + }; + #[cfg(feature = "full")] + use crate::generics::{CapturedParam, PreciseCapture}; + use crate::ident::Ident; + use crate::lifetime::Lifetime; + use crate::parse::{Parse, ParseStream}; + use crate::path::{self, ParenthesizedGenericArguments, Path, PathArguments}; + use crate::punctuated::Punctuated; + use crate::token; + use crate::ty::Type; + use crate::verbatim; + + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for Generics { fn parse(input: ParseStream) -> Result<Self> { if !input.peek(Token![<]) { @@ -598,7 +597,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for GenericParam { fn parse(input: ParseStream) -> Result<Self> { let attrs = input.call(Attribute::parse_outer)?; @@ -625,7 +624,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for LifetimeParam { fn parse(input: ParseStream) -> Result<Self> { let has_colon; @@ -663,7 +662,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for BoundLifetimes { fn parse(input: ParseStream) -> Result<Self> { Ok(BoundLifetimes { @@ -672,14 +671,7 @@ pub(crate) mod parsing { lifetimes: { let mut lifetimes = Punctuated::new(); while !input.peek(Token![>]) { - let attrs = input.call(Attribute::parse_outer)?; - let lifetime: Lifetime = input.parse()?; - lifetimes.push_value(GenericParam::Lifetime(LifetimeParam { - attrs, - lifetime, - colon_token: None, - bounds: Punctuated::new(), - })); + lifetimes.push_value(input.parse()?); if input.peek(Token![>]) { break; } @@ -692,7 +684,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = 
"parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for Option<BoundLifetimes> { fn parse(input: ParseStream) -> Result<Self> { if input.peek(Token![for]) { @@ -703,7 +695,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for TypeParam { fn parse(input: ParseStream) -> Result<Self> { let attrs = input.call(Attribute::parse_outer)?; @@ -716,8 +708,11 @@ pub(crate) mod parsing { if input.peek(Token![,]) || input.peek(Token![>]) || input.peek(Token![=]) { break; } - let value: TypeParamBound = input.parse()?; - bounds.push_value(value); + bounds.push_value({ + let allow_precise_capture = false; + let allow_const = true; + TypeParamBound::parse_single(input, allow_precise_capture, allow_const)? + }); if !input.peek(Token![+]) { break; } @@ -744,13 +739,42 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for TypeParamBound { fn parse(input: ParseStream) -> Result<Self> { + let allow_precise_capture = true; + let allow_const = true; + Self::parse_single(input, allow_precise_capture, allow_const) + } + } + + impl TypeParamBound { + pub(crate) fn parse_single( + input: ParseStream, + #[cfg_attr(not(feature = "full"), allow(unused_variables))] allow_precise_capture: bool, + allow_const: bool, + ) -> Result<Self> { if input.peek(Lifetime) { return input.parse().map(TypeParamBound::Lifetime); } + #[cfg(feature = "full")] + { + if input.peek(Token![use]) { + let precise_capture: PreciseCapture = input.parse()?; + return if allow_precise_capture { + Ok(TypeParamBound::PreciseCapture(precise_capture)) + } else { + let msg = "`use<...>` precise capturing syntax is not allowed here"; + Err(error::new2( + precise_capture.use_token.span, + precise_capture.gt_token.span, + msg, + )) + }; + } + } + let begin = input.fork(); let content; @@ -760,32 +784,24 @@ pub(crate) mod parsing { (None, input) }; - let is_tilde_const = - cfg!(feature = "full") && content.peek(Token![~]) && content.peek2(Token![const]); - if is_tilde_const { - content.parse::<Token![~]>()?; - content.parse::<Token![const]>()?; - } - - let mut bound: TraitBound = content.parse()?; - bound.paren_token = paren_token; - - if is_tilde_const { - Ok(TypeParamBound::Verbatim(verbatim::between(&begin, input))) - } else { + if let Some(mut bound) = TraitBound::do_parse(content, allow_const)? 
{ + bound.paren_token = paren_token; Ok(TypeParamBound::Trait(bound)) + } else { + Ok(TypeParamBound::Verbatim(verbatim::between(&begin, input))) } } - } - impl TypeParamBound { pub(crate) fn parse_multiple( input: ParseStream, allow_plus: bool, + allow_precise_capture: bool, + allow_const: bool, ) -> Result<Punctuated<Self, Token![+]>> { let mut bounds = Punctuated::new(); loop { - bounds.push_value(input.parse()?); + let bound = Self::parse_single(input, allow_precise_capture, allow_const)?; + bounds.push_value(bound); if !(allow_plus && input.peek(Token![+])) { break; } @@ -795,7 +811,7 @@ pub(crate) mod parsing { || input.peek(Token![?]) || input.peek(Lifetime) || input.peek(token::Paren) - || input.peek(Token![~])) + || (allow_const && (input.peek(token::Bracket) || input.peek(Token![const])))) { break; } @@ -804,11 +820,40 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for TraitBound { fn parse(input: ParseStream) -> Result<Self> { + let allow_const = false; + Self::do_parse(input, allow_const).map(Option::unwrap) + } + } + + impl TraitBound { + fn do_parse(input: ParseStream, allow_const: bool) -> Result<Option<Self>> { + let mut lifetimes: Option<BoundLifetimes> = input.parse()?; + + let is_conditionally_const = cfg!(feature = "full") && input.peek(token::Bracket); + let is_unconditionally_const = cfg!(feature = "full") && input.peek(Token![const]); + if is_conditionally_const { + let conditionally_const; + let bracket_token = bracketed!(conditionally_const in input); + conditionally_const.parse::<Token![const]>()?; + if !allow_const { + let msg = "`[const]` is not allowed here"; + return Err(Error::new(bracket_token.span.join(), msg)); + } + } else if is_unconditionally_const { + let const_token: Token![const] = input.parse()?; + if !allow_const { + let msg = "`const` is not allowed here"; + return Err(Error::new(const_token.span, msg)); + } + } + let modifier: TraitBoundModifier = input.parse()?; - let lifetimes: Option<BoundLifetimes> = input.parse()?; + if lifetimes.is_none() && matches!(modifier, TraitBoundModifier::Maybe(_)) { + lifetimes = input.parse()?; + } let mut path: Path = input.parse()?; if path.segments.last().unwrap().arguments.is_empty() @@ -820,16 +865,30 @@ pub(crate) mod parsing { path.segments.last_mut().unwrap().arguments = parenthesized; } - Ok(TraitBound { - paren_token: None, - modifier, - lifetimes, - path, - }) + if lifetimes.is_some() { + match modifier { + TraitBoundModifier::None => {} + TraitBoundModifier::Maybe(maybe) => { + let msg = "`for<...>` binder not allowed with `?` trait polarity modifier"; + return Err(Error::new(maybe.span, msg)); + } + } + } + + if is_conditionally_const || is_unconditionally_const { + Ok(None) + } else { + Ok(Some(TraitBound { + paren_token: None, + modifier, + lifetimes, + path, + })) + } } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for TraitBoundModifier { fn parse(input: ParseStream) -> Result<Self> { if input.peek(Token![?]) { @@ -840,7 +899,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ConstParam { fn parse(input: ParseStream) -> Result<Self> { let mut default = None; @@ -864,11 +923,18 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl 
Parse for WhereClause { fn parse(input: ParseStream) -> Result<Self> { + let where_token: Token![where] = input.parse()?; + + if choose_generics_over_qpath(input) { + return Err(input + .error("generic parameters on `where` clauses are reserved for future use")); + } + Ok(WhereClause { - where_token: input.parse()?, + where_token, predicates: { let mut predicates = Punctuated::new(); loop { @@ -895,7 +961,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for Option<WhereClause> { fn parse(input: ParseStream) -> Result<Self> { if input.peek(Token![where]) { @@ -906,7 +972,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for WherePredicate { fn parse(input: ParseStream) -> Result<Self> { if input.peek(Lifetime) && input.peek2(Token![:]) { @@ -953,8 +1019,15 @@ pub(crate) mod parsing { { break; } - let value = input.parse()?; - bounds.push_value(value); + bounds.push_value({ + let allow_precise_capture = false; + let allow_const = true; + TypeParamBound::parse_single( + input, + allow_precise_capture, + allow_const, + )? + }); if !input.peek(Token![+]) { break; } @@ -967,17 +1040,123 @@ pub(crate) mod parsing { } } } + + #[cfg(feature = "full")] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] + impl Parse for PreciseCapture { + fn parse(input: ParseStream) -> Result<Self> { + let use_token: Token![use] = input.parse()?; + let lt_token: Token![<] = input.parse()?; + let mut params = Punctuated::new(); + loop { + let lookahead = input.lookahead1(); + params.push_value( + if lookahead.peek(Lifetime) || lookahead.peek(Ident) || input.peek(Token![Self]) + { + input.parse::<CapturedParam>()? + } else if lookahead.peek(Token![>]) { + break; + } else { + return Err(lookahead.error()); + }, + ); + let lookahead = input.lookahead1(); + params.push_punct(if lookahead.peek(Token![,]) { + input.parse::<Token![,]>()? + } else if lookahead.peek(Token![>]) { + break; + } else { + return Err(lookahead.error()); + }); + } + let gt_token: Token![>] = input.parse()?; + Ok(PreciseCapture { + use_token, + lt_token, + params, + gt_token, + }) + } + } + + #[cfg(feature = "full")] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] + impl Parse for CapturedParam { + fn parse(input: ParseStream) -> Result<Self> { + let lookahead = input.lookahead1(); + if lookahead.peek(Lifetime) { + input.parse().map(CapturedParam::Lifetime) + } else if lookahead.peek(Ident) || input.peek(Token![Self]) { + input.call(Ident::parse_any).map(CapturedParam::Ident) + } else { + Err(lookahead.error()) + } + } + } + + pub(crate) fn choose_generics_over_qpath(input: ParseStream) -> bool { + // Rust syntax has an ambiguity between generic parameters and qualified + // paths. In `impl <T> :: Thing<T, U> {}` this may either be a generic + // inherent impl `impl<T> ::Thing<T, U>` or a non-generic inherent impl + // for an associated type `impl <T>::Thing<T, U>`. 
+ // + // After `<` the following continuations can only begin generics, not a + // qualified path: + // + // `<` `>` - empty generic parameters + // `<` `#` - generic parameters with attribute + // `<` LIFETIME `>` - single lifetime parameter + // `<` (LIFETIME|IDENT) `,` - first generic parameter in a list + // `<` (LIFETIME|IDENT) `:` - generic parameter with bounds + // `<` (LIFETIME|IDENT) `=` - generic parameter with a default + // `<` const - generic const parameter + // + // The only truly ambiguous case is: + // + // `<` IDENT `>` `::` IDENT ... + // + // which we disambiguate in favor of generics because this is almost + // always the expected one in the context of real-world code. + input.peek(Token![<]) + && (input.peek2(Token![>]) + || input.peek2(Token![#]) + || (input.peek2(Lifetime) || input.peek2(Ident)) + && (input.peek3(Token![>]) + || input.peek3(Token![,]) + || input.peek3(Token![:]) && !input.peek3(Token![::]) + || input.peek3(Token![=])) + || input.peek2(Token![const])) + } + + #[cfg(feature = "full")] + pub(crate) fn choose_generics_over_qpath_after_keyword(input: ParseStream) -> bool { + let input = input.fork(); + input.call(Ident::parse_any).unwrap(); // `impl` or `for` or `where` + choose_generics_over_qpath(&input) + } } #[cfg(feature = "printing")] -mod printing { - use super::*; +pub(crate) mod printing { use crate::attr::FilterAttrs; + #[cfg(feature = "full")] + use crate::expr; + use crate::expr::Expr; + #[cfg(feature = "full")] + use crate::fixup::FixupContext; + use crate::generics::{ + BoundLifetimes, ConstParam, GenericParam, Generics, ImplGenerics, LifetimeParam, + PredicateLifetime, PredicateType, TraitBound, TraitBoundModifier, Turbofish, TypeGenerics, + TypeParam, WhereClause, + }; + #[cfg(feature = "full")] + use crate::generics::{CapturedParam, PreciseCapture}; use crate::print::TokensOrDefault; + use crate::token; use proc_macro2::TokenStream; - use quote::{ToTokens, TokenStreamExt}; + use quote::{ToTokens, TokenStreamExt as _}; - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for Generics { fn to_tokens(&self, tokens: &mut TokenStream) { if self.params.is_empty() { @@ -1118,7 +1297,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for BoundLifetimes { fn to_tokens(&self, tokens: &mut TokenStream) { self.for_token.to_tokens(tokens); @@ -1128,7 +1307,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for LifetimeParam { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); @@ -1140,7 +1319,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for TypeParam { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); @@ -1156,7 +1335,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for TraitBound { fn to_tokens(&self, tokens: &mut TokenStream) { let to_tokens = |tokens: &mut TokenStream| { @@ -1171,7 +1350,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for TraitBoundModifier { fn to_tokens(&self, tokens: &mut TokenStream) { match self { @@ 
-1181,7 +1360,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ConstParam { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); @@ -1191,12 +1370,12 @@ mod printing { self.ty.to_tokens(tokens); if let Some(default) = &self.default { TokensOrDefault(&self.eq_token).to_tokens(tokens); - default.to_tokens(tokens); + print_const_argument(default, tokens); } } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for WhereClause { fn to_tokens(&self, tokens: &mut TokenStream) { if !self.predicates.is_empty() { @@ -1206,7 +1385,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for PredicateLifetime { fn to_tokens(&self, tokens: &mut TokenStream) { self.lifetime.to_tokens(tokens); @@ -1215,7 +1394,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for PredicateType { fn to_tokens(&self, tokens: &mut TokenStream) { self.lifetimes.to_tokens(tokens); @@ -1224,4 +1403,75 @@ mod printing { self.bounds.to_tokens(tokens); } } + + #[cfg(feature = "full")] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] + impl ToTokens for PreciseCapture { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.use_token.to_tokens(tokens); + self.lt_token.to_tokens(tokens); + + // Print lifetimes before types and consts, regardless of their + // order in self.params. + let mut trailing_or_empty = true; + for param in self.params.pairs() { + if let CapturedParam::Lifetime(_) = **param.value() { + param.to_tokens(tokens); + trailing_or_empty = param.punct().is_some(); + } + } + for param in self.params.pairs() { + if let CapturedParam::Ident(_) = **param.value() { + if !trailing_or_empty { + <Token![,]>::default().to_tokens(tokens); + trailing_or_empty = true; + } + param.to_tokens(tokens); + } + } + + self.gt_token.to_tokens(tokens); + } + } + + #[cfg(feature = "full")] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] + impl ToTokens for CapturedParam { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + CapturedParam::Lifetime(lifetime) => lifetime.to_tokens(tokens), + CapturedParam::Ident(ident) => ident.to_tokens(tokens), + } + } + } + + pub(crate) fn print_const_argument(expr: &Expr, tokens: &mut TokenStream) { + match expr { + Expr::Lit(expr) => expr.to_tokens(tokens), + + Expr::Path(expr) + if expr.attrs.is_empty() + && expr.qself.is_none() + && expr.path.get_ident().is_some() => + { + expr.to_tokens(tokens); + } + + #[cfg(feature = "full")] + Expr::Block(expr) => expr.to_tokens(tokens), + + #[cfg(not(feature = "full"))] + Expr::Verbatim(expr) => expr.to_tokens(tokens), + + // ERROR CORRECTION: Add braces to make sure that the + // generated code is valid. 
+ _ => token::Brace::default().surround(tokens, |tokens| { + #[cfg(feature = "full")] + expr::printing::print_expr(expr, tokens, FixupContext::new_stmt()); + + #[cfg(not(feature = "full"))] + expr.to_tokens(tokens); + }), + } + } } diff --git a/vendor/syn/src/group.rs b/vendor/syn/src/group.rs index 27302331..56667b6a 100644 --- a/vendor/syn/src/group.rs +++ b/vendor/syn/src/group.rs @@ -82,7 +82,7 @@ fn parse_delimited<'a>( ) -> Result<(DelimSpan, ParseBuffer<'a>)> { input.step(|cursor| { if let Some((content, span, rest)) = cursor.group(delimiter) { - let scope = crate::buffer::close_span_of_group(*cursor); + let scope = span.close(); let nested = crate::parse::advance_step_cursor(cursor, content); let unexpected = crate::parse::get_unexpected(input); let content = crate::parse::new_parse_buffer(scope, nested, unexpected); @@ -142,12 +142,13 @@ fn parse_delimited<'a>( /// # } /// ``` #[macro_export] -#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] +#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] macro_rules! parenthesized { ($content:ident in $cursor:expr) => { match $crate::__private::parse_parens(&$cursor) { $crate::__private::Ok(parens) => { $content = parens.content; + _ = $content; parens.token } $crate::__private::Err(error) => { @@ -220,12 +221,13 @@ macro_rules! parenthesized { /// # } /// ``` #[macro_export] -#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] +#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] macro_rules! braced { ($content:ident in $cursor:expr) => { match $crate::__private::parse_braces(&$cursor) { $crate::__private::Ok(braces) => { $content = braces.content; + _ = $content; braces.token } $crate::__private::Err(error) => { @@ -275,12 +277,13 @@ macro_rules! braced { /// # } /// ``` #[macro_export] -#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] +#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] macro_rules! 
bracketed { ($content:ident in $cursor:expr) => { match $crate::__private::parse_brackets(&$cursor) { $crate::__private::Ok(brackets) => { $content = brackets.content; + _ = $content; brackets.token } $crate::__private::Err(error) => { diff --git a/vendor/syn/src/ident.rs b/vendor/syn/src/ident.rs index d0f4ba08..8a8e8a50 100644 --- a/vendor/syn/src/ident.rs +++ b/vendor/syn/src/ident.rs @@ -51,7 +51,8 @@ pub(crate) fn xid_ok(symbol: &str) -> bool { #[cfg(feature = "parsing")] mod parsing { use crate::buffer::Cursor; - use crate::parse::{Parse, ParseStream, Result}; + use crate::error::Result; + use crate::parse::{Parse, ParseStream}; use crate::token::Token; use proc_macro2::Ident; @@ -71,7 +72,7 @@ mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for Ident { fn parse(input: ParseStream) -> Result<Self> { input.step(|cursor| { diff --git a/vendor/syn/src/item.rs b/vendor/syn/src/item.rs index ee91f591..00beb0d3 100644 --- a/vendor/syn/src/item.rs +++ b/vendor/syn/src/item.rs @@ -1,8 +1,19 @@ -use super::*; +use crate::attr::Attribute; +use crate::data::{Fields, FieldsNamed, Variant}; use crate::derive::{Data, DataEnum, DataStruct, DataUnion, DeriveInput}; +use crate::expr::Expr; +use crate::generics::{Generics, TypeParamBound}; +use crate::ident::Ident; +use crate::lifetime::Lifetime; +use crate::mac::Macro; +use crate::pat::{Pat, PatType}; +use crate::path::Path; use crate::punctuated::Punctuated; +use crate::restriction::Visibility; +use crate::stmt::Block; +use crate::token; +use crate::ty::{Abi, ReturnType, Type}; use proc_macro2::TokenStream; - #[cfg(feature = "parsing")] use std::mem; @@ -13,8 +24,8 @@ ast_enum_of_structs! { /// /// This type is a [syntax tree enum]. /// - /// [syntax tree enum]: Expr#syntax-tree-enums - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + /// [syntax tree enum]: crate::expr::Expr#syntax-tree-enums + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] #[non_exhaustive] pub enum Item { /// A constant item: `const MAX: u16 = 65535`. @@ -70,12 +81,13 @@ ast_enum_of_structs! { // For testing exhaustiveness in downstream code, use the following idiom: // // match item { + // #![cfg_attr(test, deny(non_exhaustive_omitted_patterns))] + // // Item::Const(item) => {...} // Item::Enum(item) => {...} // ... // Item::Verbatim(item) => {...} // - // #[cfg_attr(test, deny(non_exhaustive_omitted_patterns))] // _ => { /* some sane fallback */ } // } // @@ -88,7 +100,7 @@ ast_enum_of_structs! { ast_struct! { /// A constant item: `const MAX: u16 = 65535`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ItemConst { pub attrs: Vec<Attribute>, pub vis: Visibility, @@ -105,7 +117,7 @@ ast_struct! { ast_struct! { /// An enum definition: `enum Foo<A, B> { A(A), B(B) }`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ItemEnum { pub attrs: Vec<Attribute>, pub vis: Visibility, @@ -119,7 +131,7 @@ ast_struct! { ast_struct! { /// An `extern crate` item: `extern crate serde`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ItemExternCrate { pub attrs: Vec<Attribute>, pub vis: Visibility, @@ -133,7 +145,7 @@ ast_struct! { ast_struct! { /// A free-standing function: `fn process(n: usize) -> Result<()> { ... }`. 
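Editorial note, not part of the patch: the `group.rs` hunk above adds a trailing `_ = $content;` to `parenthesized!`, `braced!`, and `bracketed!` so that parsers which only need the delimiter token no longer trip an `unused_variables` warning on the content buffer. A sketch of such a parser (the `empty_parens` name is made up):

// --- illustrative example, not part of the patch ---
use syn::parse::{ParseStream, Parser};
use syn::{parenthesized, token, Result};

fn empty_parens(input: ParseStream) -> Result<token::Paren> {
    let content;
    // `content` is never read below; the macro's trailing `_ = $content;`
    // keeps this from warning on current syn versions.
    let paren_token = parenthesized!(content in input);
    Ok(paren_token)
}

fn main() {
    let paren = empty_parens.parse_str("()").unwrap();
    println!("parsed parentheses spanning {:?}", paren.span.join());
}
// --- end example ---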
- #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ItemFn { pub attrs: Vec<Attribute>, pub vis: Visibility, @@ -144,7 +156,7 @@ ast_struct! { ast_struct! { /// A block of foreign items: `extern "C" { ... }`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ItemForeignMod { pub attrs: Vec<Attribute>, pub unsafety: Option<Token![unsafe]>, @@ -157,7 +169,7 @@ ast_struct! { ast_struct! { /// An impl block providing trait or associated items: `impl<A> Trait /// for Data<A> { ... }`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ItemImpl { pub attrs: Vec<Attribute>, pub defaultness: Option<Token![default]>, @@ -175,7 +187,7 @@ ast_struct! { ast_struct! { /// A macro invocation, which includes `macro_rules!` definitions. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ItemMacro { pub attrs: Vec<Attribute>, /// The `example` in `macro_rules! example { ... }`. @@ -187,7 +199,7 @@ ast_struct! { ast_struct! { /// A module or module declaration: `mod m` or `mod m { ... }`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ItemMod { pub attrs: Vec<Attribute>, pub vis: Visibility, @@ -201,7 +213,7 @@ ast_struct! { ast_struct! { /// A static item: `static BIKE: Shed = Shed(42)`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ItemStatic { pub attrs: Vec<Attribute>, pub vis: Visibility, @@ -218,7 +230,7 @@ ast_struct! { ast_struct! { /// A struct definition: `struct Foo<A> { x: A }`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ItemStruct { pub attrs: Vec<Attribute>, pub vis: Visibility, @@ -232,7 +244,7 @@ ast_struct! { ast_struct! { /// A trait definition: `pub trait Iterator { ... }`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ItemTrait { pub attrs: Vec<Attribute>, pub vis: Visibility, @@ -251,7 +263,7 @@ ast_struct! { ast_struct! { /// A trait alias: `pub trait SharableIterator = Iterator + Sync`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ItemTraitAlias { pub attrs: Vec<Attribute>, pub vis: Visibility, @@ -266,7 +278,7 @@ ast_struct! { ast_struct! { /// A type alias: `type Result<T> = std::result::Result<T, MyError>`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ItemType { pub attrs: Vec<Attribute>, pub vis: Visibility, @@ -281,7 +293,7 @@ ast_struct! { ast_struct! { /// A union definition: `union Foo<A, B> { x: A, y: B }`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ItemUnion { pub attrs: Vec<Attribute>, pub vis: Visibility, @@ -294,7 +306,7 @@ ast_struct! { ast_struct! { /// A use declaration: `use std::collections::HashMap`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ItemUse { pub attrs: Vec<Attribute>, pub vis: Visibility, @@ -416,8 +428,8 @@ ast_enum_of_structs! { /// /// This type is a [syntax tree enum]. 
/// - /// [syntax tree enum]: Expr#syntax-tree-enums - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + /// [syntax tree enum]: crate::expr::Expr#syntax-tree-enums + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub enum UseTree { /// A path prefix of imports in a `use` item: `std::...`. Path(UsePath), @@ -438,7 +450,7 @@ ast_enum_of_structs! { ast_struct! { /// A path prefix of imports in a `use` item: `std::...`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct UsePath { pub ident: Ident, pub colon2_token: Token![::], @@ -448,7 +460,7 @@ ast_struct! { ast_struct! { /// An identifier imported by a `use` item: `HashMap`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct UseName { pub ident: Ident, } @@ -456,7 +468,7 @@ ast_struct! { ast_struct! { /// An renamed identifier imported by a `use` item: `HashMap as Map`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct UseRename { pub ident: Ident, pub as_token: Token![as], @@ -466,7 +478,7 @@ ast_struct! { ast_struct! { /// A glob import in a `use` item: `*`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct UseGlob { pub star_token: Token![*], } @@ -474,7 +486,7 @@ ast_struct! { ast_struct! { /// A braced group of imports in a `use` item: `{A, B, C}`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct UseGroup { pub brace_token: token::Brace, pub items: Punctuated<UseTree, Token![,]>, @@ -488,8 +500,8 @@ ast_enum_of_structs! { /// /// This type is a [syntax tree enum]. /// - /// [syntax tree enum]: Expr#syntax-tree-enums - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + /// [syntax tree enum]: crate::expr::Expr#syntax-tree-enums + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] #[non_exhaustive] pub enum ForeignItem { /// A foreign function in an `extern` block. @@ -510,12 +522,13 @@ ast_enum_of_structs! { // For testing exhaustiveness in downstream code, use the following idiom: // // match item { + // #![cfg_attr(test, deny(non_exhaustive_omitted_patterns))] + // // ForeignItem::Fn(item) => {...} // ForeignItem::Static(item) => {...} // ... // ForeignItem::Verbatim(item) => {...} // - // #[cfg_attr(test, deny(non_exhaustive_omitted_patterns))] // _ => { /* some sane fallback */ } // } // @@ -528,7 +541,7 @@ ast_enum_of_structs! { ast_struct! { /// A foreign function in an `extern` block. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ForeignItemFn { pub attrs: Vec<Attribute>, pub vis: Visibility, @@ -539,7 +552,7 @@ ast_struct! { ast_struct! { /// A foreign static item in an `extern` block: `static ext: u8`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ForeignItemStatic { pub attrs: Vec<Attribute>, pub vis: Visibility, @@ -554,7 +567,7 @@ ast_struct! { ast_struct! { /// A foreign type in an `extern` block: `type void`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ForeignItemType { pub attrs: Vec<Attribute>, pub vis: Visibility, @@ -567,7 +580,7 @@ ast_struct! { ast_struct! { /// A macro invocation within an extern block. 
- #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ForeignItemMacro { pub attrs: Vec<Attribute>, pub mac: Macro, @@ -582,8 +595,8 @@ ast_enum_of_structs! { /// /// This type is a [syntax tree enum]. /// - /// [syntax tree enum]: Expr#syntax-tree-enums - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + /// [syntax tree enum]: crate::expr::Expr#syntax-tree-enums + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] #[non_exhaustive] pub enum TraitItem { /// An associated constant within the definition of a trait. @@ -604,12 +617,13 @@ ast_enum_of_structs! { // For testing exhaustiveness in downstream code, use the following idiom: // // match item { + // #![cfg_attr(test, deny(non_exhaustive_omitted_patterns))] + // // TraitItem::Const(item) => {...} // TraitItem::Fn(item) => {...} // ... // TraitItem::Verbatim(item) => {...} // - // #[cfg_attr(test, deny(non_exhaustive_omitted_patterns))] // _ => { /* some sane fallback */ } // } // @@ -622,7 +636,7 @@ ast_enum_of_structs! { ast_struct! { /// An associated constant within the definition of a trait. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct TraitItemConst { pub attrs: Vec<Attribute>, pub const_token: Token![const], @@ -637,7 +651,7 @@ ast_struct! { ast_struct! { /// An associated function within the definition of a trait. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct TraitItemFn { pub attrs: Vec<Attribute>, pub sig: Signature, @@ -648,7 +662,7 @@ ast_struct! { ast_struct! { /// An associated type within the definition of a trait. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct TraitItemType { pub attrs: Vec<Attribute>, pub type_token: Token![type], @@ -663,7 +677,7 @@ ast_struct! { ast_struct! { /// A macro invocation within the definition of a trait. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct TraitItemMacro { pub attrs: Vec<Attribute>, pub mac: Macro, @@ -678,8 +692,8 @@ ast_enum_of_structs! { /// /// This type is a [syntax tree enum]. /// - /// [syntax tree enum]: Expr#syntax-tree-enums - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + /// [syntax tree enum]: crate::expr::Expr#syntax-tree-enums + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] #[non_exhaustive] pub enum ImplItem { /// An associated constant within an impl block. @@ -700,12 +714,13 @@ ast_enum_of_structs! { // For testing exhaustiveness in downstream code, use the following idiom: // // match item { + // #![cfg_attr(test, deny(non_exhaustive_omitted_patterns))] + // // ImplItem::Const(item) => {...} // ImplItem::Fn(item) => {...} // ... // ImplItem::Verbatim(item) => {...} // - // #[cfg_attr(test, deny(non_exhaustive_omitted_patterns))] // _ => { /* some sane fallback */ } // } // @@ -718,7 +733,7 @@ ast_enum_of_structs! { ast_struct! { /// An associated constant within an impl block. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ImplItemConst { pub attrs: Vec<Attribute>, pub vis: Visibility, @@ -736,7 +751,7 @@ ast_struct! { ast_struct! { /// An associated function within an impl block. 
- #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ImplItemFn { pub attrs: Vec<Attribute>, pub vis: Visibility, @@ -748,7 +763,7 @@ ast_struct! { ast_struct! { /// An associated type within an impl block. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ImplItemType { pub attrs: Vec<Attribute>, pub vis: Visibility, @@ -764,7 +779,7 @@ ast_struct! { ast_struct! { /// A macro invocation within an impl block. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct ImplItemMacro { pub attrs: Vec<Attribute>, pub mac: Macro, @@ -775,7 +790,7 @@ ast_struct! { ast_struct! { /// A function signature in a trait or implementation: `unsafe fn /// initialize(&self)`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct Signature { pub constness: Option<Token![const]>, pub asyncness: Option<Token![async]>, @@ -804,7 +819,7 @@ impl Signature { ast_enum_of_structs! { /// An argument in a function signature: the `n: usize` in `fn f(n: usize)`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub enum FnArg { /// The `self` argument of an associated method. Receiver(Receiver), @@ -822,7 +837,7 @@ ast_struct! { /// is written in shorthand such as `self` or `&self` or `&mut self`. In the /// shorthand case, the type in `ty` is reconstructed as one of `Self`, /// `&Self`, or `&mut Self`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct Receiver { pub attrs: Vec<Attribute>, pub reference: Option<(Token![&], Option<Lifetime>)>, @@ -851,7 +866,7 @@ ast_struct! { /// // ^^^ /// } /// ``` - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct Variadic { pub attrs: Vec<Attribute>, pub pat: Option<(Box<Pat>, Token![:])>, @@ -862,7 +877,7 @@ ast_struct! { ast_enum! { /// The mutability of an `Item::Static` or `ForeignItem::Static`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] #[non_exhaustive] pub enum StaticMutability { Mut(Token![mut]), @@ -872,7 +887,7 @@ ast_enum! { ast_enum! { /// Unused, but reserved for RFC 3323 restrictions. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] #[non_exhaustive] pub enum ImplRestriction {} @@ -889,12 +904,37 @@ ast_enum! 
{ #[cfg(feature = "parsing")] pub(crate) mod parsing { - use super::*; - use crate::ext::IdentExt; - use crate::parse::discouraged::Speculative; - use crate::parse::{Parse, ParseBuffer, ParseStream, Result}; + use crate::attr::{self, Attribute}; + use crate::derive; + use crate::error::{Error, Result}; + use crate::expr::Expr; + use crate::ext::IdentExt as _; + use crate::generics::{self, Generics, TypeParamBound}; + use crate::ident::Ident; + use crate::item::{ + FnArg, ForeignItem, ForeignItemFn, ForeignItemMacro, ForeignItemStatic, ForeignItemType, + ImplItem, ImplItemConst, ImplItemFn, ImplItemMacro, ImplItemType, Item, ItemConst, + ItemEnum, ItemExternCrate, ItemFn, ItemForeignMod, ItemImpl, ItemMacro, ItemMod, + ItemStatic, ItemStruct, ItemTrait, ItemTraitAlias, ItemType, ItemUnion, ItemUse, Receiver, + Signature, StaticMutability, TraitItem, TraitItemConst, TraitItemFn, TraitItemMacro, + TraitItemType, UseGlob, UseGroup, UseName, UsePath, UseRename, UseTree, Variadic, + }; + use crate::lifetime::Lifetime; + use crate::lit::LitStr; + use crate::mac::{self, Macro}; + use crate::parse::discouraged::Speculative as _; + use crate::parse::{Parse, ParseBuffer, ParseStream}; + use crate::pat::{Pat, PatType, PatWild}; + use crate::path::Path; + use crate::punctuated::Punctuated; + use crate::restriction::Visibility; + use crate::stmt::Block; + use crate::token; + use crate::ty::{Abi, ReturnType, Type, TypePath, TypeReference}; + use crate::verbatim; + use proc_macro2::TokenStream; - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for Item { fn parse(input: ParseStream) -> Result<Self> { let begin = input.fork(); @@ -912,7 +952,8 @@ pub(crate) mod parsing { let vis: Visibility = ahead.parse()?; let lookahead = ahead.lookahead1(); - let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead) { + let allow_safe = false; + let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead, allow_safe) { let vis: Visibility = input.parse()?; let sig: Signature = input.parse()?; if input.peek(Token![;]) { @@ -1165,7 +1206,11 @@ pub(crate) mod parsing { if input.peek(Token![where]) || input.peek(Token![=]) || input.peek(Token![;]) { break; } - bounds.push_value(input.parse::<TypeParamBound>()?); + bounds.push_value({ + let allow_precise_capture = false; + let allow_const = true; + TypeParamBound::parse_single(input, allow_precise_capture, allow_const)? 
+ }); if input.peek(Token![where]) || input.peek(Token![=]) || input.peek(Token![;]) { break; } @@ -1187,7 +1232,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ItemMacro { fn parse(input: ParseStream) -> Result<Self> { let attrs = input.call(Attribute::parse_outer)?; @@ -1241,7 +1286,7 @@ pub(crate) mod parsing { Ok(Item::Verbatim(verbatim::between(&begin, input))) } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ItemExternCrate { fn parse(input: ParseStream) -> Result<Self> { Ok(ItemExternCrate { @@ -1274,7 +1319,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ItemUse { fn parse(input: ParseStream) -> Result<Self> { let allow_crate_root_in_path = false; @@ -1308,7 +1353,7 @@ pub(crate) mod parsing { })) } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for UseTree { fn parse(input: ParseStream) -> Result<UseTree> { let allow_crate_root_in_path = false; @@ -1371,14 +1416,16 @@ pub(crate) mod parsing { &content, allow_crate_root_in_path && !this_tree_starts_with_crate_root, )? { - Some(tree) => items.push_value(tree), - None => has_any_crate_root_in_path = true, + Some(tree) if !has_any_crate_root_in_path => items.push_value(tree), + _ => has_any_crate_root_in_path = true, } if content.is_empty() { break; } let comma: Token![,] = content.parse()?; - items.push_punct(comma); + if !has_any_crate_root_in_path { + items.push_punct(comma); + } } if has_any_crate_root_in_path { Ok(None) @@ -1390,7 +1437,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ItemStatic { fn parse(input: ParseStream) -> Result<Self> { Ok(ItemStatic { @@ -1408,7 +1455,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ItemConst { fn parse(input: ParseStream) -> Result<Self> { let attrs = input.call(Attribute::parse_outer)?; @@ -1443,34 +1490,52 @@ pub(crate) mod parsing { } } - fn peek_signature(input: ParseStream) -> bool { + fn peek_signature(input: ParseStream, allow_safe: bool) -> bool { let fork = input.fork(); fork.parse::<Option<Token![const]>>().is_ok() && fork.parse::<Option<Token![async]>>().is_ok() - && fork.parse::<Option<Token![unsafe]>>().is_ok() + && ((allow_safe + && token::parsing::peek_keyword(fork.cursor(), "safe") + && token::parsing::keyword(&fork, "safe").is_ok()) + || fork.parse::<Option<Token![unsafe]>>().is_ok()) && fork.parse::<Option<Abi>>().is_ok() && fork.peek(Token![fn]) } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for Signature { fn parse(input: ParseStream) -> Result<Self> { - let constness: Option<Token![const]> = input.parse()?; - let asyncness: Option<Token![async]> = input.parse()?; - let unsafety: Option<Token![unsafe]> = input.parse()?; - let abi: Option<Abi> = input.parse()?; - let fn_token: Token![fn] = input.parse()?; - let ident: Ident = input.parse()?; - let mut generics: Generics = input.parse()?; + let allow_safe = false; + parse_signature(input, allow_safe).map(Option::unwrap) + } + } - let content; - let paren_token = 
parenthesized!(content in input); - let (inputs, variadic) = parse_fn_args(&content)?; + fn parse_signature(input: ParseStream, allow_safe: bool) -> Result<Option<Signature>> { + let constness: Option<Token![const]> = input.parse()?; + let asyncness: Option<Token![async]> = input.parse()?; + let unsafety: Option<Token![unsafe]> = input.parse()?; + let safe = allow_safe + && unsafety.is_none() + && token::parsing::peek_keyword(input.cursor(), "safe"); + if safe { + token::parsing::keyword(input, "safe")?; + } + let abi: Option<Abi> = input.parse()?; + let fn_token: Token![fn] = input.parse()?; + let ident: Ident = input.parse()?; + let mut generics: Generics = input.parse()?; - let output: ReturnType = input.parse()?; - generics.where_clause = input.parse()?; + let content; + let paren_token = parenthesized!(content in input); + let (inputs, variadic) = parse_fn_args(&content)?; - Ok(Signature { + let output: ReturnType = input.parse()?; + generics.where_clause = input.parse()?; + + Ok(if safe { + None + } else { + Some(Signature { constness, asyncness, unsafety, @@ -1483,10 +1548,10 @@ pub(crate) mod parsing { variadic, output, }) - } + }) } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ItemFn { fn parse(input: ParseStream) -> Result<Self> { let outer_attrs = input.call(Attribute::parse_outer)?; @@ -1515,7 +1580,7 @@ pub(crate) mod parsing { }) } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for FnArg { fn parse(input: ParseStream) -> Result<Self> { let allow_variadic = false; @@ -1548,7 +1613,7 @@ pub(crate) mod parsing { // test/ui/rfc-2565-param-attrs/param-attrs-pretty.rs // because the rest of the test case is valuable. 
if input.peek(Ident) && input.peek2(Token![<]) { - let span = input.fork().parse::<Ident>()?.span(); + let span = input.span(); return Ok(FnArgOrVariadic::FnArg(FnArg::Typed(PatType { attrs, pat: Box::new(Pat::Wild(PatWild { @@ -1582,7 +1647,7 @@ pub(crate) mod parsing { }))) } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for Receiver { fn parse(input: ParseStream) -> Result<Self> { let reference = if input.peek(Token![&]) { @@ -1696,7 +1761,7 @@ pub(crate) mod parsing { Ok((args, variadic)) } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ItemMod { fn parse(input: ParseStream) -> Result<Self> { let mut attrs = input.call(Attribute::parse_outer)?; @@ -1745,7 +1810,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ItemForeignMod { fn parse(input: ParseStream) -> Result<Self> { let mut attrs = input.call(Attribute::parse_outer)?; @@ -1770,7 +1835,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ForeignItem { fn parse(input: ParseStream) -> Result<Self> { let begin = input.fork(); @@ -1779,35 +1844,55 @@ pub(crate) mod parsing { let vis: Visibility = ahead.parse()?; let lookahead = ahead.lookahead1(); - let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead) { + let allow_safe = true; + let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead, allow_safe) { let vis: Visibility = input.parse()?; - let sig: Signature = input.parse()?; - if input.peek(token::Brace) { + let sig = parse_signature(input, allow_safe)?; + let has_safe = sig.is_none(); + let has_body = input.peek(token::Brace); + let semi_token: Option<Token![;]> = if has_body { let content; braced!(content in input); content.call(Attribute::parse_inner)?; content.call(Block::parse_within)?; - + None + } else { + Some(input.parse()?) 
+ }; + if has_safe || has_body { Ok(ForeignItem::Verbatim(verbatim::between(&begin, input))) } else { Ok(ForeignItem::Fn(ForeignItemFn { attrs: Vec::new(), vis, - sig, - semi_token: input.parse()?, + sig: sig.unwrap(), + semi_token: semi_token.unwrap(), })) } - } else if lookahead.peek(Token![static]) { + } else if lookahead.peek(Token![static]) + || ((ahead.peek(Token![unsafe]) + || token::parsing::peek_keyword(ahead.cursor(), "safe")) + && ahead.peek2(Token![static])) + { let vis = input.parse()?; + let unsafety: Option<Token![unsafe]> = input.parse()?; + let safe = + unsafety.is_none() && token::parsing::peek_keyword(input.cursor(), "safe"); + if safe { + token::parsing::keyword(input, "safe")?; + } let static_token = input.parse()?; let mutability = input.parse()?; let ident = input.parse()?; let colon_token = input.parse()?; let ty = input.parse()?; - if input.peek(Token![=]) { + let has_value = input.peek(Token![=]); + if has_value { input.parse::<Token![=]>()?; input.parse::<Expr>()?; - input.parse::<Token![;]>()?; + } + let semi_token: Token![;] = input.parse()?; + if unsafety.is_some() || safe || has_value { Ok(ForeignItem::Verbatim(verbatim::between(&begin, input))) } else { Ok(ForeignItem::Static(ForeignItemStatic { @@ -1818,7 +1903,7 @@ pub(crate) mod parsing { ident, colon_token, ty, - semi_token: input.parse()?, + semi_token, })) } } else if lookahead.peek(Token![type]) { @@ -1849,7 +1934,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ForeignItemFn { fn parse(input: ParseStream) -> Result<Self> { let attrs = input.call(Attribute::parse_outer)?; @@ -1865,7 +1950,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ForeignItemStatic { fn parse(input: ParseStream) -> Result<Self> { Ok(ForeignItemStatic { @@ -1881,7 +1966,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ForeignItemType { fn parse(input: ParseStream) -> Result<Self> { Ok(ForeignItemType { @@ -1930,7 +2015,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ForeignItemMacro { fn parse(input: ParseStream) -> Result<Self> { let attrs = input.call(Attribute::parse_outer)?; @@ -1948,7 +2033,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ItemType { fn parse(input: ParseStream) -> Result<Self> { Ok(ItemType { @@ -2002,7 +2087,7 @@ pub(crate) mod parsing { })) } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ItemStruct { fn parse(input: ParseStream) -> Result<Self> { let attrs = input.call(Attribute::parse_outer)?; @@ -2026,7 +2111,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ItemEnum { fn parse(input: ParseStream) -> Result<Self> { let attrs = input.call(Attribute::parse_outer)?; @@ -2050,7 +2135,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ItemUnion { fn parse(input: ParseStream) -> Result<Self> { let attrs 
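Editorial note, not part of the patch: the `allow_safe` plumbing above (in `peek_signature`, `parse_signature`, and the `ForeignItem` parser) recognizes `safe fn` and `safe static` items inside `unsafe extern` blocks; since they have no dedicated syntax-tree nodes yet, they fall back to `ForeignItem::Verbatim`. A sketch of the observable behavior (assumes the `full` feature; the item names are invented):

// --- illustrative example, not part of the patch ---
use syn::{ForeignItem, Item};

fn main() {
    let item: Item = syn::parse_str(
        r#"unsafe extern "C" { safe fn sqrt(x: f64) -> f64; static LIMIT: u32; }"#,
    )
    .unwrap();
    let Item::ForeignMod(block) = item else { unreachable!() };

    // `safe fn` has no dedicated node, so it round-trips as raw tokens, while
    // the plain `static` still yields a structured ForeignItemStatic.
    assert!(matches!(block.items[0], ForeignItem::Verbatim(_)));
    assert!(matches!(block.items[1], ForeignItem::Static(_)));
}
// --- end example ---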
= input.call(Attribute::parse_outer)?; @@ -2101,7 +2186,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ItemTrait { fn parse(input: ParseStream) -> Result<Self> { let outer_attrs = input.call(Attribute::parse_outer)?; @@ -2142,7 +2227,11 @@ pub(crate) mod parsing { if input.peek(Token![where]) || input.peek(token::Brace) { break; } - supertraits.push_value(input.parse()?); + supertraits.push_value({ + let allow_precise_capture = false; + let allow_const = true; + TypeParamBound::parse_single(input, allow_precise_capture, allow_const)? + }); if input.peek(Token![where]) || input.peek(token::Brace) { break; } @@ -2176,7 +2265,7 @@ pub(crate) mod parsing { }) } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ItemTraitAlias { fn parse(input: ParseStream) -> Result<Self> { let (attrs, vis, trait_token, ident, generics) = parse_start_of_trait_alias(input)?; @@ -2210,7 +2299,11 @@ pub(crate) mod parsing { if input.peek(Token![where]) || input.peek(Token![;]) { break; } - bounds.push_value(input.parse()?); + bounds.push_value({ + let allow_precise_capture = false; + let allow_const = false; + TypeParamBound::parse_single(input, allow_precise_capture, allow_const)? + }); if input.peek(Token![where]) || input.peek(Token![;]) { break; } @@ -2232,7 +2325,7 @@ pub(crate) mod parsing { }) } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for TraitItem { fn parse(input: ParseStream) -> Result<Self> { let begin = input.fork(); @@ -2242,7 +2335,8 @@ pub(crate) mod parsing { let ahead = input.fork(); let lookahead = ahead.lookahead1(); - let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead) { + let allow_safe = false; + let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead, allow_safe) { input.parse().map(TraitItem::Fn) } else if lookahead.peek(Token![const]) { let const_token: Token![const] = ahead.parse()?; @@ -2317,7 +2411,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for TraitItemConst { fn parse(input: ParseStream) -> Result<Self> { let attrs = input.call(Attribute::parse_outer)?; @@ -2354,7 +2448,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for TraitItemFn { fn parse(input: ParseStream) -> Result<Self> { let mut attrs = input.call(Attribute::parse_outer)?; @@ -2383,7 +2477,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for TraitItemType { fn parse(input: ParseStream) -> Result<Self> { let attrs = input.call(Attribute::parse_outer)?; @@ -2440,7 +2534,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for TraitItemMacro { fn parse(input: ParseStream) -> Result<Self> { let attrs = input.call(Attribute::parse_outer)?; @@ -2458,7 +2552,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ItemImpl { fn parse(input: ParseStream) -> Result<Self> { let allow_verbatim_impl = false; @@ -2473,15 +2567,7 @@ pub(crate) mod 
parsing { let unsafety: Option<Token![unsafe]> = input.parse()?; let impl_token: Token![impl] = input.parse()?; - let has_generics = input.peek(Token![<]) - && (input.peek2(Token![>]) - || input.peek2(Token![#]) - || (input.peek2(Ident) || input.peek2(Lifetime)) - && (input.peek3(Token![:]) - || input.peek3(Token![,]) - || input.peek3(Token![>]) - || input.peek3(Token![=])) - || input.peek2(Token![const])); + let has_generics = generics::parsing::choose_generics_over_qpath(input); let mut generics: Generics = if has_generics { input.parse()? } else { @@ -2495,7 +2581,6 @@ pub(crate) mod parsing { input.parse::<Token![const]>()?; } - let begin = input.fork(); let polarity = if input.peek(Token![!]) && !input.peek2(token::Brace) { Some(input.parse::<Token![!]>()?) } else { @@ -2533,13 +2618,14 @@ pub(crate) mod parsing { trait_ = None; } self_ty = input.parse()?; + } else if let Some(polarity) = polarity { + return Err(Error::new( + polarity.span, + "inherent impls cannot be negative", + )); } else { trait_ = None; - self_ty = if polarity.is_none() { - first_ty - } else { - Type::Verbatim(verbatim::between(&begin, input)) - }; + self_ty = first_ty; } generics.where_clause = input.parse()?; @@ -2570,7 +2656,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ImplItem { fn parse(input: ParseStream) -> Result<Self> { let begin = input.fork(); @@ -2587,7 +2673,8 @@ pub(crate) mod parsing { None }; - let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead) { + let allow_safe = false; + let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead, allow_safe) { let allow_omitted_body = true; if let Some(item) = parse_impl_item_fn(input, allow_omitted_body)? 
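Editorial note, not part of the patch: one behavioral change worth calling out in the `ItemImpl` hunk above is that a negative polarity on an inherent impl used to fall back to `Type::Verbatim` and is now rejected with a dedicated error. A sketch (assumes the `full` feature; `MyType` is a placeholder):

// --- illustrative example, not part of the patch ---
use syn::ItemImpl;

fn main() {
    // Negative trait impls still parse normally.
    let neg_trait: ItemImpl = syn::parse_str("impl !Send for MyType {}").unwrap();
    assert!(neg_trait.trait_.as_ref().unwrap().0.is_some());

    // A negative *inherent* impl is now a hard parse error instead of a
    // Verbatim self type.
    let err = syn::parse_str::<ItemImpl>("impl !MyType {}").unwrap_err();
    assert!(err.to_string().contains("inherent impls cannot be negative"));
}
// --- end example ---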
{ Ok(ImplItem::Fn(item)) @@ -2665,7 +2752,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ImplItemConst { fn parse(input: ParseStream) -> Result<Self> { let attrs = input.call(Attribute::parse_outer)?; @@ -2702,7 +2789,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ImplItemFn { fn parse(input: ParseStream) -> Result<Self> { let allow_omitted_body = false; @@ -2743,7 +2830,7 @@ pub(crate) mod parsing { })) } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ImplItemType { fn parse(input: ParseStream) -> Result<Self> { let attrs = input.call(Attribute::parse_outer)?; @@ -2805,7 +2892,7 @@ pub(crate) mod parsing { })) } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ImplItemMacro { fn parse(input: ParseStream) -> Result<Self> { let attrs = input.call(Attribute::parse_outer)?; @@ -2832,16 +2919,7 @@ pub(crate) mod parsing { } } - impl MacroDelimiter { - pub(crate) fn is_brace(&self) -> bool { - match self { - MacroDelimiter::Brace(_) => true, - MacroDelimiter::Paren(_) | MacroDelimiter::Bracket(_) => false, - } - } - } - - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for StaticMutability { fn parse(input: ParseStream) -> Result<Self> { let mut_token: Option<Token![mut]> = input.parse()?; @@ -2852,13 +2930,25 @@ pub(crate) mod parsing { #[cfg(feature = "printing")] mod printing { - use super::*; use crate::attr::FilterAttrs; + use crate::data::Fields; + use crate::item::{ + ForeignItemFn, ForeignItemMacro, ForeignItemStatic, ForeignItemType, ImplItemConst, + ImplItemFn, ImplItemMacro, ImplItemType, ItemConst, ItemEnum, ItemExternCrate, ItemFn, + ItemForeignMod, ItemImpl, ItemMacro, ItemMod, ItemStatic, ItemStruct, ItemTrait, + ItemTraitAlias, ItemType, ItemUnion, ItemUse, Receiver, Signature, StaticMutability, + TraitItemConst, TraitItemFn, TraitItemMacro, TraitItemType, UseGlob, UseGroup, UseName, + UsePath, UseRename, Variadic, + }; + use crate::mac::MacroDelimiter; + use crate::path; + use crate::path::printing::PathStyle; use crate::print::TokensOrDefault; + use crate::ty::Type; use proc_macro2::TokenStream; - use quote::{ToTokens, TokenStreamExt}; + use quote::{ToTokens, TokenStreamExt as _}; - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ItemExternCrate { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); @@ -2874,7 +2964,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ItemUse { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); @@ -2886,7 +2976,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ItemStatic { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); @@ -2902,7 +2992,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ItemConst { fn to_tokens(&self, tokens: &mut 
TokenStream) { tokens.append_all(self.attrs.outer()); @@ -2917,7 +3007,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ItemFn { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); @@ -2930,7 +3020,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ItemMod { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); @@ -2949,7 +3039,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ItemForeignMod { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); @@ -2962,7 +3052,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ItemType { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); @@ -2977,7 +3067,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ItemEnum { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); @@ -2992,7 +3082,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ItemStruct { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); @@ -3018,7 +3108,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ItemUnion { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); @@ -3031,7 +3121,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ItemTrait { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); @@ -3053,7 +3143,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ItemTraitAlias { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); @@ -3068,7 +3158,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ItemImpl { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); @@ -3090,11 +3180,11 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ItemMacro { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); - self.mac.path.to_tokens(tokens); + path::printing::print_path(tokens, &self.mac.path, PathStyle::Mod); self.mac.bang_token.to_tokens(tokens); self.ident.to_tokens(tokens); match &self.mac.delimiter { @@ -3112,7 +3202,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for UsePath { fn to_tokens(&self, tokens: &mut TokenStream) { self.ident.to_tokens(tokens); @@ -3121,14 +3211,14 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + 
#[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for UseName { fn to_tokens(&self, tokens: &mut TokenStream) { self.ident.to_tokens(tokens); } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for UseRename { fn to_tokens(&self, tokens: &mut TokenStream) { self.ident.to_tokens(tokens); @@ -3137,14 +3227,14 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for UseGlob { fn to_tokens(&self, tokens: &mut TokenStream) { self.star_token.to_tokens(tokens); } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for UseGroup { fn to_tokens(&self, tokens: &mut TokenStream) { self.brace_token.surround(tokens, |tokens| { @@ -3153,7 +3243,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for TraitItemConst { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); @@ -3169,7 +3259,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for TraitItemFn { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); @@ -3188,7 +3278,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for TraitItemType { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); @@ -3208,7 +3298,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for TraitItemMacro { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); @@ -3217,7 +3307,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ImplItemConst { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); @@ -3233,7 +3323,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ImplItemFn { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); @@ -3247,7 +3337,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ImplItemType { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); @@ -3263,7 +3353,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ImplItemMacro { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); @@ -3272,7 +3362,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ForeignItemFn { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); @@ -3282,7 +3372,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ForeignItemStatic { fn to_tokens(&self, tokens: &mut TokenStream) { 
tokens.append_all(self.attrs.outer()); @@ -3296,7 +3386,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ForeignItemType { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); @@ -3309,7 +3399,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ForeignItemMacro { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); @@ -3318,7 +3408,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for Signature { fn to_tokens(&self, tokens: &mut TokenStream) { self.constness.to_tokens(tokens); @@ -3342,7 +3432,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for Receiver { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); @@ -3375,7 +3465,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for Variadic { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); @@ -3388,7 +3478,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for StaticMutability { fn to_tokens(&self, tokens: &mut TokenStream) { match self { diff --git a/vendor/syn/src/lib.rs b/vendor/syn/src/lib.rs index a74d4b11..58af76d5 100644 --- a/vendor/syn/src/lib.rs +++ b/vendor/syn/src/lib.rs @@ -249,9 +249,11 @@ //! dynamic library libproc_macro from rustc toolchain. // Syn types in rustdoc of other crates get linked to here. 
-#![doc(html_root_url = "https://docs.rs/syn/2.0.38")] -#![cfg_attr(doc_cfg, feature(doc_cfg))] +#![doc(html_root_url = "https://docs.rs/syn/2.0.111")] +#![cfg_attr(docsrs, feature(doc_cfg), doc(auto_cfg = false))] +#![deny(unsafe_op_in_unsafe_fn)] #![allow(non_camel_case_types)] +#![cfg_attr(not(check_cfg), allow(unexpected_cfgs))] #![allow( clippy::bool_to_int_with_if, clippy::cast_lossless, @@ -260,18 +262,23 @@ clippy::cast_ptr_alignment, clippy::default_trait_access, clippy::derivable_impls, + clippy::diverging_sub_expression, clippy::doc_markdown, + clippy::elidable_lifetime_names, + clippy::enum_glob_use, clippy::expl_impl_clone_on_copy, clippy::explicit_auto_deref, + clippy::fn_params_excessive_bools, clippy::if_not_else, clippy::inherent_to_string, + clippy::into_iter_without_iter, clippy::items_after_statements, clippy::large_enum_variant, clippy::let_underscore_untyped, // https://github.com/rust-lang/rust-clippy/issues/10410 clippy::manual_assert, clippy::manual_let_else, + clippy::manual_map, clippy::match_like_matches_macro, - clippy::match_on_vec_items, clippy::match_same_arms, clippy::match_wildcard_for_single_variants, // clippy bug: https://github.com/rust-lang/rust-clippy/issues/6984 clippy::missing_errors_doc, @@ -279,22 +286,31 @@ clippy::module_name_repetitions, clippy::must_use_candidate, clippy::needless_doctest_main, + clippy::needless_lifetimes, clippy::needless_pass_by_value, + clippy::needless_update, clippy::never_loop, clippy::range_plus_one, clippy::redundant_else, + clippy::ref_option, clippy::return_self_not_must_use, clippy::similar_names, clippy::single_match_else, + clippy::struct_excessive_bools, clippy::too_many_arguments, clippy::too_many_lines, clippy::trivially_copy_pass_by_ref, + clippy::unconditional_recursion, // https://github.com/rust-lang/rust-clippy/issues/12133 + clippy::uninhabited_references, clippy::uninlined_format_args, clippy::unnecessary_box_returns, clippy::unnecessary_unwrap, clippy::used_underscore_binding, clippy::wildcard_imports, )] +#![allow(unknown_lints, mismatched_lifetime_syntaxes)] + +extern crate self as syn; #[cfg(feature = "proc-macro")] extern crate proc_macro; @@ -312,14 +328,21 @@ pub mod token; #[cfg(any(feature = "full", feature = "derive"))] mod attr; #[cfg(any(feature = "full", feature = "derive"))] +#[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub use crate::attr::{AttrStyle, Attribute, Meta, MetaList, MetaNameValue}; mod bigint; #[cfg(feature = "parsing")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] +#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] pub mod buffer; +#[cfg(any( + all(feature = "parsing", feature = "full"), + all(feature = "printing", any(feature = "full", feature = "derive")), +))] +mod classify; + mod custom_keyword; mod custom_punctuation; @@ -327,11 +350,13 @@ mod custom_punctuation; #[cfg(any(feature = "full", feature = "derive"))] mod data; #[cfg(any(feature = "full", feature = "derive"))] +#[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub use crate::data::{Field, Fields, FieldsNamed, FieldsUnnamed, Variant}; #[cfg(any(feature = "full", feature = "derive"))] mod derive; #[cfg(feature = "derive")] +#[cfg_attr(docsrs, doc(cfg(feature = "derive")))] pub use crate::derive::{Data, DataEnum, DataStruct, DataUnion, DeriveInput}; mod drops; @@ -342,34 +367,51 @@ pub use crate::error::{Error, Result}; #[cfg(any(feature = "full", feature = "derive"))] mod expr; #[cfg(feature = "full")] -pub use crate::expr::{Arm, FieldValue, Label, 
RangeLimits}; +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub use crate::expr::{Arm, Label, PointerMutability, RangeLimits}; +#[cfg(any(feature = "full", feature = "derive"))] +#[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] +pub use crate::expr::{ + Expr, ExprBinary, ExprCall, ExprCast, ExprField, ExprIndex, ExprLit, ExprMacro, ExprMethodCall, + ExprParen, ExprPath, ExprReference, ExprStruct, ExprUnary, FieldValue, Index, Member, +}; #[cfg(any(feature = "full", feature = "derive"))] +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub use crate::expr::{ - Expr, ExprArray, ExprAssign, ExprAsync, ExprAwait, ExprBinary, ExprBlock, ExprBreak, ExprCall, - ExprCast, ExprClosure, ExprConst, ExprContinue, ExprField, ExprForLoop, ExprGroup, ExprIf, - ExprIndex, ExprInfer, ExprLet, ExprLit, ExprLoop, ExprMacro, ExprMatch, ExprMethodCall, - ExprParen, ExprPath, ExprRange, ExprReference, ExprRepeat, ExprReturn, ExprStruct, ExprTry, - ExprTryBlock, ExprTuple, ExprUnary, ExprUnsafe, ExprWhile, ExprYield, Index, Member, + ExprArray, ExprAssign, ExprAsync, ExprAwait, ExprBlock, ExprBreak, ExprClosure, ExprConst, + ExprContinue, ExprForLoop, ExprGroup, ExprIf, ExprInfer, ExprLet, ExprLoop, ExprMatch, + ExprRange, ExprRawAddr, ExprRepeat, ExprReturn, ExprTry, ExprTryBlock, ExprTuple, ExprUnsafe, + ExprWhile, ExprYield, }; -#[cfg(feature = "parsing")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] pub mod ext; #[cfg(feature = "full")] mod file; #[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub use crate::file::File; +#[cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))] +mod fixup; + #[cfg(any(feature = "full", feature = "derive"))] mod generics; #[cfg(any(feature = "full", feature = "derive"))] +#[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub use crate::generics::{ BoundLifetimes, ConstParam, GenericParam, Generics, LifetimeParam, PredicateLifetime, PredicateType, TraitBound, TraitBoundModifier, TypeParam, TypeParamBound, WhereClause, WherePredicate, }; +#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] +pub use crate::generics::{CapturedParam, PreciseCapture}; #[cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))] +#[cfg_attr( + docsrs, + doc(cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))) +)] pub use crate::generics::{ImplGenerics, Turbofish, TypeGenerics}; mod ident; @@ -379,6 +421,7 @@ pub use crate::ident::Ident; #[cfg(feature = "full")] mod item; #[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub use crate::item::{ FnArg, ForeignItem, ForeignItemFn, ForeignItemMacro, ForeignItemStatic, ForeignItemType, ImplItem, ImplItemConst, ImplItemFn, ImplItemMacro, ImplItemType, ImplRestriction, Item, @@ -393,9 +436,11 @@ mod lifetime; pub use crate::lifetime::Lifetime; mod lit; +#[doc(hidden)] // https://github.com/dtolnay/syn/issues/1566 +pub use crate::lit::StrStyle; #[doc(inline)] pub use crate::lit::{ - Lit, LitBool, LitByte, LitByteStr, LitChar, LitFloat, LitInt, LitStr, StrStyle, + Lit, LitBool, LitByte, LitByteStr, LitCStr, LitChar, LitFloat, LitInt, LitStr, }; #[cfg(feature = "parsing")] @@ -404,11 +449,12 @@ mod lookahead; #[cfg(any(feature = "full", feature = "derive"))] mod mac; #[cfg(any(feature = "full", feature = "derive"))] +#[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub use crate::mac::{Macro, MacroDelimiter}; #[cfg(all(feature = "parsing", any(feature 
= "full", feature = "derive")))] #[cfg_attr( - doc_cfg, + docsrs, doc(cfg(all(feature = "parsing", any(feature = "full", feature = "derive")))) )] pub mod meta; @@ -416,10 +462,11 @@ pub mod meta; #[cfg(any(feature = "full", feature = "derive"))] mod op; #[cfg(any(feature = "full", feature = "derive"))] +#[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub use crate::op::{BinOp, UnOp}; #[cfg(feature = "parsing")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] +#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] pub mod parse; #[cfg(all(feature = "parsing", feature = "proc-macro"))] @@ -431,24 +478,27 @@ mod parse_quote; #[cfg(feature = "full")] mod pat; #[cfg(feature = "full")] -pub use crate::expr::{ - ExprConst as PatConst, ExprLit as PatLit, ExprMacro as PatMacro, ExprPath as PatPath, - ExprRange as PatRange, -}; -#[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub use crate::pat::{ - FieldPat, Pat, PatIdent, PatOr, PatParen, PatReference, PatRest, PatSlice, PatStruct, PatTuple, - PatTupleStruct, PatType, PatWild, + FieldPat, Pat, PatConst, PatIdent, PatLit, PatMacro, PatOr, PatParen, PatPath, PatRange, + PatReference, PatRest, PatSlice, PatStruct, PatTuple, PatTupleStruct, PatType, PatWild, }; #[cfg(any(feature = "full", feature = "derive"))] mod path; #[cfg(any(feature = "full", feature = "derive"))] +#[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub use crate::path::{ AngleBracketedGenericArguments, AssocConst, AssocType, Constraint, GenericArgument, ParenthesizedGenericArguments, Path, PathArguments, PathSegment, QSelf, }; +#[cfg(all( + any(feature = "full", feature = "derive"), + any(feature = "parsing", feature = "printing") +))] +mod precedence; + #[cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))] mod print; @@ -457,19 +507,24 @@ pub mod punctuated; #[cfg(any(feature = "full", feature = "derive"))] mod restriction; #[cfg(any(feature = "full", feature = "derive"))] +#[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub use crate::restriction::{FieldMutability, VisRestricted, Visibility}; mod sealed; +#[cfg(all(feature = "parsing", feature = "derive", not(feature = "full")))] +mod scan_expr; + mod span; #[cfg(all(feature = "parsing", feature = "printing"))] -#[cfg_attr(doc_cfg, doc(cfg(all(feature = "parsing", feature = "printing"))))] +#[cfg_attr(docsrs, doc(cfg(all(feature = "parsing", feature = "printing"))))] pub mod spanned; #[cfg(feature = "full")] mod stmt; #[cfg(feature = "full")] +#[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub use crate::stmt::{Block, Local, LocalInit, Stmt, StmtMacro}; mod thread; @@ -480,6 +535,7 @@ mod tt; #[cfg(any(feature = "full", feature = "derive"))] mod ty; #[cfg(any(feature = "full", feature = "derive"))] +#[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub use crate::ty::{ Abi, BareFnArg, BareVariadic, ReturnType, Type, TypeArray, TypeBareFn, TypeGroup, TypeImplTrait, TypeInfer, TypeMacro, TypeNever, TypeParen, TypePath, TypePtr, TypeReference, @@ -492,6 +548,7 @@ mod verbatim; #[cfg(all(feature = "parsing", feature = "full"))] mod whitespace; +#[rustfmt::skip] // https://github.com/rust-lang/rustfmt/issues/6176 mod gen { /// Syntax tree traversal to transform the nodes of an owned syntax tree. 
/// @@ -574,7 +631,7 @@ mod gen { /// } /// ``` #[cfg(feature = "fold")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "fold")))] + #[cfg_attr(docsrs, doc(cfg(feature = "fold")))] #[rustfmt::skip] pub mod fold; @@ -693,7 +750,7 @@ mod gen { /// } /// ``` #[cfg(feature = "visit")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "visit")))] + #[cfg_attr(docsrs, doc(cfg(feature = "visit")))] #[rustfmt::skip] pub mod visit; @@ -787,7 +844,7 @@ mod gen { /// } /// ``` #[cfg(feature = "visit-mut")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "visit-mut")))] + #[cfg_attr(docsrs, doc(cfg(feature = "visit-mut")))] #[rustfmt::skip] pub mod visit_mut; @@ -806,12 +863,19 @@ mod gen { #[cfg(feature = "extra-traits")] #[rustfmt::skip] mod hash; - - #[cfg(any(feature = "full", feature = "derive"))] - #[path = "../gen_helper.rs"] - mod helper; } -pub use crate::gen::*; + +#[cfg(feature = "fold")] +#[cfg_attr(docsrs, doc(cfg(feature = "fold")))] +pub use crate::gen::fold; + +#[cfg(feature = "visit")] +#[cfg_attr(docsrs, doc(cfg(feature = "visit")))] +pub use crate::gen::visit; + +#[cfg(feature = "visit-mut")] +#[cfg_attr(docsrs, doc(cfg(feature = "visit-mut")))] +pub use crate::gen::visit_mut; // Not public API. #[doc(hidden)] @@ -831,42 +895,16 @@ pub mod __private; /// /// [`syn::parse2`]: parse2 /// -/// # Examples -/// -/// ``` -/// # extern crate proc_macro; -/// # -/// use proc_macro::TokenStream; -/// use quote::quote; -/// use syn::DeriveInput; -/// -/// # const IGNORE_TOKENS: &str = stringify! { -/// #[proc_macro_derive(MyMacro)] -/// # }; -/// pub fn my_macro(input: TokenStream) -> TokenStream { -/// // Parse the tokens into a syntax tree -/// let ast: DeriveInput = syn::parse(input).unwrap(); -/// -/// // Build the output, possibly using quasi-quotation -/// let expanded = quote! { -/// /* ... */ -/// }; -/// -/// // Convert into a token stream and return it -/// expanded.into() -/// } -/// ``` +/// This function enforces that the input is fully parsed. If there are any +/// unparsed tokens at the end of the stream, an error is returned. #[cfg(all(feature = "parsing", feature = "proc-macro"))] -#[cfg_attr(doc_cfg, doc(cfg(all(feature = "parsing", feature = "proc-macro"))))] +#[cfg_attr(docsrs, doc(cfg(all(feature = "parsing", feature = "proc-macro"))))] pub fn parse<T: parse::Parse>(tokens: proc_macro::TokenStream) -> Result<T> { parse::Parser::parse(T::parse, tokens) } /// Parse a proc-macro2 token stream into the chosen syntax tree node. /// -/// This function will check that the input is fully parsed. If there are -/// any unparsed tokens at the end of the stream, an error is returned. -/// /// This function parses a `proc_macro2::TokenStream` which is commonly useful /// when the input comes from a node of the Syn syntax tree, for example the /// body tokens of a [`Macro`] node. When in a procedural macro parsing the @@ -874,14 +912,20 @@ pub fn parse<T: parse::Parse>(tokens: proc_macro::TokenStream) -> Result<T> { /// instead. /// /// [`syn::parse`]: parse() +/// +/// This function enforces that the input is fully parsed. If there are any +/// unparsed tokens at the end of the stream, an error is returned. #[cfg(feature = "parsing")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] +#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] pub fn parse2<T: parse::Parse>(tokens: proc_macro2::TokenStream) -> Result<T> { parse::Parser::parse2(T::parse, tokens) } /// Parse a string of Rust code into the chosen syntax tree node. /// +/// This function enforces that the input is fully parsed. 
If there are any +/// unparsed tokens at the end of the stream, an error is returned. +/// /// # Hygiene /// /// Every span in the resulting syntax tree will be set to resolve at the macro @@ -902,13 +946,11 @@ pub fn parse2<T: parse::Parse>(tokens: proc_macro2::TokenStream) -> Result<T> { /// # run().unwrap(); /// ``` #[cfg(feature = "parsing")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] +#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] pub fn parse_str<T: parse::Parse>(s: &str) -> Result<T> { parse::Parser::parse_str(T::parse, s) } -// FIXME the name parse_file makes it sound like you might pass in a path to a -// file, rather than the content. /// Parse the content of a file of Rust code. /// /// This is different from `syn::parse_str::<File>(content)` in two ways: @@ -922,14 +964,11 @@ pub fn parse_str<T: parse::Parse>(s: &str) -> Result<T> { /// /// ```no_run /// use std::error::Error; -/// use std::fs::File; +/// use std::fs; /// use std::io::Read; /// /// fn run() -> Result<(), Box<dyn Error>> { -/// let mut file = File::open("path/to/code.rs")?; -/// let mut content = String::new(); -/// file.read_to_string(&mut content)?; -/// +/// let content = fs::read_to_string("path/to/code.rs")?; /// let ast = syn::parse_file(&content)?; /// if let Some(shebang) = ast.shebang { /// println!("{}", shebang); @@ -942,7 +981,7 @@ pub fn parse_str<T: parse::Parse>(s: &str) -> Result<T> { /// # run().unwrap(); /// ``` #[cfg(all(feature = "parsing", feature = "full"))] -#[cfg_attr(doc_cfg, doc(cfg(all(feature = "parsing", feature = "full"))))] +#[cfg_attr(docsrs, doc(cfg(all(feature = "parsing", feature = "full"))))] pub fn parse_file(mut content: &str) -> Result<File> { // Strip the BOM if it is present const BOM: &str = "\u{feff}"; diff --git a/vendor/syn/src/lifetime.rs b/vendor/syn/src/lifetime.rs index 29f4cfdb..248af5aa 100644 --- a/vendor/syn/src/lifetime.rs +++ b/vendor/syn/src/lifetime.rs @@ -1,11 +1,10 @@ +#[cfg(feature = "parsing")] +use crate::lookahead; use proc_macro2::{Ident, Span}; use std::cmp::Ordering; use std::fmt::{self, Display}; use std::hash::{Hash, Hasher}; -#[cfg(feature = "parsing")] -use crate::lookahead; - /// A Rust lifetime: `'a`. /// /// Lifetime names must conform to the following rules: @@ -123,10 +122,11 @@ pub_if_not_doc! 
{ #[cfg(feature = "parsing")] pub(crate) mod parsing { - use super::*; - use crate::parse::{Parse, ParseStream, Result}; + use crate::error::Result; + use crate::lifetime::Lifetime; + use crate::parse::{Parse, ParseStream}; - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for Lifetime { fn parse(input: ParseStream) -> Result<Self> { input.step(|cursor| { @@ -140,16 +140,15 @@ pub(crate) mod parsing { #[cfg(feature = "printing")] mod printing { - use super::*; + use crate::ext::PunctExt as _; + use crate::lifetime::Lifetime; use proc_macro2::{Punct, Spacing, TokenStream}; - use quote::{ToTokens, TokenStreamExt}; + use quote::{ToTokens, TokenStreamExt as _}; - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for Lifetime { fn to_tokens(&self, tokens: &mut TokenStream) { - let mut apostrophe = Punct::new('\'', Spacing::Joint); - apostrophe.set_span(self.apostrophe); - tokens.append(apostrophe); + tokens.append(Punct::new_spanned('\'', Spacing::Joint, self.apostrophe)); self.ident.to_tokens(tokens); } } diff --git a/vendor/syn/src/lit.rs b/vendor/syn/src/lit.rs index f7426ce8..369c3a12 100644 --- a/vendor/syn/src/lit.rs +++ b/vendor/syn/src/lit.rs @@ -1,4 +1,6 @@ #[cfg(feature = "parsing")] +use crate::ext::TokenStreamExt as _; +#[cfg(feature = "parsing")] use crate::lookahead; #[cfg(feature = "parsing")] use crate::parse::{Parse, Parser}; @@ -6,6 +8,7 @@ use crate::{Error, Result}; use proc_macro2::{Ident, Literal, Span}; #[cfg(feature = "parsing")] use proc_macro2::{TokenStream, TokenTree}; +use std::ffi::{CStr, CString}; use std::fmt::{self, Display}; #[cfg(feature = "extra-traits")] use std::hash::{Hash, Hasher}; @@ -18,7 +21,7 @@ ast_enum_of_structs! { /// /// This type is a [syntax tree enum]. /// - /// [syntax tree enum]: crate::Expr#syntax-tree-enums + /// [syntax tree enum]: crate::expr::Expr#syntax-tree-enums #[non_exhaustive] pub enum Lit { /// A UTF-8 string literal: `"foo"`. @@ -27,6 +30,9 @@ ast_enum_of_structs! { /// A byte string literal: `b"foo"`. ByteStr(LitByteStr), + /// A nul-terminated C-string literal: `c"foo"`. + CStr(LitCStr), + /// A byte literal: `b'f'`. Byte(LitByte), @@ -63,6 +69,13 @@ ast_struct! { } } +ast_struct! { + /// A nul-terminated C-string literal: `c"foo"`. + pub struct LitCStr { + repr: Box<LitRepr>, + } +} + ast_struct! { /// A byte literal: `b'f'`. pub struct LitByte { @@ -132,7 +145,7 @@ impl LitStr { pub fn value(&self) -> String { let repr = self.repr.token.to_string(); - let (value, _suffix) = value::parse_lit_str(&repr); + let (value, _suffix) = value::parse_lit_str(&repr).unwrap(); String::from(value) } @@ -168,7 +181,7 @@ impl LitStr { /// } /// ``` #[cfg(feature = "parsing")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] pub fn parse<T: Parse>(&self) -> Result<T> { self.parse_with(T::parse) } @@ -198,16 +211,17 @@ impl LitStr { /// # } /// ``` #[cfg(feature = "parsing")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] pub fn parse_with<F: Parser>(&self, parser: F) -> Result<F::Output> { use proc_macro2::Group; // Token stream with every span replaced by the given one. 
fn respan_token_stream(stream: TokenStream, span: Span) -> TokenStream { - stream - .into_iter() - .map(|token| respan_token_tree(token, span)) - .collect() + let mut tokens = TokenStream::new(); + for token in stream { + tokens.append(respan_token_tree(token, span)); + } + tokens } // Token tree with every span replaced by the given one. @@ -225,10 +239,11 @@ impl LitStr { // Parse string literal into a token stream with every span equal to the // original literal's span. + let span = self.span(); let mut tokens = TokenStream::from_str(&self.value())?; - tokens = respan_token_stream(tokens, self.span()); + tokens = respan_token_stream(tokens, span); - let result = parser.parse2(tokens)?; + let result = crate::parse::parse_scoped(parser, span, tokens)?; let suffix = self.suffix(); if !suffix.is_empty() { @@ -272,7 +287,42 @@ impl LitByteStr { pub fn value(&self) -> Vec<u8> { let repr = self.repr.token.to_string(); - let (value, _suffix) = value::parse_lit_byte_str(&repr); + let (value, _suffix) = value::parse_lit_byte_str(&repr).unwrap(); + value + } + + pub fn span(&self) -> Span { + self.repr.token.span() + } + + pub fn set_span(&mut self, span: Span) { + self.repr.token.set_span(span); + } + + pub fn suffix(&self) -> &str { + &self.repr.suffix + } + + pub fn token(&self) -> Literal { + self.repr.token.clone() + } +} + +impl LitCStr { + pub fn new(value: &CStr, span: Span) -> Self { + let mut token = Literal::c_string(value); + token.set_span(span); + LitCStr { + repr: Box::new(LitRepr { + token, + suffix: Box::<str>::default(), + }), + } + } + + pub fn value(&self) -> CString { + let repr = self.repr.token.to_string(); + let (value, _suffix) = value::parse_lit_c_str(&repr).unwrap(); value } @@ -307,7 +357,7 @@ impl LitByte { pub fn value(&self) -> u8 { let repr = self.repr.token.to_string(); - let (value, _suffix) = value::parse_lit_byte(&repr); + let (value, _suffix) = value::parse_lit_byte(&repr).unwrap(); value } @@ -342,7 +392,7 @@ impl LitChar { pub fn value(&self) -> char { let repr = self.repr.token.to_string(); - let (value, _suffix) = value::parse_lit_char(&repr); + let (value, _suffix) = value::parse_lit_char(&repr).unwrap(); value } @@ -364,10 +414,11 @@ impl LitChar { } impl LitInt { + #[track_caller] pub fn new(repr: &str, span: Span) -> Self { let (digits, suffix) = match value::parse_lit_int(repr) { Some(parse) => parse, - None => panic!("Not an integer literal: `{}`", repr), + None => panic!("not an integer literal: `{}`", repr), }; let mut token: Literal = repr.parse().unwrap(); @@ -435,6 +486,7 @@ impl LitInt { } impl From<Literal> for LitInt { + #[track_caller] fn from(token: Literal) -> Self { let repr = token.to_string(); if let Some((digits, suffix)) = value::parse_lit_int(&repr) { @@ -446,7 +498,7 @@ impl From<Literal> for LitInt { }), } } else { - panic!("Not an integer literal: `{}`", repr); + panic!("not an integer literal: `{}`", repr); } } } @@ -458,10 +510,11 @@ impl Display for LitInt { } impl LitFloat { + #[track_caller] pub fn new(repr: &str, span: Span) -> Self { let (digits, suffix) = match value::parse_lit_float(repr) { Some(parse) => parse, - None => panic!("Not a float literal: `{}`", repr), + None => panic!("not a float literal: `{}`", repr), }; let mut token: Literal = repr.parse().unwrap(); @@ -507,6 +560,7 @@ impl LitFloat { } impl From<Literal> for LitFloat { + #[track_caller] fn from(token: Literal) -> Self { let repr = token.to_string(); if let Some((digits, suffix)) = value::parse_lit_float(&repr) { @@ -518,7 +572,7 @@ impl From<Literal> for 
LitFloat { }), } } else { - panic!("Not a float literal: `{}`", repr); + panic!("not a float literal: `{}`", repr); } } } @@ -554,145 +608,140 @@ impl LitBool { #[cfg(feature = "extra-traits")] mod debug_impls { - use super::*; + use crate::lit::{LitBool, LitByte, LitByteStr, LitCStr, LitChar, LitFloat, LitInt, LitStr}; use std::fmt::{self, Debug}; - #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] impl Debug for LitStr { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl LitStr { - pub(crate) fn debug( - &self, - formatter: &mut fmt::Formatter, - name: &str, - ) -> fmt::Result { - formatter - .debug_struct(name) - .field("token", &format_args!("{}", self.repr.token)) - .finish() - } - } self.debug(formatter, "LitStr") } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + impl LitStr { + pub(crate) fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + formatter + .debug_struct(name) + .field("token", &format_args!("{}", self.repr.token)) + .finish() + } + } + + #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] impl Debug for LitByteStr { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl LitByteStr { - pub(crate) fn debug( - &self, - formatter: &mut fmt::Formatter, - name: &str, - ) -> fmt::Result { - formatter - .debug_struct(name) - .field("token", &format_args!("{}", self.repr.token)) - .finish() - } - } self.debug(formatter, "LitByteStr") } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + impl LitByteStr { + pub(crate) fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + formatter + .debug_struct(name) + .field("token", &format_args!("{}", self.repr.token)) + .finish() + } + } + + #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] + impl Debug for LitCStr { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + self.debug(formatter, "LitCStr") + } + } + + impl LitCStr { + pub(crate) fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + formatter + .debug_struct(name) + .field("token", &format_args!("{}", self.repr.token)) + .finish() + } + } + + #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] impl Debug for LitByte { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl LitByte { - pub(crate) fn debug( - &self, - formatter: &mut fmt::Formatter, - name: &str, - ) -> fmt::Result { - formatter - .debug_struct(name) - .field("token", &format_args!("{}", self.repr.token)) - .finish() - } - } self.debug(formatter, "LitByte") } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + impl LitByte { + pub(crate) fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + formatter + .debug_struct(name) + .field("token", &format_args!("{}", self.repr.token)) + .finish() + } + } + + #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] impl Debug for LitChar { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl LitChar { - pub(crate) fn debug( - &self, - formatter: &mut fmt::Formatter, - name: &str, - ) -> fmt::Result { - formatter - .debug_struct(name) - .field("token", &format_args!("{}", self.repr.token)) - .finish() - } - } self.debug(formatter, "LitChar") } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + impl LitChar { + pub(crate) fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + formatter + .debug_struct(name) + .field("token", &format_args!("{}", 
self.repr.token)) + .finish() + } + } + + #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] impl Debug for LitInt { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl LitInt { - pub(crate) fn debug( - &self, - formatter: &mut fmt::Formatter, - name: &str, - ) -> fmt::Result { - formatter - .debug_struct(name) - .field("token", &format_args!("{}", self.repr.token)) - .finish() - } - } self.debug(formatter, "LitInt") } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + impl LitInt { + pub(crate) fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + formatter + .debug_struct(name) + .field("token", &format_args!("{}", self.repr.token)) + .finish() + } + } + + #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] impl Debug for LitFloat { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl LitFloat { - pub(crate) fn debug( - &self, - formatter: &mut fmt::Formatter, - name: &str, - ) -> fmt::Result { - formatter - .debug_struct(name) - .field("token", &format_args!("{}", self.repr.token)) - .finish() - } - } self.debug(formatter, "LitFloat") } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + impl LitFloat { + pub(crate) fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + formatter + .debug_struct(name) + .field("token", &format_args!("{}", self.repr.token)) + .finish() + } + } + + #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] impl Debug for LitBool { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - impl LitBool { - pub(crate) fn debug( - &self, - formatter: &mut fmt::Formatter, - name: &str, - ) -> fmt::Result { - formatter - .debug_struct(name) - .field("value", &self.value) - .finish() - } - } self.debug(formatter, "LitBool") } } + + impl LitBool { + pub(crate) fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result { + formatter + .debug_struct(name) + .field("value", &self.value) + .finish() + } + } } #[cfg(feature = "clone-impls")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] impl Clone for LitRepr { fn clone(&self) -> Self { LitRepr { @@ -703,7 +752,7 @@ impl Clone for LitRepr { } #[cfg(feature = "clone-impls")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] impl Clone for LitIntRepr { fn clone(&self) -> Self { LitIntRepr { @@ -715,7 +764,7 @@ impl Clone for LitIntRepr { } #[cfg(feature = "clone-impls")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] impl Clone for LitFloatRepr { fn clone(&self) -> Self { LitFloatRepr { @@ -729,7 +778,7 @@ impl Clone for LitFloatRepr { macro_rules! lit_extra_traits { ($ty:ident) => { #[cfg(feature = "clone-impls")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] + #[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] impl Clone for $ty { fn clone(&self) -> Self { $ty { @@ -739,7 +788,7 @@ macro_rules! lit_extra_traits { } #[cfg(feature = "extra-traits")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] impl PartialEq for $ty { fn eq(&self, other: &Self) -> bool { self.repr.token.to_string() == other.repr.token.to_string() @@ -747,7 +796,7 @@ macro_rules! 
lit_extra_traits { } #[cfg(feature = "extra-traits")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] impl Hash for $ty { fn hash<H>(&self, state: &mut H) where @@ -770,6 +819,7 @@ macro_rules! lit_extra_traits { lit_extra_traits!(LitStr); lit_extra_traits!(LitByteStr); +lit_extra_traits!(LitCStr); lit_extra_traits!(LitByte); lit_extra_traits!(LitChar); lit_extra_traits!(LitInt); @@ -784,17 +834,16 @@ pub_if_not_doc! { } } -ast_enum! { - /// The style of a string literal, either plain quoted or a raw string like - /// `r##"data"##`. - pub enum StrStyle #no_visit { - /// An ordinary string like `"data"`. - Cooked, - /// A raw string like `r##"data"##`. - /// - /// The unsigned integer is the number of `#` symbols used. - Raw(usize), - } +/// The style of a string literal, either plain quoted or a raw string like +/// `r##"data"##`. +#[doc(hidden)] // https://github.com/dtolnay/syn/issues/1566 +pub enum StrStyle { + /// An ordinary string like `"data"`. + Cooked, + /// A raw string like `r##"data"##`. + /// + /// The unsigned integer is the number of `#` symbols used. + Raw(usize), } #[cfg(feature = "parsing")] @@ -808,12 +857,19 @@ pub_if_not_doc! { #[cfg(feature = "parsing")] pub(crate) mod parsing { - use super::*; use crate::buffer::Cursor; - use crate::parse::{Parse, ParseStream, Result}; - use proc_macro2::Punct; + use crate::error::Result; + use crate::lit::{ + value, Lit, LitBool, LitByte, LitByteStr, LitCStr, LitChar, LitFloat, LitFloatRepr, LitInt, + LitIntRepr, LitStr, + }; + use crate::parse::{Parse, ParseStream, Unexpected}; + use crate::token::{self, Token}; + use proc_macro2::{Literal, Punct, Span}; + use std::cell::Cell; + use std::rc::Rc; - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for Lit { fn parse(input: ParseStream) -> Result<Self> { input.step(|cursor| { @@ -884,7 +940,7 @@ pub(crate) mod parsing { )) } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for LitStr { fn parse(input: ParseStream) -> Result<Self> { let head = input.fork(); @@ -895,7 +951,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for LitByteStr { fn parse(input: ParseStream) -> Result<Self> { let head = input.fork(); @@ -906,7 +962,18 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] + impl Parse for LitCStr { + fn parse(input: ParseStream) -> Result<Self> { + let head = input.fork(); + match input.parse() { + Ok(Lit::CStr(lit)) => Ok(lit), + _ => Err(head.error("expected C string literal")), + } + } + } + + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for LitByte { fn parse(input: ParseStream) -> Result<Self> { let head = input.fork(); @@ -917,7 +984,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for LitChar { fn parse(input: ParseStream) -> Result<Self> { let head = input.fork(); @@ -928,7 +995,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for LitInt { fn parse(input: ParseStream) -> Result<Self> { let head = input.fork(); @@ -939,7 +1006,7 @@ pub(crate) mod parsing { } } - 
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for LitFloat { fn parse(input: ParseStream) -> Result<Self> { let head = input.fork(); @@ -950,7 +1017,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for LitBool { fn parse(input: ParseStream) -> Result<Self> { let head = input.fork(); @@ -960,57 +1027,100 @@ pub(crate) mod parsing { } } } + + fn peek_impl(cursor: Cursor, peek: fn(ParseStream) -> bool) -> bool { + let scope = Span::call_site(); + let unexpected = Rc::new(Cell::new(Unexpected::None)); + let buffer = crate::parse::new_parse_buffer(scope, cursor, unexpected); + peek(&buffer) + } + + macro_rules! impl_token { + ($display:literal $name:ty) => { + impl Token for $name { + fn peek(cursor: Cursor) -> bool { + fn peek(input: ParseStream) -> bool { + <$name as Parse>::parse(input).is_ok() + } + peek_impl(cursor, peek) + } + + fn display() -> &'static str { + $display + } + } + + impl token::private::Sealed for $name {} + }; + } + + impl_token!("literal" Lit); + impl_token!("string literal" LitStr); + impl_token!("byte string literal" LitByteStr); + impl_token!("C-string literal" LitCStr); + impl_token!("byte literal" LitByte); + impl_token!("character literal" LitChar); + impl_token!("integer literal" LitInt); + impl_token!("floating point literal" LitFloat); + impl_token!("boolean literal" LitBool); } #[cfg(feature = "printing")] mod printing { - use super::*; + use crate::lit::{LitBool, LitByte, LitByteStr, LitCStr, LitChar, LitFloat, LitInt, LitStr}; use proc_macro2::TokenStream; - use quote::{ToTokens, TokenStreamExt}; + use quote::{ToTokens, TokenStreamExt as _}; - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for LitStr { fn to_tokens(&self, tokens: &mut TokenStream) { self.repr.token.to_tokens(tokens); } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for LitByteStr { fn to_tokens(&self, tokens: &mut TokenStream) { self.repr.token.to_tokens(tokens); } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] + impl ToTokens for LitCStr { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.repr.token.to_tokens(tokens); + } + } + + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for LitByte { fn to_tokens(&self, tokens: &mut TokenStream) { self.repr.token.to_tokens(tokens); } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for LitChar { fn to_tokens(&self, tokens: &mut TokenStream) { self.repr.token.to_tokens(tokens); } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for LitInt { fn to_tokens(&self, tokens: &mut TokenStream) { self.repr.token.to_tokens(tokens); } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for LitFloat { fn to_tokens(&self, tokens: &mut TokenStream) { self.repr.token.to_tokens(tokens); } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for LitBool { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append(self.token()); @@ -1019,51 +1129,81 @@ mod printing { } 
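Note on the lit.rs changes: the hunks above add a LitCStr type for c"..." literals, and the mod value hunk that follows makes Lit::new fall back to Lit::Verbatim instead of panicking when a literal cannot be parsed (the parse_lit_* helpers now return Option). A minimal sketch of what this looks like to downstream code, assuming the vendored syn 2.x API; the describe helper and its match arms are illustrative only and are not part of this patch:

use proc_macro2::Literal;
use syn::Lit;

// Hypothetical helper: classify a proc-macro2 literal via the vendored syn API.
fn describe(token: Literal) -> &'static str {
    match Lit::new(token) {
        Lit::Str(_) => "string literal",
        Lit::ByteStr(_) => "byte string literal",
        Lit::CStr(_) => "C-string literal (new variant in this syn version)",
        Lit::Int(_) | Lit::Float(_) => "numeric literal",
        // Lit is #[non_exhaustive], and unrecognized input now becomes
        // Lit::Verbatim rather than a panic, so a catch-all arm is needed.
        _ => "other literal",
    }
}

For example, describe(Literal::string("hi")) reports a string literal, while input that previously hit the panic!("Unrecognized literal: ...") path now lands in the catch-all arm as Lit::Verbatim.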
mod value { - use super::*; use crate::bigint::BigInt; + use crate::lit::{ + Lit, LitBool, LitByte, LitByteStr, LitCStr, LitChar, LitFloat, LitFloatRepr, LitInt, + LitIntRepr, LitRepr, LitStr, + }; + use proc_macro2::{Literal, Span}; use std::char; + use std::ffi::CString; use std::ops::{Index, RangeFrom}; impl Lit { /// Interpret a Syn literal from a proc-macro2 literal. pub fn new(token: Literal) -> Self { let repr = token.to_string(); + Lit::from_str(token, &repr) + } + + #[cfg(fuzzing)] + #[doc(hidden)] + pub fn from_str_for_fuzzing(repr: &str) -> Self { + let token = Literal::u8_unsuffixed(0); + Lit::from_str(token, repr) + } - match byte(&repr, 0) { + fn from_str(token: Literal, repr: &str) -> Self { + match byte(repr, 0) { // "...", r"...", r#"..."# b'"' | b'r' => { - let (_, suffix) = parse_lit_str(&repr); - return Lit::Str(LitStr { - repr: Box::new(LitRepr { token, suffix }), - }); + if let Some((_, suffix)) = parse_lit_str(repr) { + return Lit::Str(LitStr { + repr: Box::new(LitRepr { token, suffix }), + }); + } } - b'b' => match byte(&repr, 1) { + b'b' => match byte(repr, 1) { // b"...", br"...", br#"...#" b'"' | b'r' => { - let (_, suffix) = parse_lit_byte_str(&repr); - return Lit::ByteStr(LitByteStr { - repr: Box::new(LitRepr { token, suffix }), - }); + if let Some((_, suffix)) = parse_lit_byte_str(repr) { + return Lit::ByteStr(LitByteStr { + repr: Box::new(LitRepr { token, suffix }), + }); + } } // b'...' b'\'' => { - let (_, suffix) = parse_lit_byte(&repr); - return Lit::Byte(LitByte { - repr: Box::new(LitRepr { token, suffix }), - }); + if let Some((_, suffix)) = parse_lit_byte(repr) { + return Lit::Byte(LitByte { + repr: Box::new(LitRepr { token, suffix }), + }); + } + } + _ => {} + }, + b'c' => match byte(repr, 1) { + // c"...", cr"...", cr#"..."# + b'"' | b'r' => { + if let Some((_, suffix)) = parse_lit_c_str(repr) { + return Lit::CStr(LitCStr { + repr: Box::new(LitRepr { token, suffix }), + }); + } } _ => {} }, // '...' b'\'' => { - let (_, suffix) = parse_lit_char(&repr); - return Lit::Char(LitChar { - repr: Box::new(LitRepr { token, suffix }), - }); + if let Some((_, suffix)) = parse_lit_char(repr) { + return Lit::Char(LitChar { + repr: Box::new(LitRepr { token, suffix }), + }); + } } b'0'..=b'9' | b'-' => { // 0, 123, 0xFF, 0o77, 0b11 - if let Some((digits, suffix)) = parse_lit_int(&repr) { + if let Some((digits, suffix)) = parse_lit_int(repr) { return Lit::Int(LitInt { repr: Box::new(LitIntRepr { token, @@ -1073,7 +1213,7 @@ mod value { }); } // 1.0, 1e-1, 1e+1 - if let Some((digits, suffix)) = parse_lit_float(&repr) { + if let Some((digits, suffix)) = parse_lit_float(repr) { return Lit::Float(LitFloat { repr: Box::new(LitFloatRepr { token, @@ -1092,20 +1232,18 @@ mod value { }); } } - // c"...", cr"...", cr#"..."# - // TODO: add a Lit::CStr variant? 
- b'c' => return Lit::Verbatim(token), b'(' if repr == "(/*ERROR*/)" => return Lit::Verbatim(token), _ => {} } - panic!("Unrecognized literal: `{}`", repr); + Lit::Verbatim(token) } pub fn suffix(&self) -> &str { match self { Lit::Str(lit) => lit.suffix(), Lit::ByteStr(lit) => lit.suffix(), + Lit::CStr(lit) => lit.suffix(), Lit::Byte(lit) => lit.suffix(), Lit::Char(lit) => lit.suffix(), Lit::Int(lit) => lit.suffix(), @@ -1118,6 +1256,7 @@ mod value { match self { Lit::Str(lit) => lit.span(), Lit::ByteStr(lit) => lit.span(), + Lit::CStr(lit) => lit.span(), Lit::Byte(lit) => lit.span(), Lit::Char(lit) => lit.span(), Lit::Int(lit) => lit.span(), @@ -1131,6 +1270,7 @@ mod value { match self { Lit::Str(lit) => lit.set_span(span), Lit::ByteStr(lit) => lit.set_span(span), + Lit::CStr(lit) => lit.set_span(span), Lit::Byte(lit) => lit.set_span(span), Lit::Char(lit) => lit.set_span(span), Lit::Int(lit) => lit.set_span(span), @@ -1157,7 +1297,7 @@ mod value { } // Returns (content, suffix). - pub(crate) fn parse_lit_str(s: &str) -> (Box<str>, Box<str>) { + pub(crate) fn parse_lit_str(s: &str) -> Option<(Box<str>, Box<str>)> { match byte(s, 0) { b'"' => parse_lit_str_cooked(s), b'r' => parse_lit_str_raw(s), @@ -1165,10 +1305,7 @@ mod value { } } - // Clippy false positive - // https://github.com/rust-lang-nursery/rust-clippy/issues/2329 - #[allow(clippy::needless_continue)] - fn parse_lit_str_cooked(mut s: &str) -> (Box<str>, Box<str>) { + fn parse_lit_str_cooked(mut s: &str) -> Option<(Box<str>, Box<str>)> { assert_eq!(byte(s, 0), b'"'); s = &s[1..]; @@ -1178,18 +1315,21 @@ mod value { b'"' => break, b'\\' => { let b = byte(s, 1); - s = &s[2..]; + s = s.get(2..)?; match b { b'x' => { - let (byte, rest) = backslash_x(s); + let (byte, rest) = backslash_x(s)?; s = rest; - assert!(byte <= 0x7F, "Invalid \\x byte in string literal"); - char::from_u32(u32::from(byte)).unwrap() + if byte > 0x7F { + // invalid \x byte in string literal + return None; + } + char::from(byte) } b'u' => { - let (chr, rest) = backslash_u(s); + let (ch, rest) = backslash_u(s)?; s = rest; - chr + ch } b'n' => '\n', b'r' => '\r', @@ -1205,17 +1345,23 @@ mod value { _ => continue 'outer, } }, - b => panic!("unexpected byte {:?} after \\ character in byte literal", b), + _ => { + // unexpected byte after backslash + return None; + } } } b'\r' => { - assert_eq!(byte(s, 1), b'\n', "Bare CR not allowed in string"); + if byte(s, 1) != b'\n' { + // bare carriage return not allowed in string + return None; + } s = &s[2..]; '\n' } _ => { let ch = next_chr(s); - s = &s[ch.len_utf8()..]; + s = s.get(ch.len_utf8()..)?; ch } }; @@ -1225,30 +1371,35 @@ mod value { assert!(s.starts_with('"')); let content = content.into_boxed_str(); let suffix = s[1..].to_owned().into_boxed_str(); - (content, suffix) + Some((content, suffix)) } - fn parse_lit_str_raw(mut s: &str) -> (Box<str>, Box<str>) { + fn parse_lit_str_raw(mut s: &str) -> Option<(Box<str>, Box<str>)> { assert_eq!(byte(s, 0), b'r'); s = &s[1..]; let mut pounds = 0; - while byte(s, pounds) == b'#' { - pounds += 1; + loop { + match byte(s, pounds) { + b'#' => pounds += 1, + b'"' => break, + _ => return None, + } } - assert_eq!(byte(s, pounds), b'"'); let close = s.rfind('"').unwrap(); - for end in s[close + 1..close + 1 + pounds].bytes() { - assert_eq!(end, b'#'); + for end in s.get(close + 1..close + 1 + pounds)?.bytes() { + if end != b'#' { + return None; + } } - let content = s[pounds + 1..close].to_owned().into_boxed_str(); + let content = s.get(pounds + 
1..close)?.to_owned().into_boxed_str(); let suffix = s[close + 1 + pounds..].to_owned().into_boxed_str(); - (content, suffix) + Some((content, suffix)) } // Returns (content, suffix). - pub(crate) fn parse_lit_byte_str(s: &str) -> (Vec<u8>, Box<str>) { + pub(crate) fn parse_lit_byte_str(s: &str) -> Option<(Vec<u8>, Box<str>)> { assert_eq!(byte(s, 0), b'b'); match byte(s, 1) { b'"' => parse_lit_byte_str_cooked(s), @@ -1257,10 +1408,7 @@ mod value { } } - // Clippy false positive - // https://github.com/rust-lang-nursery/rust-clippy/issues/2329 - #[allow(clippy::needless_continue)] - fn parse_lit_byte_str_cooked(mut s: &str) -> (Vec<u8>, Box<str>) { + fn parse_lit_byte_str_cooked(mut s: &str) -> Option<(Vec<u8>, Box<str>)> { assert_eq!(byte(s, 0), b'b'); assert_eq!(byte(s, 1), b'"'); s = &s[2..]; @@ -1274,10 +1422,10 @@ mod value { b'"' => break, b'\\' => { let b = byte(v, 1); - v = &v[2..]; + v = v.get(2..)?; match b { b'x' => { - let (b, rest) = backslash_x(v); + let (b, rest) = backslash_x(v)?; v = rest; b } @@ -1296,16 +1444,22 @@ mod value { continue 'outer; } }, - b => panic!("unexpected byte {:?} after \\ character in byte literal", b), + _ => { + // unexpected byte after backslash + return None; + } } } b'\r' => { - assert_eq!(byte(v, 1), b'\n', "Bare CR not allowed in string"); + if byte(v, 1) != b'\n' { + // bare carriage return not allowed in string + return None; + } v = &v[2..]; b'\n' } b => { - v = &v[1..]; + v = v.get(1..)?; b } }; @@ -1314,30 +1468,124 @@ mod value { assert_eq!(byte(v, 0), b'"'); let suffix = s[s.len() - v.len() + 1..].to_owned().into_boxed_str(); - (out, suffix) + Some((out, suffix)) } - fn parse_lit_byte_str_raw(s: &str) -> (Vec<u8>, Box<str>) { + fn parse_lit_byte_str_raw(s: &str) -> Option<(Vec<u8>, Box<str>)> { assert_eq!(byte(s, 0), b'b'); - let (value, suffix) = parse_lit_str_raw(&s[1..]); - (String::from(value).into_bytes(), suffix) + let (value, suffix) = parse_lit_str_raw(&s[1..])?; + Some((String::from(value).into_bytes(), suffix)) + } + + // Returns (content, suffix). + pub(crate) fn parse_lit_c_str(s: &str) -> Option<(CString, Box<str>)> { + assert_eq!(byte(s, 0), b'c'); + match byte(s, 1) { + b'"' => parse_lit_c_str_cooked(s), + b'r' => parse_lit_c_str_raw(s), + _ => unreachable!(), + } + } + + fn parse_lit_c_str_cooked(mut s: &str) -> Option<(CString, Box<str>)> { + assert_eq!(byte(s, 0), b'c'); + assert_eq!(byte(s, 1), b'"'); + s = &s[2..]; + + // We're going to want to have slices which don't respect codepoint boundaries. 
+ let mut v = s.as_bytes(); + + let mut out = Vec::new(); + 'outer: loop { + let byte = match byte(v, 0) { + b'"' => break, + b'\\' => { + let b = byte(v, 1); + v = v.get(2..)?; + match b { + b'x' => { + let (b, rest) = backslash_x(v)?; + if b == 0 { + // \x00 is not allowed in C-string literal + return None; + } + v = rest; + b + } + b'u' => { + let (ch, rest) = backslash_u(v)?; + if ch == '\0' { + // \u{0} is not allowed in C-string literal + return None; + } + v = rest; + out.extend_from_slice(ch.encode_utf8(&mut [0u8; 4]).as_bytes()); + continue 'outer; + } + b'n' => b'\n', + b'r' => b'\r', + b't' => b'\t', + b'\\' => b'\\', + b'\'' => b'\'', + b'"' => b'"', + b'\r' | b'\n' => loop { + let byte = byte(v, 0); + if matches!(byte, b' ' | b'\t' | b'\n' | b'\r') { + v = &v[1..]; + } else { + continue 'outer; + } + }, + _ => { + // unexpected byte after backslash + return None; + } + } + } + b'\r' => { + if byte(v, 1) != b'\n' { + // bare carriage return not allowed in string + return None; + } + v = &v[2..]; + b'\n' + } + b => { + v = v.get(1..)?; + b + } + }; + out.push(byte); + } + + assert_eq!(byte(v, 0), b'"'); + let suffix = s[s.len() - v.len() + 1..].to_owned().into_boxed_str(); + let cstring = CString::new(out).ok()?; + Some((cstring, suffix)) + } + + fn parse_lit_c_str_raw(s: &str) -> Option<(CString, Box<str>)> { + assert_eq!(byte(s, 0), b'c'); + let (value, suffix) = parse_lit_str_raw(&s[1..])?; + let cstring = CString::new(String::from(value)).ok()?; + Some((cstring, suffix)) } // Returns (value, suffix). - pub(crate) fn parse_lit_byte(s: &str) -> (u8, Box<str>) { + pub(crate) fn parse_lit_byte(s: &str) -> Option<(u8, Box<str>)> { assert_eq!(byte(s, 0), b'b'); assert_eq!(byte(s, 1), b'\''); // We're going to want to have slices which don't respect codepoint boundaries. - let mut v = s[2..].as_bytes(); + let mut v = &s.as_bytes()[2..]; let b = match byte(v, 0) { b'\\' => { let b = byte(v, 1); - v = &v[2..]; + v = v.get(2..)?; match b { b'x' => { - let (b, rest) = backslash_x(v); + let (b, rest) = backslash_x(v)?; v = rest; b } @@ -1348,40 +1596,49 @@ mod value { b'0' => b'\0', b'\'' => b'\'', b'"' => b'"', - b => panic!("unexpected byte {:?} after \\ character in byte literal", b), + _ => { + // unexpected byte after backslash + return None; + } } } b => { - v = &v[1..]; + v = v.get(1..)?; b } }; - assert_eq!(byte(v, 0), b'\''); + if byte(v, 0) != b'\'' { + return None; + } + let suffix = s[s.len() - v.len() + 1..].to_owned().into_boxed_str(); - (b, suffix) + Some((b, suffix)) } // Returns (value, suffix). 
- pub(crate) fn parse_lit_char(mut s: &str) -> (char, Box<str>) { + pub(crate) fn parse_lit_char(mut s: &str) -> Option<(char, Box<str>)> { assert_eq!(byte(s, 0), b'\''); s = &s[1..]; let ch = match byte(s, 0) { b'\\' => { let b = byte(s, 1); - s = &s[2..]; + s = s.get(2..)?; match b { b'x' => { - let (byte, rest) = backslash_x(s); + let (byte, rest) = backslash_x(s)?; s = rest; - assert!(byte <= 0x80, "Invalid \\x byte in string literal"); - char::from_u32(u32::from(byte)).unwrap() + if byte > 0x7F { + // invalid \x byte in character literal + return None; + } + char::from(byte) } b'u' => { - let (chr, rest) = backslash_u(s); + let (ch, rest) = backslash_u(s)?; s = rest; - chr + ch } b'n' => '\n', b'r' => '\r', @@ -1390,21 +1647,28 @@ mod value { b'0' => '\0', b'\'' => '\'', b'"' => '"', - b => panic!("unexpected byte {:?} after \\ character in byte literal", b), + _ => { + // unexpected byte after backslash + return None; + } } } _ => { let ch = next_chr(s); - s = &s[ch.len_utf8()..]; + s = s.get(ch.len_utf8()..)?; ch } }; - assert_eq!(byte(s, 0), b'\''); + + if byte(s, 0) != b'\'' { + return None; + } + let suffix = s[1..].to_owned().into_boxed_str(); - (ch, suffix) + Some((ch, suffix)) } - fn backslash_x<S>(s: &S) -> (u8, &S) + fn backslash_x<S>(s: &S) -> Option<(u8, &S)> where S: Index<RangeFrom<usize>, Output = S> + AsRef<[u8]> + ?Sized, { @@ -1416,20 +1680,23 @@ mod value { b'0'..=b'9' => b0 - b'0', b'a'..=b'f' => 10 + (b0 - b'a'), b'A'..=b'F' => 10 + (b0 - b'A'), - _ => panic!("unexpected non-hex character after \\x"), + _ => return None, }; ch += match b1 { b'0'..=b'9' => b1 - b'0', b'a'..=b'f' => 10 + (b1 - b'a'), b'A'..=b'F' => 10 + (b1 - b'A'), - _ => panic!("unexpected non-hex character after \\x"), + _ => return None, }; - (ch, &s[2..]) + Some((ch, &s[2..])) } - fn backslash_u(mut s: &str) -> (char, &str) { + fn backslash_u<S>(mut s: &S) -> Option<(char, &S)> + where + S: Index<RangeFrom<usize>, Output = S> + AsRef<[u8]> + ?Sized, + { if byte(s, 0) != b'{' { - panic!("{}", "expected { after \\u"); + return None; } s = &s[1..]; @@ -1445,26 +1712,25 @@ mod value { s = &s[1..]; continue; } - b'}' if digits == 0 => panic!("invalid empty unicode escape"), + b'}' if digits == 0 => return None, b'}' => break, - _ => panic!("unexpected non-hex character after \\u"), + _ => return None, }; if digits == 6 { - panic!("overlong unicode escape (must have at most 6 hex digits)"); + return None; } ch *= 0x10; ch += u32::from(digit); digits += 1; s = &s[1..]; } - assert!(byte(s, 0) == b'}'); + if byte(s, 0) != b'}' { + return None; + } s = &s[1..]; - if let Some(ch) = char::from_u32(ch) { - (ch, s) - } else { - panic!("character code {:x} is not a valid unicode character", ch); - } + let ch = char::from_u32(ch)?; + Some((ch, s)) } // Returns base 10 digits and suffix. diff --git a/vendor/syn/src/lookahead.rs b/vendor/syn/src/lookahead.rs index e89a4c7f..10b45661 100644 --- a/vendor/syn/src/lookahead.rs +++ b/vendor/syn/src/lookahead.rs @@ -2,9 +2,10 @@ use crate::buffer::Cursor; use crate::error::{self, Error}; use crate::sealed::lookahead::Sealed; use crate::span::IntoSpans; -use crate::token::Token; +use crate::token::{CustomToken, Token}; use proc_macro2::{Delimiter, Span}; use std::cell::RefCell; +use std::fmt::{self, Display}; /// Support for checking the next token in a stream to decide how to parse. /// @@ -110,7 +111,18 @@ impl<'a> Lookahead1<'a> { /// The error message will identify all of the expected token types that /// have been peeked against this lookahead instance. 
pub fn error(self) -> Error { - let comparisons = self.comparisons.borrow(); + let mut comparisons = self.comparisons.into_inner(); + comparisons.retain_mut(|display| { + if *display == "`)`" { + *display = match self.cursor.scope_delimiter() { + Delimiter::Parenthesis => "`)`", + Delimiter::Brace => "`}`", + Delimiter::Bracket => "`]`", + Delimiter::None => return false, + } + } + true + }); match comparisons.len() { 0 => { if self.cursor.eof() { @@ -128,14 +140,29 @@ impl<'a> Lookahead1<'a> { error::new_at(self.scope, self.cursor, message) } _ => { - let join = comparisons.join(", "); - let message = format!("expected one of: {}", join); + let message = format!("expected one of: {}", CommaSeparated(&comparisons)); error::new_at(self.scope, self.cursor, message) } } } } +struct CommaSeparated<'a>(&'a [&'a str]); + +impl<'a> Display for CommaSeparated<'a> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let mut first = true; + for &s in self.0 { + if !first { + f.write_str(", ")?; + } + f.write_str(s)?; + first = false; + } + Ok(()) + } +} + /// Types that can be parsed by looking at just one token. /// /// Use [`ParseStream::peek`] to peek one of these types in a parse stream @@ -150,6 +177,160 @@ pub trait Peek: Sealed { type Token: Token; } +/// Pseudo-token used for peeking the end of a parse stream. +/// +/// This type is only useful as an argument to one of the following functions: +/// +/// - [`ParseStream::peek`][crate::parse::ParseBuffer::peek] +/// - [`ParseStream::peek2`][crate::parse::ParseBuffer::peek2] +/// - [`ParseStream::peek3`][crate::parse::ParseBuffer::peek3] +/// - [`Lookahead1::peek`] +/// +/// The peek will return `true` if there are no remaining tokens after that +/// point in the parse stream. +/// +/// # Example +/// +/// Suppose we are parsing attributes containing core::fmt inspired formatting +/// arguments: +/// +/// - `#[fmt("simple example")]` +/// - `#[fmt("interpolation e{}ample", self.x)]` +/// - `#[fmt("interpolation e{x}ample")]` +/// +/// and we want to recognize the cases where no interpolation occurs so that +/// more efficient code can be generated. +/// +/// The following implementation uses `input.peek(Token![,]) && +/// input.peek2(End)` to recognize the case of a trailing comma without +/// consuming the comma from the parse stream, because if it isn't a trailing +/// comma, that same comma needs to be parsed as part of `args`. +/// +/// ``` +/// use proc_macro2::TokenStream; +/// use quote::quote; +/// use syn::parse::{End, Parse, ParseStream, Result}; +/// use syn::{parse_quote, Attribute, LitStr, Token}; +/// +/// struct FormatArgs { +/// template: LitStr, // "...{}..." +/// args: TokenStream, // , self.x +/// } +/// +/// impl Parse for FormatArgs { +/// fn parse(input: ParseStream) -> Result<Self> { +/// let template: LitStr = input.parse()?; +/// +/// let args = if input.is_empty() +/// || input.peek(Token![,]) && input.peek2(End) +/// { +/// input.parse::<Option<Token![,]>>()?; +/// TokenStream::new() +/// } else { +/// input.parse()? +/// }; +/// +/// Ok(FormatArgs { +/// template, +/// args, +/// }) +/// } +/// } +/// +/// fn main() -> Result<()> { +/// let attrs: Vec<Attribute> = parse_quote! 
{ +/// #[fmt("simple example")] +/// #[fmt("interpolation e{}ample", self.x)] +/// #[fmt("interpolation e{x}ample")] +/// }; +/// +/// for attr in &attrs { +/// let FormatArgs { template, args } = attr.parse_args()?; +/// let requires_fmt_machinery = +/// !args.is_empty() || template.value().contains(['{', '}']); +/// let out = if requires_fmt_machinery { +/// quote! { +/// ::core::write!(__formatter, #template #args) +/// } +/// } else { +/// quote! { +/// __formatter.write_str(#template) +/// } +/// }; +/// println!("{}", out); +/// } +/// Ok(()) +/// } +/// ``` +/// +/// Implementing this parsing logic without `peek2(End)` is more clumsy because +/// we'd need a parse stream actually advanced past the comma before being able +/// to find out whether there is anything after it. It would look something +/// like: +/// +/// ``` +/// # use proc_macro2::TokenStream; +/// # use syn::parse::{ParseStream, Result}; +/// # use syn::Token; +/// # +/// # fn parse(input: ParseStream) -> Result<()> { +/// use syn::parse::discouraged::Speculative as _; +/// +/// let ahead = input.fork(); +/// ahead.parse::<Option<Token![,]>>()?; +/// let args = if ahead.is_empty() { +/// input.advance_to(&ahead); +/// TokenStream::new() +/// } else { +/// input.parse()? +/// }; +/// # Ok(()) +/// # } +/// ``` +/// +/// or: +/// +/// ``` +/// # use proc_macro2::TokenStream; +/// # use syn::parse::{ParseStream, Result}; +/// # use syn::Token; +/// # +/// # fn parse(input: ParseStream) -> Result<()> { +/// use quote::ToTokens as _; +/// +/// let comma: Option<Token![,]> = input.parse()?; +/// let mut args = TokenStream::new(); +/// if !input.is_empty() { +/// comma.to_tokens(&mut args); +/// input.parse::<TokenStream>()?.to_tokens(&mut args); +/// } +/// # Ok(()) +/// # } +/// ``` +pub struct End; + +impl Copy for End {} + +impl Clone for End { + fn clone(&self) -> Self { + *self + } +} + +impl Peek for End { + type Token = Self; +} + +impl CustomToken for End { + fn peek(cursor: Cursor) -> bool { + cursor.eof() + } + + fn display() -> &'static str { + "`)`" // Lookahead1 error message will fill in the expected close delimiter + } +} + impl<F: Copy + FnOnce(TokenMarker) -> T, T: Token> Peek for F { type Token = T; } @@ -162,8 +343,6 @@ impl<S> IntoSpans<S> for TokenMarker { } } -pub(crate) fn is_delimiter(cursor: Cursor, delimiter: Delimiter) -> bool { - cursor.group(delimiter).is_some() -} - impl<F: Copy + FnOnce(TokenMarker) -> T, T: Token> Sealed for F {} + +impl Sealed for End {} diff --git a/vendor/syn/src/mac.rs b/vendor/syn/src/mac.rs index 8f687cc9..15107801 100644 --- a/vendor/syn/src/mac.rs +++ b/vendor/syn/src/mac.rs @@ -1,18 +1,19 @@ -use super::*; +#[cfg(feature = "parsing")] +use crate::error::Result; +#[cfg(feature = "parsing")] +use crate::parse::{Parse, ParseStream, Parser}; +use crate::path::Path; use crate::token::{Brace, Bracket, Paren}; use proc_macro2::extra::DelimSpan; -#[cfg(any(feature = "parsing", feature = "printing"))] +#[cfg(feature = "parsing")] use proc_macro2::Delimiter; use proc_macro2::TokenStream; #[cfg(feature = "parsing")] use proc_macro2::TokenTree; -#[cfg(feature = "parsing")] -use crate::parse::{Parse, ParseStream, Parser, Result}; - ast_struct! { /// A macro invocation: `println!("{}", mac)`. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct Macro { pub path: Path, pub bang_token: Token![!], @@ -23,7 +24,7 @@ ast_struct! { ast_enum! 
{ /// A grouping token that surrounds a macro body: `m!(...)` or `m!{...}` or `m![...]`. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub enum MacroDelimiter { Paren(Paren), Brace(Brace), @@ -39,6 +40,14 @@ impl MacroDelimiter { MacroDelimiter::Bracket(token) => &token.span, } } + + #[cfg(all(feature = "full", any(feature = "parsing", feature = "printing")))] + pub(crate) fn is_brace(&self) -> bool { + match self { + MacroDelimiter::Brace(_) => true, + MacroDelimiter::Paren(_) | MacroDelimiter::Bracket(_) => false, + } + } } impl Macro { @@ -125,7 +134,7 @@ impl Macro { /// } /// ``` #[cfg(feature = "parsing")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] pub fn parse_body<T: Parse>(&self) -> Result<T> { self.parse_body_with(T::parse) } @@ -133,7 +142,7 @@ impl Macro { /// Parse the tokens within the macro invocation's delimiters using the /// given parser. #[cfg(feature = "parsing")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] pub fn parse_body_with<F: Parser>(&self, parser: F) -> Result<F::Output> { let scope = self.delimiter.span().close(); crate::parse::parse_scoped(parser, scope, self.tokens.clone()) @@ -162,10 +171,12 @@ pub(crate) fn parse_delimiter(input: ParseStream) -> Result<(MacroDelimiter, Tok #[cfg(feature = "parsing")] pub(crate) mod parsing { - use super::*; - use crate::parse::{Parse, ParseStream, Result}; + use crate::error::Result; + use crate::mac::{parse_delimiter, Macro}; + use crate::parse::{Parse, ParseStream}; + use crate::path::Path; - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for Macro { fn parse(input: ParseStream) -> Result<Self> { let tokens; @@ -185,8 +196,11 @@ pub(crate) mod parsing { #[cfg(feature = "printing")] mod printing { - use super::*; - use proc_macro2::TokenStream; + use crate::mac::{Macro, MacroDelimiter}; + use crate::path; + use crate::path::printing::PathStyle; + use crate::token; + use proc_macro2::{Delimiter, TokenStream}; use quote::ToTokens; impl MacroDelimiter { @@ -200,10 +214,10 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for Macro { fn to_tokens(&self, tokens: &mut TokenStream) { - self.path.to_tokens(tokens); + path::printing::print_path(tokens, &self.path, PathStyle::Mod); self.bang_token.to_tokens(tokens); self.delimiter.surround(tokens, self.tokens.clone()); } diff --git a/vendor/syn/src/macros.rs b/vendor/syn/src/macros.rs index 06ceb542..167f2cf2 100644 --- a/vendor/syn/src/macros.rs +++ b/vendor/syn/src/macros.rs @@ -4,14 +4,17 @@ )] macro_rules! ast_struct { ( - [$($attrs_pub:tt)*] - struct $name:ident #full $($rest:tt)* + $(#[$attr:meta])* + $pub:ident $struct:ident $name:ident #full $body:tt ) => { + check_keyword_matches!(pub $pub); + check_keyword_matches!(struct $struct); + #[cfg(feature = "full")] - $($attrs_pub)* struct $name $($rest)* + $(#[$attr])* $pub $struct $name $body #[cfg(not(feature = "full"))] - $($attrs_pub)* struct $name { + $(#[$attr])* $pub $struct $name { _noconstruct: ::std::marker::PhantomData<::proc_macro2::Span>, } @@ -24,35 +27,26 @@ macro_rules! 
ast_struct { }; ( - [$($attrs_pub:tt)*] - struct $name:ident $($rest:tt)* + $(#[$attr:meta])* + $pub:ident $struct:ident $name:ident $body:tt ) => { - $($attrs_pub)* struct $name $($rest)* - }; + check_keyword_matches!(pub $pub); + check_keyword_matches!(struct $struct); - ($($t:tt)*) => { - strip_attrs_pub!(ast_struct!($($t)*)); + $(#[$attr])* $pub $struct $name $body }; } +#[cfg(any(feature = "full", feature = "derive"))] macro_rules! ast_enum { - // Drop the `#no_visit` attribute, if present. ( - [$($attrs_pub:tt)*] - enum $name:ident #no_visit $($rest:tt)* - ) => ( - ast_enum!([$($attrs_pub)*] enum $name $($rest)*); - ); + $(#[$enum_attr:meta])* + $pub:ident $enum:ident $name:ident $body:tt + ) => { + check_keyword_matches!(pub $pub); + check_keyword_matches!(enum $enum); - ( - [$($attrs_pub:tt)*] - enum $name:ident $($rest:tt)* - ) => ( - $($attrs_pub)* enum $name $($rest)* - ); - - ($($t:tt)*) => { - strip_attrs_pub!(ast_enum!($($t)*)); + $(#[$enum_attr])* $pub $enum $name $body }; } @@ -60,42 +54,32 @@ macro_rules! ast_enum_of_structs { ( $(#[$enum_attr:meta])* $pub:ident $enum:ident $name:ident $body:tt - $($remaining:tt)* ) => { - ast_enum!($(#[$enum_attr])* $pub $enum $name $body); - ast_enum_of_structs_impl!($pub $enum $name $body $($remaining)*); + check_keyword_matches!(pub $pub); + check_keyword_matches!(enum $enum); + + $(#[$enum_attr])* $pub $enum $name $body + + ast_enum_of_structs_impl!($name $body); + + #[cfg(feature = "printing")] + generate_to_tokens!(() tokens $name $body); }; } macro_rules! ast_enum_of_structs_impl { ( - $pub:ident $enum:ident $name:ident { + $name:ident { $( $(#[cfg $cfg_attr:tt])* $(#[doc $($doc_attr:tt)*])* - $variant:ident $( ($($member:ident)::+) )*, + $variant:ident $( ($member:ident) )*, )* } ) => { - check_keyword_matches!(pub $pub); - check_keyword_matches!(enum $enum); - $($( - ast_enum_from_struct!($name::$variant, $($member)::+); + ast_enum_from_struct!($name::$variant, $member); )*)* - - #[cfg(feature = "printing")] - generate_to_tokens! { - () - tokens - $name { - $( - $(#[cfg $cfg_attr])* - $(#[doc $($doc_attr)*])* - $variant $($($member)::+)*, - )* - } - } }; } @@ -132,7 +116,7 @@ macro_rules! generate_to_tokens { ($($arms:tt)*) $tokens:ident $name:ident { $(#[cfg $cfg_attr:tt])* $(#[doc $($doc_attr:tt)*])* - $variant:ident $member:ident, + $variant:ident($member:ident), $($next:tt)* } ) => { @@ -143,7 +127,7 @@ macro_rules! generate_to_tokens { }; (($($arms:tt)*) $tokens:ident $name:ident {}) => { - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ::quote::ToTokens for $name { fn to_tokens(&self, $tokens: &mut ::proc_macro2::TokenStream) { match self { @@ -154,32 +138,45 @@ macro_rules! generate_to_tokens { }; } -macro_rules! strip_attrs_pub { - ($mac:ident!($(#[$m:meta])* $pub:ident $($t:tt)*)) => { +// Rustdoc bug: does not respect the doc(hidden) on some items. +#[cfg(all(doc, feature = "parsing"))] +macro_rules! pub_if_not_doc { + ($(#[$m:meta])* $pub:ident $($item:tt)*) => { + check_keyword_matches!(pub $pub); + + $(#[$m])* + $pub(crate) $($item)* + }; +} + +#[cfg(all(not(doc), feature = "parsing"))] +macro_rules! pub_if_not_doc { + ($(#[$m:meta])* $pub:ident $($item:tt)*) => { check_keyword_matches!(pub $pub); - $mac!([$(#[$m])* $pub] $($t)*); + $(#[$m])* + $pub $($item)* }; } macro_rules! check_keyword_matches { (enum enum) => {}; (pub pub) => {}; + (struct struct) => {}; } -// Rustdoc bug: does not respect the doc(hidden) on some items. 
-#[cfg(all(doc, feature = "parsing"))] -macro_rules! pub_if_not_doc { - ($(#[$m:meta])* pub $($item:tt)*) => { - $(#[$m])* - pub(crate) $($item)* - }; -} +#[cfg(any(feature = "full", feature = "derive"))] +macro_rules! return_impl_trait { + ( + $(#[$attr:meta])* + $vis:vis fn $name:ident $args:tt -> $impl_trait:ty [$concrete:ty] $body:block + ) => { + #[cfg(not(docsrs))] + $(#[$attr])* + $vis fn $name $args -> $concrete $body -#[cfg(all(not(doc), feature = "parsing"))] -macro_rules! pub_if_not_doc { - ($(#[$m:meta])* pub $($item:tt)*) => { - $(#[$m])* - pub $($item)* + #[cfg(docsrs)] + $(#[$attr])* + $vis fn $name $args -> $impl_trait $body }; } diff --git a/vendor/syn/src/meta.rs b/vendor/syn/src/meta.rs index f17b2802..ffeeb262 100644 --- a/vendor/syn/src/meta.rs +++ b/vendor/syn/src/meta.rs @@ -1,8 +1,9 @@ //! Facility for interpreting structured content inside of an `Attribute`. -use crate::ext::IdentExt; +use crate::error::{Error, Result}; +use crate::ext::IdentExt as _; use crate::lit::Lit; -use crate::parse::{Error, ParseStream, Parser, Result}; +use crate::parse::{ParseStream, Parser}; use crate::path::{Path, PathSegment}; use crate::punctuated::Punctuated; use proc_macro2::Ident; diff --git a/vendor/syn/src/op.rs b/vendor/syn/src/op.rs index bff72c8b..575d9faa 100644 --- a/vendor/syn/src/op.rs +++ b/vendor/syn/src/op.rs @@ -1,6 +1,6 @@ ast_enum! { /// A binary operator: `+`, `+=`, `&`. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] #[non_exhaustive] pub enum BinOp { /// The `+` operator (addition) @@ -64,7 +64,7 @@ ast_enum! { ast_enum! { /// A unary operator: `*`, `!`, `-`. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] #[non_exhaustive] pub enum UnOp { /// The `*` operator for dereferencing @@ -78,59 +78,12 @@ ast_enum! 
{ #[cfg(feature = "parsing")] pub(crate) mod parsing { - use super::*; - use crate::parse::{Parse, ParseStream, Result}; + use crate::error::Result; + use crate::op::{BinOp, UnOp}; + use crate::parse::{Parse, ParseStream}; - fn parse_binop(input: ParseStream) -> Result<BinOp> { - if input.peek(Token![&&]) { - input.parse().map(BinOp::And) - } else if input.peek(Token![||]) { - input.parse().map(BinOp::Or) - } else if input.peek(Token![<<]) { - input.parse().map(BinOp::Shl) - } else if input.peek(Token![>>]) { - input.parse().map(BinOp::Shr) - } else if input.peek(Token![==]) { - input.parse().map(BinOp::Eq) - } else if input.peek(Token![<=]) { - input.parse().map(BinOp::Le) - } else if input.peek(Token![!=]) { - input.parse().map(BinOp::Ne) - } else if input.peek(Token![>=]) { - input.parse().map(BinOp::Ge) - } else if input.peek(Token![+]) { - input.parse().map(BinOp::Add) - } else if input.peek(Token![-]) { - input.parse().map(BinOp::Sub) - } else if input.peek(Token![*]) { - input.parse().map(BinOp::Mul) - } else if input.peek(Token![/]) { - input.parse().map(BinOp::Div) - } else if input.peek(Token![%]) { - input.parse().map(BinOp::Rem) - } else if input.peek(Token![^]) { - input.parse().map(BinOp::BitXor) - } else if input.peek(Token![&]) { - input.parse().map(BinOp::BitAnd) - } else if input.peek(Token![|]) { - input.parse().map(BinOp::BitOr) - } else if input.peek(Token![<]) { - input.parse().map(BinOp::Lt) - } else if input.peek(Token![>]) { - input.parse().map(BinOp::Gt) - } else { - Err(input.error("expected binary operator")) - } - } - - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for BinOp { - #[cfg(not(feature = "full"))] - fn parse(input: ParseStream) -> Result<Self> { - parse_binop(input) - } - - #[cfg(feature = "full")] fn parse(input: ParseStream) -> Result<Self> { if input.peek(Token![+=]) { input.parse().map(BinOp::AddAssign) @@ -152,13 +105,49 @@ pub(crate) mod parsing { input.parse().map(BinOp::ShlAssign) } else if input.peek(Token![>>=]) { input.parse().map(BinOp::ShrAssign) + } else if input.peek(Token![&&]) { + input.parse().map(BinOp::And) + } else if input.peek(Token![||]) { + input.parse().map(BinOp::Or) + } else if input.peek(Token![<<]) { + input.parse().map(BinOp::Shl) + } else if input.peek(Token![>>]) { + input.parse().map(BinOp::Shr) + } else if input.peek(Token![==]) { + input.parse().map(BinOp::Eq) + } else if input.peek(Token![<=]) { + input.parse().map(BinOp::Le) + } else if input.peek(Token![!=]) { + input.parse().map(BinOp::Ne) + } else if input.peek(Token![>=]) { + input.parse().map(BinOp::Ge) + } else if input.peek(Token![+]) { + input.parse().map(BinOp::Add) + } else if input.peek(Token![-]) { + input.parse().map(BinOp::Sub) + } else if input.peek(Token![*]) { + input.parse().map(BinOp::Mul) + } else if input.peek(Token![/]) { + input.parse().map(BinOp::Div) + } else if input.peek(Token![%]) { + input.parse().map(BinOp::Rem) + } else if input.peek(Token![^]) { + input.parse().map(BinOp::BitXor) + } else if input.peek(Token![&]) { + input.parse().map(BinOp::BitAnd) + } else if input.peek(Token![|]) { + input.parse().map(BinOp::BitOr) + } else if input.peek(Token![<]) { + input.parse().map(BinOp::Lt) + } else if input.peek(Token![>]) { + input.parse().map(BinOp::Gt) } else { - parse_binop(input) + Err(input.error("expected binary operator")) } } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for UnOp { fn 
parse(input: ParseStream) -> Result<Self> { let lookahead = input.lookahead1(); @@ -177,11 +166,11 @@ pub(crate) mod parsing { #[cfg(feature = "printing")] mod printing { - use super::*; + use crate::op::{BinOp, UnOp}; use proc_macro2::TokenStream; use quote::ToTokens; - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for BinOp { fn to_tokens(&self, tokens: &mut TokenStream) { match self { @@ -217,7 +206,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for UnOp { fn to_tokens(&self, tokens: &mut TokenStream) { match self { diff --git a/vendor/syn/src/parse.rs b/vendor/syn/src/parse.rs index 5a2aeb62..57531005 100644 --- a/vendor/syn/src/parse.rs +++ b/vendor/syn/src/parse.rs @@ -185,11 +185,11 @@ pub mod discouraged; use crate::buffer::{Cursor, TokenBuffer}; use crate::error; use crate::lookahead; -#[cfg(feature = "proc-macro")] -use crate::proc_macro; use crate::punctuated::Punctuated; use crate::token::Token; -use proc_macro2::{self, Delimiter, Group, Literal, Punct, Span, TokenStream, TokenTree}; +use proc_macro2::{Delimiter, Group, Literal, Punct, Span, TokenStream, TokenTree}; +#[cfg(feature = "printing")] +use quote::ToTokens; use std::cell::Cell; use std::fmt::{self, Debug, Display}; #[cfg(feature = "extra-traits")] @@ -197,11 +197,12 @@ use std::hash::{Hash, Hasher}; use std::marker::PhantomData; use std::mem; use std::ops::Deref; +use std::panic::{RefUnwindSafe, UnwindSafe}; use std::rc::Rc; use std::str::FromStr; pub use crate::error::{Error, Result}; -pub use crate::lookahead::{Lookahead1, Peek}; +pub use crate::lookahead::{End, Lookahead1, Peek}; /// Parsing interface implemented by all types that can be parsed in a default /// way from a token stream. @@ -262,10 +263,11 @@ pub struct ParseBuffer<'a> { impl<'a> Drop for ParseBuffer<'a> { fn drop(&mut self) { - if let Some(unexpected_span) = span_of_unexpected_ignoring_nones(self.cursor()) { + if let Some((unexpected_span, delimiter)) = span_of_unexpected_ignoring_nones(self.cursor()) + { let (inner, old_span) = inner_unexpected(self); if old_span.is_none() { - inner.set(Unexpected::Some(unexpected_span)); + inner.set(Unexpected::Some(unexpected_span, delimiter)); } } } @@ -283,6 +285,9 @@ impl<'a> Debug for ParseBuffer<'a> { } } +impl<'a> UnwindSafe for ParseBuffer<'a> {} +impl<'a> RefUnwindSafe for ParseBuffer<'a> {} + /// Cursor state associated with speculative parsing. /// /// This type is the input of the closure provided to [`ParseStream::step`]. 
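Note on the parse.rs hunks above and below: the updated vendored syn re-exports `End` from `crate::lookahead` and threads the enclosing `Delimiter` through `Unexpected`, which is what later enables the delimiter-aware "unexpected token, expected `)`/`}`/`]`" messages. As a hedged illustration only (the `KeyValue` type and its fields are invented for this sketch and are not part of the diff), a downstream parser can peek for the newly exported `End` marker like this:

```rust
use syn::parse::{End, Parse, ParseStream};
use syn::{Ident, LitInt, Result, Token};

// Illustrative item: `name` or `name = 42`. `End` peeks true at the end of
// the current delimited group as well as at the end of the outermost input.
struct KeyValue {
    key: Ident,
    value: Option<LitInt>,
}

impl Parse for KeyValue {
    fn parse(input: ParseStream) -> Result<Self> {
        let key: Ident = input.parse()?;
        let value = if input.peek(End) {
            None
        } else {
            input.parse::<Token![=]>()?;
            Some(input.parse()?)
        };
        Ok(KeyValue { key, value })
    }
}

fn main() {
    let kv: KeyValue = syn::parse_str("limit = 8").expect("parse");
    assert_eq!(kv.key, "limit");
    assert!(kv.value.is_some());
}
```
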
@@ -393,7 +398,7 @@ pub(crate) fn new_parse_buffer( pub(crate) enum Unexpected { None, - Some(Span), + Some(Span, Delimiter), Chain(Rc<Cell<Unexpected>>), } @@ -407,7 +412,7 @@ impl Clone for Unexpected { fn clone(&self) -> Self { match self { Unexpected::None => Unexpected::None, - Unexpected::Some(span) => Unexpected::Some(*span), + Unexpected::Some(span, delimiter) => Unexpected::Some(*span, *delimiter), Unexpected::Chain(next) => Unexpected::Chain(next.clone()), } } @@ -422,12 +427,12 @@ fn cell_clone<T: Default + Clone>(cell: &Cell<T>) -> T { ret } -fn inner_unexpected(buffer: &ParseBuffer) -> (Rc<Cell<Unexpected>>, Option<Span>) { +fn inner_unexpected(buffer: &ParseBuffer) -> (Rc<Cell<Unexpected>>, Option<(Span, Delimiter)>) { let mut unexpected = get_unexpected(buffer); loop { match cell_clone(&unexpected) { Unexpected::None => return (unexpected, None), - Unexpected::Some(span) => return (unexpected, Some(span)), + Unexpected::Some(span, delimiter) => return (unexpected, Some((span, delimiter))), Unexpected::Chain(next) => unexpected = next, } } @@ -437,7 +442,7 @@ pub(crate) fn get_unexpected(buffer: &ParseBuffer) -> Rc<Cell<Unexpected>> { cell_clone(&buffer.unexpected).unwrap() } -fn span_of_unexpected_ignoring_nones(mut cursor: Cursor) -> Option<Span> { +fn span_of_unexpected_ignoring_nones(mut cursor: Cursor) -> Option<(Span, Delimiter)> { if cursor.eof() { return None; } @@ -450,7 +455,7 @@ fn span_of_unexpected_ignoring_nones(mut cursor: Cursor) -> Option<Span> { if cursor.eof() { None } else { - Some(cursor.span()) + Some((cursor.span(), cursor.scope_delimiter())) } } @@ -497,7 +502,7 @@ impl<'a> ParseBuffer<'a> { /// } /// } /// ``` - pub fn call<T>(&self, function: fn(ParseStream) -> Result<T>) -> Result<T> { + pub fn call<T>(&'a self, function: fn(ParseStream<'a>) -> Result<T>) -> Result<T> { function(self) } @@ -513,8 +518,8 @@ impl<'a> ParseBuffer<'a> { /// /// - `input.peek(Token![struct])` /// - `input.peek(Token![==])` - /// - `input.peek(Ident)` *(does not accept keywords)* - /// - `input.peek(Ident::peek_any)` + /// - `input.peek(syn::Ident)` *(does not accept keywords)* + /// - `input.peek(syn::Ident::peek_any)` /// - `input.peek(Lifetime)` /// - `input.peek(token::Brace)` /// @@ -614,11 +619,6 @@ impl<'a> ParseBuffer<'a> { /// ``` pub fn peek2<T: Peek>(&self, token: T) -> bool { fn peek2(buffer: &ParseBuffer, peek: fn(Cursor) -> bool) -> bool { - if let Some(group) = buffer.cursor().group(Delimiter::None) { - if group.0.skip().map_or(false, peek) { - return true; - } - } buffer.cursor().skip().map_or(false, peek) } @@ -629,11 +629,6 @@ impl<'a> ParseBuffer<'a> { /// Looks at the third-next token in the parse stream. pub fn peek3<T: Peek>(&self, token: T) -> bool { fn peek3(buffer: &ParseBuffer, peek: fn(Cursor) -> bool) -> bool { - if let Some(group) = buffer.cursor().group(Delimiter::None) { - if group.0.skip().and_then(Cursor::skip).map_or(false, peek) { - return true; - } - } buffer .cursor() .skip() @@ -737,8 +732,8 @@ impl<'a> ParseBuffer<'a> { /// } /// ``` pub fn parse_terminated<T, P>( - &self, - parser: fn(ParseStream) -> Result<T>, + &'a self, + parser: fn(ParseStream<'a>) -> Result<T>, separator: P, ) -> Result<Punctuated<T, P::Token>> where @@ -749,10 +744,17 @@ impl<'a> ParseBuffer<'a> { Punctuated::parse_terminated_with(self, parser) } - /// Returns whether there are tokens remaining in this stream. + /// Returns whether there are no more tokens remaining to be parsed from + /// this stream. 
+ /// + /// This method returns true upon reaching the end of the content within a + /// set of delimiters, as well as at the end of the tokens provided to the + /// outermost parsing entry point. /// - /// This method returns true at the end of the content of a set of - /// delimiters, as well as at the very end of the complete macro input. + /// This is equivalent to + /// <code>.<a href="#method.peek">peek</a>(<a href="struct.End.html">syn::parse::End</a>)</code>. + /// Use `.peek2(End)` or `.peek3(End)` to look for the end of a parse stream + /// further ahead than the current position. /// /// # Example /// @@ -1096,26 +1098,78 @@ impl<'a> ParseBuffer<'a> { /// /// Cursors are immutable so no operations you perform against the cursor /// will affect the state of this parse stream. + /// + /// # Example + /// + /// ``` + /// use proc_macro2::TokenStream; + /// use syn::buffer::Cursor; + /// use syn::parse::{ParseStream, Result}; + /// + /// // Run a parser that returns T, but get its output as TokenStream instead of T. + /// // This works without T needing to implement ToTokens. + /// fn recognize_token_stream<T>( + /// recognizer: fn(ParseStream) -> Result<T>, + /// ) -> impl Fn(ParseStream) -> Result<TokenStream> { + /// move |input| { + /// let begin = input.cursor(); + /// recognizer(input)?; + /// let end = input.cursor(); + /// Ok(tokens_between(begin, end)) + /// } + /// } + /// + /// // Collect tokens between two cursors as a TokenStream. + /// fn tokens_between(begin: Cursor, end: Cursor) -> TokenStream { + /// assert!(begin <= end); + /// + /// let mut cursor = begin; + /// let mut tokens = TokenStream::new(); + /// while cursor < end { + /// let (token, next) = cursor.token_tree().unwrap(); + /// tokens.extend(std::iter::once(token)); + /// cursor = next; + /// } + /// tokens + /// } + /// + /// fn main() { + /// use quote::quote; + /// use syn::parse::{Parse, Parser}; + /// use syn::Token; + /// + /// // Parse syn::Type as a TokenStream, surrounded by angle brackets. + /// fn example(input: ParseStream) -> Result<TokenStream> { + /// let _langle: Token![<] = input.parse()?; + /// let ty = recognize_token_stream(syn::Type::parse)(input)?; + /// let _rangle: Token![>] = input.parse()?; + /// Ok(ty) + /// } + /// + /// let tokens = quote! 
{ <fn() -> u8> }; + /// println!("{}", example.parse2(tokens).unwrap()); + /// } + /// ``` pub fn cursor(&self) -> Cursor<'a> { self.cell.get() } fn check_unexpected(&self) -> Result<()> { match inner_unexpected(self).1 { - Some(span) => Err(Error::new(span, "unexpected token")), + Some((span, delimiter)) => Err(err_unexpected_token(span, delimiter)), None => Ok(()), } } } -#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] +#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl<T: Parse> Parse for Box<T> { fn parse(input: ParseStream) -> Result<Self> { input.parse().map(Box::new) } } -#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] +#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl<T: Parse + Token> Parse for Option<T> { fn parse(input: ParseStream) -> Result<Self> { if T::peek(input.cursor()) { @@ -1126,14 +1180,14 @@ impl<T: Parse + Token> Parse for Option<T> { } } -#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] +#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for TokenStream { fn parse(input: ParseStream) -> Result<Self> { input.step(|cursor| Ok((cursor.token_stream(), Cursor::empty()))) } } -#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] +#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for TokenTree { fn parse(input: ParseStream) -> Result<Self> { input.step(|cursor| match cursor.token_tree() { @@ -1143,7 +1197,7 @@ impl Parse for TokenTree { } } -#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] +#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for Group { fn parse(input: ParseStream) -> Result<Self> { input.step(|cursor| { @@ -1157,7 +1211,7 @@ impl Parse for Group { } } -#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] +#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for Punct { fn parse(input: ParseStream) -> Result<Self> { input.step(|cursor| match cursor.punct() { @@ -1167,7 +1221,7 @@ impl Parse for Punct { } } -#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] +#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for Literal { fn parse(input: ParseStream) -> Result<Self> { input.step(|cursor| match cursor.literal() { @@ -1187,24 +1241,24 @@ pub trait Parser: Sized { /// Parse a proc-macro2 token stream into the chosen syntax tree node. /// - /// This function will check that the input is fully parsed. If there are - /// any unparsed tokens at the end of the stream, an error is returned. + /// This function enforces that the input is fully parsed. If there are any + /// unparsed tokens at the end of the stream, an error is returned. fn parse2(self, tokens: TokenStream) -> Result<Self::Output>; /// Parse tokens of source code into the chosen syntax tree node. /// - /// This function will check that the input is fully parsed. If there are - /// any unparsed tokens at the end of the stream, an error is returned. + /// This function enforces that the input is fully parsed. If there are any + /// unparsed tokens at the end of the stream, an error is returned. #[cfg(feature = "proc-macro")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "proc-macro")))] + #[cfg_attr(docsrs, doc(cfg(feature = "proc-macro")))] fn parse(self, tokens: proc_macro::TokenStream) -> Result<Self::Output> { self.parse2(proc_macro2::TokenStream::from(tokens)) } /// Parse a string of Rust code into the chosen syntax tree node. /// - /// This function will check that the input is fully parsed. If there are - /// any unparsed tokens at the end of the string, an error is returned. 
+ /// This function enforces that the input is fully parsed. If there are any + /// unparsed tokens at the end of the string, an error is returned. /// /// # Hygiene /// @@ -1216,7 +1270,6 @@ pub trait Parser: Sized { // Not public API. #[doc(hidden)] - #[cfg(any(feature = "full", feature = "derive"))] fn __parse_scoped(self, scope: Span, tokens: TokenStream) -> Result<Self::Output> { let _ = scope; self.parse2(tokens) @@ -1241,14 +1294,15 @@ where let state = tokens_to_parse_buffer(&buf); let node = self(&state)?; state.check_unexpected()?; - if let Some(unexpected_span) = span_of_unexpected_ignoring_nones(state.cursor()) { - Err(Error::new(unexpected_span, "unexpected token")) + if let Some((unexpected_span, delimiter)) = + span_of_unexpected_ignoring_nones(state.cursor()) + { + Err(err_unexpected_token(unexpected_span, delimiter)) } else { Ok(node) } } - #[cfg(any(feature = "full", feature = "derive"))] fn __parse_scoped(self, scope: Span, tokens: TokenStream) -> Result<Self::Output> { let buf = TokenBuffer::new2(tokens); let cursor = buf.begin(); @@ -1256,19 +1310,30 @@ where let state = new_parse_buffer(scope, cursor, unexpected); let node = self(&state)?; state.check_unexpected()?; - if let Some(unexpected_span) = span_of_unexpected_ignoring_nones(state.cursor()) { - Err(Error::new(unexpected_span, "unexpected token")) + if let Some((unexpected_span, delimiter)) = + span_of_unexpected_ignoring_nones(state.cursor()) + { + Err(err_unexpected_token(unexpected_span, delimiter)) } else { Ok(node) } } } -#[cfg(any(feature = "full", feature = "derive"))] pub(crate) fn parse_scoped<F: Parser>(f: F, scope: Span, tokens: TokenStream) -> Result<F::Output> { f.__parse_scoped(scope, tokens) } +fn err_unexpected_token(span: Span, delimiter: Delimiter) -> Error { + let msg = match delimiter { + Delimiter::Parenthesis => "unexpected token, expected `)`", + Delimiter::Brace => "unexpected token, expected `}`", + Delimiter::Bracket => "unexpected token, expected `]`", + Delimiter::None => "unexpected token", + }; + Error::new(span, msg) +} + /// An empty syntax tree node that consumes no tokens when parsed. 
/// /// This is useful for attribute macros that want to ensure they are not @@ -1307,8 +1372,28 @@ impl Parse for Nothing { } } +#[cfg(feature = "printing")] +#[cfg_attr(docsrs, doc(cfg(feature = "printing")))] +impl ToTokens for Nothing { + fn to_tokens(&self, tokens: &mut TokenStream) { + let _ = tokens; + } +} + +#[cfg(feature = "clone-impls")] +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Clone for Nothing { + fn clone(&self) -> Self { + *self + } +} + +#[cfg(feature = "clone-impls")] +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] +impl Copy for Nothing {} + #[cfg(feature = "extra-traits")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] impl Debug for Nothing { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str("Nothing") @@ -1316,11 +1401,11 @@ impl Debug for Nothing { } #[cfg(feature = "extra-traits")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] impl Eq for Nothing {} #[cfg(feature = "extra-traits")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] impl PartialEq for Nothing { fn eq(&self, _other: &Self) -> bool { true @@ -1328,7 +1413,7 @@ impl PartialEq for Nothing { } #[cfg(feature = "extra-traits")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] impl Hash for Nothing { fn hash<H: Hasher>(&self, _state: &mut H) {} } diff --git a/vendor/syn/src/parse_macro_input.rs b/vendor/syn/src/parse_macro_input.rs index 6f1562f4..f0660aed 100644 --- a/vendor/syn/src/parse_macro_input.rs +++ b/vendor/syn/src/parse_macro_input.rs @@ -104,7 +104,7 @@ /// # } /// ``` #[macro_export] -#[cfg_attr(doc_cfg, doc(cfg(all(feature = "parsing", feature = "proc-macro"))))] +#[cfg_attr(docsrs, doc(cfg(all(feature = "parsing", feature = "proc-macro"))))] macro_rules! parse_macro_input { ($tokenstream:ident as $ty:ty) => { match $crate::parse::<$ty>($tokenstream) { diff --git a/vendor/syn/src/parse_quote.rs b/vendor/syn/src/parse_quote.rs index 59e51b41..2db20597 100644 --- a/vendor/syn/src/parse_quote.rs +++ b/vendor/syn/src/parse_quote.rs @@ -53,18 +53,29 @@ /// /// - [`Attribute`] — parses one attribute, allowing either outer like `#[...]` /// or inner like `#![...]` +/// - [`Vec<Attribute>`] — parses multiple attributes, including mixed kinds in +/// any order /// - [`Punctuated<T, P>`] — parses zero or more `T` separated by punctuation /// `P` with optional trailing punctuation +/// - [`Vec<Arm>`] — parses arms separated by optional commas according to the +/// same grammar as the inside of a `match` expression /// - [`Vec<Stmt>`] — parses the same as `Block::parse_within` +/// - [`Pat`], [`Box<Pat>`] — parses the same as +/// `Pat::parse_multi_with_leading_vert` +/// - [`Field`] — parses a named or unnamed struct field /// +/// [`Vec<Attribute>`]: Attribute +/// [`Vec<Arm>`]: Arm /// [`Vec<Stmt>`]: Block::parse_within +/// [`Pat`]: Pat::parse_multi_with_leading_vert +/// [`Box<Pat>`]: Pat::parse_multi_with_leading_vert /// /// # Panics /// /// Panics if the tokens fail to parse as the expected syntax tree type. The /// caller is responsible for ensuring that the input tokens are syntactically /// valid. -#[cfg_attr(doc_cfg, doc(cfg(all(feature = "parsing", feature = "printing"))))] +#[cfg_attr(docsrs, doc(cfg(all(feature = "parsing", feature = "printing"))))] #[macro_export] macro_rules! 
parse_quote { ($($tt:tt)*) => { @@ -96,7 +107,7 @@ macro_rules! parse_quote { /// }; /// } /// ``` -#[cfg_attr(doc_cfg, doc(cfg(all(feature = "parsing", feature = "printing"))))] +#[cfg_attr(docsrs, doc(cfg(all(feature = "parsing", feature = "printing"))))] #[macro_export] macro_rules! parse_quote_spanned { ($span:expr=> $($tt:tt)*) => { @@ -107,11 +118,13 @@ macro_rules! parse_quote_spanned { //////////////////////////////////////////////////////////////////////////////// // Can parse any type that implements Parse. -use crate::parse::{Parse, ParseStream, Parser, Result}; +use crate::error::Result; +use crate::parse::{Parse, ParseStream, Parser}; use proc_macro2::TokenStream; // Not public API. #[doc(hidden)] +#[track_caller] pub fn parse<T: ParseQuote>(token_stream: TokenStream) -> T { let parser = T::parse; match parser.parse2(token_stream) { @@ -136,9 +149,9 @@ impl<T: Parse> ParseQuote for T { use crate::punctuated::Punctuated; #[cfg(any(feature = "full", feature = "derive"))] -use crate::{attr, Attribute}; +use crate::{attr, Attribute, Field, FieldMutability, Ident, Type, Visibility}; #[cfg(feature = "full")] -use crate::{Block, Pat, Stmt}; +use crate::{Arm, Block, Pat, Stmt}; #[cfg(any(feature = "full", feature = "derive"))] impl ParseQuote for Attribute { @@ -151,6 +164,47 @@ impl ParseQuote for Attribute { } } +#[cfg(any(feature = "full", feature = "derive"))] +impl ParseQuote for Vec<Attribute> { + fn parse(input: ParseStream) -> Result<Self> { + let mut attrs = Vec::new(); + while !input.is_empty() { + attrs.push(ParseQuote::parse(input)?); + } + Ok(attrs) + } +} + +#[cfg(any(feature = "full", feature = "derive"))] +impl ParseQuote for Field { + fn parse(input: ParseStream) -> Result<Self> { + let attrs = input.call(Attribute::parse_outer)?; + let vis: Visibility = input.parse()?; + + let ident: Option<Ident>; + let colon_token: Option<Token![:]>; + let is_named = input.peek(Ident) && input.peek2(Token![:]) && !input.peek2(Token![::]); + if is_named { + ident = Some(input.parse()?); + colon_token = Some(input.parse()?); + } else { + ident = None; + colon_token = None; + } + + let ty: Type = input.parse()?; + + Ok(Field { + attrs, + vis, + mutability: FieldMutability::None, + ident, + colon_token, + ty, + }) + } +} + #[cfg(feature = "full")] impl ParseQuote for Pat { fn parse(input: ParseStream) -> Result<Self> { @@ -177,3 +231,10 @@ impl ParseQuote for Vec<Stmt> { Block::parse_within(input) } } + +#[cfg(feature = "full")] +impl ParseQuote for Vec<Arm> { + fn parse(input: ParseStream) -> Result<Self> { + Arm::parse_multiple(input) + } +} diff --git a/vendor/syn/src/pat.rs b/vendor/syn/src/pat.rs index df7da5bb..5cc3ff90 100644 --- a/vendor/syn/src/pat.rs +++ b/vendor/syn/src/pat.rs @@ -1,7 +1,17 @@ -use super::*; +use crate::attr::Attribute; +use crate::expr::Member; +use crate::ident::Ident; +use crate::path::{Path, QSelf}; use crate::punctuated::Punctuated; +use crate::token; +use crate::ty::Type; use proc_macro2::TokenStream; +pub use crate::expr::{ + ExprConst as PatConst, ExprLit as PatLit, ExprMacro as PatMacro, ExprPath as PatPath, + ExprRange as PatRange, +}; + ast_enum_of_structs! { /// A pattern in a local binding, function signature, match expression, or /// various other places. @@ -10,8 +20,8 @@ ast_enum_of_structs! { /// /// This type is a [syntax tree enum]. 
/// - /// [syntax tree enum]: Expr#syntax-tree-enums - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + /// [syntax tree enum]: crate::expr::Expr#syntax-tree-enums + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] #[non_exhaustive] pub enum Pat { /// A const block: `const { ... }`. @@ -74,12 +84,13 @@ ast_enum_of_structs! { // For testing exhaustiveness in downstream code, use the following idiom: // // match pat { + // #![cfg_attr(test, deny(non_exhaustive_omitted_patterns))] + // // Pat::Box(pat) => {...} // Pat::Ident(pat) => {...} // ... // Pat::Wild(pat) => {...} // - // #[cfg_attr(test, deny(non_exhaustive_omitted_patterns))] // _ => { /* some sane fallback */ } // } // @@ -95,7 +106,7 @@ ast_struct! { /// /// It may also be a unit struct or struct variant (e.g. `None`), or a /// constant; these cannot be distinguished syntactically. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct PatIdent { pub attrs: Vec<Attribute>, pub by_ref: Option<Token![ref]>, @@ -107,7 +118,7 @@ ast_struct! { ast_struct! { /// A pattern that matches any one of a set of cases. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct PatOr { pub attrs: Vec<Attribute>, pub leading_vert: Option<Token![|]>, @@ -117,6 +128,7 @@ ast_struct! { ast_struct! { /// A parenthesized pattern: `(A | B)`. + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct PatParen { pub attrs: Vec<Attribute>, pub paren_token: token::Paren, @@ -126,7 +138,7 @@ ast_struct! { ast_struct! { /// A reference pattern: `&mut var`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct PatReference { pub attrs: Vec<Attribute>, pub and_token: Token![&], @@ -137,7 +149,7 @@ ast_struct! { ast_struct! { /// The dots in a tuple or slice pattern: `[0, 1, ..]`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct PatRest { pub attrs: Vec<Attribute>, pub dot2_token: Token![..], @@ -146,7 +158,7 @@ ast_struct! { ast_struct! { /// A dynamically sized slice pattern: `[a, b, ref i @ .., y, z]`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct PatSlice { pub attrs: Vec<Attribute>, pub bracket_token: token::Bracket, @@ -156,7 +168,7 @@ ast_struct! { ast_struct! { /// A struct or struct variant pattern: `Variant { x, y, .. }`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct PatStruct { pub attrs: Vec<Attribute>, pub qself: Option<QSelf>, @@ -169,7 +181,7 @@ ast_struct! { ast_struct! { /// A tuple pattern: `(a, b)`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct PatTuple { pub attrs: Vec<Attribute>, pub paren_token: token::Paren, @@ -179,7 +191,7 @@ ast_struct! { ast_struct! { /// A tuple struct or tuple variant pattern: `Variant(x, y, .., z)`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct PatTupleStruct { pub attrs: Vec<Attribute>, pub qself: Option<QSelf>, @@ -191,7 +203,7 @@ ast_struct! { ast_struct! { /// A type ascription pattern: `foo: f64`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct PatType { pub attrs: Vec<Attribute>, pub pat: Box<Pat>, @@ -202,7 +214,7 @@ ast_struct! { ast_struct! 
{ /// A pattern that matches any value: `_`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct PatWild { pub attrs: Vec<Attribute>, pub underscore_token: Token![_], @@ -214,7 +226,7 @@ ast_struct! { /// /// Patterns like the fields of Foo `{ x, ref y, ref mut z }` are treated /// the same as `x: x, y: ref y, z: ref mut z` but there is no colon token. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct FieldPat { pub attrs: Vec<Attribute>, pub member: Member, @@ -225,12 +237,28 @@ ast_struct! { #[cfg(feature = "parsing")] pub(crate) mod parsing { - use super::*; - use crate::ext::IdentExt; - use crate::parse::{ParseBuffer, ParseStream, Result}; - use crate::path; + use crate::attr::Attribute; + use crate::error::{self, Result}; + use crate::expr::{ + Expr, ExprConst, ExprLit, ExprMacro, ExprPath, ExprRange, Member, RangeLimits, + }; + use crate::ext::IdentExt as _; + use crate::ident::Ident; + use crate::lit::Lit; + use crate::mac::{self, Macro}; + use crate::parse::{Parse, ParseBuffer, ParseStream}; + use crate::pat::{ + FieldPat, Pat, PatIdent, PatOr, PatParen, PatReference, PatRest, PatSlice, PatStruct, + PatTuple, PatTupleStruct, PatType, PatWild, + }; + use crate::path::{self, Path, QSelf}; + use crate::punctuated::Punctuated; + use crate::stmt::Block; + use crate::token; + use crate::verbatim; + use proc_macro2::TokenStream; - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Pat { /// Parse a pattern that does _not_ involve `|` at the top level. /// @@ -354,6 +382,18 @@ pub(crate) mod parsing { } } + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] + impl Parse for PatType { + fn parse(input: ParseStream) -> Result<Self> { + Ok(PatType { + attrs: Vec::new(), + pat: Box::new(Pat::parse_single(input)?), + colon_token: input.parse()?, + ty: input.parse()?, + }) + } + } + fn multi_pat_impl(input: ParseStream, leading_vert: Option<Token![|]>) -> Result<Pat> { let mut pat = Pat::parse_single(input)?; if leading_vert.is_some() @@ -377,7 +417,8 @@ pub(crate) mod parsing { } fn pat_path_or_macro_or_struct_or_range(input: ParseStream) -> Result<Pat> { - let (qself, path) = path::parsing::qpath(input, true)?; + let expr_style = true; + let (qself, path) = path::parsing::qpath(input, expr_style)?; if qself.is_none() && input.peek(Token![!]) @@ -430,7 +471,13 @@ pub(crate) mod parsing { attrs: Vec::new(), by_ref: input.parse()?, mutability: input.parse()?, - ident: input.call(Ident::parse_any)?, + ident: { + if input.peek(Token![self]) { + input.call(Ident::parse_any)? + } else { + input.parse()? 
+ } + }, subpat: { if input.peek(Token![@]) { let at_token: Token![@] = input.parse()?; @@ -506,15 +553,6 @@ pub(crate) mod parsing { }) } - impl Member { - fn is_unnamed(&self) -> bool { - match self { - Member::Named(_) => false, - Member::Unnamed(_) => true, - } - } - } - fn field_pat(input: ParseStream) -> Result<FieldPat> { let begin = input.fork(); let boxed: Option<Token![box]> = input.parse()?; @@ -528,7 +566,7 @@ pub(crate) mod parsing { }?; if boxed.is_none() && by_ref.is_none() && mutability.is_none() && input.peek(Token![:]) - || member.is_unnamed() + || !member.is_named() { return Ok(FieldPat { attrs: Vec::new(), @@ -768,12 +806,17 @@ pub(crate) mod parsing { #[cfg(feature = "printing")] mod printing { - use super::*; use crate::attr::FilterAttrs; + use crate::pat::{ + FieldPat, Pat, PatIdent, PatOr, PatParen, PatReference, PatRest, PatSlice, PatStruct, + PatTuple, PatTupleStruct, PatType, PatWild, + }; + use crate::path; + use crate::path::printing::PathStyle; use proc_macro2::TokenStream; - use quote::{ToTokens, TokenStreamExt}; + use quote::{ToTokens, TokenStreamExt as _}; - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for PatIdent { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); @@ -787,7 +830,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for PatOr { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); @@ -796,7 +839,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for PatParen { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); @@ -806,7 +849,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for PatReference { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); @@ -816,7 +859,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for PatRest { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); @@ -824,7 +867,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for PatSlice { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); @@ -834,11 +877,11 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for PatStruct { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); - path::printing::print_path(tokens, &self.qself, &self.path); + path::printing::print_qpath(tokens, &self.qself, &self.path, PathStyle::Expr); self.brace_token.surround(tokens, |tokens| { self.fields.to_tokens(tokens); // NOTE: We need a comma before the dot2 token if it is present. 
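One user-visible effect of the pat.rs and parse_quote.rs hunks above is that `PatType` now implements `Parse` directly, and `parse_quote!` gained support for a bare struct `Field`. A minimal sketch, assuming syn is built with its `full` feature (needed for `PatType`); the pattern and field text are illustrative, not taken from this diff:

```rust
use syn::{parse_quote, Field, PatType, Type};

fn main() {
    // Typed patterns parse directly now that PatType implements Parse
    // (requires syn's "full" feature).
    let pat: PatType = parse_quote!(buf: Vec<u8>);
    assert!(matches!(pat.ty.as_ref(), Type::Path(_)));
    assert!(pat.attrs.is_empty());

    // parse_quote! also accepts a single named or unnamed struct field,
    // per the ParseQuote impl added in the parse_quote.rs hunk earlier.
    let field: Field = parse_quote!(pub id: u64);
    assert_eq!(field.ident.as_ref().unwrap(), "id");
}
```
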
@@ -850,28 +893,37 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for PatTuple { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); self.paren_token.surround(tokens, |tokens| { self.elems.to_tokens(tokens); + // If there is only one element, a trailing comma is needed to + // distinguish PatTuple from PatParen, unless this is `(..)` + // which is a tuple pattern even without comma. + if self.elems.len() == 1 + && !self.elems.trailing_punct() + && !matches!(self.elems[0], Pat::Rest { .. }) + { + <Token![,]>::default().to_tokens(tokens); + } }); } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for PatTupleStruct { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); - path::printing::print_path(tokens, &self.qself, &self.path); + path::printing::print_qpath(tokens, &self.qself, &self.path, PathStyle::Expr); self.paren_token.surround(tokens, |tokens| { self.elems.to_tokens(tokens); }); } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for PatType { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); @@ -881,7 +933,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for PatWild { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); @@ -889,7 +941,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for FieldPat { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); diff --git a/vendor/syn/src/path.rs b/vendor/syn/src/path.rs index b9d96e66..d2fcb9bc 100644 --- a/vendor/syn/src/path.rs +++ b/vendor/syn/src/path.rs @@ -1,9 +1,16 @@ -use super::*; +#[cfg(feature = "parsing")] +use crate::error::Result; +use crate::expr::Expr; +use crate::generics::TypeParamBound; +use crate::ident::Ident; +use crate::lifetime::Lifetime; use crate::punctuated::Punctuated; +use crate::token; +use crate::ty::{ReturnType, Type}; ast_struct! { /// A path at which a named item is exported (e.g. `std::collections::HashMap`). - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct Path { pub leading_colon: Option<Token![::]>, pub segments: Punctuated<PathSegment, Token![::]>, @@ -85,7 +92,7 @@ impl Path { /// An error if this path is not a single ident, as defined in `get_ident`. #[cfg(feature = "parsing")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] pub fn require_ident(&self) -> Result<&Ident> { self.get_ident().ok_or_else(|| { crate::error::new2( @@ -99,7 +106,7 @@ impl Path { ast_struct! { /// A segment of a path together with any path arguments on that segment. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct PathSegment { pub ident: Ident, pub arguments: PathArguments, @@ -128,7 +135,7 @@ ast_enum! { /// ## Parenthesized /// /// The `(A, B) -> C` in `Fn(A, B) -> C`. 
- #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub enum PathArguments { None, /// The `<'a, T>` in `std::slice::iter<'a, T>`. @@ -163,7 +170,7 @@ impl PathArguments { ast_enum! { /// An individual generic argument, like `'a`, `T`, or `Item = T`. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] #[non_exhaustive] pub enum GenericArgument { /// A lifetime argument. @@ -189,7 +196,7 @@ ast_enum! { ast_struct! { /// Angle bracketed arguments of a path segment: the `<K, V>` in `HashMap<K, /// V>`. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct AngleBracketedGenericArguments { pub colon2_token: Option<Token![::]>, pub lt_token: Token![<], @@ -201,7 +208,7 @@ ast_struct! { ast_struct! { /// A binding (equality constraint) on an associated type: the `Item = u8` /// in `Iterator<Item = u8>`. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct AssocType { pub ident: Ident, pub generics: Option<AngleBracketedGenericArguments>, @@ -213,7 +220,7 @@ ast_struct! { ast_struct! { /// An equality constraint on an associated constant: the `PANIC = false` in /// `Trait<PANIC = false>`. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct AssocConst { pub ident: Ident, pub generics: Option<AngleBracketedGenericArguments>, @@ -224,7 +231,7 @@ ast_struct! { ast_struct! { /// An associated type bound: `Iterator<Item: Display>`. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct Constraint { pub ident: Ident, pub generics: Option<AngleBracketedGenericArguments>, @@ -236,7 +243,7 @@ ast_struct! { ast_struct! { /// Arguments of a function path segment: the `(A, B) -> C` in `Fn(A,B) -> /// C`. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct ParenthesizedGenericArguments { pub paren_token: token::Paren, /// `(A, B)` @@ -263,7 +270,7 @@ ast_struct! { /// ^~~~~~ ^ /// ty position = 0 /// ``` - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct QSelf { pub lt_token: Token![<], pub ty: Box<Type>, @@ -275,19 +282,37 @@ ast_struct! 
{ #[cfg(feature = "parsing")] pub(crate) mod parsing { - use super::*; - - use crate::ext::IdentExt; - use crate::parse::{Parse, ParseStream, Result}; - - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + use crate::error::Result; + #[cfg(feature = "full")] + use crate::expr::ExprBlock; + use crate::expr::{Expr, ExprPath}; + use crate::ext::IdentExt as _; + #[cfg(feature = "full")] + use crate::generics::TypeParamBound; + use crate::ident::Ident; + use crate::lifetime::Lifetime; + use crate::lit::Lit; + use crate::parse::{Parse, ParseStream}; + #[cfg(feature = "full")] + use crate::path::Constraint; + use crate::path::{ + AngleBracketedGenericArguments, AssocConst, AssocType, GenericArgument, + ParenthesizedGenericArguments, Path, PathArguments, PathSegment, QSelf, + }; + use crate::punctuated::Punctuated; + use crate::token; + use crate::ty::{ReturnType, Type}; + #[cfg(not(feature = "full"))] + use crate::verbatim; + + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for Path { fn parse(input: ParseStream) -> Result<Self> { Self::parse_helper(input, false) } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for GenericArgument { fn parse(input: ParseStream) -> Result<Self> { if input.peek(Lifetime) && !input.peek2(Token![+]) { @@ -352,8 +377,15 @@ pub(crate) mod parsing { if input.peek(Token![,]) || input.peek(Token![>]) { break; } - let value: TypeParamBound = input.parse()?; - bounds.push_value(value); + bounds.push_value({ + let allow_precise_capture = false; + let allow_const = true; + TypeParamBound::parse_single( + input, + allow_precise_capture, + allow_const, + )? + }); if !input.peek(Token![+]) { break; } @@ -418,13 +450,16 @@ pub(crate) mod parsing { /// The ordinary [`Parse`] impl for `AngleBracketedGenericArguments` /// parses optional leading `::`. #[cfg(feature = "full")] - #[cfg_attr(doc_cfg, doc(cfg(all(feature = "parsing", feature = "full"))))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "parsing", feature = "full"))))] pub fn parse_turbofish(input: ParseStream) -> Result<Self> { let colon2_token: Token![::] = input.parse()?; Self::do_parse(Some(colon2_token), input) } - fn do_parse(colon2_token: Option<Token![::]>, input: ParseStream) -> Result<Self> { + pub(crate) fn do_parse( + colon2_token: Option<Token![::]>, + input: ParseStream, + ) -> Result<Self> { Ok(AngleBracketedGenericArguments { colon2_token, lt_token: input.parse()?, @@ -449,7 +484,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for AngleBracketedGenericArguments { fn parse(input: ParseStream) -> Result<Self> { let colon2_token: Option<Token![::]> = input.parse()?; @@ -457,7 +492,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ParenthesizedGenericArguments { fn parse(input: ParseStream) -> Result<Self> { let content; @@ -469,7 +504,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for PathSegment { fn parse(input: ParseStream) -> Result<Self> { Self::parse_helper(input, false) @@ -493,7 +528,10 @@ pub(crate) mod parsing { input.parse()? 
}; - if !expr_style && input.peek(Token![<]) && !input.peek(Token![<=]) + if !expr_style + && input.peek(Token![<]) + && !input.peek(Token![<=]) + && !input.peek(Token![<<=]) || input.peek(Token![::]) && input.peek3(Token![<]) { Ok(PathSegment { @@ -537,7 +575,7 @@ pub(crate) mod parsing { /// } /// } /// ``` - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] pub fn parse_mod_style(input: ParseStream) -> Result<Self> { Ok(Path { leading_colon: input.parse()?, @@ -660,7 +698,11 @@ pub(crate) mod parsing { #[cfg(feature = "printing")] pub(crate) mod printing { - use super::*; + use crate::generics; + use crate::path::{ + AngleBracketedGenericArguments, AssocConst, AssocType, Constraint, GenericArgument, + ParenthesizedGenericArguments, Path, PathArguments, PathSegment, QSelf, + }; use crate::print::TokensOrDefault; #[cfg(feature = "parsing")] use crate::spanned::Spanned; @@ -670,67 +712,76 @@ pub(crate) mod printing { use quote::ToTokens; use std::cmp; - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + pub(crate) enum PathStyle { + Expr, + Mod, + AsWritten, + } + + impl Copy for PathStyle {} + + impl Clone for PathStyle { + fn clone(&self) -> Self { + *self + } + } + + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for Path { fn to_tokens(&self, tokens: &mut TokenStream) { - self.leading_colon.to_tokens(tokens); - self.segments.to_tokens(tokens); + print_path(tokens, self, PathStyle::AsWritten); + } + } + + pub(crate) fn print_path(tokens: &mut TokenStream, path: &Path, style: PathStyle) { + path.leading_colon.to_tokens(tokens); + for segment in path.segments.pairs() { + print_path_segment(tokens, segment.value(), style); + segment.punct().to_tokens(tokens); } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for PathSegment { fn to_tokens(&self, tokens: &mut TokenStream) { - self.ident.to_tokens(tokens); - self.arguments.to_tokens(tokens); + print_path_segment(tokens, self, PathStyle::AsWritten); } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + fn print_path_segment(tokens: &mut TokenStream, segment: &PathSegment, style: PathStyle) { + segment.ident.to_tokens(tokens); + print_path_arguments(tokens, &segment.arguments, style); + } + + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for PathArguments { fn to_tokens(&self, tokens: &mut TokenStream) { - match self { - PathArguments::None => {} - PathArguments::AngleBracketed(arguments) => { - arguments.to_tokens(tokens); - } - PathArguments::Parenthesized(arguments) => { - arguments.to_tokens(tokens); - } + print_path_arguments(tokens, self, PathStyle::AsWritten); + } + } + + fn print_path_arguments(tokens: &mut TokenStream, arguments: &PathArguments, style: PathStyle) { + match arguments { + PathArguments::None => {} + PathArguments::AngleBracketed(arguments) => { + print_angle_bracketed_generic_arguments(tokens, arguments, style); + } + PathArguments::Parenthesized(arguments) => { + print_parenthesized_generic_arguments(tokens, arguments, style); } } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for GenericArgument { #[allow(clippy::match_same_arms)] fn to_tokens(&self, tokens: &mut TokenStream) { match self { GenericArgument::Lifetime(lt) => lt.to_tokens(tokens), GenericArgument::Type(ty) => ty.to_tokens(tokens), - GenericArgument::Const(expr) => match expr { - Expr::Lit(expr) => 
expr.to_tokens(tokens), - - Expr::Path(expr) - if expr.attrs.is_empty() - && expr.qself.is_none() - && expr.path.get_ident().is_some() => - { - expr.to_tokens(tokens); - } - - #[cfg(feature = "full")] - Expr::Block(expr) => expr.to_tokens(tokens), - - #[cfg(not(feature = "full"))] - Expr::Verbatim(expr) => expr.to_tokens(tokens), - - // ERROR CORRECTION: Add braces to make sure that the - // generated code is valid. - _ => token::Brace::default().surround(tokens, |tokens| { - expr.to_tokens(tokens); - }), - }, + GenericArgument::Const(expr) => { + generics::printing::print_const_argument(expr, tokens); + } GenericArgument::AssocType(assoc) => assoc.to_tokens(tokens), GenericArgument::AssocConst(assoc) => assoc.to_tokens(tokens), GenericArgument::Constraint(constraint) => constraint.to_tokens(tokens), @@ -738,50 +789,62 @@ pub(crate) mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for AngleBracketedGenericArguments { fn to_tokens(&self, tokens: &mut TokenStream) { - self.colon2_token.to_tokens(tokens); - self.lt_token.to_tokens(tokens); - - // Print lifetimes before types/consts/bindings, regardless of their - // order in self.args. - let mut trailing_or_empty = true; - for param in self.args.pairs() { - match param.value() { - GenericArgument::Lifetime(_) => { - param.to_tokens(tokens); - trailing_or_empty = param.punct().is_some(); - } - GenericArgument::Type(_) - | GenericArgument::Const(_) - | GenericArgument::AssocType(_) - | GenericArgument::AssocConst(_) - | GenericArgument::Constraint(_) => {} + print_angle_bracketed_generic_arguments(tokens, self, PathStyle::AsWritten); + } + } + + pub(crate) fn print_angle_bracketed_generic_arguments( + tokens: &mut TokenStream, + arguments: &AngleBracketedGenericArguments, + style: PathStyle, + ) { + if let PathStyle::Mod = style { + return; + } + + conditionally_print_turbofish(tokens, &arguments.colon2_token, style); + arguments.lt_token.to_tokens(tokens); + + // Print lifetimes before types/consts/bindings, regardless of their + // order in args. 
+ let mut trailing_or_empty = true; + for param in arguments.args.pairs() { + match param.value() { + GenericArgument::Lifetime(_) => { + param.to_tokens(tokens); + trailing_or_empty = param.punct().is_some(); } + GenericArgument::Type(_) + | GenericArgument::Const(_) + | GenericArgument::AssocType(_) + | GenericArgument::AssocConst(_) + | GenericArgument::Constraint(_) => {} } - for param in self.args.pairs() { - match param.value() { - GenericArgument::Type(_) - | GenericArgument::Const(_) - | GenericArgument::AssocType(_) - | GenericArgument::AssocConst(_) - | GenericArgument::Constraint(_) => { - if !trailing_or_empty { - <Token![,]>::default().to_tokens(tokens); - } - param.to_tokens(tokens); - trailing_or_empty = param.punct().is_some(); + } + for param in arguments.args.pairs() { + match param.value() { + GenericArgument::Type(_) + | GenericArgument::Const(_) + | GenericArgument::AssocType(_) + | GenericArgument::AssocConst(_) + | GenericArgument::Constraint(_) => { + if !trailing_or_empty { + <Token![,]>::default().to_tokens(tokens); } - GenericArgument::Lifetime(_) => {} + param.to_tokens(tokens); + trailing_or_empty = param.punct().is_some(); } + GenericArgument::Lifetime(_) => {} } - - self.gt_token.to_tokens(tokens); } + + arguments.gt_token.to_tokens(tokens); } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for AssocType { fn to_tokens(&self, tokens: &mut TokenStream) { self.ident.to_tokens(tokens); @@ -791,17 +854,17 @@ pub(crate) mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for AssocConst { fn to_tokens(&self, tokens: &mut TokenStream) { self.ident.to_tokens(tokens); self.generics.to_tokens(tokens); self.eq_token.to_tokens(tokens); - self.value.to_tokens(tokens); + generics::printing::print_const_argument(&self.value, tokens); } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for Constraint { fn to_tokens(&self, tokens: &mut TokenStream) { self.ident.to_tokens(tokens); @@ -811,21 +874,39 @@ pub(crate) mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ParenthesizedGenericArguments { fn to_tokens(&self, tokens: &mut TokenStream) { - self.paren_token.surround(tokens, |tokens| { - self.inputs.to_tokens(tokens); - }); - self.output.to_tokens(tokens); + print_parenthesized_generic_arguments(tokens, self, PathStyle::AsWritten); + } + } + + fn print_parenthesized_generic_arguments( + tokens: &mut TokenStream, + arguments: &ParenthesizedGenericArguments, + style: PathStyle, + ) { + if let PathStyle::Mod = style { + return; } + + conditionally_print_turbofish(tokens, &None, style); + arguments.paren_token.surround(tokens, |tokens| { + arguments.inputs.to_tokens(tokens); + }); + arguments.output.to_tokens(tokens); } - pub(crate) fn print_path(tokens: &mut TokenStream, qself: &Option<QSelf>, path: &Path) { + pub(crate) fn print_qpath( + tokens: &mut TokenStream, + qself: &Option<QSelf>, + path: &Path, + style: PathStyle, + ) { let qself = match qself { Some(qself) => qself, None => { - path.to_tokens(tokens); + print_path(tokens, path, style); return; } }; @@ -838,25 +919,36 @@ pub(crate) mod printing { TokensOrDefault(&qself.as_token).to_tokens(tokens); path.leading_colon.to_tokens(tokens); for (i, segment) in 
segments.by_ref().take(pos).enumerate() { + print_path_segment(tokens, segment.value(), PathStyle::AsWritten); if i + 1 == pos { - segment.value().to_tokens(tokens); qself.gt_token.to_tokens(tokens); - segment.punct().to_tokens(tokens); - } else { - segment.to_tokens(tokens); } + segment.punct().to_tokens(tokens); } } else { qself.gt_token.to_tokens(tokens); path.leading_colon.to_tokens(tokens); } for segment in segments { - segment.to_tokens(tokens); + print_path_segment(tokens, segment.value(), style); + segment.punct().to_tokens(tokens); + } + } + + fn conditionally_print_turbofish( + tokens: &mut TokenStream, + colon2_token: &Option<Token![::]>, + style: PathStyle, + ) { + match style { + PathStyle::Expr => TokensOrDefault(colon2_token).to_tokens(tokens), + PathStyle::Mod => unreachable!(), + PathStyle::AsWritten => colon2_token.to_tokens(tokens), } } #[cfg(feature = "parsing")] - #[cfg_attr(doc_cfg, doc(cfg(all(feature = "parsing", feature = "printing"))))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "parsing", feature = "printing"))))] impl Spanned for QSelf { fn span(&self) -> Span { struct QSelfDelimiters<'a>(&'a QSelf); diff --git a/vendor/syn/src/precedence.rs b/vendor/syn/src/precedence.rs new file mode 100644 index 00000000..1891bfc2 --- /dev/null +++ b/vendor/syn/src/precedence.rs @@ -0,0 +1,210 @@ +#[cfg(all(feature = "printing", feature = "full"))] +use crate::attr::{AttrStyle, Attribute}; +#[cfg(feature = "printing")] +use crate::expr::Expr; +#[cfg(all(feature = "printing", feature = "full"))] +use crate::expr::{ + ExprArray, ExprAsync, ExprAwait, ExprBlock, ExprBreak, ExprCall, ExprConst, ExprContinue, + ExprField, ExprForLoop, ExprGroup, ExprIf, ExprIndex, ExprInfer, ExprLit, ExprLoop, ExprMacro, + ExprMatch, ExprMethodCall, ExprParen, ExprPath, ExprRepeat, ExprReturn, ExprStruct, ExprTry, + ExprTryBlock, ExprTuple, ExprUnsafe, ExprWhile, ExprYield, +}; +use crate::op::BinOp; +#[cfg(all(feature = "printing", feature = "full"))] +use crate::ty::ReturnType; +use std::cmp::Ordering; + +// Reference: https://doc.rust-lang.org/reference/expressions.html#expression-precedence +pub(crate) enum Precedence { + // return, break, closures + Jump, + // = += -= *= /= %= &= |= ^= <<= >>= + Assign, + // .. ..= + Range, + // || + Or, + // && + And, + // let + #[cfg(feature = "printing")] + Let, + // == != < > <= >= + Compare, + // | + BitOr, + // ^ + BitXor, + // & + BitAnd, + // << >> + Shift, + // + - + Sum, + // * / % + Product, + // as + Cast, + // unary - * ! 
& &mut + #[cfg(feature = "printing")] + Prefix, + // paths, loops, function calls, array indexing, field expressions, method calls + #[cfg(feature = "printing")] + Unambiguous, +} + +impl Precedence { + pub(crate) const MIN: Self = Precedence::Jump; + + pub(crate) fn of_binop(op: &BinOp) -> Self { + match op { + BinOp::Add(_) | BinOp::Sub(_) => Precedence::Sum, + BinOp::Mul(_) | BinOp::Div(_) | BinOp::Rem(_) => Precedence::Product, + BinOp::And(_) => Precedence::And, + BinOp::Or(_) => Precedence::Or, + BinOp::BitXor(_) => Precedence::BitXor, + BinOp::BitAnd(_) => Precedence::BitAnd, + BinOp::BitOr(_) => Precedence::BitOr, + BinOp::Shl(_) | BinOp::Shr(_) => Precedence::Shift, + + BinOp::Eq(_) + | BinOp::Lt(_) + | BinOp::Le(_) + | BinOp::Ne(_) + | BinOp::Ge(_) + | BinOp::Gt(_) => Precedence::Compare, + + BinOp::AddAssign(_) + | BinOp::SubAssign(_) + | BinOp::MulAssign(_) + | BinOp::DivAssign(_) + | BinOp::RemAssign(_) + | BinOp::BitXorAssign(_) + | BinOp::BitAndAssign(_) + | BinOp::BitOrAssign(_) + | BinOp::ShlAssign(_) + | BinOp::ShrAssign(_) => Precedence::Assign, + } + } + + #[cfg(feature = "printing")] + pub(crate) fn of(e: &Expr) -> Self { + #[cfg(feature = "full")] + fn prefix_attrs(attrs: &[Attribute]) -> Precedence { + for attr in attrs { + if let AttrStyle::Outer = attr.style { + return Precedence::Prefix; + } + } + Precedence::Unambiguous + } + + match e { + #[cfg(feature = "full")] + Expr::Closure(e) => match e.output { + ReturnType::Default => Precedence::Jump, + ReturnType::Type(..) => prefix_attrs(&e.attrs), + }, + + #[cfg(feature = "full")] + Expr::Break(ExprBreak { expr, .. }) + | Expr::Return(ExprReturn { expr, .. }) + | Expr::Yield(ExprYield { expr, .. }) => match expr { + Some(_) => Precedence::Jump, + None => Precedence::Unambiguous, + }, + + Expr::Assign(_) => Precedence::Assign, + Expr::Range(_) => Precedence::Range, + Expr::Binary(e) => Precedence::of_binop(&e.op), + Expr::Let(_) => Precedence::Let, + Expr::Cast(_) => Precedence::Cast, + Expr::RawAddr(_) | Expr::Reference(_) | Expr::Unary(_) => Precedence::Prefix, + + #[cfg(feature = "full")] + Expr::Array(ExprArray { attrs, .. }) + | Expr::Async(ExprAsync { attrs, .. }) + | Expr::Await(ExprAwait { attrs, .. }) + | Expr::Block(ExprBlock { attrs, .. }) + | Expr::Call(ExprCall { attrs, .. }) + | Expr::Const(ExprConst { attrs, .. }) + | Expr::Continue(ExprContinue { attrs, .. }) + | Expr::Field(ExprField { attrs, .. }) + | Expr::ForLoop(ExprForLoop { attrs, .. }) + | Expr::Group(ExprGroup { attrs, .. }) + | Expr::If(ExprIf { attrs, .. }) + | Expr::Index(ExprIndex { attrs, .. }) + | Expr::Infer(ExprInfer { attrs, .. }) + | Expr::Lit(ExprLit { attrs, .. }) + | Expr::Loop(ExprLoop { attrs, .. }) + | Expr::Macro(ExprMacro { attrs, .. }) + | Expr::Match(ExprMatch { attrs, .. }) + | Expr::MethodCall(ExprMethodCall { attrs, .. }) + | Expr::Paren(ExprParen { attrs, .. }) + | Expr::Path(ExprPath { attrs, .. }) + | Expr::Repeat(ExprRepeat { attrs, .. }) + | Expr::Struct(ExprStruct { attrs, .. }) + | Expr::Try(ExprTry { attrs, .. }) + | Expr::TryBlock(ExprTryBlock { attrs, .. }) + | Expr::Tuple(ExprTuple { attrs, .. }) + | Expr::Unsafe(ExprUnsafe { attrs, .. }) + | Expr::While(ExprWhile { attrs, .. 
}) => prefix_attrs(attrs), + + #[cfg(not(feature = "full"))] + Expr::Array(_) + | Expr::Async(_) + | Expr::Await(_) + | Expr::Block(_) + | Expr::Call(_) + | Expr::Const(_) + | Expr::Continue(_) + | Expr::Field(_) + | Expr::ForLoop(_) + | Expr::Group(_) + | Expr::If(_) + | Expr::Index(_) + | Expr::Infer(_) + | Expr::Lit(_) + | Expr::Loop(_) + | Expr::Macro(_) + | Expr::Match(_) + | Expr::MethodCall(_) + | Expr::Paren(_) + | Expr::Path(_) + | Expr::Repeat(_) + | Expr::Struct(_) + | Expr::Try(_) + | Expr::TryBlock(_) + | Expr::Tuple(_) + | Expr::Unsafe(_) + | Expr::While(_) => Precedence::Unambiguous, + + Expr::Verbatim(_) => Precedence::Unambiguous, + + #[cfg(not(feature = "full"))] + Expr::Break(_) | Expr::Closure(_) | Expr::Return(_) | Expr::Yield(_) => unreachable!(), + } + } +} + +impl Copy for Precedence {} + +impl Clone for Precedence { + fn clone(&self) -> Self { + *self + } +} + +impl PartialEq for Precedence { + fn eq(&self, other: &Self) -> bool { + *self as u8 == *other as u8 + } +} + +impl PartialOrd for Precedence { + fn partial_cmp(&self, other: &Self) -> Option<Ordering> { + let this = *self as u8; + let other = *other as u8; + Some(this.cmp(&other)) + } +} diff --git a/vendor/syn/src/punctuated.rs b/vendor/syn/src/punctuated.rs index 3ea8a1d4..fdefc7d2 100644 --- a/vendor/syn/src/punctuated.rs +++ b/vendor/syn/src/punctuated.rs @@ -20,6 +20,15 @@ //! ~~~~^ ~~~~^ ~~~~ //! ``` +use crate::drops::{NoDrop, TrivialDrop}; +#[cfg(feature = "parsing")] +use crate::error::Result; +#[cfg(feature = "parsing")] +use crate::parse::{Parse, ParseStream}; +#[cfg(feature = "parsing")] +use crate::token::Token; +#[cfg(all(feature = "fold", any(feature = "full", feature = "derive")))] +use std::collections::VecDeque; #[cfg(feature = "extra-traits")] use std::fmt::{self, Debug}; #[cfg(feature = "extra-traits")] @@ -31,12 +40,6 @@ use std::option; use std::slice; use std::vec; -use crate::drops::{NoDrop, TrivialDrop}; -#[cfg(feature = "parsing")] -use crate::parse::{Parse, ParseStream, Result}; -#[cfg(feature = "parsing")] -use crate::token::Token; - /// **A punctuated sequence of syntax tree nodes of type `T` separated by /// punctuation of type `P`.** /// @@ -91,6 +94,29 @@ impl<T, P> Punctuated<T, P> { self.iter_mut().next_back() } + /// Borrows the element at the given index. + pub fn get(&self, index: usize) -> Option<&T> { + if let Some((value, _punct)) = self.inner.get(index) { + Some(value) + } else if index == self.inner.len() { + self.last.as_deref() + } else { + None + } + } + + /// Mutably borrows the element at the given index. + pub fn get_mut(&mut self, index: usize) -> Option<&mut T> { + let inner_len = self.inner.len(); + if let Some((value, _punct)) = self.inner.get_mut(index) { + Some(value) + } else if index == inner_len { + self.last.as_deref_mut() + } else { + None + } + } + /// Returns an iterator over borrowed syntax tree nodes of type `&T`. pub fn iter(&self) -> Iter<T> { Iter { @@ -262,7 +288,7 @@ impl<T, P> Punctuated<T, P> { /// Parsing continues until the end of this parse stream. The entire content /// of this parse stream must consist of `T` and `P`. 
#[cfg(feature = "parsing")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] pub fn parse_terminated(input: ParseStream) -> Result<Self> where T: Parse, @@ -280,10 +306,10 @@ impl<T, P> Punctuated<T, P> { /// /// [`parse_terminated`]: Punctuated::parse_terminated #[cfg(feature = "parsing")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] - pub fn parse_terminated_with( - input: ParseStream, - parser: fn(ParseStream) -> Result<T>, + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] + pub fn parse_terminated_with<'a>( + input: ParseStream<'a>, + parser: fn(ParseStream<'a>) -> Result<T>, ) -> Result<Self> where P: Parse, @@ -314,7 +340,7 @@ impl<T, P> Punctuated<T, P> { /// is not followed by a `P`, even if there are remaining tokens in the /// stream. #[cfg(feature = "parsing")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] pub fn parse_separated_nonempty(input: ParseStream) -> Result<Self> where T: Parse, @@ -332,10 +358,10 @@ impl<T, P> Punctuated<T, P> { /// /// [`parse_separated_nonempty`]: Punctuated::parse_separated_nonempty #[cfg(feature = "parsing")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] - pub fn parse_separated_nonempty_with( - input: ParseStream, - parser: fn(ParseStream) -> Result<T>, + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] + pub fn parse_separated_nonempty_with<'a>( + input: ParseStream<'a>, + parser: fn(ParseStream<'a>) -> Result<T>, ) -> Result<Self> where P: Token + Parse, @@ -357,7 +383,7 @@ impl<T, P> Punctuated<T, P> { } #[cfg(feature = "clone-impls")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] impl<T, P> Clone for Punctuated<T, P> where T: Clone, @@ -377,7 +403,7 @@ where } #[cfg(feature = "extra-traits")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] impl<T, P> Eq for Punctuated<T, P> where T: Eq, @@ -386,7 +412,7 @@ where } #[cfg(feature = "extra-traits")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] impl<T, P> PartialEq for Punctuated<T, P> where T: PartialEq, @@ -399,7 +425,7 @@ where } #[cfg(feature = "extra-traits")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] impl<T, P> Hash for Punctuated<T, P> where T: Hash, @@ -413,7 +439,7 @@ where } #[cfg(feature = "extra-traits")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] impl<T: Debug, P: Debug> Debug for Punctuated<T, P> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let mut list = f.debug_list(); @@ -477,7 +503,7 @@ where let mut nomore = false; for pair in i { if nomore { - panic!("Punctuated extended with items after a Pair::End"); + panic!("punctuated extended with items after a Pair::End"); } match pair { Pair::Punctuated(a, b) => punctuated.inner.push((a, b)), @@ -495,8 +521,13 @@ impl<T, P> IntoIterator for Punctuated<T, P> { fn into_iter(self) -> Self::IntoIter { let mut elements = Vec::with_capacity(self.len()); - elements.extend(self.inner.into_iter().map(|pair| pair.0)); - elements.extend(self.last.map(|t| *t)); + + for (t, _) in self.inner { + elements.push(t); + } + if let Some(t) = self.last { + elements.push(*t); + } IntoIter { inner: elements.into_iter(), @@ -1006,7 +1037,7 @@ impl<T, P> Pair<T, P> { 
} #[cfg(feature = "clone-impls")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] impl<T, P> Pair<&T, &P> { pub fn cloned(self) -> Pair<T, P> where @@ -1021,7 +1052,7 @@ impl<T, P> Pair<&T, &P> { } #[cfg(feature = "clone-impls")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] impl<T, P> Clone for Pair<T, P> where T: Clone, @@ -1036,7 +1067,7 @@ where } #[cfg(feature = "clone-impls")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] impl<T, P> Copy for Pair<T, P> where T: Copy, @@ -1048,7 +1079,7 @@ impl<T, P> Index<usize> for Punctuated<T, P> { type Output = T; fn index(&self, index: usize) -> &Self::Output { - if index == self.len() - 1 { + if index.checked_add(1) == Some(self.len()) { match &self.last { Some(t) => t, None => &self.inner[index].0, @@ -1061,7 +1092,7 @@ impl<T, P> Index<usize> for Punctuated<T, P> { impl<T, P> IndexMut<usize> for Punctuated<T, P> { fn index_mut(&mut self, index: usize) -> &mut Self::Output { - if index == self.len() - 1 { + if index.checked_add(1) == Some(self.len()) { match &mut self.last { Some(t) => t, None => &mut self.inner[index].0, @@ -1072,13 +1103,43 @@ impl<T, P> IndexMut<usize> for Punctuated<T, P> { } } +#[cfg(all(feature = "fold", any(feature = "full", feature = "derive")))] +pub(crate) fn fold<T, P, V, F>( + punctuated: Punctuated<T, P>, + fold: &mut V, + mut f: F, +) -> Punctuated<T, P> +where + V: ?Sized, + F: FnMut(&mut V, T) -> T, +{ + let Punctuated { inner, last } = punctuated; + + // Convert into VecDeque to prevent needing to allocate a new Vec<(T, P)> + // for the folded elements. + let mut inner = VecDeque::from(inner); + for _ in 0..inner.len() { + if let Some((t, p)) = inner.pop_front() { + inner.push_back((f(fold, t), p)); + } + } + + Punctuated { + inner: Vec::from(inner), + last: match last { + Some(t) => Some(Box::new(f(fold, *t))), + None => None, + }, + } +} + #[cfg(feature = "printing")] mod printing { - use super::*; + use crate::punctuated::{Pair, Punctuated}; use proc_macro2::TokenStream; - use quote::{ToTokens, TokenStreamExt}; + use quote::{ToTokens, TokenStreamExt as _}; - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl<T, P> ToTokens for Punctuated<T, P> where T: ToTokens, @@ -1089,7 +1150,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl<T, P> ToTokens for Pair<T, P> where T: ToTokens, diff --git a/vendor/syn/src/restriction.rs b/vendor/syn/src/restriction.rs index 97c7f5ae..6e6758f3 100644 --- a/vendor/syn/src/restriction.rs +++ b/vendor/syn/src/restriction.rs @@ -1,4 +1,5 @@ -use super::*; +use crate::path::Path; +use crate::token; ast_enum! { /// The visibility level of an item: inherited or `pub` or @@ -8,8 +9,8 @@ ast_enum! { /// /// This type is a [syntax tree enum]. /// - /// [syntax tree enum]: Expr#syntax-tree-enums - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + /// [syntax tree enum]: crate::expr::Expr#syntax-tree-enums + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub enum Visibility { /// A public visibility level: `pub`. Public(Token![pub]), @@ -26,7 +27,7 @@ ast_enum! { ast_struct! 
{ /// A visibility level restricted to some path: `pub(self)` or /// `pub(super)` or `pub(crate)` or `pub(in some::module)`. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct VisRestricted { pub pub_token: Token![pub], pub paren_token: token::Paren, @@ -37,7 +38,7 @@ ast_struct! { ast_enum! { /// Unused, but reserved for RFC 3323 restrictions. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] #[non_exhaustive] pub enum FieldMutability { None, @@ -57,12 +58,16 @@ ast_enum! { #[cfg(feature = "parsing")] pub(crate) mod parsing { - use super::*; - use crate::ext::IdentExt; - use crate::parse::discouraged::Speculative; - use crate::parse::{Parse, ParseStream, Result}; - - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + use crate::error::Result; + use crate::ext::IdentExt as _; + use crate::ident::Ident; + use crate::parse::discouraged::Speculative as _; + use crate::parse::{Parse, ParseStream}; + use crate::path::Path; + use crate::restriction::{VisRestricted, Visibility}; + use crate::token; + + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for Visibility { fn parse(input: ParseStream) -> Result<Self> { // Recognize an empty None-delimited group, as produced by a $:vis @@ -141,11 +146,13 @@ pub(crate) mod parsing { #[cfg(feature = "printing")] mod printing { - use super::*; + use crate::path; + use crate::path::printing::PathStyle; + use crate::restriction::{VisRestricted, Visibility}; use proc_macro2::TokenStream; use quote::ToTokens; - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for Visibility { fn to_tokens(&self, tokens: &mut TokenStream) { match self { @@ -156,7 +163,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for VisRestricted { fn to_tokens(&self, tokens: &mut TokenStream) { self.pub_token.to_tokens(tokens); @@ -164,7 +171,7 @@ mod printing { // TODO: If we have a path which is not "self" or "super" or // "crate", automatically add the "in" token. 
self.in_token.to_tokens(tokens); - self.path.to_tokens(tokens); + path::printing::print_path(tokens, &self.path, PathStyle::Mod); }); } } diff --git a/vendor/syn/src/scan_expr.rs b/vendor/syn/src/scan_expr.rs new file mode 100644 index 00000000..a3a0416c --- /dev/null +++ b/vendor/syn/src/scan_expr.rs @@ -0,0 +1,268 @@ +use self::{Action::*, Input::*}; +use proc_macro2::{Delimiter, Ident, Spacing, TokenTree}; +use syn::parse::{ParseStream, Result}; +#[allow(unused_imports)] +//#[cfg_attr(not(test), expect(unused_imports))] // Rust 1.81+ +use syn::Token; +use syn::{AngleBracketedGenericArguments, BinOp, Expr, ExprPath, Lifetime, Lit, Type}; + +enum Input { + Keyword(&'static str), + Punct(&'static str), + ConsumeAny, + ConsumeBinOp, + ConsumeBrace, + ConsumeDelimiter, + ConsumeIdent, + ConsumeLifetime, + ConsumeLiteral, + ConsumeNestedBrace, + ExpectPath, + ExpectTurbofish, + ExpectType, + CanBeginExpr, + Otherwise, + Empty, +} + +enum Action { + SetState(&'static [(Input, Action)]), + IncDepth, + DecDepth, + Finish, +} + +static INIT: [(Input, Action); 28] = [ + (ConsumeDelimiter, SetState(&POSTFIX)), + (Keyword("async"), SetState(&ASYNC)), + (Keyword("break"), SetState(&BREAK_LABEL)), + (Keyword("const"), SetState(&CONST)), + (Keyword("continue"), SetState(&CONTINUE)), + (Keyword("for"), SetState(&FOR)), + (Keyword("if"), IncDepth), + (Keyword("let"), SetState(&PATTERN)), + (Keyword("loop"), SetState(&BLOCK)), + (Keyword("match"), IncDepth), + (Keyword("move"), SetState(&CLOSURE)), + (Keyword("return"), SetState(&RETURN)), + (Keyword("static"), SetState(&CLOSURE)), + (Keyword("unsafe"), SetState(&BLOCK)), + (Keyword("while"), IncDepth), + (Keyword("yield"), SetState(&RETURN)), + (Keyword("_"), SetState(&POSTFIX)), + (Punct("!"), SetState(&INIT)), + (Punct("#"), SetState(&[(ConsumeDelimiter, SetState(&INIT))])), + (Punct("&"), SetState(&REFERENCE)), + (Punct("*"), SetState(&INIT)), + (Punct("-"), SetState(&INIT)), + (Punct("..="), SetState(&INIT)), + (Punct(".."), SetState(&RANGE)), + (Punct("|"), SetState(&CLOSURE_ARGS)), + (ConsumeLifetime, SetState(&[(Punct(":"), SetState(&INIT))])), + (ConsumeLiteral, SetState(&POSTFIX)), + (ExpectPath, SetState(&PATH)), +]; + +static POSTFIX: [(Input, Action); 10] = [ + (Keyword("as"), SetState(&[(ExpectType, SetState(&POSTFIX))])), + (Punct("..="), SetState(&INIT)), + (Punct(".."), SetState(&RANGE)), + (Punct("."), SetState(&DOT)), + (Punct("?"), SetState(&POSTFIX)), + (ConsumeBinOp, SetState(&INIT)), + (Punct("="), SetState(&INIT)), + (ConsumeNestedBrace, SetState(&IF_THEN)), + (ConsumeDelimiter, SetState(&POSTFIX)), + (Empty, Finish), +]; + +static ASYNC: [(Input, Action); 3] = [ + (Keyword("move"), SetState(&ASYNC)), + (Punct("|"), SetState(&CLOSURE_ARGS)), + (ConsumeBrace, SetState(&POSTFIX)), +]; + +static BLOCK: [(Input, Action); 1] = [(ConsumeBrace, SetState(&POSTFIX))]; + +static BREAK_LABEL: [(Input, Action); 2] = [ + (ConsumeLifetime, SetState(&BREAK_VALUE)), + (Otherwise, SetState(&BREAK_VALUE)), +]; + +static BREAK_VALUE: [(Input, Action); 3] = [ + (ConsumeNestedBrace, SetState(&IF_THEN)), + (CanBeginExpr, SetState(&INIT)), + (Otherwise, SetState(&POSTFIX)), +]; + +static CLOSURE: [(Input, Action); 7] = [ + (Keyword("async"), SetState(&CLOSURE)), + (Keyword("move"), SetState(&CLOSURE)), + (Punct(","), SetState(&CLOSURE)), + (Punct(">"), SetState(&CLOSURE)), + (Punct("|"), SetState(&CLOSURE_ARGS)), + (ConsumeLifetime, SetState(&CLOSURE)), + (ConsumeIdent, SetState(&CLOSURE)), +]; + +static CLOSURE_ARGS: [(Input, Action); 2] = [ + 
(Punct("|"), SetState(&CLOSURE_RET)), + (ConsumeAny, SetState(&CLOSURE_ARGS)), +]; + +static CLOSURE_RET: [(Input, Action); 2] = [ + (Punct("->"), SetState(&[(ExpectType, SetState(&BLOCK))])), + (Otherwise, SetState(&INIT)), +]; + +static CONST: [(Input, Action); 2] = [ + (Punct("|"), SetState(&CLOSURE_ARGS)), + (ConsumeBrace, SetState(&POSTFIX)), +]; + +static CONTINUE: [(Input, Action); 2] = [ + (ConsumeLifetime, SetState(&POSTFIX)), + (Otherwise, SetState(&POSTFIX)), +]; + +static DOT: [(Input, Action); 3] = [ + (Keyword("await"), SetState(&POSTFIX)), + (ConsumeIdent, SetState(&METHOD)), + (ConsumeLiteral, SetState(&POSTFIX)), +]; + +static FOR: [(Input, Action); 2] = [ + (Punct("<"), SetState(&CLOSURE)), + (Otherwise, SetState(&PATTERN)), +]; + +static IF_ELSE: [(Input, Action); 2] = [(Keyword("if"), SetState(&INIT)), (ConsumeBrace, DecDepth)]; +static IF_THEN: [(Input, Action); 2] = + [(Keyword("else"), SetState(&IF_ELSE)), (Otherwise, DecDepth)]; + +static METHOD: [(Input, Action); 1] = [(ExpectTurbofish, SetState(&POSTFIX))]; + +static PATH: [(Input, Action); 4] = [ + (Punct("!="), SetState(&INIT)), + (Punct("!"), SetState(&INIT)), + (ConsumeNestedBrace, SetState(&IF_THEN)), + (Otherwise, SetState(&POSTFIX)), +]; + +static PATTERN: [(Input, Action); 15] = [ + (ConsumeDelimiter, SetState(&PATTERN)), + (Keyword("box"), SetState(&PATTERN)), + (Keyword("in"), IncDepth), + (Keyword("mut"), SetState(&PATTERN)), + (Keyword("ref"), SetState(&PATTERN)), + (Keyword("_"), SetState(&PATTERN)), + (Punct("!"), SetState(&PATTERN)), + (Punct("&"), SetState(&PATTERN)), + (Punct("..="), SetState(&PATTERN)), + (Punct(".."), SetState(&PATTERN)), + (Punct("="), SetState(&INIT)), + (Punct("@"), SetState(&PATTERN)), + (Punct("|"), SetState(&PATTERN)), + (ConsumeLiteral, SetState(&PATTERN)), + (ExpectPath, SetState(&PATTERN)), +]; + +static RANGE: [(Input, Action); 6] = [ + (Punct("..="), SetState(&INIT)), + (Punct(".."), SetState(&RANGE)), + (Punct("."), SetState(&DOT)), + (ConsumeNestedBrace, SetState(&IF_THEN)), + (Empty, Finish), + (Otherwise, SetState(&INIT)), +]; + +static RAW: [(Input, Action); 3] = [ + (Keyword("const"), SetState(&INIT)), + (Keyword("mut"), SetState(&INIT)), + (Otherwise, SetState(&POSTFIX)), +]; + +static REFERENCE: [(Input, Action); 3] = [ + (Keyword("mut"), SetState(&INIT)), + (Keyword("raw"), SetState(&RAW)), + (Otherwise, SetState(&INIT)), +]; + +static RETURN: [(Input, Action); 2] = [ + (CanBeginExpr, SetState(&INIT)), + (Otherwise, SetState(&POSTFIX)), +]; + +pub(crate) fn scan_expr(input: ParseStream) -> Result<()> { + let mut state = INIT.as_slice(); + let mut depth = 0usize; + 'table: loop { + for rule in state { + if match rule.0 { + Input::Keyword(expected) => input.step(|cursor| match cursor.ident() { + Some((ident, rest)) if ident == expected => Ok((true, rest)), + _ => Ok((false, *cursor)), + })?, + Input::Punct(expected) => input.step(|cursor| { + let begin = *cursor; + let mut cursor = begin; + for (i, ch) in expected.chars().enumerate() { + match cursor.punct() { + Some((punct, _)) if punct.as_char() != ch => break, + Some((_, rest)) if i == expected.len() - 1 => { + return Ok((true, rest)); + } + Some((punct, rest)) if punct.spacing() == Spacing::Joint => { + cursor = rest; + } + _ => break, + } + } + Ok((false, begin)) + })?, + Input::ConsumeAny => input.parse::<Option<TokenTree>>()?.is_some(), + Input::ConsumeBinOp => input.parse::<BinOp>().is_ok(), + Input::ConsumeBrace | Input::ConsumeNestedBrace => { + (matches!(rule.0, Input::ConsumeBrace) || depth > 0) + 
&& input.step(|cursor| match cursor.group(Delimiter::Brace) { + Some((_inside, _span, rest)) => Ok((true, rest)), + None => Ok((false, *cursor)), + })? + } + Input::ConsumeDelimiter => input.step(|cursor| match cursor.any_group() { + Some((_inside, _delimiter, _span, rest)) => Ok((true, rest)), + None => Ok((false, *cursor)), + })?, + Input::ConsumeIdent => input.parse::<Option<Ident>>()?.is_some(), + Input::ConsumeLifetime => input.parse::<Option<Lifetime>>()?.is_some(), + Input::ConsumeLiteral => input.parse::<Option<Lit>>()?.is_some(), + Input::ExpectPath => { + input.parse::<ExprPath>()?; + true + } + Input::ExpectTurbofish => { + if input.peek(Token![::]) { + input.parse::<AngleBracketedGenericArguments>()?; + } + true + } + Input::ExpectType => { + Type::without_plus(input)?; + true + } + Input::CanBeginExpr => Expr::peek(input), + Input::Otherwise => true, + Input::Empty => input.is_empty() || input.peek(Token![,]), + } { + state = match rule.1 { + Action::SetState(next) => next, + Action::IncDepth => (depth += 1, &INIT).1, + Action::DecDepth => (depth -= 1, &POSTFIX).1, + Action::Finish => return if depth == 0 { Ok(()) } else { break }, + }; + continue 'table; + } + } + return Err(input.error("unsupported expression")); + } +} diff --git a/vendor/syn/src/spanned.rs b/vendor/syn/src/spanned.rs index 98aa0aa1..17b69e9f 100644 --- a/vendor/syn/src/spanned.rs +++ b/vendor/syn/src/spanned.rs @@ -108,7 +108,7 @@ impl<T: ?Sized + ToTokens> Spanned for T { } mod private { - use super::*; + use crate::spanned::ToTokens; pub trait Sealed {} impl<T: ?Sized + ToTokens> Sealed for T {} diff --git a/vendor/syn/src/stmt.rs b/vendor/syn/src/stmt.rs index fb67fecc..970bc13d 100644 --- a/vendor/syn/src/stmt.rs +++ b/vendor/syn/src/stmt.rs @@ -1,8 +1,13 @@ -use super::*; +use crate::attr::Attribute; +use crate::expr::Expr; +use crate::item::Item; +use crate::mac::Macro; +use crate::pat::Pat; +use crate::token; ast_struct! { /// A braced block containing Rust statements. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct Block { pub brace_token: token::Brace, /// Statements in a block @@ -12,7 +17,7 @@ ast_struct! { ast_enum! { /// A statement, usually ending in a semicolon. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub enum Stmt { /// A local (let) binding. Local(Local), @@ -33,8 +38,8 @@ ast_enum! { } ast_struct! { - /// A local `let` binding: `let x: u64 = s.parse()?`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + /// A local `let` binding: `let x: u64 = s.parse()?;`. + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct Local { pub attrs: Vec<Attribute>, pub let_token: Token![let], @@ -50,7 +55,7 @@ ast_struct! { /// /// `LocalInit` represents `= s.parse()?` in `let x: u64 = s.parse()?` and /// `= r else { return }` in `let Ok(x) = r else { return }`. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct LocalInit { pub eq_token: Token![=], pub expr: Box<Expr>, @@ -64,7 +69,7 @@ ast_struct! { /// Syntactically it's ambiguous which other kind of statement this macro /// would expand to. It can be any of local variable (`let`), item, or /// expression. - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub struct StmtMacro { pub attrs: Vec<Attribute>, pub mac: Macro, @@ -74,9 +79,20 @@ ast_struct! 
{ #[cfg(feature = "parsing")] pub(crate) mod parsing { - use super::*; - use crate::parse::discouraged::Speculative; - use crate::parse::{Parse, ParseStream, Result}; + use crate::attr::Attribute; + use crate::classify; + use crate::error::Result; + use crate::expr::{Expr, ExprBlock, ExprMacro}; + use crate::ident::Ident; + use crate::item; + use crate::mac::{self, Macro}; + use crate::parse::discouraged::Speculative as _; + use crate::parse::{Parse, ParseStream}; + use crate::pat::{Pat, PatType}; + use crate::path::Path; + use crate::stmt::{Block, Local, LocalInit, Stmt, StmtMacro}; + use crate::token; + use crate::ty::Type; use proc_macro2::TokenStream; struct AllowNoSemi(bool); @@ -131,7 +147,7 @@ pub(crate) mod parsing { /// } /// } /// ``` - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] pub fn parse_within(input: ParseStream) -> Result<Vec<Stmt>> { let mut stmts = Vec::new(); loop { @@ -143,7 +159,7 @@ pub(crate) mod parsing { } let stmt = parse_stmt(input, AllowNoSemi(true))?; let requires_semicolon = match &stmt { - Stmt::Expr(stmt, None) => expr::requires_terminator(stmt), + Stmt::Expr(stmt, None) => classify::requires_semi_to_be_stmt(stmt), Stmt::Macro(stmt) => { stmt.semi_token.is_none() && !stmt.mac.delimiter.is_brace() } @@ -160,7 +176,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for Block { fn parse(input: ParseStream) -> Result<Self> { let content; @@ -171,7 +187,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for Stmt { fn parse(input: ParseStream) -> Result<Self> { let allow_nosemi = AllowNoSemi(false); @@ -192,7 +208,8 @@ pub(crate) mod parsing { if ahead.peek2(Ident) || ahead.peek2(Token![try]) { is_item_macro = true; } else if ahead.peek2(token::Brace) - && !(ahead.peek3(Token![.]) || ahead.peek3(Token![?])) + && !(ahead.peek3(Token![.]) && !ahead.peek3(Token![..]) + || ahead.peek3(Token![?])) { input.advance_to(&ahead); return stmt_mac(input, attrs, path).map(Stmt::Macro); @@ -200,7 +217,7 @@ pub(crate) mod parsing { } } - if input.peek(Token![let]) { + if input.peek(Token![let]) && !input.peek(token::Group) { stmt_local(input, attrs).map(Stmt::Local) } else if input.peek(Token![pub]) || input.peek(Token![crate]) && !input.peek2(Token![::]) @@ -282,8 +299,8 @@ pub(crate) mod parsing { let eq_token: Token![=] = eq_token; let expr: Expr = input.parse()?; - let diverge = if let Some(else_token) = input.parse()? 
{ - let else_token: Token![else] = else_token; + let diverge = if !classify::expr_trailing_brace(&expr) && input.peek(Token![else]) { + let else_token: Token![else] = input.parse()?; let diverge = ExprBlock { attrs: Vec::new(), label: None, @@ -319,7 +336,7 @@ pub(crate) mod parsing { allow_nosemi: AllowNoSemi, mut attrs: Vec<Attribute>, ) -> Result<Stmt> { - let mut e = expr::parsing::expr_early(input)?; + let mut e = Expr::parse_with_earlier_boundary_rule(input)?; let mut attr_target = &mut e; loop { @@ -351,6 +368,7 @@ pub(crate) mod parsing { | Expr::Paren(_) | Expr::Path(_) | Expr::Range(_) + | Expr::RawAddr(_) | Expr::Reference(_) | Expr::Repeat(_) | Expr::Return(_) @@ -385,7 +403,7 @@ pub(crate) mod parsing { if semi_token.is_some() { Ok(Stmt::Expr(e, semi_token)) - } else if allow_nosemi.0 || !expr::requires_terminator(&e) { + } else if allow_nosemi.0 || !classify::requires_semi_to_be_stmt(&e) { Ok(Stmt::Expr(e, None)) } else { Err(input.error("expected semicolon")) @@ -394,12 +412,16 @@ pub(crate) mod parsing { } #[cfg(feature = "printing")] -mod printing { - use super::*; +pub(crate) mod printing { + use crate::classify; + use crate::expr::{self, Expr}; + use crate::fixup::FixupContext; + use crate::stmt::{Block, Local, Stmt, StmtMacro}; + use crate::token; use proc_macro2::TokenStream; - use quote::{ToTokens, TokenStreamExt}; + use quote::{ToTokens, TokenStreamExt as _}; - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for Block { fn to_tokens(&self, tokens: &mut TokenStream) { self.brace_token.surround(tokens, |tokens| { @@ -408,14 +430,14 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for Stmt { fn to_tokens(&self, tokens: &mut TokenStream) { match self { Stmt::Local(local) => local.to_tokens(tokens), Stmt::Item(item) => item.to_tokens(tokens), Stmt::Expr(expr, semi) => { - expr.to_tokens(tokens); + expr::printing::print_expr(expr, tokens, FixupContext::new_stmt()); semi.to_tokens(tokens); } Stmt::Macro(mac) => mac.to_tokens(tokens), @@ -423,7 +445,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for Local { fn to_tokens(&self, tokens: &mut TokenStream) { expr::printing::outer_attrs_to_tokens(&self.attrs, tokens); @@ -431,17 +453,27 @@ mod printing { self.pat.to_tokens(tokens); if let Some(init) = &self.init { init.eq_token.to_tokens(tokens); - init.expr.to_tokens(tokens); + expr::printing::print_subexpression( + &init.expr, + init.diverge.is_some() && classify::expr_trailing_brace(&init.expr), + tokens, + FixupContext::NONE, + ); if let Some((else_token, diverge)) = &init.diverge { else_token.to_tokens(tokens); - diverge.to_tokens(tokens); + match &**diverge { + Expr::Block(diverge) => diverge.to_tokens(tokens), + _ => token::Brace::default().surround(tokens, |tokens| { + expr::printing::print_expr(diverge, tokens, FixupContext::new_stmt()); + }), + } } } self.semi_token.to_tokens(tokens); } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for StmtMacro { fn to_tokens(&self, tokens: &mut TokenStream) { expr::printing::outer_attrs_to_tokens(&self.attrs, tokens); diff --git a/vendor/syn/src/token.rs b/vendor/syn/src/token.rs index af7f25c4..52321fc6 100644 --- a/vendor/syn/src/token.rs +++ b/vendor/syn/src/token.rs @@ -88,6 +88,8 @@ //! 
[Printing]: https://docs.rs/quote/1.0/quote/trait.ToTokens.html //! [`Span`]: https://docs.rs/proc-macro2/1.0/proc_macro2/struct.Span.html +#[cfg(feature = "parsing")] +pub(crate) use self::private::CustomToken; use self::private::WithSpan; #[cfg(feature = "parsing")] use crate::buffer::Cursor; @@ -96,10 +98,6 @@ use crate::error::Result; #[cfg(feature = "parsing")] use crate::lifetime::Lifetime; #[cfg(feature = "parsing")] -use crate::lit::{Lit, LitBool, LitByte, LitByteStr, LitChar, LitFloat, LitInt, LitStr}; -#[cfg(feature = "parsing")] -use crate::lookahead; -#[cfg(feature = "parsing")] use crate::parse::{Parse, ParseStream}; use crate::span::IntoSpans; use proc_macro2::extra::DelimSpan; @@ -111,7 +109,7 @@ use proc_macro2::{Delimiter, Ident}; #[cfg(feature = "parsing")] use proc_macro2::{Literal, Punct, TokenTree}; #[cfg(feature = "printing")] -use quote::{ToTokens, TokenStreamExt}; +use quote::{ToTokens, TokenStreamExt as _}; #[cfg(feature = "extra-traits")] use std::cmp; #[cfg(feature = "extra-traits")] @@ -134,7 +132,9 @@ pub trait Token: private::Sealed { fn display() -> &'static str; } -mod private { +pub(crate) mod private { + #[cfg(feature = "parsing")] + use crate::buffer::Cursor; use proc_macro2::Span; #[cfg(feature = "parsing")] @@ -143,63 +143,27 @@ mod private { /// Support writing `token.span` rather than `token.spans[0]` on tokens that /// hold a single span. #[repr(transparent)] - #[allow(unknown_lints, repr_transparent_external_private_fields)] // False positive: https://github.com/rust-lang/rust/issues/78586#issuecomment-1722680482 + #[allow(unknown_lints, repr_transparent_non_zst_fields)] // False positive: https://github.com/rust-lang/rust/issues/115922 pub struct WithSpan { pub span: Span, } + + // Not public API. + #[doc(hidden)] + #[cfg(feature = "parsing")] + pub trait CustomToken { + fn peek(cursor: Cursor) -> bool; + fn display() -> &'static str; + } } #[cfg(feature = "parsing")] impl private::Sealed for Ident {} -#[cfg(feature = "parsing")] -fn peek_impl(cursor: Cursor, peek: fn(ParseStream) -> bool) -> bool { - use crate::parse::Unexpected; - use std::cell::Cell; - use std::rc::Rc; - - let scope = Span::call_site(); - let unexpected = Rc::new(Cell::new(Unexpected::None)); - let buffer = crate::parse::new_parse_buffer(scope, cursor, unexpected); - peek(&buffer) -} - -macro_rules! impl_token { - ($display:literal $name:ty) => { - #[cfg(feature = "parsing")] - impl Token for $name { - fn peek(cursor: Cursor) -> bool { - fn peek(input: ParseStream) -> bool { - <$name as Parse>::parse(input).is_ok() - } - peek_impl(cursor, peek) - } - - fn display() -> &'static str { - $display - } - } - - #[cfg(feature = "parsing")] - impl private::Sealed for $name {} - }; -} - -impl_token!("lifetime" Lifetime); -impl_token!("literal" Lit); -impl_token!("string literal" LitStr); -impl_token!("byte string literal" LitByteStr); -impl_token!("byte literal" LitByte); -impl_token!("character literal" LitChar); -impl_token!("integer literal" LitInt); -impl_token!("floating point literal" LitFloat); -impl_token!("boolean literal" LitBool); -impl_token!("group token" proc_macro2::Group); - macro_rules! impl_low_level_token { - ($display:literal $ty:ident $get:ident) => { + ($display:literal $($path:ident)::+ $get:ident) => { #[cfg(feature = "parsing")] - impl Token for $ty { + impl Token for $($path)::+ { fn peek(cursor: Cursor) -> bool { cursor.$get().is_some() } @@ -210,21 +174,15 @@ macro_rules! 
impl_low_level_token { } #[cfg(feature = "parsing")] - impl private::Sealed for $ty {} + impl private::Sealed for $($path)::+ {} }; } impl_low_level_token!("punctuation token" Punct punct); impl_low_level_token!("literal" Literal literal); impl_low_level_token!("token" TokenTree token_tree); - -// Not public API. -#[doc(hidden)] -#[cfg(feature = "parsing")] -pub trait CustomToken { - fn peek(cursor: Cursor) -> bool; - fn display() -> &'static str; -} +impl_low_level_token!("group token" proc_macro2::Group any_group); +impl_low_level_token!("lifetime" Lifetime lifetime); #[cfg(feature = "parsing")] impl<T: CustomToken> private::Sealed for T {} @@ -270,11 +228,11 @@ macro_rules! define_keywords { } #[cfg(feature = "clone-impls")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] + #[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] impl Copy for $name {} #[cfg(feature = "clone-impls")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] + #[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] impl Clone for $name { fn clone(&self) -> Self { *self @@ -282,7 +240,7 @@ macro_rules! define_keywords { } #[cfg(feature = "extra-traits")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] impl Debug for $name { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str(stringify!($name)) @@ -290,11 +248,11 @@ macro_rules! define_keywords { } #[cfg(feature = "extra-traits")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] impl cmp::Eq for $name {} #[cfg(feature = "extra-traits")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] impl PartialEq for $name { fn eq(&self, _other: &$name) -> bool { true @@ -302,13 +260,13 @@ macro_rules! define_keywords { } #[cfg(feature = "extra-traits")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] impl Hash for $name { fn hash<H: Hasher>(&self, _state: &mut H) {} } #[cfg(feature = "printing")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for $name { fn to_tokens(&self, tokens: &mut TokenStream) { printing::keyword($token, self.span, tokens); @@ -316,7 +274,7 @@ macro_rules! define_keywords { } #[cfg(feature = "parsing")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for $name { fn parse(input: ParseStream) -> Result<Self> { Ok($name { @@ -366,7 +324,7 @@ macro_rules! define_punctuation_structs { ($($token:literal pub struct $name:ident/$len:tt #[doc = $usage:literal])*) => { $( #[cfg_attr(not(doc), repr(transparent))] - #[allow(unknown_lints, repr_transparent_external_private_fields)] // False positive: https://github.com/rust-lang/rust/issues/78586#issuecomment-1722680482 + #[allow(unknown_lints, repr_transparent_non_zst_fields)] // False positive: https://github.com/rust-lang/rust/issues/115922 #[doc = concat!('`', $token, '`')] /// /// Usage: @@ -397,11 +355,11 @@ macro_rules! 
define_punctuation_structs { } #[cfg(feature = "clone-impls")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] + #[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] impl Copy for $name {} #[cfg(feature = "clone-impls")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] + #[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] impl Clone for $name { fn clone(&self) -> Self { *self @@ -409,7 +367,7 @@ macro_rules! define_punctuation_structs { } #[cfg(feature = "extra-traits")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] impl Debug for $name { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str(stringify!($name)) @@ -417,11 +375,11 @@ macro_rules! define_punctuation_structs { } #[cfg(feature = "extra-traits")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] impl cmp::Eq for $name {} #[cfg(feature = "extra-traits")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] impl PartialEq for $name { fn eq(&self, _other: &$name) -> bool { true @@ -429,7 +387,7 @@ macro_rules! define_punctuation_structs { } #[cfg(feature = "extra-traits")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] impl Hash for $name { fn hash<H: Hasher>(&self, _state: &mut H) {} } @@ -447,7 +405,7 @@ macro_rules! define_punctuation { } #[cfg(feature = "printing")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for $name { fn to_tokens(&self, tokens: &mut TokenStream) { printing::punct($token, &self.spans, tokens); @@ -455,7 +413,7 @@ macro_rules! define_punctuation { } #[cfg(feature = "parsing")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for $name { fn parse(input: ParseStream) -> Result<Self> { Ok($name { @@ -504,11 +462,11 @@ macro_rules! define_delimiters { } #[cfg(feature = "clone-impls")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] + #[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] impl Copy for $name {} #[cfg(feature = "clone-impls")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] + #[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] impl Clone for $name { fn clone(&self) -> Self { *self @@ -516,7 +474,7 @@ macro_rules! define_delimiters { } #[cfg(feature = "extra-traits")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] impl Debug for $name { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str(stringify!($name)) @@ -524,11 +482,11 @@ macro_rules! define_delimiters { } #[cfg(feature = "extra-traits")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] impl cmp::Eq for $name {} #[cfg(feature = "extra-traits")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] impl PartialEq for $name { fn eq(&self, _other: &$name) -> bool { true @@ -536,13 +494,14 @@ macro_rules! 
define_delimiters { } #[cfg(feature = "extra-traits")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + #[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] impl Hash for $name { fn hash<H: Hasher>(&self, _state: &mut H) {} } impl $name { #[cfg(feature = "printing")] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] pub fn surround<F>(&self, tokens: &mut TokenStream, f: F) where F: FnOnce(&mut TokenStream), @@ -564,7 +523,7 @@ define_punctuation_structs! { } #[cfg(feature = "printing")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] +#[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for Underscore { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append(Ident::new("_", self.span)); @@ -572,7 +531,7 @@ impl ToTokens for Underscore { } #[cfg(feature = "parsing")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] +#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for Underscore { fn parse(input: ParseStream) -> Result<Self> { input.step(|cursor| { @@ -633,11 +592,11 @@ impl std::default::Default for Group { } #[cfg(feature = "clone-impls")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] impl Copy for Group {} #[cfg(feature = "clone-impls")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +#[cfg_attr(docsrs, doc(cfg(feature = "clone-impls")))] impl Clone for Group { fn clone(&self) -> Self { *self @@ -645,7 +604,7 @@ impl Clone for Group { } #[cfg(feature = "extra-traits")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] impl Debug for Group { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str("Group") @@ -653,11 +612,11 @@ impl Debug for Group { } #[cfg(feature = "extra-traits")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] impl cmp::Eq for Group {} #[cfg(feature = "extra-traits")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] impl PartialEq for Group { fn eq(&self, _other: &Group) -> bool { true @@ -665,13 +624,14 @@ impl PartialEq for Group { } #[cfg(feature = "extra-traits")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +#[cfg_attr(docsrs, doc(cfg(feature = "extra-traits")))] impl Hash for Group { fn hash<H: Hasher>(&self, _state: &mut H) {} } impl Group { #[cfg(feature = "printing")] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] pub fn surround<F>(&self, tokens: &mut TokenStream, f: F) where F: FnOnce(&mut TokenStream), @@ -688,7 +648,7 @@ impl private::Sealed for Group {} #[cfg(feature = "parsing")] impl Token for Paren { fn peek(cursor: Cursor) -> bool { - lookahead::is_delimiter(cursor, Delimiter::Parenthesis) + cursor.group(Delimiter::Parenthesis).is_some() } fn display() -> &'static str { @@ -699,7 +659,7 @@ impl Token for Paren { #[cfg(feature = "parsing")] impl Token for Brace { fn peek(cursor: Cursor) -> bool { - lookahead::is_delimiter(cursor, Delimiter::Brace) + cursor.group(Delimiter::Brace).is_some() } fn display() -> &'static str { @@ -710,7 +670,7 @@ impl Token for Brace { #[cfg(feature = "parsing")] impl Token for Bracket { fn peek(cursor: Cursor) -> bool { - lookahead::is_delimiter(cursor, Delimiter::Bracket) + cursor.group(Delimiter::Bracket).is_some() } fn display() -> &'static str { @@ -721,7 +681,7 @@ impl Token for Bracket { #[cfg(feature = "parsing")] impl Token for Group { fn peek(cursor: 
Cursor) -> bool { - lookahead::is_delimiter(cursor, Delimiter::None) + cursor.group(Delimiter::None).is_some() } fn display() -> &'static str { @@ -763,6 +723,7 @@ define_keywords! { "override" pub struct Override "priv" pub struct Priv "pub" pub struct Pub + "raw" pub struct Raw "ref" pub struct Ref "return" pub struct Return "Self" pub struct SelfType @@ -941,6 +902,7 @@ macro_rules! Token { [override] => { $crate::token::Override }; [priv] => { $crate::token::Priv }; [pub] => { $crate::token::Pub }; + [raw] => { $crate::token::Raw }; [ref] => { $crate::token::Ref }; [return] => { $crate::token::Return }; [Self] => { $crate::token::SelfType }; @@ -1095,8 +1057,9 @@ pub(crate) mod parsing { #[doc(hidden)] #[cfg(feature = "printing")] pub(crate) mod printing { + use crate::ext::PunctExt as _; use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream}; - use quote::TokenStreamExt; + use quote::TokenStreamExt as _; #[doc(hidden)] pub fn punct(s: &str, spans: &[Span], tokens: &mut TokenStream) { @@ -1107,14 +1070,10 @@ pub(crate) mod printing { let ch = chars.next_back().unwrap(); let span = spans.next_back().unwrap(); for (ch, span) in chars.zip(spans) { - let mut op = Punct::new(ch, Spacing::Joint); - op.set_span(*span); - tokens.append(op); + tokens.append(Punct::new_spanned(ch, Spacing::Joint, *span)); } - let mut op = Punct::new(ch, Spacing::Alone); - op.set_span(*span); - tokens.append(op); + tokens.append(Punct::new_spanned(ch, Spacing::Alone, *span)); } pub(crate) fn keyword(s: &str, span: Span, tokens: &mut TokenStream) { diff --git a/vendor/syn/src/tt.rs b/vendor/syn/src/tt.rs index d8dc9aaa..2a9843e1 100644 --- a/vendor/syn/src/tt.rs +++ b/vendor/syn/src/tt.rs @@ -1,12 +1,10 @@ -use proc_macro2::{Delimiter, TokenStream, TokenTree}; +use proc_macro2::{Delimiter, Spacing, TokenStream, TokenTree}; use std::hash::{Hash, Hasher}; pub(crate) struct TokenTreeHelper<'a>(pub &'a TokenTree); impl<'a> PartialEq for TokenTreeHelper<'a> { fn eq(&self, other: &Self) -> bool { - use proc_macro2::Spacing; - match (self.0, other.0) { (TokenTree::Group(g1), TokenTree::Group(g2)) => { match (g1.delimiter(), g2.delimiter()) { @@ -17,19 +15,7 @@ impl<'a> PartialEq for TokenTreeHelper<'a> { _ => return false, } - let s1 = g1.stream().into_iter(); - let mut s2 = g2.stream().into_iter(); - - for item1 in s1 { - let item2 = match s2.next() { - Some(item) => item, - None => return false, - }; - if TokenTreeHelper(&item1) != TokenTreeHelper(&item2) { - return false; - } - } - s2.next().is_none() + TokenStreamHelper(&g1.stream()) == TokenStreamHelper(&g2.stream()) } (TokenTree::Punct(o1), TokenTree::Punct(o2)) => { o1.as_char() == o2.as_char() @@ -47,8 +33,6 @@ impl<'a> PartialEq for TokenTreeHelper<'a> { impl<'a> Hash for TokenTreeHelper<'a> { fn hash<H: Hasher>(&self, h: &mut H) { - use proc_macro2::Spacing; - match self.0 { TokenTree::Group(g) => { 0u8.hash(h); @@ -62,7 +46,7 @@ impl<'a> Hash for TokenTreeHelper<'a> { for item in g.stream() { TokenTreeHelper(&item).hash(h); } - 0xffu8.hash(h); // terminator w/ a variant we don't normally hash + 0xFFu8.hash(h); // terminator w/ a variant we don't normally hash } TokenTree::Punct(op) => { 1u8.hash(h); @@ -82,25 +66,30 @@ pub(crate) struct TokenStreamHelper<'a>(pub &'a TokenStream); impl<'a> PartialEq for TokenStreamHelper<'a> { fn eq(&self, other: &Self) -> bool { - let left = self.0.clone().into_iter().collect::<Vec<_>>(); - let right = other.0.clone().into_iter().collect::<Vec<_>>(); - if left.len() != right.len() { - return false; - } - 
for (a, b) in left.into_iter().zip(right) { - if TokenTreeHelper(&a) != TokenTreeHelper(&b) { + let left = self.0.clone().into_iter(); + let mut right = other.0.clone().into_iter(); + + for item1 in left { + let item2 = match right.next() { + Some(item) => item, + None => return false, + }; + if TokenTreeHelper(&item1) != TokenTreeHelper(&item2) { return false; } } - true + + right.next().is_none() } } impl<'a> Hash for TokenStreamHelper<'a> { fn hash<H: Hasher>(&self, state: &mut H) { - let tts = self.0.clone().into_iter().collect::<Vec<_>>(); - tts.len().hash(state); - for tt in tts { + let tokens = self.0.clone().into_iter(); + + tokens.clone().count().hash(state); + + for tt in tokens { TokenTreeHelper(&tt).hash(state); } } diff --git a/vendor/syn/src/ty.rs b/vendor/syn/src/ty.rs index 0f41fe4f..5b4177f6 100644 --- a/vendor/syn/src/ty.rs +++ b/vendor/syn/src/ty.rs @@ -1,5 +1,13 @@ -use super::*; +use crate::attr::Attribute; +use crate::expr::Expr; +use crate::generics::{BoundLifetimes, TypeParamBound}; +use crate::ident::Ident; +use crate::lifetime::Lifetime; +use crate::lit::LitStr; +use crate::mac::Macro; +use crate::path::{Path, QSelf}; use crate::punctuated::Punctuated; +use crate::token; use proc_macro2::TokenStream; ast_enum_of_structs! { @@ -9,8 +17,8 @@ ast_enum_of_structs! { /// /// This type is a [syntax tree enum]. /// - /// [syntax tree enum]: Expr#syntax-tree-enums - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + /// [syntax tree enum]: crate::expr::Expr#syntax-tree-enums + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] #[non_exhaustive] pub enum Type { /// A fixed size array type: `[T; n]`. @@ -64,12 +72,13 @@ ast_enum_of_structs! { // For testing exhaustiveness in downstream code, use the following idiom: // // match ty { + // #![cfg_attr(test, deny(non_exhaustive_omitted_patterns))] + // // Type::Array(ty) => {...} // Type::BareFn(ty) => {...} // ... // Type::Verbatim(ty) => {...} // - // #[cfg_attr(test, deny(non_exhaustive_omitted_patterns))] // _ => { /* some sane fallback */ } // } // @@ -82,7 +91,7 @@ ast_enum_of_structs! { ast_struct! { /// A fixed size array type: `[T; n]`. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct TypeArray { pub bracket_token: token::Bracket, pub elem: Box<Type>, @@ -93,7 +102,7 @@ ast_struct! { ast_struct! { /// A bare function type: `fn(usize) -> bool`. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct TypeBareFn { pub lifetimes: Option<BoundLifetimes>, pub unsafety: Option<Token![unsafe]>, @@ -108,7 +117,7 @@ ast_struct! { ast_struct! { /// A type contained within invisible delimiters. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct TypeGroup { pub group_token: token::Group, pub elem: Box<Type>, @@ -118,7 +127,7 @@ ast_struct! { ast_struct! { /// An `impl Bound1 + Bound2 + Bound3` type where `Bound` is a trait or /// a lifetime. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct TypeImplTrait { pub impl_token: Token![impl], pub bounds: Punctuated<TypeParamBound, Token![+]>, @@ -127,7 +136,7 @@ ast_struct! { ast_struct! 
{ /// Indication that a type should be inferred by the compiler: `_`. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct TypeInfer { pub underscore_token: Token![_], } @@ -135,7 +144,7 @@ ast_struct! { ast_struct! { /// A macro in the type position. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct TypeMacro { pub mac: Macro, } @@ -143,7 +152,7 @@ ast_struct! { ast_struct! { /// The never type: `!`. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct TypeNever { pub bang_token: Token![!], } @@ -151,7 +160,7 @@ ast_struct! { ast_struct! { /// A parenthesized type equivalent to the inner type. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct TypeParen { pub paren_token: token::Paren, pub elem: Box<Type>, @@ -161,7 +170,7 @@ ast_struct! { ast_struct! { /// A path like `std::slice::Iter`, optionally qualified with a /// self-type as in `<Vec<T> as SomeTrait>::Associated`. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct TypePath { pub qself: Option<QSelf>, pub path: Path, @@ -170,7 +179,7 @@ ast_struct! { ast_struct! { /// A raw pointer type: `*const T` or `*mut T`. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct TypePtr { pub star_token: Token![*], pub const_token: Option<Token![const]>, @@ -181,7 +190,7 @@ ast_struct! { ast_struct! { /// A reference type: `&'a T` or `&'a mut T`. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct TypeReference { pub and_token: Token![&], pub lifetime: Option<Lifetime>, @@ -192,7 +201,7 @@ ast_struct! { ast_struct! { /// A dynamically sized slice type: `[T]`. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct TypeSlice { pub bracket_token: token::Bracket, pub elem: Box<Type>, @@ -202,7 +211,7 @@ ast_struct! { ast_struct! { /// A trait object type `dyn Bound1 + Bound2 + Bound3` where `Bound` is a /// trait or a lifetime. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct TypeTraitObject { pub dyn_token: Option<Token![dyn]>, pub bounds: Punctuated<TypeParamBound, Token![+]>, @@ -211,7 +220,7 @@ ast_struct! { ast_struct! { /// A tuple type: `(A, B, C, String)`. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct TypeTuple { pub paren_token: token::Paren, pub elems: Punctuated<Type, Token![,]>, @@ -220,7 +229,7 @@ ast_struct! { ast_struct! { /// The binary interface of a function: `extern "C"`. 
- #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct Abi { pub extern_token: Token![extern], pub name: Option<LitStr>, @@ -229,7 +238,7 @@ ast_struct! { ast_struct! { /// An argument in a function type: the `usize` in `fn(usize) -> bool`. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct BareFnArg { pub attrs: Vec<Attribute>, pub name: Option<(Ident, Token![:])>, @@ -239,7 +248,7 @@ ast_struct! { ast_struct! { /// The variadic argument of a function pointer like `fn(usize, ...)`. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub struct BareVariadic { pub attrs: Vec<Attribute>, pub name: Option<(Ident, Token![:])>, @@ -250,7 +259,7 @@ ast_struct! { ast_enum! { /// Return type of a function signature. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] pub enum ReturnType { /// Return type is not specified. /// @@ -263,13 +272,27 @@ ast_enum! { #[cfg(feature = "parsing")] pub(crate) mod parsing { - use super::*; - use crate::ext::IdentExt; - use crate::parse::{Parse, ParseStream, Result}; + use crate::attr::Attribute; + use crate::error::{self, Result}; + use crate::ext::IdentExt as _; + use crate::generics::{BoundLifetimes, TraitBound, TraitBoundModifier, TypeParamBound}; + use crate::ident::Ident; + use crate::lifetime::Lifetime; + use crate::mac::{self, Macro}; + use crate::parse::{Parse, ParseStream}; use crate::path; + use crate::path::{Path, PathArguments, QSelf}; + use crate::punctuated::Punctuated; + use crate::token; + use crate::ty::{ + Abi, BareFnArg, BareVariadic, ReturnType, Type, TypeArray, TypeBareFn, TypeGroup, + TypeImplTrait, TypeInfer, TypeMacro, TypeNever, TypeParen, TypePath, TypePtr, + TypeReference, TypeSlice, TypeTraitObject, TypeTuple, + }; + use crate::verbatim; use proc_macro2::Span; - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for Type { fn parse(input: ParseStream) -> Result<Self> { let allow_plus = true; @@ -284,7 +307,7 @@ pub(crate) mod parsing { /// contain a `+` character. /// /// This parser does not allow a `+`, while the default parser does. - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] pub fn without_plus(input: ParseStream) -> Result<Self> { let allow_plus = false; let allow_group_generic = true; @@ -327,7 +350,7 @@ pub(crate) mod parsing { Path::parse_rest(input, &mut ty.path, false)?; return Ok(Type::Path(ty)); } else { - group.elem = Box::new(Type::Path(ty)); + *group.elem = Type::Path(ty); } } } @@ -379,7 +402,15 @@ pub(crate) mod parsing { })); while let Some(plus) = input.parse()? { bounds.push_punct(plus); - bounds.push_value(input.parse()?); + bounds.push_value({ + let allow_precise_capture = false; + let allow_const = false; + TypeParamBound::parse_single( + input, + allow_precise_capture, + allow_const, + )? 
+ }); } bounds }, @@ -434,6 +465,7 @@ pub(crate) mod parsing { }) } other @ (TypeParamBound::Lifetime(_) + | TypeParamBound::PreciseCapture(_) | TypeParamBound::Verbatim(_)) => other, } } @@ -446,7 +478,15 @@ pub(crate) mod parsing { bounds.push_value(first); while let Some(plus) = input.parse()? { bounds.push_punct(plus); - bounds.push_value(input.parse()?); + bounds.push_value({ + let allow_precise_capture = false; + let allow_const = false; + TypeParamBound::parse_single( + input, + allow_precise_capture, + allow_const, + )? + }); } bounds }, @@ -509,7 +549,11 @@ pub(crate) mod parsing { { break; } - bounds.push_value(input.parse()?); + bounds.push_value({ + let allow_precise_capture = false; + let allow_const = false; + TypeParamBound::parse_single(input, allow_precise_capture, allow_const)? + }); } } return Ok(Type::TraitObject(TypeTraitObject { @@ -524,14 +568,14 @@ pub(crate) mod parsing { let dyn_span = dyn_token.span; let star_token: Option<Token![*]> = input.parse()?; let bounds = TypeTraitObject::parse_bounds(dyn_span, input, allow_plus)?; - return Ok(if star_token.is_some() { + Ok(if star_token.is_some() { Type::Verbatim(verbatim::between(&begin, input)) } else { Type::TraitObject(TypeTraitObject { dyn_token: Some(dyn_token), bounds, }) - }); + }) } else if lookahead.peek(token::Bracket) { let content; let bracket_token = bracketed!(content in input); @@ -566,7 +610,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for TypeSlice { fn parse(input: ParseStream) -> Result<Self> { let content; @@ -577,7 +621,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for TypeArray { fn parse(input: ParseStream) -> Result<Self> { let content; @@ -590,7 +634,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for TypePtr { fn parse(input: ParseStream) -> Result<Self> { let star_token: Token![*] = input.parse()?; @@ -613,7 +657,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for TypeReference { fn parse(input: ParseStream) -> Result<Self> { Ok(TypeReference { @@ -626,7 +670,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for TypeBareFn { fn parse(input: ParseStream) -> Result<Self> { let args; @@ -646,7 +690,7 @@ pub(crate) mod parsing { if inputs.empty_or_trailing() && (args.peek(Token![...]) - || args.peek(Ident) + || (args.peek(Ident) || args.peek(Token![_])) && args.peek2(Token![:]) && args.peek3(Token![...])) { @@ -673,7 +717,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for TypeNever { fn parse(input: ParseStream) -> Result<Self> { Ok(TypeNever { @@ -682,7 +726,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for TypeInfer { fn parse(input: ParseStream) -> Result<Self> { Ok(TypeInfer { @@ -691,7 +735,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for TypeTuple { fn parse(input: 
ParseStream) -> Result<Self> { let content; @@ -724,7 +768,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for TypeMacro { fn parse(input: ParseStream) -> Result<Self> { Ok(TypeMacro { @@ -733,7 +777,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for TypePath { fn parse(input: ParseStream) -> Result<Self> { let expr_style = false; @@ -743,7 +787,7 @@ pub(crate) mod parsing { } impl ReturnType { - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] pub fn without_plus(input: ParseStream) -> Result<Self> { let allow_plus = false; Self::parse(input, allow_plus) @@ -761,7 +805,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for ReturnType { fn parse(input: ParseStream) -> Result<Self> { let allow_plus = true; @@ -769,7 +813,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for TypeTraitObject { fn parse(input: ParseStream) -> Result<Self> { let allow_plus = true; @@ -778,7 +822,7 @@ pub(crate) mod parsing { } impl TypeTraitObject { - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] pub fn without_plus(input: ParseStream) -> Result<Self> { let allow_plus = false; Self::parse(input, allow_plus) @@ -800,18 +844,28 @@ pub(crate) mod parsing { input: ParseStream, allow_plus: bool, ) -> Result<Punctuated<TypeParamBound, Token![+]>> { - let bounds = TypeParamBound::parse_multiple(input, allow_plus)?; + let allow_precise_capture = false; + let allow_const = false; + let bounds = TypeParamBound::parse_multiple( + input, + allow_plus, + allow_precise_capture, + allow_const, + )?; let mut last_lifetime_span = None; let mut at_least_one_trait = false; for bound in &bounds { match bound { - TypeParamBound::Trait(_) | TypeParamBound::Verbatim(_) => { + TypeParamBound::Trait(_) => { at_least_one_trait = true; break; } TypeParamBound::Lifetime(lifetime) => { last_lifetime_span = Some(lifetime.ident.span()); } + TypeParamBound::PreciseCapture(_) | TypeParamBound::Verbatim(_) => { + unreachable!() + } } } // Just lifetimes like `'a + 'b` is not a TraitObject. 
@@ -823,7 +877,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for TypeImplTrait { fn parse(input: ParseStream) -> Result<Self> { let allow_plus = true; @@ -832,7 +886,7 @@ pub(crate) mod parsing { } impl TypeImplTrait { - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] pub fn without_plus(input: ParseStream) -> Result<Self> { let allow_plus = false; Self::parse(input, allow_plus) @@ -840,17 +894,40 @@ pub(crate) mod parsing { pub(crate) fn parse(input: ParseStream, allow_plus: bool) -> Result<Self> { let impl_token: Token![impl] = input.parse()?; - let bounds = TypeParamBound::parse_multiple(input, allow_plus)?; - let mut last_lifetime_span = None; + let allow_precise_capture = true; + let allow_const = true; + let bounds = TypeParamBound::parse_multiple( + input, + allow_plus, + allow_precise_capture, + allow_const, + )?; + let mut last_nontrait_span = None; let mut at_least_one_trait = false; for bound in &bounds { match bound { - TypeParamBound::Trait(_) | TypeParamBound::Verbatim(_) => { + TypeParamBound::Trait(_) => { at_least_one_trait = true; break; } TypeParamBound::Lifetime(lifetime) => { - last_lifetime_span = Some(lifetime.ident.span()); + last_nontrait_span = Some(lifetime.ident.span()); + } + TypeParamBound::PreciseCapture(precise_capture) => { + #[cfg(feature = "full")] + { + last_nontrait_span = Some(precise_capture.gt_token.span); + } + #[cfg(not(feature = "full"))] + { + _ = precise_capture; + unreachable!(); + } + } + TypeParamBound::Verbatim(_) => { + // `[const] Trait` + at_least_one_trait = true; + break; } } } @@ -858,7 +935,7 @@ pub(crate) mod parsing { let msg = "at least one trait must be specified"; return Err(error::new2( impl_token.span, - last_lifetime_span.unwrap(), + last_nontrait_span.unwrap(), msg, )); } @@ -866,7 +943,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for TypeGroup { fn parse(input: ParseStream) -> Result<Self> { let group = crate::group::parse_group(input)?; @@ -877,7 +954,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for TypeParen { fn parse(input: ParseStream) -> Result<Self> { let allow_plus = false; @@ -898,7 +975,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for BareFnArg { fn parse(input: ParseStream) -> Result<Self> { let allow_self = false; @@ -969,7 +1046,7 @@ pub(crate) mod parsing { }) } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for Abi { fn parse(input: ParseStream) -> Result<Self> { Ok(Abi { @@ -979,7 +1056,7 @@ pub(crate) mod parsing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for Option<Abi> { fn parse(input: ParseStream) -> Result<Self> { if input.peek(Token![extern]) { @@ -993,13 +1070,19 @@ pub(crate) mod parsing { #[cfg(feature = "printing")] mod printing { - use super::*; use crate::attr::FilterAttrs; + use crate::path; + use crate::path::printing::PathStyle; use crate::print::TokensOrDefault; + use crate::ty::{ + Abi, BareFnArg, BareVariadic, ReturnType, TypeArray, TypeBareFn, TypeGroup, 
TypeImplTrait, + TypeInfer, TypeMacro, TypeNever, TypeParen, TypePath, TypePtr, TypeReference, TypeSlice, + TypeTraitObject, TypeTuple, + }; use proc_macro2::TokenStream; - use quote::{ToTokens, TokenStreamExt}; + use quote::{ToTokens, TokenStreamExt as _}; - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for TypeSlice { fn to_tokens(&self, tokens: &mut TokenStream) { self.bracket_token.surround(tokens, |tokens| { @@ -1008,7 +1091,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for TypeArray { fn to_tokens(&self, tokens: &mut TokenStream) { self.bracket_token.surround(tokens, |tokens| { @@ -1019,7 +1102,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for TypePtr { fn to_tokens(&self, tokens: &mut TokenStream) { self.star_token.to_tokens(tokens); @@ -1033,7 +1116,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for TypeReference { fn to_tokens(&self, tokens: &mut TokenStream) { self.and_token.to_tokens(tokens); @@ -1043,7 +1126,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for TypeBareFn { fn to_tokens(&self, tokens: &mut TokenStream) { self.lifetimes.to_tokens(tokens); @@ -1064,14 +1147,14 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for TypeNever { fn to_tokens(&self, tokens: &mut TokenStream) { self.bang_token.to_tokens(tokens); } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for TypeTuple { fn to_tokens(&self, tokens: &mut TokenStream) { self.paren_token.surround(tokens, |tokens| { @@ -1085,14 +1168,14 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for TypePath { fn to_tokens(&self, tokens: &mut TokenStream) { - path::printing::print_path(tokens, &self.qself, &self.path); + path::printing::print_qpath(tokens, &self.qself, &self.path, PathStyle::AsWritten); } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for TypeTraitObject { fn to_tokens(&self, tokens: &mut TokenStream) { self.dyn_token.to_tokens(tokens); @@ -1100,7 +1183,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for TypeImplTrait { fn to_tokens(&self, tokens: &mut TokenStream) { self.impl_token.to_tokens(tokens); @@ -1108,7 +1191,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for TypeGroup { fn to_tokens(&self, tokens: &mut TokenStream) { self.group_token.surround(tokens, |tokens| { @@ -1117,7 +1200,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for TypeParen { fn to_tokens(&self, tokens: &mut TokenStream) { self.paren_token.surround(tokens, |tokens| { @@ -1126,21 +1209,21 @@ mod printing { } } - #[cfg_attr(doc_cfg, 
doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for TypeInfer { fn to_tokens(&self, tokens: &mut TokenStream) { self.underscore_token.to_tokens(tokens); } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for TypeMacro { fn to_tokens(&self, tokens: &mut TokenStream) { self.mac.to_tokens(tokens); } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ReturnType { fn to_tokens(&self, tokens: &mut TokenStream) { match self { @@ -1153,7 +1236,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for BareFnArg { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); @@ -1165,7 +1248,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for BareVariadic { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.append_all(self.attrs.outer()); @@ -1178,7 +1261,7 @@ mod printing { } } - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for Abi { fn to_tokens(&self, tokens: &mut TokenStream) { self.extern_token.to_tokens(tokens); diff --git a/vendor/syn/src/verbatim.rs b/vendor/syn/src/verbatim.rs index 54dc1cfa..4a7ea2e1 100644 --- a/vendor/syn/src/verbatim.rs +++ b/vendor/syn/src/verbatim.rs @@ -1,7 +1,7 @@ +use crate::ext::TokenStreamExt as _; use crate::parse::ParseStream; use proc_macro2::{Delimiter, TokenStream}; use std::cmp::Ordering; -use std::iter; pub(crate) fn between<'a>(begin: ParseStream<'a>, end: ParseStream<'a>) -> TokenStream { let end = end.cursor(); @@ -26,7 +26,7 @@ pub(crate) fn between<'a>(begin: ParseStream<'a>, end: ParseStream<'a>) -> Token } } - tokens.extend(iter::once(tt)); + tokens.append(tt); cursor = next; } tokens diff --git a/vendor/syn/src/whitespace.rs b/vendor/syn/src/whitespace.rs index f8de1b98..a50b5069 100644 --- a/vendor/syn/src/whitespace.rs +++ b/vendor/syn/src/whitespace.rs @@ -41,11 +41,11 @@ pub(crate) fn skip(mut s: &str) -> &str { } } match byte { - b' ' | 0x09..=0x0d => { + b' ' | 0x09..=0x0D => { s = &s[1..]; continue; } - b if b <= 0x7f => {} + b if b <= 0x7F => {} _ => { let ch = s.chars().next().unwrap(); if is_whitespace(ch) { diff --git a/vendor/syn/tests/common/eq.rs b/vendor/syn/tests/common/eq.rs index 3c2b1c12..73a68113 100644 --- a/vendor/syn/tests/common/eq.rs +++ b/vendor/syn/tests/common/eq.rs @@ -10,23 +10,24 @@ use rustc_ast::ast::AngleBracketedArg; use rustc_ast::ast::AngleBracketedArgs; use rustc_ast::ast::AnonConst; use rustc_ast::ast::Arm; -use rustc_ast::ast::AssocConstraint; -use rustc_ast::ast::AssocConstraintKind; +use rustc_ast::ast::AsmMacro; +use rustc_ast::ast::AssignOpKind; +use rustc_ast::ast::AssocItemConstraint; +use rustc_ast::ast::AssocItemConstraintKind; use rustc_ast::ast::AssocItemKind; -use rustc_ast::ast::Async; use rustc_ast::ast::AttrArgs; -use rustc_ast::ast::AttrArgsEq; use rustc_ast::ast::AttrId; use rustc_ast::ast::AttrItem; use rustc_ast::ast::AttrKind; use rustc_ast::ast::AttrStyle; use rustc_ast::ast::Attribute; -use rustc_ast::ast::BareFnTy; use rustc_ast::ast::BinOpKind; -use rustc_ast::ast::BindingAnnotation; +use rustc_ast::ast::BindingMode; use rustc_ast::ast::Block; use rustc_ast::ast::BlockCheckMode; use 
rustc_ast::ast::BorrowKind; +use rustc_ast::ast::BoundAsyncness; +use rustc_ast::ast::BoundConstness; use rustc_ast::ast::BoundPolarity; use rustc_ast::ast::ByRef; use rustc_ast::ast::CaptureBy; @@ -34,8 +35,12 @@ use rustc_ast::ast::Closure; use rustc_ast::ast::ClosureBinder; use rustc_ast::ast::Const; use rustc_ast::ast::ConstItem; +use rustc_ast::ast::ConstItemRhs; +use rustc_ast::ast::CoroutineKind; use rustc_ast::ast::Crate; use rustc_ast::ast::Defaultness; +use rustc_ast::ast::Delegation; +use rustc_ast::ast::DelegationMac; use rustc_ast::ast::DelimArgs; use rustc_ast::ast::EnumDef; use rustc_ast::ast::Expr; @@ -45,10 +50,13 @@ use rustc_ast::ast::Extern; use rustc_ast::ast::FieldDef; use rustc_ast::ast::FloatTy; use rustc_ast::ast::Fn; +use rustc_ast::ast::FnContract; use rustc_ast::ast::FnDecl; use rustc_ast::ast::FnHeader; +use rustc_ast::ast::FnPtrTy; use rustc_ast::ast::FnRetTy; use rustc_ast::ast::FnSig; +use rustc_ast::ast::ForLoopKind; use rustc_ast::ast::ForeignItemKind; use rustc_ast::ast::ForeignMod; use rustc_ast::ast::FormatAlignment; @@ -65,6 +73,7 @@ use rustc_ast::ast::FormatOptions; use rustc_ast::ast::FormatPlaceholder; use rustc_ast::ast::FormatSign; use rustc_ast::ast::FormatTrait; +use rustc_ast::ast::GenBlockKind; use rustc_ast::ast::GenericArg; use rustc_ast::ast::GenericArgs; use rustc_ast::ast::GenericBound; @@ -95,6 +104,10 @@ use rustc_ast::ast::MacCall; use rustc_ast::ast::MacCallStmt; use rustc_ast::ast::MacStmtStyle; use rustc_ast::ast::MacroDef; +use rustc_ast::ast::MatchKind; +use rustc_ast::ast::MetaItem; +use rustc_ast::ast::MetaItemInner; +use rustc_ast::ast::MetaItemKind; use rustc_ast::ast::MetaItemLit; use rustc_ast::ast::MethodCall; use rustc_ast::ast::ModKind; @@ -105,17 +118,23 @@ use rustc_ast::ast::Mutability; use rustc_ast::ast::NodeId; use rustc_ast::ast::NormalAttr; use rustc_ast::ast::Param; +use rustc_ast::ast::Parens; use rustc_ast::ast::ParenthesizedArgs; use rustc_ast::ast::Pat; use rustc_ast::ast::PatField; +use rustc_ast::ast::PatFieldsRest; use rustc_ast::ast::PatKind; use rustc_ast::ast::Path; use rustc_ast::ast::PathSegment; +use rustc_ast::ast::Pinnedness; use rustc_ast::ast::PolyTraitRef; +use rustc_ast::ast::PreciseCapturingArg; use rustc_ast::ast::QSelf; use rustc_ast::ast::RangeEnd; use rustc_ast::ast::RangeLimits; use rustc_ast::ast::RangeSyntax; +use rustc_ast::ast::Recovered; +use rustc_ast::ast::Safety; use rustc_ast::ast::StaticItem; use rustc_ast::ast::Stmt; use rustc_ast::ast::StmtKind; @@ -125,16 +144,20 @@ use rustc_ast::ast::StructExpr; use rustc_ast::ast::StructRest; use rustc_ast::ast::Term; use rustc_ast::ast::Trait; -use rustc_ast::ast::TraitBoundModifier; +use rustc_ast::ast::TraitAlias; +use rustc_ast::ast::TraitBoundModifiers; +use rustc_ast::ast::TraitImplHeader; use rustc_ast::ast::TraitObjectSyntax; use rustc_ast::ast::TraitRef; use rustc_ast::ast::Ty; use rustc_ast::ast::TyAlias; -use rustc_ast::ast::TyAliasWhereClause; use rustc_ast::ast::TyKind; +use rustc_ast::ast::TyPat; +use rustc_ast::ast::TyPatKind; use rustc_ast::ast::UintTy; use rustc_ast::ast::UnOp; -use rustc_ast::ast::Unsafe; +use rustc_ast::ast::UnsafeBinderCastKind; +use rustc_ast::ast::UnsafeBinderTy; use rustc_ast::ast::UnsafeSource; use rustc_ast::ast::UseTree; use rustc_ast::ast::UseTreeKind; @@ -146,19 +169,22 @@ use rustc_ast::ast::WhereBoundPredicate; use rustc_ast::ast::WhereClause; use rustc_ast::ast::WhereEqPredicate; use rustc_ast::ast::WherePredicate; +use rustc_ast::ast::WherePredicateKind; use 
rustc_ast::ast::WhereRegionPredicate; -use rustc_ast::ptr::P; -use rustc_ast::token::{self, CommentKind, Delimiter, Lit, Nonterminal, Token, TokenKind}; +use rustc_ast::ast::YieldKind; +use rustc_ast::token::{self, CommentKind, Delimiter, IdentIsRaw, Lit, Token, TokenKind}; use rustc_ast::tokenstream::{ - AttrTokenStream, AttrTokenTree, AttributesData, DelimSpan, LazyAttrTokenStream, Spacing, - TokenStream, TokenTree, + AttrTokenStream, AttrTokenTree, AttrsTarget, DelimSpacing, DelimSpan, LazyAttrTokenStream, + Spacing, TokenStream, TokenTree, }; -use rustc_data_structures::sync::Lrc; +use rustc_data_structures::packed::Pu128; use rustc_span::source_map::Spanned; -use rustc_span::symbol::{sym, Ident}; -use rustc_span::{ErrorGuaranteed, Span, Symbol, SyntaxContext, DUMMY_SP}; +use rustc_span::symbol::{sym, ByteSymbol, Ident, Symbol}; +use rustc_span::{ErrorGuaranteed, Span, SyntaxContext, DUMMY_SP}; +use std::borrow::Cow; use std::collections::HashMap; use std::hash::{BuildHasher, Hash}; +use std::sync::Arc; use thin_vec::ThinVec; pub trait SpanlessEq { @@ -171,13 +197,7 @@ impl<T: ?Sized + SpanlessEq> SpanlessEq for Box<T> { } } -impl<T: ?Sized + SpanlessEq> SpanlessEq for P<T> { - fn eq(&self, other: &Self) -> bool { - SpanlessEq::eq(&**self, &**other) - } -} - -impl<T: ?Sized + SpanlessEq> SpanlessEq for Lrc<T> { +impl<T: ?Sized + SpanlessEq> SpanlessEq for Arc<T> { fn eq(&self, other: &Self) -> bool { SpanlessEq::eq(&**self, &**other) } @@ -236,6 +256,12 @@ impl<K: Eq + Hash, V: SpanlessEq, S: BuildHasher> SpanlessEq for HashMap<K, V, S } } +impl<'a, T: ?Sized + ToOwned + SpanlessEq> SpanlessEq for Cow<'a, T> { + fn eq(&self, other: &Self) -> bool { + <T as SpanlessEq>::eq(self, other) + } +} + impl<T: SpanlessEq> SpanlessEq for Spanned<T> { fn eq(&self, other: &Self) -> bool { SpanlessEq::eq(&self.node, &other.node) @@ -283,6 +309,7 @@ macro_rules! spanless_eq_partial_eq { }; } +spanless_eq_partial_eq!(()); spanless_eq_partial_eq!(bool); spanless_eq_partial_eq!(u8); spanless_eq_partial_eq!(u16); @@ -290,8 +317,11 @@ spanless_eq_partial_eq!(u32); spanless_eq_partial_eq!(u128); spanless_eq_partial_eq!(usize); spanless_eq_partial_eq!(char); +spanless_eq_partial_eq!(str); spanless_eq_partial_eq!(String); +spanless_eq_partial_eq!(Pu128); spanless_eq_partial_eq!(Symbol); +spanless_eq_partial_eq!(ByteSymbol); spanless_eq_partial_eq!(CommentKind); spanless_eq_partial_eq!(Delimiter); spanless_eq_partial_eq!(InlineAsmOptions); @@ -448,45 +478,50 @@ macro_rules! 
spanless_eq_enum { spanless_eq_struct!(AngleBracketedArgs; span args); spanless_eq_struct!(AnonConst; id value); spanless_eq_struct!(Arm; attrs pat guard body span id is_placeholder); -spanless_eq_struct!(AssocConstraint; id ident gen_args kind span); -spanless_eq_struct!(AttrItem; path args tokens); +spanless_eq_struct!(AssocItemConstraint; id ident gen_args kind span); +spanless_eq_struct!(AttrItem; unsafety path args tokens); spanless_eq_struct!(AttrTokenStream; 0); spanless_eq_struct!(Attribute; kind id style span); -spanless_eq_struct!(AttributesData; attrs tokens); -spanless_eq_struct!(BareFnTy; unsafety ext generic_params decl decl_span); -spanless_eq_struct!(BindingAnnotation; 0 1); -spanless_eq_struct!(Block; stmts id rules span tokens could_be_bare_literal); -spanless_eq_struct!(Closure; binder capture_clause constness asyncness movability fn_decl body !fn_decl_span !fn_arg_span); -spanless_eq_struct!(ConstItem; defaultness generics ty expr); +spanless_eq_struct!(AttrsTarget; attrs tokens); +spanless_eq_struct!(BindingMode; 0 1); +spanless_eq_struct!(Block; stmts id rules span tokens); +spanless_eq_struct!(Closure; binder capture_clause constness coroutine_kind movability fn_decl body !fn_decl_span !fn_arg_span); +spanless_eq_struct!(ConstItem; defaultness ident generics ty rhs define_opaque); spanless_eq_struct!(Crate; attrs items spans id is_placeholder); +spanless_eq_struct!(Delegation; id qself path ident rename body from_glob); +spanless_eq_struct!(DelegationMac; qself prefix suffixes body); spanless_eq_struct!(DelimArgs; dspan delim tokens); +spanless_eq_struct!(DelimSpacing; open close); spanless_eq_struct!(EnumDef; variants); spanless_eq_struct!(Expr; id kind span attrs !tokens); spanless_eq_struct!(ExprField; attrs id span ident expr is_shorthand is_placeholder); -spanless_eq_struct!(FieldDef; attrs id span vis ident ty is_placeholder); -spanless_eq_struct!(Fn; defaultness generics sig body); +spanless_eq_struct!(FieldDef; attrs id span vis safety ident ty default is_placeholder); +spanless_eq_struct!(Fn; defaultness ident generics sig contract define_opaque body); +spanless_eq_struct!(FnContract; declarations requires ensures); spanless_eq_struct!(FnDecl; inputs output); -spanless_eq_struct!(FnHeader; constness asyncness unsafety ext); +spanless_eq_struct!(FnHeader; constness coroutine_kind safety ext); +spanless_eq_struct!(FnPtrTy; safety ext generic_params decl decl_span); spanless_eq_struct!(FnSig; header decl span); -spanless_eq_struct!(ForeignMod; unsafety abi items); +spanless_eq_struct!(ForeignMod; extern_span safety abi items); spanless_eq_struct!(FormatArgPosition; index kind span); -spanless_eq_struct!(FormatArgs; span template arguments); +spanless_eq_struct!(FormatArgs; span template arguments uncooked_fmt_str is_source_literal); spanless_eq_struct!(FormatArgument; kind expr); spanless_eq_struct!(FormatOptions; width precision alignment fill sign alternate zero_pad debug_hex); spanless_eq_struct!(FormatPlaceholder; argument span format_trait format_options); spanless_eq_struct!(GenericParam; id ident attrs bounds is_placeholder kind !colon_span); spanless_eq_struct!(Generics; params where_clause span); -spanless_eq_struct!(Impl; defaultness unsafety generics constness polarity of_trait self_ty items); -spanless_eq_struct!(InlineAsm; template template_strs operands clobber_abis options line_spans); +spanless_eq_struct!(Impl; generics constness of_trait self_ty items); +spanless_eq_struct!(InlineAsm; asm_macro template template_strs operands clobber_abis 
options line_spans); spanless_eq_struct!(InlineAsmSym; id qself path); -spanless_eq_struct!(Item<K>; attrs id span vis ident kind !tokens); +spanless_eq_struct!(Item<K>; attrs id span vis kind !tokens); spanless_eq_struct!(Label; ident); spanless_eq_struct!(Lifetime; id ident); spanless_eq_struct!(Lit; kind symbol suffix); -spanless_eq_struct!(Local; pat ty kind id span attrs !tokens); +spanless_eq_struct!(Local; id super_ pat ty kind span colon_sp attrs !tokens); spanless_eq_struct!(MacCall; path args); spanless_eq_struct!(MacCallStmt; mac style attrs tokens); spanless_eq_struct!(MacroDef; body macro_rules); +spanless_eq_struct!(MetaItem; unsafety path kind span); spanless_eq_struct!(MetaItemLit; symbol suffix kind span); spanless_eq_struct!(MethodCall; seg receiver args !span); spanless_eq_struct!(ModSpans; !inner_span !inject_use_span); @@ -497,46 +532,54 @@ spanless_eq_struct!(Pat; id kind span tokens); spanless_eq_struct!(PatField; ident pat is_shorthand attrs id span is_placeholder); spanless_eq_struct!(Path; span segments tokens); spanless_eq_struct!(PathSegment; ident id args); -spanless_eq_struct!(PolyTraitRef; bound_generic_params trait_ref span); +spanless_eq_struct!(PolyTraitRef; bound_generic_params modifiers trait_ref span parens); spanless_eq_struct!(QSelf; ty path_span position); -spanless_eq_struct!(StaticItem; ty mutability expr); +spanless_eq_struct!(StaticItem; ident ty safety mutability expr define_opaque); spanless_eq_struct!(Stmt; id kind span); spanless_eq_struct!(StrLit; symbol suffix symbol_unescaped style span); spanless_eq_struct!(StructExpr; qself path fields rest); spanless_eq_struct!(Token; kind span); -spanless_eq_struct!(Trait; unsafety is_auto generics bounds items); +spanless_eq_struct!(Trait; constness safety is_auto ident generics bounds items); +spanless_eq_struct!(TraitAlias; constness ident generics bounds); +spanless_eq_struct!(TraitBoundModifiers; constness asyncness polarity); +spanless_eq_struct!(TraitImplHeader; defaultness safety polarity trait_ref); spanless_eq_struct!(TraitRef; path ref_id); spanless_eq_struct!(Ty; id kind span tokens); -spanless_eq_struct!(TyAlias; defaultness generics where_clauses !where_predicates_split bounds ty); -spanless_eq_struct!(TyAliasWhereClause; !0 1); +spanless_eq_struct!(TyAlias; defaultness ident generics after_where_clause bounds ty); +spanless_eq_struct!(TyPat; id kind span tokens); +spanless_eq_struct!(UnsafeBinderTy; generic_params inner_ty); spanless_eq_struct!(UseTree; prefix kind span); spanless_eq_struct!(Variant; attrs id span !vis ident data disr_expr is_placeholder); spanless_eq_struct!(Visibility; kind span tokens); -spanless_eq_struct!(WhereBoundPredicate; span bound_generic_params bounded_ty bounds); +spanless_eq_struct!(WhereBoundPredicate; bound_generic_params bounded_ty bounds); spanless_eq_struct!(WhereClause; has_where_token predicates span); -spanless_eq_struct!(WhereEqPredicate; span lhs_ty rhs_ty); -spanless_eq_struct!(WhereRegionPredicate; span lifetime bounds); +spanless_eq_struct!(WhereEqPredicate; lhs_ty rhs_ty); +spanless_eq_struct!(WherePredicate; attrs kind id span is_placeholder); +spanless_eq_struct!(WhereRegionPredicate; lifetime bounds); spanless_eq_enum!(AngleBracketedArg; Arg(0) Constraint(0)); -spanless_eq_enum!(AssocConstraintKind; Equality(term) Bound(bounds)); -spanless_eq_enum!(AssocItemKind; Const(0) Fn(0) Type(0) MacCall(0)); -spanless_eq_enum!(Async; Yes(span closure_id return_impl_trait_id) No); -spanless_eq_enum!(AttrArgs; Empty Delimited(0) Eq(0 1)); 
-spanless_eq_enum!(AttrArgsEq; Ast(0) Hir(0)); +spanless_eq_enum!(AsmMacro; Asm GlobalAsm NakedAsm); +spanless_eq_enum!(AssocItemConstraintKind; Equality(term) Bound(bounds)); +spanless_eq_enum!(AssocItemKind; Const(0) Fn(0) Type(0) MacCall(0) Delegation(0) DelegationMac(0)); +spanless_eq_enum!(AttrArgs; Empty Delimited(0) Eq(eq_span expr)); spanless_eq_enum!(AttrStyle; Outer Inner); -spanless_eq_enum!(AttrTokenTree; Token(0 1) Delimited(0 1 2) Attributes(0)); +spanless_eq_enum!(AttrTokenTree; Token(0 1) Delimited(0 1 2 3) AttrsTarget(0)); spanless_eq_enum!(BinOpKind; Add Sub Mul Div Rem And Or BitXor BitAnd BitOr Shl Shr Eq Lt Le Ne Ge Gt); spanless_eq_enum!(BlockCheckMode; Default Unsafe(0)); -spanless_eq_enum!(BorrowKind; Ref Raw); +spanless_eq_enum!(BorrowKind; Ref Raw Pin); +spanless_eq_enum!(BoundAsyncness; Normal Async(0)); +spanless_eq_enum!(BoundConstness; Never Always(0) Maybe(0)); spanless_eq_enum!(BoundPolarity; Positive Negative(0) Maybe(0)); -spanless_eq_enum!(ByRef; Yes No); -spanless_eq_enum!(CaptureBy; Value Ref); +spanless_eq_enum!(ByRef; Yes(0 1) No); +spanless_eq_enum!(CaptureBy; Value(move_kw) Ref Use(use_kw)); spanless_eq_enum!(ClosureBinder; NotPresent For(span generic_params)); spanless_eq_enum!(Const; Yes(0) No); +spanless_eq_enum!(ConstItemRhs; TypeConst(0) Body(0)); spanless_eq_enum!(Defaultness; Default(0) Final); spanless_eq_enum!(Extern; None Implicit(0) Explicit(0 1)); -spanless_eq_enum!(FloatTy; F32 F64); +spanless_eq_enum!(FloatTy; F16 F32 F64 F128); spanless_eq_enum!(FnRetTy; Default(0) Ty(0)); -spanless_eq_enum!(ForeignItemKind; Static(0 1 2) Fn(0) TyAlias(0) MacCall(0)); +spanless_eq_enum!(ForLoopKind; For ForAwait); +spanless_eq_enum!(ForeignItemKind; Static(0) Fn(0) TyAlias(0) MacCall(0)); spanless_eq_enum!(FormatAlignment; Left Right Center); spanless_eq_enum!(FormatArgPositionKind; Implicit Number Named); spanless_eq_enum!(FormatArgsPiece; Literal(0) Placeholder(0)); @@ -545,12 +588,13 @@ spanless_eq_enum!(FormatCount; Literal(0) Argument(0)); spanless_eq_enum!(FormatDebugHex; Lower Upper); spanless_eq_enum!(FormatSign; Plus Minus); spanless_eq_enum!(FormatTrait; Display Debug LowerExp UpperExp Octal Pointer Binary LowerHex UpperHex); +spanless_eq_enum!(GenBlockKind; Async Gen AsyncGen); spanless_eq_enum!(GenericArg; Lifetime(0) Type(0) Const(0)); -spanless_eq_enum!(GenericArgs; AngleBracketed(0) Parenthesized(0)); -spanless_eq_enum!(GenericBound; Trait(0 1) Outlives(0)); -spanless_eq_enum!(GenericParamKind; Lifetime Type(default) Const(ty kw_span default)); +spanless_eq_enum!(GenericArgs; AngleBracketed(0) Parenthesized(0) ParenthesizedElided(0)); +spanless_eq_enum!(GenericBound; Trait(0) Outlives(0) Use(0 1)); +spanless_eq_enum!(GenericParamKind; Lifetime Type(default) Const(ty span default)); spanless_eq_enum!(ImplPolarity; Positive Negative(0)); -spanless_eq_enum!(Inline; Yes No); +spanless_eq_enum!(Inline; Yes No(had_parse_error)); spanless_eq_enum!(InlineAsmRegOrRegClass; Reg(0) RegClass(0)); spanless_eq_enum!(InlineAsmTemplatePiece; String(0) Placeholder(operand_idx modifier span)); spanless_eq_enum!(IntTy; Isize I8 I16 I32 I64 I128); @@ -559,48 +603,67 @@ spanless_eq_enum!(LitFloatType; Suffixed(0) Unsuffixed); spanless_eq_enum!(LitIntType; Signed(0) Unsigned(0) Unsuffixed); spanless_eq_enum!(LocalKind; Decl Init(0) InitElse(0 1)); spanless_eq_enum!(MacStmtStyle; Semicolon Braces NoBraces); +spanless_eq_enum!(MatchKind; Prefix Postfix); +spanless_eq_enum!(MetaItemKind; Word List(0) NameValue(0)); +spanless_eq_enum!(MetaItemInner; MetaItem(0) 
Lit(0)); spanless_eq_enum!(ModKind; Loaded(0 1 2) Unloaded); spanless_eq_enum!(Movability; Static Movable); spanless_eq_enum!(Mutability; Mut Not); +spanless_eq_enum!(Parens; Yes No); +spanless_eq_enum!(PatFieldsRest; Rest(0) Recovered(0) None); +spanless_eq_enum!(Pinnedness; Not Pinned); +spanless_eq_enum!(PreciseCapturingArg; Lifetime(0) Arg(0 1)); spanless_eq_enum!(RangeEnd; Included(0) Excluded); spanless_eq_enum!(RangeLimits; HalfOpen Closed); -spanless_eq_enum!(StmtKind; Local(0) Item(0) Expr(0) Semi(0) Empty MacCall(0)); +spanless_eq_enum!(Recovered; No Yes(0)); +spanless_eq_enum!(Safety; Unsafe(0) Safe(0) Default); +spanless_eq_enum!(StmtKind; Let(0) Item(0) Expr(0) Semi(0) Empty MacCall(0)); spanless_eq_enum!(StrStyle; Cooked Raw(0)); spanless_eq_enum!(StructRest; Base(0) Rest(0) None); spanless_eq_enum!(Term; Ty(0) Const(0)); -spanless_eq_enum!(TokenTree; Token(0 1) Delimited(0 1 2)); -spanless_eq_enum!(TraitBoundModifier; None Negative Maybe MaybeConst MaybeConstNegative MaybeConstMaybe); -spanless_eq_enum!(TraitObjectSyntax; Dyn DynStar None); +spanless_eq_enum!(TokenTree; Token(0 1) Delimited(0 1 2 3)); +spanless_eq_enum!(TraitObjectSyntax; Dyn None); +spanless_eq_enum!(TyPatKind; Range(0 1 2) NotNull Or(0) Err(0)); spanless_eq_enum!(UintTy; Usize U8 U16 U32 U64 U128); spanless_eq_enum!(UnOp; Deref Not Neg); -spanless_eq_enum!(Unsafe; Yes(0) No); +spanless_eq_enum!(UnsafeBinderCastKind; Wrap Unwrap); spanless_eq_enum!(UnsafeSource; CompilerGenerated UserProvided); -spanless_eq_enum!(UseTreeKind; Simple(0) Nested(0) Glob); -spanless_eq_enum!(VariantData; Struct(0 1) Tuple(0 1) Unit(0)); +spanless_eq_enum!(UseTreeKind; Simple(0) Nested(items span) Glob); +spanless_eq_enum!(VariantData; Struct(fields recovered) Tuple(0 1) Unit(0)); spanless_eq_enum!(VisibilityKind; Public Restricted(path id shorthand) Inherited); -spanless_eq_enum!(WherePredicate; BoundPredicate(0) RegionPredicate(0) EqPredicate(0)); +spanless_eq_enum!(WherePredicateKind; BoundPredicate(0) RegionPredicate(0) EqPredicate(0)); +spanless_eq_enum!(YieldKind; Prefix(0) Postfix(0)); +spanless_eq_enum!(AssignOpKind; AddAssign SubAssign MulAssign DivAssign + RemAssign BitXorAssign BitAndAssign BitOrAssign ShlAssign ShrAssign); +spanless_eq_enum!(CoroutineKind; Async(span closure_id return_impl_trait_id) + Gen(span closure_id return_impl_trait_id) + AsyncGen(span closure_id return_impl_trait_id)); spanless_eq_enum!(ExprKind; Array(0) ConstBlock(0) Call(0 1) MethodCall(0) Tup(0) Binary(0 1 2) Unary(0 1) Lit(0) Cast(0 1) Type(0 1) Let(0 1 2 3) - If(0 1 2) While(0 1 2) ForLoop(0 1 2 3) Loop(0 1 2) Match(0 1) Closure(0) - Block(0 1) Async(0 1) Await(0 1) TryBlock(0) Assign(0 1 2) AssignOp(0 1 2) - Field(0 1) Index(0 1 2) Underscore Range(0 1 2) Path(0 1) AddrOf(0 1 2) - Break(0 1) Continue(0) Ret(0) InlineAsm(0) OffsetOf(0 1) MacCall(0) - Struct(0) Repeat(0 1) Paren(0) Try(0) Yield(0) Yeet(0) Become(0) - IncludedBytes(0) FormatArgs(0) Err); + If(0 1 2) While(0 1 2) ForLoop(pat iter body label kind) Loop(0 1 2) + Match(0 1 2) Closure(0) Block(0 1) Gen(0 1 2 3) Await(0 1) Use(0 1) + TryBlock(0) Assign(0 1 2) AssignOp(0 1 2) Field(0 1) Index(0 1 2) Underscore + Range(0 1 2) Path(0 1) AddrOf(0 1 2) Break(0 1) Continue(0) Ret(0) + InlineAsm(0) OffsetOf(0 1) MacCall(0) Struct(0) Repeat(0 1) Paren(0) Try(0) + Yield(0) Yeet(0) Become(0) IncludedBytes(0) FormatArgs(0) + UnsafeBinderCast(0 1 2) Err(0) Dummy); spanless_eq_enum!(InlineAsmOperand; In(reg expr) Out(reg late expr) InOut(reg late expr) SplitInOut(reg late in_expr out_expr) 
Const(anon_const) - Sym(sym)); -spanless_eq_enum!(ItemKind; ExternCrate(0) Use(0) Static(0) Const(0) Fn(0) - Mod(0 1) ForeignMod(0) GlobalAsm(0) TyAlias(0) Enum(0 1) Struct(0 1) - Union(0 1) Trait(0) TraitAlias(0 1) Impl(0) MacCall(0) MacroDef(0)); + Sym(sym) Label(block)); +spanless_eq_enum!(ItemKind; ExternCrate(0 1) Use(0) Static(0) Const(0) Fn(0) + Mod(0 1 2) ForeignMod(0) GlobalAsm(0) TyAlias(0) Enum(0 1 2) Struct(0 1 2) + Union(0 1 2) Trait(0) TraitAlias(0) Impl(0) MacCall(0) MacroDef(0 1) + Delegation(0) DelegationMac(0)); spanless_eq_enum!(LitKind; Str(0 1) ByteStr(0 1) CStr(0 1) Byte(0) Char(0) - Int(0 1) Float(0 1) Bool(0) Err); -spanless_eq_enum!(PatKind; Wild Ident(0 1 2) Struct(0 1 2 3) TupleStruct(0 1 2) - Or(0) Path(0 1) Tuple(0) Box(0) Ref(0 1) Lit(0) Range(0 1 2) Slice(0) Rest - Paren(0) MacCall(0)); -spanless_eq_enum!(TyKind; Slice(0) Array(0 1) Ptr(0) Ref(0 1) BareFn(0) Never - Tup(0) AnonStruct(0) AnonUnion(0) Path(0 1) TraitObject(0 1) ImplTrait(0 1) - Paren(0) Typeof(0) Infer ImplicitSelf MacCall(0) Err CVarArgs); + Int(0 1) Float(0 1) Bool(0) Err(0)); +spanless_eq_enum!(PatKind; Missing Wild Ident(0 1 2) Struct(0 1 2 3) + TupleStruct(0 1 2) Or(0) Path(0 1) Tuple(0) Box(0) Deref(0) Ref(0 1 2) + Expr(0) Range(0 1 2) Slice(0) Rest Never Guard(0 1) Paren(0) MacCall(0) + Err(0)); +spanless_eq_enum!(TyKind; Slice(0) Array(0 1) Ptr(0) Ref(0 1) PinnedRef(0 1) + FnPtr(0) UnsafeBinder(0) Never Tup(0) Path(0 1) TraitObject(0 1) + ImplTrait(0 1) Paren(0) Typeof(0) Infer ImplicitSelf MacCall(0) CVarArgs + Pat(0 1) Dummy Err(0)); impl SpanlessEq for Ident { fn eq(&self, other: &Self) -> bool { @@ -636,8 +699,8 @@ impl SpanlessEq for Param { } = other; SpanlessEq::eq(id, id2) && SpanlessEq::eq(is_placeholder, is_placeholder2) - && (matches!(ty.kind, TyKind::Err) - || matches!(ty2.kind, TyKind::Err) + && (matches!(ty.kind, TyKind::Err(_)) + || matches!(ty2.kind, TyKind::Err(_)) || SpanlessEq::eq(attrs, attrs2) && SpanlessEq::eq(ty, ty2) && SpanlessEq::eq(pat, pat2)) @@ -652,14 +715,6 @@ impl SpanlessEq for TokenKind { TokenKind::DotDotEq | TokenKind::DotDotDot => true, _ => false, }, - (TokenKind::Interpolated(this), TokenKind::Interpolated(other)) => { - match (this.as_ref(), other.as_ref()) { - (Nonterminal::NtExpr(this), Nonterminal::NtExpr(other)) => { - SpanlessEq::eq(this, other) - } - _ => this == other, - } - } _ => self == other, } } @@ -667,16 +722,14 @@ impl SpanlessEq for TokenKind { impl SpanlessEq for TokenStream { fn eq(&self, other: &Self) -> bool { - let mut this_trees = self.trees(); - let mut other_trees = other.trees(); + let mut this_trees = self.iter(); + let mut other_trees = other.iter(); loop { - let this = match this_trees.next() { - None => return other_trees.next().is_none(), - Some(tree) => tree, + let Some(this) = this_trees.next() else { + return other_trees.next().is_none(); }; - let other = match other_trees.next() { - None => return false, - Some(tree) => tree, + let Some(other) = other_trees.next() else { + return false; }; if SpanlessEq::eq(this, other) { continue; @@ -714,7 +767,7 @@ fn doc_comment<'a>( match trees.next() { Some(TokenTree::Token( Token { - kind: TokenKind::Not, + kind: TokenKind::Bang, span: _, }, _spacing, @@ -722,15 +775,15 @@ fn doc_comment<'a>( _ => return false, } } - let stream = match trees.next() { - Some(TokenTree::Delimited(_span, Delimiter::Bracket, stream)) => stream, - _ => return false, + let Some(TokenTree::Delimited(_span, _spacing, Delimiter::Bracket, stream)) = trees.next() + else { + return false; }; - let mut 
trees = stream.trees(); + let mut trees = stream.iter(); match trees.next() { Some(TokenTree::Token( Token { - kind: TokenKind::Ident(symbol, false), + kind: TokenKind::Ident(symbol, IdentIsRaw::No), span: _, }, _spacing, @@ -764,30 +817,10 @@ fn is_escaped_literal_token(token: &Token, unescaped: Symbol) -> bool { Ok(lit) => is_escaped_literal_meta_item_lit(&lit, unescaped), Err(_) => false, }, - Token { - kind: TokenKind::Interpolated(nonterminal), - span: _, - } => match nonterminal.as_ref() { - Nonterminal::NtExpr(expr) => match &expr.kind { - ExprKind::Lit(lit) => is_escaped_lit(lit, unescaped), - _ => false, - }, - _ => false, - }, _ => false, } } -fn is_escaped_literal_attr_args(value: &AttrArgsEq, unescaped: Symbol) -> bool { - match value { - AttrArgsEq::Ast(expr) => match &expr.kind { - ExprKind::Lit(lit) => is_escaped_lit(lit, unescaped), - _ => false, - }, - AttrArgsEq::Hir(lit) => is_escaped_literal_meta_item_lit(lit, unescaped), - } -} - fn is_escaped_literal_meta_item_lit(lit: &MetaItemLit, unescaped: Symbol) -> bool { match lit { MetaItemLit { @@ -848,9 +881,10 @@ impl SpanlessEq for AttrKind { SpanlessEq::eq(&path, &normal2.item.path) && match &normal2.item.args { AttrArgs::Empty | AttrArgs::Delimited(_) => false, - AttrArgs::Eq(_span, value) => { - is_escaped_literal_attr_args(value, *unescaped) - } + AttrArgs::Eq { eq_span: _, expr } => match &expr.kind { + ExprKind::Lit(lit) => is_escaped_lit(lit, *unescaped), + _ => false, + }, } } (AttrKind::Normal(_), AttrKind::DocComment(..)) => SpanlessEq::eq(other, self), diff --git a/vendor/syn/tests/common/mod.rs b/vendor/syn/tests/common/mod.rs index 2156530b..ead830f8 100644 --- a/vendor/syn/tests/common/mod.rs +++ b/vendor/syn/tests/common/mod.rs @@ -1,28 +1,6 @@ #![allow(dead_code)] #![allow(clippy::module_name_repetitions, clippy::shadow_unrelated)] -use rayon::ThreadPoolBuilder; -use std::env; - pub mod eq; pub mod parse; - -/// Read the `ABORT_AFTER_FAILURE` environment variable, and parse it. -pub fn abort_after() -> usize { - match env::var("ABORT_AFTER_FAILURE") { - Ok(s) => s.parse().expect("failed to parse ABORT_AFTER_FAILURE"), - Err(_) => usize::max_value(), - } -} - -/// Configure Rayon threadpool. 
-pub fn rayon_init() { - let stack_size = match env::var("RUST_MIN_STACK") { - Ok(s) => s.parse().expect("failed to parse RUST_MIN_STACK"), - Err(_) => 20 * 1024 * 1024, - }; - ThreadPoolBuilder::new() - .stack_size(stack_size) - .build_global() - .unwrap(); -} +pub mod visit; diff --git a/vendor/syn/tests/common/parse.rs b/vendor/syn/tests/common/parse.rs index 73be1018..81ae357c 100644 --- a/vendor/syn/tests/common/parse.rs +++ b/vendor/syn/tests/common/parse.rs @@ -1,31 +1,32 @@ extern crate rustc_ast; extern crate rustc_driver; extern crate rustc_expand; -extern crate rustc_parse as parse; +extern crate rustc_parse; extern crate rustc_session; extern crate rustc_span; use rustc_ast::ast; -use rustc_ast::ptr::P; +use rustc_parse::lexer::StripTokens; use rustc_session::parse::ParseSess; -use rustc_span::source_map::FilePathMapping; use rustc_span::FileName; use std::panic; -pub fn librustc_expr(input: &str) -> Option<P<ast::Expr>> { +pub fn librustc_expr(input: &str) -> Option<Box<ast::Expr>> { match panic::catch_unwind(|| { let locale_resources = rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec(); - let file_path_mapping = FilePathMapping::empty(); - let sess = ParseSess::new(locale_resources, file_path_mapping); - let e = parse::new_parser_from_source_str( + let sess = ParseSess::new(locale_resources); + let name = FileName::Custom("test_precedence".to_string()); + let mut parser = rustc_parse::new_parser_from_source_str( &sess, - FileName::Custom("test_precedence".to_string()), + name, input.to_string(), + StripTokens::ShebangAndFrontmatter, ) - .parse_expr(); - match e { + .unwrap(); + let presult = parser.parse_expr(); + match presult { Ok(expr) => Some(expr), - Err(mut diagnostic) => { + Err(diagnostic) => { diagnostic.emit(); None } diff --git a/vendor/syn/tests/common/visit.rs b/vendor/syn/tests/common/visit.rs new file mode 100644 index 00000000..2d2a6c53 --- /dev/null +++ b/vendor/syn/tests/common/visit.rs @@ -0,0 +1,119 @@ +use proc_macro2::{Delimiter, Group, TokenStream, TokenTree}; +use std::mem; +use syn::visit_mut::{self, VisitMut}; +use syn::{Expr, File, Generics, LifetimeParam, MacroDelimiter, Stmt, StmtMacro, TypeParam}; + +pub struct FlattenParens { + discard_paren_attrs: bool, +} + +impl FlattenParens { + pub fn discard_attrs() -> Self { + FlattenParens { + discard_paren_attrs: true, + } + } + + pub fn combine_attrs() -> Self { + FlattenParens { + discard_paren_attrs: false, + } + } + + pub fn visit_token_stream_mut(tokens: &mut TokenStream) { + *tokens = mem::take(tokens) + .into_iter() + .flat_map(|tt| { + if let TokenTree::Group(group) = tt { + let delimiter = group.delimiter(); + let mut content = group.stream(); + Self::visit_token_stream_mut(&mut content); + if let Delimiter::Parenthesis = delimiter { + content + } else { + TokenStream::from(TokenTree::Group(Group::new(delimiter, content))) + } + } else { + TokenStream::from(tt) + } + }) + .collect(); + } +} + +impl VisitMut for FlattenParens { + fn visit_expr_mut(&mut self, e: &mut Expr) { + while let Expr::Paren(paren) = e { + let paren_attrs = mem::take(&mut paren.attrs); + *e = mem::replace(&mut *paren.expr, Expr::PLACEHOLDER); + if !paren_attrs.is_empty() && !self.discard_paren_attrs { + let nested_attrs = match e { + Expr::Assign(e) => &mut e.attrs, + Expr::Binary(e) => &mut e.attrs, + Expr::Cast(e) => &mut e.attrs, + _ => unimplemented!(), + }; + assert!(nested_attrs.is_empty()); + *nested_attrs = paren_attrs; + } + } + visit_mut::visit_expr_mut(self, e); + } +} + +pub struct AsIfPrinted; + +impl VisitMut 
for AsIfPrinted { + fn visit_file_mut(&mut self, file: &mut File) { + file.shebang = None; + visit_mut::visit_file_mut(self, file); + } + + fn visit_generics_mut(&mut self, generics: &mut Generics) { + if generics.params.is_empty() { + generics.lt_token = None; + generics.gt_token = None; + } + if let Some(where_clause) = &generics.where_clause { + if where_clause.predicates.is_empty() { + generics.where_clause = None; + } + } + visit_mut::visit_generics_mut(self, generics); + } + + fn visit_lifetime_param_mut(&mut self, param: &mut LifetimeParam) { + if param.bounds.is_empty() { + param.colon_token = None; + } + visit_mut::visit_lifetime_param_mut(self, param); + } + + fn visit_stmt_mut(&mut self, stmt: &mut Stmt) { + if let Stmt::Expr(expr, semi) = stmt { + if let Expr::Macro(e) = expr { + if match e.mac.delimiter { + MacroDelimiter::Brace(_) => true, + MacroDelimiter::Paren(_) | MacroDelimiter::Bracket(_) => semi.is_some(), + } { + let Expr::Macro(expr) = mem::replace(expr, Expr::PLACEHOLDER) else { + unreachable!(); + }; + *stmt = Stmt::Macro(StmtMacro { + attrs: expr.attrs, + mac: expr.mac, + semi_token: *semi, + }); + } + } + } + visit_mut::visit_stmt_mut(self, stmt); + } + + fn visit_type_param_mut(&mut self, param: &mut TypeParam) { + if param.bounds.is_empty() { + param.colon_token = None; + } + visit_mut::visit_type_param_mut(self, param); + } +} diff --git a/vendor/syn/tests/debug/gen.rs b/vendor/syn/tests/debug/gen.rs index 3f92598d..f91977a6 100644 --- a/vendor/syn/tests/debug/gen.rs +++ b/vendor/syn/tests/debug/gen.rs @@ -1,7 +1,7 @@ // This file is @generated by syn-internal-codegen. // It is not intended for manual editing. -#![allow(repr_transparent_external_private_fields)] +#![allow(repr_transparent_non_zst_fields)] #![allow(clippy::match_wildcard_for_single_variants)] use super::{Lite, Present}; use ref_cast::RefCast; @@ -301,9 +301,7 @@ impl Debug for Lite<syn::BinOp> { impl Debug for Lite<syn::Block> { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let mut formatter = formatter.debug_struct("Block"); - if !self.value.stmts.is_empty() { - formatter.field("stmts", Lite(&self.value.stmts)); - } + formatter.field("stmts", Lite(&self.value.stmts)); formatter.finish() } } @@ -316,6 +314,27 @@ impl Debug for Lite<syn::BoundLifetimes> { formatter.finish() } } +impl Debug for Lite<syn::CapturedParam> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + match &self.value { + syn::CapturedParam::Lifetime(_val) => { + formatter.write_str("CapturedParam::Lifetime")?; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + syn::CapturedParam::Ident(_val) => { + formatter.write_str("CapturedParam::Ident")?; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + _ => unreachable!(), + } + } +} impl Debug for Lite<syn::ConstParam> { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let mut formatter = formatter.debug_struct("ConstParam"); @@ -858,6 +877,15 @@ impl Debug for Lite<syn::Expr> { } formatter.finish() } + syn::Expr::RawAddr(_val) => { + let mut formatter = formatter.debug_struct("Expr::RawAddr"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("mutability", Lite(&_val.mutability)); + formatter.field("expr", Lite(&_val.expr)); + formatter.finish() + } syn::Expr::Reference(_val) => { let mut formatter = formatter.debug_struct("Expr::Reference"); if !_val.attrs.is_empty() { 
@@ -1515,6 +1543,17 @@ impl Debug for Lite<syn::ExprRange> { formatter.finish() } } +impl Debug for Lite<syn::ExprRawAddr> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ExprRawAddr"); + if !self.value.attrs.is_empty() { + formatter.field("attrs", Lite(&self.value.attrs)); + } + formatter.field("mutability", Lite(&self.value.mutability)); + formatter.field("expr", Lite(&self.value.expr)); + formatter.finish() + } +} impl Debug for Lite<syn::ExprReference> { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let mut formatter = formatter.debug_struct("ExprReference"); @@ -2866,6 +2905,7 @@ impl Debug for Lite<syn::Lit> { match &self.value { syn::Lit::Str(_val) => write!(formatter, "{:?}", _val.value()), syn::Lit::ByteStr(_val) => write!(formatter, "{:?}", _val.value()), + syn::Lit::CStr(_val) => write!(formatter, "{:?}", _val.value()), syn::Lit::Byte(_val) => write!(formatter, "{:?}", _val.value()), syn::Lit::Char(_val) => write!(formatter, "{:?}", _val.value()), syn::Lit::Int(_val) => write!(formatter, "{}", _val), @@ -2903,6 +2943,11 @@ impl Debug for Lite<syn::LitByteStr> { write!(formatter, "{:?}", self.value.value()) } } +impl Debug for Lite<syn::LitCStr> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + write!(formatter, "{:?}", self.value.value()) + } +} impl Debug for Lite<syn::LitChar> { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { write!(formatter, "{:?}", self.value.value()) @@ -3528,6 +3573,29 @@ impl Debug for Lite<syn::PathSegment> { formatter.finish() } } +impl Debug for Lite<syn::PointerMutability> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + match &self.value { + syn::PointerMutability::Const(_val) => { + formatter.write_str("PointerMutability::Const")?; + Ok(()) + } + syn::PointerMutability::Mut(_val) => { + formatter.write_str("PointerMutability::Mut")?; + Ok(()) + } + } + } +} +impl Debug for Lite<syn::PreciseCapture> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("PreciseCapture"); + if !self.value.params.is_empty() { + formatter.field("params", Lite(&self.value.params)); + } + formatter.finish() + } +} impl Debug for Lite<syn::PredicateLifetime> { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let mut formatter = formatter.debug_struct("PredicateLifetime"); @@ -4350,6 +4418,13 @@ impl Debug for Lite<syn::TypeParamBound> { formatter.field("ident", Lite(&_val.ident)); formatter.finish() } + syn::TypeParamBound::PreciseCapture(_val) => { + formatter.write_str("TypeParamBound::PreciseCapture")?; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } syn::TypeParamBound::Verbatim(_val) => { formatter.write_str("TypeParamBound::Verbatim")?; formatter.write_str("(`")?; @@ -4662,3 +4737,503 @@ impl Debug for Lite<syn::WherePredicate> { } } } +impl Debug for Lite<syn::token::Abstract> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![abstract]") + } +} +impl Debug for Lite<syn::token::And> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![&]") + } +} +impl Debug for Lite<syn::token::AndAnd> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![&&]") + } +} +impl Debug for Lite<syn::token::AndEq> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + 
formatter.write_str("Token![&=]") + } +} +impl Debug for Lite<syn::token::As> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![as]") + } +} +impl Debug for Lite<syn::token::Async> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![async]") + } +} +impl Debug for Lite<syn::token::At> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![@]") + } +} +impl Debug for Lite<syn::token::Auto> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![auto]") + } +} +impl Debug for Lite<syn::token::Await> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![await]") + } +} +impl Debug for Lite<syn::token::Become> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![become]") + } +} +impl Debug for Lite<syn::token::Box> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![box]") + } +} +impl Debug for Lite<syn::token::Break> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![break]") + } +} +impl Debug for Lite<syn::token::Caret> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![^]") + } +} +impl Debug for Lite<syn::token::CaretEq> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![^=]") + } +} +impl Debug for Lite<syn::token::Colon> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![:]") + } +} +impl Debug for Lite<syn::token::Comma> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![,]") + } +} +impl Debug for Lite<syn::token::Const> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![const]") + } +} +impl Debug for Lite<syn::token::Continue> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![continue]") + } +} +impl Debug for Lite<syn::token::Crate> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![crate]") + } +} +impl Debug for Lite<syn::token::Default> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![default]") + } +} +impl Debug for Lite<syn::token::Do> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![do]") + } +} +impl Debug for Lite<syn::token::Dollar> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![$]") + } +} +impl Debug for Lite<syn::token::Dot> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![.]") + } +} +impl Debug for Lite<syn::token::DotDot> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![..]") + } +} +impl Debug for Lite<syn::token::DotDotDot> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![...]") + } +} +impl Debug for Lite<syn::token::DotDotEq> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![..=]") + } +} +impl Debug for Lite<syn::token::Dyn> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![dyn]") + } +} +impl Debug for 
Lite<syn::token::Else> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![else]") + } +} +impl Debug for Lite<syn::token::Enum> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![enum]") + } +} +impl Debug for Lite<syn::token::Eq> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![=]") + } +} +impl Debug for Lite<syn::token::EqEq> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![==]") + } +} +impl Debug for Lite<syn::token::Extern> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![extern]") + } +} +impl Debug for Lite<syn::token::FatArrow> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![=>]") + } +} +impl Debug for Lite<syn::token::Final> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![final]") + } +} +impl Debug for Lite<syn::token::Fn> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![fn]") + } +} +impl Debug for Lite<syn::token::For> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![for]") + } +} +impl Debug for Lite<syn::token::Ge> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![>=]") + } +} +impl Debug for Lite<syn::token::Gt> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![>]") + } +} +impl Debug for Lite<syn::token::If> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![if]") + } +} +impl Debug for Lite<syn::token::Impl> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![impl]") + } +} +impl Debug for Lite<syn::token::In> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![in]") + } +} +impl Debug for Lite<syn::token::LArrow> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![<-]") + } +} +impl Debug for Lite<syn::token::Le> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![<=]") + } +} +impl Debug for Lite<syn::token::Let> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![let]") + } +} +impl Debug for Lite<syn::token::Loop> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![loop]") + } +} +impl Debug for Lite<syn::token::Lt> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![<]") + } +} +impl Debug for Lite<syn::token::Macro> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![macro]") + } +} +impl Debug for Lite<syn::token::Match> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![match]") + } +} +impl Debug for Lite<syn::token::Minus> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![-]") + } +} +impl Debug for Lite<syn::token::MinusEq> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![-=]") + } +} +impl Debug for Lite<syn::token::Mod> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + 
formatter.write_str("Token![mod]") + } +} +impl Debug for Lite<syn::token::Move> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![move]") + } +} +impl Debug for Lite<syn::token::Mut> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![mut]") + } +} +impl Debug for Lite<syn::token::Ne> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![!=]") + } +} +impl Debug for Lite<syn::token::Not> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![!]") + } +} +impl Debug for Lite<syn::token::Or> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![|]") + } +} +impl Debug for Lite<syn::token::OrEq> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![|=]") + } +} +impl Debug for Lite<syn::token::OrOr> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![||]") + } +} +impl Debug for Lite<syn::token::Override> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![override]") + } +} +impl Debug for Lite<syn::token::PathSep> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![::]") + } +} +impl Debug for Lite<syn::token::Percent> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![%]") + } +} +impl Debug for Lite<syn::token::PercentEq> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![%=]") + } +} +impl Debug for Lite<syn::token::Plus> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![+]") + } +} +impl Debug for Lite<syn::token::PlusEq> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![+=]") + } +} +impl Debug for Lite<syn::token::Pound> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![#]") + } +} +impl Debug for Lite<syn::token::Priv> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![priv]") + } +} +impl Debug for Lite<syn::token::Pub> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![pub]") + } +} +impl Debug for Lite<syn::token::Question> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![?]") + } +} +impl Debug for Lite<syn::token::RArrow> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![->]") + } +} +impl Debug for Lite<syn::token::Raw> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![raw]") + } +} +impl Debug for Lite<syn::token::Ref> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![ref]") + } +} +impl Debug for Lite<syn::token::Return> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![return]") + } +} +impl Debug for Lite<syn::token::SelfType> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![Self]") + } +} +impl Debug for Lite<syn::token::SelfValue> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![self]") + } +} +impl Debug for Lite<syn::token::Semi> { + 
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![;]") + } +} +impl Debug for Lite<syn::token::Shl> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![<<]") + } +} +impl Debug for Lite<syn::token::ShlEq> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![<<=]") + } +} +impl Debug for Lite<syn::token::Shr> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![>>]") + } +} +impl Debug for Lite<syn::token::ShrEq> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![>>=]") + } +} +impl Debug for Lite<syn::token::Slash> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![/]") + } +} +impl Debug for Lite<syn::token::SlashEq> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![/=]") + } +} +impl Debug for Lite<syn::token::Star> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![*]") + } +} +impl Debug for Lite<syn::token::StarEq> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![*=]") + } +} +impl Debug for Lite<syn::token::Static> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![static]") + } +} +impl Debug for Lite<syn::token::Struct> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![struct]") + } +} +impl Debug for Lite<syn::token::Super> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![super]") + } +} +impl Debug for Lite<syn::token::Tilde> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![~]") + } +} +impl Debug for Lite<syn::token::Trait> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![trait]") + } +} +impl Debug for Lite<syn::token::Try> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![try]") + } +} +impl Debug for Lite<syn::token::Type> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![type]") + } +} +impl Debug for Lite<syn::token::Typeof> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![typeof]") + } +} +impl Debug for Lite<syn::token::Underscore> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![_]") + } +} +impl Debug for Lite<syn::token::Union> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![union]") + } +} +impl Debug for Lite<syn::token::Unsafe> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![unsafe]") + } +} +impl Debug for Lite<syn::token::Unsized> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![unsized]") + } +} +impl Debug for Lite<syn::token::Use> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![use]") + } +} +impl Debug for Lite<syn::token::Virtual> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![virtual]") + } +} +impl Debug for Lite<syn::token::Where> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { 
+ formatter.write_str("Token![where]") + } +} +impl Debug for Lite<syn::token::While> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![while]") + } +} +impl Debug for Lite<syn::token::Yield> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Token![yield]") + } +} diff --git a/vendor/syn/tests/debug/mod.rs b/vendor/syn/tests/debug/mod.rs index caf9eed8..7ab2b795 100644 --- a/vendor/syn/tests/debug/mod.rs +++ b/vendor/syn/tests/debug/mod.rs @@ -82,7 +82,7 @@ impl Debug for Lite<TokenStream> { } } -impl<'a, T> Debug for Lite<&'a T> +impl<T> Debug for Lite<&T> where Lite<T>: Debug, { @@ -115,12 +115,16 @@ where impl<T, P> Debug for Lite<Punctuated<T, P>> where Lite<T>: Debug, + Lite<P>: Debug, { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter - .debug_list() - .entries(self.value.iter().map(Lite)) - .finish() + let mut list = formatter.debug_list(); + for pair in self.pairs() { + let (node, punct) = pair.into_tuple(); + list.entry(Lite(node)); + list.entries(punct.map(Lite)); + } + list.finish() } } diff --git a/vendor/syn/tests/macros/mod.rs b/vendor/syn/tests/macros/mod.rs index 3bfbe038..9c9a957f 100644 --- a/vendor/syn/tests/macros/mod.rs +++ b/vendor/syn/tests/macros/mod.rs @@ -1,10 +1,3 @@ -#![allow(unused_macros, unused_macro_rules)] - -#[path = "../debug/mod.rs"] -pub mod debug; - -use syn::parse::{Parse, Result}; - macro_rules! errorf { ($($tt:tt)*) => {{ use ::std::io::Write; @@ -12,77 +5,3 @@ macro_rules! errorf { write!(stderr.lock(), $($tt)*).unwrap(); }}; } - -macro_rules! punctuated { - ($($e:expr,)+) => {{ - let mut seq = ::syn::punctuated::Punctuated::new(); - $( - seq.push($e); - )+ - seq - }}; - - ($($e:expr),+) => { - punctuated!($($e,)+) - }; -} - -macro_rules! snapshot { - ($($args:tt)*) => { - snapshot_impl!(() $($args)*) - }; -} - -macro_rules! 
snapshot_impl { - (($expr:ident) as $t:ty, @$snapshot:literal) => { - let $expr = crate::macros::Tokens::parse::<$t>($expr).unwrap(); - let debug = crate::macros::debug::Lite(&$expr); - if !cfg!(miri) { - #[allow(clippy::needless_raw_string_hashes)] // https://github.com/mitsuhiko/insta/issues/389 - { - insta::assert_debug_snapshot!(debug, @$snapshot); - } - } - }; - (($($expr:tt)*) as $t:ty, @$snapshot:literal) => {{ - let syntax_tree = crate::macros::Tokens::parse::<$t>($($expr)*).unwrap(); - let debug = crate::macros::debug::Lite(&syntax_tree); - if !cfg!(miri) { - #[allow(clippy::needless_raw_string_hashes)] - { - insta::assert_debug_snapshot!(debug, @$snapshot); - } - } - syntax_tree - }}; - (($($expr:tt)*) , @$snapshot:literal) => {{ - let syntax_tree = $($expr)*; - let debug = crate::macros::debug::Lite(&syntax_tree); - if !cfg!(miri) { - #[allow(clippy::needless_raw_string_hashes)] - { - insta::assert_debug_snapshot!(debug, @$snapshot); - } - } - syntax_tree - }}; - (($($expr:tt)*) $next:tt $($rest:tt)*) => { - snapshot_impl!(($($expr)* $next) $($rest)*) - }; -} - -pub trait Tokens { - fn parse<T: Parse>(self) -> Result<T>; -} - -impl<'a> Tokens for &'a str { - fn parse<T: Parse>(self) -> Result<T> { - syn::parse_str(self) - } -} - -impl Tokens for proc_macro2::TokenStream { - fn parse<T: Parse>(self) -> Result<T> { - syn::parse2(self) - } -} diff --git a/vendor/syn/tests/repo/mod.rs b/vendor/syn/tests/repo/mod.rs index 61d5ff35..8cbb83bf 100644 --- a/vendor/syn/tests/repo/mod.rs +++ b/vendor/syn/tests/repo/mod.rs @@ -6,41 +6,290 @@ use self::progress::Progress; use anyhow::Result; use flate2::read::GzDecoder; use rayon::iter::{IntoParallelRefIterator, ParallelIterator}; +use rayon::ThreadPoolBuilder; use std::collections::BTreeSet; +use std::env; use std::ffi::OsStr; use std::fs; use std::path::{Path, PathBuf}; use tar::Archive; use walkdir::{DirEntry, WalkDir}; -const REVISION: &str = "9f5fc1bd443f59583e7af0d94d289f95fe1e20c4"; +// nightly-2025-08-14 +const REVISION: &str = "3672a55b7cfd0a12e7097197b6242872473ffaa7"; #[rustfmt::skip] static EXCLUDE_FILES: &[&str] = &[ - // TODO: CStr literals: c"…", cr"…" - // https://github.com/dtolnay/syn/issues/1502 - "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0085_expr_literals.rs", - - // TODO: explicit tail calls: `become _g()` - // https://github.com/dtolnay/syn/issues/1501 - "tests/ui/explicit-tail-calls/return-lifetime-sub.rs", - - // TODO: non-lifetime binders: `where for<'a, T> &'a Struct<T>: Trait` - // https://github.com/dtolnay/syn/issues/1435 - "tests/rustdoc-json/non_lifetime_binders.rs", - "tests/rustdoc/non_lifetime_binders.rs", - - // TODO: return type notation: `where T: Trait<method(): Send>` + // TODO: const traits: `pub const trait Trait {}` + // https://github.com/dtolnay/syn/issues/1887 + "src/tools/clippy/tests/ui/assign_ops.rs", + "src/tools/clippy/tests/ui/missing_const_for_fn/const_trait.rs", + "src/tools/clippy/tests/ui/trait_duplication_in_bounds.rs", + "src/tools/rust-analyzer/crates/test-utils/src/minicore.rs", + + // TODO: unsafe binders: `unsafe<'a> &'a T` + // https://github.com/dtolnay/syn/issues/1791 + "src/tools/rustfmt/tests/source/unsafe-binders.rs", + "src/tools/rustfmt/tests/target/unsafe-binders.rs", + "tests/mir-opt/gvn_on_unsafe_binder.rs", + "tests/rustdoc/auxiliary/unsafe-binder-dep.rs", + "tests/rustdoc/unsafe-binder.rs", + "tests/ui/unsafe-binders/cat-projection.rs", + + // TODO: unsafe fields: `struct S { unsafe field: T }` + // https://github.com/dtolnay/syn/issues/1792 + 
"src/tools/clippy/tests/ui/derive.rs", + "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_field_list.rs", + "src/tools/rustfmt/tests/source/unsafe-field.rs", + "src/tools/rustfmt/tests/target/unsafe-field.rs", + "tests/ui/unsafe-fields/auxiliary/unsafe-fields-crate-dep.rs", + + // TODO: guard patterns: `match expr { (A if f()) | (B if g()) => {} }` + // https://github.com/dtolnay/syn/issues/1793 + "src/tools/rustfmt/tests/target/guard_patterns.rs", + "tests/ui/pattern/rfc-3637-guard-patterns/only-gather-locals-once.rs", + + // TODO: struct field default: `struct S { field: i32 = 1 }` + // https://github.com/dtolnay/syn/issues/1774 + "compiler/rustc_errors/src/markdown/parse.rs", + "compiler/rustc_session/src/config.rs", + "src/tools/clippy/tests/ui/exhaustive_items.rs", + "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_field_default_values.rs", + "src/tools/rustfmt/tests/source/default-field-values.rs", + "src/tools/rustfmt/tests/target/default-field-values.rs", + "tests/ui/structs/default-field-values/auxiliary/struct_field_default.rs", + "tests/ui/structs/default-field-values/const-trait-default-field-value.rs", + "tests/ui/structs/default-field-values/field-references-param.rs", + "tests/ui/structs/default-field-values/support.rs", + "tests/ui/structs/default-field-values/use-normalized-ty-for-default-struct-value.rs", + + // TODO: return type notation: `where T: Trait<method(): Send>` and `where T::method(..): Send` // https://github.com/dtolnay/syn/issues/1434 - "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0208_associated_return_type_bounds.rs", + "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/return_type_syntax_in_path.rs", + "src/tools/rustfmt/tests/target/return-type-notation.rs", + "tests/rustdoc-json/return-type-notation.rs", + "tests/rustdoc/return-type-notation.rs", + "tests/ui/associated-type-bounds/all-generics-lookup.rs", + "tests/ui/associated-type-bounds/implied-from-self-where-clause.rs", "tests/ui/associated-type-bounds/return-type-notation/basic.rs", + "tests/ui/associated-type-bounds/return-type-notation/higher-ranked-bound-works.rs", + "tests/ui/associated-type-bounds/return-type-notation/namespace-conflict.rs", + "tests/ui/associated-type-bounds/return-type-notation/path-constrained-in-method.rs", + "tests/ui/associated-type-bounds/return-type-notation/path-self-qself.rs", + "tests/ui/associated-type-bounds/return-type-notation/path-works.rs", + "tests/ui/associated-type-bounds/return-type-notation/unpretty-parenthesized.rs", + "tests/ui/async-await/return-type-notation/issue-110963-late.rs", + "tests/ui/async-await/return-type-notation/normalizing-self-auto-trait-issue-109924.rs", + "tests/ui/async-await/return-type-notation/rtn-implied-in-supertrait.rs", + "tests/ui/async-await/return-type-notation/super-method-bound.rs", + "tests/ui/async-await/return-type-notation/supertrait-bound.rs", + "tests/ui/borrowck/alias-liveness/rtn-static.rs", "tests/ui/feature-gates/feature-gate-return_type_notation.rs", - // Compile-fail expr parameter in const generic position: f::<1 + 2>() + // TODO: lazy type alias syntax with where-clause in trailing position + // https://github.com/dtolnay/syn/issues/1525 + "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/type_item_where_clause.rs", + "src/tools/rustfmt/tests/source/type-alias-where-clauses-with-comments.rs", + "src/tools/rustfmt/tests/source/type-alias-where-clauses.rs", + 
"src/tools/rustfmt/tests/target/type-alias-where-clauses-with-comments.rs", + "src/tools/rustfmt/tests/target/type-alias-where-clauses.rs", + "tests/rustdoc/typedef-inner-variants-lazy_type_alias.rs", + + // TODO: gen blocks and functions + // https://github.com/dtolnay/syn/issues/1526 + "compiler/rustc_codegen_cranelift/example/gen_block_iterate.rs", + "compiler/rustc_hir_analysis/src/collect/resolve_bound_vars.rs", + "compiler/rustc_metadata/src/rmeta/decoder.rs", + "compiler/rustc_middle/src/ty/closure.rs", + "compiler/rustc_middle/src/ty/context.rs", + "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/gen_blocks.rs", + "tests/ui/async-await/async-drop/assign-incompatible-types.rs", + "tests/ui/coroutine/async-gen-deduce-yield.rs", + "tests/ui/coroutine/async-gen-yield-ty-is-unit.rs", + "tests/ui/coroutine/async_gen_fn_iter.rs", + "tests/ui/coroutine/gen_block_is_fused_iter.rs", + "tests/ui/coroutine/gen_block_is_iter.rs", + "tests/ui/coroutine/gen_block_iterate.rs", + "tests/ui/coroutine/gen_fn_iter.rs", + "tests/ui/coroutine/gen_fn_lifetime_capture.rs", + "tests/ui/coroutine/other-attribute-on-gen.rs", + "tests/ui/coroutine/return-types-diverge.rs", + "tests/ui/higher-ranked/builtin-closure-like-bounds.rs", + "tests/ui/sanitizer/cfi/coroutine.rs", + + // TODO: postfix yield + // https://github.com/dtolnay/syn/issues/1890 + "tests/pretty/postfix-yield.rs", + "tests/ui/coroutine/postfix-yield.rs", + + // TODO: `!` as a pattern + // https://github.com/dtolnay/syn/issues/1546 + "tests/mir-opt/building/match/never_patterns.rs", + "tests/pretty/never-pattern.rs", + "tests/ui/rfcs/rfc-0000-never_patterns/always-read-in-closure-capture.rs", + "tests/ui/rfcs/rfc-0000-never_patterns/diverges.rs", + "tests/ui/rfcs/rfc-0000-never_patterns/use-bindings.rs", + + // TODO: async trait bounds: `impl async Fn()` + // https://github.com/dtolnay/syn/issues/1628 + "src/tools/miri/tests/pass/async-closure-captures.rs", + "src/tools/miri/tests/pass/async-closure-drop.rs", + "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/async_trait_bound.rs", + "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/for_binder_bound.rs", + "src/tools/rustfmt/tests/target/asyncness.rs", + "tests/coverage/async_closure.rs", + "tests/ui/async-await/async-closures/async-fn-mut-for-async-fn.rs", + "tests/ui/async-await/async-closures/async-fn-once-for-async-fn.rs", + "tests/ui/async-await/async-closures/auxiliary/foreign.rs", + "tests/ui/async-await/async-closures/body-check-on-non-fnmut.rs", + "tests/ui/async-await/async-closures/box-deref-in-debuginfo.rs", + "tests/ui/async-await/async-closures/brand.rs", + "tests/ui/async-await/async-closures/captures.rs", + "tests/ui/async-await/async-closures/clone-closure.rs", + "tests/ui/async-await/async-closures/constrained-but-no-upvars-yet.rs", + "tests/ui/async-await/async-closures/debuginfo-by-move-body.rs", + "tests/ui/async-await/async-closures/drop.rs", + "tests/ui/async-await/async-closures/force-move-due-to-inferred-kind.rs", + "tests/ui/async-await/async-closures/foreign.rs", + "tests/ui/async-await/async-closures/inline-body.rs", + "tests/ui/async-await/async-closures/mangle.rs", + "tests/ui/async-await/async-closures/moro-example.rs", + "tests/ui/async-await/async-closures/move-is-async-fn.rs", + "tests/ui/async-await/async-closures/mut-ref-reborrow.rs", + "tests/ui/async-await/async-closures/no-borrow-from-env.rs", + "tests/ui/async-await/async-closures/non-copy-arg-does-not-force-inner-move.rs", + 
"tests/ui/async-await/async-closures/overlapping-projs.rs", + "tests/ui/async-await/async-closures/precise-captures.rs", + "tests/ui/async-await/async-closures/refd.rs", + "tests/ui/async-await/async-closures/signature-deduction.rs", + "tests/ui/async-await/async-fn/edition-2015-not-async-bound.rs", + "tests/ui/async-await/async-fn/higher-ranked-async-fn.rs", + "tests/ui/async-await/async-fn/impl-trait.rs", + "tests/ui/async-await/async-fn/project.rs", + "tests/ui/async-await/async-fn/sugar.rs", + + // TODO: mutable by-reference bindings (mut ref) + // https://github.com/dtolnay/syn/issues/1629 + "src/tools/rustfmt/tests/source/mut_ref.rs", + "src/tools/rustfmt/tests/target/mut_ref.rs", + "tests/ui/mut/mut-ref.rs", + + // TODO: postfix match + // https://github.com/dtolnay/syn/issues/1630 + "src/tools/clippy/tests/ui/unnecessary_semicolon.rs", + "src/tools/rustfmt/tests/source/postfix-match/pf-match.rs", + "src/tools/rustfmt/tests/target/postfix-match/pf-match.rs", + "tests/pretty/postfix-match/simple-matches.rs", + "tests/ui/match/postfix-match/no-unused-parens.rs", + "tests/ui/match/postfix-match/pf-match-chain.rs", + "tests/ui/match/postfix-match/postfix-match.rs", + + // TODO: delegation: `reuse Trait::bar { Box::new(self.0) }` + // https://github.com/dtolnay/syn/issues/1580 + "tests/pretty/delegation.rs", + "tests/pretty/hir-delegation.rs", + "tests/ui/delegation/body-identity-glob.rs", + "tests/ui/delegation/body-identity-list.rs", + "tests/ui/delegation/explicit-paths-in-traits-pass.rs", + "tests/ui/delegation/explicit-paths-pass.rs", + "tests/ui/delegation/explicit-paths-signature-pass.rs", + "tests/ui/delegation/fn-header.rs", + "tests/ui/delegation/generics/free-fn-to-free-fn-pass.rs", + "tests/ui/delegation/generics/free-fn-to-trait-method-pass.rs", + "tests/ui/delegation/generics/impl-to-free-fn-pass.rs", + "tests/ui/delegation/generics/impl-trait-to-trait-method-pass.rs", + "tests/ui/delegation/generics/inherent-impl-to-trait-method-pass.rs", + "tests/ui/delegation/generics/trait-method-to-other-pass.rs", + "tests/ui/delegation/glob-glob.rs", + "tests/ui/delegation/glob-override.rs", + "tests/ui/delegation/glob.rs", + "tests/ui/delegation/impl-trait.rs", + "tests/ui/delegation/list.rs", + "tests/ui/delegation/macro-inside-glob.rs", + "tests/ui/delegation/macro-inside-list.rs", + "tests/ui/delegation/method-call-priority.rs", + "tests/ui/delegation/parse.rs", + "tests/ui/delegation/rename.rs", + "tests/ui/delegation/self-coercion.rs", + + // TODO: for await + // https://github.com/dtolnay/syn/issues/1631 + "tests/ui/async-await/for-await-2015.rs", + "tests/ui/async-await/for-await-passthrough.rs", + "tests/ui/async-await/for-await.rs", + + // TODO: unparenthesized half-open range pattern inside slice pattern: `[1..]` + // https://github.com/dtolnay/syn/issues/1769 + "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/range_pat.rs", + "tests/ui/consts/miri_unleashed/const_refers_to_static_cross_crate.rs", + + // TODO: pinned type sugar: `&pin const Self` + // https://github.com/dtolnay/syn/issues/1770 + "src/tools/rustfmt/tests/source/pin_sugar.rs", + "src/tools/rustfmt/tests/target/pin_sugar.rs", + "tests/pretty/pin-ergonomics-hir.rs", + "tests/pretty/pin-ergonomics.rs", + "tests/ui/pin-ergonomics/borrow.rs", + "tests/ui/pin-ergonomics/sugar-self.rs", + "tests/ui/pin-ergonomics/sugar.rs", + + // TODO: attributes on where-predicates + // https://github.com/dtolnay/syn/issues/1705 + "src/tools/rustfmt/tests/target/cfg_attribute_in_where.rs", + + // TODO: super let + 
// https://github.com/dtolnay/syn/issues/1889 + "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/let_stmt.rs", + + // TODO: "ergonomic clones": `f(obj.use)`, `thread::spawn(use || f(obj))`, `async use` + // https://github.com/dtolnay/syn/issues/1802 + "tests/codegen-llvm/ergonomic-clones/closure.rs", + "tests/mir-opt/ergonomic-clones/closure.rs", + "tests/ui/ergonomic-clones/async/basic.rs", + "tests/ui/ergonomic-clones/closure/basic.rs", + "tests/ui/ergonomic-clones/closure/const-closure.rs", + "tests/ui/ergonomic-clones/closure/mutation.rs", + "tests/ui/ergonomic-clones/closure/nested.rs", + "tests/ui/ergonomic-clones/closure/once-move-out-on-heap.rs", + "tests/ui/ergonomic-clones/closure/with-binders.rs", + "tests/ui/ergonomic-clones/dotuse/basic.rs", + "tests/ui/ergonomic-clones/dotuse/block.rs", + + // TODO: contracts + // https://github.com/dtolnay/syn/issues/1892 + "tests/ui/contracts/internal_machinery/contract-ast-extensions-nest.rs", + "tests/ui/contracts/internal_machinery/contract-ast-extensions-tail.rs", + "tests/ui/contracts/internal_machinery/contracts-lowering-ensures-is-not-inherited-when-nesting.rs", + "tests/ui/contracts/internal_machinery/contracts-lowering-requires-is-not-inherited-when-nesting.rs", + + // TODO: frontmatter + // https://github.com/dtolnay/syn/issues/1893 + "tests/ui/frontmatter/auxiliary/lib.rs", + "tests/ui/frontmatter/dot-in-infostring-non-leading.rs", + "tests/ui/frontmatter/escape.rs", + "tests/ui/frontmatter/frontmatter-inner-hyphens-1.rs", + "tests/ui/frontmatter/frontmatter-inner-hyphens-2.rs", + "tests/ui/frontmatter/frontmatter-non-lexible-tokens.rs", + "tests/ui/frontmatter/frontmatter-whitespace-3.rs", + "tests/ui/frontmatter/frontmatter-whitespace-4.rs", + "tests/ui/frontmatter/shebang.rs", + "tests/ui/unpretty/frontmatter.rs", + + // TODO: `|| .. 
.method()` + "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/closure_range_method_call.rs", + "src/tools/rustfmt/tests/source/issue-4808.rs", + + // Negative inherent impl: `impl !Box<JoinHandle> {}` + "src/tools/rustfmt/tests/source/negative-impl.rs", + "src/tools/rustfmt/tests/target/negative-impl.rs", + + // Compile-fail expr parameter in const generic position: `f::<1 + 2>()` "tests/ui/const-generics/early/closing-args-token.rs", "tests/ui/const-generics/early/const-expression-parameter.rs", // Compile-fail variadics in not the last position of a function parameter list + "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/fn_def_param.rs", + "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/param_list_vararg.rs", "tests/ui/parser/variadic-ffi-syntactic-pass.rs", // Need at least one trait in impl Trait, no such type as impl 'static @@ -48,20 +297,24 @@ static EXCLUDE_FILES: &[&str] = &[ // Negative polarity trait bound: `where T: !Copy` "src/tools/rustfmt/tests/target/negative-bounds.rs", - - // Lifetime bound inside for<>: `T: ~const ?for<'a: 'b> Trait<'a>` - "tests/ui/rfcs/rfc-2632-const-trait-impl/tilde-const-syntax.rs", + "tests/ui/traits/negative-bounds/supertrait.rs", // Const impl that is not a trait impl: `impl ~const T {}` - "tests/ui/rfcs/rfc-2632-const-trait-impl/syntax.rs", + "tests/ui/traits/const-traits/syntax.rs", + + // Lifetimes and types out of order in angle bracketed path arguments + "tests/ui/parser/constraints-before-generic-args-syntactic-pass.rs", // Deprecated anonymous parameter syntax in traits "src/tools/rustfmt/tests/source/trait.rs", "src/tools/rustfmt/tests/target/trait.rs", - "tests/ui/issues/issue-13105.rs", - "tests/ui/issues/issue-13775.rs", + "tests/pretty/hir-fn-params.rs", + "tests/rustdoc/anon-fn-params.rs", + "tests/rustdoc/auxiliary/ext-anon-fn-params.rs", + "tests/ui/fn/anonymous-parameters-trait-13105.rs", "tests/ui/issues/issue-34074.rs", "tests/ui/proc-macro/trait-fn-args-2015.rs", + "tests/ui/trait-bounds/anonymous-parameters-13775.rs", // Deprecated where-clause location "src/tools/rustfmt/tests/source/issue_4257.rs", @@ -72,10 +325,8 @@ static EXCLUDE_FILES: &[&str] = &[ "tests/rustdoc/generic-associated-types/gats.rs", // Deprecated trait object syntax with parenthesized generic arguments and no dyn keyword - "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0004_value_parameters_no_patterns.rs", - "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0104_path_fn_trait_args.rs", - "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0202_typepathfn_with_coloncolon.rs", - "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0209_bare_dyn_types_with_paren_as_generic_args.rs", + "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/path_fn_trait_args.rs", + "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/typepathfn_with_coloncolon.rs", "src/tools/rustfmt/tests/source/attrib.rs", "src/tools/rustfmt/tests/source/closure.rs", "src/tools/rustfmt/tests/source/existential_type.rs", @@ -100,28 +351,21 @@ static EXCLUDE_FILES: &[&str] = &[ "tests/pretty/closure-reform-pretty.rs", "tests/run-make/reproducible-build-2/reproducible-build.rs", "tests/run-make/reproducible-build/reproducible-build.rs", - "tests/ui/auxiliary/typeid-intrinsic-aux1.rs", - "tests/ui/auxiliary/typeid-intrinsic-aux2.rs", "tests/ui/impl-trait/generic-with-implicit-hrtb-without-dyn.rs", 
"tests/ui/lifetimes/auxiliary/lifetime_bound_will_change_warning_lib.rs", "tests/ui/lifetimes/bare-trait-object-borrowck.rs", "tests/ui/lifetimes/bare-trait-object.rs", "tests/ui/parser/bounds-obj-parens.rs", - // Obsolete box syntax - "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0132_box_expr.rs", - - // Invalid unparenthesized range pattern inside slice pattern: `[1..]` - "tests/ui/consts/miri_unleashed/const_refers_to_static_cross_crate.rs", - // Various extensions to Rust syntax made up by rust-analyzer - "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0012_type_item_where_clause.rs", - "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0058_range_pat.rs", - "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0123_param_list_vararg.rs", - "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0131_existential_type.rs", - "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_fn_def_param.rs", - "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0179_use_tree_abs_star.rs", - "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0188_const_param_default_path.rs", + "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/assoc_type_bound.rs", + "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_param_default_path.rs", + "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/field_expr.rs", + "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/generic_arg_bounds.rs", + "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/global_asm.rs", + "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/question_for_type_trait_bound.rs", + "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/ref_expr.rs", + "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/use_tree_abs_star.rs", "src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0015_use_tree.rs", "src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0029_range_forms.rs", "src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0051_parameter_attrs.rs", @@ -131,19 +375,25 @@ static EXCLUDE_FILES: &[&str] = &[ "src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0038_endless_inclusive_range.rs", "src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0045_ambiguous_trait_object.rs", "src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0046_mutable_const_item.rs", + "src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0224_dangling_dyn.rs", + "src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0261_dangling_impl_undeclared_lifetime.rs", + "src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/dangling_impl.rs", + "src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/dangling_impl_reference.rs", + "src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/impl_trait_lifetime_only.rs", // Placeholder syntax for "throw expressions" "compiler/rustc_errors/src/translation.rs", + "compiler/rustc_expand/src/module.rs", + "compiler/rustc_trait_selection/src/error_reporting/infer/need_type_info.rs", "src/tools/clippy/tests/ui/needless_return.rs", - "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0204_yeet_expr.rs", + "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/yeet_expr.rs", "tests/pretty/yeet-expr.rs", "tests/ui/try-trait/yeet-for-option.rs", 
"tests/ui/try-trait/yeet-for-result.rs", // Edition 2015 code using identifiers that are now keywords // TODO: some of these we should probably parse - "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_try_macro_fallback.rs", - "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_try_macro_rules.rs", + "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/dyn_trait_type_weak.rs", "src/tools/rustfmt/tests/source/configs/indent_style/block_call.rs", "src/tools/rustfmt/tests/source/configs/use_try_shorthand/false.rs", "src/tools/rustfmt/tests/source/configs/use_try_shorthand/true.rs", @@ -156,6 +406,7 @@ static EXCLUDE_FILES: &[&str] = &[ "tests/ui/dyn-keyword/dyn-2015-no-warnings-without-lints.rs", "tests/ui/editions/edition-keywords-2015-2015.rs", "tests/ui/editions/edition-keywords-2015-2018.rs", + "tests/ui/lint/keyword-idents/auxiliary/multi_file_submod.rs", "tests/ui/lint/lint_pre_expansion_extern_module_aux.rs", "tests/ui/macros/macro-comma-support-rpass.rs", "tests/ui/macros/try-macro.rs", @@ -166,9 +417,12 @@ static EXCLUDE_FILES: &[&str] = &[ "tests/ui/issues/issue-74564-if-expr-stack-overflow.rs", // Testing tools on invalid syntax + "src/tools/clippy/tests/ui/non_expressive_names_error_recovery.rs", "src/tools/rustfmt/tests/coverage/target/comments.rs", "src/tools/rustfmt/tests/parser/issue-4126/invalid.rs", "src/tools/rustfmt/tests/parser/issue_4418.rs", + "src/tools/rustfmt/tests/parser/stashed-diag.rs", + "src/tools/rustfmt/tests/parser/stashed-diag2.rs", "src/tools/rustfmt/tests/parser/unclosed-delims/issue_4466.rs", "src/tools/rustfmt/tests/source/configs/disable_all_formatting/true.rs", "src/tools/rustfmt/tests/source/configs/spaces_around_ranges/false.rs", @@ -177,15 +431,13 @@ static EXCLUDE_FILES: &[&str] = &[ "src/tools/rustfmt/tests/target/configs/spaces_around_ranges/false.rs", "src/tools/rustfmt/tests/target/configs/spaces_around_ranges/true.rs", "src/tools/rustfmt/tests/target/type.rs", + "src/tools/rustfmt/tests/target/unsafe_extern_blocks.rs", "tests/run-make/translation/test.rs", "tests/ui/generics/issue-94432-garbage-ice.rs", // Generated file containing a top-level expression, used with `include!` "compiler/rustc_codegen_gcc/src/intrinsic/archs.rs", - // Clippy lint lists represented as expressions - "src/tools/clippy/clippy_lints/src/lib.deprecated.rs", - // Not actually test cases "tests/ui/lint/expansion-time-include.rs", "tests/ui/macros/auxiliary/macro-comma-support.rs", @@ -207,6 +459,9 @@ static EXCLUDE_DIRS: &[&str] = &[ // Inputs that used to crash rust-analyzer, but aren't necessarily supposed to parse "src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures", "src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures", + + // Inputs that crash rustc, making no claim about whether they are valid Rust + "tests/crashes", ]; // Directories in which a .stderr implies the corresponding .rs is not expected @@ -276,10 +531,29 @@ pub fn edition(path: &Path) -> &'static str { if path.ends_with("dyn-2015-no-warnings-without-lints.rs") { "2015" } else { - "2018" + "2021" + } +} + +#[allow(dead_code)] +pub fn abort_after() -> usize { + match env::var("ABORT_AFTER_FAILURE") { + Ok(s) => s.parse().expect("failed to parse ABORT_AFTER_FAILURE"), + Err(_) => usize::MAX, } } +pub fn rayon_init() { + let stack_size = match env::var("RUST_MIN_STACK") { + Ok(s) => s.parse().expect("failed to parse RUST_MIN_STACK"), + Err(_) => 1024 * 1024 * if cfg!(debug_assertions) { 40 } else { 20 }, + }; + 
ThreadPoolBuilder::new() + .stack_size(stack_size) + .build_global() + .unwrap(); +} + pub fn clone_rust() { let needs_clone = match fs::read_to_string("tests/rust/COMMIT") { Err(_) => true, @@ -326,10 +600,9 @@ pub fn clone_rust() { } fn download_and_unpack() -> Result<()> { - let url = format!( - "https://github.com/rust-lang/rust/archive/{}.tar.gz", - REVISION - ); + let url = format!("https://github.com/rust-lang/rust/archive/{REVISION}.tar.gz"); + errorf!("downloading {url}\n"); + let response = reqwest::blocking::get(url)?.error_for_status()?; let progress = Progress::new(response); let decoder = GzDecoder::new(progress); diff --git a/vendor/syn/tests/snapshot/mod.rs b/vendor/syn/tests/snapshot/mod.rs new file mode 100644 index 00000000..98d2aebc --- /dev/null +++ b/vendor/syn/tests/snapshot/mod.rs @@ -0,0 +1,68 @@ +#![allow(unused_macros, unused_macro_rules)] + +use std::str::FromStr; +use syn::parse::Result; + +macro_rules! snapshot { + ($($args:tt)*) => { + snapshot_impl!(() $($args)*) + }; +} + +macro_rules! snapshot_impl { + (($expr:ident) as $t:ty, @$snapshot:literal) => { + let tokens = crate::snapshot::TryIntoTokens::try_into_tokens($expr).unwrap(); + let $expr: $t = syn::parse_quote!(#tokens); + let debug = crate::debug::Lite(&$expr); + if !cfg!(miri) { + #[allow(clippy::needless_raw_string_hashes)] // https://github.com/mitsuhiko/insta/issues/389 + { + insta::assert_debug_snapshot!(debug, @$snapshot); + } + } + }; + (($($expr:tt)*) as $t:ty, @$snapshot:literal) => {{ + let tokens = crate::snapshot::TryIntoTokens::try_into_tokens($($expr)*).unwrap(); + let syntax_tree: $t = syn::parse_quote!(#tokens); + let debug = crate::debug::Lite(&syntax_tree); + if !cfg!(miri) { + #[allow(clippy::needless_raw_string_hashes)] + { + insta::assert_debug_snapshot!(debug, @$snapshot); + } + } + syntax_tree + }}; + (($($expr:tt)*) , @$snapshot:literal) => {{ + let syntax_tree = $($expr)*; + let debug = crate::debug::Lite(&syntax_tree); + if !cfg!(miri) { + #[allow(clippy::needless_raw_string_hashes)] + { + insta::assert_debug_snapshot!(debug, @$snapshot); + } + } + syntax_tree + }}; + (($($expr:tt)*) $next:tt $($rest:tt)*) => { + snapshot_impl!(($($expr)* $next) $($rest)*) + }; +} + +pub trait TryIntoTokens { + #[allow(dead_code)] + fn try_into_tokens(self) -> Result<proc_macro2::TokenStream>; +} + +impl TryIntoTokens for &str { + fn try_into_tokens(self) -> Result<proc_macro2::TokenStream> { + let tokens = proc_macro2::TokenStream::from_str(self)?; + Ok(tokens) + } +} + +impl TryIntoTokens for proc_macro2::TokenStream { + fn try_into_tokens(self) -> Result<proc_macro2::TokenStream> { + Ok(self) + } +} diff --git a/vendor/syn/tests/test_asyncness.rs b/vendor/syn/tests/test_asyncness.rs index 6bc5a163..c7aee328 100644 --- a/vendor/syn/tests/test_asyncness.rs +++ b/vendor/syn/tests/test_asyncness.rs @@ -1,7 +1,13 @@ -#![allow(clippy::uninlined_format_args)] +#![allow( + clippy::elidable_lifetime_names, + clippy::needless_lifetimes, + clippy::uninlined_format_args +)] #[macro_use] -mod macros; +mod snapshot; + +mod debug; use syn::{Expr, Item}; @@ -9,7 +15,7 @@ use syn::{Expr, Item}; fn test_async_fn() { let input = "async fn process() {}"; - snapshot!(input as Item, @r###" + snapshot!(input as Item, @r#" Item::Fn { vis: Visibility::Inherited, sig: Signature { @@ -18,22 +24,26 @@ fn test_async_fn() { generics: Generics, output: ReturnType::Default, }, - block: Block, + block: Block { + stmts: [], + }, } - "###); + "#); } #[test] fn test_async_closure() { let input = "async || {}"; - 
snapshot!(input as Expr, @r###" + snapshot!(input as Expr, @r#" Expr::Closure { asyncness: Some, output: ReturnType::Default, body: Expr::Block { - block: Block, + block: Block { + stmts: [], + }, }, } - "###); + "#); } diff --git a/vendor/syn/tests/test_attribute.rs b/vendor/syn/tests/test_attribute.rs index 597ae3ad..81c485e6 100644 --- a/vendor/syn/tests/test_attribute.rs +++ b/vendor/syn/tests/test_attribute.rs @@ -1,7 +1,13 @@ -#![allow(clippy::uninlined_format_args)] +#![allow( + clippy::elidable_lifetime_names, + clippy::needless_lifetimes, + clippy::uninlined_format_args +)] #[macro_use] -mod macros; +mod snapshot; + +mod debug; use syn::parse::Parser; use syn::{Attribute, Meta}; @@ -10,7 +16,7 @@ use syn::{Attribute, Meta}; fn test_meta_item_word() { let meta = test("#[foo]"); - snapshot!(meta, @r###" + snapshot!(meta, @r#" Meta::Path { segments: [ PathSegment { @@ -18,14 +24,14 @@ fn test_meta_item_word() { }, ], } - "###); + "#); } #[test] fn test_meta_item_name_value() { let meta = test("#[foo = 5]"); - snapshot!(meta, @r###" + snapshot!(meta, @r#" Meta::NameValue { path: Path { segments: [ @@ -38,14 +44,14 @@ fn test_meta_item_name_value() { lit: 5, }, } - "###); + "#); } #[test] fn test_meta_item_bool_value() { let meta = test("#[foo = true]"); - snapshot!(meta, @r###" + snapshot!(meta, @r#" Meta::NameValue { path: Path { segments: [ @@ -60,11 +66,11 @@ fn test_meta_item_bool_value() { }, }, } - "###); + "#); let meta = test("#[foo = false]"); - snapshot!(meta, @r###" + snapshot!(meta, @r#" Meta::NameValue { path: Path { segments: [ @@ -79,14 +85,14 @@ fn test_meta_item_bool_value() { }, }, } - "###); + "#); } #[test] fn test_meta_item_list_lit() { let meta = test("#[foo(5)]"); - snapshot!(meta, @r###" + snapshot!(meta, @r#" Meta::List { path: Path { segments: [ @@ -98,14 +104,14 @@ fn test_meta_item_list_lit() { delimiter: MacroDelimiter::Paren, tokens: TokenStream(`5`), } - "###); + "#); } #[test] fn test_meta_item_list_word() { let meta = test("#[foo(bar)]"); - snapshot!(meta, @r###" + snapshot!(meta, @r#" Meta::List { path: Path { segments: [ @@ -117,14 +123,14 @@ fn test_meta_item_list_word() { delimiter: MacroDelimiter::Paren, tokens: TokenStream(`bar`), } - "###); + "#); } #[test] fn test_meta_item_list_name_value() { let meta = test("#[foo(bar = 5)]"); - snapshot!(meta, @r###" + snapshot!(meta, @r#" Meta::List { path: Path { segments: [ @@ -136,14 +142,14 @@ fn test_meta_item_list_name_value() { delimiter: MacroDelimiter::Paren, tokens: TokenStream(`bar = 5`), } - "###); + "#); } #[test] fn test_meta_item_list_bool_value() { let meta = test("#[foo(bar = true)]"); - snapshot!(meta, @r###" + snapshot!(meta, @r#" Meta::List { path: Path { segments: [ @@ -155,14 +161,14 @@ fn test_meta_item_list_bool_value() { delimiter: MacroDelimiter::Paren, tokens: TokenStream(`bar = true`), } - "###); + "#); } #[test] fn test_meta_item_multiple() { let meta = test("#[foo(word, name = 5, list(name2 = 6), word2)]"); - snapshot!(meta, @r###" + snapshot!(meta, @r#" Meta::List { path: Path { segments: [ @@ -174,14 +180,14 @@ fn test_meta_item_multiple() { delimiter: MacroDelimiter::Paren, tokens: TokenStream(`word , name = 5 , list (name2 = 6) , word2`), } - "###); + "#); } #[test] fn test_bool_lit() { let meta = test("#[foo(true)]"); - snapshot!(meta, @r###" + snapshot!(meta, @r#" Meta::List { path: Path { segments: [ @@ -193,14 +199,14 @@ fn test_bool_lit() { delimiter: MacroDelimiter::Paren, tokens: TokenStream(`true`), } - "###); + "#); } #[test] fn test_negative_lit() { let meta = 
test("#[form(min = -1, max = 200)]"); - snapshot!(meta, @r###" + snapshot!(meta, @r#" Meta::List { path: Path { segments: [ @@ -212,7 +218,7 @@ fn test_negative_lit() { delimiter: MacroDelimiter::Paren, tokens: TokenStream(`min = - 1 , max = 200`), } - "###); + "#); } fn test(input: &str) -> Meta { diff --git a/vendor/syn/tests/test_derive_input.rs b/vendor/syn/tests/test_derive_input.rs index 3ec6aecb..790e2792 100644 --- a/vendor/syn/tests/test_derive_input.rs +++ b/vendor/syn/tests/test_derive_input.rs @@ -1,12 +1,16 @@ #![allow( clippy::assertions_on_result_states, + clippy::elidable_lifetime_names, clippy::manual_let_else, + clippy::needless_lifetimes, clippy::too_many_lines, clippy::uninlined_format_args )] #[macro_use] -mod macros; +mod snapshot; + +mod debug; use quote::quote; use syn::{Data, DeriveInput}; @@ -17,7 +21,7 @@ fn test_unit() { struct Unit; }; - snapshot!(input as DeriveInput, @r###" + snapshot!(input as DeriveInput, @r#" DeriveInput { vis: Visibility::Inherited, ident: "Unit", @@ -27,7 +31,7 @@ fn test_unit() { semi_token: Some, }, } - "###); + "#); } #[test] @@ -40,7 +44,7 @@ fn test_struct() { } }; - snapshot!(input as DeriveInput, @r###" + snapshot!(input as DeriveInput, @r#" DeriveInput { attrs: [ Attribute { @@ -78,6 +82,7 @@ fn test_struct() { }, }, }, + Token![,], Field { vis: Visibility::Public, ident: Some("attrs"), @@ -109,9 +114,9 @@ fn test_struct() { }, }, } - "###); + "#); - snapshot!(&input.attrs[0].meta, @r###" + snapshot!(&input.attrs[0].meta, @r#" Meta::List { path: Path { segments: [ @@ -123,7 +128,7 @@ fn test_struct() { delimiter: MacroDelimiter::Paren, tokens: TokenStream(`Debug , Clone`), } - "###); + "#); } #[test] @@ -135,7 +140,7 @@ fn test_union() { } }; - snapshot!(input as DeriveInput, @r###" + snapshot!(input as DeriveInput, @r#" DeriveInput { vis: Visibility::Inherited, ident: "MaybeUninit", @@ -157,6 +162,7 @@ fn test_union() { colon_token: Some, ty: Type::Tuple, }, + Token![,], Field { vis: Visibility::Inherited, ident: Some("value"), @@ -175,7 +181,7 @@ fn test_union() { }, }, } - "###); + "#); } #[test] @@ -195,7 +201,7 @@ fn test_enum() { } }; - snapshot!(input as DeriveInput, @r###" + snapshot!(input as DeriveInput, @r#" DeriveInput { attrs: [ Attribute { @@ -232,6 +238,7 @@ fn test_enum() { GenericParam::Type(TypeParam { ident: "T", }), + Token![,], GenericParam::Type(TypeParam { ident: "E", }), @@ -259,6 +266,7 @@ fn test_enum() { ], }, }, + Token![,], Variant { ident: "Err", fields: Fields::Unnamed { @@ -278,6 +286,7 @@ fn test_enum() { ], }, }, + Token![,], Variant { ident: "Surprise", fields: Fields::Unit, @@ -285,6 +294,7 @@ fn test_enum() { lit: 0isize, }), }, + Token![,], Variant { ident: "ProcMacroHack", fields: Fields::Unit, @@ -294,6 +304,7 @@ fn test_enum() { Expr::Lit { lit: 0, }, + Token![,], Expr::Lit { lit: "data", }, @@ -307,11 +318,11 @@ fn test_enum() { ], }, } - "###); + "#); let meta_items: Vec<_> = input.attrs.into_iter().map(|attr| attr.meta).collect(); - snapshot!(meta_items, @r###" + snapshot!(meta_items, @r#" [ Meta::NameValue { path: Path { @@ -333,7 +344,7 @@ fn test_enum() { ], }, ] - "###); + "#); } #[test] @@ -353,7 +364,7 @@ fn test_attr_with_mod_style_path_with_self() { struct S; }; - snapshot!(input as DeriveInput, @r###" + snapshot!(input as DeriveInput, @r#" DeriveInput { attrs: [ Attribute { @@ -363,6 +374,7 @@ fn test_attr_with_mod_style_path_with_self() { PathSegment { ident: "foo", }, + Token![::], PathSegment { ident: "self", }, @@ -378,20 +390,21 @@ fn 
test_attr_with_mod_style_path_with_self() { semi_token: Some, }, } - "###); + "#); - snapshot!(&input.attrs[0].meta, @r###" + snapshot!(&input.attrs[0].meta, @r#" Meta::Path { segments: [ PathSegment { ident: "foo", }, + Token![::], PathSegment { ident: "self", }, ], } - "###); + "#); } #[test] @@ -401,7 +414,7 @@ fn test_pub_restricted() { pub(in m) struct Z(pub(in m::n) u8); }; - snapshot!(input as DeriveInput, @r###" + snapshot!(input as DeriveInput, @r#" DeriveInput { vis: Visibility::Restricted { in_token: Some, @@ -426,6 +439,7 @@ fn test_pub_restricted() { PathSegment { ident: "m", }, + Token![::], PathSegment { ident: "n", }, @@ -447,7 +461,7 @@ fn test_pub_restricted() { semi_token: Some, }, } - "###); + "#); } #[test] @@ -456,7 +470,7 @@ fn test_pub_restricted_crate() { pub(crate) struct S; }; - snapshot!(input as DeriveInput, @r###" + snapshot!(input as DeriveInput, @r#" DeriveInput { vis: Visibility::Restricted { path: Path { @@ -474,7 +488,7 @@ fn test_pub_restricted_crate() { semi_token: Some, }, } - "###); + "#); } #[test] @@ -483,7 +497,7 @@ fn test_pub_restricted_super() { pub(super) struct S; }; - snapshot!(input as DeriveInput, @r###" + snapshot!(input as DeriveInput, @r#" DeriveInput { vis: Visibility::Restricted { path: Path { @@ -501,7 +515,7 @@ fn test_pub_restricted_super() { semi_token: Some, }, } - "###); + "#); } #[test] @@ -510,7 +524,7 @@ fn test_pub_restricted_in_super() { pub(in super) struct S; }; - snapshot!(input as DeriveInput, @r###" + snapshot!(input as DeriveInput, @r#" DeriveInput { vis: Visibility::Restricted { in_token: Some, @@ -529,7 +543,7 @@ fn test_pub_restricted_in_super() { semi_token: Some, }, } - "###); + "#); } #[test] @@ -538,7 +552,7 @@ fn test_fields_on_unit_struct() { struct S; }; - snapshot!(input as DeriveInput, @r###" + snapshot!(input as DeriveInput, @r#" DeriveInput { vis: Visibility::Inherited, ident: "S", @@ -548,7 +562,7 @@ fn test_fields_on_unit_struct() { semi_token: Some, }, } - "###); + "#); let data = match input.data { Data::Struct(data) => data, @@ -567,7 +581,7 @@ fn test_fields_on_named_struct() { } }; - snapshot!(input as DeriveInput, @r###" + snapshot!(input as DeriveInput, @r#" DeriveInput { vis: Visibility::Inherited, ident: "S", @@ -589,6 +603,7 @@ fn test_fields_on_named_struct() { }, }, }, + Token![,], Field { vis: Visibility::Public, ident: Some("bar"), @@ -603,18 +618,19 @@ fn test_fields_on_named_struct() { }, }, }, + Token![,], ], }, }, } - "###); + "#); let data = match input.data { Data::Struct(data) => data, _ => panic!("expected a struct"), }; - snapshot!(data.fields.into_iter().collect::<Vec<_>>(), @r###" + snapshot!(data.fields.into_iter().collect::<Vec<_>>(), @r#" [ Field { vis: Visibility::Inherited, @@ -645,7 +661,7 @@ fn test_fields_on_named_struct() { }, }, ] - "###); + "#); } #[test] @@ -654,7 +670,7 @@ fn test_fields_on_tuple_struct() { struct S(i32, pub String); }; - snapshot!(input as DeriveInput, @r###" + snapshot!(input as DeriveInput, @r#" DeriveInput { vis: Visibility::Inherited, ident: "S", @@ -674,6 +690,7 @@ fn test_fields_on_tuple_struct() { }, }, }, + Token![,], Field { vis: Visibility::Public, ty: Type::Path { @@ -691,14 +708,14 @@ fn test_fields_on_tuple_struct() { semi_token: Some, }, } - "###); + "#); let data = match input.data { Data::Struct(data) => data, _ => panic!("expected a struct"), }; - snapshot!(data.fields.iter().collect::<Vec<_>>(), @r###" + snapshot!(data.fields.iter().collect::<Vec<_>>(), @r#" [ Field { vis: Visibility::Inherited, @@ -725,7 +742,7 @@ fn 
test_fields_on_tuple_struct() { }, }, ] - "###); + "#); } #[test] @@ -735,7 +752,7 @@ fn test_ambiguous_crate() { struct S(crate::X); }; - snapshot!(input as DeriveInput, @r###" + snapshot!(input as DeriveInput, @r#" DeriveInput { vis: Visibility::Inherited, ident: "S", @@ -751,6 +768,7 @@ fn test_ambiguous_crate() { PathSegment { ident: "crate", }, + Token![::], PathSegment { ident: "X", }, @@ -763,5 +781,5 @@ fn test_ambiguous_crate() { semi_token: Some, }, } - "###); + "#); } diff --git a/vendor/syn/tests/test_expr.rs b/vendor/syn/tests/test_expr.rs index 5d529bf1..e21373cf 100644 --- a/vendor/syn/tests/test_expr.rs +++ b/vendor/syn/tests/test_expr.rs @@ -1,33 +1,62 @@ -#![allow(clippy::uninlined_format_args)] +#![cfg(not(miri))] +#![recursion_limit = "1024"] +#![feature(rustc_private)] +#![allow( + clippy::elidable_lifetime_names, + clippy::match_like_matches_macro, + clippy::needless_lifetimes, + clippy::single_element_loop, + clippy::too_many_lines, + clippy::uninlined_format_args, + clippy::unreadable_literal +)] #[macro_use] mod macros; +#[macro_use] +mod snapshot; + +mod common; +mod debug; -use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree}; -use quote::quote; -use syn::{Expr, ExprRange}; +use crate::common::visit::{AsIfPrinted, FlattenParens}; +use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream}; +use quote::{quote, ToTokens as _}; +use std::process::ExitCode; +use syn::punctuated::Punctuated; +use syn::visit_mut::VisitMut as _; +use syn::{ + parse_quote, token, AngleBracketedGenericArguments, Arm, BinOp, Block, Expr, ExprArray, + ExprAssign, ExprAsync, ExprAwait, ExprBinary, ExprBlock, ExprBreak, ExprCall, ExprCast, + ExprClosure, ExprConst, ExprContinue, ExprField, ExprForLoop, ExprIf, ExprIndex, ExprLet, + ExprLit, ExprLoop, ExprMacro, ExprMatch, ExprMethodCall, ExprPath, ExprRange, ExprRawAddr, + ExprReference, ExprReturn, ExprStruct, ExprTry, ExprTryBlock, ExprTuple, ExprUnary, ExprUnsafe, + ExprWhile, ExprYield, GenericArgument, Label, Lifetime, Lit, LitInt, Macro, MacroDelimiter, + Member, Pat, PatWild, Path, PathArguments, PathSegment, PointerMutability, QSelf, RangeLimits, + ReturnType, Stmt, Token, Type, TypePath, UnOp, +}; #[test] fn test_expr_parse() { let tokens = quote!(..100u32); - snapshot!(tokens as Expr, @r###" + snapshot!(tokens as Expr, @r#" Expr::Range { limits: RangeLimits::HalfOpen, end: Some(Expr::Lit { lit: 100u32, }), } - "###); + "#); let tokens = quote!(..100u32); - snapshot!(tokens as ExprRange, @r###" + snapshot!(tokens as ExprRange, @r#" ExprRange { limits: RangeLimits::HalfOpen, end: Some(Expr::Lit { lit: 100u32, }), } - "###); + "#); } #[test] @@ -35,7 +64,7 @@ fn test_await() { // Must not parse as Expr::Field. let tokens = quote!(fut.await); - snapshot!(tokens as Expr, @r###" + snapshot!(tokens as Expr, @r#" Expr::Await { base: Expr::Path { path: Path { @@ -47,13 +76,13 @@ fn test_await() { }, }, } - "###); + "#); } #[rustfmt::skip] #[test] fn test_tuple_multi_index() { - let expected = snapshot!("tuple.0.0" as Expr, @r###" + let expected = snapshot!("tuple.0.0" as Expr, @r#" Expr::Field { base: Expr::Field { base: Expr::Path { @@ -73,7 +102,7 @@ fn test_tuple_multi_index() { index: 0, }), } - "###); + "#); for &input in &[ "tuple .0.0", @@ -100,12 +129,10 @@ fn test_tuple_multi_index() { #[test] fn test_macro_variable_func() { // mimics the token stream corresponding to `$fn()` - let tokens = TokenStream::from_iter(vec![ - TokenTree::Group(Group::new(Delimiter::None, quote! 
{ f })), - TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())), - ]); + let path = Group::new(Delimiter::None, quote!(f)); + let tokens = quote!(#path()); - snapshot!(tokens as Expr, @r###" + snapshot!(tokens as Expr, @r#" Expr::Call { func: Expr::Group { expr: Expr::Path { @@ -119,16 +146,12 @@ fn test_macro_variable_func() { }, }, } - "###); + "#); - let tokens = TokenStream::from_iter(vec![ - TokenTree::Punct(Punct::new('#', Spacing::Alone)), - TokenTree::Group(Group::new(Delimiter::Bracket, quote! { outside })), - TokenTree::Group(Group::new(Delimiter::None, quote! { #[inside] f })), - TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())), - ]); + let path = Group::new(Delimiter::None, quote! { #[inside] f }); + let tokens = quote!(#[outside] #path()); - snapshot!(tokens as Expr, @r###" + snapshot!(tokens as Expr, @r#" Expr::Call { attrs: [ Attribute { @@ -166,19 +189,16 @@ fn test_macro_variable_func() { }, }, } - "###); + "#); } #[test] fn test_macro_variable_macro() { // mimics the token stream corresponding to `$macro!()` - let tokens = TokenStream::from_iter(vec![ - TokenTree::Group(Group::new(Delimiter::None, quote! { m })), - TokenTree::Punct(Punct::new('!', Spacing::Alone)), - TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())), - ]); + let mac = Group::new(Delimiter::None, quote!(m)); + let tokens = quote!(#mac!()); - snapshot!(tokens as Expr, @r###" + snapshot!(tokens as Expr, @r#" Expr::Macro { mac: Macro { path: Path { @@ -192,18 +212,16 @@ fn test_macro_variable_macro() { tokens: TokenStream(``), }, } - "###); + "#); } #[test] fn test_macro_variable_struct() { // mimics the token stream corresponding to `$struct {}` - let tokens = TokenStream::from_iter(vec![ - TokenTree::Group(Group::new(Delimiter::None, quote! { S })), - TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())), - ]); + let s = Group::new(Delimiter::None, quote! { S }); + let tokens = quote!(#s {}); - snapshot!(tokens as Expr, @r###" + snapshot!(tokens as Expr, @r#" Expr::Struct { path: Path { segments: [ @@ -213,27 +231,40 @@ fn test_macro_variable_struct() { ], }, } - "###); + "#); +} + +#[test] +fn test_macro_variable_unary() { + // mimics the token stream corresponding to `$expr.method()` where expr is `&self` + let inner = Group::new(Delimiter::None, quote!(&self)); + let tokens = quote!(#inner.method()); + snapshot!(tokens as Expr, @r#" + Expr::MethodCall { + receiver: Expr::Group { + expr: Expr::Reference { + expr: Expr::Path { + path: Path { + segments: [ + PathSegment { + ident: "self", + }, + ], + }, + }, + }, + }, + method: "method", + } + "#); } #[test] fn test_macro_variable_match_arm() { // mimics the token stream corresponding to `match v { _ => $expr }` - let tokens = TokenStream::from_iter(vec![ - TokenTree::Ident(Ident::new("match", Span::call_site())), - TokenTree::Ident(Ident::new("v", Span::call_site())), - TokenTree::Group(Group::new( - Delimiter::Brace, - TokenStream::from_iter(vec![ - TokenTree::Punct(Punct::new('_', Spacing::Alone)), - TokenTree::Punct(Punct::new('=', Spacing::Joint)), - TokenTree::Punct(Punct::new('>', Spacing::Alone)), - TokenTree::Group(Group::new(Delimiter::None, quote! { #[a] () })), - ]), - )), - ]); - - snapshot!(tokens as Expr, @r###" + let expr = Group::new(Delimiter::None, quote! 
{ #[a] () }); + let tokens = quote!(match v { _ => #expr }); + snapshot!(tokens as Expr, @r#" Expr::Match { expr: Expr::Path { path: Path { @@ -266,7 +297,41 @@ fn test_macro_variable_match_arm() { }, ], } - "###); + "#); + + let expr = Group::new(Delimiter::None, quote!(loop {} + 1)); + let tokens = quote!(match v { _ => #expr }); + snapshot!(tokens as Expr, @r#" + Expr::Match { + expr: Expr::Path { + path: Path { + segments: [ + PathSegment { + ident: "v", + }, + ], + }, + }, + arms: [ + Arm { + pat: Pat::Wild, + body: Expr::Group { + expr: Expr::Binary { + left: Expr::Loop { + body: Block { + stmts: [], + }, + }, + op: BinOp::Add, + right: Expr::Lit { + lit: 1, + }, + }, + }, + }, + ], + } + "#); } // https://github.com/dtolnay/syn/issues/1019 @@ -274,7 +339,7 @@ fn test_macro_variable_match_arm() { fn test_closure_vs_rangefull() { #[rustfmt::skip] // rustfmt bug: https://github.com/rust-lang/rustfmt/issues/4808 let tokens = quote!(|| .. .method()); - snapshot!(tokens as Expr, @r###" + snapshot!(tokens as Expr, @r#" Expr::MethodCall { receiver: Expr::Closure { output: ReturnType::Default, @@ -284,7 +349,7 @@ fn test_closure_vs_rangefull() { }, method: "method", } - "###); + "#); } #[test] @@ -294,7 +359,7 @@ fn test_postfix_operator_after_cast() { } #[test] -fn test_ranges() { +fn test_range_kinds() { syn::parse_str::<Expr>("..").unwrap(); syn::parse_str::<Expr>("..hi").unwrap(); syn::parse_str::<Expr>("lo..").unwrap(); @@ -310,3 +375,1328 @@ fn test_ranges() { syn::parse_str::<Expr>("lo...").unwrap_err(); syn::parse_str::<Expr>("lo...hi").unwrap_err(); } + +#[test] +fn test_range_precedence() { + snapshot!(".. .." as Expr, @r#" + Expr::Range { + limits: RangeLimits::HalfOpen, + end: Some(Expr::Range { + limits: RangeLimits::HalfOpen, + }), + } + "#); + + snapshot!(".. .. ()" as Expr, @r#" + Expr::Range { + limits: RangeLimits::HalfOpen, + end: Some(Expr::Range { + limits: RangeLimits::HalfOpen, + end: Some(Expr::Tuple), + }), + } + "#); + + snapshot!("() .. .." as Expr, @r#" + Expr::Range { + start: Some(Expr::Tuple), + limits: RangeLimits::HalfOpen, + end: Some(Expr::Range { + limits: RangeLimits::HalfOpen, + }), + } + "#); + + snapshot!("() = .. + ()" as Expr, @r" + Expr::Binary { + left: Expr::Assign { + left: Expr::Tuple, + right: Expr::Range { + limits: RangeLimits::HalfOpen, + }, + }, + op: BinOp::Add, + right: Expr::Tuple, + } + "); + + // A range with a lower bound cannot be the upper bound of another range, + // and a range with an upper bound cannot be the lower bound of another + // range. + syn::parse_str::<Expr>(".. x ..").unwrap_err(); + syn::parse_str::<Expr>("x .. x ..").unwrap_err(); +} + +#[test] +fn test_range_attrs() { + // Attributes are not allowed on range expressions starting with `..` + syn::parse_str::<Expr>("#[allow()] ..").unwrap_err(); + syn::parse_str::<Expr>("#[allow()] .. hi").unwrap_err(); + + snapshot!("#[allow()] lo .. hi" as Expr, @r#" + Expr::Range { + start: Some(Expr::Path { + attrs: [ + Attribute { + style: AttrStyle::Outer, + meta: Meta::List { + path: Path { + segments: [ + PathSegment { + ident: "allow", + }, + ], + }, + delimiter: MacroDelimiter::Paren, + tokens: TokenStream(``), + }, + }, + ], + path: Path { + segments: [ + PathSegment { + ident: "lo", + }, + ], + }, + }), + limits: RangeLimits::HalfOpen, + end: Some(Expr::Path { + path: Path { + segments: [ + PathSegment { + ident: "hi", + }, + ], + }, + }), + } + "#); +} + +#[test] +fn test_ranges_bailout() { + syn::parse_str::<Expr>(".. ?").unwrap_err(); + syn::parse_str::<Expr>(".. 
.field").unwrap_err(); + + snapshot!("return .. ?" as Expr, @r" + Expr::Try { + expr: Expr::Return { + expr: Some(Expr::Range { + limits: RangeLimits::HalfOpen, + }), + }, + } + "); + + snapshot!("break .. ?" as Expr, @r" + Expr::Try { + expr: Expr::Break { + expr: Some(Expr::Range { + limits: RangeLimits::HalfOpen, + }), + }, + } + "); + + snapshot!("|| .. ?" as Expr, @r" + Expr::Try { + expr: Expr::Closure { + output: ReturnType::Default, + body: Expr::Range { + limits: RangeLimits::HalfOpen, + }, + }, + } + "); + + snapshot!("return .. .field" as Expr, @r#" + Expr::Field { + base: Expr::Return { + expr: Some(Expr::Range { + limits: RangeLimits::HalfOpen, + }), + }, + member: Member::Named("field"), + } + "#); + + snapshot!("break .. .field" as Expr, @r#" + Expr::Field { + base: Expr::Break { + expr: Some(Expr::Range { + limits: RangeLimits::HalfOpen, + }), + }, + member: Member::Named("field"), + } + "#); + + snapshot!("|| .. .field" as Expr, @r#" + Expr::Field { + base: Expr::Closure { + output: ReturnType::Default, + body: Expr::Range { + limits: RangeLimits::HalfOpen, + }, + }, + member: Member::Named("field"), + } + "#); + + snapshot!("return .. = ()" as Expr, @r" + Expr::Assign { + left: Expr::Return { + expr: Some(Expr::Range { + limits: RangeLimits::HalfOpen, + }), + }, + right: Expr::Tuple, + } + "); + + snapshot!("return .. += ()" as Expr, @r" + Expr::Binary { + left: Expr::Return { + expr: Some(Expr::Range { + limits: RangeLimits::HalfOpen, + }), + }, + op: BinOp::AddAssign, + right: Expr::Tuple, + } + "); +} + +#[test] +fn test_ambiguous_label() { + for stmt in [ + quote! { + return 'label: loop { break 'label 42; }; + }, + quote! { + break ('label: loop { break 'label 42; }); + }, + quote! { + break 1 + 'label: loop { break 'label 42; }; + }, + quote! { + break 'outer 'inner: loop { break 'inner 42; }; + }, + ] { + syn::parse2::<Stmt>(stmt).unwrap(); + } + + for stmt in [ + // Parentheses required. See https://github.com/rust-lang/rust/pull/87026. + quote! 
{ + break 'label: loop { break 'label 42; }; + }, + ] { + syn::parse2::<Stmt>(stmt).unwrap_err(); + } +} + +#[test] +fn test_extended_interpolated_path() { + let path = Group::new(Delimiter::None, quote!(a::b)); + + let tokens = quote!(if #path {}); + snapshot!(tokens as Expr, @r#" + Expr::If { + cond: Expr::Group { + expr: Expr::Path { + path: Path { + segments: [ + PathSegment { + ident: "a", + }, + Token![::], + PathSegment { + ident: "b", + }, + ], + }, + }, + }, + then_branch: Block { + stmts: [], + }, + } + "#); + + let tokens = quote!(#path {}); + snapshot!(tokens as Expr, @r#" + Expr::Struct { + path: Path { + segments: [ + PathSegment { + ident: "a", + }, + Token![::], + PathSegment { + ident: "b", + }, + ], + }, + } + "#); + + let tokens = quote!(#path :: c); + snapshot!(tokens as Expr, @r#" + Expr::Path { + path: Path { + segments: [ + PathSegment { + ident: "a", + }, + Token![::], + PathSegment { + ident: "b", + }, + Token![::], + PathSegment { + ident: "c", + }, + ], + }, + } + "#); + + let nested = Group::new(Delimiter::None, quote!(a::b || true)); + let tokens = quote!(if #nested && false {}); + snapshot!(tokens as Expr, @r#" + Expr::If { + cond: Expr::Binary { + left: Expr::Group { + expr: Expr::Binary { + left: Expr::Path { + path: Path { + segments: [ + PathSegment { + ident: "a", + }, + Token![::], + PathSegment { + ident: "b", + }, + ], + }, + }, + op: BinOp::Or, + right: Expr::Lit { + lit: Lit::Bool { + value: true, + }, + }, + }, + }, + op: BinOp::And, + right: Expr::Lit { + lit: Lit::Bool { + value: false, + }, + }, + }, + then_branch: Block { + stmts: [], + }, + } + "#); +} + +#[test] +fn test_tuple_comma() { + let mut expr = ExprTuple { + attrs: Vec::new(), + paren_token: token::Paren::default(), + elems: Punctuated::new(), + }; + snapshot!(expr.to_token_stream() as Expr, @"Expr::Tuple"); + + expr.elems.push_value(parse_quote!(continue)); + // Must not parse to Expr::Paren + snapshot!(expr.to_token_stream() as Expr, @r#" + Expr::Tuple { + elems: [ + Expr::Continue, + Token![,], + ], + } + "#); + + expr.elems.push_punct(<Token![,]>::default()); + snapshot!(expr.to_token_stream() as Expr, @r#" + Expr::Tuple { + elems: [ + Expr::Continue, + Token![,], + ], + } + "#); + + expr.elems.push_value(parse_quote!(continue)); + snapshot!(expr.to_token_stream() as Expr, @r#" + Expr::Tuple { + elems: [ + Expr::Continue, + Token![,], + Expr::Continue, + ], + } + "#); + + expr.elems.push_punct(<Token![,]>::default()); + snapshot!(expr.to_token_stream() as Expr, @r#" + Expr::Tuple { + elems: [ + Expr::Continue, + Token![,], + Expr::Continue, + Token![,], + ], + } + "#); +} + +#[test] +fn test_binop_associativity() { + // Left to right. + snapshot!("() + () + ()" as Expr, @r#" + Expr::Binary { + left: Expr::Binary { + left: Expr::Tuple, + op: BinOp::Add, + right: Expr::Tuple, + }, + op: BinOp::Add, + right: Expr::Tuple, + } + "#); + + // Right to left. + snapshot!("() += () += ()" as Expr, @r#" + Expr::Binary { + left: Expr::Tuple, + op: BinOp::AddAssign, + right: Expr::Binary { + left: Expr::Tuple, + op: BinOp::AddAssign, + right: Expr::Tuple, + }, + } + "#); + + // Parenthesization is required. + syn::parse_str::<Expr>("() == () == ()").unwrap_err(); +} + +#[test] +fn test_assign_range_precedence() { + // Range has higher precedence as the right-hand of an assignment, but + // ambiguous precedence as the left-hand of an assignment. + snapshot!("() = () .. 
()" as Expr, @r#" + Expr::Assign { + left: Expr::Tuple, + right: Expr::Range { + start: Some(Expr::Tuple), + limits: RangeLimits::HalfOpen, + end: Some(Expr::Tuple), + }, + } + "#); + + snapshot!("() += () .. ()" as Expr, @r#" + Expr::Binary { + left: Expr::Tuple, + op: BinOp::AddAssign, + right: Expr::Range { + start: Some(Expr::Tuple), + limits: RangeLimits::HalfOpen, + end: Some(Expr::Tuple), + }, + } + "#); + + syn::parse_str::<Expr>("() .. () = ()").unwrap_err(); + syn::parse_str::<Expr>("() .. () += ()").unwrap_err(); +} + +#[test] +fn test_chained_comparison() { + // https://github.com/dtolnay/syn/issues/1738 + let _ = syn::parse_str::<Expr>("a = a < a <"); + let _ = syn::parse_str::<Expr>("a = a .. a .."); + let _ = syn::parse_str::<Expr>("a = a .. a +="); + + let err = syn::parse_str::<Expr>("a < a < a").unwrap_err(); + assert_eq!("comparison operators cannot be chained", err.to_string()); + + let err = syn::parse_str::<Expr>("a .. a .. a").unwrap_err(); + assert_eq!("unexpected token", err.to_string()); + + let err = syn::parse_str::<Expr>("a .. a += a").unwrap_err(); + assert_eq!("unexpected token", err.to_string()); +} + +#[test] +fn test_fixup() { + for tokens in [ + quote! { 2 * (1 + 1) }, + quote! { 0 + (0 + 0) }, + quote! { (a = b) = c }, + quote! { (x as i32) < 0 }, + quote! { 1 + (x as i32) < 0 }, + quote! { (1 + 1).abs() }, + quote! { (lo..hi)[..] }, + quote! { (a..b)..(c..d) }, + quote! { (x > ..) > x }, + quote! { (&mut fut).await }, + quote! { &mut (x as i32) }, + quote! { -(x as i32) }, + quote! { if (S {}) == 1 {} }, + quote! { { (m! {}) - 1 } }, + quote! { match m { _ => ({}) - 1 } }, + quote! { if let _ = (a && b) && c {} }, + quote! { if let _ = (S {}) {} }, + quote! { if (S {}) == 0 && let Some(_) = x {} }, + quote! { break ('a: loop { break 'a 1 } + 1) }, + quote! { a + (|| b) + c }, + quote! { if let _ = ((break) - 1 || true) {} }, + quote! { if let _ = (break + 1 || true) {} }, + quote! { if break (break) {} }, + quote! { if break break {} {} }, + quote! { if return (..) {} }, + quote! { if return .. {} {} }, + quote! { if || (Struct {}) {} }, + quote! { if || (Struct {}).await {} }, + quote! { if break || Struct {}.await {} }, + quote! { if break 'outer 'block: {} {} }, + quote! { if ..'block: {} {} }, + quote! { if break ({}).await {} }, + quote! { (break)() }, + quote! { (..) = () }, + quote! { (..) += () }, + quote! { (1 < 2) == (3 < 4) }, + quote! { { (let _ = ()) } }, + quote! { (#[attr] thing).field }, + quote! { #[attr] (1 + 1) }, + quote! { #[attr] (x = 1) }, + quote! { #[attr] (x += 1) }, + quote! { #[attr] (1 as T) }, + quote! { (return #[attr] (x + ..)).field }, + quote! { (self.f)() }, + quote! { (return)..=return }, + quote! { 1 + (return)..=1 + return }, + quote! { .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. 
}, + ] { + let original: Expr = syn::parse2(tokens).unwrap(); + + let mut flat = original.clone(); + FlattenParens::combine_attrs().visit_expr_mut(&mut flat); + let reconstructed: Expr = match syn::parse2(flat.to_token_stream()) { + Ok(reconstructed) => reconstructed, + Err(err) => panic!("failed to parse `{}`: {}", flat.to_token_stream(), err), + }; + + assert!( + original == reconstructed, + "original: {}\n{:#?}\nreconstructed: {}\n{:#?}", + original.to_token_stream(), + crate::debug::Lite(&original), + reconstructed.to_token_stream(), + crate::debug::Lite(&reconstructed), + ); + } +} + +#[test] +fn test_permutations() -> ExitCode { + fn iter(depth: usize, f: &mut dyn FnMut(Expr)) { + let span = Span::call_site(); + + // Expr::Path + f(Expr::Path(ExprPath { + // `x` + attrs: Vec::new(), + qself: None, + path: Path::from(Ident::new("x", span)), + })); + if false { + f(Expr::Path(ExprPath { + // `x::<T>` + attrs: Vec::new(), + qself: None, + path: Path { + leading_colon: None, + segments: Punctuated::from_iter([PathSegment { + ident: Ident::new("x", span), + arguments: PathArguments::AngleBracketed(AngleBracketedGenericArguments { + colon2_token: Some(Token![::](span)), + lt_token: Token![<](span), + args: Punctuated::from_iter([GenericArgument::Type(Type::Path( + TypePath { + qself: None, + path: Path::from(Ident::new("T", span)), + }, + ))]), + gt_token: Token![>](span), + }), + }]), + }, + })); + f(Expr::Path(ExprPath { + // `<T as Trait>::CONST` + attrs: Vec::new(), + qself: Some(QSelf { + lt_token: Token![<](span), + ty: Box::new(Type::Path(TypePath { + qself: None, + path: Path::from(Ident::new("T", span)), + })), + position: 1, + as_token: Some(Token![as](span)), + gt_token: Token![>](span), + }), + path: Path { + leading_colon: None, + segments: Punctuated::from_iter([ + PathSegment::from(Ident::new("Trait", span)), + PathSegment::from(Ident::new("CONST", span)), + ]), + }, + })); + } + + let Some(depth) = depth.checked_sub(1) else { + return; + }; + + // Expr::Assign + iter(depth, &mut |expr| { + iter(0, &mut |simple| { + f(Expr::Assign(ExprAssign { + // `x = $expr` + attrs: Vec::new(), + left: Box::new(simple.clone()), + eq_token: Token![=](span), + right: Box::new(expr.clone()), + })); + f(Expr::Assign(ExprAssign { + // `$expr = x` + attrs: Vec::new(), + left: Box::new(expr.clone()), + eq_token: Token![=](span), + right: Box::new(simple), + })); + }); + }); + + // Expr::Binary + iter(depth, &mut |expr| { + iter(0, &mut |simple| { + for op in [ + BinOp::Add(Token![+](span)), + //BinOp::Sub(Token![-](span)), + //BinOp::Mul(Token![*](span)), + //BinOp::Div(Token![/](span)), + //BinOp::Rem(Token![%](span)), + //BinOp::And(Token![&&](span)), + //BinOp::Or(Token![||](span)), + //BinOp::BitXor(Token![^](span)), + //BinOp::BitAnd(Token![&](span)), + //BinOp::BitOr(Token![|](span)), + //BinOp::Shl(Token![<<](span)), + //BinOp::Shr(Token![>>](span)), + //BinOp::Eq(Token![==](span)), + BinOp::Lt(Token![<](span)), + //BinOp::Le(Token![<=](span)), + //BinOp::Ne(Token![!=](span)), + //BinOp::Ge(Token![>=](span)), + //BinOp::Gt(Token![>](span)), + BinOp::ShlAssign(Token![<<=](span)), + ] { + f(Expr::Binary(ExprBinary { + // `x + $expr` + attrs: Vec::new(), + left: Box::new(simple.clone()), + op, + right: Box::new(expr.clone()), + })); + f(Expr::Binary(ExprBinary { + // `$expr + x` + attrs: Vec::new(), + left: Box::new(expr.clone()), + op, + right: Box::new(simple.clone()), + })); + } + }); + }); + + // Expr::Block + f(Expr::Block(ExprBlock { + // `{}` + attrs: Vec::new(), + label: None, + 
block: Block { + brace_token: token::Brace(span), + stmts: Vec::new(), + }, + })); + + // Expr::Break + f(Expr::Break(ExprBreak { + // `break` + attrs: Vec::new(), + break_token: Token![break](span), + label: None, + expr: None, + })); + iter(depth, &mut |expr| { + f(Expr::Break(ExprBreak { + // `break $expr` + attrs: Vec::new(), + break_token: Token![break](span), + label: None, + expr: Some(Box::new(expr)), + })); + }); + + // Expr::Call + iter(depth, &mut |expr| { + f(Expr::Call(ExprCall { + // `$expr()` + attrs: Vec::new(), + func: Box::new(expr), + paren_token: token::Paren(span), + args: Punctuated::new(), + })); + }); + + // Expr::Cast + iter(depth, &mut |expr| { + f(Expr::Cast(ExprCast { + // `$expr as T` + attrs: Vec::new(), + expr: Box::new(expr), + as_token: Token![as](span), + ty: Box::new(Type::Path(TypePath { + qself: None, + path: Path::from(Ident::new("T", span)), + })), + })); + }); + + // Expr::Closure + iter(depth, &mut |expr| { + f(Expr::Closure(ExprClosure { + // `|| $expr` + attrs: Vec::new(), + lifetimes: None, + constness: None, + movability: None, + asyncness: None, + capture: None, + or1_token: Token![|](span), + inputs: Punctuated::new(), + or2_token: Token![|](span), + output: ReturnType::Default, + body: Box::new(expr), + })); + }); + + // Expr::Field + iter(depth, &mut |expr| { + f(Expr::Field(ExprField { + // `$expr.field` + attrs: Vec::new(), + base: Box::new(expr), + dot_token: Token![.](span), + member: Member::Named(Ident::new("field", span)), + })); + }); + + // Expr::If + iter(depth, &mut |expr| { + f(Expr::If(ExprIf { + // `if $expr {}` + attrs: Vec::new(), + if_token: Token![if](span), + cond: Box::new(expr), + then_branch: Block { + brace_token: token::Brace(span), + stmts: Vec::new(), + }, + else_branch: None, + })); + }); + + // Expr::Let + iter(depth, &mut |expr| { + f(Expr::Let(ExprLet { + attrs: Vec::new(), + let_token: Token![let](span), + pat: Box::new(Pat::Wild(PatWild { + attrs: Vec::new(), + underscore_token: Token![_](span), + })), + eq_token: Token![=](span), + expr: Box::new(expr), + })); + }); + + // Expr::Range + f(Expr::Range(ExprRange { + // `..` + attrs: Vec::new(), + start: None, + limits: RangeLimits::HalfOpen(Token![..](span)), + end: None, + })); + iter(depth, &mut |expr| { + f(Expr::Range(ExprRange { + // `..$expr` + attrs: Vec::new(), + start: None, + limits: RangeLimits::HalfOpen(Token![..](span)), + end: Some(Box::new(expr.clone())), + })); + f(Expr::Range(ExprRange { + // `$expr..` + attrs: Vec::new(), + start: Some(Box::new(expr)), + limits: RangeLimits::HalfOpen(Token![..](span)), + end: None, + })); + }); + + // Expr::Reference + iter(depth, &mut |expr| { + f(Expr::Reference(ExprReference { + // `&$expr` + attrs: Vec::new(), + and_token: Token![&](span), + mutability: None, + expr: Box::new(expr), + })); + }); + + // Expr::Return + f(Expr::Return(ExprReturn { + // `return` + attrs: Vec::new(), + return_token: Token![return](span), + expr: None, + })); + iter(depth, &mut |expr| { + f(Expr::Return(ExprReturn { + // `return $expr` + attrs: Vec::new(), + return_token: Token![return](span), + expr: Some(Box::new(expr)), + })); + }); + + // Expr::Try + iter(depth, &mut |expr| { + f(Expr::Try(ExprTry { + // `$expr?` + attrs: Vec::new(), + expr: Box::new(expr), + question_token: Token![?](span), + })); + }); + + // Expr::Unary + iter(depth, &mut |expr| { + for op in [ + UnOp::Deref(Token![*](span)), + //UnOp::Not(Token![!](span)), + //UnOp::Neg(Token![-](span)), + ] { + f(Expr::Unary(ExprUnary { + // `*$expr` + attrs: 
Vec::new(), + op, + expr: Box::new(expr.clone()), + })); + } + }); + + if false { + // Expr::Array + f(Expr::Array(ExprArray { + // `[]` + attrs: Vec::new(), + bracket_token: token::Bracket(span), + elems: Punctuated::new(), + })); + + // Expr::Async + f(Expr::Async(ExprAsync { + // `async {}` + attrs: Vec::new(), + async_token: Token![async](span), + capture: None, + block: Block { + brace_token: token::Brace(span), + stmts: Vec::new(), + }, + })); + + // Expr::Await + iter(depth, &mut |expr| { + f(Expr::Await(ExprAwait { + // `$expr.await` + attrs: Vec::new(), + base: Box::new(expr), + dot_token: Token![.](span), + await_token: Token![await](span), + })); + }); + + // Expr::Block + f(Expr::Block(ExprBlock { + // `'a: {}` + attrs: Vec::new(), + label: Some(Label { + name: Lifetime::new("'a", span), + colon_token: Token![:](span), + }), + block: Block { + brace_token: token::Brace(span), + stmts: Vec::new(), + }, + })); + iter(depth, &mut |expr| { + f(Expr::Block(ExprBlock { + // `{ $expr }` + attrs: Vec::new(), + label: None, + block: Block { + brace_token: token::Brace(span), + stmts: Vec::from([Stmt::Expr(expr.clone(), None)]), + }, + })); + f(Expr::Block(ExprBlock { + // `{ $expr; }` + attrs: Vec::new(), + label: None, + block: Block { + brace_token: token::Brace(span), + stmts: Vec::from([Stmt::Expr(expr, Some(Token![;](span)))]), + }, + })); + }); + + // Expr::Break + f(Expr::Break(ExprBreak { + // `break 'a` + attrs: Vec::new(), + break_token: Token![break](span), + label: Some(Lifetime::new("'a", span)), + expr: None, + })); + iter(depth, &mut |expr| { + f(Expr::Break(ExprBreak { + // `break 'a $expr` + attrs: Vec::new(), + break_token: Token![break](span), + label: Some(Lifetime::new("'a", span)), + expr: Some(Box::new(expr)), + })); + }); + + // Expr::Closure + f(Expr::Closure(ExprClosure { + // `|| -> T {}` + attrs: Vec::new(), + lifetimes: None, + constness: None, + movability: None, + asyncness: None, + capture: None, + or1_token: Token![|](span), + inputs: Punctuated::new(), + or2_token: Token![|](span), + output: ReturnType::Type( + Token![->](span), + Box::new(Type::Path(TypePath { + qself: None, + path: Path::from(Ident::new("T", span)), + })), + ), + body: Box::new(Expr::Block(ExprBlock { + attrs: Vec::new(), + label: None, + block: Block { + brace_token: token::Brace(span), + stmts: Vec::new(), + }, + })), + })); + + // Expr::Const + f(Expr::Const(ExprConst { + // `const {}` + attrs: Vec::new(), + const_token: Token![const](span), + block: Block { + brace_token: token::Brace(span), + stmts: Vec::new(), + }, + })); + + // Expr::Continue + f(Expr::Continue(ExprContinue { + // `continue` + attrs: Vec::new(), + continue_token: Token![continue](span), + label: None, + })); + f(Expr::Continue(ExprContinue { + // `continue 'a` + attrs: Vec::new(), + continue_token: Token![continue](span), + label: Some(Lifetime::new("'a", span)), + })); + + // Expr::ForLoop + iter(depth, &mut |expr| { + f(Expr::ForLoop(ExprForLoop { + // `for _ in $expr {}` + attrs: Vec::new(), + label: None, + for_token: Token![for](span), + pat: Box::new(Pat::Wild(PatWild { + attrs: Vec::new(), + underscore_token: Token![_](span), + })), + in_token: Token![in](span), + expr: Box::new(expr.clone()), + body: Block { + brace_token: token::Brace(span), + stmts: Vec::new(), + }, + })); + f(Expr::ForLoop(ExprForLoop { + // `'a: for _ in $expr {}` + attrs: Vec::new(), + label: Some(Label { + name: Lifetime::new("'a", span), + colon_token: Token![:](span), + }), + for_token: Token![for](span), + pat: 
Box::new(Pat::Wild(PatWild { + attrs: Vec::new(), + underscore_token: Token![_](span), + })), + in_token: Token![in](span), + expr: Box::new(expr), + body: Block { + brace_token: token::Brace(span), + stmts: Vec::new(), + }, + })); + }); + + // Expr::Index + iter(depth, &mut |expr| { + f(Expr::Index(ExprIndex { + // `$expr[0]` + attrs: Vec::new(), + expr: Box::new(expr), + bracket_token: token::Bracket(span), + index: Box::new(Expr::Lit(ExprLit { + attrs: Vec::new(), + lit: Lit::Int(LitInt::new("0", span)), + })), + })); + }); + + // Expr::Loop + f(Expr::Loop(ExprLoop { + // `loop {}` + attrs: Vec::new(), + label: None, + loop_token: Token![loop](span), + body: Block { + brace_token: token::Brace(span), + stmts: Vec::new(), + }, + })); + f(Expr::Loop(ExprLoop { + // `'a: loop {}` + attrs: Vec::new(), + label: Some(Label { + name: Lifetime::new("'a", span), + colon_token: Token![:](span), + }), + loop_token: Token![loop](span), + body: Block { + brace_token: token::Brace(span), + stmts: Vec::new(), + }, + })); + + // Expr::Macro + f(Expr::Macro(ExprMacro { + // `m!()` + attrs: Vec::new(), + mac: Macro { + path: Path::from(Ident::new("m", span)), + bang_token: Token![!](span), + delimiter: MacroDelimiter::Paren(token::Paren(span)), + tokens: TokenStream::new(), + }, + })); + f(Expr::Macro(ExprMacro { + // `m! {}` + attrs: Vec::new(), + mac: Macro { + path: Path::from(Ident::new("m", span)), + bang_token: Token![!](span), + delimiter: MacroDelimiter::Brace(token::Brace(span)), + tokens: TokenStream::new(), + }, + })); + + // Expr::Match + iter(depth, &mut |expr| { + f(Expr::Match(ExprMatch { + // `match $expr {}` + attrs: Vec::new(), + match_token: Token![match](span), + expr: Box::new(expr.clone()), + brace_token: token::Brace(span), + arms: Vec::new(), + })); + f(Expr::Match(ExprMatch { + // `match x { _ => $expr }` + attrs: Vec::new(), + match_token: Token![match](span), + expr: Box::new(Expr::Path(ExprPath { + attrs: Vec::new(), + qself: None, + path: Path::from(Ident::new("x", span)), + })), + brace_token: token::Brace(span), + arms: Vec::from([Arm { + attrs: Vec::new(), + pat: Pat::Wild(PatWild { + attrs: Vec::new(), + underscore_token: Token![_](span), + }), + guard: None, + fat_arrow_token: Token![=>](span), + body: Box::new(expr.clone()), + comma: None, + }]), + })); + f(Expr::Match(ExprMatch { + // `match x { _ if $expr => {} }` + attrs: Vec::new(), + match_token: Token![match](span), + expr: Box::new(Expr::Path(ExprPath { + attrs: Vec::new(), + qself: None, + path: Path::from(Ident::new("x", span)), + })), + brace_token: token::Brace(span), + arms: Vec::from([Arm { + attrs: Vec::new(), + pat: Pat::Wild(PatWild { + attrs: Vec::new(), + underscore_token: Token![_](span), + }), + guard: Some((Token![if](span), Box::new(expr))), + fat_arrow_token: Token![=>](span), + body: Box::new(Expr::Block(ExprBlock { + attrs: Vec::new(), + label: None, + block: Block { + brace_token: token::Brace(span), + stmts: Vec::new(), + }, + })), + comma: None, + }]), + })); + }); + + // Expr::MethodCall + iter(depth, &mut |expr| { + f(Expr::MethodCall(ExprMethodCall { + // `$expr.method()` + attrs: Vec::new(), + receiver: Box::new(expr.clone()), + dot_token: Token![.](span), + method: Ident::new("method", span), + turbofish: None, + paren_token: token::Paren(span), + args: Punctuated::new(), + })); + f(Expr::MethodCall(ExprMethodCall { + // `$expr.method::<T>()` + attrs: Vec::new(), + receiver: Box::new(expr), + dot_token: Token![.](span), + method: Ident::new("method", span), + turbofish: 
Some(AngleBracketedGenericArguments { + colon2_token: Some(Token![::](span)), + lt_token: Token![<](span), + args: Punctuated::from_iter([GenericArgument::Type(Type::Path( + TypePath { + qself: None, + path: Path::from(Ident::new("T", span)), + }, + ))]), + gt_token: Token![>](span), + }), + paren_token: token::Paren(span), + args: Punctuated::new(), + })); + }); + + // Expr::RawAddr + iter(depth, &mut |expr| { + f(Expr::RawAddr(ExprRawAddr { + // `&raw const $expr` + attrs: Vec::new(), + and_token: Token![&](span), + raw: Token![raw](span), + mutability: PointerMutability::Const(Token![const](span)), + expr: Box::new(expr), + })); + }); + + // Expr::Struct + f(Expr::Struct(ExprStruct { + // `Struct {}` + attrs: Vec::new(), + qself: None, + path: Path::from(Ident::new("Struct", span)), + brace_token: token::Brace(span), + fields: Punctuated::new(), + dot2_token: None, + rest: None, + })); + + // Expr::TryBlock + f(Expr::TryBlock(ExprTryBlock { + // `try {}` + attrs: Vec::new(), + try_token: Token![try](span), + block: Block { + brace_token: token::Brace(span), + stmts: Vec::new(), + }, + })); + + // Expr::Unsafe + f(Expr::Unsafe(ExprUnsafe { + // `unsafe {}` + attrs: Vec::new(), + unsafe_token: Token![unsafe](span), + block: Block { + brace_token: token::Brace(span), + stmts: Vec::new(), + }, + })); + + // Expr::While + iter(depth, &mut |expr| { + f(Expr::While(ExprWhile { + // `while $expr {}` + attrs: Vec::new(), + label: None, + while_token: Token![while](span), + cond: Box::new(expr.clone()), + body: Block { + brace_token: token::Brace(span), + stmts: Vec::new(), + }, + })); + f(Expr::While(ExprWhile { + // `'a: while $expr {}` + attrs: Vec::new(), + label: Some(Label { + name: Lifetime::new("'a", span), + colon_token: Token![:](span), + }), + while_token: Token![while](span), + cond: Box::new(expr), + body: Block { + brace_token: token::Brace(span), + stmts: Vec::new(), + }, + })); + }); + + // Expr::Yield + f(Expr::Yield(ExprYield { + // `yield` + attrs: Vec::new(), + yield_token: Token![yield](span), + expr: None, + })); + iter(depth, &mut |expr| { + f(Expr::Yield(ExprYield { + // `yield $expr` + attrs: Vec::new(), + yield_token: Token![yield](span), + expr: Some(Box::new(expr)), + })); + }); + } + } + + let mut failures = 0; + macro_rules! 
fail { + ($($message:tt)*) => {{ + eprintln!($($message)*); + failures += 1; + return; + }}; + } + let mut assert = |mut original: Expr| { + let tokens = original.to_token_stream(); + let Ok(mut parsed) = syn::parse2::<Expr>(tokens.clone()) else { + fail!( + "failed to parse: {}\n{:#?}", + tokens, + crate::debug::Lite(&original), + ); + }; + AsIfPrinted.visit_expr_mut(&mut original); + FlattenParens::combine_attrs().visit_expr_mut(&mut parsed); + if original != parsed { + fail!( + "before: {}\n{:#?}\nafter: {}\n{:#?}", + tokens, + crate::debug::Lite(&original), + parsed.to_token_stream(), + crate::debug::Lite(&parsed), + ); + } + let mut tokens_no_paren = tokens.clone(); + FlattenParens::visit_token_stream_mut(&mut tokens_no_paren); + if tokens.to_string() != tokens_no_paren.to_string() { + if let Ok(mut parsed2) = syn::parse2::<Expr>(tokens_no_paren) { + FlattenParens::combine_attrs().visit_expr_mut(&mut parsed2); + if original == parsed2 { + fail!("redundant parens: {}", tokens); + } + } + } + }; + + iter(4, &mut assert); + if failures > 0 { + eprintln!("FAILURES: {failures}"); + ExitCode::FAILURE + } else { + ExitCode::SUCCESS + } +} diff --git a/vendor/syn/tests/test_generics.rs b/vendor/syn/tests/test_generics.rs index 51119adc..2cb05251 100644 --- a/vendor/syn/tests/test_generics.rs +++ b/vendor/syn/tests/test_generics.rs @@ -1,14 +1,21 @@ #![allow( + clippy::elidable_lifetime_names, clippy::manual_let_else, + clippy::needless_lifetimes, clippy::too_many_lines, clippy::uninlined_format_args )] #[macro_use] -mod macros; +mod snapshot; + +mod debug; use quote::quote; -use syn::{DeriveInput, ItemFn, TypeParamBound, WhereClause, WherePredicate}; +use syn::{ + parse_quote, DeriveInput, GenericParam, Generics, ItemFn, Lifetime, LifetimeParam, + TypeParamBound, WhereClause, WherePredicate, +}; #[test] fn test_split_for_impl() { @@ -16,7 +23,7 @@ fn test_split_for_impl() { struct S<'a, 'b: 'a, #[may_dangle] T: 'a = ()> where T: Debug; }; - snapshot!(input as DeriveInput, @r###" + snapshot!(input as DeriveInput, @r#" DeriveInput { vis: Visibility::Inherited, ident: "S", @@ -28,6 +35,7 @@ fn test_split_for_impl() { ident: "a", }, }), + Token![,], GenericParam::Lifetime(LifetimeParam { lifetime: Lifetime { ident: "b", @@ -39,6 +47,7 @@ fn test_split_for_impl() { }, ], }), + Token![,], GenericParam::Type(TypeParam { attrs: [ Attribute { @@ -96,7 +105,7 @@ fn test_split_for_impl() { semi_token: Some, }, } - "###); + "#); let generics = input.generics; let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); @@ -124,23 +133,23 @@ fn test_split_for_impl() { } #[test] -fn test_ty_param_bound() { +fn test_type_param_bound() { let tokens = quote!('a); - snapshot!(tokens as TypeParamBound, @r###" + snapshot!(tokens as TypeParamBound, @r#" TypeParamBound::Lifetime { ident: "a", } - "###); + "#); let tokens = quote!('_); - snapshot!(tokens as TypeParamBound, @r###" + snapshot!(tokens as TypeParamBound, @r#" TypeParamBound::Lifetime { ident: "_", } - "###); + "#); let tokens = quote!(Debug); - snapshot!(tokens as TypeParamBound, @r###" + snapshot!(tokens as TypeParamBound, @r#" TypeParamBound::Trait(TraitBound { path: Path { segments: [ @@ -150,10 +159,10 @@ fn test_ty_param_bound() { ], }, }) - "###); + "#); let tokens = quote!(?Sized); - snapshot!(tokens as TypeParamBound, @r###" + snapshot!(tokens as TypeParamBound, @r#" TypeParamBound::Trait(TraitBound { modifier: TraitBoundModifier::Maybe, path: Path { @@ -164,7 +173,43 @@ fn test_ty_param_bound() { ], }, }) - "###); + "#); + + let 
tokens = quote!(for<'a> Trait); + snapshot!(tokens as TypeParamBound, @r#" + TypeParamBound::Trait(TraitBound { + lifetimes: Some(BoundLifetimes { + lifetimes: [ + GenericParam::Lifetime(LifetimeParam { + lifetime: Lifetime { + ident: "a", + }, + }), + ], + }), + path: Path { + segments: [ + PathSegment { + ident: "Trait", + }, + ], + }, + }) + "#); + + let tokens = quote!(for<> ?Trait); + let err = syn::parse2::<TypeParamBound>(tokens).unwrap_err(); + assert_eq!( + "`for<...>` binder not allowed with `?` trait polarity modifier", + err.to_string(), + ); + + let tokens = quote!(?for<> Trait); + let err = syn::parse2::<TypeParamBound>(tokens).unwrap_err(); + assert_eq!( + "`for<...>` binder not allowed with `?` trait polarity modifier", + err.to_string(), + ); } #[test] @@ -179,7 +224,7 @@ fn test_fn_precedence_in_where_clause() { } }; - snapshot!(input as ItemFn, @r###" + snapshot!(input as ItemFn, @r#" ItemFn { vis: Visibility::Inherited, sig: Signature { @@ -227,6 +272,7 @@ fn test_fn_precedence_in_where_clause() { ], }, }), + Token![+], TypeParamBound::Trait(TraitBound { path: Path { segments: [ @@ -238,14 +284,17 @@ fn test_fn_precedence_in_where_clause() { }), ], }), + Token![,], ], }), }, output: ReturnType::Default, }, - block: Block, + block: Block { + stmts: [], + }, } - "###); + "#); let where_clause = input.sig.generics.where_clause.as_ref().unwrap(); assert_eq!(where_clause.predicates.len(), 1); @@ -274,3 +323,23 @@ fn test_where_clause_at_end_of_input() { assert_eq!(input.predicates.len(), 0); } + +// Regression test for https://github.com/dtolnay/syn/issues/1718 +#[test] +#[allow(clippy::map_unwrap_or)] +fn no_opaque_drop() { + let mut generics = Generics::default(); + + let _ = generics + .lifetimes() + .next() + .map(|param| param.lifetime.clone()) + .unwrap_or_else(|| { + let lifetime: Lifetime = parse_quote!('a); + generics.params.insert( + 0, + GenericParam::Lifetime(LifetimeParam::new(lifetime.clone())), + ); + lifetime + }); +} diff --git a/vendor/syn/tests/test_grouping.rs b/vendor/syn/tests/test_grouping.rs index 6a73a924..b466c7e7 100644 --- a/vendor/syn/tests/test_grouping.rs +++ b/vendor/syn/tests/test_grouping.rs @@ -1,19 +1,25 @@ -#![allow(clippy::uninlined_format_args)] +#![allow( + clippy::elidable_lifetime_names, + clippy::needless_lifetimes, + clippy::uninlined_format_args +)] #[macro_use] -mod macros; +mod snapshot; + +mod debug; use proc_macro2::{Delimiter, Group, Literal, Punct, Spacing, TokenStream, TokenTree}; use syn::Expr; #[test] fn test_grouping() { - let tokens: TokenStream = TokenStream::from_iter(vec![ + let tokens: TokenStream = TokenStream::from_iter([ TokenTree::Literal(Literal::i32_suffixed(1)), TokenTree::Punct(Punct::new('+', Spacing::Alone)), TokenTree::Group(Group::new( Delimiter::None, - TokenStream::from_iter(vec![ + TokenStream::from_iter([ TokenTree::Literal(Literal::i32_suffixed(2)), TokenTree::Punct(Punct::new('+', Spacing::Alone)), TokenTree::Literal(Literal::i32_suffixed(3)), @@ -25,7 +31,7 @@ fn test_grouping() { assert_eq!(tokens.to_string(), "1i32 + 2i32 + 3i32 * 4i32"); - snapshot!(tokens as Expr, @r###" + snapshot!(tokens as Expr, @r#" Expr::Binary { left: Expr::Lit { lit: 1i32, @@ -49,5 +55,5 @@ fn test_grouping() { }, }, } - "###); + "#); } diff --git a/vendor/syn/tests/test_ident.rs b/vendor/syn/tests/test_ident.rs index ee01bfcc..10df0ad5 100644 --- a/vendor/syn/tests/test_ident.rs +++ b/vendor/syn/tests/test_ident.rs @@ -2,10 +2,12 @@ use proc_macro2::{Ident, Span, TokenStream}; use std::str::FromStr; use syn::Result; 
+#[track_caller] fn parse(s: &str) -> Result<Ident> { syn::parse2(TokenStream::from_str(s).unwrap()) } +#[track_caller] fn new(s: &str) -> Ident { Ident::new(s, Span::call_site()) } diff --git a/vendor/syn/tests/test_item.rs b/vendor/syn/tests/test_item.rs index 9b0e1c9f..d9a7b5b6 100644 --- a/vendor/syn/tests/test_item.rs +++ b/vendor/syn/tests/test_item.rs @@ -1,7 +1,13 @@ -#![allow(clippy::uninlined_format_args)] +#![allow( + clippy::elidable_lifetime_names, + clippy::needless_lifetimes, + clippy::uninlined_format_args +)] #[macro_use] -mod macros; +mod snapshot; + +mod debug; use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree}; use quote::quote; @@ -10,7 +16,7 @@ use syn::{Item, ItemTrait}; #[test] fn test_macro_variable_attr() { // mimics the token stream corresponding to `$attr fn f() {}` - let tokens = TokenStream::from_iter(vec![ + let tokens = TokenStream::from_iter([ TokenTree::Group(Group::new(Delimiter::None, quote! { #[test] })), TokenTree::Ident(Ident::new("fn", Span::call_site())), TokenTree::Ident(Ident::new("f", Span::call_site())), @@ -18,7 +24,7 @@ fn test_macro_variable_attr() { TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())), ]); - snapshot!(tokens as Item, @r###" + snapshot!(tokens as Item, @r#" Item::Fn { attrs: [ Attribute { @@ -38,46 +44,39 @@ fn test_macro_variable_attr() { generics: Generics, output: ReturnType::Default, }, - block: Block, + block: Block { + stmts: [], + }, } - "###); + "#); } #[test] fn test_negative_impl() { - // Rustc parses all of the following. - #[cfg(any())] impl ! {} let tokens = quote! { impl ! {} }; - snapshot!(tokens as Item, @r###" + snapshot!(tokens as Item, @r#" Item::Impl { generics: Generics, self_ty: Type::Never, } - "###); + "#); - #[cfg(any())] - #[rustfmt::skip] - impl !Trait {} let tokens = quote! { impl !Trait {} }; - snapshot!(tokens as Item, @r###" - Item::Impl { - generics: Generics, - self_ty: Type::Verbatim(`! Trait`), - } - "###); + let err = syn::parse2::<Item>(tokens).unwrap_err(); + assert_eq!(err.to_string(), "inherent impls cannot be negative"); #[cfg(any())] impl !Trait for T {} let tokens = quote! { impl !Trait for T {} }; - snapshot!(tokens as Item, @r###" + snapshot!(tokens as Item, @r#" Item::Impl { generics: Generics, trait_: Some(( @@ -100,26 +99,13 @@ fn test_negative_impl() { }, }, } - "###); - - #[cfg(any())] - #[rustfmt::skip] - impl !! {} - let tokens = quote! { - impl !! {} - }; - snapshot!(tokens as Item, @r###" - Item::Impl { - generics: Generics, - self_ty: Type::Verbatim(`! 
!`), - } - "###); + "#); } #[test] fn test_macro_variable_impl() { // mimics the token stream corresponding to `impl $trait for $ty {}` - let tokens = TokenStream::from_iter(vec![ + let tokens = TokenStream::from_iter([ TokenTree::Ident(Ident::new("impl", Span::call_site())), TokenTree::Group(Group::new(Delimiter::None, quote!(Trait))), TokenTree::Ident(Ident::new("for", Span::call_site())), @@ -127,7 +113,7 @@ fn test_macro_variable_impl() { TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())), ]); - snapshot!(tokens as Item, @r###" + snapshot!(tokens as Item, @r#" Item::Impl { generics: Generics, trait_: Some(( @@ -152,7 +138,7 @@ fn test_macro_variable_impl() { }, }, } - "###); + "#); } #[test] @@ -161,7 +147,7 @@ fn test_supertraits() { #[rustfmt::skip] let tokens = quote!(trait Trait where {}); - snapshot!(tokens as ItemTrait, @r###" + snapshot!(tokens as ItemTrait, @r#" ItemTrait { vis: Visibility::Inherited, ident: "Trait", @@ -169,11 +155,11 @@ fn test_supertraits() { where_clause: Some(WhereClause), }, } - "###); + "#); #[rustfmt::skip] let tokens = quote!(trait Trait: where {}); - snapshot!(tokens as ItemTrait, @r###" + snapshot!(tokens as ItemTrait, @r#" ItemTrait { vis: Visibility::Inherited, ident: "Trait", @@ -182,11 +168,11 @@ fn test_supertraits() { }, colon_token: Some, } - "###); + "#); #[rustfmt::skip] let tokens = quote!(trait Trait: Sized where {}); - snapshot!(tokens as ItemTrait, @r###" + snapshot!(tokens as ItemTrait, @r#" ItemTrait { vis: Visibility::Inherited, ident: "Trait", @@ -206,11 +192,11 @@ fn test_supertraits() { }), ], } - "###); + "#); #[rustfmt::skip] let tokens = quote!(trait Trait: Sized + where {}); - snapshot!(tokens as ItemTrait, @r###" + snapshot!(tokens as ItemTrait, @r#" ItemTrait { vis: Visibility::Inherited, ident: "Trait", @@ -228,9 +214,10 @@ fn test_supertraits() { ], }, }), + Token![+], ], } - "###); + "#); } #[test] @@ -242,7 +229,7 @@ fn test_type_empty_bounds() { } }; - snapshot!(tokens as ItemTrait, @r###" + snapshot!(tokens as ItemTrait, @r#" ItemTrait { vis: Visibility::Inherited, ident: "Foo", @@ -255,7 +242,7 @@ fn test_type_empty_bounds() { }, ], } - "###); + "#); } #[test] @@ -274,7 +261,7 @@ fn test_impl_type_parameter_defaults() { let tokens = quote! 
{ impl<T = ()> () {} }; - snapshot!(tokens as Item, @r###" + snapshot!(tokens as Item, @r#" Item::Impl { generics: Generics { lt_token: Some, @@ -289,7 +276,7 @@ fn test_impl_type_parameter_defaults() { }, self_ty: Type::Tuple, } - "###); + "#); } #[test] @@ -298,7 +285,7 @@ fn test_impl_trait_trailing_plus() { fn f() -> impl Sized + {} }; - snapshot!(tokens as Item, @r###" + snapshot!(tokens as Item, @r#" Item::Fn { vis: Visibility::Inherited, sig: Signature { @@ -316,11 +303,14 @@ fn test_impl_trait_trailing_plus() { ], }, }), + Token![+], ], }, ), }, - block: Block, + block: Block { + stmts: [], + }, } - "###); + "#); } diff --git a/vendor/syn/tests/test_lit.rs b/vendor/syn/tests/test_lit.rs index bc50136f..f2367b44 100644 --- a/vendor/syn/tests/test_lit.rs +++ b/vendor/syn/tests/test_lit.rs @@ -1,18 +1,25 @@ #![allow( + clippy::elidable_lifetime_names, clippy::float_cmp, + clippy::needless_lifetimes, + clippy::needless_raw_string_hashes, clippy::non_ascii_literal, clippy::single_match_else, clippy::uninlined_format_args )] #[macro_use] -mod macros; +mod snapshot; + +mod debug; use proc_macro2::{Delimiter, Group, Literal, Span, TokenStream, TokenTree}; use quote::ToTokens; +use std::ffi::CStr; use std::str::FromStr; use syn::{Lit, LitFloat, LitInt, LitStr}; +#[track_caller] fn lit(s: &str) -> Lit { let mut tokens = TokenStream::from_str(s).unwrap().into_iter(); match tokens.next().unwrap() { @@ -26,7 +33,9 @@ fn lit(s: &str) -> Lit { #[test] fn strings() { + #[track_caller] fn test_string(s: &str, value: &str) { + let s = s.trim(); match lit(s) { Lit::Str(lit) => { assert_eq!(lit.value(), value); @@ -39,16 +48,16 @@ fn strings() { } } - test_string("\"a\"", "a"); - test_string("\"\\n\"", "\n"); - test_string("\"\\r\"", "\r"); - test_string("\"\\t\"", "\t"); - test_string("\"🐕\"", "🐕"); // NOTE: This is an emoji - test_string("\"\\\"\"", "\""); - test_string("\"'\"", "'"); - test_string("\"\"", ""); - test_string("\"\\u{1F415}\"", "\u{1F415}"); - test_string("\"\\u{1_2__3_}\"", "\u{123}"); + test_string(r#" "" "#, ""); + test_string(r#" "a" "#, "a"); + test_string(r#" "\n" "#, "\n"); + test_string(r#" "\r" "#, "\r"); + test_string(r#" "\t" "#, "\t"); + test_string(r#" "🐕" "#, "🐕"); // NOTE: This is an emoji + test_string(r#" "\"" "#, "\""); + test_string(r#" "'" "#, "'"); + test_string(r#" "\u{1F415}" "#, "\u{1F415}"); + test_string(r#" "\u{1_2__3_}" "#, "\u{123}"); test_string( "\"contains\nnewlines\\\nescaped newlines\"", "contains\nnewlinesescaped newlines", @@ -65,7 +74,9 @@ fn strings() { #[test] fn byte_strings() { + #[track_caller] fn test_byte_string(s: &str, value: &[u8]) { + let s = s.trim(); match lit(s) { Lit::ByteStr(lit) => { assert_eq!(lit.value(), value); @@ -78,13 +89,13 @@ fn byte_strings() { } } - test_byte_string("b\"a\"", b"a"); - test_byte_string("b\"\\n\"", b"\n"); - test_byte_string("b\"\\r\"", b"\r"); - test_byte_string("b\"\\t\"", b"\t"); - test_byte_string("b\"\\\"\"", b"\""); - test_byte_string("b\"'\"", b"'"); - test_byte_string("b\"\"", b""); + test_byte_string(r#" b"" "#, b""); + test_byte_string(r#" b"a" "#, b"a"); + test_byte_string(r#" b"\n" "#, b"\n"); + test_byte_string(r#" b"\r" "#, b"\r"); + test_byte_string(r#" b"\t" "#, b"\t"); + test_byte_string(r#" b"\"" "#, b"\""); + test_byte_string(r#" b"'" "#, b"'"); test_byte_string( "b\"contains\nnewlines\\\nescaped newlines\"", b"contains\nnewlinesescaped newlines", @@ -95,9 +106,50 @@ fn byte_strings() { test_byte_string("br##\"...\"##q", b"..."); } +#[test] +fn c_strings() { + #[track_caller] + fn 
test_c_string(s: &str, value: &CStr) { + let s = s.trim(); + match lit(s) { + Lit::CStr(lit) => { + assert_eq!(*lit.value(), *value); + let again = lit.into_token_stream().to_string(); + if again != s { + test_c_string(&again, value); + } + } + wrong => panic!("{:?}", wrong), + } + } + + test_c_string(r#" c"" "#, c""); + test_c_string(r#" c"a" "#, c"a"); + test_c_string(r#" c"\n" "#, c"\n"); + test_c_string(r#" c"\r" "#, c"\r"); + test_c_string(r#" c"\t" "#, c"\t"); + test_c_string(r#" c"\\" "#, c"\\"); + test_c_string(r#" c"\'" "#, c"'"); + test_c_string(r#" c"\"" "#, c"\""); + test_c_string( + "c\"contains\nnewlines\\\nescaped newlines\"", + c"contains\nnewlinesescaped newlines", + ); + test_c_string("cr\"raw\nstring\\\nhere\"", c"raw\nstring\\\nhere"); + test_c_string("c\"...\"q", c"..."); + test_c_string("cr\"...\"", c"..."); + test_c_string("cr##\"...\"##", c"..."); + test_c_string( + r#" c"hello\x80我叫\u{1F980}" "#, // from the RFC + c"hello\x80我叫\u{1F980}", + ); +} + #[test] fn bytes() { + #[track_caller] fn test_byte(s: &str, value: u8) { + let s = s.trim(); match lit(s) { Lit::Byte(lit) => { assert_eq!(lit.value(), value); @@ -108,18 +160,20 @@ fn bytes() { } } - test_byte("b'a'", b'a'); - test_byte("b'\\n'", b'\n'); - test_byte("b'\\r'", b'\r'); - test_byte("b'\\t'", b'\t'); - test_byte("b'\\''", b'\''); - test_byte("b'\"'", b'"'); - test_byte("b'a'q", b'a'); + test_byte(r#" b'a' "#, b'a'); + test_byte(r#" b'\n' "#, b'\n'); + test_byte(r#" b'\r' "#, b'\r'); + test_byte(r#" b'\t' "#, b'\t'); + test_byte(r#" b'\'' "#, b'\''); + test_byte(r#" b'"' "#, b'"'); + test_byte(r#" b'a'q "#, b'a'); } #[test] fn chars() { + #[track_caller] fn test_char(s: &str, value: char) { + let s = s.trim(); match lit(s) { Lit::Char(lit) => { assert_eq!(lit.value(), value); @@ -132,19 +186,20 @@ fn chars() { } } - test_char("'a'", 'a'); - test_char("'\\n'", '\n'); - test_char("'\\r'", '\r'); - test_char("'\\t'", '\t'); - test_char("'🐕'", '🐕'); // NOTE: This is an emoji - test_char("'\\''", '\''); - test_char("'\"'", '"'); - test_char("'\\u{1F415}'", '\u{1F415}'); - test_char("'a'q", 'a'); + test_char(r#" 'a' "#, 'a'); + test_char(r#" '\n' "#, '\n'); + test_char(r#" '\r' "#, '\r'); + test_char(r#" '\t' "#, '\t'); + test_char(r#" '🐕' "#, '🐕'); // NOTE: This is an emoji + test_char(r#" '\'' "#, '\''); + test_char(r#" '"' "#, '"'); + test_char(r#" '\u{1F415}' "#, '\u{1F415}'); + test_char(r#" 'a'q "#, 'a'); } #[test] fn ints() { + #[track_caller] fn test_int(s: &str, value: u64, suffix: &str) { match lit(s) { Lit::Int(lit) => { @@ -185,6 +240,7 @@ fn ints() { #[test] fn floats() { + #[track_caller] fn test_float(s: &str, value: f64, suffix: &str) { match lit(s) { Lit::Float(lit) => { @@ -224,11 +280,13 @@ fn negative() { #[test] fn suffix() { + #[track_caller] fn get_suffix(token: &str) -> String { let lit = syn::parse_str::<Lit>(token).unwrap(); match lit { Lit::Str(lit) => lit.suffix().to_owned(), Lit::ByteStr(lit) => lit.suffix().to_owned(), + Lit::CStr(lit) => lit.suffix().to_owned(), Lit::Byte(lit) => lit.suffix().to_owned(), Lit::Char(lit) => lit.suffix().to_owned(), Lit::Int(lit) => lit.suffix().to_owned(), @@ -239,9 +297,13 @@ fn suffix() { assert_eq!(get_suffix("\"\"s"), "s"); assert_eq!(get_suffix("r\"\"r"), "r"); + assert_eq!(get_suffix("r#\"\"#r"), "r"); assert_eq!(get_suffix("b\"\"b"), "b"); assert_eq!(get_suffix("br\"\"br"), "br"); - assert_eq!(get_suffix("r#\"\"#r"), "r"); + assert_eq!(get_suffix("br#\"\"#br"), "br"); + assert_eq!(get_suffix("c\"\"c"), "c"); + assert_eq!(get_suffix("cr\"\"cr"), 
"cr"); + assert_eq!(get_suffix("cr#\"\"#cr"), "cr"); assert_eq!(get_suffix("'c'c"), "c"); assert_eq!(get_suffix("b'b'b"), "b"); assert_eq!(get_suffix("1i32"), "i32"); @@ -252,11 +314,11 @@ fn suffix() { #[test] fn test_deep_group_empty() { - let tokens = TokenStream::from_iter(vec![TokenTree::Group(Group::new( + let tokens = TokenStream::from_iter([TokenTree::Group(Group::new( Delimiter::None, - TokenStream::from_iter(vec![TokenTree::Group(Group::new( + TokenStream::from_iter([TokenTree::Group(Group::new( Delimiter::None, - TokenStream::from_iter(vec![TokenTree::Literal(Literal::string("hi"))]), + TokenStream::from_iter([TokenTree::Literal(Literal::string("hi"))]), ))]), ))]); diff --git a/vendor/syn/tests/test_meta.rs b/vendor/syn/tests/test_meta.rs index 91a98070..4e1f9caf 100644 --- a/vendor/syn/tests/test_meta.rs +++ b/vendor/syn/tests/test_meta.rs @@ -1,19 +1,25 @@ #![allow( + clippy::elidable_lifetime_names, + clippy::needless_lifetimes, clippy::shadow_unrelated, clippy::too_many_lines, clippy::uninlined_format_args )] #[macro_use] -mod macros; +mod snapshot; -use syn::{Meta, MetaList, MetaNameValue}; +mod debug; + +use quote::quote; +use syn::parse::{ParseStream, Parser as _, Result}; +use syn::{Meta, MetaList, MetaNameValue, Token}; #[test] fn test_parse_meta_item_word() { let input = "hello"; - snapshot!(input as Meta, @r###" + snapshot!(input as Meta, @r#" Meta::Path { segments: [ PathSegment { @@ -21,7 +27,7 @@ fn test_parse_meta_item_word() { }, ], } - "###); + "#); } #[test] @@ -29,7 +35,7 @@ fn test_parse_meta_name_value() { let input = "foo = 5"; let (inner, meta) = (input, input); - snapshot!(inner as MetaNameValue, @r###" + snapshot!(inner as MetaNameValue, @r#" MetaNameValue { path: Path { segments: [ @@ -42,9 +48,9 @@ fn test_parse_meta_name_value() { lit: 5, }, } - "###); + "#); - snapshot!(meta as Meta, @r###" + snapshot!(meta as Meta, @r#" Meta::NameValue { path: Path { segments: [ @@ -57,9 +63,9 @@ fn test_parse_meta_name_value() { lit: 5, }, } - "###); + "#); - assert_eq!(meta, inner.into()); + assert_eq!(meta, Meta::NameValue(inner)); } #[test] @@ -67,7 +73,7 @@ fn test_parse_meta_item_list_lit() { let input = "foo(5)"; let (inner, meta) = (input, input); - snapshot!(inner as MetaList, @r###" + snapshot!(inner as MetaList, @r#" MetaList { path: Path { segments: [ @@ -79,9 +85,9 @@ fn test_parse_meta_item_list_lit() { delimiter: MacroDelimiter::Paren, tokens: TokenStream(`5`), } - "###); + "#); - snapshot!(meta as Meta, @r###" + snapshot!(meta as Meta, @r#" Meta::List { path: Path { segments: [ @@ -93,9 +99,9 @@ fn test_parse_meta_item_list_lit() { delimiter: MacroDelimiter::Paren, tokens: TokenStream(`5`), } - "###); + "#); - assert_eq!(meta, inner.into()); + assert_eq!(meta, Meta::List(inner)); } #[test] @@ -103,7 +109,7 @@ fn test_parse_meta_item_multiple() { let input = "foo(word, name = 5, list(name2 = 6), word2)"; let (inner, meta) = (input, input); - snapshot!(inner as MetaList, @r###" + snapshot!(inner as MetaList, @r#" MetaList { path: Path { segments: [ @@ -115,9 +121,9 @@ fn test_parse_meta_item_multiple() { delimiter: MacroDelimiter::Paren, tokens: TokenStream(`word , name = 5 , list (name2 = 6) , word2`), } - "###); + "#); - snapshot!(meta as Meta, @r###" + snapshot!(meta as Meta, @r#" Meta::List { path: Path { segments: [ @@ -129,25 +135,46 @@ fn test_parse_meta_item_multiple() { delimiter: MacroDelimiter::Paren, tokens: TokenStream(`word , name = 5 , list (name2 = 6) , word2`), } - "###); + "#); - assert_eq!(meta, inner.into()); + assert_eq!(meta, 
Meta::List(inner)); } #[test] fn test_parse_path() { let input = "::serde::Serialize"; - snapshot!(input as Meta, @r###" + snapshot!(input as Meta, @r#" Meta::Path { leading_colon: Some, segments: [ PathSegment { ident: "serde", }, + Token![::], PathSegment { ident: "Serialize", }, ], } - "###); + "#); +} + +#[test] +fn test_fat_arrow_after_meta() { + fn parse(input: ParseStream) -> Result<()> { + while !input.is_empty() { + let _: Meta = input.parse()?; + let _: Token![=>] = input.parse()?; + let brace; + syn::braced!(brace in input); + } + Ok(()) + } + + let input = quote! { + target_os = "linux" => {} + windows => {} + }; + + parse.parse2(input).unwrap(); } diff --git a/vendor/syn/tests/test_parse_buffer.rs b/vendor/syn/tests/test_parse_buffer.rs index f2ca59c7..62abc6d2 100644 --- a/vendor/syn/tests/test_parse_buffer.rs +++ b/vendor/syn/tests/test_parse_buffer.rs @@ -1,11 +1,13 @@ #![allow(clippy::non_ascii_literal)] -use proc_macro2::{Delimiter, Group, Punct, Spacing, TokenStream, TokenTree}; -use syn::parse::{discouraged::Speculative, Parse, ParseStream, Parser, Result}; +use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, TokenStream, TokenTree}; +use std::panic; +use syn::parse::discouraged::Speculative as _; +use syn::parse::{Parse, ParseStream, Parser, Result}; use syn::{parenthesized, Token}; #[test] -#[should_panic(expected = "Fork was not derived from the advancing parse stream")] +#[should_panic(expected = "fork was not derived from the advancing parse stream")] fn smuggled_speculative_cursor_between_sources() { struct BreakRules; impl Parse for BreakRules { @@ -22,7 +24,7 @@ fn smuggled_speculative_cursor_between_sources() { } #[test] -#[should_panic(expected = "Fork was not derived from the advancing parse stream")] +#[should_panic(expected = "fork was not derived from the advancing parse stream")] fn smuggled_speculative_cursor_between_brackets() { struct BreakRules; impl Parse for BreakRules { @@ -40,7 +42,7 @@ fn smuggled_speculative_cursor_between_brackets() { } #[test] -#[should_panic(expected = "Fork was not derived from the advancing parse stream")] +#[should_panic(expected = "fork was not derived from the advancing parse stream")] fn smuggled_speculative_cursor_into_brackets() { struct BreakRules; impl Parse for BreakRules { @@ -67,12 +69,12 @@ fn trailing_empty_none_group() { Ok(()) } - // `+ ( + <Ø Ø> ) <Ø <Ø Ø> Ø>` - let tokens = TokenStream::from_iter(vec![ + // `+ ( + «∅ ∅» ) «∅ «∅ ∅» ∅»` + let tokens = TokenStream::from_iter([ TokenTree::Punct(Punct::new('+', Spacing::Alone)), TokenTree::Group(Group::new( Delimiter::Parenthesis, - TokenStream::from_iter(vec![ + TokenStream::from_iter([ TokenTree::Punct(Punct::new('+', Spacing::Alone)), TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())), ]), @@ -80,7 +82,7 @@ fn trailing_empty_none_group() { TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())), TokenTree::Group(Group::new( Delimiter::None, - TokenStream::from_iter(vec![TokenTree::Group(Group::new( + TokenStream::from_iter([TokenTree::Group(Group::new( Delimiter::None, TokenStream::new(), ))]), @@ -89,3 +91,13 @@ fn trailing_empty_none_group() { parse.parse2(tokens).unwrap(); } + +#[test] +fn test_unwind_safe() { + fn parse(input: ParseStream) -> Result<Ident> { + let thread_result = panic::catch_unwind(|| input.parse()); + thread_result.unwrap() + } + + parse.parse_str("throw").unwrap(); +} diff --git a/vendor/syn/tests/test_parse_quote.rs b/vendor/syn/tests/test_parse_quote.rs new file mode 100644 index 00000000..600870ba --- 
/dev/null +++ b/vendor/syn/tests/test_parse_quote.rs @@ -0,0 +1,172 @@ +#![allow( + clippy::elidable_lifetime_names, + clippy::needless_lifetimes, + clippy::uninlined_format_args +)] + +#[macro_use] +mod snapshot; + +mod debug; + +use syn::punctuated::Punctuated; +use syn::{parse_quote, Attribute, Field, Lit, Pat, Stmt, Token}; + +#[test] +fn test_attribute() { + let attr: Attribute = parse_quote!(#[test]); + snapshot!(attr, @r#" + Attribute { + style: AttrStyle::Outer, + meta: Meta::Path { + segments: [ + PathSegment { + ident: "test", + }, + ], + }, + } + "#); + + let attr: Attribute = parse_quote!(#![no_std]); + snapshot!(attr, @r#" + Attribute { + style: AttrStyle::Inner, + meta: Meta::Path { + segments: [ + PathSegment { + ident: "no_std", + }, + ], + }, + } + "#); +} + +#[test] +fn test_field() { + let field: Field = parse_quote!(pub enabled: bool); + snapshot!(field, @r#" + Field { + vis: Visibility::Public, + ident: Some("enabled"), + colon_token: Some, + ty: Type::Path { + path: Path { + segments: [ + PathSegment { + ident: "bool", + }, + ], + }, + }, + } + "#); + + let field: Field = parse_quote!(primitive::bool); + snapshot!(field, @r#" + Field { + vis: Visibility::Inherited, + ty: Type::Path { + path: Path { + segments: [ + PathSegment { + ident: "primitive", + }, + Token![::], + PathSegment { + ident: "bool", + }, + ], + }, + }, + } + "#); +} + +#[test] +fn test_pat() { + let pat: Pat = parse_quote!(Some(false) | None); + snapshot!(&pat, @r#" + Pat::Or { + cases: [ + Pat::TupleStruct { + path: Path { + segments: [ + PathSegment { + ident: "Some", + }, + ], + }, + elems: [ + Pat::Lit(ExprLit { + lit: Lit::Bool { + value: false, + }, + }), + ], + }, + Token![|], + Pat::Ident { + ident: "None", + }, + ], + } + "#); + + let boxed_pat: Box<Pat> = parse_quote!(Some(false) | None); + assert_eq!(*boxed_pat, pat); +} + +#[test] +fn test_punctuated() { + let punctuated: Punctuated<Lit, Token![|]> = parse_quote!(true | true); + snapshot!(punctuated, @r#" + [ + Lit::Bool { + value: true, + }, + Token![|], + Lit::Bool { + value: true, + }, + ] + "#); + + let punctuated: Punctuated<Lit, Token![|]> = parse_quote!(true | true |); + snapshot!(punctuated, @r#" + [ + Lit::Bool { + value: true, + }, + Token![|], + Lit::Bool { + value: true, + }, + Token![|], + ] + "#); +} + +#[test] +fn test_vec_stmt() { + let stmts: Vec<Stmt> = parse_quote! 
{ + let _; + true + }; + snapshot!(stmts, @r#" + [ + Stmt::Local { + pat: Pat::Wild, + }, + Stmt::Expr( + Expr::Lit { + lit: Lit::Bool { + value: true, + }, + }, + None, + ), + ] + "#); +} diff --git a/vendor/syn/tests/test_parse_stream.rs b/vendor/syn/tests/test_parse_stream.rs index 2265dfe8..a650fc85 100644 --- a/vendor/syn/tests/test_parse_stream.rs +++ b/vendor/syn/tests/test_parse_stream.rs @@ -1,14 +1,187 @@ -#![allow(clippy::let_underscore_untyped)] +#![allow(clippy::items_after_statements, clippy::let_underscore_untyped)] -use syn::ext::IdentExt; -use syn::parse::ParseStream; -use syn::{Ident, Token}; +use proc_macro2::{Delimiter, Group, Punct, Spacing, Span, TokenStream, TokenTree}; +use quote::quote; +use syn::ext::IdentExt as _; +use syn::parse::discouraged::AnyDelimiter; +use syn::parse::{ParseStream, Parser as _, Result}; +use syn::{parenthesized, token, Ident, Lifetime, Token}; #[test] -fn test_peek() { - let _ = |input: ParseStream| { - let _ = input.peek(Ident); - let _ = input.peek(Ident::peek_any); - let _ = input.peek(Token![::]); - }; +fn test_peek_punct() { + let tokens = quote!(+= + =); + + fn assert(input: ParseStream) -> Result<()> { + assert!(input.peek(Token![+])); + assert!(input.peek(Token![+=])); + + let _: Token![+] = input.parse()?; + + assert!(input.peek(Token![=])); + assert!(!input.peek(Token![==])); + assert!(!input.peek(Token![+])); + + let _: Token![=] = input.parse()?; + + assert!(input.peek(Token![+])); + assert!(!input.peek(Token![+=])); + + let _: Token![+] = input.parse()?; + let _: Token![=] = input.parse()?; + Ok(()) + } + + assert.parse2(tokens).unwrap(); +} + +#[test] +fn test_peek_lifetime() { + // 'static ; + let tokens = TokenStream::from_iter([ + TokenTree::Punct(Punct::new('\'', Spacing::Joint)), + TokenTree::Ident(Ident::new("static", Span::call_site())), + TokenTree::Punct(Punct::new(';', Spacing::Alone)), + ]); + + fn assert(input: ParseStream) -> Result<()> { + assert!(input.peek(Lifetime)); + assert!(input.peek2(Token![;])); + assert!(!input.peek2(Token![static])); + + let _: Lifetime = input.parse()?; + + assert!(input.peek(Token![;])); + + let _: Token![;] = input.parse()?; + Ok(()) + } + + assert.parse2(tokens).unwrap(); +} + +#[test] +fn test_peek_not_lifetime() { + // ' static + let tokens = TokenStream::from_iter([ + TokenTree::Punct(Punct::new('\'', Spacing::Alone)), + TokenTree::Ident(Ident::new("static", Span::call_site())), + ]); + + fn assert(input: ParseStream) -> Result<()> { + assert!(!input.peek(Lifetime)); + assert!(input.parse::<Option<Punct>>()?.is_none()); + + let _: TokenTree = input.parse()?; + + assert!(input.peek(Token![static])); + + let _: Token![static] = input.parse()?; + Ok(()) + } + + assert.parse2(tokens).unwrap(); +} + +#[test] +fn test_peek_ident() { + let tokens = quote!(static var); + + fn assert(input: ParseStream) -> Result<()> { + assert!(!input.peek(Ident)); + assert!(input.peek(Ident::peek_any)); + assert!(input.peek(Token![static])); + + let _: Token![static] = input.parse()?; + + assert!(input.peek(Ident)); + assert!(input.peek(Ident::peek_any)); + + let _: Ident = input.parse()?; + Ok(()) + } + + assert.parse2(tokens).unwrap(); +} + +#[test] +fn test_peek_groups() { + // pub ( :: ) «∅ ! 
= ∅» static + let tokens = TokenStream::from_iter([ + TokenTree::Ident(Ident::new("pub", Span::call_site())), + TokenTree::Group(Group::new( + Delimiter::Parenthesis, + TokenStream::from_iter([ + TokenTree::Punct(Punct::new(':', Spacing::Joint)), + TokenTree::Punct(Punct::new(':', Spacing::Alone)), + ]), + )), + TokenTree::Group(Group::new( + Delimiter::None, + TokenStream::from_iter([ + TokenTree::Punct(Punct::new('!', Spacing::Alone)), + TokenTree::Punct(Punct::new('=', Spacing::Alone)), + ]), + )), + TokenTree::Ident(Ident::new("static", Span::call_site())), + ]); + + fn assert(input: ParseStream) -> Result<()> { + assert!(input.peek2(token::Paren)); + assert!(input.peek3(token::Group)); + assert!(input.peek3(Token![!])); + + let _: Token![pub] = input.parse()?; + + assert!(input.peek(token::Paren)); + assert!(!input.peek(Token![::])); + assert!(!input.peek2(Token![::])); + assert!(input.peek2(Token![!])); + assert!(input.peek2(token::Group)); + assert!(input.peek3(Token![=])); + assert!(!input.peek3(Token![static])); + + let content; + parenthesized!(content in input); + + assert!(content.peek(Token![::])); + assert!(content.peek2(Token![:])); + assert!(!content.peek3(token::Group)); + assert!(!content.peek3(Token![!])); + + assert!(input.peek(token::Group)); + assert!(input.peek(Token![!])); + + let _: Token![::] = content.parse()?; + + assert!(input.peek(token::Group)); + assert!(input.peek(Token![!])); + assert!(input.peek2(Token![=])); + assert!(input.peek3(Token![static])); + assert!(!input.peek2(Token![static])); + + let implicit = input.fork(); + let explicit = input.fork(); + + let _: Token![!] = implicit.parse()?; + assert!(implicit.peek(Token![=])); + assert!(implicit.peek2(Token![static])); + let _: Token![=] = implicit.parse()?; + assert!(implicit.peek(Token![static])); + + let (delimiter, _span, grouped) = explicit.parse_any_delimiter()?; + assert_eq!(delimiter, Delimiter::None); + assert!(grouped.peek(Token![!])); + assert!(grouped.peek2(Token![=])); + assert!(!grouped.peek3(Token![static])); + let _: Token![!] 
= grouped.parse()?; + assert!(grouped.peek(Token![=])); + assert!(!grouped.peek2(Token![static])); + let _: Token![=] = grouped.parse()?; + assert!(!grouped.peek(Token![static])); + + let _: TokenStream = input.parse()?; + Ok(()) + } + + assert.parse2(tokens).unwrap(); } diff --git a/vendor/syn/tests/test_pat.rs b/vendor/syn/tests/test_pat.rs index cab7aa7f..f778928b 100644 --- a/vendor/syn/tests/test_pat.rs +++ b/vendor/syn/tests/test_pat.rs @@ -1,12 +1,19 @@ -#![allow(clippy::uninlined_format_args)] +#![allow( + clippy::elidable_lifetime_names, + clippy::needless_lifetimes, + clippy::uninlined_format_args +)] #[macro_use] -mod macros; +mod snapshot; + +mod debug; use proc_macro2::{Delimiter, Group, TokenStream, TokenTree}; -use quote::quote; +use quote::{quote, ToTokens as _}; use syn::parse::Parser; -use syn::{Item, Pat, Stmt}; +use syn::punctuated::Punctuated; +use syn::{parse_quote, token, Item, Pat, PatTuple, Stmt, Token}; #[test] fn test_pat_ident() { @@ -47,10 +54,10 @@ fn test_leading_vert() { #[test] fn test_group() { let group = Group::new(Delimiter::None, quote!(Some(_))); - let tokens = TokenStream::from_iter(vec![TokenTree::Group(group)]); + let tokens = TokenStream::from_iter([TokenTree::Group(group)]); let pat = Pat::parse_single.parse2(tokens).unwrap(); - snapshot!(pat, @r###" + snapshot!(pat, @r#" Pat::TupleStruct { path: Path { segments: [ @@ -63,7 +70,7 @@ fn test_group() { Pat::Wild, ], } - "###); + "#); } #[test] @@ -95,3 +102,57 @@ fn test_ranges() { Pat::parse_single.parse_str("[_, (..=hi), _]").unwrap(); Pat::parse_single.parse_str("[_, lo..=hi, _]").unwrap(); } + +#[test] +fn test_tuple_comma() { + let mut expr = PatTuple { + attrs: Vec::new(), + paren_token: token::Paren::default(), + elems: Punctuated::new(), + }; + snapshot!(expr.to_token_stream() as Pat, @"Pat::Tuple"); + + expr.elems.push_value(parse_quote!(_)); + // Must not parse to Pat::Paren + snapshot!(expr.to_token_stream() as Pat, @r#" + Pat::Tuple { + elems: [ + Pat::Wild, + Token![,], + ], + } + "#); + + expr.elems.push_punct(<Token![,]>::default()); + snapshot!(expr.to_token_stream() as Pat, @r#" + Pat::Tuple { + elems: [ + Pat::Wild, + Token![,], + ], + } + "#); + + expr.elems.push_value(parse_quote!(_)); + snapshot!(expr.to_token_stream() as Pat, @r#" + Pat::Tuple { + elems: [ + Pat::Wild, + Token![,], + Pat::Wild, + ], + } + "#); + + expr.elems.push_punct(<Token![,]>::default()); + snapshot!(expr.to_token_stream() as Pat, @r#" + Pat::Tuple { + elems: [ + Pat::Wild, + Token![,], + Pat::Wild, + Token![,], + ], + } + "#); +} diff --git a/vendor/syn/tests/test_path.rs b/vendor/syn/tests/test_path.rs index 6aded74e..7f9e515d 100644 --- a/vendor/syn/tests/test_path.rs +++ b/vendor/syn/tests/test_path.rs @@ -1,7 +1,13 @@ -#![allow(clippy::uninlined_format_args)] +#![allow( + clippy::elidable_lifetime_names, + clippy::needless_lifetimes, + clippy::uninlined_format_args +)] #[macro_use] -mod macros; +mod snapshot; + +mod debug; use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree}; use quote::{quote, ToTokens}; @@ -10,93 +16,75 @@ use syn::{parse_quote, Expr, Type, TypePath}; #[test] fn parse_interpolated_leading_component() { // mimics the token stream corresponding to `$mod::rest` - let tokens = TokenStream::from_iter(vec![ + let tokens = TokenStream::from_iter([ TokenTree::Group(Group::new(Delimiter::None, quote! 
{ first })), TokenTree::Punct(Punct::new(':', Spacing::Joint)), TokenTree::Punct(Punct::new(':', Spacing::Alone)), TokenTree::Ident(Ident::new("rest", Span::call_site())), ]); - snapshot!(tokens.clone() as Expr, @r###" + snapshot!(tokens.clone() as Expr, @r#" Expr::Path { path: Path { segments: [ PathSegment { ident: "first", }, + Token![::], PathSegment { ident: "rest", }, ], }, } - "###); + "#); - snapshot!(tokens as Type, @r###" + snapshot!(tokens as Type, @r#" Type::Path { path: Path { segments: [ PathSegment { ident: "first", }, + Token![::], PathSegment { ident: "rest", }, ], }, } - "###); + "#); } #[test] fn print_incomplete_qpath() { // qpath with `as` token let mut ty: TypePath = parse_quote!(<Self as A>::Q); - snapshot!(ty.to_token_stream(), @r###" - TokenStream(`< Self as A > :: Q`) - "###); + snapshot!(ty.to_token_stream(), @"TokenStream(`< Self as A > :: Q`)"); assert!(ty.path.segments.pop().is_some()); - snapshot!(ty.to_token_stream(), @r###" - TokenStream(`< Self as A > ::`) - "###); + snapshot!(ty.to_token_stream(), @"TokenStream(`< Self as A > ::`)"); assert!(ty.path.segments.pop().is_some()); - snapshot!(ty.to_token_stream(), @r###" - TokenStream(`< Self >`) - "###); + snapshot!(ty.to_token_stream(), @"TokenStream(`< Self >`)"); assert!(ty.path.segments.pop().is_none()); // qpath without `as` token let mut ty: TypePath = parse_quote!(<Self>::A::B); - snapshot!(ty.to_token_stream(), @r###" - TokenStream(`< Self > :: A :: B`) - "###); + snapshot!(ty.to_token_stream(), @"TokenStream(`< Self > :: A :: B`)"); assert!(ty.path.segments.pop().is_some()); - snapshot!(ty.to_token_stream(), @r###" - TokenStream(`< Self > :: A ::`) - "###); + snapshot!(ty.to_token_stream(), @"TokenStream(`< Self > :: A ::`)"); assert!(ty.path.segments.pop().is_some()); - snapshot!(ty.to_token_stream(), @r###" - TokenStream(`< Self > ::`) - "###); + snapshot!(ty.to_token_stream(), @"TokenStream(`< Self > ::`)"); assert!(ty.path.segments.pop().is_none()); // normal path let mut ty: TypePath = parse_quote!(Self::A::B); - snapshot!(ty.to_token_stream(), @r###" - TokenStream(`Self :: A :: B`) - "###); + snapshot!(ty.to_token_stream(), @"TokenStream(`Self :: A :: B`)"); assert!(ty.path.segments.pop().is_some()); - snapshot!(ty.to_token_stream(), @r###" - TokenStream(`Self :: A ::`) - "###); + snapshot!(ty.to_token_stream(), @"TokenStream(`Self :: A ::`)"); assert!(ty.path.segments.pop().is_some()); - snapshot!(ty.to_token_stream(), @r###" - TokenStream(`Self ::`) - "###); + snapshot!(ty.to_token_stream(), @"TokenStream(`Self ::`)"); assert!(ty.path.segments.pop().is_some()); - snapshot!(ty.to_token_stream(), @r###" - TokenStream(``) - "###); + snapshot!(ty.to_token_stream(), @"TokenStream(``)"); assert!(ty.path.segments.pop().is_none()); } @@ -104,7 +92,7 @@ fn print_incomplete_qpath() { fn parse_parenthesized_path_arguments_with_disambiguator() { #[rustfmt::skip] let tokens = quote!(dyn FnOnce::() -> !); - snapshot!(tokens as Type, @r###" + snapshot!(tokens as Type, @r#" Type::TraitObject { dyn_token: Some, bounds: [ @@ -124,5 +112,5 @@ fn parse_parenthesized_path_arguments_with_disambiguator() { }), ], } - "###); + "#); } diff --git a/vendor/syn/tests/test_precedence.rs b/vendor/syn/tests/test_precedence.rs index bf0510bb..eb193a5a 100644 --- a/vendor/syn/tests/test_precedence.rs +++ b/vendor/syn/tests/test_precedence.rs @@ -1,30 +1,37 @@ +// This test does the following for every file in the rust-lang/rust repo: +// +// 1. Parse the file using syn into a syn::File. +// 2. 
Extract every syn::Expr from the file. +// 3. Print each expr to a string of source code. +// 4. Parse the source code using librustc_parse into a rustc_ast::Expr. +// 5. For both the syn::Expr and rustc_ast::Expr, crawl the syntax tree to +// insert parentheses surrounding every subexpression. +// 6. Serialize the fully parenthesized syn::Expr to a string of source code. +// 7. Parse the fully parenthesized source code using librustc_parse. +// 8. Compare the rustc_ast::Expr resulting from parenthesizing using rustc data +// structures vs syn data structures, ignoring spans. If they agree, rustc's +// parser and syn's parser have identical handling of expression precedence. + #![cfg(not(syn_disable_nightly_tests))] #![cfg(not(miri))] #![recursion_limit = "1024"] #![feature(rustc_private)] #![allow( + clippy::blocks_in_conditions, + clippy::doc_markdown, + clippy::elidable_lifetime_names, clippy::explicit_deref_methods, clippy::let_underscore_untyped, clippy::manual_assert, clippy::manual_let_else, clippy::match_like_matches_macro, clippy::match_wildcard_for_single_variants, + clippy::needless_lifetimes, clippy::too_many_lines, - clippy::uninlined_format_args + clippy::uninlined_format_args, + clippy::unnecessary_box_returns )] -//! The tests in this module do the following: -//! -//! 1. Parse a given expression in both `syn` and `librustc`. -//! 2. Fold over the expression adding brackets around each subexpression (with -//! some complications - see the `syn_brackets` and `librustc_brackets` -//! methods). -//! 3. Serialize the `syn` expression back into a string, and re-parse it with -//! `librustc`. -//! 4. Respan all of the expressions, replacing the spans with the default -//! spans. -//! 5. Compare the expressions with one another, if they are not equal fail. 
- extern crate rustc_ast; extern crate rustc_ast_pretty; extern crate rustc_data_structures; @@ -35,43 +42,40 @@ extern crate thin_vec; use crate::common::eq::SpanlessEq; use crate::common::parse; -use quote::quote; -use regex::Regex; +use quote::ToTokens; use rustc_ast::ast; -use rustc_ast::ptr::P; use rustc_ast_pretty::pprust; use rustc_span::edition::Edition; use std::fs; +use std::mem; use std::path::Path; use std::process; use std::sync::atomic::{AtomicUsize, Ordering}; +use syn::parse::Parser as _; #[macro_use] mod macros; -#[allow(dead_code)] mod common; - mod repo; +#[path = "../src/scan_expr.rs"] +mod scan_expr; + #[test] fn test_rustc_precedence() { - common::rayon_init(); + repo::rayon_init(); repo::clone_rust(); - let abort_after = common::abort_after(); + let abort_after = repo::abort_after(); if abort_after == 0 { - panic!("Skipping all precedence tests"); + panic!("skipping all precedence tests"); } let passed = AtomicUsize::new(0); let failed = AtomicUsize::new(0); - // 2018 edition is hard - let edition_regex = Regex::new(r"\b(async|try)[!(]").unwrap(); - repo::for_each_rust_file(|path| { let content = fs::read_to_string(path).unwrap(); - let content = edition_regex.replace_all(&content, "_$0"); let (l_passed, l_failed) = match syn::parse_file(&content) { Ok(file) => { @@ -100,8 +104,8 @@ fn test_rustc_precedence() { } }); - let passed = passed.load(Ordering::Relaxed); - let failed = failed.load(Ordering::Relaxed); + let passed = passed.into_inner(); + let failed = failed.into_inner(); errorf!("\n===== Precedence Test Results =====\n"); errorf!("{} passed | {} failed\n", passed, failed); @@ -117,76 +121,101 @@ fn test_expressions(path: &Path, edition: Edition, exprs: Vec<syn::Expr>) -> (us rustc_span::create_session_if_not_set_then(edition, |_| { for expr in exprs { - let raw = quote!(#expr).to_string(); - - let librustc_ast = if let Some(e) = librustc_parse_and_rewrite(&raw) { + let expr_tokens = expr.to_token_stream(); + let source_code = expr_tokens.to_string(); + let librustc_ast = if let Some(e) = librustc_parse_and_rewrite(&source_code) { e } else { failed += 1; - errorf!("\nFAIL {} - librustc failed to parse raw\n", path.display()); + errorf!( + "\nFAIL {} - librustc failed to parse original\n", + path.display(), + ); continue; }; - let syn_expr = syn_brackets(expr); - let syn_ast = if let Some(e) = parse::librustc_expr("e!(#syn_expr).to_string()) { + let syn_parenthesized_code = + syn_parenthesize(expr.clone()).to_token_stream().to_string(); + let syn_ast = if let Some(e) = parse::librustc_expr(&syn_parenthesized_code) { e } else { failed += 1; errorf!( - "\nFAIL {} - librustc failed to parse bracketed\n", + "\nFAIL {} - librustc failed to parse parenthesized\n", path.display(), ); continue; }; - if SpanlessEq::eq(&syn_ast, &librustc_ast) { - passed += 1; - } else { + if !SpanlessEq::eq(&syn_ast, &librustc_ast) { failed += 1; - let syn_program = pprust::expr_to_string(&syn_ast); - let librustc_program = pprust::expr_to_string(&librustc_ast); + let syn_pretty = pprust::expr_to_string(&syn_ast); + let librustc_pretty = pprust::expr_to_string(&librustc_ast); errorf!( "\nFAIL {}\n{}\nsyn != rustc\n{}\n", path.display(), - syn_program, - librustc_program, + syn_pretty, + librustc_pretty, + ); + continue; + } + + let expr_invisible = make_parens_invisible(expr); + let Ok(reparsed_expr_invisible) = syn::parse2(expr_invisible.to_token_stream()) else { + failed += 1; + errorf!( + "\nFAIL {} - syn failed to parse invisible delimiters\n{}\n", + path.display(), + source_code, 
); + continue; + }; + if expr_invisible != reparsed_expr_invisible { + failed += 1; + errorf!( + "\nFAIL {} - mismatch after parsing invisible delimiters\n{}\n", + path.display(), + source_code, + ); + continue; } + + if scan_expr::scan_expr.parse2(expr_tokens).is_err() { + failed += 1; + errorf!( + "\nFAIL {} - failed to scan expr\n{}\n", + path.display(), + source_code, + ); + continue; + } + + passed += 1; } }); (passed, failed) } -fn librustc_parse_and_rewrite(input: &str) -> Option<P<ast::Expr>> { - parse::librustc_expr(input).and_then(librustc_brackets) +fn librustc_parse_and_rewrite(input: &str) -> Option<Box<ast::Expr>> { + parse::librustc_expr(input).map(librustc_parenthesize) } -/// Wrap every expression which is not already wrapped in parens with parens, to -/// reveal the precedence of the parsed expressions, and produce a stringified -/// form of the resulting expression. -/// -/// This method operates on librustc objects. -fn librustc_brackets(mut librustc_expr: P<ast::Expr>) -> Option<P<ast::Expr>> { +fn librustc_parenthesize(mut librustc_expr: Box<ast::Expr>) -> Box<ast::Expr> { use rustc_ast::ast::{ - AssocItem, AssocItemKind, Attribute, BinOpKind, Block, BorrowKind, Expr, ExprField, - ExprKind, GenericArg, GenericBound, ItemKind, Local, LocalKind, Pat, Stmt, StmtKind, - StructExpr, StructRest, TraitBoundModifier, Ty, - }; - use rustc_ast::mut_visit::{ - noop_flat_map_assoc_item, noop_visit_generic_arg, noop_visit_item_kind, noop_visit_local, - noop_visit_param_bound, MutVisitor, + AssocItem, AssocItemKind, Attribute, BinOpKind, Block, BoundConstness, Expr, ExprField, + ExprKind, GenericArg, GenericBound, Local, LocalKind, Pat, PolyTraitRef, Stmt, StmtKind, + StructExpr, StructRest, TraitBoundModifiers, Ty, }; + use rustc_ast::mut_visit::{walk_flat_map_assoc_item, MutVisitor}; + use rustc_ast::visit::{AssocCtxt, BoundKind}; use rustc_data_structures::flat_map_in_place::FlatMapInPlace; use rustc_span::DUMMY_SP; use smallvec::SmallVec; - use std::mem; use std::ops::DerefMut; use thin_vec::ThinVec; - struct BracketsVisitor { - failed: bool, - } + struct FullyParenthesize; fn contains_let_chain(expr: &Expr) -> bool { match &expr.kind { @@ -226,9 +255,8 @@ fn librustc_brackets(mut librustc_expr: P<ast::Expr>) -> Option<P<ast::Expr>> { } fn noop_visit_expr<T: MutVisitor>(e: &mut Expr, vis: &mut T) { - use rustc_ast::mut_visit::{noop_visit_expr, visit_attrs}; match &mut e.kind { - ExprKind::AddrOf(BorrowKind::Raw, ..) => {} + ExprKind::Become(..) => {} ExprKind::Struct(expr) => { let StructExpr { qself, @@ -236,65 +264,68 @@ fn librustc_brackets(mut librustc_expr: P<ast::Expr>) -> Option<P<ast::Expr>> { fields, rest, } = expr.deref_mut(); - vis.visit_qself(qself); + if let Some(qself) = qself { + vis.visit_qself(qself); + } vis.visit_path(path); fields.flat_map_in_place(|field| flat_map_field(field, vis)); if let StructRest::Base(rest) = rest { vis.visit_expr(rest); } - vis.visit_id(&mut e.id); - vis.visit_span(&mut e.span); - visit_attrs(&mut e.attrs, vis); } - _ => noop_visit_expr(e, vis), + _ => rustc_ast::mut_visit::walk_expr(vis, e), } } - impl MutVisitor for BracketsVisitor { - fn visit_expr(&mut self, e: &mut P<Expr>) { + impl MutVisitor for FullyParenthesize { + fn visit_expr(&mut self, e: &mut Expr) { noop_visit_expr(e, self); match e.kind { ExprKind::Block(..) | ExprKind::If(..) | ExprKind::Let(..) => {} ExprKind::Binary(..) 
if contains_let_chain(e) => {} _ => { - let inner = mem::replace( - e, - P(Expr { - id: ast::DUMMY_NODE_ID, - kind: ExprKind::Err, - span: DUMMY_SP, - attrs: ThinVec::new(), - tokens: None, - }), - ); - e.kind = ExprKind::Paren(inner); + let inner = mem::replace(e, Expr::dummy()); + *e = Expr { + id: ast::DUMMY_NODE_ID, + kind: ExprKind::Paren(Box::new(inner)), + span: DUMMY_SP, + attrs: ThinVec::new(), + tokens: None, + }; } } } fn visit_generic_arg(&mut self, arg: &mut GenericArg) { match arg { + GenericArg::Lifetime(_lifetime) => {} + GenericArg::Type(arg) => self.visit_ty(arg), // Don't wrap unbraced const generic arg as that's invalid syntax. GenericArg::Const(anon_const) => { if let ExprKind::Block(..) = &mut anon_const.value.kind { noop_visit_expr(&mut anon_const.value, self); } } - _ => noop_visit_generic_arg(arg, self), } } - fn visit_param_bound(&mut self, bound: &mut GenericBound) { + fn visit_param_bound(&mut self, bound: &mut GenericBound, _ctxt: BoundKind) { match bound { - GenericBound::Trait( - _, - TraitBoundModifier::MaybeConst | TraitBoundModifier::MaybeConstMaybe, - ) => {} - _ => noop_visit_param_bound(bound, self), + GenericBound::Trait(PolyTraitRef { + modifiers: + TraitBoundModifiers { + constness: BoundConstness::Maybe(_), + .. + }, + .. + }) + | GenericBound::Outlives(..) + | GenericBound::Use(..) => {} + GenericBound::Trait(ty) => self.visit_poly_trait_ref(ty), } } - fn visit_block(&mut self, block: &mut P<Block>) { + fn visit_block(&mut self, block: &mut Block) { self.visit_id(&mut block.id); block .stmts @@ -302,35 +333,24 @@ fn librustc_brackets(mut librustc_expr: P<ast::Expr>) -> Option<P<ast::Expr>> { self.visit_span(&mut block.span); } - fn visit_local(&mut self, local: &mut P<Local>) { - match local.kind { - LocalKind::InitElse(..) => {} - _ => noop_visit_local(local, self), - } - } - - fn visit_item_kind(&mut self, item: &mut ItemKind) { - match item { - ItemKind::Const(const_item) - if !const_item.generics.params.is_empty() - || !const_item.generics.where_clause.predicates.is_empty() => {} - _ => noop_visit_item_kind(item, self), - } - } - - fn flat_map_trait_item(&mut self, item: P<AssocItem>) -> SmallVec<[P<AssocItem>; 1]> { - match &item.kind { - AssocItemKind::Const(const_item) - if !const_item.generics.params.is_empty() - || !const_item.generics.where_clause.predicates.is_empty() => - { - SmallVec::from([item]) + fn visit_local(&mut self, local: &mut Local) { + match &mut local.kind { + LocalKind::Decl => {} + LocalKind::Init(init) => { + self.visit_expr(init); + } + LocalKind::InitElse(init, els) => { + self.visit_expr(init); + self.visit_block(els); } - _ => noop_flat_map_assoc_item(item, self), } } - fn flat_map_impl_item(&mut self, item: P<AssocItem>) -> SmallVec<[P<AssocItem>; 1]> { + fn flat_map_assoc_item( + &mut self, + item: Box<AssocItem>, + ctxt: AssocCtxt, + ) -> SmallVec<[Box<AssocItem>; 1]> { match &item.kind { AssocItemKind::Const(const_item) if !const_item.generics.params.is_empty() @@ -338,18 +358,18 @@ fn librustc_brackets(mut librustc_expr: P<ast::Expr>) -> Option<P<ast::Expr>> { { SmallVec::from([item]) } - _ => noop_flat_map_assoc_item(item, self), + _ => walk_flat_map_assoc_item(self, item, ctxt), } } // We don't want to look at expressions that might appear in patterns or // types yet. We'll look into comparing those in the future. For now // focus on expressions appearing in other places. 
- fn visit_pat(&mut self, pat: &mut P<Pat>) { + fn visit_pat(&mut self, pat: &mut Pat) { let _ = pat; } - fn visit_ty(&mut self, ty: &mut P<Ty>) { + fn visit_ty(&mut self, ty: &mut Ty) { let _ = ty; } @@ -358,23 +378,18 @@ fn librustc_brackets(mut librustc_expr: P<ast::Expr>) -> Option<P<ast::Expr>> { } } - let mut folder = BracketsVisitor { failed: false }; + let mut folder = FullyParenthesize; folder.visit_expr(&mut librustc_expr); - if folder.failed { - None - } else { - Some(librustc_expr) - } + librustc_expr } -/// Wrap every expression which is not already wrapped in parens with parens, to -/// reveal the precedence of the parsed expressions, and produce a stringified -/// form of the resulting expression. -fn syn_brackets(syn_expr: syn::Expr) -> syn::Expr { +fn syn_parenthesize(syn_expr: syn::Expr) -> syn::Expr { use syn::fold::{fold_expr, fold_generic_argument, Fold}; - use syn::{token, BinOp, Expr, ExprParen, GenericArgument, MetaNameValue, Pat, Stmt, Type}; + use syn::{ + token, BinOp, Expr, ExprParen, GenericArgument, Lit, MetaNameValue, Pat, Stmt, Type, + }; - struct ParenthesizeEveryExpr; + struct FullyParenthesize; fn parenthesize(expr: Expr) -> Expr { Expr::Paren(ExprParen { @@ -404,7 +419,7 @@ fn syn_brackets(syn_expr: syn::Expr) -> syn::Expr { } } - impl Fold for ParenthesizeEveryExpr { + impl Fold for FullyParenthesize { fn fold_expr(&mut self, expr: Expr) -> Expr { let needs_paren = needs_paren(&expr); let folded = fold_expr(self, expr); @@ -450,12 +465,58 @@ fn syn_brackets(syn_expr: syn::Expr) -> syn::Expr { fn fold_type(&mut self, ty: Type) -> Type { ty } + + fn fold_lit(&mut self, lit: Lit) -> Lit { + if let Lit::Verbatim(lit) = &lit { + panic!("unexpected verbatim literal: {lit}"); + } + lit + } } - let mut folder = ParenthesizeEveryExpr; + let mut folder = FullyParenthesize; folder.fold_expr(syn_expr) } +fn make_parens_invisible(expr: syn::Expr) -> syn::Expr { + use syn::fold::{fold_expr, fold_stmt, Fold}; + use syn::{token, Expr, ExprGroup, ExprParen, Stmt}; + + struct MakeParensInvisible; + + impl Fold for MakeParensInvisible { + fn fold_expr(&mut self, mut expr: Expr) -> Expr { + if let Expr::Paren(paren) = expr { + expr = Expr::Group(ExprGroup { + attrs: paren.attrs, + group_token: token::Group(paren.paren_token.span.join()), + expr: paren.expr, + }); + } + fold_expr(self, expr) + } + + fn fold_stmt(&mut self, stmt: Stmt) -> Stmt { + if let Stmt::Expr(expr @ (Expr::Binary(_) | Expr::Call(_) | Expr::Cast(_)), None) = stmt + { + Stmt::Expr( + Expr::Paren(ExprParen { + attrs: Vec::new(), + paren_token: token::Paren::default(), + expr: Box::new(fold_expr(self, expr)), + }), + None, + ) + } else { + fold_stmt(self, stmt) + } + } + } + + let mut folder = MakeParensInvisible; + folder.fold_expr(expr) +} + /// Walk through a crate collecting all expressions we can find in it. fn collect_exprs(file: syn::File) -> Vec<syn::Expr> { use syn::fold::Fold; diff --git a/vendor/syn/tests/test_iterators.rs b/vendor/syn/tests/test_punctuated.rs similarity index 75% rename from vendor/syn/tests/test_iterators.rs rename to vendor/syn/tests/test_punctuated.rs index 5f0eff59..14ea96c7 100644 --- a/vendor/syn/tests/test_iterators.rs +++ b/vendor/syn/tests/test_punctuated.rs @@ -1,10 +1,25 @@ -#![allow(clippy::uninlined_format_args)] +#![allow( + clippy::elidable_lifetime_names, + clippy::needless_lifetimes, + clippy::uninlined_format_args +)] use syn::punctuated::{Pair, Punctuated}; use syn::Token; -#[macro_use] -mod macros; +macro_rules! 
punctuated { + ($($e:expr,)+) => {{ + let mut seq = ::syn::punctuated::Punctuated::new(); + $( + seq.push($e); + )+ + seq + }}; + + ($($e:expr),+) => { + punctuated!($($e,)+) + }; +} macro_rules! check_exact_size_iterator { ($iter:expr) => {{ @@ -68,3 +83,10 @@ fn may_dangle() { } } } + +#[test] +#[should_panic = "index out of bounds: the len is 0 but the index is 0"] +fn index_out_of_bounds() { + let p = Punctuated::<syn::Ident, Token![,]>::new(); + let _ = p[0].clone(); +} diff --git a/vendor/syn/tests/test_receiver.rs b/vendor/syn/tests/test_receiver.rs index 8decb555..98194101 100644 --- a/vendor/syn/tests/test_receiver.rs +++ b/vendor/syn/tests/test_receiver.rs @@ -1,7 +1,13 @@ -#![allow(clippy::uninlined_format_args)] +#![allow( + clippy::elidable_lifetime_names, + clippy::needless_lifetimes, + clippy::uninlined_format_args +)] #[macro_use] -mod macros; +mod snapshot; + +mod debug; use syn::{parse_quote, TraitItemFn}; @@ -10,7 +16,7 @@ fn test_by_value() { let TraitItemFn { sig, .. } = parse_quote! { fn by_value(self: Self); }; - snapshot!(&sig.inputs[0], @r###" + snapshot!(&sig.inputs[0], @r#" FnArg::Receiver(Receiver { colon_token: Some, ty: Type::Path { @@ -23,7 +29,7 @@ fn test_by_value() { }, }, }) - "###); + "#); } #[test] @@ -31,7 +37,7 @@ fn test_by_mut_value() { let TraitItemFn { sig, .. } = parse_quote! { fn by_mut(mut self: Self); }; - snapshot!(&sig.inputs[0], @r###" + snapshot!(&sig.inputs[0], @r#" FnArg::Receiver(Receiver { mutability: Some, colon_token: Some, @@ -45,7 +51,7 @@ fn test_by_mut_value() { }, }, }) - "###); + "#); } #[test] @@ -53,7 +59,7 @@ fn test_by_ref() { let TraitItemFn { sig, .. } = parse_quote! { fn by_ref(self: &Self); }; - snapshot!(&sig.inputs[0], @r###" + snapshot!(&sig.inputs[0], @r#" FnArg::Receiver(Receiver { colon_token: Some, ty: Type::Reference { @@ -68,7 +74,7 @@ fn test_by_ref() { }, }, }) - "###); + "#); } #[test] @@ -76,7 +82,7 @@ fn test_by_box() { let TraitItemFn { sig, .. } = parse_quote! { fn by_box(self: Box<Self>); }; - snapshot!(&sig.inputs[0], @r###" + snapshot!(&sig.inputs[0], @r#" FnArg::Receiver(Receiver { colon_token: Some, ty: Type::Path { @@ -102,7 +108,7 @@ fn test_by_box() { }, }, }) - "###); + "#); } #[test] @@ -110,7 +116,7 @@ fn test_by_pin() { let TraitItemFn { sig, .. } = parse_quote! { fn by_pin(self: Pin<Self>); }; - snapshot!(&sig.inputs[0], @r###" + snapshot!(&sig.inputs[0], @r#" FnArg::Receiver(Receiver { colon_token: Some, ty: Type::Path { @@ -136,7 +142,7 @@ fn test_by_pin() { }, }, }) - "###); + "#); } #[test] @@ -144,7 +150,7 @@ fn test_explicit_type() { let TraitItemFn { sig, .. } = parse_quote! { fn explicit_type(self: Pin<MyType>); }; - snapshot!(&sig.inputs[0], @r###" + snapshot!(&sig.inputs[0], @r#" FnArg::Receiver(Receiver { colon_token: Some, ty: Type::Path { @@ -170,7 +176,7 @@ fn test_explicit_type() { }, }, }) - "###); + "#); } #[test] @@ -178,7 +184,7 @@ fn test_value_shorthand() { let TraitItemFn { sig, .. } = parse_quote! { fn value_shorthand(self); }; - snapshot!(&sig.inputs[0], @r###" + snapshot!(&sig.inputs[0], @r#" FnArg::Receiver(Receiver { ty: Type::Path { path: Path { @@ -190,7 +196,7 @@ fn test_value_shorthand() { }, }, }) - "###); + "#); } #[test] @@ -198,7 +204,7 @@ fn test_mut_value_shorthand() { let TraitItemFn { sig, .. } = parse_quote! 
{ fn mut_value_shorthand(mut self); }; - snapshot!(&sig.inputs[0], @r###" + snapshot!(&sig.inputs[0], @r#" FnArg::Receiver(Receiver { mutability: Some, ty: Type::Path { @@ -211,7 +217,7 @@ fn test_mut_value_shorthand() { }, }, }) - "###); + "#); } #[test] @@ -219,7 +225,7 @@ fn test_ref_shorthand() { let TraitItemFn { sig, .. } = parse_quote! { fn ref_shorthand(&self); }; - snapshot!(&sig.inputs[0], @r###" + snapshot!(&sig.inputs[0], @r#" FnArg::Receiver(Receiver { reference: Some(None), ty: Type::Reference { @@ -234,7 +240,7 @@ fn test_ref_shorthand() { }, }, }) - "###); + "#); } #[test] @@ -242,7 +248,7 @@ fn test_ref_shorthand_with_lifetime() { let TraitItemFn { sig, .. } = parse_quote! { fn ref_shorthand(&'a self); }; - snapshot!(&sig.inputs[0], @r###" + snapshot!(&sig.inputs[0], @r#" FnArg::Receiver(Receiver { reference: Some(Some(Lifetime { ident: "a", @@ -262,7 +268,7 @@ fn test_ref_shorthand_with_lifetime() { }, }, }) - "###); + "#); } #[test] @@ -270,7 +276,7 @@ fn test_ref_mut_shorthand() { let TraitItemFn { sig, .. } = parse_quote! { fn ref_mut_shorthand(&mut self); }; - snapshot!(&sig.inputs[0], @r###" + snapshot!(&sig.inputs[0], @r#" FnArg::Receiver(Receiver { reference: Some(None), mutability: Some, @@ -287,7 +293,7 @@ fn test_ref_mut_shorthand() { }, }, }) - "###); + "#); } #[test] @@ -295,7 +301,7 @@ fn test_ref_mut_shorthand_with_lifetime() { let TraitItemFn { sig, .. } = parse_quote! { fn ref_mut_shorthand(&'a mut self); }; - snapshot!(&sig.inputs[0], @r###" + snapshot!(&sig.inputs[0], @r#" FnArg::Receiver(Receiver { reference: Some(Some(Lifetime { ident: "a", @@ -317,5 +323,5 @@ fn test_ref_mut_shorthand_with_lifetime() { }, }, }) - "###); + "#); } diff --git a/vendor/syn/tests/test_round_trip.rs b/vendor/syn/tests/test_round_trip.rs index c0af30d2..5b1b833a 100644 --- a/vendor/syn/tests/test_round_trip.rs +++ b/vendor/syn/tests/test_round_trip.rs @@ -3,11 +3,15 @@ #![recursion_limit = "1024"] #![feature(rustc_private)] #![allow( + clippy::blocks_in_conditions, + clippy::elidable_lifetime_names, clippy::manual_assert, clippy::manual_let_else, clippy::match_like_matches_macro, + clippy::needless_lifetimes, clippy::uninlined_format_args )] +#![allow(mismatched_lifetime_syntaxes)] extern crate rustc_ast; extern crate rustc_ast_pretty; @@ -16,22 +20,22 @@ extern crate rustc_driver; extern crate rustc_error_messages; extern crate rustc_errors; extern crate rustc_expand; -extern crate rustc_parse as parse; +extern crate rustc_parse; extern crate rustc_session; extern crate rustc_span; use crate::common::eq::SpanlessEq; use quote::quote; use rustc_ast::ast::{ - AngleBracketedArg, AngleBracketedArgs, Crate, GenericArg, GenericParamKind, Generics, - WhereClause, + AngleBracketedArg, Crate, GenericArg, GenericArgs, GenericParamKind, Generics, }; use rustc_ast::mut_visit::{self, MutVisitor}; use rustc_ast_pretty::pprust; -use rustc_error_messages::{DiagnosticMessage, LazyFallbackBundle}; -use rustc_errors::{translation, Diagnostic, PResult}; +use rustc_data_structures::flat_map_in_place::FlatMapInPlace; +use rustc_error_messages::{DiagMessage, LazyFallbackBundle}; +use rustc_errors::{translation, Diag, PResult}; +use rustc_parse::lexer::StripTokens; use rustc_session::parse::ParseSess; -use rustc_span::source_map::FilePathMapping; use rustc_span::FileName; use std::borrow::Cow; use std::fs; @@ -44,53 +48,63 @@ use std::time::Instant; #[macro_use] mod macros; -#[allow(dead_code)] mod common; - mod repo; #[test] fn test_round_trip() { - common::rayon_init(); + repo::rayon_init(); 
repo::clone_rust(); - let abort_after = common::abort_after(); + let abort_after = repo::abort_after(); if abort_after == 0 { - panic!("Skipping all round_trip tests"); + panic!("skipping all round_trip tests"); } let failed = AtomicUsize::new(0); repo::for_each_rust_file(|path| test(path, &failed, abort_after)); - let failed = failed.load(Ordering::Relaxed); + let failed = failed.into_inner(); if failed > 0 { panic!("{} failures", failed); } } fn test(path: &Path, failed: &AtomicUsize, abort_after: usize) { + let failed = || { + let prev_failed = failed.fetch_add(1, Ordering::Relaxed); + if prev_failed + 1 >= abort_after { + process::exit(1); + } + }; + let content = fs::read_to_string(path).unwrap(); - let start = Instant::now(); - let (krate, elapsed) = match syn::parse_file(&content) { - Ok(krate) => (krate, start.elapsed()), - Err(msg) => { + let (back, elapsed) = match panic::catch_unwind(|| { + let start = Instant::now(); + let result = syn::parse_file(&content); + let elapsed = start.elapsed(); + result.map(|krate| (quote!(#krate).to_string(), elapsed)) + }) { + Err(_) => { + errorf!("=== {}: syn panic\n", path.display()); + failed(); + return; + } + Ok(Err(msg)) => { errorf!("=== {}: syn failed to parse\n{:?}\n", path.display(), msg); - let prev_failed = failed.fetch_add(1, Ordering::Relaxed); - if prev_failed + 1 >= abort_after { - process::exit(1); - } + failed(); return; } + Ok(Ok(result)) => result, }; - let back = quote!(#krate).to_string(); + let edition = repo::edition(path).parse().unwrap(); rustc_span::create_session_if_not_set_then(edition, |_| { let equal = match panic::catch_unwind(|| { let locale_resources = rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec(); - let file_path_mapping = FilePathMapping::empty(); - let sess = ParseSess::new(locale_resources, file_path_mapping); + let sess = ParseSess::new(locale_resources); let before = match librustc_parse(content, &sess) { Ok(before) => before, Err(diagnostic) => { @@ -105,7 +119,7 @@ fn test(path: &Path, failed: &AtomicUsize, abort_after: usize) { }; let after = match librustc_parse(back, &sess) { Ok(after) => after, - Err(mut diagnostic) => { + Err(diagnostic) => { errorf!("=== {}: librustc failed to parse", path.display()); diagnostic.emit(); return Err(false); @@ -140,10 +154,7 @@ fn test(path: &Path, failed: &AtomicUsize, abort_after: usize) { } }; if !equal { - let prev_failed = failed.fetch_add(1, Ordering::Relaxed); - if prev_failed + 1 >= abort_after { - process::exit(1); - } + failed(); } }); } @@ -152,10 +163,17 @@ fn librustc_parse(content: String, sess: &ParseSess) -> PResult<Crate> { static COUNTER: AtomicUsize = AtomicUsize::new(0); let counter = COUNTER.fetch_add(1, Ordering::Relaxed); let name = FileName::Custom(format!("test_round_trip{}", counter)); - parse::parse_crate_from_source_str(name, content, sess) + let mut parser = rustc_parse::new_parser_from_source_str( + sess, + name, + content, + StripTokens::ShebangAndFrontmatter, + ) + .unwrap(); + parser.parse_crate_mod() } -fn translate_message(diagnostic: &Diagnostic) -> Cow<'static, str> { +fn translate_message(diagnostic: &Diag) -> Cow<'static, str> { thread_local! 
{ static FLUENT_BUNDLE: LazyFallbackBundle = { let locale_resources = rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec(); @@ -164,12 +182,12 @@ fn translate_message(diagnostic: &Diagnostic) -> Cow<'static, str> { }; } - let message = &diagnostic.message[0].0; - let args = translation::to_fluent_args(diagnostic.args()); + let message = &diagnostic.messages[0].0; + let args = translation::to_fluent_args(diagnostic.args.iter()); let (identifier, attr) = match message { - DiagnosticMessage::Str(msg) | DiagnosticMessage::Eager(msg) => return msg.clone(), - DiagnosticMessage::FluentIdentifier(identifier, attr) => (identifier, attr), + DiagMessage::Str(msg) | DiagMessage::Translated(msg) => return msg.clone(), + DiagMessage::FluentIdentifier(identifier, attr) => (identifier, attr), }; FLUENT_BUNDLE.with(|fluent_bundle| { @@ -195,21 +213,23 @@ fn normalize(krate: &mut Crate) { struct NormalizeVisitor; impl MutVisitor for NormalizeVisitor { - fn visit_angle_bracketed_parameter_data(&mut self, e: &mut AngleBracketedArgs) { - #[derive(Ord, PartialOrd, Eq, PartialEq)] - enum Group { - Lifetimes, - TypesAndConsts, - Constraints, + fn visit_generic_args(&mut self, e: &mut GenericArgs) { + if let GenericArgs::AngleBracketed(e) = e { + #[derive(Ord, PartialOrd, Eq, PartialEq)] + enum Group { + Lifetimes, + TypesAndConsts, + Constraints, + } + e.args.sort_by_key(|arg| match arg { + AngleBracketedArg::Arg(arg) => match arg { + GenericArg::Lifetime(_) => Group::Lifetimes, + GenericArg::Type(_) | GenericArg::Const(_) => Group::TypesAndConsts, + }, + AngleBracketedArg::Constraint(_) => Group::Constraints, + }); } - e.args.sort_by_key(|arg| match arg { - AngleBracketedArg::Arg(arg) => match arg { - GenericArg::Lifetime(_) => Group::Lifetimes, - GenericArg::Type(_) | GenericArg::Const(_) => Group::TypesAndConsts, - }, - AngleBracketedArg::Constraint(_) => Group::Constraints, - }); - mut_visit::noop_visit_angle_bracketed_parameter_data(e, self); + mut_visit::walk_generic_args(self, e); } fn visit_generics(&mut self, e: &mut Generics) { @@ -224,12 +244,10 @@ fn normalize(krate: &mut Crate) { Group::TypesAndConsts } }); - mut_visit::noop_visit_generics(e, self); - } - - fn visit_where_clause(&mut self, e: &mut WhereClause) { - if e.predicates.is_empty() { - e.has_where_token = false; + e.params + .flat_map_in_place(|param| self.flat_map_generic_param(param)); + if e.where_clause.predicates.is_empty() { + e.where_clause.has_where_token = false; } } } diff --git a/vendor/syn/tests/test_shebang.rs b/vendor/syn/tests/test_shebang.rs index 4c2a2045..3b55ddfd 100644 --- a/vendor/syn/tests/test_shebang.rs +++ b/vendor/syn/tests/test_shebang.rs @@ -1,13 +1,19 @@ -#![allow(clippy::uninlined_format_args)] +#![allow( + clippy::elidable_lifetime_names, + clippy::needless_lifetimes, + clippy::uninlined_format_args +)] #[macro_use] -mod macros; +mod snapshot; + +mod debug; #[test] fn test_basic() { let content = "#!/usr/bin/env rustx\nfn main() {}"; let file = syn::parse_file(content).unwrap(); - snapshot!(file, @r###" + snapshot!(file, @r##" File { shebang: Some("#!/usr/bin/env rustx"), items: [ @@ -18,18 +24,20 @@ fn test_basic() { generics: Generics, output: ReturnType::Default, }, - block: Block, + block: Block { + stmts: [], + }, }, ], } - "###); + "##); } #[test] fn test_comment() { let content = "#!//am/i/a/comment\n[allow(dead_code)] fn main() {}"; let file = syn::parse_file(content).unwrap(); - snapshot!(file, @r###" + snapshot!(file, @r#" File { attrs: [ Attribute { @@ -55,9 +63,11 @@ fn test_comment() { generics: 
Generics, output: ReturnType::Default, }, - block: Block, + block: Block { + stmts: [], + }, }, ], } - "###); + "#); } diff --git a/vendor/syn/tests/test_should_parse.rs b/vendor/syn/tests/test_should_parse.rs deleted file mode 100644 index 180d8599..00000000 --- a/vendor/syn/tests/test_should_parse.rs +++ /dev/null @@ -1,45 +0,0 @@ -macro_rules! should_parse { - ($name:ident, { $($in:tt)* }) => { - #[test] - fn $name() { - // Make sure we can parse the file! - syn::parse_file(stringify!($($in)*)).unwrap(); - } - } -} - -should_parse!(generic_associated_type, { - impl Foo { - type Item = &'a i32; - fn foo<'a>(&'a self) -> Self::Item<'a> {} - } -}); - -#[rustfmt::skip] -should_parse!(const_generics_use, { - type X = Foo<5>; - type Y = Foo<"foo">; - type Z = Foo<X>; - type W = Foo<{ X + 10 }>; -}); - -should_parse!(trailing_plus_type, { - type A = Box<Foo>; - type A = Box<Foo + 'a>; - type A = Box<'a + Foo>; -}); - -should_parse!(generic_associated_type_where, { - trait Foo { - type Item; - fn foo<T>(&self, t: T) -> Self::Item<T>; - } -}); - -should_parse!(match_with_block_expr, { - fn main() { - match false { - _ => {}.a(), - } - } -}); diff --git a/vendor/syn/tests/test_size.rs b/vendor/syn/tests/test_size.rs index 943fcd35..29fd4358 100644 --- a/vendor/syn/tests/test_size.rs +++ b/vendor/syn/tests/test_size.rs @@ -1,36 +1,54 @@ // Assumes proc-macro2's "span-locations" feature is off. -#![cfg(target_pointer_width = "64")] - use std::mem; use syn::{Expr, Item, Lit, Pat, Type}; -#[rustversion::attr(before(2022-11-24), ignore)] +#[rustversion::attr(before(2022-11-24), ignore = "requires nightly-2022-11-24 or newer")] +#[rustversion::attr( + since(2022-11-24), + cfg_attr(not(target_pointer_width = "64"), ignore = "only applicable to 64-bit") +)] #[test] fn test_expr_size() { assert_eq!(mem::size_of::<Expr>(), 176); } -#[rustversion::attr(before(2022-09-09), ignore)] +#[rustversion::attr(before(2022-09-09), ignore = "requires nightly-2022-09-09 or newer")] +#[rustversion::attr( + since(2022-09-09), + cfg_attr(not(target_pointer_width = "64"), ignore = "only applicable to 64-bit") +)] #[test] fn test_item_size() { - assert_eq!(mem::size_of::<Item>(), 360); + assert_eq!(mem::size_of::<Item>(), 352); } -#[rustversion::attr(before(2023-04-29), ignore)] +#[rustversion::attr(before(2023-04-29), ignore = "requires nightly-2023-04-29 or newer")] +#[rustversion::attr( + since(2023-04-29), + cfg_attr(not(target_pointer_width = "64"), ignore = "only applicable to 64-bit") +)] #[test] fn test_type_size() { - assert_eq!(mem::size_of::<Type>(), 232); + assert_eq!(mem::size_of::<Type>(), 224); } -#[rustversion::attr(before(2023-04-29), ignore)] +#[rustversion::attr(before(2023-04-29), ignore = "requires nightly-2023-04-29 or newer")] +#[rustversion::attr( + since(2023-04-29), + cfg_attr(not(target_pointer_width = "64"), ignore = "only applicable to 64-bit") +)] #[test] fn test_pat_size() { assert_eq!(mem::size_of::<Pat>(), 184); } -#[rustversion::attr(before(2022-09-09), ignore)] +#[rustversion::attr(before(2023-12-20), ignore = "requires nightly-2023-12-20 or newer")] +#[rustversion::attr( + since(2023-12-20), + cfg_attr(not(target_pointer_width = "64"), ignore = "only applicable to 64-bit") +)] #[test] fn test_lit_size() { - assert_eq!(mem::size_of::<Lit>(), 32); + assert_eq!(mem::size_of::<Lit>(), 24); } diff --git a/vendor/syn/tests/test_stmt.rs b/vendor/syn/tests/test_stmt.rs index bc57685d..101c1b1c 100644 --- a/vendor/syn/tests/test_stmt.rs +++ b/vendor/syn/tests/test_stmt.rs @@ -1,35 +1,51 @@ 
#![allow( clippy::assertions_on_result_states, + clippy::elidable_lifetime_names, + clippy::needless_lifetimes, clippy::non_ascii_literal, clippy::uninlined_format_args )] #[macro_use] -mod macros; +mod snapshot; + +mod debug; use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree}; -use quote::quote; -use syn::Stmt; +use quote::{quote, ToTokens as _}; +use syn::parse::Parser as _; +use syn::{Block, Stmt}; #[test] fn test_raw_operator() { let stmt = syn::parse_str::<Stmt>("let _ = &raw const x;").unwrap(); - snapshot!(stmt, @r###" + snapshot!(stmt, @r#" Stmt::Local { pat: Pat::Wild, init: Some(LocalInit { - expr: Expr::Verbatim(`& raw const x`), + expr: Expr::RawAddr { + mutability: PointerMutability::Const, + expr: Expr::Path { + path: Path { + segments: [ + PathSegment { + ident: "x", + }, + ], + }, + }, + }, }), } - "###); + "#); } #[test] fn test_raw_variable() { let stmt = syn::parse_str::<Stmt>("let _ = &raw;").unwrap(); - snapshot!(stmt, @r###" + snapshot!(stmt, @r#" Stmt::Local { pat: Pat::Wild, init: Some(LocalInit { @@ -46,7 +62,7 @@ fn test_raw_variable() { }, }), } - "###); + "#); } #[test] @@ -56,10 +72,10 @@ fn test_raw_invalid() { #[test] fn test_none_group() { - // <Ø async fn f() {} Ø> - let tokens = TokenStream::from_iter(vec![TokenTree::Group(Group::new( + // «∅ async fn f() {} ∅» + let tokens = TokenStream::from_iter([TokenTree::Group(Group::new( Delimiter::None, - TokenStream::from_iter(vec![ + TokenStream::from_iter([ TokenTree::Ident(Ident::new("async", Span::call_site())), TokenTree::Ident(Ident::new("fn", Span::call_site())), TokenTree::Ident(Ident::new("f", Span::call_site())), @@ -67,8 +83,7 @@ fn test_none_group() { TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())), ]), ))]); - - snapshot!(tokens as Stmt, @r###" + snapshot!(tokens as Stmt, @r#" Stmt::Item(Item::Fn { vis: Visibility::Inherited, sig: Signature { @@ -77,9 +92,37 @@ fn test_none_group() { generics: Generics, output: ReturnType::Default, }, - block: Block, + block: Block { + stmts: [], + }, }) - "###); + "#); + + let tokens = Group::new(Delimiter::None, quote!(let None = None)).to_token_stream(); + let stmts = Block::parse_within.parse2(tokens).unwrap(); + snapshot!(stmts, @r#" + [ + Stmt::Expr( + Expr::Group { + expr: Expr::Let { + pat: Pat::Ident { + ident: "None", + }, + expr: Expr::Path { + path: Path { + segments: [ + PathSegment { + ident: "None", + }, + ], + }, + }, + }, + }, + None, + ), + ] + "#); } #[test] @@ -88,7 +131,7 @@ fn test_let_dot_dot() { let .. = 10; }; - snapshot!(tokens as Stmt, @r###" + snapshot!(tokens as Stmt, @r#" Stmt::Local { pat: Pat::Rest, init: Some(LocalInit { @@ -97,7 +140,7 @@ fn test_let_dot_dot() { }, }), } - "###); + "#); } #[test] @@ -106,7 +149,7 @@ fn test_let_else() { let Some(x) = None else { return 0; }; }; - snapshot!(tokens as Stmt, @r###" + snapshot!(tokens as Stmt, @r#" Stmt::Local { pat: Pat::TupleStruct { path: Path { @@ -148,7 +191,7 @@ fn test_let_else() { }), }), } - "###); + "#); } #[test] @@ -162,7 +205,7 @@ fn test_macros() { } }; - snapshot!(tokens as Stmt, @r###" + snapshot!(tokens as Stmt, @r#" Stmt::Item(Item::Fn { vis: Visibility::Inherited, sig: Signature { @@ -232,5 +275,63 @@ fn test_macros() { ], }, }) - "###); + "#); +} + +#[test] +fn test_early_parse_loop() { + // The following is an Expr::Loop followed by Expr::Tuple. It is not an + // Expr::Call. + let tokens = quote! 
{ + loop {} + () + }; + + let stmts = Block::parse_within.parse2(tokens).unwrap(); + + snapshot!(stmts, @r#" + [ + Stmt::Expr( + Expr::Loop { + body: Block { + stmts: [], + }, + }, + None, + ), + Stmt::Expr( + Expr::Tuple, + None, + ), + ] + "#); + + let tokens = quote! { + 'a: loop {} + () + }; + + let stmts = Block::parse_within.parse2(tokens).unwrap(); + + snapshot!(stmts, @r#" + [ + Stmt::Expr( + Expr::Loop { + label: Some(Label { + name: Lifetime { + ident: "a", + }, + }), + body: Block { + stmts: [], + }, + }, + None, + ), + Stmt::Expr( + Expr::Tuple, + None, + ), + ] + "#); } diff --git a/vendor/syn/tests/test_token_trees.rs b/vendor/syn/tests/test_token_trees.rs index f5a067fd..1b473858 100644 --- a/vendor/syn/tests/test_token_trees.rs +++ b/vendor/syn/tests/test_token_trees.rs @@ -1,7 +1,13 @@ -#![allow(clippy::uninlined_format_args)] +#![allow( + clippy::elidable_lifetime_names, + clippy::needless_lifetimes, + clippy::uninlined_format_args +)] #[macro_use] -mod macros; +mod snapshot; + +mod debug; use proc_macro2::TokenStream; use quote::quote; @@ -17,11 +23,11 @@ fn test_struct() { } "; - snapshot!(input as TokenStream, @r###" + snapshot!(input as TokenStream, @r##" TokenStream( `# [derive (Debug , Clone)] pub struct Item { pub ident : Ident , pub attrs : Vec < Attribute >, }`, ) - "###); + "##); } #[test] diff --git a/vendor/syn/tests/test_ty.rs b/vendor/syn/tests/test_ty.rs index a400a761..5f292201 100644 --- a/vendor/syn/tests/test_ty.rs +++ b/vendor/syn/tests/test_ty.rs @@ -1,11 +1,18 @@ -#![allow(clippy::uninlined_format_args)] +#![allow( + clippy::elidable_lifetime_names, + clippy::needless_lifetimes, + clippy::uninlined_format_args +)] #[macro_use] -mod macros; +mod snapshot; + +mod debug; use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree}; -use quote::quote; -use syn::Type; +use quote::{quote, ToTokens as _}; +use syn::punctuated::Punctuated; +use syn::{parse_quote, token, Token, Type, TypeTuple}; #[test] fn test_mut_self() { @@ -20,14 +27,14 @@ fn test_mut_self() { #[test] fn test_macro_variable_type() { // mimics the token stream corresponding to `$ty<T>` - let tokens = TokenStream::from_iter(vec![ + let tokens = TokenStream::from_iter([ TokenTree::Group(Group::new(Delimiter::None, quote! { ty })), TokenTree::Punct(Punct::new('<', Spacing::Alone)), TokenTree::Ident(Ident::new("T", Span::call_site())), TokenTree::Punct(Punct::new('>', Spacing::Alone)), ]); - snapshot!(tokens as Type, @r###" + snapshot!(tokens as Type, @r#" Type::Path { path: Path { segments: [ @@ -50,10 +57,10 @@ fn test_macro_variable_type() { ], }, } - "###); + "#); // mimics the token stream corresponding to `$ty::<T>` - let tokens = TokenStream::from_iter(vec![ + let tokens = TokenStream::from_iter([ TokenTree::Group(Group::new(Delimiter::None, quote! 
{ ty })), TokenTree::Punct(Punct::new(':', Spacing::Joint)), TokenTree::Punct(Punct::new(':', Spacing::Alone)), @@ -62,7 +69,7 @@ fn test_macro_variable_type() { TokenTree::Punct(Punct::new('>', Spacing::Alone)), ]); - snapshot!(tokens as Type, @r###" + snapshot!(tokens as Type, @r#" Type::Path { path: Path { segments: [ @@ -86,20 +93,20 @@ fn test_macro_variable_type() { ], }, } - "###); + "#); } #[test] fn test_group_angle_brackets() { // mimics the token stream corresponding to `Option<$ty>` - let tokens = TokenStream::from_iter(vec![ + let tokens = TokenStream::from_iter([ TokenTree::Ident(Ident::new("Option", Span::call_site())), TokenTree::Punct(Punct::new('<', Spacing::Alone)), TokenTree::Group(Group::new(Delimiter::None, quote! { Vec<u8> })), TokenTree::Punct(Punct::new('>', Spacing::Alone)), ]); - snapshot!(tokens as Type, @r###" + snapshot!(tokens as Type, @r#" Type::Path { path: Path { segments: [ @@ -137,20 +144,20 @@ fn test_group_angle_brackets() { ], }, } - "###); + "#); } #[test] fn test_group_colons() { // mimics the token stream corresponding to `$ty::Item` - let tokens = TokenStream::from_iter(vec![ + let tokens = TokenStream::from_iter([ TokenTree::Group(Group::new(Delimiter::None, quote! { Vec<u8> })), TokenTree::Punct(Punct::new(':', Spacing::Joint)), TokenTree::Punct(Punct::new(':', Spacing::Alone)), TokenTree::Ident(Ident::new("Item", Span::call_site())), ]); - snapshot!(tokens as Type, @r###" + snapshot!(tokens as Type, @r#" Type::Path { path: Path { segments: [ @@ -170,22 +177,23 @@ fn test_group_colons() { ], }, }, + Token![::], PathSegment { ident: "Item", }, ], }, } - "###); + "#); - let tokens = TokenStream::from_iter(vec![ + let tokens = TokenStream::from_iter([ TokenTree::Group(Group::new(Delimiter::None, quote! { [T] })), TokenTree::Punct(Punct::new(':', Spacing::Joint)), TokenTree::Punct(Punct::new(':', Spacing::Alone)), TokenTree::Ident(Ident::new("Element", Span::call_site())), ]); - snapshot!(tokens as Type, @r###" + snapshot!(tokens as Type, @r#" Type::Path { qself: Some(QSelf { ty: Type::Slice { @@ -210,13 +218,13 @@ fn test_group_colons() { ], }, } - "###); + "#); } #[test] fn test_trait_object() { let tokens = quote!(dyn for<'a> Trait<'a> + 'static); - snapshot!(tokens as Type, @r###" + snapshot!(tokens as Type, @r#" Type::TraitObject { dyn_token: Some, bounds: [ @@ -245,21 +253,23 @@ fn test_trait_object() { ], }, }), + Token![+], TypeParamBound::Lifetime { ident: "static", }, ], } - "###); + "#); let tokens = quote!(dyn 'a + Trait); - snapshot!(tokens as Type, @r###" + snapshot!(tokens as Type, @r#" Type::TraitObject { dyn_token: Some, bounds: [ TypeParamBound::Lifetime { ident: "a", }, + Token![+], TypeParamBound::Trait(TraitBound { path: Path { segments: [ @@ -271,7 +281,7 @@ fn test_trait_object() { }), ], } - "###); + "#); // None of the following are valid Rust types. 
syn::parse_str::<Type>("for<'a> dyn Trait<'a>").unwrap_err(); @@ -282,7 +292,7 @@ fn test_trait_object() { fn test_trailing_plus() { #[rustfmt::skip] let tokens = quote!(impl Trait +); - snapshot!(tokens as Type, @r###" + snapshot!(tokens as Type, @r#" Type::ImplTrait { bounds: [ TypeParamBound::Trait(TraitBound { @@ -294,13 +304,14 @@ fn test_trailing_plus() { ], }, }), + Token![+], ], } - "###); + "#); #[rustfmt::skip] let tokens = quote!(dyn Trait +); - snapshot!(tokens as Type, @r###" + snapshot!(tokens as Type, @r#" Type::TraitObject { dyn_token: Some, bounds: [ @@ -313,13 +324,14 @@ fn test_trailing_plus() { ], }, }), + Token![+], ], } - "###); + "#); #[rustfmt::skip] let tokens = quote!(Trait +); - snapshot!(tokens as Type, @r###" + snapshot!(tokens as Type, @r#" Type::TraitObject { bounds: [ TypeParamBound::Trait(TraitBound { @@ -331,7 +343,129 @@ fn test_trailing_plus() { ], }, }), + Token![+], + ], + } + "#); +} + +#[test] +fn test_tuple_comma() { + let mut expr = TypeTuple { + paren_token: token::Paren::default(), + elems: Punctuated::new(), + }; + snapshot!(expr.to_token_stream() as Type, @"Type::Tuple"); + + expr.elems.push_value(parse_quote!(_)); + // Must not parse to Type::Paren + snapshot!(expr.to_token_stream() as Type, @r#" + Type::Tuple { + elems: [ + Type::Infer, + Token![,], + ], + } + "#); + + expr.elems.push_punct(<Token![,]>::default()); + snapshot!(expr.to_token_stream() as Type, @r#" + Type::Tuple { + elems: [ + Type::Infer, + Token![,], + ], + } + "#); + + expr.elems.push_value(parse_quote!(_)); + snapshot!(expr.to_token_stream() as Type, @r#" + Type::Tuple { + elems: [ + Type::Infer, + Token![,], + Type::Infer, + ], + } + "#); + + expr.elems.push_punct(<Token![,]>::default()); + snapshot!(expr.to_token_stream() as Type, @r#" + Type::Tuple { + elems: [ + Type::Infer, + Token![,], + Type::Infer, + Token![,], + ], + } + "#); +} + +#[test] +fn test_impl_trait_use() { + let tokens = quote! { + impl Sized + use<'_, 'a, A, Test> + }; + + snapshot!(tokens as Type, @r#" + Type::ImplTrait { + bounds: [ + TypeParamBound::Trait(TraitBound { + path: Path { + segments: [ + PathSegment { + ident: "Sized", + }, + ], + }, + }), + Token![+], + TypeParamBound::PreciseCapture(PreciseCapture { + params: [ + CapturedParam::Lifetime(Lifetime { + ident: "_", + }), + Token![,], + CapturedParam::Lifetime(Lifetime { + ident: "a", + }), + Token![,], + CapturedParam::Ident("A"), + Token![,], + CapturedParam::Ident("Test"), + ], + }), + ], + } + "#); + + let trailing = quote! 
{ + impl Sized + use<'_,> + }; + + snapshot!(trailing as Type, @r#" + Type::ImplTrait { + bounds: [ + TypeParamBound::Trait(TraitBound { + path: Path { + segments: [ + PathSegment { + ident: "Sized", + }, + ], + }, + }), + Token![+], + TypeParamBound::PreciseCapture(PreciseCapture { + params: [ + CapturedParam::Lifetime(Lifetime { + ident: "_", + }), + Token![,], + ], + }), ], } - "###); + "#); } diff --git a/vendor/syn/tests/test_unparenthesize.rs b/vendor/syn/tests/test_unparenthesize.rs new file mode 100644 index 00000000..5fa2e59e --- /dev/null +++ b/vendor/syn/tests/test_unparenthesize.rs @@ -0,0 +1,70 @@ +#![cfg(not(miri))] +#![recursion_limit = "1024"] +#![feature(rustc_private)] +#![allow( + clippy::elidable_lifetime_names, + clippy::manual_assert, + clippy::match_like_matches_macro, + clippy::needless_lifetimes, + clippy::uninlined_format_args +)] + +use crate::common::visit::{AsIfPrinted, FlattenParens}; +use quote::ToTokens as _; +use std::fs; +use std::panic; +use std::path::Path; +use std::sync::atomic::{AtomicUsize, Ordering}; +use syn::visit_mut::VisitMut as _; + +#[macro_use] +mod macros; + +mod common; +mod repo; + +#[test] +fn test_unparenthesize() { + repo::rayon_init(); + repo::clone_rust(); + + let failed = AtomicUsize::new(0); + + repo::for_each_rust_file(|path| test(path, &failed)); + + let failed = failed.into_inner(); + if failed > 0 { + panic!("{} failures", failed); + } +} + +fn test(path: &Path, failed: &AtomicUsize) { + let content = fs::read_to_string(path).unwrap(); + + match panic::catch_unwind(|| -> syn::Result<()> { + let mut before = syn::parse_file(&content)?; + FlattenParens::discard_attrs().visit_file_mut(&mut before); + let printed = before.to_token_stream(); + let mut after = syn::parse2::<syn::File>(printed.clone())?; + FlattenParens::discard_attrs().visit_file_mut(&mut after); + // Normalize features that we expect Syn not to print. + AsIfPrinted.visit_file_mut(&mut before); + if before != after { + errorf!("=== {}\n", path.display()); + if failed.fetch_add(1, Ordering::Relaxed) == 0 { + errorf!("BEFORE:\n{:#?}\nAFTER:\n{:#?}\n", before, after); + } + } + Ok(()) + }) { + Err(_) => { + errorf!("=== {}: syn panic\n", path.display()); + failed.fetch_add(1, Ordering::Relaxed); + } + Ok(Err(msg)) => { + errorf!("=== {}: syn failed to parse\n{:?}\n", path.display(), msg); + failed.fetch_add(1, Ordering::Relaxed); + } + Ok(Ok(())) => {} + } +} diff --git a/vendor/syn/tests/test_visibility.rs b/vendor/syn/tests/test_visibility.rs index 496e0070..cf15574b 100644 --- a/vendor/syn/tests/test_visibility.rs +++ b/vendor/syn/tests/test_visibility.rs @@ -1,9 +1,16 @@ -#![allow(clippy::uninlined_format_args)] +#![allow( + clippy::elidable_lifetime_names, + clippy::needless_lifetimes, + clippy::uninlined_format_args +)] #[macro_use] -mod macros; +mod snapshot; + +mod debug; use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree}; +use quote::quote; use syn::parse::{Parse, ParseStream}; use syn::{DeriveInput, Result, Visibility}; @@ -33,7 +40,7 @@ macro_rules! 
assert_vis_parse { match parse.vis { $p => {} - _ => panic!("Expected {}, got {:?}", stringify!($p), parse.vis), + _ => panic!("expected {}, got {:?}", stringify!($p), parse.vis), } // NOTE: Round-trips through `to_string` to avoid potential whitespace @@ -100,29 +107,23 @@ fn test_junk_after_in() { } #[test] -fn test_empty_group_vis() { +fn test_inherited_vis_named_field() { // mimics `struct S { $vis $field: () }` where $vis is empty - let tokens = TokenStream::from_iter(vec![ + let tokens = TokenStream::from_iter([ TokenTree::Ident(Ident::new("struct", Span::call_site())), TokenTree::Ident(Ident::new("S", Span::call_site())), TokenTree::Group(Group::new( Delimiter::Brace, - TokenStream::from_iter(vec![ + TokenStream::from_iter([ TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())), - TokenTree::Group(Group::new( - Delimiter::None, - TokenStream::from_iter(vec![TokenTree::Ident(Ident::new( - "f", - Span::call_site(), - ))]), - )), + TokenTree::Group(Group::new(Delimiter::None, quote!(f))), TokenTree::Punct(Punct::new(':', Spacing::Alone)), TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())), ]), )), ]); - snapshot!(tokens as DeriveInput, @r###" + snapshot!(tokens as DeriveInput, @r#" DeriveInput { vis: Visibility::Inherited, ident: "S", @@ -140,5 +141,51 @@ fn test_empty_group_vis() { }, }, } - "###); + "#); +} + +#[test] +fn test_inherited_vis_unnamed_field() { + // mimics `struct S($vis $ty);` where $vis is empty + let tokens = TokenStream::from_iter([ + TokenTree::Ident(Ident::new("struct", Span::call_site())), + TokenTree::Ident(Ident::new("S", Span::call_site())), + TokenTree::Group(Group::new( + Delimiter::Parenthesis, + TokenStream::from_iter([ + TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())), + TokenTree::Group(Group::new(Delimiter::None, quote!(str))), + ]), + )), + TokenTree::Punct(Punct::new(';', Spacing::Alone)), + ]); + + snapshot!(tokens as DeriveInput, @r#" + DeriveInput { + vis: Visibility::Inherited, + ident: "S", + generics: Generics, + data: Data::Struct { + fields: Fields::Unnamed { + unnamed: [ + Field { + vis: Visibility::Inherited, + ty: Type::Group { + elem: Type::Path { + path: Path { + segments: [ + PathSegment { + ident: "str", + }, + ], + }, + }, + }, + }, + ], + }, + semi_token: Some, + }, + } + "#); } diff --git a/vendor/zerocopy-derive/.cargo-checksum.json b/vendor/zerocopy-derive/.cargo-checksum.json new file mode 100644 index 00000000..65e357f0 --- /dev/null +++ b/vendor/zerocopy-derive/.cargo-checksum.json @@ -0,0 +1 @@ 
+{"files":{".cargo_vcs_info.json":"71ce0494c349f54d091805a75f85ae5516eb3a08242607aaff921a3dac63d626","Cargo.lock":"89a5bd6d96792ed221803ff077e8447f24346c306178a1ea2e1acae5bba41124","Cargo.toml":"bf0e415a25233d292e12907cf90ad077649ab26ce377b774d45eb3ce2b1dd400","Cargo.toml.orig":"72594fde2301466ef051db909c198349db6c1ad9a4571c105d35812e4ee94395","LICENSE-APACHE":"9d185ac6703c4b0453974c0d85e9eee43e6941009296bb1f5eb0b54e2329e9f3","LICENSE-BSD":"83c1763356e822adde0a2cae748d938a73fdc263849ccff6b27776dff213bd32","LICENSE-MIT":"1a2f5c12ddc934d58956aa5dbdd3255fe55fd957633ab7d0d39e4f0daa73f7df","src/enum.rs":"4188e445fee6a661f01668360503cec288786b1d46eb8969c6772783808c5b50","src/ext.rs":"a58afaa83ae4601e01f4aad29045d0668dfb893d694d5cbeb3725cbabd47dd51","src/lib.rs":"6687f9000157dc21eda13fbb40bbfe66a79e548139e5219d977c7a6dc453eab0","src/output_tests.rs":"56f7dbaf43a678b24f60ceb6bf6013769e6cc193006fd359342da256fef09017","src/repr.rs":"2766afad6cf7bd24ee67ee02452e638dac474e8d00edb51dff54f9e9691d2526","tests/crate_path.rs":"9083a9ed020b254e3ae632207ec6f72dd3eede94081f76f0378bdb62308398df","tests/deprecated.rs":"708cf36f73aaef10250af6be1aef68b4316f926e63d618caa981ec021da250de","tests/enum_from_zeros.rs":"1e712a2802195af290c7ebefb8645158eef96698dfb93b323f9ce234e878c58d","tests/enum_known_layout.rs":"9a36492810c0a7744e651dc8dd8a5749a81dd678b8cd6713ebe3aaa5fe17d7e6","tests/enum_no_cell.rs":"0024ffbd04551350ff2c4d8c4262fc22ebadc599d1f641bedf1bf24d6a060c78","tests/enum_to_bytes.rs":"cf7d0c482e833d2b4f6f677f4c45495f654bdc4862b5044a183dea56533a2e7a","tests/enum_try_from_bytes.rs":"3644b90ab443be7c3a5f212b1738042d86066b3a2516eee035b7081ccda99edf","tests/enum_unaligned.rs":"e04c9d1fa0f8abcd2353cab94015da010e2b3bdee65e23186c7bd13ff89d54c3","tests/eq.rs":"97fd62ff73c474e00921edc3e1bac639dfe757db1b9419d7ed97894a2b76b682","tests/hash.rs":"f0dd3df5f177b6806def5334d38f89348308e56680f7624ca3753ef503f98604","tests/hygiene.rs":"5ee71dbb525e5e40161e9b886d322b2969c60da152c2e87c59e759fc24cd143d","tests/include.rs":"2c042273938a26638d9251d6f08b4d99ab3666d9871fdf19c57bf86007798ab2","tests/issue_2117.rs":"7cd6480a9684c7efe915753057973363a0e2bce93bea5a85d3139ea9b2f519bd","tests/issue_2835.rs":"b5a26492da3cd5173b3b5aab11974a972ed370df37e6cba87b5b09e7837b3aed","tests/paths_and_modules.rs":"07a0cc3006e5b60dd32fb0530fa4e5c9e4438d38b3cdd4eb7aca456e6f45a121","tests/priv_in_pub.rs":"cf800572fb32f8a31dc68bf74833abbc40a15ca70a5aad243a496dac136c8ed0","tests/struct_from_bytes.rs":"95dc1d2fec9917044da0bad9a4e217641f3052f95bb8d5f7511fdf72683ce040","tests/struct_from_zeros.rs":"3436b3c34d0ccd69b456dc41264a6c5d3af60f1f955e08f6475cc9ace3cd5f95","tests/struct_known_layout.rs":"66d2676d959a6682f10e1765a4022f41751cca44520e5cb91ebd8e043db734c8","tests/struct_no_cell.rs":"aea0ef4836d5ad38a968304ea57a2cbf46c8b60864c98dd421c72b3369aafb6c","tests/struct_to_bytes.rs":"962b002c03803cabe3b7cc349a0e36ae4280fbaa2902f147aa7c56accf285174","tests/struct_try_from_bytes.rs":"1949e9572045bac869051c6b693dfb38bf0af22b3ee953626baa23c8b9745b09","tests/struct_unaligned.rs":"405ac720c3ce5f6b63ca17248455d45ca92967f8a43b3f347deec03fdb841c6e","tests/trybuild.rs":"a240f2b14fbd84a9f61e8eaf9dcccdbea9359802bf81189f9939dcc06a81afd0","tests/ui-msrv/derive_transparent.rs":"a822e79244df0da55fad83b8cde327f3c1dab7011c8e0b828221d3921537f9ad","tests/ui-msrv/derive_transparent.stderr":"9ce9d216e2c51b252a67b0e1b5b9db034dd9e35e1efcc84ec8f8a7d11d4b378d","tests/ui-msrv/enum.rs":"cbaa483762e1f9ba8448279d740cf5ad9444b144aa9fc31300cfb89cfc123db9","tests/ui-msrv/enum.stderr":"f78d7e763d13fa96d
24ee9d991b09b1c3f27058dc195f05a5478aa5bf0fab811","tests/ui-msrv/enum_from_bytes_u8_too_few.rs":"afbec6f24a4bfca472f2929abc5125d8f5b305a0a1b472a1215ad7739ed63100","tests/ui-msrv/enum_from_bytes_u8_too_few.stderr":"a5ab2757166ef9dfa51a09d3dbddd5e9e2b1a46bd3d4b8d175b33a90747878d7","tests/ui-msrv/late_compile_pass.rs":"242d13124ee2fb28413fabec77019ad38b3a1a49ce4f6a783dac8b9c244ea0a5","tests/ui-msrv/late_compile_pass.stderr":"0c518781f144fbe091ad8165ecfbd9341621eda13b67b05e9ea797f1d0587f9e","tests/ui-msrv/mid_compile_pass.rs":"b80b01bfd383c537173f7d630ec2789a2af3e01bc6d53c807fdcf271b235d0c9","tests/ui-msrv/mid_compile_pass.stderr":"02f51290ac6e31ebab51d7917df644163f6c2346877506d7debe8a6351acda95","tests/ui-msrv/msrv_specific.rs":"ef23fd947a553a97fd7d2c2c201b1b1ca98a11cb59894325c2b3ccd61b0dd980","tests/ui-msrv/msrv_specific.stderr":"7aec6f7f077f12f6d35ef31504b9e80fa070245a0feb776b6cc8720614f8f64c","tests/ui-msrv/struct.rs":"f8f07369ec53ab84a62a3ed435cc762268ed98ba44b7c0a2f387388fc15fe25c","tests/ui-msrv/struct.stderr":"b1970c0a893ac7e167f33ff2d97c9cc1a09dac141ea2ff4fc27a6d09aa13bdda","tests/ui-msrv/union.rs":"8bf1cf5267427674a17be834debb16cd4177c5a1103019c0b52ef24e91b6e6c7","tests/ui-msrv/union.stderr":"557f3a446af9213c905e7449ff71f6d62ea8a61547312d3358082b590fe17680","tests/ui-msrv/union_into_bytes_cfg/union_into_bytes_cfg.rs":"a0168adcd0dfdc46e84421937c704b713a1307a4fc2efc19bac5dc32f6758e89","tests/ui-msrv/union_into_bytes_cfg/union_into_bytes_cfg.stderr":"0b9af4f767cc1d6fc591d75b046d31617b5230f012d8ce7c70d7c20ffd6178ca","tests/ui-nightly/absence_of_deprecated_warning.rs":"126083dc4f3efaa51c0606ab68073fea904dc3f71805782b47117bb4ca563b44","tests/ui-nightly/absence_of_deprecated_warning.stderr":"7177d3bbc4b897037c34775f801baef8461483d55915443f07a165f67c5b5f16","tests/ui-nightly/derive_transparent.rs":"a822e79244df0da55fad83b8cde327f3c1dab7011c8e0b828221d3921537f9ad","tests/ui-nightly/derive_transparent.stderr":"f7bff4f69ed5474350f5b056f05cf05bf6ce2a7f9443dc31bd7465ed201ecdc3","tests/ui-nightly/enum.rs":"cbaa483762e1f9ba8448279d740cf5ad9444b144aa9fc31300cfb89cfc123db9","tests/ui-nightly/enum.stderr":"300aafde20b11e6320726c47aa1e0b2f6db780c07682b12aa1449592a54a63e2","tests/ui-nightly/enum_from_bytes_u8_too_few.rs":"afbec6f24a4bfca472f2929abc5125d8f5b305a0a1b472a1215ad7739ed63100","tests/ui-nightly/enum_from_bytes_u8_too_few.stderr":"6523010fb4d9d3b5313778c144878904647f6ece5e985e8ceac01fb2c6d69f3b","tests/ui-nightly/late_compile_pass.rs":"242d13124ee2fb28413fabec77019ad38b3a1a49ce4f6a783dac8b9c244ea0a5","tests/ui-nightly/late_compile_pass.stderr":"7854a41cbca727f15aea4450137318ffc3fdfe37f1f392fb27fb1288c51a4452","tests/ui-nightly/mid_compile_pass.rs":"b80b01bfd383c537173f7d630ec2789a2af3e01bc6d53c807fdcf271b235d0c9","tests/ui-nightly/mid_compile_pass.stderr":"9ad9aeb1e55ab8e84ce0b818e59c9df25f91c47fe1648fad5b129ebb35c493ac","tests/ui-nightly/struct.rs":"f8f07369ec53ab84a62a3ed435cc762268ed98ba44b7c0a2f387388fc15fe25c","tests/ui-nightly/struct.stderr":"936292edab0c63446e8e867d367eda184f6cf91b9f92e042672116c18202c408","tests/ui-nightly/union.rs":"8bf1cf5267427674a17be834debb16cd4177c5a1103019c0b52ef24e91b6e6c7","tests/ui-nightly/union.stderr":"ea7162d4ae97d0c56b685ff30cac0ea74db8ecb2a59bd592d1fe72e09f345720","tests/ui-nightly/union_into_bytes_cfg/union_into_bytes_cfg.rs":"a0168adcd0dfdc46e84421937c704b713a1307a4fc2efc19bac5dc32f6758e89","tests/ui-nightly/union_into_bytes_cfg/union_into_bytes_cfg.stderr":"1462a888754048e09c81c93ee4e46c860a85c7683655f35c1b9a305b386b2676","tests/ui-stable/derive_transpar
ent.rs":"a822e79244df0da55fad83b8cde327f3c1dab7011c8e0b828221d3921537f9ad","tests/ui-stable/derive_transparent.stderr":"995f027f1496707a6fbd19b4cda8f87f4f235f9434116a2463ad7564803e19e1","tests/ui-stable/enum.rs":"cbaa483762e1f9ba8448279d740cf5ad9444b144aa9fc31300cfb89cfc123db9","tests/ui-stable/enum.stderr":"0b80e167e93573845c195cb3b1cae8ad4cce4e1ee9fd28f6ee83de3b9eabd9c8","tests/ui-stable/enum_from_bytes_u8_too_few.rs":"afbec6f24a4bfca472f2929abc5125d8f5b305a0a1b472a1215ad7739ed63100","tests/ui-stable/enum_from_bytes_u8_too_few.stderr":"2ab0939e10d53824a837d0791d12f039cb20f2ec6b42dce18057f70733e768cc","tests/ui-stable/late_compile_pass.rs":"242d13124ee2fb28413fabec77019ad38b3a1a49ce4f6a783dac8b9c244ea0a5","tests/ui-stable/late_compile_pass.stderr":"675b94d564e852db7cd318d2ef759148beabd79bede55cbb94cf39e43240d6e5","tests/ui-stable/mid_compile_pass.rs":"b80b01bfd383c537173f7d630ec2789a2af3e01bc6d53c807fdcf271b235d0c9","tests/ui-stable/mid_compile_pass.stderr":"0c8fa05159e21c087c8a2a87fb506b0fcd9692c84010c4c7eafd4c1bb1c390c3","tests/ui-stable/struct.rs":"f8f07369ec53ab84a62a3ed435cc762268ed98ba44b7c0a2f387388fc15fe25c","tests/ui-stable/struct.stderr":"e45b72580ba4e3a72140bee9b10d864703741e0d49011a70701fc09dc05e27cd","tests/ui-stable/union.rs":"8bf1cf5267427674a17be834debb16cd4177c5a1103019c0b52ef24e91b6e6c7","tests/ui-stable/union.stderr":"be89f3703a86aad65394c92f03dd65a66664248c4f9c367bd00aa3a04e290688","tests/ui-stable/union_into_bytes_cfg/union_into_bytes_cfg.rs":"a0168adcd0dfdc46e84421937c704b713a1307a4fc2efc19bac5dc32f6758e89","tests/ui-stable/union_into_bytes_cfg/union_into_bytes_cfg.stderr":"5ab86d509f2a9ef55e47e34373c57241f528f568e0936553e71c8ddb13f490c0","tests/union_from_bytes.rs":"6cb7ecc1a18985bc65b09a48581ae2b5a5b1c4947e0ca46bf00f0dfabe49c5a1","tests/union_from_zeros.rs":"f2dfc80bf4422b8dc5164a467c31021be1d6868e0d65eafda3714345770782b5","tests/union_known_layout.rs":"9af0d34af3a443ca52b25448f4353bd45eebda67203a890a5dbf7471481f728d","tests/union_no_cell.rs":"be5040a2e878fafa34b7d153a698b31ce6c1577b4d0bfd24df2902b666805be9","tests/union_to_bytes.rs":"bef8e076a781fa5fa6ec3bab759add6f981c7c42527d0123db5780dd98b6a99d","tests/union_try_from_bytes.rs":"8fafc7c81827bd932183d04ed83a3f72ab8ac48490fe40b85cdc0c71e02e35ae","tests/union_unaligned.rs":"eb3a44fe2f3bf66acf6303765dd4c4cf65063d4bb3318d0e8160998651a7f5bf","tests/unsafe_cell.rs":"754045859595316399dc2747eda0e7560ff536f5a0e2763bb8f5c1cf7c6128ad"},"package":"d8a8d209fdf45cf5138cbb5a506f6b52522a25afccc534d1475dad8e31105c6a"} \ No newline at end of file diff --git a/vendor/zerocopy-derive/.cargo_vcs_info.json b/vendor/zerocopy-derive/.cargo_vcs_info.json new file mode 100644 index 00000000..18729641 --- /dev/null +++ b/vendor/zerocopy-derive/.cargo_vcs_info.json @@ -0,0 +1,6 @@ +{ + "git": { + "sha1": "8ffc71c66080d4e9d8db3c01614ffa724c43d0c5" + }, + "path_in_vcs": "zerocopy-derive" +} \ No newline at end of file diff --git a/vendor/zerocopy-derive/Cargo.lock b/vendor/zerocopy-derive/Cargo.lock new file mode 100644 index 00000000..d61cb483 --- /dev/null +++ b/vendor/zerocopy-derive/Cargo.lock @@ -0,0 +1,266 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 3 + +[[package]] +name = "basic-toml" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba62675e8242a4c4e806d12f11d136e626e6c8361d6b829310732241652a178a" +dependencies = [ + "serde", +] + +[[package]] +name = "dissimilar" +version = "1.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8975ffdaa0ef3661bfe02dbdcc06c9f829dfafe6a3c474de366a8d5e44276921" + +[[package]] +name = "glob" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2" + +[[package]] +name = "itoa" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" + +[[package]] +name = "libc" +version = "0.2.163" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fdaeca4cf44ed4ac623e86ef41f056e848dbeab7ec043ecb7326ba300b36fd0" + +[[package]] +name = "memchr" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" + +[[package]] +name = "once_cell" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da32515d9f6e6e489d7bc9d84c71b060db7247dc035bbe44eac88cf87486d8d5" + +[[package]] +name = "prettyplease" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d3928fb5db768cb86f891ff014f0144589297e3c6a1aba6ed7cecfdace270c7" +dependencies = [ + "proc-macro2", + "syn", +] + +[[package]] +name = "proc-macro2" +version = "1.0.80" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a56dea16b0a29e94408b9aa5e2940a4eedbd128a1ba20e8f7ae60fd3d465af0e" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rustversion" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" + +[[package]] +name = "ryu" +version = "1.0.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" + +[[package]] +name = "serde" +version = "1.0.210" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8e3592472072e6e22e0a54d5904d9febf8508f65fb8552499a1abc7d1078c3a" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.210" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.143" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d401abef1d108fbd9cbaebc3e46611f4b1021f714a0597a71f41ee463f5f4a5a" +dependencies = [ + "itoa", + "memchr", + "ryu", + "serde", +] + +[[package]] +name = "static_assertions" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" + +[[package]] +name = "syn" 
+version = "2.0.65" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2863d96a84c6439701d7a38f9de935ec562c8832cc55d1dde0f513b52fad106" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "termcolor" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "trybuild" +version = "1.0.89" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a9d3ba662913483d6722303f619e75ea10b7855b0f8e0d72799cf8621bb488f" +dependencies = [ + "basic-toml", + "dissimilar", + "glob", + "once_cell", + "serde", + "serde_derive", + "serde_json", + "termcolor", +] + +[[package]] +name = "unicode-ident" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5" + +[[package]] +name = "winapi-util" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d4cc384e1e73b93bafa6fb4f1df8c41695c8a91cf9c4c64358067d15a7b6c6b" +dependencies = [ + "windows-sys", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_gnullvm", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "zerocopy-derive" +version = "0.8.31" +dependencies = [ + "dissimilar", + "libc", + "once_cell", + "prettyplease", + "proc-macro2", + "quote", + "rustversion", + "static_assertions", + "syn", + "trybuild", +] diff --git a/vendor/zerocopy-derive/Cargo.toml b/vendor/zerocopy-derive/Cargo.toml new file mode 100644 index 00000000..c4615481 --- /dev/null +++ b/vendor/zerocopy-derive/Cargo.toml @@ -0,0 +1,210 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g., crates.io) dependencies. +# +# If you are reading this file be aware that the original Cargo.toml +# will likely look very different (and much more reasonable). +# See Cargo.toml.orig for the original contents. + +[package] +edition = "2021" +name = "zerocopy-derive" +version = "0.8.31" +authors = [ + "Joshua Liebow-Feeser <joshlf@google.com>", + "Jack Wrenn <jswrenn@amazon.com>", +] +build = false +exclude = [ + ".*", + "tests/enum_from_bytes.rs", + "tests/ui-nightly/enum_from_bytes_u16_too_few.rs.disabled", +] +autolib = false +autobins = false +autoexamples = false +autotests = false +autobenches = false +description = "Custom derive for traits from the zerocopy crate" +readme = false +license = "BSD-2-Clause OR Apache-2.0 OR MIT" +repository = "https://github.com/google/zerocopy" + +[lib] +name = "zerocopy_derive" +path = "src/lib.rs" +proc-macro = true + +[[test]] +name = "crate_path" +path = "tests/crate_path.rs" + +[[test]] +name = "deprecated" +path = "tests/deprecated.rs" + +[[test]] +name = "enum_from_zeros" +path = "tests/enum_from_zeros.rs" + +[[test]] +name = "enum_known_layout" +path = "tests/enum_known_layout.rs" + +[[test]] +name = "enum_no_cell" +path = "tests/enum_no_cell.rs" + +[[test]] +name = "enum_to_bytes" +path = "tests/enum_to_bytes.rs" + +[[test]] +name = "enum_try_from_bytes" +path = "tests/enum_try_from_bytes.rs" + +[[test]] +name = "enum_unaligned" +path = "tests/enum_unaligned.rs" + +[[test]] +name = "eq" +path = "tests/eq.rs" + +[[test]] +name = "hash" +path = "tests/hash.rs" + +[[test]] +name = "hygiene" +path = "tests/hygiene.rs" + +[[test]] +name = "include" +path = "tests/include.rs" + +[[test]] +name = "issue_2117" +path = "tests/issue_2117.rs" + +[[test]] +name = "issue_2835" +path = "tests/issue_2835.rs" + +[[test]] +name = "paths_and_modules" +path = "tests/paths_and_modules.rs" + +[[test]] +name = "priv_in_pub" +path = "tests/priv_in_pub.rs" + +[[test]] +name = "struct_from_bytes" +path = "tests/struct_from_bytes.rs" + +[[test]] +name = "struct_from_zeros" +path = "tests/struct_from_zeros.rs" + +[[test]] +name = "struct_known_layout" +path = "tests/struct_known_layout.rs" + +[[test]] +name = "struct_no_cell" +path = "tests/struct_no_cell.rs" + +[[test]] +name = "struct_to_bytes" +path = "tests/struct_to_bytes.rs" + +[[test]] +name = "struct_try_from_bytes" +path = "tests/struct_try_from_bytes.rs" + +[[test]] +name = "struct_unaligned" +path = "tests/struct_unaligned.rs" + +[[test]] +name = "trybuild" +path = "tests/trybuild.rs" + +[[test]] +name = "union_from_bytes" +path = "tests/union_from_bytes.rs" + +[[test]] +name = "union_from_zeros" +path = "tests/union_from_zeros.rs" + +[[test]] +name = "union_known_layout" +path = "tests/union_known_layout.rs" + +[[test]] +name = "union_no_cell" +path = 
"tests/union_no_cell.rs" + +[[test]] +name = "union_to_bytes" +path = "tests/union_to_bytes.rs" + +[[test]] +name = "union_try_from_bytes" +path = "tests/union_try_from_bytes.rs" + +[[test]] +name = "union_unaligned" +path = "tests/union_unaligned.rs" + +[[test]] +name = "unsafe_cell" +path = "tests/unsafe_cell.rs" + +[dependencies.proc-macro2] +version = "1.0.1" + +[dependencies.quote] +version = "1.0.40" + +[dependencies.syn] +version = "2.0.46" +features = ["full"] + +[dev-dependencies.dissimilar] +version = "1.0.9" + +[dev-dependencies.libc] +version = "=0.2.163" + +[dev-dependencies.once_cell] +version = "=1.9" + +[dev-dependencies.prettyplease] +version = "=0.2.17" + +[dev-dependencies.proc-macro2] +version = "=1.0.80" + +[dev-dependencies.quote] +version = "=1.0.40" + +[dev-dependencies.rustversion] +version = "1.0" + +[dev-dependencies.static_assertions] +version = "1.1" + +[dev-dependencies.trybuild] +version = "=1.0.89" +features = ["diff"] + +[lints.rust.unexpected_cfgs] +level = "warn" +priority = 0 +check-cfg = ["cfg(zerocopy_derive_union_into_bytes)"] diff --git a/vendor/zerocopy-derive/Cargo.toml.orig b/vendor/zerocopy-derive/Cargo.toml.orig new file mode 100644 index 00000000..d85a7459 --- /dev/null +++ b/vendor/zerocopy-derive/Cargo.toml.orig @@ -0,0 +1,58 @@ +# Copyright 2019 The Fuchsia Authors +# +# Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +# <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +# license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +# This file may not be copied, modified, or distributed except according to +# those terms. + +[package] +edition = "2021" +name = "zerocopy-derive" +version = "0.8.31" +authors = ["Joshua Liebow-Feeser <joshlf@google.com>", "Jack Wrenn <jswrenn@amazon.com>"] +description = "Custom derive for traits from the zerocopy crate" +license = "BSD-2-Clause OR Apache-2.0 OR MIT" +repository = "https://github.com/google/zerocopy" + +# We prefer to include tests when publishing to crates.io so that Crater [1] can +# detect regressions in our test suite. These two tests are excessively large, +# so we exclude them to keep the published crate file small. +# +# [1] https://github.com/rust-lang/crater +exclude = [".*", "tests/enum_from_bytes.rs", "tests/ui-nightly/enum_from_bytes_u16_too_few.rs.disabled"] + +[lints.rust] +# See #1792 for more context. +unexpected_cfgs = { level = "warn", check-cfg = ['cfg(zerocopy_derive_union_into_bytes)'] } + +[lib] +proc-macro = true + +[dependencies] +proc-macro2 = "1.0.1" +quote = "1.0.40" +syn = { version = "2.0.46", features = ["full"] } + +[dev-dependencies] +dissimilar = "1.0.9" +# We don't use this directly, but trybuild does. On the MSRV toolchain, the +# version resolver fails to select any version for once_cell unless we +# depend on it directly. +once_cell = "=1.9" +# Same MSRV issue as above. +libc = "=0.2.163" +# This is the latest version which is compatible with `syn` 2.0.46, which we pin +# to in CI for MSRV compatibility reasons. +prettyplease = "=0.2.17" +proc-macro2 = "=1.0.80" +quote = "=1.0.40" +rustversion = "1.0" +static_assertions = "1.1" +testutil = { path = "../testutil" } +# Pinned to a specific version so that the version used for local development +# and the version used in CI are guaranteed to be the same. Future versions +# sometimes change the output format slightly, so a version mismatch can cause +# CI test failures. 
+trybuild = { version = "=1.0.89", features = ["diff"] } +zerocopy = { path = "../", features = ["derive"] } diff --git a/vendor/zerocopy-derive/LICENSE-APACHE b/vendor/zerocopy-derive/LICENSE-APACHE new file mode 100644 index 00000000..2dc22c12 --- /dev/null +++ b/vendor/zerocopy-derive/LICENSE-APACHE @@ -0,0 +1,202 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2023 The Fuchsia Authors + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + diff --git a/vendor/zerocopy-derive/LICENSE-BSD b/vendor/zerocopy-derive/LICENSE-BSD new file mode 100644 index 00000000..7ed244f4 --- /dev/null +++ b/vendor/zerocopy-derive/LICENSE-BSD @@ -0,0 +1,24 @@ +Copyright 2019 The Fuchsia Authors. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/zerocopy-derive/LICENSE-MIT b/vendor/zerocopy-derive/LICENSE-MIT new file mode 100644 index 00000000..26e15216 --- /dev/null +++ b/vendor/zerocopy-derive/LICENSE-MIT @@ -0,0 +1,26 @@ +Copyright 2023 The Fuchsia Authors + +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. + diff --git a/vendor/zerocopy-derive/src/enum.rs b/vendor/zerocopy-derive/src/enum.rs new file mode 100644 index 00000000..3d8a5282 --- /dev/null +++ b/vendor/zerocopy-derive/src/enum.rs @@ -0,0 +1,395 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +use proc_macro2::{Span, TokenStream}; +use quote::quote; +use syn::{parse_quote, DataEnum, Error, Fields, Generics, Ident, Path}; + +use crate::{derive_try_from_bytes_inner, repr::EnumRepr, Trait}; + +/// Generates a tag enum for the given enum. This generates an enum with the +/// same non-align `repr`s, variants, and corresponding discriminants, but none +/// of the fields. +pub(crate) fn generate_tag_enum(repr: &EnumRepr, data: &DataEnum) -> TokenStream { + let variants = data.variants.iter().map(|v| { + let ident = &v.ident; + if let Some((eq, discriminant)) = &v.discriminant { + quote! { #ident #eq #discriminant } + } else { + quote! { #ident } + } + }); + + // Don't include any `repr(align)` when generating the tag enum, as that + // could add padding after the tag but before any variants, which is not the + // correct behavior. + let repr = match repr { + EnumRepr::Transparent(span) => quote::quote_spanned! { *span => #[repr(transparent)] }, + EnumRepr::Compound(c, _) => quote! { #c }, + }; + + quote! { + #repr + #[allow(dead_code, non_camel_case_types)] + enum ___ZerocopyTag { + #(#variants,)* + } + } +} + +fn tag_ident(variant_ident: &Ident) -> Ident { + let variant_ident_str = crate::ext::to_ident_str(variant_ident); + Ident::new(&format!("___ZEROCOPY_TAG_{}", variant_ident_str), variant_ident.span()) +} + +/// Generates a constant for the tag associated with each variant of the enum. +/// When we match on the enum's tag, each arm matches one of these constants. We +/// have to use constants here because: +/// +/// - The type that we're matching on is not the type of the tag, it's an +/// integer of the same size as the tag type and with the same bit patterns. +/// - We can't read the enum tag as an enum because the bytes may not represent +/// a valid variant. +/// - Patterns do not currently support const expressions, so we have to assign +/// these constants to names rather than use them inline in the `match` +/// statement. +fn generate_tag_consts(data: &DataEnum) -> TokenStream { + let tags = data.variants.iter().map(|v| { + let variant_ident = &v.ident; + let tag_ident = tag_ident(variant_ident); + + quote! { + // This casts the enum variant to its discriminant, and then + // converts the discriminant to the target integral type via a + // numeric cast [1]. + // + // Because these are the same size, this is defined to be a no-op + // and therefore is a lossless conversion [2]. + // + // [1] Per https://doc.rust-lang.org/1.81.0/reference/expressions/operator-expr.html#enum-cast: + // + // Casts an enum to its discriminant. 
+ // + // [2] Per https://doc.rust-lang.org/1.81.0/reference/expressions/operator-expr.html#numeric-cast: + // + // Casting between two integers of the same size (e.g. i32 -> u32) + // is a no-op. + #[allow(non_upper_case_globals)] + const #tag_ident: ___ZerocopyTagPrimitive = + ___ZerocopyTag::#variant_ident as ___ZerocopyTagPrimitive; + } + }); + + quote! { + #(#tags)* + } +} + +fn variant_struct_ident(variant_ident: &Ident) -> Ident { + let variant_ident_str = crate::ext::to_ident_str(variant_ident); + Ident::new(&format!("___ZerocopyVariantStruct_{}", variant_ident_str), variant_ident.span()) +} + +/// Generates variant structs for the given enum variant. +/// +/// These are structs associated with each variant of an enum. They are +/// `repr(C)` tuple structs with the same fields as the variant after a +/// `MaybeUninit<___ZerocopyInnerTag>`. +/// +/// In order to unify the generated types for `repr(C)` and `repr(int)` enums, +/// we use a "fused" representation with fields for both an inner tag and an +/// outer tag. Depending on the repr, we will set one of these tags to the tag +/// type and the other to `()`. This lets us generate the same code but put the +/// tags in different locations. +fn generate_variant_structs( + enum_name: &Ident, + generics: &Generics, + data: &DataEnum, + zerocopy_crate: &Path, +) -> TokenStream { + let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); + + // All variant structs have a `PhantomData<MyEnum<...>>` field because we + // don't know which generic parameters each variant will use, and unused + // generic parameters are a compile error. + let phantom_ty = quote! { + core_reexport::marker::PhantomData<#enum_name #ty_generics> + }; + + let variant_structs = data.variants.iter().filter_map(|variant| { + // We don't generate variant structs for unit variants because we only + // need to check the tag. This helps cut down our generated code a bit. + if matches!(variant.fields, Fields::Unit) { + return None; + } + + let variant_struct_ident = variant_struct_ident(&variant.ident); + let field_types = variant.fields.iter().map(|f| &f.ty); + + let variant_struct = parse_quote! { + #[repr(C)] + #[allow(non_snake_case)] + struct #variant_struct_ident #impl_generics ( + core_reexport::mem::MaybeUninit<___ZerocopyInnerTag>, + #(#field_types,)* + #phantom_ty, + ) #where_clause; + }; + + // We do this rather than emitting `#[derive(::zerocopy::TryFromBytes)]` + // because that is not hygienic, and this is also more performant. + let try_from_bytes_impl = + derive_try_from_bytes_inner(&variant_struct, Trait::TryFromBytes, zerocopy_crate) + .expect("derive_try_from_bytes_inner should not fail on synthesized type"); + + Some(quote! { + #variant_struct + #try_from_bytes_impl + }) + }); + + quote! { + #(#variant_structs)* + } +} + +fn generate_variants_union(generics: &Generics, data: &DataEnum) -> TokenStream { + let (_, ty_generics, _) = generics.split_for_impl(); + + let fields = data.variants.iter().filter_map(|variant| { + // We don't generate variant structs for unit variants because we only + // need to check the tag. This helps cut down our generated code a bit. + if matches!(variant.fields, Fields::Unit) { + return None; + } + + // Field names are prefixed with `__field_` to prevent name collision + // with the `__nonempty` field. 
+ let field_name_str = crate::ext::to_ident_str(&variant.ident); + let field_name = Ident::new(&format!("__field_{}", field_name_str), variant.ident.span()); + let variant_struct_ident = variant_struct_ident(&variant.ident); + + Some(quote! { + #field_name: core_reexport::mem::ManuallyDrop< + #variant_struct_ident #ty_generics + >, + }) + }); + + quote! { + #[repr(C)] + #[allow(non_snake_case)] + union ___ZerocopyVariants #generics { + #(#fields)* + // Enums can have variants with no fields, but unions must + // have at least one field. So we just add a trailing unit + // to ensure that this union always has at least one field. + // Because this union is `repr(C)`, this unit type does not + // affect the layout. + __nonempty: (), + } + } +} + +/// Generates an implementation of `is_bit_valid` for an arbitrary enum. +/// +/// The general process is: +/// +/// 1. Generate a tag enum. This is an enum with the same repr, variants, and +/// corresponding discriminants as the original enum, but without any fields +/// on the variants. This gives us access to an enum where the variants have +/// the same discriminants as the one we're writing `is_bit_valid` for. +/// 2. Make constants from the variants of the tag enum. We need these because +/// we can't put const exprs in match arms. +/// 3. Generate variant structs. These are structs which have the same fields as +/// each variant of the enum, and are `#[repr(C)]` with an optional "inner +/// tag". +/// 4. Generate a variants union, with one field for each variant struct type. +/// 5. And finally, our raw enum is a `#[repr(C)]` struct of an "outer tag" and +/// the variants union. +/// +/// See these reference links for fully-worked example decompositions. +/// +/// - `repr(C)`: <https://doc.rust-lang.org/reference/type-layout.html#reprc-enums-with-fields> +/// - `repr(int)`: <https://doc.rust-lang.org/reference/type-layout.html#primitive-representation-of-enums-with-fields> +/// - `repr(C, int)`: <https://doc.rust-lang.org/reference/type-layout.html#combining-primitive-representations-of-enums-with-fields-and-reprc> +pub(crate) fn derive_is_bit_valid( + enum_ident: &Ident, + repr: &EnumRepr, + generics: &Generics, + data: &DataEnum, + zerocopy_crate: &Path, +) -> Result<TokenStream, Error> { + let trait_path = Trait::TryFromBytes.crate_path(zerocopy_crate); + let tag_enum = generate_tag_enum(repr, data); + let tag_consts = generate_tag_consts(data); + + let (outer_tag_type, inner_tag_type) = if repr.is_c() { + (quote! { ___ZerocopyTag }, quote! { () }) + } else if repr.is_primitive() { + (quote! { () }, quote! { ___ZerocopyTag }) + } else { + return Err(Error::new( + Span::call_site(), + "must have #[repr(C)] or #[repr(Int)] attribute in order to guarantee this type's memory layout", + )); + }; + + let variant_structs = generate_variant_structs(enum_ident, generics, data, zerocopy_crate); + let variants_union = generate_variants_union(generics, data); + + let (_, ty_generics, _) = generics.split_for_impl(); + + let match_arms = data.variants.iter().map(|variant| { + let tag_ident = tag_ident(&variant.ident); + let variant_struct_ident = variant_struct_ident(&variant.ident); + + if matches!(variant.fields, Fields::Unit) { + // Unit variants don't need any further validation beyond checking + // the tag. + quote! { + #tag_ident => true + } + } else { + quote! { + #tag_ident => { + // SAFETY: + // - This cast is from a `repr(C)` union which has a field + // of type `variant_struct_ident` to that variant struct + // type itself. 
This addresses a subset of the bytes + // addressed by `variants`. + // - The returned pointer is cast from `p`, and so has the + // same provenance as `p`. + // - We checked that the tag of the enum matched the + // constant for this variant, so this cast preserves + // types and locations of all fields. Therefore, any + // `UnsafeCell`s will have the same location as in the + // original type. + let variant = unsafe { + variants.cast_unsized_unchecked( + |p: #zerocopy_crate::pointer::PtrInner<'_, ___ZerocopyVariants #ty_generics>| { + p.cast_sized::<#variant_struct_ident #ty_generics>() + } + ) + }; + // SAFETY: `cast_unsized_unchecked` removes the + // initialization invariant from `p`, so we re-assert that + // all of the bytes are initialized. + let variant = unsafe { variant.assume_initialized() }; + < + #variant_struct_ident #ty_generics as #trait_path + >::is_bit_valid(variant) + } + } + } + }); + + Ok(quote! { + // SAFETY: We use `is_bit_valid` to validate that the bit pattern of the + // enum's tag corresponds to one of the enum's discriminants. Then, we + // check the bit validity of each field of the corresponding variant. + // Thus, this is a sound implementation of `is_bit_valid`. + fn is_bit_valid<___ZerocopyAliasing>( + mut candidate: #zerocopy_crate::Maybe<'_, Self, ___ZerocopyAliasing>, + ) -> #zerocopy_crate::util::macro_util::core_reexport::primitive::bool + where + ___ZerocopyAliasing: #zerocopy_crate::pointer::invariant::Reference, + { + use #zerocopy_crate::util::macro_util::core_reexport; + + #tag_enum + + type ___ZerocopyTagPrimitive = #zerocopy_crate::util::macro_util::SizeToTag< + { core_reexport::mem::size_of::<___ZerocopyTag>() }, + >; + + #tag_consts + + type ___ZerocopyOuterTag = #outer_tag_type; + type ___ZerocopyInnerTag = #inner_tag_type; + + #variant_structs + + #variants_union + + #[repr(C)] + struct ___ZerocopyRawEnum #generics { + tag: ___ZerocopyOuterTag, + variants: ___ZerocopyVariants #ty_generics, + } + + let tag = { + // SAFETY: + // - The provided cast addresses a subset of the bytes addressed + // by `candidate` because it addresses the starting tag of the + // enum. + // - Because the pointer is cast from `candidate`, it has the + // same provenance as it. + // - There are no `UnsafeCell`s in the tag because it is a + // primitive integer. + let tag_ptr = unsafe { + candidate.reborrow().cast_unsized_unchecked(|p: #zerocopy_crate::pointer::PtrInner<'_, Self>| { + p.cast_sized::<___ZerocopyTagPrimitive>() + }) + }; + // SAFETY: `tag_ptr` is casted from `candidate`, whose referent + // is `Initialized`. Since we have not written uninitialized + // bytes into the referent, `tag_ptr` is also `Initialized`. + let tag_ptr = unsafe { tag_ptr.assume_initialized() }; + tag_ptr.recall_validity::<_, (_, (_, _))>().read_unaligned::<#zerocopy_crate::BecauseImmutable>() + }; + + // SAFETY: + // - The raw enum has the same fields in the same locations as the + // input enum, and may have a lower alignment. This guarantees + // that it addresses a subset of the bytes addressed by + // `candidate`. + // - The returned pointer is cast from `p`, and so has the same + // provenance as `p`. + // - The raw enum has the same types at the same locations as the + // original enum, and so preserves the locations of any + // `UnsafeCell`s. 
+ let raw_enum = unsafe { + candidate.cast_unsized_unchecked(|p: #zerocopy_crate::pointer::PtrInner<'_, Self>| { + p.cast_sized::<___ZerocopyRawEnum #ty_generics>() + }) + }; + // SAFETY: `cast_unsized_unchecked` removes the initialization + // invariant from `p`, so we re-assert that all of the bytes are + // initialized. + let raw_enum = unsafe { raw_enum.assume_initialized() }; + // SAFETY: + // - This projection returns a subfield of `this` using + // `addr_of_mut!`. + // - Because the subfield pointer is derived from `this`, it has the + // same provenance. + // - The locations of `UnsafeCell`s in the subfield match the + // locations of `UnsafeCell`s in `this`. This is because the + // subfield pointer just points to a smaller portion of the + // overall struct. + let variants = unsafe { + use #zerocopy_crate::pointer::PtrInner; + raw_enum.cast_unsized_unchecked(|p: PtrInner<'_, ___ZerocopyRawEnum #ty_generics>| { + let p = p.as_non_null().as_ptr(); + let ptr = core_reexport::ptr::addr_of_mut!((*p).variants); + // SAFETY: `ptr` is a projection into `p`, which is + // `NonNull`, and guaranteed not to wrap around the address + // space. Thus, `ptr` cannot be null. + let ptr = unsafe { core_reexport::ptr::NonNull::new_unchecked(ptr) }; + unsafe { PtrInner::new(ptr) } + }) + }; + + #[allow(non_upper_case_globals)] + match tag { + #(#match_arms,)* + _ => false, + } + } + }) +} diff --git a/vendor/zerocopy-derive/src/ext.rs b/vendor/zerocopy-derive/src/ext.rs new file mode 100644 index 00000000..72811299 --- /dev/null +++ b/vendor/zerocopy-derive/src/ext.rs @@ -0,0 +1,123 @@ +// Copyright 2019 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +use proc_macro2::{Span, TokenStream}; +use quote::ToTokens; +use syn::{Data, DataEnum, DataStruct, DataUnion, Field, Ident, Index, Type, Visibility}; + +pub(crate) trait DataExt { + /// Extracts the names and types of all fields. For enums, extracts the + /// names and types of fields from each variant. For tuple structs, the + /// names are the indices used to index into the struct (ie, `0`, `1`, etc). + /// + /// FIXME: Extracting field names for enums doesn't really make sense. Types + /// makes sense because we don't care about where they live - we just care + /// about transitive ownership. But for field names, we'd only use them when + /// generating is_bit_valid, which cares about where they live. 
+ fn fields(&self) -> Vec<(&Visibility, TokenStream, &Type)>; + + fn variants(&self) -> Vec<Vec<(&Visibility, TokenStream, &Type)>>; + + fn tag(&self) -> Option<Ident>; +} + +impl DataExt for Data { + fn fields(&self) -> Vec<(&Visibility, TokenStream, &Type)> { + match self { + Data::Struct(strc) => strc.fields(), + Data::Enum(enm) => enm.fields(), + Data::Union(un) => un.fields(), + } + } + + fn variants(&self) -> Vec<Vec<(&Visibility, TokenStream, &Type)>> { + match self { + Data::Struct(strc) => strc.variants(), + Data::Enum(enm) => enm.variants(), + Data::Union(un) => un.variants(), + } + } + + fn tag(&self) -> Option<Ident> { + match self { + Data::Struct(strc) => strc.tag(), + Data::Enum(enm) => enm.tag(), + Data::Union(un) => un.tag(), + } + } +} + +impl DataExt for DataStruct { + fn fields(&self) -> Vec<(&Visibility, TokenStream, &Type)> { + map_fields(&self.fields) + } + + fn variants(&self) -> Vec<Vec<(&Visibility, TokenStream, &Type)>> { + vec![self.fields()] + } + + fn tag(&self) -> Option<Ident> { + None + } +} + +impl DataExt for DataEnum { + fn fields(&self) -> Vec<(&Visibility, TokenStream, &Type)> { + map_fields(self.variants.iter().flat_map(|var| &var.fields)) + } + + fn variants(&self) -> Vec<Vec<(&Visibility, TokenStream, &Type)>> { + self.variants.iter().map(|var| map_fields(&var.fields)).collect() + } + + fn tag(&self) -> Option<Ident> { + Some(Ident::new("___ZerocopyTag", Span::call_site())) + } +} + +impl DataExt for DataUnion { + fn fields(&self) -> Vec<(&Visibility, TokenStream, &Type)> { + map_fields(&self.fields.named) + } + + fn variants(&self) -> Vec<Vec<(&Visibility, TokenStream, &Type)>> { + vec![self.fields()] + } + + fn tag(&self) -> Option<Ident> { + None + } +} + +fn map_fields<'a>( + fields: impl 'a + IntoIterator<Item = &'a Field>, +) -> Vec<(&'a Visibility, TokenStream, &'a Type)> { + fields + .into_iter() + .enumerate() + .map(|(idx, f)| { + ( + &f.vis, + f.ident + .as_ref() + .map(ToTokens::to_token_stream) + .unwrap_or_else(|| Index::from(idx).to_token_stream()), + &f.ty, + ) + }) + .collect() +} + +pub(crate) fn to_ident_str(t: &impl ToString) -> String { + let s = t.to_string(); + if let Some(stripped) = s.strip_prefix("r#") { + stripped.to_string() + } else { + s + } +} diff --git a/vendor/zerocopy-derive/src/lib.rs b/vendor/zerocopy-derive/src/lib.rs new file mode 100644 index 00000000..27daac61 --- /dev/null +++ b/vendor/zerocopy-derive/src/lib.rs @@ -0,0 +1,1894 @@ +// Copyright 2019 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +//! Derive macros for [zerocopy]'s traits. +//! +//! [zerocopy]: https://docs.rs/zerocopy + +// Sometimes we want to use lints which were added after our MSRV. +// `unknown_lints` is `warn` by default and we deny warnings in CI, so without +// this attribute, any unknown lint would cause a CI failure when testing with +// our MSRV. +#![allow(unknown_lints)] +#![deny(renamed_and_removed_lints)] +#![deny( + clippy::all, + clippy::missing_safety_doc, + clippy::multiple_unsafe_ops_per_block, + clippy::undocumented_unsafe_blocks +)] +// Inlining format args isn't supported on our MSRV. 
+#![allow(clippy::uninlined_format_args)] +#![deny( + rustdoc::bare_urls, + rustdoc::broken_intra_doc_links, + rustdoc::invalid_codeblock_attributes, + rustdoc::invalid_html_tags, + rustdoc::invalid_rust_codeblocks, + rustdoc::missing_crate_level_docs, + rustdoc::private_intra_doc_links +)] +#![recursion_limit = "128"] + +mod r#enum; +mod ext; +#[cfg(test)] +mod output_tests; +mod repr; + +use proc_macro2::{Span, TokenStream, TokenTree}; +use quote::{quote, ToTokens}; +use syn::{ + parse_quote, Attribute, Data, DataEnum, DataStruct, DataUnion, DeriveInput, Error, Expr, + ExprLit, ExprUnary, GenericParam, Ident, Lit, Meta, Path, Type, UnOp, WherePredicate, +}; + +use crate::{ext::*, repr::*}; + +// FIXME(https://github.com/rust-lang/rust/issues/54140): Some errors could be +// made better if we could add multiple lines of error output like this: +// +// error: unsupported representation +// --> enum.rs:28:8 +// | +// 28 | #[repr(transparent)] +// | +// help: required by the derive of FromBytes +// +// Instead, we have more verbose error messages like "unsupported representation +// for deriving FromZeros, FromBytes, IntoBytes, or Unaligned on an enum" +// +// This will probably require Span::error +// (https://doc.rust-lang.org/nightly/proc_macro/struct.Span.html#method.error), +// which is currently unstable. Revisit this once it's stable. + +/// Defines a derive function named `$outer` which parses its input +/// `TokenStream` as a `DeriveInput` and then invokes the `$inner` function. +/// +/// Note that the separate `$outer` parameter is required - proc macro functions +/// are currently required to live at the crate root, and so the caller must +/// specify the name in order to avoid name collisions. +macro_rules! derive { + ($trait:ident => $outer:ident => $inner:ident) => { + #[proc_macro_derive($trait, attributes(zerocopy))] + pub fn $outer(ts: proc_macro::TokenStream) -> proc_macro::TokenStream { + let ast = syn::parse_macro_input!(ts as DeriveInput); + let zerocopy_crate = match extract_zerocopy_crate(&ast.attrs) { + Ok(zerocopy_crate) => zerocopy_crate, + Err(e) => return e.into_compile_error().into(), + }; + $inner(&ast, Trait::$trait, &zerocopy_crate).into_ts().into() + } + }; +} + +trait IntoTokenStream { + fn into_ts(self) -> TokenStream; +} + +impl IntoTokenStream for TokenStream { + fn into_ts(self) -> TokenStream { + self + } +} + +impl IntoTokenStream for Result<TokenStream, Error> { + fn into_ts(self) -> TokenStream { + match self { + Ok(ts) => ts, + Err(err) => err.to_compile_error(), + } + } +} + +/// Attempt to extract a crate path from the provided attributes. Defaults to +/// `::zerocopy` if not found. +fn extract_zerocopy_crate(attrs: &[Attribute]) -> Result<Path, Error> { + let mut path = parse_quote!(::zerocopy); + + for attr in attrs { + if let Meta::List(ref meta_list) = attr.meta { + if meta_list.path.is_ident("zerocopy") { + attr.parse_nested_meta(|meta| { + if meta.path.is_ident("crate") { + let expr = meta.value().and_then(|value| value.parse()); + if let Ok(Expr::Lit(ExprLit { lit: Lit::Str(lit), .. 
})) = expr { + if let Ok(path_lit) = lit.parse() { + path = path_lit; + return Ok(()); + } + } + + return Err(Error::new( + Span::call_site(), + "`crate` attribute requires a path as the value", + )); + } + + Err(Error::new( + Span::call_site(), + format!("unknown attribute encountered: {}", meta.path.into_token_stream()), + )) + })?; + } + } + } + + Ok(path) +} + +derive!(KnownLayout => derive_known_layout => derive_known_layout_inner); +derive!(Immutable => derive_no_cell => derive_no_cell_inner); +derive!(TryFromBytes => derive_try_from_bytes => derive_try_from_bytes_inner); +derive!(FromZeros => derive_from_zeros => derive_from_zeros_inner); +derive!(FromBytes => derive_from_bytes => derive_from_bytes_inner); +derive!(IntoBytes => derive_into_bytes => derive_into_bytes_inner); +derive!(Unaligned => derive_unaligned => derive_unaligned_inner); +derive!(ByteHash => derive_hash => derive_hash_inner); +derive!(ByteEq => derive_eq => derive_eq_inner); +derive!(SplitAt => derive_split_at => derive_split_at_inner); + +/// Deprecated: prefer [`FromZeros`] instead. +#[deprecated(since = "0.8.0", note = "`FromZeroes` was renamed to `FromZeros`")] +#[doc(hidden)] +#[proc_macro_derive(FromZeroes)] +pub fn derive_from_zeroes(ts: proc_macro::TokenStream) -> proc_macro::TokenStream { + derive_from_zeros(ts) +} + +/// Deprecated: prefer [`IntoBytes`] instead. +#[deprecated(since = "0.8.0", note = "`AsBytes` was renamed to `IntoBytes`")] +#[doc(hidden)] +#[proc_macro_derive(AsBytes)] +pub fn derive_as_bytes(ts: proc_macro::TokenStream) -> proc_macro::TokenStream { + derive_into_bytes(ts) +} + +fn derive_known_layout_inner( + ast: &DeriveInput, + _top_level: Trait, + zerocopy_crate: &Path, +) -> Result<TokenStream, Error> { + let is_repr_c_struct = match &ast.data { + Data::Struct(..) => { + let repr = StructUnionRepr::from_attrs(&ast.attrs)?; + if repr.is_c() { + Some(repr) + } else { + None + } + } + Data::Enum(..) | Data::Union(..) => None, + }; + + let fields = ast.data.fields(); + + let (self_bounds, inner_extras, outer_extras) = if let ( + Some(repr), + Some((trailing_field, leading_fields)), + ) = (is_repr_c_struct, fields.split_last()) + { + let (_vis, trailing_field_name, trailing_field_ty) = trailing_field; + let leading_fields_tys = leading_fields.iter().map(|(_vis, _name, ty)| ty); + + let core_path = quote!(#zerocopy_crate::util::macro_util::core_reexport); + let repr_align = repr + .get_align() + .map(|align| { + let align = align.t.get(); + quote!(#core_path::num::NonZeroUsize::new(#align as usize)) + }) + .unwrap_or_else(|| quote!(#core_path::option::Option::None)); + let repr_packed = repr + .get_packed() + .map(|packed| { + let packed = packed.get(); + quote!(#core_path::num::NonZeroUsize::new(#packed as usize)) + }) + .unwrap_or_else(|| quote!(#core_path::option::Option::None)); + + let make_methods = |trailing_field_ty| { + quote! { + // SAFETY: + // - The returned pointer has the same address and provenance as + // `bytes`: + // - The recursive call to `raw_from_ptr_len` preserves both + // address and provenance. + // - The `as` cast preserves both address and provenance. + // - `NonNull::new_unchecked` preserves both address and + // provenance. + // - If `Self` is a slice DST, the returned pointer encodes + // `elems` elements in the trailing slice: + // - This is true of the recursive call to `raw_from_ptr_len`. + // - `trailing.as_ptr() as *mut Self` preserves trailing slice + // element count [1]. + // - `NonNull::new_unchecked` preserves trailing slice element + // count. 
+ // + // [1] Per https://doc.rust-lang.org/reference/expressions/operator-expr.html#pointer-to-pointer-cast: + // + // `*const T`` / `*mut T` can be cast to `*const U` / `*mut U` + // with the following behavior: + // ... + // - If `T` and `U` are both unsized, the pointer is also + // returned unchanged. In particular, the metadata is + // preserved exactly. + // + // For instance, a cast from `*const [T]` to `*const [U]` + // preserves the number of elements. ... The same holds + // for str and any compound type whose unsized tail is a + // slice type, such as struct `Foo(i32, [u8])` or + // `(u64, Foo)`. + #[inline(always)] + fn raw_from_ptr_len( + bytes: #zerocopy_crate::util::macro_util::core_reexport::ptr::NonNull<u8>, + meta: Self::PointerMetadata, + ) -> #zerocopy_crate::util::macro_util::core_reexport::ptr::NonNull<Self> { + use #zerocopy_crate::KnownLayout; + let trailing = <#trailing_field_ty as KnownLayout>::raw_from_ptr_len(bytes, meta); + let slf = trailing.as_ptr() as *mut Self; + // SAFETY: Constructed from `trailing`, which is non-null. + unsafe { #zerocopy_crate::util::macro_util::core_reexport::ptr::NonNull::new_unchecked(slf) } + } + + #[inline(always)] + fn pointer_to_metadata(ptr: *mut Self) -> Self::PointerMetadata { + <#trailing_field_ty>::pointer_to_metadata(ptr as *mut _) + } + } + }; + + let inner_extras = { + let leading_fields_tys = leading_fields_tys.clone(); + let methods = make_methods(*trailing_field_ty); + let (_, ty_generics, _) = ast.generics.split_for_impl(); + + quote!( + type PointerMetadata = <#trailing_field_ty as #zerocopy_crate::KnownLayout>::PointerMetadata; + + type MaybeUninit = __ZerocopyKnownLayoutMaybeUninit #ty_generics; + + // SAFETY: `LAYOUT` accurately describes the layout of `Self`. + // The documentation of `DstLayout::for_repr_c_struct` vows that + // invocations in this manner will accurately describe a type, + // so long as: + // + // - that type is `repr(C)`, + // - its fields are enumerated in the order they appear, + // - the presence of `repr_align` and `repr_packed` are + // correctly accounted for. + // + // We respect all three of these preconditions here. This + // expansion is only used if `is_repr_c_struct`, we enumerate + // the fields in order, and we extract the values of `align(N)` + // and `packed(N)`. + const LAYOUT: #zerocopy_crate::DstLayout = { + use #zerocopy_crate::util::macro_util::core_reexport::num::NonZeroUsize; + use #zerocopy_crate::{DstLayout, KnownLayout}; + + DstLayout::for_repr_c_struct( + #repr_align, + #repr_packed, + &[ + #(DstLayout::for_type::<#leading_fields_tys>(),)* + <#trailing_field_ty as KnownLayout>::LAYOUT + ], + ) + }; + + #methods + ) + }; + + let outer_extras = { + let ident = &ast.ident; + let vis = &ast.vis; + let params = &ast.generics.params; + let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl(); + + let predicates = if let Some(where_clause) = where_clause { + where_clause.predicates.clone() + } else { + Default::default() + }; + + // Generate a valid ident for a type-level handle to a field of a + // given `name`. + let field_index = |name: &TokenStream| { + let name = to_ident_str(name); + Ident::new(&format!("__Zerocopy_Field_{}", name), ident.span()) + }; + + let field_indices: Vec<_> = + fields.iter().map(|(_vis, name, _ty)| field_index(name)).collect(); + + // Define the collection of type-level field handles. + let field_defs = field_indices.iter().zip(&fields).map(|(idx, (vis, _, _))| { + quote! 
{ + #[allow(non_camel_case_types)] + #vis struct #idx; + } + }); + + let field_impls = field_indices.iter().zip(&fields).map(|(idx, (_, _, ty))| quote! { + // SAFETY: `#ty` is the type of `#ident`'s field at `#idx`. + #[allow(deprecated)] + unsafe impl #impl_generics #zerocopy_crate::util::macro_util::Field<#idx> for #ident #ty_generics + where + #predicates + { + type Type = #ty; + } + }); + + let trailing_field_index = field_index(trailing_field_name); + let leading_field_indices = + leading_fields.iter().map(|(_vis, name, _ty)| field_index(name)); + + let trailing_field_ty = quote! { + <#ident #ty_generics as + #zerocopy_crate::util::macro_util::Field<#trailing_field_index> + >::Type + }; + + let methods = make_methods(&parse_quote! { + <#trailing_field_ty as #zerocopy_crate::KnownLayout>::MaybeUninit + }); + + quote! { + #(#field_defs)* + + #(#field_impls)* + + // SAFETY: This has the same layout as the derive target type, + // except that it admits uninit bytes. This is ensured by using + // the same repr as the target type, and by using field types + // which have the same layout as the target type's fields, + // except that they admit uninit bytes. We indirect through + // `Field` to ensure that occurrences of `Self` resolve to + // `#ty`, not `__ZerocopyKnownLayoutMaybeUninit` (see #2116). + #repr + #[doc(hidden)] + // Required on some rustc versions due to a lint that is only + // triggered when `derive(KnownLayout)` is applied to `repr(C)` + // structs that are generated by macros. See #2177 for details. + #[allow(private_bounds)] + #[allow(deprecated)] + #vis struct __ZerocopyKnownLayoutMaybeUninit<#params> ( + #(#zerocopy_crate::util::macro_util::core_reexport::mem::MaybeUninit< + <#ident #ty_generics as + #zerocopy_crate::util::macro_util::Field<#leading_field_indices> + >::Type + >,)* + // NOTE(#2302): We wrap in `ManuallyDrop` here in case the + // type we're operating on is both generic and + // `repr(packed)`. In that case, Rust needs to know that the + // type is *either* `Sized` or has a trivial `Drop`. + // `ManuallyDrop` has a trivial `Drop`, and so satisfies + // this requirement. + #zerocopy_crate::util::macro_util::core_reexport::mem::ManuallyDrop< + <#trailing_field_ty as #zerocopy_crate::KnownLayout>::MaybeUninit + > + ) + where + #trailing_field_ty: #zerocopy_crate::KnownLayout, + #predicates; + + // SAFETY: We largely defer to the `KnownLayout` implementation + // on the derive target type (both by using the same tokens, and + // by deferring to impl via type-level indirection). This is + // sound, since `__ZerocopyKnownLayoutMaybeUninit` is guaranteed + // to have the same layout as the derive target type, except + // that `__ZerocopyKnownLayoutMaybeUninit` admits uninit bytes. 
+ #[allow(deprecated)] + unsafe impl #impl_generics #zerocopy_crate::KnownLayout for __ZerocopyKnownLayoutMaybeUninit #ty_generics + where + #trailing_field_ty: #zerocopy_crate::KnownLayout, + #predicates + { + #[allow(clippy::missing_inline_in_public_items)] + fn only_derive_is_allowed_to_implement_this_trait() {} + + type PointerMetadata = <#ident #ty_generics as #zerocopy_crate::KnownLayout>::PointerMetadata; + + type MaybeUninit = Self; + + const LAYOUT: #zerocopy_crate::DstLayout = <#ident #ty_generics as #zerocopy_crate::KnownLayout>::LAYOUT; + + #methods + } + } + }; + + (SelfBounds::None, inner_extras, Some(outer_extras)) + } else { + // For enums, unions, and non-`repr(C)` structs, we require that + // `Self` is sized, and as a result don't need to reason about the + // internals of the type. + ( + SelfBounds::SIZED, + quote!( + type PointerMetadata = (); + type MaybeUninit = + #zerocopy_crate::util::macro_util::core_reexport::mem::MaybeUninit<Self>; + + // SAFETY: `LAYOUT` is guaranteed to accurately describe the + // layout of `Self`, because that is the documented safety + // contract of `DstLayout::for_type`. + const LAYOUT: #zerocopy_crate::DstLayout = #zerocopy_crate::DstLayout::for_type::<Self>(); + + // SAFETY: `.cast` preserves address and provenance. + // + // FIXME(#429): Add documentation to `.cast` that promises that + // it preserves provenance. + #[inline(always)] + fn raw_from_ptr_len( + bytes: #zerocopy_crate::util::macro_util::core_reexport::ptr::NonNull<u8>, + _meta: (), + ) -> #zerocopy_crate::util::macro_util::core_reexport::ptr::NonNull<Self> + { + bytes.cast::<Self>() + } + + #[inline(always)] + fn pointer_to_metadata(_ptr: *mut Self) -> () {} + ), + None, + ) + }; + + Ok(match &ast.data { + Data::Struct(strct) => { + let require_trait_bound_on_field_types = if self_bounds == SelfBounds::SIZED { + FieldBounds::None + } else { + FieldBounds::TRAILING_SELF + }; + + // A bound on the trailing field is required, since structs are + // unsized if their trailing field is unsized. Reflecting the layout + // of an usized trailing field requires that the field is + // `KnownLayout`. + ImplBlockBuilder::new( + ast, + strct, + Trait::KnownLayout, + require_trait_bound_on_field_types, + zerocopy_crate, + ) + .self_type_trait_bounds(self_bounds) + .inner_extras(inner_extras) + .outer_extras(outer_extras) + .build() + } + Data::Enum(enm) => { + // A bound on the trailing field is not required, since enums cannot + // currently be unsized. + ImplBlockBuilder::new(ast, enm, Trait::KnownLayout, FieldBounds::None, zerocopy_crate) + .self_type_trait_bounds(SelfBounds::SIZED) + .inner_extras(inner_extras) + .outer_extras(outer_extras) + .build() + } + Data::Union(unn) => { + // A bound on the trailing field is not required, since unions + // cannot currently be unsized. 
+ ImplBlockBuilder::new(ast, unn, Trait::KnownLayout, FieldBounds::None, zerocopy_crate) + .self_type_trait_bounds(SelfBounds::SIZED) + .inner_extras(inner_extras) + .outer_extras(outer_extras) + .build() + } + }) +} + +fn derive_no_cell_inner( + ast: &DeriveInput, + _top_level: Trait, + zerocopy_crate: &Path, +) -> TokenStream { + match &ast.data { + Data::Struct(strct) => ImplBlockBuilder::new( + ast, + strct, + Trait::Immutable, + FieldBounds::ALL_SELF, + zerocopy_crate, + ) + .build(), + Data::Enum(enm) => { + ImplBlockBuilder::new(ast, enm, Trait::Immutable, FieldBounds::ALL_SELF, zerocopy_crate) + .build() + } + Data::Union(unn) => { + ImplBlockBuilder::new(ast, unn, Trait::Immutable, FieldBounds::ALL_SELF, zerocopy_crate) + .build() + } + } +} + +fn derive_try_from_bytes_inner( + ast: &DeriveInput, + top_level: Trait, + zerocopy_crate: &Path, +) -> Result<TokenStream, Error> { + match &ast.data { + Data::Struct(strct) => derive_try_from_bytes_struct(ast, strct, top_level, zerocopy_crate), + Data::Enum(enm) => derive_try_from_bytes_enum(ast, enm, top_level, zerocopy_crate), + Data::Union(unn) => Ok(derive_try_from_bytes_union(ast, unn, top_level, zerocopy_crate)), + } +} + +fn derive_from_zeros_inner( + ast: &DeriveInput, + top_level: Trait, + zerocopy_crate: &Path, +) -> Result<TokenStream, Error> { + let try_from_bytes = derive_try_from_bytes_inner(ast, top_level, zerocopy_crate)?; + let from_zeros = match &ast.data { + Data::Struct(strct) => derive_from_zeros_struct(ast, strct, zerocopy_crate), + Data::Enum(enm) => derive_from_zeros_enum(ast, enm, zerocopy_crate)?, + Data::Union(unn) => derive_from_zeros_union(ast, unn, zerocopy_crate), + }; + Ok(IntoIterator::into_iter([try_from_bytes, from_zeros]).collect()) +} + +fn derive_from_bytes_inner( + ast: &DeriveInput, + top_level: Trait, + zerocopy_crate: &Path, +) -> Result<TokenStream, Error> { + let from_zeros = derive_from_zeros_inner(ast, top_level, zerocopy_crate)?; + let from_bytes = match &ast.data { + Data::Struct(strct) => derive_from_bytes_struct(ast, strct, zerocopy_crate), + Data::Enum(enm) => derive_from_bytes_enum(ast, enm, zerocopy_crate)?, + Data::Union(unn) => derive_from_bytes_union(ast, unn, zerocopy_crate), + }; + + Ok(IntoIterator::into_iter([from_zeros, from_bytes]).collect()) +} + +fn derive_into_bytes_inner( + ast: &DeriveInput, + _top_level: Trait, + zerocopy_crate: &Path, +) -> Result<TokenStream, Error> { + match &ast.data { + Data::Struct(strct) => derive_into_bytes_struct(ast, strct, zerocopy_crate), + Data::Enum(enm) => derive_into_bytes_enum(ast, enm, zerocopy_crate), + Data::Union(unn) => derive_into_bytes_union(ast, unn, zerocopy_crate), + } +} + +fn derive_unaligned_inner( + ast: &DeriveInput, + _top_level: Trait, + zerocopy_crate: &Path, +) -> Result<TokenStream, Error> { + match &ast.data { + Data::Struct(strct) => derive_unaligned_struct(ast, strct, zerocopy_crate), + Data::Enum(enm) => derive_unaligned_enum(ast, enm, zerocopy_crate), + Data::Union(unn) => derive_unaligned_union(ast, unn, zerocopy_crate), + } +} + +fn derive_hash_inner( + ast: &DeriveInput, + _top_level: Trait, + zerocopy_crate: &Path, +) -> Result<TokenStream, Error> { + // This doesn't delegate to `impl_block` because `impl_block` assumes it is + // deriving a `zerocopy`-defined trait, and these trait impls share a common + // shape that `Hash` does not. 
In particular, `zerocopy` traits contain a + // method that only `zerocopy_derive` macros are supposed to implement, and + // `impl_block` generating this trait method is incompatible with `Hash`. + let type_ident = &ast.ident; + let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl(); + let where_predicates = where_clause.map(|clause| &clause.predicates); + Ok(quote! { + #[allow(deprecated)] + // While there are not currently any warnings that this suppresses (that + // we're aware of), it's good future-proofing hygiene. + #[automatically_derived] + impl #impl_generics #zerocopy_crate::util::macro_util::core_reexport::hash::Hash for #type_ident #ty_generics + where + Self: #zerocopy_crate::IntoBytes + #zerocopy_crate::Immutable, + #where_predicates + { + fn hash<H>(&self, state: &mut H) + where + H: #zerocopy_crate::util::macro_util::core_reexport::hash::Hasher, + { + #zerocopy_crate::util::macro_util::core_reexport::hash::Hasher::write( + state, + #zerocopy_crate::IntoBytes::as_bytes(self) + ) + } + + fn hash_slice<H>(data: &[Self], state: &mut H) + where + H: #zerocopy_crate::util::macro_util::core_reexport::hash::Hasher, + { + #zerocopy_crate::util::macro_util::core_reexport::hash::Hasher::write( + state, + #zerocopy_crate::IntoBytes::as_bytes(data) + ) + } + } + }) +} + +fn derive_eq_inner( + ast: &DeriveInput, + _top_level: Trait, + zerocopy_crate: &Path, +) -> Result<TokenStream, Error> { + // This doesn't delegate to `impl_block` because `impl_block` assumes it is + // deriving a `zerocopy`-defined trait, and these trait impls share a common + // shape that `Eq` does not. In particular, `zerocopy` traits contain a + // method that only `zerocopy_derive` macros are supposed to implement, and + // `impl_block` generating this trait method is incompatible with `Eq`. + let type_ident = &ast.ident; + let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl(); + let where_predicates = where_clause.map(|clause| &clause.predicates); + Ok(quote! { + // FIXME(#553): Add a test that generates a warning when + // `#[allow(deprecated)]` isn't present. + #[allow(deprecated)] + // While there are not currently any warnings that this suppresses (that + // we're aware of), it's good future-proofing hygiene. + #[automatically_derived] + impl #impl_generics #zerocopy_crate::util::macro_util::core_reexport::cmp::PartialEq for #type_ident #ty_generics + where + Self: #zerocopy_crate::IntoBytes + #zerocopy_crate::Immutable, + #where_predicates + { + fn eq(&self, other: &Self) -> bool { + #zerocopy_crate::util::macro_util::core_reexport::cmp::PartialEq::eq( + #zerocopy_crate::IntoBytes::as_bytes(self), + #zerocopy_crate::IntoBytes::as_bytes(other), + ) + } + } + + // FIXME(#553): Add a test that generates a warning when + // `#[allow(deprecated)]` isn't present. + #[allow(deprecated)] + // While there are not currently any warnings that this suppresses (that + // we're aware of), it's good future-proofing hygiene. 
+ #[automatically_derived] + impl #impl_generics #zerocopy_crate::util::macro_util::core_reexport::cmp::Eq for #type_ident #ty_generics + where + Self: #zerocopy_crate::IntoBytes + #zerocopy_crate::Immutable, + #where_predicates + { + } + }) +} + +fn derive_split_at_inner( + ast: &DeriveInput, + _top_level: Trait, + zerocopy_crate: &Path, +) -> Result<TokenStream, Error> { + let repr = StructUnionRepr::from_attrs(&ast.attrs)?; + + match &ast.data { + Data::Struct(_) => {} + Data::Enum(_) | Data::Union(_) => { + return Err(Error::new(Span::call_site(), "can only be applied to structs")); + } + }; + + if repr.get_packed().is_some() { + return Err(Error::new(Span::call_site(), "must not have #[repr(packed)] attribute")); + } + + if !(repr.is_c() || repr.is_transparent()) { + return Err(Error::new(Span::call_site(), "must have #[repr(C)] or #[repr(transparent)] in order to guarantee this type's layout is splitable")); + } + + let fields = ast.data.fields(); + let trailing_field = if let Some(((_, _, trailing_field), _)) = fields.split_last() { + trailing_field + } else { + return Err(Error::new(Span::call_site(), "must at least one field")); + }; + + // SAFETY: `#ty`, per the above checks, is `repr(C)` or `repr(transparent)` + // and is not packed; its trailing field is guaranteed to be well-aligned + // for its type. By invariant on `FieldBounds::TRAILING_SELF`, the trailing + // slice of the trailing field is also well-aligned for its type. + Ok(ImplBlockBuilder::new( + ast, + &ast.data, + Trait::SplitAt, + FieldBounds::TRAILING_SELF, + zerocopy_crate, + ) + .inner_extras(quote! { + type Elem = <#trailing_field as ::zerocopy::SplitAt>::Elem; + }) + .build()) +} + +/// A struct is `TryFromBytes` if: +/// - all fields are `TryFromBytes` +fn derive_try_from_bytes_struct( + ast: &DeriveInput, + strct: &DataStruct, + top_level: Trait, + zerocopy_crate: &Path, +) -> Result<TokenStream, Error> { + let extras = + try_gen_trivial_is_bit_valid(ast, top_level, zerocopy_crate).unwrap_or_else(|| { + let fields = strct.fields(); + let field_names = fields.iter().map(|(_vis, name, _ty)| name); + let field_tys = fields.iter().map(|(_vis, _name, ty)| ty); + quote!( + // SAFETY: We use `is_bit_valid` to validate that each field is + // bit-valid, and only return `true` if all of them are. The bit + // validity of a struct is just the composition of the bit + // validities of its fields, so this is a sound implementation + // of `is_bit_valid`. 
+ fn is_bit_valid<___ZerocopyAliasing>( + mut candidate: #zerocopy_crate::Maybe<Self, ___ZerocopyAliasing>, + ) -> #zerocopy_crate::util::macro_util::core_reexport::primitive::bool + where + ___ZerocopyAliasing: #zerocopy_crate::pointer::invariant::Reference, + { + use #zerocopy_crate::util::macro_util::core_reexport; + use #zerocopy_crate::pointer::PtrInner; + + true #(&& { + // SAFETY: + // - `project` is a field projection, and so it + // addresses a subset of the bytes addressed by `slf` + // - ..., and so it preserves provenance + // - ..., and `*slf` is a struct, so `UnsafeCell`s exist + // at the same byte ranges in the returned pointer's + // referent as they do in `*slf` + let field_candidate = unsafe { + let project = |slf: PtrInner<'_, Self>| { + let slf = slf.as_non_null().as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).#field_names); + // SAFETY: `cast_unsized_unchecked` promises + // that `slf` will either reference a zero-sized + // byte range, or else will reference a byte + // range that is entirely contained within an + // allocated object. In either case, this + // guarantees that field projection will not + // wrap around the address space, and so `field` + // will be non-null. + let ptr = unsafe { core_reexport::ptr::NonNull::new_unchecked(field) }; + // SAFETY: + // 0. `ptr` addresses a subset of the bytes of + // `slf`, so by invariant on `slf: PtrInner`, + // if `ptr`'s referent is not zero sized, + // then `ptr` has valid provenance for its + // referent, which is entirely contained in + // some Rust allocation, `A`. + // 1. By invariant on `slf: PtrInner`, if + // `ptr`'s referent is not zero sized, `A` is + // guaranteed to live for at least `'a`. + unsafe { PtrInner::new(ptr) } + }; + + candidate.reborrow().cast_unsized_unchecked(project) + }; + + <#field_tys as #zerocopy_crate::TryFromBytes>::is_bit_valid(field_candidate) + })* + } + ) + }); + Ok(ImplBlockBuilder::new( + ast, + strct, + Trait::TryFromBytes, + FieldBounds::ALL_SELF, + zerocopy_crate, + ) + .inner_extras(extras) + .build()) +} + +/// A union is `TryFromBytes` if: +/// - all of its fields are `TryFromBytes` and `Immutable` +fn derive_try_from_bytes_union( + ast: &DeriveInput, + unn: &DataUnion, + top_level: Trait, + zerocopy_crate: &Path, +) -> TokenStream { + // FIXME(#5): Remove the `Immutable` bound. + let field_type_trait_bounds = + FieldBounds::All(&[TraitBound::Slf, TraitBound::Other(Trait::Immutable)]); + let extras = + try_gen_trivial_is_bit_valid(ast, top_level, zerocopy_crate).unwrap_or_else(|| { + let fields = unn.fields(); + let field_names = fields.iter().map(|(_vis, name, _ty)| name); + let field_tys = fields.iter().map(|(_vis, _name, ty)| ty); + quote!( + // SAFETY: We use `is_bit_valid` to validate that any field is + // bit-valid; we only return `true` if at least one of them is. + // The bit validity of a union is not yet well defined in Rust, + // but it is guaranteed to be no more strict than this + // definition. See #696 for a more in-depth discussion. 
+ fn is_bit_valid<___ZerocopyAliasing>( + mut candidate: #zerocopy_crate::Maybe<'_, Self,___ZerocopyAliasing> + ) -> #zerocopy_crate::util::macro_util::core_reexport::primitive::bool + where + ___ZerocopyAliasing: #zerocopy_crate::pointer::invariant::Reference, + { + use #zerocopy_crate::util::macro_util::core_reexport; + use #zerocopy_crate::pointer::PtrInner; + + false #(|| { + // SAFETY: + // - `project` is a field projection, and so it + // addresses a subset of the bytes addressed by `slf` + // - ..., and so it preserves provenance + // - Since `Self: Immutable` is enforced by + // `self_type_trait_bounds`, neither `*slf` nor the + // returned pointer's referent contain any + // `UnsafeCell`s + let field_candidate = unsafe { + let project = |slf: PtrInner<'_, Self>| { + let slf = slf.as_non_null().as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).#field_names); + // SAFETY: `cast_unsized_unchecked` promises + // that `slf` will either reference a zero-sized + // byte range, or else will reference a byte + // range that is entirely contained within an + // allocated object. In either case, this + // guarantees that field projection will not + // wrap around the address space, and so `field` + // will be non-null. + let ptr = unsafe { core_reexport::ptr::NonNull::new_unchecked(field) }; + // SAFETY: + // 0. `ptr` addresses a subset of the bytes of + // `slf`, so by invariant on `slf: PtrInner`, + // if `ptr`'s referent is not zero sized, + // then `ptr` has valid provenance for its + // referent, which is entirely contained in + // some Rust allocation, `A`. + // 1. By invariant on `slf: PtrInner`, if + // `ptr`'s referent is not zero sized, `A` is + // guaranteed to live for at least `'a`. + unsafe { PtrInner::new(ptr) } + }; + + candidate.reborrow().cast_unsized_unchecked(project) + }; + + <#field_tys as #zerocopy_crate::TryFromBytes>::is_bit_valid(field_candidate) + })* + } + ) + }); + ImplBlockBuilder::new(ast, unn, Trait::TryFromBytes, field_type_trait_bounds, zerocopy_crate) + .inner_extras(extras) + .build() +} + +fn derive_try_from_bytes_enum( + ast: &DeriveInput, + enm: &DataEnum, + top_level: Trait, + zerocopy_crate: &Path, +) -> Result<TokenStream, Error> { + let repr = EnumRepr::from_attrs(&ast.attrs)?; + + // If an enum has no fields, it has a well-defined integer representation, + // and every possible bit pattern corresponds to a valid discriminant tag, + // then it *could* be `FromBytes` (even if the user hasn't derived + // `FromBytes`). This holds if, for `repr(uN)` or `repr(iN)`, there are 2^N + // variants. + let could_be_from_bytes = enum_size_from_repr(&repr) + .map(|size| enm.fields().is_empty() && enm.variants.len() == 1usize << size) + .unwrap_or(false); + + let trivial_is_bit_valid = try_gen_trivial_is_bit_valid(ast, top_level, zerocopy_crate); + let extra = match (trivial_is_bit_valid, could_be_from_bytes) { + (Some(is_bit_valid), _) => is_bit_valid, + // SAFETY: It would be sound for the enum to implement `FromBytes`, as + // required by `gen_trivial_is_bit_valid_unchecked`. + (None, true) => unsafe { gen_trivial_is_bit_valid_unchecked(zerocopy_crate) }, + (None, false) => { + r#enum::derive_is_bit_valid(&ast.ident, &repr, &ast.generics, enm, zerocopy_crate)? + } + }; + + Ok(ImplBlockBuilder::new(ast, enm, Trait::TryFromBytes, FieldBounds::ALL_SELF, zerocopy_crate) + .inner_extras(extra) + .build()) +} + +/// Attempts to generate a `TryFromBytes::is_bit_valid` instance that +/// unconditionally returns true. 
+/// +/// This is possible when the `top_level` trait is `FromBytes` and there are no +/// generic type parameters. In this case, we know that compilation will succeed +/// only if the type is unconditionally `FromBytes`. Type parameters are not +/// supported because a type with type parameters could be `TryFromBytes` but +/// not `FromBytes` depending on its type parameters, and so deriving a trivial +/// `is_bit_valid` would be either unsound or, assuming we add a defensive +/// `Self: FromBytes` bound (as we currently do), overly restrictive. Consider, +/// for example, that `Foo<bool>` ought to be `TryFromBytes` but not `FromBytes` +/// in this example: +/// +/// ```rust,ignore +/// #[derive(FromBytes)] +/// #[repr(transparent)] +/// struct Foo<T>(T); +/// ``` +/// +/// This should be used where possible. Using this impl is faster to codegen, +/// faster to compile, and is friendlier on the optimizer. +fn try_gen_trivial_is_bit_valid( + ast: &DeriveInput, + top_level: Trait, + zerocopy_crate: &Path, +) -> Option<proc_macro2::TokenStream> { + // If the top-level trait is `FromBytes` and `Self` has no type parameters, + // then the `FromBytes` derive will fail compilation if `Self` is not + // actually soundly `FromBytes`, and so we can rely on that for our + // `is_bit_valid` impl. It's plausible that we could make changes - or Rust + // could make changes (such as the "trivial bounds" language feature) - that + // make this no longer true. To hedge against these, we include an explicit + // `Self: FromBytes` check in the generated `is_bit_valid`, which is + // bulletproof. + if top_level == Trait::FromBytes && ast.generics.params.is_empty() { + Some(quote!( + // SAFETY: See inline. + fn is_bit_valid<___ZerocopyAliasing>( + _candidate: #zerocopy_crate::Maybe<Self, ___ZerocopyAliasing>, + ) -> #zerocopy_crate::util::macro_util::core_reexport::primitive::bool + where + ___ZerocopyAliasing: #zerocopy_crate::pointer::invariant::Reference, + { + if false { + fn assert_is_from_bytes<T>() + where + T: #zerocopy_crate::FromBytes, + T: ?#zerocopy_crate::util::macro_util::core_reexport::marker::Sized, + { + } + + assert_is_from_bytes::<Self>(); + } + + // SAFETY: The preceding code only compiles if `Self: + // FromBytes`. Thus, this code only compiles if all initialized + // byte sequences represent valid instances of `Self`. + true + } + )) + } else { + None + } +} + +/// Generates a `TryFromBytes::is_bit_valid` instance that unconditionally +/// returns true. +/// +/// This should be used where possible, (although `try_gen_trivial_is_bit_valid` +/// should be preferred over this for safety reasons). Using this impl is faster +/// to codegen, faster to compile, and is friendlier on the optimizer. +/// +/// # Safety +/// +/// The caller must ensure that all initialized bit patterns are valid for +/// `Self`. +unsafe fn gen_trivial_is_bit_valid_unchecked(zerocopy_crate: &Path) -> proc_macro2::TokenStream { + quote!( + // SAFETY: The caller of `gen_trivial_is_bit_valid_unchecked` has + // promised that all initialized bit patterns are valid for `Self`. 
+ fn is_bit_valid<___ZerocopyAliasing>( + _candidate: #zerocopy_crate::Maybe<Self, ___ZerocopyAliasing>, + ) -> #zerocopy_crate::util::macro_util::core_reexport::primitive::bool + where + ___ZerocopyAliasing: #zerocopy_crate::pointer::invariant::Reference, + { + true + } + ) +} + +/// A struct is `FromZeros` if: +/// - all fields are `FromZeros` +fn derive_from_zeros_struct( + ast: &DeriveInput, + strct: &DataStruct, + zerocopy_crate: &Path, +) -> TokenStream { + ImplBlockBuilder::new(ast, strct, Trait::FromZeros, FieldBounds::ALL_SELF, zerocopy_crate) + .build() +} + +/// Returns `Ok(index)` if variant `index` of the enum has a discriminant of +/// zero. If `Err(bool)` is returned, the boolean is true if the enum has +/// unknown discriminants (e.g. discriminants set to const expressions which we +/// can't evaluate in a proc macro). If the enum has unknown discriminants, then +/// it might have a zero variant that we just can't detect. +fn find_zero_variant(enm: &DataEnum) -> Result<usize, bool> { + // Discriminants can be anywhere in the range [i128::MIN, u128::MAX] because + // the discriminant type may be signed or unsigned. Since we only care about + // tracking the discriminant when it's less than or equal to zero, we can + // avoid u128 -> i128 conversions and bounds checking by making the "next + // discriminant" value implicitly negative. + // Technically 64 bits is enough, but 128 is better for future compatibility + // with https://github.com/rust-lang/rust/issues/56071 + let mut next_negative_discriminant = Some(0); + + // Sometimes we encounter explicit discriminants that we can't know the + // value of (e.g. a constant expression that requires evaluation). These + // could evaluate to zero or a negative number, but we can't assume that + // they do (no false positives allowed!). So we treat them like strictly- + // positive values that can't result in any zero variants, and track whether + // we've encountered any unknown discriminants. + let mut has_unknown_discriminants = false; + + for (i, v) in enm.variants.iter().enumerate() { + match v.discriminant.as_ref() { + // Implicit discriminant + None => { + match next_negative_discriminant.as_mut() { + Some(0) => return Ok(i), + // n is nonzero so subtraction is always safe + Some(n) => *n -= 1, + None => (), + } + } + // Explicit positive discriminant + Some((_, Expr::Lit(ExprLit { lit: Lit::Int(int), .. }))) => { + match int.base10_parse::<u128>().ok() { + Some(0) => return Ok(i), + Some(_) => next_negative_discriminant = None, + None => { + // Numbers should never fail to parse, but just in case: + has_unknown_discriminants = true; + next_negative_discriminant = None; + } + } + } + // Explicit negative discriminant + Some((_, Expr::Unary(ExprUnary { op: UnOp::Neg(_), expr, .. }))) => match &**expr { + Expr::Lit(ExprLit { lit: Lit::Int(int), .. }) => { + match int.base10_parse::<u128>().ok() { + Some(0) => return Ok(i), + // x is nonzero so subtraction is always safe + Some(x) => next_negative_discriminant = Some(x - 1), + None => { + // Numbers should never fail to parse, but just in + // case: + has_unknown_discriminants = true; + next_negative_discriminant = None; + } + } + } + // Unknown negative discriminant (e.g. const repr) + _ => { + has_unknown_discriminants = true; + next_negative_discriminant = None; + } + }, + // Unknown discriminant (e.g. 
const expr) + _ => { + has_unknown_discriminants = true; + next_negative_discriminant = None; + } + } + } + + Err(has_unknown_discriminants) +} + +/// An enum is `FromZeros` if: +/// - one of the variants has a discriminant of `0` +/// - that variant's fields are all `FromZeros` +fn derive_from_zeros_enum( + ast: &DeriveInput, + enm: &DataEnum, + zerocopy_crate: &Path, +) -> Result<TokenStream, Error> { + let repr = EnumRepr::from_attrs(&ast.attrs)?; + + // We don't actually care what the repr is; we just care that it's one of + // the allowed ones. + match repr { + Repr::Compound( + Spanned { t: CompoundRepr::C | CompoundRepr::Primitive(_), span: _ }, + _, + ) => {} + Repr::Transparent(_) + | Repr::Compound(Spanned { t: CompoundRepr::Rust, span: _ }, _) => return Err(Error::new(Span::call_site(), "must have #[repr(C)] or #[repr(Int)] attribute in order to guarantee this type's memory layout")), + } + + let zero_variant = match find_zero_variant(enm) { + Ok(index) => enm.variants.iter().nth(index).unwrap(), + // Has unknown variants + Err(true) => { + return Err(Error::new_spanned( + ast, + "FromZeros only supported on enums with a variant that has a discriminant of `0`\n\ + help: This enum has discriminants which are not literal integers. One of those may \ + define or imply which variant has a discriminant of zero. Use a literal integer to \ + define or imply the variant with a discriminant of zero.", + )); + } + // Does not have unknown variants + Err(false) => { + return Err(Error::new_spanned( + ast, + "FromZeros only supported on enums with a variant that has a discriminant of `0`", + )); + } + }; + + let explicit_bounds = zero_variant + .fields + .iter() + .map(|field| { + let ty = &field.ty; + parse_quote! { #ty: #zerocopy_crate::FromZeros } + }) + .collect::<Vec<WherePredicate>>(); + + Ok(ImplBlockBuilder::new( + ast, + enm, + Trait::FromZeros, + FieldBounds::Explicit(explicit_bounds), + zerocopy_crate, + ) + .build()) +} + +/// Unions are `FromZeros` if +/// - all fields are `FromZeros` and `Immutable` +fn derive_from_zeros_union( + ast: &DeriveInput, + unn: &DataUnion, + zerocopy_crate: &Path, +) -> TokenStream { + // FIXME(#5): Remove the `Immutable` bound. It's only necessary for + // compatibility with `derive(TryFromBytes)` on unions; not for soundness. + let field_type_trait_bounds = + FieldBounds::All(&[TraitBound::Slf, TraitBound::Other(Trait::Immutable)]); + ImplBlockBuilder::new(ast, unn, Trait::FromZeros, field_type_trait_bounds, zerocopy_crate) + .build() +} + +/// A struct is `FromBytes` if: +/// - all fields are `FromBytes` +fn derive_from_bytes_struct( + ast: &DeriveInput, + strct: &DataStruct, + zerocopy_crate: &Path, +) -> TokenStream { + ImplBlockBuilder::new(ast, strct, Trait::FromBytes, FieldBounds::ALL_SELF, zerocopy_crate) + .build() +} + +/// An enum is `FromBytes` if: +/// - Every possible bit pattern must be valid, which means that every bit +/// pattern must correspond to a different enum variant. Thus, for an enum +/// whose layout takes up N bytes, there must be 2^N variants. +/// - Since we must know N, only representations which guarantee the layout's +/// size are allowed. These are `repr(uN)` and `repr(iN)` (`repr(C)` implies +/// an implementation-defined size). `usize` and `isize` technically guarantee +/// the layout's size, but would require us to know how large those are on the +/// target platform. 
This isn't terribly difficult - we could emit a const +/// expression that could call `core::mem::size_of` in order to determine the +/// size and check against the number of enum variants, but a) this would be +/// platform-specific and, b) even on Rust's smallest bit width platform (32), +/// this would require ~4 billion enum variants, which obviously isn't a +/// thing. +/// - All fields of all variants are `FromBytes`. +fn derive_from_bytes_enum( + ast: &DeriveInput, + enm: &DataEnum, + zerocopy_crate: &Path, +) -> Result<TokenStream, Error> { + let repr = EnumRepr::from_attrs(&ast.attrs)?; + + let variants_required = 1usize << enum_size_from_repr(&repr)?; + if enm.variants.len() != variants_required { + return Err(Error::new_spanned( + ast, + format!( + "FromBytes only supported on {} enum with {} variants", + repr.repr_type_name(), + variants_required + ), + )); + } + + Ok(ImplBlockBuilder::new(ast, enm, Trait::FromBytes, FieldBounds::ALL_SELF, zerocopy_crate) + .build()) +} + +// Returns `None` if the enum's size is not guaranteed by the repr. +fn enum_size_from_repr(repr: &EnumRepr) -> Result<usize, Error> { + use CompoundRepr::*; + use PrimitiveRepr::*; + use Repr::*; + match repr { + Transparent(span) + | Compound( + Spanned { t: C | Rust | Primitive(U32 | I32 | U64 | I64 | U128 | I128 | Usize | Isize), span }, + _, + ) => Err(Error::new(*span, "`FromBytes` only supported on enums with `#[repr(...)]` attributes `u8`, `i8`, `u16`, or `i16`")), + Compound(Spanned { t: Primitive(U8 | I8), span: _ }, _align) => Ok(8), + Compound(Spanned { t: Primitive(U16 | I16), span: _ }, _align) => Ok(16), + } +} + +/// Unions are `FromBytes` if +/// - all fields are `FromBytes` and `Immutable` +fn derive_from_bytes_union( + ast: &DeriveInput, + unn: &DataUnion, + zerocopy_crate: &Path, +) -> TokenStream { + // FIXME(#5): Remove the `Immutable` bound. It's only necessary for + // compatibility with `derive(TryFromBytes)` on unions; not for soundness. + let field_type_trait_bounds = + FieldBounds::All(&[TraitBound::Slf, TraitBound::Other(Trait::Immutable)]); + ImplBlockBuilder::new(ast, unn, Trait::FromBytes, field_type_trait_bounds, zerocopy_crate) + .build() +} + +fn derive_into_bytes_struct( + ast: &DeriveInput, + strct: &DataStruct, + zerocopy_crate: &Path, +) -> Result<TokenStream, Error> { + let repr = StructUnionRepr::from_attrs(&ast.attrs)?; + + let is_transparent = repr.is_transparent(); + let is_c = repr.is_c(); + let is_packed_1 = repr.is_packed_1(); + let num_fields = strct.fields().len(); + + let (padding_check, require_unaligned_fields) = if is_transparent || is_packed_1 { + // No padding check needed. + // - repr(transparent): The layout and ABI of the whole struct is the + // same as its only non-ZST field (meaning there's no padding outside + // of that field) and we require that field to be `IntoBytes` (meaning + // there's no padding in that field). + // - repr(packed): Any inter-field padding bytes are removed, meaning + // that any padding bytes would need to come from the fields, all of + // which we require to be `IntoBytes` (meaning they don't have any + // padding). Note that this holds regardless of other `repr` + // attributes, including `repr(Rust)`. [1] + // + // [1] Per https://doc.rust-lang.org/1.81.0/reference/type-layout.html#the-alignment-modifiers: + // + // An important consequence of these rules is that a type with + // `#[repr(packed(1))]`` (or `#[repr(packed)]``) will have no + // inter-field padding. 
+ (None, false) + } else if is_c && !repr.is_align_gt_1() && num_fields <= 1 { + // No padding check needed. A repr(C) struct with zero or one field has + // no padding unless #[repr(align)] explicitly adds padding, which we + // check for in this branch's condition. + (None, false) + } else if ast.generics.params.is_empty() { + // Is the last field a syntactic slice, i.e., `[SomeType]`. + let is_syntactic_dst = + strct.fields().last().map(|(_, _, ty)| matches!(ty, Type::Slice(_))).unwrap_or(false); + // Since there are no generics, we can emit a padding check. All reprs + // guarantee that fields won't overlap [1], so the padding check is + // sound. This is more permissive than the next case, which requires + // that all field types implement `Unaligned`. + // + // [1] Per https://doc.rust-lang.org/1.81.0/reference/type-layout.html#the-rust-representation: + // + // The only data layout guarantees made by [`repr(Rust)`] are those + // required for soundness. They are: + // ... + // 2. The fields do not overlap. + // ... + if is_c && is_syntactic_dst { + (Some(PaddingCheck::ReprCStruct), false) + } else { + (Some(PaddingCheck::Struct), false) + } + } else if is_c && !repr.is_align_gt_1() { + // We can't use a padding check since there are generic type arguments. + // Instead, we require all field types to implement `Unaligned`. This + // ensures that the `repr(C)` layout algorithm will not insert any + // padding unless #[repr(align)] explicitly adds padding, which we check + // for in this branch's condition. + // + // FIXME(#10): Support type parameters for non-transparent, non-packed + // structs without requiring `Unaligned`. + (None, true) + } else { + return Err(Error::new(Span::call_site(), "must have a non-align #[repr(...)] attribute in order to guarantee this type's memory layout")); + }; + + let field_bounds = if require_unaligned_fields { + FieldBounds::All(&[TraitBound::Slf, TraitBound::Other(Trait::Unaligned)]) + } else { + FieldBounds::ALL_SELF + }; + + Ok(ImplBlockBuilder::new(ast, strct, Trait::IntoBytes, field_bounds, zerocopy_crate) + .padding_check(padding_check) + .build()) +} + +/// If the type is an enum: +/// - It must have a defined representation (`repr`s `C`, `u8`, `u16`, `u32`, +/// `u64`, `usize`, `i8`, `i16`, `i32`, `i64`, or `isize`). +/// - It must have no padding bytes. +/// - Its fields must be `IntoBytes`. +fn derive_into_bytes_enum( + ast: &DeriveInput, + enm: &DataEnum, + zerocopy_crate: &Path, +) -> Result<TokenStream, Error> { + let repr = EnumRepr::from_attrs(&ast.attrs)?; + if !repr.is_c() && !repr.is_primitive() { + return Err(Error::new(Span::call_site(), "must have #[repr(C)] or #[repr(Int)] attribute in order to guarantee this type's memory layout")); + } + + let tag_type_definition = r#enum::generate_tag_enum(&repr, enm); + Ok(ImplBlockBuilder::new(ast, enm, Trait::IntoBytes, FieldBounds::ALL_SELF, zerocopy_crate) + .padding_check(PaddingCheck::Enum { tag_type_definition }) + .build()) +} + +/// A union is `IntoBytes` if: +/// - all fields are `IntoBytes` +/// - `repr(C)`, `repr(transparent)`, or `repr(packed)` +/// - no padding (size of union equals size of each field type) +fn derive_into_bytes_union( + ast: &DeriveInput, + unn: &DataUnion, + zerocopy_crate: &Path, +) -> Result<TokenStream, Error> { + // See #1792 for more context. 
+ // + // By checking for `zerocopy_derive_union_into_bytes` both here and in the + // generated code, we ensure that `--cfg zerocopy_derive_union_into_bytes` + // need only be passed *either* when compiling this crate *or* when + // compiling the user's crate. The former is preferable, but in some + // situations (such as when cross-compiling using `cargo build --target`), + // it doesn't get propagated to this crate's build by default. + let cfg_compile_error = if cfg!(zerocopy_derive_union_into_bytes) { + quote!() + } else { + let error_message = "requires --cfg zerocopy_derive_union_into_bytes; +please let us know you use this feature: https://github.com/google/zerocopy/discussions/1802"; + quote!( + const _: () = { + #[cfg(not(zerocopy_derive_union_into_bytes))] + #zerocopy_crate::util::macro_util::core_reexport::compile_error!(#error_message); + }; + ) + }; + + // FIXME(#10): Support type parameters. + if !ast.generics.params.is_empty() { + return Err(Error::new(Span::call_site(), "unsupported on types with type parameters")); + } + + // Because we don't support generics, we don't need to worry about + // special-casing different reprs. So long as there is *some* repr which + // guarantees the layout, our `PaddingCheck::Union` guarantees that there is + // no padding. + let repr = StructUnionRepr::from_attrs(&ast.attrs)?; + if !repr.is_c() && !repr.is_transparent() && !repr.is_packed_1() { + return Err(Error::new( + Span::call_site(), + "must be #[repr(C)], #[repr(packed)], or #[repr(transparent)]", + )); + } + + let impl_block = + ImplBlockBuilder::new(ast, unn, Trait::IntoBytes, FieldBounds::ALL_SELF, zerocopy_crate) + .padding_check(PaddingCheck::Union) + .build(); + Ok(quote!(#cfg_compile_error #impl_block)) +} + +/// A struct is `Unaligned` if: +/// - `repr(align)` is no more than 1 and either +/// - `repr(C)` or `repr(transparent)` and +/// - all fields `Unaligned` +/// - `repr(packed)` +fn derive_unaligned_struct( + ast: &DeriveInput, + strct: &DataStruct, + zerocopy_crate: &Path, +) -> Result<TokenStream, Error> { + let repr = StructUnionRepr::from_attrs(&ast.attrs)?; + repr.unaligned_validate_no_align_gt_1()?; + + let field_bounds = if repr.is_packed_1() { + FieldBounds::None + } else if repr.is_c() || repr.is_transparent() { + FieldBounds::ALL_SELF + } else { + return Err(Error::new(Span::call_site(), "must have #[repr(C)], #[repr(transparent)], or #[repr(packed)] attribute in order to guarantee this type's alignment")); + }; + + Ok(ImplBlockBuilder::new(ast, strct, Trait::Unaligned, field_bounds, zerocopy_crate).build()) +} + +/// An enum is `Unaligned` if: +/// - No `repr(align(N > 1))` +/// - `repr(u8)` or `repr(i8)` +fn derive_unaligned_enum( + ast: &DeriveInput, + enm: &DataEnum, + zerocopy_crate: &Path, +) -> Result<TokenStream, Error> { + let repr = EnumRepr::from_attrs(&ast.attrs)?; + repr.unaligned_validate_no_align_gt_1()?; + + if !repr.is_u8() && !repr.is_i8() { + return Err(Error::new(Span::call_site(), "must have #[repr(u8)] or #[repr(i8)] attribute in order to guarantee this type's alignment")); + } + + Ok(ImplBlockBuilder::new(ast, enm, Trait::Unaligned, FieldBounds::ALL_SELF, zerocopy_crate) + .build()) +} + +/// Like structs, a union is `Unaligned` if: +/// - `repr(align)` is no more than 1 and either +/// - `repr(C)` or `repr(transparent)` and +/// - all fields `Unaligned` +/// - `repr(packed)` +fn derive_unaligned_union( + ast: &DeriveInput, + unn: &DataUnion, + zerocopy_crate: &Path, +) -> Result<TokenStream, Error> { + let repr = 
StructUnionRepr::from_attrs(&ast.attrs)?; + repr.unaligned_validate_no_align_gt_1()?; + + let field_type_trait_bounds = if repr.is_packed_1() { + FieldBounds::None + } else if repr.is_c() || repr.is_transparent() { + FieldBounds::ALL_SELF + } else { + return Err(Error::new(Span::call_site(), "must have #[repr(C)], #[repr(transparent)], or #[repr(packed)] attribute in order to guarantee this type's alignment")); + }; + + Ok(ImplBlockBuilder::new(ast, unn, Trait::Unaligned, field_type_trait_bounds, zerocopy_crate) + .build()) +} + +/// This enum describes what kind of padding check needs to be generated for the +/// associated impl. +enum PaddingCheck { + /// Check that the sum of the fields' sizes exactly equals the struct's + /// size. + Struct, + /// Check that a `repr(C)` struct has no padding. + ReprCStruct, + /// Check that the size of each field exactly equals the union's size. + Union, + /// Check that every variant of the enum contains no padding. + /// + /// Because doing so requires a tag enum, this padding check requires an + /// additional `TokenStream` which defines the tag enum as `___ZerocopyTag`. + Enum { tag_type_definition: TokenStream }, +} + +impl PaddingCheck { + /// Returns the idents of the trait to use and the macro to call in order to + /// validate that a type passes the relevant padding check. + fn validator_trait_and_macro_idents(&self) -> (Ident, Ident) { + let (trt, mcro) = match self { + PaddingCheck::Struct => ("PaddingFree", "struct_padding"), + PaddingCheck::ReprCStruct => ("DynamicPaddingFree", "repr_c_struct_has_padding"), + PaddingCheck::Union => ("PaddingFree", "union_padding"), + PaddingCheck::Enum { .. } => ("PaddingFree", "enum_padding"), + }; + + let trt = Ident::new(trt, Span::call_site()); + let mcro = Ident::new(mcro, Span::call_site()); + (trt, mcro) + } + + /// Sometimes performing the padding check requires some additional + /// "context" code. For enums, this is the definition of the tag enum. + fn validator_macro_context(&self) -> Option<&TokenStream> { + match self { + PaddingCheck::Struct | PaddingCheck::ReprCStruct | PaddingCheck::Union => None, + PaddingCheck::Enum { tag_type_definition } => Some(tag_type_definition), + } + } +} + +#[derive(Copy, Clone, Debug, Eq, PartialEq)] +enum Trait { + KnownLayout, + Immutable, + TryFromBytes, + FromZeros, + FromBytes, + IntoBytes, + Unaligned, + Sized, + ByteHash, + ByteEq, + SplitAt, +} + +impl ToTokens for Trait { + fn to_tokens(&self, tokens: &mut TokenStream) { + // According to [1], the format of the derived `Debug`` output is not + // stable and therefore not guaranteed to represent the variant names. + // Indeed with the (unstable) `fmt-debug` compiler flag [2], it can + // return only a minimalized output or empty string. To make sure this + // code will work in the future and independent of the compiler flag, we + // translate the variants to their names manually here. 
+ // + // [1] https://doc.rust-lang.org/1.81.0/std/fmt/trait.Debug.html#stability + // [2] https://doc.rust-lang.org/beta/unstable-book/compiler-flags/fmt-debug.html + let s = match self { + Trait::KnownLayout => "KnownLayout", + Trait::Immutable => "Immutable", + Trait::TryFromBytes => "TryFromBytes", + Trait::FromZeros => "FromZeros", + Trait::FromBytes => "FromBytes", + Trait::IntoBytes => "IntoBytes", + Trait::Unaligned => "Unaligned", + Trait::Sized => "Sized", + Trait::ByteHash => "ByteHash", + Trait::ByteEq => "ByteEq", + Trait::SplitAt => "SplitAt", + }; + let ident = Ident::new(s, Span::call_site()); + tokens.extend(core::iter::once(TokenTree::Ident(ident))); + } +} + +impl Trait { + fn crate_path(&self, zerocopy_crate: &Path) -> Path { + match self { + Self::Sized => { + parse_quote!(#zerocopy_crate::util::macro_util::core_reexport::marker::#self) + } + _ => parse_quote!(#zerocopy_crate::#self), + } + } +} + +#[derive(Debug, Eq, PartialEq)] +enum TraitBound { + Slf, + Other(Trait), +} + +enum FieldBounds<'a> { + None, + All(&'a [TraitBound]), + Trailing(&'a [TraitBound]), + Explicit(Vec<WherePredicate>), +} + +impl<'a> FieldBounds<'a> { + const ALL_SELF: FieldBounds<'a> = FieldBounds::All(&[TraitBound::Slf]); + const TRAILING_SELF: FieldBounds<'a> = FieldBounds::Trailing(&[TraitBound::Slf]); +} + +#[derive(Debug, Eq, PartialEq)] +enum SelfBounds<'a> { + None, + All(&'a [Trait]), +} + +// FIXME(https://github.com/rust-lang/rust-clippy/issues/12908): This is a false +// positive. Explicit lifetimes are actually necessary here. +#[allow(clippy::needless_lifetimes)] +impl<'a> SelfBounds<'a> { + const SIZED: Self = Self::All(&[Trait::Sized]); +} + +/// Normalizes a slice of bounds by replacing [`TraitBound::Slf`] with `slf`. +fn normalize_bounds(slf: Trait, bounds: &[TraitBound]) -> impl '_ + Iterator<Item = Trait> { + bounds.iter().map(move |bound| match bound { + TraitBound::Slf => slf, + TraitBound::Other(trt) => *trt, + }) +} + +struct ImplBlockBuilder<'a, D: DataExt> { + input: &'a DeriveInput, + data: &'a D, + trt: Trait, + field_type_trait_bounds: FieldBounds<'a>, + zerocopy_crate: &'a Path, + self_type_trait_bounds: SelfBounds<'a>, + padding_check: Option<PaddingCheck>, + inner_extras: Option<TokenStream>, + outer_extras: Option<TokenStream>, +} + +impl<'a, D: DataExt> ImplBlockBuilder<'a, D> { + fn new( + input: &'a DeriveInput, + data: &'a D, + trt: Trait, + field_type_trait_bounds: FieldBounds<'a>, + zerocopy_crate: &'a Path, + ) -> Self { + Self { + input, + data, + trt, + field_type_trait_bounds, + zerocopy_crate, + self_type_trait_bounds: SelfBounds::None, + padding_check: None, + inner_extras: None, + outer_extras: None, + } + } + + fn self_type_trait_bounds(mut self, self_type_trait_bounds: SelfBounds<'a>) -> Self { + self.self_type_trait_bounds = self_type_trait_bounds; + self + } + + fn padding_check<P: Into<Option<PaddingCheck>>>(mut self, padding_check: P) -> Self { + self.padding_check = padding_check.into(); + self + } + + fn inner_extras(mut self, inner_extras: TokenStream) -> Self { + self.inner_extras = Some(inner_extras); + self + } + + fn outer_extras<T: Into<Option<TokenStream>>>(mut self, outer_extras: T) -> Self { + self.outer_extras = outer_extras.into(); + self + } + + fn build(self) -> TokenStream { + // In this documentation, we will refer to this hypothetical struct: + // + // #[derive(FromBytes)] + // struct Foo<T, I: Iterator> + // where + // T: Copy, + // I: Clone, + // I::Item: Clone, + // { + // a: u8, + // b: T, + // c: I::Item, + // } + // + 
// We extract the field types, which in this case are `u8`, `T`, and + // `I::Item`. We re-use the existing parameters and where clauses. If + // `require_trait_bound == true` (as it is for `FromBytes), we add where + // bounds for each field's type: + // + // impl<T, I: Iterator> FromBytes for Foo<T, I> + // where + // T: Copy, + // I: Clone, + // I::Item: Clone, + // T: FromBytes, + // I::Item: FromBytes, + // { + // } + // + // NOTE: It is standard practice to only emit bounds for the type + // parameters themselves, not for field types based on those parameters + // (e.g., `T` vs `T::Foo`). For a discussion of why this is standard + // practice, see https://github.com/rust-lang/rust/issues/26925. + // + // The reason we diverge from this standard is that doing it that way + // for us would be unsound. E.g., consider a type, `T` where `T: + // FromBytes` but `T::Foo: !FromBytes`. It would not be sound for us to + // accept a type with a `T::Foo` field as `FromBytes` simply because `T: + // FromBytes`. + // + // While there's no getting around this requirement for us, it does have + // the pretty serious downside that, when lifetimes are involved, the + // trait solver ties itself in knots: + // + // #[derive(Unaligned)] + // #[repr(C)] + // struct Dup<'a, 'b> { + // a: PhantomData<&'a u8>, + // b: PhantomData<&'b u8>, + // } + // + // error[E0283]: type annotations required: cannot resolve `core::marker::PhantomData<&'a u8>: zerocopy::Unaligned` + // --> src/main.rs:6:10 + // | + // 6 | #[derive(Unaligned)] + // | ^^^^^^^^^ + // | + // = note: required by `zerocopy::Unaligned` + + let type_ident = &self.input.ident; + let trait_path = self.trt.crate_path(self.zerocopy_crate); + let fields = self.data.fields(); + let variants = self.data.variants(); + let tag = self.data.tag(); + let zerocopy_crate = self.zerocopy_crate; + + fn bound_tt( + ty: &Type, + traits: impl Iterator<Item = Trait>, + zerocopy_crate: &Path, + ) -> WherePredicate { + let traits = traits.map(|t| t.crate_path(zerocopy_crate)); + parse_quote!(#ty: #(#traits)+*) + } + let field_type_bounds: Vec<_> = match (self.field_type_trait_bounds, &fields[..]) { + (FieldBounds::All(traits), _) => fields + .iter() + .map(|(_vis, _name, ty)| { + bound_tt(ty, normalize_bounds(self.trt, traits), zerocopy_crate) + }) + .collect(), + (FieldBounds::None, _) | (FieldBounds::Trailing(..), []) => vec![], + (FieldBounds::Trailing(traits), [.., last]) => { + vec![bound_tt(last.2, normalize_bounds(self.trt, traits), zerocopy_crate)] + } + (FieldBounds::Explicit(bounds), _) => bounds, + }; + + // Don't bother emitting a padding check if there are no fields. + #[allow(unstable_name_collisions)] // See `BoolExt` below + let padding_check_bound = self + .padding_check + .and_then(|check| (!fields.is_empty()).then_some(check)) + .map(|check| { + let variant_types = variants.iter().map(|var| { + let types = var.iter().map(|(_vis, _name, ty)| ty); + quote!([#((#types)),*]) + }); + let validator_context = check.validator_macro_context(); + let (trt, validator_macro) = check.validator_trait_and_macro_idents(); + let t = tag.iter(); + parse_quote! 
{ + (): #zerocopy_crate::util::macro_util::#trt< + Self, + { + #validator_context + #zerocopy_crate::#validator_macro!(Self, #(#t,)* #(#variant_types),*) + } + > + } + }); + + let self_bounds: Option<WherePredicate> = match self.self_type_trait_bounds { + SelfBounds::None => None, + SelfBounds::All(traits) => { + Some(bound_tt(&parse_quote!(Self), traits.iter().copied(), zerocopy_crate)) + } + }; + + let bounds = self + .input + .generics + .where_clause + .as_ref() + .map(|where_clause| where_clause.predicates.iter()) + .into_iter() + .flatten() + .chain(field_type_bounds.iter()) + .chain(padding_check_bound.iter()) + .chain(self_bounds.iter()); + + // The parameters with trait bounds, but without type defaults. + let params = self.input.generics.params.clone().into_iter().map(|mut param| { + match &mut param { + GenericParam::Type(ty) => ty.default = None, + GenericParam::Const(cnst) => cnst.default = None, + GenericParam::Lifetime(_) => {} + } + quote!(#param) + }); + + // The identifiers of the parameters without trait bounds or type + // defaults. + let param_idents = self.input.generics.params.iter().map(|param| match param { + GenericParam::Type(ty) => { + let ident = &ty.ident; + quote!(#ident) + } + GenericParam::Lifetime(l) => { + let ident = &l.lifetime; + quote!(#ident) + } + GenericParam::Const(cnst) => { + let ident = &cnst.ident; + quote!({#ident}) + } + }); + + let inner_extras = self.inner_extras; + let impl_tokens = quote! { + #[allow(deprecated)] + // While there are not currently any warnings that this suppresses + // (that we're aware of), it's good future-proofing hygiene. + #[automatically_derived] + unsafe impl < #(#params),* > #trait_path for #type_ident < #(#param_idents),* > + where + #(#bounds,)* + { + fn only_derive_is_allowed_to_implement_this_trait() {} + + #inner_extras + } + }; + + if let Some(outer_extras) = self.outer_extras { + // So that any items defined in `#outer_extras` don't conflict with + // existing names defined in this scope. + quote! { + const _: () = { + #impl_tokens + + #outer_extras + }; + } + } else { + impl_tokens + } + } +} + +// A polyfill for `Option::then_some`, which was added after our MSRV. +// +// The `#[allow(unused)]` is necessary because, on sufficiently recent toolchain +// versions, `b.then_some(...)` resolves to the inherent method rather than to +// this trait, and so this trait is considered unused. +// +// FIXME(#67): Remove this once our MSRV is >= 1.62. +#[allow(unused)] +trait BoolExt { + fn then_some<T>(self, t: T) -> Option<T>; +} + +impl BoolExt for bool { + fn then_some<T>(self, t: T) -> Option<T> { + if self { + Some(t) + } else { + None + } + } +} diff --git a/vendor/zerocopy-derive/src/output_tests.rs b/vendor/zerocopy-derive/src/output_tests.rs new file mode 100644 index 00000000..9d3c62d8 --- /dev/null +++ b/vendor/zerocopy-derive/src/output_tests.rs @@ -0,0 +1,2366 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +use dissimilar::Chunk; +use proc_macro2::TokenStream; + +use crate::IntoTokenStream; + +macro_rules! use_as_trait_name { + ($($alias:ident => $derive:ident),* $(,)?) 
=> { + $(use super::$derive as $alias;)* + }; +} + +// This permits invocations of `test!` to be more ergonomic, passing the name of +// the trait under test rather than the name of the inner derive function. +use_as_trait_name!( + KnownLayout => derive_known_layout_inner, + Immutable => derive_no_cell_inner, + TryFromBytes => derive_try_from_bytes_inner, + FromZeros => derive_from_zeros_inner, + FromBytes => derive_from_bytes_inner, + IntoBytes => derive_into_bytes_inner, + Unaligned => derive_unaligned_inner, + ByteHash => derive_hash_inner, + ByteEq => derive_eq_inner, + SplitAt => derive_split_at_inner, +); + +/// Test that the given derive input expands to the expected output. +/// +/// Equality is tested by formatting both token streams using `prettyplease` and +/// performing string equality on the results. This has the effect of making the +/// tests less brittle and robust against meaningless formatting changes. +// Adapted from https://github.com/joshlf/synstructure/blob/400499aaf54840056ff56718beb7810540e6be59/src/macros.rs#L212-L317 +macro_rules! test { + ($name:ident { $($i:tt)* } expands to { $($o:tt)* }) => { + { + #[allow(dead_code)] + fn ensure_compiles() { + $($i)* + $($o)* + } + + test!($name { $($i)* } expands to { $($o)* } no_build); + } + }; + + ($name:ident { $($i:tt)* } expands to { $($o:tt)* } no_build) => { + { + let ts: proc_macro2::TokenStream = quote::quote!( $($i)* ); + let ast = syn::parse2::<syn::DeriveInput>(ts).unwrap(); + let res = $name(&ast, crate::Trait::$name, &syn::parse_quote!(::zerocopy)); + let expected_toks = quote::quote!( $($o)* ); + assert_eq_streams(expected_toks.into(), res.into_ts().into()); + } + }; +} + +#[track_caller] +fn assert_eq_streams(expect: TokenStream, res: TokenStream) { + let pretty = + |ts: TokenStream| prettyplease::unparse(&syn::parse_file(&ts.to_string()).unwrap()); + + let expect = pretty(expect.clone()); + let res = pretty(res.clone()); + if expect != res { + let diff = dissimilar::diff(&expect, &res) + .into_iter() + .flat_map(|chunk| { + let (prefix, chunk) = match chunk { + Chunk::Equal(chunk) => (" ", chunk), + Chunk::Delete(chunk) => ("-", chunk), + Chunk::Insert(chunk) => ("+", chunk), + }; + [prefix, chunk, "\n"] + }) + .collect::<String>(); + + panic!( + "\ +test failed: +got: +``` +{} +``` + +diff (expected vs got): +``` +{} +```\n", + res, diff + ); + } +} + +#[test] +fn test_known_layout() { + test! { + KnownLayout { + struct Foo; + } expands to { + #[allow(deprecated)] + #[automatically_derived] + unsafe impl ::zerocopy::KnownLayout for Foo + where + Self: ::zerocopy::util::macro_util::core_reexport::marker::Sized, + { + fn only_derive_is_allowed_to_implement_this_trait() {} + + type PointerMetadata = (); + + type MaybeUninit = ::zerocopy::util::macro_util::core_reexport::mem::MaybeUninit<Self>; + + const LAYOUT: ::zerocopy::DstLayout = ::zerocopy::DstLayout::for_type::<Self>(); + + #[inline(always)] + fn raw_from_ptr_len( + bytes: ::zerocopy::util::macro_util::core_reexport::ptr::NonNull<u8>, + _meta: (), + ) -> ::zerocopy::util::macro_util::core_reexport::ptr::NonNull<Self> { + bytes.cast::<Self>() + } + + #[inline(always)] + fn pointer_to_metadata(_ptr: *mut Self) -> () {} + } + } no_build + } + + test! 
{ + KnownLayout { + #[repr(C, align(2))] + struct Foo<T, U>(T, U); + } + expands to { + const _: () = { + #[allow(deprecated)] + #[automatically_derived] + unsafe impl<T, U> ::zerocopy::KnownLayout for Foo<T, U> + where + U: ::zerocopy::KnownLayout, + { + fn only_derive_is_allowed_to_implement_this_trait() {} + type PointerMetadata = <U as ::zerocopy::KnownLayout>::PointerMetadata; + type MaybeUninit = __ZerocopyKnownLayoutMaybeUninit<T, U>; + const LAYOUT: ::zerocopy::DstLayout = { + use ::zerocopy::util::macro_util::core_reexport::num::NonZeroUsize; + use ::zerocopy::{DstLayout, KnownLayout}; + DstLayout::for_repr_c_struct( + ::zerocopy::util::macro_util::core_reexport::num::NonZeroUsize::new( + 2u32 as usize, + ), + ::zerocopy::util::macro_util::core_reexport::option::Option::None, + &[DstLayout::for_type::<T>(), <U as KnownLayout>::LAYOUT], + ) + }; + #[inline(always)] + fn raw_from_ptr_len( + bytes: ::zerocopy::util::macro_util::core_reexport::ptr::NonNull<u8>, + meta: Self::PointerMetadata, + ) -> ::zerocopy::util::macro_util::core_reexport::ptr::NonNull<Self> { + use ::zerocopy::KnownLayout; + let trailing = <U as KnownLayout>::raw_from_ptr_len(bytes, meta); + let slf = trailing.as_ptr() as *mut Self; + unsafe { + ::zerocopy::util::macro_util::core_reexport::ptr::NonNull::new_unchecked( + slf, + ) + } + } + #[inline(always)] + fn pointer_to_metadata(ptr: *mut Self) -> Self::PointerMetadata { + <U>::pointer_to_metadata(ptr as *mut _) + } + } + #[allow(non_camel_case_types)] + struct __Zerocopy_Field_0; + #[allow(non_camel_case_types)] + struct __Zerocopy_Field_1; + #[allow(deprecated)] + unsafe impl<T, U> ::zerocopy::util::macro_util::Field<__Zerocopy_Field_0> + for Foo<T, U> { + type Type = T; + } + #[allow(deprecated)] + unsafe impl<T, U> ::zerocopy::util::macro_util::Field<__Zerocopy_Field_1> + for Foo<T, U> { + type Type = U; + } + #[repr(C)] + #[repr(align(2))] + #[doc(hidden)] + #[allow(private_bounds)] + #[allow(deprecated)] + struct __ZerocopyKnownLayoutMaybeUninit<T, U>( + ::zerocopy::util::macro_util::core_reexport::mem::MaybeUninit< + <Foo<T, U> as ::zerocopy::util::macro_util::Field<__Zerocopy_Field_0>>::Type, + >, + ::zerocopy::util::macro_util::core_reexport::mem::ManuallyDrop< + <<Foo< + T, + U, + > as ::zerocopy::util::macro_util::Field< + __Zerocopy_Field_1, + >>::Type as ::zerocopy::KnownLayout>::MaybeUninit, + >, + ) + where + <Foo< + T, + U, + > as ::zerocopy::util::macro_util::Field< + __Zerocopy_Field_1, + >>::Type: ::zerocopy::KnownLayout; + #[allow(deprecated)] + unsafe impl<T, U> ::zerocopy::KnownLayout for __ZerocopyKnownLayoutMaybeUninit<T, U> + where + <Foo< + T, + U, + > as ::zerocopy::util::macro_util::Field< + __Zerocopy_Field_1, + >>::Type: ::zerocopy::KnownLayout, + { + #[allow(clippy::missing_inline_in_public_items)] + fn only_derive_is_allowed_to_implement_this_trait() {} + type PointerMetadata = <Foo<T, U> as ::zerocopy::KnownLayout>::PointerMetadata; + type MaybeUninit = Self; + const LAYOUT: ::zerocopy::DstLayout = <Foo< + T, + U, + > as ::zerocopy::KnownLayout>::LAYOUT; + #[inline(always)] + fn raw_from_ptr_len( + bytes: ::zerocopy::util::macro_util::core_reexport::ptr::NonNull<u8>, + meta: Self::PointerMetadata, + ) -> ::zerocopy::util::macro_util::core_reexport::ptr::NonNull<Self> { + use ::zerocopy::KnownLayout; + let trailing = <<<Foo< + T, + U, + > as ::zerocopy::util::macro_util::Field< + __Zerocopy_Field_1, + >>::Type as ::zerocopy::KnownLayout>::MaybeUninit as KnownLayout>::raw_from_ptr_len( + bytes, + meta, + ); + let slf = 
trailing.as_ptr() as *mut Self; + unsafe { + ::zerocopy::util::macro_util::core_reexport::ptr::NonNull::new_unchecked( + slf, + ) + } + } + #[inline(always)] + fn pointer_to_metadata(ptr: *mut Self) -> Self::PointerMetadata { + <<<Foo< + T, + U, + > as ::zerocopy::util::macro_util::Field< + __Zerocopy_Field_1, + >>::Type as ::zerocopy::KnownLayout>::MaybeUninit>::pointer_to_metadata( + ptr as *mut _, + ) + } + } + }; + } no_build + } +} + +#[test] +fn test_immutable() { + test! { + Immutable { + struct Foo; + } expands to { + #[allow(deprecated)] + #[automatically_derived] + unsafe impl ::zerocopy::Immutable for Foo { + fn only_derive_is_allowed_to_implement_this_trait() {} + } + } no_build + } +} + +#[test] +fn test_try_from_bytes() { + test! { + TryFromBytes { + struct Foo; + } expands to { + #[allow(deprecated)] + #[automatically_derived] + unsafe impl ::zerocopy::TryFromBytes for Foo { + fn only_derive_is_allowed_to_implement_this_trait() {} + + fn is_bit_valid<___ZerocopyAliasing>( + mut candidate: ::zerocopy::Maybe<Self, ___ZerocopyAliasing>, + ) -> ::zerocopy::util::macro_util::core_reexport::primitive::bool + where + ___ZerocopyAliasing: ::zerocopy::pointer::invariant::Reference, + { + use ::zerocopy::util::macro_util::core_reexport; + use ::zerocopy::pointer::PtrInner; + true + } + } + } no_build + } +} + +#[test] +fn test_from_zeros() { + test! { + FromZeros { + struct Foo; + } expands to { + #[allow(deprecated)] + #[automatically_derived] + unsafe impl ::zerocopy::TryFromBytes for Foo { + fn only_derive_is_allowed_to_implement_this_trait() {} + + fn is_bit_valid<___ZerocopyAliasing>( + mut candidate: ::zerocopy::Maybe<Self, ___ZerocopyAliasing>, + ) -> ::zerocopy::util::macro_util::core_reexport::primitive::bool + where + ___ZerocopyAliasing: ::zerocopy::pointer::invariant::Reference, + { + use ::zerocopy::util::macro_util::core_reexport; + use ::zerocopy::pointer::PtrInner; + true + } + } + + #[allow(deprecated)] + #[automatically_derived] + unsafe impl ::zerocopy::FromZeros for Foo { + fn only_derive_is_allowed_to_implement_this_trait() {} + } + } no_build + } +} + +#[test] +fn test_from_bytes_struct() { + test! { + FromBytes { + struct Foo; + } expands to { + #[allow(deprecated)] + #[automatically_derived] + unsafe impl ::zerocopy::TryFromBytes for Foo { + fn only_derive_is_allowed_to_implement_this_trait() {} + + fn is_bit_valid<___ZerocopyAliasing>( + _candidate: ::zerocopy::Maybe<Self, ___ZerocopyAliasing>, + ) -> ::zerocopy::util::macro_util::core_reexport::primitive::bool + where + ___ZerocopyAliasing: ::zerocopy::pointer::invariant::Reference, + { + if false { + fn assert_is_from_bytes<T>() + where + T: ::zerocopy::FromBytes, + T: ?::zerocopy::util::macro_util::core_reexport::marker::Sized, + {} + assert_is_from_bytes::<Self>(); + } + + true + } + } + + #[allow(deprecated)] + #[automatically_derived] + unsafe impl ::zerocopy::FromZeros for Foo { + fn only_derive_is_allowed_to_implement_this_trait() {} + } + + #[allow(deprecated)] + #[automatically_derived] + unsafe impl ::zerocopy::FromBytes for Foo { + fn only_derive_is_allowed_to_implement_this_trait() {} + } + } no_build + } +} + +#[test] +fn test_from_bytes_union() { + test! 
{ + FromBytes { + union Foo { + a: u8, + } + } expands to { + #[allow(deprecated)] + #[automatically_derived] + unsafe impl ::zerocopy::TryFromBytes for Foo + where + u8: ::zerocopy::TryFromBytes + ::zerocopy::Immutable, + { + fn only_derive_is_allowed_to_implement_this_trait() {} + + fn is_bit_valid<___ZerocopyAliasing>( + _candidate: ::zerocopy::Maybe<Self, ___ZerocopyAliasing>, + ) -> ::zerocopy::util::macro_util::core_reexport::primitive::bool + where + ___ZerocopyAliasing: ::zerocopy::pointer::invariant::Reference, + { + if false { + fn assert_is_from_bytes<T>() + where + T: ::zerocopy::FromBytes, + T: ?::zerocopy::util::macro_util::core_reexport::marker::Sized, + {} + assert_is_from_bytes::<Self>(); + } + + true + } + } + + #[allow(deprecated)] + #[automatically_derived] + unsafe impl ::zerocopy::FromZeros for Foo + where + u8: ::zerocopy::FromZeros + ::zerocopy::Immutable, + { + fn only_derive_is_allowed_to_implement_this_trait() {} + } + + #[allow(deprecated)] + #[automatically_derived] + unsafe impl ::zerocopy::FromBytes for Foo + where + u8: ::zerocopy::FromBytes + ::zerocopy::Immutable, + { + fn only_derive_is_allowed_to_implement_this_trait() {} + } + } no_build + } +} + +#[test] +fn test_into_bytes_struct() { + test! { + IntoBytes { + #[repr(C)] + struct Foo; + } expands to { + #[allow(deprecated)] + #[automatically_derived] + unsafe impl ::zerocopy::IntoBytes for Foo { + fn only_derive_is_allowed_to_implement_this_trait() {} + } + } no_build + } + + test! { + IntoBytes { + #[repr(C)] + struct Foo { + a: u8, + b: u8, + } + } expands to { + #[allow(deprecated)] + #[automatically_derived] + unsafe impl ::zerocopy::IntoBytes for Foo + where + u8: ::zerocopy::IntoBytes, + u8: ::zerocopy::IntoBytes, + (): ::zerocopy::util::macro_util::PaddingFree< + Self, + { ::zerocopy::struct_padding!(Self, [(u8), (u8)]) }, + >, + { + fn only_derive_is_allowed_to_implement_this_trait() {} + } + } no_build + } + + test! { + IntoBytes { + #[repr(C)] + struct Foo { + a: u8, + b: [Trailing], + } + } expands to { + #[allow(deprecated)] + #[automatically_derived] + unsafe impl ::zerocopy::IntoBytes for Foo + where + u8: ::zerocopy::IntoBytes, + [Trailing]: ::zerocopy::IntoBytes, + (): ::zerocopy::util::macro_util::DynamicPaddingFree< + Self, + { ::zerocopy::repr_c_struct_has_padding!(Self, [(u8), ([Trailing])]) }, + >, + { + fn only_derive_is_allowed_to_implement_this_trait() {} + } + } no_build + } + + test! { + IntoBytes { + #[repr(C)] + struct Foo<Trailing> { + a: u8, + b: [Trailing], + } + } expands to { + #[allow(deprecated)] + #[automatically_derived] + unsafe impl<Trailing> ::zerocopy::IntoBytes for Foo<Trailing> + where + u8: ::zerocopy::IntoBytes + ::zerocopy::Unaligned, + [Trailing]: ::zerocopy::IntoBytes + ::zerocopy::Unaligned, + { + fn only_derive_is_allowed_to_implement_this_trait() {} + } + } no_build + } +} + +#[test] +fn test_into_bytes_enum() { + macro_rules! test_repr { + ($(#[$attr:meta])*) => { + $(test! { + IntoBytes { + #[$attr] + enum Foo { + Bar, + } + } expands to { + #[allow(deprecated)] + #[automatically_derived] + unsafe impl ::zerocopy::IntoBytes for Foo { + fn only_derive_is_allowed_to_implement_this_trait() {} + } + } no_build + })* + }; + } + + test_repr! { + #[repr(C)] + #[repr(u8)] + #[repr(u16)] + #[repr(u32)] + #[repr(u64)] + #[repr(u128)] + #[repr(usize)] + #[repr(i8)] + #[repr(i16)] + #[repr(i32)] + #[repr(i64)] + #[repr(i128)] + #[repr(isize)] + } +} + +#[test] +fn test_unaligned() { + test! 
{ + Unaligned { + #[repr(C)] + struct Foo; + } expands to { + #[allow(deprecated)] + #[automatically_derived] + unsafe impl ::zerocopy::Unaligned for Foo { + fn only_derive_is_allowed_to_implement_this_trait() {} + } + } no_build + } +} + +#[test] +fn test_try_from_bytes_enum() { + test! { + TryFromBytes { + #[repr(u8)] + enum ComplexWithGenerics<'a: 'static, const N: usize, X, Y: Deref> + where + X: Deref<Target = &'a [(X, Y); N]>, + { + UnitLike, + StructLike { a: u8, b: X, c: X::Target, d: Y::Target, e: [(X, Y); N] }, + TupleLike(bool, Y, PhantomData<&'a [(X, Y); N]>), + } + } expands to { + #[allow(deprecated)] + #[automatically_derived] + unsafe impl<'a: 'static, const N: usize, X, Y: Deref> ::zerocopy::TryFromBytes + for ComplexWithGenerics<'a, { N }, X, Y> + where + X: Deref<Target = &'a [(X, Y); N]>, + u8: ::zerocopy::TryFromBytes, + X: ::zerocopy::TryFromBytes, + X::Target: ::zerocopy::TryFromBytes, + Y::Target: ::zerocopy::TryFromBytes, + [(X, Y); N]: ::zerocopy::TryFromBytes, + bool: ::zerocopy::TryFromBytes, + Y: ::zerocopy::TryFromBytes, + PhantomData<&'a [(X, Y); N]>: ::zerocopy::TryFromBytes, + { + fn only_derive_is_allowed_to_implement_this_trait() {} + fn is_bit_valid<___ZerocopyAliasing>( + mut candidate: ::zerocopy::Maybe<'_, Self, ___ZerocopyAliasing>, + ) -> ::zerocopy::util::macro_util::core_reexport::primitive::bool + where + ___ZerocopyAliasing: ::zerocopy::pointer::invariant::Reference, + { + use ::zerocopy::util::macro_util::core_reexport; + + #[repr(u8)] + #[allow(dead_code, non_camel_case_types)] + enum ___ZerocopyTag { + UnitLike, + StructLike, + TupleLike, + } + type ___ZerocopyTagPrimitive = ::zerocopy::util::macro_util::SizeToTag< + { core_reexport::mem::size_of::<___ZerocopyTag>() }, + >; + #[allow(non_upper_case_globals)] + const ___ZEROCOPY_TAG_UnitLike: ___ZerocopyTagPrimitive = + ___ZerocopyTag::UnitLike as ___ZerocopyTagPrimitive; + #[allow(non_upper_case_globals)] + const ___ZEROCOPY_TAG_StructLike: ___ZerocopyTagPrimitive = + ___ZerocopyTag::StructLike as ___ZerocopyTagPrimitive; + #[allow(non_upper_case_globals)] + const ___ZEROCOPY_TAG_TupleLike: ___ZerocopyTagPrimitive = + ___ZerocopyTag::TupleLike as ___ZerocopyTagPrimitive; + type ___ZerocopyOuterTag = (); + type ___ZerocopyInnerTag = ___ZerocopyTag; + #[repr(C)] + #[allow(non_snake_case)] + struct ___ZerocopyVariantStruct_StructLike<'a: 'static, const N: usize, X, Y: Deref>( + core_reexport::mem::MaybeUninit<___ZerocopyInnerTag>, + u8, + X, + X::Target, + Y::Target, + [(X, Y); N], + core_reexport::marker::PhantomData<ComplexWithGenerics<'a, N, X, Y>>, + ) + where + X: Deref<Target = &'a [(X, Y); N]>; + #[allow(deprecated)] + #[automatically_derived] + unsafe impl<'a: 'static, const N: usize, X, Y: Deref> ::zerocopy::TryFromBytes + for ___ZerocopyVariantStruct_StructLike<'a, { N }, X, Y> + where + X: Deref<Target = &'a [(X, Y); N]>, + core_reexport::mem::MaybeUninit<___ZerocopyInnerTag>: ::zerocopy::TryFromBytes, + u8: ::zerocopy::TryFromBytes, + X: ::zerocopy::TryFromBytes, + X::Target: ::zerocopy::TryFromBytes, + Y::Target: ::zerocopy::TryFromBytes, + [(X, Y); N]: ::zerocopy::TryFromBytes, + core_reexport::marker::PhantomData<ComplexWithGenerics<'a, N, X, Y>>: + ::zerocopy::TryFromBytes, + { + fn only_derive_is_allowed_to_implement_this_trait() {} + fn is_bit_valid<___ZerocopyAliasing>( + mut candidate: ::zerocopy::Maybe<Self, ___ZerocopyAliasing>, + ) -> ::zerocopy::util::macro_util::core_reexport::primitive::bool + where + ___ZerocopyAliasing: ::zerocopy::pointer::invariant::Reference, + { + 
use ::zerocopy::util::macro_util::core_reexport; + use ::zerocopy::pointer::PtrInner; + + true && { + let field_candidate = unsafe { + let project = |slf: PtrInner<'_, Self>| { + let slf = slf.as_non_null().as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).0); + let ptr = unsafe { core_reexport::ptr::NonNull::new_unchecked(field) }; + unsafe { PtrInner::new(ptr) } + }; + candidate.reborrow().cast_unsized_unchecked(project) + }; + <core_reexport::mem::MaybeUninit< + ___ZerocopyInnerTag, + > as ::zerocopy::TryFromBytes>::is_bit_valid(field_candidate) + } && { + let field_candidate = unsafe { + let project = |slf: PtrInner<'_, Self>| { + let slf = slf.as_non_null().as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).1); + let ptr = unsafe { core_reexport::ptr::NonNull::new_unchecked(field) }; + unsafe { PtrInner::new(ptr) } + }; + candidate.reborrow().cast_unsized_unchecked(project) + }; + <u8 as ::zerocopy::TryFromBytes>::is_bit_valid(field_candidate) + } && { + let field_candidate = unsafe { + let project = |slf: PtrInner<'_, Self>| { + let slf = slf.as_non_null().as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).2); + let ptr = unsafe { core_reexport::ptr::NonNull::new_unchecked(field) }; + unsafe { PtrInner::new(ptr) } + }; + candidate.reborrow().cast_unsized_unchecked(project) + }; + <X as ::zerocopy::TryFromBytes>::is_bit_valid(field_candidate) + } && { + let field_candidate = unsafe { + let project = |slf: PtrInner<'_, Self>| { + let slf = slf.as_non_null().as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).3); + let ptr = unsafe { core_reexport::ptr::NonNull::new_unchecked(field) }; + unsafe { PtrInner::new(ptr) } + }; + candidate.reborrow().cast_unsized_unchecked(project) + }; + <X::Target as ::zerocopy::TryFromBytes>::is_bit_valid(field_candidate) + } && { + let field_candidate = unsafe { + let project = |slf: PtrInner<'_, Self>| { + let slf = slf.as_non_null().as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).4); + let ptr = unsafe { core_reexport::ptr::NonNull::new_unchecked(field) }; + unsafe { PtrInner::new(ptr) } + }; + candidate.reborrow().cast_unsized_unchecked(project) + }; + <Y::Target as ::zerocopy::TryFromBytes>::is_bit_valid(field_candidate) + } && { + let field_candidate = unsafe { + let project = |slf: PtrInner<'_, Self>| { + let slf = slf.as_non_null().as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).5); + let ptr = unsafe { core_reexport::ptr::NonNull::new_unchecked(field) }; + unsafe { PtrInner::new(ptr) } + }; + candidate.reborrow().cast_unsized_unchecked(project) + }; + <[(X, Y); N] as ::zerocopy::TryFromBytes>::is_bit_valid(field_candidate) + } && { + let field_candidate = unsafe { + let project = |slf: PtrInner<'_, Self>| { + let slf = slf.as_non_null().as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).6); + let ptr = unsafe { core_reexport::ptr::NonNull::new_unchecked(field) }; + unsafe { PtrInner::new(ptr) } + }; + candidate.reborrow().cast_unsized_unchecked(project) + }; + <core_reexport::marker::PhantomData< + ComplexWithGenerics<'a, N, X, Y>, + > as ::zerocopy::TryFromBytes>::is_bit_valid(field_candidate) + } + } + } + #[repr(C)] + #[allow(non_snake_case)] + struct ___ZerocopyVariantStruct_TupleLike<'a: 'static, const N: usize, X, Y: Deref>( + core_reexport::mem::MaybeUninit<___ZerocopyInnerTag>, + bool, + Y, + PhantomData<&'a [(X, Y); N]>, + core_reexport::marker::PhantomData<ComplexWithGenerics<'a, N, X, Y>>, + ) + where + X: Deref<Target = &'a [(X, Y); N]>; + 
#[allow(deprecated)] + #[automatically_derived] + unsafe impl<'a: 'static, const N: usize, X, Y: Deref> ::zerocopy::TryFromBytes + for ___ZerocopyVariantStruct_TupleLike<'a, { N }, X, Y> + where + X: Deref<Target = &'a [(X, Y); N]>, + core_reexport::mem::MaybeUninit<___ZerocopyInnerTag>: ::zerocopy::TryFromBytes, + bool: ::zerocopy::TryFromBytes, + Y: ::zerocopy::TryFromBytes, + PhantomData<&'a [(X, Y); N]>: ::zerocopy::TryFromBytes, + core_reexport::marker::PhantomData<ComplexWithGenerics<'a, N, X, Y>>: + ::zerocopy::TryFromBytes, + { + fn only_derive_is_allowed_to_implement_this_trait() {} + fn is_bit_valid<___ZerocopyAliasing>( + mut candidate: ::zerocopy::Maybe<Self, ___ZerocopyAliasing>, + ) -> ::zerocopy::util::macro_util::core_reexport::primitive::bool + where + ___ZerocopyAliasing: ::zerocopy::pointer::invariant::Reference, + { + use ::zerocopy::util::macro_util::core_reexport; + use ::zerocopy::pointer::PtrInner; + + true && { + let field_candidate = unsafe { + let project = |slf: PtrInner<'_, Self>| { + let slf = slf.as_non_null().as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).0); + let ptr = unsafe { core_reexport::ptr::NonNull::new_unchecked(field) }; + unsafe { PtrInner::new(ptr) } + }; + candidate.reborrow().cast_unsized_unchecked(project) + }; + <core_reexport::mem::MaybeUninit< + ___ZerocopyInnerTag, + > as ::zerocopy::TryFromBytes>::is_bit_valid(field_candidate) + } && { + let field_candidate = unsafe { + let project = |slf: PtrInner<'_, Self>| { + let slf = slf.as_non_null().as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).1); + let ptr = unsafe { core_reexport::ptr::NonNull::new_unchecked(field) }; + unsafe { PtrInner::new(ptr) } + }; + candidate.reborrow().cast_unsized_unchecked(project) + }; + <bool as ::zerocopy::TryFromBytes>::is_bit_valid(field_candidate) + } && { + let field_candidate = unsafe { + let project = |slf: PtrInner<'_, Self>| { + let slf = slf.as_non_null().as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).2); + let ptr = unsafe { core_reexport::ptr::NonNull::new_unchecked(field) }; + unsafe { PtrInner::new(ptr) } + }; + candidate.reborrow().cast_unsized_unchecked(project) + }; + <Y as ::zerocopy::TryFromBytes>::is_bit_valid(field_candidate) + } && { + let field_candidate = unsafe { + let project = |slf: PtrInner<'_, Self>| { + let slf = slf.as_non_null().as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).3); + let ptr = unsafe { core_reexport::ptr::NonNull::new_unchecked(field) }; + unsafe { PtrInner::new(ptr) } + }; + candidate.reborrow().cast_unsized_unchecked(project) + }; + <PhantomData<&'a [(X, Y); N]> as ::zerocopy::TryFromBytes>::is_bit_valid( + field_candidate, + ) + } && { + let field_candidate = unsafe { + let project = |slf: PtrInner<'_, Self>| { + let slf = slf.as_non_null().as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).4); + let ptr = unsafe { core_reexport::ptr::NonNull::new_unchecked(field) }; + unsafe { PtrInner::new(ptr) } + }; + candidate.reborrow().cast_unsized_unchecked(project) + }; + <core_reexport::marker::PhantomData< + ComplexWithGenerics<'a, N, X, Y>, + > as ::zerocopy::TryFromBytes>::is_bit_valid(field_candidate) + } + } + } + #[repr(C)] + #[allow(non_snake_case)] + union ___ZerocopyVariants<'a: 'static, const N: usize, X, Y: Deref> { + __field_StructLike: + core_reexport::mem::ManuallyDrop<___ZerocopyVariantStruct_StructLike<'a, N, X, Y>>, + __field_TupleLike: + core_reexport::mem::ManuallyDrop<___ZerocopyVariantStruct_TupleLike<'a, N, X, Y>>, + 
__nonempty: (), + } + #[repr(C)] + struct ___ZerocopyRawEnum<'a: 'static, const N: usize, X, Y: Deref> { + tag: ___ZerocopyOuterTag, + variants: ___ZerocopyVariants<'a, N, X, Y>, + } + let tag = { + let tag_ptr = unsafe { + candidate.reborrow().cast_unsized_unchecked(|p: ::zerocopy::pointer::PtrInner<'_, Self>| { p.cast_sized::<___ZerocopyTagPrimitive>() }) + }; + let tag_ptr = unsafe { tag_ptr.assume_initialized() }; + tag_ptr.recall_validity::<_, (_, (_, _))>().read_unaligned::<::zerocopy::BecauseImmutable>() + }; + let raw_enum = unsafe { + candidate.cast_unsized_unchecked(|p: ::zerocopy::pointer::PtrInner<'_, Self>| { p.cast_sized::<___ZerocopyRawEnum<'a, N, X, Y>>() }) + }; + let raw_enum = unsafe { raw_enum.assume_initialized() }; + let variants = unsafe { + use ::zerocopy::pointer::PtrInner; + raw_enum.cast_unsized_unchecked(|p: PtrInner<'_, ___ZerocopyRawEnum<'a, N, X, Y>>| { + let p = p.as_non_null().as_ptr(); + let ptr = core_reexport::ptr::addr_of_mut!((*p).variants); + let ptr = unsafe { core_reexport::ptr::NonNull::new_unchecked(ptr) }; + unsafe { PtrInner::new(ptr) } + }) + }; + #[allow(non_upper_case_globals)] + match tag { + ___ZEROCOPY_TAG_UnitLike => true, + ___ZEROCOPY_TAG_StructLike => { + let variant = unsafe { + variants.cast_unsized_unchecked(|p: ::zerocopy::pointer::PtrInner<'_, ___ZerocopyVariants<'a, N, X, Y>>| { + p.cast_sized::<___ZerocopyVariantStruct_StructLike<'a, N, X, Y>>() + }) + }; + let variant = unsafe { variant.assume_initialized() }; + <___ZerocopyVariantStruct_StructLike<'a, N, X, Y> as ::zerocopy ::TryFromBytes>::is_bit_valid ( + variant) + } + ___ZEROCOPY_TAG_TupleLike => { + let variant = unsafe { + variants.cast_unsized_unchecked(|p: ::zerocopy::pointer::PtrInner<'_, ___ZerocopyVariants<'a, N, X, Y>>| { + p.cast_sized::<___ZerocopyVariantStruct_TupleLike<'a, N, X, Y>>() + }) + }; + let variant = unsafe { variant.assume_initialized() }; + <___ZerocopyVariantStruct_TupleLike<'a, N, X, Y> as ::zerocopy ::TryFromBytes>::is_bit_valid ( + variant) + } + _ => false, + } + } + } + } no_build + } + + test! 
{ + TryFromBytes { + #[repr(u32)] + enum ComplexWithGenerics<'a: 'static, const N: usize, X, Y: Deref> + where + X: Deref<Target = &'a [(X, Y); N]>, + { + UnitLike, + StructLike { a: u8, b: X, c: X::Target, d: Y::Target, e: [(X, Y); N] }, + TupleLike(bool, Y, PhantomData<&'a [(X, Y); N]>), + } + } expands to { + #[allow(deprecated)] + #[automatically_derived] + unsafe impl<'a: 'static, const N: usize, X, Y: Deref> ::zerocopy::TryFromBytes + for ComplexWithGenerics<'a, { N }, X, Y> + where + X: Deref<Target = &'a [(X, Y); N]>, + u8: ::zerocopy::TryFromBytes, + X: ::zerocopy::TryFromBytes, + X::Target: ::zerocopy::TryFromBytes, + Y::Target: ::zerocopy::TryFromBytes, + [(X, Y); N]: ::zerocopy::TryFromBytes, + bool: ::zerocopy::TryFromBytes, + Y: ::zerocopy::TryFromBytes, + PhantomData<&'a [(X, Y); N]>: ::zerocopy::TryFromBytes, + { + fn only_derive_is_allowed_to_implement_this_trait() {} + fn is_bit_valid<___ZerocopyAliasing>( + mut candidate: ::zerocopy::Maybe<'_, Self, ___ZerocopyAliasing>, + ) -> ::zerocopy::util::macro_util::core_reexport::primitive::bool + where + ___ZerocopyAliasing: ::zerocopy::pointer::invariant::Reference, + { + use ::zerocopy::util::macro_util::core_reexport; + + #[repr(u32)] + #[allow(dead_code, non_camel_case_types)] + enum ___ZerocopyTag { + UnitLike, + StructLike, + TupleLike, + } + type ___ZerocopyTagPrimitive = ::zerocopy::util::macro_util::SizeToTag< + { core_reexport::mem::size_of::<___ZerocopyTag>() }, + >; + #[allow(non_upper_case_globals)] + const ___ZEROCOPY_TAG_UnitLike: ___ZerocopyTagPrimitive = + ___ZerocopyTag::UnitLike as ___ZerocopyTagPrimitive; + #[allow(non_upper_case_globals)] + const ___ZEROCOPY_TAG_StructLike: ___ZerocopyTagPrimitive = + ___ZerocopyTag::StructLike as ___ZerocopyTagPrimitive; + #[allow(non_upper_case_globals)] + const ___ZEROCOPY_TAG_TupleLike: ___ZerocopyTagPrimitive = + ___ZerocopyTag::TupleLike as ___ZerocopyTagPrimitive; + type ___ZerocopyOuterTag = (); + type ___ZerocopyInnerTag = ___ZerocopyTag; + #[repr(C)] + #[allow(non_snake_case)] + struct ___ZerocopyVariantStruct_StructLike<'a: 'static, const N: usize, X, Y: Deref>( + core_reexport::mem::MaybeUninit<___ZerocopyInnerTag>, + u8, + X, + X::Target, + Y::Target, + [(X, Y); N], + core_reexport::marker::PhantomData<ComplexWithGenerics<'a, N, X, Y>>, + ) + where + X: Deref<Target = &'a [(X, Y); N]>; + #[allow(deprecated)] + #[automatically_derived] + unsafe impl<'a: 'static, const N: usize, X, Y: Deref> ::zerocopy::TryFromBytes + for ___ZerocopyVariantStruct_StructLike<'a, { N }, X, Y> + where + X: Deref<Target = &'a [(X, Y); N]>, + core_reexport::mem::MaybeUninit<___ZerocopyInnerTag>: ::zerocopy::TryFromBytes, + u8: ::zerocopy::TryFromBytes, + X: ::zerocopy::TryFromBytes, + X::Target: ::zerocopy::TryFromBytes, + Y::Target: ::zerocopy::TryFromBytes, + [(X, Y); N]: ::zerocopy::TryFromBytes, + core_reexport::marker::PhantomData<ComplexWithGenerics<'a, N, X, Y>>: + ::zerocopy::TryFromBytes, + { + fn only_derive_is_allowed_to_implement_this_trait() {} + fn is_bit_valid<___ZerocopyAliasing>( + mut candidate: ::zerocopy::Maybe<Self, ___ZerocopyAliasing>, + ) -> ::zerocopy::util::macro_util::core_reexport::primitive::bool + where + ___ZerocopyAliasing: ::zerocopy::pointer::invariant::Reference, + { + use ::zerocopy::util::macro_util::core_reexport; + use ::zerocopy::pointer::PtrInner; + + true && { + let field_candidate = unsafe { + let project = |slf: PtrInner<'_, Self>| { + let slf = slf.as_non_null().as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).0); + let ptr = 
unsafe { core_reexport::ptr::NonNull::new_unchecked(field) }; + unsafe { PtrInner::new(ptr) } + }; + candidate.reborrow().cast_unsized_unchecked(project) + }; + <core_reexport::mem::MaybeUninit< + ___ZerocopyInnerTag, + > as ::zerocopy::TryFromBytes>::is_bit_valid(field_candidate) + } && { + let field_candidate = unsafe { + let project = |slf: PtrInner<'_, Self>| { + let slf = slf.as_non_null().as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).1); + let ptr = unsafe { core_reexport::ptr::NonNull::new_unchecked(field) }; + unsafe { PtrInner::new(ptr) } + }; + candidate.reborrow().cast_unsized_unchecked(project) + }; + <u8 as ::zerocopy::TryFromBytes>::is_bit_valid(field_candidate) + } && { + let field_candidate = unsafe { + let project = |slf: PtrInner<'_, Self>| { + let slf = slf.as_non_null().as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).2); + let ptr = unsafe { core_reexport::ptr::NonNull::new_unchecked(field) }; + unsafe { PtrInner::new(ptr) } + }; + candidate.reborrow().cast_unsized_unchecked(project) + }; + <X as ::zerocopy::TryFromBytes>::is_bit_valid(field_candidate) + } && { + let field_candidate = unsafe { + let project = |slf: PtrInner<'_, Self>| { + let slf = slf.as_non_null().as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).3); + let ptr = unsafe { core_reexport::ptr::NonNull::new_unchecked(field) }; + unsafe { PtrInner::new(ptr) } + }; + candidate.reborrow().cast_unsized_unchecked(project) + }; + <X::Target as ::zerocopy::TryFromBytes>::is_bit_valid(field_candidate) + } && { + let field_candidate = unsafe { + let project = |slf: PtrInner<'_, Self>| { + let slf = slf.as_non_null().as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).4); + let ptr = unsafe { core_reexport::ptr::NonNull::new_unchecked(field) }; + unsafe { PtrInner::new(ptr) } + }; + candidate.reborrow().cast_unsized_unchecked(project) + }; + <Y::Target as ::zerocopy::TryFromBytes>::is_bit_valid(field_candidate) + } && { + let field_candidate = unsafe { + let project = |slf: PtrInner<'_, Self>| { + let slf = slf.as_non_null().as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).5); + let ptr = unsafe { core_reexport::ptr::NonNull::new_unchecked(field) }; + unsafe { PtrInner::new(ptr) } + }; + candidate.reborrow().cast_unsized_unchecked(project) + }; + <[(X, Y); N] as ::zerocopy::TryFromBytes>::is_bit_valid(field_candidate) + } && { + let field_candidate = unsafe { + let project = |slf: PtrInner<'_, Self>| { + let slf = slf.as_non_null().as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).6); + let ptr = unsafe { core_reexport::ptr::NonNull::new_unchecked(field) }; + unsafe { PtrInner::new(ptr) } + }; + candidate.reborrow().cast_unsized_unchecked(project) + }; + <core_reexport::marker::PhantomData< + ComplexWithGenerics<'a, N, X, Y>, + > as ::zerocopy::TryFromBytes>::is_bit_valid(field_candidate) + } + } + } + #[repr(C)] + #[allow(non_snake_case)] + struct ___ZerocopyVariantStruct_TupleLike<'a: 'static, const N: usize, X, Y: Deref>( + core_reexport::mem::MaybeUninit<___ZerocopyInnerTag>, + bool, + Y, + PhantomData<&'a [(X, Y); N]>, + core_reexport::marker::PhantomData<ComplexWithGenerics<'a, N, X, Y>>, + ) + where + X: Deref<Target = &'a [(X, Y); N]>; + #[allow(deprecated)] + #[automatically_derived] + unsafe impl<'a: 'static, const N: usize, X, Y: Deref> ::zerocopy::TryFromBytes + for ___ZerocopyVariantStruct_TupleLike<'a, { N }, X, Y> + where + X: Deref<Target = &'a [(X, Y); N]>, + 
core_reexport::mem::MaybeUninit<___ZerocopyInnerTag>: ::zerocopy::TryFromBytes, + bool: ::zerocopy::TryFromBytes, + Y: ::zerocopy::TryFromBytes, + PhantomData<&'a [(X, Y); N]>: ::zerocopy::TryFromBytes, + core_reexport::marker::PhantomData<ComplexWithGenerics<'a, N, X, Y>>: + ::zerocopy::TryFromBytes, + { + fn only_derive_is_allowed_to_implement_this_trait() {} + fn is_bit_valid<___ZerocopyAliasing>( + mut candidate: ::zerocopy::Maybe<Self, ___ZerocopyAliasing>, + ) -> ::zerocopy::util::macro_util::core_reexport::primitive::bool + where + ___ZerocopyAliasing: ::zerocopy::pointer::invariant::Reference, + { + use ::zerocopy::util::macro_util::core_reexport; + use ::zerocopy::pointer::PtrInner; + + true && { + let field_candidate = unsafe { + let project = |slf: PtrInner<'_, Self>| { + let slf = slf.as_non_null().as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).0); + let ptr = unsafe { core_reexport::ptr::NonNull::new_unchecked(field) }; + unsafe { PtrInner::new(ptr) } + }; + candidate.reborrow().cast_unsized_unchecked(project) + }; + <core_reexport::mem::MaybeUninit< + ___ZerocopyInnerTag, + > as ::zerocopy::TryFromBytes>::is_bit_valid(field_candidate) + } && { + let field_candidate = unsafe { + let project = |slf: PtrInner<'_, Self>| { + let slf = slf.as_non_null().as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).1); + let ptr = unsafe { core_reexport::ptr::NonNull::new_unchecked(field) }; + unsafe { PtrInner::new(ptr) } + }; + candidate.reborrow().cast_unsized_unchecked(project) + }; + <bool as ::zerocopy::TryFromBytes>::is_bit_valid(field_candidate) + } && { + let field_candidate = unsafe { + let project = |slf: PtrInner<'_, Self>| { + let slf = slf.as_non_null().as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).2); + let ptr = unsafe { core_reexport::ptr::NonNull::new_unchecked(field) }; + unsafe { PtrInner::new(ptr) } + }; + candidate.reborrow().cast_unsized_unchecked(project) + }; + <Y as ::zerocopy::TryFromBytes>::is_bit_valid(field_candidate) + } && { + let field_candidate = unsafe { + let project = |slf: PtrInner<'_, Self>| { + let slf = slf.as_non_null().as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).3); + let ptr = unsafe { core_reexport::ptr::NonNull::new_unchecked(field) }; + unsafe { PtrInner::new(ptr) } + }; + candidate.reborrow().cast_unsized_unchecked(project) + }; + <PhantomData<&'a [(X, Y); N]> as ::zerocopy::TryFromBytes>::is_bit_valid( + field_candidate, + ) + } && { + let field_candidate = unsafe { + let project = |slf: PtrInner<'_, Self>| { + let slf = slf.as_non_null().as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).4); + let ptr = unsafe { core_reexport::ptr::NonNull::new_unchecked(field) }; + unsafe { PtrInner::new(ptr) } + }; + candidate.reborrow().cast_unsized_unchecked(project) + }; + <core_reexport::marker::PhantomData< + ComplexWithGenerics<'a, N, X, Y>, + > as ::zerocopy::TryFromBytes>::is_bit_valid(field_candidate) + } + } + } + #[repr(C)] + #[allow(non_snake_case)] + union ___ZerocopyVariants<'a: 'static, const N: usize, X, Y: Deref> { + __field_StructLike: + core_reexport::mem::ManuallyDrop<___ZerocopyVariantStruct_StructLike<'a, N, X, Y>>, + __field_TupleLike: + core_reexport::mem::ManuallyDrop<___ZerocopyVariantStruct_TupleLike<'a, N, X, Y>>, + __nonempty: (), + } + #[repr(C)] + struct ___ZerocopyRawEnum<'a: 'static, const N: usize, X, Y: Deref> { + tag: ___ZerocopyOuterTag, + variants: ___ZerocopyVariants<'a, N, X, Y>, + } + let tag = { + let tag_ptr = unsafe { + 
candidate.reborrow().cast_unsized_unchecked(|p: ::zerocopy::pointer::PtrInner<'_, Self>| { p.cast_sized::<___ZerocopyTagPrimitive> ()}) + }; + let tag_ptr = unsafe { tag_ptr.assume_initialized() }; + tag_ptr.recall_validity::<_, (_, (_, _))>().read_unaligned::<::zerocopy::BecauseImmutable>() + }; + let raw_enum = unsafe { + candidate.cast_unsized_unchecked(|p: ::zerocopy::pointer::PtrInner<'_, Self>| { p.cast_sized::<___ZerocopyRawEnum<'a, N, X, Y>> ()}) + }; + let raw_enum = unsafe { raw_enum.assume_initialized() }; + let variants = unsafe { + use ::zerocopy::pointer::PtrInner; + raw_enum.cast_unsized_unchecked(|p: PtrInner<'_, ___ZerocopyRawEnum<'a, N, X, Y>>| { + let p = p.as_non_null().as_ptr(); + let ptr = core_reexport::ptr::addr_of_mut!((*p).variants); + let ptr = unsafe { core_reexport::ptr::NonNull::new_unchecked(ptr) }; + unsafe { PtrInner::new(ptr) } + }) + }; + #[allow(non_upper_case_globals)] + match tag { + ___ZEROCOPY_TAG_UnitLike => true, + ___ZEROCOPY_TAG_StructLike => { + let variant = unsafe { + variants.cast_unsized_unchecked(|p: ::zerocopy::pointer::PtrInner<'_, ___ZerocopyVariants<'a, N, X, Y>>| { + p.cast_sized::<___ZerocopyVariantStruct_StructLike<'a, N, X, Y>>() + }) + }; + let variant = unsafe { variant.assume_initialized() }; + <___ZerocopyVariantStruct_StructLike<'a, N, X, Y> as ::zerocopy ::TryFromBytes>::is_bit_valid ( + variant) + } + ___ZEROCOPY_TAG_TupleLike => { + let variant = unsafe { + variants.cast_unsized_unchecked(|p: ::zerocopy::pointer::PtrInner<'_, ___ZerocopyVariants<'a, N, X, Y>>| { + p.cast_sized::<___ZerocopyVariantStruct_TupleLike<'a, N, X, Y>>() + }) + }; + let variant = unsafe { variant.assume_initialized() }; + <___ZerocopyVariantStruct_TupleLike<'a, N, X, Y> as ::zerocopy ::TryFromBytes>::is_bit_valid ( + variant) + } + _ => false, + } + } + } + } no_build + } + + test! 
{ + TryFromBytes { + #[repr(C)] + enum ComplexWithGenerics<'a: 'static, const N: usize, X, Y: Deref> + where + X: Deref<Target = &'a [(X, Y); N]>, + { + UnitLike, + StructLike { a: u8, b: X, c: X::Target, d: Y::Target, e: [(X, Y); N] }, + TupleLike(bool, Y, PhantomData<&'a [(X, Y); N]>), + } + } expands to { + #[allow(deprecated)] + #[automatically_derived] + unsafe impl<'a: 'static, const N: usize, X, Y: Deref> ::zerocopy::TryFromBytes + for ComplexWithGenerics<'a, { N }, X, Y> + where + X: Deref<Target = &'a [(X, Y); N]>, + u8: ::zerocopy::TryFromBytes, + X: ::zerocopy::TryFromBytes, + X::Target: ::zerocopy::TryFromBytes, + Y::Target: ::zerocopy::TryFromBytes, + [(X, Y); N]: ::zerocopy::TryFromBytes, + bool: ::zerocopy::TryFromBytes, + Y: ::zerocopy::TryFromBytes, + PhantomData<&'a [(X, Y); N]>: ::zerocopy::TryFromBytes, + { + fn only_derive_is_allowed_to_implement_this_trait() {} + fn is_bit_valid<___ZerocopyAliasing>( + mut candidate: ::zerocopy::Maybe<'_, Self, ___ZerocopyAliasing>, + ) -> ::zerocopy::util::macro_util::core_reexport::primitive::bool + where + ___ZerocopyAliasing: ::zerocopy::pointer::invariant::Reference, + { + use ::zerocopy::util::macro_util::core_reexport; + + #[repr(C)] + #[allow(dead_code, non_camel_case_types)] + enum ___ZerocopyTag { + UnitLike, + StructLike, + TupleLike, + } + type ___ZerocopyTagPrimitive = ::zerocopy::util::macro_util::SizeToTag< + { core_reexport::mem::size_of::<___ZerocopyTag>() }, + >; + #[allow(non_upper_case_globals)] + const ___ZEROCOPY_TAG_UnitLike: ___ZerocopyTagPrimitive = + ___ZerocopyTag::UnitLike as ___ZerocopyTagPrimitive; + #[allow(non_upper_case_globals)] + const ___ZEROCOPY_TAG_StructLike: ___ZerocopyTagPrimitive = + ___ZerocopyTag::StructLike as ___ZerocopyTagPrimitive; + #[allow(non_upper_case_globals)] + const ___ZEROCOPY_TAG_TupleLike: ___ZerocopyTagPrimitive = + ___ZerocopyTag::TupleLike as ___ZerocopyTagPrimitive; + type ___ZerocopyOuterTag = ___ZerocopyTag; + type ___ZerocopyInnerTag = (); + #[repr(C)] + #[allow(non_snake_case)] + struct ___ZerocopyVariantStruct_StructLike<'a: 'static, const N: usize, X, Y: Deref>( + core_reexport::mem::MaybeUninit<___ZerocopyInnerTag>, + u8, + X, + X::Target, + Y::Target, + [(X, Y); N], + core_reexport::marker::PhantomData<ComplexWithGenerics<'a, N, X, Y>>, + ) + where + X: Deref<Target = &'a [(X, Y); N]>; + #[allow(deprecated)] + #[automatically_derived] + unsafe impl<'a: 'static, const N: usize, X, Y: Deref> ::zerocopy::TryFromBytes + for ___ZerocopyVariantStruct_StructLike<'a, { N }, X, Y> + where + X: Deref<Target = &'a [(X, Y); N]>, + core_reexport::mem::MaybeUninit<___ZerocopyInnerTag>: ::zerocopy::TryFromBytes, + u8: ::zerocopy::TryFromBytes, + X: ::zerocopy::TryFromBytes, + X::Target: ::zerocopy::TryFromBytes, + Y::Target: ::zerocopy::TryFromBytes, + [(X, Y); N]: ::zerocopy::TryFromBytes, + core_reexport::marker::PhantomData<ComplexWithGenerics<'a, N, X, Y>>: + ::zerocopy::TryFromBytes, + { + fn only_derive_is_allowed_to_implement_this_trait() {} + fn is_bit_valid<___ZerocopyAliasing>( + mut candidate: ::zerocopy::Maybe<Self, ___ZerocopyAliasing>, + ) -> ::zerocopy::util::macro_util::core_reexport::primitive::bool + where + ___ZerocopyAliasing: ::zerocopy::pointer::invariant::Reference, + { + use ::zerocopy::util::macro_util::core_reexport; + use ::zerocopy::pointer::PtrInner; + + true && { + let field_candidate = unsafe { + let project = |slf: PtrInner<'_, Self>| { + let slf = slf.as_non_null().as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).0); + let ptr = 
unsafe { core_reexport::ptr::NonNull::new_unchecked(field) }; + unsafe { PtrInner::new(ptr) } + }; + candidate.reborrow().cast_unsized_unchecked(project) + }; + <core_reexport::mem::MaybeUninit< + ___ZerocopyInnerTag, + > as ::zerocopy::TryFromBytes>::is_bit_valid(field_candidate) + } && { + let field_candidate = unsafe { + let project = |slf: PtrInner<'_, Self>| { + let slf = slf.as_non_null().as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).1); + let ptr = unsafe { core_reexport::ptr::NonNull::new_unchecked(field) }; + unsafe { PtrInner::new(ptr) } + }; + candidate.reborrow().cast_unsized_unchecked(project) + }; + <u8 as ::zerocopy::TryFromBytes>::is_bit_valid(field_candidate) + } && { + let field_candidate = unsafe { + let project = |slf: PtrInner<'_, Self>| { + let slf = slf.as_non_null().as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).2); + let ptr = unsafe { core_reexport::ptr::NonNull::new_unchecked(field) }; + unsafe { PtrInner::new(ptr) } + }; + candidate.reborrow().cast_unsized_unchecked(project) + }; + <X as ::zerocopy::TryFromBytes>::is_bit_valid(field_candidate) + } && { + let field_candidate = unsafe { + let project = |slf: PtrInner<'_, Self>| { + let slf = slf.as_non_null().as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).3); + let ptr = unsafe { core_reexport::ptr::NonNull::new_unchecked(field) }; + unsafe { PtrInner::new(ptr) } + }; + candidate.reborrow().cast_unsized_unchecked(project) + }; + <X::Target as ::zerocopy::TryFromBytes>::is_bit_valid(field_candidate) + } && { + let field_candidate = unsafe { + let project = |slf: PtrInner<'_, Self>| { + let slf = slf.as_non_null().as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).4); + let ptr = unsafe { core_reexport::ptr::NonNull::new_unchecked(field) }; + unsafe { PtrInner::new(ptr) } + }; + candidate.reborrow().cast_unsized_unchecked(project) + }; + <Y::Target as ::zerocopy::TryFromBytes>::is_bit_valid(field_candidate) + } && { + let field_candidate = unsafe { + let project = |slf: PtrInner<'_, Self>| { + let slf = slf.as_non_null().as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).5); + let ptr = unsafe { core_reexport::ptr::NonNull::new_unchecked(field) }; + unsafe { PtrInner::new(ptr) } + }; + candidate.reborrow().cast_unsized_unchecked(project) + }; + <[(X, Y); N] as ::zerocopy::TryFromBytes>::is_bit_valid(field_candidate) + } && { + let field_candidate = unsafe { + let project = |slf: PtrInner<'_, Self>| { + let slf = slf.as_non_null().as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).6); + let ptr = unsafe { core_reexport::ptr::NonNull::new_unchecked(field) }; + unsafe { PtrInner::new(ptr) } + }; + candidate.reborrow().cast_unsized_unchecked(project) + }; + <core_reexport::marker::PhantomData< + ComplexWithGenerics<'a, N, X, Y>, + > as ::zerocopy::TryFromBytes>::is_bit_valid(field_candidate) + } + } + } + #[repr(C)] + #[allow(non_snake_case)] + struct ___ZerocopyVariantStruct_TupleLike<'a: 'static, const N: usize, X, Y: Deref>( + core_reexport::mem::MaybeUninit<___ZerocopyInnerTag>, + bool, + Y, + PhantomData<&'a [(X, Y); N]>, + core_reexport::marker::PhantomData<ComplexWithGenerics<'a, N, X, Y>>, + ) + where + X: Deref<Target = &'a [(X, Y); N]>; + #[allow(deprecated)] + #[automatically_derived] + unsafe impl<'a: 'static, const N: usize, X, Y: Deref> ::zerocopy::TryFromBytes + for ___ZerocopyVariantStruct_TupleLike<'a, { N }, X, Y> + where + X: Deref<Target = &'a [(X, Y); N]>, + 
core_reexport::mem::MaybeUninit<___ZerocopyInnerTag>: ::zerocopy::TryFromBytes, + bool: ::zerocopy::TryFromBytes, + Y: ::zerocopy::TryFromBytes, + PhantomData<&'a [(X, Y); N]>: ::zerocopy::TryFromBytes, + core_reexport::marker::PhantomData<ComplexWithGenerics<'a, N, X, Y>>: + ::zerocopy::TryFromBytes, + { + fn only_derive_is_allowed_to_implement_this_trait() {} + fn is_bit_valid<___ZerocopyAliasing>( + mut candidate: ::zerocopy::Maybe<Self, ___ZerocopyAliasing>, + ) -> ::zerocopy::util::macro_util::core_reexport::primitive::bool + where + ___ZerocopyAliasing: ::zerocopy::pointer::invariant::Reference, + { + use ::zerocopy::util::macro_util::core_reexport; + use ::zerocopy::pointer::PtrInner; + + true && { + let field_candidate = unsafe { + let project = |slf: PtrInner<'_, Self>| { + let slf = slf.as_non_null().as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).0); + let ptr = unsafe { core_reexport::ptr::NonNull::new_unchecked(field) }; + unsafe { PtrInner::new(ptr) } + }; + candidate.reborrow().cast_unsized_unchecked(project) + }; + <core_reexport::mem::MaybeUninit< + ___ZerocopyInnerTag, + > as ::zerocopy::TryFromBytes>::is_bit_valid(field_candidate) + } && { + let field_candidate = unsafe { + let project = |slf: PtrInner<'_, Self>| { + let slf = slf.as_non_null().as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).1); + let ptr = unsafe { core_reexport::ptr::NonNull::new_unchecked(field) }; + unsafe { PtrInner::new(ptr) } + }; + candidate.reborrow().cast_unsized_unchecked(project) + }; + <bool as ::zerocopy::TryFromBytes>::is_bit_valid(field_candidate) + } && { + let field_candidate = unsafe { + let project = |slf: PtrInner<'_, Self>| { + let slf = slf.as_non_null().as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).2); + let ptr = unsafe { core_reexport::ptr::NonNull::new_unchecked(field) }; + unsafe { PtrInner::new(ptr) } + }; + candidate.reborrow().cast_unsized_unchecked(project) + }; + <Y as ::zerocopy::TryFromBytes>::is_bit_valid(field_candidate) + } && { + let field_candidate = unsafe { + let project = |slf: PtrInner<'_, Self>| { + let slf = slf.as_non_null().as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).3); + let ptr = unsafe { core_reexport::ptr::NonNull::new_unchecked(field) }; + unsafe { PtrInner::new(ptr) } + }; + candidate.reborrow().cast_unsized_unchecked(project) + }; + <PhantomData<&'a [(X, Y); N]> as ::zerocopy::TryFromBytes>::is_bit_valid( + field_candidate, + ) + } && { + let field_candidate = unsafe { + let project = |slf: PtrInner<'_, Self>| { + let slf = slf.as_non_null().as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).4); + let ptr = unsafe { core_reexport::ptr::NonNull::new_unchecked(field) }; + unsafe { PtrInner::new(ptr) } + }; + candidate.reborrow().cast_unsized_unchecked(project) + }; + <core_reexport::marker::PhantomData< + ComplexWithGenerics<'a, N, X, Y>, + > as ::zerocopy::TryFromBytes>::is_bit_valid(field_candidate) + } + } + } + #[repr(C)] + #[allow(non_snake_case)] + union ___ZerocopyVariants<'a: 'static, const N: usize, X, Y: Deref> { + __field_StructLike: + core_reexport::mem::ManuallyDrop<___ZerocopyVariantStruct_StructLike<'a, N, X, Y>>, + __field_TupleLike: + core_reexport::mem::ManuallyDrop<___ZerocopyVariantStruct_TupleLike<'a, N, X, Y>>, + __nonempty: (), + } + #[repr(C)] + struct ___ZerocopyRawEnum<'a: 'static, const N: usize, X, Y: Deref> { + tag: ___ZerocopyOuterTag, + variants: ___ZerocopyVariants<'a, N, X, Y>, + } + let tag = { + let tag_ptr = unsafe { + 
candidate.reborrow().cast_unsized_unchecked(|p: ::zerocopy::pointer::PtrInner<'_, Self>| { p.cast_sized::<___ZerocopyTagPrimitive> ()}) + }; + let tag_ptr = unsafe { tag_ptr.assume_initialized() }; + tag_ptr.recall_validity::<_, (_, (_, _))>().read_unaligned::<::zerocopy::BecauseImmutable>() + }; + let raw_enum = unsafe { + candidate.cast_unsized_unchecked(|p: ::zerocopy::pointer::PtrInner<'_, Self>| { p.cast_sized::<___ZerocopyRawEnum<'a, N, X, Y>> ()}) + }; + let raw_enum = unsafe { raw_enum.assume_initialized() }; + let variants = unsafe { + use ::zerocopy::pointer::PtrInner; + raw_enum.cast_unsized_unchecked(|p: PtrInner<'_, ___ZerocopyRawEnum<'a, N, X, Y>>| { + let p = p.as_non_null().as_ptr(); + let ptr = core_reexport::ptr::addr_of_mut!((*p).variants); + let ptr = unsafe { core_reexport::ptr::NonNull::new_unchecked(ptr) }; + unsafe { PtrInner::new(ptr) } + }) + }; + #[allow(non_upper_case_globals)] + match tag { + ___ZEROCOPY_TAG_UnitLike => true, + ___ZEROCOPY_TAG_StructLike => { + let variant = unsafe { + variants.cast_unsized_unchecked(|p: ::zerocopy::pointer::PtrInner<'_, ___ZerocopyVariants<'a, N, X, Y>>| { + p.cast_sized::<___ZerocopyVariantStruct_StructLike<'a, N, X, Y>>() + }) + }; + let variant = unsafe { variant.assume_initialized() }; + <___ZerocopyVariantStruct_StructLike<'a, N, X, Y> as ::zerocopy ::TryFromBytes>::is_bit_valid ( + variant) + } + ___ZEROCOPY_TAG_TupleLike => { + let variant = unsafe { + variants.cast_unsized_unchecked(|p: ::zerocopy::pointer::PtrInner<'_, ___ZerocopyVariants<'a, N, X, Y>>| { + p.cast_sized::<___ZerocopyVariantStruct_TupleLike<'a, N, X, Y>>() + }) + }; + let variant = unsafe { variant.assume_initialized() }; + <___ZerocopyVariantStruct_TupleLike<'a, N, X, Y> as ::zerocopy ::TryFromBytes>::is_bit_valid ( + variant) + } + _ => false, + } + } + } + } no_build + } +} + +// This goes at the bottom because it's so verbose and it makes scrolling past +// other code a pain. +#[test] +fn test_from_bytes_enum() { + test! 
{ + FromBytes { + #[repr(u8)] + enum Foo { + Variant0, + Variant1, + Variant2, + Variant3, + Variant4, + Variant5, + Variant6, + Variant7, + Variant8, + Variant9, + Variant10, + Variant11, + Variant12, + Variant13, + Variant14, + Variant15, + Variant16, + Variant17, + Variant18, + Variant19, + Variant20, + Variant21, + Variant22, + Variant23, + Variant24, + Variant25, + Variant26, + Variant27, + Variant28, + Variant29, + Variant30, + Variant31, + Variant32, + Variant33, + Variant34, + Variant35, + Variant36, + Variant37, + Variant38, + Variant39, + Variant40, + Variant41, + Variant42, + Variant43, + Variant44, + Variant45, + Variant46, + Variant47, + Variant48, + Variant49, + Variant50, + Variant51, + Variant52, + Variant53, + Variant54, + Variant55, + Variant56, + Variant57, + Variant58, + Variant59, + Variant60, + Variant61, + Variant62, + Variant63, + Variant64, + Variant65, + Variant66, + Variant67, + Variant68, + Variant69, + Variant70, + Variant71, + Variant72, + Variant73, + Variant74, + Variant75, + Variant76, + Variant77, + Variant78, + Variant79, + Variant80, + Variant81, + Variant82, + Variant83, + Variant84, + Variant85, + Variant86, + Variant87, + Variant88, + Variant89, + Variant90, + Variant91, + Variant92, + Variant93, + Variant94, + Variant95, + Variant96, + Variant97, + Variant98, + Variant99, + Variant100, + Variant101, + Variant102, + Variant103, + Variant104, + Variant105, + Variant106, + Variant107, + Variant108, + Variant109, + Variant110, + Variant111, + Variant112, + Variant113, + Variant114, + Variant115, + Variant116, + Variant117, + Variant118, + Variant119, + Variant120, + Variant121, + Variant122, + Variant123, + Variant124, + Variant125, + Variant126, + Variant127, + Variant128, + Variant129, + Variant130, + Variant131, + Variant132, + Variant133, + Variant134, + Variant135, + Variant136, + Variant137, + Variant138, + Variant139, + Variant140, + Variant141, + Variant142, + Variant143, + Variant144, + Variant145, + Variant146, + Variant147, + Variant148, + Variant149, + Variant150, + Variant151, + Variant152, + Variant153, + Variant154, + Variant155, + Variant156, + Variant157, + Variant158, + Variant159, + Variant160, + Variant161, + Variant162, + Variant163, + Variant164, + Variant165, + Variant166, + Variant167, + Variant168, + Variant169, + Variant170, + Variant171, + Variant172, + Variant173, + Variant174, + Variant175, + Variant176, + Variant177, + Variant178, + Variant179, + Variant180, + Variant181, + Variant182, + Variant183, + Variant184, + Variant185, + Variant186, + Variant187, + Variant188, + Variant189, + Variant190, + Variant191, + Variant192, + Variant193, + Variant194, + Variant195, + Variant196, + Variant197, + Variant198, + Variant199, + Variant200, + Variant201, + Variant202, + Variant203, + Variant204, + Variant205, + Variant206, + Variant207, + Variant208, + Variant209, + Variant210, + Variant211, + Variant212, + Variant213, + Variant214, + Variant215, + Variant216, + Variant217, + Variant218, + Variant219, + Variant220, + Variant221, + Variant222, + Variant223, + Variant224, + Variant225, + Variant226, + Variant227, + Variant228, + Variant229, + Variant230, + Variant231, + Variant232, + Variant233, + Variant234, + Variant235, + Variant236, + Variant237, + Variant238, + Variant239, + Variant240, + Variant241, + Variant242, + Variant243, + Variant244, + Variant245, + Variant246, + Variant247, + Variant248, + Variant249, + Variant250, + Variant251, + Variant252, + Variant253, + Variant254, + Variant255, + } + } expands to { + 
#[allow(deprecated)] + #[automatically_derived] + unsafe impl ::zerocopy::TryFromBytes for Foo { + fn only_derive_is_allowed_to_implement_this_trait() {} + + fn is_bit_valid<___ZerocopyAliasing>( + _candidate: ::zerocopy::Maybe<Self, ___ZerocopyAliasing>, + ) -> ::zerocopy::util::macro_util::core_reexport::primitive::bool + where + ___ZerocopyAliasing: ::zerocopy::pointer::invariant::Reference, + { + if false { + fn assert_is_from_bytes<T>() + where + T: ::zerocopy::FromBytes, + T: ?::zerocopy::util::macro_util::core_reexport::marker::Sized, + {} + assert_is_from_bytes::<Self>(); + } + + true + } + } + + #[allow(deprecated)] + #[automatically_derived] + unsafe impl ::zerocopy::FromZeros for Foo { + fn only_derive_is_allowed_to_implement_this_trait() {} + } + + #[allow(deprecated)] + #[automatically_derived] + unsafe impl ::zerocopy::FromBytes for Foo { + fn only_derive_is_allowed_to_implement_this_trait() {} + } + } no_build + } +} + +#[test] +fn test_try_from_bytes_trivial_is_bit_valid_enum() { + // Even when we aren't deriving `FromBytes` as the top-level trait, + // `TryFromBytes` on enums still detects whether we *could* derive + // `FromBytes`, and if so, performs the same "trivial `is_bit_valid`" + // optimization. + test! { + TryFromBytes { + #[repr(u8)] + enum Foo { + Variant0, + Variant1, + Variant2, + Variant3, + Variant4, + Variant5, + Variant6, + Variant7, + Variant8, + Variant9, + Variant10, + Variant11, + Variant12, + Variant13, + Variant14, + Variant15, + Variant16, + Variant17, + Variant18, + Variant19, + Variant20, + Variant21, + Variant22, + Variant23, + Variant24, + Variant25, + Variant26, + Variant27, + Variant28, + Variant29, + Variant30, + Variant31, + Variant32, + Variant33, + Variant34, + Variant35, + Variant36, + Variant37, + Variant38, + Variant39, + Variant40, + Variant41, + Variant42, + Variant43, + Variant44, + Variant45, + Variant46, + Variant47, + Variant48, + Variant49, + Variant50, + Variant51, + Variant52, + Variant53, + Variant54, + Variant55, + Variant56, + Variant57, + Variant58, + Variant59, + Variant60, + Variant61, + Variant62, + Variant63, + Variant64, + Variant65, + Variant66, + Variant67, + Variant68, + Variant69, + Variant70, + Variant71, + Variant72, + Variant73, + Variant74, + Variant75, + Variant76, + Variant77, + Variant78, + Variant79, + Variant80, + Variant81, + Variant82, + Variant83, + Variant84, + Variant85, + Variant86, + Variant87, + Variant88, + Variant89, + Variant90, + Variant91, + Variant92, + Variant93, + Variant94, + Variant95, + Variant96, + Variant97, + Variant98, + Variant99, + Variant100, + Variant101, + Variant102, + Variant103, + Variant104, + Variant105, + Variant106, + Variant107, + Variant108, + Variant109, + Variant110, + Variant111, + Variant112, + Variant113, + Variant114, + Variant115, + Variant116, + Variant117, + Variant118, + Variant119, + Variant120, + Variant121, + Variant122, + Variant123, + Variant124, + Variant125, + Variant126, + Variant127, + Variant128, + Variant129, + Variant130, + Variant131, + Variant132, + Variant133, + Variant134, + Variant135, + Variant136, + Variant137, + Variant138, + Variant139, + Variant140, + Variant141, + Variant142, + Variant143, + Variant144, + Variant145, + Variant146, + Variant147, + Variant148, + Variant149, + Variant150, + Variant151, + Variant152, + Variant153, + Variant154, + Variant155, + Variant156, + Variant157, + Variant158, + Variant159, + Variant160, + Variant161, + Variant162, + Variant163, + Variant164, + Variant165, + Variant166, + Variant167, + Variant168, + 
Variant169, + Variant170, + Variant171, + Variant172, + Variant173, + Variant174, + Variant175, + Variant176, + Variant177, + Variant178, + Variant179, + Variant180, + Variant181, + Variant182, + Variant183, + Variant184, + Variant185, + Variant186, + Variant187, + Variant188, + Variant189, + Variant190, + Variant191, + Variant192, + Variant193, + Variant194, + Variant195, + Variant196, + Variant197, + Variant198, + Variant199, + Variant200, + Variant201, + Variant202, + Variant203, + Variant204, + Variant205, + Variant206, + Variant207, + Variant208, + Variant209, + Variant210, + Variant211, + Variant212, + Variant213, + Variant214, + Variant215, + Variant216, + Variant217, + Variant218, + Variant219, + Variant220, + Variant221, + Variant222, + Variant223, + Variant224, + Variant225, + Variant226, + Variant227, + Variant228, + Variant229, + Variant230, + Variant231, + Variant232, + Variant233, + Variant234, + Variant235, + Variant236, + Variant237, + Variant238, + Variant239, + Variant240, + Variant241, + Variant242, + Variant243, + Variant244, + Variant245, + Variant246, + Variant247, + Variant248, + Variant249, + Variant250, + Variant251, + Variant252, + Variant253, + Variant254, + Variant255, + } + } expands to { + #[allow(deprecated)] + #[automatically_derived] + unsafe impl ::zerocopy::TryFromBytes for Foo { + fn only_derive_is_allowed_to_implement_this_trait() {} + + fn is_bit_valid<___ZerocopyAliasing>( + _candidate: ::zerocopy::Maybe<Self, ___ZerocopyAliasing>, + ) -> ::zerocopy::util::macro_util::core_reexport::primitive::bool + where + ___ZerocopyAliasing: ::zerocopy::pointer::invariant::Reference, + { + true + } + } + } no_build + } +} + +#[test] +fn test_hash() { + test! { + ByteHash { + struct Foo<T: Clone>(T) where Self: Sized; + } expands to { + #[allow(deprecated)] + #[automatically_derived] + impl<T: Clone> ::zerocopy::util::macro_util::core_reexport::hash::Hash for Foo<T> + where + Self: ::zerocopy::IntoBytes + ::zerocopy::Immutable, + Self: Sized, + { + fn hash<H>(&self, state: &mut H) + where + H: ::zerocopy::util::macro_util::core_reexport::hash::Hasher, + { + ::zerocopy::util::macro_util::core_reexport::hash::Hasher::write( + state, + ::zerocopy::IntoBytes::as_bytes(self) + ) + } + + fn hash_slice<H>(data: &[Self], state: &mut H) + where + H: ::zerocopy::util::macro_util::core_reexport::hash::Hasher, + { + ::zerocopy::util::macro_util::core_reexport::hash::Hasher::write( + state, + ::zerocopy::IntoBytes::as_bytes(data) + ) + } + } + } no_build + } +} + +#[test] +fn test_eq() { + test! { + ByteEq { + struct Foo<T: Clone>(T) where Self: Sized; + } expands to { + #[allow(deprecated)] + #[automatically_derived] + impl<T: Clone> ::zerocopy::util::macro_util::core_reexport::cmp::PartialEq for Foo<T> + where + Self: ::zerocopy::IntoBytes + ::zerocopy::Immutable, + Self: Sized, + { + fn eq(&self, other: &Self) -> bool { + ::zerocopy::util::macro_util::core_reexport::cmp::PartialEq::eq( + ::zerocopy::IntoBytes::as_bytes(self), + ::zerocopy::IntoBytes::as_bytes(other), + ) + } + } + + #[allow(deprecated)] + #[automatically_derived] + impl<T: Clone> ::zerocopy::util::macro_util::core_reexport::cmp::Eq for Foo<T> + where + Self: ::zerocopy::IntoBytes + ::zerocopy::Immutable, + Self: Sized, + { + } + } no_build + } +} + +#[test] +fn test_split_at() { + test! 
{ + SplitAt { + #[repr(C)] + struct Foo<T: ?Sized + Copy>(T) where Self: Copy; + } expands to { + #[allow(deprecated)] + #[automatically_derived] + unsafe impl<T: ?Sized + Copy> ::zerocopy::SplitAt for Foo<T> + where + Self: Copy, + T: ::zerocopy::SplitAt, + { + fn only_derive_is_allowed_to_implement_this_trait() {} + type Elem = <T as ::zerocopy::SplitAt>::Elem; + } + } no_build + } + + test! { + SplitAt { + #[repr(transparent)] + struct Foo<T: ?Sized + Copy>(T) where Self: Copy; + } expands to { + #[allow(deprecated)] + #[automatically_derived] + unsafe impl<T: ?Sized + Copy> ::zerocopy::SplitAt for Foo<T> + where + Self: Copy, + T: ::zerocopy::SplitAt, + { + fn only_derive_is_allowed_to_implement_this_trait() {} + type Elem = <T as ::zerocopy::SplitAt>::Elem; + } + } no_build + } + + test! { + SplitAt { + #[repr(packed)] + struct Foo<T: ?Sized + Copy>(T) where Self: Copy; + } expands to { + ::core::compile_error! { + "must not have #[repr(packed)] attribute" + } + } no_build + } + + test! { + SplitAt { + #[repr(packed(2))] + struct Foo<T: ?Sized + Copy>(T) where Self: Copy; + } expands to { + ::core::compile_error! { + "must not have #[repr(packed)] attribute" + } + } no_build + } + + test! { + SplitAt { + enum Foo {} + } expands to { + ::core::compile_error! { + "can only be applied to structs" + } + } no_build + } + + test! { + SplitAt { + union Foo { a: () } + } expands to { + ::core::compile_error! { + "can only be applied to structs" + } + } no_build + } + + test! { + SplitAt { + struct Foo<T: ?Sized + Copy>(T) where Self: Copy; + } expands to { + ::core::compile_error! { + "must have #[repr(C)] or #[repr(transparent)] in order to guarantee this type's layout is splitable" + } + } no_build + } +} diff --git a/vendor/zerocopy-derive/src/repr.rs b/vendor/zerocopy-derive/src/repr.rs new file mode 100644 index 00000000..57014b38 --- /dev/null +++ b/vendor/zerocopy-derive/src/repr.rs @@ -0,0 +1,849 @@ +// Copyright 2019 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +use core::{ + convert::{Infallible, TryFrom}, + num::NonZeroU32, +}; + +use proc_macro2::{Span, TokenStream}; +use quote::{quote_spanned, ToTokens, TokenStreamExt as _}; +use syn::{ + punctuated::Punctuated, spanned::Spanned as _, token::Comma, Attribute, Error, LitInt, Meta, + MetaList, +}; + +/// The computed representation of a type. +/// +/// This is the result of processing all `#[repr(...)]` attributes on a type, if +/// any. A `Repr` is only capable of representing legal combinations of +/// `#[repr(...)]` attributes. +#[cfg_attr(test, derive(Copy, Clone, Debug))] +pub(crate) enum Repr<Prim, Packed> { + /// `#[repr(transparent)]` + Transparent(Span), + /// A compound representation: `repr(C)`, `repr(Rust)`, or `repr(Int)` + /// optionally combined with `repr(packed(...))` or `repr(align(...))` + Compound(Spanned<CompoundRepr<Prim>>, Option<Spanned<AlignRepr<Packed>>>), +} + +/// A compound representation: `repr(C)`, `repr(Rust)`, or `repr(Int)`. 
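+/// For example, `#[repr(C)]` parses to `CompoundRepr::C`, and `#[repr(u8)]` on an enum parses to `CompoundRepr::Primitive(PrimitiveRepr::U8)`; for structs and unions, `Prim` is `Infallible`, so primitive reprs are rejected as unsupported.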
+#[cfg_attr(test, derive(Copy, Clone, Debug, Eq, PartialEq))] +pub(crate) enum CompoundRepr<Prim> { + C, + Rust, + Primitive(Prim), +} + +/// `repr(Int)` +#[derive(Copy, Clone)] +#[cfg_attr(test, derive(Debug, Eq, PartialEq))] +pub(crate) enum PrimitiveRepr { + U8, + U16, + U32, + U64, + U128, + Usize, + I8, + I16, + I32, + I64, + I128, + Isize, +} + +/// `repr(packed(...))` or `repr(align(...))` +#[cfg_attr(test, derive(Copy, Clone, Debug, Eq, PartialEq))] +pub(crate) enum AlignRepr<Packed> { + Packed(Packed), + Align(NonZeroU32), +} + +/// The representations which can legally appear on a struct or union type. +pub(crate) type StructUnionRepr = Repr<Infallible, NonZeroU32>; + +/// The representations which can legally appear on an enum type. +pub(crate) type EnumRepr = Repr<PrimitiveRepr, Infallible>; + +impl<Prim, Packed> Repr<Prim, Packed> { + /// Gets the name of this "repr type" - the non-align `repr(X)` that is used + /// in prose to refer to this type. + /// + /// For example, we would refer to `#[repr(C, align(4))] struct Foo { ... }` + /// as a "`repr(C)` struct". + pub(crate) fn repr_type_name(&self) -> &str + where + Prim: Copy + With<PrimitiveRepr>, + { + use CompoundRepr::*; + use PrimitiveRepr::*; + use Repr::*; + match self { + Transparent(_span) => "repr(transparent)", + Compound(Spanned { t: repr, span: _ }, _align) => match repr { + C => "repr(C)", + Rust => "repr(Rust)", + Primitive(prim) => prim.with(|prim| match prim { + U8 => "repr(u8)", + U16 => "repr(u16)", + U32 => "repr(u32)", + U64 => "repr(u64)", + U128 => "repr(u128)", + Usize => "repr(usize)", + I8 => "repr(i8)", + I16 => "repr(i16)", + I32 => "repr(i32)", + I64 => "repr(i64)", + I128 => "repr(i128)", + Isize => "repr(isize)", + }), + }, + } + } + + pub(crate) fn is_transparent(&self) -> bool { + matches!(self, Repr::Transparent(_)) + } + + pub(crate) fn is_c(&self) -> bool { + use CompoundRepr::*; + matches!(self, Repr::Compound(Spanned { t: C, span: _ }, _align)) + } + + pub(crate) fn is_primitive(&self) -> bool { + use CompoundRepr::*; + matches!(self, Repr::Compound(Spanned { t: Primitive(_), span: _ }, _align)) + } + + pub(crate) fn get_packed(&self) -> Option<&Packed> { + use AlignRepr::*; + use Repr::*; + if let Compound(_, Some(Spanned { t: Packed(p), span: _ })) = self { + Some(p) + } else { + None + } + } + + pub(crate) fn get_align(&self) -> Option<Spanned<NonZeroU32>> { + use AlignRepr::*; + use Repr::*; + if let Compound(_, Some(Spanned { t: Align(n), span })) = self { + Some(Spanned::new(*n, *span)) + } else { + None + } + } + + pub(crate) fn is_align_gt_1(&self) -> bool { + self.get_align().map(|n| n.t.get() > 1).unwrap_or(false) + } + + /// When deriving `Unaligned`, validate that the decorated type has no + /// `#[repr(align(N))]` attribute where `N > 1`. If no such attribute exists + /// (including if `N == 1`), this returns `Ok(())`, and otherwise it returns + /// a descriptive error. + pub(crate) fn unaligned_validate_no_align_gt_1(&self) -> Result<(), Error> { + if let Some(n) = self.get_align().filter(|n| n.t.get() > 1) { + Err(Error::new( + n.span, + "cannot derive `Unaligned` on type with alignment greater than 1", + )) + } else { + Ok(()) + } + } +} + +impl<Prim> Repr<Prim, NonZeroU32> { + /// Does `self` describe a `#[repr(packed)]` or `#[repr(packed(1))]` type? 
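+ /// For example, `#[repr(C, packed)]` and `#[repr(C, packed(1))]` both return `true` here (bare `packed` is normalized to `packed(1)` during parsing), while `#[repr(C, packed(2))]` returns `false`.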
+ pub(crate) fn is_packed_1(&self) -> bool { + self.get_packed().map(|n| n.get() == 1).unwrap_or(false) + } +} + +impl<Packed> Repr<PrimitiveRepr, Packed> { + fn get_primitive(&self) -> Option<&PrimitiveRepr> { + use CompoundRepr::*; + use Repr::*; + if let Compound(Spanned { t: Primitive(p), span: _ }, _align) = self { + Some(p) + } else { + None + } + } + + /// Does `self` describe a `#[repr(u8)]` type? + pub(crate) fn is_u8(&self) -> bool { + matches!(self.get_primitive(), Some(PrimitiveRepr::U8)) + } + + /// Does `self` describe a `#[repr(i8)]` type? + pub(crate) fn is_i8(&self) -> bool { + matches!(self.get_primitive(), Some(PrimitiveRepr::I8)) + } +} + +impl<Prim, Packed> ToTokens for Repr<Prim, Packed> +where + Prim: With<PrimitiveRepr> + Copy, + Packed: With<NonZeroU32> + Copy, +{ + fn to_tokens(&self, ts: &mut TokenStream) { + use Repr::*; + match self { + Transparent(span) => ts.append_all(quote_spanned! { *span=> #[repr(transparent)] }), + Compound(repr, align) => { + repr.to_tokens(ts); + if let Some(align) = align { + align.to_tokens(ts); + } + } + } + } +} + +impl<Prim: With<PrimitiveRepr> + Copy> ToTokens for Spanned<CompoundRepr<Prim>> { + fn to_tokens(&self, ts: &mut TokenStream) { + use CompoundRepr::*; + match &self.t { + C => ts.append_all(quote_spanned! { self.span=> #[repr(C)] }), + Rust => ts.append_all(quote_spanned! { self.span=> #[repr(Rust)] }), + Primitive(prim) => prim.with(|prim| Spanned::new(prim, self.span).to_tokens(ts)), + } + } +} + +impl ToTokens for Spanned<PrimitiveRepr> { + fn to_tokens(&self, ts: &mut TokenStream) { + use PrimitiveRepr::*; + match self.t { + U8 => ts.append_all(quote_spanned! { self.span => #[repr(u8)] }), + U16 => ts.append_all(quote_spanned! { self.span => #[repr(u16)] }), + U32 => ts.append_all(quote_spanned! { self.span => #[repr(u32)] }), + U64 => ts.append_all(quote_spanned! { self.span => #[repr(u64)] }), + U128 => ts.append_all(quote_spanned! { self.span => #[repr(u128)] }), + Usize => ts.append_all(quote_spanned! { self.span => #[repr(usize)] }), + I8 => ts.append_all(quote_spanned! { self.span => #[repr(i8)] }), + I16 => ts.append_all(quote_spanned! { self.span => #[repr(i16)] }), + I32 => ts.append_all(quote_spanned! { self.span => #[repr(i32)] }), + I64 => ts.append_all(quote_spanned! { self.span => #[repr(i64)] }), + I128 => ts.append_all(quote_spanned! { self.span => #[repr(i128)] }), + Isize => ts.append_all(quote_spanned! { self.span => #[repr(isize)] }), + } + } +} + +impl<Packed: With<NonZeroU32> + Copy> ToTokens for Spanned<AlignRepr<Packed>> { + fn to_tokens(&self, ts: &mut TokenStream) { + use AlignRepr::*; + // We use `syn::Index` instead of `u32` because `quote_spanned!` + // serializes `u32` literals as `123u32`, not just `123`. Rust doesn't + // recognize that as a valid argument to `#[repr(align(...))]` or + // `#[repr(packed(...))]`. + let to_index = |n: NonZeroU32| syn::Index { index: n.get(), span: self.span }; + match self.t { + Packed(n) => n.with(|n| { + let n = to_index(n); + ts.append_all(quote_spanned! { self.span => #[repr(packed(#n))] }) + }), + Align(n) => { + let n = to_index(n); + ts.append_all(quote_spanned! { self.span => #[repr(align(#n))] }) + } + } + } +} + +/// The result of parsing a single `#[repr(...)]` attribute or a single +/// directive inside a compound `#[repr(..., ...)]` attribute. 
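+/// For example, `#[repr(C, packed(2))]` yields the raw reprs `C` and `PackedN(2)`, which are later combined into a single high-level `Repr`.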
+#[derive(Copy, Clone, PartialEq, Eq)] +#[cfg_attr(test, derive(Debug))] +pub(crate) enum RawRepr { + Transparent, + C, + Rust, + U8, + U16, + U32, + U64, + U128, + Usize, + I8, + I16, + I32, + I64, + I128, + Isize, + Align(NonZeroU32), + PackedN(NonZeroU32), + Packed, +} + +/// The error from converting from a `RawRepr`. +#[cfg_attr(test, derive(Debug, Eq, PartialEq))] +pub(crate) enum FromRawReprError<E> { + /// The `RawRepr` doesn't affect the high-level repr we're parsing (e.g. + /// it's `align(...)` and we're parsing a `CompoundRepr`). + None, + /// The `RawRepr` is invalid for the high-level repr we're parsing (e.g. + /// it's `packed` repr and we're parsing an `AlignRepr` for an enum type). + Err(E), +} + +/// The representation hint is not supported for the decorated type. +#[cfg_attr(test, derive(Copy, Clone, Debug, Eq, PartialEq))] +pub(crate) struct UnsupportedReprError; + +impl<Prim: With<PrimitiveRepr>> TryFrom<RawRepr> for CompoundRepr<Prim> { + type Error = FromRawReprError<UnsupportedReprError>; + fn try_from( + raw: RawRepr, + ) -> Result<CompoundRepr<Prim>, FromRawReprError<UnsupportedReprError>> { + use RawRepr::*; + match raw { + C => Ok(CompoundRepr::C), + Rust => Ok(CompoundRepr::Rust), + raw @ (U8 | U16 | U32 | U64 | U128 | Usize | I8 | I16 | I32 | I64 | I128 | Isize) => { + Prim::try_with_or( + || match raw { + U8 => Ok(PrimitiveRepr::U8), + U16 => Ok(PrimitiveRepr::U16), + U32 => Ok(PrimitiveRepr::U32), + U64 => Ok(PrimitiveRepr::U64), + U128 => Ok(PrimitiveRepr::U128), + Usize => Ok(PrimitiveRepr::Usize), + I8 => Ok(PrimitiveRepr::I8), + I16 => Ok(PrimitiveRepr::I16), + I32 => Ok(PrimitiveRepr::I32), + I64 => Ok(PrimitiveRepr::I64), + I128 => Ok(PrimitiveRepr::I128), + Isize => Ok(PrimitiveRepr::Isize), + Transparent | C | Rust | Align(_) | PackedN(_) | Packed => { + Err(UnsupportedReprError) + } + }, + UnsupportedReprError, + ) + .map(CompoundRepr::Primitive) + .map_err(FromRawReprError::Err) + } + Transparent | Align(_) | PackedN(_) | Packed => Err(FromRawReprError::None), + } + } +} + +impl<Pcked: With<NonZeroU32>> TryFrom<RawRepr> for AlignRepr<Pcked> { + type Error = FromRawReprError<UnsupportedReprError>; + fn try_from(raw: RawRepr) -> Result<AlignRepr<Pcked>, FromRawReprError<UnsupportedReprError>> { + use RawRepr::*; + match raw { + Packed | PackedN(_) => Pcked::try_with_or( + || match raw { + Packed => Ok(NonZeroU32::new(1).unwrap()), + PackedN(n) => Ok(n), + U8 | U16 | U32 | U64 | U128 | Usize | I8 | I16 | I32 | I64 | I128 | Isize + | Transparent | C | Rust | Align(_) => Err(UnsupportedReprError), + }, + UnsupportedReprError, + ) + .map(AlignRepr::Packed) + .map_err(FromRawReprError::Err), + Align(n) => Ok(AlignRepr::Align(n)), + U8 | U16 | U32 | U64 | U128 | Usize | I8 | I16 | I32 | I64 | I128 | Isize + | Transparent | C | Rust => Err(FromRawReprError::None), + } + } +} + +/// The error from extracting a high-level repr type from a list of `RawRepr`s. +#[cfg_attr(test, derive(Copy, Clone, Debug, Eq, PartialEq))] +enum FromRawReprsError<E> { + /// One of the `RawRepr`s is invalid for the high-level repr we're parsing + /// (e.g. there's a `packed` repr and we're parsing an `AlignRepr` for an + /// enum type). + Single(E), + /// Two `RawRepr`s appear which both affect the high-level repr we're + /// parsing (e.g., the list is `#[repr(align(2), packed)]`). Note that we + /// conservatively treat redundant reprs as conflicting (e.g. + /// `#[repr(packed, packed)]`). + Conflict, +} + +/// Tries to extract a high-level repr from a list of `RawRepr`s. 
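+/// For example, from the raw reprs of `#[repr(C, align(4))]`, extracting a `CompoundRepr` finds `C` and extracting an `AlignRepr` finds `Align(4)`; from `#[repr(C, u8)]` on an enum, extracting a `CompoundRepr` fails with a `Conflict` because both raw reprs convert to compound reprs.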
+fn try_from_raw_reprs<'a, E, R: TryFrom<RawRepr, Error = FromRawReprError<E>>>( + r: impl IntoIterator<Item = &'a Spanned<RawRepr>>, +) -> Result<Option<Spanned<R>>, Spanned<FromRawReprsError<E>>> { + // Walk the list of `RawRepr`s and attempt to convert each to an `R`. Bail + // if we find any errors. If we find more than one which converts to an `R`, + // bail with a `Conflict` error. + r.into_iter().try_fold(None, |found: Option<Spanned<R>>, raw| { + let new = match Spanned::<R>::try_from(*raw) { + Ok(r) => r, + // This `RawRepr` doesn't convert to an `R`, so keep the current + // found `R`, if any. + Err(FromRawReprError::None) => return Ok(found), + // This repr is unsupported for the decorated type (e.g. + // `repr(packed)` on an enum). + Err(FromRawReprError::Err(Spanned { t: err, span })) => { + return Err(Spanned::new(FromRawReprsError::Single(err), span)) + } + }; + + if let Some(found) = found { + // We already found an `R`, but this `RawRepr` also converts to an + // `R`, so that's a conflict. + // + // `Span::join` returns `None` if the two spans are from different + // files or if we're not on the nightly compiler. In that case, just + // use `new`'s span. + let span = found.span.join(new.span).unwrap_or(new.span); + Err(Spanned::new(FromRawReprsError::Conflict, span)) + } else { + Ok(Some(new)) + } + }) +} + +/// The error returned from [`Repr::from_attrs`]. +#[cfg_attr(test, derive(Copy, Clone, Debug, Eq, PartialEq))] +enum FromAttrsError { + FromRawReprs(FromRawReprsError<UnsupportedReprError>), + Unrecognized, +} + +impl From<FromRawReprsError<UnsupportedReprError>> for FromAttrsError { + fn from(err: FromRawReprsError<UnsupportedReprError>) -> FromAttrsError { + FromAttrsError::FromRawReprs(err) + } +} + +impl From<UnrecognizedReprError> for FromAttrsError { + fn from(_err: UnrecognizedReprError) -> FromAttrsError { + FromAttrsError::Unrecognized + } +} + +impl From<Spanned<FromAttrsError>> for Error { + fn from(err: Spanned<FromAttrsError>) -> Error { + let Spanned { t: err, span } = err; + match err { + FromAttrsError::FromRawReprs(FromRawReprsError::Single( + _err @ UnsupportedReprError, + )) => Error::new(span, "unsupported representation hint for the decorated type"), + FromAttrsError::FromRawReprs(FromRawReprsError::Conflict) => { + // NOTE: This says "another" rather than "a preceding" because + // when one of the reprs involved is `transparent`, we detect + // that condition in `Repr::from_attrs`, and at that point we + // can't tell which repr came first, so we might report this on + // the first involved repr rather than the second, third, etc. 
+ Error::new(span, "this conflicts with another representation hint") + } + FromAttrsError::Unrecognized => Error::new(span, "unrecognized representation hint"), + } + } +} + +impl<Prim, Packed> Repr<Prim, Packed> { + fn from_attrs_inner(attrs: &[Attribute]) -> Result<Repr<Prim, Packed>, Spanned<FromAttrsError>> + where + Prim: With<PrimitiveRepr>, + Packed: With<NonZeroU32>, + { + let raw_reprs = RawRepr::from_attrs(attrs).map_err(Spanned::from)?; + + let transparent = { + let mut transparents = raw_reprs.iter().filter_map(|Spanned { t, span }| match t { + RawRepr::Transparent => Some(span), + _ => None, + }); + let first = transparents.next(); + let second = transparents.next(); + match (first, second) { + (None, None) => None, + (Some(span), None) => Some(*span), + (Some(_), Some(second)) => { + return Err(Spanned::new( + FromAttrsError::FromRawReprs(FromRawReprsError::Conflict), + *second, + )) + } + // An iterator can't produce a value only on the second call to + // `.next()`. + (None, Some(_)) => unreachable!(), + } + }; + + let compound: Option<Spanned<CompoundRepr<Prim>>> = + try_from_raw_reprs(raw_reprs.iter()).map_err(Spanned::from)?; + let align: Option<Spanned<AlignRepr<Packed>>> = + try_from_raw_reprs(raw_reprs.iter()).map_err(Spanned::from)?; + + if let Some(span) = transparent { + if compound.is_some() || align.is_some() { + // Arbitrarily report the problem on the `transparent` span. Any + // span will do. + return Err(Spanned::new(FromRawReprsError::Conflict.into(), span)); + } + + Ok(Repr::Transparent(span)) + } else { + Ok(Repr::Compound( + compound.unwrap_or(Spanned::new(CompoundRepr::Rust, Span::call_site())), + align, + )) + } + } +} + +impl<Prim, Packed> Repr<Prim, Packed> { + pub(crate) fn from_attrs(attrs: &[Attribute]) -> Result<Repr<Prim, Packed>, Error> + where + Prim: With<PrimitiveRepr>, + Packed: With<NonZeroU32>, + { + Repr::from_attrs_inner(attrs).map_err(Into::into) + } +} + +/// The representation hint could not be parsed or was unrecognized. +struct UnrecognizedReprError; + +impl RawRepr { + fn from_attrs( + attrs: &[Attribute], + ) -> Result<Vec<Spanned<RawRepr>>, Spanned<UnrecognizedReprError>> { + let mut reprs = Vec::new(); + for attr in attrs { + // Ignore documentation attributes. + if attr.path().is_ident("doc") { + continue; + } + if let Meta::List(ref meta_list) = attr.meta { + if meta_list.path.is_ident("repr") { + let parsed: Punctuated<Meta, Comma> = + match meta_list.parse_args_with(Punctuated::parse_terminated) { + Ok(parsed) => parsed, + Err(_) => { + return Err(Spanned::new( + UnrecognizedReprError, + meta_list.tokens.span(), + )) + } + }; + for meta in parsed { + let s = meta.span(); + reprs.push( + RawRepr::from_meta(&meta) + .map(|r| Spanned::new(r, s)) + .map_err(|e| Spanned::new(e, s))?, + ); + } + } + } + } + + Ok(reprs) + } + + fn from_meta(meta: &Meta) -> Result<RawRepr, UnrecognizedReprError> { + let (path, list) = match meta { + Meta::Path(path) => (path, None), + Meta::List(list) => (&list.path, Some(list)), + _ => return Err(UnrecognizedReprError), + }; + + let ident = path.get_ident().ok_or(UnrecognizedReprError)?; + + // Only returns `Ok` for non-zero power-of-two values. 
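+ // For example, `packed(2)` and `align(8)` parse successfully, while `packed(0)`, `packed(3)`, and `align(6)` are rejected as unrecognized.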
+ let parse_nzu64 = |list: &MetaList| { + list.parse_args::<LitInt>() + .and_then(|int| int.base10_parse::<NonZeroU32>()) + .map_err(|_| UnrecognizedReprError) + .and_then(|nz| { + if nz.get().is_power_of_two() { + Ok(nz) + } else { + Err(UnrecognizedReprError) + } + }) + }; + + use RawRepr::*; + Ok(match (ident.to_string().as_str(), list) { + ("u8", None) => U8, + ("u16", None) => U16, + ("u32", None) => U32, + ("u64", None) => U64, + ("u128", None) => U128, + ("usize", None) => Usize, + ("i8", None) => I8, + ("i16", None) => I16, + ("i32", None) => I32, + ("i64", None) => I64, + ("i128", None) => I128, + ("isize", None) => Isize, + ("C", None) => C, + ("transparent", None) => Transparent, + ("Rust", None) => Rust, + ("packed", None) => Packed, + ("packed", Some(list)) => PackedN(parse_nzu64(list)?), + ("align", Some(list)) => Align(parse_nzu64(list)?), + _ => return Err(UnrecognizedReprError), + }) + } +} + +pub(crate) use util::*; +mod util { + use super::*; + /// A value with an associated span. + #[derive(Copy, Clone)] + #[cfg_attr(test, derive(Debug))] + pub(crate) struct Spanned<T> { + pub(crate) t: T, + pub(crate) span: Span, + } + + impl<T> Spanned<T> { + pub(super) fn new(t: T, span: Span) -> Spanned<T> { + Spanned { t, span } + } + + pub(super) fn from<U>(s: Spanned<U>) -> Spanned<T> + where + T: From<U>, + { + let Spanned { t: u, span } = s; + Spanned::new(u.into(), span) + } + + /// Delegates to `T: TryFrom`, preserving span information in both the + /// success and error cases. + pub(super) fn try_from<E, U>( + u: Spanned<U>, + ) -> Result<Spanned<T>, FromRawReprError<Spanned<E>>> + where + T: TryFrom<U, Error = FromRawReprError<E>>, + { + let Spanned { t: u, span } = u; + T::try_from(u).map(|t| Spanned { t, span }).map_err(|err| match err { + FromRawReprError::None => FromRawReprError::None, + FromRawReprError::Err(e) => FromRawReprError::Err(Spanned::new(e, span)), + }) + } + } + + // Used to permit implementing `With<T> for T: Inhabited` and for + // `Infallible` without a blanket impl conflict. + pub(crate) trait Inhabited {} + impl Inhabited for PrimitiveRepr {} + impl Inhabited for NonZeroU32 {} + + pub(crate) trait With<T> { + fn with<O, F: FnOnce(T) -> O>(self, f: F) -> O; + fn try_with_or<E, F: FnOnce() -> Result<T, E>>(f: F, err: E) -> Result<Self, E> + where + Self: Sized; + } + + impl<T: Inhabited> With<T> for T { + fn with<O, F: FnOnce(T) -> O>(self, f: F) -> O { + f(self) + } + + fn try_with_or<E, F: FnOnce() -> Result<T, E>>(f: F, _err: E) -> Result<Self, E> { + f() + } + } + + impl<T> With<T> for Infallible { + fn with<O, F: FnOnce(T) -> O>(self, _f: F) -> O { + match self {} + } + + fn try_with_or<E, F: FnOnce() -> Result<T, E>>(_f: F, err: E) -> Result<Self, E> { + Err(err) + } + } +} + +#[cfg(test)] +mod tests { + use syn::parse_quote; + + use super::*; + + impl<T> From<T> for Spanned<T> { + fn from(t: T) -> Spanned<T> { + Spanned::new(t, Span::call_site()) + } + } + + // We ignore spans for equality in testing since real spans are hard to + // synthesize and don't implement `PartialEq`. 
+ impl<T: PartialEq> PartialEq for Spanned<T> { + fn eq(&self, other: &Spanned<T>) -> bool { + self.t.eq(&other.t) + } + } + + impl<T: Eq> Eq for Spanned<T> {} + + impl<Prim: PartialEq, Packed: PartialEq> PartialEq for Repr<Prim, Packed> { + fn eq(&self, other: &Repr<Prim, Packed>) -> bool { + match (self, other) { + (Repr::Transparent(_), Repr::Transparent(_)) => true, + (Repr::Compound(sc, sa), Repr::Compound(oc, oa)) => (sc, sa) == (oc, oa), + _ => false, + } + } + } + + fn s() -> Span { + Span::call_site() + } + + #[test] + fn test() { + // Test that a given `#[repr(...)]` attribute parses and returns the + // given `Repr` or error. + macro_rules! test { + ($(#[$attr:meta])* => $repr:expr) => { + test!(@inner $(#[$attr])* => Repr => Ok($repr)); + }; + // In the error case, the caller must explicitly provide the name of + // the `Repr` type to assist in type inference. + (@error $(#[$attr:meta])* => $typ:ident => $repr:expr) => { + test!(@inner $(#[$attr])* => $typ => Err($repr)); + }; + (@inner $(#[$attr:meta])* => $typ:ident => $repr:expr) => { + let attr: Attribute = parse_quote!($(#[$attr])*); + let mut got = $typ::from_attrs_inner(&[attr]); + let expect: Result<Repr<_, _>, _> = $repr; + if false { + // Force Rust to infer `got` as having the same type as + // `expect`. + got = expect; + } + assert_eq!(got, expect, stringify!($(#[$attr])*)); + }; + } + + use AlignRepr::*; + use CompoundRepr::*; + use PrimitiveRepr::*; + let nz = |n: u32| NonZeroU32::new(n).unwrap(); + + test!(#[repr(transparent)] => StructUnionRepr::Transparent(s())); + test!(#[repr()] => StructUnionRepr::Compound(Rust.into(), None)); + test!(#[repr(packed)] => StructUnionRepr::Compound(Rust.into(), Some(Packed(nz(1)).into()))); + test!(#[repr(packed(2))] => StructUnionRepr::Compound(Rust.into(), Some(Packed(nz(2)).into()))); + test!(#[repr(align(1))] => StructUnionRepr::Compound(Rust.into(), Some(Align(nz(1)).into()))); + test!(#[repr(align(2))] => StructUnionRepr::Compound(Rust.into(), Some(Align(nz(2)).into()))); + test!(#[repr(C)] => StructUnionRepr::Compound(C.into(), None)); + test!(#[repr(C, packed)] => StructUnionRepr::Compound(C.into(), Some(Packed(nz(1)).into()))); + test!(#[repr(C, packed(2))] => StructUnionRepr::Compound(C.into(), Some(Packed(nz(2)).into()))); + test!(#[repr(C, align(1))] => StructUnionRepr::Compound(C.into(), Some(Align(nz(1)).into()))); + test!(#[repr(C, align(2))] => StructUnionRepr::Compound(C.into(), Some(Align(nz(2)).into()))); + + test!(#[repr(transparent)] => EnumRepr::Transparent(s())); + test!(#[repr()] => EnumRepr::Compound(Rust.into(), None)); + test!(#[repr(align(1))] => EnumRepr::Compound(Rust.into(), Some(Align(nz(1)).into()))); + test!(#[repr(align(2))] => EnumRepr::Compound(Rust.into(), Some(Align(nz(2)).into()))); + + macro_rules! 
for_each_compound_repr { + ($($r:tt => $var:expr),*) => { + $( + test!(#[repr($r)] => EnumRepr::Compound($var.into(), None)); + test!(#[repr($r, align(1))] => EnumRepr::Compound($var.into(), Some(Align(nz(1)).into()))); + test!(#[repr($r, align(2))] => EnumRepr::Compound($var.into(), Some(Align(nz(2)).into()))); + )* + } + } + + for_each_compound_repr!( + C => C, + u8 => Primitive(U8), + u16 => Primitive(U16), + u32 => Primitive(U32), + u64 => Primitive(U64), + usize => Primitive(Usize), + i8 => Primitive(I8), + i16 => Primitive(I16), + i32 => Primitive(I32), + i64 => Primitive(I64), + isize => Primitive(Isize) + ); + + use FromAttrsError::*; + use FromRawReprsError::*; + + // Run failure tests which are valid for both `StructUnionRepr` and + // `EnumRepr`. + macro_rules! for_each_repr_type { + ($($repr:ident),*) => { + $( + // Invalid packed or align attributes + test!(@error #[repr(packed(0))] => $repr => Unrecognized.into()); + test!(@error #[repr(packed(3))] => $repr => Unrecognized.into()); + test!(@error #[repr(align(0))] => $repr => Unrecognized.into()); + test!(@error #[repr(align(3))] => $repr => Unrecognized.into()); + + // Conflicts + test!(@error #[repr(transparent, transparent)] => $repr => FromRawReprs(Conflict).into()); + test!(@error #[repr(transparent, C)] => $repr => FromRawReprs(Conflict).into()); + test!(@error #[repr(transparent, Rust)] => $repr => FromRawReprs(Conflict).into()); + + test!(@error #[repr(C, transparent)] => $repr => FromRawReprs(Conflict).into()); + test!(@error #[repr(C, C)] => $repr => FromRawReprs(Conflict).into()); + test!(@error #[repr(C, Rust)] => $repr => FromRawReprs(Conflict).into()); + + test!(@error #[repr(Rust, transparent)] => $repr => FromRawReprs(Conflict).into()); + test!(@error #[repr(Rust, C)] => $repr => FromRawReprs(Conflict).into()); + test!(@error #[repr(Rust, Rust)] => $repr => FromRawReprs(Conflict).into()); + )* + } + } + + for_each_repr_type!(StructUnionRepr, EnumRepr); + + // Enum-specific conflicts. + // + // We don't bother to test every combination since that would be a huge + // number (enums can have primitive reprs u8, u16, u32, u64, usize, i8, + // i16, i32, i64, and isize). Instead, since the conflict logic doesn't + // care what specific value of `PrimitiveRepr` is present, we assume + // that testing against u8 alone is fine. 
+ test!(@error #[repr(transparent, u8)] => EnumRepr => FromRawReprs(Conflict).into()); + test!(@error #[repr(u8, transparent)] => EnumRepr => FromRawReprs(Conflict).into()); + test!(@error #[repr(C, u8)] => EnumRepr => FromRawReprs(Conflict).into()); + test!(@error #[repr(u8, C)] => EnumRepr => FromRawReprs(Conflict).into()); + test!(@error #[repr(Rust, u8)] => EnumRepr => FromRawReprs(Conflict).into()); + test!(@error #[repr(u8, Rust)] => EnumRepr => FromRawReprs(Conflict).into()); + test!(@error #[repr(u8, u8)] => EnumRepr => FromRawReprs(Conflict).into()); + + // Illegal struct/union reprs + test!(@error #[repr(u8)] => StructUnionRepr => FromRawReprs(Single(UnsupportedReprError)).into()); + test!(@error #[repr(u16)] => StructUnionRepr => FromRawReprs(Single(UnsupportedReprError)).into()); + test!(@error #[repr(u32)] => StructUnionRepr => FromRawReprs(Single(UnsupportedReprError)).into()); + test!(@error #[repr(u64)] => StructUnionRepr => FromRawReprs(Single(UnsupportedReprError)).into()); + test!(@error #[repr(usize)] => StructUnionRepr => FromRawReprs(Single(UnsupportedReprError)).into()); + test!(@error #[repr(i8)] => StructUnionRepr => FromRawReprs(Single(UnsupportedReprError)).into()); + test!(@error #[repr(i16)] => StructUnionRepr => FromRawReprs(Single(UnsupportedReprError)).into()); + test!(@error #[repr(i32)] => StructUnionRepr => FromRawReprs(Single(UnsupportedReprError)).into()); + test!(@error #[repr(i64)] => StructUnionRepr => FromRawReprs(Single(UnsupportedReprError)).into()); + test!(@error #[repr(isize)] => StructUnionRepr => FromRawReprs(Single(UnsupportedReprError)).into()); + + // Illegal enum reprs + test!(@error #[repr(packed)] => EnumRepr => FromRawReprs(Single(UnsupportedReprError)).into()); + test!(@error #[repr(packed(1))] => EnumRepr => FromRawReprs(Single(UnsupportedReprError)).into()); + test!(@error #[repr(packed(2))] => EnumRepr => FromRawReprs(Single(UnsupportedReprError)).into()); + } +} diff --git a/vendor/zerocopy-derive/tests/crate_path.rs b/vendor/zerocopy-derive/tests/crate_path.rs new file mode 100644 index 00000000..ea1f4662 --- /dev/null +++ b/vendor/zerocopy-derive/tests/crate_path.rs @@ -0,0 +1,189 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +// Make sure that the derive macros will respect the +// `#[zerocopy(crate = "...")]` attribute when renaming the crate. + +// See comment in `include.rs` for why we disable the prelude. 
+#![no_implicit_prelude] +#![allow(warnings)] + +include!("include.rs"); + +#[test] +fn test_gen_custom_zerocopy() { + #[derive( + imp::ByteEq, + imp::ByteHash, + imp::IntoBytes, + imp::FromBytes, + imp::Unaligned, + imp::Immutable, + imp::KnownLayout, + )] + #[zerocopy(crate = "fake_zerocopy")] + #[repr(packed)] + struct SomeStruct { + a: u16, + b: u32, + } + + impl AssertNotZerocopyIntoBytes for SomeStruct {} + impl AssertNotZerocopyFromBytes for SomeStruct {} + impl AssertNotZerocopyUnaligned for SomeStruct {} + impl AssertNotZerocopyImmutable for SomeStruct {} + impl AssertNotZerocopyKnownLayout for SomeStruct {} + + fake_zerocopy::assert::<SomeStruct>(); +} + +mod fake_zerocopy { + use ::std::{io, ptr::NonNull, unimplemented}; + + pub use super::imp::*; + + pub fn assert<T>() + where + T: IntoBytes + FromBytes + Unaligned + Immutable, + { + } + + pub unsafe trait IntoBytes { + fn only_derive_is_allowed_to_implement_this_trait() + where + Self: Sized; + + fn as_bytes(&self) -> &[u8] + where + Self: Immutable, + { + unimplemented!() + } + } + + pub unsafe trait FromBytes: FromZeros { + fn only_derive_is_allowed_to_implement_this_trait() + where + Self: Sized; + } + + pub unsafe trait Unaligned { + fn only_derive_is_allowed_to_implement_this_trait() + where + Self: Sized; + } + + pub unsafe trait Immutable { + fn only_derive_is_allowed_to_implement_this_trait() + where + Self: Sized; + } + + pub unsafe trait KnownLayout { + fn only_derive_is_allowed_to_implement_this_trait() + where + Self: Sized; + + type PointerMetadata: PointerMetadata; + + type MaybeUninit: ?Sized + KnownLayout<PointerMetadata = Self::PointerMetadata>; + + const LAYOUT: DstLayout; + + fn raw_from_ptr_len(bytes: NonNull<u8>, meta: Self::PointerMetadata) -> NonNull<Self>; + + fn pointer_to_metadata(ptr: *mut Self) -> Self::PointerMetadata; + } + + macro_rules! impl_ty { + ($ty:ty $(as $generic:ident)?) => { + unsafe impl$(<$generic: IntoBytes>)? IntoBytes for $ty { + fn only_derive_is_allowed_to_implement_this_trait() + where + Self: Sized, + { + unimplemented!() + } + } + + unsafe impl$(<$generic: FromBytes>)? FromBytes for $ty { + fn only_derive_is_allowed_to_implement_this_trait() + where + Self: Sized, + { + unimplemented!() + } + } + + unsafe impl$(<$generic: Unaligned>)? Unaligned for $ty { + fn only_derive_is_allowed_to_implement_this_trait() + where + Self: Sized, + { + unimplemented!() + } + } + + unsafe impl$(<$generic: Immutable>)? Immutable for $ty { + fn only_derive_is_allowed_to_implement_this_trait() + where + Self: Sized, + { + unimplemented!() + } + } + + unsafe impl$(<$generic: KnownLayout>)? 
KnownLayout for $ty { + fn only_derive_is_allowed_to_implement_this_trait() + where + Self: Sized, + { + unimplemented!() + } + + type PointerMetadata = (); + + type MaybeUninit = (); + + const LAYOUT: DstLayout = DstLayout::new_zst(None); + + fn raw_from_ptr_len( + bytes: NonNull<u8>, + meta: Self::PointerMetadata, + ) -> NonNull<Self> { + unimplemented!() + } + + fn pointer_to_metadata(ptr: *mut Self) -> Self::PointerMetadata { + unimplemented!() + } + } + }; + } + + impl_ty!(()); + impl_ty!(u16); + impl_ty!(u32); + impl_ty!([T] as T); + impl_ty!(::std::mem::MaybeUninit<T> as T); +} + +pub trait AssertNotZerocopyIntoBytes {} +impl<T: imp::IntoBytes> AssertNotZerocopyIntoBytes for T {} + +pub trait AssertNotZerocopyFromBytes {} +impl<T: imp::FromBytes> AssertNotZerocopyFromBytes for T {} + +pub trait AssertNotZerocopyUnaligned {} +impl<T: imp::Unaligned> AssertNotZerocopyUnaligned for T {} + +pub trait AssertNotZerocopyImmutable {} +impl<T: imp::Immutable> AssertNotZerocopyImmutable for T {} + +pub trait AssertNotZerocopyKnownLayout {} +impl<T: imp::KnownLayout> AssertNotZerocopyKnownLayout for T {} diff --git a/vendor/zerocopy-derive/tests/deprecated.rs b/vendor/zerocopy-derive/tests/deprecated.rs new file mode 100644 index 00000000..052e258c --- /dev/null +++ b/vendor/zerocopy-derive/tests/deprecated.rs @@ -0,0 +1,125 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +// See comment in `include.rs` for why we disable the prelude. +#![no_implicit_prelude] +#![allow(warnings)] +#![deny(deprecated)] + +include!("include.rs"); + +// Make sure no deprecation warnings are generated from our derives (see #553). + +#[macro_export] +macro_rules! test { + ($name:ident => $ty:item => $($trait:ident),*) => { + #[allow(non_snake_case)] + mod $name { + $( + mod $trait { + use super::super::*; + + #[deprecated = "do not use"] + #[derive(imp::$trait)] + $ty + + #[allow(deprecated)] + fn _allow_deprecated() { + util_assert_impl_all!($name: imp::$trait); + } + } + )* + } + }; +} + +// NOTE: `FromBytes` is tested separately in `enum_from_bytes.rs` since it +// requires 256-variant enums which are extremely verbose; such enums are +// already in that file. 
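+// For example, `test!(Struct => #[repr(C)] struct Struct; => TryFromBytes, FromZeros)` expands to a `mod Struct` with one submodule per listed trait, each deriving that trait on a `#[deprecated = "do not use"]` copy of the item and asserting that the impl exists.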
+test!(Enum => #[repr(u8)] enum Enum { A, } => TryFromBytes, FromZeros, KnownLayout, Immutable, IntoBytes, Unaligned); + +test!(Struct => #[repr(C)] struct Struct; => TryFromBytes, FromZeros, FromBytes, KnownLayout, Immutable, IntoBytes, Unaligned); + +test!(Union => #[repr(C)] union Union{ a: (), } => TryFromBytes, FromZeros, FromBytes, KnownLayout, Immutable, IntoBytes, Unaligned); + +// Tests for ByteHash and ByteEq which require IntoBytes + Immutable +mod enum_hash_eq { + mod ByteHash { + use super::super::*; + #[deprecated = "do not use"] + #[derive(imp::ByteHash, imp::IntoBytes, imp::Immutable)] + #[repr(u8)] + enum Enum { + A, + } + + #[allow(deprecated)] + fn _allow_deprecated() { + util_assert_impl_all!(Enum: ::core::hash::Hash); + } + } + mod ByteEq { + use super::super::*; + #[deprecated = "do not use"] + #[derive(imp::ByteEq, imp::IntoBytes, imp::Immutable)] + #[repr(u8)] + enum Enum { + A, + } + + #[allow(deprecated)] + fn _allow_deprecated() { + util_assert_impl_all!(Enum: ::core::cmp::PartialEq, ::core::cmp::Eq); + } + } +} + +mod struct_hash_eq { + mod ByteHash { + use super::super::*; + #[deprecated = "do not use"] + #[derive(imp::ByteHash, imp::IntoBytes, imp::Immutable)] + #[repr(C)] + struct Struct; + + #[allow(deprecated)] + fn _allow_deprecated() { + util_assert_impl_all!(Struct: ::core::hash::Hash); + } + } + mod ByteEq { + use super::super::*; + #[deprecated = "do not use"] + #[derive(imp::ByteEq, imp::IntoBytes, imp::Immutable)] + #[repr(C)] + struct Struct; + + #[allow(deprecated)] + fn _allow_deprecated() { + util_assert_impl_all!(Struct: ::core::cmp::PartialEq, ::core::cmp::Eq); + } + } +} + +// Tests for SplitAt which requires repr(C) and at least one field +mod split_at_test { + mod SplitAt { + use super::super::*; + #[deprecated = "do not use"] + #[derive(imp::SplitAt, imp::KnownLayout)] + #[repr(C)] + struct Struct { + a: [u8], + } + + #[allow(deprecated)] + fn _allow_deprecated() { + util_assert_impl_all!(Struct: imp::SplitAt); + } + } +} diff --git a/vendor/zerocopy-derive/tests/enum_from_zeros.rs b/vendor/zerocopy-derive/tests/enum_from_zeros.rs new file mode 100644 index 00000000..6245785b --- /dev/null +++ b/vendor/zerocopy-derive/tests/enum_from_zeros.rs @@ -0,0 +1,99 @@ +// Copyright 2019 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +// See comment in `include.rs` for why we disable the prelude. 
+#![no_implicit_prelude] +#![allow(warnings)] + +include!("include.rs"); + +#[derive(imp::FromZeros)] +#[repr(C)] +enum Foo { + A, +} + +util_assert_impl_all!(Foo: imp::FromZeros); + +#[derive(imp::FromZeros)] +#[repr(C)] +enum Bar { + A = 0, +} + +util_assert_impl_all!(Bar: imp::FromZeros); + +#[derive(imp::FromZeros)] +#[repr(C)] +enum TwoVariantsHasExplicitZero { + A = 1, + B = 0, +} + +util_assert_impl_all!(TwoVariantsHasExplicitZero: imp::FromZeros); + +#[derive(imp::FromZeros)] +#[repr(i8)] +enum ImplicitNonFirstVariantIsZero { + A = -1, + B, +} + +util_assert_impl_all!(ImplicitNonFirstVariantIsZero: imp::FromZeros); + +#[derive(imp::FromZeros)] +#[repr(u64)] +enum LargeDiscriminant { + A = 0xFFFF_FFFF_FFFF_FFFF, + B = 0x0000_0000_0000_0000, +} + +util_assert_impl_all!(LargeDiscriminant: imp::FromZeros); + +#[derive(imp::FromZeros)] +#[repr(C)] +enum FirstVariantIsZeroable { + A(u32), + B { foo: u32 }, +} + +util_assert_impl_all!(FirstVariantIsZeroable: imp::FromZeros); + +#[derive(imp::FromZeros)] +#[repr(u8)] +enum FirstVariantIsZeroableSecondIsNot { + A(bool), + B(::core::num::NonZeroU8), +} + +util_assert_impl_all!(FirstVariantIsZeroableSecondIsNot: imp::FromZeros); + +// MSRV does not support data-carrying enum variants with explicit discriminants +#[cfg(not(__ZEROCOPY_TOOLCHAIN = "msrv"))] +mod msrv_only { + use super::*; + + #[derive(imp::FromZeros)] + #[repr(u8)] + enum ImplicitFirstVariantIsZeroable { + A(bool), + B(::core::num::NonZeroU8) = 1, + } + + util_assert_impl_all!(ImplicitFirstVariantIsZeroable: imp::FromZeros); + + #[derive(imp::FromZeros)] + #[repr(i8)] + enum ImplicitNonFirstVariantIsZeroable { + A(::core::num::NonZeroU8) = 1, + B = -1, + C(bool), + } + + util_assert_impl_all!(ImplicitNonFirstVariantIsZeroable: imp::FromZeros); +} diff --git a/vendor/zerocopy-derive/tests/enum_known_layout.rs b/vendor/zerocopy-derive/tests/enum_known_layout.rs new file mode 100644 index 00000000..72e52fa6 --- /dev/null +++ b/vendor/zerocopy-derive/tests/enum_known_layout.rs @@ -0,0 +1,50 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +// See comment in `include.rs` for why we disable the prelude. +#![no_implicit_prelude] +#![allow(warnings)] + +include!("include.rs"); + +#[derive(imp::KnownLayout)] +enum Foo { + A, +} + +util_assert_impl_all!(Foo: imp::KnownLayout); + +#[derive(imp::KnownLayout)] +enum Bar { + A = 0, +} + +util_assert_impl_all!(Bar: imp::KnownLayout); + +#[derive(imp::KnownLayout)] +enum Baz { + A = 1, + B = 0, +} + +util_assert_impl_all!(Baz: imp::KnownLayout); + +// Deriving `KnownLayout` should work if the enum has bounded parameters. 
+ +#[derive(imp::KnownLayout)] +#[repr(C)] +enum WithParams<'a: 'b, 'b: 'a, T: 'a + 'b + imp::KnownLayout, const N: usize> +where + 'a: 'b, + 'b: 'a, + T: 'a + 'b + imp::KnownLayout, +{ + Variant([T; N], imp::PhantomData<&'a &'b ()>), +} + +util_assert_impl_all!(WithParams<'static, 'static, u8, 42>: imp::KnownLayout); diff --git a/vendor/zerocopy-derive/tests/enum_no_cell.rs b/vendor/zerocopy-derive/tests/enum_no_cell.rs new file mode 100644 index 00000000..f22b8da8 --- /dev/null +++ b/vendor/zerocopy-derive/tests/enum_no_cell.rs @@ -0,0 +1,51 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +// See comment in `include.rs` for why we disable the prelude. +#![no_implicit_prelude] +#![allow(warnings)] + +include!("include.rs"); + +#[derive(imp::Immutable)] +enum Foo { + A, +} + +util_assert_impl_all!(Foo: imp::Immutable); + +#[derive(imp::Immutable)] +enum Bar { + A = 0, +} + +util_assert_impl_all!(Bar: imp::Immutable); + +#[derive(imp::Immutable)] +enum Baz { + A = 1, + B = 0, +} + +util_assert_impl_all!(Baz: imp::Immutable); + +// Deriving `Immutable` should work if the enum has bounded parameters. + +#[derive(imp::Immutable)] +#[repr(C)] +enum WithParams<'a: 'b, 'b: 'a, T: 'a + 'b + imp::Immutable, const N: ::core::primitive::usize> +where + 'a: 'b, + 'b: 'a, + T: 'a + 'b + imp::Immutable, +{ + Variant([T; N], imp::PhantomData<&'a &'b ()>), + UnsafeCell(imp::PhantomData<imp::UnsafeCell<()>>, &'a imp::UnsafeCell<()>), +} + +util_assert_impl_all!(WithParams<'static, 'static, u8, 42>: imp::Immutable); diff --git a/vendor/zerocopy-derive/tests/enum_to_bytes.rs b/vendor/zerocopy-derive/tests/enum_to_bytes.rs new file mode 100644 index 00000000..560bc869 --- /dev/null +++ b/vendor/zerocopy-derive/tests/enum_to_bytes.rs @@ -0,0 +1,133 @@ +// Copyright 2019 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +// See comment in `include.rs` for why we disable the prelude. +#![no_implicit_prelude] +#![allow(warnings)] + +include!("include.rs"); + +// An enum is `IntoBytes` if if has a defined repr. 
+ +#[derive(imp::IntoBytes)] +#[repr(C)] +enum C { + A, +} + +util_assert_impl_all!(C: imp::IntoBytes); + +#[derive(imp::IntoBytes)] +#[repr(u8)] +enum U8 { + A, +} + +util_assert_impl_all!(U8: imp::IntoBytes); + +#[derive(imp::IntoBytes)] +#[repr(u16)] +enum U16 { + A, +} + +util_assert_impl_all!(U16: imp::IntoBytes); + +#[derive(imp::IntoBytes)] +#[repr(u32)] +enum U32 { + A, +} + +util_assert_impl_all!(U32: imp::IntoBytes); + +#[derive(imp::IntoBytes)] +#[repr(u64)] +enum U64 { + A, +} + +util_assert_impl_all!(U64: imp::IntoBytes); + +#[derive(imp::IntoBytes)] +#[repr(usize)] +enum Usize { + A, +} + +util_assert_impl_all!(Usize: imp::IntoBytes); + +#[derive(imp::IntoBytes)] +#[repr(i8)] +enum I8 { + A, +} + +util_assert_impl_all!(I8: imp::IntoBytes); + +#[derive(imp::IntoBytes)] +#[repr(i16)] +enum I16 { + A, +} + +util_assert_impl_all!(I16: imp::IntoBytes); + +#[derive(imp::IntoBytes)] +#[repr(i32)] +enum I32 { + A, +} + +util_assert_impl_all!(I32: imp::IntoBytes); + +#[derive(imp::IntoBytes)] +#[repr(i64)] +enum I64 { + A, +} + +util_assert_impl_all!(I64: imp::IntoBytes); + +#[derive(imp::IntoBytes)] +#[repr(isize)] +enum Isize { + A, +} + +util_assert_impl_all!(Isize: imp::IntoBytes); + +#[derive(imp::IntoBytes)] +#[repr(u8)] +enum HasData { + A(u8), + B(i8), +} + +util_assert_impl_all!(HasData: imp::IntoBytes); + +#[derive(imp::IntoBytes)] +#[repr(u32)] +enum HasData32 { + A(u32), + B(i32), + C([u8; 4]), + D([u16; 2]), +} + +util_assert_impl_all!(HasData: imp::IntoBytes); + +// After #1752 landed but before #1758 was fixed, this failed to compile because +// the padding check treated the tag type as being `#[repr(u8, align(2))] struct +// Tag { A }`, which is two bytes long, rather than the correct `#[repr(u8)] +// struct Tag { A }`, which is one byte long. +#[derive(imp::IntoBytes)] +#[repr(u8, align(2))] +enum BadTagWouldHavePadding { + A(u8, u16), +} diff --git a/vendor/zerocopy-derive/tests/enum_try_from_bytes.rs b/vendor/zerocopy-derive/tests/enum_try_from_bytes.rs new file mode 100644 index 00000000..bd89803e --- /dev/null +++ b/vendor/zerocopy-derive/tests/enum_try_from_bytes.rs @@ -0,0 +1,653 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +// See comment in `include.rs` for why we disable the prelude. 
+#![no_implicit_prelude] +#![allow(warnings)] + +include!("include.rs"); + +#[derive(Eq, PartialEq, Debug, imp::Immutable, imp::KnownLayout, imp::TryFromBytes)] +#[repr(u8)] +enum Foo { + A, +} + +util_assert_impl_all!(Foo: imp::TryFromBytes); + +#[test] +fn test_foo() { + imp::assert_eq!(<Foo as imp::TryFromBytes>::try_read_from_bytes(&[0]), imp::Ok(Foo::A)); + imp::assert!(<Foo as imp::TryFromBytes>::try_read_from_bytes(&[]).is_err()); + imp::assert!(<Foo as imp::TryFromBytes>::try_read_from_bytes(&[1]).is_err()); + imp::assert!(<Foo as imp::TryFromBytes>::try_read_from_bytes(&[0, 0]).is_err()); +} + +#[derive(Eq, PartialEq, Debug, imp::KnownLayout, imp::Immutable, imp::TryFromBytes)] +#[repr(u16)] +enum Bar { + A = 0, +} + +util_assert_impl_all!(Bar: imp::TryFromBytes); + +#[test] +fn test_bar() { + imp::assert_eq!(<Bar as imp::TryFromBytes>::try_read_from_bytes(&[0, 0]), imp::Ok(Bar::A)); + imp::assert!(<Bar as imp::TryFromBytes>::try_read_from_bytes(&[]).is_err()); + imp::assert!(<Bar as imp::TryFromBytes>::try_read_from_bytes(&[0]).is_err()); + imp::assert!(<Bar as imp::TryFromBytes>::try_read_from_bytes(&[0, 1]).is_err()); + imp::assert!(<Bar as imp::TryFromBytes>::try_read_from_bytes(&[0, 0, 0]).is_err()); +} + +#[derive(Eq, PartialEq, Debug, imp::KnownLayout, imp::Immutable, imp::TryFromBytes)] +#[repr(u32)] +enum Baz { + A = 1, + B = 0, +} + +util_assert_impl_all!(Baz: imp::TryFromBytes); + +#[test] +fn test_baz() { + imp::assert_eq!( + <Baz as imp::TryFromBytes>::try_read_from_bytes(imp::IntoBytes::as_bytes(&1u32)), + imp::Ok(Baz::A) + ); + imp::assert_eq!( + <Baz as imp::TryFromBytes>::try_read_from_bytes(imp::IntoBytes::as_bytes(&0u32)), + imp::Ok(Baz::B) + ); + imp::assert!(<Baz as imp::TryFromBytes>::try_read_from_bytes(&[]).is_err()); + imp::assert!(<Baz as imp::TryFromBytes>::try_read_from_bytes(&[0]).is_err()); + imp::assert!(<Baz as imp::TryFromBytes>::try_read_from_bytes(&[0, 0]).is_err()); + imp::assert!(<Baz as imp::TryFromBytes>::try_read_from_bytes(&[0, 0, 0]).is_err()); + imp::assert!(<Baz as imp::TryFromBytes>::try_read_from_bytes(&[0, 0, 0, 0, 0]).is_err()); +} + +// Test hygiene - make sure that `i8` being shadowed doesn't cause problems for +// the code emitted by the derive. 
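+// The `type i8 = bool;` alias below shadows the primitive `i8`, so the code emitted by the derive must refer to the discriminant type in a way that does not depend on the ambient `i8` name.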
+type i8 = bool; + +const THREE: ::core::primitive::i8 = 3; + +#[derive(Eq, PartialEq, Debug, imp::KnownLayout, imp::Immutable, imp::TryFromBytes)] +#[repr(i8)] +enum Blah { + A = 1, + B = 0, + C = 1 + 2, + D = 3 + THREE, +} + +util_assert_impl_all!(Blah: imp::TryFromBytes); + +#[test] +fn test_blah() { + imp::assert_eq!( + <Blah as imp::TryFromBytes>::try_read_from_bytes(imp::IntoBytes::as_bytes(&1i8)), + imp::Ok(Blah::A) + ); + imp::assert_eq!( + <Blah as imp::TryFromBytes>::try_read_from_bytes(imp::IntoBytes::as_bytes(&0i8)), + imp::Ok(Blah::B) + ); + imp::assert_eq!( + <Blah as imp::TryFromBytes>::try_read_from_bytes(imp::IntoBytes::as_bytes(&3i8)), + imp::Ok(Blah::C) + ); + imp::assert_eq!( + <Blah as imp::TryFromBytes>::try_read_from_bytes(imp::IntoBytes::as_bytes(&6i8)), + imp::Ok(Blah::D) + ); + imp::assert!(<Blah as imp::TryFromBytes>::try_read_from_bytes(&[]).is_err()); + imp::assert!(<Blah as imp::TryFromBytes>::try_read_from_bytes(&[4]).is_err()); + imp::assert!(<Blah as imp::TryFromBytes>::try_read_from_bytes(&[0, 0]).is_err()); +} + +#[derive( + Eq, PartialEq, Debug, imp::KnownLayout, imp::Immutable, imp::TryFromBytes, imp::IntoBytes, +)] +#[repr(C)] +enum FieldlessButNotUnitOnly { + A, + B(), + C {}, +} + +#[test] +fn test_fieldless_but_not_unit_only() { + const SIZE: usize = ::core::mem::size_of::<FieldlessButNotUnitOnly>(); + let disc: [u8; SIZE] = ::zerocopy::transmute!(FieldlessButNotUnitOnly::A); + imp::assert_eq!( + <FieldlessButNotUnitOnly as imp::TryFromBytes>::try_read_from_bytes(&disc[..]), + imp::Ok(FieldlessButNotUnitOnly::A) + ); + let disc: [u8; SIZE] = ::zerocopy::transmute!(FieldlessButNotUnitOnly::B()); + imp::assert_eq!( + <FieldlessButNotUnitOnly as imp::TryFromBytes>::try_read_from_bytes(&disc[..]), + imp::Ok(FieldlessButNotUnitOnly::B()) + ); + let disc: [u8; SIZE] = ::zerocopy::transmute!(FieldlessButNotUnitOnly::C {}); + imp::assert_eq!( + <FieldlessButNotUnitOnly as imp::TryFromBytes>::try_read_from_bytes(&disc[..]), + imp::Ok(FieldlessButNotUnitOnly::C {}) + ); + imp::assert!(<FieldlessButNotUnitOnly as imp::TryFromBytes>::try_read_from_bytes( + &[0xFF; SIZE][..] + ) + .is_err()); +} + +#[derive( + Eq, PartialEq, Debug, imp::KnownLayout, imp::Immutable, imp::TryFromBytes, imp::IntoBytes, +)] +#[repr(C)] +enum WeirdDiscriminants { + A = -7, + B, + C = 33, +} + +#[test] +fn test_weird_discriminants() { + const SIZE: usize = ::core::mem::size_of::<WeirdDiscriminants>(); + let disc: [u8; SIZE] = ::zerocopy::transmute!(WeirdDiscriminants::A); + imp::assert_eq!( + <WeirdDiscriminants as imp::TryFromBytes>::try_read_from_bytes(&disc[..]), + imp::Ok(WeirdDiscriminants::A) + ); + let disc: [u8; SIZE] = ::zerocopy::transmute!(WeirdDiscriminants::B); + imp::assert_eq!( + <WeirdDiscriminants as imp::TryFromBytes>::try_read_from_bytes(&disc[..]), + imp::Ok(WeirdDiscriminants::B) + ); + let disc: [u8; SIZE] = ::zerocopy::transmute!(WeirdDiscriminants::C); + imp::assert_eq!( + <WeirdDiscriminants as imp::TryFromBytes>::try_read_from_bytes(&disc[..]), + imp::Ok(WeirdDiscriminants::C) + ); + imp::assert!( + <WeirdDiscriminants as imp::TryFromBytes>::try_read_from_bytes(&[0xFF; SIZE][..]).is_err() + ); +} + +// Technically non-portable since this is only `IntoBytes` if the discriminant +// is an `i32` or `u32`, but we'll cross that bridge when we get to it... 
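+// (With a 32-bit tag, the tag and the 4-byte payloads line up with no padding; any other tag size would introduce padding and make the type ineligible for `IntoBytes`.)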
+#[derive( + Eq, PartialEq, Debug, imp::KnownLayout, imp::Immutable, imp::TryFromBytes, imp::IntoBytes, +)] +#[repr(C)] +enum HasFields { + A(u32), + B { foo: ::core::num::NonZeroU32 }, +} + +#[test] +fn test_has_fields() { + const SIZE: usize = ::core::mem::size_of::<HasFields>(); + + let bytes: [u8; SIZE] = ::zerocopy::transmute!(HasFields::A(10)); + imp::assert_eq!( + <HasFields as imp::TryFromBytes>::try_read_from_bytes(&bytes[..]), + imp::Ok(HasFields::A(10)), + ); + + let bytes: [u8; SIZE] = + ::zerocopy::transmute!(HasFields::B { foo: ::core::num::NonZeroU32::new(123456).unwrap() }); + imp::assert_eq!( + <HasFields as imp::TryFromBytes>::try_read_from_bytes(&bytes[..]), + imp::Ok(HasFields::B { foo: ::core::num::NonZeroU32::new(123456).unwrap() }), + ); +} + +#[derive(Eq, PartialEq, Debug, imp::KnownLayout, imp::Immutable, imp::TryFromBytes)] +#[repr(C, align(16))] +enum HasFieldsAligned { + A(u32), + B { foo: ::core::num::NonZeroU32 }, +} + +util_assert_impl_all!(HasFieldsAligned: imp::TryFromBytes); + +#[test] +fn test_has_fields_aligned() { + const SIZE: usize = ::core::mem::size_of::<HasFieldsAligned>(); + + #[derive(imp::IntoBytes)] + #[repr(C)] + struct BytesOfHasFieldsAligned { + has_fields: HasFields, + padding: [u8; 8], + } + + let wrap = |has_fields| BytesOfHasFieldsAligned { has_fields, padding: [0; 8] }; + + let bytes: [u8; SIZE] = ::zerocopy::transmute!(wrap(HasFields::A(10))); + imp::assert_eq!( + <HasFieldsAligned as imp::TryFromBytes>::try_read_from_bytes(&bytes[..]), + imp::Ok(HasFieldsAligned::A(10)), + ); + + let bytes: [u8; SIZE] = ::zerocopy::transmute!(wrap(HasFields::B { + foo: ::core::num::NonZeroU32::new(123456).unwrap() + })); + imp::assert_eq!( + <HasFieldsAligned as imp::TryFromBytes>::try_read_from_bytes(&bytes[..]), + imp::Ok(HasFieldsAligned::B { foo: ::core::num::NonZeroU32::new(123456).unwrap() }), + ); +} + +#[derive( + Eq, PartialEq, Debug, imp::KnownLayout, imp::Immutable, imp::TryFromBytes, imp::IntoBytes, +)] +#[repr(u32)] +enum HasFieldsPrimitive { + A(u32), + B { foo: ::core::num::NonZeroU32 }, +} + +#[test] +fn test_has_fields_primitive() { + const SIZE: usize = ::core::mem::size_of::<HasFieldsPrimitive>(); + + let bytes: [u8; SIZE] = ::zerocopy::transmute!(HasFieldsPrimitive::A(10)); + imp::assert_eq!( + <HasFieldsPrimitive as imp::TryFromBytes>::try_read_from_bytes(&bytes[..]), + imp::Ok(HasFieldsPrimitive::A(10)), + ); + + let bytes: [u8; SIZE] = ::zerocopy::transmute!(HasFieldsPrimitive::B { + foo: ::core::num::NonZeroU32::new(123456).unwrap(), + }); + imp::assert_eq!( + <HasFieldsPrimitive as imp::TryFromBytes>::try_read_from_bytes(&bytes[..]), + imp::Ok(HasFieldsPrimitive::B { foo: ::core::num::NonZeroU32::new(123456).unwrap() }), + ); +} + +#[derive(Eq, PartialEq, Debug, imp::KnownLayout, imp::Immutable, imp::TryFromBytes)] +#[repr(u32, align(16))] +enum HasFieldsPrimitiveAligned { + A(u32), + B { foo: ::core::num::NonZeroU32 }, +} + +util_assert_impl_all!(HasFieldsPrimitiveAligned: imp::TryFromBytes); + +#[test] +fn test_has_fields_primitive_aligned() { + const SIZE: usize = ::core::mem::size_of::<HasFieldsPrimitiveAligned>(); + + #[derive(imp::IntoBytes)] + #[repr(C)] + struct BytesOfHasFieldsPrimitiveAligned { + has_fields: HasFieldsPrimitive, + padding: [u8; 8], + } + + let wrap = |has_fields| BytesOfHasFieldsPrimitiveAligned { has_fields, padding: [0; 8] }; + + let bytes: [u8; SIZE] = ::zerocopy::transmute!(wrap(HasFieldsPrimitive::A(10))); + imp::assert_eq!( + <HasFieldsPrimitiveAligned as 
imp::TryFromBytes>::try_read_from_bytes(&bytes[..]), + imp::Ok(HasFieldsPrimitiveAligned::A(10)), + ); + + let bytes: [u8; SIZE] = ::zerocopy::transmute!(wrap(HasFieldsPrimitive::B { + foo: ::core::num::NonZeroU32::new(123456).unwrap() + })); + imp::assert_eq!( + <HasFieldsPrimitiveAligned as imp::TryFromBytes>::try_read_from_bytes(&bytes[..]), + imp::Ok(HasFieldsPrimitiveAligned::B { + foo: ::core::num::NonZeroU32::new(123456).unwrap() + }), + ); +} + +#[derive(imp::TryFromBytes)] +#[repr(align(4), u32)] +enum HasReprAlignFirst { + A, + B, +} + +util_assert_impl_all!(HasReprAlignFirst: imp::TryFromBytes); + +#[derive(imp::KnownLayout, imp::TryFromBytes, imp::Immutable)] +#[repr(u8)] +enum Complex { + UnitLike, + StructLike { a: u8, b: u16 }, + TupleLike(bool, char), +} + +util_assert_impl_all!(Complex: imp::TryFromBytes); + +#[derive(imp::KnownLayout, imp::TryFromBytes, imp::Immutable)] +#[repr(u8)] +enum ComplexWithGenerics<X, Y> { + UnitLike, + StructLike { a: u8, b: X }, + TupleLike(bool, Y), +} + +util_assert_impl_all!(ComplexWithGenerics<u16, char>: imp::TryFromBytes); + +#[derive(imp::KnownLayout, imp::TryFromBytes, imp::Immutable)] +#[repr(C)] +enum GenericWithLifetimes<'a, 'b, X: 'a, Y: 'b> { + Foo(::core::marker::PhantomData<&'a X>), + Bar(::core::marker::PhantomData<&'b Y>), +} + +#[derive(Clone, Copy, imp::TryFromBytes)] +struct A; + +#[derive(imp::TryFromBytes)] +#[repr(C)] +enum B { + A(A), + A2 { a: A }, +} + +#[derive(imp::TryFromBytes)] +#[repr(u8)] +enum FooU8 { + Variant0, + Variant1, + Variant2, + Variant3, + Variant4, + Variant5, + Variant6, + Variant7, + Variant8, + Variant9, + Variant10, + Variant11, + Variant12, + Variant13, + Variant14, + Variant15, + Variant16, + Variant17, + Variant18, + Variant19, + Variant20, + Variant21, + Variant22, + Variant23, + Variant24, + Variant25, + Variant26, + Variant27, + Variant28, + Variant29, + Variant30, + Variant31, + Variant32, + Variant33, + Variant34, + Variant35, + Variant36, + Variant37, + Variant38, + Variant39, + Variant40, + Variant41, + Variant42, + Variant43, + Variant44, + Variant45, + Variant46, + Variant47, + Variant48, + Variant49, + Variant50, + Variant51, + Variant52, + Variant53, + Variant54, + Variant55, + Variant56, + Variant57, + Variant58, + Variant59, + Variant60, + Variant61, + Variant62, + Variant63, + Variant64, + Variant65, + Variant66, + Variant67, + Variant68, + Variant69, + Variant70, + Variant71, + Variant72, + Variant73, + Variant74, + Variant75, + Variant76, + Variant77, + Variant78, + Variant79, + Variant80, + Variant81, + Variant82, + Variant83, + Variant84, + Variant85, + Variant86, + Variant87, + Variant88, + Variant89, + Variant90, + Variant91, + Variant92, + Variant93, + Variant94, + Variant95, + Variant96, + Variant97, + Variant98, + Variant99, + Variant100, + Variant101, + Variant102, + Variant103, + Variant104, + Variant105, + Variant106, + Variant107, + Variant108, + Variant109, + Variant110, + Variant111, + Variant112, + Variant113, + Variant114, + Variant115, + Variant116, + Variant117, + Variant118, + Variant119, + Variant120, + Variant121, + Variant122, + Variant123, + Variant124, + Variant125, + Variant126, + Variant127, + Variant128, + Variant129, + Variant130, + Variant131, + Variant132, + Variant133, + Variant134, + Variant135, + Variant136, + Variant137, + Variant138, + Variant139, + Variant140, + Variant141, + Variant142, + Variant143, + Variant144, + Variant145, + Variant146, + Variant147, + Variant148, + Variant149, + Variant150, + Variant151, + Variant152, + Variant153, + 
Variant154, + Variant155, + Variant156, + Variant157, + Variant158, + Variant159, + Variant160, + Variant161, + Variant162, + Variant163, + Variant164, + Variant165, + Variant166, + Variant167, + Variant168, + Variant169, + Variant170, + Variant171, + Variant172, + Variant173, + Variant174, + Variant175, + Variant176, + Variant177, + Variant178, + Variant179, + Variant180, + Variant181, + Variant182, + Variant183, + Variant184, + Variant185, + Variant186, + Variant187, + Variant188, + Variant189, + Variant190, + Variant191, + Variant192, + Variant193, + Variant194, + Variant195, + Variant196, + Variant197, + Variant198, + Variant199, + Variant200, + Variant201, + Variant202, + Variant203, + Variant204, + Variant205, + Variant206, + Variant207, + Variant208, + Variant209, + Variant210, + Variant211, + Variant212, + Variant213, + Variant214, + Variant215, + Variant216, + Variant217, + Variant218, + Variant219, + Variant220, + Variant221, + Variant222, + Variant223, + Variant224, + Variant225, + Variant226, + Variant227, + Variant228, + Variant229, + Variant230, + Variant231, + Variant232, + Variant233, + Variant234, + Variant235, + Variant236, + Variant237, + Variant238, + Variant239, + Variant240, + Variant241, + Variant242, + Variant243, + Variant244, + Variant245, + Variant246, + Variant247, + Variant248, + Variant249, + Variant250, + Variant251, + Variant252, + Variant253, + Variant254, + Variant255, +} + +#[test] +fn test_trivial_is_bit_valid() { + // Though we don't derive `FromBytes`, `FooU8` *could* soundly implement + // `FromBytes`. Therefore, `TryFromBytes` derive's `is_bit_valid` impl is + // trivial - it unconditionally returns `true`. + util_assert_not_impl_any!(FooU8: imp::FromBytes); + util::test_trivial_is_bit_valid::<FooU8>(); +} + +#[deny(non_camel_case_types)] +mod issue_2051 { + use super::*; + + // Test that the `non_camel_case_types` lint isn't triggered by generated + // code. + // Prevents regressions of #2051. + #[repr(u32)] + #[derive(imp::TryFromBytes)] + #[allow(non_camel_case_types)] + pub enum Code { + I32_ADD, + I32_SUB, + I32_MUL, + } +} + +#[derive(imp::TryFromBytes)] +#[repr(u8)] +enum RawIdentifierVariant { + r#type, +} + +util_assert_impl_all!(RawIdentifierVariant: imp::TryFromBytes); diff --git a/vendor/zerocopy-derive/tests/enum_unaligned.rs b/vendor/zerocopy-derive/tests/enum_unaligned.rs new file mode 100644 index 00000000..2d8f510f --- /dev/null +++ b/vendor/zerocopy-derive/tests/enum_unaligned.rs @@ -0,0 +1,49 @@ +// Copyright 2019 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +// See comment in `include.rs` for why we disable the prelude. 
+#![no_implicit_prelude] +#![allow(warnings)] + +include!("include.rs"); + +// An enum is `Unaligned` if: +// - No `repr(align(N > 1))` +// - `repr(u8)` or `repr(i8)` + +#[derive(imp::Unaligned)] +#[repr(u8)] +enum Foo { + A, +} + +util_assert_impl_all!(Foo: imp::Unaligned); + +#[derive(imp::Unaligned)] +#[repr(i8)] +enum Bar { + A, +} + +util_assert_impl_all!(Bar: imp::Unaligned); + +#[derive(imp::Unaligned)] +#[repr(u8, align(1))] +enum Baz { + A, +} + +util_assert_impl_all!(Baz: imp::Unaligned); + +#[derive(imp::Unaligned)] +#[repr(i8, align(1))] +enum Blah { + B, +} + +util_assert_impl_all!(Blah: imp::Unaligned); diff --git a/vendor/zerocopy-derive/tests/eq.rs b/vendor/zerocopy-derive/tests/eq.rs new file mode 100644 index 00000000..084f2c22 --- /dev/null +++ b/vendor/zerocopy-derive/tests/eq.rs @@ -0,0 +1,33 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +// See comment in `include.rs` for why we disable the prelude. +#![no_implicit_prelude] +#![allow(warnings)] + +include!("include.rs"); + +#[derive(imp::Debug, imp::IntoBytes, imp::Immutable, imp::ByteEq)] +#[repr(C)] +struct Struct { + a: u64, + b: u32, + c: u32, +} + +util_assert_impl_all!(Struct: imp::IntoBytes, imp::PartialEq, imp::Eq); + +#[test] +fn test_eq() { + use imp::{assert_eq, assert_ne}; + let a = Struct { a: 10, b: 15, c: 20 }; + let b = Struct { a: 10, b: 15, c: 25 }; + assert_eq!(a, a); + assert_ne!(a, b); + assert_ne!(b, a); +} diff --git a/vendor/zerocopy-derive/tests/hash.rs b/vendor/zerocopy-derive/tests/hash.rs new file mode 100644 index 00000000..08b172f4 --- /dev/null +++ b/vendor/zerocopy-derive/tests/hash.rs @@ -0,0 +1,38 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +// See comment in `include.rs` for why we disable the prelude. +#![no_implicit_prelude] +#![allow(warnings)] + +include!("include.rs"); + +#[derive(imp::IntoBytes, imp::Immutable, imp::ByteHash)] +#[repr(C)] +struct Struct { + a: u64, + b: u32, + c: u32, +} + +util_assert_impl_all!(Struct: imp::IntoBytes, imp::hash::Hash); + +#[test] +fn test_hash() { + use imp::{ + hash::{Hash, Hasher}, + DefaultHasher, + }; + fn hash(val: impl Hash) -> u64 { + let mut hasher = DefaultHasher::new(); + val.hash(&mut hasher); + hasher.finish() + } + hash(Struct { a: 10, b: 15, c: 20 }); + hash(&[Struct { a: 10, b: 15, c: 20 }, Struct { a: 5, b: 4, c: 3 }]); +} diff --git a/vendor/zerocopy-derive/tests/hygiene.rs b/vendor/zerocopy-derive/tests/hygiene.rs new file mode 100644 index 00000000..9c3671d8 --- /dev/null +++ b/vendor/zerocopy-derive/tests/hygiene.rs @@ -0,0 +1,38 @@ +// Copyright 2019 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. 
+// This file may not be copied, modified, or distributed except according to +// those terms. + +// Make sure that macro hygiene will ensure that when we reference "zerocopy", +// that will work properly even if they've renamed the crate and have not +// imported its traits. + +// See comment in `include.rs` for why we disable the prelude. +#![no_implicit_prelude] +#![allow(warnings)] + +include!("include.rs"); + +extern crate zerocopy as _zerocopy; + +#[derive(_zerocopy::KnownLayout, _zerocopy::FromBytes, _zerocopy::Unaligned)] +#[repr(C)] +struct TypeParams<'a, T, I: imp::Iterator> { + a: T, + c: I::Item, + d: u8, + e: imp::PhantomData<&'a [::core::primitive::u8]>, + f: imp::PhantomData<&'static ::core::primitive::str>, + g: imp::PhantomData<imp::String>, +} + +util_assert_impl_all!( + TypeParams<'static, (), imp::IntoIter<()>>: + _zerocopy::KnownLayout, + _zerocopy::FromZeros, + _zerocopy::FromBytes, + _zerocopy::Unaligned +); diff --git a/vendor/zerocopy-derive/tests/include.rs b/vendor/zerocopy-derive/tests/include.rs new file mode 100644 index 00000000..01a370c3 --- /dev/null +++ b/vendor/zerocopy-derive/tests/include.rs @@ -0,0 +1,129 @@ +// Copyright 2019 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +// ON THE PRELUDE: All of the tests in this directory (excepting UI tests) +// disable the prelude via `#![no_implicit_prelude]`. This ensures that all code +// emitted by our derives doesn't accidentally assume that the prelude is +// included, which helps ensure that items are referred to by absolute path, +// which in turn ensures that these items can't accidentally refer to names +// which have been shadowed. For example, the code `x == None` could behave +// incorrectly if, in the scope in which the derive is invoked, `None` has been +// shadowed by `CONST None: Option<usize> = Some(1)`. +// +// `mod imp` allows us to import items and refer to them in this module without +// introducing the risk that this hides bugs in which derive-emitted code uses +// names which are not fully-qualified. For such a bug to manifest, it would +// need to be of the form `imp::Foo`, which is unlikely to happen by accident. +mod imp { + // Since this file is included in every test file, and since not every test + // file uses every item here, we allow unused imports to avoid generating + // warnings. + #[allow(unused)] + pub use { + ::core::{ + self, assert_eq, assert_ne, + cell::UnsafeCell, + convert::TryFrom, + hash, + marker::PhantomData, + mem::{ManuallyDrop, MaybeUninit}, + option::IntoIter, + prelude::v1::*, + primitive::*, + }, + ::std::{collections::hash_map::DefaultHasher, prelude::v1::*}, + ::zerocopy::*, + }; +} + +// These items go in their own module (rather than the top level) for the same +// reason that we use `mod imp` above. See its comment for more details. +pub mod util { + /// A type that doesn't implement any zerocopy traits. + pub struct NotZerocopy<T = ()>(pub T); + + /// A `u16` with alignment 2. + /// + /// Though `u16` has alignment 2 on some platforms, it's not guaranteed. By + /// contrast, `util::AU16` is guaranteed to have alignment 2. 
+ #[derive( + super::imp::KnownLayout, + super::imp::Immutable, + super::imp::FromBytes, + super::imp::IntoBytes, + Copy, + Clone, + )] + #[repr(C, align(2))] + pub struct AU16(pub u16); + + // Since we can't import these by path (ie, `util::assert_impl_all!`), use a + // name prefix to ensure our derive-emitted code isn't accidentally relying + // on `assert_impl_all!` being in scope. + #[macro_export] + macro_rules! util_assert_impl_all { + ($type:ty: $($trait:path),+ $(,)?) => { + const _: fn() = || { + use ::core::prelude::v1::*; + ::static_assertions::assert_impl_all!($type: $($trait),+); + }; + }; + } + + // Since we can't import these by path (ie, `util::assert_not_impl_any!`), + // use a name prefix to ensure our derive-emitted code isn't accidentally + // relying on `assert_not_impl_any!` being in scope. + #[macro_export] + macro_rules! util_assert_not_impl_any { + ($x:ty: $($t:path),+ $(,)?) => { + const _: fn() = || { + use ::core::prelude::v1::*; + ::static_assertions::assert_not_impl_any!($x: $($t),+); + }; + }; + } + + #[macro_export] + macro_rules! test_trivial_is_bit_valid { + ($x:ty => $name:ident) => { + #[test] + fn $name() { + util::test_trivial_is_bit_valid::<$x>(); + } + }; + } + + // Under some circumstances, our `TryFromBytes` derive generates a trivial + // `is_bit_valid` impl that unconditionally returns `true`. This test + // attempts to validate that this is, indeed, the behavior of our + // `TryFromBytes` derive. It is not foolproof, but is likely to catch some + // mistakes. + // + // As of this writing, this happens when deriving `TryFromBytes` thanks to a + // top-level `#[derive(FromBytes)]`. + pub fn test_trivial_is_bit_valid<T: super::imp::TryFromBytes>() { + // This test works based on the insight that a trivial `is_bit_valid` + // impl should never load any bytes from memory. Thus, while it is + // technically a violation of `is_bit_valid`'s safety precondition to + // pass a pointer to uninitialized memory, the `is_bit_valid` impl we + // expect our derives to generate should never touch this memory, and + // thus should never exhibit UB. By contrast, if our derives are + // spuriously generating non-trivial `is_bit_valid` impls, this should + // cause UB which may be caught by Miri. + + let buf = super::imp::MaybeUninit::<T>::uninit(); + let ptr = super::imp::Ptr::from_ref(&buf); + // SAFETY: This is intentionally unsound; see the preceding comment. + let ptr = unsafe { ptr.assume_initialized() }; + + // SAFETY: `T` and `MaybeUninit<T>` have the same layout, so this is a + // size-preserving cast. It is also a provenance-preserving cast. + let ptr = unsafe { ptr.cast_unsized_unchecked(|p| p.cast_sized()) }; + assert!(<T as super::imp::TryFromBytes>::is_bit_valid(ptr)); + } +} diff --git a/vendor/zerocopy-derive/tests/issue_2117.rs b/vendor/zerocopy-derive/tests/issue_2117.rs new file mode 100644 index 00000000..1ee809ae --- /dev/null +++ b/vendor/zerocopy-derive/tests/issue_2117.rs @@ -0,0 +1,20 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +// See comment in `include.rs` for why we disable the prelude. 
+#![no_implicit_prelude] +#![allow(warnings)] +#![forbid(unexpected_cfgs)] + +include!("include.rs"); + +// Make sure no unexpected `cfg`s are emitted by our derives (see #2117). + +#[derive(imp::KnownLayout)] +#[repr(C)] +pub struct Test(pub [u8; 32]); diff --git a/vendor/zerocopy-derive/tests/issue_2835.rs b/vendor/zerocopy-derive/tests/issue_2835.rs new file mode 100644 index 00000000..f04fec9b --- /dev/null +++ b/vendor/zerocopy-derive/tests/issue_2835.rs @@ -0,0 +1,22 @@ +// Copyright 2025 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +use zerocopy::{IntoBytes, Unalign}; + +#[allow(unused)] +#[derive(IntoBytes)] +#[repr(C)] +struct Struct { + leading: Unalign<u32>, + trailing: [u8], +} + +#[test] +fn test_issue_2835() { + // Compilation is enough to verify the fix +} diff --git a/vendor/zerocopy-derive/tests/paths_and_modules.rs b/vendor/zerocopy-derive/tests/paths_and_modules.rs new file mode 100644 index 00000000..74f28558 --- /dev/null +++ b/vendor/zerocopy-derive/tests/paths_and_modules.rs @@ -0,0 +1,40 @@ +// Copyright 2019 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +// See comment in `include.rs` for why we disable the prelude. +#![no_implicit_prelude] +#![allow(warnings)] + +include!("include.rs"); + +// Ensure that types that are use'd and types that are referenced by path work. + +mod foo { + use super::*; + + #[derive(imp::FromBytes, imp::IntoBytes, imp::Unaligned)] + #[repr(C)] + pub struct Foo { + foo: u8, + } + + #[derive(imp::FromBytes, imp::IntoBytes, imp::Unaligned)] + #[repr(C)] + pub struct Bar { + bar: u8, + } +} + +use foo::Foo; + +#[derive(imp::FromBytes, imp::IntoBytes, imp::Unaligned)] +#[repr(C)] +struct Baz { + foo: Foo, + bar: foo::Bar, +} diff --git a/vendor/zerocopy-derive/tests/priv_in_pub.rs b/vendor/zerocopy-derive/tests/priv_in_pub.rs new file mode 100644 index 00000000..6e641d0b --- /dev/null +++ b/vendor/zerocopy-derive/tests/priv_in_pub.rs @@ -0,0 +1,34 @@ +// Copyright 2019 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +// See comment in `include.rs` for why we disable the prelude. +#![no_implicit_prelude] +#![allow(warnings)] + +include!("include.rs"); + +// FIXME(#847): Make this test succeed on earlier Rust versions. +#[::rustversion::stable(1.59)] +mod test { + use super::*; + + // These derives do not result in E0446 as of Rust 1.59.0, because of + // https://github.com/rust-lang/rust/pull/90586. + // + // This change eliminates one of the major downsides of emitting `where` + // bounds for field types (i.e., the emission of E0446 for private field + // types). 
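To make the comment above concrete: the derives bound each emitted impl on the types of the struct's fields, so a non-public field type can end up named in the `where` clause of an impl on a public type. The sketch below is a hypothetical stand-in (the `Marker` trait and types are invented, and this is not the code zerocopy-derive actually emits); it only shows the shape that used to trigger E0446 and is accepted since Rust 1.59.

pub trait Marker {}

impl Marker for u8 {}

struct Private(u8);
// Field-type bound rather than a generic-parameter bound.
impl Marker for Private where u8: Marker {}

pub struct Public(Private);
// The private `Private` type is named in the bounds of an impl on the public
// `Public` type; older compilers rejected this with E0446.
impl Marker for Public where Private: Marker {}

The derives under test follow.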
+ + #[derive(imp::KnownLayout, imp::IntoBytes, imp::FromZeros, imp::FromBytes, imp::Unaligned)] + #[repr(C)] + pub struct Public(Private); + + #[derive(imp::KnownLayout, imp::IntoBytes, imp::FromZeros, imp::FromBytes, imp::Unaligned)] + #[repr(C)] + struct Private(()); +} diff --git a/vendor/zerocopy-derive/tests/struct_from_bytes.rs b/vendor/zerocopy-derive/tests/struct_from_bytes.rs new file mode 100644 index 00000000..8cc816c9 --- /dev/null +++ b/vendor/zerocopy-derive/tests/struct_from_bytes.rs @@ -0,0 +1,77 @@ +// Copyright 2019 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +// See comment in `include.rs` for why we disable the prelude. +#![no_implicit_prelude] +#![allow(warnings)] + +include!("include.rs"); + +// A struct is `FromBytes` if: +// - all fields are `FromBytes` + +#[derive(imp::FromBytes)] +struct Zst; + +util_assert_impl_all!(Zst: imp::FromBytes); +test_trivial_is_bit_valid!(Zst => test_zst_trivial_is_bit_valid); + +#[derive(imp::FromBytes)] +struct One { + a: u8, +} + +util_assert_impl_all!(One: imp::FromBytes); +test_trivial_is_bit_valid!(One => test_one_trivial_is_bit_valid); + +#[derive(imp::FromBytes)] +struct Two { + a: u8, + b: Zst, +} + +util_assert_impl_all!(Two: imp::FromBytes); +test_trivial_is_bit_valid!(Two => test_two_trivial_is_bit_valid); + +#[derive(imp::FromBytes)] +struct Unsized { + a: [u8], +} + +util_assert_impl_all!(Unsized: imp::FromBytes); + +#[derive(imp::FromBytes)] +struct TypeParams<'a, T: ?imp::Sized, I: imp::Iterator> { + a: I::Item, + b: u8, + c: imp::PhantomData<&'a [::core::primitive::u8]>, + d: imp::PhantomData<&'static ::core::primitive::str>, + e: imp::PhantomData<imp::String>, + f: T, +} + +util_assert_impl_all!(TypeParams<'static, (), imp::IntoIter<()>>: imp::FromBytes); +util_assert_impl_all!(TypeParams<'static, util::AU16, imp::IntoIter<()>>: imp::FromBytes); +util_assert_impl_all!(TypeParams<'static, [util::AU16], imp::IntoIter<()>>: imp::FromBytes); +test_trivial_is_bit_valid!(TypeParams<'static, (), imp::IntoIter<()>> => test_type_params_trivial_is_bit_valid); + +// Deriving `FromBytes` should work if the struct has bounded parameters. + +#[derive(imp::FromBytes)] +#[repr(transparent)] +struct WithParams<'a: 'b, 'b: 'a, T: 'a + 'b + imp::FromBytes, const N: usize>( + [T; N], + imp::PhantomData<&'a &'b ()>, +) +where + 'a: 'b, + 'b: 'a, + T: 'a + 'b + imp::FromBytes; + +util_assert_impl_all!(WithParams<'static, 'static, u8, 42>: imp::FromBytes); +test_trivial_is_bit_valid!(WithParams<'static, 'static, u8, 42> => test_with_params_trivial_is_bit_valid); diff --git a/vendor/zerocopy-derive/tests/struct_from_zeros.rs b/vendor/zerocopy-derive/tests/struct_from_zeros.rs new file mode 100644 index 00000000..147b04cf --- /dev/null +++ b/vendor/zerocopy-derive/tests/struct_from_zeros.rs @@ -0,0 +1,72 @@ +// Copyright 2019 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
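Bridging the two files here: struct_from_bytes.rs above encodes the rule that a struct is `FromBytes` when every field is, and struct_from_zeros.rs below does the same for `FromZeros`. As a quick illustration of what each trait buys a caller, here is a hedged sketch; the `Counters` type is an assumed example, while `new_zeroed` and `read_from_bytes` are zerocopy's real provided methods.

use zerocopy::{FromBytes, FromZeros};

// Deriving `FromBytes` also implies `FromZeros` (and `TryFromBytes`).
#[derive(FromBytes)]
#[repr(C)]
struct Counters {
    hits: u32,
    misses: u32,
}

fn demo() {
    // `FromZeros`: the all-zeros bit pattern is a valid `Counters`.
    let zeroed = Counters::new_zeroed();
    assert_eq!(zeroed.hits, 0);

    // `FromBytes`: any correctly sized byte slice can be copied into a value.
    let parsed = Counters::read_from_bytes(&[1u8, 0, 0, 0, 2, 0, 0, 0][..]);
    assert!(parsed.is_ok());
}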
+ +// See comment in `include.rs` for why we disable the prelude. +#![no_implicit_prelude] +#![allow(warnings)] + +include!("include.rs"); + +// A struct is `FromZeros` if: +// - all fields are `FromZeros` + +#[derive(imp::FromZeros)] +struct Zst; + +util_assert_impl_all!(Zst: imp::FromZeros); + +#[derive(imp::FromZeros)] +struct One { + a: bool, +} + +util_assert_impl_all!(One: imp::FromZeros); + +#[derive(imp::FromZeros)] +struct Two { + a: bool, + b: Zst, +} + +util_assert_impl_all!(Two: imp::FromZeros); + +#[derive(imp::FromZeros)] +struct Unsized { + a: [u8], +} + +util_assert_impl_all!(Unsized: imp::FromZeros); + +#[derive(imp::FromZeros)] +struct TypeParams<'a, T: ?imp::Sized, I: imp::Iterator> { + a: I::Item, + b: u8, + c: imp::PhantomData<&'a [u8]>, + d: imp::PhantomData<&'static str>, + e: imp::PhantomData<imp::String>, + f: T, +} + +util_assert_impl_all!(TypeParams<'static, (), imp::IntoIter<()>>: imp::FromZeros); +util_assert_impl_all!(TypeParams<'static, util::AU16, imp::IntoIter<()>>: imp::FromZeros); +util_assert_impl_all!(TypeParams<'static, [util::AU16], imp::IntoIter<()>>: imp::FromZeros); + +// Deriving `FromZeros` should work if the struct has bounded parameters. + +#[derive(imp::FromZeros)] +#[repr(transparent)] +struct WithParams<'a: 'b, 'b: 'a, T: 'a + 'b + imp::FromZeros, const N: usize>( + [T; N], + imp::PhantomData<&'a &'b ()>, +) +where + 'a: 'b, + 'b: 'a, + T: 'a + 'b + imp::FromZeros; + +util_assert_impl_all!(WithParams<'static, 'static, u8, 42>: imp::FromZeros); diff --git a/vendor/zerocopy-derive/tests/struct_known_layout.rs b/vendor/zerocopy-derive/tests/struct_known_layout.rs new file mode 100644 index 00000000..69427d1d --- /dev/null +++ b/vendor/zerocopy-derive/tests/struct_known_layout.rs @@ -0,0 +1,122 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +// See comment in `include.rs` for why we disable the prelude. +#![no_implicit_prelude] +#![allow(warnings)] + +extern crate rustversion; + +include!("include.rs"); + +#[derive(imp::KnownLayout)] +struct Zst; + +util_assert_impl_all!(Zst: imp::KnownLayout); + +#[derive(imp::KnownLayout)] +struct One { + a: bool, +} + +util_assert_impl_all!(One: imp::KnownLayout); + +#[derive(imp::KnownLayout)] +struct Two { + a: bool, + b: Zst, +} + +util_assert_impl_all!(Two: imp::KnownLayout); + +#[derive(imp::KnownLayout)] +struct TypeParams<'a, T, I: imp::Iterator> { + a: I::Item, + b: u8, + c: imp::PhantomData<&'a [::core::primitive::u8]>, + d: imp::PhantomData<&'static ::core::primitive::str>, + e: imp::PhantomData<imp::String>, + f: T, +} + +util_assert_impl_all!(TypeParams<'static, (), imp::IntoIter<()>>: imp::KnownLayout); +util_assert_impl_all!(TypeParams<'static, util::AU16, imp::IntoIter<()>>: imp::KnownLayout); + +// Deriving `KnownLayout` should work if the struct has bounded parameters. +// +// N.B. We limit this test to rustc >= 1.62, since earlier versions of rustc ICE +// when `KnownLayout` is derived on a `repr(C)` struct whose trailing field +// contains non-static lifetimes. 
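For readers wondering what `KnownLayout` enables in practice: it is what lets zerocopy compute the layout of slice-based DSTs like the trailing-field structs tested in this file, so that reference conversions such as `FromBytes::ref_from_bytes` can be applied to them without copying. The sketch below is an assumption-laden example (the `Record` type and `view` helper are invented; `ref_from_bytes` is the real method and requires `KnownLayout + Immutable`).

use zerocopy::{FromBytes, Immutable, KnownLayout};

// A slice DST: a sized header byte followed by a trailing byte slice.
#[derive(FromBytes, Immutable, KnownLayout)]
#[repr(C)]
struct Record {
    tag: u8,
    body: [u8],
}

fn view(bytes: &[u8]) -> Option<&Record> {
    // Reinterprets the input in place; the trailing `body` slice takes up
    // whatever length remains after `tag`.
    Record::ref_from_bytes(bytes).ok()
}

fn demo() {
    let r = view(&[7u8, 1, 2, 3][..]).unwrap();
    assert_eq!(r.tag, 7);
    assert_eq!(r.body.len(), 3);
}

The version-gated `WithParams` case follows.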
+#[rustversion::since(1.62)] +const _: () = { + #[derive(imp::KnownLayout)] + #[repr(C)] + struct WithParams<'a: 'b, 'b: 'a, T: 'a + 'b + imp::KnownLayout, const N: usize>( + [T; N], + imp::PhantomData<&'a &'b ()>, + ) + where + 'a: 'b, + 'b: 'a, + T: 'a + 'b + imp::KnownLayout; + + util_assert_impl_all!(WithParams<'static, 'static, u8, 42>: imp::KnownLayout); +}; + +const _: () = { + // Similar to the previous test, except that the trailing field contains + // only static lifetimes. This is exercisable on all supported toolchains. + + #[derive(imp::KnownLayout)] + #[repr(C)] + struct WithParams<'a: 'b, 'b: 'a, T: 'a + 'b + imp::KnownLayout, const N: usize>( + &'a &'b [T; N], + imp::PhantomData<&'static ()>, + ) + where + 'a: 'b, + 'b: 'a, + T: 'a + 'b + imp::KnownLayout; + + util_assert_impl_all!(WithParams<'static, 'static, u8, 42>: imp::KnownLayout); +}; + +// Deriving `KnownLayout` should work if the struct references `Self`. See +// #2116. + +#[derive(imp::KnownLayout)] +#[repr(C)] +struct WithSelfReference { + leading: [u8; Self::N], + trailing: [[u8; Self::N]], +} + +impl WithSelfReference { + const N: usize = 42; +} + +util_assert_impl_all!(WithSelfReference: imp::KnownLayout); + +// Deriving `KnownLayout` should work with generic `repr(packed)` types. See +// #2302. + +#[derive(imp::KnownLayout)] +#[repr(C, packed)] +struct Packet<P> { + payload: P, +} + +util_assert_impl_all!(Packet<imp::u8>: imp::KnownLayout); + +#[derive(imp::KnownLayout)] +#[repr(C)] +struct RawIdentifier { + r#type: u8, +} + +util_assert_impl_all!(RawIdentifier: imp::KnownLayout); diff --git a/vendor/zerocopy-derive/tests/struct_no_cell.rs b/vendor/zerocopy-derive/tests/struct_no_cell.rs new file mode 100644 index 00000000..776996df --- /dev/null +++ b/vendor/zerocopy-derive/tests/struct_no_cell.rs @@ -0,0 +1,97 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +// See comment in `include.rs` for why we disable the prelude. 
+#![no_implicit_prelude] +#![allow(warnings)] + +include!("include.rs"); + +#[derive(imp::Immutable)] +struct Zst; + +util_assert_impl_all!(Zst: imp::Immutable); + +#[derive(imp::Immutable)] +struct One { + a: bool, +} + +util_assert_impl_all!(One: imp::Immutable); + +#[derive(imp::Immutable)] +struct Two { + a: bool, + b: Zst, +} + +util_assert_impl_all!(Two: imp::Immutable); + +#[derive(imp::Immutable)] +struct Three { + a: [u8], +} + +util_assert_impl_all!(Three: imp::Immutable); + +#[derive(imp::Immutable)] +struct Four<'a> { + field: &'a imp::UnsafeCell<u8>, +} + +util_assert_impl_all!(Four<'static>: imp::Immutable); + +#[derive(imp::Immutable)] +struct TypeParams<'a, T, U, I: imp::Iterator> { + a: I::Item, + b: u8, + c: imp::PhantomData<&'a [::core::primitive::u8]>, + d: imp::PhantomData<&'static ::core::primitive::str>, + e: imp::PhantomData<imp::String>, + f: imp::PhantomData<U>, + g: T, +} + +util_assert_impl_all!(TypeParams<'static, (), (), imp::IntoIter<()>>: imp::Immutable); +util_assert_impl_all!(TypeParams<'static, util::AU16, util::AU16, imp::IntoIter<()>>: imp::Immutable); +util_assert_impl_all!(TypeParams<'static, util::AU16, imp::UnsafeCell<u8>, imp::IntoIter<()>>: imp::Immutable); +util_assert_not_impl_any!(TypeParams<'static, imp::UnsafeCell<()>, (), imp::IntoIter<()>>: imp::Immutable); +util_assert_not_impl_any!(TypeParams<'static, [imp::UnsafeCell<u8>; 0], (), imp::IntoIter<()>>: imp::Immutable); +util_assert_not_impl_any!(TypeParams<'static, (), (), imp::IntoIter<imp::UnsafeCell<()>>>: imp::Immutable); + +trait Trait { + type Assoc; +} + +impl<T> Trait for imp::UnsafeCell<T> { + type Assoc = T; +} + +#[derive(imp::Immutable)] +struct WithAssocType<T: Trait> { + field: <T as Trait>::Assoc, +} + +util_assert_impl_all!(WithAssocType<imp::UnsafeCell<u8>>: imp::Immutable); + +// Deriving `Immutable` should work if the struct has bounded parameters. + +#[derive(imp::Immutable)] +#[repr(C)] +struct WithParams<'a: 'b, 'b: 'a, T: 'a + 'b + imp::Immutable, const N: usize>( + [T; N], + imp::PhantomData<&'a &'b ()>, + imp::PhantomData<imp::UnsafeCell<()>>, + &'a imp::UnsafeCell<()>, +) +where + 'a: 'b, + 'b: 'a, + T: 'a + 'b + imp::Immutable; + +util_assert_impl_all!(WithParams<'static, 'static, u8, 42>: imp::Immutable); diff --git a/vendor/zerocopy-derive/tests/struct_to_bytes.rs b/vendor/zerocopy-derive/tests/struct_to_bytes.rs new file mode 100644 index 00000000..89125d87 --- /dev/null +++ b/vendor/zerocopy-derive/tests/struct_to_bytes.rs @@ -0,0 +1,203 @@ +// Copyright 2019 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +// See comment in `include.rs` for why we disable the prelude. 
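A short aside on the rule the struct_no_cell.rs assertions above encode: `Immutable` only rejects `UnsafeCell`s stored within the type's own bytes, which is why the `Four` case with a `&UnsafeCell<u8>` field still derives it. A sketch with assumed type names:

use core::cell::UnsafeCell;
use zerocopy::Immutable;

#[derive(Immutable)]
struct ViaRef<'a> {
    // The cell lives outside `ViaRef`'s bytes, so the derive accepts this.
    shared: &'a UnsafeCell<u8>,
}

// Storing the cell inline, by contrast, would fail to derive, since
// `UnsafeCell<u8>` itself is not `Immutable`:
// #[derive(Immutable)]
// struct Inline {
//     cell: UnsafeCell<u8>,
// }

struct_to_bytes.rs, which begins here, then moves on to the padding rules for `IntoBytes`.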
+#![no_implicit_prelude] +#![allow(warnings)] + +include!("include.rs"); + +// A struct is `IntoBytes` if: +// - all fields are `IntoBytes` +// - `repr(C)` or `repr(transparent)` and +// - no padding (size of struct equals sum of size of field types) +// - `repr(packed)` + +#[derive(imp::IntoBytes)] +#[repr(C)] +struct CZst; + +util_assert_impl_all!(CZst: imp::IntoBytes); + +#[derive(imp::IntoBytes)] +#[repr(C)] +struct C { + a: u8, + b: u8, + c: util::AU16, +} + +util_assert_impl_all!(C: imp::IntoBytes); + +#[derive(imp::IntoBytes)] +#[repr(C)] +struct SyntacticUnsized { + a: u8, + b: u8, + c: [util::AU16], +} + +util_assert_impl_all!(C: imp::IntoBytes); + +#[derive(imp::IntoBytes)] +#[repr(transparent)] +struct Transparent { + a: u8, + b: (), +} + +util_assert_impl_all!(Transparent: imp::IntoBytes); + +#[derive(imp::IntoBytes)] +#[repr(transparent)] +struct TransparentGeneric<T: ?imp::Sized> { + a: (), + b: T, +} + +util_assert_impl_all!(TransparentGeneric<u64>: imp::IntoBytes); +util_assert_impl_all!(TransparentGeneric<[u64]>: imp::IntoBytes); + +#[derive(imp::IntoBytes)] +#[repr(C, packed)] +struct CZstPacked; + +util_assert_impl_all!(CZstPacked: imp::IntoBytes); + +#[derive(imp::IntoBytes)] +#[repr(C, packed)] +struct CPacked { + a: u8, + // NOTE: The `u16` type is not guaranteed to have alignment 2, although it + // does on many platforms. However, to fix this would require a custom type + // with a `#[repr(align(2))]` attribute, and `#[repr(packed)]` types are not + // allowed to transitively contain `#[repr(align(...))]` types. Thus, we + // have no choice but to use `u16` here. Luckily, these tests run in CI on + // platforms on which `u16` has alignment 2, so this isn't that big of a + // deal. + b: u16, +} + +util_assert_impl_all!(CPacked: imp::IntoBytes); + +#[derive(imp::IntoBytes)] +#[repr(C, packed(2))] +// The same caveats as for CPacked apply - we're assuming u64 is at least +// 4-byte aligned by default. Without packed(2), this should fail, as there +// would be padding between a/b assuming u64 is 4+ byte aligned. +struct CPacked2 { + a: u16, + b: u64, +} + +util_assert_impl_all!(CPacked2: imp::IntoBytes); + +#[derive(imp::IntoBytes)] +#[repr(C, packed)] +struct CPackedGeneric<T, U: ?imp::Sized> { + t: T, + // Unsized types stored in `repr(packed)` structs must not be dropped + // because dropping them in-place might be unsound depending on the + // alignment of the outer struct. Sized types can be dropped by first being + // moved to an aligned stack variable, but this isn't possible with unsized + // types. + u: imp::ManuallyDrop<U>, +} + +util_assert_impl_all!(CPackedGeneric<u8, util::AU16>: imp::IntoBytes); +util_assert_impl_all!(CPackedGeneric<u8, [util::AU16]>: imp::IntoBytes); + +#[derive(imp::IntoBytes)] +#[repr(packed)] +struct PackedGeneric<T, U: ?imp::Sized> { + t: T, + // Unsized types stored in `repr(packed)` structs must not be dropped + // because dropping them in-place might be unsound depending on the + // alignment of the outer struct. Sized types can be dropped by first being + // moved to an aligned stack variable, but this isn't possible with unsized + // types. + u: imp::ManuallyDrop<U>, +} + +util_assert_impl_all!(PackedGeneric<u8, util::AU16>: imp::IntoBytes); +util_assert_impl_all!(PackedGeneric<u8, [util::AU16]>: imp::IntoBytes); + +// This test is non-portable, but works so long as Rust happens to lay this +// struct out with no padding. 
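Before the non-portable `Unpacked` case below, a sketch of the padding rule listed at the top of this file, using an assumed `Pair` type: with `repr(C)` and two `u16` fields there are no padding bytes, so the derive accepts the type and `as_bytes` exposes exactly four initialized bytes. Replacing `b` with a `u32` would, on typical targets, introduce two padding bytes after `a`, and the `IntoBytes` derive would reject the type.

use zerocopy::{Immutable, IntoBytes};

#[derive(IntoBytes, Immutable)]
#[repr(C)]
struct Pair {
    a: u16,
    b: u16, // size 4 == 2 + 2, so there is no padding
}

fn demo() {
    let p = Pair { a: 1, b: 2 };
    // `as_bytes` is bounded on `Immutable` and returns the native-endian
    // encodings of `a` followed by `b`.
    assert_eq!(p.as_bytes().len(), 4);
}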
+#[derive(imp::IntoBytes)] +struct Unpacked { + a: u8, + b: u8, +} + +util_assert_impl_all!(Unpacked: imp::IntoBytes); + +#[derive(imp::IntoBytes)] +#[repr(C)] +struct ReprCGenericOneField<T: ?imp::Sized> { + t: T, +} + +// Even though `ReprCGenericOneField` has generic type arguments, since it only +// has one field, we don't require that its field types implement `Unaligned`. +util_assert_impl_all!(ReprCGenericOneField<util::AU16>: imp::IntoBytes); +util_assert_impl_all!(ReprCGenericOneField<[util::AU16]>: imp::IntoBytes); + +#[derive(imp::IntoBytes)] +#[repr(C)] +struct ReprCGenericMultipleFields<T, U: ?imp::Sized> { + t: T, + u: U, +} + +// Since `ReprCGenericMultipleFields` is generic and has more than one field, +// all field types must implement `Unaligned`. +util_assert_impl_all!(ReprCGenericMultipleFields<u8, [u8; 2]>: imp::IntoBytes); +util_assert_impl_all!(ReprCGenericMultipleFields<u8, [[u8; 2]]>: imp::IntoBytes); +util_assert_not_impl_any!(ReprCGenericMultipleFields<u8, util::AU16>: imp::IntoBytes); +util_assert_not_impl_any!(ReprCGenericMultipleFields<u8, [util::AU16]>: imp::IntoBytes); + +#[derive(imp::IntoBytes)] +#[repr(transparent)] +struct Unsized { + a: [u8], +} + +util_assert_impl_all!(Unsized: imp::IntoBytes); + +// Deriving `IntoBytes` should work if the struct has bounded parameters. + +#[derive(imp::IntoBytes)] +#[repr(transparent)] +struct WithParams<'a: 'b, 'b: 'a, T: 'a + 'b + imp::IntoBytes, const N: usize>( + [T; N], + imp::PhantomData<&'a &'b ()>, +) +where + 'a: 'b, + 'b: 'a, + T: 'a + 'b + imp::IntoBytes; + +util_assert_impl_all!(WithParams<'static, 'static, u8, 42>: imp::IntoBytes); + +// Test for the failure reported in #1182. + +#[derive(imp::IntoBytes)] +#[repr(C, packed)] +pub struct IndexEntryFlags(u8); + +#[derive(imp::IntoBytes)] +#[repr(C, packed)] +pub struct IndexEntry<const SIZE_BLOCK_ID: usize> { + block_number: imp::native_endian::U64, + flags: IndexEntryFlags, + block_id: [u8; SIZE_BLOCK_ID], +} + +util_assert_impl_all!(IndexEntry<0>: imp::IntoBytes); +util_assert_impl_all!(IndexEntry<1>: imp::IntoBytes); diff --git a/vendor/zerocopy-derive/tests/struct_try_from_bytes.rs b/vendor/zerocopy-derive/tests/struct_try_from_bytes.rs new file mode 100644 index 00000000..f36765e3 --- /dev/null +++ b/vendor/zerocopy-derive/tests/struct_try_from_bytes.rs @@ -0,0 +1,260 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +// See comment in `include.rs` for why we disable the prelude. +#![no_implicit_prelude] +#![allow(warnings)] + +include!("include.rs"); + +// A struct is `imp::TryFromBytes` if: +// - all fields are `imp::TryFromBytes` + +#[test] +fn zst() { + // FIXME(#5): Use `try_transmute` in this test once it's available. + let candidate = ::zerocopy::Ptr::from_ref(&()); + let candidate = candidate.forget_aligned(); + // SAFETY: `&()` trivially consists entirely of initialized bytes. 
+ let candidate = unsafe { candidate.assume_initialized() }; + let is_bit_valid = <() as imp::TryFromBytes>::is_bit_valid(candidate); + imp::assert!(is_bit_valid); +} + +#[derive(imp::TryFromBytes)] +#[repr(C)] +struct One { + a: u8, +} + +util_assert_impl_all!(One: imp::TryFromBytes); + +#[test] +fn one() { + // FIXME(#5): Use `try_transmute` in this test once it's available. + let candidate = ::zerocopy::Ptr::from_ref(&One { a: 42 }); + let candidate = candidate.forget_aligned(); + // SAFETY: `&One` consists entirely of initialized bytes. + let candidate = unsafe { candidate.assume_initialized() }; + let is_bit_valid = <One as imp::TryFromBytes>::is_bit_valid(candidate); + imp::assert!(is_bit_valid); +} + +#[derive(imp::TryFromBytes)] +#[repr(C)] +struct Two { + a: bool, + b: (), +} + +util_assert_impl_all!(Two: imp::TryFromBytes); + +#[test] +fn two() { + // FIXME(#5): Use `try_transmute` in this test once it's available. + let candidate = ::zerocopy::Ptr::from_ref(&Two { a: false, b: () }); + let candidate = candidate.forget_aligned(); + // SAFETY: `&Two` consists entirely of initialized bytes. + let candidate = unsafe { candidate.assume_initialized() }; + let is_bit_valid = <Two as imp::TryFromBytes>::is_bit_valid(candidate); + imp::assert!(is_bit_valid); +} + +#[test] +fn two_bad() { + // FIXME(#5): Use `try_transmute` in this test once it's available. + let candidate = ::zerocopy::Ptr::from_ref(&[2u8][..]); + let candidate = candidate.forget_aligned(); + // SAFETY: `&Two` consists entirely of initialized bytes. + let candidate = unsafe { candidate.assume_initialized() }; + + // SAFETY: + // - The cast preserves address and size. As a result, the cast will address + // the same bytes as `c`. + // - The cast preserves provenance. + // - Neither the input nor output types contain any `UnsafeCell`s. + let candidate = unsafe { candidate.cast_unsized_unchecked(|p| p.cast::<Two>()) }; + + // SAFETY: `candidate`'s referent is as-initialized as `Two`. + let candidate = unsafe { candidate.assume_initialized() }; + + let is_bit_valid = <Two as imp::TryFromBytes>::is_bit_valid(candidate); + imp::assert!(!is_bit_valid); +} + +#[derive(imp::TryFromBytes)] +#[repr(C)] +struct Unsized { + a: [u8], +} + +util_assert_impl_all!(Unsized: imp::TryFromBytes); + +#[test] +fn un_sized() { + // FIXME(#5): Use `try_transmute` in this test once it's available. + let candidate = ::zerocopy::Ptr::from_ref(&[16, 12, 42][..]); + let candidate = candidate.forget_aligned(); + // SAFETY: `&Unsized` consists entirely of initialized bytes. + let candidate = unsafe { candidate.assume_initialized() }; + + // SAFETY: + // - The cast preserves address and size. As a result, the cast will address + // the same bytes as `c`. + // - The cast preserves provenance. + // - Neither the input nor output types contain any `UnsafeCell`s. + let candidate = unsafe { + candidate.cast_unsized_unchecked(|p| { + let ptr = + imp::core::ptr::NonNull::new_unchecked(p.as_non_null().as_ptr() as *mut Unsized); + ::zerocopy::pointer::PtrInner::new(ptr) + }) + }; + + // SAFETY: `candidate`'s referent is as-initialized as `Two`. 
+ let candidate = unsafe { candidate.assume_initialized() }; + let is_bit_valid = <Unsized as imp::TryFromBytes>::is_bit_valid(candidate); + imp::assert!(is_bit_valid); +} + +#[derive(imp::TryFromBytes)] +#[repr(C)] +struct TypeParams<'a, T: ?imp::Sized, I: imp::Iterator> { + a: I::Item, + b: u8, + c: imp::PhantomData<&'a [u8]>, + d: imp::PhantomData<&'static str>, + e: imp::PhantomData<imp::String>, + f: T, +} + +util_assert_impl_all!(TypeParams<'static, (), imp::IntoIter<()>>: imp::TryFromBytes); +util_assert_impl_all!(TypeParams<'static, util::AU16, imp::IntoIter<()>>: imp::TryFromBytes); +util_assert_impl_all!(TypeParams<'static, [util::AU16], imp::IntoIter<()>>: imp::TryFromBytes); + +// Deriving `imp::TryFromBytes` should work if the struct has bounded +// parameters. + +#[derive(imp::TryFromBytes)] +#[repr(transparent)] +struct WithParams<'a: 'b, 'b: 'a, T: 'a + 'b + imp::TryFromBytes, const N: usize>( + imp::PhantomData<&'a &'b ()>, + [T], +) +where + 'a: 'b, + 'b: 'a, + T: 'a + 'b + imp::TryFromBytes; + +util_assert_impl_all!(WithParams<'static, 'static, u8, 42>: imp::TryFromBytes); + +#[derive(imp::FromBytes)] +#[repr(C)] +struct MaybeFromBytes<T>(T); + +#[test] +fn test_maybe_from_bytes() { + // When deriving `FromBytes` on a type with no generic parameters, we emit a + // trivial `is_bit_valid` impl that always returns true. This test confirms + // that we *don't* spuriously do that when generic parameters are present. + + let candidate = ::zerocopy::Ptr::from_ref(&[2u8][..]); + let candidate = candidate.bikeshed_recall_initialized_from_bytes(); + + // SAFETY: + // - The cast preserves address and size. As a result, the cast will address + // the same bytes as `c`. + // - The cast preserves provenance. + // - Neither the input nor output types contain any `UnsafeCell`s. + let candidate = + unsafe { candidate.cast_unsized_unchecked(|p| p.cast::<MaybeFromBytes<bool>>()) }; + + // SAFETY: `[u8]` consists entirely of initialized bytes. + let candidate = unsafe { candidate.assume_initialized() }; + let is_bit_valid = <MaybeFromBytes<bool> as imp::TryFromBytes>::is_bit_valid(candidate); + imp::assert!(!is_bit_valid); +} + +#[derive(Debug, PartialEq, Eq, imp::TryFromBytes, imp::Immutable, imp::KnownLayout)] +#[repr(C, packed)] +struct CPacked { + a: u8, + // NOTE: The `u32` type is not guaranteed to have alignment 4, although it + // does on many platforms. However, to fix this would require a custom type + // with a `#[repr(align(4))]` attribute, and `#[repr(packed)]` types are not + // allowed to transitively contain `#[repr(align(...))]` types. Thus, we + // have no choice but to use `u32` here. Luckily, these tests run in CI on + // platforms on which `u32` has alignment 4, so this isn't that big of a + // deal. + b: u32, +} + +#[test] +fn c_packed() { + let candidate = &[42u8, 0xFF, 0xFF, 0xFF, 0xFF]; + let converted = <CPacked as imp::TryFromBytes>::try_ref_from_bytes(candidate); + imp::assert_eq!(converted, imp::Ok(&CPacked { a: 42, b: u32::MAX })); +} + +#[derive(imp::TryFromBytes, imp::KnownLayout, imp::Immutable)] +#[repr(C, packed)] +struct CPackedUnsized { + a: u8, + // NOTE: The `u32` type is not guaranteed to have alignment 4, although it + // does on many platforms. However, to fix this would require a custom type + // with a `#[repr(align(4))]` attribute, and `#[repr(packed)]` types are not + // allowed to transitively contain `#[repr(align(...))]` types. Thus, we + // have no choice but to use `u32` here. 
Luckily, these tests run in CI on + // platforms on which `u32` has alignment 4, so this isn't that big of a + // deal. + b: [u32], +} + +#[test] +fn c_packed_unsized() { + let candidate = &[42u8, 0xFF, 0xFF, 0xFF, 0xFF]; + let converted = <CPackedUnsized as imp::TryFromBytes>::try_ref_from_bytes(candidate); + imp::assert!(converted.is_ok()); +} + +#[derive(imp::TryFromBytes)] +#[repr(packed)] +struct PackedUnsized { + a: u8, + // NOTE: The `u32` type is not guaranteed to have alignment 4, although it + // does on many platforms. However, to fix this would require a custom type + // with a `#[repr(align(4))]` attribute, and `#[repr(packed)]` types are not + // allowed to transitively contain `#[repr(align(...))]` types. Thus, we + // have no choice but to use `u32` here. Luckily, these tests run in CI on + // platforms on which `u32` has alignment 4, so this isn't that big of a + // deal. + b: [u32], +} + +#[test] +fn packed_unsized() { + let candidate = &[42u8, 0xFF, 0xFF, 0xFF, 0xFF]; + let converted = <CPackedUnsized as imp::TryFromBytes>::try_ref_from_bytes(candidate); + imp::assert!(converted.is_ok()); + + let candidate = &[42u8, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF]; + let converted = <CPackedUnsized as imp::TryFromBytes>::try_ref_from_bytes(candidate); + imp::assert!(converted.is_err()); + + let candidate = &[42u8, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF]; + let converted = <CPackedUnsized as imp::TryFromBytes>::try_ref_from_bytes(candidate); + imp::assert!(converted.is_ok()); +} + +#[derive(imp::TryFromBytes)] +struct A; + +#[derive(imp::TryFromBytes)] +struct B { + a: A, +} diff --git a/vendor/zerocopy-derive/tests/struct_unaligned.rs b/vendor/zerocopy-derive/tests/struct_unaligned.rs new file mode 100644 index 00000000..b2d78e4c --- /dev/null +++ b/vendor/zerocopy-derive/tests/struct_unaligned.rs @@ -0,0 +1,96 @@ +// Copyright 2019 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +// See comment in `include.rs` for why we disable the prelude. +#![no_implicit_prelude] +#![allow(warnings)] + +include!("include.rs"); + +// A struct is `Unaligned` if: +// - `repr(align)` is no more than 1 and either +// - `repr(C)` or `repr(transparent)` and +// - all fields Unaligned +// - `repr(packed)` + +#[derive(imp::Unaligned)] +#[repr(C)] +struct Foo { + a: u8, +} + +util_assert_impl_all!(Foo: imp::Unaligned); + +#[derive(imp::Unaligned)] +#[repr(transparent)] +struct Bar { + a: u8, +} + +util_assert_impl_all!(Bar: imp::Unaligned); + +#[derive(imp::Unaligned)] +#[repr(packed)] +struct Baz { + // NOTE: The `u16` type is not guaranteed to have alignment 2, although it + // does on many platforms. However, to fix this would require a custom type + // with a `#[repr(align(2))]` attribute, and `#[repr(packed)]` types are not + // allowed to transitively contain `#[repr(align(...))]` types. Thus, we + // have no choice but to use `u16` here. Luckily, these tests run in CI on + // platforms on which `u16` has alignment 2, so this isn't that big of a + // deal. 
+ a: u16, +} + +util_assert_impl_all!(Baz: imp::Unaligned); + +#[derive(imp::Unaligned)] +#[repr(C, align(1))] +struct FooAlign { + a: u8, +} + +util_assert_impl_all!(FooAlign: imp::Unaligned); + +#[derive(imp::Unaligned)] +#[repr(transparent)] +struct Unsized { + a: [u8], +} + +util_assert_impl_all!(Unsized: imp::Unaligned); + +#[derive(imp::Unaligned)] +#[repr(C)] +struct TypeParams<'a, T: ?imp::Sized, I: imp::Iterator> { + a: I::Item, + b: u8, + c: imp::PhantomData<&'a [::core::primitive::u8]>, + d: imp::PhantomData<&'static ::core::primitive::str>, + e: imp::PhantomData<imp::String>, + f: T, +} + +util_assert_impl_all!(TypeParams<'static, (), imp::IntoIter<()>>: imp::Unaligned); +util_assert_impl_all!(TypeParams<'static, ::core::primitive::u8, imp::IntoIter<()>>: imp::Unaligned); +util_assert_impl_all!(TypeParams<'static, [::core::primitive::u8], imp::IntoIter<()>>: imp::Unaligned); + +// Deriving `Unaligned` should work if the struct has bounded parameters. + +#[derive(imp::Unaligned)] +#[repr(transparent)] +struct WithParams<'a: 'b, 'b: 'a, T: 'a + 'b + imp::Unaligned, const N: usize>( + [T; N], + imp::PhantomData<&'a &'b ()>, +) +where + 'a: 'b, + 'b: 'a, + T: 'a + 'b + imp::Unaligned; + +util_assert_impl_all!(WithParams<'static, 'static, u8, 42>: imp::Unaligned); diff --git a/vendor/zerocopy-derive/tests/trybuild.rs b/vendor/zerocopy-derive/tests/trybuild.rs new file mode 100644 index 00000000..34e65ad1 --- /dev/null +++ b/vendor/zerocopy-derive/tests/trybuild.rs @@ -0,0 +1,60 @@ +// Copyright 2019 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +#![allow(clippy::uninlined_format_args)] + +use std::env; + +use testutil::set_rustflags_w_warnings; + +fn test(subdir: &str) { + let version = testutil::ToolchainVersion::extract_from_pwd().unwrap(); + // See the doc comment on this method for an explanation of what this does + // and why we store source files in different directories. + let source_files_dirname = version.get_ui_source_files_dirname_and_maybe_print_warning(); + + // Set `-Wwarnings` in the `RUSTFLAGS` environment variable to ensure that + // `.stderr` files reflect what the typical user would encounter. + set_rustflags_w_warnings(); + + let t = trybuild::TestCases::new(); + t.compile_fail(format!("tests/{}/{}/*.rs", source_files_dirname, subdir)); +} + +#[test] +#[cfg_attr(miri, ignore)] +fn ui() { + test(""); + + // This tests the behavior when `--cfg zerocopy_derive_union_into_bytes` is + // not present, so remove it. If this logic is wrong, that's fine - it will + // exhibit as a test failure that we can debug at that point. + let rustflags = env::var("RUSTFLAGS").unwrap(); + let new_rustflags = rustflags.replace("--cfg zerocopy_derive_union_into_bytes", ""); + + // SAFETY: None of our code is concurrently accessinv env vars. It's + // possible that the test framework has spawned other threads that are + // concurrently accessing env vars, but we can't do anything about that. + #[allow(unused_unsafe)] // `set_var` is safe on our MSRV. + unsafe { + env::set_var("RUSTFLAGS", new_rustflags) + }; + + test("union_into_bytes_cfg"); + + // Reset RUSTFLAGS in case we later add other tests which rely on its value. 
+ // This isn't strictly necessary, but it's easier to add this now when we're + // thinking about the semantics of these env vars than to debug later when + // we've forgotten about it. + // + // SAFETY: See previous safety comment. + #[allow(unused_unsafe)] // `set_var` is safe on our MSRV. + unsafe { + env::set_var("RUSTFLAGS", rustflags) + }; +} diff --git a/vendor/zerocopy-derive/tests/ui-msrv/derive_transparent.rs b/vendor/zerocopy-derive/tests/ui-msrv/derive_transparent.rs new file mode 100644 index 00000000..a60c89e2 --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-msrv/derive_transparent.rs @@ -0,0 +1,38 @@ +// Copyright 2019 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +#[path = "../include.rs"] +mod util; + +use core::marker::PhantomData; + +use zerocopy::{FromBytes, FromZeros, IntoBytes, TryFromBytes, Unaligned}; + +use self::util::util::NotZerocopy; + +fn main() {} + +// Test generic transparent structs + +#[derive(IntoBytes, FromBytes, Unaligned)] +#[repr(transparent)] +struct TransparentStruct<T> { + inner: T, + _phantom: PhantomData<()>, +} + +// It should be legal to derive these traits on a transparent struct, but it +// must also ensure the traits are only implemented when the inner type +// implements them. +util_assert_impl_all!(TransparentStruct<NotZerocopy>: TryFromBytes); +util_assert_impl_all!(TransparentStruct<NotZerocopy>: FromZeros); +util_assert_impl_all!(TransparentStruct<NotZerocopy>: FromBytes); +util_assert_impl_all!(TransparentStruct<NotZerocopy>: IntoBytes); +util_assert_impl_all!(TransparentStruct<NotZerocopy>: Unaligned); diff --git a/vendor/zerocopy-derive/tests/ui-msrv/derive_transparent.stderr b/vendor/zerocopy-derive/tests/ui-msrv/derive_transparent.stderr new file mode 100644 index 00000000..d4d6b560 --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-msrv/derive_transparent.stderr @@ -0,0 +1,89 @@ +error[E0277]: the trait bound `NotZerocopy: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-msrv/derive_transparent.rs:34:1 + | +34 | util_assert_impl_all!(TransparentStruct<NotZerocopy>: TryFromBytes); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `zerocopy::TryFromBytes` is not implemented for `NotZerocopy` + | +note: required because of the requirements on the impl of `zerocopy::TryFromBytes` for `TransparentStruct<NotZerocopy>` + --> tests/ui-msrv/derive_transparent.rs:24:21 + | +24 | #[derive(IntoBytes, FromBytes, Unaligned)] + | ^^^^^^^^^ +note: required by a bound in `_::{closure#0}::_::{closure#0}::assert_impl_all` + --> tests/ui-msrv/derive_transparent.rs:34:1 + | +34 | util_assert_impl_all!(TransparentStruct<NotZerocopy>: TryFromBytes); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `_::{closure#0}::_::{closure#0}::assert_impl_all` + = note: this error originates in the macro `::static_assertions::assert_impl_all` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy: FromZeros` is not satisfied + --> tests/ui-msrv/derive_transparent.rs:35:1 + | +35 | util_assert_impl_all!(TransparentStruct<NotZerocopy>: FromZeros); + | 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `FromZeros` is not implemented for `NotZerocopy` + | +note: required because of the requirements on the impl of `FromZeros` for `TransparentStruct<NotZerocopy>` + --> tests/ui-msrv/derive_transparent.rs:24:21 + | +24 | #[derive(IntoBytes, FromBytes, Unaligned)] + | ^^^^^^^^^ +note: required by a bound in `_::{closure#0}::_::{closure#0}::assert_impl_all` + --> tests/ui-msrv/derive_transparent.rs:35:1 + | +35 | util_assert_impl_all!(TransparentStruct<NotZerocopy>: FromZeros); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `_::{closure#0}::_::{closure#0}::assert_impl_all` + = note: this error originates in the macro `::static_assertions::assert_impl_all` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy: zerocopy::FromBytes` is not satisfied + --> tests/ui-msrv/derive_transparent.rs:36:1 + | +36 | util_assert_impl_all!(TransparentStruct<NotZerocopy>: FromBytes); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `zerocopy::FromBytes` is not implemented for `NotZerocopy` + | +note: required because of the requirements on the impl of `zerocopy::FromBytes` for `TransparentStruct<NotZerocopy>` + --> tests/ui-msrv/derive_transparent.rs:24:21 + | +24 | #[derive(IntoBytes, FromBytes, Unaligned)] + | ^^^^^^^^^ +note: required by a bound in `_::{closure#0}::_::{closure#0}::assert_impl_all` + --> tests/ui-msrv/derive_transparent.rs:36:1 + | +36 | util_assert_impl_all!(TransparentStruct<NotZerocopy>: FromBytes); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `_::{closure#0}::_::{closure#0}::assert_impl_all` + = note: this error originates in the macro `::static_assertions::assert_impl_all` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy: zerocopy::IntoBytes` is not satisfied + --> tests/ui-msrv/derive_transparent.rs:37:1 + | +37 | util_assert_impl_all!(TransparentStruct<NotZerocopy>: IntoBytes); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `zerocopy::IntoBytes` is not implemented for `NotZerocopy` + | +note: required because of the requirements on the impl of `zerocopy::IntoBytes` for `TransparentStruct<NotZerocopy>` + --> tests/ui-msrv/derive_transparent.rs:24:10 + | +24 | #[derive(IntoBytes, FromBytes, Unaligned)] + | ^^^^^^^^^ +note: required by a bound in `_::{closure#0}::_::{closure#0}::assert_impl_all` + --> tests/ui-msrv/derive_transparent.rs:37:1 + | +37 | util_assert_impl_all!(TransparentStruct<NotZerocopy>: IntoBytes); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `_::{closure#0}::_::{closure#0}::assert_impl_all` + = note: this error originates in the macro `::static_assertions::assert_impl_all` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy: zerocopy::Unaligned` is not satisfied + --> tests/ui-msrv/derive_transparent.rs:38:1 + | +38 | util_assert_impl_all!(TransparentStruct<NotZerocopy>: Unaligned); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `zerocopy::Unaligned` is not implemented for `NotZerocopy` + | +note: required because of the requirements on the impl of `zerocopy::Unaligned` for `TransparentStruct<NotZerocopy>` + --> tests/ui-msrv/derive_transparent.rs:24:32 + | +24 | #[derive(IntoBytes, 
FromBytes, Unaligned)] + | ^^^^^^^^^ +note: required by a bound in `_::{closure#0}::_::{closure#0}::assert_impl_all` + --> tests/ui-msrv/derive_transparent.rs:38:1 + | +38 | util_assert_impl_all!(TransparentStruct<NotZerocopy>: Unaligned); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `_::{closure#0}::_::{closure#0}::assert_impl_all` + = note: this error originates in the macro `::static_assertions::assert_impl_all` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy-derive/tests/ui-msrv/enum.rs b/vendor/zerocopy-derive/tests/ui-msrv/enum.rs new file mode 100644 index 00000000..82b43d80 --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-msrv/enum.rs @@ -0,0 +1,577 @@ +// Copyright 2019 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +#[macro_use] +extern crate zerocopy; + +fn main() {} + +// +// Generic errors +// + +#[derive(FromBytes)] +#[repr("foo")] +enum Generic1 { + A, +} + +#[derive(FromBytes)] +#[repr(foo)] +enum Generic2 { + A, +} + +#[derive(FromBytes)] +#[repr(transparent)] +enum Generic3 { + A, +} + +#[derive(FromBytes)] +#[repr(u8, u16)] +enum Generic4 { + A, +} + +#[derive(FromBytes)] +enum Generic5 { + A, +} + +// +// Immutable errors +// + +#[derive(Immutable)] +enum Immutable1 { + A(core::cell::UnsafeCell<()>), +} + +#[derive(Immutable)] +enum Never {} + +#[derive(Immutable)] +enum Immutable2 { + Uninhabited(Never, core::cell::UnsafeCell<u8>), + Inhabited(u8), +} + +// +// TryFromBytes errors +// + +#[derive(TryFromBytes)] +enum TryFromBytes1 { + A, +} + +#[derive(TryFromBytes)] +enum TryFromBytes2 { + A, + B(u8), +} + +struct NotTryFromBytes; + +#[derive(TryFromBytes)] +#[repr(u8)] +enum TryFromBytes3 { + A(NotTryFromBytes), +} + +// +// FromZeros errors +// + +#[derive(FromZeros)] +enum FromZeros1 { + A(u8), +} + +#[derive(FromZeros)] +enum FromZeros2 { + A, + B(u8), +} + +#[derive(FromZeros)] +enum FromZeros3 { + A = 1, + B, +} + +#[derive(FromZeros)] +#[repr(u8)] +enum FromZeros4 { + A = 1, + B = 2, +} + +const NEGATIVE_ONE: i8 = -1; + +#[derive(FromZeros)] +#[repr(i8)] +enum FromZeros5 { + A = NEGATIVE_ONE, + B, +} + +struct NotFromZeros; + +#[derive(FromZeros)] +#[repr(u8)] +enum FromZeros6 { + A(NotFromZeros), +} + +#[derive(FromZeros)] +#[repr(u8)] +enum FromZeros7 { + A = 1, + B(NotFromZeros), +} + +// +// FromBytes errors +// + +#[derive(FromBytes)] +enum FromBytes1 { + A, +} + +#[derive(FromBytes)] +#[repr(C)] +enum FromBytes2 { + A, +} + +#[derive(FromBytes)] +#[repr(usize)] +enum FromBytes3 { + A, +} + +#[derive(FromBytes)] +#[repr(isize)] +enum FromBytes4 { + A, +} + +#[derive(FromBytes)] +#[repr(u32)] +enum FromBytes5 { + A, +} + +#[derive(FromBytes)] +#[repr(i32)] +enum FromBytes6 { + A, +} + +#[derive(FromBytes)] +#[repr(u64)] +enum FromBytes7 { + A, +} + +#[derive(FromBytes)] +#[repr(i64)] +enum FromBytes8 { + A, +} + +#[derive(FromBytes)] +#[repr(u8)] +enum FooU8 { + Variant0, + Variant1, + Variant2, + Variant3, + Variant4, + Variant5, + Variant6, + Variant7, + Variant8, + Variant9, + Variant10, + Variant11, + Variant12, + Variant13, + Variant14, + Variant15, + Variant16, + Variant17, + Variant18, + Variant19, + Variant20, + Variant21, + Variant22, + 
Variant23, + Variant24, + Variant25, + Variant26, + Variant27, + Variant28, + Variant29, + Variant30, + Variant31, + Variant32, + Variant33, + Variant34, + Variant35, + Variant36, + Variant37, + Variant38, + Variant39, + Variant40, + Variant41, + Variant42, + Variant43, + Variant44, + Variant45, + Variant46, + Variant47, + Variant48, + Variant49, + Variant50, + Variant51, + Variant52, + Variant53, + Variant54, + Variant55, + Variant56, + Variant57, + Variant58, + Variant59, + Variant60, + Variant61, + Variant62, + Variant63, + Variant64, + Variant65, + Variant66, + Variant67, + Variant68, + Variant69, + Variant70, + Variant71, + Variant72, + Variant73, + Variant74, + Variant75, + Variant76, + Variant77, + Variant78, + Variant79, + Variant80, + Variant81, + Variant82, + Variant83, + Variant84, + Variant85, + Variant86, + Variant87, + Variant88, + Variant89, + Variant90, + Variant91, + Variant92, + Variant93, + Variant94, + Variant95, + Variant96, + Variant97, + Variant98, + Variant99, + Variant100, + Variant101, + Variant102, + Variant103, + Variant104, + Variant105, + Variant106, + Variant107, + Variant108, + Variant109, + Variant110, + Variant111, + Variant112, + Variant113, + Variant114, + Variant115, + Variant116, + Variant117, + Variant118, + Variant119, + Variant120, + Variant121, + Variant122, + Variant123, + Variant124, + Variant125, + Variant126, + Variant127, + Variant128, + Variant129, + Variant130, + Variant131, + Variant132, + Variant133, + Variant134, + Variant135, + Variant136, + Variant137, + Variant138, + Variant139, + Variant140, + Variant141, + Variant142, + Variant143, + Variant144, + Variant145, + Variant146, + Variant147, + Variant148, + Variant149, + Variant150, + Variant151, + Variant152, + Variant153, + Variant154, + Variant155, + Variant156, + Variant157, + Variant158, + Variant159, + Variant160, + Variant161, + Variant162, + Variant163, + Variant164, + Variant165, + Variant166, + Variant167, + Variant168, + Variant169, + Variant170, + Variant171, + Variant172, + Variant173, + Variant174, + Variant175, + Variant176, + Variant177, + Variant178, + Variant179, + Variant180, + Variant181, + Variant182, + Variant183, + Variant184, + Variant185, + Variant186, + Variant187, + Variant188, + Variant189, + Variant190, + Variant191, + Variant192, + Variant193, + Variant194, + Variant195, + Variant196, + Variant197, + Variant198, + Variant199, + Variant200, + Variant201, + Variant202, + Variant203, + Variant204, + Variant205, + Variant206, + Variant207, + Variant208, + Variant209, + Variant210, + Variant211, + Variant212, + Variant213, + Variant214, + Variant215, + Variant216, + Variant217, + Variant218, + Variant219, + Variant220, + Variant221, + Variant222, + Variant223, + Variant224, + Variant225, + Variant226, + Variant227, + Variant228, + Variant229, + Variant230, + Variant231, + Variant232, + Variant233, + Variant234, + Variant235, + Variant236, + Variant237, + Variant238, + Variant239, + Variant240, + Variant241, + Variant242, + Variant243, + Variant244, + Variant245, + Variant246, + Variant247, + Variant248, + Variant249, + Variant250, + Variant251, + Variant252, + Variant253, + Variant254, + Variant255(bool), +} + +// +// Unaligned errors +// + +#[derive(Unaligned)] +#[repr(C)] +enum Unaligned1 { + A, +} + +#[derive(Unaligned)] +#[repr(u16)] +enum Unaligned2 { + A, +} + +#[derive(Unaligned)] +#[repr(i16)] +enum Unaligned3 { + A, +} + +#[derive(Unaligned)] +#[repr(u32)] +enum Unaligned4 { + A, +} + +#[derive(Unaligned)] +#[repr(i32)] +enum Unaligned5 { + A, +} + 
+#[derive(Unaligned)] +#[repr(u64)] +enum Unaligned6 { + A, +} + +#[derive(Unaligned)] +#[repr(i64)] +enum Unaligned7 { + A, +} + +#[derive(Unaligned)] +#[repr(usize)] +enum Unaligned8 { + A, +} + +#[derive(Unaligned)] +#[repr(isize)] +enum Unaligned9 { + A, +} + +#[derive(Unaligned)] +#[repr(u8, align(2))] +enum Unaligned10 { + A, +} + +#[derive(Unaligned)] +#[repr(i8, align(2))] +enum Unaligned11 { + A, +} + +#[derive(Unaligned)] +#[repr(align(1), align(2))] +enum Unaligned12 { + A, +} + +#[derive(Unaligned)] +#[repr(align(2), align(4))] +enum Unaligned13 { + A, +} + +// +// IntoBytes errors +// + +#[derive(IntoBytes)] +#[repr(u8)] +enum IntoBytes1 { + A, + B(u8), +} + +#[derive(IntoBytes)] +#[repr(C, align(4))] +struct Align4IntoBytes(u32); + +#[derive(IntoBytes)] +#[repr(u8)] +enum IntoBytes2 { + A(Align4IntoBytes), +} + +#[derive(IntoBytes)] +#[repr(u32)] +enum IntoBytes3 { + A(u32), + B(u16), +} + +#[derive(IntoBytes)] +enum IntoBytes4 { + A(u32), + B(u16), +} + +#[derive(IntoBytes)] +enum IntoBytes5 { + A(u32), +} + +#[derive(IntoBytes)] +#[repr(u8)] +enum IntoBytes6<T> { + A(T), +} diff --git a/vendor/zerocopy-derive/tests/ui-msrv/enum.stderr b/vendor/zerocopy-derive/tests/ui-msrv/enum.stderr new file mode 100644 index 00000000..c39597f0 --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-msrv/enum.stderr @@ -0,0 +1,406 @@ +error: unrecognized representation hint + --> tests/ui-msrv/enum.rs:19:8 + | +19 | #[repr("foo")] + | ^^^^^ + +error: unrecognized representation hint + --> tests/ui-msrv/enum.rs:25:8 + | +25 | #[repr(foo)] + | ^^^ + +error: must have #[repr(C)] or #[repr(Int)] attribute in order to guarantee this type's memory layout + --> tests/ui-msrv/enum.rs:30:10 + | +30 | #[derive(FromBytes)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `FromBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: this conflicts with another representation hint + --> tests/ui-msrv/enum.rs:37:12 + | +37 | #[repr(u8, u16)] + | ^^^ + +error: must have #[repr(C)] or #[repr(Int)] attribute in order to guarantee this type's memory layout + --> tests/ui-msrv/enum.rs:42:10 + | +42 | #[derive(FromBytes)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `FromBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(C)] or #[repr(Int)] attribute in order to guarantee this type's memory layout + --> tests/ui-msrv/enum.rs:69:10 + | +69 | #[derive(TryFromBytes)] + | ^^^^^^^^^^^^ + | + = note: this error originates in the derive macro `TryFromBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(C)] or #[repr(Int)] attribute in order to guarantee this type's memory layout + --> tests/ui-msrv/enum.rs:74:10 + | +74 | #[derive(TryFromBytes)] + | ^^^^^^^^^^^^ + | + = note: this error originates in the derive macro `TryFromBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(C)] or #[repr(Int)] attribute in order to guarantee this type's memory layout + --> tests/ui-msrv/enum.rs:92:10 + | +92 | #[derive(FromZeros)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `FromZeros` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(C)] or #[repr(Int)] attribute in order to guarantee this type's memory layout + --> tests/ui-msrv/enum.rs:97:10 + | +97 | #[derive(FromZeros)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `FromZeros` (in Nightly 
builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(C)] or #[repr(Int)] attribute in order to guarantee this type's memory layout + --> tests/ui-msrv/enum.rs:103:10 + | +103 | #[derive(FromZeros)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `FromZeros` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: FromZeros only supported on enums with a variant that has a discriminant of `0` + --> tests/ui-msrv/enum.rs:110:1 + | +110 | / #[repr(u8)] +111 | | enum FromZeros4 { +112 | | A = 1, +113 | | B = 2, +114 | | } + | |_^ + +error: FromZeros only supported on enums with a variant that has a discriminant of `0` +help: This enum has discriminants which are not literal integers. One of those may define or imply which variant has a discriminant of zero. Use a literal integer to define or imply the variant with a discriminant of zero. + --> tests/ui-msrv/enum.rs:119:1 + | +119 | / #[repr(i8)] +120 | | enum FromZeros5 { +121 | | A = NEGATIVE_ONE, +122 | | B, +123 | | } + | |_^ + +error: FromZeros only supported on enums with a variant that has a discriminant of `0` + --> tests/ui-msrv/enum.rs:134:1 + | +134 | / #[repr(u8)] +135 | | enum FromZeros7 { +136 | | A = 1, +137 | | B(NotFromZeros), +138 | | } + | |_^ + +error: must have #[repr(C)] or #[repr(Int)] attribute in order to guarantee this type's memory layout + --> tests/ui-msrv/enum.rs:144:10 + | +144 | #[derive(FromBytes)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `FromBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: `FromBytes` only supported on enums with `#[repr(...)]` attributes `u8`, `i8`, `u16`, or `i16` + --> tests/ui-msrv/enum.rs:150:8 + | +150 | #[repr(C)] + | ^ + +error: `FromBytes` only supported on enums with `#[repr(...)]` attributes `u8`, `i8`, `u16`, or `i16` + --> tests/ui-msrv/enum.rs:156:8 + | +156 | #[repr(usize)] + | ^^^^^ + +error: `FromBytes` only supported on enums with `#[repr(...)]` attributes `u8`, `i8`, `u16`, or `i16` + --> tests/ui-msrv/enum.rs:162:8 + | +162 | #[repr(isize)] + | ^^^^^ + +error: `FromBytes` only supported on enums with `#[repr(...)]` attributes `u8`, `i8`, `u16`, or `i16` + --> tests/ui-msrv/enum.rs:168:8 + | +168 | #[repr(u32)] + | ^^^ + +error: `FromBytes` only supported on enums with `#[repr(...)]` attributes `u8`, `i8`, `u16`, or `i16` + --> tests/ui-msrv/enum.rs:174:8 + | +174 | #[repr(i32)] + | ^^^ + +error: `FromBytes` only supported on enums with `#[repr(...)]` attributes `u8`, `i8`, `u16`, or `i16` + --> tests/ui-msrv/enum.rs:180:8 + | +180 | #[repr(u64)] + | ^^^ + +error: `FromBytes` only supported on enums with `#[repr(...)]` attributes `u8`, `i8`, `u16`, or `i16` + --> tests/ui-msrv/enum.rs:186:8 + | +186 | #[repr(i64)] + | ^^^ + +error: must have #[repr(u8)] or #[repr(i8)] attribute in order to guarantee this type's alignment + --> tests/ui-msrv/enum.rs:456:10 + | +456 | #[derive(Unaligned)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(u8)] or #[repr(i8)] attribute in order to guarantee this type's alignment + --> tests/ui-msrv/enum.rs:462:10 + | +462 | #[derive(Unaligned)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(u8)] or #[repr(i8)] attribute in order to guarantee this type's alignment + --> 
tests/ui-msrv/enum.rs:468:10 + | +468 | #[derive(Unaligned)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(u8)] or #[repr(i8)] attribute in order to guarantee this type's alignment + --> tests/ui-msrv/enum.rs:474:10 + | +474 | #[derive(Unaligned)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(u8)] or #[repr(i8)] attribute in order to guarantee this type's alignment + --> tests/ui-msrv/enum.rs:480:10 + | +480 | #[derive(Unaligned)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(u8)] or #[repr(i8)] attribute in order to guarantee this type's alignment + --> tests/ui-msrv/enum.rs:486:10 + | +486 | #[derive(Unaligned)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(u8)] or #[repr(i8)] attribute in order to guarantee this type's alignment + --> tests/ui-msrv/enum.rs:492:10 + | +492 | #[derive(Unaligned)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(u8)] or #[repr(i8)] attribute in order to guarantee this type's alignment + --> tests/ui-msrv/enum.rs:498:10 + | +498 | #[derive(Unaligned)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(u8)] or #[repr(i8)] attribute in order to guarantee this type's alignment + --> tests/ui-msrv/enum.rs:504:10 + | +504 | #[derive(Unaligned)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: cannot derive `Unaligned` on type with alignment greater than 1 + --> tests/ui-msrv/enum.rs:511:12 + | +511 | #[repr(u8, align(2))] + | ^^^^^ + +error: cannot derive `Unaligned` on type with alignment greater than 1 + --> tests/ui-msrv/enum.rs:517:12 + | +517 | #[repr(i8, align(2))] + | ^^^^^ + +error: this conflicts with another representation hint + --> tests/ui-msrv/enum.rs:523:18 + | +523 | #[repr(align(1), align(2))] + | ^^^^^ + +error: this conflicts with another representation hint + --> tests/ui-msrv/enum.rs:529:18 + | +529 | #[repr(align(2), align(4))] + | ^^^^^ + +error: must have #[repr(C)] or #[repr(Int)] attribute in order to guarantee this type's memory layout + --> tests/ui-msrv/enum.rs:562:10 + | +562 | #[derive(IntoBytes)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(C)] or #[repr(Int)] attribute in order to guarantee this type's memory layout + --> tests/ui-msrv/enum.rs:568:10 + | +568 | #[derive(IntoBytes)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: generic parameters may not be used in const operations + --> tests/ui-msrv/enum.rs:576:7 + | +576 | A(T), + | ^ cannot perform const operation using `T` + | + = note: type parameters may not be used in const expressions + +error[E0658]: 
custom discriminant values are not allowed in enums with tuple or struct variants + --> tests/ui-msrv/enum.rs:136:9 + | +136 | A = 1, + | ^ disallowed custom discriminant +137 | B(NotFromZeros), + | --------------- tuple variant defined here + | + = note: see issue #60553 <https://github.com/rust-lang/rust/issues/60553> for more information + +error[E0565]: meta item in `repr` must be an identifier + --> tests/ui-msrv/enum.rs:19:8 + | +19 | #[repr("foo")] + | ^^^^^ + +error[E0552]: unrecognized representation hint + --> tests/ui-msrv/enum.rs:25:8 + | +25 | #[repr(foo)] + | ^^^ + +error[E0566]: conflicting representation hints + --> tests/ui-msrv/enum.rs:37:8 + | +37 | #[repr(u8, u16)] + | ^^ ^^^ + | + = note: `#[deny(conflicting_repr_hints)]` on by default + = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! + = note: for more information, see issue #68585 <https://github.com/rust-lang/rust/issues/68585> + +error[E0277]: the trait bound `UnsafeCell<()>: Immutable` is not satisfied + --> tests/ui-msrv/enum.rs:51:10 + | +51 | #[derive(Immutable)] + | ^^^^^^^^^ the trait `Immutable` is not implemented for `UnsafeCell<()>` + | + = help: see issue #48214 + = note: this error originates in the derive macro `Immutable` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `UnsafeCell<u8>: Immutable` is not satisfied + --> tests/ui-msrv/enum.rs:59:10 + | +59 | #[derive(Immutable)] + | ^^^^^^^^^ the trait `Immutable` is not implemented for `UnsafeCell<u8>` + | + = help: see issue #48214 + = note: this error originates in the derive macro `Immutable` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotTryFromBytes: TryFromBytes` is not satisfied + --> tests/ui-msrv/enum.rs:82:10 + | +82 | #[derive(TryFromBytes)] + | ^^^^^^^^^^^^ the trait `TryFromBytes` is not implemented for `NotTryFromBytes` + | + = help: see issue #48214 + = note: this error originates in the derive macro `TryFromBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotFromZeros: TryFromBytes` is not satisfied + --> tests/ui-msrv/enum.rs:127:10 + | +127 | #[derive(FromZeros)] + | ^^^^^^^^^ the trait `TryFromBytes` is not implemented for `NotFromZeros` + | + = help: see issue #48214 + = note: this error originates in the derive macro `FromZeros` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotFromZeros: FromZeros` is not satisfied + --> tests/ui-msrv/enum.rs:127:10 + | +127 | #[derive(FromZeros)] + | ^^^^^^^^^ the trait `FromZeros` is not implemented for `NotFromZeros` + | + = help: see issue #48214 + = note: this error originates in the derive macro `FromZeros` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `bool: FromBytes` is not satisfied + --> tests/ui-msrv/enum.rs:191:10 + | +191 | #[derive(FromBytes)] + | ^^^^^^^^^ the trait `FromBytes` is not implemented for `bool` + | + = help: see issue #48214 + = note: this error originates in the derive macro `FromBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `(): PaddingFree<IntoBytes1, 1_usize>` is not satisfied + --> tests/ui-msrv/enum.rs:538:10 + | +538 | #[derive(IntoBytes)] + | ^^^^^^^^^ the trait `PaddingFree<IntoBytes1, 1_usize>` is not implemented for `()` + | + = help: the following implementations were 
found: + <() as PaddingFree<T, 0_usize>> + = help: see issue #48214 + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `(): PaddingFree<IntoBytes2, 3_usize>` is not satisfied + --> tests/ui-msrv/enum.rs:549:10 + | +549 | #[derive(IntoBytes)] + | ^^^^^^^^^ the trait `PaddingFree<IntoBytes2, 3_usize>` is not implemented for `()` + | + = help: the following implementations were found: + <() as PaddingFree<T, 0_usize>> + = help: see issue #48214 + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `(): PaddingFree<IntoBytes3, 2_usize>` is not satisfied + --> tests/ui-msrv/enum.rs:555:10 + | +555 | #[derive(IntoBytes)] + | ^^^^^^^^^ the trait `PaddingFree<IntoBytes3, 2_usize>` is not implemented for `()` + | + = help: the following implementations were found: + <() as PaddingFree<T, 0_usize>> + = help: see issue #48214 + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: generic `Self` types are currently not permitted in anonymous constants + --> tests/ui-msrv/enum.rs:573:10 + | +573 | #[derive(IntoBytes)] + | ^^^^^^^^^ + | +note: not a concrete type + --> tests/ui-msrv/enum.rs:573:10 + | +573 | #[derive(IntoBytes)] + | ^^^^^^^^^ + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy-derive/tests/ui-msrv/enum_from_bytes_u8_too_few.rs b/vendor/zerocopy-derive/tests/ui-msrv/enum_from_bytes_u8_too_few.rs new file mode 100644 index 00000000..1b1bed31 --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-msrv/enum_from_bytes_u8_too_few.rs @@ -0,0 +1,272 @@ +// Copyright 2019 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +#[macro_use] +extern crate zerocopy; + +fn main() {} + +#[derive(FromBytes)] +#[repr(u8)] +enum Foo { + Variant0, + Variant1, + Variant2, + Variant3, + Variant4, + Variant5, + Variant6, + Variant7, + Variant8, + Variant9, + Variant10, + Variant11, + Variant12, + Variant13, + Variant14, + Variant15, + Variant16, + Variant17, + Variant18, + Variant19, + Variant20, + Variant21, + Variant22, + Variant23, + Variant24, + Variant25, + Variant26, + Variant27, + Variant28, + Variant29, + Variant30, + Variant31, + Variant32, + Variant33, + Variant34, + Variant35, + Variant36, + Variant37, + Variant38, + Variant39, + Variant40, + Variant41, + Variant42, + Variant43, + Variant44, + Variant45, + Variant46, + Variant47, + Variant48, + Variant49, + Variant50, + Variant51, + Variant52, + Variant53, + Variant54, + Variant55, + Variant56, + Variant57, + Variant58, + Variant59, + Variant60, + Variant61, + Variant62, + Variant63, + Variant64, + Variant65, + Variant66, + Variant67, + Variant68, + Variant69, + Variant70, + Variant71, + Variant72, + Variant73, + Variant74, + Variant75, + Variant76, + Variant77, + Variant78, + Variant79, + Variant80, + Variant81, + Variant82, + Variant83, + Variant84, + Variant85, + Variant86, + Variant87, + Variant88, + Variant89, + Variant90, + Variant91, + Variant92, + Variant93, + Variant94, + Variant95, + Variant96, + Variant97, + Variant98, + Variant99, + Variant100, + Variant101, + Variant102, + Variant103, + Variant104, + Variant105, + Variant106, + Variant107, + Variant108, + Variant109, + Variant110, + Variant111, + Variant112, + Variant113, + Variant114, + Variant115, + Variant116, + Variant117, + Variant118, + Variant119, + Variant120, + Variant121, + Variant122, + Variant123, + Variant124, + Variant125, + Variant126, + Variant127, + Variant128, + Variant129, + Variant130, + Variant131, + Variant132, + Variant133, + Variant134, + Variant135, + Variant136, + Variant137, + Variant138, + Variant139, + Variant140, + Variant141, + Variant142, + Variant143, + Variant144, + Variant145, + Variant146, + Variant147, + Variant148, + Variant149, + Variant150, + Variant151, + Variant152, + Variant153, + Variant154, + Variant155, + Variant156, + Variant157, + Variant158, + Variant159, + Variant160, + Variant161, + Variant162, + Variant163, + Variant164, + Variant165, + Variant166, + Variant167, + Variant168, + Variant169, + Variant170, + Variant171, + Variant172, + Variant173, + Variant174, + Variant175, + Variant176, + Variant177, + Variant178, + Variant179, + Variant180, + Variant181, + Variant182, + Variant183, + Variant184, + Variant185, + Variant186, + Variant187, + Variant188, + Variant189, + Variant190, + Variant191, + Variant192, + Variant193, + Variant194, + Variant195, + Variant196, + Variant197, + Variant198, + Variant199, + Variant200, + Variant201, + Variant202, + Variant203, + Variant204, + Variant205, + Variant206, + Variant207, + Variant208, + Variant209, + Variant210, + Variant211, + Variant212, + Variant213, + Variant214, + Variant215, + Variant216, + Variant217, + Variant218, + Variant219, + Variant220, + Variant221, + Variant222, + Variant223, + Variant224, + Variant225, + Variant226, + Variant227, + Variant228, + Variant229, + Variant230, + Variant231, + Variant232, + Variant233, + Variant234, + Variant235, + Variant236, + Variant237, + Variant238, + Variant239, + Variant240, + Variant241, + Variant242, + Variant243, + Variant244, + Variant245, + Variant246, + Variant247, + Variant248, + Variant249, + Variant250, + Variant251, + Variant252, + Variant253, + 
Variant254, +} diff --git a/vendor/zerocopy-derive/tests/ui-msrv/enum_from_bytes_u8_too_few.stderr b/vendor/zerocopy-derive/tests/ui-msrv/enum_from_bytes_u8_too_few.stderr new file mode 100644 index 00000000..ff828dcc --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-msrv/enum_from_bytes_u8_too_few.stderr @@ -0,0 +1,11 @@ +error: FromBytes only supported on repr(u8) enum with 256 variants + --> tests/ui-msrv/enum_from_bytes_u8_too_few.rs:15:1 + | +15 | / #[repr(u8)] +16 | | enum Foo { +17 | | Variant0, +18 | | Variant1, +... | +271 | | Variant254, +272 | | } + | |_^ diff --git a/vendor/zerocopy-derive/tests/ui-msrv/late_compile_pass.rs b/vendor/zerocopy-derive/tests/ui-msrv/late_compile_pass.rs new file mode 100644 index 00000000..d8aeb369 --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-msrv/late_compile_pass.rs @@ -0,0 +1,85 @@ +// Copyright 2019 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +#[macro_use] +extern crate zerocopy; + +#[path = "../include.rs"] +mod util; + +use zerocopy::KnownLayout; + +use self::util::util::{NotZerocopy, AU16}; + +fn main() {} + +// These tests cause errors which are generated by a later compilation pass than +// the other errors we generate, and so if they're compiled in the same file, +// the compiler will never get to that pass, and so we won't get the errors. + +// +// TryFromBytes errors +// + +#[derive(TryFromBytes)] +struct TryFromBytes1 { + value: NotZerocopy, +} + +// +// FromZeros errors +// + +#[derive(FromZeros)] +struct FromZeros1 { + value: NotZerocopy, +} + +// +// FromBytes errors +// + +#[derive(FromBytes)] +struct FromBytes1 { + value: NotZerocopy, +} + +// +// IntoBytes errors +// + +#[derive(IntoBytes)] +#[repr(C)] +struct IntoBytes1 { + value: NotZerocopy, +} + +// +// Unaligned errors +// + +#[derive(Unaligned)] +#[repr(C)] +struct Unaligned1 { + aligned: AU16, +} + +// This specifically tests a bug we had in an old version of the code in which +// the trait bound would only be enforced for the first field's type. 
+#[derive(Unaligned)] +#[repr(C)] +struct Unaligned2 { + unaligned: u8, + aligned: AU16, +} + +#[derive(Unaligned)] +#[repr(transparent)] +struct Unaligned3 { + aligned: AU16, +} diff --git a/vendor/zerocopy-derive/tests/ui-msrv/late_compile_pass.stderr b/vendor/zerocopy-derive/tests/ui-msrv/late_compile_pass.stderr new file mode 100644 index 00000000..c5112fd5 --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-msrv/late_compile_pass.stderr @@ -0,0 +1,97 @@ +warning: unused import: `zerocopy::KnownLayout` + --> tests/ui-msrv/late_compile_pass.rs:15:5 + | +15 | use zerocopy::KnownLayout; + | ^^^^^^^^^^^^^^^^^^^^^ + | + = note: `#[warn(unused_imports)]` on by default + +error[E0277]: the trait bound `NotZerocopy: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-msrv/late_compile_pass.rs:29:10 + | +29 | #[derive(TryFromBytes)] + | ^^^^^^^^^^^^ the trait `zerocopy::TryFromBytes` is not implemented for `NotZerocopy` + | + = help: see issue #48214 + = note: this error originates in the derive macro `TryFromBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-msrv/late_compile_pass.rs:38:10 + | +38 | #[derive(FromZeros)] + | ^^^^^^^^^ the trait `zerocopy::TryFromBytes` is not implemented for `NotZerocopy` + | + = help: see issue #48214 + = note: this error originates in the derive macro `FromZeros` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy: FromZeros` is not satisfied + --> tests/ui-msrv/late_compile_pass.rs:38:10 + | +38 | #[derive(FromZeros)] + | ^^^^^^^^^ the trait `FromZeros` is not implemented for `NotZerocopy` + | + = help: see issue #48214 + = note: this error originates in the derive macro `FromZeros` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-msrv/late_compile_pass.rs:47:10 + | +47 | #[derive(FromBytes)] + | ^^^^^^^^^ the trait `zerocopy::TryFromBytes` is not implemented for `NotZerocopy` + | + = help: see issue #48214 + = note: this error originates in the derive macro `FromBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy: FromZeros` is not satisfied + --> tests/ui-msrv/late_compile_pass.rs:47:10 + | +47 | #[derive(FromBytes)] + | ^^^^^^^^^ the trait `FromZeros` is not implemented for `NotZerocopy` + | + = help: see issue #48214 + = note: this error originates in the derive macro `FromBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy: zerocopy::FromBytes` is not satisfied + --> tests/ui-msrv/late_compile_pass.rs:47:10 + | +47 | #[derive(FromBytes)] + | ^^^^^^^^^ the trait `zerocopy::FromBytes` is not implemented for `NotZerocopy` + | + = help: see issue #48214 + = note: this error originates in the derive macro `FromBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy: zerocopy::IntoBytes` is not satisfied + --> tests/ui-msrv/late_compile_pass.rs:56:10 + | +56 | #[derive(IntoBytes)] + | ^^^^^^^^^ the trait `zerocopy::IntoBytes` is not implemented for `NotZerocopy` + | + = help: see issue #48214 + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `AU16: zerocopy::Unaligned` is 
not satisfied + --> tests/ui-msrv/late_compile_pass.rs:66:10 + | +66 | #[derive(Unaligned)] + | ^^^^^^^^^ the trait `zerocopy::Unaligned` is not implemented for `AU16` + | + = help: see issue #48214 + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `AU16: zerocopy::Unaligned` is not satisfied + --> tests/ui-msrv/late_compile_pass.rs:74:10 + | +74 | #[derive(Unaligned)] + | ^^^^^^^^^ the trait `zerocopy::Unaligned` is not implemented for `AU16` + | + = help: see issue #48214 + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `AU16: zerocopy::Unaligned` is not satisfied + --> tests/ui-msrv/late_compile_pass.rs:81:10 + | +81 | #[derive(Unaligned)] + | ^^^^^^^^^ the trait `zerocopy::Unaligned` is not implemented for `AU16` + | + = help: see issue #48214 + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy-derive/tests/ui-msrv/mid_compile_pass.rs b/vendor/zerocopy-derive/tests/ui-msrv/mid_compile_pass.rs new file mode 100644 index 00000000..e0c4bc57 --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-msrv/mid_compile_pass.rs @@ -0,0 +1,61 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::KnownLayout; + +fn main() {} + +// These tests cause errors which are generated by a later compilation pass than +// the other errors we generate, and so if they're compiled in the same file, +// the compiler will never get to that pass, and so we won't get the errors. + +// +// KnownLayout errors +// + +fn assert_kl<T: ?Sized + KnownLayout>(_: &T) {} + +// | `repr(C)`? | generic? | `KnownLayout`? | `Sized`? | Type Name | +// | N | Y | N | N | KL04 | +#[derive(KnownLayout)] +struct KL04<T: ?Sized>(u8, T); + +fn test_kl04<T: ?Sized>(kl: &KL04<T>) { + assert_kl(kl); +} + +// | `repr(C)`? | generic? | `KnownLayout`? | `Sized`? | Type Name | +// | N | Y | Y | N | KL06 | +#[derive(KnownLayout)] +struct KL06<T: ?Sized + KnownLayout>(u8, T); + +fn test_kl06<T: ?Sized + KnownLayout>(kl: &KL06<T>) { + assert_kl(kl); +} + +// | `repr(C)`? | generic? | `KnownLayout`? | `Sized`? | Type Name | +// | Y | Y | N | N | KL12 | +#[derive(KnownLayout)] +#[repr(C)] +struct KL12<T: ?Sized>(u8, T); + +fn test_kl12<T: ?Sized>(kl: &KL12<T>) { + assert_kl(kl) +} + +// | `repr(C)`? | generic? | `KnownLayout`? | `Sized`? 
| Type Name | +// | Y | Y | N | Y | KL13 | +#[derive(KnownLayout)] +#[repr(C)] +struct KL13<T>(u8, T); + +fn test_kl13<T>(t: T) -> impl KnownLayout { + KL13(0u8, t) +} diff --git a/vendor/zerocopy-derive/tests/ui-msrv/mid_compile_pass.stderr b/vendor/zerocopy-derive/tests/ui-msrv/mid_compile_pass.stderr new file mode 100644 index 00000000..9648856a --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-msrv/mid_compile_pass.stderr @@ -0,0 +1,97 @@ +error[E0277]: the trait bound `T: KnownLayout` is not satisfied + --> tests/ui-msrv/mid_compile_pass.rs:59:26 + | +59 | fn test_kl13<T>(t: T) -> impl KnownLayout { + | ^^^^^^^^^^^^^^^^ the trait `KnownLayout` is not implemented for `T` + | +note: required because of the requirements on the impl of `KnownLayout` for `KL13<T>` + --> tests/ui-msrv/mid_compile_pass.rs:55:10 + | +55 | #[derive(KnownLayout)] + | ^^^^^^^^^^^ + = note: this error originates in the derive macro `KnownLayout` (in Nightly builds, run with -Z macro-backtrace for more info) +help: consider restricting type parameter `T` + | +59 | fn test_kl13<T: zerocopy::KnownLayout>(t: T) -> impl KnownLayout { + | +++++++++++++++++++++++ + +error[E0277]: the size for values of type `T` cannot be known at compilation time + --> tests/ui-msrv/mid_compile_pass.rs:31:15 + | +30 | fn test_kl04<T: ?Sized>(kl: &KL04<T>) { + | - this type parameter needs to be `std::marker::Sized` +31 | assert_kl(kl); + | ^^ doesn't have a size known at compile-time + | +note: required because it appears within the type `KL04<T>` + --> tests/ui-msrv/mid_compile_pass.rs:28:8 + | +28 | struct KL04<T: ?Sized>(u8, T); + | ^^^^ +note: required because of the requirements on the impl of `KnownLayout` for `KL04<T>` + --> tests/ui-msrv/mid_compile_pass.rs:27:10 + | +27 | #[derive(KnownLayout)] + | ^^^^^^^^^^^ +note: required by a bound in `assert_kl` + --> tests/ui-msrv/mid_compile_pass.rs:23:26 + | +23 | fn assert_kl<T: ?Sized + KnownLayout>(_: &T) {} + | ^^^^^^^^^^^ required by this bound in `assert_kl` + = note: this error originates in the derive macro `KnownLayout` (in Nightly builds, run with -Z macro-backtrace for more info) +help: consider removing the `?Sized` bound to make the type parameter `Sized` + | +30 - fn test_kl04<T: ?Sized>(kl: &KL04<T>) { +30 + fn test_kl04<T>(kl: &KL04<T>) { + | + +error[E0277]: the size for values of type `T` cannot be known at compilation time + --> tests/ui-msrv/mid_compile_pass.rs:40:15 + | +39 | fn test_kl06<T: ?Sized + KnownLayout>(kl: &KL06<T>) { + | - this type parameter needs to be `std::marker::Sized` +40 | assert_kl(kl); + | ^^ doesn't have a size known at compile-time + | +note: required because it appears within the type `KL06<T>` + --> tests/ui-msrv/mid_compile_pass.rs:37:8 + | +37 | struct KL06<T: ?Sized + KnownLayout>(u8, T); + | ^^^^ +note: required because of the requirements on the impl of `KnownLayout` for `KL06<T>` + --> tests/ui-msrv/mid_compile_pass.rs:36:10 + | +36 | #[derive(KnownLayout)] + | ^^^^^^^^^^^ +note: required by a bound in `assert_kl` + --> tests/ui-msrv/mid_compile_pass.rs:23:26 + | +23 | fn assert_kl<T: ?Sized + KnownLayout>(_: &T) {} + | ^^^^^^^^^^^ required by this bound in `assert_kl` + = note: this error originates in the derive macro `KnownLayout` (in Nightly builds, run with -Z macro-backtrace for more info) +help: consider removing the `?Sized` bound to make the type parameter `Sized` + | +39 - fn test_kl06<T: ?Sized + KnownLayout>(kl: &KL06<T>) { +39 + fn test_kl06<T: KnownLayout>(kl: &KL06<T>) { + | + +error[E0277]: the trait bound `T: 
KnownLayout` is not satisfied + --> tests/ui-msrv/mid_compile_pass.rs:50:15 + | +50 | assert_kl(kl) + | ^^ + | | + | expected an implementor of trait `KnownLayout` + | help: consider borrowing here: `&kl` + | +note: required because of the requirements on the impl of `KnownLayout` for `KL12<T>` + --> tests/ui-msrv/mid_compile_pass.rs:45:10 + | +45 | #[derive(KnownLayout)] + | ^^^^^^^^^^^ +note: required by a bound in `assert_kl` + --> tests/ui-msrv/mid_compile_pass.rs:23:26 + | +23 | fn assert_kl<T: ?Sized + KnownLayout>(_: &T) {} + | ^^^^^^^^^^^ required by this bound in `assert_kl` + = note: this error originates in the derive macro `KnownLayout` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy-derive/tests/ui-msrv/msrv_specific.rs b/vendor/zerocopy-derive/tests/ui-msrv/msrv_specific.rs new file mode 100644 index 00000000..6e1ea5b3 --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-msrv/msrv_specific.rs @@ -0,0 +1,38 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +// This file contains tests which trigger errors on MSRV during a different +// compiler pass compared to the stable or nightly toolchains. + +#[macro_use] +extern crate zerocopy; + +#[path = "../include.rs"] +mod util; + +use zerocopy::IntoBytes; + +use self::util::util::AU16; + +fn main() {} + +// `repr(C, packed(2))` is not equivalent to `repr(C, packed)`. +#[derive(IntoBytes)] +#[repr(C, packed(2))] +struct IntoBytes1<T> { + t0: T, + // Add a second field to avoid triggering the "repr(C) struct with one + // field" special case. 
+ t1: T, +} + +fn is_into_bytes_1<T: IntoBytes>() { + if false { + is_into_bytes_1::<IntoBytes1<AU16>>(); + } +} diff --git a/vendor/zerocopy-derive/tests/ui-msrv/msrv_specific.stderr b/vendor/zerocopy-derive/tests/ui-msrv/msrv_specific.stderr new file mode 100644 index 00000000..2574f905 --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-msrv/msrv_specific.stderr @@ -0,0 +1,25 @@ +warning: unused `#[macro_use]` import + --> tests/ui-msrv/msrv_specific.rs:12:1 + | +12 | #[macro_use] + | ^^^^^^^^^^^^ + | + = note: `#[warn(unused_imports)]` on by default + +error[E0277]: the trait bound `AU16: zerocopy::Unaligned` is not satisfied + --> tests/ui-msrv/msrv_specific.rs:36:9 + | +36 | is_into_bytes_1::<IntoBytes1<AU16>>(); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `zerocopy::Unaligned` is not implemented for `AU16` + | +note: required because of the requirements on the impl of `zerocopy::IntoBytes` for `IntoBytes1<AU16>` + --> tests/ui-msrv/msrv_specific.rs:25:10 + | +25 | #[derive(IntoBytes)] + | ^^^^^^^^^ +note: required by a bound in `is_into_bytes_1` + --> tests/ui-msrv/msrv_specific.rs:34:23 + | +34 | fn is_into_bytes_1<T: IntoBytes>() { + | ^^^^^^^^^ required by this bound in `is_into_bytes_1` + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy-derive/tests/ui-msrv/struct.rs b/vendor/zerocopy-derive/tests/ui-msrv/struct.rs new file mode 100644 index 00000000..54a26b29 --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-msrv/struct.rs @@ -0,0 +1,254 @@ +// Copyright 2019 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +#[macro_use] +extern crate zerocopy; + +#[path = "../include.rs"] +mod util; + +use zerocopy::{IntoBytes, KnownLayout}; + +use self::util::util::AU16; + +fn main() {} + +// +// KnownLayout errors +// + +struct NotKnownLayout; + +struct NotKnownLayoutDst([u8]); + +// | `repr(C)`? | generic? | `KnownLayout`? | `Sized`? | Type Name | +// | N | N | N | N | KL00 | +#[derive(KnownLayout)] +struct KL00(u8, NotKnownLayoutDst); + +// | `repr(C)`? | generic? | `KnownLayout`? | `Sized`? | Type Name | +// | N | N | Y | N | KL02 | +#[derive(KnownLayout)] +struct KL02(u8, [u8]); + +// | `repr(C)`? | generic? | `KnownLayout`? | `Sized`? | Type Name | +// | Y | N | N | N | KL08 | +#[derive(KnownLayout)] +#[repr(C)] +struct KL08(u8, NotKnownLayoutDst); + +// | `repr(C)`? | generic? | `KnownLayout`? | `Sized`? 
| Type Name | +// | Y | N | N | Y | KL09 | +#[derive(KnownLayout)] +#[repr(C)] +struct KL09(NotKnownLayout, NotKnownLayout); + +// +// Immutable errors +// + +#[derive(Immutable)] +struct Immutable1 { + a: core::cell::UnsafeCell<()>, +} + +#[derive(Immutable)] +struct Immutable2 { + a: [core::cell::UnsafeCell<u8>; 0], +} + +// +// TryFromBytes errors +// + +#[derive(TryFromBytes)] +#[repr(packed)] +struct TryFromBytesPacked { + foo: AU16, +} + +#[derive(TryFromBytes)] +#[repr(packed(1))] +struct TryFromBytesPackedN { + foo: AU16, +} + +#[derive(TryFromBytes)] +#[repr(C, packed)] +struct TryFromBytesCPacked { + foo: AU16, +} + +#[derive(TryFromBytes)] +#[repr(C, packed(1))] +struct TryFromBytesCPackedN { + foo: AU16, +} + +// +// IntoBytes errors +// + +// Since `IntoBytes1` has at least one generic parameter, an `IntoBytes` impl is +// emitted in which each field type is given an `Unaligned` bound. Since `foo`'s +// type doesn't implement `Unaligned`, this should fail. +#[derive(IntoBytes)] +#[repr(C)] +struct IntoBytes1<T> { + foo: AU16, + bar: T, +} + +#[derive(IntoBytes)] +#[repr(C)] +struct IntoBytes2 { + foo: u8, + bar: AU16, +} + +#[derive(IntoBytes)] +#[repr(C, packed(2))] +struct IntoBytes3 { + foo: u8, + // We'd prefer to use AU64 here, but you can't use aligned types in + // packed structs. + bar: u64, +} + +type SliceU8 = [u8]; + +// Padding between `u8` and `SliceU8`. `SliceU8` doesn't syntactically look like +// a slice, so this case is handled by our `Sized` support. +// +// NOTE(#1708): This exists to ensure that our error messages are good when a +// field is unsized. +#[derive(IntoBytes)] +#[repr(C)] +struct IntoBytes4 { + a: u8, + b: SliceU8, +} + +// Padding between `u8` and `[u16]`. `[u16]` is syntactically identifiable as a +// slice, so this case is handled by our `repr(C)` slice DST support. +#[derive(IntoBytes)] +#[repr(C)] +struct IntoBytes5 { + a: u8, + b: [u16], +} + +// Trailing padding after `[u8]`. `[u8]` is syntactically identifiable as a +// slice, so this case is handled by our `repr(C)` slice DST support. +#[derive(IntoBytes)] +#[repr(C)] +struct IntoBytes6 { + a: u16, + b: [u8], +} + +// Padding between `u8` and `u16` and also trailing padding after `[u8]`. `[u8]` +// is syntactically identifiable as a slice, so this case is handled by our +// `repr(C)` slice DST support. +#[derive(IntoBytes)] +#[repr(C)] +struct IntoBytes7 { + a: u8, + b: u16, + c: [u8], +} + +#[derive(IntoBytes)] +#[repr(C, C)] // zerocopy-derive conservatively treats these as conflicting reprs +struct IntoBytes8 { + a: u8, +} + +#[derive(IntoBytes)] +struct IntoBytes9<T> { + t: T, +} + +#[derive(IntoBytes)] +#[repr(packed(2))] +struct IntoBytes10<T> { + t: T, +} + +// `repr(C, packed(2))` is not equivalent to `repr(C, packed)`. +#[derive(IntoBytes)] +#[repr(C, packed(2))] +struct IntoBytes11<T> { + t0: T, + // Add a second field to avoid triggering the "repr(C) struct with one + // field" special case. + t1: T, +} + +fn is_into_bytes_11<T: IntoBytes>() { + if false { + is_into_bytes_11::<IntoBytes11<AU16>>(); + } +} + +// `repr(C, align(2))` is not sufficient to guarantee the layout of this type. 
+#[derive(IntoBytes)] +#[repr(C, align(2))] +struct IntoBytes12<T> { + t: T, +} + +// +// Unaligned errors +// + +#[derive(Unaligned)] +#[repr(C, align(2))] +struct Unaligned1; + +#[derive(Unaligned)] +#[repr(transparent, align(2))] +struct Unaligned2 { + foo: u8, +} + +#[derive(Unaligned)] +#[repr(packed, align(2))] +struct Unaligned3; + +#[derive(Unaligned)] +#[repr(align(1), align(2))] +struct Unaligned4; + +#[derive(Unaligned)] +#[repr(align(2), align(4))] +struct Unaligned5; + +#[derive(Unaligned)] +struct Unaligned6; + +#[derive(Unaligned)] +#[repr(packed(2))] +struct Unaligned7; + +// Test the error message emitted when conflicting reprs appear on different +// lines. On the nightly compiler, this emits a "joint span" that spans both +// problematic repr token trees and everything in between. +#[derive(Copy, Clone)] +#[repr(packed(2), C)] +#[derive(Unaligned)] +#[repr(C, packed(2))] +struct WeirdReprSpan; + +#[derive(SplitAt)] +#[repr(C)] +struct SplitAtNotKnownLayout([u8]); + +#[derive(SplitAt, KnownLayout)] +#[repr(C)] +struct SplitAtSized(u8); diff --git a/vendor/zerocopy-derive/tests/ui-msrv/struct.stderr b/vendor/zerocopy-derive/tests/ui-msrv/struct.stderr new file mode 100644 index 00000000..efb5e058 --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-msrv/struct.stderr @@ -0,0 +1,281 @@ +error: this conflicts with another representation hint + --> tests/ui-msrv/struct.rs:167:11 + | +167 | #[repr(C, C)] // zerocopy-derive conservatively treats these as conflicting reprs + | ^ + +error: must have a non-align #[repr(...)] attribute in order to guarantee this type's memory layout + --> tests/ui-msrv/struct.rs:172:10 + | +172 | #[derive(IntoBytes)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have a non-align #[repr(...)] attribute in order to guarantee this type's memory layout + --> tests/ui-msrv/struct.rs:177:10 + | +177 | #[derive(IntoBytes)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have a non-align #[repr(...)] attribute in order to guarantee this type's memory layout + --> tests/ui-msrv/struct.rs:200:10 + | +200 | #[derive(IntoBytes)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: cannot derive `Unaligned` on type with alignment greater than 1 + --> tests/ui-msrv/struct.rs:211:11 + | +211 | #[repr(C, align(2))] + | ^^^^^ + +error: this conflicts with another representation hint + --> tests/ui-msrv/struct.rs:215:8 + | +215 | #[repr(transparent, align(2))] + | ^^^^^^^^^^^ + +error: this conflicts with another representation hint + --> tests/ui-msrv/struct.rs:221:16 + | +221 | #[repr(packed, align(2))] + | ^^^^^ + +error: this conflicts with another representation hint + --> tests/ui-msrv/struct.rs:225:18 + | +225 | #[repr(align(1), align(2))] + | ^^^^^ + +error: this conflicts with another representation hint + --> tests/ui-msrv/struct.rs:229:18 + | +229 | #[repr(align(2), align(4))] + | ^^^^^ + +error: must have #[repr(C)], #[repr(transparent)], or #[repr(packed)] attribute in order to guarantee this type's alignment + --> tests/ui-msrv/struct.rs:232:10 + | +232 | #[derive(Unaligned)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: 
must have #[repr(C)], #[repr(transparent)], or #[repr(packed)] attribute in order to guarantee this type's alignment + --> tests/ui-msrv/struct.rs:235:10 + | +235 | #[derive(Unaligned)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: this conflicts with another representation hint + --> tests/ui-msrv/struct.rs:245:8 + | +245 | #[repr(C, packed(2))] + | ^ + +error[E0692]: transparent struct cannot have other repr hints + --> tests/ui-msrv/struct.rs:215:8 + | +215 | #[repr(transparent, align(2))] + | ^^^^^^^^^^^ ^^^^^^^^ + +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> tests/ui-msrv/struct.rs:31:10 + | +31 | #[derive(KnownLayout)] + | ^^^^^^^^^^^ doesn't have a size known at compile-time + | + = help: within `KL00`, the trait `Sized` is not implemented for `[u8]` +note: required because it appears within the type `KL00` + --> tests/ui-msrv/struct.rs:32:8 + | +32 | struct KL00(u8, NotKnownLayoutDst); + | ^^^^ + = help: see issue #48214 + = note: this error originates in the derive macro `KnownLayout` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> tests/ui-msrv/struct.rs:36:10 + | +36 | #[derive(KnownLayout)] + | ^^^^^^^^^^^ doesn't have a size known at compile-time + | + = help: within `KL02`, the trait `Sized` is not implemented for `[u8]` +note: required because it appears within the type `KL02` + --> tests/ui-msrv/struct.rs:37:8 + | +37 | struct KL02(u8, [u8]); + | ^^^^ + = help: see issue #48214 + = note: this error originates in the derive macro `KnownLayout` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotKnownLayoutDst: zerocopy::KnownLayout` is not satisfied + --> tests/ui-msrv/struct.rs:41:10 + | +41 | #[derive(KnownLayout)] + | ^^^^^^^^^^^ the trait `zerocopy::KnownLayout` is not implemented for `NotKnownLayoutDst` + | + = help: see issue #48214 + = note: this error originates in the derive macro `KnownLayout` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotKnownLayout: zerocopy::KnownLayout` is not satisfied + --> tests/ui-msrv/struct.rs:47:10 + | +47 | #[derive(KnownLayout)] + | ^^^^^^^^^^^ the trait `zerocopy::KnownLayout` is not implemented for `NotKnownLayout` + | + = help: see issue #48214 + = note: this error originates in the derive macro `KnownLayout` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `UnsafeCell<()>: zerocopy::Immutable` is not satisfied + --> tests/ui-msrv/struct.rs:55:10 + | +55 | #[derive(Immutable)] + | ^^^^^^^^^ the trait `zerocopy::Immutable` is not implemented for `UnsafeCell<()>` + | + = help: see issue #48214 + = note: this error originates in the derive macro `Immutable` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `UnsafeCell<u8>: zerocopy::Immutable` is not satisfied + --> tests/ui-msrv/struct.rs:60:10 + | +60 | #[derive(Immutable)] + | ^^^^^^^^^ the trait `zerocopy::Immutable` is not implemented for `UnsafeCell<u8>` + | + = note: required because of the requirements on the impl of `zerocopy::Immutable` for `[UnsafeCell<u8>; 0]` + = help: see issue #48214 + = note: this error originates in the derive macro `Immutable` (in Nightly builds, run with -Z macro-backtrace for more info) + 
+error[E0277]: the trait bound `AU16: zerocopy::Unaligned` is not satisfied + --> tests/ui-msrv/struct.rs:100:10 + | +100 | #[derive(IntoBytes)] + | ^^^^^^^^^ the trait `zerocopy::Unaligned` is not implemented for `AU16` + | + = help: see issue #48214 + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `(): PaddingFree<IntoBytes2, 1_usize>` is not satisfied + --> tests/ui-msrv/struct.rs:107:10 + | +107 | #[derive(IntoBytes)] + | ^^^^^^^^^ the trait `PaddingFree<IntoBytes2, 1_usize>` is not implemented for `()` + | + = help: the following implementations were found: + <() as PaddingFree<T, 0_usize>> + = help: see issue #48214 + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `(): PaddingFree<IntoBytes3, 1_usize>` is not satisfied + --> tests/ui-msrv/struct.rs:114:10 + | +114 | #[derive(IntoBytes)] + | ^^^^^^^^^ the trait `PaddingFree<IntoBytes3, 1_usize>` is not implemented for `()` + | + = help: the following implementations were found: + <() as PaddingFree<T, 0_usize>> + = help: see issue #48214 + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> tests/ui-msrv/struct.rs:130:10 + | +130 | #[derive(IntoBytes)] + | ^^^^^^^^^ doesn't have a size known at compile-time + | + = help: within `IntoBytes4`, the trait `Sized` is not implemented for `[u8]` +note: required because it appears within the type `IntoBytes4` + --> tests/ui-msrv/struct.rs:132:8 + | +132 | struct IntoBytes4 { + | ^^^^^^^^^^ +note: required by a bound in `std::mem::size_of` + --> $RUST/core/src/mem/mod.rs + | + | pub const fn size_of<T>() -> usize { + | ^ required by this bound in `std::mem::size_of` + = note: this error originates in the macro `::zerocopy::struct_padding` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> tests/ui-msrv/struct.rs:134:8 + | +134 | b: SliceU8, + | ^^^^^^^ doesn't have a size known at compile-time + | + = help: the trait `Sized` is not implemented for `[u8]` +note: required by a bound in `std::mem::size_of` + --> $RUST/core/src/mem/mod.rs + | + | pub const fn size_of<T>() -> usize { + | ^ required by this bound in `std::mem::size_of` + +error[E0277]: the trait bound `(): DynamicPaddingFree<IntoBytes5, true>` is not satisfied + --> tests/ui-msrv/struct.rs:139:10 + | +139 | #[derive(IntoBytes)] + | ^^^^^^^^^ the trait `DynamicPaddingFree<IntoBytes5, true>` is not implemented for `()` + | + = help: the following implementations were found: + <() as DynamicPaddingFree<T, false>> + = help: see issue #48214 + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `(): DynamicPaddingFree<IntoBytes6, true>` is not satisfied + --> tests/ui-msrv/struct.rs:148:10 + | +148 | #[derive(IntoBytes)] + | ^^^^^^^^^ the trait `DynamicPaddingFree<IntoBytes6, true>` is not implemented for `()` + | + = help: the following implementations were found: + <() as DynamicPaddingFree<T, false>> + = help: see issue #48214 + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more 
info) + +error[E0277]: the trait bound `(): DynamicPaddingFree<IntoBytes7, true>` is not satisfied + --> tests/ui-msrv/struct.rs:158:10 + | +158 | #[derive(IntoBytes)] + | ^^^^^^^^^ the trait `DynamicPaddingFree<IntoBytes7, true>` is not implemented for `()` + | + = help: the following implementations were found: + <() as DynamicPaddingFree<T, false>> + = help: see issue #48214 + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `SplitAtNotKnownLayout: zerocopy::KnownLayout` is not satisfied + --> tests/ui-msrv/struct.rs:248:10 + | +248 | #[derive(SplitAt)] + | ^^^^^^^ the trait `zerocopy::KnownLayout` is not implemented for `SplitAtNotKnownLayout` + | +note: required by a bound in `SplitAt` + --> $WORKSPACE/src/split_at.rs + | + | pub unsafe trait SplitAt: KnownLayout<PointerMetadata = usize> { + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `SplitAt` + = note: this error originates in the derive macro `SplitAt` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `u8: SplitAt` is not satisfied + --> tests/ui-msrv/struct.rs:252:10 + | +252 | #[derive(SplitAt, KnownLayout)] + | ^^^^^^^ the trait `SplitAt` is not implemented for `u8` + | + = help: see issue #48214 + = note: this error originates in the derive macro `SplitAt` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0271]: type mismatch resolving `<u8 as zerocopy::KnownLayout>::PointerMetadata == usize` + --> tests/ui-msrv/struct.rs:252:10 + | +252 | #[derive(SplitAt, KnownLayout)] + | ^^^^^^^ expected `()`, found `usize` + | + = help: see issue #48214 + = note: this error originates in the derive macro `SplitAt` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy-derive/tests/ui-msrv/union.rs b/vendor/zerocopy-derive/tests/ui-msrv/union.rs new file mode 100644 index 00000000..f6eeee2a --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-msrv/union.rs @@ -0,0 +1,109 @@ +// Copyright 2019 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +#[macro_use] +extern crate zerocopy; + +#[path = "../include.rs"] +mod util; + +use std::mem::ManuallyDrop; + +use self::util::util::AU16; + +fn main() {} + +// +// Immutable errors +// + +#[derive(Immutable)] +union Immutable1 { + a: ManuallyDrop<core::cell::UnsafeCell<()>>, +} + +// +// IntoBytes errors +// + +#[derive(IntoBytes)] +#[repr(C)] +union IntoBytes1<T> { + foo: ManuallyDrop<T>, +} + +#[derive(IntoBytes)] +#[repr(C)] +union IntoBytes2 { + foo: u8, + bar: [u8; 2], +} + +// Need a `repr` attribute +#[derive(IntoBytes)] +union IntoBytes3 { + foo: u8, +} + +// `repr(packed(2))` isn't equivalent to `repr(packed)` +#[derive(IntoBytes)] +#[repr(packed(2))] +union IntoBytes4 { + foo: u8, +} + +// +// Unaligned errors +// + +#[derive(Unaligned)] +#[repr(C, align(2))] +union Unaligned1 { + foo: i16, + bar: AU16, +} + +// Transparent unions are unstable; see issue #60405 +// <https://github.com/rust-lang/rust/issues/60405> for more information. 
+ +// #[derive(Unaligned)] +// #[repr(transparent, align(2))] +// union Unaligned2 { +// foo: u8, +// } + +#[derive(Unaligned)] +#[repr(packed, align(2))] +union Unaligned3 { + foo: u8, +} + +#[derive(Unaligned)] +#[repr(align(1), align(2))] +struct Unaligned4 { + foo: u8, +} + +#[derive(Unaligned)] +#[repr(align(2), align(4))] +struct Unaligned5 { + foo: u8, +} + +#[derive(Unaligned)] +union Unaligned6 { + foo: i16, + bar: AU16, +} + +#[derive(Unaligned)] +#[repr(packed(2))] +union Unaligned7 { + foo: i16, + bar: AU16, +} diff --git a/vendor/zerocopy-derive/tests/ui-msrv/union.stderr b/vendor/zerocopy-derive/tests/ui-msrv/union.stderr new file mode 100644 index 00000000..19585758 --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-msrv/union.stderr @@ -0,0 +1,84 @@ +error: unsupported on types with type parameters + --> tests/ui-msrv/union.rs:34:10 + | +34 | #[derive(IntoBytes)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must be #[repr(C)], #[repr(packed)], or #[repr(transparent)] + --> tests/ui-msrv/union.rs:48:10 + | +48 | #[derive(IntoBytes)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must be #[repr(C)], #[repr(packed)], or #[repr(transparent)] + --> tests/ui-msrv/union.rs:54:10 + | +54 | #[derive(IntoBytes)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: cannot derive `Unaligned` on type with alignment greater than 1 + --> tests/ui-msrv/union.rs:65:11 + | +65 | #[repr(C, align(2))] + | ^^^^^ + +error: this conflicts with another representation hint + --> tests/ui-msrv/union.rs:81:16 + | +81 | #[repr(packed, align(2))] + | ^^^^^ + +error: this conflicts with another representation hint + --> tests/ui-msrv/union.rs:87:18 + | +87 | #[repr(align(1), align(2))] + | ^^^^^ + +error: this conflicts with another representation hint + --> tests/ui-msrv/union.rs:93:18 + | +93 | #[repr(align(2), align(4))] + | ^^^^^ + +error: must have #[repr(C)], #[repr(transparent)], or #[repr(packed)] attribute in order to guarantee this type's alignment + --> tests/ui-msrv/union.rs:98:10 + | +98 | #[derive(Unaligned)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(C)], #[repr(transparent)], or #[repr(packed)] attribute in order to guarantee this type's alignment + --> tests/ui-msrv/union.rs:104:10 + | +104 | #[derive(Unaligned)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `UnsafeCell<()>: zerocopy::Immutable` is not satisfied + --> tests/ui-msrv/union.rs:25:10 + | +25 | #[derive(Immutable)] + | ^^^^^^^^^ the trait `zerocopy::Immutable` is not implemented for `UnsafeCell<()>` + | + = note: required because of the requirements on the impl of `zerocopy::Immutable` for `ManuallyDrop<UnsafeCell<()>>` + = help: see issue #48214 + = note: this error originates in the derive macro `Immutable` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `(): PaddingFree<IntoBytes2, 1_usize>` is not satisfied + --> tests/ui-msrv/union.rs:40:10 + | +40 | #[derive(IntoBytes)] + | ^^^^^^^^^ the trait 
`PaddingFree<IntoBytes2, 1_usize>` is not implemented for `()` + | + = help: the following implementations were found: + <() as PaddingFree<T, 0_usize>> + = help: see issue #48214 + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy-derive/tests/ui-msrv/union_into_bytes_cfg/union_into_bytes_cfg.rs b/vendor/zerocopy-derive/tests/ui-msrv/union_into_bytes_cfg/union_into_bytes_cfg.rs new file mode 100644 index 00000000..280f05d4 --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-msrv/union_into_bytes_cfg/union_into_bytes_cfg.rs @@ -0,0 +1,26 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +//! See: https://github.com/google/zerocopy/issues/553 +//! zerocopy must still allow derives of deprecated types. +//! This test has a hand-written impl of a deprecated type, and should result in a compilation +//! error. If zerocopy does not tack an allow(deprecated) annotation onto its impls, then this +//! test will fail because more than one compile error will be generated. +#![deny(deprecated)] + +extern crate zerocopy; + +use zerocopy::IntoBytes; + +#[derive(IntoBytes)] +#[repr(C)] +union Foo { + a: u8, +} + +fn main() {} diff --git a/vendor/zerocopy-derive/tests/ui-msrv/union_into_bytes_cfg/union_into_bytes_cfg.stderr b/vendor/zerocopy-derive/tests/ui-msrv/union_into_bytes_cfg/union_into_bytes_cfg.stderr new file mode 100644 index 00000000..d25c238f --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-msrv/union_into_bytes_cfg/union_into_bytes_cfg.stderr @@ -0,0 +1,8 @@ +error: requires --cfg zerocopy_derive_union_into_bytes; +please let us know you use this feature: https://github.com/google/zerocopy/discussions/1802 + --> tests/ui-msrv/union_into_bytes_cfg/union_into_bytes_cfg.rs:20:10 + | +20 | #[derive(IntoBytes)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy-derive/tests/ui-nightly/absence_of_deprecated_warning.rs b/vendor/zerocopy-derive/tests/ui-nightly/absence_of_deprecated_warning.rs new file mode 100644 index 00000000..ce531a4b --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-nightly/absence_of_deprecated_warning.rs @@ -0,0 +1,33 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +//! See: https://github.com/google/zerocopy/issues/553 +//! zerocopy must still allow derives of deprecated types. +//! This test has a hand-written impl of a deprecated type, and should result in a compilation +//! error. If zerocopy does not tack an allow(deprecated) annotation onto its impls, then this +//! test will fail because more than one compile error will be generated. 
+#![deny(deprecated)] + +extern crate zerocopy; + +use zerocopy::IntoBytes; + +#[deprecated = "Do not use"] +#[derive(IntoBytes)] +#[repr(C)] +struct OldHeader { + field_a: usize, + collection: [u8; 8], +} + +trait T {} + +// Intentionally trigger a deprecation error +impl T for OldHeader {} + +fn main() {} diff --git a/vendor/zerocopy-derive/tests/ui-nightly/absence_of_deprecated_warning.stderr b/vendor/zerocopy-derive/tests/ui-nightly/absence_of_deprecated_warning.stderr new file mode 100644 index 00000000..49fa697e --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-nightly/absence_of_deprecated_warning.stderr @@ -0,0 +1,11 @@ +error: use of deprecated struct `OldHeader`: Do not use + --> tests/ui-nightly/absence_of_deprecated_warning.rs:31:12 + | +31 | impl T for OldHeader {} + | ^^^^^^^^^ + | +note: the lint level is defined here + --> tests/ui-nightly/absence_of_deprecated_warning.rs:14:9 + | +14 | #![deny(deprecated)] + | ^^^^^^^^^^ diff --git a/vendor/zerocopy-derive/tests/ui-nightly/derive_transparent.rs b/vendor/zerocopy-derive/tests/ui-nightly/derive_transparent.rs new file mode 100644 index 00000000..a60c89e2 --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-nightly/derive_transparent.rs @@ -0,0 +1,38 @@ +// Copyright 2019 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +#[path = "../include.rs"] +mod util; + +use core::marker::PhantomData; + +use zerocopy::{FromBytes, FromZeros, IntoBytes, TryFromBytes, Unaligned}; + +use self::util::util::NotZerocopy; + +fn main() {} + +// Test generic transparent structs + +#[derive(IntoBytes, FromBytes, Unaligned)] +#[repr(transparent)] +struct TransparentStruct<T> { + inner: T, + _phantom: PhantomData<()>, +} + +// It should be legal to derive these traits on a transparent struct, but it +// must also ensure the traits are only implemented when the inner type +// implements them. 
+util_assert_impl_all!(TransparentStruct<NotZerocopy>: TryFromBytes); +util_assert_impl_all!(TransparentStruct<NotZerocopy>: FromZeros); +util_assert_impl_all!(TransparentStruct<NotZerocopy>: FromBytes); +util_assert_impl_all!(TransparentStruct<NotZerocopy>: IntoBytes); +util_assert_impl_all!(TransparentStruct<NotZerocopy>: Unaligned); diff --git a/vendor/zerocopy-derive/tests/ui-nightly/derive_transparent.stderr b/vendor/zerocopy-derive/tests/ui-nightly/derive_transparent.stderr new file mode 100644 index 00000000..12493b54 --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-nightly/derive_transparent.stderr @@ -0,0 +1,169 @@ +error[E0277]: the trait bound `NotZerocopy: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-nightly/derive_transparent.rs:34:23 + | +34 | util_assert_impl_all!(TransparentStruct<NotZerocopy>: TryFromBytes); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::TryFromBytes` is not implemented for `NotZerocopy` + --> tests/ui-nightly/../include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(TryFromBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::TryFromBytes`: + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + and $N others +note: required for `TransparentStruct<NotZerocopy>` to implement `zerocopy::TryFromBytes` + --> tests/ui-nightly/derive_transparent.rs:24:21 + | +24 | #[derive(IntoBytes, FromBytes, Unaligned)] + | ^^^^^^^^^ unsatisfied trait bound introduced in this `derive` macro +note: required by a bound in `_::{closure#0}::_::{closure#0}::assert_impl_all` + --> tests/ui-nightly/derive_transparent.rs:34:1 + | +34 | util_assert_impl_all!(TransparentStruct<NotZerocopy>: TryFromBytes); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `assert_impl_all` + = note: this error originates in the derive macro `FromBytes` which comes from the expansion of the macro `util_assert_impl_all` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy: FromZeros` is not satisfied + --> tests/ui-nightly/derive_transparent.rs:35:23 + | +35 | util_assert_impl_all!(TransparentStruct<NotZerocopy>: FromZeros); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `FromZeros` is not implemented for `NotZerocopy` + --> tests/ui-nightly/../include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(FromZeros)]` to `NotZerocopy` + = help: the following other types implement trait `FromZeros`: + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + and $N others +note: required for `TransparentStruct<NotZerocopy>` to implement `FromZeros` + --> tests/ui-nightly/derive_transparent.rs:24:21 + | +24 | #[derive(IntoBytes, FromBytes, Unaligned)] + | ^^^^^^^^^ unsatisfied trait bound introduced in this `derive` macro +note: required by a bound in `_::{closure#0}::_::{closure#0}::assert_impl_all` + --> tests/ui-nightly/derive_transparent.rs:35:1 + | +35 | util_assert_impl_all!(TransparentStruct<NotZerocopy>: FromZeros); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `assert_impl_all` + = note: this error originates in the derive macro `FromBytes` which comes from the expansion of the macro `util_assert_impl_all` (in Nightly builds, run with 
-Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy: zerocopy::FromBytes` is not satisfied + --> tests/ui-nightly/derive_transparent.rs:36:23 + | +36 | util_assert_impl_all!(TransparentStruct<NotZerocopy>: FromBytes); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::FromBytes` is not implemented for `NotZerocopy` + --> tests/ui-nightly/../include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(FromBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::FromBytes`: + () + AU16 + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + AtomicU16 + and $N others +note: required for `TransparentStruct<NotZerocopy>` to implement `zerocopy::FromBytes` + --> tests/ui-nightly/derive_transparent.rs:24:21 + | +24 | #[derive(IntoBytes, FromBytes, Unaligned)] + | ^^^^^^^^^ unsatisfied trait bound introduced in this `derive` macro +note: required by a bound in `_::{closure#0}::_::{closure#0}::assert_impl_all` + --> tests/ui-nightly/derive_transparent.rs:36:1 + | +36 | util_assert_impl_all!(TransparentStruct<NotZerocopy>: FromBytes); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `assert_impl_all` + = note: this error originates in the derive macro `FromBytes` which comes from the expansion of the macro `util_assert_impl_all` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy: zerocopy::IntoBytes` is not satisfied + --> tests/ui-nightly/derive_transparent.rs:37:23 + | +37 | util_assert_impl_all!(TransparentStruct<NotZerocopy>: IntoBytes); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::IntoBytes` is not implemented for `NotZerocopy` + --> tests/ui-nightly/../include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(IntoBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::IntoBytes`: + () + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + and $N others +note: required for `TransparentStruct<NotZerocopy>` to implement `zerocopy::IntoBytes` + --> tests/ui-nightly/derive_transparent.rs:24:10 + | +24 | #[derive(IntoBytes, FromBytes, Unaligned)] + | ^^^^^^^^^ unsatisfied trait bound introduced in this `derive` macro +note: required by a bound in `_::{closure#0}::_::{closure#0}::assert_impl_all` + --> tests/ui-nightly/derive_transparent.rs:37:1 + | +37 | util_assert_impl_all!(TransparentStruct<NotZerocopy>: IntoBytes); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `assert_impl_all` + = note: this error originates in the derive macro `IntoBytes` which comes from the expansion of the macro `util_assert_impl_all` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy: zerocopy::Unaligned` is not satisfied + --> tests/ui-nightly/derive_transparent.rs:38:23 + | +38 | util_assert_impl_all!(TransparentStruct<NotZerocopy>: Unaligned); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::Unaligned` is not implemented for `NotZerocopy` + --> tests/ui-nightly/../include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(Unaligned)]` to `NotZerocopy` + = 
help: the following other types implement trait `zerocopy::Unaligned`: + () + AtomicBool + AtomicI8 + AtomicU8 + Cell<T> + F32<O> + F64<O> + I128<O> + and $N others +note: required for `TransparentStruct<NotZerocopy>` to implement `zerocopy::Unaligned` + --> tests/ui-nightly/derive_transparent.rs:24:32 + | +24 | #[derive(IntoBytes, FromBytes, Unaligned)] + | ^^^^^^^^^ unsatisfied trait bound introduced in this `derive` macro +note: required by a bound in `_::{closure#0}::_::{closure#0}::assert_impl_all` + --> tests/ui-nightly/derive_transparent.rs:38:1 + | +38 | util_assert_impl_all!(TransparentStruct<NotZerocopy>: Unaligned); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `assert_impl_all` + = note: this error originates in the derive macro `Unaligned` which comes from the expansion of the macro `util_assert_impl_all` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy-derive/tests/ui-nightly/enum.rs b/vendor/zerocopy-derive/tests/ui-nightly/enum.rs new file mode 100644 index 00000000..82b43d80 --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-nightly/enum.rs @@ -0,0 +1,577 @@ +// Copyright 2019 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +#[macro_use] +extern crate zerocopy; + +fn main() {} + +// +// Generic errors +// + +#[derive(FromBytes)] +#[repr("foo")] +enum Generic1 { + A, +} + +#[derive(FromBytes)] +#[repr(foo)] +enum Generic2 { + A, +} + +#[derive(FromBytes)] +#[repr(transparent)] +enum Generic3 { + A, +} + +#[derive(FromBytes)] +#[repr(u8, u16)] +enum Generic4 { + A, +} + +#[derive(FromBytes)] +enum Generic5 { + A, +} + +// +// Immutable errors +// + +#[derive(Immutable)] +enum Immutable1 { + A(core::cell::UnsafeCell<()>), +} + +#[derive(Immutable)] +enum Never {} + +#[derive(Immutable)] +enum Immutable2 { + Uninhabited(Never, core::cell::UnsafeCell<u8>), + Inhabited(u8), +} + +// +// TryFromBytes errors +// + +#[derive(TryFromBytes)] +enum TryFromBytes1 { + A, +} + +#[derive(TryFromBytes)] +enum TryFromBytes2 { + A, + B(u8), +} + +struct NotTryFromBytes; + +#[derive(TryFromBytes)] +#[repr(u8)] +enum TryFromBytes3 { + A(NotTryFromBytes), +} + +// +// FromZeros errors +// + +#[derive(FromZeros)] +enum FromZeros1 { + A(u8), +} + +#[derive(FromZeros)] +enum FromZeros2 { + A, + B(u8), +} + +#[derive(FromZeros)] +enum FromZeros3 { + A = 1, + B, +} + +#[derive(FromZeros)] +#[repr(u8)] +enum FromZeros4 { + A = 1, + B = 2, +} + +const NEGATIVE_ONE: i8 = -1; + +#[derive(FromZeros)] +#[repr(i8)] +enum FromZeros5 { + A = NEGATIVE_ONE, + B, +} + +struct NotFromZeros; + +#[derive(FromZeros)] +#[repr(u8)] +enum FromZeros6 { + A(NotFromZeros), +} + +#[derive(FromZeros)] +#[repr(u8)] +enum FromZeros7 { + A = 1, + B(NotFromZeros), +} + +// +// FromBytes errors +// + +#[derive(FromBytes)] +enum FromBytes1 { + A, +} + +#[derive(FromBytes)] +#[repr(C)] +enum FromBytes2 { + A, +} + +#[derive(FromBytes)] +#[repr(usize)] +enum FromBytes3 { + A, +} + +#[derive(FromBytes)] +#[repr(isize)] +enum FromBytes4 { + A, +} + +#[derive(FromBytes)] +#[repr(u32)] +enum FromBytes5 { + A, +} + +#[derive(FromBytes)] +#[repr(i32)] +enum FromBytes6 { + A, +} + +#[derive(FromBytes)] +#[repr(u64)] +enum FromBytes7 { 
+ A, +} + +#[derive(FromBytes)] +#[repr(i64)] +enum FromBytes8 { + A, +} + +#[derive(FromBytes)] +#[repr(u8)] +enum FooU8 { + Variant0, + Variant1, + Variant2, + Variant3, + Variant4, + Variant5, + Variant6, + Variant7, + Variant8, + Variant9, + Variant10, + Variant11, + Variant12, + Variant13, + Variant14, + Variant15, + Variant16, + Variant17, + Variant18, + Variant19, + Variant20, + Variant21, + Variant22, + Variant23, + Variant24, + Variant25, + Variant26, + Variant27, + Variant28, + Variant29, + Variant30, + Variant31, + Variant32, + Variant33, + Variant34, + Variant35, + Variant36, + Variant37, + Variant38, + Variant39, + Variant40, + Variant41, + Variant42, + Variant43, + Variant44, + Variant45, + Variant46, + Variant47, + Variant48, + Variant49, + Variant50, + Variant51, + Variant52, + Variant53, + Variant54, + Variant55, + Variant56, + Variant57, + Variant58, + Variant59, + Variant60, + Variant61, + Variant62, + Variant63, + Variant64, + Variant65, + Variant66, + Variant67, + Variant68, + Variant69, + Variant70, + Variant71, + Variant72, + Variant73, + Variant74, + Variant75, + Variant76, + Variant77, + Variant78, + Variant79, + Variant80, + Variant81, + Variant82, + Variant83, + Variant84, + Variant85, + Variant86, + Variant87, + Variant88, + Variant89, + Variant90, + Variant91, + Variant92, + Variant93, + Variant94, + Variant95, + Variant96, + Variant97, + Variant98, + Variant99, + Variant100, + Variant101, + Variant102, + Variant103, + Variant104, + Variant105, + Variant106, + Variant107, + Variant108, + Variant109, + Variant110, + Variant111, + Variant112, + Variant113, + Variant114, + Variant115, + Variant116, + Variant117, + Variant118, + Variant119, + Variant120, + Variant121, + Variant122, + Variant123, + Variant124, + Variant125, + Variant126, + Variant127, + Variant128, + Variant129, + Variant130, + Variant131, + Variant132, + Variant133, + Variant134, + Variant135, + Variant136, + Variant137, + Variant138, + Variant139, + Variant140, + Variant141, + Variant142, + Variant143, + Variant144, + Variant145, + Variant146, + Variant147, + Variant148, + Variant149, + Variant150, + Variant151, + Variant152, + Variant153, + Variant154, + Variant155, + Variant156, + Variant157, + Variant158, + Variant159, + Variant160, + Variant161, + Variant162, + Variant163, + Variant164, + Variant165, + Variant166, + Variant167, + Variant168, + Variant169, + Variant170, + Variant171, + Variant172, + Variant173, + Variant174, + Variant175, + Variant176, + Variant177, + Variant178, + Variant179, + Variant180, + Variant181, + Variant182, + Variant183, + Variant184, + Variant185, + Variant186, + Variant187, + Variant188, + Variant189, + Variant190, + Variant191, + Variant192, + Variant193, + Variant194, + Variant195, + Variant196, + Variant197, + Variant198, + Variant199, + Variant200, + Variant201, + Variant202, + Variant203, + Variant204, + Variant205, + Variant206, + Variant207, + Variant208, + Variant209, + Variant210, + Variant211, + Variant212, + Variant213, + Variant214, + Variant215, + Variant216, + Variant217, + Variant218, + Variant219, + Variant220, + Variant221, + Variant222, + Variant223, + Variant224, + Variant225, + Variant226, + Variant227, + Variant228, + Variant229, + Variant230, + Variant231, + Variant232, + Variant233, + Variant234, + Variant235, + Variant236, + Variant237, + Variant238, + Variant239, + Variant240, + Variant241, + Variant242, + Variant243, + Variant244, + Variant245, + Variant246, + Variant247, + Variant248, + Variant249, + Variant250, + Variant251, + 
Variant252, + Variant253, + Variant254, + Variant255(bool), +} + +// +// Unaligned errors +// + +#[derive(Unaligned)] +#[repr(C)] +enum Unaligned1 { + A, +} + +#[derive(Unaligned)] +#[repr(u16)] +enum Unaligned2 { + A, +} + +#[derive(Unaligned)] +#[repr(i16)] +enum Unaligned3 { + A, +} + +#[derive(Unaligned)] +#[repr(u32)] +enum Unaligned4 { + A, +} + +#[derive(Unaligned)] +#[repr(i32)] +enum Unaligned5 { + A, +} + +#[derive(Unaligned)] +#[repr(u64)] +enum Unaligned6 { + A, +} + +#[derive(Unaligned)] +#[repr(i64)] +enum Unaligned7 { + A, +} + +#[derive(Unaligned)] +#[repr(usize)] +enum Unaligned8 { + A, +} + +#[derive(Unaligned)] +#[repr(isize)] +enum Unaligned9 { + A, +} + +#[derive(Unaligned)] +#[repr(u8, align(2))] +enum Unaligned10 { + A, +} + +#[derive(Unaligned)] +#[repr(i8, align(2))] +enum Unaligned11 { + A, +} + +#[derive(Unaligned)] +#[repr(align(1), align(2))] +enum Unaligned12 { + A, +} + +#[derive(Unaligned)] +#[repr(align(2), align(4))] +enum Unaligned13 { + A, +} + +// +// IntoBytes errors +// + +#[derive(IntoBytes)] +#[repr(u8)] +enum IntoBytes1 { + A, + B(u8), +} + +#[derive(IntoBytes)] +#[repr(C, align(4))] +struct Align4IntoBytes(u32); + +#[derive(IntoBytes)] +#[repr(u8)] +enum IntoBytes2 { + A(Align4IntoBytes), +} + +#[derive(IntoBytes)] +#[repr(u32)] +enum IntoBytes3 { + A(u32), + B(u16), +} + +#[derive(IntoBytes)] +enum IntoBytes4 { + A(u32), + B(u16), +} + +#[derive(IntoBytes)] +enum IntoBytes5 { + A(u32), +} + +#[derive(IntoBytes)] +#[repr(u8)] +enum IntoBytes6<T> { + A(T), +} diff --git a/vendor/zerocopy-derive/tests/ui-nightly/enum.stderr b/vendor/zerocopy-derive/tests/ui-nightly/enum.stderr new file mode 100644 index 00000000..b8ec9a16 --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-nightly/enum.stderr @@ -0,0 +1,567 @@ +error: unrecognized representation hint + --> tests/ui-nightly/enum.rs:19:8 + | +19 | #[repr("foo")] + | ^^^^^ + +error: unrecognized representation hint + --> tests/ui-nightly/enum.rs:25:8 + | +25 | #[repr(foo)] + | ^^^ + +error: must have #[repr(C)] or #[repr(Int)] attribute in order to guarantee this type's memory layout + --> tests/ui-nightly/enum.rs:30:10 + | +30 | #[derive(FromBytes)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `FromBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: this conflicts with another representation hint + --> tests/ui-nightly/enum.rs:37:8 + | +37 | #[repr(u8, u16)] + | ^^^^^^^ + +error: must have #[repr(C)] or #[repr(Int)] attribute in order to guarantee this type's memory layout + --> tests/ui-nightly/enum.rs:42:10 + | +42 | #[derive(FromBytes)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `FromBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(C)] or #[repr(Int)] attribute in order to guarantee this type's memory layout + --> tests/ui-nightly/enum.rs:69:10 + | +69 | #[derive(TryFromBytes)] + | ^^^^^^^^^^^^ + | + = note: this error originates in the derive macro `TryFromBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(C)] or #[repr(Int)] attribute in order to guarantee this type's memory layout + --> tests/ui-nightly/enum.rs:74:10 + | +74 | #[derive(TryFromBytes)] + | ^^^^^^^^^^^^ + | + = note: this error originates in the derive macro `TryFromBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(C)] or #[repr(Int)] attribute in order to guarantee this type's memory layout + --> 
tests/ui-nightly/enum.rs:92:10 + | +92 | #[derive(FromZeros)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `FromZeros` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(C)] or #[repr(Int)] attribute in order to guarantee this type's memory layout + --> tests/ui-nightly/enum.rs:97:10 + | +97 | #[derive(FromZeros)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `FromZeros` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(C)] or #[repr(Int)] attribute in order to guarantee this type's memory layout + --> tests/ui-nightly/enum.rs:103:10 + | +103 | #[derive(FromZeros)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `FromZeros` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: FromZeros only supported on enums with a variant that has a discriminant of `0` + --> tests/ui-nightly/enum.rs:110:1 + | +110 | / #[repr(u8)] +111 | | enum FromZeros4 { +112 | | A = 1, +113 | | B = 2, +114 | | } + | |_^ + +error: FromZeros only supported on enums with a variant that has a discriminant of `0` + help: This enum has discriminants which are not literal integers. One of those may define or imply which variant has a discriminant of zero. Use a literal integer to define or imply the variant with a discriminant of zero. + --> tests/ui-nightly/enum.rs:119:1 + | +119 | / #[repr(i8)] +120 | | enum FromZeros5 { +121 | | A = NEGATIVE_ONE, +122 | | B, +123 | | } + | |_^ + +error: FromZeros only supported on enums with a variant that has a discriminant of `0` + --> tests/ui-nightly/enum.rs:134:1 + | +134 | / #[repr(u8)] +135 | | enum FromZeros7 { +136 | | A = 1, +137 | | B(NotFromZeros), +138 | | } + | |_^ + +error: must have #[repr(C)] or #[repr(Int)] attribute in order to guarantee this type's memory layout + --> tests/ui-nightly/enum.rs:144:10 + | +144 | #[derive(FromBytes)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `FromBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: `FromBytes` only supported on enums with `#[repr(...)]` attributes `u8`, `i8`, `u16`, or `i16` + --> tests/ui-nightly/enum.rs:150:8 + | +150 | #[repr(C)] + | ^ + +error: `FromBytes` only supported on enums with `#[repr(...)]` attributes `u8`, `i8`, `u16`, or `i16` + --> tests/ui-nightly/enum.rs:156:8 + | +156 | #[repr(usize)] + | ^^^^^ + +error: `FromBytes` only supported on enums with `#[repr(...)]` attributes `u8`, `i8`, `u16`, or `i16` + --> tests/ui-nightly/enum.rs:162:8 + | +162 | #[repr(isize)] + | ^^^^^ + +error: `FromBytes` only supported on enums with `#[repr(...)]` attributes `u8`, `i8`, `u16`, or `i16` + --> tests/ui-nightly/enum.rs:168:8 + | +168 | #[repr(u32)] + | ^^^ + +error: `FromBytes` only supported on enums with `#[repr(...)]` attributes `u8`, `i8`, `u16`, or `i16` + --> tests/ui-nightly/enum.rs:174:8 + | +174 | #[repr(i32)] + | ^^^ + +error: `FromBytes` only supported on enums with `#[repr(...)]` attributes `u8`, `i8`, `u16`, or `i16` + --> tests/ui-nightly/enum.rs:180:8 + | +180 | #[repr(u64)] + | ^^^ + +error: `FromBytes` only supported on enums with `#[repr(...)]` attributes `u8`, `i8`, `u16`, or `i16` + --> tests/ui-nightly/enum.rs:186:8 + | +186 | #[repr(i64)] + | ^^^ + +error: must have #[repr(u8)] or #[repr(i8)] attribute in order to guarantee this type's alignment + --> tests/ui-nightly/enum.rs:456:10 + | +456 | #[derive(Unaligned)] + | ^^^^^^^^^ + | + = note: this error originates in the derive 
macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(u8)] or #[repr(i8)] attribute in order to guarantee this type's alignment + --> tests/ui-nightly/enum.rs:462:10 + | +462 | #[derive(Unaligned)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(u8)] or #[repr(i8)] attribute in order to guarantee this type's alignment + --> tests/ui-nightly/enum.rs:468:10 + | +468 | #[derive(Unaligned)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(u8)] or #[repr(i8)] attribute in order to guarantee this type's alignment + --> tests/ui-nightly/enum.rs:474:10 + | +474 | #[derive(Unaligned)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(u8)] or #[repr(i8)] attribute in order to guarantee this type's alignment + --> tests/ui-nightly/enum.rs:480:10 + | +480 | #[derive(Unaligned)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(u8)] or #[repr(i8)] attribute in order to guarantee this type's alignment + --> tests/ui-nightly/enum.rs:486:10 + | +486 | #[derive(Unaligned)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(u8)] or #[repr(i8)] attribute in order to guarantee this type's alignment + --> tests/ui-nightly/enum.rs:492:10 + | +492 | #[derive(Unaligned)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(u8)] or #[repr(i8)] attribute in order to guarantee this type's alignment + --> tests/ui-nightly/enum.rs:498:10 + | +498 | #[derive(Unaligned)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(u8)] or #[repr(i8)] attribute in order to guarantee this type's alignment + --> tests/ui-nightly/enum.rs:504:10 + | +504 | #[derive(Unaligned)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: cannot derive `Unaligned` on type with alignment greater than 1 + --> tests/ui-nightly/enum.rs:511:12 + | +511 | #[repr(u8, align(2))] + | ^^^^^^^^ + +error: cannot derive `Unaligned` on type with alignment greater than 1 + --> tests/ui-nightly/enum.rs:517:12 + | +517 | #[repr(i8, align(2))] + | ^^^^^^^^ + +error: this conflicts with another representation hint + --> tests/ui-nightly/enum.rs:523:8 + | +523 | #[repr(align(1), align(2))] + | ^^^^^^^^^^^^^^^^^^ + +error: this conflicts with another representation hint + --> tests/ui-nightly/enum.rs:529:8 + | +529 | #[repr(align(2), align(4))] + | ^^^^^^^^^^^^^^^^^^ + +error: must have #[repr(C)] or #[repr(Int)] attribute in order to guarantee this type's memory layout + --> tests/ui-nightly/enum.rs:562:10 + | +562 | #[derive(IntoBytes)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more 
info) + +error: must have #[repr(C)] or #[repr(Int)] attribute in order to guarantee this type's memory layout + --> tests/ui-nightly/enum.rs:568:10 + | +568 | #[derive(IntoBytes)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: generic parameters may not be used in const operations + --> tests/ui-nightly/enum.rs:576:7 + | +576 | A(T), + | ^ cannot perform const operation using `T` + | + = note: type parameters may not be used in const expressions + = help: add `#![feature(generic_const_exprs)]` to allow generic const expressions + +error[E0565]: meta item in `repr` must be an identifier + --> tests/ui-nightly/enum.rs:19:1 + | +19 | #[repr("foo")] + | ^^^^^^^^^^^^^^ + +error[E0552]: unrecognized representation hint + --> tests/ui-nightly/enum.rs:25:8 + | +25 | #[repr(foo)] + | ^^^ + | + = help: valid reprs are `Rust` (default), `C`, `align`, `packed`, `transparent`, `simd`, `i8`, `u8`, `i16`, `u16`, `i32`, `u32`, `i64`, `u64`, `i128`, `u128`, `isize`, `usize` + = note: for more information, visit <https://doc.rust-lang.org/reference/type-layout.html?highlight=repr#representations> + +error[E0566]: conflicting representation hints + --> tests/ui-nightly/enum.rs:37:8 + | +37 | #[repr(u8, u16)] + | ^^ ^^^ + | + = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! + = note: for more information, see issue #68585 <https://github.com/rust-lang/rust/issues/68585> + = note: `#[deny(conflicting_repr_hints)]` (part of `#[deny(future_incompatible)]`) on by default + +error[E0277]: the trait bound `UnsafeCell<()>: Immutable` is not satisfied + --> tests/ui-nightly/enum.rs:51:10 + | +51 | #[derive(Immutable)] + | ^^^^^^^^^ the trait `Immutable` is not implemented for `UnsafeCell<()>` + | + = note: Consider adding `#[derive(Immutable)]` to `UnsafeCell<()>` + = help: the following other types implement trait `Immutable`: + &T + &mut T + () + *const T + *mut T + F32<O> + F64<O> + I128<O> + and $N others + = help: see issue #48214 + = note: this error originates in the derive macro `Immutable` (in Nightly builds, run with -Z macro-backtrace for more info) +help: add `#![feature(trivial_bounds)]` to the crate attributes to enable + | + 9 + #![feature(trivial_bounds)] + | + +error[E0277]: the trait bound `UnsafeCell<u8>: Immutable` is not satisfied + --> tests/ui-nightly/enum.rs:59:10 + | +59 | #[derive(Immutable)] + | ^^^^^^^^^ the trait `Immutable` is not implemented for `UnsafeCell<u8>` + | + = note: Consider adding `#[derive(Immutable)]` to `UnsafeCell<u8>` + = help: the following other types implement trait `Immutable`: + &T + &mut T + () + *const T + *mut T + F32<O> + F64<O> + I128<O> + and $N others + = help: see issue #48214 + = note: this error originates in the derive macro `Immutable` (in Nightly builds, run with -Z macro-backtrace for more info) +help: add `#![feature(trivial_bounds)]` to the crate attributes to enable + | + 9 + #![feature(trivial_bounds)] + | + +error[E0277]: the trait bound `NotTryFromBytes: TryFromBytes` is not satisfied + --> tests/ui-nightly/enum.rs:82:10 + | +82 | #[derive(TryFromBytes)] + | ^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `TryFromBytes` is not implemented for `NotTryFromBytes` + --> tests/ui-nightly/enum.rs:80:1 + | +80 | struct NotTryFromBytes; + | ^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(TryFromBytes)]` to `NotTryFromBytes` + = help: the following 
other types implement trait `TryFromBytes`: + () + *const T + *mut T + <FromZeros6 as TryFromBytes>::is_bit_valid::___ZerocopyVariantStruct_A + <TryFromBytes3 as TryFromBytes>::is_bit_valid::___ZerocopyVariantStruct_A + AtomicBool + AtomicI16 + AtomicI32 + and $N others + = help: see issue #48214 + = note: this error originates in the derive macro `TryFromBytes` (in Nightly builds, run with -Z macro-backtrace for more info) +help: add `#![feature(trivial_bounds)]` to the crate attributes to enable + | + 9 + #![feature(trivial_bounds)] + | + +error[E0277]: the trait bound `NotFromZeros: TryFromBytes` is not satisfied + --> tests/ui-nightly/enum.rs:127:10 + | +127 | #[derive(FromZeros)] + | ^^^^^^^^^ unsatisfied trait bound + | +help: the trait `TryFromBytes` is not implemented for `NotFromZeros` + --> tests/ui-nightly/enum.rs:125:1 + | +125 | struct NotFromZeros; + | ^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(TryFromBytes)]` to `NotFromZeros` + = help: the following other types implement trait `TryFromBytes`: + () + *const T + *mut T + <FromZeros6 as TryFromBytes>::is_bit_valid::___ZerocopyVariantStruct_A + <TryFromBytes3 as TryFromBytes>::is_bit_valid::___ZerocopyVariantStruct_A + AtomicBool + AtomicI16 + AtomicI32 + and $N others + = help: see issue #48214 + = note: this error originates in the derive macro `FromZeros` (in Nightly builds, run with -Z macro-backtrace for more info) +help: add `#![feature(trivial_bounds)]` to the crate attributes to enable + | + 9 + #![feature(trivial_bounds)] + | + +error[E0277]: the trait bound `NotFromZeros: FromZeros` is not satisfied + --> tests/ui-nightly/enum.rs:127:10 + | +127 | #[derive(FromZeros)] + | ^^^^^^^^^ unsatisfied trait bound + | +help: the trait `FromZeros` is not implemented for `NotFromZeros` + --> tests/ui-nightly/enum.rs:125:1 + | +125 | struct NotFromZeros; + | ^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(FromZeros)]` to `NotFromZeros` + = help: the following other types implement trait `FromZeros`: + () + *const T + *mut T + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + and $N others + = help: see issue #48214 + = note: this error originates in the derive macro `FromZeros` (in Nightly builds, run with -Z macro-backtrace for more info) +help: add `#![feature(trivial_bounds)]` to the crate attributes to enable + | + 9 + #![feature(trivial_bounds)] + | + +error[E0277]: the trait bound `bool: FromBytes` is not satisfied + --> tests/ui-nightly/enum.rs:191:10 + | +191 | #[derive(FromBytes)] + | ^^^^^^^^^ the trait `FromBytes` is not implemented for `bool` + | + = note: Consider adding `#[derive(FromBytes)]` to `bool` + = help: the following other types implement trait `FromBytes`: + () + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + AtomicU16 + AtomicU32 + and $N others + = help: see issue #48214 + = note: this error originates in the derive macro `FromBytes` (in Nightly builds, run with -Z macro-backtrace for more info) +help: add `#![feature(trivial_bounds)]` to the crate attributes to enable + | + 9 + #![feature(trivial_bounds)] + | + +error[E0277]: `IntoBytes1` has 1 total byte(s) of padding + --> tests/ui-nightly/enum.rs:538:10 + | +538 | #[derive(IntoBytes)] + | ^^^^^^^^^ types with padding cannot implement `IntoBytes` + | + = note: consider using `zerocopy::Unalign` to lower the alignment of individual fields + = note: consider adding explicit fields where padding would be + = note: consider using `#[repr(packed)]` to remove padding +help: the trait `PaddingFree<IntoBytes1, 1>` 
is not implemented for `()` + but trait `PaddingFree<IntoBytes1, 0>` is implemented for it + --> $WORKSPACE/src/util/macro_util.rs + | + 63 | impl<T: ?Sized> PaddingFree<T, 0> for () {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = help: see issue #48214 + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) +help: add `#![feature(trivial_bounds)]` to the crate attributes to enable + | + 9 + #![feature(trivial_bounds)] + | + +error[E0277]: `IntoBytes2` has 3 total byte(s) of padding + --> tests/ui-nightly/enum.rs:549:10 + | +549 | #[derive(IntoBytes)] + | ^^^^^^^^^ types with padding cannot implement `IntoBytes` + | + = note: consider using `zerocopy::Unalign` to lower the alignment of individual fields + = note: consider adding explicit fields where padding would be + = note: consider using `#[repr(packed)]` to remove padding +help: the trait `PaddingFree<IntoBytes2, 3>` is not implemented for `()` + but trait `PaddingFree<IntoBytes2, 0>` is implemented for it + --> $WORKSPACE/src/util/macro_util.rs + | + 63 | impl<T: ?Sized> PaddingFree<T, 0> for () {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = help: see issue #48214 + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) +help: add `#![feature(trivial_bounds)]` to the crate attributes to enable + | + 9 + #![feature(trivial_bounds)] + | + +error[E0277]: `IntoBytes3` has 2 total byte(s) of padding + --> tests/ui-nightly/enum.rs:555:10 + | +555 | #[derive(IntoBytes)] + | ^^^^^^^^^ types with padding cannot implement `IntoBytes` + | + = note: consider using `zerocopy::Unalign` to lower the alignment of individual fields + = note: consider adding explicit fields where padding would be + = note: consider using `#[repr(packed)]` to remove padding +help: the trait `PaddingFree<IntoBytes3, 2>` is not implemented for `()` + but trait `PaddingFree<IntoBytes3, 0>` is implemented for it + --> $WORKSPACE/src/util/macro_util.rs + | + 63 | impl<T: ?Sized> PaddingFree<T, 0> for () {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = help: see issue #48214 + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) +help: add `#![feature(trivial_bounds)]` to the crate attributes to enable + | + 9 + #![feature(trivial_bounds)] + | + +error: generic `Self` types are currently not permitted in anonymous constants + --> tests/ui-nightly/enum.rs:573:10 + | +573 | #[derive(IntoBytes)] + | ^^^^^^^^^ + | +note: not a concrete type + --> tests/ui-nightly/enum.rs:573:10 + | +573 | #[derive(IntoBytes)] + | ^^^^^^^^^ + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `bool: FromBytes` is not satisfied + --> tests/ui-nightly/enum.rs:191:10 + | +191 | #[derive(FromBytes)] + | ^^^^^^^^^ the trait `FromBytes` is not implemented for `bool` + | + = note: Consider adding `#[derive(FromBytes)]` to `bool` + = help: the following other types implement trait `FromBytes`: + () + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + AtomicU16 + AtomicU32 + and $N others +note: required for `FooU8` to implement `FromBytes` + --> tests/ui-nightly/enum.rs:191:10 + | +191 | #[derive(FromBytes)] + | ^^^^^^^^^ unsatisfied trait bound introduced in this `derive` macro +note: required by a bound in `assert_is_from_bytes` + --> tests/ui-nightly/enum.rs:191:10 
+ | +191 | #[derive(FromBytes)] + | ^^^^^^^^^ required by this bound in `assert_is_from_bytes` + = note: this error originates in the derive macro `FromBytes` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy-derive/tests/ui-nightly/enum_from_bytes_u8_too_few.rs b/vendor/zerocopy-derive/tests/ui-nightly/enum_from_bytes_u8_too_few.rs new file mode 100644 index 00000000..1b1bed31 --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-nightly/enum_from_bytes_u8_too_few.rs @@ -0,0 +1,272 @@ +// Copyright 2019 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +#[macro_use] +extern crate zerocopy; + +fn main() {} + +#[derive(FromBytes)] +#[repr(u8)] +enum Foo { + Variant0, + Variant1, + Variant2, + Variant3, + Variant4, + Variant5, + Variant6, + Variant7, + Variant8, + Variant9, + Variant10, + Variant11, + Variant12, + Variant13, + Variant14, + Variant15, + Variant16, + Variant17, + Variant18, + Variant19, + Variant20, + Variant21, + Variant22, + Variant23, + Variant24, + Variant25, + Variant26, + Variant27, + Variant28, + Variant29, + Variant30, + Variant31, + Variant32, + Variant33, + Variant34, + Variant35, + Variant36, + Variant37, + Variant38, + Variant39, + Variant40, + Variant41, + Variant42, + Variant43, + Variant44, + Variant45, + Variant46, + Variant47, + Variant48, + Variant49, + Variant50, + Variant51, + Variant52, + Variant53, + Variant54, + Variant55, + Variant56, + Variant57, + Variant58, + Variant59, + Variant60, + Variant61, + Variant62, + Variant63, + Variant64, + Variant65, + Variant66, + Variant67, + Variant68, + Variant69, + Variant70, + Variant71, + Variant72, + Variant73, + Variant74, + Variant75, + Variant76, + Variant77, + Variant78, + Variant79, + Variant80, + Variant81, + Variant82, + Variant83, + Variant84, + Variant85, + Variant86, + Variant87, + Variant88, + Variant89, + Variant90, + Variant91, + Variant92, + Variant93, + Variant94, + Variant95, + Variant96, + Variant97, + Variant98, + Variant99, + Variant100, + Variant101, + Variant102, + Variant103, + Variant104, + Variant105, + Variant106, + Variant107, + Variant108, + Variant109, + Variant110, + Variant111, + Variant112, + Variant113, + Variant114, + Variant115, + Variant116, + Variant117, + Variant118, + Variant119, + Variant120, + Variant121, + Variant122, + Variant123, + Variant124, + Variant125, + Variant126, + Variant127, + Variant128, + Variant129, + Variant130, + Variant131, + Variant132, + Variant133, + Variant134, + Variant135, + Variant136, + Variant137, + Variant138, + Variant139, + Variant140, + Variant141, + Variant142, + Variant143, + Variant144, + Variant145, + Variant146, + Variant147, + Variant148, + Variant149, + Variant150, + Variant151, + Variant152, + Variant153, + Variant154, + Variant155, + Variant156, + Variant157, + Variant158, + Variant159, + Variant160, + Variant161, + Variant162, + Variant163, + Variant164, + Variant165, + Variant166, + Variant167, + Variant168, + Variant169, + Variant170, + Variant171, + Variant172, + Variant173, + Variant174, + Variant175, + Variant176, + Variant177, + Variant178, + Variant179, + Variant180, + Variant181, + Variant182, + Variant183, + Variant184, + Variant185, + Variant186, + Variant187, + Variant188, 
+ Variant189, + Variant190, + Variant191, + Variant192, + Variant193, + Variant194, + Variant195, + Variant196, + Variant197, + Variant198, + Variant199, + Variant200, + Variant201, + Variant202, + Variant203, + Variant204, + Variant205, + Variant206, + Variant207, + Variant208, + Variant209, + Variant210, + Variant211, + Variant212, + Variant213, + Variant214, + Variant215, + Variant216, + Variant217, + Variant218, + Variant219, + Variant220, + Variant221, + Variant222, + Variant223, + Variant224, + Variant225, + Variant226, + Variant227, + Variant228, + Variant229, + Variant230, + Variant231, + Variant232, + Variant233, + Variant234, + Variant235, + Variant236, + Variant237, + Variant238, + Variant239, + Variant240, + Variant241, + Variant242, + Variant243, + Variant244, + Variant245, + Variant246, + Variant247, + Variant248, + Variant249, + Variant250, + Variant251, + Variant252, + Variant253, + Variant254, +} diff --git a/vendor/zerocopy-derive/tests/ui-nightly/enum_from_bytes_u8_too_few.stderr b/vendor/zerocopy-derive/tests/ui-nightly/enum_from_bytes_u8_too_few.stderr new file mode 100644 index 00000000..947c9d05 --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-nightly/enum_from_bytes_u8_too_few.stderr @@ -0,0 +1,11 @@ +error: FromBytes only supported on repr(u8) enum with 256 variants + --> tests/ui-nightly/enum_from_bytes_u8_too_few.rs:15:1 + | + 15 | / #[repr(u8)] + 16 | | enum Foo { + 17 | | Variant0, + 18 | | Variant1, +... | +271 | | Variant254, +272 | | } + | |_^ diff --git a/vendor/zerocopy-derive/tests/ui-nightly/late_compile_pass.rs b/vendor/zerocopy-derive/tests/ui-nightly/late_compile_pass.rs new file mode 100644 index 00000000..d8aeb369 --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-nightly/late_compile_pass.rs @@ -0,0 +1,85 @@ +// Copyright 2019 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +#[macro_use] +extern crate zerocopy; + +#[path = "../include.rs"] +mod util; + +use zerocopy::KnownLayout; + +use self::util::util::{NotZerocopy, AU16}; + +fn main() {} + +// These tests cause errors which are generated by a later compilation pass than +// the other errors we generate, and so if they're compiled in the same file, +// the compiler will never get to that pass, and so we won't get the errors. + +// +// TryFromBytes errors +// + +#[derive(TryFromBytes)] +struct TryFromBytes1 { + value: NotZerocopy, +} + +// +// FromZeros errors +// + +#[derive(FromZeros)] +struct FromZeros1 { + value: NotZerocopy, +} + +// +// FromBytes errors +// + +#[derive(FromBytes)] +struct FromBytes1 { + value: NotZerocopy, +} + +// +// IntoBytes errors +// + +#[derive(IntoBytes)] +#[repr(C)] +struct IntoBytes1 { + value: NotZerocopy, +} + +// +// Unaligned errors +// + +#[derive(Unaligned)] +#[repr(C)] +struct Unaligned1 { + aligned: AU16, +} + +// This specifically tests a bug we had in an old version of the code in which +// the trait bound would only be enforced for the first field's type. 
+#[derive(Unaligned)] +#[repr(C)] +struct Unaligned2 { + unaligned: u8, + aligned: AU16, +} + +#[derive(Unaligned)] +#[repr(transparent)] +struct Unaligned3 { + aligned: AU16, +} diff --git a/vendor/zerocopy-derive/tests/ui-nightly/late_compile_pass.stderr b/vendor/zerocopy-derive/tests/ui-nightly/late_compile_pass.stderr new file mode 100644 index 00000000..dc92b1fd --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-nightly/late_compile_pass.stderr @@ -0,0 +1,331 @@ +warning: unused import: `zerocopy::KnownLayout` + --> tests/ui-nightly/late_compile_pass.rs:15:5 + | +15 | use zerocopy::KnownLayout; + | ^^^^^^^^^^^^^^^^^^^^^ + | + = note: `#[warn(unused_imports)]` (part of `#[warn(unused)]`) on by default + +error[E0277]: the trait bound `NotZerocopy: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-nightly/late_compile_pass.rs:29:10 + | +29 | #[derive(TryFromBytes)] + | ^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::TryFromBytes` is not implemented for `NotZerocopy` + --> tests/ui-nightly/../include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(TryFromBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::TryFromBytes`: + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + and $N others + = help: see issue #48214 + = note: this error originates in the derive macro `TryFromBytes` (in Nightly builds, run with -Z macro-backtrace for more info) +help: add `#![feature(trivial_bounds)]` to the crate attributes to enable + | + 9 + #![feature(trivial_bounds)] + | + +error[E0277]: the trait bound `NotZerocopy: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-nightly/late_compile_pass.rs:38:10 + | +38 | #[derive(FromZeros)] + | ^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::TryFromBytes` is not implemented for `NotZerocopy` + --> tests/ui-nightly/../include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(TryFromBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::TryFromBytes`: + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + and $N others + = help: see issue #48214 + = note: this error originates in the derive macro `FromZeros` (in Nightly builds, run with -Z macro-backtrace for more info) +help: add `#![feature(trivial_bounds)]` to the crate attributes to enable + | + 9 + #![feature(trivial_bounds)] + | + +error[E0277]: the trait bound `NotZerocopy: FromZeros` is not satisfied + --> tests/ui-nightly/late_compile_pass.rs:38:10 + | +38 | #[derive(FromZeros)] + | ^^^^^^^^^ unsatisfied trait bound + | +help: the trait `FromZeros` is not implemented for `NotZerocopy` + --> tests/ui-nightly/../include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(FromZeros)]` to `NotZerocopy` + = help: the following other types implement trait `FromZeros`: + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + and $N others + = help: see issue #48214 + = note: this error originates in the derive macro `FromZeros` (in Nightly builds, run with -Z macro-backtrace for more info) +help: add `#![feature(trivial_bounds)]` to the crate attributes to enable + | + 9 + #![feature(trivial_bounds)] + | + +error[E0277]: the trait bound `NotZerocopy: zerocopy::TryFromBytes` is not satisfied + --> 
tests/ui-nightly/late_compile_pass.rs:47:10 + | +47 | #[derive(FromBytes)] + | ^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::TryFromBytes` is not implemented for `NotZerocopy` + --> tests/ui-nightly/../include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(TryFromBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::TryFromBytes`: + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + and $N others + = help: see issue #48214 + = note: this error originates in the derive macro `FromBytes` (in Nightly builds, run with -Z macro-backtrace for more info) +help: add `#![feature(trivial_bounds)]` to the crate attributes to enable + | + 9 + #![feature(trivial_bounds)] + | + +error[E0277]: the trait bound `NotZerocopy: FromZeros` is not satisfied + --> tests/ui-nightly/late_compile_pass.rs:47:10 + | +47 | #[derive(FromBytes)] + | ^^^^^^^^^ unsatisfied trait bound + | +help: the trait `FromZeros` is not implemented for `NotZerocopy` + --> tests/ui-nightly/../include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(FromZeros)]` to `NotZerocopy` + = help: the following other types implement trait `FromZeros`: + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + and $N others + = help: see issue #48214 + = note: this error originates in the derive macro `FromBytes` (in Nightly builds, run with -Z macro-backtrace for more info) +help: add `#![feature(trivial_bounds)]` to the crate attributes to enable + | + 9 + #![feature(trivial_bounds)] + | + +error[E0277]: the trait bound `NotZerocopy: zerocopy::FromBytes` is not satisfied + --> tests/ui-nightly/late_compile_pass.rs:47:10 + | +47 | #[derive(FromBytes)] + | ^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::FromBytes` is not implemented for `NotZerocopy` + --> tests/ui-nightly/../include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(FromBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::FromBytes`: + () + AU16 + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + AtomicU16 + and $N others + = help: see issue #48214 + = note: this error originates in the derive macro `FromBytes` (in Nightly builds, run with -Z macro-backtrace for more info) +help: add `#![feature(trivial_bounds)]` to the crate attributes to enable + | + 9 + #![feature(trivial_bounds)] + | + +error[E0277]: the trait bound `NotZerocopy: zerocopy::IntoBytes` is not satisfied + --> tests/ui-nightly/late_compile_pass.rs:56:10 + | +56 | #[derive(IntoBytes)] + | ^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::IntoBytes` is not implemented for `NotZerocopy` + --> tests/ui-nightly/../include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(IntoBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::IntoBytes`: + () + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + and $N others + = help: see issue #48214 + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) +help: add `#![feature(trivial_bounds)]` to the crate attributes to enable + | + 9 + #![feature(trivial_bounds)] + | + 
+error[E0277]: the trait bound `AU16: zerocopy::Unaligned` is not satisfied + --> tests/ui-nightly/late_compile_pass.rs:66:10 + | +66 | #[derive(Unaligned)] + | ^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::Unaligned` is not implemented for `AU16` + --> tests/ui-nightly/../include.rs + | + | pub struct AU16(pub u16); + | ^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(Unaligned)]` to `AU16` + = help: the following other types implement trait `zerocopy::Unaligned`: + () + AtomicBool + AtomicI8 + AtomicU8 + Cell<T> + F32<O> + F64<O> + I128<O> + and $N others + = help: see issue #48214 + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) +help: add `#![feature(trivial_bounds)]` to the crate attributes to enable + | + 9 + #![feature(trivial_bounds)] + | + +error[E0277]: the trait bound `AU16: zerocopy::Unaligned` is not satisfied + --> tests/ui-nightly/late_compile_pass.rs:74:10 + | +74 | #[derive(Unaligned)] + | ^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::Unaligned` is not implemented for `AU16` + --> tests/ui-nightly/../include.rs + | + | pub struct AU16(pub u16); + | ^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(Unaligned)]` to `AU16` + = help: the following other types implement trait `zerocopy::Unaligned`: + () + AtomicBool + AtomicI8 + AtomicU8 + Cell<T> + F32<O> + F64<O> + I128<O> + and $N others + = help: see issue #48214 + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) +help: add `#![feature(trivial_bounds)]` to the crate attributes to enable + | + 9 + #![feature(trivial_bounds)] + | + +error[E0277]: the trait bound `AU16: zerocopy::Unaligned` is not satisfied + --> tests/ui-nightly/late_compile_pass.rs:81:10 + | +81 | #[derive(Unaligned)] + | ^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::Unaligned` is not implemented for `AU16` + --> tests/ui-nightly/../include.rs + | + | pub struct AU16(pub u16); + | ^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(Unaligned)]` to `AU16` + = help: the following other types implement trait `zerocopy::Unaligned`: + () + AtomicBool + AtomicI8 + AtomicU8 + Cell<T> + F32<O> + F64<O> + I128<O> + and $N others + = help: see issue #48214 + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) +help: add `#![feature(trivial_bounds)]` to the crate attributes to enable + | + 9 + #![feature(trivial_bounds)] + | + +error[E0277]: the trait bound `NotZerocopy: zerocopy::FromBytes` is not satisfied + --> tests/ui-nightly/late_compile_pass.rs:47:10 + | +47 | #[derive(FromBytes)] + | ^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::FromBytes` is not implemented for `NotZerocopy` + --> tests/ui-nightly/../include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(FromBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::FromBytes`: + () + AU16 + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + AtomicU16 + and $N others +note: required for `FromBytes1` to implement `zerocopy::FromBytes` + --> tests/ui-nightly/late_compile_pass.rs:47:10 + | +47 | #[derive(FromBytes)] + | ^^^^^^^^^ unsatisfied trait bound introduced in this `derive` macro +note: required by a bound in `<FromBytes1 as zerocopy::TryFromBytes>::is_bit_valid::assert_is_from_bytes` + 
--> tests/ui-nightly/late_compile_pass.rs:47:10 + | +47 | #[derive(FromBytes)] + | ^^^^^^^^^ required by this bound in `assert_is_from_bytes` + = note: this error originates in the derive macro `FromBytes` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy-derive/tests/ui-nightly/mid_compile_pass.rs b/vendor/zerocopy-derive/tests/ui-nightly/mid_compile_pass.rs new file mode 100644 index 00000000..e0c4bc57 --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-nightly/mid_compile_pass.rs @@ -0,0 +1,61 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::KnownLayout; + +fn main() {} + +// These tests cause errors which are generated by a later compilation pass than +// the other errors we generate, and so if they're compiled in the same file, +// the compiler will never get to that pass, and so we won't get the errors. + +// +// KnownLayout errors +// + +fn assert_kl<T: ?Sized + KnownLayout>(_: &T) {} + +// | `repr(C)`? | generic? | `KnownLayout`? | `Sized`? | Type Name | +// | N | Y | N | N | KL04 | +#[derive(KnownLayout)] +struct KL04<T: ?Sized>(u8, T); + +fn test_kl04<T: ?Sized>(kl: &KL04<T>) { + assert_kl(kl); +} + +// | `repr(C)`? | generic? | `KnownLayout`? | `Sized`? | Type Name | +// | N | Y | Y | N | KL06 | +#[derive(KnownLayout)] +struct KL06<T: ?Sized + KnownLayout>(u8, T); + +fn test_kl06<T: ?Sized + KnownLayout>(kl: &KL06<T>) { + assert_kl(kl); +} + +// | `repr(C)`? | generic? | `KnownLayout`? | `Sized`? | Type Name | +// | Y | Y | N | N | KL12 | +#[derive(KnownLayout)] +#[repr(C)] +struct KL12<T: ?Sized>(u8, T); + +fn test_kl12<T: ?Sized>(kl: &KL12<T>) { + assert_kl(kl) +} + +// | `repr(C)`? | generic? | `KnownLayout`? | `Sized`? 
| Type Name | +// | Y | Y | N | Y | KL13 | +#[derive(KnownLayout)] +#[repr(C)] +struct KL13<T>(u8, T); + +fn test_kl13<T>(t: T) -> impl KnownLayout { + KL13(0u8, t) +} diff --git a/vendor/zerocopy-derive/tests/ui-nightly/mid_compile_pass.stderr b/vendor/zerocopy-derive/tests/ui-nightly/mid_compile_pass.stderr new file mode 100644 index 00000000..deb13016 --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-nightly/mid_compile_pass.stderr @@ -0,0 +1,109 @@ +error[E0277]: the trait bound `T: KnownLayout` is not satisfied + --> tests/ui-nightly/mid_compile_pass.rs:59:26 + | +59 | fn test_kl13<T>(t: T) -> impl KnownLayout { + | ^^^^^^^^^^^^^^^^ the trait `KnownLayout` is not implemented for `T` +60 | KL13(0u8, t) + | ------------ return type was inferred to be `KL13<T>` here + | + = note: Consider adding `#[derive(KnownLayout)]` to `T` +note: required for `KL13<T>` to implement `KnownLayout` + --> tests/ui-nightly/mid_compile_pass.rs:55:10 + | +55 | #[derive(KnownLayout)] + | ^^^^^^^^^^^ unsatisfied trait bound introduced in this `derive` macro + = note: this error originates in the derive macro `KnownLayout` (in Nightly builds, run with -Z macro-backtrace for more info) +help: consider restricting type parameter `T` with trait `KnownLayout` + | +59 | fn test_kl13<T: zerocopy::KnownLayout>(t: T) -> impl KnownLayout { + | +++++++++++++++++++++++ + +error[E0277]: the size for values of type `T` cannot be known at compilation time + --> tests/ui-nightly/mid_compile_pass.rs:31:15 + | +30 | fn test_kl04<T: ?Sized>(kl: &KL04<T>) { + | - this type parameter needs to be `Sized` +31 | assert_kl(kl); + | --------- ^^ doesn't have a size known at compile-time + | | + | required by a bound introduced by this call + | +note: required because it appears within the type `KL04<T>` + --> tests/ui-nightly/mid_compile_pass.rs:28:8 + | +28 | struct KL04<T: ?Sized>(u8, T); + | ^^^^ +note: required for `KL04<T>` to implement `KnownLayout` + --> tests/ui-nightly/mid_compile_pass.rs:27:10 + | +27 | #[derive(KnownLayout)] + | ^^^^^^^^^^^ unsatisfied trait bound introduced in this `derive` macro +note: required by a bound in `assert_kl` + --> tests/ui-nightly/mid_compile_pass.rs:23:26 + | +23 | fn assert_kl<T: ?Sized + KnownLayout>(_: &T) {} + | ^^^^^^^^^^^ required by this bound in `assert_kl` + = note: this error originates in the derive macro `KnownLayout` (in Nightly builds, run with -Z macro-backtrace for more info) +help: consider removing the `?Sized` bound to make the type parameter `Sized` + | +30 - fn test_kl04<T: ?Sized>(kl: &KL04<T>) { +30 + fn test_kl04<T>(kl: &KL04<T>) { + | + +error[E0277]: the size for values of type `T` cannot be known at compilation time + --> tests/ui-nightly/mid_compile_pass.rs:40:15 + | +39 | fn test_kl06<T: ?Sized + KnownLayout>(kl: &KL06<T>) { + | - this type parameter needs to be `Sized` +40 | assert_kl(kl); + | --------- ^^ doesn't have a size known at compile-time + | | + | required by a bound introduced by this call + | +note: required because it appears within the type `KL06<T>` + --> tests/ui-nightly/mid_compile_pass.rs:37:8 + | +37 | struct KL06<T: ?Sized + KnownLayout>(u8, T); + | ^^^^ +note: required for `KL06<T>` to implement `KnownLayout` + --> tests/ui-nightly/mid_compile_pass.rs:36:10 + | +36 | #[derive(KnownLayout)] + | ^^^^^^^^^^^ unsatisfied trait bound introduced in this `derive` macro +note: required by a bound in `assert_kl` + --> tests/ui-nightly/mid_compile_pass.rs:23:26 + | +23 | fn assert_kl<T: ?Sized + KnownLayout>(_: &T) {} + | ^^^^^^^^^^^ required by this 
bound in `assert_kl` + = note: this error originates in the derive macro `KnownLayout` (in Nightly builds, run with -Z macro-backtrace for more info) +help: consider removing the `?Sized` bound to make the type parameter `Sized` + | +39 - fn test_kl06<T: ?Sized + KnownLayout>(kl: &KL06<T>) { +39 + fn test_kl06<T: KnownLayout>(kl: &KL06<T>) { + | + +error[E0277]: the trait bound `KL12<T>: KnownLayout` is not satisfied + --> tests/ui-nightly/mid_compile_pass.rs:50:15 + | +50 | assert_kl(kl) + | --------- ^^ the trait `KnownLayout` is not implemented for `KL12<T>` + | | + | required by a bound introduced by this call + | +note: required for `KL12<T>` to implement `KnownLayout` + --> tests/ui-nightly/mid_compile_pass.rs:45:10 + | +45 | #[derive(KnownLayout)] + | ^^^^^^^^^^^ unsatisfied trait bound introduced in this `derive` macro +note: required by a bound in `assert_kl` + --> tests/ui-nightly/mid_compile_pass.rs:23:26 + | +23 | fn assert_kl<T: ?Sized + KnownLayout>(_: &T) {} + | ^^^^^^^^^^^ required by this bound in `assert_kl` + = note: this error originates in the derive macro `KnownLayout` (in Nightly builds, run with -Z macro-backtrace for more info) +help: consider borrowing here + | +50 | assert_kl(&kl) + | + +50 | assert_kl(&mut kl) + | ++++ diff --git a/vendor/zerocopy-derive/tests/ui-nightly/struct.rs b/vendor/zerocopy-derive/tests/ui-nightly/struct.rs new file mode 100644 index 00000000..54a26b29 --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-nightly/struct.rs @@ -0,0 +1,254 @@ +// Copyright 2019 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +#[macro_use] +extern crate zerocopy; + +#[path = "../include.rs"] +mod util; + +use zerocopy::{IntoBytes, KnownLayout}; + +use self::util::util::AU16; + +fn main() {} + +// +// KnownLayout errors +// + +struct NotKnownLayout; + +struct NotKnownLayoutDst([u8]); + +// | `repr(C)`? | generic? | `KnownLayout`? | `Sized`? | Type Name | +// | N | N | N | N | KL00 | +#[derive(KnownLayout)] +struct KL00(u8, NotKnownLayoutDst); + +// | `repr(C)`? | generic? | `KnownLayout`? | `Sized`? | Type Name | +// | N | N | Y | N | KL02 | +#[derive(KnownLayout)] +struct KL02(u8, [u8]); + +// | `repr(C)`? | generic? | `KnownLayout`? | `Sized`? | Type Name | +// | Y | N | N | N | KL08 | +#[derive(KnownLayout)] +#[repr(C)] +struct KL08(u8, NotKnownLayoutDst); + +// | `repr(C)`? | generic? | `KnownLayout`? | `Sized`? 
| Type Name | +// | Y | N | N | Y | KL09 | +#[derive(KnownLayout)] +#[repr(C)] +struct KL09(NotKnownLayout, NotKnownLayout); + +// +// Immutable errors +// + +#[derive(Immutable)] +struct Immutable1 { + a: core::cell::UnsafeCell<()>, +} + +#[derive(Immutable)] +struct Immutable2 { + a: [core::cell::UnsafeCell<u8>; 0], +} + +// +// TryFromBytes errors +// + +#[derive(TryFromBytes)] +#[repr(packed)] +struct TryFromBytesPacked { + foo: AU16, +} + +#[derive(TryFromBytes)] +#[repr(packed(1))] +struct TryFromBytesPackedN { + foo: AU16, +} + +#[derive(TryFromBytes)] +#[repr(C, packed)] +struct TryFromBytesCPacked { + foo: AU16, +} + +#[derive(TryFromBytes)] +#[repr(C, packed(1))] +struct TryFromBytesCPackedN { + foo: AU16, +} + +// +// IntoBytes errors +// + +// Since `IntoBytes1` has at least one generic parameter, an `IntoBytes` impl is +// emitted in which each field type is given an `Unaligned` bound. Since `foo`'s +// type doesn't implement `Unaligned`, this should fail. +#[derive(IntoBytes)] +#[repr(C)] +struct IntoBytes1<T> { + foo: AU16, + bar: T, +} + +#[derive(IntoBytes)] +#[repr(C)] +struct IntoBytes2 { + foo: u8, + bar: AU16, +} + +#[derive(IntoBytes)] +#[repr(C, packed(2))] +struct IntoBytes3 { + foo: u8, + // We'd prefer to use AU64 here, but you can't use aligned types in + // packed structs. + bar: u64, +} + +type SliceU8 = [u8]; + +// Padding between `u8` and `SliceU8`. `SliceU8` doesn't syntactically look like +// a slice, so this case is handled by our `Sized` support. +// +// NOTE(#1708): This exists to ensure that our error messages are good when a +// field is unsized. +#[derive(IntoBytes)] +#[repr(C)] +struct IntoBytes4 { + a: u8, + b: SliceU8, +} + +// Padding between `u8` and `[u16]`. `[u16]` is syntactically identifiable as a +// slice, so this case is handled by our `repr(C)` slice DST support. +#[derive(IntoBytes)] +#[repr(C)] +struct IntoBytes5 { + a: u8, + b: [u16], +} + +// Trailing padding after `[u8]`. `[u8]` is syntactically identifiable as a +// slice, so this case is handled by our `repr(C)` slice DST support. +#[derive(IntoBytes)] +#[repr(C)] +struct IntoBytes6 { + a: u16, + b: [u8], +} + +// Padding between `u8` and `u16` and also trailing padding after `[u8]`. `[u8]` +// is syntactically identifiable as a slice, so this case is handled by our +// `repr(C)` slice DST support. +#[derive(IntoBytes)] +#[repr(C)] +struct IntoBytes7 { + a: u8, + b: u16, + c: [u8], +} + +#[derive(IntoBytes)] +#[repr(C, C)] // zerocopy-derive conservatively treats these as conflicting reprs +struct IntoBytes8 { + a: u8, +} + +#[derive(IntoBytes)] +struct IntoBytes9<T> { + t: T, +} + +#[derive(IntoBytes)] +#[repr(packed(2))] +struct IntoBytes10<T> { + t: T, +} + +// `repr(C, packed(2))` is not equivalent to `repr(C, packed)`. +#[derive(IntoBytes)] +#[repr(C, packed(2))] +struct IntoBytes11<T> { + t0: T, + // Add a second field to avoid triggering the "repr(C) struct with one + // field" special case. + t1: T, +} + +fn is_into_bytes_11<T: IntoBytes>() { + if false { + is_into_bytes_11::<IntoBytes11<AU16>>(); + } +} + +// `repr(C, align(2))` is not sufficient to guarantee the layout of this type. 
+#[derive(IntoBytes)] +#[repr(C, align(2))] +struct IntoBytes12<T> { + t: T, +} + +// +// Unaligned errors +// + +#[derive(Unaligned)] +#[repr(C, align(2))] +struct Unaligned1; + +#[derive(Unaligned)] +#[repr(transparent, align(2))] +struct Unaligned2 { + foo: u8, +} + +#[derive(Unaligned)] +#[repr(packed, align(2))] +struct Unaligned3; + +#[derive(Unaligned)] +#[repr(align(1), align(2))] +struct Unaligned4; + +#[derive(Unaligned)] +#[repr(align(2), align(4))] +struct Unaligned5; + +#[derive(Unaligned)] +struct Unaligned6; + +#[derive(Unaligned)] +#[repr(packed(2))] +struct Unaligned7; + +// Test the error message emitted when conflicting reprs appear on different +// lines. On the nightly compiler, this emits a "joint span" that spans both +// problematic repr token trees and everything in between. +#[derive(Copy, Clone)] +#[repr(packed(2), C)] +#[derive(Unaligned)] +#[repr(C, packed(2))] +struct WeirdReprSpan; + +#[derive(SplitAt)] +#[repr(C)] +struct SplitAtNotKnownLayout([u8]); + +#[derive(SplitAt, KnownLayout)] +#[repr(C)] +struct SplitAtSized(u8); diff --git a/vendor/zerocopy-derive/tests/ui-nightly/struct.stderr b/vendor/zerocopy-derive/tests/ui-nightly/struct.stderr new file mode 100644 index 00000000..e1eb1fd1 --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-nightly/struct.stderr @@ -0,0 +1,554 @@ +error: this conflicts with another representation hint + --> tests/ui-nightly/struct.rs:167:8 + | +167 | #[repr(C, C)] // zerocopy-derive conservatively treats these as conflicting reprs + | ^^^^ + +error: must have a non-align #[repr(...)] attribute in order to guarantee this type's memory layout + --> tests/ui-nightly/struct.rs:172:10 + | +172 | #[derive(IntoBytes)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have a non-align #[repr(...)] attribute in order to guarantee this type's memory layout + --> tests/ui-nightly/struct.rs:177:10 + | +177 | #[derive(IntoBytes)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have a non-align #[repr(...)] attribute in order to guarantee this type's memory layout + --> tests/ui-nightly/struct.rs:200:10 + | +200 | #[derive(IntoBytes)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: cannot derive `Unaligned` on type with alignment greater than 1 + --> tests/ui-nightly/struct.rs:211:11 + | +211 | #[repr(C, align(2))] + | ^^^^^^^^ + +error: this conflicts with another representation hint + --> tests/ui-nightly/struct.rs:215:8 + | +215 | #[repr(transparent, align(2))] + | ^^^^^^^^^^^ + +error: this conflicts with another representation hint + --> tests/ui-nightly/struct.rs:221:8 + | +221 | #[repr(packed, align(2))] + | ^^^^^^^^^^^^^^^^ + +error: this conflicts with another representation hint + --> tests/ui-nightly/struct.rs:225:8 + | +225 | #[repr(align(1), align(2))] + | ^^^^^^^^^^^^^^^^^^ + +error: this conflicts with another representation hint + --> tests/ui-nightly/struct.rs:229:8 + | +229 | #[repr(align(2), align(4))] + | ^^^^^^^^^^^^^^^^^^ + +error: must have #[repr(C)], #[repr(transparent)], or #[repr(packed)] attribute in order to guarantee this type's alignment + --> tests/ui-nightly/struct.rs:232:10 + | +232 | #[derive(Unaligned)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro 
`Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(C)], #[repr(transparent)], or #[repr(packed)] attribute in order to guarantee this type's alignment + --> tests/ui-nightly/struct.rs:235:10 + | +235 | #[derive(Unaligned)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: this conflicts with another representation hint + --> tests/ui-nightly/struct.rs:243:19 + | +243 | #[repr(packed(2), C)] + | ___________________^ +244 | | #[derive(Unaligned)] +245 | | #[repr(C, packed(2))] + | |________^ + +error[E0692]: transparent struct cannot have other repr hints + --> tests/ui-nightly/struct.rs:215:8 + | +215 | #[repr(transparent, align(2))] + | ^^^^^^^^^^^ ^^^^^^^^ + +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> tests/ui-nightly/struct.rs:31:10 + | +31 | #[derive(KnownLayout)] + | ^^^^^^^^^^^ doesn't have a size known at compile-time + | + = help: within `KL00`, the trait `Sized` is not implemented for `[u8]` +note: required because it appears within the type `KL00` + --> tests/ui-nightly/struct.rs:32:8 + | +32 | struct KL00(u8, NotKnownLayoutDst); + | ^^^^ + = help: see issue #48214 + = note: this error originates in the derive macro `KnownLayout` (in Nightly builds, run with -Z macro-backtrace for more info) +help: add `#![feature(trivial_bounds)]` to the crate attributes to enable + | + 9 + #![feature(trivial_bounds)] + | + +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> tests/ui-nightly/struct.rs:36:10 + | +36 | #[derive(KnownLayout)] + | ^^^^^^^^^^^ doesn't have a size known at compile-time + | + = help: within `KL02`, the trait `Sized` is not implemented for `[u8]` +note: required because it appears within the type `KL02` + --> tests/ui-nightly/struct.rs:37:8 + | +37 | struct KL02(u8, [u8]); + | ^^^^ + = help: see issue #48214 + = note: this error originates in the derive macro `KnownLayout` (in Nightly builds, run with -Z macro-backtrace for more info) +help: add `#![feature(trivial_bounds)]` to the crate attributes to enable + | + 9 + #![feature(trivial_bounds)] + | + +error[E0277]: the trait bound `NotKnownLayoutDst: zerocopy::KnownLayout` is not satisfied + --> tests/ui-nightly/struct.rs:41:10 + | +41 | #[derive(KnownLayout)] + | ^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::KnownLayout` is not implemented for `NotKnownLayoutDst` + --> tests/ui-nightly/struct.rs:27:1 + | +27 | struct NotKnownLayoutDst([u8]); + | ^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(KnownLayout)]` to `NotKnownLayoutDst` + = help: the following other types implement trait `zerocopy::KnownLayout`: + &T + &mut T + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + and $N others + = help: see issue #48214 + = note: this error originates in the derive macro `KnownLayout` (in Nightly builds, run with -Z macro-backtrace for more info) +help: add `#![feature(trivial_bounds)]` to the crate attributes to enable + | + 9 + #![feature(trivial_bounds)] + | + +error[E0277]: the trait bound `NotKnownLayout: zerocopy::KnownLayout` is not satisfied + --> tests/ui-nightly/struct.rs:47:10 + | +47 | #[derive(KnownLayout)] + | ^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::KnownLayout` is not implemented for `NotKnownLayout` + --> tests/ui-nightly/struct.rs:25:1 + | +25 | struct NotKnownLayout; + | ^^^^^^^^^^^^^^^^^^^^^ + = note: 
Consider adding `#[derive(KnownLayout)]` to `NotKnownLayout` + = help: the following other types implement trait `zerocopy::KnownLayout`: + &T + &mut T + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + and $N others + = help: see issue #48214 + = note: this error originates in the derive macro `KnownLayout` (in Nightly builds, run with -Z macro-backtrace for more info) +help: add `#![feature(trivial_bounds)]` to the crate attributes to enable + | + 9 + #![feature(trivial_bounds)] + | + +error[E0277]: the trait bound `UnsafeCell<()>: zerocopy::Immutable` is not satisfied + --> tests/ui-nightly/struct.rs:55:10 + | +55 | #[derive(Immutable)] + | ^^^^^^^^^ the trait `zerocopy::Immutable` is not implemented for `UnsafeCell<()>` + | + = note: Consider adding `#[derive(Immutable)]` to `UnsafeCell<()>` + = help: the following other types implement trait `zerocopy::Immutable`: + &T + &mut T + () + *const T + *mut T + AU16 + F32<O> + F64<O> + and $N others + = help: see issue #48214 + = note: this error originates in the derive macro `Immutable` (in Nightly builds, run with -Z macro-backtrace for more info) +help: add `#![feature(trivial_bounds)]` to the crate attributes to enable + | + 9 + #![feature(trivial_bounds)] + | + +error[E0277]: the trait bound `UnsafeCell<u8>: zerocopy::Immutable` is not satisfied + --> tests/ui-nightly/struct.rs:60:10 + | +60 | #[derive(Immutable)] + | ^^^^^^^^^ the trait `zerocopy::Immutable` is not implemented for `UnsafeCell<u8>` + | + = note: Consider adding `#[derive(Immutable)]` to `UnsafeCell<u8>` + = help: the following other types implement trait `zerocopy::Immutable`: + &T + &mut T + () + *const T + *mut T + AU16 + F32<O> + F64<O> + and $N others + = note: required for `[UnsafeCell<u8>; 0]` to implement `zerocopy::Immutable` + = help: see issue #48214 + = note: this error originates in the derive macro `Immutable` (in Nightly builds, run with -Z macro-backtrace for more info) +help: add `#![feature(trivial_bounds)]` to the crate attributes to enable + | + 9 + #![feature(trivial_bounds)] + | + +error[E0588]: packed type cannot transitively contain a `#[repr(align)]` type + --> tests/ui-nightly/struct.rs:71:1 + | +71 | struct TryFromBytesPacked { + | ^^^^^^^^^^^^^^^^^^^^^^^^^ + | +note: `AU16` has a `#[repr(align)]` attribute + --> tests/ui-nightly/../include.rs + | + | pub struct AU16(pub u16); + | ^^^^^^^^^^^^^^^ + +error[E0588]: packed type cannot transitively contain a `#[repr(align)]` type + --> tests/ui-nightly/struct.rs:77:1 + | +77 | struct TryFromBytesPackedN { + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +note: `AU16` has a `#[repr(align)]` attribute + --> tests/ui-nightly/../include.rs + | + | pub struct AU16(pub u16); + | ^^^^^^^^^^^^^^^ + +error[E0588]: packed type cannot transitively contain a `#[repr(align)]` type + --> tests/ui-nightly/struct.rs:83:1 + | +83 | struct TryFromBytesCPacked { + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +note: `AU16` has a `#[repr(align)]` attribute + --> tests/ui-nightly/../include.rs + | + | pub struct AU16(pub u16); + | ^^^^^^^^^^^^^^^ + +error[E0588]: packed type cannot transitively contain a `#[repr(align)]` type + --> tests/ui-nightly/struct.rs:89:1 + | +89 | struct TryFromBytesCPackedN { + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +note: `AU16` has a `#[repr(align)]` attribute + --> tests/ui-nightly/../include.rs + | + | pub struct AU16(pub u16); + | ^^^^^^^^^^^^^^^ + +error[E0277]: the trait bound `AU16: zerocopy::Unaligned` is not satisfied + --> tests/ui-nightly/struct.rs:100:10 + | +100 | #[derive(IntoBytes)] + | ^^^^^^^^^ 
unsatisfied trait bound + | +help: the trait `zerocopy::Unaligned` is not implemented for `AU16` + --> tests/ui-nightly/../include.rs + | + 63 | pub struct AU16(pub u16); + | ^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(Unaligned)]` to `AU16` + = help: the following other types implement trait `zerocopy::Unaligned`: + () + AtomicBool + AtomicI8 + AtomicU8 + Cell<T> + F32<O> + F64<O> + I128<O> + and $N others + = help: see issue #48214 + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) +help: add `#![feature(trivial_bounds)]` to the crate attributes to enable + | + 9 + #![feature(trivial_bounds)] + | + +error[E0277]: `IntoBytes2` has 1 total byte(s) of padding + --> tests/ui-nightly/struct.rs:107:10 + | +107 | #[derive(IntoBytes)] + | ^^^^^^^^^ types with padding cannot implement `IntoBytes` + | + = note: consider using `zerocopy::Unalign` to lower the alignment of individual fields + = note: consider adding explicit fields where padding would be + = note: consider using `#[repr(packed)]` to remove padding +help: the trait `PaddingFree<IntoBytes2, 1>` is not implemented for `()` + but trait `PaddingFree<IntoBytes2, 0>` is implemented for it + --> $WORKSPACE/src/util/macro_util.rs + | + 63 | impl<T: ?Sized> PaddingFree<T, 0> for () {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = help: see issue #48214 + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) +help: add `#![feature(trivial_bounds)]` to the crate attributes to enable + | + 9 + #![feature(trivial_bounds)] + | + +error[E0277]: `IntoBytes3` has 1 total byte(s) of padding + --> tests/ui-nightly/struct.rs:114:10 + | +114 | #[derive(IntoBytes)] + | ^^^^^^^^^ types with padding cannot implement `IntoBytes` + | + = note: consider using `zerocopy::Unalign` to lower the alignment of individual fields + = note: consider adding explicit fields where padding would be + = note: consider using `#[repr(packed)]` to remove padding +help: the trait `PaddingFree<IntoBytes3, 1>` is not implemented for `()` + but trait `PaddingFree<IntoBytes3, 0>` is implemented for it + --> $WORKSPACE/src/util/macro_util.rs + | + 63 | impl<T: ?Sized> PaddingFree<T, 0> for () {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = help: see issue #48214 + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) +help: add `#![feature(trivial_bounds)]` to the crate attributes to enable + | + 9 + #![feature(trivial_bounds)] + | + +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> tests/ui-nightly/struct.rs:130:10 + | +130 | #[derive(IntoBytes)] + | ^^^^^^^^^ doesn't have a size known at compile-time + | + = help: within `IntoBytes4`, the trait `Sized` is not implemented for `[u8]` +note: required because it appears within the type `IntoBytes4` + --> tests/ui-nightly/struct.rs:132:8 + | +132 | struct IntoBytes4 { + | ^^^^^^^^^^ + = note: required for `IntoBytes4` to implement `macro_util::__size_of::Sized` +note: required by a bound in `macro_util::__size_of::size_of` + --> $WORKSPACE/src/util/macro_util.rs + | + | pub const fn size_of<T: Sized + ?core::marker::Sized>() -> usize { + | ^^^^^ required by this bound in `size_of` + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: `[u8]` is unsized + --> 
tests/ui-nightly/struct.rs:134:8 + | +134 | b: SliceU8, + | ^^^^^^^ `IntoBytes` needs all field types to be `Sized` in order to determine whether there is padding + | + = help: the trait `Sized` is not implemented for `[u8]` + = note: consider using `#[repr(packed)]` to remove padding + = note: `IntoBytes` does not require the fields of `#[repr(packed)]` types to be `Sized` + = note: required for `[u8]` to implement `macro_util::__size_of::Sized` +note: required by a bound in `macro_util::__size_of::size_of` + --> $WORKSPACE/src/util/macro_util.rs + | + | pub const fn size_of<T: Sized + ?core::marker::Sized>() -> usize { + | ^^^^^ required by this bound in `size_of` + +error[E0277]: `IntoBytes5` has one or more padding bytes + --> tests/ui-nightly/struct.rs:139:10 + | +139 | #[derive(IntoBytes)] + | ^^^^^^^^^ types with padding cannot implement `IntoBytes` + | + = note: consider using `zerocopy::Unalign` to lower the alignment of individual fields + = note: consider adding explicit fields where padding would be + = note: consider using `#[repr(packed)]` to remove padding +help: the trait `DynamicPaddingFree<IntoBytes5, true>` is not implemented for `()` + but trait `DynamicPaddingFree<IntoBytes5, false>` is implemented for it + --> $WORKSPACE/src/util/macro_util.rs + | + 81 | impl<T: ?Sized> DynamicPaddingFree<T, false> for () {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = help: see issue #48214 + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) +help: add `#![feature(trivial_bounds)]` to the crate attributes to enable + | + 9 + #![feature(trivial_bounds)] + | + +error[E0277]: `IntoBytes6` has one or more padding bytes + --> tests/ui-nightly/struct.rs:148:10 + | +148 | #[derive(IntoBytes)] + | ^^^^^^^^^ types with padding cannot implement `IntoBytes` + | + = note: consider using `zerocopy::Unalign` to lower the alignment of individual fields + = note: consider adding explicit fields where padding would be + = note: consider using `#[repr(packed)]` to remove padding +help: the trait `DynamicPaddingFree<IntoBytes6, true>` is not implemented for `()` + but trait `DynamicPaddingFree<IntoBytes6, false>` is implemented for it + --> $WORKSPACE/src/util/macro_util.rs + | + 81 | impl<T: ?Sized> DynamicPaddingFree<T, false> for () {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = help: see issue #48214 + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) +help: add `#![feature(trivial_bounds)]` to the crate attributes to enable + | + 9 + #![feature(trivial_bounds)] + | + +error[E0277]: `IntoBytes7` has one or more padding bytes + --> tests/ui-nightly/struct.rs:158:10 + | +158 | #[derive(IntoBytes)] + | ^^^^^^^^^ types with padding cannot implement `IntoBytes` + | + = note: consider using `zerocopy::Unalign` to lower the alignment of individual fields + = note: consider adding explicit fields where padding would be + = note: consider using `#[repr(packed)]` to remove padding +help: the trait `DynamicPaddingFree<IntoBytes7, true>` is not implemented for `()` + but trait `DynamicPaddingFree<IntoBytes7, false>` is implemented for it + --> $WORKSPACE/src/util/macro_util.rs + | + 81 | impl<T: ?Sized> DynamicPaddingFree<T, false> for () {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = help: see issue #48214 + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace 
for more info) +help: add `#![feature(trivial_bounds)]` to the crate attributes to enable + | + 9 + #![feature(trivial_bounds)] + | + +error[E0587]: type has conflicting packed and align representation hints + --> tests/ui-nightly/struct.rs:222:1 + | +222 | struct Unaligned3; + | ^^^^^^^^^^^^^^^^^ + +error[E0277]: the trait bound `SplitAtNotKnownLayout: zerocopy::KnownLayout` is not satisfied + --> tests/ui-nightly/struct.rs:248:10 + | +248 | #[derive(SplitAt)] + | ^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::KnownLayout` is not implemented for `SplitAtNotKnownLayout` + --> tests/ui-nightly/struct.rs:250:1 + | +250 | struct SplitAtNotKnownLayout([u8]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(KnownLayout)]` to `SplitAtNotKnownLayout` + = help: the following other types implement trait `zerocopy::KnownLayout`: + &T + &mut T + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + and $N others +note: required by a bound in `SplitAt` + --> $WORKSPACE/src/split_at.rs + | + 61 | pub unsafe trait SplitAt: KnownLayout<PointerMetadata = usize> { + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `SplitAt` + = note: this error originates in the derive macro `SplitAt` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `u8: SplitAt` is not satisfied + --> tests/ui-nightly/struct.rs:252:10 + | +252 | #[derive(SplitAt, KnownLayout)] + | ^^^^^^^ the trait `SplitAt` is not implemented for `u8` + | + = note: Consider adding `#[derive(SplitAt)]` to `u8` +help: the following other types implement trait `SplitAt` + --> tests/ui-nightly/struct.rs:248:10 + | +248 | #[derive(SplitAt)] + | ^^^^^^^ `SplitAtNotKnownLayout` +... +252 | #[derive(SplitAt, KnownLayout)] + | ^^^^^^^ `SplitAtSized` + | + ::: $WORKSPACE/src/split_at.rs + | + | unsafe impl<T> SplitAt for [T] { + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `[T]` + = help: see issue #48214 + = note: this error originates in the derive macro `SplitAt` (in Nightly builds, run with -Z macro-backtrace for more info) +help: add `#![feature(trivial_bounds)]` to the crate attributes to enable + | + 9 + #![feature(trivial_bounds)] + | + +error[E0277]: the trait bound `AU16: zerocopy::Unaligned` is not satisfied + --> tests/ui-nightly/struct.rs:195:28 + | +195 | is_into_bytes_11::<IntoBytes11<AU16>>(); + | ^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::Unaligned` is not implemented for `AU16` + --> tests/ui-nightly/../include.rs + | + 63 | pub struct AU16(pub u16); + | ^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(Unaligned)]` to `AU16` + = help: the following other types implement trait `zerocopy::Unaligned`: + () + AtomicBool + AtomicI8 + AtomicU8 + Cell<T> + F32<O> + F64<O> + I128<O> + and $N others +note: required for `IntoBytes11<AU16>` to implement `zerocopy::IntoBytes` + --> tests/ui-nightly/struct.rs:184:10 + | +184 | #[derive(IntoBytes)] + | ^^^^^^^^^ unsatisfied trait bound introduced in this `derive` macro +note: required by a bound in `is_into_bytes_11` + --> tests/ui-nightly/struct.rs:193:24 + | +193 | fn is_into_bytes_11<T: IntoBytes>() { + | ^^^^^^^^^ required by this bound in `is_into_bytes_11` + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy-derive/tests/ui-nightly/union.rs b/vendor/zerocopy-derive/tests/ui-nightly/union.rs new file mode 100644 index 00000000..f6eeee2a --- /dev/null +++ 
b/vendor/zerocopy-derive/tests/ui-nightly/union.rs @@ -0,0 +1,109 @@ +// Copyright 2019 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +#[macro_use] +extern crate zerocopy; + +#[path = "../include.rs"] +mod util; + +use std::mem::ManuallyDrop; + +use self::util::util::AU16; + +fn main() {} + +// +// Immutable errors +// + +#[derive(Immutable)] +union Immutable1 { + a: ManuallyDrop<core::cell::UnsafeCell<()>>, +} + +// +// IntoBytes errors +// + +#[derive(IntoBytes)] +#[repr(C)] +union IntoBytes1<T> { + foo: ManuallyDrop<T>, +} + +#[derive(IntoBytes)] +#[repr(C)] +union IntoBytes2 { + foo: u8, + bar: [u8; 2], +} + +// Need a `repr` attribute +#[derive(IntoBytes)] +union IntoBytes3 { + foo: u8, +} + +// `repr(packed(2))` isn't equivalent to `repr(packed)` +#[derive(IntoBytes)] +#[repr(packed(2))] +union IntoBytes4 { + foo: u8, +} + +// +// Unaligned errors +// + +#[derive(Unaligned)] +#[repr(C, align(2))] +union Unaligned1 { + foo: i16, + bar: AU16, +} + +// Transparent unions are unstable; see issue #60405 +// <https://github.com/rust-lang/rust/issues/60405> for more information. + +// #[derive(Unaligned)] +// #[repr(transparent, align(2))] +// union Unaligned2 { +// foo: u8, +// } + +#[derive(Unaligned)] +#[repr(packed, align(2))] +union Unaligned3 { + foo: u8, +} + +#[derive(Unaligned)] +#[repr(align(1), align(2))] +struct Unaligned4 { + foo: u8, +} + +#[derive(Unaligned)] +#[repr(align(2), align(4))] +struct Unaligned5 { + foo: u8, +} + +#[derive(Unaligned)] +union Unaligned6 { + foo: i16, + bar: AU16, +} + +#[derive(Unaligned)] +#[repr(packed(2))] +union Unaligned7 { + foo: i16, + bar: AU16, +} diff --git a/vendor/zerocopy-derive/tests/ui-nightly/union.stderr b/vendor/zerocopy-derive/tests/ui-nightly/union.stderr new file mode 100644 index 00000000..61dde476 --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-nightly/union.stderr @@ -0,0 +1,142 @@ +error: unsupported on types with type parameters + --> tests/ui-nightly/union.rs:34:10 + | +34 | #[derive(IntoBytes)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must be #[repr(C)], #[repr(packed)], or #[repr(transparent)] + --> tests/ui-nightly/union.rs:48:10 + | +48 | #[derive(IntoBytes)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must be #[repr(C)], #[repr(packed)], or #[repr(transparent)] + --> tests/ui-nightly/union.rs:54:10 + | +54 | #[derive(IntoBytes)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: cannot derive `Unaligned` on type with alignment greater than 1 + --> tests/ui-nightly/union.rs:65:11 + | +65 | #[repr(C, align(2))] + | ^^^^^^^^ + +error: this conflicts with another representation hint + --> tests/ui-nightly/union.rs:81:8 + | +81 | #[repr(packed, align(2))] + | ^^^^^^^^^^^^^^^^ + +error: this conflicts with another representation hint + --> tests/ui-nightly/union.rs:87:8 + | +87 | #[repr(align(1), align(2))] + | ^^^^^^^^^^^^^^^^^^ + +error: this conflicts with another representation 
hint + --> tests/ui-nightly/union.rs:93:8 + | +93 | #[repr(align(2), align(4))] + | ^^^^^^^^^^^^^^^^^^ + +error: must have #[repr(C)], #[repr(transparent)], or #[repr(packed)] attribute in order to guarantee this type's alignment + --> tests/ui-nightly/union.rs:98:10 + | +98 | #[derive(Unaligned)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(C)], #[repr(transparent)], or #[repr(packed)] attribute in order to guarantee this type's alignment + --> tests/ui-nightly/union.rs:104:10 + | +104 | #[derive(Unaligned)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +warning: unexpected `cfg` condition name: `zerocopy_derive_union_into_bytes` + --> tests/ui-nightly/union.rs:40:10 + | +40 | #[derive(IntoBytes)] + | ^^^^^^^^^ + | + = help: expected names are: `docsrs`, `feature`, and `test` and 31 more + = note: using a cfg inside a derive macro will use the cfgs from the destination crate and not the ones from the defining crate + = help: try referring to `IntoBytes` crate for guidance on how handle this unexpected cfg + = help: the derive macro `IntoBytes` may come from an old version of the `zerocopy_derive` crate, try updating your dependency with `cargo update -p zerocopy_derive` + = note: see <https://doc.rust-lang.org/nightly/rustc/check-cfg/cargo-specifics.html> for more information about checking conditional configuration + = note: `#[warn(unexpected_cfgs)]` on by default + = note: this warning originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `UnsafeCell<()>: zerocopy::Immutable` is not satisfied + --> tests/ui-nightly/union.rs:25:10 + | +25 | #[derive(Immutable)] + | ^^^^^^^^^ the trait `zerocopy::Immutable` is not implemented for `UnsafeCell<()>` + | + = note: Consider adding `#[derive(Immutable)]` to `UnsafeCell<()>` + = help: the following other types implement trait `zerocopy::Immutable`: + &T + &mut T + () + *const T + *mut T + AU16 + F32<O> + F64<O> + and $N others + = note: required for `ManuallyDrop<UnsafeCell<()>>` to implement `zerocopy::Immutable` + = help: see issue #48214 + = note: this error originates in the derive macro `Immutable` (in Nightly builds, run with -Z macro-backtrace for more info) +help: add `#![feature(trivial_bounds)]` to the crate attributes to enable + | + 9 + #![feature(trivial_bounds)] + | + +error[E0277]: `IntoBytes2` has 1 total byte(s) of padding + --> tests/ui-nightly/union.rs:40:10 + | +40 | #[derive(IntoBytes)] + | ^^^^^^^^^ types with padding cannot implement `IntoBytes` + | + = note: consider using `zerocopy::Unalign` to lower the alignment of individual fields + = note: consider adding explicit fields where padding would be + = note: consider using `#[repr(packed)]` to remove padding +help: the trait `PaddingFree<IntoBytes2, 1>` is not implemented for `()` + but trait `PaddingFree<IntoBytes2, 0>` is implemented for it + --> $WORKSPACE/src/util/macro_util.rs + | + | impl<T: ?Sized> PaddingFree<T, 0> for () {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = help: see issue #48214 + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) +help: add `#![feature(trivial_bounds)]` to the crate attributes to enable + | + 9 + #![feature(trivial_bounds)] + | + +error[E0587]: 
type has conflicting packed and align representation hints + --> tests/ui-nightly/union.rs:82:1 + | +82 | union Unaligned3 { + | ^^^^^^^^^^^^^^^^ + +error[E0588]: packed type cannot transitively contain a `#[repr(align)]` type + --> tests/ui-nightly/union.rs:106:1 + | +106 | union Unaligned7 { + | ^^^^^^^^^^^^^^^^ + | +note: `AU16` has a `#[repr(align)]` attribute + --> tests/ui-nightly/../include.rs + | + 63 | pub struct AU16(pub u16); + | ^^^^^^^^^^^^^^^ diff --git a/vendor/zerocopy-derive/tests/ui-nightly/union_into_bytes_cfg/union_into_bytes_cfg.rs b/vendor/zerocopy-derive/tests/ui-nightly/union_into_bytes_cfg/union_into_bytes_cfg.rs new file mode 100644 index 00000000..280f05d4 --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-nightly/union_into_bytes_cfg/union_into_bytes_cfg.rs @@ -0,0 +1,26 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +//! See: https://github.com/google/zerocopy/issues/553 +//! zerocopy must still allow derives of deprecated types. +//! This test has a hand-written impl of a deprecated type, and should result in a compilation +//! error. If zerocopy does not tack an allow(deprecated) annotation onto its impls, then this +//! test will fail because more than one compile error will be generated. +#![deny(deprecated)] + +extern crate zerocopy; + +use zerocopy::IntoBytes; + +#[derive(IntoBytes)] +#[repr(C)] +union Foo { + a: u8, +} + +fn main() {} diff --git a/vendor/zerocopy-derive/tests/ui-nightly/union_into_bytes_cfg/union_into_bytes_cfg.stderr b/vendor/zerocopy-derive/tests/ui-nightly/union_into_bytes_cfg/union_into_bytes_cfg.stderr new file mode 100644 index 00000000..3154641f --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-nightly/union_into_bytes_cfg/union_into_bytes_cfg.stderr @@ -0,0 +1,22 @@ +error: requires --cfg zerocopy_derive_union_into_bytes; + please let us know you use this feature: https://github.com/google/zerocopy/discussions/1802 + --> tests/ui-nightly/union_into_bytes_cfg/union_into_bytes_cfg.rs:20:10 + | +20 | #[derive(IntoBytes)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +warning: unexpected `cfg` condition name: `zerocopy_derive_union_into_bytes` + --> tests/ui-nightly/union_into_bytes_cfg/union_into_bytes_cfg.rs:20:10 + | +20 | #[derive(IntoBytes)] + | ^^^^^^^^^ + | + = help: expected names are: `docsrs`, `feature`, and `test` and 31 more + = note: using a cfg inside a derive macro will use the cfgs from the destination crate and not the ones from the defining crate + = help: try referring to `IntoBytes` crate for guidance on how handle this unexpected cfg + = help: the derive macro `IntoBytes` may come from an old version of the `zerocopy_derive` crate, try updating your dependency with `cargo update -p zerocopy_derive` + = note: see <https://doc.rust-lang.org/nightly/rustc/check-cfg/cargo-specifics.html> for more information about checking conditional configuration + = note: `#[warn(unexpected_cfgs)]` on by default + = note: this warning originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git 
a/vendor/zerocopy-derive/tests/ui-stable/derive_transparent.rs b/vendor/zerocopy-derive/tests/ui-stable/derive_transparent.rs new file mode 100644 index 00000000..a60c89e2 --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-stable/derive_transparent.rs @@ -0,0 +1,38 @@ +// Copyright 2019 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +#[path = "../include.rs"] +mod util; + +use core::marker::PhantomData; + +use zerocopy::{FromBytes, FromZeros, IntoBytes, TryFromBytes, Unaligned}; + +use self::util::util::NotZerocopy; + +fn main() {} + +// Test generic transparent structs + +#[derive(IntoBytes, FromBytes, Unaligned)] +#[repr(transparent)] +struct TransparentStruct<T> { + inner: T, + _phantom: PhantomData<()>, +} + +// It should be legal to derive these traits on a transparent struct, but it +// must also ensure the traits are only implemented when the inner type +// implements them. +util_assert_impl_all!(TransparentStruct<NotZerocopy>: TryFromBytes); +util_assert_impl_all!(TransparentStruct<NotZerocopy>: FromZeros); +util_assert_impl_all!(TransparentStruct<NotZerocopy>: FromBytes); +util_assert_impl_all!(TransparentStruct<NotZerocopy>: IntoBytes); +util_assert_impl_all!(TransparentStruct<NotZerocopy>: Unaligned); diff --git a/vendor/zerocopy-derive/tests/ui-stable/derive_transparent.stderr b/vendor/zerocopy-derive/tests/ui-stable/derive_transparent.stderr new file mode 100644 index 00000000..c82b354a --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-stable/derive_transparent.stderr @@ -0,0 +1,169 @@ +error[E0277]: the trait bound `NotZerocopy: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-stable/derive_transparent.rs:34:23 + | +34 | util_assert_impl_all!(TransparentStruct<NotZerocopy>: TryFromBytes); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::TryFromBytes` is not implemented for `NotZerocopy` + --> tests/ui-stable/../include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(TryFromBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::TryFromBytes`: + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + and $N others +note: required for `TransparentStruct<NotZerocopy>` to implement `zerocopy::TryFromBytes` + --> tests/ui-stable/derive_transparent.rs:24:21 + | +24 | #[derive(IntoBytes, FromBytes, Unaligned)] + | ^^^^^^^^^ unsatisfied trait bound introduced in this `derive` macro +note: required by a bound in `_::{closure#0}::_::{closure#0}::assert_impl_all` + --> tests/ui-stable/derive_transparent.rs:34:1 + | +34 | util_assert_impl_all!(TransparentStruct<NotZerocopy>: TryFromBytes); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `assert_impl_all` + = note: this error originates in the derive macro `FromBytes` which comes from the expansion of the macro `util_assert_impl_all` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy: FromZeros` is not satisfied + --> tests/ui-stable/derive_transparent.rs:35:23 + | +35 | 
util_assert_impl_all!(TransparentStruct<NotZerocopy>: FromZeros); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `FromZeros` is not implemented for `NotZerocopy` + --> tests/ui-stable/../include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(FromZeros)]` to `NotZerocopy` + = help: the following other types implement trait `FromZeros`: + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + and $N others +note: required for `TransparentStruct<NotZerocopy>` to implement `FromZeros` + --> tests/ui-stable/derive_transparent.rs:24:21 + | +24 | #[derive(IntoBytes, FromBytes, Unaligned)] + | ^^^^^^^^^ unsatisfied trait bound introduced in this `derive` macro +note: required by a bound in `_::{closure#0}::_::{closure#0}::assert_impl_all` + --> tests/ui-stable/derive_transparent.rs:35:1 + | +35 | util_assert_impl_all!(TransparentStruct<NotZerocopy>: FromZeros); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `assert_impl_all` + = note: this error originates in the derive macro `FromBytes` which comes from the expansion of the macro `util_assert_impl_all` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy: zerocopy::FromBytes` is not satisfied + --> tests/ui-stable/derive_transparent.rs:36:23 + | +36 | util_assert_impl_all!(TransparentStruct<NotZerocopy>: FromBytes); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::FromBytes` is not implemented for `NotZerocopy` + --> tests/ui-stable/../include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(FromBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::FromBytes`: + () + AU16 + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + AtomicU16 + and $N others +note: required for `TransparentStruct<NotZerocopy>` to implement `zerocopy::FromBytes` + --> tests/ui-stable/derive_transparent.rs:24:21 + | +24 | #[derive(IntoBytes, FromBytes, Unaligned)] + | ^^^^^^^^^ unsatisfied trait bound introduced in this `derive` macro +note: required by a bound in `_::{closure#0}::_::{closure#0}::assert_impl_all` + --> tests/ui-stable/derive_transparent.rs:36:1 + | +36 | util_assert_impl_all!(TransparentStruct<NotZerocopy>: FromBytes); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `assert_impl_all` + = note: this error originates in the derive macro `FromBytes` which comes from the expansion of the macro `util_assert_impl_all` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy: zerocopy::IntoBytes` is not satisfied + --> tests/ui-stable/derive_transparent.rs:37:23 + | +37 | util_assert_impl_all!(TransparentStruct<NotZerocopy>: IntoBytes); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::IntoBytes` is not implemented for `NotZerocopy` + --> tests/ui-stable/../include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(IntoBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::IntoBytes`: + () + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + and $N others +note: required for `TransparentStruct<NotZerocopy>` 
to implement `zerocopy::IntoBytes` + --> tests/ui-stable/derive_transparent.rs:24:10 + | +24 | #[derive(IntoBytes, FromBytes, Unaligned)] + | ^^^^^^^^^ unsatisfied trait bound introduced in this `derive` macro +note: required by a bound in `_::{closure#0}::_::{closure#0}::assert_impl_all` + --> tests/ui-stable/derive_transparent.rs:37:1 + | +37 | util_assert_impl_all!(TransparentStruct<NotZerocopy>: IntoBytes); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `assert_impl_all` + = note: this error originates in the derive macro `IntoBytes` which comes from the expansion of the macro `util_assert_impl_all` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy: zerocopy::Unaligned` is not satisfied + --> tests/ui-stable/derive_transparent.rs:38:23 + | +38 | util_assert_impl_all!(TransparentStruct<NotZerocopy>: Unaligned); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::Unaligned` is not implemented for `NotZerocopy` + --> tests/ui-stable/../include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(Unaligned)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::Unaligned`: + () + AtomicBool + AtomicI8 + AtomicU8 + Cell<T> + F32<O> + F64<O> + I128<O> + and $N others +note: required for `TransparentStruct<NotZerocopy>` to implement `zerocopy::Unaligned` + --> tests/ui-stable/derive_transparent.rs:24:32 + | +24 | #[derive(IntoBytes, FromBytes, Unaligned)] + | ^^^^^^^^^ unsatisfied trait bound introduced in this `derive` macro +note: required by a bound in `_::{closure#0}::_::{closure#0}::assert_impl_all` + --> tests/ui-stable/derive_transparent.rs:38:1 + | +38 | util_assert_impl_all!(TransparentStruct<NotZerocopy>: Unaligned); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `assert_impl_all` + = note: this error originates in the derive macro `Unaligned` which comes from the expansion of the macro `util_assert_impl_all` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy-derive/tests/ui-stable/enum.rs b/vendor/zerocopy-derive/tests/ui-stable/enum.rs new file mode 100644 index 00000000..82b43d80 --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-stable/enum.rs @@ -0,0 +1,577 @@ +// Copyright 2019 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +#[macro_use] +extern crate zerocopy; + +fn main() {} + +// +// Generic errors +// + +#[derive(FromBytes)] +#[repr("foo")] +enum Generic1 { + A, +} + +#[derive(FromBytes)] +#[repr(foo)] +enum Generic2 { + A, +} + +#[derive(FromBytes)] +#[repr(transparent)] +enum Generic3 { + A, +} + +#[derive(FromBytes)] +#[repr(u8, u16)] +enum Generic4 { + A, +} + +#[derive(FromBytes)] +enum Generic5 { + A, +} + +// +// Immutable errors +// + +#[derive(Immutable)] +enum Immutable1 { + A(core::cell::UnsafeCell<()>), +} + +#[derive(Immutable)] +enum Never {} + +#[derive(Immutable)] +enum Immutable2 { + Uninhabited(Never, core::cell::UnsafeCell<u8>), + Inhabited(u8), +} + +// +// TryFromBytes errors +// + +#[derive(TryFromBytes)] +enum TryFromBytes1 { + A, +} + +#[derive(TryFromBytes)] +enum TryFromBytes2 { + A, + B(u8), +} + +struct NotTryFromBytes; + +#[derive(TryFromBytes)] +#[repr(u8)] +enum TryFromBytes3 { + A(NotTryFromBytes), +} + +// +// FromZeros errors +// + +#[derive(FromZeros)] +enum FromZeros1 { + A(u8), +} + +#[derive(FromZeros)] +enum FromZeros2 { + A, + B(u8), +} + +#[derive(FromZeros)] +enum FromZeros3 { + A = 1, + B, +} + +#[derive(FromZeros)] +#[repr(u8)] +enum FromZeros4 { + A = 1, + B = 2, +} + +const NEGATIVE_ONE: i8 = -1; + +#[derive(FromZeros)] +#[repr(i8)] +enum FromZeros5 { + A = NEGATIVE_ONE, + B, +} + +struct NotFromZeros; + +#[derive(FromZeros)] +#[repr(u8)] +enum FromZeros6 { + A(NotFromZeros), +} + +#[derive(FromZeros)] +#[repr(u8)] +enum FromZeros7 { + A = 1, + B(NotFromZeros), +} + +// +// FromBytes errors +// + +#[derive(FromBytes)] +enum FromBytes1 { + A, +} + +#[derive(FromBytes)] +#[repr(C)] +enum FromBytes2 { + A, +} + +#[derive(FromBytes)] +#[repr(usize)] +enum FromBytes3 { + A, +} + +#[derive(FromBytes)] +#[repr(isize)] +enum FromBytes4 { + A, +} + +#[derive(FromBytes)] +#[repr(u32)] +enum FromBytes5 { + A, +} + +#[derive(FromBytes)] +#[repr(i32)] +enum FromBytes6 { + A, +} + +#[derive(FromBytes)] +#[repr(u64)] +enum FromBytes7 { + A, +} + +#[derive(FromBytes)] +#[repr(i64)] +enum FromBytes8 { + A, +} + +#[derive(FromBytes)] +#[repr(u8)] +enum FooU8 { + Variant0, + Variant1, + Variant2, + Variant3, + Variant4, + Variant5, + Variant6, + Variant7, + Variant8, + Variant9, + Variant10, + Variant11, + Variant12, + Variant13, + Variant14, + Variant15, + Variant16, + Variant17, + Variant18, + Variant19, + Variant20, + Variant21, + Variant22, + Variant23, + Variant24, + Variant25, + Variant26, + Variant27, + Variant28, + Variant29, + Variant30, + Variant31, + Variant32, + Variant33, + Variant34, + Variant35, + Variant36, + Variant37, + Variant38, + Variant39, + Variant40, + Variant41, + Variant42, + Variant43, + Variant44, + Variant45, + Variant46, + Variant47, + Variant48, + Variant49, + Variant50, + Variant51, + Variant52, + Variant53, + Variant54, + Variant55, + Variant56, + Variant57, + Variant58, + Variant59, + Variant60, + Variant61, + Variant62, + Variant63, + Variant64, + Variant65, + Variant66, + Variant67, + Variant68, + Variant69, + Variant70, + Variant71, + Variant72, + Variant73, + Variant74, + Variant75, + Variant76, + Variant77, + Variant78, + Variant79, + Variant80, + Variant81, + Variant82, + Variant83, + Variant84, + Variant85, + Variant86, + Variant87, + Variant88, + Variant89, + Variant90, + Variant91, + Variant92, + Variant93, + Variant94, + Variant95, + Variant96, + Variant97, + Variant98, + Variant99, + Variant100, + Variant101, + Variant102, + Variant103, + Variant104, + Variant105, + Variant106, + Variant107, + Variant108, + Variant109, + Variant110, 
+ Variant111, + Variant112, + Variant113, + Variant114, + Variant115, + Variant116, + Variant117, + Variant118, + Variant119, + Variant120, + Variant121, + Variant122, + Variant123, + Variant124, + Variant125, + Variant126, + Variant127, + Variant128, + Variant129, + Variant130, + Variant131, + Variant132, + Variant133, + Variant134, + Variant135, + Variant136, + Variant137, + Variant138, + Variant139, + Variant140, + Variant141, + Variant142, + Variant143, + Variant144, + Variant145, + Variant146, + Variant147, + Variant148, + Variant149, + Variant150, + Variant151, + Variant152, + Variant153, + Variant154, + Variant155, + Variant156, + Variant157, + Variant158, + Variant159, + Variant160, + Variant161, + Variant162, + Variant163, + Variant164, + Variant165, + Variant166, + Variant167, + Variant168, + Variant169, + Variant170, + Variant171, + Variant172, + Variant173, + Variant174, + Variant175, + Variant176, + Variant177, + Variant178, + Variant179, + Variant180, + Variant181, + Variant182, + Variant183, + Variant184, + Variant185, + Variant186, + Variant187, + Variant188, + Variant189, + Variant190, + Variant191, + Variant192, + Variant193, + Variant194, + Variant195, + Variant196, + Variant197, + Variant198, + Variant199, + Variant200, + Variant201, + Variant202, + Variant203, + Variant204, + Variant205, + Variant206, + Variant207, + Variant208, + Variant209, + Variant210, + Variant211, + Variant212, + Variant213, + Variant214, + Variant215, + Variant216, + Variant217, + Variant218, + Variant219, + Variant220, + Variant221, + Variant222, + Variant223, + Variant224, + Variant225, + Variant226, + Variant227, + Variant228, + Variant229, + Variant230, + Variant231, + Variant232, + Variant233, + Variant234, + Variant235, + Variant236, + Variant237, + Variant238, + Variant239, + Variant240, + Variant241, + Variant242, + Variant243, + Variant244, + Variant245, + Variant246, + Variant247, + Variant248, + Variant249, + Variant250, + Variant251, + Variant252, + Variant253, + Variant254, + Variant255(bool), +} + +// +// Unaligned errors +// + +#[derive(Unaligned)] +#[repr(C)] +enum Unaligned1 { + A, +} + +#[derive(Unaligned)] +#[repr(u16)] +enum Unaligned2 { + A, +} + +#[derive(Unaligned)] +#[repr(i16)] +enum Unaligned3 { + A, +} + +#[derive(Unaligned)] +#[repr(u32)] +enum Unaligned4 { + A, +} + +#[derive(Unaligned)] +#[repr(i32)] +enum Unaligned5 { + A, +} + +#[derive(Unaligned)] +#[repr(u64)] +enum Unaligned6 { + A, +} + +#[derive(Unaligned)] +#[repr(i64)] +enum Unaligned7 { + A, +} + +#[derive(Unaligned)] +#[repr(usize)] +enum Unaligned8 { + A, +} + +#[derive(Unaligned)] +#[repr(isize)] +enum Unaligned9 { + A, +} + +#[derive(Unaligned)] +#[repr(u8, align(2))] +enum Unaligned10 { + A, +} + +#[derive(Unaligned)] +#[repr(i8, align(2))] +enum Unaligned11 { + A, +} + +#[derive(Unaligned)] +#[repr(align(1), align(2))] +enum Unaligned12 { + A, +} + +#[derive(Unaligned)] +#[repr(align(2), align(4))] +enum Unaligned13 { + A, +} + +// +// IntoBytes errors +// + +#[derive(IntoBytes)] +#[repr(u8)] +enum IntoBytes1 { + A, + B(u8), +} + +#[derive(IntoBytes)] +#[repr(C, align(4))] +struct Align4IntoBytes(u32); + +#[derive(IntoBytes)] +#[repr(u8)] +enum IntoBytes2 { + A(Align4IntoBytes), +} + +#[derive(IntoBytes)] +#[repr(u32)] +enum IntoBytes3 { + A(u32), + B(u16), +} + +#[derive(IntoBytes)] +enum IntoBytes4 { + A(u32), + B(u16), +} + +#[derive(IntoBytes)] +enum IntoBytes5 { + A(u32), +} + +#[derive(IntoBytes)] +#[repr(u8)] +enum IntoBytes6<T> { + A(T), +} diff --git 
a/vendor/zerocopy-derive/tests/ui-stable/enum.stderr b/vendor/zerocopy-derive/tests/ui-stable/enum.stderr new file mode 100644 index 00000000..b1cd6676 --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-stable/enum.stderr @@ -0,0 +1,518 @@ +error: unrecognized representation hint + --> tests/ui-stable/enum.rs:19:8 + | +19 | #[repr("foo")] + | ^^^^^ + +error: unrecognized representation hint + --> tests/ui-stable/enum.rs:25:8 + | +25 | #[repr(foo)] + | ^^^ + +error: must have #[repr(C)] or #[repr(Int)] attribute in order to guarantee this type's memory layout + --> tests/ui-stable/enum.rs:30:10 + | +30 | #[derive(FromBytes)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `FromBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: this conflicts with another representation hint + --> tests/ui-stable/enum.rs:37:12 + | +37 | #[repr(u8, u16)] + | ^^^ + +error: must have #[repr(C)] or #[repr(Int)] attribute in order to guarantee this type's memory layout + --> tests/ui-stable/enum.rs:42:10 + | +42 | #[derive(FromBytes)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `FromBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(C)] or #[repr(Int)] attribute in order to guarantee this type's memory layout + --> tests/ui-stable/enum.rs:69:10 + | +69 | #[derive(TryFromBytes)] + | ^^^^^^^^^^^^ + | + = note: this error originates in the derive macro `TryFromBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(C)] or #[repr(Int)] attribute in order to guarantee this type's memory layout + --> tests/ui-stable/enum.rs:74:10 + | +74 | #[derive(TryFromBytes)] + | ^^^^^^^^^^^^ + | + = note: this error originates in the derive macro `TryFromBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(C)] or #[repr(Int)] attribute in order to guarantee this type's memory layout + --> tests/ui-stable/enum.rs:92:10 + | +92 | #[derive(FromZeros)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `FromZeros` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(C)] or #[repr(Int)] attribute in order to guarantee this type's memory layout + --> tests/ui-stable/enum.rs:97:10 + | +97 | #[derive(FromZeros)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `FromZeros` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(C)] or #[repr(Int)] attribute in order to guarantee this type's memory layout + --> tests/ui-stable/enum.rs:103:10 + | +103 | #[derive(FromZeros)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `FromZeros` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: FromZeros only supported on enums with a variant that has a discriminant of `0` + --> tests/ui-stable/enum.rs:110:1 + | +110 | / #[repr(u8)] +111 | | enum FromZeros4 { +112 | | A = 1, +113 | | B = 2, +114 | | } + | |_^ + +error: FromZeros only supported on enums with a variant that has a discriminant of `0` + help: This enum has discriminants which are not literal integers. One of those may define or imply which variant has a discriminant of zero. Use a literal integer to define or imply the variant with a discriminant of zero. 
+ --> tests/ui-stable/enum.rs:119:1 + | +119 | / #[repr(i8)] +120 | | enum FromZeros5 { +121 | | A = NEGATIVE_ONE, +122 | | B, +123 | | } + | |_^ + +error: FromZeros only supported on enums with a variant that has a discriminant of `0` + --> tests/ui-stable/enum.rs:134:1 + | +134 | / #[repr(u8)] +135 | | enum FromZeros7 { +136 | | A = 1, +137 | | B(NotFromZeros), +138 | | } + | |_^ + +error: must have #[repr(C)] or #[repr(Int)] attribute in order to guarantee this type's memory layout + --> tests/ui-stable/enum.rs:144:10 + | +144 | #[derive(FromBytes)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `FromBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: `FromBytes` only supported on enums with `#[repr(...)]` attributes `u8`, `i8`, `u16`, or `i16` + --> tests/ui-stable/enum.rs:150:8 + | +150 | #[repr(C)] + | ^ + +error: `FromBytes` only supported on enums with `#[repr(...)]` attributes `u8`, `i8`, `u16`, or `i16` + --> tests/ui-stable/enum.rs:156:8 + | +156 | #[repr(usize)] + | ^^^^^ + +error: `FromBytes` only supported on enums with `#[repr(...)]` attributes `u8`, `i8`, `u16`, or `i16` + --> tests/ui-stable/enum.rs:162:8 + | +162 | #[repr(isize)] + | ^^^^^ + +error: `FromBytes` only supported on enums with `#[repr(...)]` attributes `u8`, `i8`, `u16`, or `i16` + --> tests/ui-stable/enum.rs:168:8 + | +168 | #[repr(u32)] + | ^^^ + +error: `FromBytes` only supported on enums with `#[repr(...)]` attributes `u8`, `i8`, `u16`, or `i16` + --> tests/ui-stable/enum.rs:174:8 + | +174 | #[repr(i32)] + | ^^^ + +error: `FromBytes` only supported on enums with `#[repr(...)]` attributes `u8`, `i8`, `u16`, or `i16` + --> tests/ui-stable/enum.rs:180:8 + | +180 | #[repr(u64)] + | ^^^ + +error: `FromBytes` only supported on enums with `#[repr(...)]` attributes `u8`, `i8`, `u16`, or `i16` + --> tests/ui-stable/enum.rs:186:8 + | +186 | #[repr(i64)] + | ^^^ + +error: must have #[repr(u8)] or #[repr(i8)] attribute in order to guarantee this type's alignment + --> tests/ui-stable/enum.rs:456:10 + | +456 | #[derive(Unaligned)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(u8)] or #[repr(i8)] attribute in order to guarantee this type's alignment + --> tests/ui-stable/enum.rs:462:10 + | +462 | #[derive(Unaligned)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(u8)] or #[repr(i8)] attribute in order to guarantee this type's alignment + --> tests/ui-stable/enum.rs:468:10 + | +468 | #[derive(Unaligned)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(u8)] or #[repr(i8)] attribute in order to guarantee this type's alignment + --> tests/ui-stable/enum.rs:474:10 + | +474 | #[derive(Unaligned)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(u8)] or #[repr(i8)] attribute in order to guarantee this type's alignment + --> tests/ui-stable/enum.rs:480:10 + | +480 | #[derive(Unaligned)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(u8)] or #[repr(i8)] attribute in 
order to guarantee this type's alignment + --> tests/ui-stable/enum.rs:486:10 + | +486 | #[derive(Unaligned)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(u8)] or #[repr(i8)] attribute in order to guarantee this type's alignment + --> tests/ui-stable/enum.rs:492:10 + | +492 | #[derive(Unaligned)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(u8)] or #[repr(i8)] attribute in order to guarantee this type's alignment + --> tests/ui-stable/enum.rs:498:10 + | +498 | #[derive(Unaligned)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(u8)] or #[repr(i8)] attribute in order to guarantee this type's alignment + --> tests/ui-stable/enum.rs:504:10 + | +504 | #[derive(Unaligned)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: cannot derive `Unaligned` on type with alignment greater than 1 + --> tests/ui-stable/enum.rs:511:12 + | +511 | #[repr(u8, align(2))] + | ^^^^^ + +error: cannot derive `Unaligned` on type with alignment greater than 1 + --> tests/ui-stable/enum.rs:517:12 + | +517 | #[repr(i8, align(2))] + | ^^^^^ + +error: this conflicts with another representation hint + --> tests/ui-stable/enum.rs:523:18 + | +523 | #[repr(align(1), align(2))] + | ^^^^^ + +error: this conflicts with another representation hint + --> tests/ui-stable/enum.rs:529:18 + | +529 | #[repr(align(2), align(4))] + | ^^^^^ + +error: must have #[repr(C)] or #[repr(Int)] attribute in order to guarantee this type's memory layout + --> tests/ui-stable/enum.rs:562:10 + | +562 | #[derive(IntoBytes)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(C)] or #[repr(Int)] attribute in order to guarantee this type's memory layout + --> tests/ui-stable/enum.rs:568:10 + | +568 | #[derive(IntoBytes)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: generic parameters may not be used in const operations + --> tests/ui-stable/enum.rs:576:7 + | +576 | A(T), + | ^ cannot perform const operation using `T` + | + = note: type parameters may not be used in const expressions + +error[E0565]: meta item in `repr` must be an identifier + --> tests/ui-stable/enum.rs:19:1 + | +19 | #[repr("foo")] + | ^^^^^^^^^^^^^^ + +error[E0552]: unrecognized representation hint + --> tests/ui-stable/enum.rs:25:8 + | +25 | #[repr(foo)] + | ^^^ + | + = help: valid reprs are `Rust` (default), `C`, `align`, `packed`, `transparent`, `simd`, `i8`, `u8`, `i16`, `u16`, `i32`, `u32`, `i64`, `u64`, `i128`, `u128`, `isize`, `usize` + = note: for more information, visit <https://doc.rust-lang.org/reference/type-layout.html?highlight=repr#representations> + +error[E0566]: conflicting representation hints + --> tests/ui-stable/enum.rs:37:8 + | +37 | #[repr(u8, u16)] + | ^^ ^^^ + | + = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! 
+ = note: for more information, see issue #68585 <https://github.com/rust-lang/rust/issues/68585> + = note: `#[deny(conflicting_repr_hints)]` (part of `#[deny(future_incompatible)]`) on by default + +error[E0277]: the trait bound `UnsafeCell<()>: Immutable` is not satisfied + --> tests/ui-stable/enum.rs:51:10 + | +51 | #[derive(Immutable)] + | ^^^^^^^^^ the trait `Immutable` is not implemented for `UnsafeCell<()>` + | + = note: Consider adding `#[derive(Immutable)]` to `UnsafeCell<()>` + = help: the following other types implement trait `Immutable`: + &T + &mut T + () + *const T + *mut T + F32<O> + F64<O> + I128<O> + and $N others + = help: see issue #48214 + = note: this error originates in the derive macro `Immutable` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `UnsafeCell<u8>: Immutable` is not satisfied + --> tests/ui-stable/enum.rs:59:10 + | +59 | #[derive(Immutable)] + | ^^^^^^^^^ the trait `Immutable` is not implemented for `UnsafeCell<u8>` + | + = note: Consider adding `#[derive(Immutable)]` to `UnsafeCell<u8>` + = help: the following other types implement trait `Immutable`: + &T + &mut T + () + *const T + *mut T + F32<O> + F64<O> + I128<O> + and $N others + = help: see issue #48214 + = note: this error originates in the derive macro `Immutable` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotTryFromBytes: TryFromBytes` is not satisfied + --> tests/ui-stable/enum.rs:82:10 + | +82 | #[derive(TryFromBytes)] + | ^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `TryFromBytes` is not implemented for `NotTryFromBytes` + --> tests/ui-stable/enum.rs:80:1 + | +80 | struct NotTryFromBytes; + | ^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(TryFromBytes)]` to `NotTryFromBytes` + = help: the following other types implement trait `TryFromBytes`: + () + *const T + *mut T + <FromZeros6 as TryFromBytes>::is_bit_valid::___ZerocopyVariantStruct_A + <TryFromBytes3 as TryFromBytes>::is_bit_valid::___ZerocopyVariantStruct_A + AtomicBool + AtomicI16 + AtomicI32 + and $N others + = help: see issue #48214 + = note: this error originates in the derive macro `TryFromBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotFromZeros: TryFromBytes` is not satisfied + --> tests/ui-stable/enum.rs:127:10 + | +127 | #[derive(FromZeros)] + | ^^^^^^^^^ unsatisfied trait bound + | +help: the trait `TryFromBytes` is not implemented for `NotFromZeros` + --> tests/ui-stable/enum.rs:125:1 + | +125 | struct NotFromZeros; + | ^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(TryFromBytes)]` to `NotFromZeros` + = help: the following other types implement trait `TryFromBytes`: + () + *const T + *mut T + <FromZeros6 as TryFromBytes>::is_bit_valid::___ZerocopyVariantStruct_A + <TryFromBytes3 as TryFromBytes>::is_bit_valid::___ZerocopyVariantStruct_A + AtomicBool + AtomicI16 + AtomicI32 + and $N others + = help: see issue #48214 + = note: this error originates in the derive macro `FromZeros` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotFromZeros: FromZeros` is not satisfied + --> tests/ui-stable/enum.rs:127:10 + | +127 | #[derive(FromZeros)] + | ^^^^^^^^^ unsatisfied trait bound + | +help: the trait `FromZeros` is not implemented for `NotFromZeros` + --> tests/ui-stable/enum.rs:125:1 + | +125 | struct NotFromZeros; + | ^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(FromZeros)]` to 
`NotFromZeros` + = help: the following other types implement trait `FromZeros`: + () + *const T + *mut T + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + and $N others + = help: see issue #48214 + = note: this error originates in the derive macro `FromZeros` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `bool: FromBytes` is not satisfied + --> tests/ui-stable/enum.rs:191:10 + | +191 | #[derive(FromBytes)] + | ^^^^^^^^^ the trait `FromBytes` is not implemented for `bool` + | + = note: Consider adding `#[derive(FromBytes)]` to `bool` + = help: the following other types implement trait `FromBytes`: + () + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + AtomicU16 + AtomicU32 + and $N others + = help: see issue #48214 + = note: this error originates in the derive macro `FromBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: `IntoBytes1` has 1 total byte(s) of padding + --> tests/ui-stable/enum.rs:538:10 + | +538 | #[derive(IntoBytes)] + | ^^^^^^^^^ types with padding cannot implement `IntoBytes` + | + = note: consider using `zerocopy::Unalign` to lower the alignment of individual fields + = note: consider adding explicit fields where padding would be + = note: consider using `#[repr(packed)]` to remove padding + = help: the trait `PaddingFree<IntoBytes1, 1>` is not implemented for `()` + but trait `PaddingFree<IntoBytes1, 0>` is implemented for it + = help: see issue #48214 + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: `IntoBytes2` has 3 total byte(s) of padding + --> tests/ui-stable/enum.rs:549:10 + | +549 | #[derive(IntoBytes)] + | ^^^^^^^^^ types with padding cannot implement `IntoBytes` + | + = note: consider using `zerocopy::Unalign` to lower the alignment of individual fields + = note: consider adding explicit fields where padding would be + = note: consider using `#[repr(packed)]` to remove padding + = help: the trait `PaddingFree<IntoBytes2, 3>` is not implemented for `()` + but trait `PaddingFree<IntoBytes2, 0>` is implemented for it + = help: see issue #48214 + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: `IntoBytes3` has 2 total byte(s) of padding + --> tests/ui-stable/enum.rs:555:10 + | +555 | #[derive(IntoBytes)] + | ^^^^^^^^^ types with padding cannot implement `IntoBytes` + | + = note: consider using `zerocopy::Unalign` to lower the alignment of individual fields + = note: consider adding explicit fields where padding would be + = note: consider using `#[repr(packed)]` to remove padding + = help: the trait `PaddingFree<IntoBytes3, 2>` is not implemented for `()` + but trait `PaddingFree<IntoBytes3, 0>` is implemented for it + = help: see issue #48214 + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: generic `Self` types are currently not permitted in anonymous constants + --> tests/ui-stable/enum.rs:573:10 + | +573 | #[derive(IntoBytes)] + | ^^^^^^^^^ + | +note: not a concrete type + --> tests/ui-stable/enum.rs:573:10 + | +573 | #[derive(IntoBytes)] + | ^^^^^^^^^ + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `bool: FromBytes` is not satisfied + --> tests/ui-stable/enum.rs:191:10 + 
| +191 | #[derive(FromBytes)] + | ^^^^^^^^^ the trait `FromBytes` is not implemented for `bool` + | + = note: Consider adding `#[derive(FromBytes)]` to `bool` + = help: the following other types implement trait `FromBytes`: + () + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + AtomicU16 + AtomicU32 + and $N others +note: required for `FooU8` to implement `FromBytes` + --> tests/ui-stable/enum.rs:191:10 + | +191 | #[derive(FromBytes)] + | ^^^^^^^^^ unsatisfied trait bound introduced in this `derive` macro +note: required by a bound in `assert_is_from_bytes` + --> tests/ui-stable/enum.rs:191:10 + | +191 | #[derive(FromBytes)] + | ^^^^^^^^^ required by this bound in `assert_is_from_bytes` + = note: this error originates in the derive macro `FromBytes` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy-derive/tests/ui-stable/enum_from_bytes_u8_too_few.rs b/vendor/zerocopy-derive/tests/ui-stable/enum_from_bytes_u8_too_few.rs new file mode 100644 index 00000000..1b1bed31 --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-stable/enum_from_bytes_u8_too_few.rs @@ -0,0 +1,272 @@ +// Copyright 2019 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +#[macro_use] +extern crate zerocopy; + +fn main() {} + +#[derive(FromBytes)] +#[repr(u8)] +enum Foo { + Variant0, + Variant1, + Variant2, + Variant3, + Variant4, + Variant5, + Variant6, + Variant7, + Variant8, + Variant9, + Variant10, + Variant11, + Variant12, + Variant13, + Variant14, + Variant15, + Variant16, + Variant17, + Variant18, + Variant19, + Variant20, + Variant21, + Variant22, + Variant23, + Variant24, + Variant25, + Variant26, + Variant27, + Variant28, + Variant29, + Variant30, + Variant31, + Variant32, + Variant33, + Variant34, + Variant35, + Variant36, + Variant37, + Variant38, + Variant39, + Variant40, + Variant41, + Variant42, + Variant43, + Variant44, + Variant45, + Variant46, + Variant47, + Variant48, + Variant49, + Variant50, + Variant51, + Variant52, + Variant53, + Variant54, + Variant55, + Variant56, + Variant57, + Variant58, + Variant59, + Variant60, + Variant61, + Variant62, + Variant63, + Variant64, + Variant65, + Variant66, + Variant67, + Variant68, + Variant69, + Variant70, + Variant71, + Variant72, + Variant73, + Variant74, + Variant75, + Variant76, + Variant77, + Variant78, + Variant79, + Variant80, + Variant81, + Variant82, + Variant83, + Variant84, + Variant85, + Variant86, + Variant87, + Variant88, + Variant89, + Variant90, + Variant91, + Variant92, + Variant93, + Variant94, + Variant95, + Variant96, + Variant97, + Variant98, + Variant99, + Variant100, + Variant101, + Variant102, + Variant103, + Variant104, + Variant105, + Variant106, + Variant107, + Variant108, + Variant109, + Variant110, + Variant111, + Variant112, + Variant113, + Variant114, + Variant115, + Variant116, + Variant117, + Variant118, + Variant119, + Variant120, + Variant121, + Variant122, + Variant123, + Variant124, + Variant125, + Variant126, + Variant127, + Variant128, + Variant129, + Variant130, + Variant131, + Variant132, + Variant133, + Variant134, + Variant135, + Variant136, + Variant137, + Variant138, + Variant139, + Variant140, + Variant141, + Variant142, + Variant143, + Variant144, + 
Variant145, + Variant146, + Variant147, + Variant148, + Variant149, + Variant150, + Variant151, + Variant152, + Variant153, + Variant154, + Variant155, + Variant156, + Variant157, + Variant158, + Variant159, + Variant160, + Variant161, + Variant162, + Variant163, + Variant164, + Variant165, + Variant166, + Variant167, + Variant168, + Variant169, + Variant170, + Variant171, + Variant172, + Variant173, + Variant174, + Variant175, + Variant176, + Variant177, + Variant178, + Variant179, + Variant180, + Variant181, + Variant182, + Variant183, + Variant184, + Variant185, + Variant186, + Variant187, + Variant188, + Variant189, + Variant190, + Variant191, + Variant192, + Variant193, + Variant194, + Variant195, + Variant196, + Variant197, + Variant198, + Variant199, + Variant200, + Variant201, + Variant202, + Variant203, + Variant204, + Variant205, + Variant206, + Variant207, + Variant208, + Variant209, + Variant210, + Variant211, + Variant212, + Variant213, + Variant214, + Variant215, + Variant216, + Variant217, + Variant218, + Variant219, + Variant220, + Variant221, + Variant222, + Variant223, + Variant224, + Variant225, + Variant226, + Variant227, + Variant228, + Variant229, + Variant230, + Variant231, + Variant232, + Variant233, + Variant234, + Variant235, + Variant236, + Variant237, + Variant238, + Variant239, + Variant240, + Variant241, + Variant242, + Variant243, + Variant244, + Variant245, + Variant246, + Variant247, + Variant248, + Variant249, + Variant250, + Variant251, + Variant252, + Variant253, + Variant254, +} diff --git a/vendor/zerocopy-derive/tests/ui-stable/enum_from_bytes_u8_too_few.stderr b/vendor/zerocopy-derive/tests/ui-stable/enum_from_bytes_u8_too_few.stderr new file mode 100644 index 00000000..02322be1 --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-stable/enum_from_bytes_u8_too_few.stderr @@ -0,0 +1,11 @@ +error: FromBytes only supported on repr(u8) enum with 256 variants + --> tests/ui-stable/enum_from_bytes_u8_too_few.rs:15:1 + | + 15 | / #[repr(u8)] + 16 | | enum Foo { + 17 | | Variant0, + 18 | | Variant1, +... | +271 | | Variant254, +272 | | } + | |_^ diff --git a/vendor/zerocopy-derive/tests/ui-stable/late_compile_pass.rs b/vendor/zerocopy-derive/tests/ui-stable/late_compile_pass.rs new file mode 100644 index 00000000..d8aeb369 --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-stable/late_compile_pass.rs @@ -0,0 +1,85 @@ +// Copyright 2019 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +#[macro_use] +extern crate zerocopy; + +#[path = "../include.rs"] +mod util; + +use zerocopy::KnownLayout; + +use self::util::util::{NotZerocopy, AU16}; + +fn main() {} + +// These tests cause errors which are generated by a later compilation pass than +// the other errors we generate, and so if they're compiled in the same file, +// the compiler will never get to that pass, and so we won't get the errors. 
+ +// +// TryFromBytes errors +// + +#[derive(TryFromBytes)] +struct TryFromBytes1 { + value: NotZerocopy, +} + +// +// FromZeros errors +// + +#[derive(FromZeros)] +struct FromZeros1 { + value: NotZerocopy, +} + +// +// FromBytes errors +// + +#[derive(FromBytes)] +struct FromBytes1 { + value: NotZerocopy, +} + +// +// IntoBytes errors +// + +#[derive(IntoBytes)] +#[repr(C)] +struct IntoBytes1 { + value: NotZerocopy, +} + +// +// Unaligned errors +// + +#[derive(Unaligned)] +#[repr(C)] +struct Unaligned1 { + aligned: AU16, +} + +// This specifically tests a bug we had in an old version of the code in which +// the trait bound would only be enforced for the first field's type. +#[derive(Unaligned)] +#[repr(C)] +struct Unaligned2 { + unaligned: u8, + aligned: AU16, +} + +#[derive(Unaligned)] +#[repr(transparent)] +struct Unaligned3 { + aligned: AU16, +} diff --git a/vendor/zerocopy-derive/tests/ui-stable/late_compile_pass.stderr b/vendor/zerocopy-derive/tests/ui-stable/late_compile_pass.stderr new file mode 100644 index 00000000..edc57e89 --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-stable/late_compile_pass.stderr @@ -0,0 +1,291 @@ +warning: unused import: `zerocopy::KnownLayout` + --> tests/ui-stable/late_compile_pass.rs:15:5 + | +15 | use zerocopy::KnownLayout; + | ^^^^^^^^^^^^^^^^^^^^^ + | + = note: `#[warn(unused_imports)]` (part of `#[warn(unused)]`) on by default + +error[E0277]: the trait bound `NotZerocopy: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-stable/late_compile_pass.rs:29:10 + | +29 | #[derive(TryFromBytes)] + | ^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::TryFromBytes` is not implemented for `NotZerocopy` + --> tests/ui-stable/../include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(TryFromBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::TryFromBytes`: + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + and $N others + = help: see issue #48214 + = note: this error originates in the derive macro `TryFromBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-stable/late_compile_pass.rs:38:10 + | +38 | #[derive(FromZeros)] + | ^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::TryFromBytes` is not implemented for `NotZerocopy` + --> tests/ui-stable/../include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(TryFromBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::TryFromBytes`: + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + and $N others + = help: see issue #48214 + = note: this error originates in the derive macro `FromZeros` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy: FromZeros` is not satisfied + --> tests/ui-stable/late_compile_pass.rs:38:10 + | +38 | #[derive(FromZeros)] + | ^^^^^^^^^ unsatisfied trait bound + | +help: the trait `FromZeros` is not implemented for `NotZerocopy` + --> tests/ui-stable/../include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(FromZeros)]` to `NotZerocopy` + = help: the following other types implement trait `FromZeros`: + () + *const T + *mut T 
+ AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + and $N others + = help: see issue #48214 + = note: this error originates in the derive macro `FromZeros` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-stable/late_compile_pass.rs:47:10 + | +47 | #[derive(FromBytes)] + | ^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::TryFromBytes` is not implemented for `NotZerocopy` + --> tests/ui-stable/../include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(TryFromBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::TryFromBytes`: + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + and $N others + = help: see issue #48214 + = note: this error originates in the derive macro `FromBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy: FromZeros` is not satisfied + --> tests/ui-stable/late_compile_pass.rs:47:10 + | +47 | #[derive(FromBytes)] + | ^^^^^^^^^ unsatisfied trait bound + | +help: the trait `FromZeros` is not implemented for `NotZerocopy` + --> tests/ui-stable/../include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(FromZeros)]` to `NotZerocopy` + = help: the following other types implement trait `FromZeros`: + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + and $N others + = help: see issue #48214 + = note: this error originates in the derive macro `FromBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy: zerocopy::FromBytes` is not satisfied + --> tests/ui-stable/late_compile_pass.rs:47:10 + | +47 | #[derive(FromBytes)] + | ^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::FromBytes` is not implemented for `NotZerocopy` + --> tests/ui-stable/../include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(FromBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::FromBytes`: + () + AU16 + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + AtomicU16 + and $N others + = help: see issue #48214 + = note: this error originates in the derive macro `FromBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy: zerocopy::IntoBytes` is not satisfied + --> tests/ui-stable/late_compile_pass.rs:56:10 + | +56 | #[derive(IntoBytes)] + | ^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::IntoBytes` is not implemented for `NotZerocopy` + --> tests/ui-stable/../include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(IntoBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::IntoBytes`: + () + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + and $N others + = help: see issue #48214 + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `AU16: zerocopy::Unaligned` is not satisfied + --> tests/ui-stable/late_compile_pass.rs:66:10 + | +66 | #[derive(Unaligned)] + | 
^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::Unaligned` is not implemented for `AU16` + --> tests/ui-stable/../include.rs + | + | pub struct AU16(pub u16); + | ^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(Unaligned)]` to `AU16` + = help: the following other types implement trait `zerocopy::Unaligned`: + () + AtomicBool + AtomicI8 + AtomicU8 + Cell<T> + F32<O> + F64<O> + I128<O> + and $N others + = help: see issue #48214 + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `AU16: zerocopy::Unaligned` is not satisfied + --> tests/ui-stable/late_compile_pass.rs:74:10 + | +74 | #[derive(Unaligned)] + | ^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::Unaligned` is not implemented for `AU16` + --> tests/ui-stable/../include.rs + | + | pub struct AU16(pub u16); + | ^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(Unaligned)]` to `AU16` + = help: the following other types implement trait `zerocopy::Unaligned`: + () + AtomicBool + AtomicI8 + AtomicU8 + Cell<T> + F32<O> + F64<O> + I128<O> + and $N others + = help: see issue #48214 + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `AU16: zerocopy::Unaligned` is not satisfied + --> tests/ui-stable/late_compile_pass.rs:81:10 + | +81 | #[derive(Unaligned)] + | ^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::Unaligned` is not implemented for `AU16` + --> tests/ui-stable/../include.rs + | + | pub struct AU16(pub u16); + | ^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(Unaligned)]` to `AU16` + = help: the following other types implement trait `zerocopy::Unaligned`: + () + AtomicBool + AtomicI8 + AtomicU8 + Cell<T> + F32<O> + F64<O> + I128<O> + and $N others + = help: see issue #48214 + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy: zerocopy::FromBytes` is not satisfied + --> tests/ui-stable/late_compile_pass.rs:47:10 + | +47 | #[derive(FromBytes)] + | ^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::FromBytes` is not implemented for `NotZerocopy` + --> tests/ui-stable/../include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(FromBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::FromBytes`: + () + AU16 + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + AtomicU16 + and $N others +note: required for `FromBytes1` to implement `zerocopy::FromBytes` + --> tests/ui-stable/late_compile_pass.rs:47:10 + | +47 | #[derive(FromBytes)] + | ^^^^^^^^^ unsatisfied trait bound introduced in this `derive` macro +note: required by a bound in `<FromBytes1 as zerocopy::TryFromBytes>::is_bit_valid::assert_is_from_bytes` + --> tests/ui-stable/late_compile_pass.rs:47:10 + | +47 | #[derive(FromBytes)] + | ^^^^^^^^^ required by this bound in `assert_is_from_bytes` + = note: this error originates in the derive macro `FromBytes` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy-derive/tests/ui-stable/mid_compile_pass.rs b/vendor/zerocopy-derive/tests/ui-stable/mid_compile_pass.rs new file mode 100644 index 00000000..e0c4bc57 --- /dev/null +++ 
b/vendor/zerocopy-derive/tests/ui-stable/mid_compile_pass.rs @@ -0,0 +1,61 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::KnownLayout; + +fn main() {} + +// These tests cause errors which are generated by a later compilation pass than +// the other errors we generate, and so if they're compiled in the same file, +// the compiler will never get to that pass, and so we won't get the errors. + +// +// KnownLayout errors +// + +fn assert_kl<T: ?Sized + KnownLayout>(_: &T) {} + +// | `repr(C)`? | generic? | `KnownLayout`? | `Sized`? | Type Name | +// | N | Y | N | N | KL04 | +#[derive(KnownLayout)] +struct KL04<T: ?Sized>(u8, T); + +fn test_kl04<T: ?Sized>(kl: &KL04<T>) { + assert_kl(kl); +} + +// | `repr(C)`? | generic? | `KnownLayout`? | `Sized`? | Type Name | +// | N | Y | Y | N | KL06 | +#[derive(KnownLayout)] +struct KL06<T: ?Sized + KnownLayout>(u8, T); + +fn test_kl06<T: ?Sized + KnownLayout>(kl: &KL06<T>) { + assert_kl(kl); +} + +// | `repr(C)`? | generic? | `KnownLayout`? | `Sized`? | Type Name | +// | Y | Y | N | N | KL12 | +#[derive(KnownLayout)] +#[repr(C)] +struct KL12<T: ?Sized>(u8, T); + +fn test_kl12<T: ?Sized>(kl: &KL12<T>) { + assert_kl(kl) +} + +// | `repr(C)`? | generic? | `KnownLayout`? | `Sized`? | Type Name | +// | Y | Y | N | Y | KL13 | +#[derive(KnownLayout)] +#[repr(C)] +struct KL13<T>(u8, T); + +fn test_kl13<T>(t: T) -> impl KnownLayout { + KL13(0u8, t) +} diff --git a/vendor/zerocopy-derive/tests/ui-stable/mid_compile_pass.stderr b/vendor/zerocopy-derive/tests/ui-stable/mid_compile_pass.stderr new file mode 100644 index 00000000..3a937f8f --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-stable/mid_compile_pass.stderr @@ -0,0 +1,109 @@ +error[E0277]: the trait bound `T: KnownLayout` is not satisfied + --> tests/ui-stable/mid_compile_pass.rs:59:26 + | +59 | fn test_kl13<T>(t: T) -> impl KnownLayout { + | ^^^^^^^^^^^^^^^^ the trait `KnownLayout` is not implemented for `T` +60 | KL13(0u8, t) + | ------------ return type was inferred to be `KL13<T>` here + | + = note: Consider adding `#[derive(KnownLayout)]` to `T` +note: required for `KL13<T>` to implement `KnownLayout` + --> tests/ui-stable/mid_compile_pass.rs:55:10 + | +55 | #[derive(KnownLayout)] + | ^^^^^^^^^^^ unsatisfied trait bound introduced in this `derive` macro + = note: this error originates in the derive macro `KnownLayout` (in Nightly builds, run with -Z macro-backtrace for more info) +help: consider restricting type parameter `T` with trait `KnownLayout` + | +59 | fn test_kl13<T: zerocopy::KnownLayout>(t: T) -> impl KnownLayout { + | +++++++++++++++++++++++ + +error[E0277]: the size for values of type `T` cannot be known at compilation time + --> tests/ui-stable/mid_compile_pass.rs:31:15 + | +30 | fn test_kl04<T: ?Sized>(kl: &KL04<T>) { + | - this type parameter needs to be `Sized` +31 | assert_kl(kl); + | --------- ^^ doesn't have a size known at compile-time + | | + | required by a bound introduced by this call + | +note: required because it appears within the type `KL04<T>` + --> tests/ui-stable/mid_compile_pass.rs:28:8 + | +28 | struct KL04<T: ?Sized>(u8, T); + | ^^^^ +note: required for `KL04<T>` to implement `KnownLayout` + 
--> tests/ui-stable/mid_compile_pass.rs:27:10 + | +27 | #[derive(KnownLayout)] + | ^^^^^^^^^^^ unsatisfied trait bound introduced in this `derive` macro +note: required by a bound in `assert_kl` + --> tests/ui-stable/mid_compile_pass.rs:23:26 + | +23 | fn assert_kl<T: ?Sized + KnownLayout>(_: &T) {} + | ^^^^^^^^^^^ required by this bound in `assert_kl` + = note: this error originates in the derive macro `KnownLayout` (in Nightly builds, run with -Z macro-backtrace for more info) +help: consider removing the `?Sized` bound to make the type parameter `Sized` + | +30 - fn test_kl04<T: ?Sized>(kl: &KL04<T>) { +30 + fn test_kl04<T>(kl: &KL04<T>) { + | + +error[E0277]: the size for values of type `T` cannot be known at compilation time + --> tests/ui-stable/mid_compile_pass.rs:40:15 + | +39 | fn test_kl06<T: ?Sized + KnownLayout>(kl: &KL06<T>) { + | - this type parameter needs to be `Sized` +40 | assert_kl(kl); + | --------- ^^ doesn't have a size known at compile-time + | | + | required by a bound introduced by this call + | +note: required because it appears within the type `KL06<T>` + --> tests/ui-stable/mid_compile_pass.rs:37:8 + | +37 | struct KL06<T: ?Sized + KnownLayout>(u8, T); + | ^^^^ +note: required for `KL06<T>` to implement `KnownLayout` + --> tests/ui-stable/mid_compile_pass.rs:36:10 + | +36 | #[derive(KnownLayout)] + | ^^^^^^^^^^^ unsatisfied trait bound introduced in this `derive` macro +note: required by a bound in `assert_kl` + --> tests/ui-stable/mid_compile_pass.rs:23:26 + | +23 | fn assert_kl<T: ?Sized + KnownLayout>(_: &T) {} + | ^^^^^^^^^^^ required by this bound in `assert_kl` + = note: this error originates in the derive macro `KnownLayout` (in Nightly builds, run with -Z macro-backtrace for more info) +help: consider removing the `?Sized` bound to make the type parameter `Sized` + | +39 - fn test_kl06<T: ?Sized + KnownLayout>(kl: &KL06<T>) { +39 + fn test_kl06<T: KnownLayout>(kl: &KL06<T>) { + | + +error[E0277]: the trait bound `KL12<T>: KnownLayout` is not satisfied + --> tests/ui-stable/mid_compile_pass.rs:50:15 + | +50 | assert_kl(kl) + | --------- ^^ the trait `KnownLayout` is not implemented for `KL12<T>` + | | + | required by a bound introduced by this call + | +note: required for `KL12<T>` to implement `KnownLayout` + --> tests/ui-stable/mid_compile_pass.rs:45:10 + | +45 | #[derive(KnownLayout)] + | ^^^^^^^^^^^ unsatisfied trait bound introduced in this `derive` macro +note: required by a bound in `assert_kl` + --> tests/ui-stable/mid_compile_pass.rs:23:26 + | +23 | fn assert_kl<T: ?Sized + KnownLayout>(_: &T) {} + | ^^^^^^^^^^^ required by this bound in `assert_kl` + = note: this error originates in the derive macro `KnownLayout` (in Nightly builds, run with -Z macro-backtrace for more info) +help: consider borrowing here + | +50 | assert_kl(&kl) + | + +50 | assert_kl(&mut kl) + | ++++ diff --git a/vendor/zerocopy-derive/tests/ui-stable/struct.rs b/vendor/zerocopy-derive/tests/ui-stable/struct.rs new file mode 100644 index 00000000..54a26b29 --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-stable/struct.rs @@ -0,0 +1,254 @@ +// Copyright 2019 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +#[macro_use] +extern crate zerocopy; + +#[path = "../include.rs"] +mod util; + +use zerocopy::{IntoBytes, KnownLayout}; + +use self::util::util::AU16; + +fn main() {} + +// +// KnownLayout errors +// + +struct NotKnownLayout; + +struct NotKnownLayoutDst([u8]); + +// | `repr(C)`? | generic? | `KnownLayout`? | `Sized`? | Type Name | +// | N | N | N | N | KL00 | +#[derive(KnownLayout)] +struct KL00(u8, NotKnownLayoutDst); + +// | `repr(C)`? | generic? | `KnownLayout`? | `Sized`? | Type Name | +// | N | N | Y | N | KL02 | +#[derive(KnownLayout)] +struct KL02(u8, [u8]); + +// | `repr(C)`? | generic? | `KnownLayout`? | `Sized`? | Type Name | +// | Y | N | N | N | KL08 | +#[derive(KnownLayout)] +#[repr(C)] +struct KL08(u8, NotKnownLayoutDst); + +// | `repr(C)`? | generic? | `KnownLayout`? | `Sized`? | Type Name | +// | Y | N | N | Y | KL09 | +#[derive(KnownLayout)] +#[repr(C)] +struct KL09(NotKnownLayout, NotKnownLayout); + +// +// Immutable errors +// + +#[derive(Immutable)] +struct Immutable1 { + a: core::cell::UnsafeCell<()>, +} + +#[derive(Immutable)] +struct Immutable2 { + a: [core::cell::UnsafeCell<u8>; 0], +} + +// +// TryFromBytes errors +// + +#[derive(TryFromBytes)] +#[repr(packed)] +struct TryFromBytesPacked { + foo: AU16, +} + +#[derive(TryFromBytes)] +#[repr(packed(1))] +struct TryFromBytesPackedN { + foo: AU16, +} + +#[derive(TryFromBytes)] +#[repr(C, packed)] +struct TryFromBytesCPacked { + foo: AU16, +} + +#[derive(TryFromBytes)] +#[repr(C, packed(1))] +struct TryFromBytesCPackedN { + foo: AU16, +} + +// +// IntoBytes errors +// + +// Since `IntoBytes1` has at least one generic parameter, an `IntoBytes` impl is +// emitted in which each field type is given an `Unaligned` bound. Since `foo`'s +// type doesn't implement `Unaligned`, this should fail. +#[derive(IntoBytes)] +#[repr(C)] +struct IntoBytes1<T> { + foo: AU16, + bar: T, +} + +#[derive(IntoBytes)] +#[repr(C)] +struct IntoBytes2 { + foo: u8, + bar: AU16, +} + +#[derive(IntoBytes)] +#[repr(C, packed(2))] +struct IntoBytes3 { + foo: u8, + // We'd prefer to use AU64 here, but you can't use aligned types in + // packed structs. + bar: u64, +} + +type SliceU8 = [u8]; + +// Padding between `u8` and `SliceU8`. `SliceU8` doesn't syntactically look like +// a slice, so this case is handled by our `Sized` support. +// +// NOTE(#1708): This exists to ensure that our error messages are good when a +// field is unsized. +#[derive(IntoBytes)] +#[repr(C)] +struct IntoBytes4 { + a: u8, + b: SliceU8, +} + +// Padding between `u8` and `[u16]`. `[u16]` is syntactically identifiable as a +// slice, so this case is handled by our `repr(C)` slice DST support. +#[derive(IntoBytes)] +#[repr(C)] +struct IntoBytes5 { + a: u8, + b: [u16], +} + +// Trailing padding after `[u8]`. `[u8]` is syntactically identifiable as a +// slice, so this case is handled by our `repr(C)` slice DST support. +#[derive(IntoBytes)] +#[repr(C)] +struct IntoBytes6 { + a: u16, + b: [u8], +} + +// Padding between `u8` and `u16` and also trailing padding after `[u8]`. `[u8]` +// is syntactically identifiable as a slice, so this case is handled by our +// `repr(C)` slice DST support. 
+#[derive(IntoBytes)] +#[repr(C)] +struct IntoBytes7 { + a: u8, + b: u16, + c: [u8], +} + +#[derive(IntoBytes)] +#[repr(C, C)] // zerocopy-derive conservatively treats these as conflicting reprs +struct IntoBytes8 { + a: u8, +} + +#[derive(IntoBytes)] +struct IntoBytes9<T> { + t: T, +} + +#[derive(IntoBytes)] +#[repr(packed(2))] +struct IntoBytes10<T> { + t: T, +} + +// `repr(C, packed(2))` is not equivalent to `repr(C, packed)`. +#[derive(IntoBytes)] +#[repr(C, packed(2))] +struct IntoBytes11<T> { + t0: T, + // Add a second field to avoid triggering the "repr(C) struct with one + // field" special case. + t1: T, +} + +fn is_into_bytes_11<T: IntoBytes>() { + if false { + is_into_bytes_11::<IntoBytes11<AU16>>(); + } +} + +// `repr(C, align(2))` is not sufficient to guarantee the layout of this type. +#[derive(IntoBytes)] +#[repr(C, align(2))] +struct IntoBytes12<T> { + t: T, +} + +// +// Unaligned errors +// + +#[derive(Unaligned)] +#[repr(C, align(2))] +struct Unaligned1; + +#[derive(Unaligned)] +#[repr(transparent, align(2))] +struct Unaligned2 { + foo: u8, +} + +#[derive(Unaligned)] +#[repr(packed, align(2))] +struct Unaligned3; + +#[derive(Unaligned)] +#[repr(align(1), align(2))] +struct Unaligned4; + +#[derive(Unaligned)] +#[repr(align(2), align(4))] +struct Unaligned5; + +#[derive(Unaligned)] +struct Unaligned6; + +#[derive(Unaligned)] +#[repr(packed(2))] +struct Unaligned7; + +// Test the error message emitted when conflicting reprs appear on different +// lines. On the nightly compiler, this emits a "joint span" that spans both +// problematic repr token trees and everything in between. +#[derive(Copy, Clone)] +#[repr(packed(2), C)] +#[derive(Unaligned)] +#[repr(C, packed(2))] +struct WeirdReprSpan; + +#[derive(SplitAt)] +#[repr(C)] +struct SplitAtNotKnownLayout([u8]); + +#[derive(SplitAt, KnownLayout)] +#[repr(C)] +struct SplitAtSized(u8); diff --git a/vendor/zerocopy-derive/tests/ui-stable/struct.stderr b/vendor/zerocopy-derive/tests/ui-stable/struct.stderr new file mode 100644 index 00000000..e2354dd4 --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-stable/struct.stderr @@ -0,0 +1,470 @@ +error: this conflicts with another representation hint + --> tests/ui-stable/struct.rs:167:11 + | +167 | #[repr(C, C)] // zerocopy-derive conservatively treats these as conflicting reprs + | ^ + +error: must have a non-align #[repr(...)] attribute in order to guarantee this type's memory layout + --> tests/ui-stable/struct.rs:172:10 + | +172 | #[derive(IntoBytes)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have a non-align #[repr(...)] attribute in order to guarantee this type's memory layout + --> tests/ui-stable/struct.rs:177:10 + | +177 | #[derive(IntoBytes)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have a non-align #[repr(...)] attribute in order to guarantee this type's memory layout + --> tests/ui-stable/struct.rs:200:10 + | +200 | #[derive(IntoBytes)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: cannot derive `Unaligned` on type with alignment greater than 1 + --> tests/ui-stable/struct.rs:211:11 + | +211 | #[repr(C, align(2))] + | ^^^^^ + +error: this conflicts with another representation hint + --> tests/ui-stable/struct.rs:215:8 + | +215 | 
#[repr(transparent, align(2))] + | ^^^^^^^^^^^ + +error: this conflicts with another representation hint + --> tests/ui-stable/struct.rs:221:16 + | +221 | #[repr(packed, align(2))] + | ^^^^^ + +error: this conflicts with another representation hint + --> tests/ui-stable/struct.rs:225:18 + | +225 | #[repr(align(1), align(2))] + | ^^^^^ + +error: this conflicts with another representation hint + --> tests/ui-stable/struct.rs:229:18 + | +229 | #[repr(align(2), align(4))] + | ^^^^^ + +error: must have #[repr(C)], #[repr(transparent)], or #[repr(packed)] attribute in order to guarantee this type's alignment + --> tests/ui-stable/struct.rs:232:10 + | +232 | #[derive(Unaligned)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(C)], #[repr(transparent)], or #[repr(packed)] attribute in order to guarantee this type's alignment + --> tests/ui-stable/struct.rs:235:10 + | +235 | #[derive(Unaligned)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: this conflicts with another representation hint + --> tests/ui-stable/struct.rs:245:8 + | +245 | #[repr(C, packed(2))] + | ^ + +error[E0692]: transparent struct cannot have other repr hints + --> tests/ui-stable/struct.rs:215:8 + | +215 | #[repr(transparent, align(2))] + | ^^^^^^^^^^^ ^^^^^^^^ + +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> tests/ui-stable/struct.rs:31:10 + | +31 | #[derive(KnownLayout)] + | ^^^^^^^^^^^ doesn't have a size known at compile-time + | + = help: within `KL00`, the trait `Sized` is not implemented for `[u8]` +note: required because it appears within the type `KL00` + --> tests/ui-stable/struct.rs:32:8 + | +32 | struct KL00(u8, NotKnownLayoutDst); + | ^^^^ + = help: see issue #48214 + = note: this error originates in the derive macro `KnownLayout` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> tests/ui-stable/struct.rs:36:10 + | +36 | #[derive(KnownLayout)] + | ^^^^^^^^^^^ doesn't have a size known at compile-time + | + = help: within `KL02`, the trait `Sized` is not implemented for `[u8]` +note: required because it appears within the type `KL02` + --> tests/ui-stable/struct.rs:37:8 + | +37 | struct KL02(u8, [u8]); + | ^^^^ + = help: see issue #48214 + = note: this error originates in the derive macro `KnownLayout` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotKnownLayoutDst: zerocopy::KnownLayout` is not satisfied + --> tests/ui-stable/struct.rs:41:10 + | +41 | #[derive(KnownLayout)] + | ^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::KnownLayout` is not implemented for `NotKnownLayoutDst` + --> tests/ui-stable/struct.rs:27:1 + | +27 | struct NotKnownLayoutDst([u8]); + | ^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(KnownLayout)]` to `NotKnownLayoutDst` + = help: the following other types implement trait `zerocopy::KnownLayout`: + &T + &mut T + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + and $N others + = help: see issue #48214 + = note: this error originates in the derive macro `KnownLayout` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotKnownLayout: zerocopy::KnownLayout` is not satisfied + --> 
tests/ui-stable/struct.rs:47:10 + | +47 | #[derive(KnownLayout)] + | ^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::KnownLayout` is not implemented for `NotKnownLayout` + --> tests/ui-stable/struct.rs:25:1 + | +25 | struct NotKnownLayout; + | ^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(KnownLayout)]` to `NotKnownLayout` + = help: the following other types implement trait `zerocopy::KnownLayout`: + &T + &mut T + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + and $N others + = help: see issue #48214 + = note: this error originates in the derive macro `KnownLayout` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `UnsafeCell<()>: zerocopy::Immutable` is not satisfied + --> tests/ui-stable/struct.rs:55:10 + | +55 | #[derive(Immutable)] + | ^^^^^^^^^ the trait `zerocopy::Immutable` is not implemented for `UnsafeCell<()>` + | + = note: Consider adding `#[derive(Immutable)]` to `UnsafeCell<()>` + = help: the following other types implement trait `zerocopy::Immutable`: + &T + &mut T + () + *const T + *mut T + AU16 + F32<O> + F64<O> + and $N others + = help: see issue #48214 + = note: this error originates in the derive macro `Immutable` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `UnsafeCell<u8>: zerocopy::Immutable` is not satisfied + --> tests/ui-stable/struct.rs:60:10 + | +60 | #[derive(Immutable)] + | ^^^^^^^^^ the trait `zerocopy::Immutable` is not implemented for `UnsafeCell<u8>` + | + = note: Consider adding `#[derive(Immutable)]` to `UnsafeCell<u8>` + = help: the following other types implement trait `zerocopy::Immutable`: + &T + &mut T + () + *const T + *mut T + AU16 + F32<O> + F64<O> + and $N others + = note: required for `[UnsafeCell<u8>; 0]` to implement `zerocopy::Immutable` + = help: see issue #48214 + = note: this error originates in the derive macro `Immutable` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0588]: packed type cannot transitively contain a `#[repr(align)]` type + --> tests/ui-stable/struct.rs:71:1 + | +71 | struct TryFromBytesPacked { + | ^^^^^^^^^^^^^^^^^^^^^^^^^ + | +note: `AU16` has a `#[repr(align)]` attribute + --> tests/ui-stable/../include.rs + | + | pub struct AU16(pub u16); + | ^^^^^^^^^^^^^^^ + +error[E0588]: packed type cannot transitively contain a `#[repr(align)]` type + --> tests/ui-stable/struct.rs:77:1 + | +77 | struct TryFromBytesPackedN { + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +note: `AU16` has a `#[repr(align)]` attribute + --> tests/ui-stable/../include.rs + | + | pub struct AU16(pub u16); + | ^^^^^^^^^^^^^^^ + +error[E0588]: packed type cannot transitively contain a `#[repr(align)]` type + --> tests/ui-stable/struct.rs:83:1 + | +83 | struct TryFromBytesCPacked { + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +note: `AU16` has a `#[repr(align)]` attribute + --> tests/ui-stable/../include.rs + | + | pub struct AU16(pub u16); + | ^^^^^^^^^^^^^^^ + +error[E0588]: packed type cannot transitively contain a `#[repr(align)]` type + --> tests/ui-stable/struct.rs:89:1 + | +89 | struct TryFromBytesCPackedN { + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +note: `AU16` has a `#[repr(align)]` attribute + --> tests/ui-stable/../include.rs + | + | pub struct AU16(pub u16); + | ^^^^^^^^^^^^^^^ + +error[E0277]: the trait bound `AU16: zerocopy::Unaligned` is not satisfied + --> tests/ui-stable/struct.rs:100:10 + | +100 | #[derive(IntoBytes)] + | ^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::Unaligned` 
is not implemented for `AU16` + --> tests/ui-stable/../include.rs + | + 63 | pub struct AU16(pub u16); + | ^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(Unaligned)]` to `AU16` + = help: the following other types implement trait `zerocopy::Unaligned`: + () + AtomicBool + AtomicI8 + AtomicU8 + Cell<T> + F32<O> + F64<O> + I128<O> + and $N others + = help: see issue #48214 + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: `IntoBytes2` has 1 total byte(s) of padding + --> tests/ui-stable/struct.rs:107:10 + | +107 | #[derive(IntoBytes)] + | ^^^^^^^^^ types with padding cannot implement `IntoBytes` + | + = note: consider using `zerocopy::Unalign` to lower the alignment of individual fields + = note: consider adding explicit fields where padding would be + = note: consider using `#[repr(packed)]` to remove padding + = help: the trait `PaddingFree<IntoBytes2, 1>` is not implemented for `()` + but trait `PaddingFree<IntoBytes2, 0>` is implemented for it + = help: see issue #48214 + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: `IntoBytes3` has 1 total byte(s) of padding + --> tests/ui-stable/struct.rs:114:10 + | +114 | #[derive(IntoBytes)] + | ^^^^^^^^^ types with padding cannot implement `IntoBytes` + | + = note: consider using `zerocopy::Unalign` to lower the alignment of individual fields + = note: consider adding explicit fields where padding would be + = note: consider using `#[repr(packed)]` to remove padding + = help: the trait `PaddingFree<IntoBytes3, 1>` is not implemented for `()` + but trait `PaddingFree<IntoBytes3, 0>` is implemented for it + = help: see issue #48214 + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> tests/ui-stable/struct.rs:130:10 + | +130 | #[derive(IntoBytes)] + | ^^^^^^^^^ doesn't have a size known at compile-time + | + = help: within `IntoBytes4`, the trait `Sized` is not implemented for `[u8]` +note: required because it appears within the type `IntoBytes4` + --> tests/ui-stable/struct.rs:132:8 + | +132 | struct IntoBytes4 { + | ^^^^^^^^^^ + = note: required for `IntoBytes4` to implement `macro_util::__size_of::Sized` +note: required by a bound in `macro_util::__size_of::size_of` + --> $WORKSPACE/src/util/macro_util.rs + | + | pub const fn size_of<T: Sized + ?core::marker::Sized>() -> usize { + | ^^^^^ required by this bound in `size_of` + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: `[u8]` is unsized + --> tests/ui-stable/struct.rs:134:8 + | +134 | b: SliceU8, + | ^^^^^^^ `IntoBytes` needs all field types to be `Sized` in order to determine whether there is padding + | + = help: the trait `Sized` is not implemented for `[u8]` + = note: consider using `#[repr(packed)]` to remove padding + = note: `IntoBytes` does not require the fields of `#[repr(packed)]` types to be `Sized` + = note: required for `[u8]` to implement `macro_util::__size_of::Sized` +note: required by a bound in `macro_util::__size_of::size_of` + --> $WORKSPACE/src/util/macro_util.rs + | + | pub const fn size_of<T: Sized + ?core::marker::Sized>() -> usize { + | ^^^^^ required by this bound in `size_of` + +error[E0277]: `IntoBytes5` has one or 
more padding bytes + --> tests/ui-stable/struct.rs:139:10 + | +139 | #[derive(IntoBytes)] + | ^^^^^^^^^ types with padding cannot implement `IntoBytes` + | + = note: consider using `zerocopy::Unalign` to lower the alignment of individual fields + = note: consider adding explicit fields where padding would be + = note: consider using `#[repr(packed)]` to remove padding + = help: the trait `DynamicPaddingFree<IntoBytes5, true>` is not implemented for `()` + but trait `DynamicPaddingFree<IntoBytes5, false>` is implemented for it + = help: see issue #48214 + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: `IntoBytes6` has one or more padding bytes + --> tests/ui-stable/struct.rs:148:10 + | +148 | #[derive(IntoBytes)] + | ^^^^^^^^^ types with padding cannot implement `IntoBytes` + | + = note: consider using `zerocopy::Unalign` to lower the alignment of individual fields + = note: consider adding explicit fields where padding would be + = note: consider using `#[repr(packed)]` to remove padding + = help: the trait `DynamicPaddingFree<IntoBytes6, true>` is not implemented for `()` + but trait `DynamicPaddingFree<IntoBytes6, false>` is implemented for it + = help: see issue #48214 + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: `IntoBytes7` has one or more padding bytes + --> tests/ui-stable/struct.rs:158:10 + | +158 | #[derive(IntoBytes)] + | ^^^^^^^^^ types with padding cannot implement `IntoBytes` + | + = note: consider using `zerocopy::Unalign` to lower the alignment of individual fields + = note: consider adding explicit fields where padding would be + = note: consider using `#[repr(packed)]` to remove padding + = help: the trait `DynamicPaddingFree<IntoBytes7, true>` is not implemented for `()` + but trait `DynamicPaddingFree<IntoBytes7, false>` is implemented for it + = help: see issue #48214 + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0587]: type has conflicting packed and align representation hints + --> tests/ui-stable/struct.rs:222:1 + | +222 | struct Unaligned3; + | ^^^^^^^^^^^^^^^^^ + +error[E0277]: the trait bound `SplitAtNotKnownLayout: zerocopy::KnownLayout` is not satisfied + --> tests/ui-stable/struct.rs:248:10 + | +248 | #[derive(SplitAt)] + | ^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::KnownLayout` is not implemented for `SplitAtNotKnownLayout` + --> tests/ui-stable/struct.rs:250:1 + | +250 | struct SplitAtNotKnownLayout([u8]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(KnownLayout)]` to `SplitAtNotKnownLayout` + = help: the following other types implement trait `zerocopy::KnownLayout`: + &T + &mut T + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + and $N others +note: required by a bound in `SplitAt` + --> $WORKSPACE/src/split_at.rs + | + 61 | pub unsafe trait SplitAt: KnownLayout<PointerMetadata = usize> { + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `SplitAt` + = note: this error originates in the derive macro `SplitAt` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `u8: SplitAt` is not satisfied + --> tests/ui-stable/struct.rs:252:10 + | +252 | #[derive(SplitAt, KnownLayout)] + | ^^^^^^^ the trait `SplitAt` is not implemented for `u8` + | + = note: Consider adding 
`#[derive(SplitAt)]` to `u8` + = help: the following other types implement trait `SplitAt`: + SplitAtNotKnownLayout + SplitAtSized + [T] + = help: see issue #48214 + = note: this error originates in the derive macro `SplitAt` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `AU16: zerocopy::Unaligned` is not satisfied + --> tests/ui-stable/struct.rs:195:28 + | +195 | is_into_bytes_11::<IntoBytes11<AU16>>(); + | ^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::Unaligned` is not implemented for `AU16` + --> tests/ui-stable/../include.rs + | + 63 | pub struct AU16(pub u16); + | ^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(Unaligned)]` to `AU16` + = help: the following other types implement trait `zerocopy::Unaligned`: + () + AtomicBool + AtomicI8 + AtomicU8 + Cell<T> + F32<O> + F64<O> + I128<O> + and $N others +note: required for `IntoBytes11<AU16>` to implement `zerocopy::IntoBytes` + --> tests/ui-stable/struct.rs:184:10 + | +184 | #[derive(IntoBytes)] + | ^^^^^^^^^ unsatisfied trait bound introduced in this `derive` macro +note: required by a bound in `is_into_bytes_11` + --> tests/ui-stable/struct.rs:193:24 + | +193 | fn is_into_bytes_11<T: IntoBytes>() { + | ^^^^^^^^^ required by this bound in `is_into_bytes_11` + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy-derive/tests/ui-stable/union.rs b/vendor/zerocopy-derive/tests/ui-stable/union.rs new file mode 100644 index 00000000..f6eeee2a --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-stable/union.rs @@ -0,0 +1,109 @@ +// Copyright 2019 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +#[macro_use] +extern crate zerocopy; + +#[path = "../include.rs"] +mod util; + +use std::mem::ManuallyDrop; + +use self::util::util::AU16; + +fn main() {} + +// +// Immutable errors +// + +#[derive(Immutable)] +union Immutable1 { + a: ManuallyDrop<core::cell::UnsafeCell<()>>, +} + +// +// IntoBytes errors +// + +#[derive(IntoBytes)] +#[repr(C)] +union IntoBytes1<T> { + foo: ManuallyDrop<T>, +} + +#[derive(IntoBytes)] +#[repr(C)] +union IntoBytes2 { + foo: u8, + bar: [u8; 2], +} + +// Need a `repr` attribute +#[derive(IntoBytes)] +union IntoBytes3 { + foo: u8, +} + +// `repr(packed(2))` isn't equivalent to `repr(packed)` +#[derive(IntoBytes)] +#[repr(packed(2))] +union IntoBytes4 { + foo: u8, +} + +// +// Unaligned errors +// + +#[derive(Unaligned)] +#[repr(C, align(2))] +union Unaligned1 { + foo: i16, + bar: AU16, +} + +// Transparent unions are unstable; see issue #60405 +// <https://github.com/rust-lang/rust/issues/60405> for more information. 
+ +// #[derive(Unaligned)] +// #[repr(transparent, align(2))] +// union Unaligned2 { +// foo: u8, +// } + +#[derive(Unaligned)] +#[repr(packed, align(2))] +union Unaligned3 { + foo: u8, +} + +#[derive(Unaligned)] +#[repr(align(1), align(2))] +struct Unaligned4 { + foo: u8, +} + +#[derive(Unaligned)] +#[repr(align(2), align(4))] +struct Unaligned5 { + foo: u8, +} + +#[derive(Unaligned)] +union Unaligned6 { + foo: i16, + bar: AU16, +} + +#[derive(Unaligned)] +#[repr(packed(2))] +union Unaligned7 { + foo: i16, + bar: AU16, +} diff --git a/vendor/zerocopy-derive/tests/ui-stable/union.stderr b/vendor/zerocopy-derive/tests/ui-stable/union.stderr new file mode 100644 index 00000000..c17306ea --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-stable/union.stderr @@ -0,0 +1,130 @@ +error: unsupported on types with type parameters + --> tests/ui-stable/union.rs:34:10 + | +34 | #[derive(IntoBytes)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must be #[repr(C)], #[repr(packed)], or #[repr(transparent)] + --> tests/ui-stable/union.rs:48:10 + | +48 | #[derive(IntoBytes)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must be #[repr(C)], #[repr(packed)], or #[repr(transparent)] + --> tests/ui-stable/union.rs:54:10 + | +54 | #[derive(IntoBytes)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: cannot derive `Unaligned` on type with alignment greater than 1 + --> tests/ui-stable/union.rs:65:11 + | +65 | #[repr(C, align(2))] + | ^^^^^ + +error: this conflicts with another representation hint + --> tests/ui-stable/union.rs:81:16 + | +81 | #[repr(packed, align(2))] + | ^^^^^ + +error: this conflicts with another representation hint + --> tests/ui-stable/union.rs:87:18 + | +87 | #[repr(align(1), align(2))] + | ^^^^^ + +error: this conflicts with another representation hint + --> tests/ui-stable/union.rs:93:18 + | +93 | #[repr(align(2), align(4))] + | ^^^^^ + +error: must have #[repr(C)], #[repr(transparent)], or #[repr(packed)] attribute in order to guarantee this type's alignment + --> tests/ui-stable/union.rs:98:10 + | +98 | #[derive(Unaligned)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: must have #[repr(C)], #[repr(transparent)], or #[repr(packed)] attribute in order to guarantee this type's alignment + --> tests/ui-stable/union.rs:104:10 + | +104 | #[derive(Unaligned)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `Unaligned` (in Nightly builds, run with -Z macro-backtrace for more info) + +warning: unexpected `cfg` condition name: `zerocopy_derive_union_into_bytes` + --> tests/ui-stable/union.rs:40:10 + | +40 | #[derive(IntoBytes)] + | ^^^^^^^^^ + | + = help: expected names are: `docsrs`, `feature`, and `test` and 31 more + = note: using a cfg inside a derive macro will use the cfgs from the destination crate and not the ones from the defining crate + = help: try referring to `IntoBytes` crate for guidance on how handle this unexpected cfg + = help: the derive macro `IntoBytes` may come from an old version of the `zerocopy_derive` crate, try updating your dependency with `cargo update -p zerocopy_derive` + = note: see 
<https://doc.rust-lang.org/nightly/rustc/check-cfg/cargo-specifics.html> for more information about checking conditional configuration + = note: `#[warn(unexpected_cfgs)]` on by default + = note: this warning originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `UnsafeCell<()>: zerocopy::Immutable` is not satisfied + --> tests/ui-stable/union.rs:25:10 + | +25 | #[derive(Immutable)] + | ^^^^^^^^^ the trait `zerocopy::Immutable` is not implemented for `UnsafeCell<()>` + | + = note: Consider adding `#[derive(Immutable)]` to `UnsafeCell<()>` + = help: the following other types implement trait `zerocopy::Immutable`: + &T + &mut T + () + *const T + *mut T + AU16 + F32<O> + F64<O> + and $N others + = note: required for `ManuallyDrop<UnsafeCell<()>>` to implement `zerocopy::Immutable` + = help: see issue #48214 + = note: this error originates in the derive macro `Immutable` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: `IntoBytes2` has 1 total byte(s) of padding + --> tests/ui-stable/union.rs:40:10 + | +40 | #[derive(IntoBytes)] + | ^^^^^^^^^ types with padding cannot implement `IntoBytes` + | + = note: consider using `zerocopy::Unalign` to lower the alignment of individual fields + = note: consider adding explicit fields where padding would be + = note: consider using `#[repr(packed)]` to remove padding + = help: the trait `PaddingFree<IntoBytes2, 1>` is not implemented for `()` + but trait `PaddingFree<IntoBytes2, 0>` is implemented for it + = help: see issue #48214 + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0587]: type has conflicting packed and align representation hints + --> tests/ui-stable/union.rs:82:1 + | +82 | union Unaligned3 { + | ^^^^^^^^^^^^^^^^ + +error[E0588]: packed type cannot transitively contain a `#[repr(align)]` type + --> tests/ui-stable/union.rs:106:1 + | +106 | union Unaligned7 { + | ^^^^^^^^^^^^^^^^ + | +note: `AU16` has a `#[repr(align)]` attribute + --> tests/ui-stable/../include.rs + | + 63 | pub struct AU16(pub u16); + | ^^^^^^^^^^^^^^^ diff --git a/vendor/zerocopy-derive/tests/ui-stable/union_into_bytes_cfg/union_into_bytes_cfg.rs b/vendor/zerocopy-derive/tests/ui-stable/union_into_bytes_cfg/union_into_bytes_cfg.rs new file mode 100644 index 00000000..280f05d4 --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-stable/union_into_bytes_cfg/union_into_bytes_cfg.rs @@ -0,0 +1,26 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +//! See: https://github.com/google/zerocopy/issues/553 +//! zerocopy must still allow derives of deprecated types. +//! This test has a hand-written impl of a deprecated type, and should result in a compilation +//! error. If zerocopy does not tack an allow(deprecated) annotation onto its impls, then this +//! test will fail because more than one compile error will be generated. 
+#![deny(deprecated)] + +extern crate zerocopy; + +use zerocopy::IntoBytes; + +#[derive(IntoBytes)] +#[repr(C)] +union Foo { + a: u8, +} + +fn main() {} diff --git a/vendor/zerocopy-derive/tests/ui-stable/union_into_bytes_cfg/union_into_bytes_cfg.stderr b/vendor/zerocopy-derive/tests/ui-stable/union_into_bytes_cfg/union_into_bytes_cfg.stderr new file mode 100644 index 00000000..9c038e20 --- /dev/null +++ b/vendor/zerocopy-derive/tests/ui-stable/union_into_bytes_cfg/union_into_bytes_cfg.stderr @@ -0,0 +1,22 @@ +error: requires --cfg zerocopy_derive_union_into_bytes; + please let us know you use this feature: https://github.com/google/zerocopy/discussions/1802 + --> tests/ui-stable/union_into_bytes_cfg/union_into_bytes_cfg.rs:20:10 + | +20 | #[derive(IntoBytes)] + | ^^^^^^^^^ + | + = note: this error originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) + +warning: unexpected `cfg` condition name: `zerocopy_derive_union_into_bytes` + --> tests/ui-stable/union_into_bytes_cfg/union_into_bytes_cfg.rs:20:10 + | +20 | #[derive(IntoBytes)] + | ^^^^^^^^^ + | + = help: expected names are: `docsrs`, `feature`, and `test` and 31 more + = note: using a cfg inside a derive macro will use the cfgs from the destination crate and not the ones from the defining crate + = help: try referring to `IntoBytes` crate for guidance on how handle this unexpected cfg + = help: the derive macro `IntoBytes` may come from an old version of the `zerocopy_derive` crate, try updating your dependency with `cargo update -p zerocopy_derive` + = note: see <https://doc.rust-lang.org/nightly/rustc/check-cfg/cargo-specifics.html> for more information about checking conditional configuration + = note: `#[warn(unexpected_cfgs)]` on by default + = note: this warning originates in the derive macro `IntoBytes` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy-derive/tests/union_from_bytes.rs b/vendor/zerocopy-derive/tests/union_from_bytes.rs new file mode 100644 index 00000000..f8482248 --- /dev/null +++ b/vendor/zerocopy-derive/tests/union_from_bytes.rs @@ -0,0 +1,74 @@ +// Copyright 2019 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +// See comment in `include.rs` for why we disable the prelude. 
+#![no_implicit_prelude] +#![allow(warnings)] + +include!("include.rs"); + +// A union is `imp::FromBytes` if: +// - all fields are `imp::FromBytes` + +#[derive(Clone, Copy, imp::Immutable, imp::FromBytes)] +union Zst { + a: (), +} + +util_assert_impl_all!(Zst: imp::FromBytes); +test_trivial_is_bit_valid!(Zst => test_zst_trivial_is_bit_valid); + +#[derive(imp::Immutable, imp::FromBytes)] +union One { + a: u8, +} + +util_assert_impl_all!(One: imp::FromBytes); +test_trivial_is_bit_valid!(One => test_one_trivial_is_bit_valid); + +#[derive(imp::Immutable, imp::FromBytes)] +union Two { + a: u8, + b: Zst, +} + +util_assert_impl_all!(Two: imp::FromBytes); +test_trivial_is_bit_valid!(Two => test_two_trivial_is_bit_valid); + +#[derive(imp::Immutable, imp::FromBytes)] +union TypeParams<'a, T: imp::Copy, I: imp::Iterator> +where + I::Item: imp::Copy, +{ + a: T, + c: I::Item, + d: u8, + e: imp::PhantomData<&'a [u8]>, + f: imp::PhantomData<&'static str>, + g: imp::PhantomData<imp::String>, +} + +util_assert_impl_all!(TypeParams<'static, (), imp::IntoIter<()>>: imp::FromBytes); +test_trivial_is_bit_valid!(TypeParams<'static, (), imp::IntoIter<()>> => test_type_params_trivial_is_bit_valid); + +// Deriving `imp::FromBytes` should work if the union has bounded parameters. + +#[derive(imp::Immutable, imp::FromBytes)] +#[repr(C)] +union WithParams<'a: 'b, 'b: 'a, T: 'a + 'b + imp::FromBytes, const N: usize> +where + 'a: 'b, + 'b: 'a, + T: 'a + 'b + imp::Copy + imp::FromBytes, +{ + a: [T; N], + b: imp::PhantomData<&'a &'b ()>, +} + +util_assert_impl_all!(WithParams<'static, 'static, u8, 42>: imp::FromBytes); +test_trivial_is_bit_valid!(WithParams<'static, 'static, u8, 42> => test_with_params_trivial_is_bit_valid); diff --git a/vendor/zerocopy-derive/tests/union_from_zeros.rs b/vendor/zerocopy-derive/tests/union_from_zeros.rs new file mode 100644 index 00000000..4f5b8e17 --- /dev/null +++ b/vendor/zerocopy-derive/tests/union_from_zeros.rs @@ -0,0 +1,69 @@ +// Copyright 2019 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +// See comment in `include.rs` for why we disable the prelude. +#![no_implicit_prelude] +#![allow(warnings)] + +include!("include.rs"); + +// A union is `imp::FromZeros` if: +// - all fields are `imp::FromZeros` + +#[derive(Clone, Copy, imp::Immutable, imp::FromZeros)] +union Zst { + a: (), +} + +util_assert_impl_all!(Zst: imp::FromZeros); + +#[derive(imp::Immutable, imp::FromZeros)] +union One { + a: bool, +} + +util_assert_impl_all!(One: imp::FromZeros); + +#[derive(imp::Immutable, imp::FromZeros)] +union Two { + a: bool, + b: Zst, +} + +util_assert_impl_all!(Two: imp::FromZeros); + +#[derive(imp::Immutable, imp::FromZeros)] +union TypeParams<'a, T: imp::Copy, I: imp::Iterator> +where + I::Item: imp::Copy, +{ + a: T, + c: I::Item, + d: u8, + e: imp::PhantomData<&'a [u8]>, + f: imp::PhantomData<&'static str>, + g: imp::PhantomData<imp::String>, +} + +util_assert_impl_all!(TypeParams<'static, (), imp::IntoIter<()>>: imp::FromZeros); + +// Deriving `imp::FromZeros` should work if the union has bounded parameters. 
+ +#[derive(imp::Immutable, imp::FromZeros)] +#[repr(C)] +union WithParams<'a: 'b, 'b: 'a, T: 'a + 'b + imp::FromZeros, const N: usize> +where + 'a: 'b, + 'b: 'a, + T: 'a + 'b + imp::Copy + imp::FromZeros, +{ + a: [T; N], + b: imp::PhantomData<&'a &'b ()>, +} + +util_assert_impl_all!(WithParams<'static, 'static, u8, 42>: imp::FromZeros); diff --git a/vendor/zerocopy-derive/tests/union_known_layout.rs b/vendor/zerocopy-derive/tests/union_known_layout.rs new file mode 100644 index 00000000..c8668759 --- /dev/null +++ b/vendor/zerocopy-derive/tests/union_known_layout.rs @@ -0,0 +1,66 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +// See comment in `include.rs` for why we disable the prelude. +#![no_implicit_prelude] +#![allow(warnings)] + +include!("include.rs"); + +#[derive(Clone, Copy, imp::KnownLayout)] +union Zst { + a: (), +} + +util_assert_impl_all!(Zst: imp::KnownLayout); + +#[derive(imp::KnownLayout)] +union One { + a: bool, +} + +util_assert_impl_all!(One: imp::KnownLayout); + +#[derive(imp::KnownLayout)] +union Two { + a: bool, + b: Zst, +} + +util_assert_impl_all!(Two: imp::KnownLayout); + +#[derive(imp::KnownLayout)] +union TypeParams<'a, T: imp::Copy, I: imp::Iterator> +where + I::Item: imp::Copy, +{ + a: T, + c: I::Item, + d: u8, + e: imp::PhantomData<&'a [u8]>, + f: imp::PhantomData<&'static str>, + g: imp::PhantomData<imp::String>, +} + +util_assert_impl_all!(TypeParams<'static, (), imp::IntoIter<()>>: imp::KnownLayout); + +// Deriving `imp::KnownLayout` should work if the union has bounded parameters. + +#[derive(imp::KnownLayout)] +#[repr(C)] +union WithParams<'a: 'b, 'b: 'a, T: 'a + 'b + imp::KnownLayout, const N: usize> +where + 'a: 'b, + 'b: 'a, + T: 'a + 'b + imp::Copy + imp::KnownLayout, +{ + a: [T; N], + b: imp::PhantomData<&'a &'b ()>, +} + +util_assert_impl_all!(WithParams<'static, 'static, u8, 42>: imp::KnownLayout); diff --git a/vendor/zerocopy-derive/tests/union_no_cell.rs b/vendor/zerocopy-derive/tests/union_no_cell.rs new file mode 100644 index 00000000..46070fe9 --- /dev/null +++ b/vendor/zerocopy-derive/tests/union_no_cell.rs @@ -0,0 +1,68 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +// See comment in `include.rs` for why we disable the prelude. 
+#![no_implicit_prelude] +#![allow(warnings)] + +include!("include.rs"); + +#[derive(Clone, Copy, imp::Immutable)] +union Zst { + a: (), +} + +util_assert_impl_all!(Zst: imp::Immutable); + +#[derive(imp::Immutable)] +union One { + a: bool, +} + +util_assert_impl_all!(One: imp::Immutable); + +#[derive(imp::Immutable)] +union Two { + a: bool, + b: Zst, +} + +util_assert_impl_all!(Two: imp::Immutable); + +#[derive(imp::Immutable)] +union TypeParams<'a, T: imp::Copy, I: imp::Iterator> +where + I::Item: imp::Copy, +{ + a: T, + c: I::Item, + d: u8, + e: imp::PhantomData<&'a [::core::primitive::u8]>, + f: imp::PhantomData<&'static ::core::primitive::str>, + g: imp::PhantomData<imp::String>, +} + +util_assert_impl_all!(TypeParams<'static, (), imp::IntoIter<()>>: imp::Immutable); + +// Deriving `imp::Immutable` should work if the union has bounded parameters. + +#[derive(imp::Immutable)] +#[repr(C)] +union WithParams<'a: 'b, 'b: 'a, T: 'a + 'b + imp::Immutable, const N: usize> +where + 'a: 'b, + 'b: 'a, + T: 'a + 'b + imp::Copy + imp::Immutable, +{ + a: [T; N], + b: imp::PhantomData<&'a &'b ()>, + c: imp::PhantomData<imp::UnsafeCell<()>>, + d: &'a imp::UnsafeCell<()>, +} + +util_assert_impl_all!(WithParams<'static, 'static, u8, 42>: imp::Immutable); diff --git a/vendor/zerocopy-derive/tests/union_to_bytes.rs b/vendor/zerocopy-derive/tests/union_to_bytes.rs new file mode 100644 index 00000000..e1acba35 --- /dev/null +++ b/vendor/zerocopy-derive/tests/union_to_bytes.rs @@ -0,0 +1,75 @@ +// Copyright 2019 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +// See comment in `include.rs` for why we disable the prelude. +#![no_implicit_prelude] +#![allow(warnings)] + +include!("include.rs"); + +// A union is `imp::IntoBytes` if: +// - all fields are `imp::IntoBytes` +// - `repr(C)` or `repr(transparent)` and +// - no padding (size of union equals size of each field type) +// - `repr(packed)` + +#[derive(imp::IntoBytes, Clone, Copy)] +#[repr(C)] +union CZst { + a: (), +} + +util_assert_impl_all!(CZst: imp::IntoBytes); + +#[derive(imp::IntoBytes)] +#[repr(C)] +union C { + a: u8, + b: u8, +} + +util_assert_impl_all!(C: imp::IntoBytes); + +// Transparent unions are unstable; see issue #60405 +// <https://github.com/rust-lang/rust/issues/60405> for more information. 
+ +// #[derive(imp::IntoBytes)] +// #[repr(transparent)] +// union Transparent { +// a: u8, +// b: CZst, +// } + +// is_as_bytes!(Transparent); + +#[derive(imp::IntoBytes)] +#[repr(C, packed)] +union CZstPacked { + a: (), +} + +util_assert_impl_all!(CZstPacked: imp::IntoBytes); + +#[derive(imp::IntoBytes)] +#[repr(C, packed)] +union CPacked { + a: u8, + b: i8, +} + +util_assert_impl_all!(CPacked: imp::IntoBytes); + +#[derive(imp::IntoBytes)] +#[repr(C, packed)] +union CMultibytePacked { + a: i32, + b: u32, + c: f32, +} + +util_assert_impl_all!(CMultibytePacked: imp::IntoBytes); diff --git a/vendor/zerocopy-derive/tests/union_try_from_bytes.rs b/vendor/zerocopy-derive/tests/union_try_from_bytes.rs new file mode 100644 index 00000000..80bae235 --- /dev/null +++ b/vendor/zerocopy-derive/tests/union_try_from_bytes.rs @@ -0,0 +1,183 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +// See comment in `include.rs` for why we disable the prelude. +#![no_implicit_prelude] +#![allow(warnings)] + +include!("include.rs"); + +// A struct is `imp::TryFromBytes` if: +// - any of its fields are `imp::TryFromBytes` + +#[derive(imp::Immutable, imp::TryFromBytes)] +union One { + a: u8, +} + +util_assert_impl_all!(One: imp::TryFromBytes); + +#[test] +fn one() { + // FIXME(#5): Use `try_transmute` in this test once it's available. + let candidate = ::zerocopy::Ptr::from_ref(&One { a: 42 }); + let candidate = candidate.forget_aligned(); + // SAFETY: `&One` consists entirely of initialized bytes. + let candidate = unsafe { candidate.assume_initialized() }; + let is_bit_valid = <One as imp::TryFromBytes>::is_bit_valid(candidate); + assert!(is_bit_valid); +} + +#[derive(imp::Immutable, imp::TryFromBytes)] +#[repr(C)] +union Two { + a: bool, + b: bool, +} + +util_assert_impl_all!(Two: imp::TryFromBytes); + +#[test] +fn two() { + // FIXME(#5): Use `try_transmute` in this test once it's available. + let candidate_a = ::zerocopy::Ptr::from_ref(&Two { a: false }); + let candidate_a = candidate_a.forget_aligned(); + // SAFETY: `&Two` consists entirely of initialized bytes. + let candidate_a = unsafe { candidate_a.assume_initialized() }; + let is_bit_valid = <Two as imp::TryFromBytes>::is_bit_valid(candidate_a); + assert!(is_bit_valid); + + let candidate_b = ::zerocopy::Ptr::from_ref(&Two { b: true }); + let candidate_b = candidate_b.forget_aligned(); + // SAFETY: `&Two` consists entirely of initialized bytes. + let candidate_b = unsafe { candidate_b.assume_initialized() }; + let is_bit_valid = <Two as imp::TryFromBytes>::is_bit_valid(candidate_b); + assert!(is_bit_valid); +} + +#[test] +fn two_bad() { + // FIXME(#5): Use `try_transmute` in this test once it's available. + let candidate = ::zerocopy::Ptr::from_ref(&[2u8][..]); + let candidate = candidate.forget_aligned(); + // SAFETY: `&[u8]` consists entirely of initialized bytes. + let candidate = unsafe { candidate.assume_initialized() }; + + // SAFETY: + // - The cast preserves address and size. As a result, the cast will address + // the same bytes as `c`. + // - The cast preserves provenance. + // - Neither the input nor output types contain any `UnsafeCell`s. 
+ let candidate = unsafe { candidate.cast_unsized_unchecked(|p| p.cast::<Two>()) }; + + // SAFETY: `candidate`'s referent is as-initialized as `Two`. + let candidate = unsafe { candidate.assume_initialized() }; + + let is_bit_valid = <Two as imp::TryFromBytes>::is_bit_valid(candidate); + assert!(!is_bit_valid); +} + +#[derive(imp::Immutable, imp::TryFromBytes)] +#[repr(C)] +union BoolAndZst { + a: bool, + b: (), +} + +#[test] +fn bool_and_zst() { + // FIXME(#5): Use `try_transmute` in this test once it's available. + let candidate = ::zerocopy::Ptr::from_ref(&[2u8][..]); + let candidate = candidate.forget_aligned(); + // SAFETY: `&[u8]` consists entirely of initialized bytes. + let candidate = unsafe { candidate.assume_initialized() }; + + // SAFETY: + // - The cast preserves address and size. As a result, the cast will address + // the same bytes as `c`. + // - The cast preserves provenance. + // - Neither the input nor output types contain any `UnsafeCell`s. + let candidate = unsafe { candidate.cast_unsized_unchecked(|p| p.cast::<BoolAndZst>()) }; + + // SAFETY: `candidate`'s referent is fully initialized. + let candidate = unsafe { candidate.assume_initialized() }; + + let is_bit_valid = <BoolAndZst as imp::TryFromBytes>::is_bit_valid(candidate); + assert!(is_bit_valid); +} + +#[derive(imp::FromBytes)] +#[repr(C)] +union MaybeFromBytes<T: imp::Copy> { + t: T, +} + +#[test] +fn test_maybe_from_bytes() { + // When deriving `FromBytes` on a type with no generic parameters, we emit a + // trivial `is_bit_valid` impl that always returns true. This test confirms + // that we *don't* spuriously do that when generic parameters are present. + + let candidate = ::zerocopy::Ptr::from_ref(&[2u8][..]); + let candidate = candidate.bikeshed_recall_initialized_from_bytes(); + + // SAFETY: + // - The cast preserves address and size. As a result, the cast will address + // the same bytes as `c`. + // - The cast preserves provenance. + // - Neither the input nor output types contain any `UnsafeCell`s. + let candidate = + unsafe { candidate.cast_unsized_unchecked(|p| p.cast::<MaybeFromBytes<bool>>()) }; + + // SAFETY: `[u8]` consists entirely of initialized bytes. + let candidate = unsafe { candidate.assume_initialized() }; + let is_bit_valid = <MaybeFromBytes<bool> as imp::TryFromBytes>::is_bit_valid(candidate); + imp::assert!(!is_bit_valid); +} + +#[derive(imp::Immutable, imp::TryFromBytes)] +#[repr(C)] +union TypeParams<'a, T: imp::Copy, I: imp::Iterator> +where + I::Item: imp::Copy, +{ + a: I::Item, + b: u8, + c: imp::PhantomData<&'a [u8]>, + d: imp::PhantomData<&'static str>, + e: imp::PhantomData<imp::String>, + f: T, +} + +util_assert_impl_all!(TypeParams<'static, (), imp::IntoIter<()>>: imp::TryFromBytes); +util_assert_impl_all!(TypeParams<'static, util::AU16, imp::IntoIter<()>>: imp::TryFromBytes); +util_assert_impl_all!(TypeParams<'static, [util::AU16; 2], imp::IntoIter<()>>: imp::TryFromBytes); + +// Deriving `imp::TryFromBytes` should work if the union has bounded parameters. 
+ +#[derive(imp::Immutable, imp::TryFromBytes)] +#[repr(C)] +union WithParams<'a: 'b, 'b: 'a, T: 'a + 'b + imp::TryFromBytes, const N: usize> +where + 'a: 'b, + 'b: 'a, + T: 'a + 'b + imp::TryFromBytes + imp::Copy, +{ + a: imp::PhantomData<&'a &'b ()>, + b: T, +} + +util_assert_impl_all!(WithParams<'static, 'static, u8, 42>: imp::TryFromBytes); + +#[derive(Clone, Copy, imp::TryFromBytes, imp::Immutable)] +struct A; + +#[derive(imp::TryFromBytes)] +union B { + a: A, +} diff --git a/vendor/zerocopy-derive/tests/union_unaligned.rs b/vendor/zerocopy-derive/tests/union_unaligned.rs new file mode 100644 index 00000000..49aaa9a0 --- /dev/null +++ b/vendor/zerocopy-derive/tests/union_unaligned.rs @@ -0,0 +1,77 @@ +// Copyright 2019 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +// See comment in `include.rs` for why we disable the prelude. +#![no_implicit_prelude] +#![allow(warnings)] + +include!("include.rs"); + +// A union is `Unaligned` if: +// - `repr(align)` is no more than 1 and either +// - `repr(C)` or `repr(transparent)` and +// - all fields `Unaligned` +// - `repr(packed)` + +#[derive(imp::Unaligned)] +#[repr(C)] +union Foo { + a: imp::u8, +} + +util_assert_impl_all!(Foo: imp::Unaligned); + +// Transparent unions are unstable; see issue #60405 +// <https://github.com/rust-lang/rust/issues/60405> for more information. + +// #[derive(Unaligned)] +// #[repr(transparent)] +// union Bar { +// a: u8, +// } + +// is_unaligned!(Bar); + +#[derive(imp::Unaligned)] +#[repr(packed)] +union Baz { + // NOTE: The `u16` type is not guaranteed to have alignment 2, although it + // does on many platforms. However, to fix this would require a custom type + // with a `#[repr(align(2))]` attribute, and `#[repr(packed)]` types are not + // allowed to transitively contain `#[repr(align(...))]` types. Thus, we + // have no choice but to use `u16` here. Luckily, these tests run in CI on + // platforms on which `u16` has alignment 2, so this isn't that big of a + // deal. + a: u16, +} + +util_assert_impl_all!(Baz: imp::Unaligned); + +#[derive(imp::Unaligned)] +#[repr(C, align(1))] +union FooAlign { + a: imp::u8, +} + +util_assert_impl_all!(FooAlign: imp::Unaligned); + +#[derive(imp::Unaligned)] +#[repr(C)] +union TypeParams<'a, T: imp::Copy, I: imp::Iterator> +where + I::Item: imp::Copy, +{ + a: T, + c: I::Item, + d: u8, + e: imp::PhantomData<&'a [imp::u8]>, + f: imp::PhantomData<&'static imp::str>, + g: imp::PhantomData<imp::String>, +} + +util_assert_impl_all!(TypeParams<'static, (), imp::IntoIter<()>>: imp::Unaligned); diff --git a/vendor/zerocopy-derive/tests/unsafe_cell.rs b/vendor/zerocopy-derive/tests/unsafe_cell.rs new file mode 100644 index 00000000..7f6d8b7a --- /dev/null +++ b/vendor/zerocopy-derive/tests/unsafe_cell.rs @@ -0,0 +1,33 @@ +// Copyright 2025 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +// See comment in `include.rs` for why we disable the prelude. 
+#![no_implicit_prelude] +#![allow(warnings)] + +include!("include.rs"); + +// Test to make sure that all of our derives are compatible with `UnsafeCell`s. +// +// We test both `FromBytes` and `FromZeros`, as the `FromBytes` implied derive +// of `TryFromBytes` emits a trivial `is_bit_valid` impl - we want to test the +// non-trivial impl, which deriving `FromZeros` accomplishes. + +#[derive(imp::FromBytes, imp::IntoBytes, imp::KnownLayout, imp::Unaligned)] +#[repr(C)] +struct StructFromBytes(imp::UnsafeCell<u8>); + +#[derive(imp::FromZeros, imp::IntoBytes, imp::KnownLayout, imp::Unaligned)] +#[repr(C)] +struct StructFromZeros(imp::UnsafeCell<bool>); + +#[derive(imp::FromZeros, imp::IntoBytes, imp::KnownLayout, imp::Unaligned)] +#[repr(u8)] +enum EnumFromZeros { + A(imp::UnsafeCell<bool>), +} diff --git a/vendor/zerocopy/.cargo-checksum.json b/vendor/zerocopy/.cargo-checksum.json new file mode 100644 index 00000000..eb7f4eca --- /dev/null +++ b/vendor/zerocopy/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{".cargo_vcs_info.json":"40f5680978c2fdd8b6075d81fbe94b2f1fb98feca30ebbeae2ed0da13cb325d9","AGENTS.md":"fd6d21f3d5ee887d03cd2104bedc460abdcba0d50677a53cd5c237ba3cc50fa1","CHANGELOG.md":"876e5a74db78bdc53677ec4d489ce7b2fc87840f2dc094423cef406138082708","CONTRIBUTING.md":"a0980739c88a1b5fe71e487ec49ead40a33e96042d4cb2310240ac1a635c97d3","Cargo.lock":"d200ba9004576ac835da2ec264eff32b870bd9e1c3f68ca1b5e71f9777760893","Cargo.toml":"d83b467ac8f316750baa8f0255902bb28b4253c31a59c9726323da62f964bad9","Cargo.toml.orig":"5a95ebf926daa0570293ed48b869b642432ec9dce31e6ecc76adaafce4ec2fa1","LICENSE-APACHE":"9d185ac6703c4b0453974c0d85e9eee43e6941009296bb1f5eb0b54e2329e9f3","LICENSE-BSD":"83c1763356e822adde0a2cae748d938a73fdc263849ccff6b27776dff213bd32","LICENSE-MIT":"1a2f5c12ddc934d58956aa5dbdd3255fe55fd957633ab7d0d39e4f0daa73f7df","POLICIES.md":"43dbf7f7b6e003fe080099e51ea2b70b5d99fa2185ee54717a914202a42e1a82","README.md":"63f7477532c23478d2878b172811d8f941774291c01eb6d0197deb1986ad64aa","build.rs":"f0320d24ec7c5f02417a834bc437d26b9ca073f10f4b0fa4cd708395f83e9920","cargo.sh":"8cc26650d78a7377953546bffb612017e748aa0c36c14365cda810d3d1f6ebc8","ci/check_actions.sh":"5a9f7087b3cb931e9fb557f24cd20eb9ca6b7f4e4e7d396ec774a51c992b57f3","ci/check_all_toolchains_tested.sh":"e67462dc8744126d54ecf15cbb3941147e3158953eed476af8e4cddf56dc352b","ci/check_fmt.sh":"0668de6302f287dc6c037ba5863bb6d2b3cb7efb14be35a5c594ad353e8275a7","ci/check_job_dependencies.sh":"79291a5abd881f9151260af741aadc38d1c4df2e22593d60cc424723d201c265","ci/check_msrv_is_minimal.sh":"bddd6c3319a6e2fc5f5ff95a7e58733dba50d4874d1805aa1e09de8f12f5afdd","ci/check_readme.sh":"00abb89eb4d94c26773f6a568faada404868c1be023e7048f970392a76ba4a31","ci/check_todo.sh":"6b7085c06b45d80d51f2853ebd4d83675790b8a5ac7b61a075c967cfb7db56a7","ci/check_versions.sh":"04a0ae151dfbb5c26d8af5722ba312dab75733e08a1eefc0680e67b053730065","ci/release_crate_version.sh":"0048efe5c2eb256f79c93689359aba729929a46b389559697763b880354c8f4a","clippy.toml":"df67a6131fff2fe52309e797d2dfad080fc8cbdfcb1baa7f14415b3e397c291c","githooks/pre-push":"07754017a6d56f1dd0b1f9d45ed9f867f78e6fe18d2e9fe68bd49bd660b66e55","rustfmt.toml":"47800771c2ffb8e12eecebc29b8282aea5353ec0bc5c2ccd9ba9d8e5dd6454ab","src/byte_slice.rs":"eff8c9cfaa19350e3c7b9e4311c9ab51c2fa182fe70528643cf9ed6633b8eaae","src/byteorder.rs":"4fa5f75aa558cafc8f342c8ca9d838ef67198de3fb07ed637b764b58427fdd74","src/deprecated.rs":"df993f07cb37628012e45716aa74237a1294b98eef80f1ec69a9b17b9f354342","src/doctests.rs":"15b4641dd8a9c93b6ce630516f04a
ba4aa439072d08ba9820d597ae4127424cd","src/error.rs":"c7c505059be3dec2d34f26fb19a7c6117f3bace1c7df423ffc5fe3309baa35ef","src/impls.rs":"5b443ac6a9bb5a3b05cb4ce49b5895740dad3fbd6a9989c2ccf74e26043f8f5c","src/layout.rs":"0538d66d6b7aec64a052a6defccb8760654bc1c2f0a6a50e92f011245b7ba69b","src/lib.rs":"c67cd5d3c6e2bef6d7e0c2c52527201a7376d4eb6af00693ffbc1406de6550f1","src/macros.rs":"54b48458b371ab14ce40b67d1206e35b870ffa8e5f01ef40b3f2eb0947aebd3a","src/pointer/inner.rs":"d635a8ac62a7c9751d3be4f52d756ba597fbe46a2507af044800b31e67f1e4f3","src/pointer/invariant.rs":"817452ba94b18db728d405eec2512afcc8e8aa771151899d4839700216ad6041","src/pointer/mod.rs":"867bc068873c414d7d6405cc46d237fc379aca6b120233d161e762d24445c097","src/pointer/ptr.rs":"7fb992d307080c806fe0639ccf2345c4d219a86a2a436cef4120e7eb38e9f05a","src/pointer/transmute.rs":"c0dbd9eeb3a558669b5e5bf0d6bdb90974358545c136926bb12700894845816d","src/ref.rs":"6bf9faa14e42c57738478e205a7d6b924964f9218492067eb3bc4facd98d09c6","src/split_at.rs":"63849e12f1728b87c8220532bbb004e5d548fa2c26772469246e463433f07bb6","src/util/macro_util.rs":"744641ed194f25bff3477f64c26bf4ee90d908e9bd61e5417de78ea0f1f1e5bd","src/util/macros.rs":"a62a6efecb5890dbddfec5241516b175302c7a76e121139cf11c9a8d4082bac3","src/util/mod.rs":"bb08db9352030bc332b1ae52f0aaa36fb01a57a1261198e04cb1a442b42d8fea","src/wrappers.rs":"73890c6276c6d8636797f8fb3a92259789e37cfd7df09b40d3d4f44fc044fe4c","testdata/include_value/data":"88d4266fd4e6338d13b845fcf289579d209c897823b9217da3e161936f031589","tests/trybuild.rs":"cbee2dd970cc0737f9a91e4b5e90ea9f1ce0c2639767550c1754519c2d73942d","tests/ui-msrv/diagnostic-not-implemented-from-bytes.rs":"d41e080139456e38ee162a57e489749ed46f4ab3d548b873fff5e3cfd1915089","tests/ui-msrv/diagnostic-not-implemented-from-bytes.stderr":"4d6f6a622aaf9652774459000ae22fa9ba185f94038937e29db60a83201ebb11","tests/ui-msrv/diagnostic-not-implemented-from-zeros.rs":"7068df4b45955c6f40bd71dfaa51ed87c850221a142fc5471b6354d5ed7a2c46","tests/ui-msrv/diagnostic-not-implemented-from-zeros.stderr":"6e811a51f5ca455fa40d7d3d9c82d9f77920d7e951edf04df123b54442416de2","tests/ui-msrv/diagnostic-not-implemented-immutable.rs":"701f02b3e7dbb91ea6257cc08b5aeaed81a32be83f2424ca3887655926b568de","tests/ui-msrv/diagnostic-not-implemented-immutable.stderr":"6ad821a39c1d87e135b7f2271e8764d256cdb8a29a59a3b71d60a308494833c5","tests/ui-msrv/diagnostic-not-implemented-into-bytes.rs":"aaedc974685ddc133c1f6c547f51f480057db9932700210ea513be55ddb3497f","tests/ui-msrv/diagnostic-not-implemented-into-bytes.stderr":"6d4171e67ba7db879f669e4a29c70bc6da2136e6bb863a7db2e4018ddce52b25","tests/ui-msrv/diagnostic-not-implemented-issue-1296.rs":"b030a925ff39cb2ea9b1777273f0bcfd982d972fa43cb5f2b6fdc1e6da467312","tests/ui-msrv/diagnostic-not-implemented-issue-1296.stderr":"b86aefff7280c4e9b4d1dad00be80967be770ffd5b03c7be5ee71fe36ba44287","tests/ui-msrv/diagnostic-not-implemented-known-layout.rs":"1e9d77125b68169b231d62da4fb1a1a76fb71bf7a1c4275a096a0715873b4fd2","tests/ui-msrv/diagnostic-not-implemented-known-layout.stderr":"0c01befe42780668a7b85786a53f63988e933451f6195a67b9c8e21d806152f6","tests/ui-msrv/diagnostic-not-implemented-try-from-bytes.rs":"922b8eab7092dba41ec37b3ebcad7b48eb8d391e54dff3f2869bcf1954c85003","tests/ui-msrv/diagnostic-not-implemented-try-from-bytes.stderr":"94acea9ac40ea7c7f57537797235f44dc7416211125aa3dc0349711568a6f9a9","tests/ui-msrv/diagnostic-not-implemented-unaligned.rs":"8af59d04924492eff43708cb504a421d9c5b8e42cae1ea0a80118f911af47e7b","tests/ui-msrv/diagnostic-not-implemented-unaligned.stderr
":"1b9d9aa822179c417f1615cfd0db5db7cdfa2efed766732e7aff54eb2394b775","tests/ui-msrv/include_value_not_from_bytes.rs":"74f4d54dad245eff591cb46344fd1f25fb27b73de941c04d1ee82b0ad2639807","tests/ui-msrv/include_value_not_from_bytes.stderr":"38fb361f9b342dd1924cfd6c2067c9a7adc6f3d1433656354cd3e819402d5f94","tests/ui-msrv/include_value_wrong_size.rs":"5a0561eeb4cb245cf5b68547b43faa2432c56bfe23f7f50677a0b110dab60cdb","tests/ui-msrv/include_value_wrong_size.stderr":"df5f93c98a658e40f195e65ccc3b0460d3e70ddf080412ff24edba1dd8da032d","tests/ui-msrv/invalid-impls/invalid-impls.rs":"8f1cf816cc77f5dd6d19bba4a463ec46a4281523c7551ab8ebcdb46197fc7732","tests/ui-msrv/invalid-impls/invalid-impls.stderr":"92414680bf94c4f9f4cad7884c5efc2eb24f1b40a09c658470b9728d4816f47c","tests/ui-msrv/max-align.rs":"ffcb6687c98e5629d01b17cbd0845ec195007cc39aa244b26a77d17688c8f13d","tests/ui-msrv/max-align.stderr":"38775aa2a8bc035eedbc57ab0081c865b804d9a7fc5200ec425210fdea6a69d1","tests/ui-msrv/ptr-is-invariant-over-v.rs":"1b88e571206e42ff130d46edb5274a58e39d2b2c3ada8427be53f27793787930","tests/ui-msrv/ptr-is-invariant-over-v.stderr":"204211c5a511f4705412301b5c41139352a727454e71cf438635780a763b5dd7","tests/ui-msrv/transmute-dst-not-frombytes.rs":"2d045c51d4ea1a435ca3f4e435412eeb0ac6511597dccab8e0040d4903f2b7df","tests/ui-msrv/transmute-dst-not-frombytes.stderr":"4884cc5106c88290ce93f415a6f04be824546849fff8252080efae0c27cb9c31","tests/ui-msrv/transmute-mut-const.rs":"5689e8cbc11728f948d0263009a2219e272734e317bd20abd00bd4ddcefba713","tests/ui-msrv/transmute-mut-const.stderr":"df4b2a4c358c29279f16c591c088ca2d7bc10264827c3278df0771c8c6734e3b","tests/ui-msrv/transmute-mut-dst-not-a-reference.rs":"5d784ab588f081bfc304501f811a85ea2662f88fff8274ccbd53172ec255212c","tests/ui-msrv/transmute-mut-dst-not-a-reference.stderr":"40edcbbf7713493d2bf757b5851f6a8059faa166832ddff70266641ca60c6d18","tests/ui-msrv/transmute-mut-dst-not-frombytes.rs":"2b95c8d73c6a321137b5a59c12f554e7433b10f0e95e3ac657330bce0bab149c","tests/ui-msrv/transmute-mut-dst-not-frombytes.stderr":"f7d2010e522be6c7cf92267f631fd8460458a31318e864ab4178e9585b41c537","tests/ui-msrv/transmute-mut-dst-not-intobytes.rs":"2e4da8d35584a2bbeaea33398bb5773a2d40aeee2e2ea7272e9382940cb97ec3","tests/ui-msrv/transmute-mut-dst-not-intobytes.stderr":"11a249f3b16a36558130ac3c529320e400277840f69c7551f4b31de1b684808d","tests/ui-msrv/transmute-mut-dst-unsized.rs":"58c3423c07dd06ca98e61439f318ba5f3f7fc68ca9cb59371ebc482ad54709db","tests/ui-msrv/transmute-mut-dst-unsized.stderr":"dafce50cd8df1aa51d357f00e83c446d708a115ad2c18677b23f1b14d8bbfd25","tests/ui-msrv/transmute-mut-illegal-lifetime.rs":"ec18bf7b3d9bd2674b43d0e04fc0545227473d43b07e2bbccc19c2068df33673","tests/ui-msrv/transmute-mut-illegal-lifetime.stderr":"ff5965b190242ce05735d7c072c11565c5bd8609261c83dd06396ae8416dfbaa","tests/ui-msrv/transmute-mut-src-dst-not-references.rs":"0b73d42fbcecba3483e24d4e9296d24d551de18822b45120e225356c5ccefad8","tests/ui-msrv/transmute-mut-src-dst-not-references.stderr":"fc2740d55afdb07bdde457ac259f48ef5b3e13503968299e51791576328b207d","tests/ui-msrv/transmute-mut-src-immutable.rs":"606aba0c01726255c9be7e67a032ce854209c62dffec16d5dd2c8f484e19979a","tests/ui-msrv/transmute-mut-src-immutable.stderr":"6854b18881116cecf0c716eac01aac312bfe43a295a797c4ad01ac8b7ea7d81c","tests/ui-msrv/transmute-mut-src-not-a-reference.rs":"e627a60c6f6d1b398bdcfc9307dbc57b268cc784b4967d1afaceed7eebd5db47","tests/ui-msrv/transmute-mut-src-not-a-reference.stderr":"538af460b18f588b6075307de50ba1307f98189d2f2aea74346a77ad8b64710c","tests/ui-
msrv/transmute-mut-src-not-frombytes.rs":"647cb1a8fa51b4e833fd3f498a471a344318affc0d2ca8080a7f17dc8a27b30a","tests/ui-msrv/transmute-mut-src-not-frombytes.stderr":"898fc436cea7ae655f35495e4e601ddef08933bc1069ed7129a7edd417b5d347","tests/ui-msrv/transmute-mut-src-not-intobytes.rs":"40ed14c627923d668173d9d5b3ee5cb0462a126ad4f209224205abc4a3f33b45","tests/ui-msrv/transmute-mut-src-not-intobytes.stderr":"aaed331e17675f0f5fa4cba627122cf38c30c0e1287e225d0bba0831cb92ee6e","tests/ui-msrv/transmute-mut-src-unsized.rs":"8ed4d688fc7f465c3dbd475f6628c265cecd4e57b31a3d5a929f02f3dca7a474","tests/ui-msrv/transmute-mut-src-unsized.stderr":"28aaf147561de1bda9f84959ce1463cfd472537af36c20134020032d9ac051a0","tests/ui-msrv/transmute-ptr-to-usize.rs":"ea33dc39115509988d9abd6ac6536d88d82082417b21da9f9bc8cf8369c69618","tests/ui-msrv/transmute-ptr-to-usize.stderr":"97f97dfca59430f123b802e21ccb463041eddabdcc840621036454249c83df20","tests/ui-msrv/transmute-ref-dst-mutable.rs":"1c48caae9912f70dec5f5a99a0c880fe6a3022f11fd412438b8a1576803e5f73","tests/ui-msrv/transmute-ref-dst-mutable.stderr":"289e040b3e725546081dfd07640e499a5622915954f12c871708d3f46ff43d7a","tests/ui-msrv/transmute-ref-dst-not-a-reference.rs":"c4b8a6c1970e30390d0a301e2dbe718b9eeef743299f7e91cd12c582ec203af7","tests/ui-msrv/transmute-ref-dst-not-a-reference.stderr":"b6c1f2aede85cce47f5ca379b9ae5a77c777e7c60de6590578c47432ebacae88","tests/ui-msrv/transmute-ref-dst-not-frombytes.rs":"88f5a210bea3e3a775f92b6aaf4586e62cb9c6f61bcdc1cd7e63ad9ad214b863","tests/ui-msrv/transmute-ref-dst-not-frombytes.stderr":"c17eee33de0e94d29cbb04f7e7ddd3e6b11793fe24d59bae498a3763b7eab983","tests/ui-msrv/transmute-ref-dst-not-nocell.rs":"71ed55ef424c8173bb438c6a1985a1a53e09d7964950d64a50ddb914c3425760","tests/ui-msrv/transmute-ref-dst-not-nocell.stderr":"3983f0ec4ead95d693c5cc189751c18fff5553c4507bafcbd16b35c7c5d8715d","tests/ui-msrv/transmute-ref-dst-unsized.rs":"c374df8d00541fd34fff37e231e341501a427961f60d88ad3e3c375085cc060d","tests/ui-msrv/transmute-ref-dst-unsized.stderr":"6d108355c49d1fcaa03b33f84b24ed5e530179eaf9d10407a639dc493fc9beaf","tests/ui-msrv/transmute-ref-illegal-lifetime.rs":"6812bbf7ec851a8591464f10864dbd1f225e65ed5793b6f6375cbe8a9db50b14","tests/ui-msrv/transmute-ref-illegal-lifetime.stderr":"4f2a3e71cda94564f2343ca9ff23de3eca0d2ff465cedacab187151183813092","tests/ui-msrv/transmute-ref-src-dst-not-references.rs":"7311602a0153b260d819e9608e8e66ef5904919a2349a95187919d8211e48e23","tests/ui-msrv/transmute-ref-src-dst-not-references.stderr":"003bb1ccb5cf8322416e00e0fa5645f94d76aad875e60d281daae9625eb583a4","tests/ui-msrv/transmute-ref-src-not-a-reference.rs":"a921f168fa6cb3c6a19894cecdb118bc3164275746672a916aa5194b92f2fb57","tests/ui-msrv/transmute-ref-src-not-a-reference.stderr":"5a8d829089820ec79d9cd8d9ffac7dbde430914fdad691d46edcd96414d5cad0","tests/ui-msrv/transmute-ref-src-not-intobytes.rs":"81c6fa9775976ab678585a19a174470225e05f0b0cb43ceb754c198a8f0c9732","tests/ui-msrv/transmute-ref-src-not-intobytes.stderr":"42bcfe9f4f3126b6b274bf7838793a449be50b45b484caa7e608143ee492e520","tests/ui-msrv/transmute-ref-src-not-nocell.rs":"0f1cf580d97ed69667b02bfe5de1425ceb0678fc43bf2130e65adc38f3dc5f3f","tests/ui-msrv/transmute-ref-src-not-nocell.stderr":"542ac3b71708fb9373701924b31d16e2db09f6e14ce140491e737dce5400573c","tests/ui-msrv/transmute-ref-src-unsized.rs":"d7797488f0ab5db89944ac7db25625c63aef72e6e4ed481d00a083449050b813","tests/ui-msrv/transmute-ref-src-unsized.stderr":"2d89281a87f6322c34bcf1cc87227d0088d2fc3ac7cf36f0dc6142f17069876c","tests/ui-msrv/transmute-si
ze-decrease.rs":"d191596652adbc26329449af4b2183bdae0a387ee668690b1de8535b71495eb7","tests/ui-msrv/transmute-size-decrease.stderr":"d11ad6ec7bc90cce95a2bb415bdbb5273f684c0b5dd49d982f59c4a719b921c4","tests/ui-msrv/transmute-size-increase-allow-shrink.rs":"d24d67d0012bf85d06b7e5ca790ca725d5fd559052d35322a72490897963cb42","tests/ui-msrv/transmute-size-increase-allow-shrink.stderr":"94a4e467ff346ddc56e35b667b424113cfb9d2c65276b47530f09c771a8681bd","tests/ui-msrv/transmute-size-increase.rs":"8ca917f831b736bfadb6ccee22acde205098678a3a2cb3cc753dfecacafcb0be","tests/ui-msrv/transmute-size-increase.stderr":"dc42334ddbd0e67151a3122cdfdd63d3b7ae9162027bdb61004579331aeb4dd3","tests/ui-msrv/transmute-src-not-intobytes.rs":"e88bbf21c2512b9c04f938ead3990161f9b050aa469d9164244966ba6e2f02dd","tests/ui-msrv/transmute-src-not-intobytes.stderr":"50f65a51782e96e82fe9e8162cb86bb9db710a30331320e420b38d65034a8419","tests/ui-msrv/try_transmute-dst-not-tryfrombytes.rs":"534191c737297f8d60d1227a9cbbb9700bb08db15783d9191df3a64c8424e303","tests/ui-msrv/try_transmute-dst-not-tryfrombytes.stderr":"af8bfea743c5ec2272d4828573f1e984615c63b808dc19b70e27c77cbfc5cc4a","tests/ui-msrv/try_transmute-size-decrease.rs":"b0b9722d8032c1af00202c7bd99ac65e92ffb3dae67e17a6c10724ba86a3b1f0","tests/ui-msrv/try_transmute-size-decrease.stderr":"f70e66f6c58c3bdefd6144a1f40be18f37b819937eddb7a84dad2c5b01a5d93c","tests/ui-msrv/try_transmute-size-increase.rs":"d67d46178f8a945ab2ac5f40b71949468c337b799fec8fd24c241b94d8a6c87b","tests/ui-msrv/try_transmute-size-increase.stderr":"a79eefcba3e2ccb28841b896cd87b426508ee77139a9ae68711fb5b5f203202e","tests/ui-msrv/try_transmute-src-not-intobytes.rs":"fe267df8308820b9d6f6f3c2ba9f139d1f06c9355dcda85a72af312cf1485eb8","tests/ui-msrv/try_transmute-src-not-intobytes.stderr":"a387fa192db75cecfb18009928b87860a65a74cb39ab003977fbb03b3e95ca6a","tests/ui-msrv/try_transmute_mut-alignment-increase.rs":"7515fa447232f1a59b9b6fae435bbf19095ab4538ae0459c9f38678ae0b67ebb","tests/ui-msrv/try_transmute_mut-alignment-increase.stderr":"58dec98bb403f1acd267eedadb4c52a9a6b6582d7946407f502f4e85ace21aca","tests/ui-msrv/try_transmute_mut-dst-not-tryfrombytes.rs":"d5150c8c6dac0d727f91763964a211da00c9842d3130c4c600b4ff6475ef6b83","tests/ui-msrv/try_transmute_mut-dst-not-tryfrombytes.stderr":"115f1d3aca310204ad6e2a5f2d31d1c9897448981047308d0702bef10225fc2a","tests/ui-msrv/try_transmute_mut-size-decrease.rs":"d18f22001da76c8781c8f276657ea73a0de92144e8a2f9c07cdcc2e2fd0df9d5","tests/ui-msrv/try_transmute_mut-size-decrease.stderr":"417da7753854af83dc1974dfb75273de779d20c376aaeed8069fbc074e32e6c5","tests/ui-msrv/try_transmute_mut-size-increase.rs":"29dbecad8f2e42a844e1a0a0e656be1724029362a8b8349b1a9cb6415fd7af6b","tests/ui-msrv/try_transmute_mut-size-increase.stderr":"f7830b448ce48a2eb38eb6af1a67fc5de70f7b4848ad2102ad9375e8e2e30e9e","tests/ui-msrv/try_transmute_mut-src-not-frombytes.rs":"5146bff98d4fa7ad8edeb1f5a4e4d869b6296ca3ad6c239066ba7ae0584f3c1a","tests/ui-msrv/try_transmute_mut-src-not-frombytes.stderr":"ebe8b735a66f8292f7bf52e0f53122ab3f04a2f3d274eba28f124bfceb166dfe","tests/ui-msrv/try_transmute_mut-src-not-intobytes.rs":"cee71e9cf881cf442861326320d58d30b95b9ed051dcd20c5bf1867869c1f792","tests/ui-msrv/try_transmute_mut-src-not-intobytes.stderr":"fa66b0b69cc549046c0ac0b3a1d45f7a4baacd3aa31b27f967d883b4a55d5674","tests/ui-msrv/try_transmute_ref-alignment-increase.rs":"11ae7f214f83b50e9fb7e556ea3d64d07baa274ef18143e0ff4fd76b1095fd64","tests/ui-msrv/try_transmute_ref-alignment-increase.stderr":"0f32103fdd51fc8328516ed7c039d1c369d595
5399a5ed6a30294cde616d7a0c","tests/ui-msrv/try_transmute_ref-dst-mutable.rs":"1ed1c2bf9f6588cc8b2eb57afac161052ac8124e7065606b2aaab51db2aa3457","tests/ui-msrv/try_transmute_ref-dst-mutable.stderr":"8e32cfc1c968313e3d20f8d97f2f464c57635ba749f4882a7d11b41fa13f3fbe","tests/ui-msrv/try_transmute_ref-dst-not-immutable-tryfrombytes.rs":"8a52c04394cc7f3d89b6b93e3906aef833931f730ba5beaed83cb4072334aa01","tests/ui-msrv/try_transmute_ref-dst-not-immutable-tryfrombytes.stderr":"c6141f063a6b51023d99c1b19f30a601a122f12d4ff70fe80bbe259cd95d930c","tests/ui-msrv/try_transmute_ref-size-decrease.rs":"81fabb98426fec9f0809e9706461d10b0fa67fe4bece21d2fe5547be4f7bb9ce","tests/ui-msrv/try_transmute_ref-size-decrease.stderr":"88ec42ee7efd6b336c5a0aac4d4725e1a12f12769a3a53f8804bab0916489c30","tests/ui-msrv/try_transmute_ref-size-increase.rs":"3ba36df7628324b50cfebec229f721b865de15d368b8fd3da2fa197d111fb31b","tests/ui-msrv/try_transmute_ref-size-increase.stderr":"731f5de96aad014412ff7f62fbf12d1c4f8a82d2008340917640588729c787bf","tests/ui-msrv/try_transmute_ref-src-not-immutable-intobytes.rs":"cb455e796e85ac9af743f9b3a1ece8c5e8ee9e4a18128e84685d3c124c595ebe","tests/ui-msrv/try_transmute_ref-src-not-immutable-intobytes.stderr":"8494ad555cb66d69851a2c173fd4c64e53625ddd1228bd883b97bcd9cf553301","tests/ui-nightly/diagnostic-not-implemented-from-bytes.rs":"d41e080139456e38ee162a57e489749ed46f4ab3d548b873fff5e3cfd1915089","tests/ui-nightly/diagnostic-not-implemented-from-bytes.stderr":"9eee5e4a97d966296a52727198ee6a65320eca7ea8039b57e0df114085533c30","tests/ui-nightly/diagnostic-not-implemented-from-zeros.rs":"7068df4b45955c6f40bd71dfaa51ed87c850221a142fc5471b6354d5ed7a2c46","tests/ui-nightly/diagnostic-not-implemented-from-zeros.stderr":"022d5a33685c806823dbbd3b3d91c72379c021ab1b2a0d257341ef7a2cb94bc9","tests/ui-nightly/diagnostic-not-implemented-immutable.rs":"701f02b3e7dbb91ea6257cc08b5aeaed81a32be83f2424ca3887655926b568de","tests/ui-nightly/diagnostic-not-implemented-immutable.stderr":"38c4a04fb954ec958da10c4de2f209edbd00c8ada1e20cdcf1155a49ae8e0ea9","tests/ui-nightly/diagnostic-not-implemented-into-bytes.rs":"aaedc974685ddc133c1f6c547f51f480057db9932700210ea513be55ddb3497f","tests/ui-nightly/diagnostic-not-implemented-into-bytes.stderr":"af5d6ef94fe84932f68df10a2485406db7fd01eec83875121e4f62522bc0ef52","tests/ui-nightly/diagnostic-not-implemented-issue-1296.rs":"b030a925ff39cb2ea9b1777273f0bcfd982d972fa43cb5f2b6fdc1e6da467312","tests/ui-nightly/diagnostic-not-implemented-issue-1296.stderr":"d6b552c50d90cadab581d88e97e89ad3a0ba524db96fb1a238f8a956b28be61d","tests/ui-nightly/diagnostic-not-implemented-known-layout.rs":"1e9d77125b68169b231d62da4fb1a1a76fb71bf7a1c4275a096a0715873b4fd2","tests/ui-nightly/diagnostic-not-implemented-known-layout.stderr":"c4b1c6cf4d9b6c219bbd76d3ad97b480e5195ed03430b9b08b96f7b499de5632","tests/ui-nightly/diagnostic-not-implemented-try-from-bytes.rs":"922b8eab7092dba41ec37b3ebcad7b48eb8d391e54dff3f2869bcf1954c85003","tests/ui-nightly/diagnostic-not-implemented-try-from-bytes.stderr":"b1b4a6f8a645f84b8743be5fc9c56076159ef5fb64ca00aa597270400f9c18f0","tests/ui-nightly/diagnostic-not-implemented-unaligned.rs":"8af59d04924492eff43708cb504a421d9c5b8e42cae1ea0a80118f911af47e7b","tests/ui-nightly/diagnostic-not-implemented-unaligned.stderr":"866caedb43090c3c84c3f874197da0355dd68b4c9c408db7e70b312a46c95a9f","tests/ui-nightly/include_value_not_from_bytes.rs":"74f4d54dad245eff591cb46344fd1f25fb27b73de941c04d1ee82b0ad2639807","tests/ui-nightly/include_value_not_from_bytes.stderr":"aed85100bea1880d5a43b03
581fd9d6a4fbb90a3d4b152905c81f74e39bbc54a","tests/ui-nightly/include_value_wrong_size.rs":"5a0561eeb4cb245cf5b68547b43faa2432c56bfe23f7f50677a0b110dab60cdb","tests/ui-nightly/include_value_wrong_size.stderr":"8e629e37dda26ab8dcb22240997656df3ad9e29f5c03f081106e86a0f75f9a69","tests/ui-nightly/invalid-impls/invalid-impls.rs":"8f1cf816cc77f5dd6d19bba4a463ec46a4281523c7551ab8ebcdb46197fc7732","tests/ui-nightly/invalid-impls/invalid-impls.stderr":"484525cb1fe73974262a663301f49717ccc05d2891ab375d8e9ea4a2c0d61acd","tests/ui-nightly/max-align.rs":"ffcb6687c98e5629d01b17cbd0845ec195007cc39aa244b26a77d17688c8f13d","tests/ui-nightly/max-align.stderr":"e6a1e261b02aa0fded5a3f3e3cdda6afe067f0d1430d684e3d7bd24af2e8635a","tests/ui-nightly/ptr-is-invariant-over-v.rs":"1b88e571206e42ff130d46edb5274a58e39d2b2c3ada8427be53f27793787930","tests/ui-nightly/ptr-is-invariant-over-v.stderr":"9214b624a8d0d4f2fc3c2e7a17c46ad6843af6e8adb4c5fb2718ba85da4e4ea0","tests/ui-nightly/transmute-dst-not-frombytes.rs":"2d045c51d4ea1a435ca3f4e435412eeb0ac6511597dccab8e0040d4903f2b7df","tests/ui-nightly/transmute-dst-not-frombytes.stderr":"b48189d050615eb0d32f69d9abdb2a0d73b633fbbf8bcbfc8577d215078c506b","tests/ui-nightly/transmute-mut-const.rs":"5689e8cbc11728f948d0263009a2219e272734e317bd20abd00bd4ddcefba713","tests/ui-nightly/transmute-mut-const.stderr":"ad33bf966e5ff2eedb8027dcbc091dcf3eb4a1bfd5179e5bc0dc563b4c130ad3","tests/ui-nightly/transmute-mut-dst-not-a-reference.rs":"5d784ab588f081bfc304501f811a85ea2662f88fff8274ccbd53172ec255212c","tests/ui-nightly/transmute-mut-dst-not-a-reference.stderr":"71bbe3ef3c2e1acdb27e132730a353f59502e96ce0e0dd561f37d83f66a87fd7","tests/ui-nightly/transmute-mut-dst-not-frombytes.rs":"2b95c8d73c6a321137b5a59c12f554e7433b10f0e95e3ac657330bce0bab149c","tests/ui-nightly/transmute-mut-dst-not-frombytes.stderr":"2d8b7a070e595f01893c4fb411e94f83f66b476d09bbd5054716d0c6d99a7bbb","tests/ui-nightly/transmute-mut-dst-not-intobytes.rs":"2e4da8d35584a2bbeaea33398bb5773a2d40aeee2e2ea7272e9382940cb97ec3","tests/ui-nightly/transmute-mut-dst-not-intobytes.stderr":"675414f04d0cb1c50c31e79abcf637237c2c67d36cac29454c41695dd2246239","tests/ui-nightly/transmute-mut-dst-unsized.rs":"58c3423c07dd06ca98e61439f318ba5f3f7fc68ca9cb59371ebc482ad54709db","tests/ui-nightly/transmute-mut-dst-unsized.stderr":"286054e4de6ad2c7beb5773974275633508f8320370570b95579afea932d6a74","tests/ui-nightly/transmute-mut-illegal-lifetime.rs":"ec18bf7b3d9bd2674b43d0e04fc0545227473d43b07e2bbccc19c2068df33673","tests/ui-nightly/transmute-mut-illegal-lifetime.stderr":"b0379252732ca51314077fa20d3fb4bfcbee61f486229547c807ed0d7dede9c8","tests/ui-nightly/transmute-mut-src-dst-not-references.rs":"0b73d42fbcecba3483e24d4e9296d24d551de18822b45120e225356c5ccefad8","tests/ui-nightly/transmute-mut-src-dst-not-references.stderr":"0e1e17242ec0b1e9052087b18ccdde9fa117e430a3675b624c49d36eb933ee88","tests/ui-nightly/transmute-mut-src-immutable.rs":"606aba0c01726255c9be7e67a032ce854209c62dffec16d5dd2c8f484e19979a","tests/ui-nightly/transmute-mut-src-immutable.stderr":"d99fc596f5732e5a4b193028812e047ba0c748017a94fd55d4f7802849159359","tests/ui-nightly/transmute-mut-src-not-a-reference.rs":"e627a60c6f6d1b398bdcfc9307dbc57b268cc784b4967d1afaceed7eebd5db47","tests/ui-nightly/transmute-mut-src-not-a-reference.stderr":"3085a0120d3dcbc1d01d226e0b4235fe39da9a956444aabdedac9ec938aca609","tests/ui-nightly/transmute-mut-src-not-frombytes.rs":"647cb1a8fa51b4e833fd3f498a471a344318affc0d2ca8080a7f17dc8a27b30a","tests/ui-nightly/transmute-mut-src-not-frombytes.stderr":"38ecc0
fdfce9a35a736d43e3314d7727043d97c6cdc45a8e5d629efa4e28b6e3","tests/ui-nightly/transmute-mut-src-not-intobytes.rs":"40ed14c627923d668173d9d5b3ee5cb0462a126ad4f209224205abc4a3f33b45","tests/ui-nightly/transmute-mut-src-not-intobytes.stderr":"8ff84cdf030145ee6af39eb8660b3cc12baefda1a1c99f03a8ec168b07270eac","tests/ui-nightly/transmute-mut-src-unsized.rs":"8ed4d688fc7f465c3dbd475f6628c265cecd4e57b31a3d5a929f02f3dca7a474","tests/ui-nightly/transmute-mut-src-unsized.stderr":"3e53f2e5c4007d9a65af4213d4b11fc1ff49220e2d3d5d015772d8f2e6f87c8f","tests/ui-nightly/transmute-ptr-to-usize.rs":"ea33dc39115509988d9abd6ac6536d88d82082417b21da9f9bc8cf8369c69618","tests/ui-nightly/transmute-ptr-to-usize.stderr":"a815eca200ba3c91d1ace01405f7eee5990d1c71ad6dbd3d36672724baf14e62","tests/ui-nightly/transmute-ref-dst-mutable.rs":"1c48caae9912f70dec5f5a99a0c880fe6a3022f11fd412438b8a1576803e5f73","tests/ui-nightly/transmute-ref-dst-mutable.stderr":"96d38ce9a807ad7b60a846a8f5558c447da0d6cbe9225a077df4997712424d9a","tests/ui-nightly/transmute-ref-dst-not-a-reference.rs":"c4b8a6c1970e30390d0a301e2dbe718b9eeef743299f7e91cd12c582ec203af7","tests/ui-nightly/transmute-ref-dst-not-a-reference.stderr":"8ed2540877865fcdfca6e150465996a8f2872eb122ed5d647825e9181ae64754","tests/ui-nightly/transmute-ref-dst-not-frombytes.rs":"88f5a210bea3e3a775f92b6aaf4586e62cb9c6f61bcdc1cd7e63ad9ad214b863","tests/ui-nightly/transmute-ref-dst-not-frombytes.stderr":"5cb1f280d96bd54c39ea5181f9f00530d56cc5294e53b17a84a81cd843cf774f","tests/ui-nightly/transmute-ref-dst-not-nocell.rs":"71ed55ef424c8173bb438c6a1985a1a53e09d7964950d64a50ddb914c3425760","tests/ui-nightly/transmute-ref-dst-not-nocell.stderr":"2be667ca15049ad165980aa68f057be236325067df6d2d8ab3dcb56ccad9e191","tests/ui-nightly/transmute-ref-dst-unsized.rs":"c374df8d00541fd34fff37e231e341501a427961f60d88ad3e3c375085cc060d","tests/ui-nightly/transmute-ref-dst-unsized.stderr":"1cd2f5e592729c3b54eee0c5913ed9c725565f3e6cb76d20719ef48ff23a6d67","tests/ui-nightly/transmute-ref-illegal-lifetime.rs":"6812bbf7ec851a8591464f10864dbd1f225e65ed5793b6f6375cbe8a9db50b14","tests/ui-nightly/transmute-ref-illegal-lifetime.stderr":"cb98c1b304334e58fc61be1c4b7782e68ab92d90a44c9627326d94d14a44cc38","tests/ui-nightly/transmute-ref-src-dst-not-references.rs":"7311602a0153b260d819e9608e8e66ef5904919a2349a95187919d8211e48e23","tests/ui-nightly/transmute-ref-src-dst-not-references.stderr":"ca5b956d99998df493f7c72df6bd315b8cd6f1f9b113a45416b0088f1e368900","tests/ui-nightly/transmute-ref-src-not-a-reference.rs":"a921f168fa6cb3c6a19894cecdb118bc3164275746672a916aa5194b92f2fb57","tests/ui-nightly/transmute-ref-src-not-a-reference.stderr":"f7b51be513a0603070e218bdd1ca2c47a94c58151ab5649f68877ea3d83268f4","tests/ui-nightly/transmute-ref-src-not-intobytes.rs":"81c6fa9775976ab678585a19a174470225e05f0b0cb43ceb754c198a8f0c9732","tests/ui-nightly/transmute-ref-src-not-intobytes.stderr":"fd9af9448514dc1aee1c7898d20cbeb34cc089b2e279c0307937f3b781f62f54","tests/ui-nightly/transmute-ref-src-not-nocell.rs":"0f1cf580d97ed69667b02bfe5de1425ceb0678fc43bf2130e65adc38f3dc5f3f","tests/ui-nightly/transmute-ref-src-not-nocell.stderr":"4f9d005f1a9d0345e9c0efd6103c5e808573aa431a49c122275bdfc74a850594","tests/ui-nightly/transmute-ref-src-unsized.rs":"d7797488f0ab5db89944ac7db25625c63aef72e6e4ed481d00a083449050b813","tests/ui-nightly/transmute-ref-src-unsized.stderr":"18c3a7284981e9f402cfb43e7ce87e75f3dab3c43d462cf534c12ad337403783","tests/ui-nightly/transmute-size-decrease.rs":"d191596652adbc26329449af4b2183bdae0a387ee668690b1de8535b71495eb7",
"tests/ui-nightly/transmute-size-decrease.stderr":"5bea10ccca340f92874f09380e109d52d28693283b7c9d06385924a7f5f40d9b","tests/ui-nightly/transmute-size-increase-allow-shrink.rs":"d24d67d0012bf85d06b7e5ca790ca725d5fd559052d35322a72490897963cb42","tests/ui-nightly/transmute-size-increase-allow-shrink.stderr":"e649f6b3290b27aded981ccf0826893d92502de117f0a90273eb8a7f55ccd3da","tests/ui-nightly/transmute-size-increase.rs":"8ca917f831b736bfadb6ccee22acde205098678a3a2cb3cc753dfecacafcb0be","tests/ui-nightly/transmute-size-increase.stderr":"cd1e47fc2a228429c48fb65bbae98958f9beb49a84945f67dfe5075c5eef6621","tests/ui-nightly/transmute-src-not-intobytes.rs":"e88bbf21c2512b9c04f938ead3990161f9b050aa469d9164244966ba6e2f02dd","tests/ui-nightly/transmute-src-not-intobytes.stderr":"d9e4accc84059bd31ad624677c9d364b57171b035ca3254dc6f30053709d32af","tests/ui-nightly/try_transmute-dst-not-tryfrombytes.rs":"534191c737297f8d60d1227a9cbbb9700bb08db15783d9191df3a64c8424e303","tests/ui-nightly/try_transmute-dst-not-tryfrombytes.stderr":"ac89c317d93991d710092435572e4222770615d2f0de61b73e85fbe1ad99b7f0","tests/ui-nightly/try_transmute-size-decrease.rs":"b0b9722d8032c1af00202c7bd99ac65e92ffb3dae67e17a6c10724ba86a3b1f0","tests/ui-nightly/try_transmute-size-decrease.stderr":"6e806fd13bd2d961d0af597272637cb2a9db5a1de88a1228bc4643ffc9d3e156","tests/ui-nightly/try_transmute-size-increase.rs":"d67d46178f8a945ab2ac5f40b71949468c337b799fec8fd24c241b94d8a6c87b","tests/ui-nightly/try_transmute-size-increase.stderr":"21f82b6e4aa0f8b4495f064c560f7f2715b06b3f409acb0834455b6a32477e56","tests/ui-nightly/try_transmute-src-not-intobytes.rs":"fe267df8308820b9d6f6f3c2ba9f139d1f06c9355dcda85a72af312cf1485eb8","tests/ui-nightly/try_transmute-src-not-intobytes.stderr":"aa37d60f5866b4d57377db1fe5316ed931aefe0d3783219ff6a4b82f03a0a452","tests/ui-nightly/try_transmute_mut-alignment-increase.rs":"7515fa447232f1a59b9b6fae435bbf19095ab4538ae0459c9f38678ae0b67ebb","tests/ui-nightly/try_transmute_mut-alignment-increase.stderr":"3315b7cf6107cf9bf09d12ec0aa7c7aa63e1bd3cf7fe9ce214fa85dfd4b3e1ad","tests/ui-nightly/try_transmute_mut-dst-not-tryfrombytes.rs":"d5150c8c6dac0d727f91763964a211da00c9842d3130c4c600b4ff6475ef6b83","tests/ui-nightly/try_transmute_mut-dst-not-tryfrombytes.stderr":"f8d3bf10967dbdc127fcac5b98bcb72c11eb5003d5d6ce8b82f2a25c35597cb4","tests/ui-nightly/try_transmute_mut-size-decrease.rs":"d18f22001da76c8781c8f276657ea73a0de92144e8a2f9c07cdcc2e2fd0df9d5","tests/ui-nightly/try_transmute_mut-size-decrease.stderr":"d62d253558dea1cca500a610cc0c8403773936f3eb074cae6a3bdff5cfb3d29c","tests/ui-nightly/try_transmute_mut-size-increase.rs":"29dbecad8f2e42a844e1a0a0e656be1724029362a8b8349b1a9cb6415fd7af6b","tests/ui-nightly/try_transmute_mut-size-increase.stderr":"7881a25d87477e01ba7d129f8216dfa4638679bff1dd0cf18079f48cbc037593","tests/ui-nightly/try_transmute_mut-src-not-frombytes.rs":"5146bff98d4fa7ad8edeb1f5a4e4d869b6296ca3ad6c239066ba7ae0584f3c1a","tests/ui-nightly/try_transmute_mut-src-not-frombytes.stderr":"4a093f081eec127d766d235c5dc69e12474d8d07124ae65cfb635b4cdafbc401","tests/ui-nightly/try_transmute_mut-src-not-intobytes.rs":"cee71e9cf881cf442861326320d58d30b95b9ed051dcd20c5bf1867869c1f792","tests/ui-nightly/try_transmute_mut-src-not-intobytes.stderr":"f9d412670dd39de3b09234901e3442934c30997e2b74982c6373172943981c48","tests/ui-nightly/try_transmute_ref-alignment-increase.rs":"11ae7f214f83b50e9fb7e556ea3d64d07baa274ef18143e0ff4fd76b1095fd64","tests/ui-nightly/try_transmute_ref-alignment-increase.stderr":"2dbec7ad47e5c4be0ab7b4abb4ef84ca98
63e8e03dc4272c00271cfe6197fd0b","tests/ui-nightly/try_transmute_ref-dst-mutable.rs":"1ed1c2bf9f6588cc8b2eb57afac161052ac8124e7065606b2aaab51db2aa3457","tests/ui-nightly/try_transmute_ref-dst-mutable.stderr":"028d0a5e4828d2f2b8aa0f317d9fede4a16f2d442414214d86052f52740726a5","tests/ui-nightly/try_transmute_ref-dst-not-immutable-tryfrombytes.rs":"8a52c04394cc7f3d89b6b93e3906aef833931f730ba5beaed83cb4072334aa01","tests/ui-nightly/try_transmute_ref-dst-not-immutable-tryfrombytes.stderr":"1a314c7a2414f67cfad65561c98af81f3c364eaa050406306278906ae5111745","tests/ui-nightly/try_transmute_ref-size-decrease.rs":"81fabb98426fec9f0809e9706461d10b0fa67fe4bece21d2fe5547be4f7bb9ce","tests/ui-nightly/try_transmute_ref-size-decrease.stderr":"d3b5656878ade1e940e3bc9d2289ebaaa588f0c12af9dbb26165aa5b73d5198b","tests/ui-nightly/try_transmute_ref-size-increase.rs":"3ba36df7628324b50cfebec229f721b865de15d368b8fd3da2fa197d111fb31b","tests/ui-nightly/try_transmute_ref-size-increase.stderr":"f2c09ea04ef8bcc49bb2dd7df573c2bab8cad5754b72d8d5cbf6c471991d485b","tests/ui-nightly/try_transmute_ref-src-not-immutable-intobytes.rs":"cb455e796e85ac9af743f9b3a1ece8c5e8ee9e4a18128e84685d3c124c595ebe","tests/ui-nightly/try_transmute_ref-src-not-immutable-intobytes.stderr":"47742a4955efaeb09f0a9c32f3b233e516867364c255fb85fcd7e3058925ea8f","tests/ui-stable/diagnostic-not-implemented-from-bytes.rs":"d41e080139456e38ee162a57e489749ed46f4ab3d548b873fff5e3cfd1915089","tests/ui-stable/diagnostic-not-implemented-from-bytes.stderr":"790283fe85618d5cbd8afe5c44f1bb30ecd15cc19d07b63b23672602eb1cc0ec","tests/ui-stable/diagnostic-not-implemented-from-zeros.rs":"7068df4b45955c6f40bd71dfaa51ed87c850221a142fc5471b6354d5ed7a2c46","tests/ui-stable/diagnostic-not-implemented-from-zeros.stderr":"f740aad6867c85a888bc49a0e77604ebbc9d4202b4d68ca76c8b9a101051fd84","tests/ui-stable/diagnostic-not-implemented-immutable.rs":"701f02b3e7dbb91ea6257cc08b5aeaed81a32be83f2424ca3887655926b568de","tests/ui-stable/diagnostic-not-implemented-immutable.stderr":"7729448e401e640e30b1d9567284fcda80549bc93c6a5744073c5746d4a920ff","tests/ui-stable/diagnostic-not-implemented-into-bytes.rs":"aaedc974685ddc133c1f6c547f51f480057db9932700210ea513be55ddb3497f","tests/ui-stable/diagnostic-not-implemented-into-bytes.stderr":"50cb9efed7c9dc0758f40a5b917882f1b4a5f6cc8ebf30f0fc8050e326db1d89","tests/ui-stable/diagnostic-not-implemented-issue-1296.rs":"b030a925ff39cb2ea9b1777273f0bcfd982d972fa43cb5f2b6fdc1e6da467312","tests/ui-stable/diagnostic-not-implemented-issue-1296.stderr":"91738713954e5bcba6fc548a289cefcd483384af6f75b7bece5afdc480efe432","tests/ui-stable/diagnostic-not-implemented-known-layout.rs":"1e9d77125b68169b231d62da4fb1a1a76fb71bf7a1c4275a096a0715873b4fd2","tests/ui-stable/diagnostic-not-implemented-known-layout.stderr":"f6074b7394668ae7c4904eab553d8ae02819446b11fea9452071b9765b1d7087","tests/ui-stable/diagnostic-not-implemented-try-from-bytes.rs":"922b8eab7092dba41ec37b3ebcad7b48eb8d391e54dff3f2869bcf1954c85003","tests/ui-stable/diagnostic-not-implemented-try-from-bytes.stderr":"8f13f0614bb703043c58feb8df52e27e6835a4e389a1b8dc26cb6b48058ca9d6","tests/ui-stable/diagnostic-not-implemented-unaligned.rs":"8af59d04924492eff43708cb504a421d9c5b8e42cae1ea0a80118f911af47e7b","tests/ui-stable/diagnostic-not-implemented-unaligned.stderr":"fc8274d232d0201cfa161b6555d806ae7715f99919f335bf587ad7d6ff7eda4a","tests/ui-stable/include_value_not_from_bytes.rs":"74f4d54dad245eff591cb46344fd1f25fb27b73de941c04d1ee82b0ad2639807","tests/ui-stable/include_value_not_from_bytes.stderr":"5a75a15
d3225f45eaff49797d58559183ef8c321134d4eb2b998f1dd10915a35","tests/ui-stable/include_value_wrong_size.rs":"5a0561eeb4cb245cf5b68547b43faa2432c56bfe23f7f50677a0b110dab60cdb","tests/ui-stable/include_value_wrong_size.stderr":"25bd3dae1effd2f280c37da62c84ebf5aac8cb537e564b054c8ac80d15175a67","tests/ui-stable/invalid-impls/invalid-impls.rs":"8f1cf816cc77f5dd6d19bba4a463ec46a4281523c7551ab8ebcdb46197fc7732","tests/ui-stable/invalid-impls/invalid-impls.stderr":"2774b19066e8a4c8fb7510767c3b2bf82473ff7f004182abd5c2cebcdb4bee21","tests/ui-stable/max-align.rs":"ffcb6687c98e5629d01b17cbd0845ec195007cc39aa244b26a77d17688c8f13d","tests/ui-stable/max-align.stderr":"a8bd50e80cd0ae680a52ea71d06d259a43300dcfbf6b336a12cb371fe84e119b","tests/ui-stable/ptr-is-invariant-over-v.rs":"1b88e571206e42ff130d46edb5274a58e39d2b2c3ada8427be53f27793787930","tests/ui-stable/ptr-is-invariant-over-v.stderr":"e44898734e01055090b508e2923010f7e455b341dfb8d5c365583219448dad5c","tests/ui-stable/transmute-dst-not-frombytes.rs":"2d045c51d4ea1a435ca3f4e435412eeb0ac6511597dccab8e0040d4903f2b7df","tests/ui-stable/transmute-dst-not-frombytes.stderr":"33f1d8b38b4bba6a6258941335e6ff5674c0f8d7eb9801c3b2baca232a52f2fe","tests/ui-stable/transmute-mut-const.rs":"5689e8cbc11728f948d0263009a2219e272734e317bd20abd00bd4ddcefba713","tests/ui-stable/transmute-mut-const.stderr":"c05b601177992521b71aa80a97022457641c59b57cc91ff0a0c2b1bc499df2b3","tests/ui-stable/transmute-mut-dst-not-a-reference.rs":"5d784ab588f081bfc304501f811a85ea2662f88fff8274ccbd53172ec255212c","tests/ui-stable/transmute-mut-dst-not-a-reference.stderr":"7a6e78f3cc02001e52e24d961b6de19957b681a2834ea4f62e877e7167a5910a","tests/ui-stable/transmute-mut-dst-not-frombytes.rs":"2b95c8d73c6a321137b5a59c12f554e7433b10f0e95e3ac657330bce0bab149c","tests/ui-stable/transmute-mut-dst-not-frombytes.stderr":"6e55c65526f2bb620095ce84c44875a0f60c2bd839be1df2f79503919c801c50","tests/ui-stable/transmute-mut-dst-not-intobytes.rs":"2e4da8d35584a2bbeaea33398bb5773a2d40aeee2e2ea7272e9382940cb97ec3","tests/ui-stable/transmute-mut-dst-not-intobytes.stderr":"e62de4153ad39eea866b8a54428a489eea9f088d287521bf7a5aefa0761343c4","tests/ui-stable/transmute-mut-dst-unsized.rs":"58c3423c07dd06ca98e61439f318ba5f3f7fc68ca9cb59371ebc482ad54709db","tests/ui-stable/transmute-mut-dst-unsized.stderr":"a192f0367155f0709ef8da40b929c5f70bb067b43959c6c97c79d9a102e04809","tests/ui-stable/transmute-mut-illegal-lifetime.rs":"ec18bf7b3d9bd2674b43d0e04fc0545227473d43b07e2bbccc19c2068df33673","tests/ui-stable/transmute-mut-illegal-lifetime.stderr":"3a43e0be32ef3589fe3fa713d387bd3976bd8c75813f9641bbf7c539e10bed41","tests/ui-stable/transmute-mut-src-dst-not-references.rs":"0b73d42fbcecba3483e24d4e9296d24d551de18822b45120e225356c5ccefad8","tests/ui-stable/transmute-mut-src-dst-not-references.stderr":"830581700736527e224bd923da3cd9c215e68556d2379c678174c08eff1501d6","tests/ui-stable/transmute-mut-src-dst-unsized.stderr":"e53116bd827f30dff68f24c4b6a86dc634aa3b2804ce00c037ee95b4b5dcb0b9","tests/ui-stable/transmute-mut-src-immutable.rs":"606aba0c01726255c9be7e67a032ce854209c62dffec16d5dd2c8f484e19979a","tests/ui-stable/transmute-mut-src-immutable.stderr":"7c24d82d943695955b3ec1f0a53a349645fd3de1d549f3be989532e3774279bf","tests/ui-stable/transmute-mut-src-not-a-reference.rs":"e627a60c6f6d1b398bdcfc9307dbc57b268cc784b4967d1afaceed7eebd5db47","tests/ui-stable/transmute-mut-src-not-a-reference.stderr":"29b09aea59cfdb4b6535c5d33ec803539f28e53cce81938767ea0c22a1b1ce7d","tests/ui-stable/transmute-mut-src-not-frombytes.rs":"647cb1a8fa51b4e833fd3f
498a471a344318affc0d2ca8080a7f17dc8a27b30a","tests/ui-stable/transmute-mut-src-not-frombytes.stderr":"e0d1f15fa695b9babc901b40e9c9219c79a8b3cc5f81b971b0e73dfe1290cc51","tests/ui-stable/transmute-mut-src-not-intobytes.rs":"40ed14c627923d668173d9d5b3ee5cb0462a126ad4f209224205abc4a3f33b45","tests/ui-stable/transmute-mut-src-not-intobytes.stderr":"608ffad6d6097f83b393febfbf1267b26b683f7f63623ecb36f3a5698c89e4f1","tests/ui-stable/transmute-mut-src-unsized.rs":"8ed4d688fc7f465c3dbd475f6628c265cecd4e57b31a3d5a929f02f3dca7a474","tests/ui-stable/transmute-mut-src-unsized.stderr":"26cd4d952dfd909c87567459d22d349934c85a3e061aba56bed2f0951720eecd","tests/ui-stable/transmute-ptr-to-usize.rs":"ea33dc39115509988d9abd6ac6536d88d82082417b21da9f9bc8cf8369c69618","tests/ui-stable/transmute-ptr-to-usize.stderr":"9b69640228c8558a70a0512a7508f3cd3cd0d0183614e4162cbeee829aca7764","tests/ui-stable/transmute-ref-dst-mutable.rs":"1c48caae9912f70dec5f5a99a0c880fe6a3022f11fd412438b8a1576803e5f73","tests/ui-stable/transmute-ref-dst-mutable.stderr":"fc83b5283cb5319fd7a2b79f94ed0a49f16bce5b222f7e1cc5ce5a879f3de650","tests/ui-stable/transmute-ref-dst-not-a-reference.rs":"c4b8a6c1970e30390d0a301e2dbe718b9eeef743299f7e91cd12c582ec203af7","tests/ui-stable/transmute-ref-dst-not-a-reference.stderr":"e8a126f4832344b8a69591fcc25e22bbbb29f2078b809a47f8afa40ac1087a1f","tests/ui-stable/transmute-ref-dst-not-frombytes.rs":"88f5a210bea3e3a775f92b6aaf4586e62cb9c6f61bcdc1cd7e63ad9ad214b863","tests/ui-stable/transmute-ref-dst-not-frombytes.stderr":"ce843cb8ed4545dc60f5a4183e312f034c52dd3a2d1e19c988d1db0095bd1fc5","tests/ui-stable/transmute-ref-dst-not-nocell.rs":"71ed55ef424c8173bb438c6a1985a1a53e09d7964950d64a50ddb914c3425760","tests/ui-stable/transmute-ref-dst-not-nocell.stderr":"63d4723073da96e356b672ce61cdfec1971abfd365f479628cc29ae9bc050764","tests/ui-stable/transmute-ref-dst-unsized.rs":"c374df8d00541fd34fff37e231e341501a427961f60d88ad3e3c375085cc060d","tests/ui-stable/transmute-ref-dst-unsized.stderr":"4de86fa6717a8e8599f93ed4bc11fdf98dc3eb9eeda05edfb6bfd188e5f5796d","tests/ui-stable/transmute-ref-illegal-lifetime.rs":"6812bbf7ec851a8591464f10864dbd1f225e65ed5793b6f6375cbe8a9db50b14","tests/ui-stable/transmute-ref-illegal-lifetime.stderr":"45ab741d710dc5a01a21ab64f99927e7da5593328b2037b9bc82a87bc0969136","tests/ui-stable/transmute-ref-src-dst-not-references.rs":"7311602a0153b260d819e9608e8e66ef5904919a2349a95187919d8211e48e23","tests/ui-stable/transmute-ref-src-dst-not-references.stderr":"2bff9f290ec40458939a1633f850853b3486220cfd40bc24c4e52635b7455742","tests/ui-stable/transmute-ref-src-dst-unsized.stderr":"ecbfb5e0a5c06fce54b4fe6a50682c9bc6b7807cca137aaa73b8ea5f663c49ab","tests/ui-stable/transmute-ref-src-not-a-reference.rs":"a921f168fa6cb3c6a19894cecdb118bc3164275746672a916aa5194b92f2fb57","tests/ui-stable/transmute-ref-src-not-a-reference.stderr":"52efb101d85126138395fbed84c7cb911f86ea4457b991d91b2b6ec66521bcff","tests/ui-stable/transmute-ref-src-not-intobytes.rs":"81c6fa9775976ab678585a19a174470225e05f0b0cb43ceb754c198a8f0c9732","tests/ui-stable/transmute-ref-src-not-intobytes.stderr":"c02c61f300bf89713f03f2e55cbe6053f397e643a4a3c04c3b20e7968f555ab2","tests/ui-stable/transmute-ref-src-not-nocell.rs":"0f1cf580d97ed69667b02bfe5de1425ceb0678fc43bf2130e65adc38f3dc5f3f","tests/ui-stable/transmute-ref-src-not-nocell.stderr":"6be94982f83c6012b5e90bffc5a27e99a32bc5b44585f6874899ccab3c60dfdf","tests/ui-stable/transmute-ref-src-unsized.rs":"d7797488f0ab5db89944ac7db25625c63aef72e6e4ed481d00a083449050b813","tests/ui-stable/transmute-re
f-src-unsized.stderr":"94e78082b1f598945fccb75119939e4279d4d156f4928c4ff1d0d43ad4d46a1d","tests/ui-stable/transmute-size-decrease.rs":"d191596652adbc26329449af4b2183bdae0a387ee668690b1de8535b71495eb7","tests/ui-stable/transmute-size-decrease.stderr":"f1647c83bac305d46e5fffca2c32b6d7b15068f2a8e9c46fd0319d22d2b31477","tests/ui-stable/transmute-size-increase-allow-shrink.rs":"d24d67d0012bf85d06b7e5ca790ca725d5fd559052d35322a72490897963cb42","tests/ui-stable/transmute-size-increase-allow-shrink.stderr":"daf4e150bbddd9e0c1a41bfc6f3bb7b4633d3f58d550360ff709472d921a7dc6","tests/ui-stable/transmute-size-increase.rs":"8ca917f831b736bfadb6ccee22acde205098678a3a2cb3cc753dfecacafcb0be","tests/ui-stable/transmute-size-increase.stderr":"a39a62a790bf4ad8573c76ce18379b6f18c6c9006bcc7b5b217908f2ec32417b","tests/ui-stable/transmute-src-not-intobytes.rs":"e88bbf21c2512b9c04f938ead3990161f9b050aa469d9164244966ba6e2f02dd","tests/ui-stable/transmute-src-not-intobytes.stderr":"0e604a364e21afa6e77a86e76e5ea833311322181a1259614a4eb76cd92369aa","tests/ui-stable/try_transmute-dst-not-tryfrombytes.rs":"534191c737297f8d60d1227a9cbbb9700bb08db15783d9191df3a64c8424e303","tests/ui-stable/try_transmute-dst-not-tryfrombytes.stderr":"2fcc4ca5e2924c8531db45e94584071edbbdc0544973edc6fb6c3ab9eeabfa04","tests/ui-stable/try_transmute-size-decrease.rs":"b0b9722d8032c1af00202c7bd99ac65e92ffb3dae67e17a6c10724ba86a3b1f0","tests/ui-stable/try_transmute-size-decrease.stderr":"e072f6306d1382230f8737b93e40e6be6ec98df7fa3ee07028ec0684328058ab","tests/ui-stable/try_transmute-size-increase.rs":"d67d46178f8a945ab2ac5f40b71949468c337b799fec8fd24c241b94d8a6c87b","tests/ui-stable/try_transmute-size-increase.stderr":"3be5aeb6aa65e18003b1fec52ed6fb016ed6b64dc61135437230e65afbbb28cf","tests/ui-stable/try_transmute-src-not-intobytes.rs":"fe267df8308820b9d6f6f3c2ba9f139d1f06c9355dcda85a72af312cf1485eb8","tests/ui-stable/try_transmute-src-not-intobytes.stderr":"619234202fb3995d31c83ef9a0c898dfce75e342a424695d993d58be88c65564","tests/ui-stable/try_transmute_mut-alignment-increase.rs":"7515fa447232f1a59b9b6fae435bbf19095ab4538ae0459c9f38678ae0b67ebb","tests/ui-stable/try_transmute_mut-alignment-increase.stderr":"e02554663c47493331b96520a06f26931701fa523928bddab658ac09b9a342d1","tests/ui-stable/try_transmute_mut-dst-not-tryfrombytes.rs":"d5150c8c6dac0d727f91763964a211da00c9842d3130c4c600b4ff6475ef6b83","tests/ui-stable/try_transmute_mut-dst-not-tryfrombytes.stderr":"93faca70689f9249504a667f00a64a6d2b3a69697338736e53b54e98d3a51211","tests/ui-stable/try_transmute_mut-size-decrease.rs":"d18f22001da76c8781c8f276657ea73a0de92144e8a2f9c07cdcc2e2fd0df9d5","tests/ui-stable/try_transmute_mut-size-decrease.stderr":"6fb3cab7b22d9c7a5e142ac191c0a4ed16588126d1d505a2dc7f35897d2c7c71","tests/ui-stable/try_transmute_mut-size-increase.rs":"29dbecad8f2e42a844e1a0a0e656be1724029362a8b8349b1a9cb6415fd7af6b","tests/ui-stable/try_transmute_mut-size-increase.stderr":"c3ee6bc20588ccf47d591d3368a2e8c50beebc95f23fdcfec95d77d3ba23fd49","tests/ui-stable/try_transmute_mut-src-not-frombytes.rs":"5146bff98d4fa7ad8edeb1f5a4e4d869b6296ca3ad6c239066ba7ae0584f3c1a","tests/ui-stable/try_transmute_mut-src-not-frombytes.stderr":"d09cfabc0ff4eba980df68511b369786c63377acaf780a2b8f2aa079eaabf0e0","tests/ui-stable/try_transmute_mut-src-not-intobytes.rs":"cee71e9cf881cf442861326320d58d30b95b9ed051dcd20c5bf1867869c1f792","tests/ui-stable/try_transmute_mut-src-not-intobytes.stderr":"e978407425ae376c3a186e9826009bf53737ad4566009a8ffd8151931b39450c","tests/ui-stable/try_transmute_ref-alignment-incre
ase.rs":"11ae7f214f83b50e9fb7e556ea3d64d07baa274ef18143e0ff4fd76b1095fd64","tests/ui-stable/try_transmute_ref-alignment-increase.stderr":"d1deea8a3d804eaa3e3b30423d1caecb35a5aca034f4665f1222e7e8515f5f42","tests/ui-stable/try_transmute_ref-dst-mutable.rs":"1ed1c2bf9f6588cc8b2eb57afac161052ac8124e7065606b2aaab51db2aa3457","tests/ui-stable/try_transmute_ref-dst-mutable.stderr":"c1da81279fcaa0e29903b053b92e9690408e93b2542fa24b456d01ced229a754","tests/ui-stable/try_transmute_ref-dst-not-immutable-tryfrombytes.rs":"8a52c04394cc7f3d89b6b93e3906aef833931f730ba5beaed83cb4072334aa01","tests/ui-stable/try_transmute_ref-dst-not-immutable-tryfrombytes.stderr":"8564dd0f1fa49d790958b34c510c982765776bd7bb563f5bcbfb2511df696650","tests/ui-stable/try_transmute_ref-size-decrease.rs":"81fabb98426fec9f0809e9706461d10b0fa67fe4bece21d2fe5547be4f7bb9ce","tests/ui-stable/try_transmute_ref-size-decrease.stderr":"d6f18947532766c75af5dfe9dc294f652f0ee4a9d41e07429af647690eca5f65","tests/ui-stable/try_transmute_ref-size-increase.rs":"3ba36df7628324b50cfebec229f721b865de15d368b8fd3da2fa197d111fb31b","tests/ui-stable/try_transmute_ref-size-increase.stderr":"fab69d46601049112baca89e6e957c45defb9864d30f7e9c6d517c4c78887168","tests/ui-stable/try_transmute_ref-src-not-immutable-intobytes.rs":"cb455e796e85ac9af743f9b3a1ece8c5e8ee9e4a18128e84685d3c124c595ebe","tests/ui-stable/try_transmute_ref-src-not-immutable-intobytes.stderr":"392868f8a57e3d11a43def1bbc890dd09929b4ec6b9bee6d105ee304e45d6540","win-cargo.bat":"dbde5af501630f6d14a0681d27f30ef2ffaeb1753d14be2f7cb1a7f285458c07"},"package":"fd74ec98b9250adb3ca554bdde269adf631549f51d8a8f8f0a10b50f1cb298c3"} \ No newline at end of file diff --git a/vendor/zerocopy/.cargo_vcs_info.json b/vendor/zerocopy/.cargo_vcs_info.json new file mode 100644 index 00000000..1f2275f5 --- /dev/null +++ b/vendor/zerocopy/.cargo_vcs_info.json @@ -0,0 +1,6 @@ +{ + "git": { + "sha1": "8ffc71c66080d4e9d8db3c01614ffa724c43d0c5" + }, + "path_in_vcs": "" +} \ No newline at end of file diff --git a/vendor/zerocopy/AGENTS.md b/vendor/zerocopy/AGENTS.md new file mode 100644 index 00000000..778733ae --- /dev/null +++ b/vendor/zerocopy/AGENTS.md @@ -0,0 +1,105 @@ +<!-- Copyright 2025 The Fuchsia Authors + +Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +<LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +This file may not be copied, modified, or distributed except according to +those terms. --> + +# Development Instructions + +This repository uses a wrapper script around Cargo to ensure consistent toolchain usage and configuration. + +## Build and Test + +**IMPORTANT:** You must **NEVER** run `cargo` directly. Instead, you must **ALWAYS** use `yes | ./cargo.sh` for all `cargo` sub-commands (e.g., `check`, `test`, `build`). Using `yes |` is required to bypass interactive prompts for toolchain installation. + +### Syntax +`yes | ./cargo.sh +<toolchain> <command> [args]` + +### Toolchains +The `<toolchain>` argument is mandatory and can be one of the following: + +- `msrv`: Runs with the Minimum Supported Rust Version. +- `stable`: Runs with the stable toolchain. +- `nightly`: Runs with the nightly toolchain. +- `all`: Runs the command on `msrv`, `stable`, and `nightly` sequentially. +- Version-gated toolchains: You can also pass specific version-gated toolchains defined in `Cargo.toml`, such as `zerocopy-core-error-1-81-0`. 
+ +### Linting + +Clippy should **always** be run on the `nightly` toolchain. + +```bash +yes | ./cargo.sh +nightly clippy +yes | ./cargo.sh +nightly clippy --tests +``` + +### Examples + +```bash +# Check the code using the nightly toolchain +# DO NOT RUN: cargo check +yes | ./cargo.sh +nightly check + +# Run tests on all supported toolchains +# DO NOT RUN: cargo test +yes | ./cargo.sh +all test + +# Run a specific test on stable +yes | ./cargo.sh +stable test -- test_name +``` + +## Workflow + +### Pre-submission Checks + +Before submitting code, run `./githooks/pre-push` to confirm that all pre-push hooks succeed. + +### UI Tests + +When updating UI test files (in `tests/ui*` or `zerocopy-derive/tests/ui*`), run `./tools/update-ui-test-files.sh` to update the corresponding stderr files. + +### Pull Requests and Commit Messages + +When a PR resolves an issue, the PR description and commit message should include a line like `Closes #123`. +When a PR makes progress on, but does not close, an issue, the PR description and commit message should include a line like `Makes progress on #123`. + +## Safety + +### Pointer Casts + +- **Avoid `&slice[0] as *const T` or `&slice[0] as *mut T`.** + Instead, use `slice.as_ptr()` or `slice.as_mut_ptr()`. Casting a reference to + a single element creates a raw pointer that is only valid for that element. + Accessing subsequent elements via pointer arithmetic is Undefined Behavior. + See [unsafe-code-guidelines#134](https://github.com/rust-lang/unsafe-code-guidelines/issues/134). + +- **Avoid converting `&mut T` to `*const T` (or `*const U`)**. + This advice applies if you intend to later cast the pointer to `*mut T` and + mutate the data. This conversion reborrows `&mut T` as a shared reference + `&T`, which may restrict permissions under Stacked Borrows. Instead, cast + `&mut T` directly to `*mut T` first, then to `*const T` if necessary. See + [rust#56604](https://github.com/rust-lang/rust/issues/56604). + +## Code Style + +### File Headers + +Each file should contain a copyright header (excluding auto-generated files such as `.stderr` files). The header should follow the format found in existing files (e.g. `src/lib.rs`), using the appropriate comment syntax for the file type. + +### Formatting + +To determine how to format code, read the formatting checker script in `ci/check_fmt.sh`. + +### Comments + +All comments (including `//`, `///`, and `//!`) should be wrapped at 80 columns. + +**Exceptions:** +- Markdown tables +- Inline ASCII diagrams +- Long URLs +- Comments inside of code blocks +- Comments which trail non-comment code +- Other cases where wrapping would significantly degrade readability (use your judgment). diff --git a/vendor/zerocopy/CHANGELOG.md b/vendor/zerocopy/CHANGELOG.md new file mode 100644 index 00000000..31d3ba39 --- /dev/null +++ b/vendor/zerocopy/CHANGELOG.md @@ -0,0 +1,43 @@ +<!-- Copyright 2023 The Fuchsia Authors + +Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +<LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +This file may not be copied, modified, or distributed except according to +those terms. --> + +# Changelog + +## Releases + +We track releases and release notes using [GitHub +Releases](https://github.com/google/zerocopy/releases). 
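To make the pointer-cast rules from the AGENTS.md safety section above concrete, here is a minimal editorial sketch (not part of any vendored file; the function names are made up for illustration):

```rust
// Editorial illustration of the pointer-cast guidance above; the function
// names are hypothetical and do not exist in zerocopy.
fn sum(bytes: &[u8]) -> u64 {
    // Preferred: `as_ptr()` yields a pointer valid for the entire slice.
    let p = bytes.as_ptr();
    // `&bytes[0] as *const u8` would only be valid for element 0, so
    // offsetting it to later elements would be undefined behavior.
    let mut total = 0u64;
    for i in 0..bytes.len() {
        total += unsafe { *p.add(i) } as u64;
    }
    total
}

fn zero(x: &mut u8) {
    // Preferred: cast `&mut T` directly to `*mut T` first...
    let p: *mut u8 = x;
    // ...and only then to `*const T` if a const pointer is needed.
    let _q: *const u8 = p;
    // Writing through `p` is fine; going via `x as *const u8 as *mut u8`
    // would reborrow `x` as shared first, which Stacked Borrows may reject.
    unsafe { *p = 0 };
}
```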
+ +## Yanks and Regressions + +### 0.2.2 through 0.2.8, 0.3.0 through 0.3.1, 0.4.0, 0.5.0, 0.6.0 through 0.6.5, 0.7.0 through 0.7.30 + +*Security advisories for this bug have been published as +[RUSTSEC-2023-0074][rustsec-advisory] and [GHSA-3mv5-343c-w2qg][github-advisory].* + +In these versions, the `Ref` methods `into_ref`, `into_mut`, `into_slice`, and +`into_mut_slice` were permitted in combination with the standard library +`cell::Ref` and `cell::RefMut` types for `Ref<B, T>`'s `B` type parameter. These +combinations are unsound, and may permit safe code to exhibit undefined +behavior. Fixes have been published to each affected minor version which do not +permit this code to compile. + +See [#716][issue-716] for more details. + +[rustsec-advisory]: https://rustsec.org/advisories/RUSTSEC-2023-0074.html +[github-advisory]: https://github.com/google/zerocopy/security/advisories/GHSA-3mv5-343c-w2qg +[issue-716]: https://github.com/google/zerocopy/issues/716 + +### 0.7.27, 0.7.28 + +These versions were briefly yanked due to a non-soundness regression reported in +[#672][pull-672]. After reconsidering our yanking policy in [#679][issue-679], +we un-yanked these versions. + +[pull-672]: https://github.com/google/zerocopy/pull/672 +[issue-679]: https://github.com/google/zerocopy/issues/679 diff --git a/vendor/zerocopy/CONTRIBUTING.md b/vendor/zerocopy/CONTRIBUTING.md new file mode 100644 index 00000000..72c96ac4 --- /dev/null +++ b/vendor/zerocopy/CONTRIBUTING.md @@ -0,0 +1,11 @@ +<!-- Copyright 2022 The Fuchsia Authors + +Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +<LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +This file may not be copied, modified, or distributed except according to +those terms. --> + +# How to Contribute + +See our [Contributing Guide](https://github.com/google/zerocopy/discussions/1318). diff --git a/vendor/zerocopy/Cargo.lock b/vendor/zerocopy/Cargo.lock new file mode 100644 index 00000000..dd224cee --- /dev/null +++ b/vendor/zerocopy/Cargo.lock @@ -0,0 +1,296 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 3 + +[[package]] +name = "basic-toml" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba62675e8242a4c4e806d12f11d136e626e6c8361d6b829310732241652a178a" +dependencies = [ + "serde", +] + +[[package]] +name = "dissimilar" +version = "1.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8975ffdaa0ef3661bfe02dbdcc06c9f829dfafe6a3c474de366a8d5e44276921" + +[[package]] +name = "either" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" + +[[package]] +name = "elain" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ba39bdf557eef05f2c1c2e986cbab6b85329b922e7606e5b63ee4c5037ba77a" + +[[package]] +name = "glob" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2" + +[[package]] +name = "itertools" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" + +[[package]] +name = "memchr" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" + +[[package]] +name = "once_cell" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da32515d9f6e6e489d7bc9d84c71b060db7247dc035bbe44eac88cf87486d8d5" + +[[package]] +name = "proc-macro2" +version = "1.0.80" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a56dea16b0a29e94408b9aa5e2940a4eedbd128a1ba20e8f7ae60fd3d465af0e" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" + +[[package]] +name = "rustversion" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" + +[[package]] +name = "ryu" +version = "1.0.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" + +[[package]] +name = "serde" +version = "1.0.210" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8e3592472072e6e22e0a54d5904d9febf8508f65fb8552499a1abc7d1078c3a" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.210" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.143" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d401abef1d108fbd9cbaebc3e46611f4b1021f714a0597a71f41ee463f5f4a5a" +dependencies = [ + "itoa", + "memchr", + "ryu", + "serde", +] + +[[package]] +name = "static_assertions" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" + +[[package]] +name = "syn" +version = "2.0.65" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2863d96a84c6439701d7a38f9de935ec562c8832cc55d1dde0f513b52fad106" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "termcolor" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "trybuild" +version = "1.0.89" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a9d3ba662913483d6722303f619e75ea10b7855b0f8e0d72799cf8621bb488f" +dependencies = [ + "basic-toml", + "dissimilar", + "glob", + "once_cell", + "serde", + "serde_derive", + "serde_json", + "termcolor", +] + +[[package]] +name = "unicode-ident" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5" + +[[package]] +name = "winapi-util" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d4cc384e1e73b93bafa6fb4f1df8c41695c8a91cf9c4c64358067d15a7b6c6b" +dependencies = [ + "windows-sys", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_gnullvm", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "zerocopy" +version = "0.8.31" +dependencies = [ + "either", + "elain", + "glob", + "itertools", + "rand", + "rustversion", + "static_assertions", + "trybuild", + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.8.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8a8d209fdf45cf5138cbb5a506f6b52522a25afccc534d1475dad8e31105c6a" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] diff --git a/vendor/zerocopy/Cargo.toml b/vendor/zerocopy/Cargo.toml new file mode 100644 index 00000000..37d2f97a --- /dev/null +++ b/vendor/zerocopy/Cargo.toml @@ -0,0 +1,128 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g., crates.io) dependencies. +# +# If you are reading this file be aware that the original Cargo.toml +# will likely look very different (and much more reasonable). +# See Cargo.toml.orig for the original contents. + +[package] +edition = "2021" +rust-version = "1.56.0" +name = "zerocopy" +version = "0.8.31" +authors = [ + "Joshua Liebow-Feeser <joshlf@google.com>", + "Jack Wrenn <jswrenn@amazon.com>", +] +build = "build.rs" +exclude = [".*"] +autolib = false +autobins = false +autoexamples = false +autotests = false +autobenches = false +description = """Zerocopy makes zero-cost memory manipulation effortless. 
We write "unsafe" so you don't have to.""" +readme = "README.md" +keywords = [ + "cast", + "convert", + "transmute", + "transmutation", + "type-punning", +] +categories = [ + "embedded", + "encoding", + "no-std::no-alloc", + "parsing", + "rust-patterns", +] +license = "BSD-2-Clause OR Apache-2.0 OR MIT" +repository = "https://github.com/google/zerocopy" + +[package.metadata.build-rs] +no-zerocopy-simd-x86-avx12-1-89-0 = "1.89.0" +no-zerocopy-core-error-1-81-0 = "1.81.0" +no-zerocopy-diagnostic-on-unimplemented-1-78-0 = "1.78.0" +no-zerocopy-generic-bounds-in-const-fn-1-61-0 = "1.61.0" +no-zerocopy-target-has-atomics-1-60-0 = "1.60.0" +no-zerocopy-aarch64-simd-1-59-0 = "1.59.0" +no-zerocopy-panic-in-const-and-vec-try-reserve-1-57-0 = "1.57.0" + +[package.metadata.ci] +pinned-stable = "1.91.1" +pinned-nightly = "nightly-2025-11-23" + +[package.metadata.docs.rs] +all-features = true +rustdoc-args = [ + "--cfg", + "doc_cfg", + "--generate-link-to-definition", +] + +[package.metadata.playground] +features = ["__internal_use_only_features_that_work_on_stable"] + +[features] +__internal_use_only_features_that_work_on_stable = [ + "alloc", + "derive", + "simd", + "std", +] +alloc = [] +derive = ["zerocopy-derive"] +float-nightly = [] +simd = [] +simd-nightly = ["simd"] +std = ["alloc"] + +[lib] +name = "zerocopy" +path = "src/lib.rs" + +[[test]] +name = "trybuild" +path = "tests/trybuild.rs" + +[dependencies.zerocopy-derive] +version = "=0.8.31" +optional = true + +[dev-dependencies.either] +version = "=1.13.0" + +[dev-dependencies.elain] +version = "0.3.0" + +[dev-dependencies.glob] +version = "=0.3.2" + +[dev-dependencies.itertools] +version = "0.11" + +[dev-dependencies.rand] +version = "0.8.5" +features = ["small_rng"] +default-features = false + +[dev-dependencies.rustversion] +version = "1.0" + +[dev-dependencies.static_assertions] +version = "1.1" + +[dev-dependencies.trybuild] +version = "=1.0.89" +features = ["diff"] + +[dev-dependencies.zerocopy-derive] +version = "=0.8.31" + +[target."cfg(any())".dependencies.zerocopy-derive] +version = "=0.8.31" diff --git a/vendor/zerocopy/Cargo.toml.orig b/vendor/zerocopy/Cargo.toml.orig new file mode 100644 index 00000000..ff1a2763 --- /dev/null +++ b/vendor/zerocopy/Cargo.toml.orig @@ -0,0 +1,115 @@ +# Copyright 2018 The Fuchsia Authors +# +# Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +# <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +# license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +# This file may not be copied, modified, or distributed except according to +# those terms. + +# Put both crates in a single workspace so that `trybuild` compiler errors have +# paths that are stable regardless of the path to the repository root. This +# avoids issues like: +# https://github.com/dtolnay/trybuild/issues/207#issuecomment-131227.594 +[workspace] + +[package] +edition = "2021" +name = "zerocopy" +version = "0.8.31" +authors = ["Joshua Liebow-Feeser <joshlf@google.com>", "Jack Wrenn <jswrenn@amazon.com>"] +description = "Zerocopy makes zero-cost memory manipulation effortless. We write \"unsafe\" so you don't have to." 
+categories = ["embedded", "encoding", "no-std::no-alloc", "parsing", "rust-patterns"] +keywords = ["cast", "convert", "transmute", "transmutation", "type-punning"] +license = "BSD-2-Clause OR Apache-2.0 OR MIT" +repository = "https://github.com/google/zerocopy" +rust-version = "1.56.0" + +exclude = [".*"] + +[package.metadata.build-rs] +# These key/value pairs are parsed by `build.rs`. Each entry names a `--cfg` +# which will be emitted if zerocopy is built with a toolchain version *lower* +# than the specified version. In the emitted `--cfg`, dashes are replaced by +# underscores. +# +# Each name is suffixed with the version it corresponds to. This is a convention +# used in the codebase to make it less likely for us to make mistakes when +# writing `doc_cfg` attributes. + +# From 1.89.0, Rust supports x86 AVX-12 SIMD types (previously gated by the +# `stdarch_x86_avx512` feature). +no-zerocopy-simd-x86-avx12-1-89-0 = "1.89.0" + +# From 1.81.0, Rust supports the `core::error::Error` trait. +no-zerocopy-core-error-1-81-0 = "1.81.0" + +# From 1.78.0, Rust supports the `#[diagnostic::on_unimplemented]` attribute. +no-zerocopy-diagnostic-on-unimplemented-1-78-0 = "1.78.0" + +# From 1.61.0, Rust supports generic types with trait bounds in `const fn`. +no-zerocopy-generic-bounds-in-const-fn-1-61-0 = "1.61.0" + +# From 1.60.0, Rust supports `cfg(target_has_atomics)`, which allows us to +# detect whether a target supports particular sets of atomics. +no-zerocopy-target-has-atomics-1-60-0 = "1.60.0" + +# When the "simd" feature is enabled, include SIMD types from the +# `core::arch::aarch64` module, which was stabilized in 1.59.0. On earlier Rust +# versions, these types require the "simd-nightly" feature. +no-zerocopy-aarch64-simd-1-59-0 = "1.59.0" + +# Permit panicking in `const fn`s and calling `Vec::try_reserve`. +no-zerocopy-panic-in-const-and-vec-try-reserve-1-57-0 = "1.57.0" + +[package.metadata.ci] +# The versions of the stable and nightly compiler toolchains to use in CI. +pinned-stable = "1.91.1" +pinned-nightly = "nightly-2025-11-23" + +[package.metadata.docs.rs] +all-features = true +rustdoc-args = ["--cfg", "doc_cfg", "--generate-link-to-definition"] + +[package.metadata.playground] +features = ["__internal_use_only_features_that_work_on_stable"] + +[features] +alloc = [] +derive = ["zerocopy-derive"] +simd = [] +simd-nightly = ["simd"] +float-nightly = [] +std = ["alloc"] +# This feature depends on all other features that work on the stable compiler. +# We make no stability guarantees about this feature; it may be modified or +# removed at any time. +__internal_use_only_features_that_work_on_stable = ["alloc", "derive", "simd", "std"] + +[dependencies] +zerocopy-derive = { version = "=0.8.31", path = "zerocopy-derive", optional = true } + +# The "associated proc macro pattern" ensures that the versions of zerocopy and +# zerocopy-derive remain equal, even if the 'derive' feature isn't used. +# See: https://github.com/matklad/macro-dep-test +[target.'cfg(any())'.dependencies] +zerocopy-derive = { version = "=0.8.31", path = "zerocopy-derive" } + +[dev-dependencies] +# More recent versions of `either` have an MSRV higher than ours. +either = "=1.13.0" +# FIXME(#381) Remove this dependency once we have our own layout gadgets. +elain = "0.3.0" +# More recent versions of `glob` have an MSRV higher than ours. 
+glob = "=0.3.2" +itertools = "0.11" +rand = { version = "0.8.5", default-features = false, features = ["small_rng"] } +rustversion = "1.0" +static_assertions = "1.1" +testutil = { path = "testutil" } +# Pinned to a specific version so that the version used for local development +# and the version used in CI are guaranteed to be the same. Future versions +# sometimes change the output format slightly, so a version mismatch can cause +# CI test failures. +trybuild = { version = "=1.0.89", features = ["diff"] } +# In tests, unlike in production, zerocopy-derive is not optional +zerocopy-derive = { version = "=0.8.31", path = "zerocopy-derive" } diff --git a/vendor/zerocopy/LICENSE-APACHE b/vendor/zerocopy/LICENSE-APACHE new file mode 100644 index 00000000..2dc22c12 --- /dev/null +++ b/vendor/zerocopy/LICENSE-APACHE @@ -0,0 +1,202 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2023 The Fuchsia Authors + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + diff --git a/vendor/zerocopy/LICENSE-BSD b/vendor/zerocopy/LICENSE-BSD new file mode 100644 index 00000000..7ed244f4 --- /dev/null +++ b/vendor/zerocopy/LICENSE-BSD @@ -0,0 +1,24 @@ +Copyright 2019 The Fuchsia Authors. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/zerocopy/LICENSE-MIT b/vendor/zerocopy/LICENSE-MIT new file mode 100644 index 00000000..26e15216 --- /dev/null +++ b/vendor/zerocopy/LICENSE-MIT @@ -0,0 +1,26 @@ +Copyright 2023 The Fuchsia Authors + +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. + diff --git a/vendor/zerocopy/POLICIES.md b/vendor/zerocopy/POLICIES.md new file mode 100644 index 00000000..a05b3c35 --- /dev/null +++ b/vendor/zerocopy/POLICIES.md @@ -0,0 +1,119 @@ +<!-- Copyright 2023 The Fuchsia Authors + +Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +<LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +This file may not be copied, modified, or distributed except according to +those terms. --> + +# Zerocopy's Policies + +## Soundness + +Zerocopy is expressly designed for use in security-critical contexts. It is used +in hardware security firmware, cryptographic implementations, hypervisors, and +more. We understand that software in these contexts has a very high bar for +correctness, and we take our responsibility to meet that bar very seriously. + +This section describes policies which are designed to ensure the correctness and +soundness of our code and prevent regressions. + +### Forwards-compatibility + +Rust does not currently have a formal memory model. As such, while Rust provides +guarantees about the semantics of some operations, the semantics of many +operations is up in the air and subject to change. + +Zerocopy strives to ensure that our code - and code emitted by our custom +derives - is sound under any version of Rust as early as our MSRV, and will +continue to be sound under any future version of Rust. The policies in this +section are designed to help ensure that we live up to this goal. + +### Safety comments + +Each non-test `unsafe` block must be annotated with a "safety comment" which +provides a rationale for its soundness. In order to ensure that our soundness is +forwards-compatible, safety comments must satisfy the following criteria: +- Safety comments must constitute a (possibly informal) proof that all of Rust's + soundness rules are upheld. +- Safety comments must only rely for their correctness on statements which + appear in the stable versions of the [Rust Reference] or standard library + documentation (ie, the docs for [core], [alloc], and [std]); arguments which + rely on text from the beta or nightly versions of these documents are not + considered complete. +- All statements from the Reference or standard library documentation which are + relied upon for soundness must be quoted in the safety comment. This ensures + that there is no ambiguity as to what aspect of the text is being cited. This + is especially important in cases where the text of these documents changes in + the future. Such changes are of course required to be backwards-compatible, + but may change the manner in which a particular guarantee is explained. + +We use the [`clippy::undocumented_unsafe_blocks`] lint to ensure that `unsafe` +blocks cannot be added without a safety comment. Note that there are a few +outstanding uncommented `unsafe` blocks which are tracked in [#429]. Our goal is +to reach 100% safety comment coverage and not regress once we've reached it. 
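As a concrete illustration of the policy described above, a safety comment written to this standard might look like the following minimal sketch (illustrative only; the function is not taken from zerocopy's codebase, though the cited sentence matches the standard library's documentation for `slice::get_unchecked`):

```rust
/// Returns the first byte of `bytes`, or `None` if `bytes` is empty.
fn first_byte(bytes: &[u8]) -> Option<u8> {
    if bytes.is_empty() {
        return None;
    }
    // SAFETY: We verified above that `bytes` is non-empty, so index 0 is in
    // bounds. The standard library documentation for `slice::get_unchecked`
    // states: "Calling this method with an out-of-bounds index is undefined
    // behavior even if the resulting reference is not used." We have ruled
    // out the out-of-bounds case, so the call is sound.
    Some(unsafe { *bytes.get_unchecked(0) })
}
```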
+ +[Rust Reference]: https://doc.rust-lang.org/reference/ +[core]: https://doc.rust-lang.org/stable/core/ +[alloc]: https://doc.rust-lang.org/stable/alloc/ +[std]: https://doc.rust-lang.org/stable/std/ +[`clippy::undocumented_unsafe_blocks`]: https://rust-lang.github.io/rust-clippy/master/index.html#/undocumented_unsafe_blocks +[#429]: https://github.com/google/zerocopy/issues/429 + +#### Exceptions to our safety comment policy + +In rare circumstances, the soundness of an `unsafe` block may depend upon +semantics which are widely agreed upon but not formally guaranteed. In order to +avoid slowing down zerocopy's development to an unreasonable degree, a safety +comment may violate our safety comment policy so long as all of the following +hold: +- The safety comment's correctness may rely on semantics which are not + guaranteed in official Rust documentation *so long as* a member of the Rust + team has articulated in an official communication (e.g. a comment on a Rust + GitHub repo) that Rust intends to guarantee particular semantics. +- There exists an active effort to formalize the guarantee in Rust's official + documentation. + +### Target architecture support + +Zerocopy bases its soundness on guarantees made about the semantics of Rust +which appear in the Rust Reference or standard library documentation; zerocopy +is sound so long as these guarantees hold. There are known cases in which these +guarantees do not hold on certain target architectures (see +[rust-lang/unsafe-code-guidelines#461]); on such target architectures, zerocopy +may be unsound. We consider it outside of zerocopy's scope to reason about these +cases. Zerocopy makes no effort to maintain soundness in cases where Rust's +documented guarantees do not hold. + +[rust-lang/unsafe-code-guidelines#461]: https://github.com/rust-lang/unsafe-code-guidelines/issues/461 + +## MSRV + +<!-- Our policy used to be simply that MSRV was a breaking change in all +circumstances. This implicitly relied on syn having the same MSRV policy, which +it does not. See #1085 and #1088. --> + +Without the `derive` feature enabled, zerocopy's minimum supported Rust version +(MSRV) is encoded in the `package.rust-version` field in its `Cargo.toml` file. For +zerocopy, we consider an increase in MSRV to be a semver-breaking change, and +will only increase our MSRV during semver-breaking version changes (e.g., 0.1 -> +0.2, 1.0 -> 2.0, etc). + +For zerocopy with the `derive` feature enabled, and for the zerocopy-derive +crate, we inherit the maximum MSRV of any of our dependencies. As of this writing +(2024-10-03), at least one dependency (syn) does *not* consider MSRV increases +to be semver-breaking changes. Thus, using the `derive` feature may result in +the effective MSRV increasing within a semver version train. + +## Yanking + +Whenever a bug or regression is identified, we will yank any affected versions +which are part of the current version train. For example, if the most recent +version is 0.10.20 and a bug is uncovered, we will release a fix in 0.10.21 and +yank all 0.10.X versions which are affected. We *may* also yank versions in +previous version trains on a case-by-case basis, but we don't guarantee it. + +For information about a particular yanked or un-yanked version, see our [yank +log][yank-log].
+ +[yank-log]: https://github.com/google/zerocopy/blob/main/CHANGELOG.md#yanks-and-regressions diff --git a/vendor/zerocopy/README.md b/vendor/zerocopy/README.md new file mode 100644 index 00000000..ba195670 --- /dev/null +++ b/vendor/zerocopy/README.md @@ -0,0 +1,213 @@ +<!-- Copyright 2024 The Fuchsia Authors + +Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +<LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +This file may not be copied, modified, or distributed except according to +those terms. + +WARNING: DO NOT EDIT THIS FILE. It is generated automatically. Edits should be +made in the doc comment on `src/lib.rs` or in `tools/generate-readme`. +--> + +# zerocopy + +***<span style="font-size: 140%">Fast, safe, <span +style="color:red;">compile error</span>. Pick two.</span>*** + +Zerocopy makes zero-cost memory manipulation effortless. We write `unsafe` +so you don't have to. + +*For an overview of what's changed from zerocopy 0.7, check out our [release +notes][release-notes], which include a step-by-step upgrading guide.* + +*Have questions? Need more out of zerocopy? Submit a [customer request +issue][customer-request-issue] or ask the maintainers on +[GitHub][github-q-a] or [Discord][discord]!* + +[customer-request-issue]: https://github.com/google/zerocopy/issues/new/choose +[release-notes]: https://github.com/google/zerocopy/discussions/1680 +[github-q-a]: https://github.com/google/zerocopy/discussions/categories/q-a +[discord]: https://discord.gg/MAvWH2R6zk + +## Overview + +###### Conversion Traits + +Zerocopy provides four derivable traits for zero-cost conversions: +- `TryFromBytes` indicates that a type may safely be converted from + certain byte sequences (conditional on runtime checks) +- `FromZeros` indicates that a sequence of zero bytes represents a valid + instance of a type +- `FromBytes` indicates that a type may safely be converted from an + arbitrary byte sequence +- `IntoBytes` indicates that a type may safely be converted *to* a byte + sequence + +These traits support sized types, slices, and [slice DSTs][slice-dsts]. + +[slice-dsts]: KnownLayout#dynamically-sized-types + +###### Marker Traits + +Zerocopy provides three derivable marker traits that do not provide any +functionality themselves, but are required to call certain methods provided +by the conversion traits: +- `KnownLayout` indicates that zerocopy can reason about certain layout + qualities of a type +- `Immutable` indicates that a type is free from interior mutability, + except by ownership or an exclusive (`&mut`) borrow +- `Unaligned` indicates that a type's alignment requirement is 1 + +You should generally derive these marker traits whenever possible. + +###### Conversion Macros + +Zerocopy provides six macros for safe casting between types: + +- (`try_`[try_transmute])`transmute` (conditionally) converts a value of + one type to a value of another type of the same size +- (`try_`[try_transmute_mut])`transmute_mut` (conditionally) converts a + mutable reference of one type to a mutable reference of another type of + the same size +- (`try_`[try_transmute_ref])`transmute_ref` (conditionally) converts a + mutable or immutable reference of one type to an immutable reference of + another type of the same size + +These macros perform *compile-time* size and alignment checks, meaning that +unconditional casts have zero cost at runtime. 
Conditional casts do not need +to validate size or alignment at runtime, but do need to validate contents. + +These macros cannot be used in generic contexts. For generic conversions, +use the methods defined by the [conversion traits](#conversion-traits). + +###### Byteorder-Aware Numerics + +Zerocopy provides byte-order aware integer types that support these +conversions; see the `byteorder` module. These types are especially useful +for network parsing. + +## Cargo Features + +- **`alloc`** + By default, `zerocopy` is `no_std`. When the `alloc` feature is enabled, + the `alloc` crate is added as a dependency, and some allocation-related + functionality is added. + +- **`std`** + By default, `zerocopy` is `no_std`. When the `std` feature is enabled, the + `std` crate is added as a dependency (ie, `no_std` is disabled), and + support for some `std` types is added. `std` implies `alloc`. + +- **`derive`** + Provides derives for the core marker traits via the `zerocopy-derive` + crate. These derives are re-exported from `zerocopy`, so it is not + necessary to depend on `zerocopy-derive` directly. + + However, you may experience better compile times if you instead directly + depend on both `zerocopy` and `zerocopy-derive` in your `Cargo.toml`, + since doing so will allow Rust to compile these crates in parallel. To do + so, do *not* enable the `derive` feature, and list both dependencies in + your `Cargo.toml` with the same leading non-zero version number; e.g.: + + ```toml + [dependencies] + zerocopy = "0.X" + zerocopy-derive = "0.X" + ``` + + To avoid the risk of [duplicate import errors][duplicate-import-errors] if + one of your dependencies enables zerocopy's `derive` feature, import + derives as `use zerocopy_derive::*` rather than by name (e.g., `use + zerocopy_derive::FromBytes`). + +- **`simd`** + When the `simd` feature is enabled, `FromZeros`, `FromBytes`, and + `IntoBytes` impls are emitted for all stable SIMD types which exist on the + target platform. Note that the layout of SIMD types is not yet stabilized, + so these impls may be removed in the future if layout changes make them + invalid. For more information, see the Unsafe Code Guidelines Reference + page on the [layout of packed SIMD vectors][simd-layout]. + +- **`simd-nightly`** + Enables the `simd` feature and adds support for SIMD types which are only + available on nightly. Since these types are unstable, support for any type + may be removed at any point in the future. + +- **`float-nightly`** + Adds support for the unstable `f16` and `f128` types. These types are + not yet fully implemented and may not be supported on all platforms. + +[duplicate-import-errors]: https://github.com/google/zerocopy/issues/1587 +[simd-layout]: https://rust-lang.github.io/unsafe-code-guidelines/layout/packed-simd-vectors.html + +## Security Ethos + +Zerocopy is expressly designed for use in security-critical contexts. We +strive to ensure that zerocopy code is sound under Rust's current +memory model, and *any future memory model*. We ensure this by: +- **...not 'guessing' about Rust's semantics.** + We annotate `unsafe` code with a precise rationale for its soundness that + cites a relevant section of Rust's official documentation. When Rust's + documented semantics are unclear, we work with the Rust Operational + Semantics Team to clarify Rust's documentation.
+- **...rigorously testing our implementation.** + We run tests using [Miri], ensuring that zerocopy is sound across a wide + array of supported target platforms of varying endianness and pointer + width, and across both current and experimental memory models of Rust. +- **...formally proving the correctness of our implementation.** + We apply formal verification tools like [Kani][kani] to prove zerocopy's + correctness. + +For more information, see our full [soundness policy]. + +[Miri]: https://github.com/rust-lang/miri +[Kani]: https://github.com/model-checking/kani +[soundness policy]: https://github.com/google/zerocopy/blob/main/POLICIES.md#soundness + +## Relationship to Project Safe Transmute + +[Project Safe Transmute] is an official initiative of the Rust Project to +develop language-level support for safer transmutation. The Project consults +with crates like zerocopy to identify aspects of safer transmutation that +would benefit from compiler support, and has developed an [experimental, +compiler-supported analysis][mcp-transmutability] which determines whether, +for a given type, any value of that type may be soundly transmuted into +another type. Once this functionality is sufficiently mature, zerocopy +intends to replace its internal transmutability analysis (implemented by our +custom derives) with the compiler-supported one. This change will likely be +an implementation detail that is invisible to zerocopy's users. + +Project Safe Transmute will not replace the need for most of zerocopy's +higher-level abstractions. The experimental compiler analysis is a tool for +checking the soundness of `unsafe` code, not a tool to avoid writing +`unsafe` code altogether. For the foreseeable future, crates like zerocopy +will still be required in order to provide higher-level abstractions on top +of the building block provided by Project Safe Transmute. + +[Project Safe Transmute]: https://rust-lang.github.io/rfcs/2835-project-safe-transmute.html +[mcp-transmutability]: https://github.com/rust-lang/compiler-team/issues/411 + +## MSRV + +See our [MSRV policy]. + +[MSRV policy]: https://github.com/google/zerocopy/blob/main/POLICIES.md#msrv + +## Changelog + +Zerocopy uses [GitHub Releases]. + +[GitHub Releases]: https://github.com/google/zerocopy/releases + +## Thanks + +Zerocopy is maintained by engineers at Google with help from [many wonderful +contributors][contributors]. Thank you to everyone who has lent a hand in +making Rust a little more secure! + +[contributors]: https://github.com/google/zerocopy/graphs/contributors + +## Disclaimer + +Disclaimer: Zerocopy is not an officially supported Google product. diff --git a/vendor/zerocopy/build.rs b/vendor/zerocopy/build.rs new file mode 100644 index 00000000..2d8ffde3 --- /dev/null +++ b/vendor/zerocopy/build.rs @@ -0,0 +1,256 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under the 2-Clause BSD License <LICENSE-BSD or +// https://opensource.org/license/bsd-2-clause>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +// Sometimes we want to use lints which were added after our MSRV. +// `unknown_lints` is `warn` by default and we deny warnings in CI, so without +// this attribute, any unknown lint would cause a CI failure when testing with +// our MSRV. 
+#![allow(unknown_lints)] +#![deny(renamed_and_removed_lints)] +#![deny( + anonymous_parameters, + deprecated_in_future, + late_bound_lifetime_arguments, + missing_copy_implementations, + missing_debug_implementations, + path_statements, + patterns_in_fns_without_body, + rust_2018_idioms, + trivial_numeric_casts, + unreachable_pub, + unsafe_op_in_unsafe_fn, + unused_extern_crates, + variant_size_differences +)] +#![deny( + clippy::all, + clippy::alloc_instead_of_core, + clippy::arithmetic_side_effects, + clippy::as_underscore, + clippy::assertions_on_result_states, + clippy::as_conversions, + clippy::correctness, + clippy::dbg_macro, + clippy::decimal_literal_representation, + clippy::get_unwrap, + clippy::indexing_slicing, + clippy::missing_inline_in_public_items, + clippy::missing_safety_doc, + clippy::obfuscated_if_else, + clippy::perf, + clippy::print_stdout, + clippy::style, + clippy::suspicious, + clippy::todo, + clippy::undocumented_unsafe_blocks, + clippy::unimplemented, + clippy::unnested_or_patterns, + clippy::unwrap_used, + clippy::use_debug +)] + +use std::{env, fs, process::Command, str}; + +fn main() { + // Avoid unnecessary re-building. + println!("cargo:rerun-if-changed=build.rs"); + // This is necessary because changes to the list of detected Rust toolchain + // versions will affect what `--cfg`s this script emits. Without this, + // changes to that list have no effect on the build without running `cargo + // clean` or similar. + println!("cargo:rerun-if-changed=Cargo.toml"); + + let version_cfgs = parse_version_cfgs_from_cargo_toml(); + let rustc_version = rustc_version(); + + if rustc_version >= (Version { major: 1, minor: 77, patch: 0 }) { + for version_cfg in &version_cfgs { + // This tells the `unexpected_cfgs` lint to expect to see all of + // these `cfg`s. The `cargo::` syntax was only added in 1.77, so we + // don't emit these on earlier toolchain versions. + println!("cargo:rustc-check-cfg=cfg({})", version_cfg.cfg_name); + + // This tells the `unexpected_cfgs` lint to expect to see `cfg`s of + // the form `rust = "1.2.3"`. These aren't real `cfg`s, but we use + // them in `cfg_attr(doc_cfg, doc(cfg(rust = "1.2.3")))` on items + // that are version-gated so that the rendered Rustdoc shows which + // Rust toolchain versions those items are available on. + let Version { major, minor, patch } = version_cfg.version; + println!("cargo:rustc-check-cfg=cfg(rust, values(\"{}.{}.{}\"))", major, minor, patch); + } + // FIXME(https://github.com/rust-lang/rust/issues/124816): Remove these + // once they're no longer needed. + println!("cargo:rustc-check-cfg=cfg(doc_cfg)"); + println!("cargo:rustc-check-cfg=cfg(kani)"); + println!( + "cargo:rustc-check-cfg=cfg(__ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS)" + ); + println!("cargo:rustc-check-cfg=cfg(coverage_nightly)"); + } + + for version_cfg in version_cfgs { + if rustc_version < version_cfg.version { + println!("cargo:rustc-cfg={}", version_cfg.cfg_name); + } + } +} + +#[derive(Debug, Ord, PartialEq, PartialOrd, Eq)] +struct Version { + major: usize, + minor: usize, + patch: usize, +} + +#[derive(Debug)] +struct VersionCfg { + version: Version, + cfg_name: String, +} + +const ITER_FIRST_NEXT_EXPECT_MSG: &str = "unreachable: a string split cannot produce 0 items"; + +fn parse_version_cfgs_from_cargo_toml() -> Vec<VersionCfg> { + let cargo_toml = fs::read_to_string("Cargo.toml").expect("failed to read Cargo.toml"); + + // Expect a Cargo.toml with the following format: + // + // ... 
+ // + // [package.metadata.build-rs] + // # Comments... + // zerocopy-panic-in-const-fn = "1.57.0" + // + // ... + // + // [...] + // + // In other words, the following sections, in order: + // - Arbitrary content + // - The literal header `[package.metadata.build-rs]` + // - Any number of: + // - Comments + // - Key/value pairs + // - A TOML table, indicating the end of the section we care about + + const TABLE_HEADER: &str = "[package.metadata.build-rs]"; + + if !cargo_toml.contains(TABLE_HEADER) { + panic!("{}", format!("Cargo.toml does not contain `{}`", TABLE_HEADER)); + } + + // Now that we know there's at least one instance of `TABLE_HEADER`, we + // consume the iterator until we find the text following that first + // instance. This isn't terribly bullet-proof, but we also authored + // `Cargo.toml`, and we'd have to mess up pretty badly to accidentally put + // two copies of the same table header in that file. + let mut iter = cargo_toml.split(TABLE_HEADER); + let _prefix = iter.next().expect(ITER_FIRST_NEXT_EXPECT_MSG); + let rest = iter.next().expect("unreachable: we already confirmed that there's a table header"); + + // Scan until we find the next table section, which should start with a `[` + // character at the beginning of a line. + let mut iter = rest.split("\n["); + let section = iter.next().expect("unreachable: a string split cannot produce 0 items"); + + section + .lines() + .filter_map(|line| { + // Parse lines of one of the following forms: + // + // # Comment + // + // name-of-key = "1.2.3" # Comment + // + // Comments on their own line are ignored, and comments after a + // key/value pair will be stripped before further processing. + + // We don't need to handle the case where the `#` character isn't a + // comment (which can happen if it's inside a string) since we authored + // `Cargo.toml` and, in this section, we only put Rust version numbers + // in strings. + let before_comment = line.split('#').next().expect(ITER_FIRST_NEXT_EXPECT_MSG); + let before_comment_without_whitespace = before_comment.trim_start(); + if before_comment_without_whitespace.is_empty() { + return None; + } + + // At this point, assuming Cargo.toml is correctly formatted according + // to the format expected by this function, we know that + // `before_comment_without_whitespace` is of the form: + // + // name-of-key = "1.2.3" # Comment + // + // ...with no leading whitespace, and where the trailing comment is + // optional. + + let mut iter = before_comment_without_whitespace.split_whitespace(); + let name = iter.next().expect(ITER_FIRST_NEXT_EXPECT_MSG); + const EXPECT_MSG: &str = + "expected lines of the format `name-of-key = \"1.2.3\" # Comment`"; + let equals_sign = iter.next().expect(EXPECT_MSG); + let value = iter.next().expect(EXPECT_MSG); + + assert_eq!(equals_sign, "=", "{}", EXPECT_MSG); + + // Replace dashes with underscores. + let name = name.replace('-', "_"); + + // Strip the quotation marks. 
+ let value = value.trim_start_matches('"').trim_end_matches('"'); + + let mut iter = value.split('.'); + let major = iter.next().expect(ITER_FIRST_NEXT_EXPECT_MSG); + let minor = iter.next().expect(EXPECT_MSG); + let patch = iter.next().expect(EXPECT_MSG); + + assert_eq!(iter.next(), None, "{}", EXPECT_MSG); + + let major: usize = major.parse().expect(EXPECT_MSG); + let minor: usize = minor.parse().expect(EXPECT_MSG); + let patch: usize = patch.parse().expect(EXPECT_MSG); + + Some(VersionCfg { version: Version { major, minor, patch }, cfg_name: name }) + }) + .collect() +} + +fn rustc_version() -> Version { + let rustc_cmd_name = env::var_os("RUSTC").expect("could not get rustc command name"); + let version = + Command::new(rustc_cmd_name).arg("--version").output().expect("could not invoke rustc"); + if !version.status.success() { + panic!( + "rustc failed with status: {}\nrustc output: {}", + version.status, + String::from_utf8_lossy(version.stderr.as_slice()) + ); + } + + const RUSTC_EXPECT_MSG: &str = "could not parse rustc version output"; + let version = str::from_utf8(version.stdout.as_slice()).expect(RUSTC_EXPECT_MSG); + let version = version.trim_start_matches("rustc "); + // The version string is sometimes followed by other information such as the + // string `-nightly` or other build information. We don't care about any of + // that. + let version = version + .split(|c: char| c != '.' && !c.is_ascii_digit()) + .next() + .expect(ITER_FIRST_NEXT_EXPECT_MSG); + let mut iter = version.split('.'); + let major = iter.next().expect(ITER_FIRST_NEXT_EXPECT_MSG); + let minor = iter.next().expect(RUSTC_EXPECT_MSG); + let patch = iter.next().expect(RUSTC_EXPECT_MSG); + + let major: usize = major.parse().expect(RUSTC_EXPECT_MSG); + let minor: usize = minor.parse().expect(RUSTC_EXPECT_MSG); + let patch: usize = patch.parse().expect(RUSTC_EXPECT_MSG); + + Version { major, minor, patch } +} diff --git a/vendor/zerocopy/cargo.sh b/vendor/zerocopy/cargo.sh new file mode 100755 index 00000000..499fde21 --- /dev/null +++ b/vendor/zerocopy/cargo.sh @@ -0,0 +1,15 @@ +# Copyright 2024 The Fuchsia Authors +# +# Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +# <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +# license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +# This file may not be copied, modified, or distributed except according to +# those terms. + +set -eo pipefail + +# Build `cargo-zerocopy` without any RUSTFLAGS or CARGO_TARGET_DIR set in the +# environment +env -u RUSTFLAGS -u CARGO_TARGET_DIR cargo +stable build --manifest-path tools/Cargo.toml -p cargo-zerocopy -q +# Thin wrapper around the `cargo-zerocopy` binary in `tools/cargo-zerocopy` +./tools/target/debug/cargo-zerocopy $@ diff --git a/vendor/zerocopy/ci/check_actions.sh b/vendor/zerocopy/ci/check_actions.sh new file mode 100755 index 00000000..6eef7ea6 --- /dev/null +++ b/vendor/zerocopy/ci/check_actions.sh @@ -0,0 +1,51 @@ +#!/usr/bin/env bash +# +# Copyright 2025 The Fuchsia Authors +# +# Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +# <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +# license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +# This file may not be copied, modified, or distributed except according to +# those terms. + +set -eo pipefail + +script_name="ci/check_actions.sh" + +# Ensure action-validator is installed +if [ ! 
-x "$HOME/.cargo/bin/action-validator" ]; then + echo "$script_name: action-validator not found, installing..." >&2 + # Install specific version to ensure reproducibility + cargo install -q action-validator --version 0.8.0 --locked +fi +export PATH="$HOME/.cargo/bin:$PATH" + +# Files to exclude from validation (e.g., because they are not Actions/Workflows) +# Use relative paths matching `find .github` output +EXCLUDE_FILES=( + "./.github/dependabot.yml" + "./.github/release.yml" +) + +failed=0 + +# Use process substitution and while loop to handle filenames with spaces robustly +while IFS= read -r -d '' file; do + # Check if file is in exclusion list + for exclude in "${EXCLUDE_FILES[@]}"; do + if [[ "$file" == "$exclude" ]]; then + continue 2 + fi + done + + if ! output=$(action-validator "$file" 2>&1); then + echo "$script_name: ❌ Validation failed for $file" >&2 + echo "$output" | sed "s|^|$script_name: |" >&2 + failed=1 + fi +done < <(find ./.github -type f \( -iname '*.yml' -o -iname '*.yaml' \) -print0) + +if [[ $failed -ne 0 ]]; then + echo "$script_name: One or more files failed validation." >&2 + exit 1 +fi diff --git a/vendor/zerocopy/ci/check_all_toolchains_tested.sh b/vendor/zerocopy/ci/check_all_toolchains_tested.sh new file mode 100755 index 00000000..88d4208a --- /dev/null +++ b/vendor/zerocopy/ci/check_all_toolchains_tested.sh @@ -0,0 +1,25 @@ +#!/usr/bin/env bash +# +# Copyright 2024 The Fuchsia Authors +# +# Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +# <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +# license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +# This file may not be copied, modified, or distributed except according to +# those terms. + +set -eo pipefail + +# Check whether the set of toolchains tested in this file (other than +# 'msrv', 'stable', and 'nightly') is equal to the set of toolchains +# listed in the 'package.metadata.build-rs' section of Cargo.toml. +# +# If the inputs to `diff` are not identical, `diff` exits with a +# non-zero error code, which causes this script to fail (thanks to +# `set -e`). +diff \ + <(cat .github/workflows/ci.yml | yq '.jobs.build_test.strategy.matrix.toolchain | .[]' | \ + sort -u | grep -v '^\(msrv\|stable\|nightly\)$') \ + <(cargo metadata -q --format-version 1 | \ + jq -r ".packages[] | select(.name == \"zerocopy\").metadata.\"build-rs\" | keys | .[]" | \ + sort -u) >&2 diff --git a/vendor/zerocopy/ci/check_fmt.sh b/vendor/zerocopy/ci/check_fmt.sh new file mode 100755 index 00000000..bc7a6302 --- /dev/null +++ b/vendor/zerocopy/ci/check_fmt.sh @@ -0,0 +1,18 @@ +#!/usr/bin/env bash +# +# Copyright 2024 The Fuchsia Authors +# +# Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +# <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +# license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +# This file may not be copied, modified, or distributed except according to +# those terms. + +set -eo pipefail +files=$(find . 
-iname '*.rs' -type f -not -path './target/*') +# check that find succeeded +if [[ -z $files ]] +then + exit 1 +fi +./cargo.sh +nightly fmt --check -- $files >&2 diff --git a/vendor/zerocopy/ci/check_job_dependencies.sh b/vendor/zerocopy/ci/check_job_dependencies.sh new file mode 100755 index 00000000..a9e0d362 --- /dev/null +++ b/vendor/zerocopy/ci/check_job_dependencies.sh @@ -0,0 +1,35 @@ +#!/usr/bin/env bash +# +# Copyright 2024 The Fuchsia Authors +# +# Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +# <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +# license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +# This file may not be copied, modified, or distributed except according to +# those terms. + +set -eo pipefail +which yq > /dev/null +jobs=$(for i in $(find .github -iname '*.yaml' -or -iname '*.yml') + do + # Select jobs that are triggered by pull request. + if yq -e '.on | has("pull_request")' "$i" 2>/dev/null >/dev/null + then + # This gets the list of jobs that all-jobs-succeed does not depend on. + comm -23 \ + <(yq -r '.jobs | keys | .[]' "$i" | sort | uniq) \ + <(yq -r '.jobs.all-jobs-succeed.needs[]' "$i" | sort | uniq) + fi + + # The grep call here excludes all-jobs-succeed from the list of jobs that + # all-jobs-succeed does not depend on. If all-jobs-succeed does + # not depend on itself, we do not care about it. + done | sort | uniq | (grep -v '^all-jobs-succeed$' || true) +) + +if [ -n "$jobs" ] +then + missing_jobs="$(echo "$jobs" | tr ' ' '\n')" + echo "all-jobs-succeed missing dependencies on some jobs: $missing_jobs" | tee -a $GITHUB_STEP_SUMMARY >&2 + exit 1 +fi diff --git a/vendor/zerocopy/ci/check_msrv_is_minimal.sh b/vendor/zerocopy/ci/check_msrv_is_minimal.sh new file mode 100755 index 00000000..b67a52c5 --- /dev/null +++ b/vendor/zerocopy/ci/check_msrv_is_minimal.sh @@ -0,0 +1,72 @@ +#!/usr/bin/env bash +# +# Copyright 2025 The Fuchsia Authors +# +# Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +# <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +# license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +# This file may not be copied, modified, or distributed except according to +# those terms. + +set -eo pipefail + +read -r -d '' PYTHON_SCRIPT <<'EOF' || true +import sys +import json + +def parse_version(v): + """Converts a version string to a tuple of integers.""" + return tuple(map(int, v.split("."))) + +def main(): + """ + Checks that the package's MSRV is strictly lower than any version + specified in `package.metadata.build-rs`. 
+ """ + try: + data = json.load(sys.stdin) + except json.JSONDecodeError as e: + print(f"Error parsing JSON from cargo metadata: {e}", file=sys.stderr) + sys.exit(1) + + # Find the zerocopy package + try: + pkg = next(p for p in data["packages"] if p["name"] == "zerocopy") + except StopIteration: + print("Error: zerocopy package not found in metadata", file=sys.stderr) + sys.exit(1) + + msrv_str = pkg.get("rust_version") + if not msrv_str: + print("Error: rust-version not found in Cargo.toml", file=sys.stderr) + sys.exit(1) + + try: + msrv = parse_version(msrv_str) + except ValueError: + print(f"Error: Invalid MSRV format: {msrv_str}", file=sys.stderr) + sys.exit(1) + + build_rs_versions = (pkg.get("metadata") or {}).get("build-rs", {}) + + failed = False + for name, ver_str in build_rs_versions.items(): + try: + ver = parse_version(ver_str) + except ValueError: + print(f"Warning: Skipping invalid version format for {name}: {ver_str}", file=sys.stderr) + continue + + # Check that MSRV < Version (strictly lower) + if not (msrv < ver): + print(f"Error: MSRV ({msrv_str}) is not strictly lower than {name} ({ver_str})", file=sys.stderr) + failed = True + + if failed: + sys.exit(1) + +if __name__ == "__main__": + main() +EOF + +cargo metadata --format-version 1 --no-deps | python3 -c "$PYTHON_SCRIPT" diff --git a/vendor/zerocopy/ci/check_readme.sh b/vendor/zerocopy/ci/check_readme.sh new file mode 100755 index 00000000..241128d0 --- /dev/null +++ b/vendor/zerocopy/ci/check_readme.sh @@ -0,0 +1,19 @@ +#!/usr/bin/env bash +# +# Copyright 2024 The Fuchsia Authors +# +# Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +# <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +# license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +# This file may not be copied, modified, or distributed except according to +# those terms. + +set -eo pipefail + +# Install again in case the installation failed during the +# `generate_cache` step. We treat that step as best-effort and +# suppress all errors from it. +cargo install -q cargo-readme --version 3.2.0 + +diff <(cargo -q run --manifest-path tools/Cargo.toml -p generate-readme) README.md >&2 +exit $? diff --git a/vendor/zerocopy/ci/check_todo.sh b/vendor/zerocopy/ci/check_todo.sh new file mode 100755 index 00000000..bb5b240f --- /dev/null +++ b/vendor/zerocopy/ci/check_todo.sh @@ -0,0 +1,34 @@ +#!/usr/bin/env bash +# +# Copyright 2025 The Fuchsia Authors +# +# Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +# <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +# license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +# This file may not be copied, modified, or distributed except according to +# those terms. + +set -euo pipefail + +# This allows us to leave XODO comments in this file and have them still be +# picked up by this script without having the script itself trigger false +# positives. The alternative would be to exclude this script entirely, which +# would mean that we couldn't use XODO comments in this script. +KEYWORD=$(echo XODO | sed -e 's/X/T/') + +# Make sure `rg` is installed (if this fails, `set -e` above will cause the +# script to exit). +rg --version >/dev/null + +# -H: Print filename (default for multiple files/recursive) +# -n: Print line number +# -w: Match whole words +output=$(rg -H -n -w "$KEYWORD" || true) + +if [ -n "$output" ]; then + echo "Found $KEYWORD markers in the codebase." 
>&2 + echo "$KEYWORD is used for tasks that should be done before merging a PR; if you want to leave a message in the codebase, use FIXME." >&2 + echo "" >&2 + echo "$output" >&2 + exit 1 +fi diff --git a/vendor/zerocopy/ci/check_versions.sh b/vendor/zerocopy/ci/check_versions.sh new file mode 100755 index 00000000..4ef9f1a6 --- /dev/null +++ b/vendor/zerocopy/ci/check_versions.sh @@ -0,0 +1,70 @@ +#!/usr/bin/env bash +# +# Copyright 2024 The Fuchsia Authors +# +# Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +# <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +# license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +# This file may not be copied, modified, or distributed except according to +# those terms. + +set -eo pipefail + +# Usage: version <crate-name> +function version { + cargo metadata -q --format-version 1 | jq -r ".packages[] | select(.name == \"$1\").version" +} + +ver_zerocopy=$(version zerocopy) +ver_zerocopy_derive=$(version zerocopy-derive) + +# Usage: dependency-version <kind> <target> +function dependency-version { + KIND="$1" + TARGET="$2" + cargo metadata -q --format-version 1 \ + | jq -r ".packages[] | select(.name == \"zerocopy\").dependencies[] | select((.name == \"zerocopy-derive\") and .kind == $KIND and .target == $TARGET).req" +} + +# The non-dev dependency version (kind `null` filters out the dev +# dependency, and target `null` filters out the targeted version). +zerocopy_derive_dep_ver=$(dependency-version null null) + +# The non-dev dependency, targeted version (kind `null` filters out +# the dev dependency). +zerocopy_derive_targeted_ver=$(dependency-version null '"cfg(any())"') + +# The dev dependency version (kind `"dev"` selects only the dev +# dependency). +zerocopy_derive_dev_dep_ver=$(dependency-version '"dev"' null) + +function assert-match { + VER_A="$1" + VER_B="$2" + SUCCESS_MSG="$3" + FAILURE_MSG="$4" + if [[ "$VER_A" == "$VER_B" ]]; then + echo "$SUCCESS_MSG" | tee -a $GITHUB_STEP_SUMMARY + else + echo "$FAILURE_MSG" | tee -a $GITHUB_STEP_SUMMARY >&2 + exit 1 + fi +} + +assert-match "$ver_zerocopy" "$ver_zerocopy_derive" \ + "Same crate version ($ver_zerocopy) found for zerocopy and zerocopy-derive." \ + "Different crate versions found for zerocopy ($ver_zerocopy) and zerocopy-derive ($ver_zerocopy_derive)." + +# Note the leading `=` sign - the dependency needs to be an exact one. +assert-match "=$ver_zerocopy_derive" "$zerocopy_derive_dep_ver" \ + "zerocopy depends upon same version of zerocopy-derive in-tree ($zerocopy_derive_dep_ver)." \ + "zerocopy depends upon different version of zerocopy-derive ($zerocopy_derive_dep_ver) than the one in-tree ($ver_zerocopy_derive)." + +# Note the leading `=` sign - the dependency needs to be an exact one. +assert-match "=$ver_zerocopy_derive" "$zerocopy_derive_dev_dep_ver" \ + "In dev mode, zerocopy depends upon same version of zerocopy-derive in-tree ($zerocopy_derive_dev_dep_ver)." \ + "In dev mode, zerocopy depends upon different version of zerocopy-derive ($zerocopy_derive_dev_dep_ver) than the one in-tree ($ver_zerocopy_derive)." + +assert-match "$zerocopy_derive_dep_ver" "$zerocopy_derive_targeted_ver" \ + "Same crate version ($zerocopy_derive_dep_ver) found for optional and targeted zerocopy-derive dependency." \ + "Different crate versions found for optional ($zerocopy_derive_dep_ver) and targeted ($zerocopy_derive_targeted_ver) dependency." 
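For orientation, the `dependency-version` filters in `check_versions.sh` above select entries out of `cargo metadata` JSON by dependency `kind` (`null` for a normal dependency, `"dev"` for a dev-dependency) and `target` (`null`, or a `cfg(...)` string for a targeted dependency). A rough equivalent one-off query, assuming `cargo` and `jq` are on `PATH` (a sketch, not part of the CI scripts), might look like:

```sh
# Print kind, target, and version requirement for every zerocopy-derive
# dependency declared by the zerocopy package.
cargo metadata -q --format-version 1 \
  | jq -r '.packages[]
           | select(.name == "zerocopy").dependencies[]
           | select(.name == "zerocopy-derive")
           | "\(.kind // "normal")\t\(.target // "any")\t\(.req)"'
```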
diff --git a/vendor/zerocopy/ci/release_crate_version.sh b/vendor/zerocopy/ci/release_crate_version.sh new file mode 100755 index 00000000..e916bdf4 --- /dev/null +++ b/vendor/zerocopy/ci/release_crate_version.sh @@ -0,0 +1,21 @@ +#!/usr/bin/env bash +# +# Copyright 2024 The Fuchsia Authors +# +# Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +# <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +# license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +# This file may not be copied, modified, or distributed except according to +# those terms. + +set -e + +if [ $# -ne 1 ]; then + echo "Usage: $0 <version>" >&2 + exit 1 +fi + +VERSION="$1" + +sed -i -e "s/^zerocopy-derive = { version = \"=[0-9a-zA-Z\.-]*\"/zerocopy-derive = { version = \"=$VERSION\"/" Cargo.toml +sed -i -e "s/^version = \"[0-9a-zA-Z\.-]*\"/version = \"$VERSION\"/" Cargo.toml zerocopy-derive/Cargo.toml diff --git a/vendor/zerocopy/clippy.toml b/vendor/zerocopy/clippy.toml new file mode 100644 index 00000000..9c114064 --- /dev/null +++ b/vendor/zerocopy/clippy.toml @@ -0,0 +1,10 @@ +# Copyright 2023 The Fuchsia Authors +# +# Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +# <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +# license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +# This file may not be copied, modified, or distributed except according to +# those terms. + +accept-comment-above-statement = true +accept-comment-above-attributes = true diff --git a/vendor/zerocopy/githooks/pre-push b/vendor/zerocopy/githooks/pre-push new file mode 100755 index 00000000..34f147fb --- /dev/null +++ b/vendor/zerocopy/githooks/pre-push @@ -0,0 +1,53 @@ +#!/usr/bin/env bash +# +# Copyright 2024 The Fuchsia Authors +# +# Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +# <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +# license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +# This file may not be copied, modified, or distributed except according to +# those terms. + +set -eo pipefail +echo "Running pre-push git hook: $0" +# Forego redirecting stdout to /dev/null on check_fmt.sh because the output from +# `cargo fmt` is useful (and the good stuff is not delivered by stderr). +# +# Background all jobs and wait for them so they can run in parallel. +./ci/check_actions.sh & ACTIONS_PID=$! +./ci/check_fmt.sh & FMT_PID=$! +./ci/check_all_toolchains_tested.sh >/dev/null & TOOLCHAINS_PID=$! +./ci/check_job_dependencies.sh >/dev/null & JOB_DEPS_PID=$! +./ci/check_readme.sh >/dev/null & README_PID=$! +./ci/check_todo.sh >/dev/null & XODO_PID=$! +./ci/check_versions.sh >/dev/null & VERSIONS_PID=$! +./ci/check_msrv_is_minimal.sh >/dev/null & MSRV_PID=$! + +# `wait <pid>` exits with the same status code as the job it's waiting for. +# Since we `set -e` above, this will have the effect of causing the entire +# script to exit with a non-zero status code if any of these jobs does the same. +# Note that, while `wait` (with no PID argument) waits for all backgrounded +# jobs, it exits with code 0 even if one of the backgrounded jobs does not, so +# we can't use it here. +wait $ACTIONS_PID +wait $FMT_PID +wait $TOOLCHAINS_PID +wait $JOB_DEPS_PID +wait $README_PID +wait $XODO_PID +wait $VERSIONS_PID +wait $MSRV_PID + +# Ensure that this script calls all scripts in `ci/*`. 
This isn't a foolproof +# check since it just checks for the string in this script (e.g., it could be in +# a comment, which would trigger a false positive), but it should catch obvious +# errors. Also note that this entire hook is a nice-to-have - failures that +# aren't caught here will still be caught in CI. +# +# This was added because, in #728, we added `ci/check_all_toolchains_tested.sh` +# without calling it from this script. +GLOBIGNORE="./*/release_crate_version.sh" # We don't want to run this one +for f in ./ci/*; do + grep "$f" githooks/pre-push >/dev/null || { echo "$f not called from githooks/pre-push" >&2 ; exit 1; } +done +unset GLOBIGNORE diff --git a/vendor/zerocopy/rustfmt.toml b/vendor/zerocopy/rustfmt.toml new file mode 100644 index 00000000..345f7e1b --- /dev/null +++ b/vendor/zerocopy/rustfmt.toml @@ -0,0 +1,22 @@ +# Copyright 2022 The Fuchsia Authors +# +# Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +# <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +# license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +# This file may not be copied, modified, or distributed except according to +# those terms. + +edition = "2024" + +# The "Default" setting has a heuristic which splits lines too aggressively. +# We are willing to revisit this setting in future versions of rustfmt. +# Bugs: +# * https://github.com/rust-lang/rustfmt/issues/3119 +# * https://github.com/rust-lang/rustfmt/issues/3120 +use_small_heuristics = "Max" + +# Prevent carriage returns +newline_style = "Unix" + +imports_granularity = "Crate" +group_imports = "StdExternalCrate" diff --git a/vendor/zerocopy/src/byte_slice.rs b/vendor/zerocopy/src/byte_slice.rs new file mode 100644 index 00000000..4e427690 --- /dev/null +++ b/vendor/zerocopy/src/byte_slice.rs @@ -0,0 +1,403 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +//! Traits for types that encapsulate a `[u8]`. +//! +//! These traits are used to bound the `B` parameter of [`Ref`]. + +use core::{ + cell, + ops::{Deref, DerefMut}, +}; + +// For each trait polyfill, as soon as the corresponding feature is stable, the +// polyfill import will be unused because method/function resolution will prefer +// the inherent method/function over a trait method/function. Thus, we suppress +// the `unused_imports` warning. +// +// See the documentation on `util::polyfills` for more information. +#[allow(unused_imports)] +use crate::util::polyfills::{self, NonNullExt as _, NumExt as _}; +#[cfg(doc)] +use crate::Ref; + +/// A mutable or immutable reference to a byte slice. +/// +/// `ByteSlice` abstracts over the mutability of a byte slice reference, and is +/// implemented for various special reference types such as +/// [`Ref<[u8]>`](core::cell::Ref) and [`RefMut<[u8]>`](core::cell::RefMut). +/// +/// # Safety +/// +/// Implementations of `ByteSlice` must promise that their implementations of +/// [`Deref`] and [`DerefMut`] are "stable". In particular, given `B: ByteSlice` +/// and `b: B`, two calls, each to either `b.deref()` or `b.deref_mut()`, must +/// return a byte slice with the same address and length. 
This must hold even if +/// the two calls are separated by an arbitrary sequence of calls to methods on +/// `ByteSlice`, [`ByteSliceMut`], [`IntoByteSlice`], or [`IntoByteSliceMut`], +/// or on their super-traits. This does *not* need to hold if the two calls are +/// separated by any method calls, field accesses, or field modifications *other +/// than* those from these traits. +/// +/// Note that this also implies that, given `b: B`, the address and length +/// cannot be modified via objects other than `b`, either on the same thread or +/// on another thread. +pub unsafe trait ByteSlice: Deref<Target = [u8]> + Sized {} + +/// A mutable reference to a byte slice. +/// +/// `ByteSliceMut` abstracts over various ways of storing a mutable reference to +/// a byte slice, and is implemented for various special reference types such as +/// `RefMut<[u8]>`. +/// +/// `ByteSliceMut` is a shorthand for [`ByteSlice`] and [`DerefMut`]. +pub trait ByteSliceMut: ByteSlice + DerefMut {} +impl<B: ByteSlice + DerefMut> ByteSliceMut for B {} + +/// A [`ByteSlice`] which can be copied without violating dereference stability. +/// +/// # Safety +/// +/// If `B: CopyableByteSlice`, then the dereference stability properties +/// required by [`ByteSlice`] (see that trait's safety documentation) do not +/// only hold regarding two calls to `b.deref()` or `b.deref_mut()`, but also +/// hold regarding `c.deref()` or `c.deref_mut()`, where `c` is produced by +/// copying `b`. +pub unsafe trait CopyableByteSlice: ByteSlice + Copy + CloneableByteSlice {} + +/// A [`ByteSlice`] which can be cloned without violating dereference stability. +/// +/// # Safety +/// +/// If `B: CloneableByteSlice`, then the dereference stability properties +/// required by [`ByteSlice`] (see that trait's safety documentation) do not +/// only hold regarding two calls to `b.deref()` or `b.deref_mut()`, but also +/// hold regarding `c.deref()` or `c.deref_mut()`, where `c` is produced by +/// `b.clone()`, `b.clone().clone()`, etc. +pub unsafe trait CloneableByteSlice: ByteSlice + Clone {} + +/// A [`ByteSlice`] that can be split in two. +/// +/// # Safety +/// +/// Unsafe code may depend for its soundness on the assumption that `split_at` +/// and `split_at_unchecked` are implemented correctly. In particular, given `B: +/// SplitByteSlice` and `b: B`, if `b.deref()` returns a byte slice with address +/// `addr` and length `len`, then if `split <= len`, both of these +/// invocations: +/// - `b.split_at(split)` +/// - `b.split_at_unchecked(split)` +/// +/// ...will return `(first, second)` such that: +/// - `first`'s address is `addr` and its length is `split` +/// - `second`'s address is `addr + split` and its length is `len - split` +pub unsafe trait SplitByteSlice: ByteSlice { + /// Attempts to split `self` at the midpoint. + /// + /// `s.split_at(mid)` returns `Ok((s[..mid], s[mid..]))` if `mid <= + /// s.deref().len()` and otherwise returns `Err(s)`. + /// + /// # Safety + /// + /// Unsafe code may rely on this function correctly implementing the above + /// functionality. + #[inline] + fn split_at(self, mid: usize) -> Result<(Self, Self), Self> { + if mid <= self.deref().len() { + // SAFETY: Above, we ensure that `mid <= self.deref().len()`. By + // invariant on `ByteSlice`, a supertrait of `SplitByteSlice`, + // `.deref()` is guaranteed to be "stable"; i.e., it will always + // dereference to a byte slice of the same address and length. 
Thus, + // we can be sure that the above precondition remains satisfied + // through the call to `split_at_unchecked`. + unsafe { Ok(self.split_at_unchecked(mid)) } + } else { + Err(self) + } + } + + /// Splits the slice at the midpoint, possibly omitting bounds checks. + /// + /// `s.split_at_unchecked(mid)` returns `s[..mid]` and `s[mid..]`. + /// + /// # Safety + /// + /// `mid` must not be greater than `self.deref().len()`. + /// + /// # Panics + /// + /// Implementations of this method may choose to perform a bounds check and + /// panic if `mid > self.deref().len()`. They may also panic for any other + /// reason. Since it is optional, callers must not rely on this behavior for + /// soundness. + #[must_use] + unsafe fn split_at_unchecked(self, mid: usize) -> (Self, Self); +} + +/// A shorthand for [`SplitByteSlice`] and [`ByteSliceMut`]. +pub trait SplitByteSliceMut: SplitByteSlice + ByteSliceMut {} +impl<B: SplitByteSlice + ByteSliceMut> SplitByteSliceMut for B {} + +#[allow(clippy::missing_safety_doc)] // There's a `Safety` section on `into_byte_slice`. +/// A [`ByteSlice`] that conveys no ownership, and so can be converted into a +/// byte slice. +/// +/// Some `ByteSlice` types (notably, the standard library's [`Ref`] type) convey +/// ownership, and so they cannot soundly be moved by-value into a byte slice +/// type (`&[u8]`). Some methods in this crate's API (such as [`Ref::into_ref`]) +/// are only compatible with `ByteSlice` types without these ownership +/// semantics. +/// +/// [`Ref`]: core::cell::Ref +pub unsafe trait IntoByteSlice<'a>: ByteSlice { + /// Coverts `self` into a `&[u8]`. + /// + /// # Safety + /// + /// The returned reference has the same address and length as `self.deref()` + /// and `self.deref_mut()`. + /// + /// Note that, combined with the safety invariant on [`ByteSlice`], this + /// safety invariant implies that the returned reference is "stable" in the + /// sense described in the `ByteSlice` docs. + fn into_byte_slice(self) -> &'a [u8]; +} + +#[allow(clippy::missing_safety_doc)] // There's a `Safety` section on `into_byte_slice_mut`. +/// A [`ByteSliceMut`] that conveys no ownership, and so can be converted into a +/// mutable byte slice. +/// +/// Some `ByteSliceMut` types (notably, the standard library's [`RefMut`] type) +/// convey ownership, and so they cannot soundly be moved by-value into a byte +/// slice type (`&mut [u8]`). Some methods in this crate's API (such as +/// [`Ref::into_mut`]) are only compatible with `ByteSliceMut` types without +/// these ownership semantics. +/// +/// [`RefMut`]: core::cell::RefMut +pub unsafe trait IntoByteSliceMut<'a>: IntoByteSlice<'a> + ByteSliceMut { + /// Coverts `self` into a `&mut [u8]`. + /// + /// # Safety + /// + /// The returned reference has the same address and length as `self.deref()` + /// and `self.deref_mut()`. + /// + /// Note that, combined with the safety invariant on [`ByteSlice`], this + /// safety invariant implies that the returned reference is "stable" in the + /// sense described in the `ByteSlice` docs. + fn into_byte_slice_mut(self) -> &'a mut [u8]; +} + +// FIXME(#429): Add a "SAFETY" comment and remove this `allow`. +#[allow(clippy::undocumented_unsafe_blocks)] +unsafe impl ByteSlice for &[u8] {} + +// FIXME(#429): Add a "SAFETY" comment and remove this `allow`. +#[allow(clippy::undocumented_unsafe_blocks)] +unsafe impl CopyableByteSlice for &[u8] {} + +// FIXME(#429): Add a "SAFETY" comment and remove this `allow`. 
+#[allow(clippy::undocumented_unsafe_blocks)] +unsafe impl CloneableByteSlice for &[u8] {} + +// SAFETY: This delegates to `polyfills:split_at_unchecked`, which is documented +// to correctly split `self` into two slices at the given `mid` point. +unsafe impl SplitByteSlice for &[u8] { + #[inline] + unsafe fn split_at_unchecked(self, mid: usize) -> (Self, Self) { + // SAFETY: By contract on caller, `mid` is not greater than + // `bytes.len()`. + #[allow(clippy::multiple_unsafe_ops_per_block)] + unsafe { + (<[u8]>::get_unchecked(self, ..mid), <[u8]>::get_unchecked(self, mid..)) + } + } +} + +// SAFETY: See inline. +unsafe impl<'a> IntoByteSlice<'a> for &'a [u8] { + #[inline(always)] + fn into_byte_slice(self) -> &'a [u8] { + // SAFETY: It would be patently insane to implement `<Deref for + // &[u8]>::deref` as anything other than `fn deref(&self) -> &[u8] { + // *self }`. Assuming this holds, then `self` is stable as required by + // `into_byte_slice`. + self + } +} + +// FIXME(#429): Add a "SAFETY" comment and remove this `allow`. +#[allow(clippy::undocumented_unsafe_blocks)] +unsafe impl ByteSlice for &mut [u8] {} + +// SAFETY: This delegates to `polyfills:split_at_mut_unchecked`, which is +// documented to correctly split `self` into two slices at the given `mid` +// point. +unsafe impl SplitByteSlice for &mut [u8] { + #[inline] + unsafe fn split_at_unchecked(self, mid: usize) -> (Self, Self) { + use core::slice::from_raw_parts_mut; + + // `l_ptr` is non-null, because `self` is non-null, by invariant on + // `&mut [u8]`. + let l_ptr = self.as_mut_ptr(); + + // SAFETY: By contract on caller, `mid` is not greater than + // `self.len()`. + let r_ptr = unsafe { l_ptr.add(mid) }; + + let l_len = mid; + + // SAFETY: By contract on caller, `mid` is not greater than + // `self.len()`. + // + // FIXME(#67): Remove this allow. See NumExt for more details. + #[allow(unstable_name_collisions)] + let r_len = unsafe { self.len().unchecked_sub(mid) }; + + // SAFETY: These invocations of `from_raw_parts_mut` satisfy its + // documented safety preconditions [1]: + // - The data `l_ptr` and `r_ptr` are valid for both reads and writes of + // `l_len` and `r_len` bytes, respectively, and they are trivially + // aligned. In particular: + // - The entire memory range of each slice is contained within a + // single allocated object, since `l_ptr` and `r_ptr` are both + // derived from within the address range of `self`. + // - Both `l_ptr` and `r_ptr` are non-null and trivially aligned. + // `self` is non-null by invariant on `&mut [u8]`, and the + // operations that derive `l_ptr` and `r_ptr` from `self` do not + // nullify either pointer. + // - The data `l_ptr` and `r_ptr` point to `l_len` and `r_len`, + // respectively, consecutive properly initialized values of type `u8`. + // This is true for `self` by invariant on `&mut [u8]`, and remains + // true for these two sub-slices of `self`. + // - The memory referenced by the returned slice cannot be accessed + // through any other pointer (not derived from the return value) for + // the duration of lifetime `'a``, because: + // - `split_at_unchecked` consumes `self` (which is not `Copy`), + // - `split_at_unchecked` does not exfiltrate any references to this + // memory, besides those references returned below, + // - the returned slices are non-overlapping. + // - The individual sizes of the sub-slices of `self` are no larger than + // `isize::MAX`, because their combined sizes are no larger than + // `isize::MAX`, by invariant on `self`. 
+ // + // [1] https://doc.rust-lang.org/std/slice/fn.from_raw_parts_mut.html#safety + #[allow(clippy::multiple_unsafe_ops_per_block)] + unsafe { + (from_raw_parts_mut(l_ptr, l_len), from_raw_parts_mut(r_ptr, r_len)) + } + } +} + +// SAFETY: See inline. +unsafe impl<'a> IntoByteSlice<'a> for &'a mut [u8] { + #[inline(always)] + fn into_byte_slice(self) -> &'a [u8] { + // SAFETY: It would be patently insane to implement `<Deref for &mut + // [u8]>::deref` as anything other than `fn deref(&self) -> &[u8] { + // *self }`. Assuming this holds, then `self` is stable as required by + // `into_byte_slice`. + self + } +} + +// SAFETY: See inline. +unsafe impl<'a> IntoByteSliceMut<'a> for &'a mut [u8] { + #[inline(always)] + fn into_byte_slice_mut(self) -> &'a mut [u8] { + // SAFETY: It would be patently insane to implement `<DerefMut for &mut + // [u8]>::deref` as anything other than `fn deref_mut(&mut self) -> &mut + // [u8] { *self }`. Assuming this holds, then `self` is stable as + // required by `into_byte_slice_mut`. + self + } +} + +// FIXME(#429): Add a "SAFETY" comment and remove this `allow`. +#[allow(clippy::undocumented_unsafe_blocks)] +unsafe impl ByteSlice for cell::Ref<'_, [u8]> {} + +// SAFETY: This delegates to stdlib implementation of `Ref::map_split`, which is +// assumed to be correct, and `SplitByteSlice::split_at_unchecked`, which is +// documented to correctly split `self` into two slices at the given `mid` +// point. +unsafe impl SplitByteSlice for cell::Ref<'_, [u8]> { + #[inline] + unsafe fn split_at_unchecked(self, mid: usize) -> (Self, Self) { + cell::Ref::map_split(self, |slice| + // SAFETY: By precondition on caller, `mid` is not greater than + // `slice.len()`. + unsafe { + SplitByteSlice::split_at_unchecked(slice, mid) + }) + } +} + +// FIXME(#429): Add a "SAFETY" comment and remove this `allow`. +#[allow(clippy::undocumented_unsafe_blocks)] +unsafe impl ByteSlice for cell::RefMut<'_, [u8]> {} + +// SAFETY: This delegates to stdlib implementation of `RefMut::map_split`, which +// is assumed to be correct, and `SplitByteSlice::split_at_unchecked`, which is +// documented to correctly split `self` into two slices at the given `mid` +// point. 
+unsafe impl SplitByteSlice for cell::RefMut<'_, [u8]> { + #[inline] + unsafe fn split_at_unchecked(self, mid: usize) -> (Self, Self) { + cell::RefMut::map_split(self, |slice| + // SAFETY: By precondition on caller, `mid` is not greater than + // `slice.len()` + unsafe { + SplitByteSlice::split_at_unchecked(slice, mid) + }) + } +} + +#[cfg(kani)] +mod proofs { + use super::*; + + fn any_vec() -> Vec<u8> { + let len = kani::any(); + kani::assume(len <= isize::MAX as usize); + vec![0u8; len] + } + + #[kani::proof] + fn prove_split_at_unchecked() { + let v = any_vec(); + let slc = v.as_slice(); + let mid = kani::any(); + kani::assume(mid <= slc.len()); + let (l, r) = unsafe { slc.split_at_unchecked(mid) }; + assert_eq!(l.len() + r.len(), slc.len()); + + let slc: *const _ = slc; + let l: *const _ = l; + let r: *const _ = r; + + assert_eq!(slc.cast::<u8>(), l.cast::<u8>()); + assert_eq!(unsafe { slc.cast::<u8>().add(mid) }, r.cast::<u8>()); + + let mut v = any_vec(); + let slc = v.as_mut_slice(); + let len = slc.len(); + let mid = kani::any(); + kani::assume(mid <= slc.len()); + let (l, r) = unsafe { slc.split_at_unchecked(mid) }; + assert_eq!(l.len() + r.len(), len); + + let l: *mut _ = l; + let r: *mut _ = r; + let slc: *mut _ = slc; + + assert_eq!(slc.cast::<u8>(), l.cast::<u8>()); + assert_eq!(unsafe { slc.cast::<u8>().add(mid) }, r.cast::<u8>()); + } +} diff --git a/vendor/zerocopy/src/byteorder.rs b/vendor/zerocopy/src/byteorder.rs new file mode 100644 index 00000000..4065d677 --- /dev/null +++ b/vendor/zerocopy/src/byteorder.rs @@ -0,0 +1,1532 @@ +// Copyright 2019 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +//! Byte order-aware numeric primitives. +//! +//! This module contains equivalents of the native multi-byte integer types with +//! no alignment requirement and supporting byte order conversions. +//! +//! For each native multi-byte integer type - `u16`, `i16`, `u32`, etc - and +//! floating point type - `f32` and `f64` - an equivalent type is defined by +//! this module - [`U16`], [`I16`], [`U32`], [`F32`], [`F64`], etc. Unlike their +//! native counterparts, these types have alignment 1, and take a type parameter +//! specifying the byte order in which the bytes are stored in memory. Each type +//! implements this crate's relevant conversion and marker traits. +//! +//! These two properties, taken together, make these types useful for defining +//! data structures whose memory layout matches a wire format such as that of a +//! network protocol or a file format. Such formats often have multi-byte values +//! at offsets that do not respect the alignment requirements of the equivalent +//! native types, and stored in a byte order not necessarily the same as that of +//! the target platform. +//! +//! Type aliases are provided for common byte orders in the [`big_endian`], +//! [`little_endian`], [`network_endian`], and [`native_endian`] submodules. +//! Note that network-endian is a synonym for big-endian. +//! +//! # Example +//! +//! One use of these types is for representing network packet formats, such as +//! UDP: +//! +//! ```rust +//! use zerocopy::{*, byteorder::network_endian::U16}; +//! # use zerocopy_derive::*; +//! +//! 
#[derive(FromBytes, IntoBytes, KnownLayout, Immutable, Unaligned)] +//! #[repr(C)] +//! struct UdpHeader { +//! src_port: U16, +//! dst_port: U16, +//! length: U16, +//! checksum: U16, +//! } +//! +//! #[derive(FromBytes, IntoBytes, KnownLayout, Immutable, Unaligned)] +//! #[repr(C, packed)] +//! struct UdpPacket { +//! header: UdpHeader, +//! body: [u8], +//! } +//! +//! impl UdpPacket { +//! fn parse(bytes: &[u8]) -> Option<&UdpPacket> { +//! UdpPacket::ref_from_bytes(bytes).ok() +//! } +//! } +//! ``` + +use core::{ + convert::{TryFrom, TryInto}, + fmt::{Binary, Debug, LowerHex, Octal, UpperHex}, + hash::Hash, + num::TryFromIntError, +}; + +use super::*; + +/// A type-level representation of byte order. +/// +/// This type is implemented by [`BigEndian`] and [`LittleEndian`], which +/// represent big-endian and little-endian byte order respectively. This module +/// also provides a number of useful aliases for those types: [`NativeEndian`], +/// [`NetworkEndian`], [`BE`], and [`LE`]. +/// +/// `ByteOrder` types can be used to specify the byte order of the types in this +/// module - for example, [`U32<BigEndian>`] is a 32-bit integer stored in +/// big-endian byte order. +/// +/// [`U32<BigEndian>`]: U32 +pub trait ByteOrder: + Copy + Clone + Debug + Display + Eq + PartialEq + Ord + PartialOrd + Hash + private::Sealed +{ + #[doc(hidden)] + const ORDER: Order; +} + +mod private { + pub trait Sealed {} + + impl Sealed for super::BigEndian {} + impl Sealed for super::LittleEndian {} +} + +#[allow(missing_copy_implementations, missing_debug_implementations)] +#[doc(hidden)] +pub enum Order { + BigEndian, + LittleEndian, +} + +/// Big-endian byte order. +/// +/// See [`ByteOrder`] for more details. +#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] +pub enum BigEndian {} + +impl ByteOrder for BigEndian { + const ORDER: Order = Order::BigEndian; +} + +impl Display for BigEndian { + #[inline] + fn fmt(&self, _: &mut Formatter<'_>) -> fmt::Result { + match *self {} + } +} + +/// Little-endian byte order. +/// +/// See [`ByteOrder`] for more details. +#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] +pub enum LittleEndian {} + +impl ByteOrder for LittleEndian { + const ORDER: Order = Order::LittleEndian; +} + +impl Display for LittleEndian { + #[inline] + fn fmt(&self, _: &mut Formatter<'_>) -> fmt::Result { + match *self {} + } +} + +/// The endianness used by this platform. +/// +/// This is a type alias for [`BigEndian`] or [`LittleEndian`] depending on the +/// endianness of the target platform. +#[cfg(target_endian = "big")] +pub type NativeEndian = BigEndian; + +/// The endianness used by this platform. +/// +/// This is a type alias for [`BigEndian`] or [`LittleEndian`] depending on the +/// endianness of the target platform. +#[cfg(target_endian = "little")] +pub type NativeEndian = LittleEndian; + +/// The endianness used in many network protocols. +/// +/// This is a type alias for [`BigEndian`]. +pub type NetworkEndian = BigEndian; + +/// A type alias for [`BigEndian`]. +pub type BE = BigEndian; + +/// A type alias for [`LittleEndian`]. +pub type LE = LittleEndian; + +macro_rules! impl_fmt_trait { + ($name:ident, $native:ident, $trait:ident) => { + impl<O: ByteOrder> $trait for $name<O> { + #[inline(always)] + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + $trait::fmt(&self.get(), f) + } + } + }; +} + +macro_rules! 
impl_fmt_traits { + ($name:ident, $native:ident, "floating point number") => { + impl_fmt_trait!($name, $native, Display); + }; + ($name:ident, $native:ident, "unsigned integer") => { + impl_fmt_traits!($name, $native, @all_types); + }; + ($name:ident, $native:ident, "signed integer") => { + impl_fmt_traits!($name, $native, @all_types); + }; + ($name:ident, $native:ident, @all_types) => { + impl_fmt_trait!($name, $native, Display); + impl_fmt_trait!($name, $native, Octal); + impl_fmt_trait!($name, $native, LowerHex); + impl_fmt_trait!($name, $native, UpperHex); + impl_fmt_trait!($name, $native, Binary); + }; +} + +macro_rules! impl_ops_traits { + ($name:ident, $native:ident, "floating point number") => { + impl_ops_traits!($name, $native, @all_types); + impl_ops_traits!($name, $native, @signed_integer_floating_point); + + impl<O: ByteOrder> PartialOrd for $name<O> { + #[inline(always)] + fn partial_cmp(&self, other: &Self) -> Option<Ordering> { + self.get().partial_cmp(&other.get()) + } + } + }; + ($name:ident, $native:ident, "unsigned integer") => { + impl_ops_traits!($name, $native, @signed_unsigned_integer); + impl_ops_traits!($name, $native, @all_types); + }; + ($name:ident, $native:ident, "signed integer") => { + impl_ops_traits!($name, $native, @signed_unsigned_integer); + impl_ops_traits!($name, $native, @signed_integer_floating_point); + impl_ops_traits!($name, $native, @all_types); + }; + ($name:ident, $native:ident, @signed_unsigned_integer) => { + impl_ops_traits!(@without_byteorder_swap $name, $native, BitAnd, bitand, BitAndAssign, bitand_assign); + impl_ops_traits!(@without_byteorder_swap $name, $native, BitOr, bitor, BitOrAssign, bitor_assign); + impl_ops_traits!(@without_byteorder_swap $name, $native, BitXor, bitxor, BitXorAssign, bitxor_assign); + impl_ops_traits!(@with_byteorder_swap $name, $native, Shl, shl, ShlAssign, shl_assign); + impl_ops_traits!(@with_byteorder_swap $name, $native, Shr, shr, ShrAssign, shr_assign); + + impl<O> core::ops::Not for $name<O> { + type Output = $name<O>; + + #[inline(always)] + fn not(self) -> $name<O> { + let self_native = $native::from_ne_bytes(self.0); + $name((!self_native).to_ne_bytes(), PhantomData) + } + } + + impl<O: ByteOrder> PartialOrd for $name<O> { + #[inline(always)] + fn partial_cmp(&self, other: &Self) -> Option<Ordering> { + Some(self.cmp(other)) + } + } + + impl<O: ByteOrder> Ord for $name<O> { + #[inline(always)] + fn cmp(&self, other: &Self) -> Ordering { + self.get().cmp(&other.get()) + } + } + + impl<O: ByteOrder> PartialOrd<$native> for $name<O> { + #[inline(always)] + fn partial_cmp(&self, other: &$native) -> Option<Ordering> { + self.get().partial_cmp(other) + } + } + }; + ($name:ident, $native:ident, @signed_integer_floating_point) => { + impl<O: ByteOrder> core::ops::Neg for $name<O> { + type Output = $name<O>; + + #[inline(always)] + fn neg(self) -> $name<O> { + let self_native: $native = self.get(); + #[allow(clippy::arithmetic_side_effects)] + $name::<O>::new(-self_native) + } + } + }; + ($name:ident, $native:ident, @all_types) => { + impl_ops_traits!(@with_byteorder_swap $name, $native, Add, add, AddAssign, add_assign); + impl_ops_traits!(@with_byteorder_swap $name, $native, Div, div, DivAssign, div_assign); + impl_ops_traits!(@with_byteorder_swap $name, $native, Mul, mul, MulAssign, mul_assign); + impl_ops_traits!(@with_byteorder_swap $name, $native, Rem, rem, RemAssign, rem_assign); + impl_ops_traits!(@with_byteorder_swap $name, $native, Sub, sub, SubAssign, sub_assign); + }; + (@with_byteorder_swap 
$name:ident, $native:ident, $trait:ident, $method:ident, $trait_assign:ident, $method_assign:ident) => { + impl<O: ByteOrder> core::ops::$trait<$name<O>> for $name<O> { + type Output = $name<O>; + + #[inline(always)] + fn $method(self, rhs: $name<O>) -> $name<O> { + let self_native: $native = self.get(); + let rhs_native: $native = rhs.get(); + let result_native = core::ops::$trait::$method(self_native, rhs_native); + $name::<O>::new(result_native) + } + } + + impl<O: ByteOrder> core::ops::$trait<$name<O>> for $native { + type Output = $name<O>; + + #[inline(always)] + fn $method(self, rhs: $name<O>) -> $name<O> { + let rhs_native: $native = rhs.get(); + let result_native = core::ops::$trait::$method(self, rhs_native); + $name::<O>::new(result_native) + } + } + + impl<O: ByteOrder> core::ops::$trait<$native> for $name<O> { + type Output = $name<O>; + + #[inline(always)] + fn $method(self, rhs: $native) -> $name<O> { + let self_native: $native = self.get(); + let result_native = core::ops::$trait::$method(self_native, rhs); + $name::<O>::new(result_native) + } + } + + impl<O: ByteOrder> core::ops::$trait_assign<$name<O>> for $name<O> { + #[inline(always)] + fn $method_assign(&mut self, rhs: $name<O>) { + *self = core::ops::$trait::$method(*self, rhs); + } + } + + impl<O: ByteOrder> core::ops::$trait_assign<$name<O>> for $native { + #[inline(always)] + fn $method_assign(&mut self, rhs: $name<O>) { + let rhs_native: $native = rhs.get(); + *self = core::ops::$trait::$method(*self, rhs_native); + } + } + + impl<O: ByteOrder> core::ops::$trait_assign<$native> for $name<O> { + #[inline(always)] + fn $method_assign(&mut self, rhs: $native) { + *self = core::ops::$trait::$method(*self, rhs); + } + } + }; + // Implement traits in terms of the same trait on the native type, but + // without performing a byte order swap when both operands are byteorder + // types. This only works for bitwise operations like `&`, `|`, etc. + // + // When only one operand is a byteorder type, we still need to perform a + // byteorder swap. 
+ (@without_byteorder_swap $name:ident, $native:ident, $trait:ident, $method:ident, $trait_assign:ident, $method_assign:ident) => { + impl<O: ByteOrder> core::ops::$trait<$name<O>> for $name<O> { + type Output = $name<O>; + + #[inline(always)] + fn $method(self, rhs: $name<O>) -> $name<O> { + let self_native = $native::from_ne_bytes(self.0); + let rhs_native = $native::from_ne_bytes(rhs.0); + let result_native = core::ops::$trait::$method(self_native, rhs_native); + $name(result_native.to_ne_bytes(), PhantomData) + } + } + + impl<O: ByteOrder> core::ops::$trait<$name<O>> for $native { + type Output = $name<O>; + + #[inline(always)] + fn $method(self, rhs: $name<O>) -> $name<O> { + // No runtime cost - just byte packing + let rhs_native = $native::from_ne_bytes(rhs.0); + // (Maybe) runtime cost - byte order swap + let slf_byteorder = $name::<O>::new(self); + // No runtime cost - just byte packing + let slf_native = $native::from_ne_bytes(slf_byteorder.0); + // Runtime cost - perform the operation + let result_native = core::ops::$trait::$method(slf_native, rhs_native); + // No runtime cost - just byte unpacking + $name(result_native.to_ne_bytes(), PhantomData) + } + } + + impl<O: ByteOrder> core::ops::$trait<$native> for $name<O> { + type Output = $name<O>; + + #[inline(always)] + fn $method(self, rhs: $native) -> $name<O> { + // (Maybe) runtime cost - byte order swap + let rhs_byteorder = $name::<O>::new(rhs); + // No runtime cost - just byte packing + let rhs_native = $native::from_ne_bytes(rhs_byteorder.0); + // No runtime cost - just byte packing + let slf_native = $native::from_ne_bytes(self.0); + // Runtime cost - perform the operation + let result_native = core::ops::$trait::$method(slf_native, rhs_native); + // No runtime cost - just byte unpacking + $name(result_native.to_ne_bytes(), PhantomData) + } + } + + impl<O: ByteOrder> core::ops::$trait_assign<$name<O>> for $name<O> { + #[inline(always)] + fn $method_assign(&mut self, rhs: $name<O>) { + *self = core::ops::$trait::$method(*self, rhs); + } + } + + impl<O: ByteOrder> core::ops::$trait_assign<$name<O>> for $native { + #[inline(always)] + fn $method_assign(&mut self, rhs: $name<O>) { + // (Maybe) runtime cost - byte order swap + let rhs_native = rhs.get(); + // Runtime cost - perform the operation + *self = core::ops::$trait::$method(*self, rhs_native); + } + } + + impl<O: ByteOrder> core::ops::$trait_assign<$native> for $name<O> { + #[inline(always)] + fn $method_assign(&mut self, rhs: $native) { + *self = core::ops::$trait::$method(*self, rhs); + } + } + }; +} + +macro_rules! doc_comment { + ($x:expr, $($tt:tt)*) => { + #[doc = $x] + $($tt)* + }; +} + +macro_rules! define_max_value_constant { + ($name:ident, $bytes:expr, "unsigned integer") => { + /// The maximum value. + /// + /// This constant should be preferred to constructing a new value using + /// `new`, as `new` may perform an endianness swap depending on the + /// endianness `O` and the endianness of the platform. + pub const MAX_VALUE: $name<O> = $name([0xFFu8; $bytes], PhantomData); + }; + // We don't provide maximum and minimum value constants for signed values + // and floats because there's no way to do it generically - it would require + // a different value depending on the value of the `ByteOrder` type + // parameter. Currently, one workaround would be to provide implementations + // for concrete implementations of that trait. In the long term, if we are + // ever able to make the `new` constructor a const fn, we could use that + // instead. 
+ ($name:ident, $bytes:expr, "signed integer") => {}; + ($name:ident, $bytes:expr, "floating point number") => {}; +} + +macro_rules! define_type { + ( + $article:ident, + $description:expr, + $name:ident, + $native:ident, + $bits:expr, + $bytes:expr, + $from_be_fn:path, + $to_be_fn:path, + $from_le_fn:path, + $to_le_fn:path, + $number_kind:tt, + [$($larger_native:ty),*], + [$($larger_native_try:ty),*], + [$($larger_byteorder:ident),*], + [$($larger_byteorder_try:ident),*] + ) => { + doc_comment! { + concat!($description, " stored in a given byte order. + +`", stringify!($name), "` is like the native `", stringify!($native), "` type with +two major differences: First, it has no alignment requirement (its alignment is 1). +Second, the endianness of its memory layout is given by the type parameter `O`, +which can be any type which implements [`ByteOrder`]. In particular, this refers +to [`BigEndian`], [`LittleEndian`], [`NativeEndian`], and [`NetworkEndian`]. + +", stringify!($article), " `", stringify!($name), "` can be constructed using +the [`new`] method, and its contained value can be obtained as a native +`",stringify!($native), "` using the [`get`] method, or updated in place with +the [`set`] method. In all cases, if the endianness `O` is not the same as the +endianness of the current platform, an endianness swap will be performed in +order to uphold the invariants that a) the layout of `", stringify!($name), "` +has endianness `O` and that, b) the layout of `", stringify!($native), "` has +the platform's native endianness. + +`", stringify!($name), "` implements [`FromBytes`], [`IntoBytes`], and [`Unaligned`], +making it useful for parsing and serialization. See the module documentation for an +example of how it can be used for parsing UDP packets. + +[`new`]: crate::byteorder::", stringify!($name), "::new +[`get`]: crate::byteorder::", stringify!($name), "::get +[`set`]: crate::byteorder::", stringify!($name), "::set +[`FromBytes`]: crate::FromBytes +[`IntoBytes`]: crate::IntoBytes +[`Unaligned`]: crate::Unaligned"), + #[derive(Copy, Clone, Eq, PartialEq, Hash)] + #[cfg_attr(any(feature = "derive", test), derive(KnownLayout, Immutable, FromBytes, IntoBytes, Unaligned))] + #[repr(transparent)] + pub struct $name<O>([u8; $bytes], PhantomData<O>); + } + + #[cfg(not(any(feature = "derive", test)))] + impl_known_layout!(O => $name<O>); + + #[allow(unused_unsafe)] // Unused when `feature = "derive"`. + // SAFETY: `$name<O>` is `repr(transparent)`, and so it has the same + // layout as its only non-zero field, which is a `u8` array. `u8` arrays + // are `Immutable`, `TryFromBytes`, `FromZeros`, `FromBytes`, + // `IntoBytes`, and `Unaligned`. + #[allow(clippy::multiple_unsafe_ops_per_block)] + const _: () = unsafe { + impl_or_verify!(O => Immutable for $name<O>); + impl_or_verify!(O => TryFromBytes for $name<O>); + impl_or_verify!(O => FromZeros for $name<O>); + impl_or_verify!(O => FromBytes for $name<O>); + impl_or_verify!(O => IntoBytes for $name<O>); + impl_or_verify!(O => Unaligned for $name<O>); + }; + + impl<O> Default for $name<O> { + #[inline(always)] + fn default() -> $name<O> { + $name::ZERO + } + } + + impl<O> $name<O> { + /// The value zero. + /// + /// This constant should be preferred to constructing a new value + /// using `new`, as `new` may perform an endianness swap depending + /// on the endianness and platform. 
+ pub const ZERO: $name<O> = $name([0u8; $bytes], PhantomData); + + define_max_value_constant!($name, $bytes, $number_kind); + + /// Constructs a new value from bytes which are already in `O` byte + /// order. + #[must_use = "has no side effects"] + #[inline(always)] + pub const fn from_bytes(bytes: [u8; $bytes]) -> $name<O> { + $name(bytes, PhantomData) + } + + /// Extracts the bytes of `self` without swapping the byte order. + /// + /// The returned bytes will be in `O` byte order. + #[must_use = "has no side effects"] + #[inline(always)] + pub const fn to_bytes(self) -> [u8; $bytes] { + self.0 + } + } + + impl<O: ByteOrder> $name<O> { + maybe_const_trait_bounded_fn! { + /// Constructs a new value, possibly performing an endianness + /// swap to guarantee that the returned value has endianness + /// `O`. + #[must_use = "has no side effects"] + #[inline(always)] + pub const fn new(n: $native) -> $name<O> { + let bytes = match O::ORDER { + Order::BigEndian => $to_be_fn(n), + Order::LittleEndian => $to_le_fn(n), + }; + + $name(bytes, PhantomData) + } + } + + maybe_const_trait_bounded_fn! { + /// Returns the value as a primitive type, possibly performing + /// an endianness swap to guarantee that the return value has + /// the endianness of the native platform. + #[must_use = "has no side effects"] + #[inline(always)] + pub const fn get(self) -> $native { + match O::ORDER { + Order::BigEndian => $from_be_fn(self.0), + Order::LittleEndian => $from_le_fn(self.0), + } + } + } + + /// Updates the value in place as a primitive type, possibly + /// performing an endianness swap to guarantee that the stored value + /// has the endianness `O`. + #[inline(always)] + pub fn set(&mut self, n: $native) { + *self = Self::new(n); + } + } + + // The reasoning behind which traits to implement here is to only + // implement traits which won't cause inference issues. Notably, + // comparison traits like PartialEq and PartialOrd tend to cause + // inference issues. 
+ + impl<O: ByteOrder> From<$name<O>> for [u8; $bytes] { + #[inline(always)] + fn from(x: $name<O>) -> [u8; $bytes] { + x.0 + } + } + + impl<O: ByteOrder> From<[u8; $bytes]> for $name<O> { + #[inline(always)] + fn from(bytes: [u8; $bytes]) -> $name<O> { + $name(bytes, PhantomData) + } + } + + impl<O: ByteOrder> From<$name<O>> for $native { + #[inline(always)] + fn from(x: $name<O>) -> $native { + x.get() + } + } + + impl<O: ByteOrder> From<$native> for $name<O> { + #[inline(always)] + fn from(x: $native) -> $name<O> { + $name::new(x) + } + } + + $( + impl<O: ByteOrder> From<$name<O>> for $larger_native { + #[inline(always)] + fn from(x: $name<O>) -> $larger_native { + x.get().into() + } + } + )* + + $( + impl<O: ByteOrder> TryFrom<$larger_native_try> for $name<O> { + type Error = TryFromIntError; + #[inline(always)] + fn try_from(x: $larger_native_try) -> Result<$name<O>, TryFromIntError> { + $native::try_from(x).map($name::new) + } + } + )* + + $( + impl<O: ByteOrder, P: ByteOrder> From<$name<O>> for $larger_byteorder<P> { + #[inline(always)] + fn from(x: $name<O>) -> $larger_byteorder<P> { + $larger_byteorder::new(x.get().into()) + } + } + )* + + $( + impl<O: ByteOrder, P: ByteOrder> TryFrom<$larger_byteorder_try<P>> for $name<O> { + type Error = TryFromIntError; + #[inline(always)] + fn try_from(x: $larger_byteorder_try<P>) -> Result<$name<O>, TryFromIntError> { + x.get().try_into().map($name::new) + } + } + )* + + impl<O> AsRef<[u8; $bytes]> for $name<O> { + #[inline(always)] + fn as_ref(&self) -> &[u8; $bytes] { + &self.0 + } + } + + impl<O> AsMut<[u8; $bytes]> for $name<O> { + #[inline(always)] + fn as_mut(&mut self) -> &mut [u8; $bytes] { + &mut self.0 + } + } + + impl<O> PartialEq<$name<O>> for [u8; $bytes] { + #[inline(always)] + fn eq(&self, other: &$name<O>) -> bool { + self.eq(&other.0) + } + } + + impl<O> PartialEq<[u8; $bytes]> for $name<O> { + #[inline(always)] + fn eq(&self, other: &[u8; $bytes]) -> bool { + self.0.eq(other) + } + } + + impl<O: ByteOrder> PartialEq<$native> for $name<O> { + #[inline(always)] + fn eq(&self, other: &$native) -> bool { + self.get().eq(other) + } + } + + impl_fmt_traits!($name, $native, $number_kind); + impl_ops_traits!($name, $native, $number_kind); + + impl<O: ByteOrder> Debug for $name<O> { + #[inline] + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + // This results in a format like "U16(42)". 
+ f.debug_tuple(stringify!($name)).field(&self.get()).finish() + } + } + }; +} + +define_type!( + A, + "A 16-bit unsigned integer", + U16, + u16, + 16, + 2, + u16::from_be_bytes, + u16::to_be_bytes, + u16::from_le_bytes, + u16::to_le_bytes, + "unsigned integer", + [u32, u64, u128, usize], + [u32, u64, u128, usize], + [U32, U64, U128, Usize], + [U32, U64, U128, Usize] +); +define_type!( + A, + "A 32-bit unsigned integer", + U32, + u32, + 32, + 4, + u32::from_be_bytes, + u32::to_be_bytes, + u32::from_le_bytes, + u32::to_le_bytes, + "unsigned integer", + [u64, u128], + [u64, u128], + [U64, U128], + [U64, U128] +); +define_type!( + A, + "A 64-bit unsigned integer", + U64, + u64, + 64, + 8, + u64::from_be_bytes, + u64::to_be_bytes, + u64::from_le_bytes, + u64::to_le_bytes, + "unsigned integer", + [u128], + [u128], + [U128], + [U128] +); +define_type!( + A, + "A 128-bit unsigned integer", + U128, + u128, + 128, + 16, + u128::from_be_bytes, + u128::to_be_bytes, + u128::from_le_bytes, + u128::to_le_bytes, + "unsigned integer", + [], + [], + [], + [] +); +define_type!( + A, + "A word-sized unsigned integer", + Usize, + usize, + mem::size_of::<usize>() * 8, + mem::size_of::<usize>(), + usize::from_be_bytes, + usize::to_be_bytes, + usize::from_le_bytes, + usize::to_le_bytes, + "unsigned integer", + [], + [], + [], + [] +); +define_type!( + An, + "A 16-bit signed integer", + I16, + i16, + 16, + 2, + i16::from_be_bytes, + i16::to_be_bytes, + i16::from_le_bytes, + i16::to_le_bytes, + "signed integer", + [i32, i64, i128, isize], + [i32, i64, i128, isize], + [I32, I64, I128, Isize], + [I32, I64, I128, Isize] +); +define_type!( + An, + "A 32-bit signed integer", + I32, + i32, + 32, + 4, + i32::from_be_bytes, + i32::to_be_bytes, + i32::from_le_bytes, + i32::to_le_bytes, + "signed integer", + [i64, i128], + [i64, i128], + [I64, I128], + [I64, I128] +); +define_type!( + An, + "A 64-bit signed integer", + I64, + i64, + 64, + 8, + i64::from_be_bytes, + i64::to_be_bytes, + i64::from_le_bytes, + i64::to_le_bytes, + "signed integer", + [i128], + [i128], + [I128], + [I128] +); +define_type!( + An, + "A 128-bit signed integer", + I128, + i128, + 128, + 16, + i128::from_be_bytes, + i128::to_be_bytes, + i128::from_le_bytes, + i128::to_le_bytes, + "signed integer", + [], + [], + [], + [] +); +define_type!( + An, + "A word-sized signed integer", + Isize, + isize, + mem::size_of::<isize>() * 8, + mem::size_of::<isize>(), + isize::from_be_bytes, + isize::to_be_bytes, + isize::from_le_bytes, + isize::to_le_bytes, + "signed integer", + [], + [], + [], + [] +); + +// FIXME(https://github.com/rust-lang/rust/issues/72447): Use the endianness +// conversion methods directly once those are const-stable. +macro_rules! define_float_conversion { + ($ty:ty, $bits:ident, $bytes:expr, $mod:ident) => { + mod $mod { + use super::*; + + define_float_conversion!($ty, $bits, $bytes, from_be_bytes, to_be_bytes); + define_float_conversion!($ty, $bits, $bytes, from_le_bytes, to_le_bytes); + } + }; + ($ty:ty, $bits:ident, $bytes:expr, $from:ident, $to:ident) => { + // Clippy: The suggestion of using `from_bits()` instead doesn't work + // because `from_bits` is not const-stable on our MSRV. + #[allow(clippy::unnecessary_transmutes)] + pub(crate) const fn $from(bytes: [u8; $bytes]) -> $ty { + transmute!($bits::$from(bytes)) + } + + pub(crate) const fn $to(f: $ty) -> [u8; $bytes] { + // Clippy: The suggestion of using `f.to_bits()` instead doesn't + // work because `to_bits` is not const-stable on our MSRV. 
+ #[allow(clippy::unnecessary_transmutes)] + let bits: $bits = transmute!(f); + bits.$to() + } + }; +} + +define_float_conversion!(f32, u32, 4, f32_ext); +define_float_conversion!(f64, u64, 8, f64_ext); + +define_type!( + An, + "A 32-bit floating point number", + F32, + f32, + 32, + 4, + f32_ext::from_be_bytes, + f32_ext::to_be_bytes, + f32_ext::from_le_bytes, + f32_ext::to_le_bytes, + "floating point number", + [f64], + [], + [F64], + [] +); +define_type!( + An, + "A 64-bit floating point number", + F64, + f64, + 64, + 8, + f64_ext::from_be_bytes, + f64_ext::to_be_bytes, + f64_ext::from_le_bytes, + f64_ext::to_le_bytes, + "floating point number", + [], + [], + [], + [] +); + +macro_rules! module { + ($name:ident, $trait:ident, $endianness_str:expr) => { + /// Numeric primitives stored in + #[doc = $endianness_str] + /// byte order. + pub mod $name { + use super::$trait; + + module!(@ty U16, $trait, "16-bit unsigned integer", $endianness_str); + module!(@ty U32, $trait, "32-bit unsigned integer", $endianness_str); + module!(@ty U64, $trait, "64-bit unsigned integer", $endianness_str); + module!(@ty U128, $trait, "128-bit unsigned integer", $endianness_str); + module!(@ty I16, $trait, "16-bit signed integer", $endianness_str); + module!(@ty I32, $trait, "32-bit signed integer", $endianness_str); + module!(@ty I64, $trait, "64-bit signed integer", $endianness_str); + module!(@ty I128, $trait, "128-bit signed integer", $endianness_str); + module!(@ty F32, $trait, "32-bit floating point number", $endianness_str); + module!(@ty F64, $trait, "64-bit floating point number", $endianness_str); + } + }; + (@ty $ty:ident, $trait:ident, $desc_str:expr, $endianness_str:expr) => { + /// A + #[doc = $desc_str] + /// stored in + #[doc = $endianness_str] + /// byte order. + pub type $ty = crate::byteorder::$ty<$trait>; + }; +} + +module!(big_endian, BigEndian, "big-endian"); +module!(little_endian, LittleEndian, "little-endian"); +module!(network_endian, NetworkEndian, "network-endian"); +module!(native_endian, NativeEndian, "native-endian"); + +#[cfg(any(test, kani))] +mod tests { + use super::*; + + #[cfg(not(kani))] + mod compatibility { + pub(super) use rand::{ + distributions::{Distribution, Standard}, + rngs::SmallRng, + Rng, SeedableRng, + }; + + pub(crate) trait Arbitrary {} + + impl<T> Arbitrary for T {} + } + + #[cfg(kani)] + mod compatibility { + pub(crate) use kani::Arbitrary; + + pub(crate) struct SmallRng; + + impl SmallRng { + pub(crate) fn seed_from_u64(_state: u64) -> Self { + Self + } + } + + pub(crate) trait Rng { + fn sample<T, D: Distribution<T>>(&mut self, _distr: D) -> T + where + T: Arbitrary, + { + kani::any() + } + } + + impl Rng for SmallRng {} + + pub(crate) trait Distribution<T> {} + impl<T, U> Distribution<T> for U {} + + pub(crate) struct Standard; + } + + use compatibility::*; + + // A native integer type (u16, i32, etc). 
+ trait Native: Arbitrary + FromBytes + IntoBytes + Immutable + Copy + PartialEq + Debug { + const ZERO: Self; + const MAX_VALUE: Self; + + type Distribution: Distribution<Self>; + const DIST: Self::Distribution; + + fn rand<R: Rng>(rng: &mut R) -> Self { + rng.sample(Self::DIST) + } + + #[cfg_attr(kani, allow(unused))] + fn checked_add(self, rhs: Self) -> Option<Self>; + + #[cfg_attr(kani, allow(unused))] + fn checked_div(self, rhs: Self) -> Option<Self>; + + #[cfg_attr(kani, allow(unused))] + fn checked_mul(self, rhs: Self) -> Option<Self>; + + #[cfg_attr(kani, allow(unused))] + fn checked_rem(self, rhs: Self) -> Option<Self>; + + #[cfg_attr(kani, allow(unused))] + fn checked_sub(self, rhs: Self) -> Option<Self>; + + #[cfg_attr(kani, allow(unused))] + fn checked_shl(self, rhs: Self) -> Option<Self>; + + #[cfg_attr(kani, allow(unused))] + fn checked_shr(self, rhs: Self) -> Option<Self>; + + fn is_nan(self) -> bool; + + /// For `f32` and `f64`, NaN values are not considered equal to + /// themselves. This method is like `assert_eq!`, but it treats NaN + /// values as equal. + fn assert_eq_or_nan(self, other: Self) { + let slf = (!self.is_nan()).then(|| self); + let other = (!other.is_nan()).then(|| other); + assert_eq!(slf, other); + } + } + + trait ByteArray: + FromBytes + IntoBytes + Immutable + Copy + AsRef<[u8]> + AsMut<[u8]> + Debug + Default + Eq + { + /// Invert the order of the bytes in the array. + fn invert(self) -> Self; + } + + trait ByteOrderType: + FromBytes + IntoBytes + Unaligned + Copy + Eq + Debug + Hash + From<Self::Native> + { + type Native: Native; + type ByteArray: ByteArray; + + const ZERO: Self; + + fn new(native: Self::Native) -> Self; + fn get(self) -> Self::Native; + fn set(&mut self, native: Self::Native); + fn from_bytes(bytes: Self::ByteArray) -> Self; + fn into_bytes(self) -> Self::ByteArray; + + /// For `f32` and `f64`, NaN values are not considered equal to + /// themselves. This method is like `assert_eq!`, but it treats NaN + /// values as equal. + fn assert_eq_or_nan(self, other: Self) { + let slf = (!self.get().is_nan()).then(|| self); + let other = (!other.get().is_nan()).then(|| other); + assert_eq!(slf, other); + } + } + + trait ByteOrderTypeUnsigned: ByteOrderType { + const MAX_VALUE: Self; + } + + macro_rules! impl_byte_array { + ($bytes:expr) => { + impl ByteArray for [u8; $bytes] { + fn invert(mut self) -> [u8; $bytes] { + self.reverse(); + self + } + } + }; + } + + impl_byte_array!(2); + impl_byte_array!(4); + impl_byte_array!(8); + impl_byte_array!(16); + + macro_rules! impl_byte_order_type_unsigned { + ($name:ident, unsigned) => { + impl<O: ByteOrder> ByteOrderTypeUnsigned for $name<O> { + const MAX_VALUE: $name<O> = $name::MAX_VALUE; + } + }; + ($name:ident, signed) => {}; + } + + macro_rules! impl_traits { + ($name:ident, $native:ident, $sign:ident $(, @$float:ident)?) => { + impl Native for $native { + // For some types, `0 as $native` is required (for example, when + // `$native` is a floating-point type; `0` is an integer), but + // for other types, it's a trivial cast. In all cases, Clippy + // thinks it's dangerous. 
+ #[allow(trivial_numeric_casts, clippy::as_conversions)] + const ZERO: $native = 0 as $native; + const MAX_VALUE: $native = $native::MAX; + + type Distribution = Standard; + const DIST: Standard = Standard; + + impl_traits!(@float_dependent_methods $(@$float)?); + } + + impl<O: ByteOrder> ByteOrderType for $name<O> { + type Native = $native; + type ByteArray = [u8; mem::size_of::<$native>()]; + + const ZERO: $name<O> = $name::ZERO; + + fn new(native: $native) -> $name<O> { + $name::new(native) + } + + fn get(self) -> $native { + $name::get(self) + } + + fn set(&mut self, native: $native) { + $name::set(self, native) + } + + fn from_bytes(bytes: [u8; mem::size_of::<$native>()]) -> $name<O> { + $name::from(bytes) + } + + fn into_bytes(self) -> [u8; mem::size_of::<$native>()] { + <[u8; mem::size_of::<$native>()]>::from(self) + } + } + + impl_byte_order_type_unsigned!($name, $sign); + }; + (@float_dependent_methods) => { + fn checked_add(self, rhs: Self) -> Option<Self> { self.checked_add(rhs) } + fn checked_div(self, rhs: Self) -> Option<Self> { self.checked_div(rhs) } + fn checked_mul(self, rhs: Self) -> Option<Self> { self.checked_mul(rhs) } + fn checked_rem(self, rhs: Self) -> Option<Self> { self.checked_rem(rhs) } + fn checked_sub(self, rhs: Self) -> Option<Self> { self.checked_sub(rhs) } + fn checked_shl(self, rhs: Self) -> Option<Self> { self.checked_shl(rhs.try_into().unwrap_or(u32::MAX)) } + fn checked_shr(self, rhs: Self) -> Option<Self> { self.checked_shr(rhs.try_into().unwrap_or(u32::MAX)) } + fn is_nan(self) -> bool { false } + }; + (@float_dependent_methods @float) => { + fn checked_add(self, rhs: Self) -> Option<Self> { Some(self + rhs) } + fn checked_div(self, rhs: Self) -> Option<Self> { Some(self / rhs) } + fn checked_mul(self, rhs: Self) -> Option<Self> { Some(self * rhs) } + fn checked_rem(self, rhs: Self) -> Option<Self> { Some(self % rhs) } + fn checked_sub(self, rhs: Self) -> Option<Self> { Some(self - rhs) } + fn checked_shl(self, _rhs: Self) -> Option<Self> { unimplemented!() } + fn checked_shr(self, _rhs: Self) -> Option<Self> { unimplemented!() } + fn is_nan(self) -> bool { self.is_nan() } + }; + } + + impl_traits!(U16, u16, unsigned); + impl_traits!(U32, u32, unsigned); + impl_traits!(U64, u64, unsigned); + impl_traits!(U128, u128, unsigned); + impl_traits!(Usize, usize, unsigned); + impl_traits!(I16, i16, signed); + impl_traits!(I32, i32, signed); + impl_traits!(I64, i64, signed); + impl_traits!(I128, i128, signed); + impl_traits!(Isize, isize, unsigned); + impl_traits!(F32, f32, signed, @float); + impl_traits!(F64, f64, signed, @float); + + macro_rules! call_for_unsigned_types { + ($fn:ident, $byteorder:ident) => { + $fn::<U16<$byteorder>>(); + $fn::<U32<$byteorder>>(); + $fn::<U64<$byteorder>>(); + $fn::<U128<$byteorder>>(); + $fn::<Usize<$byteorder>>(); + }; + } + + macro_rules! call_for_signed_types { + ($fn:ident, $byteorder:ident) => { + $fn::<I16<$byteorder>>(); + $fn::<I32<$byteorder>>(); + $fn::<I64<$byteorder>>(); + $fn::<I128<$byteorder>>(); + $fn::<Isize<$byteorder>>(); + }; + } + + macro_rules! call_for_float_types { + ($fn:ident, $byteorder:ident) => { + $fn::<F32<$byteorder>>(); + $fn::<F64<$byteorder>>(); + }; + } + + macro_rules! 
call_for_all_types { + ($fn:ident, $byteorder:ident) => { + call_for_unsigned_types!($fn, $byteorder); + call_for_signed_types!($fn, $byteorder); + call_for_float_types!($fn, $byteorder); + }; + } + + #[cfg(target_endian = "big")] + type NonNativeEndian = LittleEndian; + #[cfg(target_endian = "little")] + type NonNativeEndian = BigEndian; + + // We use a `u64` seed so that we can use `SeedableRng::seed_from_u64`. + // `SmallRng`'s `SeedableRng::Seed` differs by platform, so if we wanted to + // call `SeedableRng::from_seed`, which takes a `Seed`, we would need + // conditional compilation by `target_pointer_width`. + const RNG_SEED: u64 = 0x7A03CAE2F32B5B8F; + + const RAND_ITERS: usize = if cfg!(any(miri, kani)) { + // The tests below which use this constant used to take a very long time + // on Miri, which slows down local development and CI jobs. We're not + // using Miri to check for the correctness of our code, but rather its + // soundness, and at least in the context of these particular tests, a + // single loop iteration is just as good for surfacing UB as multiple + // iterations are. + // + // As of the writing of this comment, here's one set of measurements: + // + // $ # RAND_ITERS == 1 + // $ cargo miri test -- -Z unstable-options --report-time endian + // test byteorder::tests::test_native_endian ... ok <0.049s> + // test byteorder::tests::test_non_native_endian ... ok <0.061s> + // + // $ # RAND_ITERS == 1024 + // $ cargo miri test -- -Z unstable-options --report-time endian + // test byteorder::tests::test_native_endian ... ok <25.716s> + // test byteorder::tests::test_non_native_endian ... ok <38.127s> + 1 + } else { + 1024 + }; + + #[test] + fn test_const_methods() { + use big_endian::*; + + #[rustversion::since(1.61.0)] + const _U: U16 = U16::new(0); + #[rustversion::since(1.61.0)] + const _NATIVE: u16 = _U.get(); + const _FROM_BYTES: U16 = U16::from_bytes([0, 1]); + const _BYTES: [u8; 2] = _FROM_BYTES.to_bytes(); + } + + #[cfg_attr(test, test)] + #[cfg_attr(kani, kani::proof)] + fn test_zero() { + fn test_zero<T: ByteOrderType>() { + assert_eq!(T::ZERO.get(), T::Native::ZERO); + } + + call_for_all_types!(test_zero, NativeEndian); + call_for_all_types!(test_zero, NonNativeEndian); + } + + #[cfg_attr(test, test)] + #[cfg_attr(kani, kani::proof)] + fn test_max_value() { + fn test_max_value<T: ByteOrderTypeUnsigned>() { + assert_eq!(T::MAX_VALUE.get(), T::Native::MAX_VALUE); + } + + call_for_unsigned_types!(test_max_value, NativeEndian); + call_for_unsigned_types!(test_max_value, NonNativeEndian); + } + + #[cfg_attr(test, test)] + #[cfg_attr(kani, kani::proof)] + fn test_endian() { + fn test<T: ByteOrderType>(invert: bool) { + let mut r = SmallRng::seed_from_u64(RNG_SEED); + for _ in 0..RAND_ITERS { + let native = T::Native::rand(&mut r); + let mut bytes = T::ByteArray::default(); + bytes.as_mut_bytes().copy_from_slice(native.as_bytes()); + if invert { + bytes = bytes.invert(); + } + let mut from_native = T::new(native); + let from_bytes = T::from_bytes(bytes); + + from_native.assert_eq_or_nan(from_bytes); + from_native.get().assert_eq_or_nan(native); + from_bytes.get().assert_eq_or_nan(native); + + assert_eq!(from_native.into_bytes(), bytes); + assert_eq!(from_bytes.into_bytes(), bytes); + + let updated = T::Native::rand(&mut r); + from_native.set(updated); + from_native.get().assert_eq_or_nan(updated); + } + } + + fn test_native<T: ByteOrderType>() { + test::<T>(false); + } + + fn test_non_native<T: ByteOrderType>() { + test::<T>(true); + } + + 
call_for_all_types!(test_native, NativeEndian); + call_for_all_types!(test_non_native, NonNativeEndian); + } + + #[test] + fn test_ops_impls() { + // Test implementations of traits in `core::ops`. Some of these are + // fairly banal, but some are optimized to perform the operation without + // swapping byte order (namely, bit-wise operations which are identical + // regardless of byte order). These are important to test, and while + // we're testing those anyway, it's trivial to test all of the impls. + + fn test<T, FTT, FTN, FNT, FNN, FNNChecked, FATT, FATN, FANT>( + op_t_t: FTT, + op_t_n: FTN, + op_n_t: FNT, + op_n_n: FNN, + op_n_n_checked: Option<FNNChecked>, + op_assign: Option<(FATT, FATN, FANT)>, + ) where + T: ByteOrderType, + FTT: Fn(T, T) -> T, + FTN: Fn(T, T::Native) -> T, + FNT: Fn(T::Native, T) -> T, + FNN: Fn(T::Native, T::Native) -> T::Native, + FNNChecked: Fn(T::Native, T::Native) -> Option<T::Native>, + + FATT: Fn(&mut T, T), + FATN: Fn(&mut T, T::Native), + FANT: Fn(&mut T::Native, T), + { + let mut r = SmallRng::seed_from_u64(RNG_SEED); + for _ in 0..RAND_ITERS { + let n0 = T::Native::rand(&mut r); + let n1 = T::Native::rand(&mut r); + let t0 = T::new(n0); + let t1 = T::new(n1); + + // If this operation would overflow/underflow, skip it rather + // than attempt to catch and recover from panics. + if matches!(&op_n_n_checked, Some(checked) if checked(n0, n1).is_none()) { + continue; + } + + let t_t_res = op_t_t(t0, t1); + let t_n_res = op_t_n(t0, n1); + let n_t_res = op_n_t(n0, t1); + let n_n_res = op_n_n(n0, n1); + + // For `f32` and `f64`, NaN values are not considered equal to + // themselves. We store `Option<f32>`/`Option<f64>` and store + // NaN as `None` so they can still be compared. + let val_or_none = |t: T| (!T::Native::is_nan(t.get())).then(|| t.get()); + let t_t_res = val_or_none(t_t_res); + let t_n_res = val_or_none(t_n_res); + let n_t_res = val_or_none(n_t_res); + let n_n_res = (!T::Native::is_nan(n_n_res)).then(|| n_n_res); + assert_eq!(t_t_res, n_n_res); + assert_eq!(t_n_res, n_n_res); + assert_eq!(n_t_res, n_n_res); + + if let Some((op_assign_t_t, op_assign_t_n, op_assign_n_t)) = &op_assign { + let mut t_t_res = t0; + op_assign_t_t(&mut t_t_res, t1); + let mut t_n_res = t0; + op_assign_t_n(&mut t_n_res, n1); + let mut n_t_res = n0; + op_assign_n_t(&mut n_t_res, t1); + + // For `f32` and `f64`, NaN values are not considered equal to + // themselves. We store `Option<f32>`/`Option<f64>` and store + // NaN as `None` so they can still be compared. + let t_t_res = val_or_none(t_t_res); + let t_n_res = val_or_none(t_n_res); + let n_t_res = (!T::Native::is_nan(n_t_res)).then(|| n_t_res); + assert_eq!(t_t_res, n_n_res); + assert_eq!(t_n_res, n_n_res); + assert_eq!(n_t_res, n_n_res); + } + } + } + + macro_rules! 
test { + ( + @binary + $trait:ident, + $method:ident $([$checked_method:ident])?, + $trait_assign:ident, + $method_assign:ident, + $($call_for_macros:ident),* + ) => {{ + fn t<T>() + where + T: ByteOrderType, + T: core::ops::$trait<T, Output = T>, + T: core::ops::$trait<T::Native, Output = T>, + T::Native: core::ops::$trait<T, Output = T>, + T::Native: core::ops::$trait<T::Native, Output = T::Native>, + + T: core::ops::$trait_assign<T>, + T: core::ops::$trait_assign<T::Native>, + T::Native: core::ops::$trait_assign<T>, + T::Native: core::ops::$trait_assign<T::Native>, + { + test::<T, _, _, _, _, _, _, _, _>( + core::ops::$trait::$method, + core::ops::$trait::$method, + core::ops::$trait::$method, + core::ops::$trait::$method, + { + #[allow(unused_mut, unused_assignments)] + let mut op_native_checked = None::<fn(T::Native, T::Native) -> Option<T::Native>>; + $( + op_native_checked = Some(T::Native::$checked_method); + )? + op_native_checked + }, + Some(( + <T as core::ops::$trait_assign<T>>::$method_assign, + <T as core::ops::$trait_assign::<T::Native>>::$method_assign, + <T::Native as core::ops::$trait_assign::<T>>::$method_assign + )), + ); + } + + $( + $call_for_macros!(t, NativeEndian); + $call_for_macros!(t, NonNativeEndian); + )* + }}; + ( + @unary + $trait:ident, + $method:ident, + $($call_for_macros:ident),* + ) => {{ + fn t<T>() + where + T: ByteOrderType, + T: core::ops::$trait<Output = T>, + T::Native: core::ops::$trait<Output = T::Native>, + { + test::<T, _, _, _, _, _, _, _, _>( + |slf, _rhs| core::ops::$trait::$method(slf), + |slf, _rhs| core::ops::$trait::$method(slf), + |slf, _rhs| core::ops::$trait::$method(slf).into(), + |slf, _rhs| core::ops::$trait::$method(slf), + None::<fn(T::Native, T::Native) -> Option<T::Native>>, + None::<(fn(&mut T, T), fn(&mut T, T::Native), fn(&mut T::Native, T))>, + ); + } + + $( + $call_for_macros!(t, NativeEndian); + $call_for_macros!(t, NonNativeEndian); + )* + }}; + } + + test!(@binary Add, add[checked_add], AddAssign, add_assign, call_for_all_types); + test!(@binary Div, div[checked_div], DivAssign, div_assign, call_for_all_types); + test!(@binary Mul, mul[checked_mul], MulAssign, mul_assign, call_for_all_types); + test!(@binary Rem, rem[checked_rem], RemAssign, rem_assign, call_for_all_types); + test!(@binary Sub, sub[checked_sub], SubAssign, sub_assign, call_for_all_types); + + test!(@binary BitAnd, bitand, BitAndAssign, bitand_assign, call_for_unsigned_types, call_for_signed_types); + test!(@binary BitOr, bitor, BitOrAssign, bitor_assign, call_for_unsigned_types, call_for_signed_types); + test!(@binary BitXor, bitxor, BitXorAssign, bitxor_assign, call_for_unsigned_types, call_for_signed_types); + test!(@binary Shl, shl[checked_shl], ShlAssign, shl_assign, call_for_unsigned_types, call_for_signed_types); + test!(@binary Shr, shr[checked_shr], ShrAssign, shr_assign, call_for_unsigned_types, call_for_signed_types); + + test!(@unary Not, not, call_for_signed_types, call_for_unsigned_types); + test!(@unary Neg, neg, call_for_signed_types, call_for_float_types); + } + + #[test] + fn test_debug_impl() { + // Ensure that Debug applies format options to the inner value. 
+ let val = U16::<LE>::new(10); + assert_eq!(format!("{:?}", val), "U16(10)"); + assert_eq!(format!("{:03?}", val), "U16(010)"); + assert_eq!(format!("{:x?}", val), "U16(a)"); + } +} diff --git a/vendor/zerocopy/src/deprecated.rs b/vendor/zerocopy/src/deprecated.rs new file mode 100644 index 00000000..4c5e4981 --- /dev/null +++ b/vendor/zerocopy/src/deprecated.rs @@ -0,0 +1,211 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under the 2-Clause BSD License <LICENSE-BSD or +// https://opensource.org/license/bsd-2-clause>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +//! Deprecated items. These are kept separate so that they don't clutter up +//! other modules. + +use super::*; + +impl<B, T> Ref<B, T> +where + B: ByteSlice, + T: KnownLayout + Immutable + ?Sized, +{ + #[deprecated(since = "0.8.0", note = "renamed to `Ref::from_bytes`")] + #[doc(hidden)] + #[must_use = "has no side effects"] + #[inline(always)] + pub fn new(bytes: B) -> Option<Ref<B, T>> { + Self::from_bytes(bytes).ok() + } +} + +impl<B, T> Ref<B, T> +where + B: SplitByteSlice, + T: KnownLayout + Immutable + ?Sized, +{ + #[deprecated(since = "0.8.0", note = "renamed to `Ref::from_prefix`")] + #[doc(hidden)] + #[must_use = "has no side effects"] + #[inline(always)] + pub fn new_from_prefix(bytes: B) -> Option<(Ref<B, T>, B)> { + Self::from_prefix(bytes).ok() + } +} + +impl<B, T> Ref<B, T> +where + B: SplitByteSlice, + T: KnownLayout + Immutable + ?Sized, +{ + #[deprecated(since = "0.8.0", note = "renamed to `Ref::from_suffix`")] + #[doc(hidden)] + #[must_use = "has no side effects"] + #[inline(always)] + pub fn new_from_suffix(bytes: B) -> Option<(B, Ref<B, T>)> { + Self::from_suffix(bytes).ok() + } +} + +impl<B, T> Ref<B, T> +where + B: ByteSlice, + T: Unaligned + KnownLayout + Immutable + ?Sized, +{ + #[deprecated( + since = "0.8.0", + note = "use `Ref::from_bytes`; for `T: Unaligned`, the returned `CastError` implements `Into<SizeError>`" + )] + #[doc(hidden)] + #[must_use = "has no side effects"] + #[inline(always)] + pub fn new_unaligned(bytes: B) -> Option<Ref<B, T>> { + Self::from_bytes(bytes).ok() + } +} + +impl<B, T> Ref<B, T> +where + B: SplitByteSlice, + T: Unaligned + KnownLayout + Immutable + ?Sized, +{ + #[deprecated( + since = "0.8.0", + note = "use `Ref::from_prefix`; for `T: Unaligned`, the returned `CastError` implements `Into<SizeError>`" + )] + #[doc(hidden)] + #[must_use = "has no side effects"] + #[inline(always)] + pub fn new_unaligned_from_prefix(bytes: B) -> Option<(Ref<B, T>, B)> { + Self::from_prefix(bytes).ok() + } +} + +impl<B, T> Ref<B, T> +where + B: SplitByteSlice, + T: Unaligned + KnownLayout + Immutable + ?Sized, +{ + #[deprecated( + since = "0.8.0", + note = "use `Ref::from_suffix`; for `T: Unaligned`, the returned `CastError` implements `Into<SizeError>`" + )] + #[doc(hidden)] + #[must_use = "has no side effects"] + #[inline(always)] + pub fn new_unaligned_from_suffix(bytes: B) -> Option<(B, Ref<B, T>)> { + Self::from_suffix(bytes).ok() + } +} + +impl<B, T> Ref<B, [T]> +where + B: ByteSlice, + T: Immutable, +{ + #[deprecated(since = "0.8.0", note = "`Ref::from_bytes` now supports slices")] + #[doc(hidden)] + #[inline(always)] + pub fn new_slice(bytes: B) -> Option<Ref<B, [T]>> { + Self::from_bytes(bytes).ok() + } +} + +impl<B, T> Ref<B, [T]> 
+where + B: ByteSlice, + T: Unaligned + Immutable, +{ + #[deprecated( + since = "0.8.0", + note = "`Ref::from_bytes` now supports slices; for `T: Unaligned`, the returned `CastError` implements `Into<SizeError>`" + )] + #[doc(hidden)] + #[inline(always)] + pub fn new_slice_unaligned(bytes: B) -> Option<Ref<B, [T]>> { + Ref::from_bytes(bytes).ok() + } +} + +impl<'a, B, T> Ref<B, [T]> +where + B: 'a + IntoByteSlice<'a>, + T: FromBytes + Immutable, +{ + #[deprecated(since = "0.8.0", note = "`Ref::into_ref` now supports slices")] + #[doc(hidden)] + #[inline(always)] + pub fn into_slice(self) -> &'a [T] { + Ref::into_ref(self) + } +} + +impl<'a, B, T> Ref<B, [T]> +where + B: 'a + IntoByteSliceMut<'a>, + T: FromBytes + IntoBytes + Immutable, +{ + #[deprecated(since = "0.8.0", note = "`Ref::into_mut` now supports slices")] + #[doc(hidden)] + #[inline(always)] + pub fn into_mut_slice(self) -> &'a mut [T] { + Ref::into_mut(self) + } +} + +impl<B, T> Ref<B, [T]> +where + B: SplitByteSlice, + T: Immutable, +{ + #[deprecated(since = "0.8.0", note = "replaced by `Ref::from_prefix_with_elems`")] + #[must_use = "has no side effects"] + #[doc(hidden)] + #[inline(always)] + pub fn new_slice_from_prefix(bytes: B, count: usize) -> Option<(Ref<B, [T]>, B)> { + Ref::from_prefix_with_elems(bytes, count).ok() + } + + #[deprecated(since = "0.8.0", note = "replaced by `Ref::from_suffix_with_elems`")] + #[must_use = "has no side effects"] + #[doc(hidden)] + #[inline(always)] + pub fn new_slice_from_suffix(bytes: B, count: usize) -> Option<(B, Ref<B, [T]>)> { + Ref::from_suffix_with_elems(bytes, count).ok() + } +} + +impl<B, T> Ref<B, [T]> +where + B: SplitByteSlice, + T: Unaligned + Immutable, +{ + #[deprecated( + since = "0.8.0", + note = "use `Ref::from_prefix_with_elems`; for `T: Unaligned`, the returned `CastError` implements `Into<SizeError>`" + )] + #[doc(hidden)] + #[must_use = "has no side effects"] + #[inline(always)] + pub fn new_slice_unaligned_from_prefix(bytes: B, count: usize) -> Option<(Ref<B, [T]>, B)> { + Ref::from_prefix_with_elems(bytes, count).ok() + } + + #[deprecated( + since = "0.8.0", + note = "use `Ref::from_suffix_with_elems`; for `T: Unaligned`, the returned `CastError` implements `Into<SizeError>`" + )] + #[doc(hidden)] + #[must_use = "has no side effects"] + #[inline(always)] + pub fn new_slice_unaligned_from_suffix(bytes: B, count: usize) -> Option<(B, Ref<B, [T]>)> { + Ref::from_suffix_with_elems(bytes, count).ok() + } +} diff --git a/vendor/zerocopy/src/doctests.rs b/vendor/zerocopy/src/doctests.rs new file mode 100644 index 00000000..938e16a9 --- /dev/null +++ b/vendor/zerocopy/src/doctests.rs @@ -0,0 +1,125 @@ +// Copyright 2025 The Fuchsia Authors +// +// Licensed under the 2-Clause BSD License <LICENSE-BSD or +// https://opensource.org/license/bsd-2-clause>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +#![cfg(feature = "derive")] // Required for derives on `SliceDst` +#![allow(dead_code)] + +//! Our UI test framework, built on the `trybuild` crate, does not support +//! testing for post-monomorphization errors. Instead, we use doctests, which +//! are able to test for post-monomorphization errors. 
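// Editor's note: an illustrative sketch (not part of the vendored file or the
// diff) of the doctest technique described in the module docs above. A
// `compile_fail` doctest attached to an otherwise-unused, hypothetical item
// exercises a post-monomorphization error that `trybuild`-based UI tests
// cannot express; it mirrors the alignment-increase case that follows below:
//
//     /// ```compile_fail,E0080
//     /// // Growing alignment across a reference transmute is only rejected
//     /// // once the macro is monomorphized, so it is checked via a doctest.
//     /// let _: &u16 = zerocopy::transmute_ref!(&[0u8; 2]);
//     /// ```
//     enum EditorAlignmentIncreaseSketch {}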
+ +use crate::*; + +#[derive(KnownLayout, FromBytes, IntoBytes, Immutable)] +#[repr(C)] +#[allow(missing_debug_implementations, missing_copy_implementations)] +pub struct SliceDst<T, U> { + pub t: T, + pub u: [U], +} + +#[allow(clippy::must_use_candidate, clippy::missing_inline_in_public_items, clippy::todo)] +impl<T: FromBytes + IntoBytes, U: FromBytes + IntoBytes> SliceDst<T, U> { + pub fn new() -> &'static SliceDst<T, U> { + todo!() + } + + pub fn new_mut() -> &'static mut SliceDst<T, U> { + todo!() + } +} + +/// We require that the alignment of the destination type is not larger than the +/// alignment of the source type. +/// +/// ```compile_fail,E0080 +/// let increase_alignment: &u16 = zerocopy::transmute_ref!(&[0u8; 2]); +/// ``` +/// +/// ```compile_fail,E0080 +/// let mut src = [0u8; 2]; +/// let increase_alignment: &mut u16 = zerocopy::transmute_mut!(&mut src); +/// ``` +enum TransmuteRefMutAlignmentIncrease {} + +/// We require that the size of the destination type is not larger than the size +/// of the source type. +/// +/// ```compile_fail,E0080 +/// let increase_size: &[u8; 2] = zerocopy::transmute_ref!(&0u8); +/// ``` +/// +/// ```compile_fail,E0080 +/// let mut src = 0u8; +/// let increase_size: &mut [u8; 2] = zerocopy::transmute_mut!(&mut src); +/// ``` +enum TransmuteRefMutSizeIncrease {} + +/// We require that the size of the destination type is not smaller than the +/// size of the source type. +/// +/// ```compile_fail,E0080 +/// let decrease_size: &u8 = zerocopy::transmute_ref!(&[0u8; 2]); +/// ``` +/// +/// ```compile_fail,E0080 +/// let mut src = [0u8; 2]; +/// let decrease_size: &mut u8 = zerocopy::transmute_mut!(&mut src); +/// ``` +enum TransmuteRefMutSizeDecrease {} + +/// It's not possible in the general case to increase the trailing slice offset +/// during a reference transmutation - some pointer metadata values would not be +/// supportable, and so such a transmutation would be fallible. +/// +/// ```compile_fail,E0080 +/// use zerocopy::doctests::SliceDst; +/// let src: &SliceDst<u8, u8> = SliceDst::new(); +/// let increase_offset: &SliceDst<[u8; 2], u8> = zerocopy::transmute_ref!(src); +/// ``` +/// +/// ```compile_fail,E0080 +/// use zerocopy::doctests::SliceDst; +/// let src: &mut SliceDst<u8, u8> = SliceDst::new_mut(); +/// let increase_offset: &mut SliceDst<[u8; 2], u8> = zerocopy::transmute_mut!(src); +/// ``` +enum TransmuteRefMutDstOffsetIncrease {} + +/// Reference transmutes are not possible when the difference between the source +/// and destination types' trailing slice offsets is not a multiple of the +/// destination type's trailing slice element size. +/// +/// ```compile_fail,E0080 +/// use zerocopy::doctests::SliceDst; +/// let src: &SliceDst<[u8; 3], [u8; 2]> = SliceDst::new(); +/// let _: &SliceDst<[u8; 2], [u8; 2]> = zerocopy::transmute_ref!(src); +/// ``` +/// +/// ```compile_fail,E0080 +/// use zerocopy::doctests::SliceDst; +/// let src: &mut SliceDst<[u8; 3], [u8; 2]> = SliceDst::new_mut(); +/// let _: &mut SliceDst<[u8; 2], [u8; 2]> = zerocopy::transmute_mut!(src); +/// ``` +enum TransmuteRefMutDstOffsetNotMultiple {} + +/// Reference transmutes are not possible when the source's trailing slice +/// element size is not a multiple of the destination's. 
+/// +/// ```compile_fail,E0080 +/// use zerocopy::doctests::SliceDst; +/// let src: &SliceDst<(), [u8; 3]> = SliceDst::new(); +/// let _: &SliceDst<(), [u8; 2]> = zerocopy::transmute_ref!(src); +/// ``` +/// +/// ```compile_fail,E0080 +/// use zerocopy::doctests::SliceDst; +/// let src: &mut SliceDst<(), [u8; 3]> = SliceDst::new_mut(); +/// let _: &mut SliceDst<(), [u8; 2]> = zerocopy::transmute_mut!(src); +/// ``` +enum TransmuteRefMutDstElemSizeNotMultiple {} diff --git a/vendor/zerocopy/src/error.rs b/vendor/zerocopy/src/error.rs new file mode 100644 index 00000000..fd977e20 --- /dev/null +++ b/vendor/zerocopy/src/error.rs @@ -0,0 +1,1172 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under the 2-Clause BSD License <LICENSE-BSD or +// https://opensource.org/license/bsd-2-clause>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +//! Types related to error reporting. +//! +//! ## Single failure mode errors +//! +//! Generally speaking, zerocopy's conversions may fail for one of up to three +//! reasons: +//! - [`AlignmentError`]: the conversion source was improperly aligned +//! - [`SizeError`]: the conversion source was of incorrect size +//! - [`ValidityError`]: the conversion source contained invalid data +//! +//! Methods that only have one failure mode, like +//! [`FromBytes::read_from_bytes`], return that mode's corresponding error type +//! directly. +//! +//! ## Compound errors +//! +//! Conversion methods that have either two or three possible failure modes +//! return one of these error types: +//! - [`CastError`]: the error type of reference conversions +//! - [`TryCastError`]: the error type of fallible reference conversions +//! - [`TryReadError`]: the error type of fallible read conversions +//! +//! ## [`Unaligned`] destination types +//! +//! For [`Unaligned`] destination types, alignment errors are impossible. All +//! compound error types support infallibly discarding the alignment error via +//! [`From`] so long as `Dst: Unaligned`. For example, see [`<SizeError as +//! From<ConvertError>>::from`][size-error-from]. +//! +//! [size-error-from]: struct.SizeError.html#method.from-1 +//! +//! ## Accessing the conversion source +//! +//! All error types provide an `into_src` method that converts the error into +//! the source value underlying the failed conversion. +//! +//! ## Display formatting +//! +//! All error types provide a `Display` implementation that produces a +//! human-readable error message. When `debug_assertions` are enabled, these +//! error messages are verbose and may include potentially sensitive +//! information, including: +//! +//! - the names of the involved types +//! - the sizes of the involved types +//! - the addresses of the involved types +//! - the contents of the involved types +//! +//! When `debug_assertions` are disabled (as is default for `release` builds), +//! such potentially sensitive information is excluded. +//! +//! In the future, we may support manually configuring this behavior. If you are +//! interested in this feature, [let us know on GitHub][issue-1457] so we know +//! to prioritize it. +//! +//! [issue-1457]: https://github.com/google/zerocopy/issues/1457 +//! +//! ## Validation order +//! +//! Our conversion methods typically check alignment, then size, then bit +//! validity. 
However, we do not guarantee that this is always the case, and +//! this behavior may change between releases. +//! +//! ## `Send`, `Sync`, and `'static` +//! +//! Our error types are `Send`, `Sync`, and `'static` when their `Src` parameter +//! is `Send`, `Sync`, or `'static`, respectively. This can cause issues when an +//! error is sent or synchronized across threads; e.g.: +//! +//! ```compile_fail,E0515 +//! use zerocopy::*; +//! +//! let result: SizeError<&[u8], u32> = std::thread::spawn(|| { +//! let source = &mut [0u8, 1, 2][..]; +//! // Try (and fail) to read a `u32` from `source`. +//! u32::read_from_bytes(source).unwrap_err() +//! }).join().unwrap(); +//! ``` +//! +//! To work around this, use [`map_src`][CastError::map_src] to convert the +//! source parameter to an unproblematic type; e.g.: +//! +//! ``` +//! use zerocopy::*; +//! +//! let result: SizeError<(), u32> = std::thread::spawn(|| { +//! let source = &mut [0u8, 1, 2][..]; +//! // Try (and fail) to read a `u32` from `source`. +//! u32::read_from_bytes(source).unwrap_err() +//! // Erase the error source. +//! .map_src(drop) +//! }).join().unwrap(); +//! ``` +//! +//! Alternatively, use `.to_string()` to eagerly convert the error into a +//! human-readable message; e.g.: +//! +//! ``` +//! use zerocopy::*; +//! +//! let result: Result<u32, String> = std::thread::spawn(|| { +//! let source = &mut [0u8, 1, 2][..]; +//! // Try (and fail) to read a `u32` from `source`. +//! u32::read_from_bytes(source) +//! // Eagerly render the error message. +//! .map_err(|err| err.to_string()) +//! }).join().unwrap(); +//! ``` +#[cfg(not(no_zerocopy_core_error_1_81_0))] +use core::error::Error; +use core::{ + convert::Infallible, + fmt::{self, Debug, Write}, + ops::Deref, +}; +#[cfg(all(no_zerocopy_core_error_1_81_0, any(feature = "std", test)))] +use std::error::Error; + +use crate::{util::SendSyncPhantomData, KnownLayout, TryFromBytes, Unaligned}; +#[cfg(doc)] +use crate::{FromBytes, Ref}; + +/// Zerocopy's generic error type. +/// +/// Generally speaking, zerocopy's conversions may fail for one of up to three +/// reasons: +/// - [`AlignmentError`]: the conversion source was improperly aligned +/// - [`SizeError`]: the conversion source was of incorrect size +/// - [`ValidityError`]: the conversion source contained invalid data +/// +/// However, not all conversions produce all errors. For instance, +/// [`FromBytes::ref_from_bytes`] may fail due to alignment or size issues, but +/// not validity issues. This generic error type captures these +/// (im)possibilities via parameterization: `A` is parameterized with +/// [`AlignmentError`], `S` is parameterized with [`SizeError`], and `V` is +/// parameterized with [`Infallible`]. +/// +/// Zerocopy never uses this type directly in its API. Rather, we provide three +/// pre-parameterized aliases: +/// - [`CastError`]: the error type of reference conversions +/// - [`TryCastError`]: the error type of fallible reference conversions +/// - [`TryReadError`]: the error type of fallible read conversions +#[derive(PartialEq, Eq, Clone)] +pub enum ConvertError<A, S, V> { + /// The conversion source was improperly aligned. + Alignment(A), + /// The conversion source was of incorrect size. + Size(S), + /// The conversion source contained invalid data. 
+ Validity(V), +} + +impl<Src, Dst: ?Sized + Unaligned, S, V> From<ConvertError<AlignmentError<Src, Dst>, S, V>> + for ConvertError<Infallible, S, V> +{ + /// Infallibly discards the alignment error from this `ConvertError` since + /// `Dst` is unaligned. + /// + /// Since [`Dst: Unaligned`], it is impossible to encounter an alignment + /// error. This method permits discarding that alignment error infallibly + /// and replacing it with [`Infallible`]. + /// + /// [`Dst: Unaligned`]: crate::Unaligned + /// + /// # Examples + /// + /// ``` + /// use core::convert::Infallible; + /// use zerocopy::*; + /// # use zerocopy_derive::*; + /// + /// #[derive(TryFromBytes, KnownLayout, Unaligned, Immutable)] + /// #[repr(C, packed)] + /// struct Bools { + /// one: bool, + /// two: bool, + /// many: [bool], + /// } + /// + /// impl Bools { + /// fn parse(bytes: &[u8]) -> Result<&Bools, AlignedTryCastError<&[u8], Bools>> { + /// // Since `Bools: Unaligned`, we can infallibly discard + /// // the alignment error. + /// Bools::try_ref_from_bytes(bytes).map_err(Into::into) + /// } + /// } + /// ``` + #[inline] + fn from(err: ConvertError<AlignmentError<Src, Dst>, S, V>) -> ConvertError<Infallible, S, V> { + match err { + ConvertError::Alignment(e) => { + #[allow(unreachable_code)] + return ConvertError::Alignment(Infallible::from(e)); + } + ConvertError::Size(e) => ConvertError::Size(e), + ConvertError::Validity(e) => ConvertError::Validity(e), + } + } +} + +impl<A: fmt::Debug, S: fmt::Debug, V: fmt::Debug> fmt::Debug for ConvertError<A, S, V> { + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Alignment(e) => f.debug_tuple("Alignment").field(e).finish(), + Self::Size(e) => f.debug_tuple("Size").field(e).finish(), + Self::Validity(e) => f.debug_tuple("Validity").field(e).finish(), + } + } +} + +/// Produces a human-readable error message. +/// +/// The message differs between debug and release builds. When +/// `debug_assertions` are enabled, this message is verbose and includes +/// potentially sensitive information. +impl<A: fmt::Display, S: fmt::Display, V: fmt::Display> fmt::Display for ConvertError<A, S, V> { + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Alignment(e) => e.fmt(f), + Self::Size(e) => e.fmt(f), + Self::Validity(e) => e.fmt(f), + } + } +} + +#[cfg(any(not(no_zerocopy_core_error_1_81_0), feature = "std", test))] +#[cfg_attr(doc_cfg, doc(cfg(all(rust = "1.81.0", feature = "std"))))] +impl<A, S, V> Error for ConvertError<A, S, V> +where + A: fmt::Display + fmt::Debug, + S: fmt::Display + fmt::Debug, + V: fmt::Display + fmt::Debug, +{ +} + +/// The error emitted if the conversion source is improperly aligned. +pub struct AlignmentError<Src, Dst: ?Sized> { + /// The source value involved in the conversion. + src: Src, + /// The inner destination type involved in the conversion. + /// + /// INVARIANT: An `AlignmentError` may only be constructed if `Dst`'s + /// alignment requirement is greater than one. + _dst: SendSyncPhantomData<Dst>, +} + +impl<Src, Dst: ?Sized> AlignmentError<Src, Dst> { + /// # Safety + /// + /// The caller must ensure that `Dst`'s alignment requirement is greater + /// than one. + pub(crate) unsafe fn new_unchecked(src: Src) -> Self { + // INVARIANT: The caller guarantees that `Dst`'s alignment requirement + // is greater than one. + Self { src, _dst: SendSyncPhantomData::default() } + } + + /// Produces the source underlying the failed conversion. 
+ #[inline] + pub fn into_src(self) -> Src { + self.src + } + + pub(crate) fn with_src<NewSrc>(self, new_src: NewSrc) -> AlignmentError<NewSrc, Dst> { + // INVARIANT: `with_src` doesn't change the type of `Dst`, so the + // invariant that `Dst`'s alignment requirement is greater than one is + // preserved. + AlignmentError { src: new_src, _dst: SendSyncPhantomData::default() } + } + + /// Maps the source value associated with the conversion error. + /// + /// This can help mitigate [issues with `Send`, `Sync` and `'static` + /// bounds][self#send-sync-and-static]. + /// + /// # Examples + /// + /// ``` + /// use zerocopy::*; + /// + /// let unaligned = Unalign::new(0u16); + /// + /// // Attempt to deref `unaligned`. This might fail with an alignment error. + /// let maybe_n: Result<&u16, AlignmentError<&Unalign<u16>, u16>> = unaligned.try_deref(); + /// + /// // Map the error's source to its address as a usize. + /// let maybe_n: Result<&u16, AlignmentError<usize, u16>> = maybe_n.map_err(|err| { + /// err.map_src(|src| src as *const _ as usize) + /// }); + /// ``` + #[inline] + pub fn map_src<NewSrc>(self, f: impl FnOnce(Src) -> NewSrc) -> AlignmentError<NewSrc, Dst> { + AlignmentError { src: f(self.src), _dst: SendSyncPhantomData::default() } + } + + pub(crate) fn into<S, V>(self) -> ConvertError<Self, S, V> { + ConvertError::Alignment(self) + } + + /// Format extra details for a verbose, human-readable error message. + /// + /// This formatting may include potentially sensitive information. + fn display_verbose_extras(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result + where + Src: Deref, + Dst: KnownLayout, + { + #[allow(clippy::as_conversions)] + let addr = self.src.deref() as *const _ as *const (); + let addr_align = 2usize.pow((crate::util::AsAddress::addr(addr)).trailing_zeros()); + + f.write_str("\n\nSource type: ")?; + f.write_str(core::any::type_name::<Src>())?; + + f.write_str("\nSource address: ")?; + addr.fmt(f)?; + f.write_str(" (a multiple of ")?; + addr_align.fmt(f)?; + f.write_str(")")?; + + f.write_str("\nDestination type: ")?; + f.write_str(core::any::type_name::<Dst>())?; + + f.write_str("\nDestination alignment: ")?; + <Dst as KnownLayout>::LAYOUT.align.get().fmt(f)?; + + Ok(()) + } +} + +impl<Src: Clone, Dst: ?Sized> Clone for AlignmentError<Src, Dst> { + #[inline] + fn clone(&self) -> Self { + Self { src: self.src.clone(), _dst: SendSyncPhantomData::default() } + } +} + +impl<Src: PartialEq, Dst: ?Sized> PartialEq for AlignmentError<Src, Dst> { + #[inline] + fn eq(&self, other: &Self) -> bool { + self.src == other.src + } +} + +impl<Src: Eq, Dst: ?Sized> Eq for AlignmentError<Src, Dst> {} + +impl<Src, Dst: ?Sized + Unaligned> From<AlignmentError<Src, Dst>> for Infallible { + #[inline(always)] + fn from(_: AlignmentError<Src, Dst>) -> Infallible { + // SAFETY: `AlignmentError`s can only be constructed when `Dst`'s + // alignment requirement is greater than one. In this block, `Dst: + // Unaligned`, which means that its alignment requirement is equal to + // one. Thus, it's not possible to reach here at runtime. + unsafe { core::hint::unreachable_unchecked() } + } +} + +#[cfg(test)] +impl<Src, Dst> AlignmentError<Src, Dst> { + // A convenience constructor so that test code doesn't need to write + // `unsafe`. + fn new_checked(src: Src) -> AlignmentError<Src, Dst> { + assert_ne!(core::mem::align_of::<Dst>(), 1); + // SAFETY: The preceding assertion guarantees that `Dst`'s alignment + // requirement is greater than one. 
+ unsafe { AlignmentError::new_unchecked(src) } + } +} + +impl<Src, Dst: ?Sized> fmt::Debug for AlignmentError<Src, Dst> { + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("AlignmentError").finish() + } +} + +/// Produces a human-readable error message. +/// +/// The message differs between debug and release builds. When +/// `debug_assertions` are enabled, this message is verbose and includes +/// potentially sensitive information. +impl<Src, Dst: ?Sized> fmt::Display for AlignmentError<Src, Dst> +where + Src: Deref, + Dst: KnownLayout, +{ + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str("The conversion failed because the address of the source is not a multiple of the alignment of the destination type.")?; + + if cfg!(debug_assertions) { + self.display_verbose_extras(f) + } else { + Ok(()) + } + } +} + +#[cfg(any(not(no_zerocopy_core_error_1_81_0), feature = "std", test))] +#[cfg_attr(doc_cfg, doc(cfg(all(rust = "1.81.0", feature = "std"))))] +impl<Src, Dst: ?Sized> Error for AlignmentError<Src, Dst> +where + Src: Deref, + Dst: KnownLayout, +{ +} + +impl<Src, Dst: ?Sized, S, V> From<AlignmentError<Src, Dst>> + for ConvertError<AlignmentError<Src, Dst>, S, V> +{ + #[inline(always)] + fn from(err: AlignmentError<Src, Dst>) -> Self { + Self::Alignment(err) + } +} + +/// The error emitted if the conversion source is of incorrect size. +pub struct SizeError<Src, Dst: ?Sized> { + /// The source value involved in the conversion. + src: Src, + /// The inner destination type involved in the conversion. + _dst: SendSyncPhantomData<Dst>, +} + +impl<Src, Dst: ?Sized> SizeError<Src, Dst> { + pub(crate) fn new(src: Src) -> Self { + Self { src, _dst: SendSyncPhantomData::default() } + } + + /// Produces the source underlying the failed conversion. + #[inline] + pub fn into_src(self) -> Src { + self.src + } + + /// Sets the source value associated with the conversion error. + pub(crate) fn with_src<NewSrc>(self, new_src: NewSrc) -> SizeError<NewSrc, Dst> { + SizeError { src: new_src, _dst: SendSyncPhantomData::default() } + } + + /// Maps the source value associated with the conversion error. + /// + /// This can help mitigate [issues with `Send`, `Sync` and `'static` + /// bounds][self#send-sync-and-static]. + /// + /// # Examples + /// + /// ``` + /// use zerocopy::*; + /// + /// let source: [u8; 3] = [0, 1, 2]; + /// + /// // Try to read a `u32` from `source`. This will fail because there are insufficient + /// // bytes in `source`. + /// let maybe_u32: Result<u32, SizeError<&[u8], u32>> = u32::read_from_bytes(&source[..]); + /// + /// // Map the error's source to its size. + /// let maybe_u32: Result<u32, SizeError<usize, u32>> = maybe_u32.map_err(|err| { + /// err.map_src(|src| src.len()) + /// }); + /// ``` + #[inline] + pub fn map_src<NewSrc>(self, f: impl FnOnce(Src) -> NewSrc) -> SizeError<NewSrc, Dst> { + SizeError { src: f(self.src), _dst: SendSyncPhantomData::default() } + } + + /// Sets the destination type associated with the conversion error. + pub(crate) fn with_dst<NewDst: ?Sized>(self) -> SizeError<Src, NewDst> { + SizeError { src: self.src, _dst: SendSyncPhantomData::default() } + } + + /// Converts the error into a general [`ConvertError`]. + pub(crate) fn into<A, V>(self) -> ConvertError<A, Self, V> { + ConvertError::Size(self) + } + + /// Format extra details for a verbose, human-readable error message. + /// + /// This formatting may include potentially sensitive information. 
+ fn display_verbose_extras(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result + where + Src: Deref, + Dst: KnownLayout, + { + // include the source type + f.write_str("\nSource type: ")?; + f.write_str(core::any::type_name::<Src>())?; + + // include the source.deref() size + let src_size = core::mem::size_of_val(&*self.src); + f.write_str("\nSource size: ")?; + src_size.fmt(f)?; + f.write_str(" byte")?; + if src_size != 1 { + f.write_char('s')?; + } + + // if `Dst` is `Sized`, include the `Dst` size + if let crate::SizeInfo::Sized { size } = Dst::LAYOUT.size_info { + f.write_str("\nDestination size: ")?; + size.fmt(f)?; + f.write_str(" byte")?; + if size != 1 { + f.write_char('s')?; + } + } + + // include the destination type + f.write_str("\nDestination type: ")?; + f.write_str(core::any::type_name::<Dst>())?; + + Ok(()) + } +} + +impl<Src: Clone, Dst: ?Sized> Clone for SizeError<Src, Dst> { + #[inline] + fn clone(&self) -> Self { + Self { src: self.src.clone(), _dst: SendSyncPhantomData::default() } + } +} + +impl<Src: PartialEq, Dst: ?Sized> PartialEq for SizeError<Src, Dst> { + #[inline] + fn eq(&self, other: &Self) -> bool { + self.src == other.src + } +} + +impl<Src: Eq, Dst: ?Sized> Eq for SizeError<Src, Dst> {} + +impl<Src, Dst: ?Sized> fmt::Debug for SizeError<Src, Dst> { + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("SizeError").finish() + } +} + +/// Produces a human-readable error message. +/// +/// The message differs between debug and release builds. When +/// `debug_assertions` are enabled, this message is verbose and includes +/// potentially sensitive information. +impl<Src, Dst: ?Sized> fmt::Display for SizeError<Src, Dst> +where + Src: Deref, + Dst: KnownLayout, +{ + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str("The conversion failed because the source was incorrectly sized to complete the conversion into the destination type.")?; + if cfg!(debug_assertions) { + f.write_str("\n")?; + self.display_verbose_extras(f)?; + } + Ok(()) + } +} + +#[cfg(any(not(no_zerocopy_core_error_1_81_0), feature = "std", test))] +#[cfg_attr(doc_cfg, doc(cfg(all(rust = "1.81.0", feature = "std"))))] +impl<Src, Dst: ?Sized> Error for SizeError<Src, Dst> +where + Src: Deref, + Dst: KnownLayout, +{ +} + +impl<Src, Dst: ?Sized, A, V> From<SizeError<Src, Dst>> for ConvertError<A, SizeError<Src, Dst>, V> { + #[inline(always)] + fn from(err: SizeError<Src, Dst>) -> Self { + Self::Size(err) + } +} + +/// The error emitted if the conversion source contains invalid data. +pub struct ValidityError<Src, Dst: ?Sized + TryFromBytes> { + /// The source value involved in the conversion. + pub(crate) src: Src, + /// The inner destination type involved in the conversion. + _dst: SendSyncPhantomData<Dst>, +} + +impl<Src, Dst: ?Sized + TryFromBytes> ValidityError<Src, Dst> { + pub(crate) fn new(src: Src) -> Self { + Self { src, _dst: SendSyncPhantomData::default() } + } + + /// Produces the source underlying the failed conversion. + #[inline] + pub fn into_src(self) -> Src { + self.src + } + + /// Maps the source value associated with the conversion error. + /// + /// This can help mitigate [issues with `Send`, `Sync` and `'static` + /// bounds][self#send-sync-and-static]. + /// + /// # Examples + /// + /// ``` + /// use zerocopy::*; + /// + /// let source: u8 = 42; + /// + /// // Try to transmute the `source` to a `bool`. This will fail. 
+ /// let maybe_bool: Result<bool, ValidityError<u8, bool>> = try_transmute!(source); + /// + /// // Drop the error's source. + /// let maybe_bool: Result<bool, ValidityError<(), bool>> = maybe_bool.map_err(|err| { + /// err.map_src(drop) + /// }); + /// ``` + #[inline] + pub fn map_src<NewSrc>(self, f: impl FnOnce(Src) -> NewSrc) -> ValidityError<NewSrc, Dst> { + ValidityError { src: f(self.src), _dst: SendSyncPhantomData::default() } + } + + /// Converts the error into a general [`ConvertError`]. + pub(crate) fn into<A, S>(self) -> ConvertError<A, S, Self> { + ConvertError::Validity(self) + } + + /// Format extra details for a verbose, human-readable error message. + /// + /// This formatting may include potentially sensitive information. + fn display_verbose_extras(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result + where + Dst: KnownLayout, + { + f.write_str("Destination type: ")?; + f.write_str(core::any::type_name::<Dst>())?; + Ok(()) + } +} + +impl<Src: Clone, Dst: ?Sized + TryFromBytes> Clone for ValidityError<Src, Dst> { + #[inline] + fn clone(&self) -> Self { + Self { src: self.src.clone(), _dst: SendSyncPhantomData::default() } + } +} + +impl<Src: PartialEq, Dst: ?Sized + TryFromBytes> PartialEq for ValidityError<Src, Dst> { + #[inline] + fn eq(&self, other: &Self) -> bool { + self.src == other.src + } +} + +impl<Src: Eq, Dst: ?Sized + TryFromBytes> Eq for ValidityError<Src, Dst> {} + +impl<Src, Dst: ?Sized + TryFromBytes> fmt::Debug for ValidityError<Src, Dst> { + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("ValidityError").finish() + } +} + +/// Produces a human-readable error message. +/// +/// The message differs between debug and release builds. When +/// `debug_assertions` are enabled, this message is verbose and includes +/// potentially sensitive information. +impl<Src, Dst: ?Sized> fmt::Display for ValidityError<Src, Dst> +where + Dst: KnownLayout + TryFromBytes, +{ + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str("The conversion failed because the source bytes are not a valid value of the destination type.")?; + if cfg!(debug_assertions) { + f.write_str("\n\n")?; + self.display_verbose_extras(f)?; + } + Ok(()) + } +} + +#[cfg(any(not(no_zerocopy_core_error_1_81_0), feature = "std", test))] +#[cfg_attr(doc_cfg, doc(cfg(all(rust = "1.81.0", feature = "std"))))] +impl<Src, Dst: ?Sized> Error for ValidityError<Src, Dst> where Dst: KnownLayout + TryFromBytes {} + +impl<Src, Dst: ?Sized + TryFromBytes, A, S> From<ValidityError<Src, Dst>> + for ConvertError<A, S, ValidityError<Src, Dst>> +{ + #[inline(always)] + fn from(err: ValidityError<Src, Dst>) -> Self { + Self::Validity(err) + } +} + +/// The error type of reference conversions. +/// +/// Reference conversions, like [`FromBytes::ref_from_bytes`] may emit +/// [alignment](AlignmentError) and [size](SizeError) errors. +// Bounds on generic parameters are not enforced in type aliases, but they do +// appear in rustdoc. +#[allow(type_alias_bounds)] +pub type CastError<Src, Dst: ?Sized> = + ConvertError<AlignmentError<Src, Dst>, SizeError<Src, Dst>, Infallible>; + +impl<Src, Dst: ?Sized> CastError<Src, Dst> { + /// Produces the source underlying the failed conversion. + #[inline] + pub fn into_src(self) -> Src { + match self { + Self::Alignment(e) => e.src, + Self::Size(e) => e.src, + Self::Validity(i) => match i {}, + } + } + + /// Sets the source value associated with the conversion error. 
+ pub(crate) fn with_src<NewSrc>(self, new_src: NewSrc) -> CastError<NewSrc, Dst> { + match self { + Self::Alignment(e) => CastError::Alignment(e.with_src(new_src)), + Self::Size(e) => CastError::Size(e.with_src(new_src)), + Self::Validity(i) => match i {}, + } + } + + /// Maps the source value associated with the conversion error. + /// + /// This can help mitigate [issues with `Send`, `Sync` and `'static` + /// bounds][self#send-sync-and-static]. + /// + /// # Examples + /// + /// ``` + /// use zerocopy::*; + /// + /// let source: [u8; 3] = [0, 1, 2]; + /// + /// // Try to read a `u32` from `source`. This will fail because there are insufficient + /// // bytes in `source`. + /// let maybe_u32: Result<&u32, CastError<&[u8], u32>> = u32::ref_from_bytes(&source[..]); + /// + /// // Map the error's source to its size and address. + /// let maybe_u32: Result<&u32, CastError<(usize, usize), u32>> = maybe_u32.map_err(|err| { + /// err.map_src(|src| (src.len(), src.as_ptr() as usize)) + /// }); + /// ``` + #[inline] + pub fn map_src<NewSrc>(self, f: impl FnOnce(Src) -> NewSrc) -> CastError<NewSrc, Dst> { + match self { + Self::Alignment(e) => CastError::Alignment(e.map_src(f)), + Self::Size(e) => CastError::Size(e.map_src(f)), + Self::Validity(i) => match i {}, + } + } + + /// Converts the error into a general [`ConvertError`]. + pub(crate) fn into(self) -> TryCastError<Src, Dst> + where + Dst: TryFromBytes, + { + match self { + Self::Alignment(e) => TryCastError::Alignment(e), + Self::Size(e) => TryCastError::Size(e), + Self::Validity(i) => match i {}, + } + } +} + +impl<Src, Dst: ?Sized + Unaligned> From<CastError<Src, Dst>> for SizeError<Src, Dst> { + /// Infallibly extracts the [`SizeError`] from this `CastError` since `Dst` + /// is unaligned. + /// + /// Since [`Dst: Unaligned`], it is impossible to encounter an alignment + /// error, and so the only error that can be encountered at runtime is a + /// [`SizeError`]. This method permits extracting that `SizeError` + /// infallibly. + /// + /// [`Dst: Unaligned`]: crate::Unaligned + /// + /// # Examples + /// + /// ```rust + /// use zerocopy::*; + /// # use zerocopy_derive::*; + /// + /// #[derive(FromBytes, IntoBytes, KnownLayout, Immutable, Unaligned)] + /// #[repr(C)] + /// struct UdpHeader { + /// src_port: [u8; 2], + /// dst_port: [u8; 2], + /// length: [u8; 2], + /// checksum: [u8; 2], + /// } + /// + /// #[derive(FromBytes, IntoBytes, KnownLayout, Immutable, Unaligned)] + /// #[repr(C, packed)] + /// struct UdpPacket { + /// header: UdpHeader, + /// body: [u8], + /// } + /// + /// impl UdpPacket { + /// pub fn parse(bytes: &[u8]) -> Result<&UdpPacket, SizeError<&[u8], UdpPacket>> { + /// // Since `UdpPacket: Unaligned`, we can map the `CastError` to a `SizeError`. + /// UdpPacket::ref_from_bytes(bytes).map_err(Into::into) + /// } + /// } + /// ``` + #[inline(always)] + fn from(err: CastError<Src, Dst>) -> SizeError<Src, Dst> { + match err { + #[allow(unreachable_code)] + CastError::Alignment(e) => match Infallible::from(e) {}, + CastError::Size(e) => e, + CastError::Validity(i) => match i {}, + } + } +} + +/// The error type of fallible reference conversions. +/// +/// Fallible reference conversions, like [`TryFromBytes::try_ref_from_bytes`] +/// may emit [alignment](AlignmentError), [size](SizeError), and +/// [validity](ValidityError) errors. +// Bounds on generic parameters are not enforced in type aliases, but they do +// appear in rustdoc. 
+#[allow(type_alias_bounds)] +pub type TryCastError<Src, Dst: ?Sized + TryFromBytes> = + ConvertError<AlignmentError<Src, Dst>, SizeError<Src, Dst>, ValidityError<Src, Dst>>; + +// FIXME(#1139): Remove the `TryFromBytes` here and in other downstream +// locations (all the way to `ValidityError`) if we determine it's not necessary +// for rich validity errors. +impl<Src, Dst: ?Sized + TryFromBytes> TryCastError<Src, Dst> { + /// Produces the source underlying the failed conversion. + #[inline] + pub fn into_src(self) -> Src { + match self { + Self::Alignment(e) => e.src, + Self::Size(e) => e.src, + Self::Validity(e) => e.src, + } + } + + /// Maps the source value associated with the conversion error. + /// + /// This can help mitigate [issues with `Send`, `Sync` and `'static` + /// bounds][self#send-sync-and-static]. + /// + /// # Examples + /// + /// ``` + /// use core::num::NonZeroU32; + /// use zerocopy::*; + /// + /// let source: [u8; 3] = [0, 0, 0]; + /// + /// // Try to read a `NonZeroU32` from `source`. + /// let maybe_u32: Result<&NonZeroU32, TryCastError<&[u8], NonZeroU32>> + /// = NonZeroU32::try_ref_from_bytes(&source[..]); + /// + /// // Map the error's source to its size and address. + /// let maybe_u32: Result<&NonZeroU32, TryCastError<(usize, usize), NonZeroU32>> = + /// maybe_u32.map_err(|err| { + /// err.map_src(|src| (src.len(), src.as_ptr() as usize)) + /// }); + /// ``` + #[inline] + pub fn map_src<NewSrc>(self, f: impl FnOnce(Src) -> NewSrc) -> TryCastError<NewSrc, Dst> { + match self { + Self::Alignment(e) => TryCastError::Alignment(e.map_src(f)), + Self::Size(e) => TryCastError::Size(e.map_src(f)), + Self::Validity(e) => TryCastError::Validity(e.map_src(f)), + } + } +} + +impl<Src, Dst: ?Sized + TryFromBytes> From<CastError<Src, Dst>> for TryCastError<Src, Dst> { + #[inline] + fn from(value: CastError<Src, Dst>) -> Self { + match value { + CastError::Alignment(e) => Self::Alignment(e), + CastError::Size(e) => Self::Size(e), + CastError::Validity(i) => match i {}, + } + } +} + +/// The error type of fallible read-conversions. +/// +/// Fallible read-conversions, like [`TryFromBytes::try_read_from_bytes`] may +/// emit [size](SizeError) and [validity](ValidityError) errors, but not +/// alignment errors. +// Bounds on generic parameters are not enforced in type aliases, but they do +// appear in rustdoc. +#[allow(type_alias_bounds)] +pub type TryReadError<Src, Dst: ?Sized + TryFromBytes> = + ConvertError<Infallible, SizeError<Src, Dst>, ValidityError<Src, Dst>>; + +impl<Src, Dst: ?Sized + TryFromBytes> TryReadError<Src, Dst> { + /// Produces the source underlying the failed conversion. + #[inline] + pub fn into_src(self) -> Src { + match self { + Self::Alignment(i) => match i {}, + Self::Size(e) => e.src, + Self::Validity(e) => e.src, + } + } + + /// Maps the source value associated with the conversion error. + /// + /// This can help mitigate [issues with `Send`, `Sync` and `'static` + /// bounds][self#send-sync-and-static]. + /// + /// # Examples + /// + /// ``` + /// use core::num::NonZeroU32; + /// use zerocopy::*; + /// + /// let source: [u8; 3] = [0, 0, 0]; + /// + /// // Try to read a `NonZeroU32` from `source`. + /// let maybe_u32: Result<NonZeroU32, TryReadError<&[u8], NonZeroU32>> + /// = NonZeroU32::try_read_from_bytes(&source[..]); + /// + /// // Map the error's source to its size. 
+ /// let maybe_u32: Result<NonZeroU32, TryReadError<usize, NonZeroU32>> = + /// maybe_u32.map_err(|err| { + /// err.map_src(|src| src.len()) + /// }); + /// ``` + #[inline] + pub fn map_src<NewSrc>(self, f: impl FnOnce(Src) -> NewSrc) -> TryReadError<NewSrc, Dst> { + match self { + Self::Alignment(i) => match i {}, + Self::Size(e) => TryReadError::Size(e.map_src(f)), + Self::Validity(e) => TryReadError::Validity(e.map_src(f)), + } + } +} + +/// The error type of well-aligned, fallible casts. +/// +/// This is like [`TryCastError`], but for casts that are always well-aligned. +/// It is identical to `TryCastError`, except that its alignment error is +/// [`Infallible`]. +/// +/// As of this writing, none of zerocopy's API produces this error directly. +/// However, it is useful since it permits users to infallibly discard alignment +/// errors when they can prove statically that alignment errors are impossible. +/// +/// # Examples +/// +/// ``` +/// use core::convert::Infallible; +/// use zerocopy::*; +/// # use zerocopy_derive::*; +/// +/// #[derive(TryFromBytes, KnownLayout, Unaligned, Immutable)] +/// #[repr(C, packed)] +/// struct Bools { +/// one: bool, +/// two: bool, +/// many: [bool], +/// } +/// +/// impl Bools { +/// fn parse(bytes: &[u8]) -> Result<&Bools, AlignedTryCastError<&[u8], Bools>> { +/// // Since `Bools: Unaligned`, we can infallibly discard +/// // the alignment error. +/// Bools::try_ref_from_bytes(bytes).map_err(Into::into) +/// } +/// } +/// ``` +#[allow(type_alias_bounds)] +pub type AlignedTryCastError<Src, Dst: ?Sized + TryFromBytes> = + ConvertError<Infallible, SizeError<Src, Dst>, ValidityError<Src, Dst>>; + +/// The error type of a failed allocation. +/// +/// This type is intended to be deprecated in favor of the standard library's +/// [`AllocError`] type once it is stabilized. When that happens, this type will +/// be replaced by a type alias to the standard library type. We do not intend +/// to treat this as a breaking change; users who wish to avoid breakage should +/// avoid writing code which assumes that this is *not* such an alias. For +/// example, implementing the same trait for both types will result in an impl +/// conflict once this type is an alias. +/// +/// [`AllocError`]: https://doc.rust-lang.org/alloc/alloc/struct.AllocError.html +#[derive(Copy, Clone, PartialEq, Eq, Debug)] +pub struct AllocError; + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_send_sync() { + // Test that all error types are `Send + Sync` even if `Dst: !Send + + // !Sync`. + + #[allow(dead_code)] + fn is_send_sync<T: Send + Sync>(_t: T) {} + + #[allow(dead_code)] + fn alignment_err_is_send_sync<Src: Send + Sync, Dst>(err: AlignmentError<Src, Dst>) { + is_send_sync(err) + } + + #[allow(dead_code)] + fn size_err_is_send_sync<Src: Send + Sync, Dst>(err: SizeError<Src, Dst>) { + is_send_sync(err) + } + + #[allow(dead_code)] + fn validity_err_is_send_sync<Src: Send + Sync, Dst: TryFromBytes>( + err: ValidityError<Src, Dst>, + ) { + is_send_sync(err) + } + + #[allow(dead_code)] + fn convert_error_is_send_sync<Src: Send + Sync, Dst: TryFromBytes>( + err: ConvertError< + AlignmentError<Src, Dst>, + SizeError<Src, Dst>, + ValidityError<Src, Dst>, + >, + ) { + is_send_sync(err) + } + } + + #[test] + fn test_eq_partial_eq_clone() { + // Test that all error types implement `Eq`, `PartialEq` + // and `Clone` if src does + // even if `Dst: !Eq`, `!PartialEq`, `!Clone`. 
+ + #[allow(dead_code)] + fn is_eq_partial_eq_clone<T: Eq + PartialEq + Clone>(_t: T) {} + + #[allow(dead_code)] + fn alignment_err_is_eq_partial_eq_clone<Src: Eq + PartialEq + Clone, Dst>( + err: AlignmentError<Src, Dst>, + ) { + is_eq_partial_eq_clone(err) + } + + #[allow(dead_code)] + fn size_err_is_eq_partial_eq_clone<Src: Eq + PartialEq + Clone, Dst>( + err: SizeError<Src, Dst>, + ) { + is_eq_partial_eq_clone(err) + } + + #[allow(dead_code)] + fn validity_err_is_eq_partial_eq_clone<Src: Eq + PartialEq + Clone, Dst: TryFromBytes>( + err: ValidityError<Src, Dst>, + ) { + is_eq_partial_eq_clone(err) + } + + #[allow(dead_code)] + fn convert_error_is_eq_partial_eq_clone<Src: Eq + PartialEq + Clone, Dst: TryFromBytes>( + err: ConvertError< + AlignmentError<Src, Dst>, + SizeError<Src, Dst>, + ValidityError<Src, Dst>, + >, + ) { + is_eq_partial_eq_clone(err) + } + } + + #[test] + fn alignment_display() { + #[repr(C, align(128))] + struct Aligned { + bytes: [u8; 128], + } + + impl_known_layout!(elain::Align::<8>); + + let aligned = Aligned { bytes: [0; 128] }; + + let bytes = &aligned.bytes[1..]; + let addr = crate::util::AsAddress::addr(bytes); + assert_eq!( + AlignmentError::<_, elain::Align::<8>>::new_checked(bytes).to_string(), + format!("The conversion failed because the address of the source is not a multiple of the alignment of the destination type.\n\ + \nSource type: &[u8]\ + \nSource address: 0x{:x} (a multiple of 1)\ + \nDestination type: elain::Align<8>\ + \nDestination alignment: 8", addr) + ); + + let bytes = &aligned.bytes[2..]; + let addr = crate::util::AsAddress::addr(bytes); + assert_eq!( + AlignmentError::<_, elain::Align::<8>>::new_checked(bytes).to_string(), + format!("The conversion failed because the address of the source is not a multiple of the alignment of the destination type.\n\ + \nSource type: &[u8]\ + \nSource address: 0x{:x} (a multiple of 2)\ + \nDestination type: elain::Align<8>\ + \nDestination alignment: 8", addr) + ); + + let bytes = &aligned.bytes[3..]; + let addr = crate::util::AsAddress::addr(bytes); + assert_eq!( + AlignmentError::<_, elain::Align::<8>>::new_checked(bytes).to_string(), + format!("The conversion failed because the address of the source is not a multiple of the alignment of the destination type.\n\ + \nSource type: &[u8]\ + \nSource address: 0x{:x} (a multiple of 1)\ + \nDestination type: elain::Align<8>\ + \nDestination alignment: 8", addr) + ); + + let bytes = &aligned.bytes[4..]; + let addr = crate::util::AsAddress::addr(bytes); + assert_eq!( + AlignmentError::<_, elain::Align::<8>>::new_checked(bytes).to_string(), + format!("The conversion failed because the address of the source is not a multiple of the alignment of the destination type.\n\ + \nSource type: &[u8]\ + \nSource address: 0x{:x} (a multiple of 4)\ + \nDestination type: elain::Align<8>\ + \nDestination alignment: 8", addr) + ); + } + + #[test] + fn size_display() { + assert_eq!( + SizeError::<_, [u8]>::new(&[0u8; 2][..]).to_string(), + "The conversion failed because the source was incorrectly sized to complete the conversion into the destination type.\n\ + \nSource type: &[u8]\ + \nSource size: 2 bytes\ + \nDestination type: [u8]" + ); + + assert_eq!( + SizeError::<_, [u8; 2]>::new(&[0u8; 1][..]).to_string(), + "The conversion failed because the source was incorrectly sized to complete the conversion into the destination type.\n\ + \nSource type: &[u8]\ + \nSource size: 1 byte\ + \nDestination size: 2 bytes\ + \nDestination type: [u8; 2]" + ); + } + + #[test] + fn 
validity_display() { + assert_eq!( + ValidityError::<_, bool>::new(&[2u8; 1][..]).to_string(), + "The conversion failed because the source bytes are not a valid value of the destination type.\n\ + \n\ + Destination type: bool" + ); + } +} diff --git a/vendor/zerocopy/src/impls.rs b/vendor/zerocopy/src/impls.rs new file mode 100644 index 00000000..462fa45c --- /dev/null +++ b/vendor/zerocopy/src/impls.rs @@ -0,0 +1,2120 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under the 2-Clause BSD License <LICENSE-BSD or +// https://opensource.org/license/bsd-2-clause>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +use core::{ + cell::{Cell, UnsafeCell}, + mem::MaybeUninit as CoreMaybeUninit, + ptr::NonNull, +}; + +use super::*; + +// SAFETY: Per the reference [1], "the unit tuple (`()`) ... is guaranteed as a +// zero-sized type to have a size of 0 and an alignment of 1." +// - `Immutable`: `()` self-evidently does not contain any `UnsafeCell`s. +// - `TryFromBytes` (with no validator), `FromZeros`, `FromBytes`: There is only +// one possible sequence of 0 bytes, and `()` is inhabited. +// - `IntoBytes`: Since `()` has size 0, it contains no padding bytes. +// - `Unaligned`: `()` has alignment 1. +// +// [1] https://doc.rust-lang.org/1.81.0/reference/type-layout.html#tuple-layout +#[allow(clippy::multiple_unsafe_ops_per_block)] +const _: () = unsafe { + unsafe_impl!((): Immutable, TryFromBytes, FromZeros, FromBytes, IntoBytes, Unaligned); + assert_unaligned!(()); +}; + +// SAFETY: +// - `Immutable`: These types self-evidently do not contain any `UnsafeCell`s. +// - `TryFromBytes` (with no validator), `FromZeros`, `FromBytes`: all bit +// patterns are valid for numeric types [1] +// - `IntoBytes`: numeric types have no padding bytes [1] +// - `Unaligned` (`u8` and `i8` only): The reference [2] specifies the size of +// `u8` and `i8` as 1 byte. We also know that: +// - Alignment is >= 1 [3] +// - Size is an integer multiple of alignment [4] +// - The only value >= 1 for which 1 is an integer multiple is 1 Therefore, +// the only possible alignment for `u8` and `i8` is 1. +// +// [1] Per https://doc.rust-lang.org/1.81.0/reference/types/numeric.html#bit-validity: +// +// For every numeric type, `T`, the bit validity of `T` is equivalent to +// the bit validity of `[u8; size_of::<T>()]`. An uninitialized byte is +// not a valid `u8`. +// +// [2] https://doc.rust-lang.org/1.81.0/reference/type-layout.html#primitive-data-layout +// +// [3] Per https://doc.rust-lang.org/1.81.0/reference/type-layout.html#size-and-alignment: +// +// Alignment is measured in bytes, and must be at least 1. +// +// [4] Per https://doc.rust-lang.org/1.81.0/reference/type-layout.html#size-and-alignment: +// +// The size of a value is always a multiple of its alignment. +// +// FIXME(#278): Once we've updated the trait docs to refer to `u8`s rather than +// bits or bytes, update this comment, especially the reference to [1]. 
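// Editor's note: a minimal, hypothetical sketch (not part of the vendored
// file or the diff) of what the primitive impls below enable. Because
// `u32: FromBytes + IntoBytes + Immutable`, four initialized bytes can be
// reinterpreted as a `u32` and the value's bytes can be viewed without
// copying, using the `read_from_bytes`/`as_bytes` APIs documented elsewhere
// in this crate:
//
//     fn editor_primitive_roundtrip_sketch() {
//         use zerocopy::{FromBytes, IntoBytes};
//         let bytes = [1u8, 2, 3, 4];
//         // `read_from_bytes` copies the bytes, so the result matches a
//         // native-endian reinterpretation of the same array.
//         let n = u32::read_from_bytes(&bytes[..]).unwrap();
//         assert_eq!(n, u32::from_ne_bytes(bytes));
//         // `as_bytes` exposes the value's underlying bytes in place.
//         assert_eq!(n.as_bytes(), &bytes[..]);
//     }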
+#[allow(clippy::multiple_unsafe_ops_per_block)] +const _: () = unsafe { + unsafe_impl!(u8: Immutable, TryFromBytes, FromZeros, FromBytes, IntoBytes, Unaligned); + unsafe_impl!(i8: Immutable, TryFromBytes, FromZeros, FromBytes, IntoBytes, Unaligned); + assert_unaligned!(u8, i8); + unsafe_impl!(u16: Immutable, TryFromBytes, FromZeros, FromBytes, IntoBytes); + unsafe_impl!(i16: Immutable, TryFromBytes, FromZeros, FromBytes, IntoBytes); + unsafe_impl!(u32: Immutable, TryFromBytes, FromZeros, FromBytes, IntoBytes); + unsafe_impl!(i32: Immutable, TryFromBytes, FromZeros, FromBytes, IntoBytes); + unsafe_impl!(u64: Immutable, TryFromBytes, FromZeros, FromBytes, IntoBytes); + unsafe_impl!(i64: Immutable, TryFromBytes, FromZeros, FromBytes, IntoBytes); + unsafe_impl!(u128: Immutable, TryFromBytes, FromZeros, FromBytes, IntoBytes); + unsafe_impl!(i128: Immutable, TryFromBytes, FromZeros, FromBytes, IntoBytes); + unsafe_impl!(usize: Immutable, TryFromBytes, FromZeros, FromBytes, IntoBytes); + unsafe_impl!(isize: Immutable, TryFromBytes, FromZeros, FromBytes, IntoBytes); + unsafe_impl!(f32: Immutable, TryFromBytes, FromZeros, FromBytes, IntoBytes); + unsafe_impl!(f64: Immutable, TryFromBytes, FromZeros, FromBytes, IntoBytes); + #[cfg(feature = "float-nightly")] + unsafe_impl!(#[cfg_attr(doc_cfg, doc(cfg(feature = "float-nightly")))] f16: Immutable, TryFromBytes, FromZeros, FromBytes, IntoBytes); + #[cfg(feature = "float-nightly")] + unsafe_impl!(#[cfg_attr(doc_cfg, doc(cfg(feature = "float-nightly")))] f128: Immutable, TryFromBytes, FromZeros, FromBytes, IntoBytes); +}; + +// SAFETY: +// - `Immutable`: `bool` self-evidently does not contain any `UnsafeCell`s. +// - `FromZeros`: Valid since "[t]he value false has the bit pattern 0x00" [1]. +// - `IntoBytes`: Since "the boolean type has a size and alignment of 1 each" +// and "The value false has the bit pattern 0x00 and the value true has the +// bit pattern 0x01" [1]. Thus, the only byte of the bool is always +// initialized. +// - `Unaligned`: Per the reference [1], "[a]n object with the boolean type has +// a size and alignment of 1 each." +// +// [1] https://doc.rust-lang.org/1.81.0/reference/types/boolean.html +#[allow(clippy::multiple_unsafe_ops_per_block)] +const _: () = unsafe { unsafe_impl!(bool: Immutable, FromZeros, IntoBytes, Unaligned) }; +assert_unaligned!(bool); + +// SAFETY: The impl must only return `true` for its argument if the original +// `Maybe<bool>` refers to a valid `bool`. We only return true if the `u8` value +// is 0 or 1, and both of these are valid values for `bool` [1]. +// +// [1] Per https://doc.rust-lang.org/1.81.0/reference/types/boolean.html: +// +// The value false has the bit pattern 0x00 and the value true has the bit +// pattern 0x01. +const _: () = unsafe { + unsafe_impl!(=> TryFromBytes for bool; |byte| { + let byte = byte.transmute::<u8, invariant::Valid, _>(); + *byte.unaligned_as_ref() < 2 + }) +}; +impl_size_eq!(bool, u8); + +// SAFETY: +// - `Immutable`: `char` self-evidently does not contain any `UnsafeCell`s. +// - `FromZeros`: Per reference [1], "[a] value of type char is a Unicode scalar +// value (i.e. a code point that is not a surrogate), represented as a 32-bit +// unsigned word in the 0x0000 to 0xD7FF or 0xE000 to 0x10FFFF range" which +// contains 0x0000. +// - `IntoBytes`: `char` is per reference [1] "represented as a 32-bit unsigned +// word" (`u32`) which is `IntoBytes`. Note that unlike `u32`, not all bit +// patterns are valid for `char`. 
+// +// [1] https://doc.rust-lang.org/1.81.0/reference/types/textual.html +#[allow(clippy::multiple_unsafe_ops_per_block)] +const _: () = unsafe { unsafe_impl!(char: Immutable, FromZeros, IntoBytes) }; + +// SAFETY: The impl must only return `true` for its argument if the original +// `Maybe<char>` refers to a valid `char`. `char::from_u32` guarantees that it +// returns `None` if its input is not a valid `char` [1]. +// +// [1] Per https://doc.rust-lang.org/core/primitive.char.html#method.from_u32: +// +// `from_u32()` will return `None` if the input is not a valid value for a +// `char`. +const _: () = unsafe { + unsafe_impl!(=> TryFromBytes for char; |c| { + let c = c.transmute::<Unalign<u32>, invariant::Valid, _>(); + let c = c.read_unaligned().into_inner(); + char::from_u32(c).is_some() + }); +}; + +impl_size_eq!(char, Unalign<u32>); + +// SAFETY: Per the Reference [1], `str` has the same layout as `[u8]`. +// - `Immutable`: `[u8]` does not contain any `UnsafeCell`s. +// - `FromZeros`, `IntoBytes`, `Unaligned`: `[u8]` is `FromZeros`, `IntoBytes`, +// and `Unaligned`. +// +// Note that we don't `assert_unaligned!(str)` because `assert_unaligned!` uses +// `align_of`, which only works for `Sized` types. +// +// FIXME(#429): Improve safety proof for `FromZeros` and `IntoBytes`; having the same +// layout as `[u8]` isn't sufficient. +// +// [1] Per https://doc.rust-lang.org/1.81.0/reference/type-layout.html#str-layout: +// +// String slices are a UTF-8 representation of characters that have the same +// layout as slices of type `[u8]`. +#[allow(clippy::multiple_unsafe_ops_per_block)] +const _: () = unsafe { unsafe_impl!(str: Immutable, FromZeros, IntoBytes, Unaligned) }; + +// SAFETY: The impl must only return `true` for its argument if the original +// `Maybe<str>` refers to a valid `str`. `str::from_utf8` guarantees that it +// returns `Err` if its input is not a valid `str` [1]. +// +// [1] Per https://doc.rust-lang.org/core/str/fn.from_utf8.html#errors: +// +// Returns `Err` if the slice is not UTF-8. +const _: () = unsafe { + unsafe_impl!(=> TryFromBytes for str; |c| { + let c = c.transmute::<[u8], invariant::Valid, _>(); + let c = c.unaligned_as_ref(); + core::str::from_utf8(c).is_ok() + }) +}; + +impl_size_eq!(str, [u8]); + +macro_rules! unsafe_impl_try_from_bytes_for_nonzero { + ($($nonzero:ident[$prim:ty]),*) => { + $( + unsafe_impl!(=> TryFromBytes for $nonzero; |n| { + impl_size_eq!($nonzero, Unalign<$prim>); + + let n = n.transmute::<Unalign<$prim>, invariant::Valid, _>(); + $nonzero::new(n.read_unaligned().into_inner()).is_some() + }); + )* + } +} + +// `NonZeroXxx` is `IntoBytes`, but not `FromZeros` or `FromBytes`. +// +// SAFETY: +// - `IntoBytes`: `NonZeroXxx` has the same layout as its associated primitive. +// Since it is the same size, this guarantees it has no padding - integers +// have no padding, and there's no room for padding if it can represent all +// of the same values except 0. +// - `Unaligned`: `NonZeroU8` and `NonZeroI8` document that `Option<NonZeroU8>` +// and `Option<NonZeroI8>` both have size 1. [1] [2] This is worded in a way +// that makes it unclear whether it's meant as a guarantee, but given the +// purpose of those types, it's virtually unthinkable that that would ever +// change. `Option` cannot be smaller than its contained type, which implies +// that `Option<NonZeroX8>` and `NonZeroX8` are of size 1 or 0. `NonZeroX8` can represent +// multiple states, so they cannot be 0 bytes, which means that they must be 1 +// byte.
The only valid alignment for a 1-byte type is 1. +// +// FIXME(#429): +// - Add quotes from documentation. +// - Add safety comment for `Immutable`. How can we prove that `NonZeroXxx` +// doesn't contain any `UnsafeCell`s? It's obviously true, but it's not clear +// how we'd prove it short of adding text to the stdlib docs that says so +// explicitly, which likely wouldn't be accepted. +// +// [1] Per https://doc.rust-lang.org/1.81.0/std/num/type.NonZeroU8.html: +// +// `NonZeroU8` is guaranteed to have the same layout and bit validity as `u8` with +// the exception that 0 is not a valid instance. +// +// [2] Per https://doc.rust-lang.org/1.81.0/std/num/type.NonZeroI8.html: +// +// `NonZeroI8` is guaranteed to have the same layout and bit validity as `i8` with +// the exception that 0 is not a valid instance. +#[allow(clippy::multiple_unsafe_ops_per_block)] +const _: () = unsafe { + unsafe_impl!(NonZeroU8: Immutable, IntoBytes, Unaligned); + unsafe_impl!(NonZeroI8: Immutable, IntoBytes, Unaligned); + assert_unaligned!(NonZeroU8, NonZeroI8); + unsafe_impl!(NonZeroU16: Immutable, IntoBytes); + unsafe_impl!(NonZeroI16: Immutable, IntoBytes); + unsafe_impl!(NonZeroU32: Immutable, IntoBytes); + unsafe_impl!(NonZeroI32: Immutable, IntoBytes); + unsafe_impl!(NonZeroU64: Immutable, IntoBytes); + unsafe_impl!(NonZeroI64: Immutable, IntoBytes); + unsafe_impl!(NonZeroU128: Immutable, IntoBytes); + unsafe_impl!(NonZeroI128: Immutable, IntoBytes); + unsafe_impl!(NonZeroUsize: Immutable, IntoBytes); + unsafe_impl!(NonZeroIsize: Immutable, IntoBytes); + unsafe_impl_try_from_bytes_for_nonzero!( + NonZeroU8[u8], + NonZeroI8[i8], + NonZeroU16[u16], + NonZeroI16[i16], + NonZeroU32[u32], + NonZeroI32[i32], + NonZeroU64[u64], + NonZeroI64[i64], + NonZeroU128[u128], + NonZeroI128[i128], + NonZeroUsize[usize], + NonZeroIsize[isize] + ); +}; + +// SAFETY: +// - `TryFromBytes` (with no validator), `FromZeros`, `FromBytes`, `IntoBytes`: +// The Rust compiler reuses `0` value to represent `None`, so +// `size_of::<Option<NonZeroXxx>>() == size_of::<xxx>()`; see `NonZeroXxx` +// documentation. +// - `Unaligned`: `NonZeroU8` and `NonZeroI8` document that `Option<NonZeroU8>` +// and `Option<NonZeroI8>` both have size 1. [1] [2] This is worded in a way +// that makes it unclear whether it's meant as a guarantee, but given the +// purpose of those types, it's virtually unthinkable that that would ever +// change. The only valid alignment for a 1-byte type is 1. +// +// [1] Per https://doc.rust-lang.org/1.81.0/std/num/type.NonZeroU8.html: +// +// `Option<NonZeroU8>` is guaranteed to be compatible with `u8`, including in FFI. +// +// Thanks to the null pointer optimization, `NonZeroU8` and `Option<NonZeroU8>` +// are guaranteed to have the same size and alignment: +// +// [2] Per https://doc.rust-lang.org/1.81.0/std/num/type.NonZeroI8.html: +// +// `Option<NonZeroI8>` is guaranteed to be compatible with `i8`, including in FFI. 
+// +// Thanks to the null pointer optimization, `NonZeroI8` and `Option<NonZeroI8>` +// are guaranteed to have the same size and alignment: +#[allow(clippy::multiple_unsafe_ops_per_block)] +const _: () = unsafe { + unsafe_impl!(Option<NonZeroU8>: TryFromBytes, FromZeros, FromBytes, IntoBytes, Unaligned); + unsafe_impl!(Option<NonZeroI8>: TryFromBytes, FromZeros, FromBytes, IntoBytes, Unaligned); + assert_unaligned!(Option<NonZeroU8>, Option<NonZeroI8>); + unsafe_impl!(Option<NonZeroU16>: TryFromBytes, FromZeros, FromBytes, IntoBytes); + unsafe_impl!(Option<NonZeroI16>: TryFromBytes, FromZeros, FromBytes, IntoBytes); + unsafe_impl!(Option<NonZeroU32>: TryFromBytes, FromZeros, FromBytes, IntoBytes); + unsafe_impl!(Option<NonZeroI32>: TryFromBytes, FromZeros, FromBytes, IntoBytes); + unsafe_impl!(Option<NonZeroU64>: TryFromBytes, FromZeros, FromBytes, IntoBytes); + unsafe_impl!(Option<NonZeroI64>: TryFromBytes, FromZeros, FromBytes, IntoBytes); + unsafe_impl!(Option<NonZeroU128>: TryFromBytes, FromZeros, FromBytes, IntoBytes); + unsafe_impl!(Option<NonZeroI128>: TryFromBytes, FromZeros, FromBytes, IntoBytes); + unsafe_impl!(Option<NonZeroUsize>: TryFromBytes, FromZeros, FromBytes, IntoBytes); + unsafe_impl!(Option<NonZeroIsize>: TryFromBytes, FromZeros, FromBytes, IntoBytes); +}; + +// SAFETY: While it's not fully documented, the consensus is that `Box<T>` does +// not contain any `UnsafeCell`s for `T: Sized` [1]. This is not a complete +// proof, but we are accepting this as a known risk per #1358. +// +// [1] https://github.com/rust-lang/unsafe-code-guidelines/issues/492 +#[cfg(feature = "alloc")] +const _: () = unsafe { + unsafe_impl!( + #[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))] + T: Sized => Immutable for Box<T> + ) +}; + +// SAFETY: The following types can be transmuted from `[0u8; size_of::<T>()]`. [1] +// +// [1] Per https://doc.rust-lang.org/1.89.0/core/option/index.html#representation: +// +// Rust guarantees to optimize the following types `T` such that [`Option<T>`] +// has the same size and alignment as `T`. In some of these cases, Rust +// further guarantees that `transmute::<_, Option<T>>([0u8; size_of::<T>()])` +// is sound and produces `Option::<T>::None`. These cases are identified by +// the second column: +// +// | `T` | `transmute::<_, Option<T>>([0u8; size_of::<T>()])` sound? 
| +// |-----------------------------------|-----------------------------------------------------------| +// | [`Box<U>`] | when `U: Sized` | +// | `&U` | when `U: Sized` | +// | `&mut U` | when `U: Sized` | +// | [`ptr::NonNull<U>`] | when `U: Sized` | +// | `fn`, `extern "C" fn`[^extern_fn] | always | +// +// [^extern_fn]: this remains true for `unsafe` variants, any argument/return +// types, and any other ABI: `[unsafe] extern "abi" fn` (_e.g._, `extern +// "system" fn`) +#[allow(clippy::multiple_unsafe_ops_per_block)] +const _: () = unsafe { + #[cfg(feature = "alloc")] + unsafe_impl!( + #[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))] + T => TryFromBytes for Option<Box<T>>; |c| pointer::is_zeroed(c) + ); + #[cfg(feature = "alloc")] + unsafe_impl!( + #[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))] + T => FromZeros for Option<Box<T>> + ); + unsafe_impl!( + T => TryFromBytes for Option<&'_ T>; |c| pointer::is_zeroed(c) + ); + unsafe_impl!(T => FromZeros for Option<&'_ T>); + unsafe_impl!( + T => TryFromBytes for Option<&'_ mut T>; |c| pointer::is_zeroed(c) + ); + unsafe_impl!(T => FromZeros for Option<&'_ mut T>); + unsafe_impl!( + T => TryFromBytes for Option<NonNull<T>>; |c| pointer::is_zeroed(c) + ); + unsafe_impl!(T => FromZeros for Option<NonNull<T>>); + unsafe_impl_for_power_set!(A, B, C, D, E, F, G, H, I, J, K, L -> M => FromZeros for opt_fn!(...)); + unsafe_impl_for_power_set!( + A, B, C, D, E, F, G, H, I, J, K, L -> M => TryFromBytes for opt_fn!(...); + |c| pointer::is_zeroed(c) + ); + unsafe_impl_for_power_set!(A, B, C, D, E, F, G, H, I, J, K, L -> M => FromZeros for opt_unsafe_fn!(...)); + unsafe_impl_for_power_set!( + A, B, C, D, E, F, G, H, I, J, K, L -> M => TryFromBytes for opt_unsafe_fn!(...); + |c| pointer::is_zeroed(c) + ); + unsafe_impl_for_power_set!(A, B, C, D, E, F, G, H, I, J, K, L -> M => FromZeros for opt_extern_c_fn!(...)); + unsafe_impl_for_power_set!( + A, B, C, D, E, F, G, H, I, J, K, L -> M => TryFromBytes for opt_extern_c_fn!(...); + |c| pointer::is_zeroed(c) + ); + unsafe_impl_for_power_set!(A, B, C, D, E, F, G, H, I, J, K, L -> M => FromZeros for opt_unsafe_extern_c_fn!(...)); + unsafe_impl_for_power_set!( + A, B, C, D, E, F, G, H, I, J, K, L -> M => TryFromBytes for opt_unsafe_extern_c_fn!(...); + |c| pointer::is_zeroed(c) + ); +}; + +// SAFETY: `[unsafe] [extern "C"] fn()` self-evidently do not contain +// `UnsafeCell`s. This is not a proof, but we are accepting this as a known risk +// per #1358. +#[allow(clippy::multiple_unsafe_ops_per_block)] +const _: () = unsafe { + unsafe_impl_for_power_set!(A, B, C, D, E, F, G, H, I, J, K, L -> M => Immutable for opt_fn!(...)); + unsafe_impl_for_power_set!(A, B, C, D, E, F, G, H, I, J, K, L -> M => Immutable for opt_unsafe_fn!(...)); + unsafe_impl_for_power_set!(A, B, C, D, E, F, G, H, I, J, K, L -> M => Immutable for opt_extern_c_fn!(...)); + unsafe_impl_for_power_set!(A, B, C, D, E, F, G, H, I, J, K, L -> M => Immutable for opt_unsafe_extern_c_fn!(...)); +}; + +#[cfg(all( + not(no_zerocopy_target_has_atomics_1_60_0), + any( + target_has_atomic = "8", + target_has_atomic = "16", + target_has_atomic = "32", + target_has_atomic = "64", + target_has_atomic = "ptr" + ) +))] +#[cfg_attr(doc_cfg, doc(cfg(rust = "1.60.0")))] +mod atomics { + use super::*; + + macro_rules! impl_traits_for_atomics { + ($($atomics:ident [$primitives:ident]),* $(,)?) 
=> { + $( + impl_known_layout!($atomics); + impl_for_transmute_from!(=> TryFromBytes for $atomics [UnsafeCell<$primitives>]); + impl_for_transmute_from!(=> FromZeros for $atomics [UnsafeCell<$primitives>]); + impl_for_transmute_from!(=> FromBytes for $atomics [UnsafeCell<$primitives>]); + impl_for_transmute_from!(=> IntoBytes for $atomics [UnsafeCell<$primitives>]); + )* + }; + } + + /// Implements `TransmuteFrom` for `$atomic`, `$prim`, and + /// `UnsafeCell<$prim>`. + /// + /// # Safety + /// + /// `$atomic` must have the same size and bit validity as `$prim`. + macro_rules! unsafe_impl_transmute_from_for_atomic { + ($($($tyvar:ident)? => $atomic:ty [$prim:ty]),*) => {{ + crate::util::macros::__unsafe(); + + use core::cell::UnsafeCell; + use crate::pointer::{PtrInner, SizeEq, TransmuteFrom, invariant::Valid}; + + $( + // SAFETY: The caller promised that `$atomic` and `$prim` have + // the same size and bit validity. + unsafe impl<$($tyvar)?> TransmuteFrom<$atomic, Valid, Valid> for $prim {} + // SAFETY: The caller promised that `$atomic` and `$prim` have + // the same size and bit validity. + unsafe impl<$($tyvar)?> TransmuteFrom<$prim, Valid, Valid> for $atomic {} + + // SAFETY: The caller promised that `$atomic` and `$prim` have + // the same size. + unsafe impl<$($tyvar)?> SizeEq<$atomic> for $prim { + #[inline] + fn cast_from_raw(a: PtrInner<'_, $atomic>) -> PtrInner<'_, $prim> { + // SAFETY: The caller promised that `$atomic` and + // `$prim` have the same size. Thus, this cast preserves + // address, referent size, and provenance. + unsafe { cast!(a) } + } + } + // SAFETY: See previous safety comment. + unsafe impl<$($tyvar)?> SizeEq<$prim> for $atomic { + #[inline] + fn cast_from_raw(p: PtrInner<'_, $prim>) -> PtrInner<'_, $atomic> { + // SAFETY: See previous safety comment. + unsafe { cast!(p) } + } + } + // SAFETY: The caller promised that `$atomic` and `$prim` have + // the same size. `UnsafeCell<T>` has the same size as `T` [1]. + // + // [1] Per https://doc.rust-lang.org/1.85.0/std/cell/struct.UnsafeCell.html#memory-layout: + // + // `UnsafeCell<T>` has the same in-memory representation as + // its inner type `T`. A consequence of this guarantee is that + // it is possible to convert between `T` and `UnsafeCell<T>`. + unsafe impl<$($tyvar)?> SizeEq<$atomic> for UnsafeCell<$prim> { + #[inline] + fn cast_from_raw(a: PtrInner<'_, $atomic>) -> PtrInner<'_, UnsafeCell<$prim>> { + // SAFETY: See previous safety comment. + unsafe { cast!(a) } + } + } + // SAFETY: See previous safety comment. + unsafe impl<$($tyvar)?> SizeEq<UnsafeCell<$prim>> for $atomic { + #[inline] + fn cast_from_raw(p: PtrInner<'_, UnsafeCell<$prim>>) -> PtrInner<'_, $atomic> { + // SAFETY: See previous safety comment. + unsafe { cast!(p) } + } + } + + // SAFETY: The caller promised that `$atomic` and `$prim` have + // the same bit validity. `UnsafeCell<T>` has the same bit + // validity as `T` [1]. + // + // [1] Per https://doc.rust-lang.org/1.85.0/std/cell/struct.UnsafeCell.html#memory-layout: + // + // `UnsafeCell<T>` has the same in-memory representation as + // its inner type `T`. A consequence of this guarantee is that + // it is possible to convert between `T` and `UnsafeCell<T>`. + unsafe impl<$($tyvar)?> TransmuteFrom<$atomic, Valid, Valid> for core::cell::UnsafeCell<$prim> {} + // SAFETY: See previous safety comment. 
+ unsafe impl<$($tyvar)?> TransmuteFrom<core::cell::UnsafeCell<$prim>, Valid, Valid> for $atomic {} + )* + }}; + } + + #[cfg(target_has_atomic = "8")] + #[cfg_attr(doc_cfg, doc(cfg(target_has_atomic = "8")))] + mod atomic_8 { + use core::sync::atomic::{AtomicBool, AtomicI8, AtomicU8}; + + use super::*; + + impl_traits_for_atomics!(AtomicU8[u8], AtomicI8[i8]); + + impl_known_layout!(AtomicBool); + + impl_for_transmute_from!(=> TryFromBytes for AtomicBool [UnsafeCell<bool>]); + impl_for_transmute_from!(=> FromZeros for AtomicBool [UnsafeCell<bool>]); + impl_for_transmute_from!(=> IntoBytes for AtomicBool [UnsafeCell<bool>]); + + // SAFETY: Per [1], `AtomicBool`, `AtomicU8`, and `AtomicI8` have the + // same size as `bool`, `u8`, and `i8` respectively. Since a type's + // alignment cannot be smaller than 1 [2], and since its alignment + // cannot be greater than its size [3], the only possible value for the + // alignment is 1. Thus, it is sound to implement `Unaligned`. + // + // [1] Per (for example) https://doc.rust-lang.org/1.81.0/std/sync/atomic/struct.AtomicU8.html: + // + // This type has the same size, alignment, and bit validity as the + // underlying integer type + // + // [2] Per https://doc.rust-lang.org/1.81.0/reference/type-layout.html#size-and-alignment: + // + // Alignment is measured in bytes, and must be at least 1. + // + // [3] Per https://doc.rust-lang.org/1.81.0/reference/type-layout.html#size-and-alignment: + // + // The size of a value is always a multiple of its alignment. + #[allow(clippy::multiple_unsafe_ops_per_block)] + const _: () = unsafe { + unsafe_impl!(AtomicBool: Unaligned); + unsafe_impl!(AtomicU8: Unaligned); + unsafe_impl!(AtomicI8: Unaligned); + assert_unaligned!(AtomicBool, AtomicU8, AtomicI8); + }; + + // SAFETY: `AtomicU8`, `AtomicI8`, and `AtomicBool` have the same size + // and bit validity as `u8`, `i8`, and `bool` respectively [1][2][3]. + // + // [1] Per https://doc.rust-lang.org/1.85.0/std/sync/atomic/struct.AtomicU8.html: + // + // This type has the same size, alignment, and bit validity as the + // underlying integer type, `u8`. + // + // [2] Per https://doc.rust-lang.org/1.85.0/std/sync/atomic/struct.AtomicI8.html: + // + // This type has the same size, alignment, and bit validity as the + // underlying integer type, `i8`. + // + // [3] Per https://doc.rust-lang.org/1.85.0/std/sync/atomic/struct.AtomicBool.html: + // + // This type has the same size, alignment, and bit validity a `bool`. + #[allow(clippy::multiple_unsafe_ops_per_block)] + const _: () = unsafe { + unsafe_impl_transmute_from_for_atomic!( + => AtomicU8 [u8], + => AtomicI8 [i8], + => AtomicBool [bool] + ) + }; + } + + #[cfg(target_has_atomic = "16")] + #[cfg_attr(doc_cfg, doc(cfg(target_has_atomic = "16")))] + mod atomic_16 { + use core::sync::atomic::{AtomicI16, AtomicU16}; + + use super::*; + + impl_traits_for_atomics!(AtomicU16[u16], AtomicI16[i16]); + + // SAFETY: `AtomicU16` and `AtomicI16` have the same size and bit + // validity as `u16` and `i16` respectively [1][2]. + // + // [1] Per https://doc.rust-lang.org/1.85.0/std/sync/atomic/struct.AtomicU16.html: + // + // This type has the same size and bit validity as the underlying + // integer type, `u16`. + // + // [2] Per https://doc.rust-lang.org/1.85.0/std/sync/atomic/struct.AtomicI16.html: + // + // This type has the same size and bit validity as the underlying + // integer type, `i16`. 
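+    //
+    // As a hedged sketch of what the atomic impls in this module allow
+    // downstream (the `read_from_bytes` method name is assumed from this
+    // crate's public `FromBytes` API; only applicable on targets with
+    // 16-bit atomics):
+    //
+    //   use core::sync::atomic::{AtomicU16, Ordering};
+    //   use zerocopy::FromBytes;
+    //   let a = AtomicU16::read_from_bytes(&3u16.to_ne_bytes()[..]).unwrap();
+    //   assert_eq!(a.load(Ordering::Relaxed), 3);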
+ #[allow(clippy::multiple_unsafe_ops_per_block)] + const _: () = unsafe { + unsafe_impl_transmute_from_for_atomic!(=> AtomicU16 [u16], => AtomicI16 [i16]) + }; + } + + #[cfg(target_has_atomic = "32")] + #[cfg_attr(doc_cfg, doc(cfg(target_has_atomic = "32")))] + mod atomic_32 { + use core::sync::atomic::{AtomicI32, AtomicU32}; + + use super::*; + + impl_traits_for_atomics!(AtomicU32[u32], AtomicI32[i32]); + + // SAFETY: `AtomicU32` and `AtomicI32` have the same size and bit + // validity as `u32` and `i32` respectively [1][2]. + // + // [1] Per https://doc.rust-lang.org/1.85.0/std/sync/atomic/struct.AtomicU32.html: + // + // This type has the same size and bit validity as the underlying + // integer type, `u32`. + // + // [2] Per https://doc.rust-lang.org/1.85.0/std/sync/atomic/struct.AtomicI32.html: + // + // This type has the same size and bit validity as the underlying + // integer type, `i32`. + #[allow(clippy::multiple_unsafe_ops_per_block)] + const _: () = unsafe { + unsafe_impl_transmute_from_for_atomic!(=> AtomicU32 [u32], => AtomicI32 [i32]) + }; + } + + #[cfg(target_has_atomic = "64")] + #[cfg_attr(doc_cfg, doc(cfg(target_has_atomic = "64")))] + mod atomic_64 { + use core::sync::atomic::{AtomicI64, AtomicU64}; + + use super::*; + + impl_traits_for_atomics!(AtomicU64[u64], AtomicI64[i64]); + + // SAFETY: `AtomicU64` and `AtomicI64` have the same size and bit + // validity as `u64` and `i64` respectively [1][2]. + // + // [1] Per https://doc.rust-lang.org/1.85.0/std/sync/atomic/struct.AtomicU64.html: + // + // This type has the same size and bit validity as the underlying + // integer type, `u64`. + // + // [2] Per https://doc.rust-lang.org/1.85.0/std/sync/atomic/struct.AtomicI64.html: + // + // This type has the same size and bit validity as the underlying + // integer type, `i64`. + #[allow(clippy::multiple_unsafe_ops_per_block)] + const _: () = unsafe { + unsafe_impl_transmute_from_for_atomic!(=> AtomicU64 [u64], => AtomicI64 [i64]) + }; + } + + #[cfg(target_has_atomic = "ptr")] + #[cfg_attr(doc_cfg, doc(cfg(target_has_atomic = "ptr")))] + mod atomic_ptr { + use core::sync::atomic::{AtomicIsize, AtomicPtr, AtomicUsize}; + + use super::*; + + impl_traits_for_atomics!(AtomicUsize[usize], AtomicIsize[isize]); + + impl_known_layout!(T => AtomicPtr<T>); + + // FIXME(#170): Implement `FromBytes` and `IntoBytes` once we implement + // those traits for `*mut T`. + impl_for_transmute_from!(T => TryFromBytes for AtomicPtr<T> [UnsafeCell<*mut T>]); + impl_for_transmute_from!(T => FromZeros for AtomicPtr<T> [UnsafeCell<*mut T>]); + + // SAFETY: `AtomicUsize` and `AtomicIsize` have the same size and bit + // validity as `usize` and `isize` respectively [1][2]. + // + // [1] Per https://doc.rust-lang.org/1.85.0/std/sync/atomic/struct.AtomicUsize.html: + // + // This type has the same size and bit validity as the underlying + // integer type, `usize`. + // + // [2] Per https://doc.rust-lang.org/1.85.0/std/sync/atomic/struct.AtomicIsize.html: + // + // This type has the same size and bit validity as the underlying + // integer type, `isize`. + #[allow(clippy::multiple_unsafe_ops_per_block)] + const _: () = unsafe { + unsafe_impl_transmute_from_for_atomic!(=> AtomicUsize [usize], => AtomicIsize [isize]) + }; + + // SAFETY: Per + // https://doc.rust-lang.org/1.85.0/std/sync/atomic/struct.AtomicPtr.html: + // + // This type has the same size and bit validity as a `*mut T`. 
+ #[allow(clippy::multiple_unsafe_ops_per_block)] + const _: () = unsafe { unsafe_impl_transmute_from_for_atomic!(T => AtomicPtr<T> [*mut T]) }; + } +} + +// SAFETY: Per reference [1]: "For all T, the following are guaranteed: +// size_of::<PhantomData<T>>() == 0 align_of::<PhantomData<T>>() == 1". This +// gives: +// - `Immutable`: `PhantomData` has no fields. +// - `TryFromBytes` (with no validator), `FromZeros`, `FromBytes`: There is only +// one possible sequence of 0 bytes, and `PhantomData` is inhabited. +// - `IntoBytes`: Since `PhantomData` has size 0, it contains no padding bytes. +// - `Unaligned`: Per the preceding reference, `PhantomData` has alignment 1. +// +// [1] https://doc.rust-lang.org/1.81.0/std/marker/struct.PhantomData.html#layout-1 +#[allow(clippy::multiple_unsafe_ops_per_block)] +const _: () = unsafe { + unsafe_impl!(T: ?Sized => Immutable for PhantomData<T>); + unsafe_impl!(T: ?Sized => TryFromBytes for PhantomData<T>); + unsafe_impl!(T: ?Sized => FromZeros for PhantomData<T>); + unsafe_impl!(T: ?Sized => FromBytes for PhantomData<T>); + unsafe_impl!(T: ?Sized => IntoBytes for PhantomData<T>); + unsafe_impl!(T: ?Sized => Unaligned for PhantomData<T>); + assert_unaligned!(PhantomData<()>, PhantomData<u8>, PhantomData<u64>); +}; + +impl_for_transmute_from!(T: TryFromBytes => TryFromBytes for Wrapping<T>[<T>]); +impl_for_transmute_from!(T: FromZeros => FromZeros for Wrapping<T>[<T>]); +impl_for_transmute_from!(T: FromBytes => FromBytes for Wrapping<T>[<T>]); +impl_for_transmute_from!(T: IntoBytes => IntoBytes for Wrapping<T>[<T>]); +assert_unaligned!(Wrapping<()>, Wrapping<u8>); + +// SAFETY: Per [1], `Wrapping<T>` has the same layout as `T`. Since its single +// field (of type `T`) is public, it would be a breaking change to add or remove +// fields. Thus, we know that `Wrapping<T>` contains a `T` (as opposed to just +// having the same size and alignment as `T`) with no pre- or post-padding. +// Thus, `Wrapping<T>` must have `UnsafeCell`s covering the same byte ranges as +// `Inner = T`. +// +// [1] Per https://doc.rust-lang.org/1.81.0/std/num/struct.Wrapping.html#layout-1: +// +// `Wrapping<T>` is guaranteed to have the same layout and ABI as `T` +const _: () = unsafe { unsafe_impl!(T: Immutable => Immutable for Wrapping<T>) }; + +// SAFETY: Per [1] in the preceding safety comment, `Wrapping<T>` has the same +// alignment as `T`. +const _: () = unsafe { unsafe_impl!(T: Unaligned => Unaligned for Wrapping<T>) }; + +// SAFETY: `TryFromBytes` (with no validator), `FromZeros`, `FromBytes`: +// `MaybeUninit<T>` has no restrictions on its contents. +#[allow(clippy::multiple_unsafe_ops_per_block)] +const _: () = unsafe { + unsafe_impl!(T => TryFromBytes for CoreMaybeUninit<T>); + unsafe_impl!(T => FromZeros for CoreMaybeUninit<T>); + unsafe_impl!(T => FromBytes for CoreMaybeUninit<T>); +}; + +// SAFETY: `MaybeUninit<T>` has `UnsafeCell`s covering the same byte ranges as +// `Inner = T`. This is not explicitly documented, but it can be inferred. Per +// [1], `MaybeUninit<T>` has the same size as `T`. Further, note the signature +// of `MaybeUninit::assume_init_ref` [2]: +// +// pub unsafe fn assume_init_ref(&self) -> &T +// +// If the argument `&MaybeUninit<T>` and the returned `&T` had `UnsafeCell`s at +// different offsets, this would be unsound. Its existence is proof that this is +// not the case. 
+// +// [1] Per https://doc.rust-lang.org/1.81.0/std/mem/union.MaybeUninit.html#layout-1: +// +// `MaybeUninit<T>` is guaranteed to have the same size, alignment, and ABI as +// `T`. +// +// [2] https://doc.rust-lang.org/1.81.0/std/mem/union.MaybeUninit.html#method.assume_init_ref +const _: () = unsafe { unsafe_impl!(T: Immutable => Immutable for CoreMaybeUninit<T>) }; + +// SAFETY: Per [1] in the preceding safety comment, `MaybeUninit<T>` has the +// same alignment as `T`. +const _: () = unsafe { unsafe_impl!(T: Unaligned => Unaligned for CoreMaybeUninit<T>) }; +assert_unaligned!(CoreMaybeUninit<()>, CoreMaybeUninit<u8>); + +// SAFETY: `ManuallyDrop<T>` has the same layout as `T` [1]. This strongly +// implies, but does not guarantee, that it contains `UnsafeCell`s covering the +// same byte ranges as in `T`. However, it also implements `Deref<Target = T>` +// [2], which provides the ability to convert `&ManuallyDrop<T> -> &T`. This, +// combined with having the same size as `T`, implies that `ManuallyDrop<T>` +// exactly contains a `T` with the same fields and `UnsafeCell`s covering the +// same byte ranges, or else the `Deref` impl would permit safe code to obtain +// different shared references to the same region of memory with different +// `UnsafeCell` coverage, which would in turn permit interior mutation that +// would violate the invariants of a shared reference. +// +// [1] Per https://doc.rust-lang.org/1.85.0/std/mem/struct.ManuallyDrop.html: +// +// `ManuallyDrop<T>` is guaranteed to have the same layout and bit validity as +// `T` +// +// [2] https://doc.rust-lang.org/1.85.0/std/mem/struct.ManuallyDrop.html#impl-Deref-for-ManuallyDrop%3CT%3E +const _: () = unsafe { unsafe_impl!(T: ?Sized + Immutable => Immutable for ManuallyDrop<T>) }; + +impl_for_transmute_from!(T: ?Sized + TryFromBytes => TryFromBytes for ManuallyDrop<T>[<T>]); +impl_for_transmute_from!(T: ?Sized + FromZeros => FromZeros for ManuallyDrop<T>[<T>]); +impl_for_transmute_from!(T: ?Sized + FromBytes => FromBytes for ManuallyDrop<T>[<T>]); +impl_for_transmute_from!(T: ?Sized + IntoBytes => IntoBytes for ManuallyDrop<T>[<T>]); +// SAFETY: `ManuallyDrop<T>` has the same layout as `T` [1], and thus has the +// same alignment as `T`. +// +// [1] Per https://doc.rust-lang.org/1.81.0/std/mem/struct.ManuallyDrop.html: +// +// `ManuallyDrop<T>` is guaranteed to have the same layout and bit validity as +// `T` +const _: () = unsafe { unsafe_impl!(T: ?Sized + Unaligned => Unaligned for ManuallyDrop<T>) }; +assert_unaligned!(ManuallyDrop<()>, ManuallyDrop<u8>); + +impl_for_transmute_from!(T: ?Sized + TryFromBytes => TryFromBytes for Cell<T>[UnsafeCell<T>]); +impl_for_transmute_from!(T: ?Sized + FromZeros => FromZeros for Cell<T>[UnsafeCell<T>]); +impl_for_transmute_from!(T: ?Sized + FromBytes => FromBytes for Cell<T>[UnsafeCell<T>]); +impl_for_transmute_from!(T: ?Sized + IntoBytes => IntoBytes for Cell<T>[UnsafeCell<T>]); +// SAFETY: `Cell<T>` has the same in-memory representation as `T` [1], and thus +// has the same alignment as `T`. +// +// [1] Per https://doc.rust-lang.org/1.81.0/core/cell/struct.Cell.html#memory-layout: +// +// `Cell<T>` has the same in-memory representation as its inner type `T`.
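+//
+// Illustrative sketch only (assuming this crate's public
+// `FromBytes::read_from_bytes` method): the layout equivalence above is what
+// lets a consumer materialize a `Cell<T>` directly from bytes:
+//
+//   use zerocopy::FromBytes;
+//   let c = core::cell::Cell::<u8>::read_from_bytes(&[7u8][..]).unwrap();
+//   assert_eq!(c.get(), 7);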
+const _: () = unsafe { unsafe_impl!(T: ?Sized + Unaligned => Unaligned for Cell<T>) }; + +impl_for_transmute_from!(T: ?Sized + FromZeros => FromZeros for UnsafeCell<T>[<T>]); +impl_for_transmute_from!(T: ?Sized + FromBytes => FromBytes for UnsafeCell<T>[<T>]); +impl_for_transmute_from!(T: ?Sized + IntoBytes => IntoBytes for UnsafeCell<T>[<T>]); +// SAFETY: `UnsafeCell<T>` has the same in-memory representation as `T` [1], and +// thus has the same alignment as `T`. +// +// [1] Per https://doc.rust-lang.org/1.81.0/core/cell/struct.UnsafeCell.html#memory-layout: +// +// `UnsafeCell<T>` has the same in-memory representation as its inner type +// `T`. +const _: () = unsafe { unsafe_impl!(T: ?Sized + Unaligned => Unaligned for UnsafeCell<T>) }; +assert_unaligned!(UnsafeCell<()>, UnsafeCell<u8>); + +// SAFETY: See safety comment in `is_bit_valid` impl. +unsafe impl<T: TryFromBytes + ?Sized> TryFromBytes for UnsafeCell<T> { + #[allow(clippy::missing_inline_in_public_items)] + fn only_derive_is_allowed_to_implement_this_trait() + where + Self: Sized, + { + } + + #[inline] + fn is_bit_valid<A: invariant::Reference>(candidate: Maybe<'_, Self, A>) -> bool { + // The only way to implement this function is using an exclusive-aliased + // pointer. `UnsafeCell`s cannot be read via shared-aliased pointers + // (other than by using `unsafe` code, which we can't use since we can't + // guarantee how our users are accessing or modifying the `UnsafeCell`). + // + // `is_bit_valid` is documented as panicking or failing to monomorphize + // if called with a shared-aliased pointer on a type containing an + // `UnsafeCell`. In practice, it will always be a monomorphization error. + // Since `is_bit_valid` is `#[doc(hidden)]` and only called directly + // from this crate, we only need to worry about our own code incorrectly + // calling `UnsafeCell::is_bit_valid`. The post-monomorphization error + // makes it easier to test that this is truly the case, and also means + // that if we make a mistake, it will cause downstream code to fail to + // compile, which will immediately surface the mistake and give us a + // chance to fix it quickly. + let c = candidate.into_exclusive_or_pme(); + + // SAFETY: Since `UnsafeCell<T>` and `T` have the same layout and bit + // validity, `UnsafeCell<T>` is bit-valid exactly when its wrapped `T` + // is. Thus, this is a sound implementation of + // `UnsafeCell::is_bit_valid`. + T::is_bit_valid(c.get_mut()) + } +} + +// SAFETY: Per the reference [1]: +// +// An array of `[T; N]` has a size of `size_of::<T>() * N` and the same +// alignment of `T`. Arrays are laid out so that the zero-based `nth` element +// of the array is offset from the start of the array by `n * size_of::<T>()` +// bytes. +// +// ... +// +// Slices have the same layout as the section of the array they slice. +// +// In other words, the layout of a `[T]` or `[T; N]` is a sequence of `T`s laid +// out back-to-back with no bytes in between. Therefore, `[T]` or `[T; N]` are +// `Immutable`, `TryFromBytes`, `FromZeros`, `FromBytes`, and `IntoBytes` if `T` +// is (respectively). Furthermore, since an array/slice has "the same alignment +// of `T`", `[T]` and `[T; N]` are `Unaligned` if `T` is. +// +// Note that we don't `assert_unaligned!` for slice types because +// `assert_unaligned!` uses `align_of`, which only works for `Sized` types. 
+// +// [1] https://doc.rust-lang.org/1.81.0/reference/type-layout.html#array-layout +#[allow(clippy::multiple_unsafe_ops_per_block)] +const _: () = unsafe { + unsafe_impl!(const N: usize, T: Immutable => Immutable for [T; N]); + unsafe_impl!(const N: usize, T: TryFromBytes => TryFromBytes for [T; N]; |c| { + // Note that this call may panic, but it would still be sound even if it + // did. `is_bit_valid` does not promise that it will not panic (in fact, + // it explicitly warns that it's a possibility), and we have not + // violated any safety invariants that we must fix before returning. + <[T] as TryFromBytes>::is_bit_valid(c.as_slice()) + }); + unsafe_impl!(const N: usize, T: FromZeros => FromZeros for [T; N]); + unsafe_impl!(const N: usize, T: FromBytes => FromBytes for [T; N]); + unsafe_impl!(const N: usize, T: IntoBytes => IntoBytes for [T; N]); + unsafe_impl!(const N: usize, T: Unaligned => Unaligned for [T; N]); + assert_unaligned!([(); 0], [(); 1], [u8; 0], [u8; 1]); + unsafe_impl!(T: Immutable => Immutable for [T]); + unsafe_impl!(T: TryFromBytes => TryFromBytes for [T]; |c| { + // SAFETY: Per the reference [1]: + // + // An array of `[T; N]` has a size of `size_of::<T>() * N` and the + // same alignment of `T`. Arrays are laid out so that the zero-based + // `nth` element of the array is offset from the start of the array by + // `n * size_of::<T>()` bytes. + // + // ... + // + // Slices have the same layout as the section of the array they slice. + // + // In other words, the layout of a `[T] is a sequence of `T`s laid out + // back-to-back with no bytes in between. If all elements in `candidate` + // are `is_bit_valid`, so too is `candidate`. + // + // Note that any of the below calls may panic, but it would still be + // sound even if it did. `is_bit_valid` does not promise that it will + // not panic (in fact, it explicitly warns that it's a possibility), and + // we have not violated any safety invariants that we must fix before + // returning. + c.iter().all(<T as TryFromBytes>::is_bit_valid) + }); + unsafe_impl!(T: FromZeros => FromZeros for [T]); + unsafe_impl!(T: FromBytes => FromBytes for [T]); + unsafe_impl!(T: IntoBytes => IntoBytes for [T]); + unsafe_impl!(T: Unaligned => Unaligned for [T]); +}; + +// SAFETY: +// - `Immutable`: Raw pointers do not contain any `UnsafeCell`s. +// - `FromZeros`: For thin pointers (note that `T: Sized`), the zero pointer is +// considered "null". [1] No operations which require provenance are legal on +// null pointers, so this is not a footgun. +// - `TryFromBytes`: By the same reasoning as for `FromZeroes`, we can implement +// `TryFromBytes` for thin pointers provided that +// [`TryFromByte::is_bit_valid`] only produces `true` for zeroed bytes. +// +// NOTE(#170): Implementing `FromBytes` and `IntoBytes` for raw pointers would +// be sound, but carries provenance footguns. We want to support `FromBytes` and +// `IntoBytes` for raw pointers eventually, but we are holding off until we can +// figure out how to address those footguns. +// +// [1] Per https://doc.rust-lang.org/1.81.0/std/ptr/fn.null.html: +// +// Creates a null raw pointer. +// +// This function is equivalent to zero-initializing the pointer: +// `MaybeUninit::<*const T>::zeroed().assume_init()`. +// +// The resulting pointer has the address 0. 
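+//
+// A small hedged sketch of the `FromZeros` claim above (the `new_zeroed`
+// constructor name is assumed from this crate's public `FromZeros` API): a
+// zeroed thin pointer is exactly the null pointer.
+//
+//   use zerocopy::FromZeros;
+//   let p: *const u8 = FromZeros::new_zeroed();
+//   assert!(p.is_null());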
+#[allow(clippy::multiple_unsafe_ops_per_block)] +const _: () = unsafe { + unsafe_impl!(T: ?Sized => Immutable for *const T); + unsafe_impl!(T: ?Sized => Immutable for *mut T); + unsafe_impl!(T => TryFromBytes for *const T; |c| pointer::is_zeroed(c)); + unsafe_impl!(T => FromZeros for *const T); + unsafe_impl!(T => TryFromBytes for *mut T; |c| pointer::is_zeroed(c)); + unsafe_impl!(T => FromZeros for *mut T); +}; + +// SAFETY: `NonNull<T>` self-evidently does not contain `UnsafeCell`s. This is +// not a proof, but we are accepting this as a known risk per #1358. +const _: () = unsafe { unsafe_impl!(T: ?Sized => Immutable for NonNull<T>) }; + +// SAFETY: Reference types do not contain any `UnsafeCell`s. +#[allow(clippy::multiple_unsafe_ops_per_block)] +const _: () = unsafe { + unsafe_impl!(T: ?Sized => Immutable for &'_ T); + unsafe_impl!(T: ?Sized => Immutable for &'_ mut T); +}; + +// SAFETY: `Option` is not `#[non_exhaustive]` [1], which means that the types +// in its variants cannot change, and no new variants can be added. `Option<T>` +// does not contain any `UnsafeCell`s outside of `T`. [1] +// +// [1] https://doc.rust-lang.org/core/option/enum.Option.html +const _: () = unsafe { unsafe_impl!(T: Immutable => Immutable for Option<T>) }; + +// SIMD support +// +// Per the Unsafe Code Guidelines Reference [1]: +// +// Packed SIMD vector types are `repr(simd)` homogeneous tuple-structs +// containing `N` elements of type `T` where `N` is a power-of-two and the +// size and alignment requirements of `T` are equal: +// +// ```rust +// #[repr(simd)] +// struct Vector<T, N>(T_0, ..., T_(N - 1)); +// ``` +// +// ... +// +// The size of `Vector` is `N * size_of::<T>()` and its alignment is an +// implementation-defined function of `T` and `N` greater than or equal to +// `align_of::<T>()`. +// +// ... +// +// Vector elements are laid out in source field order, enabling random access +// to vector elements by reinterpreting the vector as an array: +// +// ```rust +// union U { +// vec: Vector<T, N>, +// arr: [T; N] +// } +// +// assert_eq!(size_of::<Vector<T, N>>(), size_of::<[T; N]>()); +// assert!(align_of::<Vector<T, N>>() >= align_of::<[T; N]>()); +// +// unsafe { +// let u = U { vec: Vector<T, N>(t_0, ..., t_(N - 1)) }; +// +// assert_eq!(u.vec.0, u.arr[0]); +// // ... +// assert_eq!(u.vec.(N - 1), u.arr[N - 1]); +// } +// ``` +// +// Given this background, we can observe that: +// - The size and bit pattern requirements of a SIMD type are equivalent to the +// equivalent array type. Thus, for any SIMD type whose primitive `T` is +// `Immutable`, `TryFromBytes`, `FromZeros`, `FromBytes`, or `IntoBytes`, that +// SIMD type is also `Immutable`, `TryFromBytes`, `FromZeros`, `FromBytes`, or +// `IntoBytes` respectively. +// - Since no upper bound is placed on the alignment, no SIMD type can be +// guaranteed to be `Unaligned`. +// +// Also per [1]: +// +// This chapter represents the consensus from issue #38. The statements in +// here are not (yet) "guaranteed" not to change until an RFC ratifies them. +// +// See issue #38 [2]. While this behavior is not technically guaranteed, the +// likelihood that the behavior will change such that SIMD types are no longer +// `TryFromBytes`, `FromZeros`, `FromBytes`, or `IntoBytes` is next to zero, as +// that would defeat the entire purpose of SIMD types. Nonetheless, we put this +// behavior behind the `simd` Cargo feature, which requires consumers to opt +// into this stability hazard. 
+// +// [1] https://rust-lang.github.io/unsafe-code-guidelines/layout/packed-simd-vectors.html +// [2] https://github.com/rust-lang/unsafe-code-guidelines/issues/38 +#[cfg(feature = "simd")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "simd")))] +mod simd { + /// Defines a module which implements `TryFromBytes`, `FromZeros`, + /// `FromBytes`, and `IntoBytes` for a set of types from a module in + /// `core::arch`. + /// + /// `$arch` is both the name of the defined module and the name of the + /// module in `core::arch`, and `$typ` is the list of items from that module + /// to implement `FromZeros`, `FromBytes`, and `IntoBytes` for. + #[allow(unused_macros)] // `allow(unused_macros)` is needed because some + // target/feature combinations don't emit any impls + // and thus don't use this macro. + macro_rules! simd_arch_mod { + ($(#[cfg $cfg:tt])* $(#[cfg_attr $cfg_attr:tt])? $arch:ident, $mod:ident, $($typ:ident),*) => { + $(#[cfg $cfg])* + #[cfg_attr(doc_cfg, doc(cfg $($cfg)*))] + $(#[cfg_attr $cfg_attr])? + mod $mod { + use core::arch::$arch::{$($typ),*}; + + use crate::*; + impl_known_layout!($($typ),*); + // SAFETY: See comment on module definition for justification. + #[allow(clippy::multiple_unsafe_ops_per_block)] + const _: () = unsafe { + $( unsafe_impl!($typ: Immutable, TryFromBytes, FromZeros, FromBytes, IntoBytes); )* + }; + } + }; + } + + #[rustfmt::skip] + const _: () = { + simd_arch_mod!( + #[cfg(target_arch = "x86")] + x86, x86, __m128, __m128d, __m128i, __m256, __m256d, __m256i + ); + #[cfg(not(no_zerocopy_simd_x86_avx12_1_89_0))] + simd_arch_mod!( + #[cfg(target_arch = "x86")] + #[cfg_attr(doc_cfg, doc(cfg(rust = "1.89.0")))] + x86, x86_nightly, __m512bh, __m512, __m512d, __m512i + ); + simd_arch_mod!( + #[cfg(target_arch = "x86_64")] + x86_64, x86_64, __m128, __m128d, __m128i, __m256, __m256d, __m256i + ); + #[cfg(not(no_zerocopy_simd_x86_avx12_1_89_0))] + simd_arch_mod!( + #[cfg(target_arch = "x86_64")] + #[cfg_attr(doc_cfg, doc(cfg(rust = "1.89.0")))] + x86_64, x86_64_nightly, __m512bh, __m512, __m512d, __m512i + ); + simd_arch_mod!( + #[cfg(target_arch = "wasm32")] + wasm32, wasm32, v128 + ); + simd_arch_mod!( + #[cfg(all(feature = "simd-nightly", target_arch = "powerpc"))] + powerpc, powerpc, vector_bool_long, vector_double, vector_signed_long, vector_unsigned_long + ); + simd_arch_mod!( + #[cfg(all(feature = "simd-nightly", target_arch = "powerpc64"))] + powerpc64, powerpc64, vector_bool_long, vector_double, vector_signed_long, vector_unsigned_long + ); + #[cfg(not(no_zerocopy_aarch64_simd_1_59_0))] + simd_arch_mod!( + // NOTE(https://github.com/rust-lang/stdarch/issues/1484): NEON intrinsics are currently + // broken on big-endian platforms. 
+ #[cfg(all(target_arch = "aarch64", target_endian = "little"))] + #[cfg_attr(doc_cfg, doc(cfg(rust = "1.59.0")))] + aarch64, aarch64, float32x2_t, float32x4_t, float64x1_t, float64x2_t, int8x8_t, int8x8x2_t, + int8x8x3_t, int8x8x4_t, int8x16_t, int8x16x2_t, int8x16x3_t, int8x16x4_t, int16x4_t, + int16x8_t, int32x2_t, int32x4_t, int64x1_t, int64x2_t, poly8x8_t, poly8x8x2_t, poly8x8x3_t, + poly8x8x4_t, poly8x16_t, poly8x16x2_t, poly8x16x3_t, poly8x16x4_t, poly16x4_t, poly16x8_t, + poly64x1_t, poly64x2_t, uint8x8_t, uint8x8x2_t, uint8x8x3_t, uint8x8x4_t, uint8x16_t, + uint8x16x2_t, uint8x16x3_t, uint8x16x4_t, uint16x4_t, uint16x4x2_t, uint16x4x3_t, + uint16x4x4_t, uint16x8_t, uint32x2_t, uint32x4_t, uint64x1_t, uint64x2_t + ); + }; +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::pointer::invariant; + + #[test] + fn test_impls() { + // A type that can supply test cases for testing + // `TryFromBytes::is_bit_valid`. All types passed to `assert_impls!` + // must implement this trait; that macro uses it to generate runtime + // tests for `TryFromBytes` impls. + // + // All `T: FromBytes` types are provided with a blanket impl. Other + // types must implement `TryFromBytesTestable` directly (ie using + // `impl_try_from_bytes_testable!`). + trait TryFromBytesTestable { + fn with_passing_test_cases<F: Fn(Box<Self>)>(f: F); + fn with_failing_test_cases<F: Fn(&mut [u8])>(f: F); + } + + impl<T: FromBytes> TryFromBytesTestable for T { + fn with_passing_test_cases<F: Fn(Box<Self>)>(f: F) { + // Test with a zeroed value. + f(Self::new_box_zeroed().unwrap()); + + let ffs = { + let mut t = Self::new_zeroed(); + let ptr: *mut T = &mut t; + // SAFETY: `T: FromBytes` + unsafe { ptr::write_bytes(ptr.cast::<u8>(), 0xFF, mem::size_of::<T>()) }; + t + }; + + // Test with a value initialized with 0xFF. + f(Box::new(ffs)); + } + + fn with_failing_test_cases<F: Fn(&mut [u8])>(_f: F) {} + } + + macro_rules! impl_try_from_bytes_testable_for_null_pointer_optimization { + ($($tys:ty),*) => { + $( + impl TryFromBytesTestable for Option<$tys> { + fn with_passing_test_cases<F: Fn(Box<Self>)>(f: F) { + // Test with a zeroed value. + f(Box::new(None)); + } + + fn with_failing_test_cases<F: Fn(&mut [u8])>(f: F) { + for pos in 0..mem::size_of::<Self>() { + let mut bytes = [0u8; mem::size_of::<Self>()]; + bytes[pos] = 0x01; + f(&mut bytes[..]); + } + } + } + )* + }; + } + + // Implements `TryFromBytesTestable`. + macro_rules! impl_try_from_bytes_testable { + // Base case for recursion (when the list of types has run out). + (=> @success $($success_case:expr),* $(, @failure $($failure_case:expr),*)?) => {}; + // Implements for type(s) with no type parameters. + ($ty:ty $(,$tys:ty)* => @success $($success_case:expr),* $(, @failure $($failure_case:expr),*)?) => { + impl TryFromBytesTestable for $ty { + impl_try_from_bytes_testable!( + @methods @success $($success_case),* + $(, @failure $($failure_case),*)? + ); + } + impl_try_from_bytes_testable!($($tys),* => @success $($success_case),* $(, @failure $($failure_case),*)?); + }; + // Implements for multiple types with no type parameters. + ($($($ty:ty),* => @success $($success_case:expr), * $(, @failure $($failure_case:expr),*)?;)*) => { + $( + impl_try_from_bytes_testable!($($ty),* => @success $($success_case),* $(, @failure $($failure_case),*)*); + )* + }; + // Implements only the methods; caller must invoke this from inside + // an impl block. + (@methods @success $($success_case:expr),* $(, @failure $($failure_case:expr),*)?) 
=> { + fn with_passing_test_cases<F: Fn(Box<Self>)>(_f: F) { + $( + _f(Box::<Self>::from($success_case)); + )* + } + + fn with_failing_test_cases<F: Fn(&mut [u8])>(_f: F) { + $($( + let mut case = $failure_case; + _f(case.as_mut_bytes()); + )*)? + } + }; + } + + impl_try_from_bytes_testable_for_null_pointer_optimization!( + Box<UnsafeCell<NotZerocopy>>, + &'static UnsafeCell<NotZerocopy>, + &'static mut UnsafeCell<NotZerocopy>, + NonNull<UnsafeCell<NotZerocopy>>, + fn(), + FnManyArgs, + extern "C" fn(), + ECFnManyArgs + ); + + macro_rules! bx { + ($e:expr) => { + Box::new($e) + }; + } + + // Note that these impls are only for types which are not `FromBytes`. + // `FromBytes` types are covered by a preceding blanket impl. + impl_try_from_bytes_testable!( + bool => @success true, false, + @failure 2u8, 3u8, 0xFFu8; + char => @success '\u{0}', '\u{D7FF}', '\u{E000}', '\u{10FFFF}', + @failure 0xD800u32, 0xDFFFu32, 0x110000u32; + str => @success "", "hello", "❤️🧡💛💚💙💜", + @failure [0, 159, 146, 150]; + [u8] => @success vec![].into_boxed_slice(), vec![0, 1, 2].into_boxed_slice(); + NonZeroU8, NonZeroI8, NonZeroU16, NonZeroI16, NonZeroU32, + NonZeroI32, NonZeroU64, NonZeroI64, NonZeroU128, NonZeroI128, + NonZeroUsize, NonZeroIsize + => @success Self::new(1).unwrap(), + // Doing this instead of `0` ensures that we always satisfy + // the size and alignment requirements of `Self` (whereas `0` + // may be any integer type with a different size or alignment + // than some `NonZeroXxx` types). + @failure Option::<Self>::None; + [bool; 0] => @success []; + [bool; 1] + => @success [true], [false], + @failure [2u8], [3u8], [0xFFu8]; + [bool] + => @success vec![true, false].into_boxed_slice(), vec![false, true].into_boxed_slice(), + @failure [2u8], [3u8], [0xFFu8], [0u8, 1u8, 2u8]; + Unalign<bool> + => @success Unalign::new(false), Unalign::new(true), + @failure 2u8, 0xFFu8; + ManuallyDrop<bool> + => @success ManuallyDrop::new(false), ManuallyDrop::new(true), + @failure 2u8, 0xFFu8; + ManuallyDrop<[u8]> + => @success bx!(ManuallyDrop::new([])), bx!(ManuallyDrop::new([0u8])), bx!(ManuallyDrop::new([0u8, 1u8])); + ManuallyDrop<[bool]> + => @success bx!(ManuallyDrop::new([])), bx!(ManuallyDrop::new([false])), bx!(ManuallyDrop::new([false, true])), + @failure [2u8], [3u8], [0xFFu8], [0u8, 1u8, 2u8]; + ManuallyDrop<[UnsafeCell<u8>]> + => @success bx!(ManuallyDrop::new([UnsafeCell::new(0)])), bx!(ManuallyDrop::new([UnsafeCell::new(0), UnsafeCell::new(1)])); + ManuallyDrop<[UnsafeCell<bool>]> + => @success bx!(ManuallyDrop::new([UnsafeCell::new(false)])), bx!(ManuallyDrop::new([UnsafeCell::new(false), UnsafeCell::new(true)])), + @failure [2u8], [3u8], [0xFFu8], [0u8, 1u8, 2u8]; + Wrapping<bool> + => @success Wrapping(false), Wrapping(true), + @failure 2u8, 0xFFu8; + *const NotZerocopy + => @success ptr::null::<NotZerocopy>(), + @failure [0x01; mem::size_of::<*const NotZerocopy>()]; + *mut NotZerocopy + => @success ptr::null_mut::<NotZerocopy>(), + @failure [0x01; mem::size_of::<*mut NotZerocopy>()]; + ); + + // Use the trick described in [1] to allow us to call methods + // conditional on certain trait bounds. + // + // In all of these cases, methods return `Option<R>`, where `R` is the + // return type of the method we're conditionally calling. The "real" + // implementations (the ones defined in traits using `&self`) return + // `Some`, and the default implementations (the ones defined as inherent + // methods using `&mut self`) return `None`. 
+ // + // [1] https://github.com/dtolnay/case-studies/blob/master/autoref-specialization/README.md + mod autoref_trick { + use super::*; + + pub(super) struct AutorefWrapper<T: ?Sized>(pub(super) PhantomData<T>); + + pub(super) trait TestIsBitValidShared<T: ?Sized> { + #[allow(clippy::needless_lifetimes)] + fn test_is_bit_valid_shared<'ptr, A: invariant::Reference>( + &self, + candidate: Maybe<'ptr, T, A>, + ) -> Option<bool>; + } + + impl<T: TryFromBytes + Immutable + ?Sized> TestIsBitValidShared<T> for AutorefWrapper<T> { + #[allow(clippy::needless_lifetimes)] + fn test_is_bit_valid_shared<'ptr, A: invariant::Reference>( + &self, + candidate: Maybe<'ptr, T, A>, + ) -> Option<bool> { + Some(T::is_bit_valid(candidate)) + } + } + + pub(super) trait TestTryFromRef<T: ?Sized> { + #[allow(clippy::needless_lifetimes)] + fn test_try_from_ref<'bytes>( + &self, + bytes: &'bytes [u8], + ) -> Option<Option<&'bytes T>>; + } + + impl<T: TryFromBytes + Immutable + KnownLayout + ?Sized> TestTryFromRef<T> for AutorefWrapper<T> { + #[allow(clippy::needless_lifetimes)] + fn test_try_from_ref<'bytes>( + &self, + bytes: &'bytes [u8], + ) -> Option<Option<&'bytes T>> { + Some(T::try_ref_from_bytes(bytes).ok()) + } + } + + pub(super) trait TestTryFromMut<T: ?Sized> { + #[allow(clippy::needless_lifetimes)] + fn test_try_from_mut<'bytes>( + &self, + bytes: &'bytes mut [u8], + ) -> Option<Option<&'bytes mut T>>; + } + + impl<T: TryFromBytes + IntoBytes + KnownLayout + ?Sized> TestTryFromMut<T> for AutorefWrapper<T> { + #[allow(clippy::needless_lifetimes)] + fn test_try_from_mut<'bytes>( + &self, + bytes: &'bytes mut [u8], + ) -> Option<Option<&'bytes mut T>> { + Some(T::try_mut_from_bytes(bytes).ok()) + } + } + + pub(super) trait TestTryReadFrom<T> { + fn test_try_read_from(&self, bytes: &[u8]) -> Option<Option<T>>; + } + + impl<T: TryFromBytes> TestTryReadFrom<T> for AutorefWrapper<T> { + fn test_try_read_from(&self, bytes: &[u8]) -> Option<Option<T>> { + Some(T::try_read_from_bytes(bytes).ok()) + } + } + + pub(super) trait TestAsBytes<T: ?Sized> { + #[allow(clippy::needless_lifetimes)] + fn test_as_bytes<'slf, 't>(&'slf self, t: &'t T) -> Option<&'t [u8]>; + } + + impl<T: IntoBytes + Immutable + ?Sized> TestAsBytes<T> for AutorefWrapper<T> { + #[allow(clippy::needless_lifetimes)] + fn test_as_bytes<'slf, 't>(&'slf self, t: &'t T) -> Option<&'t [u8]> { + Some(t.as_bytes()) + } + } + } + + use autoref_trick::*; + + // Asserts that `$ty` is one of a list of types which are allowed to not + // provide a "real" implementation for `$fn_name`. Since the + // `autoref_trick` machinery fails silently, this allows us to ensure + // that the "default" impls are only being used for types which we + // expect. + // + // Note that, since this is a runtime test, it is possible to have an + // allowlist which is too restrictive if the function in question is + // never called for a particular type. For example, if `as_bytes` is not + // supported for a particular type, and so `test_as_bytes` returns + // `None`, methods such as `test_try_from_ref` may never be called for + // that type. As a result, it's possible that, for example, adding + // `as_bytes` support for a type would cause other allowlist assertions + // to fail. This means that allowlist assertion failures should not + // automatically be taken as a sign of a bug. + macro_rules! assert_on_allowlist { + ($fn_name:ident($ty:ty) $(: $($tys:ty),*)?) => {{ + use core::any::TypeId; + + let allowlist: &[TypeId] = &[ $($(TypeId::of::<$tys>()),*)? 
]; + let allowlist_names: &[&str] = &[ $($(stringify!($tys)),*)? ]; + + let id = TypeId::of::<$ty>(); + assert!(allowlist.contains(&id), "{} is not on allowlist for {}: {:?}", stringify!($ty), stringify!($fn_name), allowlist_names); + }}; + } + + // Asserts that `$ty` implements any `$trait` and doesn't implement any + // `!$trait`. Note that all `$trait`s must come before any `!$trait`s. + // + // For `T: TryFromBytes`, uses `TryFromBytesTestable` to test success + // and failure cases. + macro_rules! assert_impls { + ($ty:ty: TryFromBytes) => { + // "Default" implementations that match the "real" + // implementations defined in the `autoref_trick` module above. + #[allow(unused, non_local_definitions)] + impl AutorefWrapper<$ty> { + #[allow(clippy::needless_lifetimes)] + fn test_is_bit_valid_shared<'ptr, A: invariant::Reference>( + &mut self, + candidate: Maybe<'ptr, $ty, A>, + ) -> Option<bool> { + assert_on_allowlist!( + test_is_bit_valid_shared($ty): + ManuallyDrop<UnsafeCell<()>>, + ManuallyDrop<[UnsafeCell<u8>]>, + ManuallyDrop<[UnsafeCell<bool>]>, + CoreMaybeUninit<NotZerocopy>, + CoreMaybeUninit<UnsafeCell<()>>, + Wrapping<UnsafeCell<()>> + ); + + None + } + + #[allow(clippy::needless_lifetimes)] + fn test_try_from_ref<'bytes>(&mut self, _bytes: &'bytes [u8]) -> Option<Option<&'bytes $ty>> { + assert_on_allowlist!( + test_try_from_ref($ty): + ManuallyDrop<[UnsafeCell<bool>]> + ); + + None + } + + #[allow(clippy::needless_lifetimes)] + fn test_try_from_mut<'bytes>(&mut self, _bytes: &'bytes mut [u8]) -> Option<Option<&'bytes mut $ty>> { + assert_on_allowlist!( + test_try_from_mut($ty): + Option<Box<UnsafeCell<NotZerocopy>>>, + Option<&'static UnsafeCell<NotZerocopy>>, + Option<&'static mut UnsafeCell<NotZerocopy>>, + Option<NonNull<UnsafeCell<NotZerocopy>>>, + Option<fn()>, + Option<FnManyArgs>, + Option<extern "C" fn()>, + Option<ECFnManyArgs>, + *const NotZerocopy, + *mut NotZerocopy + ); + + None + } + + fn test_try_read_from(&mut self, _bytes: &[u8]) -> Option<Option<&$ty>> { + assert_on_allowlist!( + test_try_read_from($ty): + str, + ManuallyDrop<[u8]>, + ManuallyDrop<[bool]>, + ManuallyDrop<[UnsafeCell<bool>]>, + [u8], + [bool] + ); + + None + } + + fn test_as_bytes(&mut self, _t: &$ty) -> Option<&[u8]> { + assert_on_allowlist!( + test_as_bytes($ty): + Option<&'static UnsafeCell<NotZerocopy>>, + Option<&'static mut UnsafeCell<NotZerocopy>>, + Option<NonNull<UnsafeCell<NotZerocopy>>>, + Option<Box<UnsafeCell<NotZerocopy>>>, + Option<fn()>, + Option<FnManyArgs>, + Option<extern "C" fn()>, + Option<ECFnManyArgs>, + CoreMaybeUninit<u8>, + CoreMaybeUninit<NotZerocopy>, + CoreMaybeUninit<UnsafeCell<()>>, + ManuallyDrop<UnsafeCell<()>>, + ManuallyDrop<[UnsafeCell<u8>]>, + ManuallyDrop<[UnsafeCell<bool>]>, + Wrapping<UnsafeCell<()>>, + *const NotZerocopy, + *mut NotZerocopy + ); + + None + } + } + + <$ty as TryFromBytesTestable>::with_passing_test_cases(|mut val| { + // FIXME(#494): These tests only get exercised for types + // which are `IntoBytes`. Once we implement #494, we should + // be able to support non-`IntoBytes` types by zeroing + // padding. + + // We define `w` and `ww` since, in the case of the inherent + // methods, Rust thinks they're both borrowed mutably at the + // same time (given how we use them below). If we just + // defined a single `w` and used it for multiple operations, + // this would conflict. + // + // We `#[allow(unused_mut]` for the cases where the "real" + // impls are used, which take `&self`. 
+ #[allow(unused_mut)] + let (mut w, mut ww) = (AutorefWrapper::<$ty>(PhantomData), AutorefWrapper::<$ty>(PhantomData)); + + let c = Ptr::from_ref(&*val); + let c = c.forget_aligned(); + // SAFETY: FIXME(#899): This is unsound. `$ty` is not + // necessarily `IntoBytes`, but that's the corner we've + // backed ourselves into by using `Ptr::from_ref`. + let c = unsafe { c.assume_initialized() }; + let res = w.test_is_bit_valid_shared(c); + if let Some(res) = res { + assert!(res, "{}::is_bit_valid({:?}) (shared `Ptr`): got false, expected true", stringify!($ty), val); + } + + let c = Ptr::from_mut(&mut *val); + let c = c.forget_aligned(); + // SAFETY: FIXME(#899): This is unsound. `$ty` is not + // necessarily `IntoBytes`, but that's the corner we've + // backed ourselves into by using `Ptr::from_ref`. + let c = unsafe { c.assume_initialized() }; + let res = <$ty as TryFromBytes>::is_bit_valid(c); + assert!(res, "{}::is_bit_valid({:?}) (exclusive `Ptr`): got false, expected true", stringify!($ty), val); + + // `bytes` is `Some(val.as_bytes())` if `$ty: IntoBytes + + // Immutable` and `None` otherwise. + let bytes = w.test_as_bytes(&*val); + + // The inner closure returns + // `Some($ty::try_ref_from_bytes(bytes))` if `$ty: + // Immutable` and `None` otherwise. + let res = bytes.and_then(|bytes| ww.test_try_from_ref(bytes)); + if let Some(res) = res { + assert!(res.is_some(), "{}::try_ref_from_bytes({:?}): got `None`, expected `Some`", stringify!($ty), val); + } + + if let Some(bytes) = bytes { + // We need to get a mutable byte slice, and so we clone + // into a `Vec`. However, we also need these bytes to + // satisfy `$ty`'s alignment requirement, which isn't + // guaranteed for `Vec<u8>`. In order to get around + // this, we create a `Vec` which is twice as long as we + // need. There is guaranteed to be an aligned byte range + // of size `size_of_val(val)` within that range. + let val = &*val; + let size = mem::size_of_val(val); + let align = mem::align_of_val(val); + + let mut vec = bytes.to_vec(); + vec.extend(bytes); + let slc = vec.as_slice(); + let offset = slc.as_ptr().align_offset(align); + let bytes_mut = &mut vec.as_mut_slice()[offset..offset+size]; + bytes_mut.copy_from_slice(bytes); + + let res = ww.test_try_from_mut(bytes_mut); + if let Some(res) = res { + assert!(res.is_some(), "{}::try_mut_from_bytes({:?}): got `None`, expected `Some`", stringify!($ty), val); + } + } + + let res = bytes.and_then(|bytes| ww.test_try_read_from(bytes)); + if let Some(res) = res { + assert!(res.is_some(), "{}::try_read_from_bytes({:?}): got `None`, expected `Some`", stringify!($ty), val); + } + }); + #[allow(clippy::as_conversions)] + <$ty as TryFromBytesTestable>::with_failing_test_cases(|c| { + #[allow(unused_mut)] // For cases where the "real" impls are used, which take `&self`. + let mut w = AutorefWrapper::<$ty>(PhantomData); + + // This is `Some($ty::try_ref_from_bytes(c))` if `$ty: + // Immutable` and `None` otherwise. 
+ let res = w.test_try_from_ref(c); + if let Some(res) = res { + assert!(res.is_none(), "{}::try_ref_from_bytes({:?}): got Some, expected None", stringify!($ty), c); + } + + let res = w.test_try_from_mut(c); + if let Some(res) = res { + assert!(res.is_none(), "{}::try_mut_from_bytes({:?}): got Some, expected None", stringify!($ty), c); + } + + + let res = w.test_try_read_from(c); + if let Some(res) = res { + assert!(res.is_none(), "{}::try_read_from_bytes({:?}): got Some, expected None", stringify!($ty), c); + } + }); + + #[allow(dead_code)] + const _: () = { static_assertions::assert_impl_all!($ty: TryFromBytes); }; + }; + ($ty:ty: $trait:ident) => { + #[allow(dead_code)] + const _: () = { static_assertions::assert_impl_all!($ty: $trait); }; + }; + ($ty:ty: !$trait:ident) => { + #[allow(dead_code)] + const _: () = { static_assertions::assert_not_impl_any!($ty: $trait); }; + }; + ($ty:ty: $($trait:ident),* $(,)? $(!$negative_trait:ident),*) => { + $( + assert_impls!($ty: $trait); + )* + + $( + assert_impls!($ty: !$negative_trait); + )* + }; + } + + // NOTE: The negative impl assertions here are not necessarily + // prescriptive. They merely serve as change detectors to make sure + // we're aware of what trait impls are getting added with a given + // change. Of course, some impls would be invalid (e.g., `bool: + // FromBytes`), and so this change detection is very important. + + assert_impls!( + (): KnownLayout, + Immutable, + TryFromBytes, + FromZeros, + FromBytes, + IntoBytes, + Unaligned + ); + assert_impls!( + u8: KnownLayout, + Immutable, + TryFromBytes, + FromZeros, + FromBytes, + IntoBytes, + Unaligned + ); + assert_impls!( + i8: KnownLayout, + Immutable, + TryFromBytes, + FromZeros, + FromBytes, + IntoBytes, + Unaligned + ); + assert_impls!( + u16: KnownLayout, + Immutable, + TryFromBytes, + FromZeros, + FromBytes, + IntoBytes, + !Unaligned + ); + assert_impls!( + i16: KnownLayout, + Immutable, + TryFromBytes, + FromZeros, + FromBytes, + IntoBytes, + !Unaligned + ); + assert_impls!( + u32: KnownLayout, + Immutable, + TryFromBytes, + FromZeros, + FromBytes, + IntoBytes, + !Unaligned + ); + assert_impls!( + i32: KnownLayout, + Immutable, + TryFromBytes, + FromZeros, + FromBytes, + IntoBytes, + !Unaligned + ); + assert_impls!( + u64: KnownLayout, + Immutable, + TryFromBytes, + FromZeros, + FromBytes, + IntoBytes, + !Unaligned + ); + assert_impls!( + i64: KnownLayout, + Immutable, + TryFromBytes, + FromZeros, + FromBytes, + IntoBytes, + !Unaligned + ); + assert_impls!( + u128: KnownLayout, + Immutable, + TryFromBytes, + FromZeros, + FromBytes, + IntoBytes, + !Unaligned + ); + assert_impls!( + i128: KnownLayout, + Immutable, + TryFromBytes, + FromZeros, + FromBytes, + IntoBytes, + !Unaligned + ); + assert_impls!( + usize: KnownLayout, + Immutable, + TryFromBytes, + FromZeros, + FromBytes, + IntoBytes, + !Unaligned + ); + assert_impls!( + isize: KnownLayout, + Immutable, + TryFromBytes, + FromZeros, + FromBytes, + IntoBytes, + !Unaligned + ); + #[cfg(feature = "float-nightly")] + assert_impls!( + f16: KnownLayout, + Immutable, + TryFromBytes, + FromZeros, + FromBytes, + IntoBytes, + !Unaligned + ); + assert_impls!( + f32: KnownLayout, + Immutable, + TryFromBytes, + FromZeros, + FromBytes, + IntoBytes, + !Unaligned + ); + assert_impls!( + f64: KnownLayout, + Immutable, + TryFromBytes, + FromZeros, + FromBytes, + IntoBytes, + !Unaligned + ); + #[cfg(feature = "float-nightly")] + assert_impls!( + f128: KnownLayout, + Immutable, + TryFromBytes, + FromZeros, + FromBytes, + IntoBytes, + 
!Unaligned + ); + assert_impls!( + bool: KnownLayout, + Immutable, + TryFromBytes, + FromZeros, + IntoBytes, + Unaligned, + !FromBytes + ); + assert_impls!( + char: KnownLayout, + Immutable, + TryFromBytes, + FromZeros, + IntoBytes, + !FromBytes, + !Unaligned + ); + assert_impls!( + str: KnownLayout, + Immutable, + TryFromBytes, + FromZeros, + IntoBytes, + Unaligned, + !FromBytes + ); + + assert_impls!( + NonZeroU8: KnownLayout, + Immutable, + TryFromBytes, + IntoBytes, + Unaligned, + !FromZeros, + !FromBytes + ); + assert_impls!( + NonZeroI8: KnownLayout, + Immutable, + TryFromBytes, + IntoBytes, + Unaligned, + !FromZeros, + !FromBytes + ); + assert_impls!( + NonZeroU16: KnownLayout, + Immutable, + TryFromBytes, + IntoBytes, + !FromBytes, + !Unaligned + ); + assert_impls!( + NonZeroI16: KnownLayout, + Immutable, + TryFromBytes, + IntoBytes, + !FromBytes, + !Unaligned + ); + assert_impls!( + NonZeroU32: KnownLayout, + Immutable, + TryFromBytes, + IntoBytes, + !FromBytes, + !Unaligned + ); + assert_impls!( + NonZeroI32: KnownLayout, + Immutable, + TryFromBytes, + IntoBytes, + !FromBytes, + !Unaligned + ); + assert_impls!( + NonZeroU64: KnownLayout, + Immutable, + TryFromBytes, + IntoBytes, + !FromBytes, + !Unaligned + ); + assert_impls!( + NonZeroI64: KnownLayout, + Immutable, + TryFromBytes, + IntoBytes, + !FromBytes, + !Unaligned + ); + assert_impls!( + NonZeroU128: KnownLayout, + Immutable, + TryFromBytes, + IntoBytes, + !FromBytes, + !Unaligned + ); + assert_impls!( + NonZeroI128: KnownLayout, + Immutable, + TryFromBytes, + IntoBytes, + !FromBytes, + !Unaligned + ); + assert_impls!( + NonZeroUsize: KnownLayout, + Immutable, + TryFromBytes, + IntoBytes, + !FromBytes, + !Unaligned + ); + assert_impls!( + NonZeroIsize: KnownLayout, + Immutable, + TryFromBytes, + IntoBytes, + !FromBytes, + !Unaligned + ); + + assert_impls!(Option<NonZeroU8>: KnownLayout, Immutable, TryFromBytes, FromZeros, FromBytes, IntoBytes, Unaligned); + assert_impls!(Option<NonZeroI8>: KnownLayout, Immutable, TryFromBytes, FromZeros, FromBytes, IntoBytes, Unaligned); + assert_impls!(Option<NonZeroU16>: KnownLayout, Immutable, TryFromBytes, FromZeros, FromBytes, IntoBytes, !Unaligned); + assert_impls!(Option<NonZeroI16>: KnownLayout, Immutable, TryFromBytes, FromZeros, FromBytes, IntoBytes, !Unaligned); + assert_impls!(Option<NonZeroU32>: KnownLayout, Immutable, TryFromBytes, FromZeros, FromBytes, IntoBytes, !Unaligned); + assert_impls!(Option<NonZeroI32>: KnownLayout, Immutable, TryFromBytes, FromZeros, FromBytes, IntoBytes, !Unaligned); + assert_impls!(Option<NonZeroU64>: KnownLayout, Immutable, TryFromBytes, FromZeros, FromBytes, IntoBytes, !Unaligned); + assert_impls!(Option<NonZeroI64>: KnownLayout, Immutable, TryFromBytes, FromZeros, FromBytes, IntoBytes, !Unaligned); + assert_impls!(Option<NonZeroU128>: KnownLayout, Immutable, TryFromBytes, FromZeros, FromBytes, IntoBytes, !Unaligned); + assert_impls!(Option<NonZeroI128>: KnownLayout, Immutable, TryFromBytes, FromZeros, FromBytes, IntoBytes, !Unaligned); + assert_impls!(Option<NonZeroUsize>: KnownLayout, Immutable, TryFromBytes, FromZeros, FromBytes, IntoBytes, !Unaligned); + assert_impls!(Option<NonZeroIsize>: KnownLayout, Immutable, TryFromBytes, FromZeros, FromBytes, IntoBytes, !Unaligned); + + // Implements none of the ZC traits. + struct NotZerocopy; + + #[rustfmt::skip] + type FnManyArgs = fn( + NotZerocopy, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, + ) -> (NotZerocopy, NotZerocopy); + + // Allowed, because we're not actually using this type for FFI. 
+ #[allow(improper_ctypes_definitions)] + #[rustfmt::skip] + type ECFnManyArgs = extern "C" fn( + NotZerocopy, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, + ) -> (NotZerocopy, NotZerocopy); + + #[cfg(feature = "alloc")] + assert_impls!(Option<Box<UnsafeCell<NotZerocopy>>>: KnownLayout, Immutable, TryFromBytes, FromZeros, !FromBytes, !IntoBytes, !Unaligned); + assert_impls!(Option<Box<[UnsafeCell<NotZerocopy>]>>: KnownLayout, !Immutable, !TryFromBytes, !FromZeros, !FromBytes, !IntoBytes, !Unaligned); + assert_impls!(Option<&'static UnsafeCell<NotZerocopy>>: KnownLayout, Immutable, TryFromBytes, FromZeros, !FromBytes, !IntoBytes, !Unaligned); + assert_impls!(Option<&'static [UnsafeCell<NotZerocopy>]>: KnownLayout, Immutable, !TryFromBytes, !FromZeros, !FromBytes, !IntoBytes, !Unaligned); + assert_impls!(Option<&'static mut UnsafeCell<NotZerocopy>>: KnownLayout, Immutable, TryFromBytes, FromZeros, !FromBytes, !IntoBytes, !Unaligned); + assert_impls!(Option<&'static mut [UnsafeCell<NotZerocopy>]>: KnownLayout, Immutable, !TryFromBytes, !FromZeros, !FromBytes, !IntoBytes, !Unaligned); + assert_impls!(Option<NonNull<UnsafeCell<NotZerocopy>>>: KnownLayout, TryFromBytes, FromZeros, Immutable, !FromBytes, !IntoBytes, !Unaligned); + assert_impls!(Option<NonNull<[UnsafeCell<NotZerocopy>]>>: KnownLayout, Immutable, !TryFromBytes, !FromZeros, !FromBytes, !IntoBytes, !Unaligned); + assert_impls!(Option<fn()>: KnownLayout, Immutable, TryFromBytes, FromZeros, !FromBytes, !IntoBytes, !Unaligned); + assert_impls!(Option<FnManyArgs>: KnownLayout, Immutable, TryFromBytes, FromZeros, !FromBytes, !IntoBytes, !Unaligned); + assert_impls!(Option<extern "C" fn()>: KnownLayout, Immutable, TryFromBytes, FromZeros, !FromBytes, !IntoBytes, !Unaligned); + assert_impls!(Option<ECFnManyArgs>: KnownLayout, Immutable, TryFromBytes, FromZeros, !FromBytes, !IntoBytes, !Unaligned); + + assert_impls!(PhantomData<NotZerocopy>: KnownLayout, Immutable, TryFromBytes, FromZeros, FromBytes, IntoBytes, Unaligned); + assert_impls!(PhantomData<UnsafeCell<()>>: KnownLayout, Immutable, TryFromBytes, FromZeros, FromBytes, IntoBytes, Unaligned); + assert_impls!(PhantomData<[u8]>: KnownLayout, Immutable, TryFromBytes, FromZeros, FromBytes, IntoBytes, Unaligned); + + assert_impls!(ManuallyDrop<u8>: KnownLayout, Immutable, TryFromBytes, FromZeros, FromBytes, IntoBytes, Unaligned); + // This test is important because it allows us to test our hand-rolled + // implementation of `<ManuallyDrop<T> as TryFromBytes>::is_bit_valid`. + assert_impls!(ManuallyDrop<bool>: KnownLayout, Immutable, TryFromBytes, FromZeros, IntoBytes, Unaligned, !FromBytes); + assert_impls!(ManuallyDrop<[u8]>: KnownLayout, Immutable, TryFromBytes, FromZeros, FromBytes, IntoBytes, Unaligned); + // This test is important because it allows us to test our hand-rolled + // implementation of `<ManuallyDrop<T> as TryFromBytes>::is_bit_valid`. 
+ assert_impls!(ManuallyDrop<[bool]>: KnownLayout, Immutable, TryFromBytes, FromZeros, IntoBytes, Unaligned, !FromBytes); + assert_impls!(ManuallyDrop<NotZerocopy>: !Immutable, !TryFromBytes, !KnownLayout, !FromZeros, !FromBytes, !IntoBytes, !Unaligned); + assert_impls!(ManuallyDrop<[NotZerocopy]>: KnownLayout, !Immutable, !TryFromBytes, !FromZeros, !FromBytes, !IntoBytes, !Unaligned); + assert_impls!(ManuallyDrop<UnsafeCell<()>>: KnownLayout, TryFromBytes, FromZeros, FromBytes, IntoBytes, Unaligned, !Immutable); + assert_impls!(ManuallyDrop<[UnsafeCell<u8>]>: KnownLayout, TryFromBytes, FromZeros, FromBytes, IntoBytes, Unaligned, !Immutable); + assert_impls!(ManuallyDrop<[UnsafeCell<bool>]>: KnownLayout, TryFromBytes, FromZeros, IntoBytes, Unaligned, !Immutable, !FromBytes); + + assert_impls!(CoreMaybeUninit<u8>: KnownLayout, Immutable, TryFromBytes, FromZeros, FromBytes, Unaligned, !IntoBytes); + assert_impls!(CoreMaybeUninit<NotZerocopy>: KnownLayout, TryFromBytes, FromZeros, FromBytes, !Immutable, !IntoBytes, !Unaligned); + assert_impls!(CoreMaybeUninit<UnsafeCell<()>>: KnownLayout, TryFromBytes, FromZeros, FromBytes, Unaligned, !Immutable, !IntoBytes); + + assert_impls!(Wrapping<u8>: KnownLayout, Immutable, TryFromBytes, FromZeros, FromBytes, IntoBytes, Unaligned); + // This test is important because it allows us to test our hand-rolled + // implementation of `<Wrapping<T> as TryFromBytes>::is_bit_valid`. + assert_impls!(Wrapping<bool>: KnownLayout, Immutable, TryFromBytes, FromZeros, IntoBytes, Unaligned, !FromBytes); + assert_impls!(Wrapping<NotZerocopy>: KnownLayout, !Immutable, !TryFromBytes, !FromZeros, !FromBytes, !IntoBytes, !Unaligned); + assert_impls!(Wrapping<UnsafeCell<()>>: KnownLayout, TryFromBytes, FromZeros, FromBytes, IntoBytes, Unaligned, !Immutable); + + assert_impls!(Unalign<u8>: KnownLayout, Immutable, TryFromBytes, FromZeros, FromBytes, IntoBytes, Unaligned); + // This test is important because it allows us to test our hand-rolled + // implementation of `<Unalign<T> as TryFromBytes>::is_bit_valid`. 
+ assert_impls!(Unalign<bool>: KnownLayout, Immutable, TryFromBytes, FromZeros, IntoBytes, Unaligned, !FromBytes); + assert_impls!(Unalign<NotZerocopy>: KnownLayout, Unaligned, !Immutable, !TryFromBytes, !FromZeros, !FromBytes, !IntoBytes); + + assert_impls!( + [u8]: KnownLayout, + Immutable, + TryFromBytes, + FromZeros, + FromBytes, + IntoBytes, + Unaligned + ); + assert_impls!( + [bool]: KnownLayout, + Immutable, + TryFromBytes, + FromZeros, + IntoBytes, + Unaligned, + !FromBytes + ); + assert_impls!([NotZerocopy]: KnownLayout, !Immutable, !TryFromBytes, !FromZeros, !FromBytes, !IntoBytes, !Unaligned); + assert_impls!( + [u8; 0]: KnownLayout, + Immutable, + TryFromBytes, + FromZeros, + FromBytes, + IntoBytes, + Unaligned, + ); + assert_impls!( + [NotZerocopy; 0]: KnownLayout, + !Immutable, + !TryFromBytes, + !FromZeros, + !FromBytes, + !IntoBytes, + !Unaligned + ); + assert_impls!( + [u8; 1]: KnownLayout, + Immutable, + TryFromBytes, + FromZeros, + FromBytes, + IntoBytes, + Unaligned, + ); + assert_impls!( + [NotZerocopy; 1]: KnownLayout, + !Immutable, + !TryFromBytes, + !FromZeros, + !FromBytes, + !IntoBytes, + !Unaligned + ); + + assert_impls!(*const NotZerocopy: KnownLayout, Immutable, TryFromBytes, FromZeros, !FromBytes, !IntoBytes, !Unaligned); + assert_impls!(*mut NotZerocopy: KnownLayout, Immutable, TryFromBytes, FromZeros, !FromBytes, !IntoBytes, !Unaligned); + assert_impls!(*const [NotZerocopy]: KnownLayout, Immutable, !TryFromBytes, !FromZeros, !FromBytes, !IntoBytes, !Unaligned); + assert_impls!(*mut [NotZerocopy]: KnownLayout, Immutable, !TryFromBytes, !FromZeros, !FromBytes, !IntoBytes, !Unaligned); + assert_impls!(*const dyn Debug: KnownLayout, Immutable, !TryFromBytes, !FromZeros, !FromBytes, !IntoBytes, !Unaligned); + assert_impls!(*mut dyn Debug: KnownLayout, Immutable, !TryFromBytes, !FromZeros, !FromBytes, !IntoBytes, !Unaligned); + + #[cfg(feature = "simd")] + { + #[allow(unused_macros)] + macro_rules! 
test_simd_arch_mod { + ($arch:ident, $($typ:ident),*) => { + { + use core::arch::$arch::{$($typ),*}; + use crate::*; + $( assert_impls!($typ: KnownLayout, Immutable, TryFromBytes, FromZeros, FromBytes, IntoBytes, !Unaligned); )* + } + }; + } + #[cfg(target_arch = "x86")] + test_simd_arch_mod!(x86, __m128, __m128d, __m128i, __m256, __m256d, __m256i); + + #[cfg(all(not(no_zerocopy_simd_x86_avx12_1_89_0), target_arch = "x86"))] + test_simd_arch_mod!(x86, __m512bh, __m512, __m512d, __m512i); + + #[cfg(target_arch = "x86_64")] + test_simd_arch_mod!(x86_64, __m128, __m128d, __m128i, __m256, __m256d, __m256i); + + #[cfg(all(not(no_zerocopy_simd_x86_avx12_1_89_0), target_arch = "x86_64"))] + test_simd_arch_mod!(x86_64, __m512bh, __m512, __m512d, __m512i); + + #[cfg(target_arch = "wasm32")] + test_simd_arch_mod!(wasm32, v128); + + #[cfg(all(feature = "simd-nightly", target_arch = "powerpc"))] + test_simd_arch_mod!( + powerpc, + vector_bool_long, + vector_double, + vector_signed_long, + vector_unsigned_long + ); + + #[cfg(all(feature = "simd-nightly", target_arch = "powerpc64"))] + test_simd_arch_mod!( + powerpc64, + vector_bool_long, + vector_double, + vector_signed_long, + vector_unsigned_long + ); + #[cfg(all(target_arch = "aarch64", not(no_zerocopy_aarch64_simd_1_59_0)))] + #[rustfmt::skip] + test_simd_arch_mod!( + aarch64, float32x2_t, float32x4_t, float64x1_t, float64x2_t, int8x8_t, int8x8x2_t, + int8x8x3_t, int8x8x4_t, int8x16_t, int8x16x2_t, int8x16x3_t, int8x16x4_t, int16x4_t, + int16x8_t, int32x2_t, int32x4_t, int64x1_t, int64x2_t, poly8x8_t, poly8x8x2_t, poly8x8x3_t, + poly8x8x4_t, poly8x16_t, poly8x16x2_t, poly8x16x3_t, poly8x16x4_t, poly16x4_t, poly16x8_t, + poly64x1_t, poly64x2_t, uint8x8_t, uint8x8x2_t, uint8x8x3_t, uint8x8x4_t, uint8x16_t, + uint8x16x2_t, uint8x16x3_t, uint8x16x4_t, uint16x4_t, uint16x4x2_t, uint16x4x3_t, + uint16x4x4_t, uint16x8_t, uint32x2_t, uint32x4_t, uint64x1_t, uint64x2_t + ); + } + } +} diff --git a/vendor/zerocopy/src/layout.rs b/vendor/zerocopy/src/layout.rs new file mode 100644 index 00000000..7ddd3638 --- /dev/null +++ b/vendor/zerocopy/src/layout.rs @@ -0,0 +1,2067 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under the 2-Clause BSD License <LICENSE-BSD or +// https://opensource.org/license/bsd-2-clause>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +use core::{mem, num::NonZeroUsize}; + +use crate::util; + +/// The target pointer width, counted in bits. +const POINTER_WIDTH_BITS: usize = mem::size_of::<usize>() * 8; + +/// The layout of a type which might be dynamically-sized. +/// +/// `DstLayout` describes the layout of sized types, slice types, and "slice +/// DSTs" - ie, those that are known by the type system to have a trailing slice +/// (as distinguished from `dyn Trait` types - such types *might* have a +/// trailing slice type, but the type system isn't aware of it). +/// +/// Note that `DstLayout` does not have any internal invariants, so no guarantee +/// is made that a `DstLayout` conforms to any of Rust's requirements regarding +/// the layout of real Rust types or instances of types. 
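+///
+/// Editorial illustration (not part of the upstream zerocopy documentation):
+/// for a slice DST, the size implied by a given trailing-slice length is the
+/// trailing slice's offset, plus `len * elem_size`, rounded up to the type's
+/// alignment. A standalone sketch of that arithmetic, using a hypothetical
+/// layout with offset 3, element size 1, and alignment 2 (as in the `Foo`
+/// example on `TrailingSliceLayout` below):
+///
+/// ```
+/// fn dst_size(offset: usize, elem_size: usize, align: usize, len: usize) -> usize {
+///     let unpadded = offset + len * elem_size;
+///     // Round up to `align` to account for trailing padding.
+///     unpadded + (align - (unpadded % align)) % align
+/// }
+///
+/// assert_eq!(dst_size(3, 1, 2, 0), 4); // an empty slice is followed by a padding byte
+/// assert_eq!(dst_size(3, 1, 2, 2), 6);
+/// ```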
+#[doc(hidden)] +#[allow(missing_debug_implementations, missing_copy_implementations)] +#[cfg_attr(any(kani, test), derive(Debug, PartialEq, Eq))] +#[derive(Copy, Clone)] +pub struct DstLayout { + pub(crate) align: NonZeroUsize, + pub(crate) size_info: SizeInfo, + // Is it guaranteed statically (without knowing a value's runtime metadata) + // that the top-level type contains no padding? This does *not* apply + // recursively - for example, `[(u8, u16)]` has `statically_shallow_unpadded + // = true` even though this type likely has padding inside each `(u8, u16)`. + pub(crate) statically_shallow_unpadded: bool, +} + +#[cfg_attr(any(kani, test), derive(Debug, PartialEq, Eq))] +#[derive(Copy, Clone)] +pub(crate) enum SizeInfo<E = usize> { + Sized { size: usize }, + SliceDst(TrailingSliceLayout<E>), +} + +#[cfg_attr(any(kani, test), derive(Debug, PartialEq, Eq))] +#[derive(Copy, Clone)] +pub(crate) struct TrailingSliceLayout<E = usize> { + // The offset of the first byte of the trailing slice field. Note that this + // is NOT the same as the minimum size of the type. For example, consider + // the following type: + // + // struct Foo { + // a: u16, + // b: u8, + // c: [u8], + // } + // + // In `Foo`, `c` is at byte offset 3. When `c.len() == 0`, `c` is followed + // by a padding byte. + pub(crate) offset: usize, + // The size of the element type of the trailing slice field. + pub(crate) elem_size: E, +} + +impl SizeInfo { + /// Attempts to create a `SizeInfo` from `Self` in which `elem_size` is a + /// `NonZeroUsize`. If `elem_size` is 0, returns `None`. + #[allow(unused)] + const fn try_to_nonzero_elem_size(&self) -> Option<SizeInfo<NonZeroUsize>> { + Some(match *self { + SizeInfo::Sized { size } => SizeInfo::Sized { size }, + SizeInfo::SliceDst(TrailingSliceLayout { offset, elem_size }) => { + if let Some(elem_size) = NonZeroUsize::new(elem_size) { + SizeInfo::SliceDst(TrailingSliceLayout { offset, elem_size }) + } else { + return None; + } + } + }) + } +} + +#[doc(hidden)] +#[derive(Copy, Clone)] +#[cfg_attr(test, derive(Debug))] +#[allow(missing_debug_implementations)] +pub enum CastType { + Prefix, + Suffix, +} + +#[cfg_attr(test, derive(Debug))] +pub(crate) enum MetadataCastError { + Alignment, + Size, +} + +impl DstLayout { + /// The minimum possible alignment of a type. + const MIN_ALIGN: NonZeroUsize = match NonZeroUsize::new(1) { + Some(min_align) => min_align, + None => const_unreachable!(), + }; + + /// The maximum theoretic possible alignment of a type. + /// + /// For compatibility with future Rust versions, this is defined as the + /// maximum power-of-two that fits into a `usize`. See also + /// [`DstLayout::CURRENT_MAX_ALIGN`]. + pub(crate) const THEORETICAL_MAX_ALIGN: NonZeroUsize = + match NonZeroUsize::new(1 << (POINTER_WIDTH_BITS - 1)) { + Some(max_align) => max_align, + None => const_unreachable!(), + }; + + /// The current, documented max alignment of a type \[1\]. + /// + /// \[1\] Per <https://doc.rust-lang.org/reference/type-layout.html#the-alignment-modifiers>: + /// + /// The alignment value must be a power of two from 1 up to + /// 2<sup>29</sup>. 
+ #[cfg(not(kani))] + #[cfg(not(target_pointer_width = "16"))] + pub(crate) const CURRENT_MAX_ALIGN: NonZeroUsize = match NonZeroUsize::new(1 << 28) { + Some(max_align) => max_align, + None => const_unreachable!(), + }; + + #[cfg(not(kani))] + #[cfg(target_pointer_width = "16")] + pub(crate) const CURRENT_MAX_ALIGN: NonZeroUsize = match NonZeroUsize::new(1 << 15) { + Some(max_align) => max_align, + None => const_unreachable!(), + }; + + /// Assumes that this layout lacks static shallow padding. + /// + /// # Panics + /// + /// This method does not panic. + /// + /// # Safety + /// + /// If `self` describes the size and alignment of type that lacks static + /// shallow padding, unsafe code may assume that the result of this method + /// accurately reflects the size, alignment, and lack of static shallow + /// padding of that type. + const fn assume_shallow_unpadded(self) -> Self { + Self { statically_shallow_unpadded: true, ..self } + } + + /// Constructs a `DstLayout` for a zero-sized type with `repr_align` + /// alignment (or 1). If `repr_align` is provided, then it must be a power + /// of two. + /// + /// # Panics + /// + /// This function panics if the supplied `repr_align` is not a power of two. + /// + /// # Safety + /// + /// Unsafe code may assume that the contract of this function is satisfied. + #[doc(hidden)] + #[must_use] + #[inline] + pub const fn new_zst(repr_align: Option<NonZeroUsize>) -> DstLayout { + let align = match repr_align { + Some(align) => align, + None => Self::MIN_ALIGN, + }; + + const_assert!(align.get().is_power_of_two()); + + DstLayout { + align, + size_info: SizeInfo::Sized { size: 0 }, + statically_shallow_unpadded: true, + } + } + + /// Constructs a `DstLayout` which describes `T` and assumes `T` may contain + /// padding. + /// + /// # Safety + /// + /// Unsafe code may assume that `DstLayout` is the correct layout for `T`. + #[doc(hidden)] + #[must_use] + #[inline] + pub const fn for_type<T>() -> DstLayout { + // SAFETY: `align` is correct by construction. `T: Sized`, and so it is + // sound to initialize `size_info` to `SizeInfo::Sized { size }`; the + // `size` field is also correct by construction. `unpadded` can safely + // default to `false`. + DstLayout { + align: match NonZeroUsize::new(mem::align_of::<T>()) { + Some(align) => align, + None => const_unreachable!(), + }, + size_info: SizeInfo::Sized { size: mem::size_of::<T>() }, + statically_shallow_unpadded: false, + } + } + + /// Constructs a `DstLayout` which describes a `T` that does not contain + /// padding. + /// + /// # Safety + /// + /// Unsafe code may assume that `DstLayout` is the correct layout for `T`. + #[doc(hidden)] + #[must_use] + #[inline] + pub const fn for_unpadded_type<T>() -> DstLayout { + Self::for_type::<T>().assume_shallow_unpadded() + } + + /// Constructs a `DstLayout` which describes `[T]`. + /// + /// # Safety + /// + /// Unsafe code may assume that `DstLayout` is the correct layout for `[T]`. + pub(crate) const fn for_slice<T>() -> DstLayout { + // SAFETY: The alignment of a slice is equal to the alignment of its + // element type, and so `align` is initialized correctly. + // + // Since this is just a slice type, there is no offset between the + // beginning of the type and the beginning of the slice, so it is + // correct to set `offset: 0`. The `elem_size` is correct by + // construction. Since `[T]` is a (degenerate case of a) slice DST, it + // is correct to initialize `size_info` to `SizeInfo::SliceDst`. 
+ DstLayout { + align: match NonZeroUsize::new(mem::align_of::<T>()) { + Some(align) => align, + None => const_unreachable!(), + }, + size_info: SizeInfo::SliceDst(TrailingSliceLayout { + offset: 0, + elem_size: mem::size_of::<T>(), + }), + statically_shallow_unpadded: true, + } + } + + /// Constructs a complete `DstLayout` reflecting a `repr(C)` struct with the + /// given alignment modifiers and fields. + /// + /// This method cannot be used to match the layout of a record with the + /// default representation, as that representation is mostly unspecified. + /// + /// # Safety + /// + /// For any definition of a `repr(C)` struct, if this method is invoked with + /// alignment modifiers and fields corresponding to that definition, the + /// resulting `DstLayout` will correctly encode the layout of that struct. + /// + /// We make no guarantees to the behavior of this method when it is invoked + /// with arguments that cannot correspond to a valid `repr(C)` struct. + #[must_use] + #[inline] + pub const fn for_repr_c_struct( + repr_align: Option<NonZeroUsize>, + repr_packed: Option<NonZeroUsize>, + fields: &[DstLayout], + ) -> DstLayout { + let mut layout = DstLayout::new_zst(repr_align); + + let mut i = 0; + #[allow(clippy::arithmetic_side_effects)] + while i < fields.len() { + #[allow(clippy::indexing_slicing)] + let field = fields[i]; + layout = layout.extend(field, repr_packed); + i += 1; + } + + layout = layout.pad_to_align(); + + // SAFETY: `layout` accurately describes the layout of a `repr(C)` + // struct with `repr_align` or `repr_packed` alignment modifications and + // the given `fields`. The `layout` is constructed using a sequence of + // invocations of `DstLayout::{new_zst,extend,pad_to_align}`. The + // documentation of these items vows that invocations in this manner + // will accurately describe a type, so long as: + // + // - that type is `repr(C)`, + // - its fields are enumerated in the order they appear, + // - the presence of `repr_align` and `repr_packed` are correctly accounted for. + // + // We respect all three of these preconditions above. + layout + } + + /// Like `Layout::extend`, this creates a layout that describes a record + /// whose layout consists of `self` followed by `next` that includes the + /// necessary inter-field padding, but not any trailing padding. + /// + /// In order to match the layout of a `#[repr(C)]` struct, this method + /// should be invoked for each field in declaration order. To add trailing + /// padding, call `DstLayout::pad_to_align` after extending the layout for + /// all fields. If `self` corresponds to a type marked with + /// `repr(packed(N))`, then `repr_packed` should be set to `Some(N)`, + /// otherwise `None`. + /// + /// This method cannot be used to match the layout of a record with the + /// default representation, as that representation is mostly unspecified. + /// + /// # Safety + /// + /// If a (potentially hypothetical) valid `repr(C)` Rust type begins with + /// fields whose layout are `self`, and those fields are immediately + /// followed by a field whose layout is `field`, then unsafe code may rely + /// on `self.extend(field, repr_packed)` producing a layout that correctly + /// encompasses those two components. + /// + /// We make no guarantees to the behavior of this method if these fragments + /// cannot appear in a valid Rust type (e.g., the concatenation of the + /// layouts would lead to a size larger than `isize::MAX`). 
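+    ///
+    /// Editorial illustration (not part of the upstream zerocopy documentation):
+    /// the `repr(C)` placement rule implemented by `extend` is "round the running
+    /// size up to the (possibly `packed`-clamped) alignment of the next field,
+    /// then add that field's size", with `pad_to_align` later adding any trailing
+    /// padding. A standalone sketch of that rule (ignoring `repr(packed)`),
+    /// checked against the compiler for a hypothetical `#[repr(C)]` struct:
+    ///
+    /// ```
+    /// fn padding_needed_for(len: usize, align: usize) -> usize {
+    ///     (align - (len % align)) % align
+    /// }
+    ///
+    /// #[repr(C)]
+    /// struct S { a: u8, b: u32, c: u16 }
+    ///
+    /// let fields = [
+    ///     (core::mem::size_of::<u8>(), core::mem::align_of::<u8>()),
+    ///     (core::mem::size_of::<u32>(), core::mem::align_of::<u32>()),
+    ///     (core::mem::size_of::<u16>(), core::mem::align_of::<u16>()),
+    /// ];
+    /// let (mut size, mut align) = (0, 1);
+    /// for (field_size, field_align) in fields {
+    ///     size += padding_needed_for(size, field_align); // inter-field padding (`extend`)
+    ///     size += field_size;
+    ///     align = core::cmp::max(align, field_align);
+    /// }
+    /// size += padding_needed_for(size, align); // trailing padding (`pad_to_align`)
+    ///
+    /// assert_eq!(size, core::mem::size_of::<S>());
+    /// assert_eq!(align, core::mem::align_of::<S>());
+    /// ```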
+ #[doc(hidden)] + #[must_use] + #[inline] + pub const fn extend(self, field: DstLayout, repr_packed: Option<NonZeroUsize>) -> Self { + use util::{max, min, padding_needed_for}; + + // If `repr_packed` is `None`, there are no alignment constraints, and + // the value can be defaulted to `THEORETICAL_MAX_ALIGN`. + let max_align = match repr_packed { + Some(max_align) => max_align, + None => Self::THEORETICAL_MAX_ALIGN, + }; + + const_assert!(max_align.get().is_power_of_two()); + + // We use Kani to prove that this method is robust to future increases + // in Rust's maximum allowed alignment. However, if such a change ever + // actually occurs, we'd like to be notified via assertion failures. + #[cfg(not(kani))] + { + const_debug_assert!(self.align.get() <= DstLayout::CURRENT_MAX_ALIGN.get()); + const_debug_assert!(field.align.get() <= DstLayout::CURRENT_MAX_ALIGN.get()); + if let Some(repr_packed) = repr_packed { + const_debug_assert!(repr_packed.get() <= DstLayout::CURRENT_MAX_ALIGN.get()); + } + } + + // The field's alignment is clamped by `repr_packed` (i.e., the + // `repr(packed(N))` attribute, if any) [1]. + // + // [1] Per https://doc.rust-lang.org/reference/type-layout.html#the-alignment-modifiers: + // + // The alignments of each field, for the purpose of positioning + // fields, is the smaller of the specified alignment and the alignment + // of the field's type. + let field_align = min(field.align, max_align); + + // The struct's alignment is the maximum of its previous alignment and + // `field_align`. + let align = max(self.align, field_align); + + let (interfield_padding, size_info) = match self.size_info { + // If the layout is already a DST, we panic; DSTs cannot be extended + // with additional fields. + SizeInfo::SliceDst(..) => const_panic!("Cannot extend a DST with additional fields."), + + SizeInfo::Sized { size: preceding_size } => { + // Compute the minimum amount of inter-field padding needed to + // satisfy the field's alignment, and offset of the trailing + // field. [1] + // + // [1] Per https://doc.rust-lang.org/reference/type-layout.html#the-alignment-modifiers: + // + // Inter-field padding is guaranteed to be the minimum + // required in order to satisfy each field's (possibly + // altered) alignment. + let padding = padding_needed_for(preceding_size, field_align); + + // This will not panic (and is proven to not panic, with Kani) + // if the layout components can correspond to a leading layout + // fragment of a valid Rust type, but may panic otherwise (e.g., + // combining or aligning the components would create a size + // exceeding `isize::MAX`). + let offset = match preceding_size.checked_add(padding) { + Some(offset) => offset, + None => const_panic!("Adding padding to `self`'s size overflows `usize`."), + }; + + ( + padding, + match field.size_info { + SizeInfo::Sized { size: field_size } => { + // If the trailing field is sized, the resulting layout + // will be sized. Its size will be the sum of the + // preceding layout, the size of the new field, and the + // size of inter-field padding between the two. + // + // This will not panic (and is proven with Kani to not + // panic) if the layout components can correspond to a + // leading layout fragment of a valid Rust type, but may + // panic otherwise (e.g., combining or aligning the + // components would create a size exceeding + // `usize::MAX`). 
+ let size = match offset.checked_add(field_size) { + Some(size) => size, + None => const_panic!("`field` cannot be appended without the total size overflowing `usize`"), + }; + SizeInfo::Sized { size } + } + SizeInfo::SliceDst(TrailingSliceLayout { + offset: trailing_offset, + elem_size, + }) => { + // If the trailing field is dynamically sized, so too + // will the resulting layout. The offset of the trailing + // slice component is the sum of the offset of the + // trailing field and the trailing slice offset within + // that field. + // + // This will not panic (and is proven with Kani to not + // panic) if the layout components can correspond to a + // leading layout fragment of a valid Rust type, but may + // panic otherwise (e.g., combining or aligning the + // components would create a size exceeding + // `usize::MAX`). + let offset = match offset.checked_add(trailing_offset) { + Some(offset) => offset, + None => const_panic!("`field` cannot be appended without the total size overflowing `usize`"), + }; + SizeInfo::SliceDst(TrailingSliceLayout { offset, elem_size }) + } + }, + ) + } + }; + + let statically_shallow_unpadded = self.statically_shallow_unpadded + && field.statically_shallow_unpadded + && interfield_padding == 0; + + DstLayout { align, size_info, statically_shallow_unpadded } + } + + /// Like `Layout::pad_to_align`, this routine rounds the size of this layout + /// up to the nearest multiple of this type's alignment or `repr_packed` + /// (whichever is less). This method leaves DST layouts unchanged, since the + /// trailing padding of DSTs is computed at runtime. + /// + /// The accompanying boolean is `true` if the resulting composition of + /// fields necessitated static (as opposed to dynamic) padding; otherwise + /// `false`. + /// + /// In order to match the layout of a `#[repr(C)]` struct, this method + /// should be invoked after the invocations of [`DstLayout::extend`]. If + /// `self` corresponds to a type marked with `repr(packed(N))`, then + /// `repr_packed` should be set to `Some(N)`, otherwise `None`. + /// + /// This method cannot be used to match the layout of a record with the + /// default representation, as that representation is mostly unspecified. + /// + /// # Safety + /// + /// If a (potentially hypothetical) valid `repr(C)` type begins with fields + /// whose layout are `self` followed only by zero or more bytes of trailing + /// padding (not included in `self`), then unsafe code may rely on + /// `self.pad_to_align(repr_packed)` producing a layout that correctly + /// encapsulates the layout of that type. + /// + /// We make no guarantees to the behavior of this method if `self` cannot + /// appear in a valid Rust type (e.g., because the addition of trailing + /// padding would lead to a size larger than `isize::MAX`). + #[doc(hidden)] + #[must_use] + #[inline] + pub const fn pad_to_align(self) -> Self { + use util::padding_needed_for; + + let (static_padding, size_info) = match self.size_info { + // For sized layouts, we add the minimum amount of trailing padding + // needed to satisfy alignment. + SizeInfo::Sized { size: unpadded_size } => { + let padding = padding_needed_for(unpadded_size, self.align); + let size = match unpadded_size.checked_add(padding) { + Some(size) => size, + None => const_panic!("Adding padding caused size to overflow `usize`."), + }; + (padding, SizeInfo::Sized { size }) + } + // For DST layouts, trailing padding depends on the length of the + // trailing DST and is computed at runtime. 
This does not alter the + // offset or element size of the layout, so we leave `size_info` + // unchanged. + size_info @ SizeInfo::SliceDst(_) => (0, size_info), + }; + + let statically_shallow_unpadded = self.statically_shallow_unpadded && static_padding == 0; + + DstLayout { align: self.align, size_info, statically_shallow_unpadded } + } + + /// Produces `true` if `self` requires static padding; otherwise `false`. + #[must_use] + #[inline(always)] + pub const fn requires_static_padding(self) -> bool { + !self.statically_shallow_unpadded + } + + /// Produces `true` if there exists any metadata for which a type of layout + /// `self` would require dynamic trailing padding; otherwise `false`. + #[must_use] + #[inline(always)] + pub const fn requires_dynamic_padding(self) -> bool { + // A `% self.align.get()` cannot panic, since `align` is non-zero. + #[allow(clippy::arithmetic_side_effects)] + match self.size_info { + SizeInfo::Sized { .. } => false, + SizeInfo::SliceDst(trailing_slice_layout) => { + // SAFETY: This predicate is formally proved sound by + // `proofs::prove_requires_dynamic_padding`. + trailing_slice_layout.offset % self.align.get() != 0 + || trailing_slice_layout.elem_size % self.align.get() != 0 + } + } + } + + /// Validates that a cast is sound from a layout perspective. + /// + /// Validates that the size and alignment requirements of a type with the + /// layout described in `self` would not be violated by performing a + /// `cast_type` cast from a pointer with address `addr` which refers to a + /// memory region of size `bytes_len`. + /// + /// If the cast is valid, `validate_cast_and_convert_metadata` returns + /// `(elems, split_at)`. If `self` describes a dynamically-sized type, then + /// `elems` is the maximum number of trailing slice elements for which a + /// cast would be valid (for sized types, `elem` is meaningless and should + /// be ignored). `split_at` is the index at which to split the memory region + /// in order for the prefix (suffix) to contain the result of the cast, and + /// in order for the remaining suffix (prefix) to contain the leftover + /// bytes. + /// + /// There are three conditions under which a cast can fail: + /// - The smallest possible value for the type is larger than the provided + /// memory region + /// - A prefix cast is requested, and `addr` does not satisfy `self`'s + /// alignment requirement + /// - A suffix cast is requested, and `addr + bytes_len` does not satisfy + /// `self`'s alignment requirement (as a consequence, since all instances + /// of the type are a multiple of its alignment, no size for the type will + /// result in a starting address which is properly aligned) + /// + /// # Safety + /// + /// The caller may assume that this implementation is correct, and may rely + /// on that assumption for the soundness of their code. In particular, the + /// caller may assume that, if `validate_cast_and_convert_metadata` returns + /// `Some((elems, split_at))`, then: + /// - A pointer to the type (for dynamically sized types, this includes + /// `elems` as its pointer metadata) describes an object of size `size <= + /// bytes_len` + /// - If this is a prefix cast: + /// - `addr` satisfies `self`'s alignment + /// - `size == split_at` + /// - If this is a suffix cast: + /// - `split_at == bytes_len - size` + /// - `addr + split_at` satisfies `self`'s alignment + /// + /// Note that this method does *not* ensure that a pointer constructed from + /// its return values will be a valid pointer. 
In particular, this method + /// does not reason about `isize` overflow, which is a requirement of many + /// Rust pointer APIs, and may at some point be determined to be a validity + /// invariant of pointer types themselves. This should never be a problem so + /// long as the arguments to this method are derived from a known-valid + /// pointer (e.g., one derived from a safe Rust reference), but it is + /// nonetheless the caller's responsibility to justify that pointer + /// arithmetic will not overflow based on a safety argument *other than* the + /// mere fact that this method returned successfully. + /// + /// # Panics + /// + /// `validate_cast_and_convert_metadata` will panic if `self` describes a + /// DST whose trailing slice element is zero-sized. + /// + /// If `addr + bytes_len` overflows `usize`, + /// `validate_cast_and_convert_metadata` may panic, or it may return + /// incorrect results. No guarantees are made about when + /// `validate_cast_and_convert_metadata` will panic. The caller should not + /// rely on `validate_cast_and_convert_metadata` panicking in any particular + /// condition, even if `debug_assertions` are enabled. + #[allow(unused)] + #[inline(always)] + pub(crate) const fn validate_cast_and_convert_metadata( + &self, + addr: usize, + bytes_len: usize, + cast_type: CastType, + ) -> Result<(usize, usize), MetadataCastError> { + // `debug_assert!`, but with `#[allow(clippy::arithmetic_side_effects)]`. + macro_rules! __const_debug_assert { + ($e:expr $(, $msg:expr)?) => { + const_debug_assert!({ + #[allow(clippy::arithmetic_side_effects)] + let e = $e; + e + } $(, $msg)?); + }; + } + + // Note that, in practice, `self` is always a compile-time constant. We + // do this check earlier than needed to ensure that we always panic as a + // result of bugs in the program (such as calling this function on an + // invalid type) instead of allowing this panic to be hidden if the cast + // would have failed anyway for runtime reasons (such as a too-small + // memory region). + // + // FIXME(#67): Once our MSRV is 1.65, use let-else: + // https://blog.rust-lang.org/2022/11/03/Rust-1.65.0.html#let-else-statements + let size_info = match self.size_info.try_to_nonzero_elem_size() { + Some(size_info) => size_info, + None => const_panic!("attempted to cast to slice type with zero-sized element"), + }; + + // Precondition + __const_debug_assert!( + addr.checked_add(bytes_len).is_some(), + "`addr` + `bytes_len` > usize::MAX" + ); + + // Alignment checks go in their own block to avoid introducing variables + // into the top-level scope. + { + // We check alignment for `addr` (for prefix casts) or `addr + + // bytes_len` (for suffix casts). For a prefix cast, the correctness + // of this check is trivial - `addr` is the address the object will + // live at. + // + // For a suffix cast, we know that all valid sizes for the type are + // a multiple of the alignment (and by safety precondition, we know + // `DstLayout` may only describe valid Rust types). Thus, a + // validly-sized instance which lives at a validly-aligned address + // must also end at a validly-aligned address. Thus, if the end + // address for a suffix cast (`addr + bytes_len`) is not aligned, + // then no valid start address will be aligned either. + let offset = match cast_type { + CastType::Prefix => 0, + CastType::Suffix => bytes_len, + }; + + // Addition is guaranteed not to overflow because `offset <= + // bytes_len`, and `addr + bytes_len <= usize::MAX` is a + // precondition of this method. 
Modulus is guaranteed not to divide + // by 0 because `align` is non-zero. + #[allow(clippy::arithmetic_side_effects)] + if (addr + offset) % self.align.get() != 0 { + return Err(MetadataCastError::Alignment); + } + } + + let (elems, self_bytes) = match size_info { + SizeInfo::Sized { size } => { + if size > bytes_len { + return Err(MetadataCastError::Size); + } + (0, size) + } + SizeInfo::SliceDst(TrailingSliceLayout { offset, elem_size }) => { + // Calculate the maximum number of bytes that could be consumed + // - any number of bytes larger than this will either not be a + // multiple of the alignment, or will be larger than + // `bytes_len`. + let max_total_bytes = + util::round_down_to_next_multiple_of_alignment(bytes_len, self.align); + // Calculate the maximum number of bytes that could be consumed + // by the trailing slice. + // + // FIXME(#67): Once our MSRV is 1.65, use let-else: + // https://blog.rust-lang.org/2022/11/03/Rust-1.65.0.html#let-else-statements + let max_slice_and_padding_bytes = match max_total_bytes.checked_sub(offset) { + Some(max) => max, + // `bytes_len` too small even for 0 trailing slice elements. + None => return Err(MetadataCastError::Size), + }; + + // Calculate the number of elements that fit in + // `max_slice_and_padding_bytes`; any remaining bytes will be + // considered padding. + // + // Guaranteed not to divide by zero: `elem_size` is non-zero. + #[allow(clippy::arithmetic_side_effects)] + let elems = max_slice_and_padding_bytes / elem_size.get(); + // Guaranteed not to overflow on multiplication: `usize::MAX >= + // max_slice_and_padding_bytes >= (max_slice_and_padding_bytes / + // elem_size) * elem_size`. + // + // Guaranteed not to overflow on addition: + // - max_slice_and_padding_bytes == max_total_bytes - offset + // - elems * elem_size <= max_slice_and_padding_bytes == max_total_bytes - offset + // - elems * elem_size + offset <= max_total_bytes <= usize::MAX + #[allow(clippy::arithmetic_side_effects)] + let without_padding = offset + elems * elem_size.get(); + // `self_bytes` is equal to the offset bytes plus the bytes + // consumed by the trailing slice plus any padding bytes + // required to satisfy the alignment. Note that we have computed + // the maximum number of trailing slice elements that could fit + // in `self_bytes`, so any padding is guaranteed to be less than + // the size of an extra element. + // + // Guaranteed not to overflow: + // - By previous comment: without_padding == elems * elem_size + + // offset <= max_total_bytes + // - By construction, `max_total_bytes` is a multiple of + // `self.align`. + // - At most, adding padding needed to round `without_padding` + // up to the next multiple of the alignment will bring + // `self_bytes` up to `max_total_bytes`. + #[allow(clippy::arithmetic_side_effects)] + let self_bytes = + without_padding + util::padding_needed_for(without_padding, self.align); + (elems, self_bytes) + } + }; + + __const_debug_assert!(self_bytes <= bytes_len); + + let split_at = match cast_type { + CastType::Prefix => self_bytes, + // Guaranteed not to underflow: + // - In the `Sized` branch, only returns `size` if `size <= + // bytes_len`. + // - In the `SliceDst` branch, calculates `self_bytes <= + // max_toatl_bytes`, which is upper-bounded by `bytes_len`. 
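+            // Editorial worked example (an illustration, not from the upstream
+            // sources): for a `SliceDst` layout with `offset: 8`, `elem_size: 4`,
+            // and `align: 4`, a `Prefix` cast over `bytes_len = 22` at a
+            // 4-aligned `addr` gives `max_total_bytes = 20`, `elems =
+            // (20 - 8) / 4 = 3`, and `self_bytes = 20`, so the method returns
+            // `(elems, split_at) = (3, 20)`, leaving 2 suffix bytes. A `Suffix`
+            // cast additionally requires `addr + bytes_len` to be 4-aligned and
+            // splits at `bytes_len - self_bytes` instead.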
+ #[allow(clippy::arithmetic_side_effects)] + CastType::Suffix => bytes_len - self_bytes, + }; + + Ok((elems, split_at)) + } +} + +pub(crate) use cast_from_raw::cast_from_raw; +mod cast_from_raw { + use crate::{pointer::PtrInner, *}; + + /// Implements [`<Dst as SizeEq<Src>>::cast_from_raw`][cast_from_raw]. + /// + /// # PME + /// + /// Generates a post-monomorphization error if it is not possible to satisfy + /// the soundness conditions of [`SizeEq::cast_from_raw`][cast_from_raw] + /// for `Src` and `Dst`. + /// + /// [cast_from_raw]: crate::pointer::SizeEq::cast_from_raw + // + // FIXME(#1817): Support Sized->Unsized and Unsized->Sized casts + pub(crate) fn cast_from_raw<Src, Dst>(src: PtrInner<'_, Src>) -> PtrInner<'_, Dst> + where + Src: KnownLayout<PointerMetadata = usize> + ?Sized, + Dst: KnownLayout<PointerMetadata = usize> + ?Sized, + { + // At compile time (specifically, post-monomorphization time), we need + // to compute two things: + // - Whether, given *any* `*Src`, it is possible to construct a `*Dst` + // which addresses the same number of bytes (ie, whether, for any + // `Src` pointer metadata, there exists `Dst` pointer metadata that + // addresses the same number of bytes) + // - If this is possible, any information necessary to perform the + // `Src`->`Dst` metadata conversion at runtime. + // + // Assume that `Src` and `Dst` are slice DSTs, and define: + // - `S_OFF = Src::LAYOUT.size_info.offset` + // - `S_ELEM = Src::LAYOUT.size_info.elem_size` + // - `D_OFF = Dst::LAYOUT.size_info.offset` + // - `D_ELEM = Dst::LAYOUT.size_info.elem_size` + // + // We are trying to solve the following equation: + // + // D_OFF + d_meta * D_ELEM = S_OFF + s_meta * S_ELEM + // + // At runtime, we will be attempting to compute `d_meta`, given `s_meta` + // (a runtime value) and all other parameters (which are compile-time + // values). We can solve like so: + // + // D_OFF + d_meta * D_ELEM = S_OFF + s_meta * S_ELEM + // + // d_meta * D_ELEM = S_OFF - D_OFF + s_meta * S_ELEM + // + // d_meta = (S_OFF - D_OFF + s_meta * S_ELEM)/D_ELEM + // + // Since `d_meta` will be a `usize`, we need the right-hand side to be + // an integer, and this needs to hold for *any* value of `s_meta` (in + // order for our conversion to be infallible - ie, to not have to reject + // certain values of `s_meta` at runtime). This means that: + // - `s_meta * S_ELEM` must be a multiple of `D_ELEM` + // - Since this must hold for any value of `s_meta`, `S_ELEM` must be a + // multiple of `D_ELEM` + // - `S_OFF - D_OFF` must be a multiple of `D_ELEM` + // + // Thus, let `OFFSET_DELTA_ELEMS = (S_OFF - D_OFF)/D_ELEM` and + // `ELEM_MULTIPLE = S_ELEM/D_ELEM`. We can rewrite the above expression + // as: + // + // d_meta = (S_OFF - D_OFF + s_meta * S_ELEM)/D_ELEM + // + // d_meta = OFFSET_DELTA_ELEMS + s_meta * ELEM_MULTIPLE + // + // Thus, we just need to compute the following and confirm that they + // have integer solutions in order to both a) determine whether + // infallible `Src` -> `Dst` casts are possible and, b) pre-compute the + // parameters necessary to perform those casts at runtime. These + // parameters are encapsulated in `CastParams`, which acts as a witness + // that such infallible casts are possible. + + /// The parameters required in order to perform a pointer cast from + /// `Src` to `Dst` as described above. + /// + /// These are a compile-time function of the layouts of `Src` and `Dst`. 
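+        ///
+        /// Editorial worked example (an illustration, not from the upstream
+        /// documentation): with `S_OFF = 8`, `S_ELEM = 4`, `D_OFF = 0`, and
+        /// `D_ELEM = 2`, we get `OFFSET_DELTA_ELEMS = (8 - 0) / 2 = 4` and
+        /// `ELEM_MULTIPLE = 4 / 2 = 2`, so `s_meta = 5` (a 28-byte `Src`)
+        /// converts to `d_meta = 4 + 5 * 2 = 14` (also 28 bytes of `Dst`).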
+ /// + /// # Safety + /// + /// `offset_delta_elems` and `elem_multiple` must be valid as described + /// above. + /// + /// `Src`'s alignment must not be smaller than `Dst`'s alignment. + #[derive(Copy, Clone)] + struct CastParams { + offset_delta_elems: usize, + elem_multiple: usize, + } + + impl CastParams { + const fn try_compute(src: &DstLayout, dst: &DstLayout) -> Option<CastParams> { + if src.align.get() < dst.align.get() { + return None; + } + + let (src, dst) = if let (SizeInfo::SliceDst(src), SizeInfo::SliceDst(dst)) = + (src.size_info, dst.size_info) + { + (src, dst) + } else { + return None; + }; + + let offset_delta = if let Some(od) = src.offset.checked_sub(dst.offset) { + od + } else { + return None; + }; + + let dst_elem_size = if let Some(e) = NonZeroUsize::new(dst.elem_size) { + e + } else { + return None; + }; + + // PANICS: `dst_elem_size: NonZeroUsize`, so this won't div by zero. + #[allow(clippy::arithmetic_side_effects)] + let delta_mod_other_elem = offset_delta % dst_elem_size.get(); + + // PANICS: `dst_elem_size: NonZeroUsize`, so this won't div by zero. + #[allow(clippy::arithmetic_side_effects)] + let elem_remainder = src.elem_size % dst_elem_size.get(); + + if delta_mod_other_elem != 0 || src.elem_size < dst.elem_size || elem_remainder != 0 + { + return None; + } + + // PANICS: `dst_elem_size: NonZeroUsize`, so this won't div by zero. + #[allow(clippy::arithmetic_side_effects)] + let offset_delta_elems = offset_delta / dst_elem_size.get(); + + // PANICS: `dst_elem_size: NonZeroUsize`, so this won't div by zero. + #[allow(clippy::arithmetic_side_effects)] + let elem_multiple = src.elem_size / dst_elem_size.get(); + + // SAFETY: We checked above that `src.align >= dst.align`. + Some(CastParams { + // SAFETY: We checked above that this is an exact ratio. + offset_delta_elems, + // SAFETY: We checked above that this is an exact ratio. + elem_multiple, + }) + } + + /// # Safety + /// + /// `src_meta` describes a `Src` whose size is no larger than + /// `isize::MAX`. + /// + /// The returned metadata describes a `Dst` of the same size as the + /// original `Src`. + unsafe fn cast_metadata(self, src_meta: usize) -> usize { + #[allow(unused)] + use crate::util::polyfills::*; + + // SAFETY: `self` is a witness that the following equation + // holds: + // + // D_OFF + d_meta * D_ELEM = S_OFF + s_meta * S_ELEM + // + // Since the caller promises that `src_meta` is valid `Src` + // metadata, this math will not overflow, and the returned value + // will describe a `Dst` of the same size. + #[allow(unstable_name_collisions, clippy::multiple_unsafe_ops_per_block)] + unsafe { + self.offset_delta_elems + .unchecked_add(src_meta.unchecked_mul(self.elem_multiple)) + } + } + } + + trait Params<Src: ?Sized> { + const CAST_PARAMS: CastParams; + } + + impl<Src, Dst> Params<Src> for Dst + where + Src: KnownLayout + ?Sized, + Dst: KnownLayout<PointerMetadata = usize> + ?Sized, + { + const CAST_PARAMS: CastParams = + match CastParams::try_compute(&Src::LAYOUT, &Dst::LAYOUT) { + Some(params) => params, + None => const_panic!( + "cannot `transmute_ref!` or `transmute_mut!` between incompatible types" + ), + }; + } + + let src_meta = <Src as KnownLayout>::pointer_to_metadata(src.as_non_null().as_ptr()); + let params = <Dst as Params<Src>>::CAST_PARAMS; + + // SAFETY: `src: PtrInner`, and so by invariant on `PtrInner`, `src`'s + // referent is no larger than `isize::MAX`. 
+ let dst_meta = unsafe { params.cast_metadata(src_meta) }; + + let dst = <Dst as KnownLayout>::raw_from_ptr_len(src.as_non_null().cast(), dst_meta); + + // SAFETY: By post-condition on `params.cast_metadata`, `dst` addresses + // the same number of bytes as `src`. Since `src: PtrInner`, `src` has + // provenance for its entire referent, which lives inside of a single + // allocation. Since `dst` has the same address as `src` and was + // constructed using provenance-preserving operations, it addresses a + // subset of those bytes, and has provenance for those bytes. + unsafe { PtrInner::new(dst) } + } +} + +// FIXME(#67): For some reason, on our MSRV toolchain, this `allow` isn't +// enforced despite having `#![allow(unknown_lints)]` at the crate root, but +// putting it here works. Once our MSRV is high enough that this bug has been +// fixed, remove this `allow`. +#[allow(unknown_lints)] +#[cfg(test)] +mod tests { + use super::*; + + /// Tests of when a sized `DstLayout` is extended with a sized field. + #[allow(clippy::decimal_literal_representation)] + #[test] + fn test_dst_layout_extend_sized_with_sized() { + // This macro constructs a layout corresponding to a `u8` and extends it + // with a zero-sized trailing field of given alignment `n`. The macro + // tests that the resulting layout has both size and alignment `min(n, + // P)` for all valid values of `repr(packed(P))`. + macro_rules! test_align_is_size { + ($n:expr) => { + let base = DstLayout::for_type::<u8>(); + let trailing_field = DstLayout::for_type::<elain::Align<$n>>(); + + let packs = + core::iter::once(None).chain((0..29).map(|p| NonZeroUsize::new(2usize.pow(p)))); + + for pack in packs { + let composite = base.extend(trailing_field, pack); + let max_align = pack.unwrap_or(DstLayout::CURRENT_MAX_ALIGN); + let align = $n.min(max_align.get()); + assert_eq!( + composite, + DstLayout { + align: NonZeroUsize::new(align).unwrap(), + size_info: SizeInfo::Sized { size: align }, + statically_shallow_unpadded: false, + } + ) + } + }; + } + + test_align_is_size!(1); + test_align_is_size!(2); + test_align_is_size!(4); + test_align_is_size!(8); + test_align_is_size!(16); + test_align_is_size!(32); + test_align_is_size!(64); + test_align_is_size!(128); + test_align_is_size!(256); + test_align_is_size!(512); + test_align_is_size!(1024); + test_align_is_size!(2048); + test_align_is_size!(4096); + test_align_is_size!(8192); + test_align_is_size!(16384); + test_align_is_size!(32768); + test_align_is_size!(65536); + test_align_is_size!(131072); + test_align_is_size!(262144); + test_align_is_size!(524288); + test_align_is_size!(1048576); + test_align_is_size!(2097152); + test_align_is_size!(4194304); + test_align_is_size!(8388608); + test_align_is_size!(16777216); + test_align_is_size!(33554432); + test_align_is_size!(67108864); + test_align_is_size!(33554432); + test_align_is_size!(134217728); + test_align_is_size!(268435456); + } + + /// Tests of when a sized `DstLayout` is extended with a DST field. + #[test] + fn test_dst_layout_extend_sized_with_dst() { + // Test that for all combinations of real-world alignments and + // `repr_packed` values, that the extension of a sized `DstLayout`` with + // a DST field correctly computes the trailing offset in the composite + // layout. 
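+        //
+        // (Editorially added note: the expected composite trailing-slice offset
+        // below is `align + trailing_field_offset` because the one-byte `u8`
+        // base is padded up to the clamped alignment of the trailing field
+        // before that field is placed.)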
+ + let aligns = (0..29).map(|p| NonZeroUsize::new(2usize.pow(p)).unwrap()); + let packs = core::iter::once(None).chain(aligns.clone().map(Some)); + + for align in aligns { + for pack in packs.clone() { + let base = DstLayout::for_type::<u8>(); + let elem_size = 42; + let trailing_field_offset = 11; + + let trailing_field = DstLayout { + align, + size_info: SizeInfo::SliceDst(TrailingSliceLayout { elem_size, offset: 11 }), + statically_shallow_unpadded: false, + }; + + let composite = base.extend(trailing_field, pack); + + let max_align = pack.unwrap_or(DstLayout::CURRENT_MAX_ALIGN).get(); + + let align = align.get().min(max_align); + + assert_eq!( + composite, + DstLayout { + align: NonZeroUsize::new(align).unwrap(), + size_info: SizeInfo::SliceDst(TrailingSliceLayout { + elem_size, + offset: align + trailing_field_offset, + }), + statically_shallow_unpadded: false, + } + ) + } + } + } + + /// Tests that calling `pad_to_align` on a sized `DstLayout` adds the + /// expected amount of trailing padding. + #[test] + fn test_dst_layout_pad_to_align_with_sized() { + // For all valid alignments `align`, construct a one-byte layout aligned + // to `align`, call `pad_to_align`, and assert that the size of the + // resulting layout is equal to `align`. + for align in (0..29).map(|p| NonZeroUsize::new(2usize.pow(p)).unwrap()) { + let layout = DstLayout { + align, + size_info: SizeInfo::Sized { size: 1 }, + statically_shallow_unpadded: true, + }; + + assert_eq!( + layout.pad_to_align(), + DstLayout { + align, + size_info: SizeInfo::Sized { size: align.get() }, + statically_shallow_unpadded: align.get() == 1 + } + ); + } + + // Test explicitly-provided combinations of unpadded and padded + // counterparts. + + macro_rules! test { + (unpadded { size: $unpadded_size:expr, align: $unpadded_align:expr } + => padded { size: $padded_size:expr, align: $padded_align:expr }) => { + let unpadded = DstLayout { + align: NonZeroUsize::new($unpadded_align).unwrap(), + size_info: SizeInfo::Sized { size: $unpadded_size }, + statically_shallow_unpadded: false, + }; + let padded = unpadded.pad_to_align(); + + assert_eq!( + padded, + DstLayout { + align: NonZeroUsize::new($padded_align).unwrap(), + size_info: SizeInfo::Sized { size: $padded_size }, + statically_shallow_unpadded: false, + } + ); + }; + } + + test!(unpadded { size: 0, align: 4 } => padded { size: 0, align: 4 }); + test!(unpadded { size: 1, align: 4 } => padded { size: 4, align: 4 }); + test!(unpadded { size: 2, align: 4 } => padded { size: 4, align: 4 }); + test!(unpadded { size: 3, align: 4 } => padded { size: 4, align: 4 }); + test!(unpadded { size: 4, align: 4 } => padded { size: 4, align: 4 }); + test!(unpadded { size: 5, align: 4 } => padded { size: 8, align: 4 }); + test!(unpadded { size: 6, align: 4 } => padded { size: 8, align: 4 }); + test!(unpadded { size: 7, align: 4 } => padded { size: 8, align: 4 }); + test!(unpadded { size: 8, align: 4 } => padded { size: 8, align: 4 }); + + let current_max_align = DstLayout::CURRENT_MAX_ALIGN.get(); + + test!(unpadded { size: 1, align: current_max_align } + => padded { size: current_max_align, align: current_max_align }); + + test!(unpadded { size: current_max_align + 1, align: current_max_align } + => padded { size: current_max_align * 2, align: current_max_align }); + } + + /// Tests that calling `pad_to_align` on a DST `DstLayout` is a no-op. 
+ #[test] + fn test_dst_layout_pad_to_align_with_dst() { + for align in (0..29).map(|p| NonZeroUsize::new(2usize.pow(p)).unwrap()) { + for offset in 0..10 { + for elem_size in 0..10 { + let layout = DstLayout { + align, + size_info: SizeInfo::SliceDst(TrailingSliceLayout { offset, elem_size }), + statically_shallow_unpadded: false, + }; + assert_eq!(layout.pad_to_align(), layout); + } + } + } + } + + // This test takes a long time when running under Miri, so we skip it in + // that case. This is acceptable because this is a logic test that doesn't + // attempt to expose UB. + #[test] + #[cfg_attr(miri, ignore)] + fn test_validate_cast_and_convert_metadata() { + #[allow(non_local_definitions)] + impl From<usize> for SizeInfo { + fn from(size: usize) -> SizeInfo { + SizeInfo::Sized { size } + } + } + + #[allow(non_local_definitions)] + impl From<(usize, usize)> for SizeInfo { + fn from((offset, elem_size): (usize, usize)) -> SizeInfo { + SizeInfo::SliceDst(TrailingSliceLayout { offset, elem_size }) + } + } + + fn layout<S: Into<SizeInfo>>(s: S, align: usize) -> DstLayout { + DstLayout { + size_info: s.into(), + align: NonZeroUsize::new(align).unwrap(), + statically_shallow_unpadded: false, + } + } + + /// This macro accepts arguments in the form of: + /// + /// layout(_, _).validate(_, _, _), Ok(Some((_, _))) + /// | | | | | | | + /// size ---------+ | | | | | | + /// align -----------+ | | | | | + /// addr ------------------------+ | | | | + /// bytes_len ----------------------+ | | | + /// cast_type -------------------------+ | | + /// elems ------------------------------------------+ | + /// split_at ------------------------------------------+ + /// + /// `.validate` is shorthand for `.validate_cast_and_convert_metadata` + /// for brevity. + /// + /// Each argument can either be an iterator or a wildcard. Each + /// wildcarded variable is implicitly replaced by an iterator over a + /// representative sample of values for that variable. Each `test!` + /// invocation iterates over every combination of values provided by + /// each variable's iterator (ie, the cartesian product) and validates + /// that the results are expected. + /// + /// The final argument uses the same syntax, but it has a different + /// meaning: + /// - If it is `Ok(pat)`, then the pattern `pat` is supplied to + /// a matching assert to validate the computed result for each + /// combination of input values. + /// - If it is `Err(Some(msg) | None)`, then `test!` validates that the + /// call to `validate_cast_and_convert_metadata` panics with the given + /// panic message or, if the current Rust toolchain version is too + /// early to support panicking in `const fn`s, panics with *some* + /// message. In the latter case, the `const_panic!` macro is used, + /// which emits code which causes a non-panicking error at const eval + /// time, but which does panic when invoked at runtime. Thus, it is + /// merely difficult to predict the *value* of this panic. We deem + /// that testing against the real panic strings on stable and nightly + /// toolchains is enough to ensure correctness. + /// + /// Note that the meta-variables that match these variables have the + /// `tt` type, and some valid expressions are not valid `tt`s (such as + /// `a..b`). In this case, wrap the expression in parentheses, and it + /// will become valid `tt`. + macro_rules! test { + ( + layout($size:tt, $align:tt) + .validate($addr:tt, $bytes_len:tt, $cast_type:tt), $expect:pat $(,)? 
+ ) => { + itertools::iproduct!( + test!(@generate_size $size), + test!(@generate_align $align), + test!(@generate_usize $addr), + test!(@generate_usize $bytes_len), + test!(@generate_cast_type $cast_type) + ).for_each(|(size_info, align, addr, bytes_len, cast_type)| { + // Temporarily disable the panic hook installed by the test + // harness. If we don't do this, all panic messages will be + // kept in an internal log. On its own, this isn't a + // problem, but if a non-caught panic ever happens (ie, in + // code later in this test not in this macro), all of the + // previously-buffered messages will be dumped, hiding the + // real culprit. + let previous_hook = std::panic::take_hook(); + // I don't understand why, but this seems to be required in + // addition to the previous line. + std::panic::set_hook(Box::new(|_| {})); + let actual = std::panic::catch_unwind(|| { + layout(size_info, align).validate_cast_and_convert_metadata(addr, bytes_len, cast_type) + }).map_err(|d| { + let msg = d.downcast::<&'static str>().ok().map(|s| *s.as_ref()); + assert!(msg.is_some() || cfg!(no_zerocopy_panic_in_const_and_vec_try_reserve_1_57_0), "non-string panic messages are not permitted when usage of panic in const fn is enabled"); + msg + }); + std::panic::set_hook(previous_hook); + + assert!( + matches!(actual, $expect), + "layout({:?}, {}).validate_cast_and_convert_metadata({}, {}, {:?})" ,size_info, align, addr, bytes_len, cast_type + ); + }); + }; + (@generate_usize _) => { 0..8 }; + // Generate sizes for both Sized and !Sized types. + (@generate_size _) => { + test!(@generate_size (_)).chain(test!(@generate_size (_, _))) + }; + // Generate sizes for both Sized and !Sized types by chaining + // specified iterators for each. + (@generate_size ($sized_sizes:tt | $unsized_sizes:tt)) => { + test!(@generate_size ($sized_sizes)).chain(test!(@generate_size $unsized_sizes)) + }; + // Generate sizes for Sized types. + (@generate_size (_)) => { test!(@generate_size (0..8)) }; + (@generate_size ($sizes:expr)) => { $sizes.into_iter().map(Into::<SizeInfo>::into) }; + // Generate sizes for !Sized types. + (@generate_size ($min_sizes:tt, $elem_sizes:tt)) => { + itertools::iproduct!( + test!(@generate_min_size $min_sizes), + test!(@generate_elem_size $elem_sizes) + ).map(Into::<SizeInfo>::into) + }; + (@generate_fixed_size _) => { (0..8).into_iter().map(Into::<SizeInfo>::into) }; + (@generate_min_size _) => { 0..8 }; + (@generate_elem_size _) => { 1..8 }; + (@generate_align _) => { [1, 2, 4, 8, 16] }; + (@generate_opt_usize _) => { [None].into_iter().chain((0..8).map(Some).into_iter()) }; + (@generate_cast_type _) => { [CastType::Prefix, CastType::Suffix] }; + (@generate_cast_type $variant:ident) => { [CastType::$variant] }; + // Some expressions need to be wrapped in parentheses in order to be + // valid `tt`s (required by the top match pattern). See the comment + // below for more details. This arm removes these parentheses to + // avoid generating an `unused_parens` warning. + (@$_:ident ($vals:expr)) => { $vals }; + (@$_:ident $vals:expr) => { $vals }; + } + + const EVENS: [usize; 8] = [0, 2, 4, 6, 8, 10, 12, 14]; + const ODDS: [usize; 8] = [1, 3, 5, 7, 9, 11, 13, 15]; + + // base_size is too big for the memory region. 
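// Editor's sketch (not part of the vendored zerocopy source): a single
// concrete instance of the "buffer too small" family that the `test!`
// invocations below sweep exhaustively. A sized layout of 3 bytes cannot be
// carved out of an empty region, so the cast is rejected with a size error.
assert!(matches!(
    layout(3, 1).validate_cast_and_convert_metadata(0, 0, CastType::Prefix),
    Err(MetadataCastError::Size)
));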
+ test!( + layout(((1..8) | ((1..8), (1..8))), _).validate([0], [0], _), + Ok(Err(MetadataCastError::Size)) + ); + test!( + layout(((2..8) | ((2..8), (2..8))), _).validate([0], [1], Prefix), + Ok(Err(MetadataCastError::Size)) + ); + test!( + layout(((2..8) | ((2..8), (2..8))), _).validate([0x1000_0000 - 1], [1], Suffix), + Ok(Err(MetadataCastError::Size)) + ); + + // addr is unaligned for prefix cast + test!(layout(_, [2]).validate(ODDS, _, Prefix), Ok(Err(MetadataCastError::Alignment))); + test!(layout(_, [2]).validate(ODDS, _, Prefix), Ok(Err(MetadataCastError::Alignment))); + + // addr is aligned, but end of buffer is unaligned for suffix cast + test!(layout(_, [2]).validate(EVENS, ODDS, Suffix), Ok(Err(MetadataCastError::Alignment))); + test!(layout(_, [2]).validate(EVENS, ODDS, Suffix), Ok(Err(MetadataCastError::Alignment))); + + // Unfortunately, these constants cannot easily be used in the + // implementation of `validate_cast_and_convert_metadata`, since + // `panic!` consumes a string literal, not an expression. + // + // It's important that these messages be in a separate module. If they + // were at the function's top level, we'd pass them to `test!` as, e.g., + // `Err(TRAILING)`, which would run into a subtle Rust footgun - the + // `TRAILING` identifier would be treated as a pattern to match rather + // than a value to check for equality. + mod msgs { + pub(super) const TRAILING: &str = + "attempted to cast to slice type with zero-sized element"; + pub(super) const OVERFLOW: &str = "`addr` + `bytes_len` > usize::MAX"; + } + + // casts with ZST trailing element types are unsupported + test!(layout((_, [0]), _).validate(_, _, _), Err(Some(msgs::TRAILING) | None),); + + // addr + bytes_len must not overflow usize + test!(layout(_, _).validate([usize::MAX], (1..100), _), Err(Some(msgs::OVERFLOW) | None)); + test!(layout(_, _).validate((1..100), [usize::MAX], _), Err(Some(msgs::OVERFLOW) | None)); + test!( + layout(_, _).validate( + [usize::MAX / 2 + 1, usize::MAX], + [usize::MAX / 2 + 1, usize::MAX], + _ + ), + Err(Some(msgs::OVERFLOW) | None) + ); + + // Validates that `validate_cast_and_convert_metadata` satisfies its own + // documented safety postconditions, and also a few other properties + // that aren't documented but we want to guarantee anyway. + fn validate_behavior( + (layout, addr, bytes_len, cast_type): (DstLayout, usize, usize, CastType), + ) { + if let Ok((elems, split_at)) = + layout.validate_cast_and_convert_metadata(addr, bytes_len, cast_type) + { + let (size_info, align) = (layout.size_info, layout.align); + let debug_str = format!( + "layout({:?}, {}).validate_cast_and_convert_metadata({}, {}, {:?}) => ({}, {})", + size_info, align, addr, bytes_len, cast_type, elems, split_at + ); + + // If this is a sized type (no trailing slice), then `elems` is + // meaningless, but in practice we set it to 0. Callers are not + // allowed to rely on this, but a lot of math is nicer if + // they're able to, and some callers might accidentally do that. + let sized = matches!(layout.size_info, SizeInfo::Sized { .. 
}); + assert!(!(sized && elems != 0), "{}", debug_str); + + let resulting_size = match layout.size_info { + SizeInfo::Sized { size } => size, + SizeInfo::SliceDst(TrailingSliceLayout { offset, elem_size }) => { + let padded_size = |elems| { + let without_padding = offset + elems * elem_size; + without_padding + util::padding_needed_for(without_padding, align) + }; + + let resulting_size = padded_size(elems); + // Test that `validate_cast_and_convert_metadata` + // computed the largest possible value that fits in the + // given range. + assert!(padded_size(elems + 1) > bytes_len, "{}", debug_str); + resulting_size + } + }; + + // Test safety postconditions guaranteed by + // `validate_cast_and_convert_metadata`. + assert!(resulting_size <= bytes_len, "{}", debug_str); + match cast_type { + CastType::Prefix => { + assert_eq!(addr % align, 0, "{}", debug_str); + assert_eq!(resulting_size, split_at, "{}", debug_str); + } + CastType::Suffix => { + assert_eq!(split_at, bytes_len - resulting_size, "{}", debug_str); + assert_eq!((addr + split_at) % align, 0, "{}", debug_str); + } + } + } else { + let min_size = match layout.size_info { + SizeInfo::Sized { size } => size, + SizeInfo::SliceDst(TrailingSliceLayout { offset, .. }) => { + offset + util::padding_needed_for(offset, layout.align) + } + }; + + // If a cast is invalid, it is either because... + // 1. there are insufficient bytes at the given region for type: + let insufficient_bytes = bytes_len < min_size; + // 2. performing the cast would misalign type: + let base = match cast_type { + CastType::Prefix => 0, + CastType::Suffix => bytes_len, + }; + let misaligned = (base + addr) % layout.align != 0; + + assert!(insufficient_bytes || misaligned); + } + } + + let sizes = 0..8; + let elem_sizes = 1..8; + let size_infos = sizes + .clone() + .map(Into::<SizeInfo>::into) + .chain(itertools::iproduct!(sizes, elem_sizes).map(Into::<SizeInfo>::into)); + let layouts = itertools::iproduct!(size_infos, [1, 2, 4, 8, 16, 32]) + .filter(|(size_info, align)| !matches!(size_info, SizeInfo::Sized { size } if size % align != 0)) + .map(|(size_info, align)| layout(size_info, align)); + itertools::iproduct!(layouts, 0..8, 0..8, [CastType::Prefix, CastType::Suffix]) + .for_each(validate_behavior); + } + + #[test] + #[cfg(__ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS)] + fn test_validate_rust_layout() { + use core::{ + convert::TryInto as _, + ptr::{self, NonNull}, + }; + + use crate::util::testutil::*; + + // This test synthesizes pointers with various metadata and uses Rust's + // built-in APIs to confirm that Rust makes decisions about type layout + // which are consistent with what we believe is guaranteed by the + // language. If this test fails, it doesn't just mean our code is wrong + // - it means we're misunderstanding the language's guarantees. + + #[derive(Debug)] + struct MacroArgs { + offset: usize, + align: NonZeroUsize, + elem_size: Option<usize>, + } + + /// # Safety + /// + /// `test` promises to only call `addr_of_slice_field` on a `NonNull<T>` + /// which points to a valid `T`. + /// + /// `with_elems` must produce a pointer which points to a valid `T`. 
+ fn test<T: ?Sized, W: Fn(usize) -> NonNull<T>>( + args: MacroArgs, + with_elems: W, + addr_of_slice_field: Option<fn(NonNull<T>) -> NonNull<u8>>, + ) { + let dst = args.elem_size.is_some(); + let layout = { + let size_info = match args.elem_size { + Some(elem_size) => { + SizeInfo::SliceDst(TrailingSliceLayout { offset: args.offset, elem_size }) + } + None => SizeInfo::Sized { + // Rust only supports types whose sizes are a multiple + // of their alignment. If the macro created a type like + // this: + // + // #[repr(C, align(2))] + // struct Foo([u8; 1]); + // + // ...then Rust will automatically round the type's size + // up to 2. + size: args.offset + util::padding_needed_for(args.offset, args.align), + }, + }; + DstLayout { size_info, align: args.align, statically_shallow_unpadded: false } + }; + + for elems in 0..128 { + let ptr = with_elems(elems); + + if let Some(addr_of_slice_field) = addr_of_slice_field { + let slc_field_ptr = addr_of_slice_field(ptr).as_ptr(); + // SAFETY: Both `slc_field_ptr` and `ptr` are pointers to + // the same valid Rust object. + // Work around https://github.com/rust-lang/rust-clippy/issues/12280 + let offset: usize = + unsafe { slc_field_ptr.byte_offset_from(ptr.as_ptr()).try_into().unwrap() }; + assert_eq!(offset, args.offset); + } + + // SAFETY: `ptr` points to a valid `T`. + #[allow(clippy::multiple_unsafe_ops_per_block)] + let (size, align) = unsafe { + (mem::size_of_val_raw(ptr.as_ptr()), mem::align_of_val_raw(ptr.as_ptr())) + }; + + // Avoid expensive allocation when running under Miri. + let assert_msg = if !cfg!(miri) { + format!("\n{:?}\nsize:{}, align:{}", args, size, align) + } else { + String::new() + }; + + let without_padding = + args.offset + args.elem_size.map(|elem_size| elems * elem_size).unwrap_or(0); + assert!(size >= without_padding, "{}", assert_msg); + assert_eq!(align, args.align.get(), "{}", assert_msg); + + // This encodes the most important part of the test: our + // understanding of how Rust determines the layout of repr(C) + // types. Sized repr(C) types are trivial, but DST types have + // some subtlety. Note that: + // - For sized types, `without_padding` is just the size of the + // type that we constructed for `Foo`. Since we may have + // requested a larger alignment, `Foo` may actually be larger + // than this, hence `padding_needed_for`. + // - For unsized types, `without_padding` is dynamically + // computed from the offset, the element size, and element + // count. We expect that the size of the object should be + // `offset + elem_size * elems` rounded up to the next + // alignment. + let expected_size = + without_padding + util::padding_needed_for(without_padding, args.align); + assert_eq!(expected_size, size, "{}", assert_msg); + + // For zero-sized element types, + // `validate_cast_and_convert_metadata` just panics, so we skip + // testing those types. + if args.elem_size.map(|elem_size| elem_size > 0).unwrap_or(true) { + let addr = ptr.addr().get(); + let (got_elems, got_split_at) = layout + .validate_cast_and_convert_metadata(addr, size, CastType::Prefix) + .unwrap(); + // Avoid expensive allocation when running under Miri. 
+ let assert_msg = if !cfg!(miri) { + format!( + "{}\nvalidate_cast_and_convert_metadata({}, {})", + assert_msg, addr, size, + ) + } else { + String::new() + }; + assert_eq!(got_split_at, size, "{}", assert_msg); + if dst { + assert!(got_elems >= elems, "{}", assert_msg); + if got_elems != elems { + // If `validate_cast_and_convert_metadata` + // returned more elements than `elems`, that + // means that `elems` is not the maximum number + // of elements that can fit in `size` - in other + // words, there is enough padding at the end of + // the value to fit at least one more element. + // If we use this metadata to synthesize a + // pointer, despite having a different element + // count, we still expect it to have the same + // size. + let got_ptr = with_elems(got_elems); + // SAFETY: `got_ptr` is a pointer to a valid `T`. + let size_of_got_ptr = unsafe { mem::size_of_val_raw(got_ptr.as_ptr()) }; + assert_eq!(size_of_got_ptr, size, "{}", assert_msg); + } + } else { + // For sized casts, the returned element value is + // technically meaningless, and we don't guarantee any + // particular value. In practice, it's always zero. + assert_eq!(got_elems, 0, "{}", assert_msg) + } + } + } + } + + macro_rules! validate_against_rust { + ($offset:literal, $align:literal $(, $elem_size:literal)?) => {{ + #[repr(C, align($align))] + struct Foo([u8; $offset]$(, [[u8; $elem_size]])?); + + let args = MacroArgs { + offset: $offset, + align: $align.try_into().unwrap(), + elem_size: { + #[allow(unused)] + let ret = None::<usize>; + $(let ret = Some($elem_size);)? + ret + } + }; + + #[repr(C, align($align))] + struct FooAlign; + // Create an aligned buffer to use in order to synthesize + // pointers to `Foo`. We don't ever load values from these + // pointers - we just do arithmetic on them - so having a "real" + // block of memory as opposed to a validly-aligned-but-dangling + // pointer is only necessary to make Miri happy since we run it + // with "strict provenance" checking enabled. + let aligned_buf = Align::<_, FooAlign>::new([0u8; 1024]); + let with_elems = |elems| { + let slc = NonNull::slice_from_raw_parts(NonNull::from(&aligned_buf.t), elems); + #[allow(clippy::as_conversions)] + NonNull::new(slc.as_ptr() as *mut Foo).unwrap() + }; + let addr_of_slice_field = { + #[allow(unused)] + let f = None::<fn(NonNull<Foo>) -> NonNull<u8>>; + $( + // SAFETY: `test` promises to only call `f` with a `ptr` + // to a valid `Foo`. + let f: Option<fn(NonNull<Foo>) -> NonNull<u8>> = Some(|ptr: NonNull<Foo>| unsafe { + NonNull::new(ptr::addr_of_mut!((*ptr.as_ptr()).1)).unwrap().cast::<u8>() + }); + let _ = $elem_size; + )? 
+ f + }; + + test::<Foo, _>(args, with_elems, addr_of_slice_field); + }}; + } + + // Every permutation of: + // - offset in [0, 4] + // - align in [1, 16] + // - elem_size in [0, 4] (plus no elem_size) + validate_against_rust!(0, 1); + validate_against_rust!(0, 1, 0); + validate_against_rust!(0, 1, 1); + validate_against_rust!(0, 1, 2); + validate_against_rust!(0, 1, 3); + validate_against_rust!(0, 1, 4); + validate_against_rust!(0, 2); + validate_against_rust!(0, 2, 0); + validate_against_rust!(0, 2, 1); + validate_against_rust!(0, 2, 2); + validate_against_rust!(0, 2, 3); + validate_against_rust!(0, 2, 4); + validate_against_rust!(0, 4); + validate_against_rust!(0, 4, 0); + validate_against_rust!(0, 4, 1); + validate_against_rust!(0, 4, 2); + validate_against_rust!(0, 4, 3); + validate_against_rust!(0, 4, 4); + validate_against_rust!(0, 8); + validate_against_rust!(0, 8, 0); + validate_against_rust!(0, 8, 1); + validate_against_rust!(0, 8, 2); + validate_against_rust!(0, 8, 3); + validate_against_rust!(0, 8, 4); + validate_against_rust!(0, 16); + validate_against_rust!(0, 16, 0); + validate_against_rust!(0, 16, 1); + validate_against_rust!(0, 16, 2); + validate_against_rust!(0, 16, 3); + validate_against_rust!(0, 16, 4); + validate_against_rust!(1, 1); + validate_against_rust!(1, 1, 0); + validate_against_rust!(1, 1, 1); + validate_against_rust!(1, 1, 2); + validate_against_rust!(1, 1, 3); + validate_against_rust!(1, 1, 4); + validate_against_rust!(1, 2); + validate_against_rust!(1, 2, 0); + validate_against_rust!(1, 2, 1); + validate_against_rust!(1, 2, 2); + validate_against_rust!(1, 2, 3); + validate_against_rust!(1, 2, 4); + validate_against_rust!(1, 4); + validate_against_rust!(1, 4, 0); + validate_against_rust!(1, 4, 1); + validate_against_rust!(1, 4, 2); + validate_against_rust!(1, 4, 3); + validate_against_rust!(1, 4, 4); + validate_against_rust!(1, 8); + validate_against_rust!(1, 8, 0); + validate_against_rust!(1, 8, 1); + validate_against_rust!(1, 8, 2); + validate_against_rust!(1, 8, 3); + validate_against_rust!(1, 8, 4); + validate_against_rust!(1, 16); + validate_against_rust!(1, 16, 0); + validate_against_rust!(1, 16, 1); + validate_against_rust!(1, 16, 2); + validate_against_rust!(1, 16, 3); + validate_against_rust!(1, 16, 4); + validate_against_rust!(2, 1); + validate_against_rust!(2, 1, 0); + validate_against_rust!(2, 1, 1); + validate_against_rust!(2, 1, 2); + validate_against_rust!(2, 1, 3); + validate_against_rust!(2, 1, 4); + validate_against_rust!(2, 2); + validate_against_rust!(2, 2, 0); + validate_against_rust!(2, 2, 1); + validate_against_rust!(2, 2, 2); + validate_against_rust!(2, 2, 3); + validate_against_rust!(2, 2, 4); + validate_against_rust!(2, 4); + validate_against_rust!(2, 4, 0); + validate_against_rust!(2, 4, 1); + validate_against_rust!(2, 4, 2); + validate_against_rust!(2, 4, 3); + validate_against_rust!(2, 4, 4); + validate_against_rust!(2, 8); + validate_against_rust!(2, 8, 0); + validate_against_rust!(2, 8, 1); + validate_against_rust!(2, 8, 2); + validate_against_rust!(2, 8, 3); + validate_against_rust!(2, 8, 4); + validate_against_rust!(2, 16); + validate_against_rust!(2, 16, 0); + validate_against_rust!(2, 16, 1); + validate_against_rust!(2, 16, 2); + validate_against_rust!(2, 16, 3); + validate_against_rust!(2, 16, 4); + validate_against_rust!(3, 1); + validate_against_rust!(3, 1, 0); + validate_against_rust!(3, 1, 1); + validate_against_rust!(3, 1, 2); + validate_against_rust!(3, 1, 3); + validate_against_rust!(3, 1, 4); + 
validate_against_rust!(3, 2); + validate_against_rust!(3, 2, 0); + validate_against_rust!(3, 2, 1); + validate_against_rust!(3, 2, 2); + validate_against_rust!(3, 2, 3); + validate_against_rust!(3, 2, 4); + validate_against_rust!(3, 4); + validate_against_rust!(3, 4, 0); + validate_against_rust!(3, 4, 1); + validate_against_rust!(3, 4, 2); + validate_against_rust!(3, 4, 3); + validate_against_rust!(3, 4, 4); + validate_against_rust!(3, 8); + validate_against_rust!(3, 8, 0); + validate_against_rust!(3, 8, 1); + validate_against_rust!(3, 8, 2); + validate_against_rust!(3, 8, 3); + validate_against_rust!(3, 8, 4); + validate_against_rust!(3, 16); + validate_against_rust!(3, 16, 0); + validate_against_rust!(3, 16, 1); + validate_against_rust!(3, 16, 2); + validate_against_rust!(3, 16, 3); + validate_against_rust!(3, 16, 4); + validate_against_rust!(4, 1); + validate_against_rust!(4, 1, 0); + validate_against_rust!(4, 1, 1); + validate_against_rust!(4, 1, 2); + validate_against_rust!(4, 1, 3); + validate_against_rust!(4, 1, 4); + validate_against_rust!(4, 2); + validate_against_rust!(4, 2, 0); + validate_against_rust!(4, 2, 1); + validate_against_rust!(4, 2, 2); + validate_against_rust!(4, 2, 3); + validate_against_rust!(4, 2, 4); + validate_against_rust!(4, 4); + validate_against_rust!(4, 4, 0); + validate_against_rust!(4, 4, 1); + validate_against_rust!(4, 4, 2); + validate_against_rust!(4, 4, 3); + validate_against_rust!(4, 4, 4); + validate_against_rust!(4, 8); + validate_against_rust!(4, 8, 0); + validate_against_rust!(4, 8, 1); + validate_against_rust!(4, 8, 2); + validate_against_rust!(4, 8, 3); + validate_against_rust!(4, 8, 4); + validate_against_rust!(4, 16); + validate_against_rust!(4, 16, 0); + validate_against_rust!(4, 16, 1); + validate_against_rust!(4, 16, 2); + validate_against_rust!(4, 16, 3); + validate_against_rust!(4, 16, 4); + } +} + +#[cfg(kani)] +mod proofs { + use core::alloc::Layout; + + use super::*; + + impl kani::Arbitrary for DstLayout { + fn any() -> Self { + let align: NonZeroUsize = kani::any(); + let size_info: SizeInfo = kani::any(); + + kani::assume(align.is_power_of_two()); + kani::assume(align < DstLayout::THEORETICAL_MAX_ALIGN); + + // For testing purposes, we most care about instantiations of + // `DstLayout` that can correspond to actual Rust types. We use + // `Layout` to verify that our `DstLayout` satisfies the validity + // conditions of Rust layouts. + kani::assume( + match size_info { + SizeInfo::Sized { size } => Layout::from_size_align(size, align.get()), + SizeInfo::SliceDst(TrailingSliceLayout { offset, elem_size: _ }) => { + // `SliceDst` cannot encode an exact size, but we know + // it is at least `offset` bytes. 
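// Editor's note (not part of the vendored zerocopy source): as a concrete
// instance, `offset = 8` with `align = 4` is accepted by
// `Layout::from_size_align`, while an `offset` close to `usize::MAX` is
// rejected because rounding it up to the alignment would exceed
// `isize::MAX`, so such degenerate layouts are filtered out of the proof's
// input space.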
+ Layout::from_size_align(offset, align.get()) + } + } + .is_ok(), + ); + + Self { align: align, size_info: size_info, statically_shallow_unpadded: kani::any() } + } + } + + impl kani::Arbitrary for SizeInfo { + fn any() -> Self { + let is_sized: bool = kani::any(); + + match is_sized { + true => { + let size: usize = kani::any(); + + kani::assume(size <= isize::MAX as _); + + SizeInfo::Sized { size } + } + false => SizeInfo::SliceDst(kani::any()), + } + } + } + + impl kani::Arbitrary for TrailingSliceLayout { + fn any() -> Self { + let elem_size: usize = kani::any(); + let offset: usize = kani::any(); + + kani::assume(elem_size < isize::MAX as _); + kani::assume(offset < isize::MAX as _); + + TrailingSliceLayout { elem_size, offset } + } + } + + #[kani::proof] + fn prove_requires_dynamic_padding() { + let layout: DstLayout = kani::any(); + + let SizeInfo::SliceDst(size_info) = layout.size_info else { + kani::assume(false); + loop {} + }; + + let meta: usize = kani::any(); + + let Some(trailing_slice_size) = size_info.elem_size.checked_mul(meta) else { + // The `trailing_slice_size` exceeds `usize::MAX`; `meta` is invalid. + kani::assume(false); + loop {} + }; + + let Some(unpadded_size) = size_info.offset.checked_add(trailing_slice_size) else { + // The `unpadded_size` exceeds `usize::MAX`; `meta`` is invalid. + kani::assume(false); + loop {} + }; + + if unpadded_size >= isize::MAX as usize { + // The `unpadded_size` exceeds `isize::MAX`; `meta` is invalid. + kani::assume(false); + loop {} + } + + let trailing_padding = util::padding_needed_for(unpadded_size, layout.align); + + if !layout.requires_dynamic_padding() { + assert!(trailing_padding == 0); + } + } + + #[kani::proof] + fn prove_dst_layout_extend() { + use crate::util::{max, min, padding_needed_for}; + + let base: DstLayout = kani::any(); + let field: DstLayout = kani::any(); + let packed: Option<NonZeroUsize> = kani::any(); + + if let Some(max_align) = packed { + kani::assume(max_align.is_power_of_two()); + kani::assume(base.align <= max_align); + } + + // The base can only be extended if it's sized. + kani::assume(matches!(base.size_info, SizeInfo::Sized { .. })); + let base_size = if let SizeInfo::Sized { size } = base.size_info { + size + } else { + unreachable!(); + }; + + // Under the above conditions, `DstLayout::extend` will not panic. + let composite = base.extend(field, packed); + + // The field's alignment is clamped by `max_align` (i.e., the + // `packed` attribute, if any) [1]. + // + // [1] Per https://doc.rust-lang.org/reference/type-layout.html#the-alignment-modifiers: + // + // The alignments of each field, for the purpose of positioning + // fields, is the smaller of the specified alignment and the + // alignment of the field's type. + let field_align = min(field.align, packed.unwrap_or(DstLayout::THEORETICAL_MAX_ALIGN)); + + // The struct's alignment is the maximum of its previous alignment and + // `field_align`. + assert_eq!(composite.align, max(base.align, field_align)); + + // Compute the minimum amount of inter-field padding needed to + // satisfy the field's alignment, and offset of the trailing field. + // [1] + // + // [1] Per https://doc.rust-lang.org/reference/type-layout.html#the-alignment-modifiers: + // + // Inter-field padding is guaranteed to be the minimum required in + // order to satisfy each field's (possibly altered) alignment. 
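// Editor's note (not part of the vendored zerocopy source): concretely,
// with `base_size = 5` and `field_align = 4`, `padding_needed_for(5, 4)`
// is 3, so the trailing field is placed at offset 5 + 3 = 8; when
// `base_size` is already a multiple of the alignment, the padding is 0.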
+ let padding = padding_needed_for(base_size, field_align); + let offset = base_size + padding; + + // For testing purposes, we'll also construct `alloc::Layout` + // stand-ins for `DstLayout`, and show that `extend` behaves + // comparably on both types. + let base_analog = Layout::from_size_align(base_size, base.align.get()).unwrap(); + + match field.size_info { + SizeInfo::Sized { size: field_size } => { + if let SizeInfo::Sized { size: composite_size } = composite.size_info { + // If the trailing field is sized, the resulting layout will + // be sized. Its size will be the sum of the preceding + // layout, the size of the new field, and the size of + // inter-field padding between the two. + assert_eq!(composite_size, offset + field_size); + + let field_analog = + Layout::from_size_align(field_size, field_align.get()).unwrap(); + + if let Ok((actual_composite, actual_offset)) = base_analog.extend(field_analog) + { + assert_eq!(actual_offset, offset); + assert_eq!(actual_composite.size(), composite_size); + assert_eq!(actual_composite.align(), composite.align.get()); + } else { + // An error here reflects that composite of `base` + // and `field` cannot correspond to a real Rust type + // fragment, because such a fragment would violate + // the basic invariants of a valid Rust layout. At + // the time of writing, `DstLayout` is a little more + // permissive than `Layout`, so we don't assert + // anything in this branch (e.g., unreachability). + } + } else { + panic!("The composite of two sized layouts must be sized.") + } + } + SizeInfo::SliceDst(TrailingSliceLayout { + offset: field_offset, + elem_size: field_elem_size, + }) => { + if let SizeInfo::SliceDst(TrailingSliceLayout { + offset: composite_offset, + elem_size: composite_elem_size, + }) = composite.size_info + { + // The offset of the trailing slice component is the sum + // of the offset of the trailing field and the trailing + // slice offset within that field. + assert_eq!(composite_offset, offset + field_offset); + // The elem size is unchanged. + assert_eq!(composite_elem_size, field_elem_size); + + let field_analog = + Layout::from_size_align(field_offset, field_align.get()).unwrap(); + + if let Ok((actual_composite, actual_offset)) = base_analog.extend(field_analog) + { + assert_eq!(actual_offset, offset); + assert_eq!(actual_composite.size(), composite_offset); + assert_eq!(actual_composite.align(), composite.align.get()); + } else { + // An error here reflects that composite of `base` + // and `field` cannot correspond to a real Rust type + // fragment, because such a fragment would violate + // the basic invariants of a valid Rust layout. At + // the time of writing, `DstLayout` is a little more + // permissive than `Layout`, so we don't assert + // anything in this branch (e.g., unreachability). 
+ } + } else { + panic!("The extension of a layout with a DST must result in a DST.") + } + } + } + } + + #[kani::proof] + #[kani::should_panic] + fn prove_dst_layout_extend_dst_panics() { + let base: DstLayout = kani::any(); + let field: DstLayout = kani::any(); + let packed: Option<NonZeroUsize> = kani::any(); + + if let Some(max_align) = packed { + kani::assume(max_align.is_power_of_two()); + kani::assume(base.align <= max_align); + } + + kani::assume(matches!(base.size_info, SizeInfo::SliceDst(..))); + + let _ = base.extend(field, packed); + } + + #[kani::proof] + fn prove_dst_layout_pad_to_align() { + use crate::util::padding_needed_for; + + let layout: DstLayout = kani::any(); + + let padded = layout.pad_to_align(); + + // Calling `pad_to_align` does not alter the `DstLayout`'s alignment. + assert_eq!(padded.align, layout.align); + + if let SizeInfo::Sized { size: unpadded_size } = layout.size_info { + if let SizeInfo::Sized { size: padded_size } = padded.size_info { + // If the layout is sized, it will remain sized after padding is + // added. Its sum will be its unpadded size and the size of the + // trailing padding needed to satisfy its alignment + // requirements. + let padding = padding_needed_for(unpadded_size, layout.align); + assert_eq!(padded_size, unpadded_size + padding); + + // Prove that calling `DstLayout::pad_to_align` behaves + // identically to `Layout::pad_to_align`. + let layout_analog = + Layout::from_size_align(unpadded_size, layout.align.get()).unwrap(); + let padded_analog = layout_analog.pad_to_align(); + assert_eq!(padded_analog.align(), layout.align.get()); + assert_eq!(padded_analog.size(), padded_size); + } else { + panic!("The padding of a sized layout must result in a sized layout.") + } + } else { + // If the layout is a DST, padding cannot be statically added. + assert_eq!(padded.size_info, layout.size_info); + } + } +} diff --git a/vendor/zerocopy/src/lib.rs b/vendor/zerocopy/src/lib.rs new file mode 100644 index 00000000..714a4b60 --- /dev/null +++ b/vendor/zerocopy/src/lib.rs @@ -0,0 +1,6775 @@ +// Copyright 2018 The Fuchsia Authors +// +// Licensed under the 2-Clause BSD License <LICENSE-BSD or +// https://opensource.org/license/bsd-2-clause>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +// After updating the following doc comment, make sure to run the following +// command to update `README.md` based on its contents: +// +// cargo -q run --manifest-path tools/Cargo.toml -p generate-readme > README.md + +//! ***<span style="font-size: 140%">Fast, safe, <span +//! style="color:red;">compile error</span>. Pick two.</span>*** +//! +//! Zerocopy makes zero-cost memory manipulation effortless. We write `unsafe` +//! so you don't have to. +//! +//! *For an overview of what's changed from zerocopy 0.7, check out our [release +//! notes][release-notes], which include a step-by-step upgrading guide.* +//! +//! *Have questions? Need more out of zerocopy? Submit a [customer request +//! issue][customer-request-issue] or ask the maintainers on +//! [GitHub][github-q-a] or [Discord][discord]!* +//! +//! [customer-request-issue]: https://github.com/google/zerocopy/issues/new/choose +//! [release-notes]: https://github.com/google/zerocopy/discussions/1680 +//! 
[github-q-a]: https://github.com/google/zerocopy/discussions/categories/q-a +//! [discord]: https://discord.gg/MAvWH2R6zk +//! +//! # Overview +//! +//! ##### Conversion Traits +//! +//! Zerocopy provides four derivable traits for zero-cost conversions: +//! - [`TryFromBytes`] indicates that a type may safely be converted from +//! certain byte sequences (conditional on runtime checks) +//! - [`FromZeros`] indicates that a sequence of zero bytes represents a valid +//! instance of a type +//! - [`FromBytes`] indicates that a type may safely be converted from an +//! arbitrary byte sequence +//! - [`IntoBytes`] indicates that a type may safely be converted *to* a byte +//! sequence +//! +//! These traits support sized types, slices, and [slice DSTs][slice-dsts]. +//! +//! [slice-dsts]: KnownLayout#dynamically-sized-types +//! +//! ##### Marker Traits +//! +//! Zerocopy provides three derivable marker traits that do not provide any +//! functionality themselves, but are required to call certain methods provided +//! by the conversion traits: +//! - [`KnownLayout`] indicates that zerocopy can reason about certain layout +//! qualities of a type +//! - [`Immutable`] indicates that a type is free from interior mutability, +//! except by ownership or an exclusive (`&mut`) borrow +//! - [`Unaligned`] indicates that a type's alignment requirement is 1 +//! +//! You should generally derive these marker traits whenever possible. +//! +//! ##### Conversion Macros +//! +//! Zerocopy provides six macros for safe casting between types: +//! +//! - ([`try_`][try_transmute])[`transmute`] (conditionally) converts a value of +//! one type to a value of another type of the same size +//! - ([`try_`][try_transmute_mut])[`transmute_mut`] (conditionally) converts a +//! mutable reference of one type to a mutable reference of another type of +//! the same size +//! - ([`try_`][try_transmute_ref])[`transmute_ref`] (conditionally) converts a +//! mutable or immutable reference of one type to an immutable reference of +//! another type of the same size +//! +//! These macros perform *compile-time* size and alignment checks, meaning that +//! unconditional casts have zero cost at runtime. Conditional casts do not need +//! to validate size or alignment at runtime, but do need to validate contents. +//! +//! These macros cannot be used in generic contexts. For generic conversions, +//! use the methods defined by the [conversion traits](#conversion-traits). +//! +//! ##### Byteorder-Aware Numerics +//! +//! Zerocopy provides byte-order aware integer types that support these +//! conversions; see the [`byteorder`] module. These types are especially useful +//! for network parsing. +//! +//! # Cargo Features +//! +//! - **`alloc`** +//! By default, `zerocopy` is `no_std`. When the `alloc` feature is enabled, +//! the `alloc` crate is added as a dependency, and some allocation-related +//! functionality is added. +//! +//! - **`std`** +//! By default, `zerocopy` is `no_std`. When the `std` feature is enabled, the +//! `std` crate is added as a dependency (ie, `no_std` is disabled), and +//! support for some `std` types is added. `std` implies `alloc`. +//! +//! - **`derive`** +//! Provides derives for the core marker traits via the `zerocopy-derive` +//! crate. These derives are re-exported from `zerocopy`, so it is not +//! necessary to depend on `zerocopy-derive` directly. +//! +//! However, you may experience better compile times if you instead directly +//!
depend on both `zerocopy` and `zerocopy-derive` in your `Cargo.toml`, +//! since doing so will allow Rust to compile these crates in parallel. To do +//! so, do *not* enable the `derive` feature, and list both dependencies in +//! your `Cargo.toml` with the same leading non-zero version number; e.g.: +//! +//! ```toml +//! [dependencies] +//! zerocopy = "0.X" +//! zerocopy-derive = "0.X" +//! ``` +//! +//! To avoid the risk of [duplicate import errors][duplicate-import-errors] if +//! one of your dependencies enables zerocopy's `derive` feature, import +//! derives as `use zerocopy_derive::*` rather than by name (e.g., `use +//! zerocopy_derive::FromBytes`). +//! +//! - **`simd`** +//! When the `simd` feature is enabled, `FromZeros`, `FromBytes`, and +//! `IntoBytes` impls are emitted for all stable SIMD types which exist on the +//! target platform. Note that the layout of SIMD types is not yet stabilized, +//! so these impls may be removed in the future if layout changes make them +//! invalid. For more information, see the Unsafe Code Guidelines Reference +//! page on the [layout of packed SIMD vectors][simd-layout]. +//! +//! - **`simd-nightly`** +//! Enables the `simd` feature and adds support for SIMD types which are only +//! available on nightly. Since these types are unstable, support for any type +//! may be removed at any point in the future. +//! +//! - **`float-nightly`** +//! Adds support for the unstable `f16` and `f128` types. These types are +//! not yet fully implemented and may not be supported on all platforms. +//! +//! [duplicate-import-errors]: https://github.com/google/zerocopy/issues/1587 +//! [simd-layout]: https://rust-lang.github.io/unsafe-code-guidelines/layout/packed-simd-vectors.html +//! +//! # Security Ethos +//! +//! Zerocopy is expressly designed for use in security-critical contexts. We +//! strive to ensure that zerocopy code is sound under Rust's current +//! memory model, and *any future memory model*. We ensure this by: +//! - **...not 'guessing' about Rust's semantics.** +//! We annotate `unsafe` code with a precise rationale for its soundness that +//! cites a relevant section of Rust's official documentation. When Rust's +//! documented semantics are unclear, we work with the Rust Operational +//! Semantics Team to clarify Rust's documentation. +//! - **...rigorously testing our implementation.** +//! We run tests using [Miri], ensuring that zerocopy is sound across a wide +//! array of supported target platforms of varying endianness and pointer +//! width, and across both current and experimental memory models of Rust. +//! - **...formally proving the correctness of our implementation.** +//! We apply formal verification tools like [Kani][kani] to prove zerocopy's +//! correctness. +//! +//! For more information, see our full [soundness policy]. +//! +//! [Miri]: https://github.com/rust-lang/miri +//! [Kani]: https://github.com/model-checking/kani +//! [soundness policy]: https://github.com/google/zerocopy/blob/main/POLICIES.md#soundness +//! +//! # Relationship to Project Safe Transmute +//! +//! [Project Safe Transmute] is an official initiative of the Rust Project to +//! develop language-level support for safer transmutation. The Project consults +//! with crates like zerocopy to identify aspects of safer transmutation that +//! would benefit from compiler support, and has developed an [experimental, +//! compiler-supported analysis][mcp-transmutability] which determines whether, +//!
for a given type, any value of that type may be soundly transmuted into +//! another type. Once this functionality is sufficiently mature, zerocopy +//! intends to replace its internal transmutability analysis (implemented by our +//! custom derives) with the compiler-supported one. This change will likely be +//! an implementation detail that is invisible to zerocopy's users. +//! +//! Project Safe Transmute will not replace the need for most of zerocopy's +//! higher-level abstractions. The experimental compiler analysis is a tool for +//! checking the soundness of `unsafe` code, not a tool to avoid writing +//! `unsafe` code altogether. For the foreseeable future, crates like zerocopy +//! will still be required in order to provide higher-level abstractions on top +//! of the building block provided by Project Safe Transmute. +//! +//! [Project Safe Transmute]: https://rust-lang.github.io/rfcs/2835-project-safe-transmute.html +//! [mcp-transmutability]: https://github.com/rust-lang/compiler-team/issues/411 +//! +//! # MSRV +//! +//! See our [MSRV policy]. +//! +//! [MSRV policy]: https://github.com/google/zerocopy/blob/main/POLICIES.md#msrv +//! +//! # Changelog +//! +//! Zerocopy uses [GitHub Releases]. +//! +//! [GitHub Releases]: https://github.com/google/zerocopy/releases +//! +//! # Thanks +//! +//! Zerocopy is maintained by engineers at Google with help from [many wonderful +//! contributors][contributors]. Thank you to everyone who has lent a hand in +//! making Rust a little more secure! +//! +//! [contributors]: https://github.com/google/zerocopy/graphs/contributors + +// Sometimes we want to use lints which were added after our MSRV. +// `unknown_lints` is `warn` by default and we deny warnings in CI, so without +// this attribute, any unknown lint would cause a CI failure when testing with +// our MSRV. +#![allow(unknown_lints, non_local_definitions, unreachable_patterns)] +#![deny(renamed_and_removed_lints)] +#![deny( + anonymous_parameters, + deprecated_in_future, + late_bound_lifetime_arguments, + missing_copy_implementations, + missing_debug_implementations, + missing_docs, + path_statements, + patterns_in_fns_without_body, + rust_2018_idioms, + trivial_numeric_casts, + unreachable_pub, + unsafe_op_in_unsafe_fn, + unused_extern_crates, + // We intentionally choose not to deny `unused_qualifications`. When items + // are added to the prelude (e.g., `core::mem::size_of`), this has the + // consequence of making some uses trigger this lint on the latest toolchain + // (e.g., `mem::size_of`), but fixing it (e.g. by replacing with `size_of`) + // does not work on older toolchains. + // + // We tested a more complicated fix in #1413, but ultimately decided that, + // since this lint is just a minor style lint, the complexity isn't worth it + // - it's fine to occasionally have unused qualifications slip through, + // especially since these do not affect our user-facing API in any way. 
+ variant_size_differences +)] +#![cfg_attr( + __ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS, + deny(fuzzy_provenance_casts, lossy_provenance_casts) +)] +#![deny( + clippy::all, + clippy::alloc_instead_of_core, + clippy::arithmetic_side_effects, + clippy::as_underscore, + clippy::assertions_on_result_states, + clippy::as_conversions, + clippy::correctness, + clippy::dbg_macro, + clippy::decimal_literal_representation, + clippy::double_must_use, + clippy::get_unwrap, + clippy::indexing_slicing, + clippy::missing_inline_in_public_items, + clippy::missing_safety_doc, + clippy::multiple_unsafe_ops_per_block, + clippy::must_use_candidate, + clippy::must_use_unit, + clippy::obfuscated_if_else, + clippy::perf, + clippy::print_stdout, + clippy::return_self_not_must_use, + clippy::std_instead_of_core, + clippy::style, + clippy::suspicious, + clippy::todo, + clippy::undocumented_unsafe_blocks, + clippy::unimplemented, + clippy::unnested_or_patterns, + clippy::unwrap_used, + clippy::use_debug +)] +// `clippy::incompatible_msrv` (implied by `clippy::suspicious`): This sometimes +// has false positives, and we test on our MSRV in CI, so it doesn't help us +// anyway. +#![allow(clippy::needless_lifetimes, clippy::type_complexity, clippy::incompatible_msrv)] +#![deny( + rustdoc::bare_urls, + rustdoc::broken_intra_doc_links, + rustdoc::invalid_codeblock_attributes, + rustdoc::invalid_html_tags, + rustdoc::invalid_rust_codeblocks, + rustdoc::missing_crate_level_docs, + rustdoc::private_intra_doc_links +)] +// In test code, it makes sense to weight more heavily towards concise, readable +// code over correct or debuggable code. +#![cfg_attr(any(test, kani), allow( + // In tests, you get line numbers and have access to source code, so panic + // messages are less important. You also often unwrap a lot, which would + // make expect'ing instead very verbose. + clippy::unwrap_used, + // In tests, there's no harm to "panic risks" - the worst that can happen is + // that your test will fail, and you'll fix it. By contrast, panic risks in + // production code introduce the possibly of code panicking unexpectedly "in + // the field". + clippy::arithmetic_side_effects, + clippy::indexing_slicing, +))] +#![cfg_attr(not(any(test, kani, feature = "std")), no_std)] +#![cfg_attr( + all(feature = "simd-nightly", target_arch = "arm"), + feature(stdarch_arm_neon_intrinsics) +)] +#![cfg_attr( + all(feature = "simd-nightly", any(target_arch = "powerpc", target_arch = "powerpc64")), + feature(stdarch_powerpc) +)] +#![cfg_attr(feature = "float-nightly", feature(f16, f128))] +#![cfg_attr(doc_cfg, feature(doc_cfg))] +#![cfg_attr(__ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS, feature(coverage_attribute))] +#![cfg_attr( + any(__ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS, miri), + feature(layout_for_ptr) +)] + +// This is a hack to allow zerocopy-derive derives to work in this crate. They +// assume that zerocopy is linked as an extern crate, so they access items from +// it as `zerocopy::Xxx`. This makes that still work. +#[cfg(any(feature = "derive", test))] +extern crate self as zerocopy; + +#[doc(hidden)] +#[macro_use] +pub mod util; + +pub mod byte_slice; +pub mod byteorder; +mod deprecated; + +#[doc(hidden)] +pub mod doctests; + +// This module is `pub` so that zerocopy's error types and error handling +// documentation is grouped together in a cohesive module. In practice, we +// expect most users to use the re-export of `error`'s items to avoid identifier +// stuttering. 
+pub mod error; +mod impls; +#[doc(hidden)] +pub mod layout; +mod macros; +#[doc(hidden)] +pub mod pointer; +mod r#ref; +mod split_at; +// FIXME(#252): If we make this pub, come up with a better name. +mod wrappers; + +use core::{ + cell::{Cell, UnsafeCell}, + cmp::Ordering, + fmt::{self, Debug, Display, Formatter}, + hash::Hasher, + marker::PhantomData, + mem::{self, ManuallyDrop, MaybeUninit as CoreMaybeUninit}, + num::{ + NonZeroI128, NonZeroI16, NonZeroI32, NonZeroI64, NonZeroI8, NonZeroIsize, NonZeroU128, + NonZeroU16, NonZeroU32, NonZeroU64, NonZeroU8, NonZeroUsize, Wrapping, + }, + ops::{Deref, DerefMut}, + ptr::{self, NonNull}, + slice, +}; +#[cfg(feature = "std")] +use std::io; + +use crate::pointer::invariant::{self, BecauseExclusive}; +pub use crate::{ + byte_slice::*, + byteorder::*, + error::*, + r#ref::*, + split_at::{Split, SplitAt}, + wrappers::*, +}; + +#[cfg(any(feature = "alloc", test, kani))] +extern crate alloc; +#[cfg(any(feature = "alloc", test))] +use alloc::{boxed::Box, vec::Vec}; +#[cfg(any(feature = "alloc", test))] +use core::alloc::Layout; + +use util::MetadataOf; + +// Used by `KnownLayout`. +#[doc(hidden)] +pub use crate::layout::*; +// Used by `TryFromBytes::is_bit_valid`. +#[doc(hidden)] +pub use crate::pointer::{invariant::BecauseImmutable, Maybe, Ptr}; +// For each trait polyfill, as soon as the corresponding feature is stable, the +// polyfill import will be unused because method/function resolution will prefer +// the inherent method/function over a trait method/function. Thus, we suppress +// the `unused_imports` warning. +// +// See the documentation on `util::polyfills` for more information. +#[allow(unused_imports)] +use crate::util::polyfills::{self, NonNullExt as _, NumExt as _}; + +#[rustversion::nightly] +#[cfg(all(test, not(__ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS)))] +const _: () = { + #[deprecated = "some tests may be skipped due to missing RUSTFLAGS=\"--cfg __ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS\""] + const _WARNING: () = (); + #[warn(deprecated)] + _WARNING +}; + +// These exist so that code which was written against the old names will get +// less confusing error messages when they upgrade to a more recent version of +// zerocopy. On our MSRV toolchain, the error messages read, for example: +// +// error[E0603]: trait `FromZeroes` is private +// --> examples/deprecated.rs:1:15 +// | +// 1 | use zerocopy::FromZeroes; +// | ^^^^^^^^^^ private trait +// | +// note: the trait `FromZeroes` is defined here +// --> /Users/josh/workspace/zerocopy/src/lib.rs:1845:5 +// | +// 1845 | use FromZeros as FromZeroes; +// | ^^^^^^^^^^^^^^^^^^^^^^^ +// +// The "note" provides enough context to make it easy to figure out how to fix +// the error. +/// Implements [`KnownLayout`]. +/// +/// This derive analyzes various aspects of a type's layout that are needed for +/// some of zerocopy's APIs. It can be applied to structs, enums, and unions; +/// e.g.: +/// +/// ``` +/// # use zerocopy_derive::KnownLayout; +/// #[derive(KnownLayout)] +/// struct MyStruct { +/// # /* +/// ... +/// # */ +/// } +/// +/// #[derive(KnownLayout)] +/// enum MyEnum { +/// # V00, +/// # /* +/// ... +/// # */ +/// } +/// +/// #[derive(KnownLayout)] +/// union MyUnion { +/// # variant: u8, +/// # /* +/// ... +/// # */ +/// } +/// ``` +/// +/// # Limitations +/// +/// This derive cannot currently be applied to unsized structs without an +/// explicit `repr` attribute. 
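// Editor's illustrative sketch (not part of the vendored zerocopy docs):
// with an explicit `repr`, the derive does accept an unsized (slice DST)
// struct; field names here are hypothetical:
//
//     #[derive(zerocopy_derive::KnownLayout)]
//     #[repr(C)]
//     struct Packet {
//         length: u8,
//         body: [u8],
//     }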
+/// +/// Some invocations of this derive run afoul of a [known bug] in Rust's type +/// privacy checker. For example, this code: +/// +/// ```compile_fail,E0446 +/// use zerocopy::*; +/// # use zerocopy_derive::*; +/// +/// #[derive(KnownLayout)] +/// #[repr(C)] +/// pub struct PublicType { +/// leading: Foo, +/// trailing: Bar, +/// } +/// +/// #[derive(KnownLayout)] +/// struct Foo; +/// +/// #[derive(KnownLayout)] +/// struct Bar; +/// ``` +/// +/// ...results in a compilation error: +/// +/// ```text +/// error[E0446]: private type `Bar` in public interface +/// --> examples/bug.rs:3:10 +/// | +/// 3 | #[derive(KnownLayout)] +/// | ^^^^^^^^^^^ can't leak private type +/// ... +/// 14 | struct Bar; +/// | ---------- `Bar` declared as private +/// | +/// = note: this error originates in the derive macro `KnownLayout` (in Nightly builds, run with -Z macro-backtrace for more info) +/// ``` +/// +/// This issue arises when `#[derive(KnownLayout)]` is applied to `repr(C)` +/// structs whose trailing field type is less public than the enclosing struct. +/// +/// To work around this, mark the trailing field type `pub` and annotate it with +/// `#[doc(hidden)]`; e.g.: +/// +/// ```no_run +/// use zerocopy::*; +/// # use zerocopy_derive::*; +/// +/// #[derive(KnownLayout)] +/// #[repr(C)] +/// pub struct PublicType { +/// leading: Foo, +/// trailing: Bar, +/// } +/// +/// #[derive(KnownLayout)] +/// struct Foo; +/// +/// #[doc(hidden)] +/// #[derive(KnownLayout)] +/// pub struct Bar; // <- `Bar` is now also `pub` +/// ``` +/// +/// [known bug]: https://github.com/rust-lang/rust/issues/45713 +#[cfg(any(feature = "derive", test))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))] +pub use zerocopy_derive::KnownLayout; +#[allow(unused)] +use {FromZeros as FromZeroes, IntoBytes as AsBytes, Ref as LayoutVerified}; + +/// Indicates that zerocopy can reason about certain aspects of a type's layout. +/// +/// This trait is required by many of zerocopy's APIs. It supports sized types, +/// slices, and [slice DSTs](#dynamically-sized-types). +/// +/// # Implementation +/// +/// **Do not implement this trait yourself!** Instead, use +/// [`#[derive(KnownLayout)]`][derive]; e.g.: +/// +/// ``` +/// # use zerocopy_derive::KnownLayout; +/// #[derive(KnownLayout)] +/// struct MyStruct { +/// # /* +/// ... +/// # */ +/// } +/// +/// #[derive(KnownLayout)] +/// enum MyEnum { +/// # /* +/// ... +/// # */ +/// } +/// +/// #[derive(KnownLayout)] +/// union MyUnion { +/// # variant: u8, +/// # /* +/// ... +/// # */ +/// } +/// ``` +/// +/// This derive performs a sophisticated analysis to deduce the layout +/// characteristics of types. You **must** implement this trait via the derive. +/// +/// # Dynamically-sized types +/// +/// `KnownLayout` supports slice-based dynamically sized types ("slice DSTs"). +/// +/// A slice DST is a type whose trailing field is either a slice or another +/// slice DST, rather than a type with fixed size. For example: +/// +/// ``` +/// #[repr(C)] +/// struct PacketHeader { +/// # /* +/// ... +/// # */ +/// } +/// +/// #[repr(C)] +/// struct Packet { +/// header: PacketHeader, +/// body: [u8], +/// } +/// ``` +/// +/// It can be useful to think of slice DSTs as a generalization of slices - in +/// other words, a normal slice is just the special case of a slice DST with +/// zero leading fields. 
In particular: +/// - Like slices, slice DSTs can have different lengths at runtime +/// - Like slices, slice DSTs cannot be passed by-value, but only by reference +/// or via other indirection such as `Box` +/// - Like slices, a reference (or `Box`, or other pointer type) to a slice DST +/// encodes the number of elements in the trailing slice field +/// +/// ## Slice DST layout +/// +/// Just like other composite Rust types, the layout of a slice DST is not +/// well-defined unless it is specified using an explicit `#[repr(...)]` +/// attribute such as `#[repr(C)]`. [Other representations are +/// supported][reprs], but in this section, we'll use `#[repr(C)]` as our +/// example. +/// +/// A `#[repr(C)]` slice DST is laid out [just like sized `#[repr(C)]` +/// types][repr-c-structs], but the presence of a variable-length field +/// introduces the possibility of *dynamic padding*. In particular, it may be +/// necessary to add trailing padding *after* the trailing slice field in order +/// to satisfy the outer type's alignment, and the amount of padding required +/// may be a function of the length of the trailing slice field. This is just a +/// natural consequence of the normal `#[repr(C)]` rules applied to slice DSTs, +/// but it can result in surprising behavior. For example, consider the +/// following type: +/// +/// ``` +/// #[repr(C)] +/// struct Foo { +/// a: u32, +/// b: u8, +/// z: [u16], +/// } +/// ``` +/// +/// Assuming that `u32` has alignment 4 (this is not true on all platforms), +/// then `Foo` has alignment 4 as well. Here is the smallest possible value for +/// `Foo`: +/// +/// ```text +/// byte offset | 01234567 +/// field | aaaab--- +/// >< +/// ``` +/// +/// In this value, `z` has length 0. Abiding by `#[repr(C)]`, the lowest offset +/// that we can place `z` at is 5, but since `z` has alignment 2, we need to +/// round up to offset 6. This means that there is one byte of padding between +/// `b` and `z`, then 0 bytes of `z` itself (denoted `><` in this diagram), and +/// then two bytes of padding after `z` in order to satisfy the overall +/// alignment of `Foo`. The size of this instance is 8 bytes. +/// +/// What about if `z` has length 1? +/// +/// ```text +/// byte offset | 01234567 +/// field | aaaab-zz +/// ``` +/// +/// In this instance, `z` has length 1, and thus takes up 2 bytes. That means +/// that we no longer need padding after `z` in order to satisfy `Foo`'s +/// alignment. We've now seen two different values of `Foo` with two different +/// lengths of `z`, but they both have the same size - 8 bytes. +/// +/// What about if `z` has length 2? +/// +/// ```text +/// byte offset | 012345678901 +/// field | aaaab-zzzz-- +/// ``` +/// +/// Now `z` has length 2, and thus takes up 4 bytes. This brings our un-padded +/// size to 10, and so we now need another 2 bytes of padding after `z` to +/// satisfy `Foo`'s alignment. +/// +/// Again, all of this is just a logical consequence of the `#[repr(C)]` rules +/// applied to slice DSTs, but it can be surprising that the amount of trailing +/// padding becomes a function of the trailing slice field's length, and thus +/// can only be computed at runtime. +/// +/// [reprs]: https://doc.rust-lang.org/reference/type-layout.html#representations +/// [repr-c-structs]: https://doc.rust-lang.org/reference/type-layout.html#reprc-structs +/// +/// ## What is a valid size? +/// +/// There are two places in zerocopy's API that we refer to "a valid size" of a +/// type. 
In normal casts or conversions, where the source is a byte slice, we +/// need to know whether the source byte slice is a valid size of the +/// destination type. In prefix or suffix casts, we need to know whether *there +/// exists* a valid size of the destination type which fits in the source byte +/// slice and, if so, what the largest such size is. +/// +/// As outlined above, a slice DST's size is defined by the number of elements +/// in its trailing slice field. However, there is not necessarily a 1-to-1 +/// mapping between trailing slice field length and overall size. As we saw in +/// the previous section with the type `Foo`, instances with both 0 and 1 +/// elements in the trailing `z` field result in a `Foo` whose size is 8 bytes. +/// +/// When we say "x is a valid size of `T`", we mean one of two things: +/// - If `T: Sized`, then we mean that `x == size_of::<T>()` +/// - If `T` is a slice DST, then we mean that there exists a `len` such that the instance of +/// `T` with `len` trailing slice elements has size `x` +/// +/// When we say "largest possible size of `T` that fits in a byte slice", we +/// mean one of two things: +/// - If `T: Sized`, then we mean `size_of::<T>()` if the byte slice is at least +/// `size_of::<T>()` bytes long +/// - If `T` is a slice DST, then we mean to consider all values, `len`, such +/// that the instance of `T` with `len` trailing slice elements fits in the +/// byte slice, and to choose the largest such `len`, if any +/// +/// +/// # Safety +/// +/// This trait does not convey any safety guarantees to code outside this crate. +/// +/// You must not rely on the `#[doc(hidden)]` internals of `KnownLayout`. Future +/// releases of zerocopy may make backwards-breaking changes to these items, +/// including changes that only affect soundness, which may cause code which +/// uses those items to silently become unsound. +/// +#[cfg_attr(feature = "derive", doc = "[derive]: zerocopy_derive::KnownLayout")] +#[cfg_attr( + not(feature = "derive"), + doc = concat!("[derive]: https://docs.rs/zerocopy/", env!("CARGO_PKG_VERSION"), "/zerocopy/derive.KnownLayout.html"), +)] +#[cfg_attr( + not(no_zerocopy_diagnostic_on_unimplemented_1_78_0), + diagnostic::on_unimplemented(note = "Consider adding `#[derive(KnownLayout)]` to `{Self}`") +)] +pub unsafe trait KnownLayout { + // The `Self: Sized` bound makes it so that `KnownLayout` can still be + // object safe. It's not currently object safe thanks to `const LAYOUT`, and + // it likely won't be in the future, but there's no reason not to be + // forwards-compatible with object safety. + #[doc(hidden)] + fn only_derive_is_allowed_to_implement_this_trait() + where + Self: Sized; + + /// The type of metadata stored in a pointer to `Self`. + /// + /// This is `()` for sized types and `usize` for slice DSTs. + type PointerMetadata: PointerMetadata; + + /// A maybe-uninitialized analog of `Self` + /// + /// # Safety + /// + /// `Self::LAYOUT` and `Self::MaybeUninit::LAYOUT` are identical. + /// `Self::MaybeUninit` admits uninitialized bytes in all positions. + #[doc(hidden)] + type MaybeUninit: ?Sized + KnownLayout<PointerMetadata = Self::PointerMetadata>; + + /// The layout of `Self`. + /// + /// # Safety + /// + /// Callers may assume that `LAYOUT` accurately reflects the layout of + /// `Self`. 
In particular: + /// - `LAYOUT.align` is equal to `Self`'s alignment + /// - If `Self: Sized`, then `LAYOUT.size_info == SizeInfo::Sized { size }` + /// where `size == size_of::<Self>()` + /// - If `Self` is a slice DST, then `LAYOUT.size_info == + /// SizeInfo::SliceDst(slice_layout)` where: + /// - The size, `size`, of an instance of `Self` with `elems` trailing + /// slice elements is equal to `slice_layout.offset + + /// slice_layout.elem_size * elems` rounded up to the nearest multiple + /// of `LAYOUT.align` + /// - For such an instance, any bytes in the range `[slice_layout.offset + + /// slice_layout.elem_size * elems, size)` are padding and must not be + /// assumed to be initialized + #[doc(hidden)] + const LAYOUT: DstLayout; + + /// SAFETY: The returned pointer has the same address and provenance as + /// `bytes`. If `Self` is a DST, the returned pointer's referent has `elems` + /// elements in its trailing slice. + #[doc(hidden)] + fn raw_from_ptr_len(bytes: NonNull<u8>, meta: Self::PointerMetadata) -> NonNull<Self>; + + /// Extracts the metadata from a pointer to `Self`. + /// + /// # Safety + /// + /// `pointer_to_metadata` always returns the correct metadata stored in + /// `ptr`. + #[doc(hidden)] + fn pointer_to_metadata(ptr: *mut Self) -> Self::PointerMetadata; + + /// Computes the length of the byte range addressed by `ptr`. + /// + /// Returns `None` if the resulting length would not fit in an `usize`. + /// + /// # Safety + /// + /// Callers may assume that `size_of_val_raw` always returns the correct + /// size. + /// + /// Callers may assume that, if `ptr` addresses a byte range whose length + /// fits in an `usize`, this will return `Some`. + #[doc(hidden)] + #[must_use] + #[inline(always)] + fn size_of_val_raw(ptr: NonNull<Self>) -> Option<usize> { + let meta = Self::pointer_to_metadata(ptr.as_ptr()); + // SAFETY: `size_for_metadata` promises to only return `None` if the + // resulting size would not fit in a `usize`. + Self::size_for_metadata(meta) + } + + #[doc(hidden)] + #[must_use] + #[inline(always)] + fn raw_dangling() -> NonNull<Self> { + let meta = Self::PointerMetadata::from_elem_count(0); + Self::raw_from_ptr_len(NonNull::dangling(), meta) + } + + /// Computes the size of an object of type `Self` with the given pointer + /// metadata. + /// + /// # Safety + /// + /// `size_for_metadata` promises to return `None` if and only if the + /// resulting size would not fit in a `usize`. Note that the returned size + /// could exceed the actual maximum valid size of an allocated object, + /// `isize::MAX`. + /// + /// # Examples + /// + /// ``` + /// use zerocopy::KnownLayout; + /// + /// assert_eq!(u8::size_for_metadata(()), Some(1)); + /// assert_eq!(u16::size_for_metadata(()), Some(2)); + /// assert_eq!(<[u8]>::size_for_metadata(42), Some(42)); + /// assert_eq!(<[u16]>::size_for_metadata(42), Some(84)); + /// + /// // This size exceeds the maximum valid object size (`isize::MAX`): + /// assert_eq!(<[u8]>::size_for_metadata(usize::MAX), Some(usize::MAX)); + /// + /// // This size, if computed, would exceed `usize::MAX`: + /// assert_eq!(<[u16]>::size_for_metadata(usize::MAX), None); + /// ``` + #[inline(always)] + fn size_for_metadata(meta: Self::PointerMetadata) -> Option<usize> { + meta.size_for_metadata(Self::LAYOUT) + } +} + +/// Efficiently produces the [`TrailingSliceLayout`] of `T`. 
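+///
+/// As a rough sketch of what this describes: for a `#[repr(C)]` slice DST like
+/// the `Foo` type in the [`KnownLayout`] docs (`a: u32`, `b: u8`, `z: [u16]`),
+/// and assuming `u32` has alignment 4, the trailing slice layout would be:
+///
+/// ```text
+/// TrailingSliceLayout { offset: 6, elem_size: 2 }
+/// ```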
+#[inline(always)] +pub(crate) fn trailing_slice_layout<T>() -> TrailingSliceLayout +where + T: ?Sized + KnownLayout<PointerMetadata = usize>, +{ + trait LayoutFacts { + const SIZE_INFO: TrailingSliceLayout; + } + + impl<T: ?Sized> LayoutFacts for T + where + T: KnownLayout<PointerMetadata = usize>, + { + const SIZE_INFO: TrailingSliceLayout = match T::LAYOUT.size_info { + crate::SizeInfo::Sized { .. } => const_panic!("unreachable"), + crate::SizeInfo::SliceDst(info) => info, + }; + } + + T::SIZE_INFO +} + +/// The metadata associated with a [`KnownLayout`] type. +#[doc(hidden)] +pub trait PointerMetadata: Copy + Eq + Debug { + /// Constructs a `Self` from an element count. + /// + /// If `Self = ()`, this returns `()`. If `Self = usize`, this returns + /// `elems`. No other types are currently supported. + fn from_elem_count(elems: usize) -> Self; + + /// Computes the size of the object with the given layout and pointer + /// metadata. + /// + /// # Panics + /// + /// If `Self = ()`, `layout` must describe a sized type. If `Self = usize`, + /// `layout` must describe a slice DST. Otherwise, `size_for_metadata` may + /// panic. + /// + /// # Safety + /// + /// `size_for_metadata` promises to only return `None` if the resulting size + /// would not fit in a `usize`. + fn size_for_metadata(self, layout: DstLayout) -> Option<usize>; +} + +impl PointerMetadata for () { + #[inline] + #[allow(clippy::unused_unit)] + fn from_elem_count(_elems: usize) -> () {} + + #[inline] + fn size_for_metadata(self, layout: DstLayout) -> Option<usize> { + match layout.size_info { + SizeInfo::Sized { size } => Some(size), + // NOTE: This branch is unreachable, but we return `None` rather + // than `unreachable!()` to avoid generating panic paths. + SizeInfo::SliceDst(_) => None, + } + } +} + +impl PointerMetadata for usize { + #[inline] + fn from_elem_count(elems: usize) -> usize { + elems + } + + #[inline] + fn size_for_metadata(self, layout: DstLayout) -> Option<usize> { + match layout.size_info { + SizeInfo::SliceDst(TrailingSliceLayout { offset, elem_size }) => { + let slice_len = elem_size.checked_mul(self)?; + let without_padding = offset.checked_add(slice_len)?; + without_padding.checked_add(util::padding_needed_for(without_padding, layout.align)) + } + // NOTE: This branch is unreachable, but we return `None` rather + // than `unreachable!()` to avoid generating panic paths. + SizeInfo::Sized { .. } => None, + } + } +} + +// SAFETY: Delegates safety to `DstLayout::for_slice`. +unsafe impl<T> KnownLayout for [T] { + #[allow(clippy::missing_inline_in_public_items, dead_code)] + #[cfg_attr( + all(coverage_nightly, __ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS), + coverage(off) + )] + fn only_derive_is_allowed_to_implement_this_trait() + where + Self: Sized, + { + } + + type PointerMetadata = usize; + + // SAFETY: `CoreMaybeUninit<T>::LAYOUT` and `T::LAYOUT` are identical + // because `CoreMaybeUninit<T>` has the same size and alignment as `T` [1]. + // Consequently, `[CoreMaybeUninit<T>]::LAYOUT` and `[T]::LAYOUT` are + // identical, because they both lack a fixed-sized prefix and because they + // inherit the alignments of their inner element type (which are identical) + // [2][3]. + // + // `[CoreMaybeUninit<T>]` admits uninitialized bytes at all positions + // because `CoreMaybeUninit<T>` admits uninitialized bytes at all positions + // and because the inner elements of `[CoreMaybeUninit<T>]` are laid out + // back-to-back [2][3]. 
+ // + // [1] Per https://doc.rust-lang.org/1.81.0/std/mem/union.MaybeUninit.html#layout-1: + // + // `MaybeUninit<T>` is guaranteed to have the same size, alignment, and ABI as + // `T` + // + // [2] Per https://doc.rust-lang.org/1.82.0/reference/type-layout.html#slice-layout: + // + // Slices have the same layout as the section of the array they slice. + // + // [3] Per https://doc.rust-lang.org/1.82.0/reference/type-layout.html#array-layout: + // + // An array of `[T; N]` has a size of `size_of::<T>() * N` and the same + // alignment of `T`. Arrays are laid out so that the zero-based `nth` + // element of the array is offset from the start of the array by `n * + // size_of::<T>()` bytes. + type MaybeUninit = [CoreMaybeUninit<T>]; + + const LAYOUT: DstLayout = DstLayout::for_slice::<T>(); + + // SAFETY: `.cast` preserves address and provenance. The returned pointer + // refers to an object with `elems` elements by construction. + #[inline(always)] + fn raw_from_ptr_len(data: NonNull<u8>, elems: usize) -> NonNull<Self> { + // FIXME(#67): Remove this allow. See NonNullExt for more details. + #[allow(unstable_name_collisions)] + NonNull::slice_from_raw_parts(data.cast::<T>(), elems) + } + + #[inline(always)] + fn pointer_to_metadata(ptr: *mut [T]) -> usize { + #[allow(clippy::as_conversions)] + let slc = ptr as *const [()]; + + // SAFETY: + // - `()` has alignment 1, so `slc` is trivially aligned. + // - `slc` was derived from a non-null pointer. + // - The size is 0 regardless of the length, so it is sound to + // materialize a reference regardless of location. + // - By invariant, `self.ptr` has valid provenance. + let slc = unsafe { &*slc }; + + // This is correct because the preceding `as` cast preserves the number + // of slice elements. [1] + // + // [1] Per https://doc.rust-lang.org/reference/expressions/operator-expr.html#pointer-to-pointer-cast: + // + // For slice types like `[T]` and `[U]`, the raw pointer types `*const + // [T]`, `*mut [T]`, `*const [U]`, and `*mut [U]` encode the number of + // elements in this slice. Casts between these raw pointer types + // preserve the number of elements. ... The same holds for `str` and + // any compound type whose unsized tail is a slice type, such as + // struct `Foo(i32, [u8])` or `(u64, Foo)`. + slc.len() + } +} + +#[rustfmt::skip] +impl_known_layout!( + (), + u8, i8, u16, i16, u32, i32, u64, i64, u128, i128, usize, isize, f32, f64, + bool, char, + NonZeroU8, NonZeroI8, NonZeroU16, NonZeroI16, NonZeroU32, NonZeroI32, + NonZeroU64, NonZeroI64, NonZeroU128, NonZeroI128, NonZeroUsize, NonZeroIsize +); +#[rustfmt::skip] +#[cfg(feature = "float-nightly")] +impl_known_layout!( + #[cfg_attr(doc_cfg, doc(cfg(feature = "float-nightly")))] + f16, + #[cfg_attr(doc_cfg, doc(cfg(feature = "float-nightly")))] + f128 +); +#[rustfmt::skip] +impl_known_layout!( + T => Option<T>, + T: ?Sized => PhantomData<T>, + T => Wrapping<T>, + T => CoreMaybeUninit<T>, + T: ?Sized => *const T, + T: ?Sized => *mut T, + T: ?Sized => &'_ T, + T: ?Sized => &'_ mut T, +); +impl_known_layout!(const N: usize, T => [T; N]); + +// SAFETY: `str` has the same representation as `[u8]`. `ManuallyDrop<T>` [1], +// `UnsafeCell<T>` [2], and `Cell<T>` [3] have the same representation as `T`. 
+// +// [1] Per https://doc.rust-lang.org/1.85.0/std/mem/struct.ManuallyDrop.html: +// +// `ManuallyDrop<T>` is guaranteed to have the same layout and bit validity as +// `T` +// +// [2] Per https://doc.rust-lang.org/1.85.0/core/cell/struct.UnsafeCell.html#memory-layout: +// +// `UnsafeCell<T>` has the same in-memory representation as its inner type +// `T`. +// +// [3] Per https://doc.rust-lang.org/1.85.0/core/cell/struct.Cell.html#memory-layout: +// +// `Cell<T>` has the same in-memory representation as `T`. +#[allow(clippy::multiple_unsafe_ops_per_block)] +const _: () = unsafe { + unsafe_impl_known_layout!( + #[repr([u8])] + str + ); + unsafe_impl_known_layout!(T: ?Sized + KnownLayout => #[repr(T)] ManuallyDrop<T>); + unsafe_impl_known_layout!(T: ?Sized + KnownLayout => #[repr(T)] UnsafeCell<T>); + unsafe_impl_known_layout!(T: ?Sized + KnownLayout => #[repr(T)] Cell<T>); +}; + +// SAFETY: +// - By consequence of the invariant on `T::MaybeUninit` that `T::LAYOUT` and +// `T::MaybeUninit::LAYOUT` are equal, `T` and `T::MaybeUninit` have the same: +// - Fixed prefix size +// - Alignment +// - (For DSTs) trailing slice element size +// - By consequence of the above, referents `T::MaybeUninit` and `T` +// require the same kind of pointer metadata, and thus it is valid to perform +// an `as` cast from `*mut T` to `*mut T::MaybeUninit`, and this operation +// preserves referent size (ie, `size_of_val_raw`). +const _: () = unsafe { + unsafe_impl_known_layout!(T: ?Sized + KnownLayout => #[repr(T::MaybeUninit)] MaybeUninit<T>) +}; + +/// Analyzes whether a type is [`FromZeros`]. +/// +/// This derive analyzes, at compile time, whether the annotated type satisfies +/// the [safety conditions] of `FromZeros` and implements `FromZeros` and its +/// supertraits if it is sound to do so. This derive can be applied to structs, +/// enums, and unions; e.g.: +/// +/// ``` +/// # use zerocopy_derive::{FromZeros, Immutable}; +/// #[derive(FromZeros)] +/// struct MyStruct { +/// # /* +/// ... +/// # */ +/// } +/// +/// #[derive(FromZeros)] +/// #[repr(u8)] +/// enum MyEnum { +/// # Variant0, +/// # /* +/// ... +/// # */ +/// } +/// +/// #[derive(FromZeros, Immutable)] +/// union MyUnion { +/// # variant: u8, +/// # /* +/// ... +/// # */ +/// } +/// ``` +/// +/// [safety conditions]: trait@FromZeros#safety +/// +/// # Analysis +/// +/// *This section describes, roughly, the analysis performed by this derive to +/// determine whether it is sound to implement `FromZeros` for a given type. +/// Unless you are modifying the implementation of this derive, or attempting to +/// manually implement `FromZeros` for a type yourself, you don't need to read +/// this section.* +/// +/// If a type has the following properties, then this derive can implement +/// `FromZeros` for that type: +/// +/// - If the type is a struct, all of its fields must be `FromZeros`. +/// - If the type is an enum: +/// - It must have a defined representation (`repr`s `C`, `u8`, `u16`, `u32`, +/// `u64`, `usize`, `i8`, `i16`, `i32`, `i64`, or `isize`). +/// - It must have a variant with a discriminant/tag of `0`, and its fields +/// must be `FromZeros`. See [the reference] for a description of how +/// discriminant values are specified. +/// - The fields of that variant must be `FromZeros`. +/// +/// This analysis is subject to change. Unsafe code may *only* rely on the +/// documented [safety conditions] of `FromZeros`, and must *not* rely on the +/// implementation details of this derive.
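+///
+/// For instance, a sketch of a fieldless enum that satisfies the conditions
+/// above (the names are illustrative), because it has a defined representation
+/// and a variant with discriminant `0`:
+///
+/// ```
+/// # use zerocopy_derive::FromZeros;
+/// #[derive(FromZeros)]
+/// #[repr(u8)]
+/// enum Status {
+///     Idle = 0,
+///     Busy = 1,
+/// }
+/// ```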
+/// +/// [the reference]: https://doc.rust-lang.org/reference/items/enumerations.html#custom-discriminant-values-for-fieldless-enumerations +/// +/// ## Why isn't an explicit representation required for structs? +/// +/// Neither this derive, nor the [safety conditions] of `FromZeros`, requires +/// that structs are marked with `#[repr(C)]`. +/// +/// Per the [Rust reference](reference), +/// +/// > The representation of a type can change the padding between fields, but +/// > does not change the layout of the fields themselves. +/// +/// [reference]: https://doc.rust-lang.org/reference/type-layout.html#representations +/// +/// Since the layout of structs only consists of padding bytes and field bytes, +/// a struct is soundly `FromZeros` if: +/// 1. its padding is soundly `FromZeros`, and +/// 2. its fields are soundly `FromZeros`. +/// +/// The answer to the first question is always yes: padding bytes do not have +/// any validity constraints. A [discussion] of this question in the Unsafe Code +/// Guidelines Working Group concluded that it would be virtually unimaginable +/// for future versions of rustc to add validity constraints to padding bytes. +/// +/// [discussion]: https://github.com/rust-lang/unsafe-code-guidelines/issues/174 +/// +/// Whether a struct is soundly `FromZeros` therefore solely depends on whether +/// its fields are `FromZeros`. +// FIXME(#146): Document why we don't require an enum to have an explicit `repr` +// attribute. +#[cfg(any(feature = "derive", test))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))] +pub use zerocopy_derive::FromZeros; +/// Analyzes whether a type is [`Immutable`]. +/// +/// This derive analyzes, at compile time, whether the annotated type satisfies +/// the [safety conditions] of `Immutable` and implements `Immutable` if it is +/// sound to do so. This derive can be applied to structs, enums, and unions; +/// e.g.: +/// +/// ``` +/// # use zerocopy_derive::Immutable; +/// #[derive(Immutable)] +/// struct MyStruct { +/// # /* +/// ... +/// # */ +/// } +/// +/// #[derive(Immutable)] +/// enum MyEnum { +/// # Variant0, +/// # /* +/// ... +/// # */ +/// } +/// +/// #[derive(Immutable)] +/// union MyUnion { +/// # variant: u8, +/// # /* +/// ... +/// # */ +/// } +/// ``` +/// +/// # Analysis +/// +/// *This section describes, roughly, the analysis performed by this derive to +/// determine whether it is sound to implement `Immutable` for a given type. +/// Unless you are modifying the implementation of this derive, you don't need +/// to read this section.* +/// +/// If a type has the following properties, then this derive can implement +/// `Immutable` for that type: +/// +/// - All fields must be `Immutable`. +/// +/// This analysis is subject to change. Unsafe code may *only* rely on the +/// documented [safety conditions] of `Immutable`, and must *not* rely on the +/// implementation details of this derive. +/// +/// [safety conditions]: trait@Immutable#safety +#[cfg(any(feature = "derive", test))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))] +pub use zerocopy_derive::Immutable; + +/// Types which are free from interior mutability. +/// +/// `T: Immutable` indicates that `T` does not permit interior mutation, except +/// by ownership or an exclusive (`&mut`) borrow. 
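+///
+/// As a rough illustration, plain data types such as `u8` and `[u8; 8]` are
+/// `Immutable`, while `core::cell::Cell<u8>` is not, because a shared
+/// `&Cell<u8>` permits mutation of its referent. A minimal sketch of checking
+/// the bound (the helper function is illustrative):
+///
+/// ```
+/// # use zerocopy::Immutable;
+/// fn assert_immutable<T: ?Sized + Immutable>() {}
+///
+/// assert_immutable::<u8>();
+/// assert_immutable::<[u8; 8]>();
+/// // The equivalent call for `core::cell::Cell<u8>` would fail to compile.
+/// ```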
+/// +/// # Implementation +/// +/// **Do not implement this trait yourself!** Instead, use +/// [`#[derive(Immutable)]`][derive] (requires the `derive` Cargo feature); +/// e.g.: +/// +/// ``` +/// # use zerocopy_derive::Immutable; +/// #[derive(Immutable)] +/// struct MyStruct { +/// # /* +/// ... +/// # */ +/// } +/// +/// #[derive(Immutable)] +/// enum MyEnum { +/// # /* +/// ... +/// # */ +/// } +/// +/// #[derive(Immutable)] +/// union MyUnion { +/// # variant: u8, +/// # /* +/// ... +/// # */ +/// } +/// ``` +/// +/// This derive performs a sophisticated, compile-time safety analysis to +/// determine whether a type is `Immutable`. +/// +/// # Safety +/// +/// Unsafe code outside of this crate must not make any assumptions about `T` +/// based on `T: Immutable`. We reserve the right to relax the requirements for +/// `Immutable` in the future, and if unsafe code outside of this crate makes +/// assumptions based on `T: Immutable`, future relaxations may cause that code +/// to become unsound. +/// +// # Safety (Internal) +// +// If `T: Immutable`, unsafe code *inside of this crate* may assume that, given +// `t: &T`, `t` does not contain any [`UnsafeCell`]s at any byte location +// within the byte range addressed by `t`. This includes ranges of length 0 +// (e.g., `UnsafeCell<()>` and `[UnsafeCell<u8>; 0]`). If a type implements +// `Immutable` which violates this assumptions, it may cause this crate to +// exhibit [undefined behavior]. +// +// [`UnsafeCell`]: core::cell::UnsafeCell +// [undefined behavior]: https://raphlinus.github.io/programming/rust/2018/08/17/undefined-behavior.html +#[cfg_attr( + feature = "derive", + doc = "[derive]: zerocopy_derive::Immutable", + doc = "[derive-analysis]: zerocopy_derive::Immutable#analysis" +)] +#[cfg_attr( + not(feature = "derive"), + doc = concat!("[derive]: https://docs.rs/zerocopy/", env!("CARGO_PKG_VERSION"), "/zerocopy/derive.Immutable.html"), + doc = concat!("[derive-analysis]: https://docs.rs/zerocopy/", env!("CARGO_PKG_VERSION"), "/zerocopy/derive.Immutable.html#analysis"), +)] +#[cfg_attr( + not(no_zerocopy_diagnostic_on_unimplemented_1_78_0), + diagnostic::on_unimplemented(note = "Consider adding `#[derive(Immutable)]` to `{Self}`") +)] +pub unsafe trait Immutable { + // The `Self: Sized` bound makes it so that `Immutable` is still object + // safe. + #[doc(hidden)] + fn only_derive_is_allowed_to_implement_this_trait() + where + Self: Sized; +} + +/// Implements [`TryFromBytes`]. +/// +/// This derive synthesizes the runtime checks required to check whether a +/// sequence of initialized bytes corresponds to a valid instance of a type. +/// This derive can be applied to structs, enums, and unions; e.g.: +/// +/// ``` +/// # use zerocopy_derive::{TryFromBytes, Immutable}; +/// #[derive(TryFromBytes)] +/// struct MyStruct { +/// # /* +/// ... +/// # */ +/// } +/// +/// #[derive(TryFromBytes)] +/// #[repr(u8)] +/// enum MyEnum { +/// # V00, +/// # /* +/// ... +/// # */ +/// } +/// +/// #[derive(TryFromBytes, Immutable)] +/// union MyUnion { +/// # variant: u8, +/// # /* +/// ... +/// # */ +/// } +/// ``` +/// +/// # Portability +/// +/// To ensure consistent endianness for enums with multi-byte representations, +/// explicitly specify and convert each discriminant using `.to_le()` or +/// `.to_be()`; e.g.: +/// +/// ``` +/// # use zerocopy_derive::TryFromBytes; +/// // `DataStoreVersion` is encoded in little-endian. +/// #[derive(TryFromBytes)] +/// #[repr(u32)] +/// pub enum DataStoreVersion { +/// /// Version 1 of the data store. 
+/// V1 = 9u32.to_le(), +/// +/// /// Version 2 of the data store. +/// V2 = 10u32.to_le(), +/// } +/// ``` +/// +/// [safety conditions]: trait@TryFromBytes#safety +#[cfg(any(feature = "derive", test))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))] +pub use zerocopy_derive::TryFromBytes; + +/// Types for which some bit patterns are valid. +/// +/// A memory region of the appropriate length which contains initialized bytes +/// can be viewed as a `TryFromBytes` type so long as the runtime value of those +/// bytes corresponds to a [*valid instance*] of that type. For example, +/// [`bool`] is `TryFromBytes`, so zerocopy can transmute a [`u8`] into a +/// [`bool`] so long as it first checks that the value of the [`u8`] is `0` or +/// `1`. +/// +/// # Implementation +/// +/// **Do not implement this trait yourself!** Instead, use +/// [`#[derive(TryFromBytes)]`][derive]; e.g.: +/// +/// ``` +/// # use zerocopy_derive::{TryFromBytes, Immutable}; +/// #[derive(TryFromBytes)] +/// struct MyStruct { +/// # /* +/// ... +/// # */ +/// } +/// +/// #[derive(TryFromBytes)] +/// #[repr(u8)] +/// enum MyEnum { +/// # V00, +/// # /* +/// ... +/// # */ +/// } +/// +/// #[derive(TryFromBytes, Immutable)] +/// union MyUnion { +/// # variant: u8, +/// # /* +/// ... +/// # */ +/// } +/// ``` +/// +/// This derive ensures that the runtime check of whether bytes correspond to a +/// valid instance is sound. You **must** implement this trait via the derive. +/// +/// # What is a "valid instance"? +/// +/// In Rust, each type has *bit validity*, which refers to the set of bit +/// patterns which may appear in an instance of that type. It is impossible for +/// safe Rust code to produce values which violate bit validity (ie, values +/// outside of the "valid" set of bit patterns). If `unsafe` code produces an +/// invalid value, this is considered [undefined behavior]. +/// +/// Rust's bit validity rules are currently being decided, which means that some +/// types have three classes of bit patterns: those which are definitely valid, +/// and whose validity is documented in the language; those which may or may not +/// be considered valid at some point in the future; and those which are +/// definitely invalid. +/// +/// Zerocopy takes a conservative approach, and only considers a bit pattern to +/// be valid if its validity is a documented guarantee provided by the +/// language. +/// +/// For most use cases, Rust's current guarantees align with programmers' +/// intuitions about what ought to be valid. As a result, zerocopy's +/// conservatism should not affect most users. +/// +/// If you are negatively affected by lack of support for a particular type, +/// we encourage you to let us know by [filing an issue][github-repo]. +/// +/// # `TryFromBytes` is not symmetrical with [`IntoBytes`] +/// +/// There are some types which implement both `TryFromBytes` and [`IntoBytes`], +/// but for which `TryFromBytes` is not guaranteed to accept all byte sequences +/// produced by `IntoBytes`. In other words, for some `T: TryFromBytes + +/// IntoBytes`, there exist values of `t: T` such that +/// `TryFromBytes::try_ref_from_bytes(t.as_bytes()) == None`. Code should not +/// generally assume that values produced by `IntoBytes` will necessarily be +/// accepted as valid by `TryFromBytes`. +/// +/// # Safety +/// +/// On its own, `T: TryFromBytes` does not make any guarantees about the layout +/// or representation of `T`. 
It merely provides the ability to perform a +/// validity check at runtime via methods like [`try_ref_from_bytes`]. +/// +/// You must not rely on the `#[doc(hidden)]` internals of `TryFromBytes`. +/// Future releases of zerocopy may make backwards-breaking changes to these +/// items, including changes that only affect soundness, which may cause code +/// which uses those items to silently become unsound. +/// +/// [undefined behavior]: https://raphlinus.github.io/programming/rust/2018/08/17/undefined-behavior.html +/// [github-repo]: https://github.com/google/zerocopy +/// [`try_ref_from_bytes`]: TryFromBytes::try_ref_from_bytes +/// [*valid instance*]: #what-is-a-valid-instance +#[cfg_attr(feature = "derive", doc = "[derive]: zerocopy_derive::TryFromBytes")] +#[cfg_attr( + not(feature = "derive"), + doc = concat!("[derive]: https://docs.rs/zerocopy/", env!("CARGO_PKG_VERSION"), "/zerocopy/derive.TryFromBytes.html"), +)] +#[cfg_attr( + not(no_zerocopy_diagnostic_on_unimplemented_1_78_0), + diagnostic::on_unimplemented(note = "Consider adding `#[derive(TryFromBytes)]` to `{Self}`") +)] +pub unsafe trait TryFromBytes { + // The `Self: Sized` bound makes it so that `TryFromBytes` is still object + // safe. + #[doc(hidden)] + fn only_derive_is_allowed_to_implement_this_trait() + where + Self: Sized; + + /// Does a given memory range contain a valid instance of `Self`? + /// + /// # Safety + /// + /// Unsafe code may assume that, if `is_bit_valid(candidate)` returns true, + /// `*candidate` contains a valid `Self`. + /// + /// # Panics + /// + /// `is_bit_valid` may panic. Callers are responsible for ensuring that any + /// `unsafe` code remains sound even in the face of `is_bit_valid` + /// panicking. (We support user-defined validation routines; so long as + /// these routines are not required to be `unsafe`, there is no way to + /// ensure that these do not generate panics.) + /// + /// Besides user-defined validation routines panicking, `is_bit_valid` will + /// either panic or fail to compile if called on a pointer with [`Shared`] + /// aliasing when `Self: !Immutable`. + /// + /// [`UnsafeCell`]: core::cell::UnsafeCell + /// [`Shared`]: invariant::Shared + #[doc(hidden)] + fn is_bit_valid<A: invariant::Reference>(candidate: Maybe<'_, Self, A>) -> bool; + + /// Attempts to interpret the given `source` as a `&Self`. + /// + /// If the bytes of `source` are a valid instance of `Self`, this method + /// returns a reference to those bytes interpreted as a `Self`. If the + /// length of `source` is not a [valid size of `Self`][valid-size], or if + /// `source` is not appropriately aligned, or if `source` is not a valid + /// instance of `Self`, this returns `Err`. If [`Self: + /// Unaligned`][self-unaligned], you can [infallibly discard the alignment + /// error][ConvertError::from]. + /// + /// `Self` may be a sized type, a slice, or a [slice DST][slice-dst]. + /// + /// [valid-size]: crate::KnownLayout#what-is-a-valid-size + /// [self-unaligned]: Unaligned + /// [slice-dst]: KnownLayout#dynamically-sized-types + /// + /// # Compile-Time Assertions + /// + /// This method cannot yet be used on unsized types whose dynamically-sized + /// component is zero-sized. 
Attempting to use this method on such types + /// results in a compile-time assertion error; e.g.: + /// + /// ```compile_fail,E0080 + /// use zerocopy::*; + /// # use zerocopy_derive::*; + /// + /// #[derive(TryFromBytes, Immutable, KnownLayout)] + /// #[repr(C)] + /// struct ZSTy { + /// leading_sized: u16, + /// trailing_dst: [()], + /// } + /// + /// let _ = ZSTy::try_ref_from_bytes(0u16.as_bytes()); // ⚠ Compile Error! + /// ``` + /// + /// # Examples + /// + /// ``` + /// use zerocopy::TryFromBytes; + /// # use zerocopy_derive::*; + /// + /// // The only valid value of this type is the byte `0xC0` + /// #[derive(TryFromBytes, KnownLayout, Immutable)] + /// #[repr(u8)] + /// enum C0 { xC0 = 0xC0 } + /// + /// // The only valid value of this type is the byte sequence `0xC0C0`. + /// #[derive(TryFromBytes, KnownLayout, Immutable)] + /// #[repr(C)] + /// struct C0C0(C0, C0); + /// + /// #[derive(TryFromBytes, KnownLayout, Immutable)] + /// #[repr(C)] + /// struct Packet { + /// magic_number: C0C0, + /// mug_size: u8, + /// temperature: u8, + /// marshmallows: [[u8; 2]], + /// } + /// + /// let bytes = &[0xC0, 0xC0, 240, 77, 0, 1, 2, 3, 4, 5][..]; + /// + /// let packet = Packet::try_ref_from_bytes(bytes).unwrap(); + /// + /// assert_eq!(packet.mug_size, 240); + /// assert_eq!(packet.temperature, 77); + /// assert_eq!(packet.marshmallows, [[0, 1], [2, 3], [4, 5]]); + /// + /// // These bytes are not valid instance of `Packet`. + /// let bytes = &[0x10, 0xC0, 240, 77, 0, 1, 2, 3, 4, 5][..]; + /// assert!(Packet::try_ref_from_bytes(bytes).is_err()); + /// ``` + #[must_use = "has no side effects"] + #[inline] + fn try_ref_from_bytes(source: &[u8]) -> Result<&Self, TryCastError<&[u8], Self>> + where + Self: KnownLayout + Immutable, + { + static_assert_dst_is_not_zst!(Self); + match Ptr::from_ref(source).try_cast_into_no_leftover::<Self, BecauseImmutable>(None) { + Ok(source) => { + // This call may panic. If that happens, it doesn't cause any soundness + // issues, as we have not generated any invalid state which we need to + // fix before returning. + // + // Note that one panic or post-monomorphization error condition is + // calling `try_into_valid` (and thus `is_bit_valid`) with a shared + // pointer when `Self: !Immutable`. Since `Self: Immutable`, this panic + // condition will not happen. + match source.try_into_valid() { + Ok(valid) => Ok(valid.as_ref()), + Err(e) => { + Err(e.map_src(|src| src.as_bytes::<BecauseImmutable>().as_ref()).into()) + } + } + } + Err(e) => Err(e.map_src(Ptr::as_ref).into()), + } + } + + /// Attempts to interpret the prefix of the given `source` as a `&Self`. + /// + /// This method computes the [largest possible size of `Self`][valid-size] + /// that can fit in the leading bytes of `source`. If that prefix is a valid + /// instance of `Self`, this method returns a reference to those bytes + /// interpreted as `Self`, and a reference to the remaining bytes. If there + /// are insufficient bytes, or if `source` is not appropriately aligned, or + /// if those bytes are not a valid instance of `Self`, this returns `Err`. + /// If [`Self: Unaligned`][self-unaligned], you can [infallibly discard the + /// alignment error][ConvertError::from]. + /// + /// `Self` may be a sized type, a slice, or a [slice DST][slice-dst]. 
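+    ///
+    /// For a plain sized type, a minimal sketch of prefix parsing (`bool`'s
+    /// only valid byte values are `0` and `1`):
+    ///
+    /// ```
+    /// use zerocopy::TryFromBytes;
+    ///
+    /// let bytes = &[1u8, 0xFF][..];
+    /// let (parsed, rest) = bool::try_ref_from_prefix(bytes).unwrap();
+    /// assert!(*parsed);
+    /// assert_eq!(rest, &[0xFF][..]);
+    /// ```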
+ /// + /// [valid-size]: crate::KnownLayout#what-is-a-valid-size + /// [self-unaligned]: Unaligned + /// [slice-dst]: KnownLayout#dynamically-sized-types + /// + /// # Compile-Time Assertions + /// + /// This method cannot yet be used on unsized types whose dynamically-sized + /// component is zero-sized. Attempting to use this method on such types + /// results in a compile-time assertion error; e.g.: + /// + /// ```compile_fail,E0080 + /// use zerocopy::*; + /// # use zerocopy_derive::*; + /// + /// #[derive(TryFromBytes, Immutable, KnownLayout)] + /// #[repr(C)] + /// struct ZSTy { + /// leading_sized: u16, + /// trailing_dst: [()], + /// } + /// + /// let _ = ZSTy::try_ref_from_prefix(0u16.as_bytes()); // ⚠ Compile Error! + /// ``` + /// + /// # Examples + /// + /// ``` + /// use zerocopy::TryFromBytes; + /// # use zerocopy_derive::*; + /// + /// // The only valid value of this type is the byte `0xC0` + /// #[derive(TryFromBytes, KnownLayout, Immutable)] + /// #[repr(u8)] + /// enum C0 { xC0 = 0xC0 } + /// + /// // The only valid value of this type is the bytes `0xC0C0`. + /// #[derive(TryFromBytes, KnownLayout, Immutable)] + /// #[repr(C)] + /// struct C0C0(C0, C0); + /// + /// #[derive(TryFromBytes, KnownLayout, Immutable)] + /// #[repr(C)] + /// struct Packet { + /// magic_number: C0C0, + /// mug_size: u8, + /// temperature: u8, + /// marshmallows: [[u8; 2]], + /// } + /// + /// // These are more bytes than are needed to encode a `Packet`. + /// let bytes = &[0xC0, 0xC0, 240, 77, 0, 1, 2, 3, 4, 5, 6][..]; + /// + /// let (packet, suffix) = Packet::try_ref_from_prefix(bytes).unwrap(); + /// + /// assert_eq!(packet.mug_size, 240); + /// assert_eq!(packet.temperature, 77); + /// assert_eq!(packet.marshmallows, [[0, 1], [2, 3], [4, 5]]); + /// assert_eq!(suffix, &[6u8][..]); + /// + /// // These bytes are not valid instance of `Packet`. + /// let bytes = &[0x10, 0xC0, 240, 77, 0, 1, 2, 3, 4, 5, 6][..]; + /// assert!(Packet::try_ref_from_prefix(bytes).is_err()); + /// ``` + #[must_use = "has no side effects"] + #[inline] + fn try_ref_from_prefix(source: &[u8]) -> Result<(&Self, &[u8]), TryCastError<&[u8], Self>> + where + Self: KnownLayout + Immutable, + { + static_assert_dst_is_not_zst!(Self); + try_ref_from_prefix_suffix(source, CastType::Prefix, None) + } + + /// Attempts to interpret the suffix of the given `source` as a `&Self`. + /// + /// This method computes the [largest possible size of `Self`][valid-size] + /// that can fit in the trailing bytes of `source`. If that suffix is a + /// valid instance of `Self`, this method returns a reference to those bytes + /// interpreted as `Self`, and a reference to the preceding bytes. If there + /// are insufficient bytes, or if the suffix of `source` would not be + /// appropriately aligned, or if the suffix is not a valid instance of + /// `Self`, this returns `Err`. If [`Self: Unaligned`][self-unaligned], you + /// can [infallibly discard the alignment error][ConvertError::from]. + /// + /// `Self` may be a sized type, a slice, or a [slice DST][slice-dst]. + /// + /// [valid-size]: crate::KnownLayout#what-is-a-valid-size + /// [self-unaligned]: Unaligned + /// [slice-dst]: KnownLayout#dynamically-sized-types + /// + /// # Compile-Time Assertions + /// + /// This method cannot yet be used on unsized types whose dynamically-sized + /// component is zero-sized. 
Attempting to use this method on such types + /// results in a compile-time assertion error; e.g.: + /// + /// ```compile_fail,E0080 + /// use zerocopy::*; + /// # use zerocopy_derive::*; + /// + /// #[derive(TryFromBytes, Immutable, KnownLayout)] + /// #[repr(C)] + /// struct ZSTy { + /// leading_sized: u16, + /// trailing_dst: [()], + /// } + /// + /// let _ = ZSTy::try_ref_from_suffix(0u16.as_bytes()); // ⚠ Compile Error! + /// ``` + /// + /// # Examples + /// + /// ``` + /// use zerocopy::TryFromBytes; + /// # use zerocopy_derive::*; + /// + /// // The only valid value of this type is the byte `0xC0` + /// #[derive(TryFromBytes, KnownLayout, Immutable)] + /// #[repr(u8)] + /// enum C0 { xC0 = 0xC0 } + /// + /// // The only valid value of this type is the bytes `0xC0C0`. + /// #[derive(TryFromBytes, KnownLayout, Immutable)] + /// #[repr(C)] + /// struct C0C0(C0, C0); + /// + /// #[derive(TryFromBytes, KnownLayout, Immutable)] + /// #[repr(C)] + /// struct Packet { + /// magic_number: C0C0, + /// mug_size: u8, + /// temperature: u8, + /// marshmallows: [[u8; 2]], + /// } + /// + /// // These are more bytes than are needed to encode a `Packet`. + /// let bytes = &[0, 0xC0, 0xC0, 240, 77, 2, 3, 4, 5, 6, 7][..]; + /// + /// let (prefix, packet) = Packet::try_ref_from_suffix(bytes).unwrap(); + /// + /// assert_eq!(packet.mug_size, 240); + /// assert_eq!(packet.temperature, 77); + /// assert_eq!(packet.marshmallows, [[2, 3], [4, 5], [6, 7]]); + /// assert_eq!(prefix, &[0u8][..]); + /// + /// // These bytes are not valid instance of `Packet`. + /// let bytes = &[0, 1, 2, 3, 4, 5, 6, 77, 240, 0xC0, 0x10][..]; + /// assert!(Packet::try_ref_from_suffix(bytes).is_err()); + /// ``` + #[must_use = "has no side effects"] + #[inline] + fn try_ref_from_suffix(source: &[u8]) -> Result<(&[u8], &Self), TryCastError<&[u8], Self>> + where + Self: KnownLayout + Immutable, + { + static_assert_dst_is_not_zst!(Self); + try_ref_from_prefix_suffix(source, CastType::Suffix, None).map(swap) + } + + /// Attempts to interpret the given `source` as a `&mut Self` without + /// copying. + /// + /// If the bytes of `source` are a valid instance of `Self`, this method + /// returns a reference to those bytes interpreted as a `Self`. If the + /// length of `source` is not a [valid size of `Self`][valid-size], or if + /// `source` is not appropriately aligned, or if `source` is not a valid + /// instance of `Self`, this returns `Err`. If [`Self: + /// Unaligned`][self-unaligned], you can [infallibly discard the alignment + /// error][ConvertError::from]. + /// + /// `Self` may be a sized type, a slice, or a [slice DST][slice-dst]. + /// + /// [valid-size]: crate::KnownLayout#what-is-a-valid-size + /// [self-unaligned]: Unaligned + /// [slice-dst]: KnownLayout#dynamically-sized-types + /// + /// # Compile-Time Assertions + /// + /// This method cannot yet be used on unsized types whose dynamically-sized + /// component is zero-sized. Attempting to use this method on such types + /// results in a compile-time assertion error; e.g.: + /// + /// ```compile_fail,E0080 + /// use zerocopy::*; + /// # use zerocopy_derive::*; + /// + /// #[derive(TryFromBytes, IntoBytes, KnownLayout)] + /// #[repr(C, packed)] + /// struct ZSTy { + /// leading_sized: [u8; 2], + /// trailing_dst: [()], + /// } + /// + /// let mut source = [85, 85]; + /// let _ = ZSTy::try_mut_from_bytes(&mut source[..]); // ⚠ Compile Error! 
+ /// ``` + /// + /// # Examples + /// + /// ``` + /// use zerocopy::TryFromBytes; + /// # use zerocopy_derive::*; + /// + /// // The only valid value of this type is the byte `0xC0` + /// #[derive(TryFromBytes, IntoBytes, KnownLayout)] + /// #[repr(u8)] + /// enum C0 { xC0 = 0xC0 } + /// + /// // The only valid value of this type is the bytes `0xC0C0`. + /// #[derive(TryFromBytes, IntoBytes, KnownLayout)] + /// #[repr(C)] + /// struct C0C0(C0, C0); + /// + /// #[derive(TryFromBytes, IntoBytes, KnownLayout)] + /// #[repr(C, packed)] + /// struct Packet { + /// magic_number: C0C0, + /// mug_size: u8, + /// temperature: u8, + /// marshmallows: [[u8; 2]], + /// } + /// + /// let bytes = &mut [0xC0, 0xC0, 240, 77, 0, 1, 2, 3, 4, 5][..]; + /// + /// let packet = Packet::try_mut_from_bytes(bytes).unwrap(); + /// + /// assert_eq!(packet.mug_size, 240); + /// assert_eq!(packet.temperature, 77); + /// assert_eq!(packet.marshmallows, [[0, 1], [2, 3], [4, 5]]); + /// + /// packet.temperature = 111; + /// + /// assert_eq!(bytes, [0xC0, 0xC0, 240, 111, 0, 1, 2, 3, 4, 5]); + /// + /// // These bytes are not valid instance of `Packet`. + /// let bytes = &mut [0x10, 0xC0, 240, 77, 0, 1, 2, 3, 4, 5, 6][..]; + /// assert!(Packet::try_mut_from_bytes(bytes).is_err()); + /// ``` + #[must_use = "has no side effects"] + #[inline] + fn try_mut_from_bytes(bytes: &mut [u8]) -> Result<&mut Self, TryCastError<&mut [u8], Self>> + where + Self: KnownLayout + IntoBytes, + { + static_assert_dst_is_not_zst!(Self); + match Ptr::from_mut(bytes).try_cast_into_no_leftover::<Self, BecauseExclusive>(None) { + Ok(source) => { + // This call may panic. If that happens, it doesn't cause any soundness + // issues, as we have not generated any invalid state which we need to + // fix before returning. + // + // Note that one panic or post-monomorphization error condition is + // calling `try_into_valid` (and thus `is_bit_valid`) with a shared + // pointer when `Self: !Immutable`. Since `Self: Immutable`, this panic + // condition will not happen. + match source.try_into_valid() { + Ok(source) => Ok(source.as_mut()), + Err(e) => { + Err(e.map_src(|src| src.as_bytes::<BecauseExclusive>().as_mut()).into()) + } + } + } + Err(e) => Err(e.map_src(Ptr::as_mut).into()), + } + } + + /// Attempts to interpret the prefix of the given `source` as a `&mut + /// Self`. + /// + /// This method computes the [largest possible size of `Self`][valid-size] + /// that can fit in the leading bytes of `source`. If that prefix is a valid + /// instance of `Self`, this method returns a reference to those bytes + /// interpreted as `Self`, and a reference to the remaining bytes. If there + /// are insufficient bytes, or if `source` is not appropriately aligned, or + /// if the bytes are not a valid instance of `Self`, this returns `Err`. If + /// [`Self: Unaligned`][self-unaligned], you can [infallibly discard the + /// alignment error][ConvertError::from]. + /// + /// `Self` may be a sized type, a slice, or a [slice DST][slice-dst]. + /// + /// [valid-size]: crate::KnownLayout#what-is-a-valid-size + /// [self-unaligned]: Unaligned + /// [slice-dst]: KnownLayout#dynamically-sized-types + /// + /// # Compile-Time Assertions + /// + /// This method cannot yet be used on unsized types whose dynamically-sized + /// component is zero-sized. 
Attempting to use this method on such types + /// results in a compile-time assertion error; e.g.: + /// + /// ```compile_fail,E0080 + /// use zerocopy::*; + /// # use zerocopy_derive::*; + /// + /// #[derive(TryFromBytes, IntoBytes, KnownLayout)] + /// #[repr(C, packed)] + /// struct ZSTy { + /// leading_sized: [u8; 2], + /// trailing_dst: [()], + /// } + /// + /// let mut source = [85, 85]; + /// let _ = ZSTy::try_mut_from_prefix(&mut source[..]); // ⚠ Compile Error! + /// ``` + /// + /// # Examples + /// + /// ``` + /// use zerocopy::TryFromBytes; + /// # use zerocopy_derive::*; + /// + /// // The only valid value of this type is the byte `0xC0` + /// #[derive(TryFromBytes, IntoBytes, KnownLayout)] + /// #[repr(u8)] + /// enum C0 { xC0 = 0xC0 } + /// + /// // The only valid value of this type is the bytes `0xC0C0`. + /// #[derive(TryFromBytes, IntoBytes, KnownLayout)] + /// #[repr(C)] + /// struct C0C0(C0, C0); + /// + /// #[derive(TryFromBytes, IntoBytes, KnownLayout)] + /// #[repr(C, packed)] + /// struct Packet { + /// magic_number: C0C0, + /// mug_size: u8, + /// temperature: u8, + /// marshmallows: [[u8; 2]], + /// } + /// + /// // These are more bytes than are needed to encode a `Packet`. + /// let bytes = &mut [0xC0, 0xC0, 240, 77, 0, 1, 2, 3, 4, 5, 6][..]; + /// + /// let (packet, suffix) = Packet::try_mut_from_prefix(bytes).unwrap(); + /// + /// assert_eq!(packet.mug_size, 240); + /// assert_eq!(packet.temperature, 77); + /// assert_eq!(packet.marshmallows, [[0, 1], [2, 3], [4, 5]]); + /// assert_eq!(suffix, &[6u8][..]); + /// + /// packet.temperature = 111; + /// suffix[0] = 222; + /// + /// assert_eq!(bytes, [0xC0, 0xC0, 240, 111, 0, 1, 2, 3, 4, 5, 222]); + /// + /// // These bytes are not valid instance of `Packet`. + /// let bytes = &mut [0x10, 0xC0, 240, 77, 0, 1, 2, 3, 4, 5, 6][..]; + /// assert!(Packet::try_mut_from_prefix(bytes).is_err()); + /// ``` + #[must_use = "has no side effects"] + #[inline] + fn try_mut_from_prefix( + source: &mut [u8], + ) -> Result<(&mut Self, &mut [u8]), TryCastError<&mut [u8], Self>> + where + Self: KnownLayout + IntoBytes, + { + static_assert_dst_is_not_zst!(Self); + try_mut_from_prefix_suffix(source, CastType::Prefix, None) + } + + /// Attempts to interpret the suffix of the given `source` as a `&mut + /// Self`. + /// + /// This method computes the [largest possible size of `Self`][valid-size] + /// that can fit in the trailing bytes of `source`. If that suffix is a + /// valid instance of `Self`, this method returns a reference to those bytes + /// interpreted as `Self`, and a reference to the preceding bytes. If there + /// are insufficient bytes, or if the suffix of `source` would not be + /// appropriately aligned, or if the suffix is not a valid instance of + /// `Self`, this returns `Err`. If [`Self: Unaligned`][self-unaligned], you + /// can [infallibly discard the alignment error][ConvertError::from]. + /// + /// `Self` may be a sized type, a slice, or a [slice DST][slice-dst]. + /// + /// [valid-size]: crate::KnownLayout#what-is-a-valid-size + /// [self-unaligned]: Unaligned + /// [slice-dst]: KnownLayout#dynamically-sized-types + /// + /// # Compile-Time Assertions + /// + /// This method cannot yet be used on unsized types whose dynamically-sized + /// component is zero-sized. 
Attempting to use this method on such types + /// results in a compile-time assertion error; e.g.: + /// + /// ```compile_fail,E0080 + /// use zerocopy::*; + /// # use zerocopy_derive::*; + /// + /// #[derive(TryFromBytes, IntoBytes, KnownLayout)] + /// #[repr(C, packed)] + /// struct ZSTy { + /// leading_sized: u16, + /// trailing_dst: [()], + /// } + /// + /// let mut source = [85, 85]; + /// let _ = ZSTy::try_mut_from_suffix(&mut source[..]); // ⚠ Compile Error! + /// ``` + /// + /// # Examples + /// + /// ``` + /// use zerocopy::TryFromBytes; + /// # use zerocopy_derive::*; + /// + /// // The only valid value of this type is the byte `0xC0` + /// #[derive(TryFromBytes, IntoBytes, KnownLayout)] + /// #[repr(u8)] + /// enum C0 { xC0 = 0xC0 } + /// + /// // The only valid value of this type is the bytes `0xC0C0`. + /// #[derive(TryFromBytes, IntoBytes, KnownLayout)] + /// #[repr(C)] + /// struct C0C0(C0, C0); + /// + /// #[derive(TryFromBytes, IntoBytes, KnownLayout)] + /// #[repr(C, packed)] + /// struct Packet { + /// magic_number: C0C0, + /// mug_size: u8, + /// temperature: u8, + /// marshmallows: [[u8; 2]], + /// } + /// + /// // These are more bytes than are needed to encode a `Packet`. + /// let bytes = &mut [0, 0xC0, 0xC0, 240, 77, 2, 3, 4, 5, 6, 7][..]; + /// + /// let (prefix, packet) = Packet::try_mut_from_suffix(bytes).unwrap(); + /// + /// assert_eq!(packet.mug_size, 240); + /// assert_eq!(packet.temperature, 77); + /// assert_eq!(packet.marshmallows, [[2, 3], [4, 5], [6, 7]]); + /// assert_eq!(prefix, &[0u8][..]); + /// + /// prefix[0] = 111; + /// packet.temperature = 222; + /// + /// assert_eq!(bytes, [111, 0xC0, 0xC0, 240, 222, 2, 3, 4, 5, 6, 7]); + /// + /// // These bytes are not valid instance of `Packet`. + /// let bytes = &mut [0, 1, 2, 3, 4, 5, 6, 77, 240, 0xC0, 0x10][..]; + /// assert!(Packet::try_mut_from_suffix(bytes).is_err()); + /// ``` + #[must_use = "has no side effects"] + #[inline] + fn try_mut_from_suffix( + source: &mut [u8], + ) -> Result<(&mut [u8], &mut Self), TryCastError<&mut [u8], Self>> + where + Self: KnownLayout + IntoBytes, + { + static_assert_dst_is_not_zst!(Self); + try_mut_from_prefix_suffix(source, CastType::Suffix, None).map(swap) + } + + /// Attempts to interpret the given `source` as a `&Self` with a DST length + /// equal to `count`. + /// + /// This method attempts to return a reference to `source` interpreted as a + /// `Self` with `count` trailing elements. If the length of `source` is not + /// equal to the size of `Self` with `count` elements, if `source` is not + /// appropriately aligned, or if `source` does not contain a valid instance + /// of `Self`, this returns `Err`. If [`Self: Unaligned`][self-unaligned], + /// you can [infallibly discard the alignment error][ConvertError::from]. + /// + /// [self-unaligned]: Unaligned + /// [slice-dst]: KnownLayout#dynamically-sized-types + /// + /// # Examples + /// + /// ``` + /// # #![allow(non_camel_case_types)] // For C0::xC0 + /// use zerocopy::TryFromBytes; + /// # use zerocopy_derive::*; + /// + /// // The only valid value of this type is the byte `0xC0` + /// #[derive(TryFromBytes, KnownLayout, Immutable)] + /// #[repr(u8)] + /// enum C0 { xC0 = 0xC0 } + /// + /// // The only valid value of this type is the bytes `0xC0C0`. 
+ /// #[derive(TryFromBytes, KnownLayout, Immutable)] + /// #[repr(C)] + /// struct C0C0(C0, C0); + /// + /// #[derive(TryFromBytes, KnownLayout, Immutable)] + /// #[repr(C)] + /// struct Packet { + /// magic_number: C0C0, + /// mug_size: u8, + /// temperature: u8, + /// marshmallows: [[u8; 2]], + /// } + /// + /// let bytes = &[0xC0, 0xC0, 240, 77, 2, 3, 4, 5, 6, 7][..]; + /// + /// let packet = Packet::try_ref_from_bytes_with_elems(bytes, 3).unwrap(); + /// + /// assert_eq!(packet.mug_size, 240); + /// assert_eq!(packet.temperature, 77); + /// assert_eq!(packet.marshmallows, [[2, 3], [4, 5], [6, 7]]); + /// + /// // These bytes are not valid instance of `Packet`. + /// let bytes = &[0, 1, 2, 3, 4, 5, 6, 77, 240, 0xC0, 0xC0][..]; + /// assert!(Packet::try_ref_from_bytes_with_elems(bytes, 3).is_err()); + /// ``` + /// + /// Since an explicit `count` is provided, this method supports types with + /// zero-sized trailing slice elements. Methods such as [`try_ref_from_bytes`] + /// which do not take an explicit count do not support such types. + /// + /// ``` + /// use core::num::NonZeroU16; + /// use zerocopy::*; + /// # use zerocopy_derive::*; + /// + /// #[derive(TryFromBytes, Immutable, KnownLayout)] + /// #[repr(C)] + /// struct ZSTy { + /// leading_sized: NonZeroU16, + /// trailing_dst: [()], + /// } + /// + /// let src = 0xCAFEu16.as_bytes(); + /// let zsty = ZSTy::try_ref_from_bytes_with_elems(src, 42).unwrap(); + /// assert_eq!(zsty.trailing_dst.len(), 42); + /// ``` + /// + /// [`try_ref_from_bytes`]: TryFromBytes::try_ref_from_bytes + #[must_use = "has no side effects"] + #[inline] + fn try_ref_from_bytes_with_elems( + source: &[u8], + count: usize, + ) -> Result<&Self, TryCastError<&[u8], Self>> + where + Self: KnownLayout<PointerMetadata = usize> + Immutable, + { + match Ptr::from_ref(source).try_cast_into_no_leftover::<Self, BecauseImmutable>(Some(count)) + { + Ok(source) => { + // This call may panic. If that happens, it doesn't cause any soundness + // issues, as we have not generated any invalid state which we need to + // fix before returning. + // + // Note that one panic or post-monomorphization error condition is + // calling `try_into_valid` (and thus `is_bit_valid`) with a shared + // pointer when `Self: !Immutable`. Since `Self: Immutable`, this panic + // condition will not happen. + match source.try_into_valid() { + Ok(source) => Ok(source.as_ref()), + Err(e) => { + Err(e.map_src(|src| src.as_bytes::<BecauseImmutable>().as_ref()).into()) + } + } + } + Err(e) => Err(e.map_src(Ptr::as_ref).into()), + } + } + + /// Attempts to interpret the prefix of the given `source` as a `&Self` with + /// a DST length equal to `count`. + /// + /// This method attempts to return a reference to the prefix of `source` + /// interpreted as a `Self` with `count` trailing elements, and a reference + /// to the remaining bytes. If the length of `source` is less than the size + /// of `Self` with `count` elements, if `source` is not appropriately + /// aligned, or if the prefix of `source` does not contain a valid instance + /// of `Self`, this returns `Err`. If [`Self: Unaligned`][self-unaligned], + /// you can [infallibly discard the alignment error][ConvertError::from]. 
+ /// + /// [self-unaligned]: Unaligned + /// [slice-dst]: KnownLayout#dynamically-sized-types + /// + /// # Examples + /// + /// ``` + /// # #![allow(non_camel_case_types)] // For C0::xC0 + /// use zerocopy::TryFromBytes; + /// # use zerocopy_derive::*; + /// + /// // The only valid value of this type is the byte `0xC0` + /// #[derive(TryFromBytes, KnownLayout, Immutable)] + /// #[repr(u8)] + /// enum C0 { xC0 = 0xC0 } + /// + /// // The only valid value of this type is the bytes `0xC0C0`. + /// #[derive(TryFromBytes, KnownLayout, Immutable)] + /// #[repr(C)] + /// struct C0C0(C0, C0); + /// + /// #[derive(TryFromBytes, KnownLayout, Immutable)] + /// #[repr(C)] + /// struct Packet { + /// magic_number: C0C0, + /// mug_size: u8, + /// temperature: u8, + /// marshmallows: [[u8; 2]], + /// } + /// + /// let bytes = &[0xC0, 0xC0, 240, 77, 2, 3, 4, 5, 6, 7, 8][..]; + /// + /// let (packet, suffix) = Packet::try_ref_from_prefix_with_elems(bytes, 3).unwrap(); + /// + /// assert_eq!(packet.mug_size, 240); + /// assert_eq!(packet.temperature, 77); + /// assert_eq!(packet.marshmallows, [[2, 3], [4, 5], [6, 7]]); + /// assert_eq!(suffix, &[8u8][..]); + /// + /// // These bytes are not valid instance of `Packet`. + /// let bytes = &mut [0, 1, 2, 3, 4, 5, 6, 7, 8, 77, 240, 0xC0, 0xC0][..]; + /// assert!(Packet::try_ref_from_prefix_with_elems(bytes, 3).is_err()); + /// ``` + /// + /// Since an explicit `count` is provided, this method supports types with + /// zero-sized trailing slice elements. Methods such as [`try_ref_from_prefix`] + /// which do not take an explicit count do not support such types. + /// + /// ``` + /// use core::num::NonZeroU16; + /// use zerocopy::*; + /// # use zerocopy_derive::*; + /// + /// #[derive(TryFromBytes, Immutable, KnownLayout)] + /// #[repr(C)] + /// struct ZSTy { + /// leading_sized: NonZeroU16, + /// trailing_dst: [()], + /// } + /// + /// let src = 0xCAFEu16.as_bytes(); + /// let (zsty, _) = ZSTy::try_ref_from_prefix_with_elems(src, 42).unwrap(); + /// assert_eq!(zsty.trailing_dst.len(), 42); + /// ``` + /// + /// [`try_ref_from_prefix`]: TryFromBytes::try_ref_from_prefix + #[must_use = "has no side effects"] + #[inline] + fn try_ref_from_prefix_with_elems( + source: &[u8], + count: usize, + ) -> Result<(&Self, &[u8]), TryCastError<&[u8], Self>> + where + Self: KnownLayout<PointerMetadata = usize> + Immutable, + { + try_ref_from_prefix_suffix(source, CastType::Prefix, Some(count)) + } + + /// Attempts to interpret the suffix of the given `source` as a `&Self` with + /// a DST length equal to `count`. + /// + /// This method attempts to return a reference to the suffix of `source` + /// interpreted as a `Self` with `count` trailing elements, and a reference + /// to the preceding bytes. If the length of `source` is less than the size + /// of `Self` with `count` elements, if the suffix of `source` is not + /// appropriately aligned, or if the suffix of `source` does not contain a + /// valid instance of `Self`, this returns `Err`. If [`Self: + /// Unaligned`][self-unaligned], you can [infallibly discard the alignment + /// error][ConvertError::from]. 
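As a complement to the derive-based example that follows, here is a minimal sketch of the suffix variant on `[bool]`, again assuming zerocopy 0.8's built-in impls for `bool`:

```rust
use zerocopy::TryFromBytes;

fn main() {
    // View the last three bytes as `[bool]` and keep the preceding bytes intact.
    let bytes = &[7u8, 7, 1, 0, 1][..];
    let (rest, bools) = <[bool]>::try_ref_from_suffix_with_elems(bytes, 3).unwrap();
    assert_eq!(rest, &[7u8, 7][..]);
    assert_eq!(bools, &[true, false, true][..]);

    // An invalid `bool` byte inside the requested suffix is rejected.
    assert!(<[bool]>::try_ref_from_suffix_with_elems(&[7u8, 7, 1, 0, 2][..], 3).is_err());
}
```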
+ /// + /// [self-unaligned]: Unaligned + /// [slice-dst]: KnownLayout#dynamically-sized-types + /// + /// # Examples + /// + /// ``` + /// # #![allow(non_camel_case_types)] // For C0::xC0 + /// use zerocopy::TryFromBytes; + /// # use zerocopy_derive::*; + /// + /// // The only valid value of this type is the byte `0xC0` + /// #[derive(TryFromBytes, KnownLayout, Immutable)] + /// #[repr(u8)] + /// enum C0 { xC0 = 0xC0 } + /// + /// // The only valid value of this type is the bytes `0xC0C0`. + /// #[derive(TryFromBytes, KnownLayout, Immutable)] + /// #[repr(C)] + /// struct C0C0(C0, C0); + /// + /// #[derive(TryFromBytes, KnownLayout, Immutable)] + /// #[repr(C)] + /// struct Packet { + /// magic_number: C0C0, + /// mug_size: u8, + /// temperature: u8, + /// marshmallows: [[u8; 2]], + /// } + /// + /// let bytes = &[123, 0xC0, 0xC0, 240, 77, 2, 3, 4, 5, 6, 7][..]; + /// + /// let (prefix, packet) = Packet::try_ref_from_suffix_with_elems(bytes, 3).unwrap(); + /// + /// assert_eq!(packet.mug_size, 240); + /// assert_eq!(packet.temperature, 77); + /// assert_eq!(packet.marshmallows, [[2, 3], [4, 5], [6, 7]]); + /// assert_eq!(prefix, &[123u8][..]); + /// + /// // These bytes are not valid instance of `Packet`. + /// let bytes = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 77, 240, 0xC0, 0xC0][..]; + /// assert!(Packet::try_ref_from_suffix_with_elems(bytes, 3).is_err()); + /// ``` + /// + /// Since an explicit `count` is provided, this method supports types with + /// zero-sized trailing slice elements. Methods such as [`try_ref_from_prefix`] + /// which do not take an explicit count do not support such types. + /// + /// ``` + /// use core::num::NonZeroU16; + /// use zerocopy::*; + /// # use zerocopy_derive::*; + /// + /// #[derive(TryFromBytes, Immutable, KnownLayout)] + /// #[repr(C)] + /// struct ZSTy { + /// leading_sized: NonZeroU16, + /// trailing_dst: [()], + /// } + /// + /// let src = 0xCAFEu16.as_bytes(); + /// let (_, zsty) = ZSTy::try_ref_from_suffix_with_elems(src, 42).unwrap(); + /// assert_eq!(zsty.trailing_dst.len(), 42); + /// ``` + /// + /// [`try_ref_from_prefix`]: TryFromBytes::try_ref_from_prefix + #[must_use = "has no side effects"] + #[inline] + fn try_ref_from_suffix_with_elems( + source: &[u8], + count: usize, + ) -> Result<(&[u8], &Self), TryCastError<&[u8], Self>> + where + Self: KnownLayout<PointerMetadata = usize> + Immutable, + { + try_ref_from_prefix_suffix(source, CastType::Suffix, Some(count)).map(swap) + } + + /// Attempts to interpret the given `source` as a `&mut Self` with a DST + /// length equal to `count`. + /// + /// This method attempts to return a reference to `source` interpreted as a + /// `Self` with `count` trailing elements. If the length of `source` is not + /// equal to the size of `Self` with `count` elements, if `source` is not + /// appropriately aligned, or if `source` does not contain a valid instance + /// of `Self`, this returns `Err`. If [`Self: Unaligned`][self-unaligned], + /// you can [infallibly discard the alignment error][ConvertError::from]. + /// + /// [self-unaligned]: Unaligned + /// [slice-dst]: KnownLayout#dynamically-sized-types + /// + /// # Examples + /// + /// ``` + /// # #![allow(non_camel_case_types)] // For C0::xC0 + /// use zerocopy::TryFromBytes; + /// # use zerocopy_derive::*; + /// + /// // The only valid value of this type is the byte `0xC0` + /// #[derive(TryFromBytes, IntoBytes, KnownLayout)] + /// #[repr(u8)] + /// enum C0 { xC0 = 0xC0 } + /// + /// // The only valid value of this type is the bytes `0xC0C0`. 
+ /// #[derive(TryFromBytes, IntoBytes, KnownLayout)] + /// #[repr(C)] + /// struct C0C0(C0, C0); + /// + /// #[derive(TryFromBytes, IntoBytes, KnownLayout)] + /// #[repr(C, packed)] + /// struct Packet { + /// magic_number: C0C0, + /// mug_size: u8, + /// temperature: u8, + /// marshmallows: [[u8; 2]], + /// } + /// + /// let bytes = &mut [0xC0, 0xC0, 240, 77, 2, 3, 4, 5, 6, 7][..]; + /// + /// let packet = Packet::try_mut_from_bytes_with_elems(bytes, 3).unwrap(); + /// + /// assert_eq!(packet.mug_size, 240); + /// assert_eq!(packet.temperature, 77); + /// assert_eq!(packet.marshmallows, [[2, 3], [4, 5], [6, 7]]); + /// + /// packet.temperature = 111; + /// + /// assert_eq!(bytes, [0xC0, 0xC0, 240, 111, 2, 3, 4, 5, 6, 7]); + /// + /// // These bytes are not valid instance of `Packet`. + /// let bytes = &mut [0, 1, 2, 3, 4, 5, 6, 77, 240, 0xC0, 0xC0][..]; + /// assert!(Packet::try_mut_from_bytes_with_elems(bytes, 3).is_err()); + /// ``` + /// + /// Since an explicit `count` is provided, this method supports types with + /// zero-sized trailing slice elements. Methods such as [`try_mut_from_bytes`] + /// which do not take an explicit count do not support such types. + /// + /// ``` + /// use core::num::NonZeroU16; + /// use zerocopy::*; + /// # use zerocopy_derive::*; + /// + /// #[derive(TryFromBytes, IntoBytes, KnownLayout)] + /// #[repr(C, packed)] + /// struct ZSTy { + /// leading_sized: NonZeroU16, + /// trailing_dst: [()], + /// } + /// + /// let mut src = 0xCAFEu16; + /// let src = src.as_mut_bytes(); + /// let zsty = ZSTy::try_mut_from_bytes_with_elems(src, 42).unwrap(); + /// assert_eq!(zsty.trailing_dst.len(), 42); + /// ``` + /// + /// [`try_mut_from_bytes`]: TryFromBytes::try_mut_from_bytes + #[must_use = "has no side effects"] + #[inline] + fn try_mut_from_bytes_with_elems( + source: &mut [u8], + count: usize, + ) -> Result<&mut Self, TryCastError<&mut [u8], Self>> + where + Self: KnownLayout<PointerMetadata = usize> + IntoBytes, + { + match Ptr::from_mut(source).try_cast_into_no_leftover::<Self, BecauseExclusive>(Some(count)) + { + Ok(source) => { + // This call may panic. If that happens, it doesn't cause any soundness + // issues, as we have not generated any invalid state which we need to + // fix before returning. + // + // Note that one panic or post-monomorphization error condition is + // calling `try_into_valid` (and thus `is_bit_valid`) with a shared + // pointer when `Self: !Immutable`. Since `Self: Immutable`, this panic + // condition will not happen. + match source.try_into_valid() { + Ok(source) => Ok(source.as_mut()), + Err(e) => { + Err(e.map_src(|src| src.as_bytes::<BecauseExclusive>().as_mut()).into()) + } + } + } + Err(e) => Err(e.map_src(Ptr::as_mut).into()), + } + } + + /// Attempts to interpret the prefix of the given `source` as a `&mut Self` + /// with a DST length equal to `count`. + /// + /// This method attempts to return a reference to the prefix of `source` + /// interpreted as a `Self` with `count` trailing elements, and a reference + /// to the remaining bytes. If the length of `source` is less than the size + /// of `Self` with `count` elements, if `source` is not appropriately + /// aligned, or if the prefix of `source` does not contain a valid instance + /// of `Self`, this returns `Err`. If [`Self: Unaligned`][self-unaligned], + /// you can [infallibly discard the alignment error][ConvertError::from]. 
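A minimal mutable sketch on `[bool]`, assuming zerocopy 0.8's built-in `TryFromBytes` and `IntoBytes` impls for `bool`; it mirrors the derive-based example that follows but skips the custom types:

```rust
use zerocopy::TryFromBytes;

fn main() {
    // Borrow the first three bytes as `&mut [bool]`; writes land in the original buffer.
    let bytes = &mut [1u8, 0, 1, 7][..];
    let (bools, rest) = <[bool]>::try_mut_from_prefix_with_elems(bytes, 3).unwrap();
    bools[0] = false;
    rest[0] = 9;
    assert_eq!(bytes, [0, 0, 1, 9]);
}
```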
+ /// + /// [self-unaligned]: Unaligned + /// [slice-dst]: KnownLayout#dynamically-sized-types + /// + /// # Examples + /// + /// ``` + /// # #![allow(non_camel_case_types)] // For C0::xC0 + /// use zerocopy::TryFromBytes; + /// # use zerocopy_derive::*; + /// + /// // The only valid value of this type is the byte `0xC0` + /// #[derive(TryFromBytes, IntoBytes, KnownLayout)] + /// #[repr(u8)] + /// enum C0 { xC0 = 0xC0 } + /// + /// // The only valid value of this type is the bytes `0xC0C0`. + /// #[derive(TryFromBytes, IntoBytes, KnownLayout)] + /// #[repr(C)] + /// struct C0C0(C0, C0); + /// + /// #[derive(TryFromBytes, IntoBytes, KnownLayout)] + /// #[repr(C, packed)] + /// struct Packet { + /// magic_number: C0C0, + /// mug_size: u8, + /// temperature: u8, + /// marshmallows: [[u8; 2]], + /// } + /// + /// let bytes = &mut [0xC0, 0xC0, 240, 77, 2, 3, 4, 5, 6, 7, 8][..]; + /// + /// let (packet, suffix) = Packet::try_mut_from_prefix_with_elems(bytes, 3).unwrap(); + /// + /// assert_eq!(packet.mug_size, 240); + /// assert_eq!(packet.temperature, 77); + /// assert_eq!(packet.marshmallows, [[2, 3], [4, 5], [6, 7]]); + /// assert_eq!(suffix, &[8u8][..]); + /// + /// packet.temperature = 111; + /// suffix[0] = 222; + /// + /// assert_eq!(bytes, [0xC0, 0xC0, 240, 111, 2, 3, 4, 5, 6, 7, 222]); + /// + /// // These bytes are not valid instance of `Packet`. + /// let bytes = &mut [0, 1, 2, 3, 4, 5, 6, 7, 8, 77, 240, 0xC0, 0xC0][..]; + /// assert!(Packet::try_mut_from_prefix_with_elems(bytes, 3).is_err()); + /// ``` + /// + /// Since an explicit `count` is provided, this method supports types with + /// zero-sized trailing slice elements. Methods such as [`try_mut_from_prefix`] + /// which do not take an explicit count do not support such types. + /// + /// ``` + /// use core::num::NonZeroU16; + /// use zerocopy::*; + /// # use zerocopy_derive::*; + /// + /// #[derive(TryFromBytes, IntoBytes, KnownLayout)] + /// #[repr(C, packed)] + /// struct ZSTy { + /// leading_sized: NonZeroU16, + /// trailing_dst: [()], + /// } + /// + /// let mut src = 0xCAFEu16; + /// let src = src.as_mut_bytes(); + /// let (zsty, _) = ZSTy::try_mut_from_prefix_with_elems(src, 42).unwrap(); + /// assert_eq!(zsty.trailing_dst.len(), 42); + /// ``` + /// + /// [`try_mut_from_prefix`]: TryFromBytes::try_mut_from_prefix + #[must_use = "has no side effects"] + #[inline] + fn try_mut_from_prefix_with_elems( + source: &mut [u8], + count: usize, + ) -> Result<(&mut Self, &mut [u8]), TryCastError<&mut [u8], Self>> + where + Self: KnownLayout<PointerMetadata = usize> + IntoBytes, + { + try_mut_from_prefix_suffix(source, CastType::Prefix, Some(count)) + } + + /// Attempts to interpret the suffix of the given `source` as a `&mut Self` + /// with a DST length equal to `count`. + /// + /// This method attempts to return a reference to the suffix of `source` + /// interpreted as a `Self` with `count` trailing elements, and a reference + /// to the preceding bytes. If the length of `source` is less than the size + /// of `Self` with `count` elements, if the suffix of `source` is not + /// appropriately aligned, or if the suffix of `source` does not contain a + /// valid instance of `Self`, this returns `Err`. If [`Self: + /// Unaligned`][self-unaligned], you can [infallibly discard the alignment + /// error][ConvertError::from]. 
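And the suffix counterpart, under the same assumptions (zerocopy 0.8, built-in impls for `bool` and `[bool]`):

```rust
use zerocopy::TryFromBytes;

fn main() {
    // Borrow the last three bytes as `&mut [bool]`, leaving the leading byte untouched.
    let bytes = &mut [7u8, 1, 0, 1][..];
    let (rest, bools) = <[bool]>::try_mut_from_suffix_with_elems(bytes, 3).unwrap();
    assert_eq!(rest, &[7u8][..]);
    bools[2] = false;
    assert_eq!(bytes, [7, 1, 0, 0]);
}
```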
+ /// + /// [self-unaligned]: Unaligned + /// [slice-dst]: KnownLayout#dynamically-sized-types + /// + /// # Examples + /// + /// ``` + /// # #![allow(non_camel_case_types)] // For C0::xC0 + /// use zerocopy::TryFromBytes; + /// # use zerocopy_derive::*; + /// + /// // The only valid value of this type is the byte `0xC0` + /// #[derive(TryFromBytes, IntoBytes, KnownLayout)] + /// #[repr(u8)] + /// enum C0 { xC0 = 0xC0 } + /// + /// // The only valid value of this type is the bytes `0xC0C0`. + /// #[derive(TryFromBytes, IntoBytes, KnownLayout)] + /// #[repr(C)] + /// struct C0C0(C0, C0); + /// + /// #[derive(TryFromBytes, IntoBytes, KnownLayout)] + /// #[repr(C, packed)] + /// struct Packet { + /// magic_number: C0C0, + /// mug_size: u8, + /// temperature: u8, + /// marshmallows: [[u8; 2]], + /// } + /// + /// let bytes = &mut [123, 0xC0, 0xC0, 240, 77, 2, 3, 4, 5, 6, 7][..]; + /// + /// let (prefix, packet) = Packet::try_mut_from_suffix_with_elems(bytes, 3).unwrap(); + /// + /// assert_eq!(packet.mug_size, 240); + /// assert_eq!(packet.temperature, 77); + /// assert_eq!(packet.marshmallows, [[2, 3], [4, 5], [6, 7]]); + /// assert_eq!(prefix, &[123u8][..]); + /// + /// prefix[0] = 111; + /// packet.temperature = 222; + /// + /// assert_eq!(bytes, [111, 0xC0, 0xC0, 240, 222, 2, 3, 4, 5, 6, 7]); + /// + /// // These bytes are not valid instance of `Packet`. + /// let bytes = &mut [0, 1, 2, 3, 4, 5, 6, 7, 8, 77, 240, 0xC0, 0xC0][..]; + /// assert!(Packet::try_mut_from_suffix_with_elems(bytes, 3).is_err()); + /// ``` + /// + /// Since an explicit `count` is provided, this method supports types with + /// zero-sized trailing slice elements. Methods such as [`try_mut_from_prefix`] + /// which do not take an explicit count do not support such types. + /// + /// ``` + /// use core::num::NonZeroU16; + /// use zerocopy::*; + /// # use zerocopy_derive::*; + /// + /// #[derive(TryFromBytes, IntoBytes, KnownLayout)] + /// #[repr(C, packed)] + /// struct ZSTy { + /// leading_sized: NonZeroU16, + /// trailing_dst: [()], + /// } + /// + /// let mut src = 0xCAFEu16; + /// let src = src.as_mut_bytes(); + /// let (_, zsty) = ZSTy::try_mut_from_suffix_with_elems(src, 42).unwrap(); + /// assert_eq!(zsty.trailing_dst.len(), 42); + /// ``` + /// + /// [`try_mut_from_prefix`]: TryFromBytes::try_mut_from_prefix + #[must_use = "has no side effects"] + #[inline] + fn try_mut_from_suffix_with_elems( + source: &mut [u8], + count: usize, + ) -> Result<(&mut [u8], &mut Self), TryCastError<&mut [u8], Self>> + where + Self: KnownLayout<PointerMetadata = usize> + IntoBytes, + { + try_mut_from_prefix_suffix(source, CastType::Suffix, Some(count)).map(swap) + } + + /// Attempts to read the given `source` as a `Self`. + /// + /// If `source.len() != size_of::<Self>()` or the bytes are not a valid + /// instance of `Self`, this returns `Err`. + /// + /// # Examples + /// + /// ``` + /// use zerocopy::TryFromBytes; + /// # use zerocopy_derive::*; + /// + /// // The only valid value of this type is the byte `0xC0` + /// #[derive(TryFromBytes)] + /// #[repr(u8)] + /// enum C0 { xC0 = 0xC0 } + /// + /// // The only valid value of this type is the bytes `0xC0C0`. 
+ /// #[derive(TryFromBytes)] + /// #[repr(C)] + /// struct C0C0(C0, C0); + /// + /// #[derive(TryFromBytes)] + /// #[repr(C)] + /// struct Packet { + /// magic_number: C0C0, + /// mug_size: u8, + /// temperature: u8, + /// } + /// + /// let bytes = &[0xC0, 0xC0, 240, 77][..]; + /// + /// let packet = Packet::try_read_from_bytes(bytes).unwrap(); + /// + /// assert_eq!(packet.mug_size, 240); + /// assert_eq!(packet.temperature, 77); + /// + /// // These bytes are not valid instance of `Packet`. + /// let bytes = &mut [0x10, 0xC0, 240, 77][..]; + /// assert!(Packet::try_read_from_bytes(bytes).is_err()); + /// ``` + #[must_use = "has no side effects"] + #[inline] + fn try_read_from_bytes(source: &[u8]) -> Result<Self, TryReadError<&[u8], Self>> + where + Self: Sized, + { + let candidate = match CoreMaybeUninit::<Self>::read_from_bytes(source) { + Ok(candidate) => candidate, + Err(e) => { + return Err(TryReadError::Size(e.with_dst())); + } + }; + // SAFETY: `candidate` was copied from from `source: &[u8]`, so all of + // its bytes are initialized. + unsafe { try_read_from(source, candidate) } + } + + /// Attempts to read a `Self` from the prefix of the given `source`. + /// + /// This attempts to read a `Self` from the first `size_of::<Self>()` bytes + /// of `source`, returning that `Self` and any remaining bytes. If + /// `source.len() < size_of::<Self>()` or the bytes are not a valid instance + /// of `Self`, it returns `Err`. + /// + /// # Examples + /// + /// ``` + /// use zerocopy::TryFromBytes; + /// # use zerocopy_derive::*; + /// + /// // The only valid value of this type is the byte `0xC0` + /// #[derive(TryFromBytes)] + /// #[repr(u8)] + /// enum C0 { xC0 = 0xC0 } + /// + /// // The only valid value of this type is the bytes `0xC0C0`. + /// #[derive(TryFromBytes)] + /// #[repr(C)] + /// struct C0C0(C0, C0); + /// + /// #[derive(TryFromBytes)] + /// #[repr(C)] + /// struct Packet { + /// magic_number: C0C0, + /// mug_size: u8, + /// temperature: u8, + /// } + /// + /// // These are more bytes than are needed to encode a `Packet`. + /// let bytes = &[0xC0, 0xC0, 240, 77, 0, 1, 2, 3, 4, 5, 6][..]; + /// + /// let (packet, suffix) = Packet::try_read_from_prefix(bytes).unwrap(); + /// + /// assert_eq!(packet.mug_size, 240); + /// assert_eq!(packet.temperature, 77); + /// assert_eq!(suffix, &[0u8, 1, 2, 3, 4, 5, 6][..]); + /// + /// // These bytes are not valid instance of `Packet`. + /// let bytes = &[0x10, 0xC0, 240, 77, 0, 1, 2, 3, 4, 5, 6][..]; + /// assert!(Packet::try_read_from_prefix(bytes).is_err()); + /// ``` + #[must_use = "has no side effects"] + #[inline] + fn try_read_from_prefix(source: &[u8]) -> Result<(Self, &[u8]), TryReadError<&[u8], Self>> + where + Self: Sized, + { + let (candidate, suffix) = match CoreMaybeUninit::<Self>::read_from_prefix(source) { + Ok(candidate) => candidate, + Err(e) => { + return Err(TryReadError::Size(e.with_dst())); + } + }; + // SAFETY: `candidate` was copied from from `source: &[u8]`, so all of + // its bytes are initialized. + unsafe { try_read_from(source, candidate).map(|slf| (slf, suffix)) } + } + + /// Attempts to read a `Self` from the suffix of the given `source`. + /// + /// This attempts to read a `Self` from the last `size_of::<Self>()` bytes + /// of `source`, returning that `Self` and any preceding bytes. If + /// `source.len() < size_of::<Self>()` or the bytes are not a valid instance + /// of `Self`, it returns `Err`. 
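The derive-based examples around this point copy a custom `Packet`; the same `try_read_*` calls also work directly on built-in checked types such as `bool`. A minimal sketch, assuming zerocopy 0.8:

```rust
use zerocopy::TryFromBytes;

fn main() {
    // `bool` is valid only as 0 or 1, so every read is checked.
    assert!(bool::try_read_from_bytes(&[1u8][..]).unwrap());
    assert!(bool::try_read_from_bytes(&[2u8][..]).is_err());

    // The prefix/suffix variants also hand back the unread bytes.
    let (b, rest) = bool::try_read_from_prefix(&[0u8, 9][..]).unwrap();
    assert!(!b);
    assert_eq!(rest, &[9u8][..]);

    let (rest, b) = bool::try_read_from_suffix(&[9u8, 1][..]).unwrap();
    assert_eq!(rest, &[9u8][..]);
    assert!(b);
}
```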
+ /// + /// # Examples + /// + /// ``` + /// # #![allow(non_camel_case_types)] // For C0::xC0 + /// use zerocopy::TryFromBytes; + /// # use zerocopy_derive::*; + /// + /// // The only valid value of this type is the byte `0xC0` + /// #[derive(TryFromBytes)] + /// #[repr(u8)] + /// enum C0 { xC0 = 0xC0 } + /// + /// // The only valid value of this type is the bytes `0xC0C0`. + /// #[derive(TryFromBytes)] + /// #[repr(C)] + /// struct C0C0(C0, C0); + /// + /// #[derive(TryFromBytes)] + /// #[repr(C)] + /// struct Packet { + /// magic_number: C0C0, + /// mug_size: u8, + /// temperature: u8, + /// } + /// + /// // These are more bytes than are needed to encode a `Packet`. + /// let bytes = &[0, 1, 2, 3, 4, 5, 0xC0, 0xC0, 240, 77][..]; + /// + /// let (prefix, packet) = Packet::try_read_from_suffix(bytes).unwrap(); + /// + /// assert_eq!(packet.mug_size, 240); + /// assert_eq!(packet.temperature, 77); + /// assert_eq!(prefix, &[0u8, 1, 2, 3, 4, 5][..]); + /// + /// // These bytes are not valid instance of `Packet`. + /// let bytes = &[0, 1, 2, 3, 4, 5, 0x10, 0xC0, 240, 77][..]; + /// assert!(Packet::try_read_from_suffix(bytes).is_err()); + /// ``` + #[must_use = "has no side effects"] + #[inline] + fn try_read_from_suffix(source: &[u8]) -> Result<(&[u8], Self), TryReadError<&[u8], Self>> + where + Self: Sized, + { + let (prefix, candidate) = match CoreMaybeUninit::<Self>::read_from_suffix(source) { + Ok(candidate) => candidate, + Err(e) => { + return Err(TryReadError::Size(e.with_dst())); + } + }; + // SAFETY: `candidate` was copied from from `source: &[u8]`, so all of + // its bytes are initialized. + unsafe { try_read_from(source, candidate).map(|slf| (prefix, slf)) } + } +} + +#[inline(always)] +fn try_ref_from_prefix_suffix<T: TryFromBytes + KnownLayout + Immutable + ?Sized>( + source: &[u8], + cast_type: CastType, + meta: Option<T::PointerMetadata>, +) -> Result<(&T, &[u8]), TryCastError<&[u8], T>> { + match Ptr::from_ref(source).try_cast_into::<T, BecauseImmutable>(cast_type, meta) { + Ok((source, prefix_suffix)) => { + // This call may panic. If that happens, it doesn't cause any soundness + // issues, as we have not generated any invalid state which we need to + // fix before returning. + // + // Note that one panic or post-monomorphization error condition is + // calling `try_into_valid` (and thus `is_bit_valid`) with a shared + // pointer when `Self: !Immutable`. Since `Self: Immutable`, this panic + // condition will not happen. + match source.try_into_valid() { + Ok(valid) => Ok((valid.as_ref(), prefix_suffix.as_ref())), + Err(e) => Err(e.map_src(|src| src.as_bytes::<BecauseImmutable>().as_ref()).into()), + } + } + Err(e) => Err(e.map_src(Ptr::as_ref).into()), + } +} + +#[inline(always)] +fn try_mut_from_prefix_suffix<T: IntoBytes + TryFromBytes + KnownLayout + ?Sized>( + candidate: &mut [u8], + cast_type: CastType, + meta: Option<T::PointerMetadata>, +) -> Result<(&mut T, &mut [u8]), TryCastError<&mut [u8], T>> { + match Ptr::from_mut(candidate).try_cast_into::<T, BecauseExclusive>(cast_type, meta) { + Ok((candidate, prefix_suffix)) => { + // This call may panic. If that happens, it doesn't cause any soundness + // issues, as we have not generated any invalid state which we need to + // fix before returning. + // + // Note that one panic or post-monomorphization error condition is + // calling `try_into_valid` (and thus `is_bit_valid`) with a shared + // pointer when `Self: !Immutable`. Since `Self: Immutable`, this panic + // condition will not happen. 
+ match candidate.try_into_valid() { + Ok(valid) => Ok((valid.as_mut(), prefix_suffix.as_mut())), + Err(e) => Err(e.map_src(|src| src.as_bytes::<BecauseExclusive>().as_mut()).into()), + } + } + Err(e) => Err(e.map_src(Ptr::as_mut).into()), + } +} + +#[inline(always)] +fn swap<T, U>((t, u): (T, U)) -> (U, T) { + (u, t) +} + +/// # Safety +/// +/// All bytes of `candidate` must be initialized. +#[inline(always)] +unsafe fn try_read_from<S, T: TryFromBytes>( + source: S, + mut candidate: CoreMaybeUninit<T>, +) -> Result<T, TryReadError<S, T>> { + // We use `from_mut` despite not mutating via `c_ptr` so that we don't need + // to add a `T: Immutable` bound. + let c_ptr = Ptr::from_mut(&mut candidate); + // SAFETY: `c_ptr` has no uninitialized sub-ranges because it derived from + // `candidate`, which the caller promises is entirely initialized. Since + // `candidate` is a `MaybeUninit`, it has no validity requirements, and so + // no values written to an `Initialized` `c_ptr` can violate its validity. + // Since `c_ptr` has `Exclusive` aliasing, no mutations may happen except + // via `c_ptr` so long as it is live, so we don't need to worry about the + // fact that `c_ptr` may have more restricted validity than `candidate`. + let c_ptr = unsafe { c_ptr.assume_validity::<invariant::Initialized>() }; + let c_ptr = c_ptr.transmute(); + + // Since we don't have `T: KnownLayout`, we hack around that by using + // `Wrapping<T>`, which implements `KnownLayout` even if `T` doesn't. + // + // This call may panic. If that happens, it doesn't cause any soundness + // issues, as we have not generated any invalid state which we need to fix + // before returning. + // + // Note that one panic or post-monomorphization error condition is calling + // `try_into_valid` (and thus `is_bit_valid`) with a shared pointer when + // `Self: !Immutable`. Since `Self: Immutable`, this panic condition will + // not happen. + if !Wrapping::<T>::is_bit_valid(c_ptr.forget_aligned()) { + return Err(ValidityError::new(source).into()); + } + + fn _assert_same_size_and_validity<T>() + where + Wrapping<T>: pointer::TransmuteFrom<T, invariant::Valid, invariant::Valid>, + T: pointer::TransmuteFrom<Wrapping<T>, invariant::Valid, invariant::Valid>, + { + } + + _assert_same_size_and_validity::<T>(); + + // SAFETY: We just validated that `candidate` contains a valid + // `Wrapping<T>`, which has the same size and bit validity as `T`, as + // guaranteed by the preceding type assertion. + Ok(unsafe { candidate.assume_init() }) +} + +/// Types for which a sequence of `0` bytes is a valid instance. +/// +/// Any memory region of the appropriate length which is guaranteed to contain +/// only zero bytes can be viewed as any `FromZeros` type with no runtime +/// overhead. This is useful whenever memory is known to be in a zeroed state, +/// such memory returned from some allocation routines. +/// +/// # Warning: Padding bytes +/// +/// Note that, when a value is moved or copied, only the non-padding bytes of +/// that value are guaranteed to be preserved. It is unsound to assume that +/// values written to padding bytes are preserved after a move or copy. For more +/// details, see the [`FromBytes` docs][frombytes-warning-padding-bytes]. 
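Before the derive-focused material below, a minimal usage sketch of `FromZeros`, assuming zerocopy 0.8, where the built-in impls for integers and arrays already satisfy the trait:

```rust
use zerocopy::FromZeros;

fn main() {
    // For these types the all-zeros bit pattern is a valid value, so a zeroed
    // instance can be produced without writing an initializer by hand.
    let x: u32 = u32::new_zeroed();
    assert_eq!(x, 0);

    let block: [u16; 4] = FromZeros::new_zeroed();
    assert_eq!(block, [0u16; 4]);
}
```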
+/// +/// [frombytes-warning-padding-bytes]: FromBytes#warning-padding-bytes +/// +/// # Implementation +/// +/// **Do not implement this trait yourself!** Instead, use +/// [`#[derive(FromZeros)]`][derive]; e.g.: +/// +/// ``` +/// # use zerocopy_derive::{FromZeros, Immutable}; +/// #[derive(FromZeros)] +/// struct MyStruct { +/// # /* +/// ... +/// # */ +/// } +/// +/// #[derive(FromZeros)] +/// #[repr(u8)] +/// enum MyEnum { +/// # Variant0, +/// # /* +/// ... +/// # */ +/// } +/// +/// #[derive(FromZeros, Immutable)] +/// union MyUnion { +/// # variant: u8, +/// # /* +/// ... +/// # */ +/// } +/// ``` +/// +/// This derive performs a sophisticated, compile-time safety analysis to +/// determine whether a type is `FromZeros`. +/// +/// # Safety +/// +/// *This section describes what is required in order for `T: FromZeros`, and +/// what unsafe code may assume of such types. If you don't plan on implementing +/// `FromZeros` manually, and you don't plan on writing unsafe code that +/// operates on `FromZeros` types, then you don't need to read this section.* +/// +/// If `T: FromZeros`, then unsafe code may assume that it is sound to produce a +/// `T` whose bytes are all initialized to zero. If a type is marked as +/// `FromZeros` which violates this contract, it may cause undefined behavior. +/// +/// `#[derive(FromZeros)]` only permits [types which satisfy these +/// requirements][derive-analysis]. +/// +#[cfg_attr( + feature = "derive", + doc = "[derive]: zerocopy_derive::FromZeros", + doc = "[derive-analysis]: zerocopy_derive::FromZeros#analysis" +)] +#[cfg_attr( + not(feature = "derive"), + doc = concat!("[derive]: https://docs.rs/zerocopy/", env!("CARGO_PKG_VERSION"), "/zerocopy/derive.FromZeros.html"), + doc = concat!("[derive-analysis]: https://docs.rs/zerocopy/", env!("CARGO_PKG_VERSION"), "/zerocopy/derive.FromZeros.html#analysis"), +)] +#[cfg_attr( + not(no_zerocopy_diagnostic_on_unimplemented_1_78_0), + diagnostic::on_unimplemented(note = "Consider adding `#[derive(FromZeros)]` to `{Self}`") +)] +pub unsafe trait FromZeros: TryFromBytes { + // The `Self: Sized` bound makes it so that `FromZeros` is still object + // safe. + #[doc(hidden)] + fn only_derive_is_allowed_to_implement_this_trait() + where + Self: Sized; + + /// Overwrites `self` with zeros. + /// + /// Sets every byte in `self` to 0. While this is similar to doing `*self = + /// Self::new_zeroed()`, it differs in that `zero` does not semantically + /// drop the current value and replace it with a new one — it simply + /// modifies the bytes of the existing value. + /// + /// # Examples + /// + /// ``` + /// # use zerocopy::FromZeros; + /// # use zerocopy_derive::*; + /// # + /// #[derive(FromZeros)] + /// #[repr(C)] + /// struct PacketHeader { + /// src_port: [u8; 2], + /// dst_port: [u8; 2], + /// length: [u8; 2], + /// checksum: [u8; 2], + /// } + /// + /// let mut header = PacketHeader { + /// src_port: 100u16.to_be_bytes(), + /// dst_port: 200u16.to_be_bytes(), + /// length: 300u16.to_be_bytes(), + /// checksum: 400u16.to_be_bytes(), + /// }; + /// + /// header.zero(); + /// + /// assert_eq!(header.src_port, [0, 0]); + /// assert_eq!(header.dst_port, [0, 0]); + /// assert_eq!(header.length, [0, 0]); + /// assert_eq!(header.checksum, [0, 0]); + /// ``` + #[inline(always)] + fn zero(&mut self) { + let slf: *mut Self = self; + let len = mem::size_of_val(self); + // SAFETY: + // - `self` is guaranteed by the type system to be valid for writes of + // size `size_of_val(self)`. 
+ // - `u8`'s alignment is 1, and thus `self` is guaranteed to be aligned + // as required by `u8`. + // - Since `Self: FromZeros`, the all-zeros instance is a valid instance + // of `Self.` + // + // FIXME(#429): Add references to docs and quotes. + unsafe { ptr::write_bytes(slf.cast::<u8>(), 0, len) }; + } + + /// Creates an instance of `Self` from zeroed bytes. + /// + /// # Examples + /// + /// ``` + /// # use zerocopy::FromZeros; + /// # use zerocopy_derive::*; + /// # + /// #[derive(FromZeros)] + /// #[repr(C)] + /// struct PacketHeader { + /// src_port: [u8; 2], + /// dst_port: [u8; 2], + /// length: [u8; 2], + /// checksum: [u8; 2], + /// } + /// + /// let header: PacketHeader = FromZeros::new_zeroed(); + /// + /// assert_eq!(header.src_port, [0, 0]); + /// assert_eq!(header.dst_port, [0, 0]); + /// assert_eq!(header.length, [0, 0]); + /// assert_eq!(header.checksum, [0, 0]); + /// ``` + #[must_use = "has no side effects"] + #[inline(always)] + fn new_zeroed() -> Self + where + Self: Sized, + { + // SAFETY: `FromZeros` says that the all-zeros bit pattern is legal. + unsafe { mem::zeroed() } + } + + /// Creates a `Box<Self>` from zeroed bytes. + /// + /// This function is useful for allocating large values on the heap and + /// zero-initializing them, without ever creating a temporary instance of + /// `Self` on the stack. For example, `<[u8; 1048576]>::new_box_zeroed()` + /// will allocate `[u8; 1048576]` directly on the heap; it does not require + /// storing `[u8; 1048576]` in a temporary variable on the stack. + /// + /// On systems that use a heap implementation that supports allocating from + /// pre-zeroed memory, using `new_box_zeroed` (or related functions) may + /// have performance benefits. + /// + /// # Errors + /// + /// Returns an error on allocation failure. Allocation failure is guaranteed + /// never to cause a panic or an abort. + #[must_use = "has no side effects (other than allocation)"] + #[cfg(any(feature = "alloc", test))] + #[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))] + #[inline] + fn new_box_zeroed() -> Result<Box<Self>, AllocError> + where + Self: Sized, + { + // If `T` is a ZST, then return a proper boxed instance of it. There is + // no allocation, but `Box` does require a correct dangling pointer. + let layout = Layout::new::<Self>(); + if layout.size() == 0 { + // Construct the `Box` from a dangling pointer to avoid calling + // `Self::new_zeroed`. This ensures that stack space is never + // allocated for `Self` even on lower opt-levels where this branch + // might not get optimized out. + + // SAFETY: Per [1], when `T` is a ZST, `Box<T>`'s only validity + // requirements are that the pointer is non-null and sufficiently + // aligned. Per [2], `NonNull::dangling` produces a pointer which + // is sufficiently aligned. Since the produced pointer is a + // `NonNull`, it is non-null. + // + // [1] Per https://doc.rust-lang.org/1.81.0/std/boxed/index.html#memory-layout: + // + // For zero-sized values, the `Box` pointer has to be non-null and sufficiently aligned. + // + // [2] Per https://doc.rust-lang.org/std/ptr/struct.NonNull.html#method.dangling: + // + // Creates a new `NonNull` that is dangling, but well-aligned. + return Ok(unsafe { Box::from_raw(NonNull::dangling().as_ptr()) }); + } + + // FIXME(#429): Add a "SAFETY" comment and remove this `allow`. 
+ #[allow(clippy::undocumented_unsafe_blocks)] + let ptr = unsafe { alloc::alloc::alloc_zeroed(layout).cast::<Self>() }; + if ptr.is_null() { + return Err(AllocError); + } + // FIXME(#429): Add a "SAFETY" comment and remove this `allow`. + #[allow(clippy::undocumented_unsafe_blocks)] + Ok(unsafe { Box::from_raw(ptr) }) + } + + /// Creates a `Box<[Self]>` (a boxed slice) from zeroed bytes. + /// + /// This function is useful for allocating large values of `[Self]` on the + /// heap and zero-initializing them, without ever creating a temporary + /// instance of `[Self; _]` on the stack. For example, + /// `u8::new_box_slice_zeroed(1048576)` will allocate the slice directly on + /// the heap; it does not require storing the slice on the stack. + /// + /// On systems that use a heap implementation that supports allocating from + /// pre-zeroed memory, using `new_box_slice_zeroed` may have performance + /// benefits. + /// + /// If `Self` is a zero-sized type, then this function will return a + /// `Box<[Self]>` that has the correct `len`. Such a box cannot contain any + /// actual information, but its `len()` property will report the correct + /// value. + /// + /// # Errors + /// + /// Returns an error on allocation failure. Allocation failure is + /// guaranteed never to cause a panic or an abort. + #[must_use = "has no side effects (other than allocation)"] + #[cfg(feature = "alloc")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))] + #[inline] + fn new_box_zeroed_with_elems(count: usize) -> Result<Box<Self>, AllocError> + where + Self: KnownLayout<PointerMetadata = usize>, + { + // SAFETY: `alloc::alloc::alloc_zeroed` is a valid argument of + // `new_box`. The referent of the pointer returned by `alloc_zeroed` + // (and, consequently, the `Box` derived from it) is a valid instance of + // `Self`, because `Self` is `FromZeros`. + unsafe { crate::util::new_box(count, alloc::alloc::alloc_zeroed) } + } + + #[deprecated(since = "0.8.0", note = "renamed to `FromZeros::new_box_zeroed_with_elems`")] + #[doc(hidden)] + #[cfg(feature = "alloc")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))] + #[must_use = "has no side effects (other than allocation)"] + #[inline(always)] + fn new_box_slice_zeroed(len: usize) -> Result<Box<[Self]>, AllocError> + where + Self: Sized, + { + <[Self]>::new_box_zeroed_with_elems(len) + } + + /// Creates a `Vec<Self>` from zeroed bytes. + /// + /// This function is useful for allocating large values of `Vec`s and + /// zero-initializing them, without ever creating a temporary instance of + /// `[Self; _]` (or many temporary instances of `Self`) on the stack. For + /// example, `u8::new_vec_zeroed(1048576)` will allocate directly on the + /// heap; it does not require storing intermediate values on the stack. + /// + /// On systems that use a heap implementation that supports allocating from + /// pre-zeroed memory, using `new_vec_zeroed` may have performance benefits. + /// + /// If `Self` is a zero-sized type, then this function will return a + /// `Vec<Self>` that has the correct `len`. Such a `Vec` cannot contain any + /// actual information, but its `len()` property will report the correct + /// value. + /// + /// # Errors + /// + /// Returns an error on allocation failure. Allocation failure is + /// guaranteed never to cause a panic or an abort. 
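None of the allocating constructors carry examples in this block, so here is a minimal sketch of `new_box_zeroed`, `new_box_zeroed_with_elems`, and `new_vec_zeroed`, assuming zerocopy 0.8 with the `alloc` feature enabled:

```rust
use zerocopy::FromZeros;

fn main() {
    // A large zeroed buffer allocated directly on the heap, with no stack temporary.
    let buf: Box<[u8; 4096]> = <[u8; 4096]>::new_box_zeroed().unwrap();
    assert!(buf.iter().all(|&b| b == 0));

    // A zeroed boxed slice whose element count is chosen at runtime.
    let words: Box<[u32]> = <[u32]>::new_box_zeroed_with_elems(16).unwrap();
    assert_eq!(words.len(), 16);
    assert!(words.iter().all(|&w| w == 0));

    // A zeroed `Vec` of a given length.
    let v: Vec<u8> = u8::new_vec_zeroed(8).unwrap();
    assert_eq!(v, vec![0u8; 8]);
}
```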
+ #[must_use = "has no side effects (other than allocation)"] + #[cfg(feature = "alloc")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))] + #[inline(always)] + fn new_vec_zeroed(len: usize) -> Result<Vec<Self>, AllocError> + where + Self: Sized, + { + <[Self]>::new_box_zeroed_with_elems(len).map(Into::into) + } + + /// Extends a `Vec<Self>` by pushing `additional` new items onto the end of + /// the vector. The new items are initialized with zeros. + #[cfg(not(no_zerocopy_panic_in_const_and_vec_try_reserve_1_57_0))] + #[cfg(feature = "alloc")] + #[cfg_attr(doc_cfg, doc(cfg(all(rust = "1.57.0", feature = "alloc"))))] + #[inline(always)] + fn extend_vec_zeroed(v: &mut Vec<Self>, additional: usize) -> Result<(), AllocError> + where + Self: Sized, + { + // PANICS: We pass `v.len()` for `position`, so the `position > v.len()` + // panic condition is not satisfied. + <Self as FromZeros>::insert_vec_zeroed(v, v.len(), additional) + } + + /// Inserts `additional` new items into `Vec<Self>` at `position`. The new + /// items are initialized with zeros. + /// + /// # Panics + /// + /// Panics if `position > v.len()`. + #[cfg(not(no_zerocopy_panic_in_const_and_vec_try_reserve_1_57_0))] + #[cfg(feature = "alloc")] + #[cfg_attr(doc_cfg, doc(cfg(all(rust = "1.57.0", feature = "alloc"))))] + #[inline] + fn insert_vec_zeroed( + v: &mut Vec<Self>, + position: usize, + additional: usize, + ) -> Result<(), AllocError> + where + Self: Sized, + { + assert!(position <= v.len()); + // We only conditionally compile on versions on which `try_reserve` is + // stable; the Clippy lint is a false positive. + v.try_reserve(additional).map_err(|_| AllocError)?; + // SAFETY: The `try_reserve` call guarantees that these cannot overflow: + // * `ptr.add(position)` + // * `position + additional` + // * `v.len() + additional` + // + // `v.len() - position` cannot overflow because we asserted that + // `position <= v.len()`. + #[allow(clippy::multiple_unsafe_ops_per_block)] + unsafe { + // This is a potentially overlapping copy. + let ptr = v.as_mut_ptr(); + #[allow(clippy::arithmetic_side_effects)] + ptr.add(position).copy_to(ptr.add(position + additional), v.len() - position); + ptr.add(position).write_bytes(0, additional); + #[allow(clippy::arithmetic_side_effects)] + v.set_len(v.len() + additional); + } + + Ok(()) + } +} + +/// Analyzes whether a type is [`FromBytes`]. +/// +/// This derive analyzes, at compile time, whether the annotated type satisfies +/// the [safety conditions] of `FromBytes` and implements `FromBytes` and its +/// supertraits if it is sound to do so. This derive can be applied to structs, +/// enums, and unions; +/// e.g.: +/// +/// ``` +/// # use zerocopy_derive::{FromBytes, FromZeros, Immutable}; +/// #[derive(FromBytes)] +/// struct MyStruct { +/// # /* +/// ... 
+/// # */ +/// } +/// +/// #[derive(FromBytes)] +/// #[repr(u8)] +/// enum MyEnum { +/// # V00, V01, V02, V03, V04, V05, V06, V07, V08, V09, V0A, V0B, V0C, V0D, V0E, +/// # V0F, V10, V11, V12, V13, V14, V15, V16, V17, V18, V19, V1A, V1B, V1C, V1D, +/// # V1E, V1F, V20, V21, V22, V23, V24, V25, V26, V27, V28, V29, V2A, V2B, V2C, +/// # V2D, V2E, V2F, V30, V31, V32, V33, V34, V35, V36, V37, V38, V39, V3A, V3B, +/// # V3C, V3D, V3E, V3F, V40, V41, V42, V43, V44, V45, V46, V47, V48, V49, V4A, +/// # V4B, V4C, V4D, V4E, V4F, V50, V51, V52, V53, V54, V55, V56, V57, V58, V59, +/// # V5A, V5B, V5C, V5D, V5E, V5F, V60, V61, V62, V63, V64, V65, V66, V67, V68, +/// # V69, V6A, V6B, V6C, V6D, V6E, V6F, V70, V71, V72, V73, V74, V75, V76, V77, +/// # V78, V79, V7A, V7B, V7C, V7D, V7E, V7F, V80, V81, V82, V83, V84, V85, V86, +/// # V87, V88, V89, V8A, V8B, V8C, V8D, V8E, V8F, V90, V91, V92, V93, V94, V95, +/// # V96, V97, V98, V99, V9A, V9B, V9C, V9D, V9E, V9F, VA0, VA1, VA2, VA3, VA4, +/// # VA5, VA6, VA7, VA8, VA9, VAA, VAB, VAC, VAD, VAE, VAF, VB0, VB1, VB2, VB3, +/// # VB4, VB5, VB6, VB7, VB8, VB9, VBA, VBB, VBC, VBD, VBE, VBF, VC0, VC1, VC2, +/// # VC3, VC4, VC5, VC6, VC7, VC8, VC9, VCA, VCB, VCC, VCD, VCE, VCF, VD0, VD1, +/// # VD2, VD3, VD4, VD5, VD6, VD7, VD8, VD9, VDA, VDB, VDC, VDD, VDE, VDF, VE0, +/// # VE1, VE2, VE3, VE4, VE5, VE6, VE7, VE8, VE9, VEA, VEB, VEC, VED, VEE, VEF, +/// # VF0, VF1, VF2, VF3, VF4, VF5, VF6, VF7, VF8, VF9, VFA, VFB, VFC, VFD, VFE, +/// # VFF, +/// # /* +/// ... +/// # */ +/// } +/// +/// #[derive(FromBytes, Immutable)] +/// union MyUnion { +/// # variant: u8, +/// # /* +/// ... +/// # */ +/// } +/// ``` +/// +/// [safety conditions]: trait@FromBytes#safety +/// +/// # Analysis +/// +/// *This section describes, roughly, the analysis performed by this derive to +/// determine whether it is sound to implement `FromBytes` for a given type. +/// Unless you are modifying the implementation of this derive, or attempting to +/// manually implement `FromBytes` for a type yourself, you don't need to read +/// this section.* +/// +/// If a type has the following properties, then this derive can implement +/// `FromBytes` for that type: +/// +/// - If the type is a struct, all of its fields must be `FromBytes`. +/// - If the type is an enum: +/// - It must have a defined representation which is one of `u8`, `u16`, `i8`, +/// or `i16`. +/// - The maximum number of discriminants must be used (so that every possible +/// bit pattern is a valid one). +/// - Its fields must be `FromBytes`. +/// +/// This analysis is subject to change. Unsafe code may *only* rely on the +/// documented [safety conditions] of `FromBytes`, and must *not* rely on the +/// implementation details of this derive. +/// +/// ## Why isn't an explicit representation required for structs? +/// +/// Neither this derive, nor the [safety conditions] of `FromBytes`, requires +/// that structs are marked with `#[repr(C)]`. +/// +/// Per the [Rust reference](reference), +/// +/// > The representation of a type can change the padding between fields, but +/// > does not change the layout of the fields themselves. +/// +/// [reference]: https://doc.rust-lang.org/reference/type-layout.html#representations +/// +/// Since the layout of structs only consists of padding bytes and field bytes, +/// a struct is soundly `FromBytes` if: +/// 1. its padding is soundly `FromBytes`, and +/// 2. its fields are soundly `FromBytes`. 
+/// +/// The answer to the first question is always yes: padding bytes do not have +/// any validity constraints. A [discussion] of this question in the Unsafe Code +/// Guidelines Working Group concluded that it would be virtually unimaginable +/// for future versions of rustc to add validity constraints to padding bytes. +/// +/// [discussion]: https://github.com/rust-lang/unsafe-code-guidelines/issues/174 +/// +/// Whether a struct is soundly `FromBytes` therefore solely depends on whether +/// its fields are `FromBytes`. +#[cfg(any(feature = "derive", test))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))] +pub use zerocopy_derive::FromBytes; + +/// Types for which any bit pattern is valid. +/// +/// Any memory region of the appropriate length which contains initialized bytes +/// can be viewed as any `FromBytes` type with no runtime overhead. This is +/// useful for efficiently parsing bytes as structured data. +/// +/// # Warning: Padding bytes +/// +/// Note that, when a value is moved or copied, only the non-padding bytes of +/// that value are guaranteed to be preserved. It is unsound to assume that +/// values written to padding bytes are preserved after a move or copy. For +/// example, the following is unsound: +/// +/// ```rust,no_run +/// use core::mem::{size_of, transmute}; +/// use zerocopy::FromZeros; +/// # use zerocopy_derive::*; +/// +/// // Assume `Foo` is a type with padding bytes. +/// #[derive(FromZeros, Default)] +/// struct Foo { +/// # /* +/// ... +/// # */ +/// } +/// +/// let mut foo: Foo = Foo::default(); +/// FromZeros::zero(&mut foo); +/// // UNSOUND: Although `FromZeros::zero` writes zeros to all bytes of `foo`, +/// // those writes are not guaranteed to be preserved in padding bytes when +/// // `foo` is moved, so this may expose padding bytes as `u8`s. +/// let foo_bytes: [u8; size_of::<Foo>()] = unsafe { transmute(foo) }; +/// ``` +/// +/// # Implementation +/// +/// **Do not implement this trait yourself!** Instead, use +/// [`#[derive(FromBytes)]`][derive]; e.g.: +/// +/// ``` +/// # use zerocopy_derive::{FromBytes, Immutable}; +/// #[derive(FromBytes)] +/// struct MyStruct { +/// # /* +/// ... 
+/// # */ +/// } +/// +/// #[derive(FromBytes)] +/// #[repr(u8)] +/// enum MyEnum { +/// # V00, V01, V02, V03, V04, V05, V06, V07, V08, V09, V0A, V0B, V0C, V0D, V0E, +/// # V0F, V10, V11, V12, V13, V14, V15, V16, V17, V18, V19, V1A, V1B, V1C, V1D, +/// # V1E, V1F, V20, V21, V22, V23, V24, V25, V26, V27, V28, V29, V2A, V2B, V2C, +/// # V2D, V2E, V2F, V30, V31, V32, V33, V34, V35, V36, V37, V38, V39, V3A, V3B, +/// # V3C, V3D, V3E, V3F, V40, V41, V42, V43, V44, V45, V46, V47, V48, V49, V4A, +/// # V4B, V4C, V4D, V4E, V4F, V50, V51, V52, V53, V54, V55, V56, V57, V58, V59, +/// # V5A, V5B, V5C, V5D, V5E, V5F, V60, V61, V62, V63, V64, V65, V66, V67, V68, +/// # V69, V6A, V6B, V6C, V6D, V6E, V6F, V70, V71, V72, V73, V74, V75, V76, V77, +/// # V78, V79, V7A, V7B, V7C, V7D, V7E, V7F, V80, V81, V82, V83, V84, V85, V86, +/// # V87, V88, V89, V8A, V8B, V8C, V8D, V8E, V8F, V90, V91, V92, V93, V94, V95, +/// # V96, V97, V98, V99, V9A, V9B, V9C, V9D, V9E, V9F, VA0, VA1, VA2, VA3, VA4, +/// # VA5, VA6, VA7, VA8, VA9, VAA, VAB, VAC, VAD, VAE, VAF, VB0, VB1, VB2, VB3, +/// # VB4, VB5, VB6, VB7, VB8, VB9, VBA, VBB, VBC, VBD, VBE, VBF, VC0, VC1, VC2, +/// # VC3, VC4, VC5, VC6, VC7, VC8, VC9, VCA, VCB, VCC, VCD, VCE, VCF, VD0, VD1, +/// # VD2, VD3, VD4, VD5, VD6, VD7, VD8, VD9, VDA, VDB, VDC, VDD, VDE, VDF, VE0, +/// # VE1, VE2, VE3, VE4, VE5, VE6, VE7, VE8, VE9, VEA, VEB, VEC, VED, VEE, VEF, +/// # VF0, VF1, VF2, VF3, VF4, VF5, VF6, VF7, VF8, VF9, VFA, VFB, VFC, VFD, VFE, +/// # VFF, +/// # /* +/// ... +/// # */ +/// } +/// +/// #[derive(FromBytes, Immutable)] +/// union MyUnion { +/// # variant: u8, +/// # /* +/// ... +/// # */ +/// } +/// ``` +/// +/// This derive performs a sophisticated, compile-time safety analysis to +/// determine whether a type is `FromBytes`. +/// +/// # Safety +/// +/// *This section describes what is required in order for `T: FromBytes`, and +/// what unsafe code may assume of such types. If you don't plan on implementing +/// `FromBytes` manually, and you don't plan on writing unsafe code that +/// operates on `FromBytes` types, then you don't need to read this section.* +/// +/// If `T: FromBytes`, then unsafe code may assume that it is sound to produce a +/// `T` whose bytes are initialized to any sequence of valid `u8`s (in other +/// words, any byte value which is not uninitialized). If a type is marked as +/// `FromBytes` which violates this contract, it may cause undefined behavior. +/// +/// `#[derive(FromBytes)]` only permits [types which satisfy these +/// requirements][derive-analysis]. +/// +#[cfg_attr( + feature = "derive", + doc = "[derive]: zerocopy_derive::FromBytes", + doc = "[derive-analysis]: zerocopy_derive::FromBytes#analysis" +)] +#[cfg_attr( + not(feature = "derive"), + doc = concat!("[derive]: https://docs.rs/zerocopy/", env!("CARGO_PKG_VERSION"), "/zerocopy/derive.FromBytes.html"), + doc = concat!("[derive-analysis]: https://docs.rs/zerocopy/", env!("CARGO_PKG_VERSION"), "/zerocopy/derive.FromBytes.html#analysis"), +)] +#[cfg_attr( + not(no_zerocopy_diagnostic_on_unimplemented_1_78_0), + diagnostic::on_unimplemented(note = "Consider adding `#[derive(FromBytes)]` to `{Self}`") +)] +pub unsafe trait FromBytes: FromZeros { + // The `Self: Sized` bound makes it so that `FromBytes` is still object + // safe. + #[doc(hidden)] + fn only_derive_is_allowed_to_implement_this_trait() + where + Self: Sized; + + /// Interprets the given `source` as a `&Self`. + /// + /// This method attempts to return a reference to `source` interpreted as a + /// `Self`. 
If the length of `source` is not a [valid size of + /// `Self`][valid-size], or if `source` is not appropriately aligned, this + /// returns `Err`. If [`Self: Unaligned`][self-unaligned], you can + /// [infallibly discard the alignment error][size-error-from]. + /// + /// `Self` may be a sized type, a slice, or a [slice DST][slice-dst]. + /// + /// [valid-size]: crate::KnownLayout#what-is-a-valid-size + /// [self-unaligned]: Unaligned + /// [size-error-from]: error/struct.SizeError.html#method.from-1 + /// [slice-dst]: KnownLayout#dynamically-sized-types + /// + /// # Compile-Time Assertions + /// + /// This method cannot yet be used on unsized types whose dynamically-sized + /// component is zero-sized. Attempting to use this method on such types + /// results in a compile-time assertion error; e.g.: + /// + /// ```compile_fail,E0080 + /// use zerocopy::*; + /// # use zerocopy_derive::*; + /// + /// #[derive(FromBytes, Immutable, KnownLayout)] + /// #[repr(C)] + /// struct ZSTy { + /// leading_sized: u16, + /// trailing_dst: [()], + /// } + /// + /// let _ = ZSTy::ref_from_bytes(0u16.as_bytes()); // ⚠ Compile Error! + /// ``` + /// + /// # Examples + /// + /// ``` + /// use zerocopy::FromBytes; + /// # use zerocopy_derive::*; + /// + /// #[derive(FromBytes, KnownLayout, Immutable)] + /// #[repr(C)] + /// struct PacketHeader { + /// src_port: [u8; 2], + /// dst_port: [u8; 2], + /// length: [u8; 2], + /// checksum: [u8; 2], + /// } + /// + /// #[derive(FromBytes, KnownLayout, Immutable)] + /// #[repr(C)] + /// struct Packet { + /// header: PacketHeader, + /// body: [u8], + /// } + /// + /// // These bytes encode a `Packet`. + /// let bytes = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11][..]; + /// + /// let packet = Packet::ref_from_bytes(bytes).unwrap(); + /// + /// assert_eq!(packet.header.src_port, [0, 1]); + /// assert_eq!(packet.header.dst_port, [2, 3]); + /// assert_eq!(packet.header.length, [4, 5]); + /// assert_eq!(packet.header.checksum, [6, 7]); + /// assert_eq!(packet.body, [8, 9, 10, 11]); + /// ``` + #[must_use = "has no side effects"] + #[inline] + fn ref_from_bytes(source: &[u8]) -> Result<&Self, CastError<&[u8], Self>> + where + Self: KnownLayout + Immutable, + { + static_assert_dst_is_not_zst!(Self); + match Ptr::from_ref(source).try_cast_into_no_leftover::<_, BecauseImmutable>(None) { + Ok(ptr) => Ok(ptr.recall_validity().as_ref()), + Err(err) => Err(err.map_src(|src| src.as_ref())), + } + } + + /// Interprets the prefix of the given `source` as a `&Self` without + /// copying. + /// + /// This method computes the [largest possible size of `Self`][valid-size] + /// that can fit in the leading bytes of `source`, then attempts to return + /// both a reference to those bytes interpreted as a `Self`, and a reference + /// to the remaining bytes. If there are insufficient bytes, or if `source` + /// is not appropriately aligned, this returns `Err`. If [`Self: + /// Unaligned`][self-unaligned], you can [infallibly discard the alignment + /// error][size-error-from]. + /// + /// `Self` may be a sized type, a slice, or a [slice DST][slice-dst]. + /// + /// [valid-size]: crate::KnownLayout#what-is-a-valid-size + /// [self-unaligned]: Unaligned + /// [size-error-from]: error/struct.SizeError.html#method.from-1 + /// [slice-dst]: KnownLayout#dynamically-sized-types + /// + /// # Compile-Time Assertions + /// + /// This method cannot yet be used on unsized types whose dynamically-sized + /// component is zero-sized. See [`ref_from_prefix_with_elems`], which does + /// support such types. 
Attempting to use this method on such types results + /// in a compile-time assertion error; e.g.: + /// + /// ```compile_fail,E0080 + /// use zerocopy::*; + /// # use zerocopy_derive::*; + /// + /// #[derive(FromBytes, Immutable, KnownLayout)] + /// #[repr(C)] + /// struct ZSTy { + /// leading_sized: u16, + /// trailing_dst: [()], + /// } + /// + /// let _ = ZSTy::ref_from_prefix(0u16.as_bytes()); // ⚠ Compile Error! + /// ``` + /// + /// [`ref_from_prefix_with_elems`]: FromBytes::ref_from_prefix_with_elems + /// + /// # Examples + /// + /// ``` + /// use zerocopy::FromBytes; + /// # use zerocopy_derive::*; + /// + /// #[derive(FromBytes, KnownLayout, Immutable)] + /// #[repr(C)] + /// struct PacketHeader { + /// src_port: [u8; 2], + /// dst_port: [u8; 2], + /// length: [u8; 2], + /// checksum: [u8; 2], + /// } + /// + /// #[derive(FromBytes, KnownLayout, Immutable)] + /// #[repr(C)] + /// struct Packet { + /// header: PacketHeader, + /// body: [[u8; 2]], + /// } + /// + /// // These are more bytes than are needed to encode a `Packet`. + /// let bytes = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14][..]; + /// + /// let (packet, suffix) = Packet::ref_from_prefix(bytes).unwrap(); + /// + /// assert_eq!(packet.header.src_port, [0, 1]); + /// assert_eq!(packet.header.dst_port, [2, 3]); + /// assert_eq!(packet.header.length, [4, 5]); + /// assert_eq!(packet.header.checksum, [6, 7]); + /// assert_eq!(packet.body, [[8, 9], [10, 11], [12, 13]]); + /// assert_eq!(suffix, &[14u8][..]); + /// ``` + #[must_use = "has no side effects"] + #[inline] + fn ref_from_prefix(source: &[u8]) -> Result<(&Self, &[u8]), CastError<&[u8], Self>> + where + Self: KnownLayout + Immutable, + { + static_assert_dst_is_not_zst!(Self); + ref_from_prefix_suffix(source, None, CastType::Prefix) + } + + /// Interprets the suffix of the given bytes as a `&Self`. + /// + /// This method computes the [largest possible size of `Self`][valid-size] + /// that can fit in the trailing bytes of `source`, then attempts to return + /// both a reference to those bytes interpreted as a `Self`, and a reference + /// to the preceding bytes. If there are insufficient bytes, or if that + /// suffix of `source` is not appropriately aligned, this returns `Err`. If + /// [`Self: Unaligned`][self-unaligned], you can [infallibly discard the + /// alignment error][size-error-from]. + /// + /// `Self` may be a sized type, a slice, or a [slice DST][slice-dst]. + /// + /// [valid-size]: crate::KnownLayout#what-is-a-valid-size + /// [self-unaligned]: Unaligned + /// [size-error-from]: error/struct.SizeError.html#method.from-1 + /// [slice-dst]: KnownLayout#dynamically-sized-types + /// + /// # Compile-Time Assertions + /// + /// This method cannot yet be used on unsized types whose dynamically-sized + /// component is zero-sized. See [`ref_from_suffix_with_elems`], which does + /// support such types. Attempting to use this method on such types results + /// in a compile-time assertion error; e.g.: + /// + /// ```compile_fail,E0080 + /// use zerocopy::*; + /// # use zerocopy_derive::*; + /// + /// #[derive(FromBytes, Immutable, KnownLayout)] + /// #[repr(C)] + /// struct ZSTy { + /// leading_sized: u16, + /// trailing_dst: [()], + /// } + /// + /// let _ = ZSTy::ref_from_suffix(0u16.as_bytes()); // ⚠ Compile Error! 
+ /// ``` + /// + /// [`ref_from_suffix_with_elems`]: FromBytes::ref_from_suffix_with_elems + /// + /// # Examples + /// + /// ``` + /// use zerocopy::FromBytes; + /// # use zerocopy_derive::*; + /// + /// #[derive(FromBytes, Immutable, KnownLayout)] + /// #[repr(C)] + /// struct PacketTrailer { + /// frame_check_sequence: [u8; 4], + /// } + /// + /// // These are more bytes than are needed to encode a `PacketTrailer`. + /// let bytes = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9][..]; + /// + /// let (prefix, trailer) = PacketTrailer::ref_from_suffix(bytes).unwrap(); + /// + /// assert_eq!(prefix, &[0, 1, 2, 3, 4, 5][..]); + /// assert_eq!(trailer.frame_check_sequence, [6, 7, 8, 9]); + /// ``` + #[must_use = "has no side effects"] + #[inline] + fn ref_from_suffix(source: &[u8]) -> Result<(&[u8], &Self), CastError<&[u8], Self>> + where + Self: Immutable + KnownLayout, + { + static_assert_dst_is_not_zst!(Self); + ref_from_prefix_suffix(source, None, CastType::Suffix).map(swap) + } + + /// Interprets the given `source` as a `&mut Self`. + /// + /// This method attempts to return a reference to `source` interpreted as a + /// `Self`. If the length of `source` is not a [valid size of + /// `Self`][valid-size], or if `source` is not appropriately aligned, this + /// returns `Err`. If [`Self: Unaligned`][self-unaligned], you can + /// [infallibly discard the alignment error][size-error-from]. + /// + /// `Self` may be a sized type, a slice, or a [slice DST][slice-dst]. + /// + /// [valid-size]: crate::KnownLayout#what-is-a-valid-size + /// [self-unaligned]: Unaligned + /// [size-error-from]: error/struct.SizeError.html#method.from-1 + /// [slice-dst]: KnownLayout#dynamically-sized-types + /// + /// # Compile-Time Assertions + /// + /// This method cannot yet be used on unsized types whose dynamically-sized + /// component is zero-sized. See [`mut_from_prefix_with_elems`], which does + /// support such types. Attempting to use this method on such types results + /// in a compile-time assertion error; e.g.: + /// + /// ```compile_fail,E0080 + /// use zerocopy::*; + /// # use zerocopy_derive::*; + /// + /// #[derive(FromBytes, Immutable, IntoBytes, KnownLayout)] + /// #[repr(C, packed)] + /// struct ZSTy { + /// leading_sized: [u8; 2], + /// trailing_dst: [()], + /// } + /// + /// let mut source = [85, 85]; + /// let _ = ZSTy::mut_from_bytes(&mut source[..]); // ⚠ Compile Error! + /// ``` + /// + /// [`mut_from_prefix_with_elems`]: FromBytes::mut_from_prefix_with_elems + /// + /// # Examples + /// + /// ``` + /// use zerocopy::FromBytes; + /// # use zerocopy_derive::*; + /// + /// #[derive(FromBytes, IntoBytes, KnownLayout, Immutable)] + /// #[repr(C)] + /// struct PacketHeader { + /// src_port: [u8; 2], + /// dst_port: [u8; 2], + /// length: [u8; 2], + /// checksum: [u8; 2], + /// } + /// + /// // These bytes encode a `PacketHeader`. 
+ /// let bytes = &mut [0, 1, 2, 3, 4, 5, 6, 7][..]; + /// + /// let header = PacketHeader::mut_from_bytes(bytes).unwrap(); + /// + /// assert_eq!(header.src_port, [0, 1]); + /// assert_eq!(header.dst_port, [2, 3]); + /// assert_eq!(header.length, [4, 5]); + /// assert_eq!(header.checksum, [6, 7]); + /// + /// header.checksum = [0, 0]; + /// + /// assert_eq!(bytes, [0, 1, 2, 3, 4, 5, 0, 0]); + /// ``` + #[must_use = "has no side effects"] + #[inline] + fn mut_from_bytes(source: &mut [u8]) -> Result<&mut Self, CastError<&mut [u8], Self>> + where + Self: IntoBytes + KnownLayout, + { + static_assert_dst_is_not_zst!(Self); + match Ptr::from_mut(source).try_cast_into_no_leftover::<_, BecauseExclusive>(None) { + Ok(ptr) => Ok(ptr.recall_validity::<_, (_, (_, _))>().as_mut()), + Err(err) => Err(err.map_src(|src| src.as_mut())), + } + } + + /// Interprets the prefix of the given `source` as a `&mut Self` without + /// copying. + /// + /// This method computes the [largest possible size of `Self`][valid-size] + /// that can fit in the leading bytes of `source`, then attempts to return + /// both a reference to those bytes interpreted as a `Self`, and a reference + /// to the remaining bytes. If there are insufficient bytes, or if `source` + /// is not appropriately aligned, this returns `Err`. If [`Self: + /// Unaligned`][self-unaligned], you can [infallibly discard the alignment + /// error][size-error-from]. + /// + /// `Self` may be a sized type, a slice, or a [slice DST][slice-dst]. + /// + /// [valid-size]: crate::KnownLayout#what-is-a-valid-size + /// [self-unaligned]: Unaligned + /// [size-error-from]: error/struct.SizeError.html#method.from-1 + /// [slice-dst]: KnownLayout#dynamically-sized-types + /// + /// # Compile-Time Assertions + /// + /// This method cannot yet be used on unsized types whose dynamically-sized + /// component is zero-sized. See [`mut_from_suffix_with_elems`], which does + /// support such types. Attempting to use this method on such types results + /// in a compile-time assertion error; e.g.: + /// + /// ```compile_fail,E0080 + /// use zerocopy::*; + /// # use zerocopy_derive::*; + /// + /// #[derive(FromBytes, Immutable, IntoBytes, KnownLayout)] + /// #[repr(C, packed)] + /// struct ZSTy { + /// leading_sized: [u8; 2], + /// trailing_dst: [()], + /// } + /// + /// let mut source = [85, 85]; + /// let _ = ZSTy::mut_from_prefix(&mut source[..]); // ⚠ Compile Error! + /// ``` + /// + /// [`mut_from_suffix_with_elems`]: FromBytes::mut_from_suffix_with_elems + /// + /// # Examples + /// + /// ``` + /// use zerocopy::FromBytes; + /// # use zerocopy_derive::*; + /// + /// #[derive(FromBytes, IntoBytes, KnownLayout, Immutable)] + /// #[repr(C)] + /// struct PacketHeader { + /// src_port: [u8; 2], + /// dst_port: [u8; 2], + /// length: [u8; 2], + /// checksum: [u8; 2], + /// } + /// + /// // These are more bytes than are needed to encode a `PacketHeader`. 
+ /// let bytes = &mut [0, 1, 2, 3, 4, 5, 6, 7, 8, 9][..]; + /// + /// let (header, body) = PacketHeader::mut_from_prefix(bytes).unwrap(); + /// + /// assert_eq!(header.src_port, [0, 1]); + /// assert_eq!(header.dst_port, [2, 3]); + /// assert_eq!(header.length, [4, 5]); + /// assert_eq!(header.checksum, [6, 7]); + /// assert_eq!(body, &[8, 9][..]); + /// + /// header.checksum = [0, 0]; + /// body.fill(1); + /// + /// assert_eq!(bytes, [0, 1, 2, 3, 4, 5, 0, 0, 1, 1]); + /// ``` + #[must_use = "has no side effects"] + #[inline] + fn mut_from_prefix( + source: &mut [u8], + ) -> Result<(&mut Self, &mut [u8]), CastError<&mut [u8], Self>> + where + Self: IntoBytes + KnownLayout, + { + static_assert_dst_is_not_zst!(Self); + mut_from_prefix_suffix(source, None, CastType::Prefix) + } + + /// Interprets the suffix of the given `source` as a `&mut Self` without + /// copying. + /// + /// This method computes the [largest possible size of `Self`][valid-size] + /// that can fit in the trailing bytes of `source`, then attempts to return + /// both a reference to those bytes interpreted as a `Self`, and a reference + /// to the preceding bytes. If there are insufficient bytes, or if that + /// suffix of `source` is not appropriately aligned, this returns `Err`. If + /// [`Self: Unaligned`][self-unaligned], you can [infallibly discard the + /// alignment error][size-error-from]. + /// + /// `Self` may be a sized type, a slice, or a [slice DST][slice-dst]. + /// + /// [valid-size]: crate::KnownLayout#what-is-a-valid-size + /// [self-unaligned]: Unaligned + /// [size-error-from]: error/struct.SizeError.html#method.from-1 + /// [slice-dst]: KnownLayout#dynamically-sized-types + /// + /// # Compile-Time Assertions + /// + /// This method cannot yet be used on unsized types whose dynamically-sized + /// component is zero-sized. Attempting to use this method on such types + /// results in a compile-time assertion error; e.g.: + /// + /// ```compile_fail,E0080 + /// use zerocopy::*; + /// # use zerocopy_derive::*; + /// + /// #[derive(FromBytes, Immutable, IntoBytes, KnownLayout)] + /// #[repr(C, packed)] + /// struct ZSTy { + /// leading_sized: [u8; 2], + /// trailing_dst: [()], + /// } + /// + /// let mut source = [85, 85]; + /// let _ = ZSTy::mut_from_suffix(&mut source[..]); // ⚠ Compile Error! + /// ``` + /// + /// # Examples + /// + /// ``` + /// use zerocopy::FromBytes; + /// # use zerocopy_derive::*; + /// + /// #[derive(FromBytes, IntoBytes, KnownLayout, Immutable)] + /// #[repr(C)] + /// struct PacketTrailer { + /// frame_check_sequence: [u8; 4], + /// } + /// + /// // These are more bytes than are needed to encode a `PacketTrailer`. + /// let bytes = &mut [0, 1, 2, 3, 4, 5, 6, 7, 8, 9][..]; + /// + /// let (prefix, trailer) = PacketTrailer::mut_from_suffix(bytes).unwrap(); + /// + /// assert_eq!(prefix, &[0u8, 1, 2, 3, 4, 5][..]); + /// assert_eq!(trailer.frame_check_sequence, [6, 7, 8, 9]); + /// + /// prefix.fill(0); + /// trailer.frame_check_sequence.fill(1); + /// + /// assert_eq!(bytes, [0, 0, 0, 0, 0, 0, 1, 1, 1, 1]); + /// ``` + #[must_use = "has no side effects"] + #[inline] + fn mut_from_suffix( + source: &mut [u8], + ) -> Result<(&mut [u8], &mut Self), CastError<&mut [u8], Self>> + where + Self: IntoBytes + KnownLayout, + { + static_assert_dst_is_not_zst!(Self); + mut_from_prefix_suffix(source, None, CastType::Suffix).map(swap) + } + + /// Interprets the given `source` as a `&Self` with a DST length equal to + /// `count`. 
+ /// + /// This method attempts to return a reference to `source` interpreted as a + /// `Self` with `count` trailing elements. If the length of `source` is not + /// equal to the size of `Self` with `count` elements, or if `source` is not + /// appropriately aligned, this returns `Err`. If [`Self: + /// Unaligned`][self-unaligned], you can [infallibly discard the alignment + /// error][size-error-from]. + /// + /// [self-unaligned]: Unaligned + /// [size-error-from]: error/struct.SizeError.html#method.from-1 + /// + /// # Examples + /// + /// ``` + /// use zerocopy::FromBytes; + /// # use zerocopy_derive::*; + /// + /// # #[derive(Debug, PartialEq, Eq)] + /// #[derive(FromBytes, Immutable)] + /// #[repr(C)] + /// struct Pixel { + /// r: u8, + /// g: u8, + /// b: u8, + /// a: u8, + /// } + /// + /// let bytes = &[0, 1, 2, 3, 4, 5, 6, 7][..]; + /// + /// let pixels = <[Pixel]>::ref_from_bytes_with_elems(bytes, 2).unwrap(); + /// + /// assert_eq!(pixels, &[ + /// Pixel { r: 0, g: 1, b: 2, a: 3 }, + /// Pixel { r: 4, g: 5, b: 6, a: 7 }, + /// ]); + /// + /// ``` + /// + /// Since an explicit `count` is provided, this method supports types with + /// zero-sized trailing slice elements. Methods such as [`ref_from_bytes`] + /// which do not take an explicit count do not support such types. + /// + /// ``` + /// use zerocopy::*; + /// # use zerocopy_derive::*; + /// + /// #[derive(FromBytes, Immutable, KnownLayout)] + /// #[repr(C)] + /// struct ZSTy { + /// leading_sized: [u8; 2], + /// trailing_dst: [()], + /// } + /// + /// let src = &[85, 85][..]; + /// let zsty = ZSTy::ref_from_bytes_with_elems(src, 42).unwrap(); + /// assert_eq!(zsty.trailing_dst.len(), 42); + /// ``` + /// + /// [`ref_from_bytes`]: FromBytes::ref_from_bytes + #[must_use = "has no side effects"] + #[inline] + fn ref_from_bytes_with_elems( + source: &[u8], + count: usize, + ) -> Result<&Self, CastError<&[u8], Self>> + where + Self: KnownLayout<PointerMetadata = usize> + Immutable, + { + let source = Ptr::from_ref(source); + let maybe_slf = source.try_cast_into_no_leftover::<_, BecauseImmutable>(Some(count)); + match maybe_slf { + Ok(slf) => Ok(slf.recall_validity().as_ref()), + Err(err) => Err(err.map_src(|s| s.as_ref())), + } + } + + /// Interprets the prefix of the given `source` as a DST `&Self` with length + /// equal to `count`. + /// + /// This method attempts to return a reference to the prefix of `source` + /// interpreted as a `Self` with `count` trailing elements, and a reference + /// to the remaining bytes. If there are insufficient bytes, or if `source` + /// is not appropriately aligned, this returns `Err`. If [`Self: + /// Unaligned`][self-unaligned], you can [infallibly discard the alignment + /// error][size-error-from]. + /// + /// [self-unaligned]: Unaligned + /// [size-error-from]: error/struct.SizeError.html#method.from-1 + /// + /// # Examples + /// + /// ``` + /// use zerocopy::FromBytes; + /// # use zerocopy_derive::*; + /// + /// # #[derive(Debug, PartialEq, Eq)] + /// #[derive(FromBytes, Immutable)] + /// #[repr(C)] + /// struct Pixel { + /// r: u8, + /// g: u8, + /// b: u8, + /// a: u8, + /// } + /// + /// // These are more bytes than are needed to encode two `Pixel`s. 
+ /// let bytes = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9][..]; + /// + /// let (pixels, suffix) = <[Pixel]>::ref_from_prefix_with_elems(bytes, 2).unwrap(); + /// + /// assert_eq!(pixels, &[ + /// Pixel { r: 0, g: 1, b: 2, a: 3 }, + /// Pixel { r: 4, g: 5, b: 6, a: 7 }, + /// ]); + /// + /// assert_eq!(suffix, &[8, 9]); + /// ``` + /// + /// Since an explicit `count` is provided, this method supports types with + /// zero-sized trailing slice elements. Methods such as [`ref_from_prefix`] + /// which do not take an explicit count do not support such types. + /// + /// ``` + /// use zerocopy::*; + /// # use zerocopy_derive::*; + /// + /// #[derive(FromBytes, Immutable, KnownLayout)] + /// #[repr(C)] + /// struct ZSTy { + /// leading_sized: [u8; 2], + /// trailing_dst: [()], + /// } + /// + /// let src = &[85, 85][..]; + /// let (zsty, _) = ZSTy::ref_from_prefix_with_elems(src, 42).unwrap(); + /// assert_eq!(zsty.trailing_dst.len(), 42); + /// ``` + /// + /// [`ref_from_prefix`]: FromBytes::ref_from_prefix + #[must_use = "has no side effects"] + #[inline] + fn ref_from_prefix_with_elems( + source: &[u8], + count: usize, + ) -> Result<(&Self, &[u8]), CastError<&[u8], Self>> + where + Self: KnownLayout<PointerMetadata = usize> + Immutable, + { + ref_from_prefix_suffix(source, Some(count), CastType::Prefix) + } + + /// Interprets the suffix of the given `source` as a DST `&Self` with length + /// equal to `count`. + /// + /// This method attempts to return a reference to the suffix of `source` + /// interpreted as a `Self` with `count` trailing elements, and a reference + /// to the preceding bytes. If there are insufficient bytes, or if that + /// suffix of `source` is not appropriately aligned, this returns `Err`. If + /// [`Self: Unaligned`][self-unaligned], you can [infallibly discard the + /// alignment error][size-error-from]. + /// + /// [self-unaligned]: Unaligned + /// [size-error-from]: error/struct.SizeError.html#method.from-1 + /// + /// # Examples + /// + /// ``` + /// use zerocopy::FromBytes; + /// # use zerocopy_derive::*; + /// + /// # #[derive(Debug, PartialEq, Eq)] + /// #[derive(FromBytes, Immutable)] + /// #[repr(C)] + /// struct Pixel { + /// r: u8, + /// g: u8, + /// b: u8, + /// a: u8, + /// } + /// + /// // These are more bytes than are needed to encode two `Pixel`s. + /// let bytes = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9][..]; + /// + /// let (prefix, pixels) = <[Pixel]>::ref_from_suffix_with_elems(bytes, 2).unwrap(); + /// + /// assert_eq!(prefix, &[0, 1]); + /// + /// assert_eq!(pixels, &[ + /// Pixel { r: 2, g: 3, b: 4, a: 5 }, + /// Pixel { r: 6, g: 7, b: 8, a: 9 }, + /// ]); + /// ``` + /// + /// Since an explicit `count` is provided, this method supports types with + /// zero-sized trailing slice elements. Methods such as [`ref_from_suffix`] + /// which do not take an explicit count do not support such types. 
+ /// + /// ``` + /// use zerocopy::*; + /// # use zerocopy_derive::*; + /// + /// #[derive(FromBytes, Immutable, KnownLayout)] + /// #[repr(C)] + /// struct ZSTy { + /// leading_sized: [u8; 2], + /// trailing_dst: [()], + /// } + /// + /// let src = &[85, 85][..]; + /// let (_, zsty) = ZSTy::ref_from_suffix_with_elems(src, 42).unwrap(); + /// assert_eq!(zsty.trailing_dst.len(), 42); + /// ``` + /// + /// [`ref_from_suffix`]: FromBytes::ref_from_suffix + #[must_use = "has no side effects"] + #[inline] + fn ref_from_suffix_with_elems( + source: &[u8], + count: usize, + ) -> Result<(&[u8], &Self), CastError<&[u8], Self>> + where + Self: KnownLayout<PointerMetadata = usize> + Immutable, + { + ref_from_prefix_suffix(source, Some(count), CastType::Suffix).map(swap) + } + + /// Interprets the given `source` as a `&mut Self` with a DST length equal + /// to `count`. + /// + /// This method attempts to return a reference to `source` interpreted as a + /// `Self` with `count` trailing elements. If the length of `source` is not + /// equal to the size of `Self` with `count` elements, or if `source` is not + /// appropriately aligned, this returns `Err`. If [`Self: + /// Unaligned`][self-unaligned], you can [infallibly discard the alignment + /// error][size-error-from]. + /// + /// [self-unaligned]: Unaligned + /// [size-error-from]: error/struct.SizeError.html#method.from-1 + /// + /// # Examples + /// + /// ``` + /// use zerocopy::FromBytes; + /// # use zerocopy_derive::*; + /// + /// # #[derive(Debug, PartialEq, Eq)] + /// #[derive(KnownLayout, FromBytes, IntoBytes, Immutable)] + /// #[repr(C)] + /// struct Pixel { + /// r: u8, + /// g: u8, + /// b: u8, + /// a: u8, + /// } + /// + /// let bytes = &mut [0, 1, 2, 3, 4, 5, 6, 7][..]; + /// + /// let pixels = <[Pixel]>::mut_from_bytes_with_elems(bytes, 2).unwrap(); + /// + /// assert_eq!(pixels, &[ + /// Pixel { r: 0, g: 1, b: 2, a: 3 }, + /// Pixel { r: 4, g: 5, b: 6, a: 7 }, + /// ]); + /// + /// pixels[1] = Pixel { r: 0, g: 0, b: 0, a: 0 }; + /// + /// assert_eq!(bytes, [0, 1, 2, 3, 0, 0, 0, 0]); + /// ``` + /// + /// Since an explicit `count` is provided, this method supports types with + /// zero-sized trailing slice elements. Methods such as [`mut_from`] which + /// do not take an explicit count do not support such types. + /// + /// ``` + /// use zerocopy::*; + /// # use zerocopy_derive::*; + /// + /// #[derive(FromBytes, IntoBytes, Immutable, KnownLayout)] + /// #[repr(C, packed)] + /// struct ZSTy { + /// leading_sized: [u8; 2], + /// trailing_dst: [()], + /// } + /// + /// let src = &mut [85, 85][..]; + /// let zsty = ZSTy::mut_from_bytes_with_elems(src, 42).unwrap(); + /// assert_eq!(zsty.trailing_dst.len(), 42); + /// ``` + /// + /// [`mut_from`]: FromBytes::mut_from + #[must_use = "has no side effects"] + #[inline] + fn mut_from_bytes_with_elems( + source: &mut [u8], + count: usize, + ) -> Result<&mut Self, CastError<&mut [u8], Self>> + where + Self: IntoBytes + KnownLayout<PointerMetadata = usize> + Immutable, + { + let source = Ptr::from_mut(source); + let maybe_slf = source.try_cast_into_no_leftover::<_, BecauseImmutable>(Some(count)); + match maybe_slf { + Ok(slf) => Ok(slf + .recall_validity::<_, (_, (_, (BecauseExclusive, BecauseExclusive)))>() + .as_mut()), + Err(err) => Err(err.map_src(|s| s.as_mut())), + } + } + + /// Interprets the prefix of the given `source` as a `&mut Self` with DST + /// length equal to `count`. 
+ /// + /// This method attempts to return a reference to the prefix of `source` + /// interpreted as a `Self` with `count` trailing elements, and a reference + /// to the preceding bytes. If there are insufficient bytes, or if `source` + /// is not appropriately aligned, this returns `Err`. If [`Self: + /// Unaligned`][self-unaligned], you can [infallibly discard the alignment + /// error][size-error-from]. + /// + /// [self-unaligned]: Unaligned + /// [size-error-from]: error/struct.SizeError.html#method.from-1 + /// + /// # Examples + /// + /// ``` + /// use zerocopy::FromBytes; + /// # use zerocopy_derive::*; + /// + /// # #[derive(Debug, PartialEq, Eq)] + /// #[derive(KnownLayout, FromBytes, IntoBytes, Immutable)] + /// #[repr(C)] + /// struct Pixel { + /// r: u8, + /// g: u8, + /// b: u8, + /// a: u8, + /// } + /// + /// // These are more bytes than are needed to encode two `Pixel`s. + /// let bytes = &mut [0, 1, 2, 3, 4, 5, 6, 7, 8, 9][..]; + /// + /// let (pixels, suffix) = <[Pixel]>::mut_from_prefix_with_elems(bytes, 2).unwrap(); + /// + /// assert_eq!(pixels, &[ + /// Pixel { r: 0, g: 1, b: 2, a: 3 }, + /// Pixel { r: 4, g: 5, b: 6, a: 7 }, + /// ]); + /// + /// assert_eq!(suffix, &[8, 9]); + /// + /// pixels[1] = Pixel { r: 0, g: 0, b: 0, a: 0 }; + /// suffix.fill(1); + /// + /// assert_eq!(bytes, [0, 1, 2, 3, 0, 0, 0, 0, 1, 1]); + /// ``` + /// + /// Since an explicit `count` is provided, this method supports types with + /// zero-sized trailing slice elements. Methods such as [`mut_from_prefix`] + /// which do not take an explicit count do not support such types. + /// + /// ``` + /// use zerocopy::*; + /// # use zerocopy_derive::*; + /// + /// #[derive(FromBytes, IntoBytes, Immutable, KnownLayout)] + /// #[repr(C, packed)] + /// struct ZSTy { + /// leading_sized: [u8; 2], + /// trailing_dst: [()], + /// } + /// + /// let src = &mut [85, 85][..]; + /// let (zsty, _) = ZSTy::mut_from_prefix_with_elems(src, 42).unwrap(); + /// assert_eq!(zsty.trailing_dst.len(), 42); + /// ``` + /// + /// [`mut_from_prefix`]: FromBytes::mut_from_prefix + #[must_use = "has no side effects"] + #[inline] + fn mut_from_prefix_with_elems( + source: &mut [u8], + count: usize, + ) -> Result<(&mut Self, &mut [u8]), CastError<&mut [u8], Self>> + where + Self: IntoBytes + KnownLayout<PointerMetadata = usize>, + { + mut_from_prefix_suffix(source, Some(count), CastType::Prefix) + } + + /// Interprets the suffix of the given `source` as a `&mut Self` with DST + /// length equal to `count`. + /// + /// This method attempts to return a reference to the suffix of `source` + /// interpreted as a `Self` with `count` trailing elements, and a reference + /// to the remaining bytes. If there are insufficient bytes, or if that + /// suffix of `source` is not appropriately aligned, this returns `Err`. If + /// [`Self: Unaligned`][self-unaligned], you can [infallibly discard the + /// alignment error][size-error-from]. + /// + /// [self-unaligned]: Unaligned + /// [size-error-from]: error/struct.SizeError.html#method.from-1 + /// + /// # Examples + /// + /// ``` + /// use zerocopy::FromBytes; + /// # use zerocopy_derive::*; + /// + /// # #[derive(Debug, PartialEq, Eq)] + /// #[derive(FromBytes, IntoBytes, Immutable)] + /// #[repr(C)] + /// struct Pixel { + /// r: u8, + /// g: u8, + /// b: u8, + /// a: u8, + /// } + /// + /// // These are more bytes than are needed to encode two `Pixel`s. 
+ /// let bytes = &mut [0, 1, 2, 3, 4, 5, 6, 7, 8, 9][..]; + /// + /// let (prefix, pixels) = <[Pixel]>::mut_from_suffix_with_elems(bytes, 2).unwrap(); + /// + /// assert_eq!(prefix, &[0, 1]); + /// + /// assert_eq!(pixels, &[ + /// Pixel { r: 2, g: 3, b: 4, a: 5 }, + /// Pixel { r: 6, g: 7, b: 8, a: 9 }, + /// ]); + /// + /// prefix.fill(9); + /// pixels[1] = Pixel { r: 0, g: 0, b: 0, a: 0 }; + /// + /// assert_eq!(bytes, [9, 9, 2, 3, 4, 5, 0, 0, 0, 0]); + /// ``` + /// + /// Since an explicit `count` is provided, this method supports types with + /// zero-sized trailing slice elements. Methods such as [`mut_from_suffix`] + /// which do not take an explicit count do not support such types. + /// + /// ``` + /// use zerocopy::*; + /// # use zerocopy_derive::*; + /// + /// #[derive(FromBytes, IntoBytes, Immutable, KnownLayout)] + /// #[repr(C, packed)] + /// struct ZSTy { + /// leading_sized: [u8; 2], + /// trailing_dst: [()], + /// } + /// + /// let src = &mut [85, 85][..]; + /// let (_, zsty) = ZSTy::mut_from_suffix_with_elems(src, 42).unwrap(); + /// assert_eq!(zsty.trailing_dst.len(), 42); + /// ``` + /// + /// [`mut_from_suffix`]: FromBytes::mut_from_suffix + #[must_use = "has no side effects"] + #[inline] + fn mut_from_suffix_with_elems( + source: &mut [u8], + count: usize, + ) -> Result<(&mut [u8], &mut Self), CastError<&mut [u8], Self>> + where + Self: IntoBytes + KnownLayout<PointerMetadata = usize>, + { + mut_from_prefix_suffix(source, Some(count), CastType::Suffix).map(swap) + } + + /// Reads a copy of `Self` from the given `source`. + /// + /// If `source.len() != size_of::<Self>()`, `read_from_bytes` returns `Err`. + /// + /// # Examples + /// + /// ``` + /// use zerocopy::FromBytes; + /// # use zerocopy_derive::*; + /// + /// #[derive(FromBytes)] + /// #[repr(C)] + /// struct PacketHeader { + /// src_port: [u8; 2], + /// dst_port: [u8; 2], + /// length: [u8; 2], + /// checksum: [u8; 2], + /// } + /// + /// // These bytes encode a `PacketHeader`. + /// let bytes = &[0, 1, 2, 3, 4, 5, 6, 7][..]; + /// + /// let header = PacketHeader::read_from_bytes(bytes).unwrap(); + /// + /// assert_eq!(header.src_port, [0, 1]); + /// assert_eq!(header.dst_port, [2, 3]); + /// assert_eq!(header.length, [4, 5]); + /// assert_eq!(header.checksum, [6, 7]); + /// ``` + #[must_use = "has no side effects"] + #[inline] + fn read_from_bytes(source: &[u8]) -> Result<Self, SizeError<&[u8], Self>> + where + Self: Sized, + { + match Ref::<_, Unalign<Self>>::sized_from(source) { + Ok(r) => Ok(Ref::read(&r).into_inner()), + Err(CastError::Size(e)) => Err(e.with_dst()), + Err(CastError::Alignment(_)) => { + // SAFETY: `Unalign<Self>` is trivially aligned, so + // `Ref::sized_from` cannot fail due to unmet alignment + // requirements. + unsafe { core::hint::unreachable_unchecked() } + } + Err(CastError::Validity(i)) => match i {}, + } + } + + /// Reads a copy of `Self` from the prefix of the given `source`. + /// + /// This attempts to read a `Self` from the first `size_of::<Self>()` bytes + /// of `source`, returning that `Self` and any remaining bytes. If + /// `source.len() < size_of::<Self>()`, it returns `Err`. + /// + /// # Examples + /// + /// ``` + /// use zerocopy::FromBytes; + /// # use zerocopy_derive::*; + /// + /// #[derive(FromBytes)] + /// #[repr(C)] + /// struct PacketHeader { + /// src_port: [u8; 2], + /// dst_port: [u8; 2], + /// length: [u8; 2], + /// checksum: [u8; 2], + /// } + /// + /// // These are more bytes than are needed to encode a `PacketHeader`. 
+ /// let bytes = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9][..]; + /// + /// let (header, body) = PacketHeader::read_from_prefix(bytes).unwrap(); + /// + /// assert_eq!(header.src_port, [0, 1]); + /// assert_eq!(header.dst_port, [2, 3]); + /// assert_eq!(header.length, [4, 5]); + /// assert_eq!(header.checksum, [6, 7]); + /// assert_eq!(body, [8, 9]); + /// ``` + #[must_use = "has no side effects"] + #[inline] + fn read_from_prefix(source: &[u8]) -> Result<(Self, &[u8]), SizeError<&[u8], Self>> + where + Self: Sized, + { + match Ref::<_, Unalign<Self>>::sized_from_prefix(source) { + Ok((r, suffix)) => Ok((Ref::read(&r).into_inner(), suffix)), + Err(CastError::Size(e)) => Err(e.with_dst()), + Err(CastError::Alignment(_)) => { + // SAFETY: `Unalign<Self>` is trivially aligned, so + // `Ref::sized_from_prefix` cannot fail due to unmet alignment + // requirements. + unsafe { core::hint::unreachable_unchecked() } + } + Err(CastError::Validity(i)) => match i {}, + } + } + + /// Reads a copy of `Self` from the suffix of the given `source`. + /// + /// This attempts to read a `Self` from the last `size_of::<Self>()` bytes + /// of `source`, returning that `Self` and any preceding bytes. If + /// `source.len() < size_of::<Self>()`, it returns `Err`. + /// + /// # Examples + /// + /// ``` + /// use zerocopy::FromBytes; + /// # use zerocopy_derive::*; + /// + /// #[derive(FromBytes)] + /// #[repr(C)] + /// struct PacketTrailer { + /// frame_check_sequence: [u8; 4], + /// } + /// + /// // These are more bytes than are needed to encode a `PacketTrailer`. + /// let bytes = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9][..]; + /// + /// let (prefix, trailer) = PacketTrailer::read_from_suffix(bytes).unwrap(); + /// + /// assert_eq!(prefix, [0, 1, 2, 3, 4, 5]); + /// assert_eq!(trailer.frame_check_sequence, [6, 7, 8, 9]); + /// ``` + #[must_use = "has no side effects"] + #[inline] + fn read_from_suffix(source: &[u8]) -> Result<(&[u8], Self), SizeError<&[u8], Self>> + where + Self: Sized, + { + match Ref::<_, Unalign<Self>>::sized_from_suffix(source) { + Ok((prefix, r)) => Ok((prefix, Ref::read(&r).into_inner())), + Err(CastError::Size(e)) => Err(e.with_dst()), + Err(CastError::Alignment(_)) => { + // SAFETY: `Unalign<Self>` is trivially aligned, so + // `Ref::sized_from_suffix` cannot fail due to unmet alignment + // requirements. + unsafe { core::hint::unreachable_unchecked() } + } + Err(CastError::Validity(i)) => match i {}, + } + } + + /// Reads a copy of `self` from an `io::Read`. + /// + /// This is useful for interfacing with operating system byte sinks (files, + /// sockets, etc.). 
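Since `std::io::Read` is also implemented for byte slices, `read_from_io` works equally well against an in-memory buffer. Below is a minimal, runnable sketch (separate from the vendored source above; it assumes zerocopy 0.8 with the `derive` and `std` features enabled, and `Header` is a hypothetical type used only for illustration):

```rust
use zerocopy::FromBytes;

// Hypothetical fixed-size header used only for this sketch.
#[derive(FromBytes)]
#[repr(C)]
struct Header {
    magic: [u8; 4],
    payload_len: [u8; 4],
}

fn main() -> std::io::Result<()> {
    // `&[u8]` implements `std::io::Read`, so an in-memory buffer works as the source.
    let mut src: &[u8] = &[1, 2, 3, 4, 8, 0, 0, 0, 0xAA, 0xBB];
    let header = Header::read_from_io(&mut src)?;
    assert_eq!(header.magic, [1, 2, 3, 4]);
    assert_eq!(header.payload_len, [8, 0, 0, 0]);
    // The reader was advanced by exactly `size_of::<Header>()` (8) bytes.
    assert_eq!(src, [0xAA, 0xBB]);
    Ok(())
}
```

Because each call consumes exactly `size_of::<Self>()` bytes, reading several fixed-size records from one stream is just a matter of calling it repeatedly.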
+ /// + /// # Examples + /// + /// ```no_run + /// use zerocopy::{byteorder::big_endian::*, FromBytes}; + /// use std::fs::File; + /// # use zerocopy_derive::*; + /// + /// #[derive(FromBytes)] + /// #[repr(C)] + /// struct BitmapFileHeader { + /// signature: [u8; 2], + /// size: U32, + /// reserved: U64, + /// offset: U64, + /// } + /// + /// let mut file = File::open("image.bin").unwrap(); + /// let header = BitmapFileHeader::read_from_io(&mut file).unwrap(); + /// ``` + #[cfg(feature = "std")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "std")))] + #[inline(always)] + fn read_from_io<R>(mut src: R) -> io::Result<Self> + where + Self: Sized, + R: io::Read, + { + // NOTE(#2319, #2320): We do `buf.zero()` separately rather than + // constructing `let buf = CoreMaybeUninit::zeroed()` because, if `Self` + // contains padding bytes, then a typed copy of `CoreMaybeUninit<Self>` + // will not necessarily preserve zeros written to those padding byte + // locations, and so `buf` could contain uninitialized bytes. + let mut buf = CoreMaybeUninit::<Self>::uninit(); + buf.zero(); + + let ptr = Ptr::from_mut(&mut buf); + // SAFETY: After `buf.zero()`, `buf` consists entirely of initialized, + // zeroed bytes. Since `MaybeUninit` has no validity requirements, `ptr` + // cannot be used to write values which will violate `buf`'s bit + // validity. Since `ptr` has `Exclusive` aliasing, nothing other than + // `ptr` may be used to mutate `ptr`'s referent, and so its bit validity + // cannot be violated even though `buf` may have more permissive bit + // validity than `ptr`. + let ptr = unsafe { ptr.assume_validity::<invariant::Initialized>() }; + let ptr = ptr.as_bytes::<BecauseExclusive>(); + src.read_exact(ptr.as_mut())?; + // SAFETY: `buf` entirely consists of initialized bytes, and `Self` is + // `FromBytes`. 
+ Ok(unsafe { buf.assume_init() }) + } + + #[deprecated(since = "0.8.0", note = "renamed to `FromBytes::ref_from_bytes`")] + #[doc(hidden)] + #[must_use = "has no side effects"] + #[inline(always)] + fn ref_from(source: &[u8]) -> Option<&Self> + where + Self: KnownLayout + Immutable, + { + Self::ref_from_bytes(source).ok() + } + + #[deprecated(since = "0.8.0", note = "renamed to `FromBytes::mut_from_bytes`")] + #[doc(hidden)] + #[must_use = "has no side effects"] + #[inline(always)] + fn mut_from(source: &mut [u8]) -> Option<&mut Self> + where + Self: KnownLayout + IntoBytes, + { + Self::mut_from_bytes(source).ok() + } + + #[deprecated(since = "0.8.0", note = "renamed to `FromBytes::ref_from_prefix_with_elems`")] + #[doc(hidden)] + #[must_use = "has no side effects"] + #[inline(always)] + fn slice_from_prefix(source: &[u8], count: usize) -> Option<(&[Self], &[u8])> + where + Self: Sized + Immutable, + { + <[Self]>::ref_from_prefix_with_elems(source, count).ok() + } + + #[deprecated(since = "0.8.0", note = "renamed to `FromBytes::ref_from_suffix_with_elems`")] + #[doc(hidden)] + #[must_use = "has no side effects"] + #[inline(always)] + fn slice_from_suffix(source: &[u8], count: usize) -> Option<(&[u8], &[Self])> + where + Self: Sized + Immutable, + { + <[Self]>::ref_from_suffix_with_elems(source, count).ok() + } + + #[deprecated(since = "0.8.0", note = "renamed to `FromBytes::mut_from_prefix_with_elems`")] + #[doc(hidden)] + #[must_use = "has no side effects"] + #[inline(always)] + fn mut_slice_from_prefix(source: &mut [u8], count: usize) -> Option<(&mut [Self], &mut [u8])> + where + Self: Sized + IntoBytes, + { + <[Self]>::mut_from_prefix_with_elems(source, count).ok() + } + + #[deprecated(since = "0.8.0", note = "renamed to `FromBytes::mut_from_suffix_with_elems`")] + #[doc(hidden)] + #[must_use = "has no side effects"] + #[inline(always)] + fn mut_slice_from_suffix(source: &mut [u8], count: usize) -> Option<(&mut [u8], &mut [Self])> + where + Self: Sized + IntoBytes, + { + <[Self]>::mut_from_suffix_with_elems(source, count).ok() + } + + #[deprecated(since = "0.8.0", note = "renamed to `FromBytes::read_from_bytes`")] + #[doc(hidden)] + #[must_use = "has no side effects"] + #[inline(always)] + fn read_from(source: &[u8]) -> Option<Self> + where + Self: Sized, + { + Self::read_from_bytes(source).ok() + } +} + +/// Interprets the given affix of the given bytes as a `&Self`. +/// +/// This method computes the largest possible size of `Self` that can fit in the +/// prefix or suffix bytes of `source`, then attempts to return both a reference +/// to those bytes interpreted as a `Self`, and a reference to the excess bytes. +/// If there are insufficient bytes, or if that affix of `source` is not +/// appropriately aligned, this returns `Err`. +#[inline(always)] +fn ref_from_prefix_suffix<T: FromBytes + KnownLayout + Immutable + ?Sized>( + source: &[u8], + meta: Option<T::PointerMetadata>, + cast_type: CastType, +) -> Result<(&T, &[u8]), CastError<&[u8], T>> { + let (slf, prefix_suffix) = Ptr::from_ref(source) + .try_cast_into::<_, BecauseImmutable>(cast_type, meta) + .map_err(|err| err.map_src(|s| s.as_ref()))?; + Ok((slf.recall_validity().as_ref(), prefix_suffix.as_ref())) +} + +/// Interprets the given affix of the given bytes as a `&mut Self` without +/// copying. 
+/// +/// This method computes the largest possible size of `Self` that can fit in the +/// prefix or suffix bytes of `source`, then attempts to return both a reference +/// to those bytes interpreted as a `Self`, and a reference to the excess bytes. +/// If there are insufficient bytes, or if that affix of `source` is not +/// appropriately aligned, this returns `Err`. +#[inline(always)] +fn mut_from_prefix_suffix<T: FromBytes + IntoBytes + KnownLayout + ?Sized>( + source: &mut [u8], + meta: Option<T::PointerMetadata>, + cast_type: CastType, +) -> Result<(&mut T, &mut [u8]), CastError<&mut [u8], T>> { + let (slf, prefix_suffix) = Ptr::from_mut(source) + .try_cast_into::<_, BecauseExclusive>(cast_type, meta) + .map_err(|err| err.map_src(|s| s.as_mut()))?; + Ok((slf.recall_validity::<_, (_, (_, _))>().as_mut(), prefix_suffix.as_mut())) +} + +/// Analyzes whether a type is [`IntoBytes`]. +/// +/// This derive analyzes, at compile time, whether the annotated type satisfies +/// the [safety conditions] of `IntoBytes` and implements `IntoBytes` if it is +/// sound to do so. This derive can be applied to structs and enums (see below +/// for union support); e.g.: +/// +/// ``` +/// # use zerocopy_derive::{IntoBytes}; +/// #[derive(IntoBytes)] +/// #[repr(C)] +/// struct MyStruct { +/// # /* +/// ... +/// # */ +/// } +/// +/// #[derive(IntoBytes)] +/// #[repr(u8)] +/// enum MyEnum { +/// # Variant, +/// # /* +/// ... +/// # */ +/// } +/// ``` +/// +/// [safety conditions]: trait@IntoBytes#safety +/// +/// # Error Messages +/// +/// On Rust toolchains prior to 1.78.0, due to the way that the custom derive +/// for `IntoBytes` is implemented, you may get an error like this: +/// +/// ```text +/// error[E0277]: the trait bound `(): PaddingFree<Foo, true>` is not satisfied +/// --> lib.rs:23:10 +/// | +/// 1 | #[derive(IntoBytes)] +/// | ^^^^^^^^^ the trait `PaddingFree<Foo, true>` is not implemented for `()` +/// | +/// = help: the following implementations were found: +/// <() as PaddingFree<T, false>> +/// ``` +/// +/// This error indicates that the type being annotated has padding bytes, which +/// is illegal for `IntoBytes` types. Consider reducing the alignment of some +/// fields by using types in the [`byteorder`] module, wrapping field types in +/// [`Unalign`], adding explicit struct fields where those padding bytes would +/// be, or using `#[repr(packed)]`. See the Rust Reference's page on [type +/// layout] for more information about type layout and padding. +/// +/// [type layout]: https://doc.rust-lang.org/reference/type-layout.html +/// +/// # Unions +/// +/// Currently, union bit validity is [up in the air][union-validity], and so +/// zerocopy does not support `#[derive(IntoBytes)]` on unions by default. +/// However, implementing `IntoBytes` on a union type is likely sound on all +/// existing Rust toolchains - it's just that it may become unsound in the +/// future. You can opt-in to `#[derive(IntoBytes)]` support on unions by +/// passing the unstable `zerocopy_derive_union_into_bytes` cfg: +/// +/// ```shell +/// $ RUSTFLAGS='--cfg zerocopy_derive_union_into_bytes' cargo build +/// ``` +/// +/// However, it is your responsibility to ensure that this derive is sound on +/// the specific versions of the Rust toolchain you are using! We make no +/// stability or soundness guarantees regarding this cfg, and may remove it at +/// any point. 
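For illustration only (not part of the vendored source): under that cfg, a union whose fields are all `IntoBytes` and all the same size can opt in. The sketch below assumes zerocopy 0.8 with the `derive` feature and compiles only when the crate is built with `RUSTFLAGS='--cfg zerocopy_derive_union_into_bytes'`; `RegisterValue` is a hypothetical type.

```rust
use zerocopy::{Immutable, IntoBytes};

// Both fields are 4 bytes and `IntoBytes`, so the union has no padding and
// every byte of a value is always initialized.
#[derive(IntoBytes, Immutable)]
#[repr(C)]
union RegisterValue {
    raw: [u8; 4],
    word: u32,
}

fn main() {
    // `to_le()` makes the byte view identical on little- and big-endian targets.
    let r = RegisterValue { word: 0x0403_0201u32.to_le() };
    assert_eq!(r.as_bytes(), [1, 2, 3, 4]);
}
```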
+///
+/// We are actively working with Rust to stabilize the necessary language
+/// guarantees to support this in a forwards-compatible way, which will enable
+/// us to remove the cfg gate. As part of this effort, we need to know how much
+/// demand there is for this feature. If you would like to use `IntoBytes` on
+/// unions, [please let us know][discussion].
+///
+/// [union-validity]: https://github.com/rust-lang/unsafe-code-guidelines/issues/438
+/// [discussion]: https://github.com/google/zerocopy/discussions/1802
+///
+/// # Analysis
+///
+/// *This section describes, roughly, the analysis performed by this derive to
+/// determine whether it is sound to implement `IntoBytes` for a given type.
+/// Unless you are modifying the implementation of this derive, or attempting to
+/// manually implement `IntoBytes` for a type yourself, you don't need to read
+/// this section.*
+///
+/// If a type has the following properties, then this derive can implement
+/// `IntoBytes` for that type:
+///
+/// - If the type is a struct, its fields must be [`IntoBytes`]. Additionally:
+///     - if the type is `repr(transparent)` or `repr(packed)`, it is
+///       [`IntoBytes`] if its fields are [`IntoBytes`]; else,
+///     - if the type is `repr(C)` with at most one field, it is [`IntoBytes`]
+///       if its field is [`IntoBytes`]; else,
+///     - if the type has no generic parameters, it is [`IntoBytes`] if the type
+///       is sized and has no padding bytes; else,
+///     - if the type is `repr(C)`, its fields must be [`Unaligned`].
+/// - If the type is an enum:
+///     - It must have a defined representation (`repr`s `C`, `u8`, `u16`, `u32`,
+///       `u64`, `usize`, `i8`, `i16`, `i32`, `i64`, or `isize`).
+///     - It must have no padding bytes.
+///     - Its fields must be [`IntoBytes`].
+///
+/// This analysis is subject to change. Unsafe code may *only* rely on the
+/// documented [safety conditions] of `IntoBytes`, and must *not* rely on the
+/// implementation details of this derive.
+///
+/// [Rust Reference]: https://doc.rust-lang.org/reference/type-layout.html
+#[cfg(any(feature = "derive", test))]
+#[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))]
+pub use zerocopy_derive::IntoBytes;
+
+/// Types that can be converted to an immutable slice of initialized bytes.
+///
+/// Any `IntoBytes` type can be converted to a slice of initialized bytes of the
+/// same size. This is useful for efficiently serializing structured data as raw
+/// bytes.
+///
+/// # Implementation
+///
+/// **Do not implement this trait yourself!** Instead, use
+/// [`#[derive(IntoBytes)]`][derive]; e.g.:
+///
+/// ```
+/// # use zerocopy_derive::IntoBytes;
+/// #[derive(IntoBytes)]
+/// #[repr(C)]
+/// struct MyStruct {
+/// # /*
+///     ...
+/// # */
+/// }
+///
+/// #[derive(IntoBytes)]
+/// #[repr(u8)]
+/// enum MyEnum {
+/// # Variant0,
+/// # /*
+///     ...
+/// # */
+/// }
+/// ```
+///
+/// This derive performs a sophisticated, compile-time safety analysis to
+/// determine whether a type is `IntoBytes`. See the [derive
+/// documentation][derive] for guidance on how to interpret error messages
+/// produced by the derive's analysis.
+///
+/// # Safety
+///
+/// *This section describes what is required in order for `T: IntoBytes`, and
+/// what unsafe code may assume of such types.
If you don't plan on implementing +/// `IntoBytes` manually, and you don't plan on writing unsafe code that +/// operates on `IntoBytes` types, then you don't need to read this section.* +/// +/// If `T: IntoBytes`, then unsafe code may assume that it is sound to treat any +/// `t: T` as an immutable `[u8]` of length `size_of_val(t)`. If a type is +/// marked as `IntoBytes` which violates this contract, it may cause undefined +/// behavior. +/// +/// `#[derive(IntoBytes)]` only permits [types which satisfy these +/// requirements][derive-analysis]. +/// +#[cfg_attr( + feature = "derive", + doc = "[derive]: zerocopy_derive::IntoBytes", + doc = "[derive-analysis]: zerocopy_derive::IntoBytes#analysis" +)] +#[cfg_attr( + not(feature = "derive"), + doc = concat!("[derive]: https://docs.rs/zerocopy/", env!("CARGO_PKG_VERSION"), "/zerocopy/derive.IntoBytes.html"), + doc = concat!("[derive-analysis]: https://docs.rs/zerocopy/", env!("CARGO_PKG_VERSION"), "/zerocopy/derive.IntoBytes.html#analysis"), +)] +#[cfg_attr( + not(no_zerocopy_diagnostic_on_unimplemented_1_78_0), + diagnostic::on_unimplemented(note = "Consider adding `#[derive(IntoBytes)]` to `{Self}`") +)] +pub unsafe trait IntoBytes { + // The `Self: Sized` bound makes it so that this function doesn't prevent + // `IntoBytes` from being object safe. Note that other `IntoBytes` methods + // prevent object safety, but those provide a benefit in exchange for object + // safety. If at some point we remove those methods, change their type + // signatures, or move them out of this trait so that `IntoBytes` is object + // safe again, it's important that this function not prevent object safety. + #[doc(hidden)] + fn only_derive_is_allowed_to_implement_this_trait() + where + Self: Sized; + + /// Gets the bytes of this value. + /// + /// # Examples + /// + /// ``` + /// use zerocopy::IntoBytes; + /// # use zerocopy_derive::*; + /// + /// #[derive(IntoBytes, Immutable)] + /// #[repr(C)] + /// struct PacketHeader { + /// src_port: [u8; 2], + /// dst_port: [u8; 2], + /// length: [u8; 2], + /// checksum: [u8; 2], + /// } + /// + /// let header = PacketHeader { + /// src_port: [0, 1], + /// dst_port: [2, 3], + /// length: [4, 5], + /// checksum: [6, 7], + /// }; + /// + /// let bytes = header.as_bytes(); + /// + /// assert_eq!(bytes, [0, 1, 2, 3, 4, 5, 6, 7]); + /// ``` + #[must_use = "has no side effects"] + #[inline(always)] + fn as_bytes(&self) -> &[u8] + where + Self: Immutable, + { + // Note that this method does not have a `Self: Sized` bound; + // `size_of_val` works for unsized values too. + let len = mem::size_of_val(self); + let slf: *const Self = self; + + // SAFETY: + // - `slf.cast::<u8>()` is valid for reads for `len * size_of::<u8>()` + // many bytes because... + // - `slf` is the same pointer as `self`, and `self` is a reference + // which points to an object whose size is `len`. Thus... + // - The entire region of `len` bytes starting at `slf` is contained + // within a single allocation. + // - `slf` is non-null. + // - `slf` is trivially aligned to `align_of::<u8>() == 1`. + // - `Self: IntoBytes` ensures that all of the bytes of `slf` are + // initialized. + // - Since `slf` is derived from `self`, and `self` is an immutable + // reference, the only other references to this memory region that + // could exist are other immutable references, and those don't allow + // mutation. `Self: Immutable` prohibits types which contain + // `UnsafeCell`s, which are the only types for which this rule + // wouldn't be sufficient. 
+ // - The total size of the resulting slice is no larger than + // `isize::MAX` because no allocation produced by safe code can be + // larger than `isize::MAX`. + // + // FIXME(#429): Add references to docs and quotes. + unsafe { slice::from_raw_parts(slf.cast::<u8>(), len) } + } + + /// Gets the bytes of this value mutably. + /// + /// # Examples + /// + /// ``` + /// use zerocopy::IntoBytes; + /// # use zerocopy_derive::*; + /// + /// # #[derive(Eq, PartialEq, Debug)] + /// #[derive(FromBytes, IntoBytes, Immutable)] + /// #[repr(C)] + /// struct PacketHeader { + /// src_port: [u8; 2], + /// dst_port: [u8; 2], + /// length: [u8; 2], + /// checksum: [u8; 2], + /// } + /// + /// let mut header = PacketHeader { + /// src_port: [0, 1], + /// dst_port: [2, 3], + /// length: [4, 5], + /// checksum: [6, 7], + /// }; + /// + /// let bytes = header.as_mut_bytes(); + /// + /// assert_eq!(bytes, [0, 1, 2, 3, 4, 5, 6, 7]); + /// + /// bytes.reverse(); + /// + /// assert_eq!(header, PacketHeader { + /// src_port: [7, 6], + /// dst_port: [5, 4], + /// length: [3, 2], + /// checksum: [1, 0], + /// }); + /// ``` + #[must_use = "has no side effects"] + #[inline(always)] + fn as_mut_bytes(&mut self) -> &mut [u8] + where + Self: FromBytes, + { + // Note that this method does not have a `Self: Sized` bound; + // `size_of_val` works for unsized values too. + let len = mem::size_of_val(self); + let slf: *mut Self = self; + + // SAFETY: + // - `slf.cast::<u8>()` is valid for reads and writes for `len * + // size_of::<u8>()` many bytes because... + // - `slf` is the same pointer as `self`, and `self` is a reference + // which points to an object whose size is `len`. Thus... + // - The entire region of `len` bytes starting at `slf` is contained + // within a single allocation. + // - `slf` is non-null. + // - `slf` is trivially aligned to `align_of::<u8>() == 1`. + // - `Self: IntoBytes` ensures that all of the bytes of `slf` are + // initialized. + // - `Self: FromBytes` ensures that no write to this memory region + // could result in it containing an invalid `Self`. + // - Since `slf` is derived from `self`, and `self` is a mutable + // reference, no other references to this memory region can exist. + // - The total size of the resulting slice is no larger than + // `isize::MAX` because no allocation produced by safe code can be + // larger than `isize::MAX`. + // + // FIXME(#429): Add references to docs and quotes. + unsafe { slice::from_raw_parts_mut(slf.cast::<u8>(), len) } + } + + /// Writes a copy of `self` to `dst`. + /// + /// If `dst.len() != size_of_val(self)`, `write_to` returns `Err`. 
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use zerocopy::IntoBytes;
+    /// # use zerocopy_derive::*;
+    ///
+    /// #[derive(IntoBytes, Immutable)]
+    /// #[repr(C)]
+    /// struct PacketHeader {
+    ///     src_port: [u8; 2],
+    ///     dst_port: [u8; 2],
+    ///     length: [u8; 2],
+    ///     checksum: [u8; 2],
+    /// }
+    ///
+    /// let header = PacketHeader {
+    ///     src_port: [0, 1],
+    ///     dst_port: [2, 3],
+    ///     length: [4, 5],
+    ///     checksum: [6, 7],
+    /// };
+    ///
+    /// let mut bytes = [0, 0, 0, 0, 0, 0, 0, 0];
+    ///
+    /// header.write_to(&mut bytes[..]);
+    ///
+    /// assert_eq!(bytes, [0, 1, 2, 3, 4, 5, 6, 7]);
+    /// ```
+    ///
+    /// If too many or too few target bytes are provided, `write_to` returns
+    /// `Err` and leaves the target bytes unmodified:
+    ///
+    /// ```
+    /// # use zerocopy::IntoBytes;
+    /// # let header = u128::MAX;
+    /// let mut excessive_bytes = &mut [0u8; 128][..];
+    ///
+    /// let write_result = header.write_to(excessive_bytes);
+    ///
+    /// assert!(write_result.is_err());
+    /// assert_eq!(excessive_bytes, [0u8; 128]);
+    /// ```
+    #[must_use = "callers should check the return value to see if the operation succeeded"]
+    #[inline]
+    #[allow(clippy::mut_from_ref)] // False positive: `&self -> &mut [u8]`
+    fn write_to(&self, dst: &mut [u8]) -> Result<(), SizeError<&Self, &mut [u8]>>
+    where
+        Self: Immutable,
+    {
+        let src = self.as_bytes();
+        if dst.len() == src.len() {
+            // SAFETY: Within this branch of the conditional, we have ensured
+            // that `dst.len()` is equal to `src.len()`. Neither the size of the
+            // source nor the size of the destination change between the above
+            // size check and the invocation of `copy_unchecked`.
+            unsafe { util::copy_unchecked(src, dst) }
+            Ok(())
+        } else {
+            Err(SizeError::new(self))
+        }
+    }
+
+    /// Writes a copy of `self` to the prefix of `dst`.
+    ///
+    /// `write_to_prefix` writes `self` to the first `size_of_val(self)` bytes
+    /// of `dst`. If `dst.len() < size_of_val(self)`, it returns `Err`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use zerocopy::IntoBytes;
+    /// # use zerocopy_derive::*;
+    ///
+    /// #[derive(IntoBytes, Immutable)]
+    /// #[repr(C)]
+    /// struct PacketHeader {
+    ///     src_port: [u8; 2],
+    ///     dst_port: [u8; 2],
+    ///     length: [u8; 2],
+    ///     checksum: [u8; 2],
+    /// }
+    ///
+    /// let header = PacketHeader {
+    ///     src_port: [0, 1],
+    ///     dst_port: [2, 3],
+    ///     length: [4, 5],
+    ///     checksum: [6, 7],
+    /// };
+    ///
+    /// let mut bytes = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
+    ///
+    /// header.write_to_prefix(&mut bytes[..]);
+    ///
+    /// assert_eq!(bytes, [0, 1, 2, 3, 4, 5, 6, 7, 0, 0]);
+    /// ```
+    ///
+    /// If insufficient target bytes are provided, `write_to_prefix` returns
+    /// `Err` and leaves the target bytes unmodified:
+    ///
+    /// ```
+    /// # use zerocopy::IntoBytes;
+    /// # let header = u128::MAX;
+    /// let mut insufficient_bytes = &mut [0, 0][..];
+    ///
+    /// let write_result = header.write_to_prefix(insufficient_bytes);
+    ///
+    /// assert!(write_result.is_err());
+    /// assert_eq!(insufficient_bytes, [0, 0]);
+    /// ```
+    #[must_use = "callers should check the return value to see if the operation succeeded"]
+    #[inline]
+    #[allow(clippy::mut_from_ref)] // False positive: `&self -> &mut [u8]`
+    fn write_to_prefix(&self, dst: &mut [u8]) -> Result<(), SizeError<&Self, &mut [u8]>>
+    where
+        Self: Immutable,
+    {
+        let src = self.as_bytes();
+        match dst.get_mut(..src.len()) {
+            Some(dst) => {
+                // SAFETY: Within this branch of the `match`, we have ensured
+                // through fallible subslicing that `dst.len()` is equal to
+                // `src.len()`.
Neither the size of the source nor the size of + // the destination change between the above subslicing operation + // and the invocation of `copy_unchecked`. + unsafe { util::copy_unchecked(src, dst) } + Ok(()) + } + None => Err(SizeError::new(self)), + } + } + + /// Writes a copy of `self` to the suffix of `dst`. + /// + /// `write_to_suffix` writes `self` to the last `size_of_val(self)` bytes of + /// `dst`. If `dst.len() < size_of_val(self)`, it returns `Err`. + /// + /// # Examples + /// + /// ``` + /// use zerocopy::IntoBytes; + /// # use zerocopy_derive::*; + /// + /// #[derive(IntoBytes, Immutable)] + /// #[repr(C)] + /// struct PacketHeader { + /// src_port: [u8; 2], + /// dst_port: [u8; 2], + /// length: [u8; 2], + /// checksum: [u8; 2], + /// } + /// + /// let header = PacketHeader { + /// src_port: [0, 1], + /// dst_port: [2, 3], + /// length: [4, 5], + /// checksum: [6, 7], + /// }; + /// + /// let mut bytes = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]; + /// + /// header.write_to_suffix(&mut bytes[..]); + /// + /// assert_eq!(bytes, [0, 0, 0, 1, 2, 3, 4, 5, 6, 7]); + /// + /// let mut insufficient_bytes = &mut [0, 0][..]; + /// + /// let write_result = header.write_to_suffix(insufficient_bytes); + /// + /// assert!(write_result.is_err()); + /// assert_eq!(insufficient_bytes, [0, 0]); + /// ``` + /// + /// If insufficient target bytes are provided, `write_to_suffix` returns + /// `Err` and leaves the target bytes unmodified: + /// + /// ``` + /// # use zerocopy::IntoBytes; + /// # let header = u128::MAX; + /// let mut insufficient_bytes = &mut [0, 0][..]; + /// + /// let write_result = header.write_to_suffix(insufficient_bytes); + /// + /// assert!(write_result.is_err()); + /// assert_eq!(insufficient_bytes, [0, 0]); + /// ``` + #[must_use = "callers should check the return value to see if the operation succeeded"] + #[inline] + #[allow(clippy::mut_from_ref)] // False positive: `&self -> &mut [u8]` + fn write_to_suffix(&self, dst: &mut [u8]) -> Result<(), SizeError<&Self, &mut [u8]>> + where + Self: Immutable, + { + let src = self.as_bytes(); + let start = if let Some(start) = dst.len().checked_sub(src.len()) { + start + } else { + return Err(SizeError::new(self)); + }; + let dst = if let Some(dst) = dst.get_mut(start..) { + dst + } else { + // get_mut() should never return None here. We return a `SizeError` + // rather than .unwrap() because in the event the branch is not + // optimized away, returning a value is generally lighter-weight + // than panicking. + return Err(SizeError::new(self)); + }; + // SAFETY: Through fallible subslicing of `dst`, we have ensured that + // `dst.len()` is equal to `src.len()`. Neither the size of the source + // nor the size of the destination change between the above subslicing + // operation and the invocation of `copy_unchecked`. + unsafe { + util::copy_unchecked(src, dst); + } + Ok(()) + } + + /// Writes a copy of `self` to an `io::Write`. + /// + /// This is a shorthand for `dst.write_all(self.as_bytes())`, and is useful + /// for interfacing with operating system byte sinks (files, sockets, etc.). 
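`std::io::Write` is also implemented for `Vec<u8>`, so the same call can serialize into an in-memory sink. A minimal, runnable sketch (separate from the vendored source above; it assumes zerocopy 0.8 with the `derive` and `std` features, and `Record` is a hypothetical type used only for illustration):

```rust
use zerocopy::{Immutable, IntoBytes};

// Hypothetical fixed-size record used only for this sketch.
#[derive(IntoBytes, Immutable)]
#[repr(C)]
struct Record {
    id: [u8; 2],
    flags: [u8; 2],
}

fn main() -> std::io::Result<()> {
    let record = Record { id: [0xAB, 0xCD], flags: [0, 1] };
    let mut sink: Vec<u8> = Vec::new();
    // Shorthand for `sink.write_all(record.as_bytes())`.
    record.write_to_io(&mut sink)?;
    assert_eq!(sink, [0xAB, 0xCD, 0, 1]);
    Ok(())
}
```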
+ /// + /// # Examples + /// + /// ```no_run + /// use zerocopy::{byteorder::big_endian::U16, FromBytes, IntoBytes}; + /// use std::fs::File; + /// # use zerocopy_derive::*; + /// + /// #[derive(FromBytes, IntoBytes, Immutable, KnownLayout)] + /// #[repr(C, packed)] + /// struct GrayscaleImage { + /// height: U16, + /// width: U16, + /// pixels: [U16], + /// } + /// + /// let image = GrayscaleImage::ref_from_bytes(&[0, 0, 0, 0][..]).unwrap(); + /// let mut file = File::create("image.bin").unwrap(); + /// image.write_to_io(&mut file).unwrap(); + /// ``` + /// + /// If the write fails, `write_to_io` returns `Err` and a partial write may + /// have occurred; e.g.: + /// + /// ``` + /// # use zerocopy::IntoBytes; + /// + /// let src = u128::MAX; + /// let mut dst = [0u8; 2]; + /// + /// let write_result = src.write_to_io(&mut dst[..]); + /// + /// assert!(write_result.is_err()); + /// assert_eq!(dst, [255, 255]); + /// ``` + #[cfg(feature = "std")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "std")))] + #[inline(always)] + fn write_to_io<W>(&self, mut dst: W) -> io::Result<()> + where + Self: Immutable, + W: io::Write, + { + dst.write_all(self.as_bytes()) + } + + #[deprecated(since = "0.8.0", note = "`IntoBytes::as_bytes_mut` was renamed to `as_mut_bytes`")] + #[doc(hidden)] + #[inline] + fn as_bytes_mut(&mut self) -> &mut [u8] + where + Self: FromBytes, + { + self.as_mut_bytes() + } +} + +/// Analyzes whether a type is [`Unaligned`]. +/// +/// This derive analyzes, at compile time, whether the annotated type satisfies +/// the [safety conditions] of `Unaligned` and implements `Unaligned` if it is +/// sound to do so. This derive can be applied to structs, enums, and unions; +/// e.g.: +/// +/// ``` +/// # use zerocopy_derive::Unaligned; +/// #[derive(Unaligned)] +/// #[repr(C)] +/// struct MyStruct { +/// # /* +/// ... +/// # */ +/// } +/// +/// #[derive(Unaligned)] +/// #[repr(u8)] +/// enum MyEnum { +/// # Variant0, +/// # /* +/// ... +/// # */ +/// } +/// +/// #[derive(Unaligned)] +/// #[repr(packed)] +/// union MyUnion { +/// # variant: u8, +/// # /* +/// ... +/// # */ +/// } +/// ``` +/// +/// # Analysis +/// +/// *This section describes, roughly, the analysis performed by this derive to +/// determine whether it is sound to implement `Unaligned` for a given type. +/// Unless you are modifying the implementation of this derive, or attempting to +/// manually implement `Unaligned` for a type yourself, you don't need to read +/// this section.* +/// +/// If a type has the following properties, then this derive can implement +/// `Unaligned` for that type: +/// +/// - If the type is a struct or union: +/// - If `repr(align(N))` is provided, `N` must equal 1. +/// - If the type is `repr(C)` or `repr(transparent)`, all fields must be +/// [`Unaligned`]. +/// - If the type is not `repr(C)` or `repr(transparent)`, it must be +/// `repr(packed)` or `repr(packed(1))`. +/// - If the type is an enum: +/// - If `repr(align(N))` is provided, `N` must equal 1. +/// - It must be a field-less enum (meaning that all variants have no fields). +/// - It must be `repr(i8)` or `repr(u8)`. +/// +/// [safety conditions]: trait@Unaligned#safety +#[cfg(any(feature = "derive", test))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))] +pub use zerocopy_derive::Unaligned; + +/// Types with no alignment requirement. +/// +/// If `T: Unaligned`, then `align_of::<T>() == 1`. 
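As an illustrative sketch (not part of the vendored source; it assumes zerocopy 0.8 with the `derive` feature, and `TlvHeader` is a hypothetical type): because an `Unaligned` type has alignment 1, reference conversions such as `ref_from_prefix` can only fail for lack of bytes, never because the input starts at a misaligned offset.

```rust
use zerocopy::byteorder::big_endian::U16;
use zerocopy::{FromBytes, Immutable, KnownLayout, Unaligned};

#[derive(FromBytes, Unaligned, Immutable, KnownLayout)]
#[repr(C)]
struct TlvHeader {
    tag: u8,
    len: U16, // alignment-1, big-endian integer from `zerocopy::byteorder`
}

fn main() {
    let buf = [0xFFu8, 0x07, 0x00, 0x20, 0xAA];
    // Starting at an odd offset is fine: `align_of::<TlvHeader>() == 1`, so the
    // cast cannot fail due to alignment, only due to having too few bytes.
    let (header, _rest) = TlvHeader::ref_from_prefix(&buf[1..]).unwrap();
    assert_eq!(header.tag, 0x07);
    assert_eq!(header.len.get(), 0x0020);
}
```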
+///
+/// # Implementation
+///
+/// **Do not implement this trait yourself!** Instead, use
+/// [`#[derive(Unaligned)]`][derive]; e.g.:
+///
+/// ```
+/// # use zerocopy_derive::Unaligned;
+/// #[derive(Unaligned)]
+/// #[repr(C)]
+/// struct MyStruct {
+/// # /*
+///     ...
+/// # */
+/// }
+///
+/// #[derive(Unaligned)]
+/// #[repr(u8)]
+/// enum MyEnum {
+/// # Variant0,
+/// # /*
+///     ...
+/// # */
+/// }
+///
+/// #[derive(Unaligned)]
+/// #[repr(packed)]
+/// union MyUnion {
+/// # variant: u8,
+/// # /*
+///     ...
+/// # */
+/// }
+/// ```
+///
+/// This derive performs a sophisticated, compile-time safety analysis to
+/// determine whether a type is `Unaligned`.
+///
+/// # Safety
+///
+/// *This section describes what is required in order for `T: Unaligned`, and
+/// what unsafe code may assume of such types. If you don't plan on implementing
+/// `Unaligned` manually, and you don't plan on writing unsafe code that
+/// operates on `Unaligned` types, then you don't need to read this section.*
+///
+/// If `T: Unaligned`, then unsafe code may assume that it is sound to produce a
+/// reference to `T` at any memory location regardless of alignment. If a type
+/// is marked as `Unaligned` which violates this contract, it may cause
+/// undefined behavior.
+///
+/// `#[derive(Unaligned)]` only permits [types which satisfy these
+/// requirements][derive-analysis].
+///
+#[cfg_attr(
+    feature = "derive",
+    doc = "[derive]: zerocopy_derive::Unaligned",
+    doc = "[derive-analysis]: zerocopy_derive::Unaligned#analysis"
+)]
+#[cfg_attr(
+    not(feature = "derive"),
+    doc = concat!("[derive]: https://docs.rs/zerocopy/", env!("CARGO_PKG_VERSION"), "/zerocopy/derive.Unaligned.html"),
+    doc = concat!("[derive-analysis]: https://docs.rs/zerocopy/", env!("CARGO_PKG_VERSION"), "/zerocopy/derive.Unaligned.html#analysis"),
+)]
+#[cfg_attr(
+    not(no_zerocopy_diagnostic_on_unimplemented_1_78_0),
+    diagnostic::on_unimplemented(note = "Consider adding `#[derive(Unaligned)]` to `{Self}`")
+)]
+pub unsafe trait Unaligned {
+    // The `Self: Sized` bound makes it so that `Unaligned` is still object
+    // safe.
+    #[doc(hidden)]
+    fn only_derive_is_allowed_to_implement_this_trait()
+    where
+        Self: Sized;
+}
+
+/// Derives optimized [`PartialEq`] and [`Eq`] implementations.
+///
+/// This derive can be applied to structs and enums implementing both
+/// [`Immutable`] and [`IntoBytes`]; e.g.:
+///
+/// ```
+/// # use zerocopy_derive::{ByteEq, Immutable, IntoBytes};
+/// #[derive(ByteEq, Immutable, IntoBytes)]
+/// #[repr(C)]
+/// struct MyStruct {
+/// # /*
+///     ...
+/// # */
+/// }
+///
+/// #[derive(ByteEq, Immutable, IntoBytes)]
+/// #[repr(u8)]
+/// enum MyEnum {
+/// # Variant,
+/// # /*
+///     ...
+/// # */
+/// }
+/// ```
+///
+/// The standard library's [`derive(Eq, PartialEq)`][derive@PartialEq] computes
+/// equality by individually comparing each field. Instead, the implementation
+/// of [`PartialEq::eq`] emitted by `derive(ByteEq)` converts the entirety of
+/// `self` and `other` to byte slices and compares those slices for equality.
+/// This may have performance advantages.
+#[cfg(any(feature = "derive", test))]
+#[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))]
+pub use zerocopy_derive::ByteEq;
+/// Derives an optimized [`Hash`] implementation.
+/// +/// This derive can be applied to structs and enums implementing both +/// [`Immutable`] and [`IntoBytes`]; e.g.: +/// +/// ``` +/// # use zerocopy_derive::{ByteHash, Immutable, IntoBytes}; +/// #[derive(ByteHash, Immutable, IntoBytes)] +/// #[repr(C)] +/// struct MyStruct { +/// # /* +/// ... +/// # */ +/// } +/// +/// #[derive(ByteHash, Immutable, IntoBytes)] +/// #[repr(u8)] +/// enum MyEnum { +/// # Variant, +/// # /* +/// ... +/// # */ +/// } +/// ``` +/// +/// The standard library's [`derive(Hash)`][derive@Hash] produces hashes by +/// individually hashing each field and combining the results. Instead, the +/// implementations of [`Hash::hash()`] and [`Hash::hash_slice()`] generated by +/// `derive(ByteHash)` convert the entirety of `self` to a byte slice and hashes +/// it in a single call to [`Hasher::write()`]. This may have performance +/// advantages. +/// +/// [`Hash`]: core::hash::Hash +/// [`Hash::hash()`]: core::hash::Hash::hash() +/// [`Hash::hash_slice()`]: core::hash::Hash::hash_slice() +#[cfg(any(feature = "derive", test))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))] +pub use zerocopy_derive::ByteHash; +/// Implements [`SplitAt`]. +/// +/// This derive can be applied to structs; e.g.: +/// +/// ``` +/// # use zerocopy_derive::{ByteEq, Immutable, IntoBytes}; +/// #[derive(ByteEq, Immutable, IntoBytes)] +/// #[repr(C)] +/// struct MyStruct { +/// # /* +/// ... +/// # */ +/// } +/// ``` +#[cfg(any(feature = "derive", test))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))] +pub use zerocopy_derive::SplitAt; + +#[cfg(feature = "alloc")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))] +#[cfg(not(no_zerocopy_panic_in_const_and_vec_try_reserve_1_57_0))] +mod alloc_support { + use super::*; + + /// Extends a `Vec<T>` by pushing `additional` new items onto the end of the + /// vector. The new items are initialized with zeros. + #[cfg(not(no_zerocopy_panic_in_const_and_vec_try_reserve_1_57_0))] + #[doc(hidden)] + #[deprecated(since = "0.8.0", note = "moved to `FromZeros`")] + #[inline(always)] + pub fn extend_vec_zeroed<T: FromZeros>( + v: &mut Vec<T>, + additional: usize, + ) -> Result<(), AllocError> { + <T as FromZeros>::extend_vec_zeroed(v, additional) + } + + /// Inserts `additional` new items into `Vec<T>` at `position`. The new + /// items are initialized with zeros. + /// + /// # Panics + /// + /// Panics if `position > v.len()`. + #[cfg(not(no_zerocopy_panic_in_const_and_vec_try_reserve_1_57_0))] + #[doc(hidden)] + #[deprecated(since = "0.8.0", note = "moved to `FromZeros`")] + #[inline(always)] + pub fn insert_vec_zeroed<T: FromZeros>( + v: &mut Vec<T>, + position: usize, + additional: usize, + ) -> Result<(), AllocError> { + <T as FromZeros>::insert_vec_zeroed(v, position, additional) + } +} + +#[cfg(feature = "alloc")] +#[cfg(not(no_zerocopy_panic_in_const_and_vec_try_reserve_1_57_0))] +#[doc(hidden)] +pub use alloc_support::*; + +#[cfg(test)] +#[allow(clippy::assertions_on_result_states, clippy::unreadable_literal)] +mod tests { + use static_assertions::assert_impl_all; + + use super::*; + use crate::util::testutil::*; + + // An unsized type. + // + // This is used to test the custom derives of our traits. The `[u8]` type + // gets a hand-rolled impl, so it doesn't exercise our custom derives. 
+ #[derive(Debug, Eq, PartialEq, FromBytes, IntoBytes, Unaligned, Immutable)] + #[repr(transparent)] + struct Unsized([u8]); + + impl Unsized { + fn from_mut_slice(slc: &mut [u8]) -> &mut Unsized { + // SAFETY: This *probably* sound - since the layouts of `[u8]` and + // `Unsized` are the same, so are the layouts of `&mut [u8]` and + // `&mut Unsized`. [1] Even if it turns out that this isn't actually + // guaranteed by the language spec, we can just change this since + // it's in test code. + // + // [1] https://github.com/rust-lang/unsafe-code-guidelines/issues/375 + unsafe { mem::transmute(slc) } + } + } + + #[test] + fn test_known_layout() { + // Test that `$ty` and `ManuallyDrop<$ty>` have the expected layout. + // Test that `PhantomData<$ty>` has the same layout as `()` regardless + // of `$ty`. + macro_rules! test { + ($ty:ty, $expect:expr) => { + let expect = $expect; + assert_eq!(<$ty as KnownLayout>::LAYOUT, expect); + assert_eq!(<ManuallyDrop<$ty> as KnownLayout>::LAYOUT, expect); + assert_eq!(<PhantomData<$ty> as KnownLayout>::LAYOUT, <() as KnownLayout>::LAYOUT); + }; + } + + let layout = + |offset, align, trailing_slice_elem_size, statically_shallow_unpadded| DstLayout { + align: NonZeroUsize::new(align).unwrap(), + size_info: match trailing_slice_elem_size { + None => SizeInfo::Sized { size: offset }, + Some(elem_size) => { + SizeInfo::SliceDst(TrailingSliceLayout { offset, elem_size }) + } + }, + statically_shallow_unpadded, + }; + + test!((), layout(0, 1, None, false)); + test!(u8, layout(1, 1, None, false)); + // Use `align_of` because `u64` alignment may be smaller than 8 on some + // platforms. + test!(u64, layout(8, mem::align_of::<u64>(), None, false)); + test!(AU64, layout(8, 8, None, false)); + + test!(Option<&'static ()>, usize::LAYOUT); + + test!([()], layout(0, 1, Some(0), true)); + test!([u8], layout(0, 1, Some(1), true)); + test!(str, layout(0, 1, Some(1), true)); + } + + #[cfg(feature = "derive")] + #[test] + fn test_known_layout_derive() { + // In this and other files (`late_compile_pass.rs`, + // `mid_compile_pass.rs`, and `struct.rs`), we test success and failure + // modes of `derive(KnownLayout)` for the following combination of + // properties: + // + // +------------+--------------------------------------+-----------+ + // | | trailing field properties | | + // | `repr(C)`? | generic? | `KnownLayout`? | `Sized`? 
| Type Name | + // |------------+----------+----------------+----------+-----------| + // | N | N | N | N | KL00 | + // | N | N | N | Y | KL01 | + // | N | N | Y | N | KL02 | + // | N | N | Y | Y | KL03 | + // | N | Y | N | N | KL04 | + // | N | Y | N | Y | KL05 | + // | N | Y | Y | N | KL06 | + // | N | Y | Y | Y | KL07 | + // | Y | N | N | N | KL08 | + // | Y | N | N | Y | KL09 | + // | Y | N | Y | N | KL10 | + // | Y | N | Y | Y | KL11 | + // | Y | Y | N | N | KL12 | + // | Y | Y | N | Y | KL13 | + // | Y | Y | Y | N | KL14 | + // | Y | Y | Y | Y | KL15 | + // +------------+----------+----------------+----------+-----------+ + + struct NotKnownLayout<T = ()> { + _t: T, + } + + #[derive(KnownLayout)] + #[repr(C)] + struct AlignSize<const ALIGN: usize, const SIZE: usize> + where + elain::Align<ALIGN>: elain::Alignment, + { + _align: elain::Align<ALIGN>, + size: [u8; SIZE], + } + + type AU16 = AlignSize<2, 2>; + type AU32 = AlignSize<4, 4>; + + fn _assert_kl<T: ?Sized + KnownLayout>(_: &T) {} + + let sized_layout = |align, size| DstLayout { + align: NonZeroUsize::new(align).unwrap(), + size_info: SizeInfo::Sized { size }, + statically_shallow_unpadded: false, + }; + + let unsized_layout = |align, elem_size, offset, statically_shallow_unpadded| DstLayout { + align: NonZeroUsize::new(align).unwrap(), + size_info: SizeInfo::SliceDst(TrailingSliceLayout { offset, elem_size }), + statically_shallow_unpadded, + }; + + // | `repr(C)`? | generic? | `KnownLayout`? | `Sized`? | Type Name | + // | N | N | N | Y | KL01 | + #[allow(dead_code)] + #[derive(KnownLayout)] + struct KL01(NotKnownLayout<AU32>, NotKnownLayout<AU16>); + + let expected = DstLayout::for_type::<KL01>(); + + assert_eq!(<KL01 as KnownLayout>::LAYOUT, expected); + assert_eq!(<KL01 as KnownLayout>::LAYOUT, sized_layout(4, 8)); + + // ...with `align(N)`: + #[allow(dead_code)] + #[derive(KnownLayout)] + #[repr(align(64))] + struct KL01Align(NotKnownLayout<AU32>, NotKnownLayout<AU16>); + + let expected = DstLayout::for_type::<KL01Align>(); + + assert_eq!(<KL01Align as KnownLayout>::LAYOUT, expected); + assert_eq!(<KL01Align as KnownLayout>::LAYOUT, sized_layout(64, 64)); + + // ...with `packed`: + #[allow(dead_code)] + #[derive(KnownLayout)] + #[repr(packed)] + struct KL01Packed(NotKnownLayout<AU32>, NotKnownLayout<AU16>); + + let expected = DstLayout::for_type::<KL01Packed>(); + + assert_eq!(<KL01Packed as KnownLayout>::LAYOUT, expected); + assert_eq!(<KL01Packed as KnownLayout>::LAYOUT, sized_layout(1, 6)); + + // ...with `packed(N)`: + #[allow(dead_code)] + #[derive(KnownLayout)] + #[repr(packed(2))] + struct KL01PackedN(NotKnownLayout<AU32>, NotKnownLayout<AU16>); + + assert_impl_all!(KL01PackedN: KnownLayout); + + let expected = DstLayout::for_type::<KL01PackedN>(); + + assert_eq!(<KL01PackedN as KnownLayout>::LAYOUT, expected); + assert_eq!(<KL01PackedN as KnownLayout>::LAYOUT, sized_layout(2, 6)); + + // | `repr(C)`? | generic? | `KnownLayout`? | `Sized`? | Type Name | + // | N | N | Y | Y | KL03 | + #[allow(dead_code)] + #[derive(KnownLayout)] + struct KL03(NotKnownLayout, u8); + + let expected = DstLayout::for_type::<KL03>(); + + assert_eq!(<KL03 as KnownLayout>::LAYOUT, expected); + assert_eq!(<KL03 as KnownLayout>::LAYOUT, sized_layout(1, 1)); + + // ... 
with `align(N)` + #[allow(dead_code)] + #[derive(KnownLayout)] + #[repr(align(64))] + struct KL03Align(NotKnownLayout<AU32>, u8); + + let expected = DstLayout::for_type::<KL03Align>(); + + assert_eq!(<KL03Align as KnownLayout>::LAYOUT, expected); + assert_eq!(<KL03Align as KnownLayout>::LAYOUT, sized_layout(64, 64)); + + // ... with `packed`: + #[allow(dead_code)] + #[derive(KnownLayout)] + #[repr(packed)] + struct KL03Packed(NotKnownLayout<AU32>, u8); + + let expected = DstLayout::for_type::<KL03Packed>(); + + assert_eq!(<KL03Packed as KnownLayout>::LAYOUT, expected); + assert_eq!(<KL03Packed as KnownLayout>::LAYOUT, sized_layout(1, 5)); + + // ... with `packed(N)` + #[allow(dead_code)] + #[derive(KnownLayout)] + #[repr(packed(2))] + struct KL03PackedN(NotKnownLayout<AU32>, u8); + + assert_impl_all!(KL03PackedN: KnownLayout); + + let expected = DstLayout::for_type::<KL03PackedN>(); + + assert_eq!(<KL03PackedN as KnownLayout>::LAYOUT, expected); + assert_eq!(<KL03PackedN as KnownLayout>::LAYOUT, sized_layout(2, 6)); + + // | `repr(C)`? | generic? | `KnownLayout`? | `Sized`? | Type Name | + // | N | Y | N | Y | KL05 | + #[allow(dead_code)] + #[derive(KnownLayout)] + struct KL05<T>(u8, T); + + fn _test_kl05<T>(t: T) -> impl KnownLayout { + KL05(0u8, t) + } + + // | `repr(C)`? | generic? | `KnownLayout`? | `Sized`? | Type Name | + // | N | Y | Y | Y | KL07 | + #[allow(dead_code)] + #[derive(KnownLayout)] + struct KL07<T: KnownLayout>(u8, T); + + fn _test_kl07<T: KnownLayout>(t: T) -> impl KnownLayout { + let _ = KL07(0u8, t); + } + + // | `repr(C)`? | generic? | `KnownLayout`? | `Sized`? | Type Name | + // | Y | N | Y | N | KL10 | + #[allow(dead_code)] + #[derive(KnownLayout)] + #[repr(C)] + struct KL10(NotKnownLayout<AU32>, [u8]); + + let expected = DstLayout::new_zst(None) + .extend(DstLayout::for_type::<NotKnownLayout<AU32>>(), None) + .extend(<[u8] as KnownLayout>::LAYOUT, None) + .pad_to_align(); + + assert_eq!(<KL10 as KnownLayout>::LAYOUT, expected); + assert_eq!(<KL10 as KnownLayout>::LAYOUT, unsized_layout(4, 1, 4, false)); + + // ...with `align(N)`: + #[allow(dead_code)] + #[derive(KnownLayout)] + #[repr(C, align(64))] + struct KL10Align(NotKnownLayout<AU32>, [u8]); + + let repr_align = NonZeroUsize::new(64); + + let expected = DstLayout::new_zst(repr_align) + .extend(DstLayout::for_type::<NotKnownLayout<AU32>>(), None) + .extend(<[u8] as KnownLayout>::LAYOUT, None) + .pad_to_align(); + + assert_eq!(<KL10Align as KnownLayout>::LAYOUT, expected); + assert_eq!(<KL10Align as KnownLayout>::LAYOUT, unsized_layout(64, 1, 4, false)); + + // ...with `packed`: + #[allow(dead_code)] + #[derive(KnownLayout)] + #[repr(C, packed)] + struct KL10Packed(NotKnownLayout<AU32>, [u8]); + + let repr_packed = NonZeroUsize::new(1); + + let expected = DstLayout::new_zst(None) + .extend(DstLayout::for_type::<NotKnownLayout<AU32>>(), repr_packed) + .extend(<[u8] as KnownLayout>::LAYOUT, repr_packed) + .pad_to_align(); + + assert_eq!(<KL10Packed as KnownLayout>::LAYOUT, expected); + assert_eq!(<KL10Packed as KnownLayout>::LAYOUT, unsized_layout(1, 1, 4, false)); + + // ...with `packed(N)`: + #[allow(dead_code)] + #[derive(KnownLayout)] + #[repr(C, packed(2))] + struct KL10PackedN(NotKnownLayout<AU32>, [u8]); + + let repr_packed = NonZeroUsize::new(2); + + let expected = DstLayout::new_zst(None) + .extend(DstLayout::for_type::<NotKnownLayout<AU32>>(), repr_packed) + .extend(<[u8] as KnownLayout>::LAYOUT, repr_packed) + .pad_to_align(); + + assert_eq!(<KL10PackedN as KnownLayout>::LAYOUT, expected); + 
assert_eq!(<KL10PackedN as KnownLayout>::LAYOUT, unsized_layout(2, 1, 4, false)); + + // | `repr(C)`? | generic? | `KnownLayout`? | `Sized`? | Type Name | + // | Y | N | Y | Y | KL11 | + #[allow(dead_code)] + #[derive(KnownLayout)] + #[repr(C)] + struct KL11(NotKnownLayout<AU64>, u8); + + let expected = DstLayout::new_zst(None) + .extend(DstLayout::for_type::<NotKnownLayout<AU64>>(), None) + .extend(<u8 as KnownLayout>::LAYOUT, None) + .pad_to_align(); + + assert_eq!(<KL11 as KnownLayout>::LAYOUT, expected); + assert_eq!(<KL11 as KnownLayout>::LAYOUT, sized_layout(8, 16)); + + // ...with `align(N)`: + #[allow(dead_code)] + #[derive(KnownLayout)] + #[repr(C, align(64))] + struct KL11Align(NotKnownLayout<AU64>, u8); + + let repr_align = NonZeroUsize::new(64); + + let expected = DstLayout::new_zst(repr_align) + .extend(DstLayout::for_type::<NotKnownLayout<AU64>>(), None) + .extend(<u8 as KnownLayout>::LAYOUT, None) + .pad_to_align(); + + assert_eq!(<KL11Align as KnownLayout>::LAYOUT, expected); + assert_eq!(<KL11Align as KnownLayout>::LAYOUT, sized_layout(64, 64)); + + // ...with `packed`: + #[allow(dead_code)] + #[derive(KnownLayout)] + #[repr(C, packed)] + struct KL11Packed(NotKnownLayout<AU64>, u8); + + let repr_packed = NonZeroUsize::new(1); + + let expected = DstLayout::new_zst(None) + .extend(DstLayout::for_type::<NotKnownLayout<AU64>>(), repr_packed) + .extend(<u8 as KnownLayout>::LAYOUT, repr_packed) + .pad_to_align(); + + assert_eq!(<KL11Packed as KnownLayout>::LAYOUT, expected); + assert_eq!(<KL11Packed as KnownLayout>::LAYOUT, sized_layout(1, 9)); + + // ...with `packed(N)`: + #[allow(dead_code)] + #[derive(KnownLayout)] + #[repr(C, packed(2))] + struct KL11PackedN(NotKnownLayout<AU64>, u8); + + let repr_packed = NonZeroUsize::new(2); + + let expected = DstLayout::new_zst(None) + .extend(DstLayout::for_type::<NotKnownLayout<AU64>>(), repr_packed) + .extend(<u8 as KnownLayout>::LAYOUT, repr_packed) + .pad_to_align(); + + assert_eq!(<KL11PackedN as KnownLayout>::LAYOUT, expected); + assert_eq!(<KL11PackedN as KnownLayout>::LAYOUT, sized_layout(2, 10)); + + // | `repr(C)`? | generic? | `KnownLayout`? | `Sized`? | Type Name | + // | Y | Y | Y | N | KL14 | + #[allow(dead_code)] + #[derive(KnownLayout)] + #[repr(C)] + struct KL14<T: ?Sized + KnownLayout>(u8, T); + + fn _test_kl14<T: ?Sized + KnownLayout>(kl: &KL14<T>) { + _assert_kl(kl) + } + + // | `repr(C)`? | generic? | `KnownLayout`? | `Sized`? 
| Type Name | + // | Y | Y | Y | Y | KL15 | + #[allow(dead_code)] + #[derive(KnownLayout)] + #[repr(C)] + struct KL15<T: KnownLayout>(u8, T); + + fn _test_kl15<T: KnownLayout>(t: T) -> impl KnownLayout { + let _ = KL15(0u8, t); + } + + // Test a variety of combinations of field types: + // - () + // - u8 + // - AU16 + // - [()] + // - [u8] + // - [AU16] + + #[allow(clippy::upper_case_acronyms, dead_code)] + #[derive(KnownLayout)] + #[repr(C)] + struct KLTU<T, U: ?Sized>(T, U); + + assert_eq!(<KLTU<(), ()> as KnownLayout>::LAYOUT, sized_layout(1, 0)); + + assert_eq!(<KLTU<(), u8> as KnownLayout>::LAYOUT, sized_layout(1, 1)); + + assert_eq!(<KLTU<(), AU16> as KnownLayout>::LAYOUT, sized_layout(2, 2)); + + assert_eq!(<KLTU<(), [()]> as KnownLayout>::LAYOUT, unsized_layout(1, 0, 0, false)); + + assert_eq!(<KLTU<(), [u8]> as KnownLayout>::LAYOUT, unsized_layout(1, 1, 0, false)); + + assert_eq!(<KLTU<(), [AU16]> as KnownLayout>::LAYOUT, unsized_layout(2, 2, 0, false)); + + assert_eq!(<KLTU<u8, ()> as KnownLayout>::LAYOUT, sized_layout(1, 1)); + + assert_eq!(<KLTU<u8, u8> as KnownLayout>::LAYOUT, sized_layout(1, 2)); + + assert_eq!(<KLTU<u8, AU16> as KnownLayout>::LAYOUT, sized_layout(2, 4)); + + assert_eq!(<KLTU<u8, [()]> as KnownLayout>::LAYOUT, unsized_layout(1, 0, 1, false)); + + assert_eq!(<KLTU<u8, [u8]> as KnownLayout>::LAYOUT, unsized_layout(1, 1, 1, false)); + + assert_eq!(<KLTU<u8, [AU16]> as KnownLayout>::LAYOUT, unsized_layout(2, 2, 2, false)); + + assert_eq!(<KLTU<AU16, ()> as KnownLayout>::LAYOUT, sized_layout(2, 2)); + + assert_eq!(<KLTU<AU16, u8> as KnownLayout>::LAYOUT, sized_layout(2, 4)); + + assert_eq!(<KLTU<AU16, AU16> as KnownLayout>::LAYOUT, sized_layout(2, 4)); + + assert_eq!(<KLTU<AU16, [()]> as KnownLayout>::LAYOUT, unsized_layout(2, 0, 2, false)); + + assert_eq!(<KLTU<AU16, [u8]> as KnownLayout>::LAYOUT, unsized_layout(2, 1, 2, false)); + + assert_eq!(<KLTU<AU16, [AU16]> as KnownLayout>::LAYOUT, unsized_layout(2, 2, 2, false)); + + // Test a variety of field counts. + + #[derive(KnownLayout)] + #[repr(C)] + struct KLF0; + + assert_eq!(<KLF0 as KnownLayout>::LAYOUT, sized_layout(1, 0)); + + #[derive(KnownLayout)] + #[repr(C)] + struct KLF1([u8]); + + assert_eq!(<KLF1 as KnownLayout>::LAYOUT, unsized_layout(1, 1, 0, true)); + + #[derive(KnownLayout)] + #[repr(C)] + struct KLF2(NotKnownLayout<u8>, [u8]); + + assert_eq!(<KLF2 as KnownLayout>::LAYOUT, unsized_layout(1, 1, 1, false)); + + #[derive(KnownLayout)] + #[repr(C)] + struct KLF3(NotKnownLayout<u8>, NotKnownLayout<AU16>, [u8]); + + assert_eq!(<KLF3 as KnownLayout>::LAYOUT, unsized_layout(2, 1, 4, false)); + + #[derive(KnownLayout)] + #[repr(C)] + struct KLF4(NotKnownLayout<u8>, NotKnownLayout<AU16>, NotKnownLayout<AU32>, [u8]); + + assert_eq!(<KLF4 as KnownLayout>::LAYOUT, unsized_layout(4, 1, 8, false)); + } + + #[test] + fn test_object_safety() { + fn _takes_no_cell(_: &dyn Immutable) {} + fn _takes_unaligned(_: &dyn Unaligned) {} + } + + #[test] + fn test_from_zeros_only() { + // Test types that implement `FromZeros` but not `FromBytes`. 
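User-defined types can fall into the same category; a sketch, assuming the `derive` feature, with a hypothetical `StatusSketch` enum. The all-zeros byte pattern is a valid instance (the `Idle` variant), so `FromZeros` can be derived, but most other byte patterns are invalid, so `FromBytes` cannot be:

```rust
use zerocopy::FromZeros;

#[derive(FromZeros)]
#[repr(u8)]
enum StatusSketch {
    Idle = 0, // the zero discriminant is what makes `FromZeros` derivable
    Busy = 1,
}

fn main() {
    let status = StatusSketch::new_zeroed();
    assert!(matches!(status, StatusSketch::Idle));
}
```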
+ + assert!(!bool::new_zeroed()); + assert_eq!(char::new_zeroed(), '\0'); + + #[cfg(feature = "alloc")] + { + assert_eq!(bool::new_box_zeroed(), Ok(Box::new(false))); + assert_eq!(char::new_box_zeroed(), Ok(Box::new('\0'))); + + assert_eq!( + <[bool]>::new_box_zeroed_with_elems(3).unwrap().as_ref(), + [false, false, false] + ); + assert_eq!( + <[char]>::new_box_zeroed_with_elems(3).unwrap().as_ref(), + ['\0', '\0', '\0'] + ); + + assert_eq!(bool::new_vec_zeroed(3).unwrap().as_ref(), [false, false, false]); + assert_eq!(char::new_vec_zeroed(3).unwrap().as_ref(), ['\0', '\0', '\0']); + } + + let mut string = "hello".to_string(); + let s: &mut str = string.as_mut(); + assert_eq!(s, "hello"); + s.zero(); + assert_eq!(s, "\0\0\0\0\0"); + } + + #[test] + fn test_zst_count_preserved() { + // Test that, when an explicit count is provided to for a type with a + // ZST trailing slice element, that count is preserved. This is + // important since, for such types, all element counts result in objects + // of the same size, and so the correct behavior is ambiguous. However, + // preserving the count as requested by the user is the behavior that we + // document publicly. + + // FromZeros methods + #[cfg(feature = "alloc")] + assert_eq!(<[()]>::new_box_zeroed_with_elems(3).unwrap().len(), 3); + #[cfg(feature = "alloc")] + assert_eq!(<()>::new_vec_zeroed(3).unwrap().len(), 3); + + // FromBytes methods + assert_eq!(<[()]>::ref_from_bytes_with_elems(&[][..], 3).unwrap().len(), 3); + assert_eq!(<[()]>::ref_from_prefix_with_elems(&[][..], 3).unwrap().0.len(), 3); + assert_eq!(<[()]>::ref_from_suffix_with_elems(&[][..], 3).unwrap().1.len(), 3); + assert_eq!(<[()]>::mut_from_bytes_with_elems(&mut [][..], 3).unwrap().len(), 3); + assert_eq!(<[()]>::mut_from_prefix_with_elems(&mut [][..], 3).unwrap().0.len(), 3); + assert_eq!(<[()]>::mut_from_suffix_with_elems(&mut [][..], 3).unwrap().1.len(), 3); + } + + #[test] + fn test_read_write() { + const VAL: u64 = 0x12345678; + #[cfg(target_endian = "big")] + const VAL_BYTES: [u8; 8] = VAL.to_be_bytes(); + #[cfg(target_endian = "little")] + const VAL_BYTES: [u8; 8] = VAL.to_le_bytes(); + const ZEROS: [u8; 8] = [0u8; 8]; + + // Test `FromBytes::{read_from, read_from_prefix, read_from_suffix}`. + + assert_eq!(u64::read_from_bytes(&VAL_BYTES[..]), Ok(VAL)); + // The first 8 bytes are from `VAL_BYTES` and the second 8 bytes are all + // zeros. + let bytes_with_prefix: [u8; 16] = transmute!([VAL_BYTES, [0; 8]]); + assert_eq!(u64::read_from_prefix(&bytes_with_prefix[..]), Ok((VAL, &ZEROS[..]))); + assert_eq!(u64::read_from_suffix(&bytes_with_prefix[..]), Ok((&VAL_BYTES[..], 0))); + // The first 8 bytes are all zeros and the second 8 bytes are from + // `VAL_BYTES` + let bytes_with_suffix: [u8; 16] = transmute!([[0; 8], VAL_BYTES]); + assert_eq!(u64::read_from_prefix(&bytes_with_suffix[..]), Ok((0, &VAL_BYTES[..]))); + assert_eq!(u64::read_from_suffix(&bytes_with_suffix[..]), Ok((&ZEROS[..], VAL))); + + // Test `IntoBytes::{write_to, write_to_prefix, write_to_suffix}`. 
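Outside of tests, these writers typically serialize a fixed-size value into the front of a caller-provided buffer; a sketch, assuming the `derive` feature and the `byteorder` module, with a hypothetical `FrameHeaderSketch` type:

```rust
use zerocopy::byteorder::little_endian::U32;
use zerocopy::{Immutable, IntoBytes};

#[derive(IntoBytes, Immutable)]
#[repr(C)]
struct FrameHeaderSketch {
    magic: U32,
    len: U32,
}

fn main() {
    let mut frame = [0u8; 16];
    let hdr = FrameHeaderSketch { magic: U32::new(0xFEED_FACE), len: U32::new(8) };
    // `write_to_prefix` fails only if the buffer is shorter than the header;
    // bytes past the header are left untouched.
    assert!(hdr.write_to_prefix(&mut frame[..]).is_ok());
    assert_eq!(&frame[..4], &0xFEED_FACEu32.to_le_bytes());
}
```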
+ + let mut bytes = [0u8; 8]; + assert_eq!(VAL.write_to(&mut bytes[..]), Ok(())); + assert_eq!(bytes, VAL_BYTES); + let mut bytes = [0u8; 16]; + assert_eq!(VAL.write_to_prefix(&mut bytes[..]), Ok(())); + let want: [u8; 16] = transmute!([VAL_BYTES, [0; 8]]); + assert_eq!(bytes, want); + let mut bytes = [0u8; 16]; + assert_eq!(VAL.write_to_suffix(&mut bytes[..]), Ok(())); + let want: [u8; 16] = transmute!([[0; 8], VAL_BYTES]); + assert_eq!(bytes, want); + } + + #[test] + #[cfg(feature = "std")] + fn test_read_io_with_padding_soundness() { + // This test is designed to exhibit potential UB in + // `FromBytes::read_from_io`. (see #2319, #2320). + + // On most platforms (where `align_of::<u16>() == 2`), `WithPadding` + // will have inter-field padding between `x` and `y`. + #[derive(FromBytes)] + #[repr(C)] + struct WithPadding { + x: u8, + y: u16, + } + struct ReadsInRead; + impl std::io::Read for ReadsInRead { + fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> { + // This body branches on every byte of `buf`, ensuring that it + // exhibits UB if any byte of `buf` is uninitialized. + if buf.iter().all(|&x| x == 0) { + Ok(buf.len()) + } else { + buf.iter_mut().for_each(|x| *x = 0); + Ok(buf.len()) + } + } + } + assert!(matches!(WithPadding::read_from_io(ReadsInRead), Ok(WithPadding { x: 0, y: 0 }))); + } + + #[test] + #[cfg(feature = "std")] + fn test_read_write_io() { + let mut long_buffer = [0, 0, 0, 0]; + assert!(matches!(u16::MAX.write_to_io(&mut long_buffer[..]), Ok(()))); + assert_eq!(long_buffer, [255, 255, 0, 0]); + assert!(matches!(u16::read_from_io(&long_buffer[..]), Ok(u16::MAX))); + + let mut short_buffer = [0, 0]; + assert!(u32::MAX.write_to_io(&mut short_buffer[..]).is_err()); + assert_eq!(short_buffer, [255, 255]); + assert!(u32::read_from_io(&short_buffer[..]).is_err()); + } + + #[test] + fn test_try_from_bytes_try_read_from() { + assert_eq!(<bool as TryFromBytes>::try_read_from_bytes(&[0]), Ok(false)); + assert_eq!(<bool as TryFromBytes>::try_read_from_bytes(&[1]), Ok(true)); + + assert_eq!(<bool as TryFromBytes>::try_read_from_prefix(&[0, 2]), Ok((false, &[2][..]))); + assert_eq!(<bool as TryFromBytes>::try_read_from_prefix(&[1, 2]), Ok((true, &[2][..]))); + + assert_eq!(<bool as TryFromBytes>::try_read_from_suffix(&[2, 0]), Ok((&[2][..], false))); + assert_eq!(<bool as TryFromBytes>::try_read_from_suffix(&[2, 1]), Ok((&[2][..], true))); + + // If we don't pass enough bytes, it fails. + assert!(matches!( + <u8 as TryFromBytes>::try_read_from_bytes(&[]), + Err(TryReadError::Size(_)) + )); + assert!(matches!( + <u8 as TryFromBytes>::try_read_from_prefix(&[]), + Err(TryReadError::Size(_)) + )); + assert!(matches!( + <u8 as TryFromBytes>::try_read_from_suffix(&[]), + Err(TryReadError::Size(_)) + )); + + // If we pass too many bytes, it fails. + assert!(matches!( + <u8 as TryFromBytes>::try_read_from_bytes(&[0, 0]), + Err(TryReadError::Size(_)) + )); + + // If we pass an invalid value, it fails. + assert!(matches!( + <bool as TryFromBytes>::try_read_from_bytes(&[2]), + Err(TryReadError::Validity(_)) + )); + assert!(matches!( + <bool as TryFromBytes>::try_read_from_prefix(&[2, 0]), + Err(TryReadError::Validity(_)) + )); + assert!(matches!( + <bool as TryFromBytes>::try_read_from_suffix(&[0, 2]), + Err(TryReadError::Validity(_)) + )); + + // Reading from a misaligned buffer should still succeed. 
Since `AU64`'s + // alignment is 8, and since we read from two adjacent addresses one + // byte apart, it is guaranteed that at least one of them (though + // possibly both) will be misaligned. + let bytes: [u8; 9] = [0, 0, 0, 0, 0, 0, 0, 0, 0]; + assert_eq!(<AU64 as TryFromBytes>::try_read_from_bytes(&bytes[..8]), Ok(AU64(0))); + assert_eq!(<AU64 as TryFromBytes>::try_read_from_bytes(&bytes[1..9]), Ok(AU64(0))); + + assert_eq!( + <AU64 as TryFromBytes>::try_read_from_prefix(&bytes[..8]), + Ok((AU64(0), &[][..])) + ); + assert_eq!( + <AU64 as TryFromBytes>::try_read_from_prefix(&bytes[1..9]), + Ok((AU64(0), &[][..])) + ); + + assert_eq!( + <AU64 as TryFromBytes>::try_read_from_suffix(&bytes[..8]), + Ok((&[][..], AU64(0))) + ); + assert_eq!( + <AU64 as TryFromBytes>::try_read_from_suffix(&bytes[1..9]), + Ok((&[][..], AU64(0))) + ); + } + + #[test] + fn test_ref_from_mut_from() { + // Test `FromBytes::{ref_from, mut_from}{,_prefix,Suffix}` success cases + // Exhaustive coverage for these methods is covered by the `Ref` tests above, + // which these helper methods defer to. + + let mut buf = + Align::<[u8; 16], AU64>::new([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]); + + assert_eq!( + AU64::ref_from_bytes(&buf.t[8..]).unwrap().0.to_ne_bytes(), + [8, 9, 10, 11, 12, 13, 14, 15] + ); + let suffix = AU64::mut_from_bytes(&mut buf.t[8..]).unwrap(); + suffix.0 = 0x0101010101010101; + // The `[u8:9]` is a non-half size of the full buffer, which would catch + // `from_prefix` having the same implementation as `from_suffix` (issues #506, #511). + assert_eq!( + <[u8; 9]>::ref_from_suffix(&buf.t[..]).unwrap(), + (&[0, 1, 2, 3, 4, 5, 6][..], &[7u8, 1, 1, 1, 1, 1, 1, 1, 1]) + ); + let (prefix, suffix) = AU64::mut_from_suffix(&mut buf.t[1..]).unwrap(); + assert_eq!(prefix, &mut [1u8, 2, 3, 4, 5, 6, 7][..]); + suffix.0 = 0x0202020202020202; + let (prefix, suffix) = <[u8; 10]>::mut_from_suffix(&mut buf.t[..]).unwrap(); + assert_eq!(prefix, &mut [0u8, 1, 2, 3, 4, 5][..]); + suffix[0] = 42; + assert_eq!( + <[u8; 9]>::ref_from_prefix(&buf.t[..]).unwrap(), + (&[0u8, 1, 2, 3, 4, 5, 42, 7, 2], &[2u8, 2, 2, 2, 2, 2, 2][..]) + ); + <[u8; 2]>::mut_from_prefix(&mut buf.t[..]).unwrap().0[1] = 30; + assert_eq!(buf.t, [0, 30, 2, 3, 4, 5, 42, 7, 2, 2, 2, 2, 2, 2, 2, 2]); + } + + #[test] + fn test_ref_from_mut_from_error() { + // Test `FromBytes::{ref_from, mut_from}{,_prefix,Suffix}` error cases. + + // Fail because the buffer is too large. + let mut buf = Align::<[u8; 16], AU64>::default(); + // `buf.t` should be aligned to 8, so only the length check should fail. + assert!(AU64::ref_from_bytes(&buf.t[..]).is_err()); + assert!(AU64::mut_from_bytes(&mut buf.t[..]).is_err()); + assert!(<[u8; 8]>::ref_from_bytes(&buf.t[..]).is_err()); + assert!(<[u8; 8]>::mut_from_bytes(&mut buf.t[..]).is_err()); + + // Fail because the buffer is too small. 
+ let mut buf = Align::<[u8; 4], AU64>::default(); + assert!(AU64::ref_from_bytes(&buf.t[..]).is_err()); + assert!(AU64::mut_from_bytes(&mut buf.t[..]).is_err()); + assert!(<[u8; 8]>::ref_from_bytes(&buf.t[..]).is_err()); + assert!(<[u8; 8]>::mut_from_bytes(&mut buf.t[..]).is_err()); + assert!(AU64::ref_from_prefix(&buf.t[..]).is_err()); + assert!(AU64::mut_from_prefix(&mut buf.t[..]).is_err()); + assert!(AU64::ref_from_suffix(&buf.t[..]).is_err()); + assert!(AU64::mut_from_suffix(&mut buf.t[..]).is_err()); + assert!(<[u8; 8]>::ref_from_prefix(&buf.t[..]).is_err()); + assert!(<[u8; 8]>::mut_from_prefix(&mut buf.t[..]).is_err()); + assert!(<[u8; 8]>::ref_from_suffix(&buf.t[..]).is_err()); + assert!(<[u8; 8]>::mut_from_suffix(&mut buf.t[..]).is_err()); + + // Fail because the alignment is insufficient. + let mut buf = Align::<[u8; 13], AU64>::default(); + assert!(AU64::ref_from_bytes(&buf.t[1..]).is_err()); + assert!(AU64::mut_from_bytes(&mut buf.t[1..]).is_err()); + assert!(AU64::ref_from_bytes(&buf.t[1..]).is_err()); + assert!(AU64::mut_from_bytes(&mut buf.t[1..]).is_err()); + assert!(AU64::ref_from_prefix(&buf.t[1..]).is_err()); + assert!(AU64::mut_from_prefix(&mut buf.t[1..]).is_err()); + assert!(AU64::ref_from_suffix(&buf.t[..]).is_err()); + assert!(AU64::mut_from_suffix(&mut buf.t[..]).is_err()); + } + + #[test] + fn test_to_methods() { + /// Run a series of tests by calling `IntoBytes` methods on `t`. + /// + /// `bytes` is the expected byte sequence returned from `t.as_bytes()` + /// before `t` has been modified. `post_mutation` is the expected + /// sequence returned from `t.as_bytes()` after `t.as_mut_bytes()[0]` + /// has had its bits flipped (by applying `^= 0xFF`). + /// + /// `N` is the size of `t` in bytes. + fn test<T: FromBytes + IntoBytes + Immutable + Debug + Eq + ?Sized, const N: usize>( + t: &mut T, + bytes: &[u8], + post_mutation: &T, + ) { + // Test that we can access the underlying bytes, and that we get the + // right bytes and the right number of bytes. + assert_eq!(t.as_bytes(), bytes); + + // Test that changes to the underlying byte slices are reflected in + // the original object. + t.as_mut_bytes()[0] ^= 0xFF; + assert_eq!(t, post_mutation); + t.as_mut_bytes()[0] ^= 0xFF; + + // `write_to` rejects slices that are too small or too large. + assert!(t.write_to(&mut vec![0; N - 1][..]).is_err()); + assert!(t.write_to(&mut vec![0; N + 1][..]).is_err()); + + // `write_to` works as expected. + let mut bytes = [0; N]; + assert_eq!(t.write_to(&mut bytes[..]), Ok(())); + assert_eq!(bytes, t.as_bytes()); + + // `write_to_prefix` rejects slices that are too small. + assert!(t.write_to_prefix(&mut vec![0; N - 1][..]).is_err()); + + // `write_to_prefix` works with exact-sized slices. + let mut bytes = [0; N]; + assert_eq!(t.write_to_prefix(&mut bytes[..]), Ok(())); + assert_eq!(bytes, t.as_bytes()); + + // `write_to_prefix` works with too-large slices, and any bytes past + // the prefix aren't modified. + let mut too_many_bytes = vec![0; N + 1]; + too_many_bytes[N] = 123; + assert_eq!(t.write_to_prefix(&mut too_many_bytes[..]), Ok(())); + assert_eq!(&too_many_bytes[..N], t.as_bytes()); + assert_eq!(too_many_bytes[N], 123); + + // `write_to_suffix` rejects slices that are too small. + assert!(t.write_to_suffix(&mut vec![0; N - 1][..]).is_err()); + + // `write_to_suffix` works with exact-sized slices. 
+ let mut bytes = [0; N]; + assert_eq!(t.write_to_suffix(&mut bytes[..]), Ok(())); + assert_eq!(bytes, t.as_bytes()); + + // `write_to_suffix` works with too-large slices, and any bytes + // before the suffix aren't modified. + let mut too_many_bytes = vec![0; N + 1]; + too_many_bytes[0] = 123; + assert_eq!(t.write_to_suffix(&mut too_many_bytes[..]), Ok(())); + assert_eq!(&too_many_bytes[1..], t.as_bytes()); + assert_eq!(too_many_bytes[0], 123); + } + + #[derive(Debug, Eq, PartialEq, FromBytes, IntoBytes, Immutable)] + #[repr(C)] + struct Foo { + a: u32, + b: Wrapping<u32>, + c: Option<NonZeroU32>, + } + + let expected_bytes: Vec<u8> = if cfg!(target_endian = "little") { + vec![1, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0] + } else { + vec![0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 0] + }; + let post_mutation_expected_a = + if cfg!(target_endian = "little") { 0x00_00_00_FE } else { 0xFF_00_00_01 }; + test::<_, 12>( + &mut Foo { a: 1, b: Wrapping(2), c: None }, + expected_bytes.as_bytes(), + &Foo { a: post_mutation_expected_a, b: Wrapping(2), c: None }, + ); + test::<_, 3>( + Unsized::from_mut_slice(&mut [1, 2, 3]), + &[1, 2, 3], + Unsized::from_mut_slice(&mut [0xFE, 2, 3]), + ); + } + + #[test] + fn test_array() { + #[derive(FromBytes, IntoBytes, Immutable)] + #[repr(C)] + struct Foo { + a: [u16; 33], + } + + let foo = Foo { a: [0xFFFF; 33] }; + let expected = [0xFFu8; 66]; + assert_eq!(foo.as_bytes(), &expected[..]); + } + + #[test] + fn test_new_zeroed() { + assert!(!bool::new_zeroed()); + assert_eq!(u64::new_zeroed(), 0); + // This test exists in order to exercise unsafe code, especially when + // running under Miri. + #[allow(clippy::unit_cmp)] + { + assert_eq!(<()>::new_zeroed(), ()); + } + } + + #[test] + fn test_transparent_packed_generic_struct() { + #[derive(IntoBytes, FromBytes, Unaligned)] + #[repr(transparent)] + #[allow(dead_code)] // We never construct this type + struct Foo<T> { + _t: T, + _phantom: PhantomData<()>, + } + + assert_impl_all!(Foo<u32>: FromZeros, FromBytes, IntoBytes); + assert_impl_all!(Foo<u8>: Unaligned); + + #[derive(IntoBytes, FromBytes, Unaligned)] + #[repr(C, packed)] + #[allow(dead_code)] // We never construct this type + struct Bar<T, U> { + _t: T, + _u: U, + } + + assert_impl_all!(Bar<u8, AU64>: FromZeros, FromBytes, IntoBytes, Unaligned); + } + + #[cfg(feature = "alloc")] + mod alloc { + use super::*; + + #[cfg(not(no_zerocopy_panic_in_const_and_vec_try_reserve_1_57_0))] + #[test] + fn test_extend_vec_zeroed() { + // Test extending when there is an existing allocation. + let mut v = vec![100u16, 200, 300]; + FromZeros::extend_vec_zeroed(&mut v, 3).unwrap(); + assert_eq!(v.len(), 6); + assert_eq!(&*v, &[100, 200, 300, 0, 0, 0]); + drop(v); + + // Test extending when there is no existing allocation. + let mut v: Vec<u64> = Vec::new(); + FromZeros::extend_vec_zeroed(&mut v, 3).unwrap(); + assert_eq!(v.len(), 3); + assert_eq!(&*v, &[0, 0, 0]); + drop(v); + } + + #[cfg(not(no_zerocopy_panic_in_const_and_vec_try_reserve_1_57_0))] + #[test] + fn test_extend_vec_zeroed_zst() { + // Test extending when there is an existing (fake) allocation. + let mut v = vec![(), (), ()]; + <()>::extend_vec_zeroed(&mut v, 3).unwrap(); + assert_eq!(v.len(), 6); + assert_eq!(&*v, &[(), (), (), (), (), ()]); + drop(v); + + // Test extending when there is no existing (fake) allocation. 
+ let mut v: Vec<()> = Vec::new(); + <()>::extend_vec_zeroed(&mut v, 3).unwrap(); + assert_eq!(&*v, &[(), (), ()]); + drop(v); + } + + #[cfg(not(no_zerocopy_panic_in_const_and_vec_try_reserve_1_57_0))] + #[test] + fn test_insert_vec_zeroed() { + // Insert at start (no existing allocation). + let mut v: Vec<u64> = Vec::new(); + u64::insert_vec_zeroed(&mut v, 0, 2).unwrap(); + assert_eq!(v.len(), 2); + assert_eq!(&*v, &[0, 0]); + drop(v); + + // Insert at start. + let mut v = vec![100u64, 200, 300]; + u64::insert_vec_zeroed(&mut v, 0, 2).unwrap(); + assert_eq!(v.len(), 5); + assert_eq!(&*v, &[0, 0, 100, 200, 300]); + drop(v); + + // Insert at middle. + let mut v = vec![100u64, 200, 300]; + u64::insert_vec_zeroed(&mut v, 1, 1).unwrap(); + assert_eq!(v.len(), 4); + assert_eq!(&*v, &[100, 0, 200, 300]); + drop(v); + + // Insert at end. + let mut v = vec![100u64, 200, 300]; + u64::insert_vec_zeroed(&mut v, 3, 1).unwrap(); + assert_eq!(v.len(), 4); + assert_eq!(&*v, &[100, 200, 300, 0]); + drop(v); + } + + #[cfg(not(no_zerocopy_panic_in_const_and_vec_try_reserve_1_57_0))] + #[test] + fn test_insert_vec_zeroed_zst() { + // Insert at start (no existing fake allocation). + let mut v: Vec<()> = Vec::new(); + <()>::insert_vec_zeroed(&mut v, 0, 2).unwrap(); + assert_eq!(v.len(), 2); + assert_eq!(&*v, &[(), ()]); + drop(v); + + // Insert at start. + let mut v = vec![(), (), ()]; + <()>::insert_vec_zeroed(&mut v, 0, 2).unwrap(); + assert_eq!(v.len(), 5); + assert_eq!(&*v, &[(), (), (), (), ()]); + drop(v); + + // Insert at middle. + let mut v = vec![(), (), ()]; + <()>::insert_vec_zeroed(&mut v, 1, 1).unwrap(); + assert_eq!(v.len(), 4); + assert_eq!(&*v, &[(), (), (), ()]); + drop(v); + + // Insert at end. + let mut v = vec![(), (), ()]; + <()>::insert_vec_zeroed(&mut v, 3, 1).unwrap(); + assert_eq!(v.len(), 4); + assert_eq!(&*v, &[(), (), (), ()]); + drop(v); + } + + #[test] + fn test_new_box_zeroed() { + assert_eq!(u64::new_box_zeroed(), Ok(Box::new(0))); + } + + #[test] + fn test_new_box_zeroed_array() { + drop(<[u32; 0x1000]>::new_box_zeroed()); + } + + #[test] + fn test_new_box_zeroed_zst() { + // This test exists in order to exercise unsafe code, especially + // when running under Miri. + #[allow(clippy::unit_cmp)] + { + assert_eq!(<()>::new_box_zeroed(), Ok(Box::new(()))); + } + } + + #[test] + fn test_new_box_zeroed_with_elems() { + let mut s: Box<[u64]> = <[u64]>::new_box_zeroed_with_elems(3).unwrap(); + assert_eq!(s.len(), 3); + assert_eq!(&*s, &[0, 0, 0]); + s[1] = 3; + assert_eq!(&*s, &[0, 3, 0]); + } + + #[test] + fn test_new_box_zeroed_with_elems_empty() { + let s: Box<[u64]> = <[u64]>::new_box_zeroed_with_elems(0).unwrap(); + assert_eq!(s.len(), 0); + } + + #[test] + fn test_new_box_zeroed_with_elems_zst() { + let mut s: Box<[()]> = <[()]>::new_box_zeroed_with_elems(3).unwrap(); + assert_eq!(s.len(), 3); + assert!(s.get(10).is_none()); + // This test exists in order to exercise unsafe code, especially + // when running under Miri. 
+ #[allow(clippy::unit_cmp)] + { + assert_eq!(s[1], ()); + } + s[2] = (); + } + + #[test] + fn test_new_box_zeroed_with_elems_zst_empty() { + let s: Box<[()]> = <[()]>::new_box_zeroed_with_elems(0).unwrap(); + assert_eq!(s.len(), 0); + } + + #[test] + fn new_box_zeroed_with_elems_errors() { + assert_eq!(<[u16]>::new_box_zeroed_with_elems(usize::MAX), Err(AllocError)); + + let max = <usize as core::convert::TryFrom<_>>::try_from(isize::MAX).unwrap(); + assert_eq!( + <[u16]>::new_box_zeroed_with_elems((max / mem::size_of::<u16>()) + 1), + Err(AllocError) + ); + } + } +} diff --git a/vendor/zerocopy/src/macros.rs b/vendor/zerocopy/src/macros.rs new file mode 100644 index 00000000..040efda7 --- /dev/null +++ b/vendor/zerocopy/src/macros.rs @@ -0,0 +1,1626 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under the 2-Clause BSD License <LICENSE-BSD or +// https://opensource.org/license/bsd-2-clause>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +/// Safely transmutes a value of one type to a value of another type of the same +/// size. +/// +/// This macro behaves like an invocation of this function: +/// +/// ```ignore +/// const fn transmute<Src, Dst>(src: Src) -> Dst +/// where +/// Src: IntoBytes, +/// Dst: FromBytes, +/// size_of::<Src>() == size_of::<Dst>(), +/// { +/// # /* +/// ... +/// # */ +/// } +/// ``` +/// +/// However, unlike a function, this macro can only be invoked when the types of +/// `Src` and `Dst` are completely concrete. The types `Src` and `Dst` are +/// inferred from the calling context; they cannot be explicitly specified in +/// the macro invocation. +/// +/// Note that the `Src` produced by the expression `$e` will *not* be dropped. +/// Semantically, its bits will be copied into a new value of type `Dst`, the +/// original `Src` will be forgotten, and the value of type `Dst` will be +/// returned. +/// +/// # `#![allow(shrink)]` +/// +/// If `#![allow(shrink)]` is provided, `transmute!` additionally supports +/// transmutations that shrink the size of the value; e.g.: +/// +/// ``` +/// # use zerocopy::transmute; +/// let u: u32 = transmute!(#![allow(shrink)] 0u64); +/// assert_eq!(u, 0u32); +/// ``` +/// +/// # Examples +/// +/// ``` +/// # use zerocopy::transmute; +/// let one_dimensional: [u8; 8] = [0, 1, 2, 3, 4, 5, 6, 7]; +/// +/// let two_dimensional: [[u8; 4]; 2] = transmute!(one_dimensional); +/// +/// assert_eq!(two_dimensional, [[0, 1, 2, 3], [4, 5, 6, 7]]); +/// ``` +/// +/// # Use in `const` contexts +/// +/// This macro can be invoked in `const` contexts. +#[macro_export] +macro_rules! transmute { + // NOTE: This must be a macro (rather than a function with trait bounds) + // because there's no way, in a generic context, to enforce that two types + // have the same size. `core::mem::transmute` uses compiler magic to enforce + // this so long as the types are concrete. + (#![allow(shrink)] $e:expr) => {{ + let mut e = $e; + if false { + // This branch, though never taken, ensures that the type of `e` is + // `IntoBytes` and that the type of the outer macro invocation + // expression is `FromBytes`. 
+ + fn transmute<Src, Dst>(src: Src) -> Dst + where + Src: $crate::IntoBytes, + Dst: $crate::FromBytes, + { + let _ = src; + loop {} + } + loop {} + #[allow(unreachable_code)] + transmute(e) + } else { + use $crate::util::macro_util::core_reexport::mem::ManuallyDrop; + + // NOTE: `repr(packed)` is important! It ensures that the size of + // `Transmute` won't be rounded up to accommodate `Src`'s or `Dst`'s + // alignment, which would break the size comparison logic below. + // + // As an example of why this is problematic, consider `Src = [u8; + // 5]`, `Dst = u32`. The total size of `Transmute<Src, Dst>` would + // be 8, and so we would reject a `[u8; 5]` to `u32` transmute as + // being size-increasing, which it isn't. + #[repr(C, packed)] + union Transmute<Src, Dst> { + src: ManuallyDrop<Src>, + dst: ManuallyDrop<Dst>, + } + + // SAFETY: `Transmute` is a `repr(C)` union whose `src` field has + // type `ManuallyDrop<Src>`. Thus, the `src` field starts at byte + // offset 0 within `Transmute` [1]. `ManuallyDrop<T>` has the same + // layout and bit validity as `T`, so it is sound to transmute `Src` + // to `Transmute`. + // + // [1] https://doc.rust-lang.org/1.85.0/reference/type-layout.html#reprc-unions + // + // [2] Per https://doc.rust-lang.org/1.85.0/std/mem/struct.ManuallyDrop.html: + // + // `ManuallyDrop<T>` is guaranteed to have the same layout and bit + // validity as `T` + let u: Transmute<_, _> = unsafe { + // Clippy: We can't annotate the types; this macro is designed + // to infer the types from the calling context. + #[allow(clippy::missing_transmute_annotations)] + $crate::util::macro_util::core_reexport::mem::transmute(e) + }; + + if false { + // SAFETY: This code is never executed. + e = ManuallyDrop::into_inner(unsafe { u.src }); + // Suppress the `unused_assignments` lint on the previous line. + let _ = e; + loop {} + } else { + // SAFETY: Per the safety comment on `let u` above, the `dst` + // field in `Transmute` starts at byte offset 0, and has the + // same layout and bit validity as `Dst`. + // + // Transmuting `Src` to `Transmute<Src, Dst>` above using + // `core::mem::transmute` ensures that `size_of::<Src>() == + // size_of::<Transmute<Src, Dst>>()`. A `#[repr(C, packed)]` + // union has the maximum size of all of its fields [1], so this + // is equivalent to `size_of::<Src>() >= size_of::<Dst>()`. + // + // The outer `if`'s `false` branch ensures that `Src: IntoBytes` + // and `Dst: FromBytes`. This, combined with the size bound, + // ensures that this transmute is sound. + // + // [1] Per https://doc.rust-lang.org/1.85.0/reference/type-layout.html#reprc-unions: + // + // The union will have a size of the maximum size of all of + // its fields rounded to its alignment + let dst = unsafe { u.dst }; + $crate::util::macro_util::must_use(ManuallyDrop::into_inner(dst)) + } + } + }}; + ($e:expr) => {{ + let e = $e; + if false { + // This branch, though never taken, ensures that the type of `e` is + // `IntoBytes` and that the type of the outer macro invocation + // expression is `FromBytes`. + + fn transmute<Src, Dst>(src: Src) -> Dst + where + Src: $crate::IntoBytes, + Dst: $crate::FromBytes, + { + let _ = src; + loop {} + } + loop {} + #[allow(unreachable_code)] + transmute(e) + } else { + // SAFETY: `core::mem::transmute` ensures that the type of `e` and + // the type of this macro invocation expression have the same size. + // We know this transmute is safe thanks to the `IntoBytes` and + // `FromBytes` bounds enforced by the `false` branch. 
+ let u = unsafe { + // Clippy: We can't annotate the types; this macro is designed + // to infer the types from the calling context. + #[allow(clippy::missing_transmute_annotations, unnecessary_transmutes)] + $crate::util::macro_util::core_reexport::mem::transmute(e) + }; + $crate::util::macro_util::must_use(u) + } + }}; +} + +/// Safely transmutes a mutable or immutable reference of one type to an +/// immutable reference of another type of the same size and compatible +/// alignment. +/// +/// This macro behaves like an invocation of this function: +/// +/// ```ignore +/// fn transmute_ref<'src, 'dst, Src, Dst>(src: &'src Src) -> &'dst Dst +/// where +/// 'src: 'dst, +/// Src: IntoBytes + Immutable + ?Sized, +/// Dst: FromBytes + Immutable + ?Sized, +/// align_of::<Src>() >= align_of::<Dst>(), +/// size_compatible::<Src, Dst>(), +/// { +/// # /* +/// ... +/// # */ +/// } +/// ``` +/// +/// The types `Src` and `Dst` are inferred from the calling context; they cannot +/// be explicitly specified in the macro invocation. +/// +/// # Size compatibility +/// +/// `transmute_ref!` supports transmuting between `Sized` types or between +/// unsized (i.e., `?Sized`) types. It supports any transmutation that preserves +/// the number of bytes of the referent, even if doing so requires updating the +/// metadata stored in an unsized "fat" reference: +/// +/// ``` +/// # use zerocopy::transmute_ref; +/// # use core::mem::size_of_val; // Not in the prelude on our MSRV +/// let src: &[[u8; 2]] = &[[0, 1], [2, 3]][..]; +/// let dst: &[u8] = transmute_ref!(src); +/// +/// assert_eq!(src.len(), 2); +/// assert_eq!(dst.len(), 4); +/// assert_eq!(dst, [0, 1, 2, 3]); +/// assert_eq!(size_of_val(src), size_of_val(dst)); +/// ``` +/// +/// # Errors +/// +/// Violations of the alignment and size compatibility checks are detected +/// *after* the compiler performs monomorphization. This has two important +/// consequences. +/// +/// First, it means that generic code will *never* fail these conditions: +/// +/// ``` +/// # use zerocopy::{transmute_ref, FromBytes, IntoBytes, Immutable}; +/// fn transmute_ref<Src, Dst>(src: &Src) -> &Dst +/// where +/// Src: IntoBytes + Immutable, +/// Dst: FromBytes + Immutable, +/// { +/// transmute_ref!(src) +/// } +/// ``` +/// +/// Instead, failures will only be detected once generic code is instantiated +/// with concrete types: +/// +/// ```compile_fail,E0080 +/// # use zerocopy::{transmute_ref, FromBytes, IntoBytes, Immutable}; +/// # +/// # fn transmute_ref<Src, Dst>(src: &Src) -> &Dst +/// # where +/// # Src: IntoBytes + Immutable, +/// # Dst: FromBytes + Immutable, +/// # { +/// # transmute_ref!(src) +/// # } +/// let src: &u16 = &0; +/// let dst: &u8 = transmute_ref(src); +/// ``` +/// +/// Second, the fact that violations are detected after monomorphization means +/// that `cargo check` will usually not detect errors, even when types are +/// concrete. Instead, `cargo build` must be used to detect such errors. 
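A minimal concrete sketch of the alignment rule these checks enforce (the destination's alignment may be equal or lower, never higher); the reverse conversion is exactly the kind of error that only surfaces once `cargo build` monomorphizes the call:

```rust
use zerocopy::transmute_ref;

fn main() {
    // Allowed: align_of::<u32>() >= align_of::<[u8; 4]>() and the sizes match.
    let src: &u32 = &0x0102_0304;
    let dst: &[u8; 4] = transmute_ref!(src);
    assert_eq!(dst, &0x0102_0304u32.to_ne_bytes());

    // The opposite direction, `&[u8; 4]` to `&u32`, would increase alignment
    // and is rejected at build time, as described above.
}
```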
+/// +/// # Examples +/// +/// Transmuting between `Sized` types: +/// +/// ``` +/// # use zerocopy::transmute_ref; +/// let one_dimensional: [u8; 8] = [0, 1, 2, 3, 4, 5, 6, 7]; +/// +/// let two_dimensional: &[[u8; 4]; 2] = transmute_ref!(&one_dimensional); +/// +/// assert_eq!(two_dimensional, &[[0, 1, 2, 3], [4, 5, 6, 7]]); +/// ``` +/// +/// Transmuting between unsized types: +/// +/// ``` +/// # use {zerocopy::*, zerocopy_derive::*}; +/// # type u16 = zerocopy::byteorder::native_endian::U16; +/// # type u32 = zerocopy::byteorder::native_endian::U32; +/// #[derive(KnownLayout, FromBytes, IntoBytes, Immutable)] +/// #[repr(C)] +/// struct SliceDst<T, U> { +/// t: T, +/// u: [U], +/// } +/// +/// type Src = SliceDst<u32, u16>; +/// type Dst = SliceDst<u16, u8>; +/// +/// let src = Src::ref_from_bytes(&[0, 1, 2, 3, 4, 5, 6, 7]).unwrap(); +/// let dst: &Dst = transmute_ref!(src); +/// +/// assert_eq!(src.t.as_bytes(), [0, 1, 2, 3]); +/// assert_eq!(src.u.len(), 2); +/// assert_eq!(src.u.as_bytes(), [4, 5, 6, 7]); +/// +/// assert_eq!(dst.t.as_bytes(), [0, 1]); +/// assert_eq!(dst.u, [2, 3, 4, 5, 6, 7]); +/// ``` +/// +/// # Use in `const` contexts +/// +/// This macro can be invoked in `const` contexts only when `Src: Sized` and +/// `Dst: Sized`. +#[macro_export] +macro_rules! transmute_ref { + ($e:expr) => {{ + // NOTE: This must be a macro (rather than a function with trait bounds) + // because there's no way, in a generic context, to enforce that two + // types have the same size or alignment. + + // Ensure that the source type is a reference or a mutable reference + // (note that mutable references are implicitly reborrowed here). + let e: &_ = $e; + + #[allow(unused, clippy::diverging_sub_expression)] + if false { + // This branch, though never taken, ensures that the type of `e` is + // `&T` where `T: IntoBytes + Immutable`, and that the type of this + // macro expression is `&U` where `U: FromBytes + Immutable`. + + struct AssertSrcIsIntoBytes<'a, T: ?::core::marker::Sized + $crate::IntoBytes>(&'a T); + struct AssertSrcIsImmutable<'a, T: ?::core::marker::Sized + $crate::Immutable>(&'a T); + struct AssertDstIsFromBytes<'a, U: ?::core::marker::Sized + $crate::FromBytes>(&'a U); + struct AssertDstIsImmutable<'a, T: ?::core::marker::Sized + $crate::Immutable>(&'a T); + + let _ = AssertSrcIsIntoBytes(e); + let _ = AssertSrcIsImmutable(e); + + if true { + #[allow(unused, unreachable_code)] + let u = AssertDstIsFromBytes(loop {}); + u.0 + } else { + #[allow(unused, unreachable_code)] + let u = AssertDstIsImmutable(loop {}); + u.0 + } + } else { + use $crate::util::macro_util::TransmuteRefDst; + let t = $crate::util::macro_util::Wrap::new(e); + // SAFETY: The `if false` branch ensures that: + // - `Src: IntoBytes + Immutable` + // - `Dst: FromBytes + Immutable` + unsafe { + t.transmute_ref() + } + } + }} +} + +/// Safely transmutes a mutable reference of one type to a mutable reference of +/// another type of the same size and compatible alignment. +/// +/// This macro behaves like an invocation of this function: +/// +/// ```ignore +/// const fn transmute_mut<'src, 'dst, Src, Dst>(src: &'src mut Src) -> &'dst mut Dst +/// where +/// 'src: 'dst, +/// Src: FromBytes + IntoBytes, +/// Dst: FromBytes + IntoBytes, +/// align_of::<Src>() >= align_of::<Dst>(), +/// size_compatible::<Src, Dst>(), +/// { +/// # /* +/// ... +/// # */ +/// } +/// ``` +/// +/// The types `Src` and `Dst` are inferred from the calling context; they cannot +/// be explicitly specified in the macro invocation. 
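Note that, unlike `transmute_ref!`, the source type here must also be `FromBytes`: any bytes written through the returned `Dst` reference are later read back as a `Src`. A minimal sketch:

```rust
use zerocopy::transmute_mut;

fn main() {
    let mut src: u32 = 0;
    let dst: &mut [u8; 4] = transmute_mut!(&mut src);
    // Every byte pattern written through `dst` must be a valid `u32`, which is
    // why `u32: FromBytes` is part of the contract.
    dst.fill(0xFF);
    assert_eq!(src, u32::MAX);
}
```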
+/// +/// # Size compatibility +/// +/// `transmute_mut!` supports transmuting between `Sized` types or between +/// unsized (i.e., `?Sized`) types. It supports any transmutation that preserves +/// the number of bytes of the referent, even if doing so requires updating the +/// metadata stored in an unsized "fat" reference: +/// +/// ``` +/// # use zerocopy::transmute_mut; +/// # use core::mem::size_of_val; // Not in the prelude on our MSRV +/// let src: &mut [[u8; 2]] = &mut [[0, 1], [2, 3]][..]; +/// let dst: &mut [u8] = transmute_mut!(src); +/// +/// assert_eq!(dst.len(), 4); +/// assert_eq!(dst, [0, 1, 2, 3]); +/// let dst_size = size_of_val(dst); +/// assert_eq!(src.len(), 2); +/// assert_eq!(size_of_val(src), dst_size); +/// ``` +/// +/// # Errors +/// +/// Violations of the alignment and size compatibility checks are detected +/// *after* the compiler performs monomorphization. This has two important +/// consequences. +/// +/// First, it means that generic code will *never* fail these conditions: +/// +/// ``` +/// # use zerocopy::{transmute_mut, FromBytes, IntoBytes, Immutable}; +/// fn transmute_mut<Src, Dst>(src: &mut Src) -> &mut Dst +/// where +/// Src: FromBytes + IntoBytes, +/// Dst: FromBytes + IntoBytes, +/// { +/// transmute_mut!(src) +/// } +/// ``` +/// +/// Instead, failures will only be detected once generic code is instantiated +/// with concrete types: +/// +/// ```compile_fail,E0080 +/// # use zerocopy::{transmute_mut, FromBytes, IntoBytes, Immutable}; +/// # +/// # fn transmute_mut<Src, Dst>(src: &mut Src) -> &mut Dst +/// # where +/// # Src: FromBytes + IntoBytes, +/// # Dst: FromBytes + IntoBytes, +/// # { +/// # transmute_mut!(src) +/// # } +/// let src: &mut u16 = &mut 0; +/// let dst: &mut u8 = transmute_mut(src); +/// ``` +/// +/// Second, the fact that violations are detected after monomorphization means +/// that `cargo check` will usually not detect errors, even when types are +/// concrete. Instead, `cargo build` must be used to detect such errors. +/// +/// +/// # Examples +/// +/// Transmuting between `Sized` types: +/// +/// ``` +/// # use zerocopy::transmute_mut; +/// let mut one_dimensional: [u8; 8] = [0, 1, 2, 3, 4, 5, 6, 7]; +/// +/// let two_dimensional: &mut [[u8; 4]; 2] = transmute_mut!(&mut one_dimensional); +/// +/// assert_eq!(two_dimensional, &[[0, 1, 2, 3], [4, 5, 6, 7]]); +/// +/// two_dimensional.reverse(); +/// +/// assert_eq!(one_dimensional, [4, 5, 6, 7, 0, 1, 2, 3]); +/// ``` +/// +/// Transmuting between unsized types: +/// +/// ``` +/// # use {zerocopy::*, zerocopy_derive::*}; +/// # type u16 = zerocopy::byteorder::native_endian::U16; +/// # type u32 = zerocopy::byteorder::native_endian::U32; +/// #[derive(KnownLayout, FromBytes, IntoBytes, Immutable)] +/// #[repr(C)] +/// struct SliceDst<T, U> { +/// t: T, +/// u: [U], +/// } +/// +/// type Src = SliceDst<u32, u16>; +/// type Dst = SliceDst<u16, u8>; +/// +/// let mut bytes = [0, 1, 2, 3, 4, 5, 6, 7]; +/// let src = Src::mut_from_bytes(&mut bytes[..]).unwrap(); +/// let dst: &mut Dst = transmute_mut!(src); +/// +/// assert_eq!(dst.t.as_bytes(), [0, 1]); +/// assert_eq!(dst.u, [2, 3, 4, 5, 6, 7]); +/// +/// assert_eq!(src.t.as_bytes(), [0, 1, 2, 3]); +/// assert_eq!(src.u.len(), 2); +/// assert_eq!(src.u.as_bytes(), [4, 5, 6, 7]); +/// +/// ``` +#[macro_export] +macro_rules! 
transmute_mut { + ($e:expr) => {{ + // NOTE: This must be a macro (rather than a function with trait bounds) + // because, for backwards-compatibility on v0.8.x, we use the autoref + // specialization trick to dispatch to different `transmute_mut` + // implementations: one which doesn't require `Src: KnownLayout + Dst: + // KnownLayout` when `Src: Sized + Dst: Sized`, and one which requires + // `KnownLayout` bounds otherwise. + + // Ensure that the source type is a mutable reference. + let e: &mut _ = $e; + + #[allow(unused)] + use $crate::util::macro_util::TransmuteMutDst as _; + let t = $crate::util::macro_util::Wrap::new(e); + t.transmute_mut() + }} +} + +/// Conditionally transmutes a value of one type to a value of another type of +/// the same size. +/// +/// This macro behaves like an invocation of this function: +/// +/// ```ignore +/// fn try_transmute<Src, Dst>(src: Src) -> Result<Dst, ValidityError<Src, Dst>> +/// where +/// Src: IntoBytes, +/// Dst: TryFromBytes, +/// size_of::<Src>() == size_of::<Dst>(), +/// { +/// # /* +/// ... +/// # */ +/// } +/// ``` +/// +/// However, unlike a function, this macro can only be invoked when the types of +/// `Src` and `Dst` are completely concrete. The types `Src` and `Dst` are +/// inferred from the calling context; they cannot be explicitly specified in +/// the macro invocation. +/// +/// Note that the `Src` produced by the expression `$e` will *not* be dropped. +/// Semantically, its bits will be copied into a new value of type `Dst`, the +/// original `Src` will be forgotten, and the value of type `Dst` will be +/// returned. +/// +/// # Examples +/// +/// ``` +/// # use zerocopy::*; +/// // 0u8 → bool = false +/// assert_eq!(try_transmute!(0u8), Ok(false)); +/// +/// // 1u8 → bool = true +/// assert_eq!(try_transmute!(1u8), Ok(true)); +/// +/// // 2u8 → bool = error +/// assert!(matches!( +/// try_transmute!(2u8), +/// Result::<bool, _>::Err(ValidityError { .. }) +/// )); +/// ``` +#[macro_export] +macro_rules! try_transmute { + ($e:expr) => {{ + // NOTE: This must be a macro (rather than a function with trait bounds) + // because there's no way, in a generic context, to enforce that two + // types have the same size. `core::mem::transmute` uses compiler magic + // to enforce this so long as the types are concrete. + + let e = $e; + if false { + // Check that the sizes of the source and destination types are + // equal. + + // SAFETY: This code is never executed. + Ok(unsafe { + // Clippy: We can't annotate the types; this macro is designed + // to infer the types from the calling context. + #[allow(clippy::missing_transmute_annotations)] + $crate::util::macro_util::core_reexport::mem::transmute(e) + }) + } else { + $crate::util::macro_util::try_transmute::<_, _>(e) + } + }} +} + +/// Conditionally transmutes a mutable or immutable reference of one type to an +/// immutable reference of another type of the same size and compatible +/// alignment. +/// +/// This macro behaves like an invocation of this function: +/// +/// ```ignore +/// fn try_transmute_ref<Src, Dst>(src: &Src) -> Result<&Dst, ValidityError<&Src, Dst>> +/// where +/// Src: IntoBytes + Immutable, +/// Dst: TryFromBytes + Immutable, +/// size_of::<Src>() == size_of::<Dst>(), +/// align_of::<Src>() >= align_of::<Dst>(), +/// { +/// # /* +/// ... +/// # */ +/// } +/// ``` +/// +/// However, unlike a function, this macro can only be invoked when the types of +/// `Src` and `Dst` are completely concrete. 
The types `Src` and `Dst` are +/// inferred from the calling context; they cannot be explicitly specified in +/// the macro invocation. +/// +/// # Examples +/// +/// ``` +/// # use zerocopy::*; +/// // 0u8 → bool = false +/// assert_eq!(try_transmute_ref!(&0u8), Ok(&false)); +/// +/// // 1u8 → bool = true +/// assert_eq!(try_transmute_ref!(&1u8), Ok(&true)); +/// +/// // 2u8 → bool = error +/// assert!(matches!( +/// try_transmute_ref!(&2u8), +/// Result::<&bool, _>::Err(ValidityError { .. }) +/// )); +/// ``` +/// +/// # Alignment increase error message +/// +/// Because of limitations on macros, the error message generated when +/// `try_transmute_ref!` is used to transmute from a type of lower alignment to +/// a type of higher alignment is somewhat confusing. For example, the following +/// code: +/// +/// ```compile_fail +/// let increase_alignment: Result<&u16, _> = zerocopy::try_transmute_ref!(&[0u8; 2]); +/// ``` +/// +/// ...generates the following error: +/// +/// ```text +/// error[E0512]: cannot transmute between types of different sizes, or dependently-sized types +/// --> example.rs:1:47 +/// | +/// 1 | let increase_alignment: Result<&u16, _> = zerocopy::try_transmute_ref!(&[0u8; 2]); +/// | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +/// | +/// = note: source type: `AlignOf<[u8; 2]>` (8 bits) +/// = note: target type: `MaxAlignsOf<[u8; 2], u16>` (16 bits) +/// = note: this error originates in the macro `$crate::assert_align_gt_eq` which comes from the expansion of the macro `zerocopy::try_transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info)/// ``` +/// ``` +/// +/// This is saying that `max(align_of::<T>(), align_of::<U>()) != +/// align_of::<T>()`, which is equivalent to `align_of::<T>() < +/// align_of::<U>()`. +#[macro_export] +macro_rules! try_transmute_ref { + ($e:expr) => {{ + // NOTE: This must be a macro (rather than a function with trait bounds) + // because there's no way, in a generic context, to enforce that two + // types have the same size. `core::mem::transmute` uses compiler magic + // to enforce this so long as the types are concrete. + + // Ensure that the source type is a reference or a mutable reference + // (note that mutable references are implicitly reborrowed here). + let e: &_ = $e; + + #[allow(unreachable_code, unused, clippy::diverging_sub_expression)] + if false { + // This branch, though never taken, ensures that `size_of::<T>() == + // size_of::<U>()` and that that `align_of::<T>() >= + // align_of::<U>()`. + + // `t` is inferred to have type `T` because it's assigned to `e` (of + // type `&T`) as `&t`. + let mut t = loop {}; + e = &t; + + // `u` is inferred to have type `U` because it's used as `Ok(&u)` as + // the value returned from this branch. + let u; + + $crate::assert_size_eq!(t, u); + $crate::assert_align_gt_eq!(t, u); + + Ok(&u) + } else { + $crate::util::macro_util::try_transmute_ref::<_, _>(e) + } + }} +} + +/// Conditionally transmutes a mutable reference of one type to a mutable +/// reference of another type of the same size and compatible alignment. +/// +/// This macro behaves like an invocation of this function: +/// +/// ```ignore +/// fn try_transmute_mut<Src, Dst>(src: &mut Src) -> Result<&mut Dst, ValidityError<&mut Src, Dst>> +/// where +/// Src: FromBytes + IntoBytes, +/// Dst: TryFromBytes + IntoBytes, +/// size_of::<Src>() == size_of::<Dst>(), +/// align_of::<Src>() >= align_of::<Dst>(), +/// { +/// # /* +/// ... 
+/// # */ +/// } +/// ``` +/// +/// However, unlike a function, this macro can only be invoked when the types of +/// `Src` and `Dst` are completely concrete. The types `Src` and `Dst` are +/// inferred from the calling context; they cannot be explicitly specified in +/// the macro invocation. +/// +/// # Examples +/// +/// ``` +/// # use zerocopy::*; +/// // 0u8 → bool = false +/// let src = &mut 0u8; +/// assert_eq!(try_transmute_mut!(src), Ok(&mut false)); +/// +/// // 1u8 → bool = true +/// let src = &mut 1u8; +/// assert_eq!(try_transmute_mut!(src), Ok(&mut true)); +/// +/// // 2u8 → bool = error +/// let src = &mut 2u8; +/// assert!(matches!( +/// try_transmute_mut!(src), +/// Result::<&mut bool, _>::Err(ValidityError { .. }) +/// )); +/// ``` +/// +/// # Alignment increase error message +/// +/// Because of limitations on macros, the error message generated when +/// `try_transmute_mut!` is used to transmute from a type of lower alignment to +/// a type of higher alignment is somewhat confusing. For example, the following +/// code: +/// +/// ```compile_fail +/// let src = &mut [0u8; 2]; +/// let increase_alignment: Result<&mut u16, _> = zerocopy::try_transmute_mut!(src); +/// ``` +/// +/// ...generates the following error: +/// +/// ```text +/// error[E0512]: cannot transmute between types of different sizes, or dependently-sized types +/// --> example.rs:2:51 +/// | +/// 2 | let increase_alignment: Result<&mut u16, _> = zerocopy::try_transmute_mut!(src); +/// | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +/// | +/// = note: source type: `AlignOf<[u8; 2]>` (8 bits) +/// = note: target type: `MaxAlignsOf<[u8; 2], u16>` (16 bits) +/// = note: this error originates in the macro `$crate::assert_align_gt_eq` which comes from the expansion of the macro `zerocopy::try_transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) +/// ``` +/// +/// This is saying that `max(align_of::<T>(), align_of::<U>()) != +/// align_of::<T>()`, which is equivalent to `align_of::<T>() < +/// align_of::<U>()`. +#[macro_export] +macro_rules! try_transmute_mut { + ($e:expr) => {{ + // NOTE: This must be a macro (rather than a function with trait bounds) + // because there's no way, in a generic context, to enforce that two + // types have the same size. `core::mem::transmute` uses compiler magic + // to enforce this so long as the types are concrete. + + // Ensure that the source type is a mutable reference. + let e: &mut _ = $e; + + #[allow(unreachable_code, unused, clippy::diverging_sub_expression)] + if false { + // This branch, though never taken, ensures that `size_of::<T>() == + // size_of::<U>()` and that `align_of::<T>() >= + // align_of::<U>()`. + + // `t` is inferred to have type `T` because it's assigned to `e` (of + // type `&mut T`) as `&mut t`. + let mut t = loop {}; + e = &mut t; + + // `u` is inferred to have type `U` because it's used as `Ok(&mut + // u)` as the value returned from this branch. + let u; + + $crate::assert_size_eq!(t, u); + $crate::assert_align_gt_eq!(t, u); + + Ok(&mut u) + } else { + $crate::util::macro_util::try_transmute_mut::<_, _>(e) + } + }} +} + +/// Includes a file and safely transmutes it to a value of an arbitrary type. +/// +/// The file will be included as a byte array, `[u8; N]`, which will be +/// transmuted to another type, `T`. `T` is inferred from the calling context, +/// and must implement [`FromBytes`]. +/// +/// The file is located relative to the current file (similarly to how modules +/// are found).
The provided path is interpreted in a platform-specific way at +/// compile time. So, for instance, an invocation with a Windows path containing +/// backslashes `\` would not compile correctly on Unix. +/// +/// `include_value!` is ignorant of byte order. For byte order-aware types, see +/// the [`byteorder`] module. +/// +/// [`FromBytes`]: crate::FromBytes +/// [`byteorder`]: crate::byteorder +/// +/// # Examples +/// +/// Assume there are two files in the same directory with the following +/// contents: +/// +/// File `data` (no trailing newline): +/// +/// ```text +/// abcd +/// ``` +/// +/// File `main.rs`: +/// +/// ```rust +/// use zerocopy::include_value; +/// # macro_rules! include_value { +/// # ($file:expr) => { zerocopy::include_value!(concat!("../testdata/include_value/", $file)) }; +/// # } +/// +/// fn main() { +/// let as_u32: u32 = include_value!("data"); +/// assert_eq!(as_u32, u32::from_ne_bytes([b'a', b'b', b'c', b'd'])); +/// let as_i32: i32 = include_value!("data"); +/// assert_eq!(as_i32, i32::from_ne_bytes([b'a', b'b', b'c', b'd'])); +/// } +/// ``` +/// +/// # Use in `const` contexts +/// +/// This macro can be invoked in `const` contexts. +#[doc(alias("include_bytes", "include_data", "include_type"))] +#[macro_export] +macro_rules! include_value { + ($file:expr $(,)?) => { + $crate::transmute!(*::core::include_bytes!($file)) + }; +} + +#[doc(hidden)] +#[macro_export] +macro_rules! cryptocorrosion_derive_traits { + ( + #[repr($repr:ident)] + $(#[$attr:meta])* + $vis:vis struct $name:ident $(<$($tyvar:ident),*>)? + $( + ( + $($tuple_field_vis:vis $tuple_field_ty:ty),* + ); + )? + + $( + { + $($field_vis:vis $field_name:ident: $field_ty:ty,)* + } + )? + ) => { + $crate::cryptocorrosion_derive_traits!(@assert_allowed_struct_repr #[repr($repr)]); + + $(#[$attr])* + #[repr($repr)] + $vis struct $name $(<$($tyvar),*>)? + $( + ( + $($tuple_field_vis $tuple_field_ty),* + ); + )? + + $( + { + $($field_vis $field_name: $field_ty,)* + } + )? + + // SAFETY: See inline. + unsafe impl $(<$($tyvar),*>)? $crate::TryFromBytes for $name$(<$($tyvar),*>)? + where + $( + $($tuple_field_ty: $crate::FromBytes,)* + )? + + $( + $($field_ty: $crate::FromBytes,)* + )? + { + fn is_bit_valid<A>(_c: $crate::Maybe<'_, Self, A>) -> bool + where + A: $crate::pointer::invariant::Reference + { + // SAFETY: This macro only accepts `#[repr(C)]` and + // `#[repr(transparent)]` structs, and this `impl` block + // requires all field types to be `FromBytes`. Thus, all + // initialized byte sequences constitutes valid instances of + // `Self`. + true + } + + fn only_derive_is_allowed_to_implement_this_trait() {} + } + + // SAFETY: This macro only accepts `#[repr(C)]` and + // `#[repr(transparent)]` structs, and this `impl` block requires all + // field types to be `FromBytes`, which is a sub-trait of `FromZeros`. + unsafe impl $(<$($tyvar),*>)? $crate::FromZeros for $name$(<$($tyvar),*>)? + where + $( + $($tuple_field_ty: $crate::FromBytes,)* + )? + + $( + $($field_ty: $crate::FromBytes,)* + )? + { + fn only_derive_is_allowed_to_implement_this_trait() {} + } + + // SAFETY: This macro only accepts `#[repr(C)]` and + // `#[repr(transparent)]` structs, and this `impl` block requires all + // field types to be `FromBytes`. + unsafe impl $(<$($tyvar),*>)? $crate::FromBytes for $name$(<$($tyvar),*>)? + where + $( + $($tuple_field_ty: $crate::FromBytes,)* + )? + + $( + $($field_ty: $crate::FromBytes,)* + )? 
+ { + fn only_derive_is_allowed_to_implement_this_trait() {} + } + + // SAFETY: This macro only accepts `#[repr(C)]` and + // `#[repr(transparent)]` structs, this `impl` block requires all field + // types to be `IntoBytes`, and a padding check is used to ensure that + // there are no padding bytes. + unsafe impl $(<$($tyvar),*>)? $crate::IntoBytes for $name$(<$($tyvar),*>)? + where + $( + $($tuple_field_ty: $crate::IntoBytes,)* + )? + + $( + $($field_ty: $crate::IntoBytes,)* + )? + + (): $crate::util::macro_util::PaddingFree< + Self, + { + $crate::cryptocorrosion_derive_traits!( + @struct_padding_check #[repr($repr)] + $(($($tuple_field_ty),*))? + $({$($field_ty),*})? + ) + }, + >, + { + fn only_derive_is_allowed_to_implement_this_trait() {} + } + + // SAFETY: This macro only accepts `#[repr(C)]` and + // `#[repr(transparent)]` structs, and this `impl` block requires all + // field types to be `Immutable`. + unsafe impl $(<$($tyvar),*>)? $crate::Immutable for $name$(<$($tyvar),*>)? + where + $( + $($tuple_field_ty: $crate::Immutable,)* + )? + + $( + $($field_ty: $crate::Immutable,)* + )? + { + fn only_derive_is_allowed_to_implement_this_trait() {} + } + }; + (@assert_allowed_struct_repr #[repr(transparent)]) => {}; + (@assert_allowed_struct_repr #[repr(C)]) => {}; + (@assert_allowed_struct_repr #[$_attr:meta]) => { + compile_error!("repr must be `#[repr(transparent)]` or `#[repr(C)]`"); + }; + ( + @struct_padding_check #[repr(transparent)] + $(($($tuple_field_ty:ty),*))? + $({$($field_ty:ty),*})? + ) => { + // SAFETY: `#[repr(transparent)]` structs are guaranteed to have the same layout as + // their single non-zero-sized field, and so cannot have any padding + // outside of that field. + 0 + }; + ( + @struct_padding_check #[repr(C)] + $(($($tuple_field_ty:ty),*))? + $({$($field_ty:ty),*})? + ) => { + $crate::struct_padding!( + Self, + [ + $($($tuple_field_ty),*)? + $($($field_ty),*)? + ] + ) + }; + ( + #[repr(C)] + $(#[$attr:meta])* + $vis:vis union $name:ident { + $( + $field_name:ident: $field_ty:ty, + )* + } + ) => { + $(#[$attr])* + #[repr(C)] + $vis union $name { + $( + $field_name: $field_ty, + )* + } + + // SAFETY: See inline. + unsafe impl $crate::TryFromBytes for $name + where + $( + $field_ty: $crate::FromBytes, + )* + { + fn is_bit_valid<A>(_c: $crate::Maybe<'_, Self, A>) -> bool + where + A: $crate::pointer::invariant::Reference + { + // SAFETY: This macro only accepts `#[repr(C)]` unions, and this + // `impl` block requires all field types to be `FromBytes`. + // Thus, all initialized byte sequences constitute valid + // instances of `Self`. + true + } + + fn only_derive_is_allowed_to_implement_this_trait() {} + } + + // SAFETY: This macro only accepts `#[repr(C)]` unions, and this `impl` + // block requires all field types to be `FromBytes`, which is a + // sub-trait of `FromZeros`. + unsafe impl $crate::FromZeros for $name + where + $( + $field_ty: $crate::FromBytes, + )* + { + fn only_derive_is_allowed_to_implement_this_trait() {} + } + + // SAFETY: This macro only accepts `#[repr(C)]` unions, and this `impl` + // block requires all field types to be `FromBytes`. + unsafe impl $crate::FromBytes for $name + where + $( + $field_ty: $crate::FromBytes, + )* + { + fn only_derive_is_allowed_to_implement_this_trait() {} + } + + // SAFETY: This macro only accepts `#[repr(C)]` unions, this `impl` + // block requires all field types to be `IntoBytes`, and a padding check + // is used to ensure that there are no padding bytes before or after + // any field.
+ unsafe impl $crate::IntoBytes for $name + where + $( + $field_ty: $crate::IntoBytes, + )* + (): $crate::util::macro_util::PaddingFree< + Self, + { + $crate::union_padding!( + Self, + [$($field_ty),*] + ) + }, + >, + { + fn only_derive_is_allowed_to_implement_this_trait() {} + } + + // SAFETY: This macro only accepts `#[repr(C)]` unions, and this `impl` + // block requires all field types to be `Immutable`. + unsafe impl $crate::Immutable for $name + where + $( + $field_ty: $crate::Immutable, + )* + { + fn only_derive_is_allowed_to_implement_this_trait() {} + } + }; +} + +#[cfg(test)] +mod tests { + use crate::{ + byteorder::native_endian::{U16, U32}, + util::testutil::*, + *, + }; + + #[derive(KnownLayout, Immutable, FromBytes, IntoBytes, PartialEq, Debug)] + #[repr(C)] + struct SliceDst<T, U> { + a: T, + b: [U], + } + + #[test] + fn test_transmute() { + // Test that memory is transmuted as expected. + let array_of_u8s = [0u8, 1, 2, 3, 4, 5, 6, 7]; + let array_of_arrays = [[0, 1], [2, 3], [4, 5], [6, 7]]; + let x: [[u8; 2]; 4] = transmute!(array_of_u8s); + assert_eq!(x, array_of_arrays); + let x: [u8; 8] = transmute!(array_of_arrays); + assert_eq!(x, array_of_u8s); + + // Test that memory is transmuted as expected when shrinking. + let x: [[u8; 2]; 3] = transmute!(#![allow(shrink)] array_of_u8s); + assert_eq!(x, [[0u8, 1], [2, 3], [4, 5]]); + + // Test that the source expression's value is forgotten rather than + // dropped. + #[derive(IntoBytes)] + #[repr(transparent)] + struct PanicOnDrop(()); + impl Drop for PanicOnDrop { + fn drop(&mut self) { + panic!("PanicOnDrop::drop"); + } + } + #[allow(clippy::let_unit_value)] + let _: () = transmute!(PanicOnDrop(())); + #[allow(clippy::let_unit_value)] + let _: () = transmute!(#![allow(shrink)] PanicOnDrop(())); + + // Test that `transmute!` is legal in a const context. + const ARRAY_OF_U8S: [u8; 8] = [0u8, 1, 2, 3, 4, 5, 6, 7]; + const ARRAY_OF_ARRAYS: [[u8; 2]; 4] = [[0, 1], [2, 3], [4, 5], [6, 7]]; + const X: [[u8; 2]; 4] = transmute!(ARRAY_OF_U8S); + assert_eq!(X, ARRAY_OF_ARRAYS); + const X_SHRINK: [[u8; 2]; 3] = transmute!(#![allow(shrink)] ARRAY_OF_U8S); + assert_eq!(X_SHRINK, [[0u8, 1], [2, 3], [4, 5]]); + + // Test that `transmute!` works with `!Immutable` types. + let x: usize = transmute!(UnsafeCell::new(1usize)); + assert_eq!(x, 1); + let x: UnsafeCell<usize> = transmute!(1usize); + assert_eq!(x.into_inner(), 1); + let x: UnsafeCell<isize> = transmute!(UnsafeCell::new(1usize)); + assert_eq!(x.into_inner(), 1); + } + + // A `Sized` type which doesn't implement `KnownLayout` (it is "not + // `KnownLayout`", or `Nkl`). + // + // This permits us to test that `transmute_ref!` and `transmute_mut!` work + // for types which are `Sized + !KnownLayout`. When we added support for + // slice DSTs in #1924, this new support relied on `KnownLayout`, but we + // need to make sure to remain backwards-compatible with code which uses + // these macros with types which are `!KnownLayout`. + #[derive(FromBytes, IntoBytes, Immutable, PartialEq, Eq, Debug)] + #[repr(transparent)] + struct Nkl<T>(T); + + #[test] + fn test_transmute_ref() { + // Test that memory is transmuted as expected. + let array_of_u8s = [0u8, 1, 2, 3, 4, 5, 6, 7]; + let array_of_arrays = [[0, 1], [2, 3], [4, 5], [6, 7]]; + let x: &[[u8; 2]; 4] = transmute_ref!(&array_of_u8s); + assert_eq!(*x, array_of_arrays); + let x: &[u8; 8] = transmute_ref!(&array_of_arrays); + assert_eq!(*x, array_of_u8s); + + // Test that `transmute_ref!` is legal in a const context. 
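+ // The `const` items below would fail to compile if the expansion of + // `transmute_ref!` used any operation that is not permitted in `const` + // evaluation.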
+ const ARRAY_OF_U8S: [u8; 8] = [0u8, 1, 2, 3, 4, 5, 6, 7]; + const ARRAY_OF_ARRAYS: [[u8; 2]; 4] = [[0, 1], [2, 3], [4, 5], [6, 7]]; + #[allow(clippy::redundant_static_lifetimes)] + const X: &'static [[u8; 2]; 4] = transmute_ref!(&ARRAY_OF_U8S); + assert_eq!(*X, ARRAY_OF_ARRAYS); + + // Before 1.61.0, we can't define the `const fn transmute_ref` function + // that we do on and after 1.61.0. + #[cfg(no_zerocopy_generic_bounds_in_const_fn_1_61_0)] + { + // Test that `transmute_ref!` supports non-`KnownLayout` `Sized` + // types. + const ARRAY_OF_NKL_U8S: Nkl<[u8; 8]> = Nkl([0u8, 1, 2, 3, 4, 5, 6, 7]); + const ARRAY_OF_NKL_ARRAYS: Nkl<[[u8; 2]; 4]> = Nkl([[0, 1], [2, 3], [4, 5], [6, 7]]); + const X_NKL: &Nkl<[[u8; 2]; 4]> = transmute_ref!(&ARRAY_OF_NKL_U8S); + assert_eq!(*X_NKL, ARRAY_OF_NKL_ARRAYS); + } + + #[cfg(not(no_zerocopy_generic_bounds_in_const_fn_1_61_0))] + { + // Call through a generic function to make sure our autoref + // specialization trick works even when types are generic. + const fn transmute_ref<T, U>(t: &T) -> &U + where + T: IntoBytes + Immutable, + U: FromBytes + Immutable, + { + transmute_ref!(t) + } + + // Test that `transmute_ref!` supports non-`KnownLayout` `Sized` + // types. + const ARRAY_OF_NKL_U8S: Nkl<[u8; 8]> = Nkl([0u8, 1, 2, 3, 4, 5, 6, 7]); + const ARRAY_OF_NKL_ARRAYS: Nkl<[[u8; 2]; 4]> = Nkl([[0, 1], [2, 3], [4, 5], [6, 7]]); + const X_NKL: &Nkl<[[u8; 2]; 4]> = transmute_ref(&ARRAY_OF_NKL_U8S); + assert_eq!(*X_NKL, ARRAY_OF_NKL_ARRAYS); + } + + // Test that `transmute_ref!` works on slice DSTs in and that memory is + // transmuted as expected. + let slice_dst_of_u8s = + SliceDst::<U16, [u8; 2]>::ref_from_bytes(&[0, 1, 2, 3, 4, 5][..]).unwrap(); + let slice_dst_of_u16s = + SliceDst::<U16, U16>::ref_from_bytes(&[0, 1, 2, 3, 4, 5][..]).unwrap(); + let x: &SliceDst<U16, U16> = transmute_ref!(slice_dst_of_u8s); + assert_eq!(x, slice_dst_of_u16s); + + let slice_dst_of_u8s = + SliceDst::<U16, u8>::ref_from_bytes(&[0, 1, 2, 3, 4, 5][..]).unwrap(); + let x: &[u8] = transmute_ref!(slice_dst_of_u8s); + assert_eq!(x, [0, 1, 2, 3, 4, 5]); + + let x: &[u8] = transmute_ref!(slice_dst_of_u16s); + assert_eq!(x, [0, 1, 2, 3, 4, 5]); + + let x: &[U16] = transmute_ref!(slice_dst_of_u16s); + let slice_of_u16s: &[U16] = <[U16]>::ref_from_bytes(&[0, 1, 2, 3, 4, 5][..]).unwrap(); + assert_eq!(x, slice_of_u16s); + + // Test that transmuting from a type with larger trailing slice offset + // and larger trailing slice element works. + let bytes = &[0, 1, 2, 3, 4, 5, 6, 7][..]; + let slice_dst_big = SliceDst::<U32, U16>::ref_from_bytes(bytes).unwrap(); + let slice_dst_small = SliceDst::<U16, u8>::ref_from_bytes(bytes).unwrap(); + let x: &SliceDst<U16, u8> = transmute_ref!(slice_dst_big); + assert_eq!(x, slice_dst_small); + + // Test that it's legal to transmute a reference while shrinking the + // lifetime (note that `X` has the lifetime `'static`). + let x: &[u8; 8] = transmute_ref!(X); + assert_eq!(*x, ARRAY_OF_U8S); + + // Test that `transmute_ref!` supports decreasing alignment. + let u = AU64(0); + let array = [0, 0, 0, 0, 0, 0, 0, 0]; + let x: &[u8; 8] = transmute_ref!(&u); + assert_eq!(*x, array); + + // Test that a mutable reference can be turned into an immutable one. + let mut x = 0u8; + #[allow(clippy::useless_transmute)] + let y: &u8 = transmute_ref!(&mut x); + assert_eq!(*y, 0); + } + + #[test] + fn test_try_transmute() { + // Test that memory is transmuted with `try_transmute` as expected. 
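+ // Transmuting `bool`s into `u8`s always succeeds, while the reverse + // direction is validated at runtime: only the bit patterns 0 and 1 are + // valid for `bool`.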
+ let array_of_bools = [false, true, false, true, false, true, false, true]; + let array_of_arrays = [[0, 1], [0, 1], [0, 1], [0, 1]]; + let x: Result<[[u8; 2]; 4], _> = try_transmute!(array_of_bools); + assert_eq!(x, Ok(array_of_arrays)); + let x: Result<[bool; 8], _> = try_transmute!(array_of_arrays); + assert_eq!(x, Ok(array_of_bools)); + + // Test that `try_transmute!` works with `!Immutable` types. + let x: Result<usize, _> = try_transmute!(UnsafeCell::new(1usize)); + assert_eq!(x.unwrap(), 1); + let x: Result<UnsafeCell<usize>, _> = try_transmute!(1usize); + assert_eq!(x.unwrap().into_inner(), 1); + let x: Result<UnsafeCell<isize>, _> = try_transmute!(UnsafeCell::new(1usize)); + assert_eq!(x.unwrap().into_inner(), 1); + + #[derive(FromBytes, IntoBytes, Debug, PartialEq)] + #[repr(transparent)] + struct PanicOnDrop<T>(T); + + impl<T> Drop for PanicOnDrop<T> { + fn drop(&mut self) { + panic!("PanicOnDrop dropped"); + } + } + + // Since `try_transmute!` semantically moves its argument on failure, + // the `PanicOnDrop` is not dropped, and thus this shouldn't panic. + let x: Result<usize, _> = try_transmute!(PanicOnDrop(1usize)); + assert_eq!(x, Ok(1)); + + // Since `try_transmute!` semantically returns ownership of its argument + // on failure, the `PanicOnDrop` is returned rather than dropped, and + // thus this shouldn't panic. + let y: Result<bool, _> = try_transmute!(PanicOnDrop(2u8)); + // We have to use `map_err` instead of comparing against + // `Err(PanicOnDrop(2u8))` because the latter would create and then drop + // its `PanicOnDrop` temporary, which would cause a panic. + assert_eq!(y.as_ref().map_err(|p| &p.src.0), Err::<&bool, _>(&2u8)); + mem::forget(y); + } + + #[test] + fn test_try_transmute_ref() { + // Test that memory is transmuted with `try_transmute_ref` as expected. + let array_of_bools = &[false, true, false, true, false, true, false, true]; + let array_of_arrays = &[[0, 1], [0, 1], [0, 1], [0, 1]]; + let x: Result<&[[u8; 2]; 4], _> = try_transmute_ref!(array_of_bools); + assert_eq!(x, Ok(array_of_arrays)); + let x: Result<&[bool; 8], _> = try_transmute_ref!(array_of_arrays); + assert_eq!(x, Ok(array_of_bools)); + + // Test that it's legal to transmute a reference while shrinking the + // lifetime. + { + let x: Result<&[[u8; 2]; 4], _> = try_transmute_ref!(array_of_bools); + assert_eq!(x, Ok(array_of_arrays)); + } + + // Test that `try_transmute_ref!` supports decreasing alignment. + let u = AU64(0); + let array = [0u8, 0, 0, 0, 0, 0, 0, 0]; + let x: Result<&[u8; 8], _> = try_transmute_ref!(&u); + assert_eq!(x, Ok(&array)); + + // Test that a mutable reference can be turned into an immutable one. + let mut x = 0u8; + #[allow(clippy::useless_transmute)] + let y: Result<&u8, _> = try_transmute_ref!(&mut x); + assert_eq!(y, Ok(&0)); + } + + #[test] + fn test_try_transmute_mut() { + // Test that memory is transmuted with `try_transmute_mut` as expected. + let array_of_u8s = &mut [0u8, 1, 0, 1, 0, 1, 0, 1]; + let array_of_arrays = &mut [[0u8, 1], [0, 1], [0, 1], [0, 1]]; + let x: Result<&mut [[u8; 2]; 4], _> = try_transmute_mut!(array_of_u8s); + assert_eq!(x, Ok(array_of_arrays)); + + let array_of_bools = &mut [false, true, false, true, false, true, false, true]; + let array_of_arrays = &mut [[0u8, 1], [0, 1], [0, 1], [0, 1]]; + let x: Result<&mut [bool; 8], _> = try_transmute_mut!(array_of_arrays); + assert_eq!(x, Ok(array_of_bools)); + + // Test that it's legal to transmute a reference while shrinking the + // lifetime. 
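+ // Binding the transmuted reference inside the nested block below ends + // its borrow early, so its lifetime is strictly shorter than that of the + // source reference.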
+ let array_of_bools = &mut [false, true, false, true, false, true, false, true]; + let array_of_arrays = &mut [[0u8, 1], [0, 1], [0, 1], [0, 1]]; + { + let x: Result<&mut [bool; 8], _> = try_transmute_mut!(array_of_arrays); + assert_eq!(x, Ok(array_of_bools)); + } + + // Test that `try_transmute_mut!` supports decreasing alignment. + let u = &mut AU64(0); + let array = &mut [0u8, 0, 0, 0, 0, 0, 0, 0]; + let x: Result<&mut [u8; 8], _> = try_transmute_mut!(u); + assert_eq!(x, Ok(array)); + + // Test that a mutable reference can be turned into an immutable one. + let mut x = 0u8; + #[allow(clippy::useless_transmute)] + let y: Result<&mut u8, _> = try_transmute_mut!(&mut x); + assert_eq!(y, Ok(&mut 0)); + } + + #[test] + fn test_transmute_mut() { + // Test that memory is transmuted as expected. + let mut array_of_u8s = [0u8, 1, 2, 3, 4, 5, 6, 7]; + let mut array_of_arrays = [[0, 1], [2, 3], [4, 5], [6, 7]]; + let x: &mut [[u8; 2]; 4] = transmute_mut!(&mut array_of_u8s); + assert_eq!(*x, array_of_arrays); + let x: &mut [u8; 8] = transmute_mut!(&mut array_of_arrays); + assert_eq!(*x, array_of_u8s); + + { + // Test that it's legal to transmute a reference while shrinking the + // lifetime. + let x: &mut [u8; 8] = transmute_mut!(&mut array_of_arrays); + assert_eq!(*x, array_of_u8s); + } + + // Test that `transmute_mut!` supports non-`KnownLayout` types. + let mut array_of_u8s = Nkl([0u8, 1, 2, 3, 4, 5, 6, 7]); + let mut array_of_arrays = Nkl([[0, 1], [2, 3], [4, 5], [6, 7]]); + let x: &mut Nkl<[[u8; 2]; 4]> = transmute_mut!(&mut array_of_u8s); + assert_eq!(*x, array_of_arrays); + let x: &mut Nkl<[u8; 8]> = transmute_mut!(&mut array_of_arrays); + assert_eq!(*x, array_of_u8s); + + // Test that `transmute_mut!` supports decreasing alignment. + let mut u = AU64(0); + let array = [0, 0, 0, 0, 0, 0, 0, 0]; + let x: &[u8; 8] = transmute_mut!(&mut u); + assert_eq!(*x, array); + + // Test that a mutable reference can be turned into an immutable one. + let mut x = 0u8; + #[allow(clippy::useless_transmute)] + let y: &u8 = transmute_mut!(&mut x); + assert_eq!(*y, 0); + + // Test that `transmute_mut!` works on slice DSTs in and that memory is + // transmuted as expected. + let mut bytes = [0, 1, 2, 3, 4, 5, 6]; + let slice_dst_of_u8s = SliceDst::<u8, [u8; 2]>::mut_from_bytes(&mut bytes[..]).unwrap(); + let mut bytes = [0, 1, 2, 3, 4, 5, 6]; + let slice_dst_of_u16s = SliceDst::<u8, U16>::mut_from_bytes(&mut bytes[..]).unwrap(); + let x: &mut SliceDst<u8, U16> = transmute_mut!(slice_dst_of_u8s); + assert_eq!(x, slice_dst_of_u16s); + + // Test that `transmute_mut!` works on slices that memory is transmuted + // as expected. + let array_of_u16s: &mut [u16] = &mut [0u16, 1, 2]; + let array_of_i16s: &mut [i16] = &mut [0i16, 1, 2]; + let x: &mut [i16] = transmute_mut!(array_of_u16s); + assert_eq!(x, array_of_i16s); + + // Test that transmuting from a type with larger trailing slice offset + // and larger trailing slice element works. 
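+ // Here both the fixed-size prefix (`U32` to `U16`) and the trailing + // slice element (`U16` to `u8`) shrink, while the transmute still covers + // the same underlying bytes.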
+ let mut bytes = [0, 1, 2, 3, 4, 5, 6, 7]; + let slice_dst_big = SliceDst::<U32, U16>::mut_from_bytes(&mut bytes[..]).unwrap(); + let mut bytes = [0, 1, 2, 3, 4, 5, 6, 7]; + let slice_dst_small = SliceDst::<U16, u8>::mut_from_bytes(&mut bytes[..]).unwrap(); + let x: &mut SliceDst<U16, u8> = transmute_mut!(slice_dst_big); + assert_eq!(x, slice_dst_small); + } + + #[test] + fn test_macros_evaluate_args_once() { + let mut ctr = 0; + #[allow(clippy::useless_transmute)] + let _: usize = transmute!({ + ctr += 1; + 0usize + }); + assert_eq!(ctr, 1); + + let mut ctr = 0; + let _: &usize = transmute_ref!({ + ctr += 1; + &0usize + }); + assert_eq!(ctr, 1); + + let mut ctr: usize = 0; + let _: &mut usize = transmute_mut!({ + ctr += 1; + &mut ctr + }); + assert_eq!(ctr, 1); + + let mut ctr = 0; + #[allow(clippy::useless_transmute)] + let _: usize = try_transmute!({ + ctr += 1; + 0usize + }) + .unwrap(); + assert_eq!(ctr, 1); + } + + #[test] + fn test_include_value() { + const AS_U32: u32 = include_value!("../testdata/include_value/data"); + assert_eq!(AS_U32, u32::from_ne_bytes([b'a', b'b', b'c', b'd'])); + const AS_I32: i32 = include_value!("../testdata/include_value/data"); + assert_eq!(AS_I32, i32::from_ne_bytes([b'a', b'b', b'c', b'd'])); + } + + #[test] + #[allow(non_camel_case_types, unreachable_pub, dead_code)] + fn test_cryptocorrosion_derive_traits() { + // Test the set of invocations added in + // https://github.com/cryptocorrosion/cryptocorrosion/pull/85 + + fn assert_impls<T: FromBytes + IntoBytes + Immutable>() {} + + cryptocorrosion_derive_traits! { + #[repr(C)] + #[derive(Clone, Copy)] + pub union vec128_storage { + d: [u32; 4], + q: [u64; 2], + } + } + + assert_impls::<vec128_storage>(); + + cryptocorrosion_derive_traits! { + #[repr(transparent)] + #[derive(Copy, Clone, Debug, PartialEq)] + pub struct u32x4_generic([u32; 4]); + } + + assert_impls::<u32x4_generic>(); + + cryptocorrosion_derive_traits! { + #[repr(transparent)] + #[derive(Copy, Clone, Debug, PartialEq)] + pub struct u64x2_generic([u64; 2]); + } + + assert_impls::<u64x2_generic>(); + + cryptocorrosion_derive_traits! { + #[repr(transparent)] + #[derive(Copy, Clone, Debug, PartialEq)] + pub struct u128x1_generic([u128; 1]); + } + + assert_impls::<u128x1_generic>(); + + cryptocorrosion_derive_traits! { + #[repr(transparent)] + #[derive(Copy, Clone, Default)] + #[allow(non_camel_case_types)] + pub struct x2<W, G>(pub [W; 2], PhantomData<G>); + } + + enum NotZerocopy {} + assert_impls::<x2<(), NotZerocopy>>(); + + cryptocorrosion_derive_traits! { + #[repr(transparent)] + #[derive(Copy, Clone, Default)] + #[allow(non_camel_case_types)] + pub struct x4<W>(pub [W; 4]); + } + + assert_impls::<x4<()>>(); + + #[cfg(feature = "simd")] + #[cfg(any(target_arch = "x86", target_arch = "x86_64"))] + { + #[cfg(target_arch = "x86")] + use core::arch::x86::{__m128i, __m256i}; + #[cfg(target_arch = "x86_64")] + use core::arch::x86_64::{__m128i, __m256i}; + + cryptocorrosion_derive_traits! { + #[repr(C)] + #[derive(Copy, Clone)] + pub struct X4(__m128i, __m128i, __m128i, __m128i); + } + + assert_impls::<X4>(); + + cryptocorrosion_derive_traits! { + #[repr(C)] + /// Generic wrapper for unparameterized storage of any of the + /// possible impls. Converting into and out of this type should + /// be essentially free, although it may be more aligned than a + /// particular impl requires. 
+ #[allow(non_camel_case_types)] + #[derive(Copy, Clone)] + pub union vec128_storage { + u32x4: [u32; 4], + u64x2: [u64; 2], + u128x1: [u128; 1], + sse2: __m128i, + } + } + + assert_impls::<vec128_storage>(); + + cryptocorrosion_derive_traits! { + #[repr(transparent)] + #[allow(non_camel_case_types)] + #[derive(Copy, Clone)] + pub struct vec<S3, S4, NI> { + x: __m128i, + s3: PhantomData<S3>, + s4: PhantomData<S4>, + ni: PhantomData<NI>, + } + } + + assert_impls::<vec<NotZerocopy, NotZerocopy, NotZerocopy>>(); + + cryptocorrosion_derive_traits! { + #[repr(transparent)] + #[derive(Copy, Clone)] + pub struct u32x4x2_avx2<NI> { + x: __m256i, + ni: PhantomData<NI>, + } + } + + assert_impls::<u32x4x2_avx2<NotZerocopy>>(); + } + + // Make sure that our derive works for `#[repr(C)]` structs even though + // cryptocorrosion doesn't currently have any. + cryptocorrosion_derive_traits! { + #[repr(C)] + #[derive(Copy, Clone, Debug, PartialEq)] + pub struct ReprC(u8, u8, u16); + } + } +} diff --git a/vendor/zerocopy/src/pointer/inner.rs b/vendor/zerocopy/src/pointer/inner.rs new file mode 100644 index 00000000..4f6cd5db --- /dev/null +++ b/vendor/zerocopy/src/pointer/inner.rs @@ -0,0 +1,748 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +use core::{marker::PhantomData, mem, ops::Range, ptr::NonNull}; + +pub use _def::PtrInner; + +#[allow(unused_imports)] +use crate::util::polyfills::NumExt as _; +use crate::{ + layout::{CastType, MetadataCastError}, + util::AsAddress, + AlignmentError, CastError, KnownLayout, MetadataOf, SizeError, SplitAt, +}; + +mod _def { + use super::*; + /// The inner pointer stored inside a [`Ptr`][crate::Ptr]. + /// + /// `PtrInner<'a, T>` is [covariant] in `'a` and invariant in `T`. + /// + /// [covariant]: https://doc.rust-lang.org/reference/subtyping.html + #[allow(missing_debug_implementations)] + pub struct PtrInner<'a, T> + where + T: ?Sized, + { + /// # Invariants + /// + /// 0. If `ptr`'s referent is not zero sized, then `ptr` has valid + /// provenance for its referent, which is entirely contained in some + /// Rust allocation, `A`. + /// 1. If `ptr`'s referent is not zero sized, `A` is guaranteed to live + /// for at least `'a`. + /// + /// # Postconditions + /// + /// By virtue of these invariants, code may assume the following, which + /// are logical implications of the invariants: + /// - `ptr`'s referent is not larger than `isize::MAX` bytes \[1\] + /// - `ptr`'s referent does not wrap around the address space \[1\] + /// + /// \[1\] Per <https://doc.rust-lang.org/1.85.0/std/ptr/index.html#allocated-object>: + /// + /// For any allocated object with `base` address, `size`, and a set of + /// `addresses`, the following are guaranteed: + /// ... + /// - `size <= isize::MAX` + /// + /// As a consequence of these guarantees, given any address `a` within + /// the set of addresses of an allocated object: + /// ... + /// - It is guaranteed that, given `o = a - base` (i.e., the offset of + /// `a` within the allocated object), `base + o` will not wrap + /// around the address space (in other words, will not overflow + /// `usize`) + ptr: NonNull<T>, + // SAFETY: `&'a UnsafeCell<T>` is covariant in `'a` and invariant in `T` + // [1]. 
We use this construction rather than the equivalent `&mut T`, + // because our MSRV of 1.65 prohibits `&mut` types in const contexts. + // + // [1] https://doc.rust-lang.org/1.81.0/reference/subtyping.html#variance + _marker: PhantomData<&'a core::cell::UnsafeCell<T>>, + } + + impl<'a, T: 'a + ?Sized> Copy for PtrInner<'a, T> {} + impl<'a, T: 'a + ?Sized> Clone for PtrInner<'a, T> { + #[inline(always)] + fn clone(&self) -> PtrInner<'a, T> { + // SAFETY: None of the invariants on `ptr` are affected by having + // multiple copies of a `PtrInner`. + *self + } + } + + impl<'a, T: 'a + ?Sized> PtrInner<'a, T> { + /// Constructs a `Ptr` from a [`NonNull`]. + /// + /// # Safety + /// + /// The caller promises that: + /// + /// 0. If `ptr`'s referent is not zero sized, then `ptr` has valid + /// provenance for its referent, which is entirely contained in some + /// Rust allocation, `A`. + /// 1. If `ptr`'s referent is not zero sized, `A` is guaranteed to live + /// for at least `'a`. + #[inline(always)] + #[must_use] + pub const unsafe fn new(ptr: NonNull<T>) -> PtrInner<'a, T> { + // SAFETY: The caller has promised to satisfy all safety invariants + // of `PtrInner`. + Self { ptr, _marker: PhantomData } + } + + /// Converts this `PtrInner<T>` to a [`NonNull<T>`]. + /// + /// Note that this method does not consume `self`. The caller should + /// watch out for `unsafe` code which uses the returned `NonNull` in a + /// way that violates the safety invariants of `self`. + #[inline(always)] + #[must_use] + pub const fn as_non_null(&self) -> NonNull<T> { + self.ptr + } + } +} + +impl<'a, T: ?Sized> PtrInner<'a, T> { + /// Constructs a `PtrInner` from a reference. + #[inline] + pub(crate) fn from_ref(ptr: &'a T) -> Self { + let ptr = NonNull::from(ptr); + // SAFETY: + // 0. If `ptr`'s referent is not zero sized, then `ptr`, by invariant on + // `&'a T` [1], has valid provenance for its referent, which is + // entirely contained in some Rust allocation, `A`. + // 1. If `ptr`'s referent is not zero sized, then `A`, by invariant on + // `&'a T`, is guaranteed to live for at least `'a`. + // + // [1] Per https://doc.rust-lang.org/1.85.0/std/primitive.reference.html#safety: + // + // For all types, `T: ?Sized`, and for all `t: &T` or `t: &mut T`, + // when such values cross an API boundary, the following invariants + // must generally be upheld: + // ... + // - if `size_of_val(t) > 0`, then `t` is dereferenceable for + // `size_of_val(t)` many bytes + // + // If `t` points at address `a`, being “dereferenceable” for N bytes + // means that the memory range `[a, a + N)` is all contained within a + // single allocated object. + unsafe { Self::new(ptr) } + } + + /// Constructs a `PtrInner` from a mutable reference. + #[inline] + pub(crate) fn from_mut(ptr: &'a mut T) -> Self { + let ptr = NonNull::from(ptr); + // SAFETY: + // 0. If `ptr`'s referent is not zero sized, then `ptr`, by invariant on + // `&'a mut T` [1], has valid provenance for its referent, which is + // entirely contained in some Rust allocation, `A`. + // 1. If `ptr`'s referent is not zero sized, then `A`, by invariant on + // `&'a mut T`, is guaranteed to live for at least `'a`. + // + // [1] Per https://doc.rust-lang.org/1.85.0/std/primitive.reference.html#safety: + // + // For all types, `T: ?Sized`, and for all `t: &T` or `t: &mut T`, + // when such values cross an API boundary, the following invariants + // must generally be upheld: + // ... 
+ // - if `size_of_val(t) > 0`, then `t` is dereferenceable for + // `size_of_val(t)` many bytes + // + // If `t` points at address `a`, being “dereferenceable” for N bytes + // means that the memory range `[a, a + N)` is all contained within a + // single allocated object. + unsafe { Self::new(ptr) } + } + + #[must_use] + #[inline(always)] + pub fn cast_sized<U>(self) -> PtrInner<'a, U> + where + T: Sized, + { + static_assert!(T, U => mem::size_of::<T>() >= mem::size_of::<U>()); + // SAFETY: By the preceding assert, `U` is no larger than `T`, which is + // the size of `self`'s referent. + unsafe { self.cast() } + } + + /// # Safety + /// + /// `U` must not be larger than the size of `self`'s referent. + #[must_use] + #[inline(always)] + pub unsafe fn cast<U>(self) -> PtrInner<'a, U> { + let ptr = self.as_non_null().cast::<U>(); + + // SAFETY: The caller promises that `U` is no larger than `self`'s + // referent. Thus, `ptr` addresses a subset of the bytes addressed by + // `self`. + // + // 0. By invariant on `self`, if `self`'s referent is not zero sized, + // then `self` has valid provenance for its referent, which is + // entirely contained in some Rust allocation, `A`. Thus, the same + // holds of `ptr`. + // 1. By invariant on `self`, if `self`'s referent is not zero sized, + // then `A` is guaranteed to live for at least `'a`. + unsafe { PtrInner::new(ptr) } + } +} + +#[allow(clippy::needless_lifetimes)] +impl<'a, T> PtrInner<'a, T> +where + T: ?Sized + KnownLayout, +{ + /// Extracts the metadata of this `ptr`. + pub(crate) fn meta(self) -> MetadataOf<T> { + let meta = T::pointer_to_metadata(self.as_non_null().as_ptr()); + // SAFETY: By invariant on `PtrInner`, `self.as_non_null()` addresses no + // more than `isize::MAX` bytes. + unsafe { MetadataOf::new_unchecked(meta) } + } + + /// Produces a `PtrInner` with the same address and provenance as `self` but + /// the given `meta`. + /// + /// # Safety + /// + /// The caller promises that if `self`'s referent is not zero sized, then + /// a pointer constructed from its address with the given `meta` metadata + /// will address a subset of the allocation pointed to by `self`. + #[inline] + pub(crate) unsafe fn with_meta(self, meta: T::PointerMetadata) -> Self + where + T: KnownLayout, + { + let raw = T::raw_from_ptr_len(self.as_non_null().cast(), meta); + + // SAFETY: + // + // Lemma 0: `raw` either addresses zero bytes, or addresses a subset of + // the allocation pointed to by `self` and has the same + // provenance as `self`. Proof: `raw` is constructed using + // provenance-preserving operations, and the caller has + // promised that, if `self`'s referent is not zero-sized, the + // resulting pointer addresses a subset of the allocation + // pointed to by `self`. + // + // 0. Per Lemma 0 and by invariant on `self`, if `ptr`'s referent is not + // zero sized, then `ptr` is derived from some valid Rust allocation, + // `A`. + // 1. Per Lemma 0 and by invariant on `self`, if `ptr`'s referent is not + // zero sized, then `ptr` has valid provenance for `A`. + // 2. Per Lemma 0 and by invariant on `self`, if `ptr`'s referent is not + // zero sized, then `ptr` addresses a byte range which is entirely + // contained in `A`. + // 3. Per Lemma 0 and by invariant on `self`, `ptr` addresses a byte + // range whose length fits in an `isize`. + // 4. Per Lemma 0 and by invariant on `self`, `ptr` addresses a byte + // range which does not wrap around the address space. + // 5. 
Per Lemma 0 and by invariant on `self`, if `ptr`'s referent is not + // zero sized, then `A` is guaranteed to live for at least `'a`. + unsafe { PtrInner::new(raw) } + } + + pub(crate) fn as_bytes(self) -> PtrInner<'a, [u8]> { + let ptr = self.as_non_null(); + let bytes = match T::size_of_val_raw(ptr) { + Some(bytes) => bytes, + // SAFETY: `KnownLayout::size_of_val_raw` promises to always + // return `Some` so long as the resulting size fits in a + // `usize`. By invariant on `PtrInner`, `self` refers to a range + // of bytes whose size fits in an `isize`, which implies that it + // also fits in a `usize`. + None => unsafe { core::hint::unreachable_unchecked() }, + }; + + let ptr = core::ptr::slice_from_raw_parts_mut(ptr.cast::<u8>().as_ptr(), bytes); + + // SAFETY: `ptr` has the same address as `ptr = self.as_non_null()`, + // which is non-null by construction. + let ptr = unsafe { NonNull::new_unchecked(ptr) }; + + // SAFETY: `ptr` points to `bytes` `u8`s starting at the same address as + // `self`'s referent. Since `bytes` is the length of `self`'s referent, + // `ptr` addresses the same byte range as `self`. Thus, by invariant on + // `self` (as a `PtrInner`): + // + // 0. If `ptr`'s referent is not zero sized, then `ptr` has valid + // provenance for its referent, which is entirely contained in some + // Rust allocation, `A`. + // 1. If `ptr`'s referent is not zero sized, `A` is guaranteed to live + // for at least `'a`. + unsafe { PtrInner::new(ptr) } + } +} + +#[allow(clippy::needless_lifetimes)] +impl<'a, T> PtrInner<'a, T> +where + T: ?Sized + KnownLayout<PointerMetadata = usize>, +{ + /// Splits `T` in two. + /// + /// # Safety + /// + /// The caller promises that: + /// - `l_len.get() <= self.meta()`. + /// + /// ## (Non-)Overlap + /// + /// Given `let (left, right) = ptr.split_at(l_len)`, it is guaranteed that + /// `left` and `right` are contiguous and non-overlapping if + /// `l_len.padding_needed_for() == 0`. This is true for all `[T]`. + /// + /// If `l_len.padding_needed_for() != 0`, then the left pointer will overlap + /// the right pointer to satisfy `T`'s padding requirements. + pub(crate) unsafe fn split_at_unchecked( + self, + l_len: crate::util::MetadataOf<T>, + ) -> (Self, PtrInner<'a, [T::Elem]>) + where + T: SplitAt, + { + let l_len = l_len.get(); + + // SAFETY: The caller promises that `l_len.get() <= self.meta()`. + // Trivially, `0 <= l_len`. + let left = unsafe { self.with_meta(l_len) }; + + let right = self.trailing_slice(); + // SAFETY: The caller promises that `l_len <= self.meta() = slf.meta()`. + // Trivially, `slf.meta() <= slf.meta()`. + let right = unsafe { right.slice_unchecked(l_len..self.meta().get()) }; + + // SAFETY: If `l_len.padding_needed_for() == 0`, then `left` and `right` + // are non-overlapping. Proof: `left` is constructed `slf` with `l_len` + // as its (exclusive) upper bound. If `l_len.padding_needed_for() == 0`, + // then `left` requires no trailing padding following its final element. + // Since `right` is constructed from `slf`'s trailing slice with `l_len` + // as its (inclusive) lower bound, no byte is referred to by both + // pointers. + // + // Conversely, `l_len.padding_needed_for() == N`, where `N + // > 0`, `left` requires `N` bytes of trailing padding following its + // final element. Since `right` is constructed from the trailing slice + // of `slf` with `l_len` as its (inclusive) lower bound, the first `N` + // bytes of `right` are aliased by `left`. + (left, right) + } + + /// Produces the trailing slice of `self`. 
+ pub(crate) fn trailing_slice(self) -> PtrInner<'a, [T::Elem]> + where + T: SplitAt, + { + let offset = crate::trailing_slice_layout::<T>().offset; + + let bytes = self.as_non_null().cast::<u8>().as_ptr(); + + // SAFETY: + // - By invariant on `T: KnownLayout`, `T::LAYOUT` describes `T`'s + // layout. `offset` is the offset of the trailing slice within `T`, + // which is by definition in-bounds or one byte past the end of any + // `T`, regardless of metadata. By invariant on `PtrInner`, `self` + // (and thus `bytes`) points to a byte range of size `<= isize::MAX`, + // and so `offset <= isize::MAX`. Since `size_of::<u8>() == 1`, + // `offset * size_of::<u8>() <= isize::MAX`. + // - If `offset > 0`, then by invariant on `PtrInner`, `self` (and thus + // `bytes`) points to a byte range entirely contained within the same + // allocated object as `self`. As explained above, this offset results + // in a pointer to or one byte past the end of this allocated object. + let bytes = unsafe { bytes.add(offset) }; + + // SAFETY: By the preceding safety argument, `bytes` is within or one + // byte past the end of the same allocated object as `self`, which + // ensures that it is non-null. + let bytes = unsafe { NonNull::new_unchecked(bytes) }; + + let ptr = KnownLayout::raw_from_ptr_len(bytes, self.meta().get()); + + // SAFETY: + // 0. If `ptr`'s referent is not zero sized, then `ptr` is derived from + // some valid Rust allocation, `A`, because `ptr` is derived from + // the same allocated object as `self`. + // 1. If `ptr`'s referent is not zero sized, then `ptr` has valid + // provenance for `A` because `raw` is derived from the same + // allocated object as `self` via provenance-preserving operations. + // 2. If `ptr`'s referent is not zero sized, then `ptr` addresses a byte + // range which is entirely contained in `A`, by previous safety proof + // on `bytes`. + // 3. `ptr` addresses a byte range whose length fits in an `isize`, by + // consequence of #2. + // 4. `ptr` addresses a byte range which does not wrap around the + // address space, by consequence of #2. + // 5. If `ptr`'s referent is not zero sized, then `A` is guaranteed to + // live for at least `'a`, because `ptr` is derived from `self`. + unsafe { PtrInner::new(ptr) } + } +} + +#[allow(clippy::needless_lifetimes)] +impl<'a, T> PtrInner<'a, [T]> { + /// Creates a pointer which addresses the given `range` of self. + /// + /// # Safety + /// + /// `range` is a valid range (`start <= end`) and `end <= self.meta()`. + pub(crate) unsafe fn slice_unchecked(self, range: Range<usize>) -> Self { + let base = self.as_non_null().cast::<T>().as_ptr(); + + // SAFETY: The caller promises that `start <= end <= self.meta()`. By + // invariant, if `self`'s referent is not zero-sized, then `self` refers + // to a byte range which is contained within a single allocation, which + // is no more than `isize::MAX` bytes long, and which does not wrap + // around the address space. Thus, this pointer arithmetic remains + // in-bounds of the same allocation, and does not wrap around the + // address space. The offset (in bytes) does not overflow `isize`. + // + // If `self`'s referent is zero-sized, then these conditions are + // trivially satisfied. + let base = unsafe { base.add(range.start) }; + + // SAFETY: The caller promises that `start <= end`, and so this will not + // underflow. 
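+ // The `NumExt` polyfill imported at the top of this file provides + // `unchecked_sub` on toolchains where the inherent method is not yet + // stable; `unstable_name_collisions` is allowed to silence the resulting + // method-name collision lint.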
+ #[allow(unstable_name_collisions)] + let len = unsafe { range.end.unchecked_sub(range.start) }; + + let ptr = core::ptr::slice_from_raw_parts_mut(base, len); + + // SAFETY: By invariant, `self`'s referent is either a ZST or lives + // entirely in an allocation. `ptr` points inside of or one byte past + // the end of that referent. Thus, in either case, `ptr` is non-null. + let ptr = unsafe { NonNull::new_unchecked(ptr) }; + + // SAFETY: + // + // Lemma 0: `ptr` addresses a subset of the bytes addressed by `self`, + // and has the same provenance. Proof: The caller guarantees + // that `start <= end <= self.meta()`. Thus, `base` is + // in-bounds of `self`, and `base + (end - start)` is also + // in-bounds of self. Finally, `ptr` is constructed using + // provenance-preserving operations. + // + // 0. Per Lemma 0 and by invariant on `self`, if `ptr`'s referent is not + // zero sized, then `ptr` has valid provenance for its referent, + // which is entirely contained in some Rust allocation, `A`. + // 1. Per Lemma 0 and by invariant on `self`, if `ptr`'s referent is not + // zero sized, then `A` is guaranteed to live for at least `'a`. + unsafe { PtrInner::new(ptr) } + } + + /// Iteratively projects the elements `PtrInner<T>` from `PtrInner<[T]>`. + pub(crate) fn iter(&self) -> impl Iterator<Item = PtrInner<'a, T>> { + // FIXME(#429): Once `NonNull::cast` documents that it preserves + // provenance, cite those docs. + let base = self.as_non_null().cast::<T>().as_ptr(); + (0..self.meta().get()).map(move |i| { + // FIXME(https://github.com/rust-lang/rust/issues/74265): Use + // `NonNull::get_unchecked_mut`. + + // SAFETY: If the following conditions are not satisfied + // `pointer::cast` may induce Undefined Behavior [1]: + // + // > - The computed offset, `count * size_of::<T>()` bytes, must not + // > overflow `isize``. + // > - If the computed offset is non-zero, then `self` must be + // > derived from a pointer to some allocated object, and the + // > entire memory range between `self` and the result must be in + // > bounds of that allocated object. In particular, this range + // > must not “wrap around” the edge of the address space. + // + // [1] https://doc.rust-lang.org/std/primitive.pointer.html#method.add + // + // We satisfy both of these conditions here: + // - By invariant on `Ptr`, `self` addresses a byte range whose + // length fits in an `isize`. Since `elem` is contained in `self`, + // the computed offset of `elem` must fit within `isize.` + // - If the computed offset is non-zero, then this means that the + // referent is not zero-sized. In this case, `base` points to an + // allocated object (by invariant on `self`). Thus: + // - By contract, `self.meta()` accurately reflects the number of + // elements in the slice. `i` is in bounds of `c.meta()` by + // construction, and so the result of this addition cannot + // overflow past the end of the allocation referred to by `c`. + // - By invariant on `Ptr`, `self` addresses a byte range which + // does not wrap around the address space. Since `elem` is + // contained in `self`, the computed offset of `elem` must wrap + // around the address space. + // + // FIXME(#429): Once `pointer::add` documents that it preserves + // provenance, cite those docs. + let elem = unsafe { base.add(i) }; + + // SAFETY: `elem` must not be null. `base` is constructed from a + // `NonNull` pointer, and the addition that produces `elem` must not + // overflow or wrap around, so `elem >= base > 0`. 
+ // + // FIXME(#429): Once `NonNull::new_unchecked` documents that it + // preserves provenance, cite those docs. + let elem = unsafe { NonNull::new_unchecked(elem) }; + + // SAFETY: The safety invariants of `Ptr::new` (see definition) are + // satisfied: + // 0. If `elem`'s referent is not zero sized, then `elem` has valid + // provenance for its referent, because it derived from `self` + // using a series of provenance-preserving operations, and + // because `self` has valid provenance for its referent. By the + // same argument, `elem`'s referent is entirely contained within + // the same allocated object as `self`'s referent. + // 1. If `elem`'s referent is not zero sized, then the allocation of + // `elem` is guaranteed to live for at least `'a`, because `elem` + // is entirely contained in `self`, which lives for at least `'a` + // by invariant on `Ptr`. + unsafe { PtrInner::new(elem) } + }) + } +} + +impl<'a, T, const N: usize> PtrInner<'a, [T; N]> { + /// Casts this pointer-to-array into a slice. + /// + /// # Safety + /// + /// Callers may assume that the returned `PtrInner` references the same + /// address and length as `self`. + #[allow(clippy::wrong_self_convention)] + pub(crate) fn as_slice(self) -> PtrInner<'a, [T]> { + let start = self.as_non_null().cast::<T>().as_ptr(); + let slice = core::ptr::slice_from_raw_parts_mut(start, N); + // SAFETY: `slice` is not null, because it is derived from `start` + // which is non-null. + let slice = unsafe { NonNull::new_unchecked(slice) }; + // SAFETY: Lemma: In the following safety arguments, note that `slice` + // is derived from `self` in two steps: first, by casting `self: [T; N]` + // to `start: T`, then by constructing a pointer to a slice starting at + // `start` of length `N`. As a result, `slice` references exactly the + // same allocation as `self`, if any. + // + // 0. By the above lemma, if `slice`'s referent is not zero sized, then + // `slice` has the same referent as `self`. By invariant on `self`, + // this referent is entirely contained within some allocation, `A`. + // Because `slice` was constructed using provenance-preserving + // operations, it has provenance for its entire referent. + // 1. By the above lemma, if `slice`'s referent is not zero sized, then + // `A` is guaranteed to live for at least `'a`, because it is derived + // from the same allocation as `self`, which, by invariant on `Ptr`, + // lives for at least `'a`. + unsafe { PtrInner::new(slice) } + } +} + +impl<'a> PtrInner<'a, [u8]> { + /// Attempts to cast `self` to a `U` using the given cast type. + /// + /// If `U` is a slice DST and pointer metadata (`meta`) is provided, then + /// the cast will only succeed if it would produce an object with the given + /// metadata. + /// + /// Returns `None` if the resulting `U` would be invalidly-aligned, if no + /// `U` can fit in `self`, or if the provided pointer metadata describes an + /// invalid instance of `U`. On success, returns a pointer to the + /// largest-possible `U` which fits in `self`. + /// + /// # Safety + /// + /// The caller may assume that this implementation is correct, and may rely + /// on that assumption for the soundness of their code. In particular, the + /// caller may assume that, if `try_cast_into` returns `Some((ptr, + /// remainder))`, then `ptr` and `remainder` refer to non-overlapping byte + /// ranges within `self`, and that `ptr` and `remainder` entirely cover + /// `self`. Finally: + /// - If this is a prefix cast, `ptr` has the same address as `self`. 
+ /// - If this is a suffix cast, `remainder` has the same address as `self`. + #[inline] + pub(crate) fn try_cast_into<U>( + self, + cast_type: CastType, + meta: Option<U::PointerMetadata>, + ) -> Result<(PtrInner<'a, U>, PtrInner<'a, [u8]>), CastError<Self, U>> + where + U: 'a + ?Sized + KnownLayout, + { + // PANICS: By invariant, the byte range addressed by + // `self.as_non_null()` does not wrap around the address space. This + // implies that the sum of the address (represented as a `usize`) and + // length do not overflow `usize`, as required by + // `validate_cast_and_convert_metadata`. Thus, this call to + // `validate_cast_and_convert_metadata` will only panic if `U` is a DST + // whose trailing slice element is zero-sized. + let maybe_metadata = MetadataOf::<U>::validate_cast_and_convert_metadata( + AsAddress::addr(self.as_non_null().as_ptr()), + self.meta(), + cast_type, + meta, + ); + + let (elems, split_at) = match maybe_metadata { + Ok((elems, split_at)) => (elems, split_at), + Err(MetadataCastError::Alignment) => { + // SAFETY: Since `validate_cast_and_convert_metadata` returned + // an alignment error, `U` must have an alignment requirement + // greater than one. + let err = unsafe { AlignmentError::<_, U>::new_unchecked(self) }; + return Err(CastError::Alignment(err)); + } + Err(MetadataCastError::Size) => return Err(CastError::Size(SizeError::new(self))), + }; + + // SAFETY: `validate_cast_and_convert_metadata` promises to return + // `split_at <= self.meta()`. + // + // Lemma 0: `l_slice` and `r_slice` are non-overlapping. Proof: By + // contract on `PtrInner::split_at_unchecked`, the produced `PtrInner`s + // are always non-overlapping if `self` is a `[T]`; here it is a `[u8]`. + let (l_slice, r_slice) = unsafe { self.split_at_unchecked(split_at) }; + + let (target, remainder) = match cast_type { + CastType::Prefix => (l_slice, r_slice), + CastType::Suffix => (r_slice, l_slice), + }; + + let base = target.as_non_null().cast::<u8>(); + + let ptr = U::raw_from_ptr_len(base, elems.get()); + + // SAFETY: + // 0. By invariant, if `target`'s referent is not zero sized, then + // `target` has provenance valid for some Rust allocation, `A`. + // Because `ptr` is derived from `target` via provenance-preserving + // operations, `ptr` will also have provenance valid for its entire + // referent. + // 1. `validate_cast_and_convert_metadata` promises that the object + // described by `elems` and `split_at` lives at a byte range which is + // a subset of the input byte range. Thus, by invariant, if + // `target`'s referent is not zero sized, then `target` refers to an + // allocation which is guaranteed to live for at least `'a`, and thus + // so does `ptr`. 
+ Ok((unsafe { PtrInner::new(ptr) }, remainder)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::*; + + #[test] + fn test_meta() { + let arr = [1; 16]; + let dst = <[u8]>::ref_from_bytes(&arr[..]).unwrap(); + let ptr = PtrInner::from_ref(dst); + assert_eq!(ptr.meta().get(), 16); + + // SAFETY: 8 is less than 16 + let ptr = unsafe { ptr.with_meta(8) }; + + assert_eq!(ptr.meta().get(), 8); + } + + #[test] + fn test_split_at() { + fn test_split_at<const OFFSET: usize, const BUFFER_SIZE: usize>() { + #[derive(FromBytes, KnownLayout, SplitAt, Immutable)] + #[repr(C)] + struct SliceDst<const OFFSET: usize> { + prefix: [u8; OFFSET], + trailing: [u8], + } + + let n: usize = BUFFER_SIZE - OFFSET; + let arr = [1; BUFFER_SIZE]; + let dst = SliceDst::<OFFSET>::ref_from_bytes(&arr[..]).unwrap(); + let ptr = PtrInner::from_ref(dst); + for i in 0..=n { + assert_eq!(ptr.meta().get(), n); + // SAFETY: `i` is in bounds by construction. + let i = unsafe { MetadataOf::new_unchecked(i) }; + // SAFETY: `i` is in bounds by construction. + let (l, r) = unsafe { ptr.split_at_unchecked(i) }; + // SAFETY: Points to a valid value by construction. + #[allow(clippy::undocumented_unsafe_blocks, clippy::as_conversions)] + // Clippy false positive + let l_sum: usize = l + .trailing_slice() + .iter() + .map(|ptr| unsafe { core::ptr::read_unaligned(ptr.as_non_null().as_ptr()) } + as usize) + .sum(); + // SAFETY: Points to a valid value by construction. + #[allow(clippy::undocumented_unsafe_blocks, clippy::as_conversions)] + // Clippy false positive + let r_sum: usize = r + .iter() + .map(|ptr| unsafe { core::ptr::read_unaligned(ptr.as_non_null().as_ptr()) } + as usize) + .sum(); + assert_eq!(l_sum, i.get()); + assert_eq!(r_sum, n - i.get()); + assert_eq!(l_sum + r_sum, n); + } + } + + test_split_at::<0, 16>(); + test_split_at::<1, 17>(); + test_split_at::<2, 18>(); + } + + #[test] + fn test_trailing_slice() { + fn test_trailing_slice<const OFFSET: usize, const BUFFER_SIZE: usize>() { + #[derive(FromBytes, KnownLayout, SplitAt, Immutable)] + #[repr(C)] + struct SliceDst<const OFFSET: usize> { + prefix: [u8; OFFSET], + trailing: [u8], + } + + let n: usize = BUFFER_SIZE - OFFSET; + let arr = [1; BUFFER_SIZE]; + let dst = SliceDst::<OFFSET>::ref_from_bytes(&arr[..]).unwrap(); + let ptr = PtrInner::from_ref(dst); + + assert_eq!(ptr.meta().get(), n); + let trailing = ptr.trailing_slice(); + assert_eq!(trailing.meta().get(), n); + + assert_eq!( + // SAFETY: We assume this to be sound for the sake of this test, + // which will fail, here, in miri, if the safety precondition of + // `offset_of` is not satisfied. + unsafe { + #[allow(clippy::as_conversions)] + let offset = (trailing.as_non_null().as_ptr() as *mut u8) + .offset_from(ptr.as_non_null().as_ptr() as *mut _); + offset + }, + isize::try_from(OFFSET).unwrap(), + ); + + // SAFETY: Points to a valid value by construction. 
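+ // Every element of the backing buffer was initialized to 1, so summing + // the trailing slice's elements must yield its length, `n`.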
+ #[allow(clippy::undocumented_unsafe_blocks, clippy::as_conversions)] + // Clippy false positive + let trailing: usize = + trailing + .iter() + .map(|ptr| unsafe { core::ptr::read_unaligned(ptr.as_non_null().as_ptr()) } + as usize) + .sum(); + + assert_eq!(trailing, n); + } + + test_trailing_slice::<0, 16>(); + test_trailing_slice::<1, 17>(); + test_trailing_slice::<2, 18>(); + } +} diff --git a/vendor/zerocopy/src/pointer/invariant.rs b/vendor/zerocopy/src/pointer/invariant.rs new file mode 100644 index 00000000..90ab1a87 --- /dev/null +++ b/vendor/zerocopy/src/pointer/invariant.rs @@ -0,0 +1,253 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +#![allow(missing_copy_implementations, missing_debug_implementations)] + +//! The parameterized invariants of a [`Ptr`][super::Ptr]. +//! +//! Invariants are encoded as ([`Aliasing`], [`Alignment`], [`Validity`]) +//! triples implementing the [`Invariants`] trait. + +/// The invariants of a [`Ptr`][super::Ptr]. +pub trait Invariants: Sealed { + type Aliasing: Aliasing; + type Alignment: Alignment; + type Validity: Validity; +} + +impl<A: Aliasing, AA: Alignment, V: Validity> Invariants for (A, AA, V) { + type Aliasing = A; + type Alignment = AA; + type Validity = V; +} + +/// The aliasing invariant of a [`Ptr`][super::Ptr]. +/// +/// All aliasing invariants must permit reading from the bytes of a pointer's +/// referent which are not covered by [`UnsafeCell`]s. +/// +/// [`UnsafeCell`]: core::cell::UnsafeCell +pub trait Aliasing: Sealed { + /// Is `Self` [`Exclusive`]? + #[doc(hidden)] + const IS_EXCLUSIVE: bool; +} + +/// The alignment invariant of a [`Ptr`][super::Ptr]. +pub trait Alignment: Sealed {} + +/// The validity invariant of a [`Ptr`][super::Ptr]. +/// +/// # Safety +/// +/// In this section, we will use `Ptr<T, V>` as a shorthand for `Ptr<T, I: +/// Invariants<Validity = V>>` for brevity. +/// +/// Each `V: Validity` defines a set of bit values which may appear in the +/// referent of a `Ptr<T, V>`, denoted `S(T, V)`. Each `V: Validity`, in its +/// documentation, provides a definition of `S(T, V)` which must be valid for +/// all `T: ?Sized`. Any `V: Validity` must guarantee that this set is only a +/// function of the *bit validity* of the referent type, `T`, and not of any +/// other property of `T`. As a consequence, given `V: Validity`, `T`, and `U` +/// where `T` and `U` have the same bit validity, `S(V, T) = S(V, U)`. +/// +/// It is guaranteed that the referent of any `ptr: Ptr<T, V>` is a member of +/// `S(T, V)`. Unsafe code must ensure that this guarantee will be upheld for +/// any existing `Ptr`s or any `Ptr`s that that code creates. +/// +/// An important implication of this guarantee is that it restricts what +/// transmutes are sound, where "transmute" is used in this context to refer to +/// changing the referent type or validity invariant of a `Ptr`, as either +/// change may change the set of bit values permitted to appear in the referent. 
+/// In particular, the following are necessary (but not sufficient) conditions +/// in order for a transmute from `src: Ptr<T, V>` to `dst: Ptr<U, W>` to be +/// sound: +/// - If `S(T, V) = S(U, W)`, then no restrictions apply; otherwise, +/// - If `dst` permits mutation of its referent (e.g. via `Exclusive` aliasing +/// or interior mutation under `Shared` aliasing), then it must hold that +/// `S(T, V) ⊇ S(U, W)` - in other words, the transmute must not expand the +/// set of allowed referent bit patterns. A violation of this requirement +/// would permit using `dst` to write `x` where `x ∈ S(U, W)` but `x ∉ S(T, +/// V)`, which would violate the guarantee that `src`'s referent may only +/// contain values in `S(T, V)`. +/// - If the referent may be mutated without going through `dst` while `dst` is +/// live (e.g. via interior mutation on a `Shared`-aliased `Ptr` or `&` +/// reference), then it must hold that `S(T, V) ⊆ S(U, W)` - in other words, +/// the transmute must not shrink the set of allowed referent bit patterns. A +/// violation of this requirement would permit using `src` or another +/// mechanism (e.g. a `&` reference used to derive `src`) to write `x` where +/// `x ∈ S(T, V)` but `x ∉ S(U, W)`, which would violate the guarantee that +/// `dst`'s referent may only contain values in `S(U, W)`. +pub unsafe trait Validity: Sealed {} + +/// An [`Aliasing`] invariant which is either [`Shared`] or [`Exclusive`]. +/// +/// # Safety +/// +/// Given `A: Reference`, callers may assume that either `A = Shared` or `A = +/// Exclusive`. +pub trait Reference: Aliasing + Sealed {} + +/// The `Ptr<'a, T>` adheres to the aliasing rules of a `&'a T`. +/// +/// The referent of a shared-aliased `Ptr` may be concurrently referenced by any +/// number of shared-aliased `Ptr` or `&T` references, or by any number of +/// `Ptr<U>` or `&U` references as permitted by `T`'s library safety invariants, +/// and may not be concurrently referenced by any exclusively-aliased `Ptr`s or +/// `&mut` references. The referent must not be mutated, except via +/// [`UnsafeCell`]s, and only when permitted by `T`'s library safety invariants. +/// +/// [`UnsafeCell`]: core::cell::UnsafeCell +pub enum Shared {} +impl Aliasing for Shared { + const IS_EXCLUSIVE: bool = false; +} +impl Reference for Shared {} + +/// The `Ptr<'a, T>` adheres to the aliasing rules of a `&'a mut T`. +/// +/// The referent of an exclusively-aliased `Ptr` may not be concurrently +/// referenced by any other `Ptr`s or references, and may not be accessed (read +/// or written) other than via this `Ptr`. +pub enum Exclusive {} +impl Aliasing for Exclusive { + const IS_EXCLUSIVE: bool = true; +} +impl Reference for Exclusive {} + +/// It is unknown whether the pointer is aligned. +pub enum Unaligned {} + +impl Alignment for Unaligned {} + +/// The referent is aligned: for `Ptr<T>`, the referent's address is a multiple +/// of the `T`'s alignment. +pub enum Aligned {} +impl Alignment for Aligned {} + +/// Any bit pattern is allowed in the `Ptr`'s referent, including uninitialized +/// bytes. +pub enum Uninit {} +// SAFETY: `Uninit`'s validity is well-defined for all `T: ?Sized`, and is not a +// function of any property of `T` other than its bit validity (in fact, it's +// not even a property of `T`'s bit validity, but this is more than we are +// required to uphold). +unsafe impl Validity for Uninit {} + +/// The byte ranges initialized in `T` are also initialized in the referent of a +/// `Ptr<T>`. 
+/// +/// Formally: uninitialized bytes may only be present in `Ptr<T>`'s referent +/// where they are guaranteed to be present in `T`. This is a dynamic property: +/// if, at a particular byte offset, a valid enum discriminant is set, the +/// subsequent bytes may only have uninitialized bytes as specified by the +/// corresponding enum. +/// +/// Formally, given `len = size_of_val_raw(ptr)`, at every byte offset, `b`, in +/// the range `[0, len)`: +/// - If, in any instance `t: T` of length `len`, the byte at offset `b` in `t` +/// is initialized, then the byte at offset `b` within `*ptr` must be +/// initialized. +/// - Let `c` be the contents of the byte range `[0, b)` in `*ptr`. Let `S` be +/// the subset of valid instances of `T` of length `len` which contain `c` in +/// the offset range `[0, b)`. If, in any instance of `t: T` in `S`, the byte +/// at offset `b` in `t` is initialized, then the byte at offset `b` in `*ptr` +/// must be initialized. +/// +/// Pragmatically, this means that if `*ptr` is guaranteed to contain an enum +/// type at a particular offset, and the enum discriminant stored in `*ptr` +/// corresponds to a valid variant of that enum type, then it is guaranteed +/// that the appropriate bytes of `*ptr` are initialized as defined by that +/// variant's bit validity (although note that the variant may contain another +/// enum type, in which case the same rules apply depending on the state of +/// its discriminant, and so on recursively). +pub enum AsInitialized {} +// SAFETY: `AsInitialized`'s validity is well-defined for all `T: ?Sized`, and +// is not a function of any property of `T` other than its bit validity. +unsafe impl Validity for AsInitialized {} + +/// The byte ranges in the referent are fully initialized. In other words, if +/// the referent is `N` bytes long, then it contains a bit-valid `[u8; N]`. +pub enum Initialized {} +// SAFETY: `Initialized`'s validity is well-defined for all `T: ?Sized`, and is +// not a function of any property of `T` other than its bit validity (in fact, +// it's not even a property of `T`'s bit validity, but this is more than we are +// required to uphold). +unsafe impl Validity for Initialized {} + +/// The referent of a `Ptr<T>` is valid for `T`, upholding bit validity and any +/// library safety invariants. +pub enum Valid {} +// SAFETY: `Valid`'s validity is well-defined for all `T: ?Sized`, and is not a +// function of any property of `T` other than its bit validity. +unsafe impl Validity for Valid {} + +/// # Safety +/// +/// `DT: CastableFrom<ST, SV, DV>` is sound if `SV = DV = Uninit` or `SV = DV = +/// Initialized`. +pub unsafe trait CastableFrom<ST: ?Sized, SV, DV> {} + +// SAFETY: `SV = DV = Uninit`. +unsafe impl<ST: ?Sized, DT: ?Sized> CastableFrom<ST, Uninit, Uninit> for DT {} +// SAFETY: `SV = DV = Initialized`. +unsafe impl<ST: ?Sized, DT: ?Sized> CastableFrom<ST, Initialized, Initialized> for DT {} + +/// [`Ptr`](crate::Ptr) referents that permit unsynchronized read operations. +/// +/// `T: Read<A, R>` implies that a pointer to `T` with aliasing `A` permits +/// unsynchronized read operations. This can be because `A` is [`Exclusive`] or +/// because `T` does not permit interior mutation. +/// +/// # Safety +/// +/// `T: Read<A, R>` if either of the following conditions holds: +/// - `A` is [`Exclusive`] +/// - `T` implements [`Immutable`](crate::Immutable) +/// +/// As a consequence, if `T: Read<A, R>`, then any `Ptr<T, (A, ...)>` is +/// permitted to perform unsynchronized reads from its referent. 
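The `Read` bound documented above is the internal form of a distinction that zerocopy exposes publicly through the `Immutable` marker: shared-reference conversions demand it, exclusive ones do not. A minimal sketch of that public behaviour, assuming the vendored crate matches zerocopy 0.8's public API (`ref_from_bytes`, `mut_from_bytes`, and the `U32` byte-order wrappers); the `Header` type is purely illustrative:

```rust
use zerocopy::byteorder::{LittleEndian, U32};
use zerocopy::{FromBytes, Immutable, IntoBytes, KnownLayout, Unaligned};

// Hypothetical example type; `U32<LittleEndian>` keeps it free of alignment
// and endianness surprises.
#[derive(FromBytes, IntoBytes, KnownLayout, Immutable, Unaligned)]
#[repr(C)]
struct Header {
    len: U32<LittleEndian>,
    flags: U32<LittleEndian>,
}

fn main() {
    let mut buf = [0u8; 8];

    // Shared read: requires `Header: Immutable`, i.e. no interior mutability
    // is reachable through the resulting `&Header`, so unsynchronized reads
    // are fine even though other `&Header`s may alias the same bytes.
    let hdr: &Header = Header::ref_from_bytes(&buf[..]).unwrap();
    assert_eq!(hdr.len.get(), 0);

    // Exclusive access does not need `Immutable`: the `&mut` rules out any
    // other live reference to these bytes for as long as it exists.
    let hdr_mut: &mut Header = Header::mut_from_bytes(&mut buf[..]).unwrap();
    hdr_mut.flags = U32::new(1);
    assert_eq!(buf[4], 1);
}
```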
+pub trait Read<A: Aliasing, R> {} + +impl<A: Aliasing, T: ?Sized + crate::Immutable> Read<A, BecauseImmutable> for T {} +impl<T: ?Sized> Read<Exclusive, BecauseExclusive> for T {} + +/// Unsynchronized reads are permitted because only one live [`Ptr`](crate::Ptr) +/// or reference may exist to the referent bytes at a time. +#[derive(Copy, Clone, Debug)] +#[doc(hidden)] +pub enum BecauseExclusive {} + +/// Unsynchronized reads are permitted because no live [`Ptr`](crate::Ptr)s or +/// references permit interior mutation. +#[derive(Copy, Clone, Debug)] +#[doc(hidden)] +pub enum BecauseImmutable {} + +use sealed::Sealed; +mod sealed { + use super::*; + + pub trait Sealed {} + + impl Sealed for Shared {} + impl Sealed for Exclusive {} + + impl Sealed for Unaligned {} + impl Sealed for Aligned {} + + impl Sealed for Uninit {} + impl Sealed for AsInitialized {} + impl Sealed for Initialized {} + impl Sealed for Valid {} + + impl<A: Sealed, AA: Sealed, V: Sealed> Sealed for (A, AA, V) {} + + impl Sealed for BecauseImmutable {} + impl Sealed for BecauseExclusive {} +} diff --git a/vendor/zerocopy/src/pointer/mod.rs b/vendor/zerocopy/src/pointer/mod.rs new file mode 100644 index 00000000..6005bc78 --- /dev/null +++ b/vendor/zerocopy/src/pointer/mod.rs @@ -0,0 +1,40 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +//! Abstractions over raw pointers. + +mod inner; +#[doc(hidden)] +pub mod invariant; +mod ptr; +mod transmute; + +#[doc(hidden)] +pub use {inner::PtrInner, transmute::*}; +#[doc(hidden)] +pub use { + invariant::{BecauseExclusive, BecauseImmutable, Read}, + ptr::Ptr, +}; + +/// A shorthand for a maybe-valid, maybe-aligned reference. Used as the argument +/// to [`TryFromBytes::is_bit_valid`]. +/// +/// [`TryFromBytes::is_bit_valid`]: crate::TryFromBytes::is_bit_valid +pub type Maybe<'a, T, Aliasing = invariant::Shared, Alignment = invariant::Unaligned> = + Ptr<'a, T, (Aliasing, Alignment, invariant::Initialized)>; + +/// Checks if the referent is zeroed. +pub(crate) fn is_zeroed<T, I>(ptr: Ptr<'_, T, I>) -> bool +where + T: crate::Immutable + crate::KnownLayout, + I: invariant::Invariants<Validity = invariant::Initialized>, + I::Aliasing: invariant::Reference, +{ + ptr.as_bytes::<BecauseImmutable>().as_ref().iter().all(|&byte| byte == 0) +} diff --git a/vendor/zerocopy/src/pointer/ptr.rs b/vendor/zerocopy/src/pointer/ptr.rs new file mode 100644 index 00000000..af7752af --- /dev/null +++ b/vendor/zerocopy/src/pointer/ptr.rs @@ -0,0 +1,1491 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +use core::{ + fmt::{Debug, Formatter}, + marker::PhantomData, +}; + +use crate::{ + pointer::{ + inner::PtrInner, + invariant::*, + transmute::{MutationCompatible, SizeEq, TransmuteFromPtr}, + }, + AlignmentError, CastError, CastType, KnownLayout, SizeError, TryFromBytes, ValidityError, +}; + +/// Module used to gate access to [`Ptr`]'s fields. +mod def { + #[cfg(doc)] + use super::super::invariant; + use super::*; + + /// A raw pointer with more restrictions. + /// + /// `Ptr<T>` is similar to [`NonNull<T>`], but it is more restrictive in the + /// following ways (note that these requirements only hold of non-zero-sized + /// referents): + /// - It must derive from a valid allocation. + /// - It must reference a byte range which is contained inside the + /// allocation from which it derives. + /// - As a consequence, the byte range it references must have a size + /// which does not overflow `isize`. + /// + /// Depending on how `Ptr` is parameterized, it may have additional + /// invariants: + /// - `ptr` conforms to the aliasing invariant of + /// [`I::Aliasing`](invariant::Aliasing). + /// - `ptr` conforms to the alignment invariant of + /// [`I::Alignment`](invariant::Alignment). + /// - `ptr` conforms to the validity invariant of + /// [`I::Validity`](invariant::Validity). + /// + /// `Ptr<'a, T>` is [covariant] in `'a` and invariant in `T`. + /// + /// [`NonNull<T>`]: core::ptr::NonNull + /// [covariant]: https://doc.rust-lang.org/reference/subtyping.html + pub struct Ptr<'a, T, I> + where + T: ?Sized, + I: Invariants, + { + /// # Invariants + /// + /// 0. `ptr` conforms to the aliasing invariant of + /// [`I::Aliasing`](invariant::Aliasing). + /// 1. `ptr` conforms to the alignment invariant of + /// [`I::Alignment`](invariant::Alignment). + /// 2. `ptr` conforms to the validity invariant of + /// [`I::Validity`](invariant::Validity). + // SAFETY: `PtrInner<'a, T>` is covariant in `'a` and invariant in `T`. + ptr: PtrInner<'a, T>, + _invariants: PhantomData<I>, + } + + impl<'a, T, I> Ptr<'a, T, I> + where + T: 'a + ?Sized, + I: Invariants, + { + /// Constructs a new `Ptr` from a [`PtrInner`]. + /// + /// # Safety + /// + /// The caller promises that: + /// + /// 0. `ptr` conforms to the aliasing invariant of + /// [`I::Aliasing`](invariant::Aliasing). + /// 1. `ptr` conforms to the alignment invariant of + /// [`I::Alignment`](invariant::Alignment). + /// 2. `ptr` conforms to the validity invariant of + /// [`I::Validity`](invariant::Validity). + pub(crate) unsafe fn from_inner(ptr: PtrInner<'a, T>) -> Ptr<'a, T, I> { + // SAFETY: The caller has promised to satisfy all safety invariants + // of `Ptr`. + Self { ptr, _invariants: PhantomData } + } + + /// Converts this `Ptr<T>` to a [`PtrInner<T>`]. + /// + /// Note that this method does not consume `self`. The caller should + /// watch out for `unsafe` code which uses the returned value in a way + /// that violates the safety invariants of `self`. + pub(crate) fn as_inner(&self) -> PtrInner<'a, T> { + self.ptr + } + } +} + +#[allow(unreachable_pub)] // This is a false positive on our MSRV toolchain. +pub use def::Ptr; + +/// External trait implementations on [`Ptr`]. +mod _external { + use super::*; + + /// SAFETY: Shared pointers are safely `Copy`. `Ptr`'s other invariants + /// (besides aliasing) are unaffected by the number of references that exist + /// to `Ptr`'s referent. 
The notable cases are: + /// - Alignment is a property of the referent type (`T`) and the address, + /// both of which are unchanged + /// - Let `S(T, V)` be the set of bit values permitted to appear in the + /// referent of a `Ptr<T, I: Invariants<Validity = V>>`. Since this copy + /// does not change `I::Validity` or `T`, `S(T, I::Validity)` is also + /// unchanged. + /// + /// We are required to guarantee that the referents of the original `Ptr` + /// and of the copy (which, of course, are actually the same since they + /// live in the same byte address range) both remain in the set `S(T, + /// I::Validity)`. Since this invariant holds on the original `Ptr`, it + /// cannot be violated by the original `Ptr`, and thus the original `Ptr` + /// cannot be used to violate this invariant on the copy. The inverse + /// holds as well. + impl<'a, T, I> Copy for Ptr<'a, T, I> + where + T: 'a + ?Sized, + I: Invariants<Aliasing = Shared>, + { + } + + /// SAFETY: See the safety comment on `Copy`. + impl<'a, T, I> Clone for Ptr<'a, T, I> + where + T: 'a + ?Sized, + I: Invariants<Aliasing = Shared>, + { + #[inline] + fn clone(&self) -> Self { + *self + } + } + + impl<'a, T, I> Debug for Ptr<'a, T, I> + where + T: 'a + ?Sized, + I: Invariants, + { + #[inline] + fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result { + self.as_inner().as_non_null().fmt(f) + } + } +} + +/// Methods for converting to and from `Ptr` and Rust's safe reference types. +mod _conversions { + use super::*; + + /// `&'a T` → `Ptr<'a, T>` + impl<'a, T> Ptr<'a, T, (Shared, Aligned, Valid)> + where + T: 'a + ?Sized, + { + /// Constructs a `Ptr` from a shared reference. + #[doc(hidden)] + #[inline] + pub fn from_ref(ptr: &'a T) -> Self { + let inner = PtrInner::from_ref(ptr); + // SAFETY: + // 0. `ptr`, by invariant on `&'a T`, conforms to the aliasing + // invariant of `Shared`. + // 1. `ptr`, by invariant on `&'a T`, conforms to the alignment + // invariant of `Aligned`. + // 2. `ptr`'s referent, by invariant on `&'a T`, is a bit-valid `T`. + // This satisfies the requirement that a `Ptr<T, (_, _, Valid)>` + // point to a bit-valid `T`. Even if `T` permits interior + // mutation, this invariant guarantees that the returned `Ptr` + // can only ever be used to modify the referent to store + // bit-valid `T`s, which ensures that the returned `Ptr` cannot + // be used to violate the soundness of the original `ptr: &'a T` + // or of any other references that may exist to the same + // referent. + unsafe { Self::from_inner(inner) } + } + } + + /// `&'a mut T` → `Ptr<'a, T>` + impl<'a, T> Ptr<'a, T, (Exclusive, Aligned, Valid)> + where + T: 'a + ?Sized, + { + /// Constructs a `Ptr` from an exclusive reference. + #[inline] + pub(crate) fn from_mut(ptr: &'a mut T) -> Self { + let inner = PtrInner::from_mut(ptr); + // SAFETY: + // 0. `ptr`, by invariant on `&'a mut T`, conforms to the aliasing + // invariant of `Exclusive`. + // 1. `ptr`, by invariant on `&'a mut T`, conforms to the alignment + // invariant of `Aligned`. + // 2. `ptr`'s referent, by invariant on `&'a mut T`, is a bit-valid + // `T`. This satisfies the requirement that a `Ptr<T, (_, _, + // Valid)>` point to a bit-valid `T`. This invariant guarantees + // that the returned `Ptr` can only ever be used to modify the + // referent to store bit-valid `T`s, which ensures that the + // returned `Ptr` cannot be used to violate the soundness of the + // original `ptr: &'a mut T`. 
+ unsafe { Self::from_inner(inner) } + } + } + + /// `Ptr<'a, T>` → `&'a T` + impl<'a, T, I> Ptr<'a, T, I> + where + T: 'a + ?Sized, + I: Invariants<Alignment = Aligned, Validity = Valid>, + I::Aliasing: Reference, + { + /// Converts `self` to a shared reference. + // This consumes `self`, not `&self`, because `self` is, logically, a + // pointer. For `I::Aliasing = invariant::Shared`, `Self: Copy`, and so + // this doesn't prevent the caller from still using the pointer after + // calling `as_ref`. + #[allow(clippy::wrong_self_convention)] + pub(crate) fn as_ref(self) -> &'a T { + let raw = self.as_inner().as_non_null(); + // SAFETY: `self` satisfies the `Aligned` invariant, so we know that + // `raw` is validly-aligned for `T`. + #[cfg(miri)] + unsafe { + crate::util::miri_promise_symbolic_alignment( + raw.as_ptr().cast(), + core::mem::align_of_val_raw(raw.as_ptr()), + ); + } + // SAFETY: This invocation of `NonNull::as_ref` satisfies its + // documented safety preconditions: + // + // 1. The pointer is properly aligned. This is ensured by-contract + // on `Ptr`, because the `I::Alignment` is `Aligned`. + // + // 2. If the pointer's referent is not zero-sized, then the pointer + // must be “dereferenceable” in the sense defined in the module + // documentation; i.e.: + // + // > The memory range of the given size starting at the pointer + // > must all be within the bounds of a single allocated object. + // > [2] + // + // This is ensured by contract on all `PtrInner`s. + // + // 3. The pointer must point to a validly-initialized instance of + // `T`. This is ensured by-contract on `Ptr`, because the + // `I::Validity` is `Valid`. + // + // 4. You must enforce Rust’s aliasing rules. This is ensured by + // contract on `Ptr`, because `I::Aliasing: Reference`. Either it + // is `Shared` or `Exclusive`. If it is `Shared`, other + // references may not mutate the referent outside of + // `UnsafeCell`s. + // + // [1]: https://doc.rust-lang.org/std/ptr/struct.NonNull.html#method.as_ref + // [2]: https://doc.rust-lang.org/std/ptr/index.html#safety + unsafe { raw.as_ref() } + } + } + + impl<'a, T, I> Ptr<'a, T, I> + where + T: 'a + ?Sized, + I: Invariants, + I::Aliasing: Reference, + { + /// Reborrows `self`, producing another `Ptr`. + /// + /// Since `self` is borrowed immutably, this prevents any mutable + /// methods from being called on `self` as long as the returned `Ptr` + /// exists. + #[doc(hidden)] + #[inline] + #[allow(clippy::needless_lifetimes)] // Allows us to name the lifetime in the safety comment below. + pub fn reborrow<'b>(&'b mut self) -> Ptr<'b, T, I> + where + 'a: 'b, + { + // SAFETY: The following all hold by invariant on `self`, and thus + // hold of `ptr = self.as_inner()`: + // 0. SEE BELOW. + // 1. `ptr` conforms to the alignment invariant of + // [`I::Alignment`](invariant::Alignment). + // 2. `ptr` conforms to the validity invariant of + // [`I::Validity`](invariant::Validity). `self` and the returned + // `Ptr` permit the same bit values in their referents since they + // have the same referent type (`T`) and the same validity + // (`I::Validity`). Thus, regardless of what mutation is + // permitted (`Exclusive` aliasing or `Shared`-aliased interior + // mutation), neither can be used to write a value to the + // referent which violates the other's validity invariant. 
+ // + // For aliasing (0 above), since `I::Aliasing: Reference`, + // there are two cases for `I::Aliasing`: + // - For `invariant::Shared`: `'a` outlives `'b`, and so the + // returned `Ptr` does not permit accessing the referent any + // longer than is possible via `self`. For shared aliasing, it is + // sound for multiple `Ptr`s to exist simultaneously which + // reference the same memory, so creating a new one is not + // problematic. + // - For `invariant::Exclusive`: Since `self` is `&'b mut` and we + // return a `Ptr` with lifetime `'b`, `self` is inaccessible to + // the caller for the lifetime `'b` - in other words, `self` is + // inaccessible to the caller as long as the returned `Ptr` + // exists. Since `self` is an exclusive `Ptr`, no other live + // references or `Ptr`s may exist which refer to the same memory + // while `self` is live. Thus, as long as the returned `Ptr` + // exists, no other references or `Ptr`s which refer to the same + // memory may be live. + unsafe { Ptr::from_inner(self.as_inner()) } + } + } + + /// `Ptr<'a, T>` → `&'a mut T` + impl<'a, T> Ptr<'a, T, (Exclusive, Aligned, Valid)> + where + T: 'a + ?Sized, + { + /// Converts `self` to a mutable reference. + #[allow(clippy::wrong_self_convention)] + pub(crate) fn as_mut(self) -> &'a mut T { + let mut raw = self.as_inner().as_non_null(); + // SAFETY: `self` satisfies the `Aligned` invariant, so we know that + // `raw` is validly-aligned for `T`. + #[cfg(miri)] + unsafe { + crate::util::miri_promise_symbolic_alignment( + raw.as_ptr().cast(), + core::mem::align_of_val_raw(raw.as_ptr()), + ); + } + // SAFETY: This invocation of `NonNull::as_mut` satisfies its + // documented safety preconditions: + // + // 1. The pointer is properly aligned. This is ensured by-contract + // on `Ptr`, because the `ALIGNMENT_INVARIANT` is `Aligned`. + // + // 2. If the pointer's referent is not zero-sized, then the pointer + // must be “dereferenceable” in the sense defined in the module + // documentation; i.e.: + // + // > The memory range of the given size starting at the pointer + // > must all be within the bounds of a single allocated object. + // > [2] + // + // This is ensured by contract on all `PtrInner`s. + // + // 3. The pointer must point to a validly-initialized instance of + // `T`. This is ensured by-contract on `Ptr`, because the + // validity invariant is `Valid`. + // + // 4. You must enforce Rust’s aliasing rules. This is ensured by + // contract on `Ptr`, because the `ALIASING_INVARIANT` is + // `Exclusive`. 
+ // + // [1]: https://doc.rust-lang.org/std/ptr/struct.NonNull.html#method.as_mut + // [2]: https://doc.rust-lang.org/std/ptr/index.html#safety + unsafe { raw.as_mut() } + } + } + + /// `Ptr<'a, T>` → `Ptr<'a, U>` + impl<'a, T: ?Sized, I> Ptr<'a, T, I> + where + I: Invariants, + { + pub(crate) fn transmute<U, V, R>(self) -> Ptr<'a, U, (I::Aliasing, Unaligned, V)> + where + V: Validity, + U: TransmuteFromPtr<T, I::Aliasing, I::Validity, V, R> + SizeEq<T> + ?Sized, + { + // SAFETY: + // - `SizeEq::cast_from_raw` promises to preserve address, + // provenance, and the number of bytes in the referent + // - If aliasing is `Shared`, then by `U: TransmuteFromPtr<T>`, at + // least one of the following holds: + // - `T: Immutable` and `U: Immutable`, in which case it is + // trivially sound for shared code to operate on a `&T` and `&U` + // at the same time, as neither can perform interior mutation + // - It is directly guaranteed that it is sound for shared code to + // operate on these references simultaneously + // - By `U: TransmuteFromPtr<T, I::Aliasing, I::Validity, V>`, it is + // sound to perform this transmute. + unsafe { self.transmute_unchecked(SizeEq::cast_from_raw) } + } + + #[doc(hidden)] + #[inline(always)] + #[must_use] + pub fn recall_validity<V, R>(self) -> Ptr<'a, T, (I::Aliasing, I::Alignment, V)> + where + V: Validity, + T: TransmuteFromPtr<T, I::Aliasing, I::Validity, V, R>, + { + // SAFETY: + // - This cast is a no-op, and so trivially preserves address, + // referent size, and provenance + // - It is trivially sound to have multiple `&T` referencing the + // same referent simultaneously + // - By `T: TransmuteFromPtr<T, I::Aliasing, I::Validity, V>`, it is + // sound to perform this transmute. + let ptr = unsafe { self.transmute_unchecked(SizeEq::cast_from_raw) }; + // SAFETY: `self` and `ptr` have the same address and referent type. + // Therefore, if `self` satisfies `I::Alignment`, then so does + // `ptr`. + unsafe { ptr.assume_alignment::<I::Alignment>() } + } + + /// Casts to a different (unsized) target type without checking interior + /// mutability. + /// + /// Callers should prefer [`cast_unsized`] where possible. + /// + /// [`cast_unsized`]: Ptr::cast_unsized + /// + /// # Safety + /// + /// The caller promises that `u = cast(p)` is a pointer cast with the + /// following properties: + /// - `u` addresses a subset of the bytes addressed by `p` + /// - `u` has the same provenance as `p` + /// - If `I::Aliasing` is [`Shared`], it must not be possible for safe + /// code, operating on a `&T` and `&U` with the same referent + /// simultaneously, to cause undefined behavior + /// - It is sound to transmute a pointer of type `T` with aliasing + /// `I::Aliasing` and validity `I::Validity` to a pointer of type `U` + /// with aliasing `I::Aliasing` and validity `V`. This is a subtle + /// soundness requirement that is a function of `T`, `U`, + /// `I::Aliasing`, `I::Validity`, and `V`, and may depend upon the + /// presence, absence, or specific location of `UnsafeCell`s in `T` + /// and/or `U`. See [`Validity`] for more details. 
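Every obligation listed for `transmute_unchecked` is discharged statically at the crate's public surface. As a hedged sketch (assuming the vendored crate exposes zerocopy 0.8's `transmute_ref!` macro), the macro's `FromBytes`/`IntoBytes`/`Immutable` bounds plus its size and alignment checks are what make the same kind of pointer transmute safe to call:

```rust
use zerocopy::transmute_ref;

fn main() {
    let words: [u32; 2] = [0x0102_0304, 0x0506_0708];

    // `[u32; 2]` and `[u8; 8]` have the same size, the destination's alignment
    // (1) does not exceed the source's, and every initialized bit pattern is
    // valid for both types, so the transmute cannot create an invalid referent
    // on either side. The macro verifies all of this at compile time.
    let bytes: &[u8; 8] = transmute_ref!(&words);
    assert_eq!(bytes.len(), 8);
}
```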
+ #[doc(hidden)] + #[inline] + pub unsafe fn transmute_unchecked<U: ?Sized, V, F>( + self, + cast: F, + ) -> Ptr<'a, U, (I::Aliasing, Unaligned, V)> + where + V: Validity, + F: FnOnce(PtrInner<'a, T>) -> PtrInner<'a, U>, + { + let ptr = cast(self.as_inner()); + + // SAFETY: + // + // The following safety arguments rely on the fact that the caller + // promises that `cast` returns a `PtrInner` which addresses a + // prefix of the bytes of `*self`, and so properties that hold of + // `*self` also hold of `*ptr`. + // + // 0. `ptr` conforms to the aliasing invariant of `I::Aliasing`: + // - `Exclusive`: `self` is the only `Ptr` or reference which is + // permitted to read or modify the referent for the lifetime + // `'a`. Since we consume `self` by value, the returned pointer + // remains the only `Ptr` or reference which is permitted to + // read or modify the referent for the lifetime `'a`. + // - `Shared`: Since `self` has aliasing `Shared`, we know that + // no other code may mutate the referent during the lifetime + // `'a`, except via `UnsafeCell`s, and except as permitted by + // `T`'s library safety invariants. The caller promises that + // any safe operations which can be permitted on a `&T` and a + // `&U` simultaneously must be sound. Thus, no operations on a + // `&U` could violate `&T`'s library safety invariants, and + // vice-versa. Since any mutation via shared references outside + // of `UnsafeCell`s is unsound, this must be impossible using + // `&T` and `&U`. + // - `Inaccessible`: There are no restrictions we need to uphold. + // 1. `ptr` trivially satisfies the alignment invariant `Unaligned`. + // 2. The caller promises that `ptr` conforms to the validity + // invariant `V` with respect to its referent type, `U`. + unsafe { Ptr::from_inner(ptr) } + } + } + + /// `Ptr<'a, T, (_, _, _)>` → `Ptr<'a, Unalign<T>, (_, Aligned, _)>` + impl<'a, T, I> Ptr<'a, T, I> + where + I: Invariants, + { + /// Converts a `Ptr` an unaligned `T` into a `Ptr` to an aligned + /// `Unalign<T>`. + pub(crate) fn into_unalign( + self, + ) -> Ptr<'a, crate::Unalign<T>, (I::Aliasing, Aligned, I::Validity)> { + // SAFETY: + // - This cast preserves provenance. + // - This cast preserves address. `Unalign<T>` promises to have the + // same size as `T`, and so the cast returns a pointer addressing + // the same byte range as `p`. + // - By the same argument, the returned pointer refers to + // `UnsafeCell`s at the same locations as `p`. + // - `Unalign<T>` promises to have the same bit validity as `T`. By + // invariant on `Validity`, the set of bit patterns allowed in the + // referent of a `Ptr<X, (_, _, V)>` is only a function of the + // validity of `X` and of `V`. Thus, the set of bit patterns + // allowed in the referent of a `Ptr<T, (_, _, I::Validity)>` is + // the same as the set of bit patterns allowed in the referent of + // a `Ptr<Unalign<T>, (_, _, I::Validity)>`. As a result, `self` + // and the returned `Ptr` permit the same set of bit patterns in + // their referents, and so neither can be used to violate the + // validity of the other. + let ptr = unsafe { self.transmute_unchecked(PtrInner::cast_sized) }; + ptr.bikeshed_recall_aligned() + } + } + + impl<'a, T, I> Ptr<'a, T, I> + where + T: ?Sized, + I: Invariants<Validity = Valid>, + I::Aliasing: Reference, + { + /// Reads the referent. 
+ #[must_use] + #[inline] + pub fn read_unaligned<R>(self) -> T + where + T: Copy, + T: Read<I::Aliasing, R>, + { + (*self.into_unalign().as_ref()).into_inner() + } + + /// Views the value as an aligned reference. + /// + /// This is only available if `T` is [`Unaligned`]. + #[must_use] + #[inline] + pub fn unaligned_as_ref(self) -> &'a T + where + T: crate::Unaligned, + { + self.bikeshed_recall_aligned().as_ref() + } + } +} + +/// State transitions between invariants. +mod _transitions { + use super::*; + use crate::pointer::transmute::TryTransmuteFromPtr; + + impl<'a, T, I> Ptr<'a, T, I> + where + T: 'a + ?Sized, + I: Invariants, + { + /// Returns a `Ptr` with [`Exclusive`] aliasing if `self` already has + /// `Exclusive` aliasing, or generates a compile-time assertion failure. + /// + /// This allows code which is generic over aliasing to down-cast to a + /// concrete aliasing. + /// + /// [`Exclusive`]: crate::pointer::invariant::Exclusive + #[inline] + pub(crate) fn into_exclusive_or_pme( + self, + ) -> Ptr<'a, T, (Exclusive, I::Alignment, I::Validity)> { + // NOTE(https://github.com/rust-lang/rust/issues/131625): We do this + // rather than just having `Aliasing::IS_EXCLUSIVE` have the panic + // behavior because doing it that way causes rustdoc to fail while + // attempting to document hidden items (since it evaluates the + // constant - and thus panics). + trait AliasingExt: Aliasing { + const IS_EXCL: bool; + } + + impl<A: Aliasing> AliasingExt for A { + const IS_EXCL: bool = { + const_assert!(Self::IS_EXCLUSIVE); + true + }; + } + + assert!(I::Aliasing::IS_EXCL); + + // SAFETY: We've confirmed that `self` already has the aliasing + // `Exclusive`. If it didn't, either the preceding assert would fail + // or evaluating `I::Aliasing::IS_EXCL` would fail. We're *pretty* + // sure that it's guaranteed to fail const eval, but the `assert!` + // provides a backstop in case that doesn't work. + unsafe { self.assume_exclusive() } + } + + /// Assumes that `self` satisfies the invariants `H`. + /// + /// # Safety + /// + /// The caller promises that `self` satisfies the invariants `H`. + unsafe fn assume_invariants<H: Invariants>(self) -> Ptr<'a, T, H> { + // SAFETY: The caller has promised to satisfy all parameterized + // invariants of `Ptr`. `Ptr`'s other invariants are satisfied + // by-contract by the source `Ptr`. + unsafe { Ptr::from_inner(self.as_inner()) } + } + + /// Helps the type system unify two distinct invariant types which are + /// actually the same. + pub(crate) fn unify_invariants< + H: Invariants<Aliasing = I::Aliasing, Alignment = I::Alignment, Validity = I::Validity>, + >( + self, + ) -> Ptr<'a, T, H> { + // SAFETY: The associated type bounds on `H` ensure that the + // invariants are unchanged. + unsafe { self.assume_invariants::<H>() } + } + + /// Assumes that `self` satisfies the aliasing requirement of `A`. + /// + /// # Safety + /// + /// The caller promises that `self` satisfies the aliasing requirement + /// of `A`. + #[inline] + pub(crate) unsafe fn assume_aliasing<A: Aliasing>( + self, + ) -> Ptr<'a, T, (A, I::Alignment, I::Validity)> { + // SAFETY: The caller promises that `self` satisfies the aliasing + // requirements of `A`. + unsafe { self.assume_invariants() } + } + + /// Assumes `self` satisfies the aliasing requirement of [`Exclusive`]. + /// + /// # Safety + /// + /// The caller promises that `self` satisfies the aliasing requirement + /// of `Exclusive`. 
+ /// + /// [`Exclusive`]: crate::pointer::invariant::Exclusive + #[inline] + pub(crate) unsafe fn assume_exclusive( + self, + ) -> Ptr<'a, T, (Exclusive, I::Alignment, I::Validity)> { + // SAFETY: The caller promises that `self` satisfies the aliasing + // requirements of `Exclusive`. + unsafe { self.assume_aliasing::<Exclusive>() } + } + + /// Assumes that `self`'s referent is validly-aligned for `T` if + /// required by `A`. + /// + /// # Safety + /// + /// The caller promises that `self`'s referent conforms to the alignment + /// invariant of `T` if required by `A`. + #[inline] + pub(crate) unsafe fn assume_alignment<A: Alignment>( + self, + ) -> Ptr<'a, T, (I::Aliasing, A, I::Validity)> { + // SAFETY: The caller promises that `self`'s referent is + // well-aligned for `T` if required by `A` . + unsafe { self.assume_invariants() } + } + + /// Checks the `self`'s alignment at runtime, returning an aligned `Ptr` + /// on success. + pub(crate) fn try_into_aligned( + self, + ) -> Result<Ptr<'a, T, (I::Aliasing, Aligned, I::Validity)>, AlignmentError<Self, T>> + where + T: Sized, + { + if let Err(err) = + crate::util::validate_aligned_to::<_, T>(self.as_inner().as_non_null()) + { + return Err(err.with_src(self)); + } + + // SAFETY: We just checked the alignment. + Ok(unsafe { self.assume_alignment::<Aligned>() }) + } + + /// Recalls that `self`'s referent is validly-aligned for `T`. + #[inline] + // FIXME(#859): Reconsider the name of this method before making it + // public. + pub(crate) fn bikeshed_recall_aligned( + self, + ) -> Ptr<'a, T, (I::Aliasing, Aligned, I::Validity)> + where + T: crate::Unaligned, + { + // SAFETY: The bound `T: Unaligned` ensures that `T` has no + // non-trivial alignment requirement. + unsafe { self.assume_alignment::<Aligned>() } + } + + /// Assumes that `self`'s referent conforms to the validity requirement + /// of `V`. + /// + /// # Safety + /// + /// The caller promises that `self`'s referent conforms to the validity + /// requirement of `V`. + #[doc(hidden)] + #[must_use] + #[inline] + pub unsafe fn assume_validity<V: Validity>( + self, + ) -> Ptr<'a, T, (I::Aliasing, I::Alignment, V)> { + // SAFETY: The caller promises that `self`'s referent conforms to + // the validity requirement of `V`. + unsafe { self.assume_invariants() } + } + + /// A shorthand for `self.assume_validity<invariant::Initialized>()`. + /// + /// # Safety + /// + /// The caller promises to uphold the safety preconditions of + /// `self.assume_validity<invariant::Initialized>()`. + #[doc(hidden)] + #[must_use] + #[inline] + pub unsafe fn assume_initialized( + self, + ) -> Ptr<'a, T, (I::Aliasing, I::Alignment, Initialized)> { + // SAFETY: The caller has promised to uphold the safety + // preconditions. + unsafe { self.assume_validity::<Initialized>() } + } + + /// A shorthand for `self.assume_validity<Valid>()`. + /// + /// # Safety + /// + /// The caller promises to uphold the safety preconditions of + /// `self.assume_validity<Valid>()`. + #[doc(hidden)] + #[must_use] + #[inline] + pub unsafe fn assume_valid(self) -> Ptr<'a, T, (I::Aliasing, I::Alignment, Valid)> { + // SAFETY: The caller has promised to uphold the safety + // preconditions. + unsafe { self.assume_validity::<Valid>() } + } + + /// Recalls that `self`'s referent is initialized. + #[doc(hidden)] + #[must_use] + #[inline] + // FIXME(#859): Reconsider the name of this method before making it + // public. 
+ pub fn bikeshed_recall_initialized_from_bytes( + self, + ) -> Ptr<'a, T, (I::Aliasing, I::Alignment, Initialized)> + where + T: crate::IntoBytes + crate::FromBytes, + I: Invariants<Validity = Valid>, + { + // SAFETY: The `T: IntoBytes + FromBytes` bound ensures that `T`'s + // bit validity is equivalent to `[u8]`. In other words, the set of + // allowed referents for a `Ptr<T, (_, _, Valid)>` is the set of + // initialized bit patterns. The same is true of the set of allowed + // referents for any `Ptr<_, (_, _, Initialized)>`. Thus, this call + // does not change the set of allowed values in the referent. + unsafe { self.assume_initialized() } + } + + /// Recalls that `self`'s referent is initialized. + #[doc(hidden)] + #[must_use] + #[inline] + // FIXME(#859): Reconsider the name of this method before making it + // public. + pub fn bikeshed_recall_initialized_immutable( + self, + ) -> Ptr<'a, T, (Shared, I::Alignment, Initialized)> + where + T: crate::IntoBytes + crate::Immutable, + I: Invariants<Aliasing = Shared, Validity = Valid>, + { + // SAFETY: Let `O` (for "old") be the set of allowed bit patterns in + // `self`'s referent, and let `N` (for "new") be the set of allowed + // bit patterns in the referent of the returned `Ptr`. `T: + // IntoBytes` and `I: Invariants<Validity = Valid>` ensures that `O` + // cannot contain any uninitialized bit patterns. Since the returned + // `Ptr` has validity `Initialized`, `N` is equal to the set of all + // initialized bit patterns. Thus, `O` is a subset of `N`, and so + // the returned `Ptr`'s validity invariant is upheld. + // + // Since `T: Immutable` and aliasing is `Shared`, the returned `Ptr` + // cannot be used to modify the referent. Before this call, `self`'s + // referent is guaranteed by invariant on `Ptr` to satisfy `self`'s + // validity invariant. Since the returned `Ptr` cannot be used to + // modify the referent, this guarantee cannot be violated by the + // returned `Ptr` (even if `O` is a strict subset of `N`). + unsafe { self.assume_initialized() } + } + + /// Checks that `self`'s referent is validly initialized for `T`, + /// returning a `Ptr` with `Valid` on success. + /// + /// # Panics + /// + /// This method will panic if + /// [`T::is_bit_valid`][TryFromBytes::is_bit_valid] panics. + /// + /// # Safety + /// + /// On error, unsafe code may rely on this method's returned + /// `ValidityError` containing `self`. + #[inline] + pub(crate) fn try_into_valid<R, S>( + mut self, + ) -> Result<Ptr<'a, T, (I::Aliasing, I::Alignment, Valid)>, ValidityError<Self, T>> + where + T: TryFromBytes + + Read<I::Aliasing, R> + + TryTransmuteFromPtr<T, I::Aliasing, I::Validity, Valid, S>, + I::Aliasing: Reference, + I: Invariants<Validity = Initialized>, + { + // This call may panic. If that happens, it doesn't cause any + // soundness issues, as we have not generated any invalid state + // which we need to fix before returning. + if T::is_bit_valid(self.reborrow().forget_aligned()) { + // SAFETY: If `T::is_bit_valid`, code may assume that `self` + // contains a bit-valid instance of `T`. By `T: + // TryTransmuteFromPtr<T, I::Aliasing, I::Validity, Valid>`, so + // long as `self`'s referent conforms to the `Valid` validity + // for `T` (which we just confirmed), then this transmute is + // sound. + Ok(unsafe { self.assume_valid() }) + } else { + Err(ValidityError::new(self)) + } + } + + /// Forgets that `self`'s referent is validly-aligned for `T`. 
+ #[doc(hidden)] + #[must_use] + #[inline] + pub fn forget_aligned(self) -> Ptr<'a, T, (I::Aliasing, Unaligned, I::Validity)> { + // SAFETY: `Unaligned` is less restrictive than `Aligned`. + unsafe { self.assume_invariants() } + } + } +} + +/// Casts of the referent type. +mod _casts { + use super::*; + + impl<'a, T, I> Ptr<'a, T, I> + where + T: 'a + ?Sized, + I: Invariants, + { + /// Casts to a different (unsized) target type without checking interior + /// mutability. + /// + /// Callers should prefer [`cast_unsized`] where possible. + /// + /// [`cast_unsized`]: Ptr::cast_unsized + /// + /// # Safety + /// + /// The caller promises that `u = cast(p)` is a pointer cast with the + /// following properties: + /// - `u` addresses a subset of the bytes addressed by `p` + /// - `u` has the same provenance as `p` + /// - If `I::Aliasing` is [`Shared`], it must not be possible for safe + /// code, operating on a `&T` and `&U` with the same referent + /// simultaneously, to cause undefined behavior + /// + /// `cast_unsized_unchecked` guarantees that the pointer passed to + /// `cast` will reference a byte sequence which is either contained + /// inside a single allocated object or is zero sized. In either case, + /// this means that its size will fit in an `isize` and it will not wrap + /// around the address space. + #[doc(hidden)] + #[inline] + pub unsafe fn cast_unsized_unchecked<U, F: FnOnce(PtrInner<'a, T>) -> PtrInner<'a, U>>( + self, + cast: F, + ) -> Ptr<'a, U, (I::Aliasing, Unaligned, I::Validity)> + where + U: 'a + CastableFrom<T, I::Validity, I::Validity> + ?Sized, + { + // SAFETY: + // - The caller promises that `u = cast(p)` is a pointer which + // satisfies: + // - `u` addresses a subset of the bytes addressed by `p` + // - `u` has the same provenance as `p` + // - If `I::Aliasing` is [`Shared`], it must not be possible for + // safe code, operating on a `&T` and `&U` with the same + // referent simultaneously, to cause undefined behavior + // - By `U: CastableFrom<T, I::Validity, I::Validity>`, + // `I::Validity` is either `Uninit` or `Initialized`. In both + // cases, the bit validity `I::Validity` has the same semantics + // regardless of referent type. In other words, the set of allowed + // referent values for `Ptr<T, (_, _, I::Validity)>` and `Ptr<U, + // (_, _, I::Validity)>` are identical. As a consequence, neither + // `self` nor the returned `Ptr` can be used to write values which + // are invalid for the other. + // + // `transmute_unchecked` guarantees that it will only pass pointers + // to `cast` which either reference a zero-sized byte range or + // reference a byte range which is entirely contained inside of an + // allocated object. + #[allow(clippy::multiple_unsafe_ops_per_block)] + unsafe { + self.transmute_unchecked(cast) + } + } + + /// Casts to a different (unsized) target type. 
+ /// + /// # Safety + /// + /// The caller promises that `u = cast(p)` is a pointer cast with the + /// following properties: + /// - `u` addresses a subset of the bytes addressed by `p` + /// - `u` has the same provenance as `p` + #[doc(hidden)] + #[inline] + pub unsafe fn cast_unsized<U, F, R>( + self, + cast: F, + ) -> Ptr<'a, U, (I::Aliasing, Unaligned, I::Validity)> + where + T: MutationCompatible<U, I::Aliasing, I::Validity, I::Validity, R>, + U: 'a + ?Sized + CastableFrom<T, I::Validity, I::Validity>, + F: FnOnce(PtrInner<'a, T>) -> PtrInner<'a, U>, + { + // SAFETY: Because `T: MutationCompatible<U, I::Aliasing, R>`, one + // of the following holds: + // - `T: Read<I::Aliasing>` and `U: Read<I::Aliasing>`, in which + // case one of the following holds: + // - `I::Aliasing` is `Exclusive` + // - `T` and `U` are both `Immutable` + // - It is sound for safe code to operate on `&T` and `&U` with the + // same referent simultaneously + // + // The caller promises all other safety preconditions. + unsafe { self.cast_unsized_unchecked(cast) } + } + } + + impl<'a, T, I> Ptr<'a, T, I> + where + T: 'a + KnownLayout + ?Sized, + I: Invariants<Validity = Initialized>, + { + /// Casts this pointer-to-initialized into a pointer-to-bytes. + #[allow(clippy::wrong_self_convention)] + #[must_use] + #[inline] + pub fn as_bytes<R>(self) -> Ptr<'a, [u8], (I::Aliasing, Aligned, Valid)> + where + T: Read<I::Aliasing, R>, + I::Aliasing: Reference, + { + // SAFETY: `PtrInner::as_bytes` returns a pointer which addresses + // the same byte range as its argument, and which has the same + // provenance. + let ptr = unsafe { self.cast_unsized(PtrInner::as_bytes) }; + ptr.bikeshed_recall_aligned().recall_validity::<Valid, (_, (_, _))>() + } + } + + impl<'a, T, I, const N: usize> Ptr<'a, [T; N], I> + where + T: 'a, + I: Invariants, + { + /// Casts this pointer-to-array into a slice. + #[allow(clippy::wrong_self_convention)] + pub(crate) fn as_slice(self) -> Ptr<'a, [T], I> { + let slice = self.as_inner().as_slice(); + // SAFETY: Note that, by post-condition on `PtrInner::as_slice`, + // `slice` refers to the same byte range as `self.as_inner()`. + // + // 0. Thus, `slice` conforms to the aliasing invariant of + // `I::Aliasing` because `self` does. + // 1. By the above lemma, `slice` conforms to the alignment + // invariant of `I::Alignment` because `self` does. + // 2. Since `[T; N]` and `[T]` have the same bit validity [1][2], + // and since `self` and the returned `Ptr` have the same validity + // invariant, neither `self` nor the returned `Ptr` can be used + // to write a value to the referent which violates the other's + // validity invariant. + // + // [1] Per https://doc.rust-lang.org/1.81.0/reference/type-layout.html#array-layout: + // + // An array of `[T; N]` has a size of `size_of::<T>() * N` and the + // same alignment of `T`. Arrays are laid out so that the + // zero-based `nth` element of the array is offset from the start + // of the array by `n * size_of::<T>()` bytes. + // + // ... + // + // Slices have the same layout as the section of the array they + // slice. + // + // [2] Per https://doc.rust-lang.org/1.81.0/reference/types/array.html#array-types: + // + // All elements of arrays are always initialized + unsafe { Ptr::from_inner(slice) } + } + } + + /// For caller convenience, these methods are generic over alignment + /// invariant. In practice, the referent is always well-aligned, because the + /// alignment of `[u8]` is 1. 
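The `try_cast_into` machinery in the impl block that follows is what backs the crate's public prefix/suffix conversions. A sketch of how those public calls behave, assuming zerocopy 0.8's `ref_from_prefix`/`ref_from_suffix`; the `Checksum` type is purely illustrative:

```rust
use zerocopy::byteorder::{BigEndian, U16};
use zerocopy::{FromBytes, Immutable, KnownLayout, Unaligned};

// Illustrative stand-in for a fixed-size field at either end of a packet.
#[derive(FromBytes, KnownLayout, Immutable, Unaligned)]
#[repr(C)]
struct Checksum {
    value: U16<BigEndian>,
}

fn main() {
    let packet = [0x01u8, 0x02, 0x03, 0x04, 0xAB, 0xCD];

    // Prefix cast: the target starts at the same address as the input and the
    // remainder is everything after it.
    let (head, rest) = Checksum::ref_from_prefix(&packet[..]).unwrap();
    assert_eq!(head.value.get(), 0x0102);
    assert_eq!(rest.len(), 4);

    // Suffix cast: the remainder keeps the input's starting address and the
    // target sits at the end.
    let (rest, tail) = Checksum::ref_from_suffix(&packet[..]).unwrap();
    assert_eq!(rest.len(), 4);
    assert_eq!(tail.value.get(), 0xABCD);
}
```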
+ impl<'a, I> Ptr<'a, [u8], I> + where + I: Invariants<Validity = Valid>, + { + /// Attempts to cast `self` to a `U` using the given cast type. + /// + /// If `U` is a slice DST and pointer metadata (`meta`) is provided, + /// then the cast will only succeed if it would produce an object with + /// the given metadata. + /// + /// Returns `None` if the resulting `U` would be invalidly-aligned, if + /// no `U` can fit in `self`, or if the provided pointer metadata + /// describes an invalid instance of `U`. On success, returns a pointer + /// to the largest-possible `U` which fits in `self`. + /// + /// # Safety + /// + /// The caller may assume that this implementation is correct, and may + /// rely on that assumption for the soundness of their code. In + /// particular, the caller may assume that, if `try_cast_into` returns + /// `Some((ptr, remainder))`, then `ptr` and `remainder` refer to + /// non-overlapping byte ranges within `self`, and that `ptr` and + /// `remainder` entirely cover `self`. Finally: + /// - If this is a prefix cast, `ptr` has the same address as `self`. + /// - If this is a suffix cast, `remainder` has the same address as + /// `self`. + #[inline(always)] + pub(crate) fn try_cast_into<U, R>( + self, + cast_type: CastType, + meta: Option<U::PointerMetadata>, + ) -> Result< + (Ptr<'a, U, (I::Aliasing, Aligned, Initialized)>, Ptr<'a, [u8], I>), + CastError<Self, U>, + > + where + I::Aliasing: Reference, + U: 'a + ?Sized + KnownLayout + Read<I::Aliasing, R>, + { + let (inner, remainder) = + self.as_inner().try_cast_into(cast_type, meta).map_err(|err| { + err.map_src(|inner| + // SAFETY: `PtrInner::try_cast_into` promises to return its + // original argument on error, which was originally produced + // by `self.as_inner()`, which is guaranteed to satisfy + // `Ptr`'s invariants. + unsafe { Ptr::from_inner(inner) }) + })?; + + // SAFETY: + // 0. Since `U: Read<I::Aliasing, _>`, either: + // - `I::Aliasing` is `Exclusive`, in which case both `src` and + // `ptr` conform to `Exclusive` + // - `I::Aliasing` is `Shared` and `U` is `Immutable` (we already + // know that `[u8]: Immutable`). In this case, neither `U` nor + // `[u8]` permit mutation, and so `Shared` aliasing is + // satisfied. + // 1. `ptr` conforms to the alignment invariant of `Aligned` because + // it is derived from `try_cast_into`, which promises that the + // object described by `target` is validly aligned for `U`. + // 2. By trait bound, `self` - and thus `target` - is a bit-valid + // `[u8]`. `Ptr<[u8], (_, _, Valid)>` and `Ptr<_, (_, _, + // Initialized)>` have the same bit validity, and so neither + // `self` nor `res` can be used to write a value to the referent + // which violates the other's validity invariant. + let res = unsafe { Ptr::from_inner(inner) }; + + // SAFETY: + // 0. `self` and `remainder` both have the type `[u8]`. Thus, they + // have `UnsafeCell`s at the same locations. Type casting does + // not affect aliasing. + // 1. `[u8]` has no alignment requirement. + // 2. `self` has validity `Valid` and has type `[u8]`. Since + // `remainder` references a subset of `self`'s referent, it is + // also a bit-valid `[u8]`. Thus, neither `self` nor `remainder` + // can be used to write a value to the referent which violates + // the other's validity invariant. + let remainder = unsafe { Ptr::from_inner(remainder) }; + + Ok((res, remainder)) + } + + /// Attempts to cast `self` into a `U`, failing if all of the bytes of + /// `self` cannot be treated as a `U`. 
+ /// + /// In particular, this method fails if `self` is not validly-aligned + /// for `U` or if `self`'s size is not a valid size for `U`. + /// + /// # Safety + /// + /// On success, the caller may assume that the returned pointer + /// references the same byte range as `self`. + #[allow(unused)] + #[inline(always)] + pub(crate) fn try_cast_into_no_leftover<U, R>( + self, + meta: Option<U::PointerMetadata>, + ) -> Result<Ptr<'a, U, (I::Aliasing, Aligned, Initialized)>, CastError<Self, U>> + where + I::Aliasing: Reference, + U: 'a + ?Sized + KnownLayout + Read<I::Aliasing, R>, + { + // FIXME(#67): Remove this allow. See NonNulSlicelExt for more + // details. + #[allow(unstable_name_collisions)] + match self.try_cast_into(CastType::Prefix, meta) { + Ok((slf, remainder)) => { + if remainder.len() == 0 { + Ok(slf) + } else { + // Undo the cast so we can return the original bytes. + let slf = slf.as_bytes(); + // Restore the initial alignment invariant of `self`. + // + // SAFETY: The referent type of `slf` is now equal to + // that of `self`, but the alignment invariants + // nominally differ. Since `slf` and `self` refer to the + // same memory and no actions have been taken that would + // violate the original invariants on `self`, it is + // sound to apply the alignment invariant of `self` onto + // `slf`. + let slf = unsafe { slf.assume_alignment::<I::Alignment>() }; + let slf = slf.unify_invariants(); + Err(CastError::Size(SizeError::<_, U>::new(slf))) + } + } + Err(err) => Err(err), + } + } + } + + impl<'a, T, I> Ptr<'a, core::cell::UnsafeCell<T>, I> + where + T: 'a + ?Sized, + I: Invariants<Aliasing = Exclusive>, + { + /// Converts this `Ptr` into a pointer to the underlying data. + /// + /// This call borrows the `UnsafeCell` mutably (at compile-time) which + /// guarantees that we possess the only reference. + /// + /// This is like [`UnsafeCell::get_mut`], but for `Ptr`. + /// + /// [`UnsafeCell::get_mut`]: core::cell::UnsafeCell::get_mut + #[must_use] + #[inline(always)] + pub fn get_mut(self) -> Ptr<'a, T, I> { + // SAFETY: + // - The closure uses an `as` cast, which preserves address range + // and provenance. + // - Aliasing is `Exclusive`, and so we are not required to promise + // anything about the locations of `UnsafeCell`s. + // - `UnsafeCell<T>` has the same bit validity as `T` [1]. + // Technically the term "representation" doesn't guarantee this, + // but the subsequent sentence in the documentation makes it clear + // that this is the intention. + // + // By invariant on `Validity`, since `T` and `UnsafeCell<T>` have + // the same bit validity, then the set of values which may appear + // in the referent of a `Ptr<T, (_, _, V)>` is the same as the set + // which may appear in the referent of a `Ptr<UnsafeCell<T>, (_, + // _, V)>`. Thus, neither `self` nor `ptr` may be used to write a + // value to the referent which would violate the other's validity + // invariant. + // + // [1] Per https://doc.rust-lang.org/1.81.0/core/cell/struct.UnsafeCell.html#memory-layout: + // + // `UnsafeCell<T>` has the same in-memory representation as its + // inner type `T`. A consequence of this guarantee is that it is + // possible to convert between `T` and `UnsafeCell<T>`. + #[allow(clippy::as_conversions)] + #[allow(clippy::multiple_unsafe_ops_per_block)] + let ptr = unsafe { self.transmute_unchecked(|ptr| cast!(ptr)) }; + + // SAFETY: `UnsafeCell<T>` has the same alignment as `T` [1], + // and so if `self` is guaranteed to be aligned, then so is the + // returned `Ptr`. 
+ // + // [1] Per https://doc.rust-lang.org/1.81.0/core/cell/struct.UnsafeCell.html#memory-layout: + // + // `UnsafeCell<T>` has the same in-memory representation as + // its inner type `T`. A consequence of this guarantee is that + // it is possible to convert between `T` and `UnsafeCell<T>`. + let ptr = unsafe { ptr.assume_alignment::<I::Alignment>() }; + ptr.unify_invariants() + } + } +} + +/// Projections through the referent. +mod _project { + use super::*; + + impl<'a, T, I> Ptr<'a, [T], I> + where + T: 'a, + I: Invariants, + I::Aliasing: Reference, + { + /// Iteratively projects the elements `Ptr<T>` from `Ptr<[T]>`. + pub(crate) fn iter(&self) -> impl Iterator<Item = Ptr<'a, T, I>> { + // SAFETY: + // 0. `elem` conforms to the aliasing invariant of `I::Aliasing` + // because projection does not impact the aliasing invariant. + // 1. `elem`, conditionally, conforms to the validity invariant of + // `I::Alignment`. If `elem` is projected from data well-aligned + // for `[T]`, `elem` will be valid for `T`. + // 2. `elem` conforms to the validity invariant of `I::Validity`. + // Per https://doc.rust-lang.org/1.81.0/reference/type-layout.html#array-layout: + // + // Slices have the same layout as the section of the array they + // slice. + // + // Arrays are laid out so that the zero-based `nth` element of + // the array is offset from the start of the array by `n * + // size_of::<T>()` bytes. Thus, `elem` addresses a valid `T` + // within the slice. Since `self` satisfies `I::Validity`, `elem` + // also satisfies `I::Validity`. + self.as_inner().iter().map(|elem| unsafe { Ptr::from_inner(elem) }) + } + } + + #[allow(clippy::needless_lifetimes)] + impl<'a, T, I> Ptr<'a, T, I> + where + T: 'a + ?Sized + KnownLayout<PointerMetadata = usize>, + I: Invariants, + { + /// The number of slice elements in the object referenced by `self`. + pub(crate) fn len(&self) -> usize { + self.as_inner().meta().get() + } + } +} + +#[cfg(test)] +mod tests { + use core::mem::{self, MaybeUninit}; + + use super::*; + #[allow(unused)] // Needed on our MSRV, but considered unused on later toolchains. + use crate::util::AsAddress; + use crate::{pointer::BecauseImmutable, util::testutil::AU64, FromBytes, Immutable}; + + mod test_ptr_try_cast_into_soundness { + use super::*; + + // This test is designed so that if `Ptr::try_cast_into_xxx` are + // buggy, it will manifest as unsoundness that Miri can detect. + + // - If `size_of::<T>() == 0`, `N == 4` + // - Else, `N == 4 * size_of::<T>()` + // + // Each test will be run for each metadata in `metas`. + fn test<T, I, const N: usize>(metas: I) + where + T: ?Sized + KnownLayout + Immutable + FromBytes, + I: IntoIterator<Item = Option<T::PointerMetadata>> + Clone, + { + let mut bytes = [MaybeUninit::<u8>::uninit(); N]; + let initialized = [MaybeUninit::new(0u8); N]; + for start in 0..=bytes.len() { + for end in start..=bytes.len() { + // Set all bytes to uninitialized other than those in + // the range we're going to pass to `try_cast_from`. + // This allows Miri to detect out-of-bounds reads + // because they read uninitialized memory. Without this, + // some out-of-bounds reads would still be in-bounds of + // `bytes`, and so might spuriously be accepted. + bytes = [MaybeUninit::<u8>::uninit(); N]; + let bytes = &mut bytes[start..end]; + // Initialize only the byte range we're going to pass to + // `try_cast_from`. 
+ bytes.copy_from_slice(&initialized[start..end]); + + let bytes = { + let bytes: *const [MaybeUninit<u8>] = bytes; + #[allow(clippy::as_conversions)] + let bytes = bytes as *const [u8]; + // SAFETY: We just initialized these bytes to valid + // `u8`s. + unsafe { &*bytes } + }; + + // SAFETY: The bytes in `slf` must be initialized. + unsafe fn validate_and_get_len< + T: ?Sized + KnownLayout + FromBytes + Immutable, + >( + slf: Ptr<'_, T, (Shared, Aligned, Initialized)>, + ) -> usize { + let t = slf.recall_validity().as_ref(); + + let bytes = { + let len = mem::size_of_val(t); + let t: *const T = t; + // SAFETY: + // - We know `t`'s bytes are all initialized + // because we just read it from `slf`, which + // points to an initialized range of bytes. If + // there's a bug and this doesn't hold, then + // that's exactly what we're hoping Miri will + // catch! + // - Since `T: FromBytes`, `T` doesn't contain + // any `UnsafeCell`s, so it's okay for `t: T` + // and a `&[u8]` to the same memory to be + // alive concurrently. + unsafe { core::slice::from_raw_parts(t.cast::<u8>(), len) } + }; + + // This assertion ensures that `t`'s bytes are read + // and compared to another value, which in turn + // ensures that Miri gets a chance to notice if any + // of `t`'s bytes are uninitialized, which they + // shouldn't be (see the comment above). + assert_eq!(bytes, vec![0u8; bytes.len()]); + + mem::size_of_val(t) + } + + for meta in metas.clone().into_iter() { + for cast_type in [CastType::Prefix, CastType::Suffix] { + if let Ok((slf, remaining)) = Ptr::from_ref(bytes) + .try_cast_into::<T, BecauseImmutable>(cast_type, meta) + { + // SAFETY: All bytes in `bytes` have been + // initialized. + let len = unsafe { validate_and_get_len(slf) }; + assert_eq!(remaining.len(), bytes.len() - len); + #[allow(unstable_name_collisions)] + let bytes_addr = bytes.as_ptr().addr(); + #[allow(unstable_name_collisions)] + let remaining_addr = + remaining.as_inner().as_non_null().as_ptr().addr(); + match cast_type { + CastType::Prefix => { + assert_eq!(remaining_addr, bytes_addr + len) + } + CastType::Suffix => assert_eq!(remaining_addr, bytes_addr), + } + + if let Some(want) = meta { + let got = KnownLayout::pointer_to_metadata( + slf.as_inner().as_non_null().as_ptr(), + ); + assert_eq!(got, want); + } + } + } + + if let Ok(slf) = Ptr::from_ref(bytes) + .try_cast_into_no_leftover::<T, BecauseImmutable>(meta) + { + // SAFETY: All bytes in `bytes` have been + // initialized. + let len = unsafe { validate_and_get_len(slf) }; + assert_eq!(len, bytes.len()); + + if let Some(want) = meta { + let got = KnownLayout::pointer_to_metadata( + slf.as_inner().as_non_null().as_ptr(), + ); + assert_eq!(got, want); + } + } + } + } + } + } + + #[derive(FromBytes, KnownLayout, Immutable)] + #[repr(C)] + struct SliceDst<T> { + a: u8, + trailing: [T], + } + + // Each test case becomes its own `#[test]` function. We do this because + // this test in particular takes far, far longer to execute under Miri + // than all of our other tests combined. Previously, we had these + // execute sequentially in a single test function. We run Miri tests in + // parallel in CI, but this test being sequential meant that most of + // that parallelism was wasted, as all other tests would finish in a + // fraction of the total execution time, leaving this test to execute on + // a single thread for the remainder of the test. By putting each test + // case in its own function, we permit better use of available + // parallelism. + macro_rules! 
test { + ($test_name:ident: $ty:ty) => { + #[test] + #[allow(non_snake_case)] + fn $test_name() { + const S: usize = core::mem::size_of::<$ty>(); + const N: usize = if S == 0 { 4 } else { S * 4 }; + test::<$ty, _, N>([None]); + + // If `$ty` is a ZST, then we can't pass `None` as the + // pointer metadata, or else computing the correct trailing + // slice length will panic. + if S == 0 { + test::<[$ty], _, N>([Some(0), Some(1), Some(2), Some(3)]); + test::<SliceDst<$ty>, _, N>([Some(0), Some(1), Some(2), Some(3)]); + } else { + test::<[$ty], _, N>([None, Some(0), Some(1), Some(2), Some(3)]); + test::<SliceDst<$ty>, _, N>([None, Some(0), Some(1), Some(2), Some(3)]); + } + } + }; + ($ty:ident) => { + test!($ty: $ty); + }; + ($($ty:ident),*) => { $(test!($ty);)* } + } + + test!(empty_tuple: ()); + test!(u8, u16, u32, u64, u128, usize, AU64); + test!(i8, i16, i32, i64, i128, isize); + test!(f32, f64); + } + + #[test] + fn test_try_cast_into_explicit_count() { + macro_rules! test { + ($ty:ty, $bytes:expr, $elems:expr, $expect:expr) => {{ + let bytes = [0u8; $bytes]; + let ptr = Ptr::from_ref(&bytes[..]); + let res = + ptr.try_cast_into::<$ty, BecauseImmutable>(CastType::Prefix, Some($elems)); + if let Some(expect) = $expect { + let (ptr, _) = res.unwrap(); + assert_eq!( + KnownLayout::pointer_to_metadata(ptr.as_inner().as_non_null().as_ptr()), + expect + ); + } else { + let _ = res.unwrap_err(); + } + }}; + } + + #[derive(KnownLayout, Immutable)] + #[repr(C)] + struct ZstDst { + u: [u8; 8], + slc: [()], + } + + test!(ZstDst, 8, 0, Some(0)); + test!(ZstDst, 7, 0, None); + + test!(ZstDst, 8, usize::MAX, Some(usize::MAX)); + test!(ZstDst, 7, usize::MAX, None); + + #[derive(KnownLayout, Immutable)] + #[repr(C)] + struct Dst { + u: [u8; 8], + slc: [u8], + } + + test!(Dst, 8, 0, Some(0)); + test!(Dst, 7, 0, None); + + test!(Dst, 9, 1, Some(1)); + test!(Dst, 8, 1, None); + + // If we didn't properly check for overflow, this would cause the + // metadata to overflow to 0, and thus the cast would spuriously + // succeed. + test!(Dst, 8, usize::MAX - 8 + 1, None); + } +} diff --git a/vendor/zerocopy/src/pointer/transmute.rs b/vendor/zerocopy/src/pointer/transmute.rs new file mode 100644 index 00000000..c1fa540a --- /dev/null +++ b/vendor/zerocopy/src/pointer/transmute.rs @@ -0,0 +1,492 @@ +// Copyright 2025 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +use core::{ + cell::{Cell, UnsafeCell}, + mem::{ManuallyDrop, MaybeUninit}, + num::Wrapping, +}; + +use crate::{ + pointer::{invariant::*, PtrInner}, + FromBytes, Immutable, IntoBytes, Unalign, +}; + +/// Transmutations which are sound to attempt, conditional on validating the bit +/// validity of the destination type. +/// +/// If a `Ptr` transmutation is `TryTransmuteFromPtr`, then it is sound to +/// perform that transmutation so long as some additional mechanism is used to +/// validate that the referent is bit-valid for the destination type. That +/// validation mechanism could be a type bound (such as `TransmuteFrom`) or a +/// runtime validity check. 
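The user-facing counterpart of this "sound to attempt, then validate" contract is zerocopy's public `TryFromBytes` API. A minimal sketch of the idea follows; the `Opcode` type is purely illustrative and the snippet assumes zerocopy 0.8 with the `derive` feature rather than anything introduced by this patch:

```rust
use zerocopy::{Immutable, KnownLayout, TryFromBytes};

// Illustrative type, not part of the vendored sources: only the discriminants
// 0 and 1 are bit-valid, so a conversion from raw bytes must be attempted and
// then validated at runtime.
#[derive(TryFromBytes, KnownLayout, Immutable)]
#[repr(u8)]
#[allow(dead_code)]
enum Opcode {
    Read = 0,
    Write = 1,
}

fn main() {
    // 0x01 is a bit-valid `Opcode`, so the checked conversion succeeds...
    assert!(Opcode::try_ref_from_bytes(&[1u8][..]).is_ok());
    // ...while 0x02 fails the runtime validity check instead of being UB.
    assert!(Opcode::try_ref_from_bytes(&[2u8][..]).is_err());
}
```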
+/// +/// # Safety +/// +/// ## Post-conditions +/// +/// Given `Dst: TryTransmuteFromPtr<Src, A, SV, DV, _>`, callers may assume the +/// following: +/// +/// Given `src: Ptr<'a, Src, (A, _, SV)>`, if the referent of `src` is +/// `DV`-valid for `Dst`, then it is sound to transmute `src` into `dst: Ptr<'a, +/// Dst, (A, Unaligned, DV)>` by preserving pointer address and metadata. +/// +/// ## Pre-conditions +/// +/// Given `src: Ptr<Src, (A, _, SV)>` and `dst: Ptr<Dst, (A, Unaligned, DV)>`, +/// `Dst: TryTransmuteFromPtr<Src, A, SV, DV, _>` is sound if all of the +/// following hold: +/// - Forwards transmutation: Either of the following hold: +/// - So long as `dst` is active, no mutation of `dst`'s referent is allowed +/// except via `dst` itself +/// - The set of `DV`-valid `Dst`s is a superset of the set of `SV`-valid +/// `Src`s +/// - Reverse transmutation: Either of the following hold: +/// - `dst` does not permit mutation of its referent +/// - The set of `DV`-valid `Dst`s is a subset of the set of `SV`-valid `Src`s +/// - No safe code, given access to `src` and `dst`, can cause undefined +/// behavior: Any of the following hold: +/// - `A` is `Exclusive` +/// - `Src: Immutable` and `Dst: Immutable` +/// - It is sound for shared code to operate on a `&Src` and `&Dst` which +/// reference the same byte range at the same time +/// +/// ## Proof +/// +/// Given: +/// - `src: Ptr<'a, Src, (A, _, SV)>` +/// - `src`'s referent is `DV`-valid for `Dst` +/// - `Dst: SizeEq<Src>` +/// +/// We are trying to prove that it is sound to perform a pointer address- and +/// metadata-preserving transmute from `src` to a `dst: Ptr<'a, Dst, (A, +/// Unaligned, DV)>`. We need to prove that such a transmute does not violate +/// any of `src`'s invariants, and that it satisfies all invariants of the +/// destination `Ptr` type. +/// +/// First, all of `src`'s `PtrInner` invariants are upheld. `src`'s address and +/// metadata are unchanged, so: +/// - If its referent is not zero sized, then it still has valid provenance for +/// its referent, which is still entirely contained in some Rust allocation, +/// `A` +/// - If its referent is not zero sized, `A` is guaranteed to live for at least +/// `'a` +/// +/// Since `Dst: SizeEq<Src>`, and since `dst` has the same address and metadata +/// as `src`, `dst` addresses the same byte range as `src`. `dst` also has the +/// same lifetime as `src`. Therefore, all of the `PtrInner` invariants +/// mentioned above also hold for `dst`. +/// +/// Second, since `src`'s address is unchanged, it still satisfies its +/// alignment. Since `dst`'s alignment is `Unaligned`, it trivially satisfies +/// its alignment. +/// +/// Third, aliasing is either `Exclusive` or `Shared`: +/// - If it is `Exclusive`, then both `src` and `dst` satisfy `Exclusive` +/// aliasing trivially: since `src` and `dst` have the same lifetime, `src` is +/// inaccessible so long as `dst` is alive, and no other live `Ptr`s or +/// references may reference the same referent. +/// - If it is `Shared`, then either: +/// - `Src: Immutable` and `Dst: Immutable`, and so `UnsafeCell`s trivially +/// cover the same byte ranges in both types. +/// - It is explicitly sound for safe code to operate on a `&Src` and a `&Dst` +/// pointing to the same byte range at the same time. +/// +/// Fourth, `src`'s validity is satisfied. By invariant, `src`'s referent began +/// as an `SV`-valid `Src`. 
It is guaranteed to remain so, as either of the
+/// following hold:
+/// - `dst` does not permit mutation of its referent.
+/// - The set of `DV`-valid `Dst`s is a subset of the set of `SV`-valid
+///   `Src`s. Thus, any value written via `dst` is guaranteed to be `SV`-valid
+///   for `Src`.
+///
+/// Fifth, `dst`'s validity is satisfied. It is a given of this proof that the
+/// referent is `DV`-valid for `Dst`. It is guaranteed to remain so, as either
+/// of the following hold:
+/// - So long as `dst` is active, no mutation of the referent is allowed except
+///   via `dst` itself.
+/// - The set of `DV`-valid `Dst`s is a superset of the set of `SV`-valid
+///   `Src`s. Thus, any value written via `src` is guaranteed to be a `DV`-valid
+///   `Dst`.
+pub unsafe trait TryTransmuteFromPtr<Src: ?Sized, A: Aliasing, SV: Validity, DV: Validity, R>:
+    SizeEq<Src>
+{
+}
+
+#[allow(missing_copy_implementations, missing_debug_implementations)]
+pub enum BecauseMutationCompatible {}
+
+// SAFETY:
+// - Forwards transmutation: By `Dst: MutationCompatible<Src, A, SV, DV, _>`, we
+//   know that at least one of the following holds:
+//   - So long as `dst: Ptr<Dst>` is active, no mutation of its referent is
+//     allowed except via `dst` itself if either of the following hold:
+//     - Aliasing is `Exclusive`, in which case, so long as the `Dst` `Ptr`
+//       exists, no mutation is permitted except via that `Ptr`
+//     - Aliasing is `Shared`, `Src: Immutable`, and `Dst: Immutable`, in which
+//       case no mutation is possible via either `Ptr`
+//   - `Dst: TransmuteFrom<Src, SV, DV>`. Since `Dst: SizeEq<Src>`, this bound
+//     guarantees that the set of `DV`-valid `Dst`s is a superset of the set of
+//     `SV`-valid `Src`s.
+// - Reverse transmutation: `Src: TransmuteFrom<Dst, DV, SV>`. Since `Dst:
+//   SizeEq<Src>`, this guarantees that the set of `DV`-valid `Dst`s is a subset
+//   of the set of `SV`-valid `Src`s.
+// - No safe code, given access to `src` and `dst`, can cause undefined
+//   behavior: By `Dst: MutationCompatible<Src, A, SV, DV, _>`, at least one of
+//   the following holds:
+//   - `A` is `Exclusive`
+//   - `Src: Immutable` and `Dst: Immutable`
+//   - `Dst: InvariantsEq<Src>`, which guarantees that `Src` and `Dst` have the
+//     same invariants, and have `UnsafeCell`s covering the same byte ranges
+unsafe impl<Src, Dst, SV, DV, A, R>
+    TryTransmuteFromPtr<Src, A, SV, DV, (BecauseMutationCompatible, R)> for Dst
+where
+    A: Aliasing,
+    SV: Validity,
+    DV: Validity,
+    Src: TransmuteFrom<Dst, DV, SV> + ?Sized,
+    Dst: MutationCompatible<Src, A, SV, DV, R> + SizeEq<Src> + ?Sized,
+{
+}
+
+// SAFETY:
+// - Forwards transmutation: Since aliasing is `Shared` and `Src: Immutable`,
+//   `src` does not permit mutation of its referent.
+// - Reverse transmutation: Since aliasing is `Shared` and `Dst: Immutable`,
+//   `dst` does not permit mutation of its referent.
+// - No safe code, given access to `src` and `dst`, can cause undefined
+//   behavior: `Src: Immutable` and `Dst: Immutable`
+unsafe impl<Src, Dst, SV, DV> TryTransmuteFromPtr<Src, Shared, SV, DV, BecauseImmutable> for Dst
+where
+    SV: Validity,
+    DV: Validity,
+    Src: Immutable + ?Sized,
+    Dst: Immutable + SizeEq<Src> + ?Sized,
+{
+}
+
+/// Denotes that `src: Ptr<Src, (A, _, SV)>` and `dst: Ptr<Self, (A, _, DV)>`,
+/// referencing the same referent at the same time, cannot be used by safe code
+/// to break library safety invariants of `Src` or `Self`.
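At the public API surface, this is why shared, reinterpreting views require `Immutable` on both endpoints. A small sketch of the user-visible effect using the public `transmute_ref!` macro, assuming zerocopy 0.8 (this is not part of the vendored sources):

```rust
use zerocopy::transmute_ref;

fn main() {
    // Sketch only: `u32` and `[u8; 4]` are both `Immutable` (no `UnsafeCell`s),
    // so handing out a shared view of the same four bytes at a different type
    // cannot be used by safe code to break either type's invariants.
    let word: u32 = 0x0102_0304;
    let bytes: &[u8; 4] = transmute_ref!(&word);
    assert_eq!(u32::from_ne_bytes(*bytes), word);
}
```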
+/// +/// # Safety +/// +/// At least one of the following must hold: +/// - `Src: Read<A, _>` and `Self: Read<A, _>` +/// - `Self: InvariantsEq<Src>`, and, for some `V`: +/// - `Dst: TransmuteFrom<Src, V, V>` +/// - `Src: TransmuteFrom<Dst, V, V>` +pub unsafe trait MutationCompatible<Src: ?Sized, A: Aliasing, SV, DV, R> {} + +#[allow(missing_copy_implementations, missing_debug_implementations)] +pub enum BecauseRead {} + +// SAFETY: `Src: Read<A, _>` and `Dst: Read<A, _>`. +unsafe impl<Src: ?Sized, Dst: ?Sized, A: Aliasing, SV: Validity, DV: Validity, R, S> + MutationCompatible<Src, A, SV, DV, (BecauseRead, (R, S))> for Dst +where + Src: Read<A, R>, + Dst: Read<A, S>, +{ +} + +/// Denotes that two types have the same invariants. +/// +/// # Safety +/// +/// It is sound for safe code to operate on a `&T` and a `&Self` pointing to the +/// same referent at the same time - no such safe code can cause undefined +/// behavior. +pub unsafe trait InvariantsEq<T: ?Sized> {} + +// SAFETY: Trivially sound to have multiple `&T` pointing to the same referent. +unsafe impl<T: ?Sized> InvariantsEq<T> for T {} + +// SAFETY: `Dst: InvariantsEq<Src> + TransmuteFrom<Src, SV, DV>`, and `Src: +// TransmuteFrom<Dst, DV, SV>`. +unsafe impl<Src: ?Sized, Dst: ?Sized, A: Aliasing, SV: Validity, DV: Validity> + MutationCompatible<Src, A, SV, DV, BecauseInvariantsEq> for Dst +where + Src: TransmuteFrom<Dst, DV, SV>, + Dst: TransmuteFrom<Src, SV, DV> + InvariantsEq<Src>, +{ +} + +pub(crate) enum BecauseInvariantsEq {} + +macro_rules! unsafe_impl_invariants_eq { + ($tyvar:ident => $t:ty, $u:ty) => {{ + crate::util::macros::__unsafe(); + // SAFETY: The caller promises that this is sound. + unsafe impl<$tyvar> InvariantsEq<$t> for $u {} + // SAFETY: The caller promises that this is sound. + unsafe impl<$tyvar> InvariantsEq<$u> for $t {} + }}; +} + +impl_transitive_transmute_from!(T => MaybeUninit<T> => T => Wrapping<T>); +impl_transitive_transmute_from!(T => Wrapping<T> => T => MaybeUninit<T>); + +// SAFETY: `ManuallyDrop<T>` has the same size and bit validity as `T` [1], and +// implements `Deref<Target = T>` [2]. Thus, it is already possible for safe +// code to obtain a `&T` and a `&ManuallyDrop<T>` to the same referent at the +// same time. +// +// [1] Per https://doc.rust-lang.org/1.81.0/std/mem/struct.ManuallyDrop.html: +// +// `ManuallyDrop<T>` is guaranteed to have the same layout and bit +// validity as `T` +// +// [2] https://doc.rust-lang.org/1.81.0/std/mem/struct.ManuallyDrop.html#impl-Deref-for-ManuallyDrop%3CT%3E +unsafe impl<T: ?Sized> InvariantsEq<T> for ManuallyDrop<T> {} +// SAFETY: See previous safety comment. +unsafe impl<T: ?Sized> InvariantsEq<ManuallyDrop<T>> for T {} + +/// Transmutations which are always sound. +/// +/// `TransmuteFromPtr` is a shorthand for [`TryTransmuteFromPtr`] and +/// [`TransmuteFrom`]. +/// +/// # Safety +/// +/// `Dst: TransmuteFromPtr<Src, A, SV, DV, _>` is equivalent to `Dst: +/// TryTransmuteFromPtr<Src, A, SV, DV, _> + TransmuteFrom<Src, SV, DV>`. +pub unsafe trait TransmuteFromPtr<Src: ?Sized, A: Aliasing, SV: Validity, DV: Validity, R>: + TryTransmuteFromPtr<Src, A, SV, DV, R> + TransmuteFrom<Src, SV, DV> +{ +} + +// SAFETY: The `where` bounds are equivalent to the safety invariant on +// `TransmuteFromPtr`. 
+unsafe impl<Src: ?Sized, Dst: ?Sized, A: Aliasing, SV: Validity, DV: Validity, R> + TransmuteFromPtr<Src, A, SV, DV, R> for Dst +where + Dst: TransmuteFrom<Src, SV, DV> + TryTransmuteFromPtr<Src, A, SV, DV, R>, +{ +} + +/// Denotes that any `SV`-valid `Src` may soundly be transmuted into a +/// `DV`-valid `Self`. +/// +/// # Safety +/// +/// Given `src: Ptr<Src, (_, _, SV)>` and `dst: Ptr<Dst, (_, _, DV)>`, if the +/// referents of `src` and `dst` are the same size, then the set of bit patterns +/// allowed to appear in `src`'s referent must be a subset of the set allowed to +/// appear in `dst`'s referent. +/// +/// If the referents are not the same size, then `Dst: TransmuteFrom<Src, SV, +/// DV>` conveys no safety guarantee. +pub unsafe trait TransmuteFrom<Src: ?Sized, SV, DV> {} + +/// # Safety +/// +/// `T` and `Self` must have the same vtable kind (`Sized`, slice DST, `dyn`, +/// etc) and have the same size. In particular: +/// - If `T: Sized` and `Self: Sized`, then their sizes must be equal +/// - If `T: ?Sized` and `Self: ?Sized`, then it must be the case that, given +/// any `t: PtrInner<'_, T>`, `<Self as SizeEq<T>>::cast_from_raw(t)` produces +/// a pointer which addresses the same number of bytes as `t`. *Note that it +/// is **not** guaranteed that an `as` cast preserves referent size: it may be +/// the case that `cast_from_raw` modifies the pointer's metadata in order to +/// preserve referent size, which an `as` cast does not do.* +pub unsafe trait SizeEq<T: ?Sized> { + fn cast_from_raw(t: PtrInner<'_, T>) -> PtrInner<'_, Self>; +} + +// SAFETY: `T` trivially has the same size and vtable kind as `T`, and since +// pointer `*mut T -> *mut T` pointer casts are no-ops, this cast trivially +// preserves referent size (when `T: ?Sized`). +unsafe impl<T: ?Sized> SizeEq<T> for T { + #[inline(always)] + fn cast_from_raw(t: PtrInner<'_, T>) -> PtrInner<'_, T> { + t + } +} + +// SAFETY: Since `Src: IntoBytes`, the set of valid `Src`'s is the set of +// initialized bit patterns, which is exactly the set allowed in the referent of +// any `Initialized` `Ptr`. +unsafe impl<Src, Dst> TransmuteFrom<Src, Valid, Initialized> for Dst +where + Src: IntoBytes + ?Sized, + Dst: ?Sized, +{ +} + +// SAFETY: Since `Dst: FromBytes`, any initialized bit pattern may appear in the +// referent of a `Ptr<Dst, (_, _, Valid)>`. This is exactly equal to the set of +// bit patterns which may appear in the referent of any `Initialized` `Ptr`. +unsafe impl<Src, Dst> TransmuteFrom<Src, Initialized, Valid> for Dst +where + Src: ?Sized, + Dst: FromBytes + ?Sized, +{ +} + +// FIXME(#2354): This seems like a smell - the soundness of this bound has +// nothing to do with `Src` or `Dst` - we're basically just saying `[u8; N]` is +// transmutable into `[u8; N]`. + +// SAFETY: The set of allowed bit patterns in the referent of any `Initialized` +// `Ptr` is the same regardless of referent type. +unsafe impl<Src, Dst> TransmuteFrom<Src, Initialized, Initialized> for Dst +where + Src: ?Sized, + Dst: ?Sized, +{ +} + +// FIXME(#2354): This seems like a smell - the soundness of this bound has +// nothing to do with `Dst` - we're basically just saying that any type is +// transmutable into `MaybeUninit<[u8; N]>`. + +// SAFETY: A `Dst` with validity `Uninit` permits any byte sequence, and +// therefore can be transmuted from any value. 
+unsafe impl<Src, Dst, V> TransmuteFrom<Src, V, Uninit> for Dst +where + Src: ?Sized, + Dst: ?Sized, + V: Validity, +{ +} + +// SAFETY: +// - `ManuallyDrop<T>` has the same size as `T` [1] +// - `ManuallyDrop<T>` has the same validity as `T` [1] +// +// [1] Per https://doc.rust-lang.org/1.81.0/std/mem/struct.ManuallyDrop.html: +// +// `ManuallyDrop<T>` is guaranteed to have the same layout and bit validity as +// `T` +#[allow(clippy::multiple_unsafe_ops_per_block)] +const _: () = unsafe { unsafe_impl_for_transparent_wrapper!(T: ?Sized => ManuallyDrop<T>) }; + +// SAFETY: +// - `Unalign<T>` promises to have the same size as `T`. +// - `Unalign<T>` promises to have the same validity as `T`. +#[allow(clippy::multiple_unsafe_ops_per_block)] +const _: () = unsafe { unsafe_impl_for_transparent_wrapper!(T => Unalign<T>) }; +// SAFETY: `Unalign<T>` promises to have the same size and validity as `T`. +// Given `u: &Unalign<T>`, it is already possible to obtain `let t = +// u.try_deref().unwrap()`. Because `Unalign<T>` has the same size as `T`, the +// returned `&T` must point to the same referent as `u`, and thus it must be +// sound for these two references to exist at the same time since it's already +// possible for safe code to get into this state. +#[allow(clippy::multiple_unsafe_ops_per_block)] +const _: () = unsafe { unsafe_impl_invariants_eq!(T => T, Unalign<T>) }; + +// SAFETY: +// - `Wrapping<T>` has the same size as `T` [1]. +// - `Wrapping<T>` has only one field, which is `pub` [2]. We are also +// guaranteed per that `Wrapping<T>` has the same layout as `T` [1]. The only +// way for both of these to be true simultaneously is for `Wrapping<T>` to +// have the same bit validity as `T`. In particular, in order to change the +// bit validity, one of the following would need to happen: +// - `Wrapping` could change its `repr`, but this would violate the layout +// guarantee. +// - `Wrapping` could add or change its fields, but this would be a +// stability-breaking change. +// +// [1] Per https://doc.rust-lang.org/1.85.0/core/num/struct.Wrapping.html#layout-1: +// +// `Wrapping<T>` is guaranteed to have the same layout and ABI as `T`. +// +// [2] Definition from https://doc.rust-lang.org/1.85.0/core/num/struct.Wrapping.html: +// +// ``` +// #[repr(transparent)] +// pub struct Wrapping<T>(pub T); +// ``` +#[allow(clippy::multiple_unsafe_ops_per_block)] +const _: () = unsafe { unsafe_impl_for_transparent_wrapper!(T => Wrapping<T>) }; + +// SAFETY: By the preceding safety proof, `Wrapping<T>` and `T` have the same +// layout and bit validity. Since a `Wrapping<T>`'s `T` field is `pub`, given +// `w: &Wrapping<T>`, it's possible to do `let t = &w.t`, which means that it's +// already possible for safe code to obtain a `&Wrapping<T>` and a `&T` pointing +// to the same referent at the same time. Thus, this must be sound. +#[allow(clippy::multiple_unsafe_ops_per_block)] +const _: () = unsafe { unsafe_impl_invariants_eq!(T => T, Wrapping<T>) }; + +// SAFETY: +// - `UnsafeCell<T>` has the same size as `T` [1]. +// - Per [1], `UnsafeCell<T>` has the same bit validity as `T`. Technically the +// term "representation" doesn't guarantee this, but the subsequent sentence +// in the documentation makes it clear that this is the intention. +// +// [1] Per https://doc.rust-lang.org/1.81.0/core/cell/struct.UnsafeCell.html#memory-layout: +// +// `UnsafeCell<T>` has the same in-memory representation as its inner type +// `T`. 
A consequence of this guarantee is that it is possible to convert +// between `T` and `UnsafeCell<T>`. +#[allow(clippy::multiple_unsafe_ops_per_block)] +const _: () = unsafe { unsafe_impl_for_transparent_wrapper!(T: ?Sized => UnsafeCell<T>) }; + +// SAFETY: +// - `Cell<T>` has the same size as `T` [1]. +// - Per [1], `Cell<T>` has the same bit validity as `T`. Technically the term +// "representation" doesn't guarantee this, but it does promise to have the +// "same memory layout and caveats as `UnsafeCell<T>`." The `UnsafeCell` docs +// [2] make it clear that bit validity is the intention even if that phrase +// isn't used. +// +// [1] Per https://doc.rust-lang.org/1.85.0/std/cell/struct.Cell.html#memory-layout: +// +// `Cell<T>` has the same memory layout and caveats as `UnsafeCell<T>`. In +// particular, this means that `Cell<T>` has the same in-memory representation +// as its inner type `T`. +// +// [2] Per https://doc.rust-lang.org/1.81.0/core/cell/struct.UnsafeCell.html#memory-layout: +// +// `UnsafeCell<T>` has the same in-memory representation as its inner type +// `T`. A consequence of this guarantee is that it is possible to convert +// between `T` and `UnsafeCell<T>`. +#[allow(clippy::multiple_unsafe_ops_per_block)] +const _: () = unsafe { unsafe_impl_for_transparent_wrapper!(T: ?Sized => Cell<T>) }; + +impl_transitive_transmute_from!(T: ?Sized => Cell<T> => T => UnsafeCell<T>); +impl_transitive_transmute_from!(T: ?Sized => UnsafeCell<T> => T => Cell<T>); + +// SAFETY: `MaybeUninit<T>` has no validity requirements. Currently this is not +// explicitly guaranteed, but it's obvious from `MaybeUninit`'s documentation +// that this is the intention: +// https://doc.rust-lang.org/1.85.0/core/mem/union.MaybeUninit.html +unsafe impl<T> TransmuteFrom<T, Uninit, Valid> for MaybeUninit<T> {} + +// SAFETY: `MaybeUninit<T>` has the same size as `T` [1]. +// +// [1] Per https://doc.rust-lang.org/1.81.0/std/mem/union.MaybeUninit.html#layout-1: +// +// `MaybeUninit<T>` is guaranteed to have the same size, alignment, and ABI as +// `T` +unsafe impl<T> SizeEq<T> for MaybeUninit<T> { + #[inline(always)] + fn cast_from_raw(t: PtrInner<'_, T>) -> PtrInner<'_, MaybeUninit<T>> { + // SAFETY: Per preceding safety comment, `MaybeUninit<T>` and `T` have + // the same size, and so this cast preserves referent size. + #[allow(clippy::multiple_unsafe_ops_per_block)] + unsafe { + cast!(t) + } + } +} + +// SAFETY: See previous safety comment. +unsafe impl<T> SizeEq<MaybeUninit<T>> for T { + #[inline(always)] + fn cast_from_raw(t: PtrInner<'_, MaybeUninit<T>>) -> PtrInner<'_, T> { + // SAFETY: Per preceding safety comment, `MaybeUninit<T>` and `T` have + // the same size, and so this cast preserves referent size. + #[allow(clippy::multiple_unsafe_ops_per_block)] + unsafe { + cast!(t) + } + } +} diff --git a/vendor/zerocopy/src/ref.rs b/vendor/zerocopy/src/ref.rs new file mode 100644 index 00000000..352a9fc0 --- /dev/null +++ b/vendor/zerocopy/src/ref.rs @@ -0,0 +1,1175 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under the 2-Clause BSD License <LICENSE-BSD or +// https://opensource.org/license/bsd-2-clause>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
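The new `vendor/zerocopy/src/ref.rs` below defines `Ref<B, T>`, a typed view over a byte slice. A rough usage sketch through the public API is shown here; the `Header` type is purely illustrative, assumes zerocopy 0.8 with the `derive` feature, and is not part of the vendored sources:

```rust
use zerocopy::{FromBytes, Immutable, IntoBytes, KnownLayout, Ref, Unaligned};

// Illustrative wire-format header, not part of the vendored sources. Using
// byte arrays for every field keeps the type `Unaligned`, so it can be parsed
// at any offset within a buffer.
#[derive(FromBytes, IntoBytes, KnownLayout, Immutable, Unaligned)]
#[repr(C)]
struct Header {
    len: [u8; 2],
    flags: [u8; 2],
}

fn main() {
    let buf = [4u8, 0, 0, 1, 0xde, 0xad];
    // Borrow the leading four bytes as a `Header`, keeping the rest as payload.
    let (header, payload) = Ref::<_, Header>::from_prefix(&buf[..]).unwrap();
    assert_eq!(header.len, [4, 0]);
    assert_eq!(header.flags, [0, 1]);
    assert_eq!(payload, &[0xde, 0xad][..]);
}
```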
+ +use super::*; + +mod def { + use core::marker::PhantomData; + + use crate::{ + ByteSlice, ByteSliceMut, CloneableByteSlice, CopyableByteSlice, IntoByteSlice, + IntoByteSliceMut, + }; + + /// A typed reference derived from a byte slice. + /// + /// A `Ref<B, T>` is a reference to a `T` which is stored in a byte slice, `B`. + /// Unlike a native reference (`&T` or `&mut T`), `Ref<B, T>` has the same + /// mutability as the byte slice it was constructed from (`B`). + /// + /// # Examples + /// + /// `Ref` can be used to treat a sequence of bytes as a structured type, and + /// to read and write the fields of that type as if the byte slice reference + /// were simply a reference to that type. + /// + /// ```rust + /// use zerocopy::*; + /// # use zerocopy_derive::*; + /// + /// #[derive(FromBytes, IntoBytes, KnownLayout, Immutable, Unaligned)] + /// #[repr(C)] + /// struct UdpHeader { + /// src_port: [u8; 2], + /// dst_port: [u8; 2], + /// length: [u8; 2], + /// checksum: [u8; 2], + /// } + /// + /// #[derive(FromBytes, IntoBytes, KnownLayout, Immutable, Unaligned)] + /// #[repr(C, packed)] + /// struct UdpPacket { + /// header: UdpHeader, + /// body: [u8], + /// } + /// + /// impl UdpPacket { + /// pub fn parse<B: ByteSlice>(bytes: B) -> Option<Ref<B, UdpPacket>> { + /// Ref::from_bytes(bytes).ok() + /// } + /// } + /// ``` + pub struct Ref<B, T: ?Sized>( + // INVARIANTS: The referent (via `.deref`, `.deref_mut`, `.into`) byte + // slice is aligned to `T`'s alignment and its size corresponds to a + // valid size for `T`. + B, + PhantomData<T>, + ); + + impl<B, T: ?Sized> Ref<B, T> { + /// Constructs a new `Ref`. + /// + /// # Safety + /// + /// `bytes` dereferences (via [`deref`], [`deref_mut`], and [`into`]) to + /// a byte slice which is aligned to `T`'s alignment and whose size is a + /// valid size for `T`. + /// + /// [`deref`]: core::ops::Deref::deref + /// [`deref_mut`]: core::ops::DerefMut::deref_mut + /// [`into`]: core::convert::Into::into + pub(crate) unsafe fn new_unchecked(bytes: B) -> Ref<B, T> { + // INVARIANTS: The caller has promised that `bytes`'s referent is + // validly-aligned and has a valid size. + Ref(bytes, PhantomData) + } + } + + impl<B: ByteSlice, T: ?Sized> Ref<B, T> { + /// Access the byte slice as a [`ByteSlice`]. + /// + /// # Safety + /// + /// The caller promises not to call methods on the returned + /// [`ByteSlice`] other than `ByteSlice` methods (for example, via + /// `Any::downcast_ref`). + /// + /// `as_byte_slice` promises to return a `ByteSlice` whose referent is + /// validly-aligned for `T` and has a valid size for `T`. + pub(crate) unsafe fn as_byte_slice(&self) -> &impl ByteSlice { + // INVARIANTS: The caller promises not to call methods other than + // those on `ByteSlice`. Since `B: ByteSlice`, dereference stability + // guarantees that calling `ByteSlice` methods will not change the + // address or length of `self.0`'s referent. + // + // SAFETY: By invariant on `self.0`, the alignment and size + // post-conditions are upheld. + &self.0 + } + } + + impl<B: ByteSliceMut, T: ?Sized> Ref<B, T> { + /// Access the byte slice as a [`ByteSliceMut`]. + /// + /// # Safety + /// + /// The caller promises not to call methods on the returned + /// [`ByteSliceMut`] other than `ByteSliceMut` methods (for example, via + /// `Any::downcast_mut`). + /// + /// `as_byte_slice` promises to return a `ByteSlice` whose referent is + /// validly-aligned for `T` and has a valid size for `T`. 
+ pub(crate) unsafe fn as_byte_slice_mut(&mut self) -> &mut impl ByteSliceMut { + // INVARIANTS: The caller promises not to call methods other than + // those on `ByteSliceMut`. Since `B: ByteSlice`, dereference + // stability guarantees that calling `ByteSlice` methods will not + // change the address or length of `self.0`'s referent. + // + // SAFETY: By invariant on `self.0`, the alignment and size + // post-conditions are upheld. + &mut self.0 + } + } + + impl<'a, B: IntoByteSlice<'a>, T: ?Sized> Ref<B, T> { + /// Access the byte slice as an [`IntoByteSlice`]. + /// + /// # Safety + /// + /// The caller promises not to call methods on the returned + /// [`IntoByteSlice`] other than `IntoByteSlice` methods (for example, + /// via `Any::downcast_ref`). + /// + /// `as_byte_slice` promises to return a `ByteSlice` whose referent is + /// validly-aligned for `T` and has a valid size for `T`. + pub(crate) unsafe fn into_byte_slice(self) -> impl IntoByteSlice<'a> { + // INVARIANTS: The caller promises not to call methods other than + // those on `IntoByteSlice`. Since `B: ByteSlice`, dereference + // stability guarantees that calling `ByteSlice` methods will not + // change the address or length of `self.0`'s referent. + // + // SAFETY: By invariant on `self.0`, the alignment and size + // post-conditions are upheld. + self.0 + } + } + + impl<'a, B: IntoByteSliceMut<'a>, T: ?Sized> Ref<B, T> { + /// Access the byte slice as an [`IntoByteSliceMut`]. + /// + /// # Safety + /// + /// The caller promises not to call methods on the returned + /// [`IntoByteSliceMut`] other than `IntoByteSliceMut` methods (for + /// example, via `Any::downcast_mut`). + /// + /// `as_byte_slice` promises to return a `ByteSlice` whose referent is + /// validly-aligned for `T` and has a valid size for `T`. + pub(crate) unsafe fn into_byte_slice_mut(self) -> impl IntoByteSliceMut<'a> { + // INVARIANTS: The caller promises not to call methods other than + // those on `IntoByteSliceMut`. Since `B: ByteSlice`, dereference + // stability guarantees that calling `ByteSlice` methods will not + // change the address or length of `self.0`'s referent. + // + // SAFETY: By invariant on `self.0`, the alignment and size + // post-conditions are upheld. + self.0 + } + } + + impl<B: CloneableByteSlice + Clone, T: ?Sized> Clone for Ref<B, T> { + #[inline] + fn clone(&self) -> Ref<B, T> { + // INVARIANTS: Since `B: CloneableByteSlice`, `self.0.clone()` has + // the same address and length as `self.0`. Since `self.0` upholds + // the field invariants, so does `self.0.clone()`. + Ref(self.0.clone(), PhantomData) + } + } + + // INVARIANTS: Since `B: CopyableByteSlice`, the copied `Ref`'s `.0` has the + // same address and length as the original `Ref`'s `.0`. Since the original + // upholds the field invariants, so does the copy. + impl<B: CopyableByteSlice + Copy, T: ?Sized> Copy for Ref<B, T> {} +} + +#[allow(unreachable_pub)] // This is a false positive on our MSRV toolchain. +pub use def::Ref; + +impl<B, T> Ref<B, T> +where + B: ByteSlice, +{ + #[must_use = "has no side effects"] + pub(crate) fn sized_from(bytes: B) -> Result<Ref<B, T>, CastError<B, T>> { + if bytes.len() != mem::size_of::<T>() { + return Err(SizeError::new(bytes).into()); + } + if let Err(err) = util::validate_aligned_to::<_, T>(bytes.deref()) { + return Err(err.with_src(bytes).into()); + } + + // SAFETY: We just validated size and alignment. 
+ Ok(unsafe { Ref::new_unchecked(bytes) }) + } +} + +impl<B, T> Ref<B, T> +where + B: SplitByteSlice, +{ + #[must_use = "has no side effects"] + pub(crate) fn sized_from_prefix(bytes: B) -> Result<(Ref<B, T>, B), CastError<B, T>> { + if bytes.len() < mem::size_of::<T>() { + return Err(SizeError::new(bytes).into()); + } + if let Err(err) = util::validate_aligned_to::<_, T>(bytes.deref()) { + return Err(err.with_src(bytes).into()); + } + let (bytes, suffix) = + bytes.split_at(mem::size_of::<T>()).map_err(|b| SizeError::new(b).into())?; + // SAFETY: We just validated alignment and that `bytes` is at least as + // large as `T`. `bytes.split_at(mem::size_of::<T>())?` ensures that the + // new `bytes` is exactly the size of `T`. By safety postcondition on + // `SplitByteSlice::split_at` we can rely on `split_at` to produce the + // correct `bytes` and `suffix`. + let r = unsafe { Ref::new_unchecked(bytes) }; + Ok((r, suffix)) + } + + #[must_use = "has no side effects"] + pub(crate) fn sized_from_suffix(bytes: B) -> Result<(B, Ref<B, T>), CastError<B, T>> { + let bytes_len = bytes.len(); + let split_at = if let Some(split_at) = bytes_len.checked_sub(mem::size_of::<T>()) { + split_at + } else { + return Err(SizeError::new(bytes).into()); + }; + let (prefix, bytes) = bytes.split_at(split_at).map_err(|b| SizeError::new(b).into())?; + if let Err(err) = util::validate_aligned_to::<_, T>(bytes.deref()) { + return Err(err.with_src(bytes).into()); + } + // SAFETY: Since `split_at` is defined as `bytes_len - size_of::<T>()`, + // the `bytes` which results from `let (prefix, bytes) = + // bytes.split_at(split_at)?` has length `size_of::<T>()`. After + // constructing `bytes`, we validate that it has the proper alignment. + // By safety postcondition on `SplitByteSlice::split_at` we can rely on + // `split_at` to produce the correct `prefix` and `bytes`. + let r = unsafe { Ref::new_unchecked(bytes) }; + Ok((prefix, r)) + } +} + +impl<B, T> Ref<B, T> +where + B: ByteSlice, + T: KnownLayout + Immutable + ?Sized, +{ + /// Constructs a `Ref` from a byte slice. + /// + /// If the length of `source` is not a [valid size of `T`][valid-size], or + /// if `source` is not appropriately aligned for `T`, this returns `Err`. If + /// [`T: Unaligned`][t-unaligned], you can [infallibly discard the alignment + /// error][size-error-from]. + /// + /// `T` may be a sized type, a slice, or a [slice DST][slice-dst]. + /// + /// [valid-size]: crate::KnownLayout#what-is-a-valid-size + /// [t-unaligned]: Unaligned + /// [size-error-from]: error/struct.SizeError.html#method.from-1 + /// [slice-dst]: KnownLayout#dynamically-sized-types + /// + /// # Compile-Time Assertions + /// + /// This method cannot yet be used on unsized types whose dynamically-sized + /// component is zero-sized. Attempting to use this method on such types + /// results in a compile-time assertion error; e.g.: + /// + /// ```compile_fail,E0080 + /// use zerocopy::*; + /// # use zerocopy_derive::*; + /// + /// #[derive(Immutable, KnownLayout)] + /// #[repr(C)] + /// struct ZSTy { + /// leading_sized: u16, + /// trailing_dst: [()], + /// } + /// + /// let _ = Ref::<_, ZSTy>::from_bytes(&b"UU"[..]); // ⚠ Compile Error! 
+ /// ``` + #[must_use = "has no side effects"] + #[inline] + pub fn from_bytes(source: B) -> Result<Ref<B, T>, CastError<B, T>> { + static_assert_dst_is_not_zst!(T); + if let Err(e) = + Ptr::from_ref(source.deref()).try_cast_into_no_leftover::<T, BecauseImmutable>(None) + { + return Err(e.with_src(()).with_src(source)); + } + // SAFETY: `try_cast_into_no_leftover` validates size and alignment. + Ok(unsafe { Ref::new_unchecked(source) }) + } +} + +impl<B, T> Ref<B, T> +where + B: SplitByteSlice, + T: KnownLayout + Immutable + ?Sized, +{ + /// Constructs a `Ref` from the prefix of a byte slice. + /// + /// This method computes the [largest possible size of `T`][valid-size] that + /// can fit in the leading bytes of `source`, then attempts to return both a + /// `Ref` to those bytes, and a reference to the remaining bytes. If there + /// are insufficient bytes, or if `source` is not appropriately aligned, + /// this returns `Err`. If [`T: Unaligned`][t-unaligned], you can + /// [infallibly discard the alignment error][size-error-from]. + /// + /// `T` may be a sized type, a slice, or a [slice DST][slice-dst]. + /// + /// [valid-size]: crate::KnownLayout#what-is-a-valid-size + /// [t-unaligned]: Unaligned + /// [size-error-from]: error/struct.SizeError.html#method.from-1 + /// [slice-dst]: KnownLayout#dynamically-sized-types + /// + /// # Compile-Time Assertions + /// + /// This method cannot yet be used on unsized types whose dynamically-sized + /// component is zero-sized. Attempting to use this method on such types + /// results in a compile-time assertion error; e.g.: + /// + /// ```compile_fail,E0080 + /// use zerocopy::*; + /// # use zerocopy_derive::*; + /// + /// #[derive(Immutable, KnownLayout)] + /// #[repr(C)] + /// struct ZSTy { + /// leading_sized: u16, + /// trailing_dst: [()], + /// } + /// + /// let _ = Ref::<_, ZSTy>::from_prefix(&b"UU"[..]); // ⚠ Compile Error! + /// ``` + #[must_use = "has no side effects"] + #[inline] + pub fn from_prefix(source: B) -> Result<(Ref<B, T>, B), CastError<B, T>> { + static_assert_dst_is_not_zst!(T); + let remainder = match Ptr::from_ref(source.deref()) + .try_cast_into::<T, BecauseImmutable>(CastType::Prefix, None) + { + Ok((_, remainder)) => remainder, + Err(e) => { + return Err(e.with_src(()).with_src(source)); + } + }; + + // SAFETY: `remainder` is constructed as a subset of `source`, and so it + // cannot have a larger size than `source`. Both of their `len` methods + // measure bytes (`source` deref's to `[u8]`, and `remainder` is a + // `Ptr<[u8]>`), so `source.len() >= remainder.len()`. Thus, this cannot + // underflow. + #[allow(unstable_name_collisions)] + let split_at = unsafe { source.len().unchecked_sub(remainder.len()) }; + let (bytes, suffix) = source.split_at(split_at).map_err(|b| SizeError::new(b).into())?; + // SAFETY: `try_cast_into` validates size and alignment, and returns a + // `split_at` that indicates how many bytes of `source` correspond to a + // valid `T`. By safety postcondition on `SplitByteSlice::split_at` we + // can rely on `split_at` to produce the correct `source` and `suffix`. + let r = unsafe { Ref::new_unchecked(bytes) }; + Ok((r, suffix)) + } + + /// Constructs a `Ref` from the suffix of a byte slice. + /// + /// This method computes the [largest possible size of `T`][valid-size] that + /// can fit in the trailing bytes of `source`, then attempts to return both + /// a `Ref` to those bytes, and a reference to the preceding bytes. 
If there + /// are insufficient bytes, or if that suffix of `source` is not + /// appropriately aligned, this returns `Err`. If [`T: + /// Unaligned`][t-unaligned], you can [infallibly discard the alignment + /// error][size-error-from]. + /// + /// `T` may be a sized type, a slice, or a [slice DST][slice-dst]. + /// + /// [valid-size]: crate::KnownLayout#what-is-a-valid-size + /// [t-unaligned]: Unaligned + /// [size-error-from]: error/struct.SizeError.html#method.from-1 + /// [slice-dst]: KnownLayout#dynamically-sized-types + /// + /// # Compile-Time Assertions + /// + /// This method cannot yet be used on unsized types whose dynamically-sized + /// component is zero-sized. Attempting to use this method on such types + /// results in a compile-time assertion error; e.g.: + /// + /// ```compile_fail,E0080 + /// use zerocopy::*; + /// # use zerocopy_derive::*; + /// + /// #[derive(Immutable, KnownLayout)] + /// #[repr(C)] + /// struct ZSTy { + /// leading_sized: u16, + /// trailing_dst: [()], + /// } + /// + /// let _ = Ref::<_, ZSTy>::from_suffix(&b"UU"[..]); // ⚠ Compile Error! + /// ``` + #[must_use = "has no side effects"] + #[inline] + pub fn from_suffix(source: B) -> Result<(B, Ref<B, T>), CastError<B, T>> { + static_assert_dst_is_not_zst!(T); + let remainder = match Ptr::from_ref(source.deref()) + .try_cast_into::<T, BecauseImmutable>(CastType::Suffix, None) + { + Ok((_, remainder)) => remainder, + Err(e) => { + let e = e.with_src(()); + return Err(e.with_src(source)); + } + }; + + let split_at = remainder.len(); + let (prefix, bytes) = source.split_at(split_at).map_err(|b| SizeError::new(b).into())?; + // SAFETY: `try_cast_into` validates size and alignment, and returns a + // `split_at` that indicates how many bytes of `source` correspond to a + // valid `T`. By safety postcondition on `SplitByteSlice::split_at` we + // can rely on `split_at` to produce the correct `prefix` and `bytes`. + let r = unsafe { Ref::new_unchecked(bytes) }; + Ok((prefix, r)) + } +} + +impl<B, T> Ref<B, T> +where + B: ByteSlice, + T: KnownLayout<PointerMetadata = usize> + Immutable + ?Sized, +{ + /// Constructs a `Ref` from the given bytes with DST length equal to `count` + /// without copying. + /// + /// This method attempts to return a `Ref` to the prefix of `source` + /// interpreted as a `T` with `count` trailing elements, and a reference to + /// the remaining bytes. If the length of `source` is not equal to the size + /// of `Self` with `count` elements, or if `source` is not appropriately + /// aligned, this returns `Err`. If [`T: Unaligned`][t-unaligned], you can + /// [infallibly discard the alignment error][size-error-from]. + /// + /// [t-unaligned]: Unaligned + /// [size-error-from]: error/struct.SizeError.html#method.from-1 + /// + /// # Compile-Time Assertions + /// + /// This method cannot yet be used on unsized types whose dynamically-sized + /// component is zero-sized. Attempting to use this method on such types + /// results in a compile-time assertion error; e.g.: + /// + /// ```compile_fail,E0080 + /// use zerocopy::*; + /// # use zerocopy_derive::*; + /// + /// #[derive(Immutable, KnownLayout)] + /// #[repr(C)] + /// struct ZSTy { + /// leading_sized: u16, + /// trailing_dst: [()], + /// } + /// + /// let _ = Ref::<_, ZSTy>::from_bytes_with_elems(&b"UU"[..], 42); // ⚠ Compile Error! 
+ /// ``` + #[inline] + pub fn from_bytes_with_elems(source: B, count: usize) -> Result<Ref<B, T>, CastError<B, T>> { + static_assert_dst_is_not_zst!(T); + let expected_len = match T::size_for_metadata(count) { + Some(len) => len, + None => return Err(SizeError::new(source).into()), + }; + if source.len() != expected_len { + return Err(SizeError::new(source).into()); + } + Self::from_bytes(source) + } +} + +impl<B, T> Ref<B, T> +where + B: SplitByteSlice, + T: KnownLayout<PointerMetadata = usize> + Immutable + ?Sized, +{ + /// Constructs a `Ref` from the prefix of the given bytes with DST + /// length equal to `count` without copying. + /// + /// This method attempts to return a `Ref` to the prefix of `source` + /// interpreted as a `T` with `count` trailing elements, and a reference to + /// the remaining bytes. If there are insufficient bytes, or if `source` is + /// not appropriately aligned, this returns `Err`. If [`T: + /// Unaligned`][t-unaligned], you can [infallibly discard the alignment + /// error][size-error-from]. + /// + /// [t-unaligned]: Unaligned + /// [size-error-from]: error/struct.SizeError.html#method.from-1 + /// + /// # Compile-Time Assertions + /// + /// This method cannot yet be used on unsized types whose dynamically-sized + /// component is zero-sized. Attempting to use this method on such types + /// results in a compile-time assertion error; e.g.: + /// + /// ```compile_fail,E0080 + /// use zerocopy::*; + /// # use zerocopy_derive::*; + /// + /// #[derive(Immutable, KnownLayout)] + /// #[repr(C)] + /// struct ZSTy { + /// leading_sized: u16, + /// trailing_dst: [()], + /// } + /// + /// let _ = Ref::<_, ZSTy>::from_prefix_with_elems(&b"UU"[..], 42); // ⚠ Compile Error! + /// ``` + #[inline] + pub fn from_prefix_with_elems( + source: B, + count: usize, + ) -> Result<(Ref<B, T>, B), CastError<B, T>> { + static_assert_dst_is_not_zst!(T); + let expected_len = match T::size_for_metadata(count) { + Some(len) => len, + None => return Err(SizeError::new(source).into()), + }; + let (prefix, bytes) = source.split_at(expected_len).map_err(SizeError::new)?; + Self::from_bytes(prefix).map(move |l| (l, bytes)) + } + + /// Constructs a `Ref` from the suffix of the given bytes with DST length + /// equal to `count` without copying. + /// + /// This method attempts to return a `Ref` to the suffix of `source` + /// interpreted as a `T` with `count` trailing elements, and a reference to + /// the preceding bytes. If there are insufficient bytes, or if that suffix + /// of `source` is not appropriately aligned, this returns `Err`. If [`T: + /// Unaligned`][t-unaligned], you can [infallibly discard the alignment + /// error][size-error-from]. + /// + /// [t-unaligned]: Unaligned + /// [size-error-from]: error/struct.SizeError.html#method.from-1 + /// + /// # Compile-Time Assertions + /// + /// This method cannot yet be used on unsized types whose dynamically-sized + /// component is zero-sized. Attempting to use this method on such types + /// results in a compile-time assertion error; e.g.: + /// + /// ```compile_fail,E0080 + /// use zerocopy::*; + /// # use zerocopy_derive::*; + /// + /// #[derive(Immutable, KnownLayout)] + /// #[repr(C)] + /// struct ZSTy { + /// leading_sized: u16, + /// trailing_dst: [()], + /// } + /// + /// let _ = Ref::<_, ZSTy>::from_suffix_with_elems(&b"UU"[..], 42); // ⚠ Compile Error! 
+ /// ``` + #[inline] + pub fn from_suffix_with_elems( + source: B, + count: usize, + ) -> Result<(B, Ref<B, T>), CastError<B, T>> { + static_assert_dst_is_not_zst!(T); + let expected_len = match T::size_for_metadata(count) { + Some(len) => len, + None => return Err(SizeError::new(source).into()), + }; + let split_at = if let Some(split_at) = source.len().checked_sub(expected_len) { + split_at + } else { + return Err(SizeError::new(source).into()); + }; + // SAFETY: The preceding `source.len().checked_sub(expected_len)` + // guarantees that `split_at` is in-bounds. + let (bytes, suffix) = unsafe { source.split_at_unchecked(split_at) }; + Self::from_bytes(suffix).map(move |l| (bytes, l)) + } +} + +impl<'a, B, T> Ref<B, T> +where + B: 'a + IntoByteSlice<'a>, + T: FromBytes + KnownLayout + Immutable + ?Sized, +{ + /// Converts this `Ref` into a reference. + /// + /// `into_ref` consumes the `Ref`, and returns a reference to `T`. + /// + /// Note: this is an associated function, which means that you have to call + /// it as `Ref::into_ref(r)` instead of `r.into_ref()`. This is so that + /// there is no conflict with a method on the inner type. + #[must_use = "has no side effects"] + #[inline(always)] + pub fn into_ref(r: Self) -> &'a T { + // Presumably unreachable, since we've guarded each constructor of `Ref`. + static_assert_dst_is_not_zst!(T); + + // SAFETY: We don't call any methods on `b` other than those provided by + // `IntoByteSlice`. + let b = unsafe { r.into_byte_slice() }; + + // PANICS: By post-condition on `into_byte_slice`, `b`'s size and + // alignment are valid for `T`. By post-condition, `b.into_byte_slice()` + // produces a byte slice with identical address and length to that + // produced by `b.deref()`. + let ptr = Ptr::from_ref(b.into_byte_slice()) + .try_cast_into_no_leftover::<T, BecauseImmutable>(None) + .expect("zerocopy internal error: into_ref should be infallible"); + let ptr = ptr.recall_validity(); + ptr.as_ref() + } +} + +impl<'a, B, T> Ref<B, T> +where + B: 'a + IntoByteSliceMut<'a>, + T: FromBytes + IntoBytes + KnownLayout + ?Sized, +{ + /// Converts this `Ref` into a mutable reference. + /// + /// `into_mut` consumes the `Ref`, and returns a mutable reference to `T`. + /// + /// Note: this is an associated function, which means that you have to call + /// it as `Ref::into_mut(r)` instead of `r.into_mut()`. This is so that + /// there is no conflict with a method on the inner type. + #[must_use = "has no side effects"] + #[inline(always)] + pub fn into_mut(r: Self) -> &'a mut T { + // Presumably unreachable, since we've guarded each constructor of `Ref`. + static_assert_dst_is_not_zst!(T); + + // SAFETY: We don't call any methods on `b` other than those provided by + // `IntoByteSliceMut`. + let b = unsafe { r.into_byte_slice_mut() }; + + // PANICS: By post-condition on `into_byte_slice_mut`, `b`'s size and + // alignment are valid for `T`. By post-condition, + // `b.into_byte_slice_mut()` produces a byte slice with identical + // address and length to that produced by `b.deref_mut()`. + let ptr = Ptr::from_mut(b.into_byte_slice_mut()) + .try_cast_into_no_leftover::<T, BecauseExclusive>(None) + .expect("zerocopy internal error: into_ref should be infallible"); + let ptr = ptr.recall_validity::<_, (_, (_, _))>(); + ptr.as_mut() + } +} + +impl<B, T> Ref<B, T> +where + B: ByteSlice, + T: ?Sized, +{ + /// Gets the underlying bytes. 
+ /// + /// Note: this is an associated function, which means that you have to call + /// it as `Ref::bytes(r)` instead of `r.bytes()`. This is so that there is + /// no conflict with a method on the inner type. + #[inline] + pub fn bytes(r: &Self) -> &[u8] { + // SAFETY: We don't call any methods on `b` other than those provided by + // `ByteSlice`. + unsafe { r.as_byte_slice().deref() } + } +} + +impl<B, T> Ref<B, T> +where + B: ByteSliceMut, + T: ?Sized, +{ + /// Gets the underlying bytes mutably. + /// + /// Note: this is an associated function, which means that you have to call + /// it as `Ref::bytes_mut(r)` instead of `r.bytes_mut()`. This is so that + /// there is no conflict with a method on the inner type. + #[inline] + pub fn bytes_mut(r: &mut Self) -> &mut [u8] { + // SAFETY: We don't call any methods on `b` other than those provided by + // `ByteSliceMut`. + unsafe { r.as_byte_slice_mut().deref_mut() } + } +} + +impl<B, T> Ref<B, T> +where + B: ByteSlice, + T: FromBytes, +{ + /// Reads a copy of `T`. + /// + /// Note: this is an associated function, which means that you have to call + /// it as `Ref::read(r)` instead of `r.read()`. This is so that there is no + /// conflict with a method on the inner type. + #[must_use = "has no side effects"] + #[inline] + pub fn read(r: &Self) -> T { + // SAFETY: We don't call any methods on `b` other than those provided by + // `ByteSlice`. + let b = unsafe { r.as_byte_slice() }; + + // SAFETY: By postcondition on `as_byte_slice`, we know that `b` is a + // valid size and alignment for `T`. By safety invariant on `ByteSlice`, + // we know that this is preserved via `.deref()`. Because `T: + // FromBytes`, it is sound to interpret these bytes as a `T`. + unsafe { ptr::read(b.deref().as_ptr().cast::<T>()) } + } +} + +impl<B, T> Ref<B, T> +where + B: ByteSliceMut, + T: IntoBytes, +{ + /// Writes the bytes of `t` and then forgets `t`. + /// + /// Note: this is an associated function, which means that you have to call + /// it as `Ref::write(r, t)` instead of `r.write(t)`. This is so that there + /// is no conflict with a method on the inner type. + #[inline] + pub fn write(r: &mut Self, t: T) { + // SAFETY: We don't call any methods on `b` other than those provided by + // `ByteSliceMut`. + let b = unsafe { r.as_byte_slice_mut() }; + + // SAFETY: By postcondition on `as_byte_slice_mut`, we know that `b` is + // a valid size and alignment for `T`. By safety invariant on + // `ByteSlice`, we know that this is preserved via `.deref()`. Writing + // `t` to the buffer will allow all of the bytes of `t` to be accessed + // as a `[u8]`, but because `T: IntoBytes`, we know that this is sound. + unsafe { ptr::write(b.deref_mut().as_mut_ptr().cast::<T>(), t) } + } +} + +impl<B, T> Deref for Ref<B, T> +where + B: ByteSlice, + T: FromBytes + KnownLayout + Immutable + ?Sized, +{ + type Target = T; + #[inline] + fn deref(&self) -> &T { + // Presumably unreachable, since we've guarded each constructor of `Ref`. + static_assert_dst_is_not_zst!(T); + + // SAFETY: We don't call any methods on `b` other than those provided by + // `ByteSlice`. + let b = unsafe { self.as_byte_slice() }; + + // PANICS: By postcondition on `as_byte_slice`, `b`'s size and alignment + // are valid for `T`, and by invariant on `ByteSlice`, these are + // preserved through `.deref()`, so this `unwrap` will not panic. 
+ let ptr = Ptr::from_ref(b.deref()) + .try_cast_into_no_leftover::<T, BecauseImmutable>(None) + .expect("zerocopy internal error: Deref::deref should be infallible"); + let ptr = ptr.recall_validity(); + ptr.as_ref() + } +} + +impl<B, T> DerefMut for Ref<B, T> +where + B: ByteSliceMut, + // FIXME(#251): We can't remove `Immutable` here because it's required by + // the impl of `Deref`, which is a super-trait of `DerefMut`. Maybe we can + // add a separate inherent method for this? + T: FromBytes + IntoBytes + KnownLayout + Immutable + ?Sized, +{ + #[inline] + fn deref_mut(&mut self) -> &mut T { + // Presumably unreachable, since we've guarded each constructor of `Ref`. + static_assert_dst_is_not_zst!(T); + + // SAFETY: We don't call any methods on `b` other than those provided by + // `ByteSliceMut`. + let b = unsafe { self.as_byte_slice_mut() }; + + // PANICS: By postcondition on `as_byte_slice_mut`, `b`'s size and + // alignment are valid for `T`, and by invariant on `ByteSlice`, these + // are preserved through `.deref_mut()`, so this `unwrap` will not + // panic. + let ptr = Ptr::from_mut(b.deref_mut()) + .try_cast_into_no_leftover::<T, BecauseExclusive>(None) + .expect("zerocopy internal error: DerefMut::deref_mut should be infallible"); + let ptr = ptr.recall_validity::<_, (_, (_, (BecauseExclusive, BecauseExclusive)))>(); + ptr.as_mut() + } +} + +impl<T, B> Display for Ref<B, T> +where + B: ByteSlice, + T: FromBytes + Display + KnownLayout + Immutable + ?Sized, +{ + #[inline] + fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result { + let inner: &T = self; + inner.fmt(fmt) + } +} + +impl<T, B> Debug for Ref<B, T> +where + B: ByteSlice, + T: FromBytes + Debug + KnownLayout + Immutable + ?Sized, +{ + #[inline] + fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result { + let inner: &T = self; + fmt.debug_tuple("Ref").field(&inner).finish() + } +} + +impl<T, B> Eq for Ref<B, T> +where + B: ByteSlice, + T: FromBytes + Eq + KnownLayout + Immutable + ?Sized, +{ +} + +impl<T, B> PartialEq for Ref<B, T> +where + B: ByteSlice, + T: FromBytes + PartialEq + KnownLayout + Immutable + ?Sized, +{ + #[inline] + fn eq(&self, other: &Self) -> bool { + self.deref().eq(other.deref()) + } +} + +impl<T, B> Ord for Ref<B, T> +where + B: ByteSlice, + T: FromBytes + Ord + KnownLayout + Immutable + ?Sized, +{ + #[inline] + fn cmp(&self, other: &Self) -> Ordering { + let inner: &T = self; + let other_inner: &T = other; + inner.cmp(other_inner) + } +} + +impl<T, B> PartialOrd for Ref<B, T> +where + B: ByteSlice, + T: FromBytes + PartialOrd + KnownLayout + Immutable + ?Sized, +{ + #[inline] + fn partial_cmp(&self, other: &Self) -> Option<Ordering> { + let inner: &T = self; + let other_inner: &T = other; + inner.partial_cmp(other_inner) + } +} + +#[cfg(test)] +#[allow(clippy::assertions_on_result_states)] +mod tests { + use core::convert::TryInto as _; + + use super::*; + use crate::util::testutil::*; + + #[test] + fn test_mut_slice_into_ref() { + // Prior to #1260/#1299, calling `into_ref` on a `&mut [u8]`-backed + // `Ref` was not supported. + let mut buf = [0u8]; + let r = Ref::<&mut [u8], u8>::from_bytes(&mut buf).unwrap(); + assert_eq!(Ref::into_ref(r), &0); + } + + #[test] + fn test_address() { + // Test that the `Deref` and `DerefMut` implementations return a + // reference which points to the right region of memory. 
+ + let buf = [0]; + let r = Ref::<_, u8>::from_bytes(&buf[..]).unwrap(); + let buf_ptr = buf.as_ptr(); + let deref_ptr: *const u8 = r.deref(); + assert_eq!(buf_ptr, deref_ptr); + + let buf = [0]; + let r = Ref::<_, [u8]>::from_bytes(&buf[..]).unwrap(); + let buf_ptr = buf.as_ptr(); + let deref_ptr = r.deref().as_ptr(); + assert_eq!(buf_ptr, deref_ptr); + } + + // Verify that values written to a `Ref` are properly shared between the + // typed and untyped representations, that reads via `deref` and `read` + // behave the same, and that writes via `deref_mut` and `write` behave the + // same. + fn test_new_helper(mut r: Ref<&mut [u8], AU64>) { + // assert that the value starts at 0 + assert_eq!(*r, AU64(0)); + assert_eq!(Ref::read(&r), AU64(0)); + + // Assert that values written to the typed value are reflected in the + // byte slice. + const VAL1: AU64 = AU64(0xFF00FF00FF00FF00); + *r = VAL1; + assert_eq!(Ref::bytes(&r), &VAL1.to_bytes()); + *r = AU64(0); + Ref::write(&mut r, VAL1); + assert_eq!(Ref::bytes(&r), &VAL1.to_bytes()); + + // Assert that values written to the byte slice are reflected in the + // typed value. + const VAL2: AU64 = AU64(!VAL1.0); // different from `VAL1` + Ref::bytes_mut(&mut r).copy_from_slice(&VAL2.to_bytes()[..]); + assert_eq!(*r, VAL2); + assert_eq!(Ref::read(&r), VAL2); + } + + // Verify that values written to a `Ref` are properly shared between the + // typed and untyped representations; pass a value with `typed_len` `AU64`s + // backed by an array of `typed_len * 8` bytes. + fn test_new_helper_slice(mut r: Ref<&mut [u8], [AU64]>, typed_len: usize) { + // Assert that the value starts out zeroed. + assert_eq!(&*r, vec![AU64(0); typed_len].as_slice()); + + // Check the backing storage is the exact same slice. + let untyped_len = typed_len * 8; + assert_eq!(Ref::bytes(&r).len(), untyped_len); + assert_eq!(Ref::bytes(&r).as_ptr(), r.as_ptr().cast::<u8>()); + + // Assert that values written to the typed value are reflected in the + // byte slice. + const VAL1: AU64 = AU64(0xFF00FF00FF00FF00); + for typed in &mut *r { + *typed = VAL1; + } + assert_eq!(Ref::bytes(&r), VAL1.0.to_ne_bytes().repeat(typed_len).as_slice()); + + // Assert that values written to the byte slice are reflected in the + // typed value. + const VAL2: AU64 = AU64(!VAL1.0); // different from VAL1 + Ref::bytes_mut(&mut r).copy_from_slice(&VAL2.0.to_ne_bytes().repeat(typed_len)); + assert!(r.iter().copied().all(|x| x == VAL2)); + } + + #[test] + fn test_new_aligned_sized() { + // Test that a properly-aligned, properly-sized buffer works for new, + // new_from_prefix, and new_from_suffix, and that new_from_prefix and + // new_from_suffix return empty slices. Test that a properly-aligned + // buffer whose length is a multiple of the element size works for + // new_slice. + + // A buffer with an alignment of 8. + let mut buf = Align::<[u8; 8], AU64>::default(); + // `buf.t` should be aligned to 8, so this should always succeed. + test_new_helper(Ref::<_, AU64>::from_bytes(&mut buf.t[..]).unwrap()); + { + // In a block so that `r` and `suffix` don't live too long. + buf.set_default(); + let (r, suffix) = Ref::<_, AU64>::from_prefix(&mut buf.t[..]).unwrap(); + assert!(suffix.is_empty()); + test_new_helper(r); + } + { + buf.set_default(); + let (prefix, r) = Ref::<_, AU64>::from_suffix(&mut buf.t[..]).unwrap(); + assert!(prefix.is_empty()); + test_new_helper(r); + } + + // A buffer with alignment 8 and length 24. 
We choose this length very + // intentionally: if we instead used length 16, then the prefix and + // suffix lengths would be identical. In the past, we used length 16, + // which resulted in this test failing to discover the bug uncovered in + // #506. + let mut buf = Align::<[u8; 24], AU64>::default(); + // `buf.t` should be aligned to 8 and have a length which is a multiple + // of `size_of::<AU64>()`, so this should always succeed. + test_new_helper_slice(Ref::<_, [AU64]>::from_bytes(&mut buf.t[..]).unwrap(), 3); + buf.set_default(); + let r = Ref::<_, [AU64]>::from_bytes_with_elems(&mut buf.t[..], 3).unwrap(); + test_new_helper_slice(r, 3); + + let ascending: [u8; 24] = (0..24).collect::<Vec<_>>().try_into().unwrap(); + // 16 ascending bytes followed by 8 zeros. + let mut ascending_prefix = ascending; + ascending_prefix[16..].copy_from_slice(&[0, 0, 0, 0, 0, 0, 0, 0]); + // 8 zeros followed by 16 ascending bytes. + let mut ascending_suffix = ascending; + ascending_suffix[..8].copy_from_slice(&[0, 0, 0, 0, 0, 0, 0, 0]); + { + buf.t = ascending_suffix; + let (r, suffix) = Ref::<_, [AU64]>::from_prefix_with_elems(&mut buf.t[..], 1).unwrap(); + assert_eq!(suffix, &ascending[8..]); + test_new_helper_slice(r, 1); + } + { + buf.t = ascending_prefix; + let (prefix, r) = Ref::<_, [AU64]>::from_suffix_with_elems(&mut buf.t[..], 1).unwrap(); + assert_eq!(prefix, &ascending[..16]); + test_new_helper_slice(r, 1); + } + } + + #[test] + fn test_new_oversized() { + // Test that a properly-aligned, overly-sized buffer works for + // `new_from_prefix` and `new_from_suffix`, and that they return the + // remainder and prefix of the slice respectively. + + let mut buf = Align::<[u8; 16], AU64>::default(); + { + // In a block so that `r` and `suffix` don't live too long. `buf.t` + // should be aligned to 8, so this should always succeed. + let (r, suffix) = Ref::<_, AU64>::from_prefix(&mut buf.t[..]).unwrap(); + assert_eq!(suffix.len(), 8); + test_new_helper(r); + } + { + buf.set_default(); + // `buf.t` should be aligned to 8, so this should always succeed. + let (prefix, r) = Ref::<_, AU64>::from_suffix(&mut buf.t[..]).unwrap(); + assert_eq!(prefix.len(), 8); + test_new_helper(r); + } + } + + #[test] + #[allow(clippy::cognitive_complexity)] + fn test_new_error() { + // Fail because the buffer is too large. + + // A buffer with an alignment of 8. + let buf = Align::<[u8; 16], AU64>::default(); + // `buf.t` should be aligned to 8, so only the length check should fail. + assert!(Ref::<_, AU64>::from_bytes(&buf.t[..]).is_err()); + + // Fail because the buffer is too small. + + // A buffer with an alignment of 8. + let buf = Align::<[u8; 4], AU64>::default(); + // `buf.t` should be aligned to 8, so only the length check should fail. + assert!(Ref::<_, AU64>::from_bytes(&buf.t[..]).is_err()); + assert!(Ref::<_, AU64>::from_prefix(&buf.t[..]).is_err()); + assert!(Ref::<_, AU64>::from_suffix(&buf.t[..]).is_err()); + + // Fail because the length is not a multiple of the element size. + + let buf = Align::<[u8; 12], AU64>::default(); + // `buf.t` has length 12, but element size is 8. + assert!(Ref::<_, [AU64]>::from_bytes(&buf.t[..]).is_err()); + + // Fail because the buffer is too short. + let buf = Align::<[u8; 12], AU64>::default(); + // `buf.t` has length 12, but the element size is 8 (and we're expecting + // two of them). 
For each function, we test with a length that would + // cause the size to overflow `usize`, and with a normal length that + // will fail thanks to the buffer being too short; these are different + // error paths, and while the error types are the same, the distinction + // shows up in code coverage metrics. + let n = (usize::MAX / mem::size_of::<AU64>()) + 1; + assert!(Ref::<_, [AU64]>::from_bytes_with_elems(&buf.t[..], n).is_err()); + assert!(Ref::<_, [AU64]>::from_bytes_with_elems(&buf.t[..], 2).is_err()); + assert!(Ref::<_, [AU64]>::from_prefix_with_elems(&buf.t[..], n).is_err()); + assert!(Ref::<_, [AU64]>::from_prefix_with_elems(&buf.t[..], 2).is_err()); + assert!(Ref::<_, [AU64]>::from_suffix_with_elems(&buf.t[..], n).is_err()); + assert!(Ref::<_, [AU64]>::from_suffix_with_elems(&buf.t[..], 2).is_err()); + + // Fail because the alignment is insufficient. + + // A buffer with an alignment of 8. An odd buffer size is chosen so that + // the last byte of the buffer has odd alignment. + let buf = Align::<[u8; 13], AU64>::default(); + // Slicing from 1, we get a buffer with size 12 (so the length check + // should succeed) but an alignment of only 1, which is insufficient. + assert!(Ref::<_, AU64>::from_bytes(&buf.t[1..]).is_err()); + assert!(Ref::<_, AU64>::from_prefix(&buf.t[1..]).is_err()); + assert!(Ref::<_, [AU64]>::from_bytes(&buf.t[1..]).is_err()); + assert!(Ref::<_, [AU64]>::from_bytes_with_elems(&buf.t[1..], 1).is_err()); + assert!(Ref::<_, [AU64]>::from_prefix_with_elems(&buf.t[1..], 1).is_err()); + assert!(Ref::<_, [AU64]>::from_suffix_with_elems(&buf.t[1..], 1).is_err()); + // Slicing is unnecessary here because `new_from_suffix` uses the suffix + // of the slice, which has odd alignment. + assert!(Ref::<_, AU64>::from_suffix(&buf.t[..]).is_err()); + + // Fail due to arithmetic overflow. 
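+        // (A count of `usize::MAX / size_of::<AU64>() + 1` elements would
+        // occupy more than `usize::MAX` bytes, so the requested size cannot
+        // even be represented in `usize`.)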
+ + let buf = Align::<[u8; 16], AU64>::default(); + let unreasonable_len = usize::MAX / mem::size_of::<AU64>() + 1; + assert!(Ref::<_, [AU64]>::from_prefix_with_elems(&buf.t[..], unreasonable_len).is_err()); + assert!(Ref::<_, [AU64]>::from_suffix_with_elems(&buf.t[..], unreasonable_len).is_err()); + } + + #[test] + #[allow(unstable_name_collisions)] + #[allow(clippy::as_conversions)] + fn test_into_ref_mut() { + #[allow(unused)] + use crate::util::AsAddress as _; + + let mut buf = Align::<[u8; 8], u64>::default(); + let r = Ref::<_, u64>::from_bytes(&buf.t[..]).unwrap(); + let rf = Ref::into_ref(r); + assert_eq!(rf, &0u64); + let buf_addr = (&buf.t as *const [u8; 8]).addr(); + assert_eq!((rf as *const u64).addr(), buf_addr); + + let r = Ref::<_, u64>::from_bytes(&mut buf.t[..]).unwrap(); + let rf = Ref::into_mut(r); + assert_eq!(rf, &mut 0u64); + assert_eq!((rf as *mut u64).addr(), buf_addr); + + *rf = u64::MAX; + assert_eq!(buf.t, [0xFF; 8]); + } + + #[test] + fn test_display_debug() { + let buf = Align::<[u8; 8], u64>::default(); + let r = Ref::<_, u64>::from_bytes(&buf.t[..]).unwrap(); + assert_eq!(format!("{}", r), "0"); + assert_eq!(format!("{:?}", r), "Ref(0)"); + + let buf = Align::<[u8; 8], u64>::default(); + let r = Ref::<_, [u64]>::from_bytes(&buf.t[..]).unwrap(); + assert_eq!(format!("{:?}", r), "Ref([0])"); + } + + #[test] + fn test_eq() { + let buf1 = 0_u64; + let r1 = Ref::<_, u64>::from_bytes(buf1.as_bytes()).unwrap(); + let buf2 = 0_u64; + let r2 = Ref::<_, u64>::from_bytes(buf2.as_bytes()).unwrap(); + assert_eq!(r1, r2); + } + + #[test] + fn test_ne() { + let buf1 = 0_u64; + let r1 = Ref::<_, u64>::from_bytes(buf1.as_bytes()).unwrap(); + let buf2 = 1_u64; + let r2 = Ref::<_, u64>::from_bytes(buf2.as_bytes()).unwrap(); + assert_ne!(r1, r2); + } + + #[test] + fn test_ord() { + let buf1 = 0_u64; + let r1 = Ref::<_, u64>::from_bytes(buf1.as_bytes()).unwrap(); + let buf2 = 1_u64; + let r2 = Ref::<_, u64>::from_bytes(buf2.as_bytes()).unwrap(); + assert!(r1 < r2); + assert_eq!(PartialOrd::partial_cmp(&r1, &r2), Some(Ordering::Less)); + assert_eq!(Ord::cmp(&r1, &r2), Ordering::Less); + } +} diff --git a/vendor/zerocopy/src/split_at.rs b/vendor/zerocopy/src/split_at.rs new file mode 100644 index 00000000..be6454f2 --- /dev/null +++ b/vendor/zerocopy/src/split_at.rs @@ -0,0 +1,904 @@ +// Copyright 2025 The Fuchsia Authors +// +// Licensed under the 2-Clause BSD License <LICENSE-BSD or +// https://opensource.org/license/bsd-2-clause>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +use super::*; +use crate::pointer::invariant::{Aligned, Exclusive, Invariants, Shared, Valid}; + +/// Types that can be split in two. +/// +/// This trait generalizes Rust's existing support for splitting slices to +/// support slices and slice-based dynamically-sized types ("slice DSTs"). +/// +/// # Implementation +/// +/// **Do not implement this trait yourself!** Instead, use +/// [`#[derive(SplitAt)]`][derive]; e.g.: +/// +/// ``` +/// # use zerocopy_derive::{SplitAt, KnownLayout}; +/// #[derive(SplitAt, KnownLayout)] +/// #[repr(C)] +/// struct MyStruct<T: ?Sized> { +/// # /* +/// ..., +/// # */ +/// // `SplitAt` types must have at least one field. 
+/// field: T, +/// } +/// ``` +/// +/// This derive performs a sophisticated, compile-time safety analysis to +/// determine whether a type is `SplitAt`. +/// +/// # Safety +/// +/// This trait does not convey any safety guarantees to code outside this crate. +/// +/// You must not rely on the `#[doc(hidden)]` internals of `SplitAt`. Future +/// releases of zerocopy may make backwards-breaking changes to these items, +/// including changes that only affect soundness, which may cause code which +/// uses those items to silently become unsound. +/// +#[cfg_attr(feature = "derive", doc = "[derive]: zerocopy_derive::SplitAt")] +#[cfg_attr( + not(feature = "derive"), + doc = concat!("[derive]: https://docs.rs/zerocopy/", env!("CARGO_PKG_VERSION"), "/zerocopy/derive.SplitAt.html"), +)] +#[cfg_attr( + not(no_zerocopy_diagnostic_on_unimplemented_1_78_0), + diagnostic::on_unimplemented(note = "Consider adding `#[derive(SplitAt)]` to `{Self}`") +)] +// # Safety +// +// The trailing slice is well-aligned for its element type. `Self` is `[T]`, or +// a `repr(C)` or `repr(transparent)` slice DST. +pub unsafe trait SplitAt: KnownLayout<PointerMetadata = usize> { + /// The element type of the trailing slice. + type Elem; + + #[doc(hidden)] + fn only_derive_is_allowed_to_implement_this_trait() + where + Self: Sized; + + /// Unsafely splits `self` in two. + /// + /// # Safety + /// + /// The caller promises that `l_len` is not greater than the length of + /// `self`'s trailing slice. + #[inline] + #[must_use] + unsafe fn split_at_unchecked(&self, l_len: usize) -> Split<&Self> { + // SAFETY: By precondition on the caller, `l_len <= self.len()`. + unsafe { Split::<&Self>::new(self, l_len) } + } + + /// Attempts to split `self` in two. + /// + /// Returns `None` if `l_len` is greater than the length of `self`'s + /// trailing slice. + /// + /// # Examples + /// + /// ``` + /// use zerocopy::{SplitAt, FromBytes}; + /// # use zerocopy_derive::*; + /// + /// #[derive(SplitAt, FromBytes, KnownLayout, Immutable)] + /// #[repr(C)] + /// struct Packet { + /// length: u8, + /// body: [u8], + /// } + /// + /// // These bytes encode a `Packet`. + /// let bytes = &[4, 1, 2, 3, 4, 5, 6, 7, 8, 9][..]; + /// + /// let packet = Packet::ref_from_bytes(bytes).unwrap(); + /// + /// assert_eq!(packet.length, 4); + /// assert_eq!(packet.body, [1, 2, 3, 4, 5, 6, 7, 8, 9]); + /// + /// // Attempt to split `packet` at `length`. + /// let split = packet.split_at(packet.length as usize).unwrap(); + /// + /// // Use the `Immutable` bound on `Packet` to prove that it's okay to + /// // return concurrent references to `packet` and `rest`. + /// let (packet, rest) = split.via_immutable(); + /// + /// assert_eq!(packet.length, 4); + /// assert_eq!(packet.body, [1, 2, 3, 4]); + /// assert_eq!(rest, [5, 6, 7, 8, 9]); + /// ``` + #[inline] + #[must_use = "has no side effects"] + fn split_at(&self, l_len: usize) -> Option<Split<&Self>> { + MetadataOf::new_in_bounds(self, l_len).map( + #[inline(always)] + |l_len| { + // SAFETY: We have ensured that `l_len <= self.len()` (by + // post-condition on `MetadataOf::new_in_bounds`) + unsafe { Split::new(self, l_len.get()) } + }, + ) + } + + /// Unsafely splits `self` in two. + /// + /// # Safety + /// + /// The caller promises that `l_len` is not greater than the length of + /// `self`'s trailing slice. + #[inline] + #[must_use] + unsafe fn split_at_mut_unchecked(&mut self, l_len: usize) -> Split<&mut Self> { + // SAFETY: By precondition on the caller, `l_len <= self.len()`. 
+        unsafe { Split::<&mut Self>::new(self, l_len) }
+    }
+
+    /// Attempts to split `self` in two.
+    ///
+    /// Returns `None` if `l_len` is greater than the length of `self`'s
+    /// trailing slice, or if the given `l_len` would result in [the trailing
+    /// padding](KnownLayout#slice-dst-layout) of the left portion overlapping
+    /// the right portion.
+    ///
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use zerocopy::{SplitAt, FromBytes};
+    /// # use zerocopy_derive::*;
+    ///
+    /// #[derive(SplitAt, FromBytes, KnownLayout, IntoBytes)]
+    /// #[repr(C)]
+    /// struct Packet<B: ?Sized> {
+    ///     length: u8,
+    ///     body: B,
+    /// }
+    ///
+    /// // These bytes encode a `Packet`.
+    /// let mut bytes = &mut [4, 1, 2, 3, 4, 5, 6, 7, 8, 9][..];
+    ///
+    /// let packet = Packet::<[u8]>::mut_from_bytes(bytes).unwrap();
+    ///
+    /// assert_eq!(packet.length, 4);
+    /// assert_eq!(packet.body, [1, 2, 3, 4, 5, 6, 7, 8, 9]);
+    ///
+    /// {
+    ///     // Attempt to split `packet` at `length`.
+    ///     let split = packet.split_at_mut(packet.length as usize).unwrap();
+    ///
+    ///     // Use the `IntoBytes` bound on `Packet` to prove that it's okay to
+    ///     // return concurrent references to `packet` and `rest`.
+    ///     let (packet, rest) = split.via_into_bytes();
+    ///
+    ///     assert_eq!(packet.length, 4);
+    ///     assert_eq!(packet.body, [1, 2, 3, 4]);
+    ///     assert_eq!(rest, [5, 6, 7, 8, 9]);
+    ///
+    ///     rest.fill(0);
+    /// }
+    ///
+    /// assert_eq!(packet.length, 4);
+    /// assert_eq!(packet.body, [1, 2, 3, 4, 0, 0, 0, 0, 0]);
+    /// ```
+    #[inline]
+    fn split_at_mut(&mut self, l_len: usize) -> Option<Split<&mut Self>> {
+        MetadataOf::new_in_bounds(self, l_len).map(
+            #[inline(always)]
+            |l_len| {
+                // SAFETY: We have ensured that `l_len <= self.len()` (by
+                // post-condition on `MetadataOf::new_in_bounds`)
+                unsafe { Split::new(self, l_len.get()) }
+            },
+        )
+    }
+}
+
+// SAFETY: `[T]`'s trailing slice is `[T]`, which is trivially aligned.
+unsafe impl<T> SplitAt for [T] {
+    type Elem = T;
+
+    #[inline]
+    #[allow(dead_code)]
+    fn only_derive_is_allowed_to_implement_this_trait()
+    where
+        Self: Sized,
+    {
+    }
+}
+
+/// A `T` that has been split into two possibly-overlapping parts.
+///
+/// For some dynamically sized types, the padding that appears after the
+/// trailing slice field [is a dynamic function of the trailing slice
+/// length](KnownLayout#slice-dst-layout). If `T` is split at a length that
+/// requires trailing padding, the trailing padding of the left part of the
+/// split `T` will overlap the right part. If `T` is a mutable reference or
+/// permits interior mutation, you must ensure that the left and right parts do
+/// not overlap. You can do this at zero-cost using
+/// [`Self::via_immutable`], [`Self::via_into_bytes`], or
+/// [`Self::via_unaligned`], or with a dynamic check by using
+/// [`Self::via_runtime_check`].
+#[derive(Debug)]
+pub struct Split<T> {
+    /// A pointer to the source slice DST.
+    source: T,
+    /// The length of the future left half of `source`.
+    ///
+    /// # Safety
+    ///
+    /// If `source` is a pointer to a slice DST, `l_len` is no greater than
+    /// `source`'s length.
+    l_len: usize,
+}
+
+impl<T> Split<T> {
+    /// Produces a `Split` of `source` with `l_len`.
+    ///
+    /// # Safety
+    ///
+    /// `l_len` is no greater than `source`'s length.
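+    // Note: the constructor below does no checking of its own; callers uphold
+    // the length invariant, and whether the two halves may be referenced
+    // concurrently is decided later by the `via_*` accessors.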
+ #[inline(always)] + unsafe fn new(source: T, l_len: usize) -> Self { + Self { source, l_len } + } +} + +impl<'a, T> Split<&'a T> +where + T: ?Sized + SplitAt, +{ + #[inline(always)] + fn into_ptr(self) -> Split<Ptr<'a, T, (Shared, Aligned, Valid)>> { + let source = Ptr::from_ref(self.source); + // SAFETY: `Ptr::from_ref(self.source)` points to exactly `self.source` + // and thus maintains the invariants of `self` with respect to `l_len`. + unsafe { Split::new(source, self.l_len) } + } + + /// Produces the split parts of `self`, using [`Immutable`] to ensure that + /// it is sound to have concurrent references to both parts. + /// + /// # Examples + /// + /// ``` + /// use zerocopy::{SplitAt, FromBytes}; + /// # use zerocopy_derive::*; + /// + /// #[derive(SplitAt, FromBytes, KnownLayout, Immutable)] + /// #[repr(C)] + /// struct Packet { + /// length: u8, + /// body: [u8], + /// } + /// + /// // These bytes encode a `Packet`. + /// let bytes = &[4, 1, 2, 3, 4, 5, 6, 7, 8, 9][..]; + /// + /// let packet = Packet::ref_from_bytes(bytes).unwrap(); + /// + /// assert_eq!(packet.length, 4); + /// assert_eq!(packet.body, [1, 2, 3, 4, 5, 6, 7, 8, 9]); + /// + /// // Attempt to split `packet` at `length`. + /// let split = packet.split_at(packet.length as usize).unwrap(); + /// + /// // Use the `Immutable` bound on `Packet` to prove that it's okay to + /// // return concurrent references to `packet` and `rest`. + /// let (packet, rest) = split.via_immutable(); + /// + /// assert_eq!(packet.length, 4); + /// assert_eq!(packet.body, [1, 2, 3, 4]); + /// assert_eq!(rest, [5, 6, 7, 8, 9]); + /// ``` + #[must_use = "has no side effects"] + #[inline(always)] + pub fn via_immutable(self) -> (&'a T, &'a [T::Elem]) + where + T: Immutable, + { + let (l, r) = self.into_ptr().via_immutable(); + (l.as_ref(), r.as_ref()) + } + + /// Produces the split parts of `self`, using [`IntoBytes`] to ensure that + /// it is sound to have concurrent references to both parts. + /// + /// # Examples + /// + /// ``` + /// use zerocopy::{SplitAt, FromBytes}; + /// # use zerocopy_derive::*; + /// + /// #[derive(SplitAt, FromBytes, KnownLayout, Immutable, IntoBytes)] + /// #[repr(C)] + /// struct Packet<B: ?Sized> { + /// length: u8, + /// body: B, + /// } + /// + /// // These bytes encode a `Packet`. + /// let bytes = &[4, 1, 2, 3, 4, 5, 6, 7, 8, 9][..]; + /// + /// let packet = Packet::<[u8]>::ref_from_bytes(bytes).unwrap(); + /// + /// assert_eq!(packet.length, 4); + /// assert_eq!(packet.body, [1, 2, 3, 4, 5, 6, 7, 8, 9]); + /// + /// // Attempt to split `packet` at `length`. + /// let split = packet.split_at(packet.length as usize).unwrap(); + /// + /// // Use the `IntoBytes` bound on `Packet` to prove that it's okay to + /// // return concurrent references to `packet` and `rest`. + /// let (packet, rest) = split.via_into_bytes(); + /// + /// assert_eq!(packet.length, 4); + /// assert_eq!(packet.body, [1, 2, 3, 4]); + /// assert_eq!(rest, [5, 6, 7, 8, 9]); + /// ``` + #[must_use = "has no side effects"] + #[inline(always)] + pub fn via_into_bytes(self) -> (&'a T, &'a [T::Elem]) + where + T: IntoBytes, + { + let (l, r) = self.into_ptr().via_into_bytes(); + (l.as_ref(), r.as_ref()) + } + + /// Produces the split parts of `self`, using [`Unaligned`] to ensure that + /// it is sound to have concurrent references to both parts. 
+ /// + /// # Examples + /// + /// ``` + /// use zerocopy::{SplitAt, FromBytes}; + /// # use zerocopy_derive::*; + /// + /// #[derive(SplitAt, FromBytes, KnownLayout, Immutable, Unaligned)] + /// #[repr(C)] + /// struct Packet { + /// length: u8, + /// body: [u8], + /// } + /// + /// // These bytes encode a `Packet`. + /// let bytes = &[4, 1, 2, 3, 4, 5, 6, 7, 8, 9][..]; + /// + /// let packet = Packet::ref_from_bytes(bytes).unwrap(); + /// + /// assert_eq!(packet.length, 4); + /// assert_eq!(packet.body, [1, 2, 3, 4, 5, 6, 7, 8, 9]); + /// + /// // Attempt to split `packet` at `length`. + /// let split = packet.split_at(packet.length as usize).unwrap(); + /// + /// // Use the `Unaligned` bound on `Packet` to prove that it's okay to + /// // return concurrent references to `packet` and `rest`. + /// let (packet, rest) = split.via_unaligned(); + /// + /// assert_eq!(packet.length, 4); + /// assert_eq!(packet.body, [1, 2, 3, 4]); + /// assert_eq!(rest, [5, 6, 7, 8, 9]); + /// ``` + #[must_use = "has no side effects"] + #[inline(always)] + pub fn via_unaligned(self) -> (&'a T, &'a [T::Elem]) + where + T: Unaligned, + { + let (l, r) = self.into_ptr().via_unaligned(); + (l.as_ref(), r.as_ref()) + } + + /// Produces the split parts of `self`, using a dynamic check to ensure that + /// it is sound to have concurrent references to both parts. You should + /// prefer using [`Self::via_immutable`], [`Self::via_into_bytes`], or + /// [`Self::via_unaligned`], which have no runtime cost. + /// + /// Note that this check is overly conservative if `T` is [`Immutable`]; for + /// some types, this check will reject some splits which + /// [`Self::via_immutable`] will accept. + /// + /// # Examples + /// + /// ``` + /// use zerocopy::{SplitAt, FromBytes, IntoBytes, network_endian::U16}; + /// # use zerocopy_derive::*; + /// + /// #[derive(SplitAt, FromBytes, KnownLayout, Immutable, Debug)] + /// #[repr(C, align(2))] + /// struct Packet { + /// length: U16, + /// body: [u8], + /// } + /// + /// // These bytes encode a `Packet`. + /// let bytes = [ + /// 4u16.to_be(), + /// 1u16.to_be(), + /// 2u16.to_be(), + /// 3u16.to_be(), + /// 4u16.to_be() + /// ]; + /// + /// let packet = Packet::ref_from_bytes(bytes.as_bytes()).unwrap(); + /// + /// assert_eq!(packet.length, 4); + /// assert_eq!(packet.body, [0, 1, 0, 2, 0, 3, 0, 4]); + /// + /// // Attempt to split `packet` at `length`. + /// let split = packet.split_at(packet.length.into()).unwrap(); + /// + /// // Use a dynamic check to prove that it's okay to return concurrent + /// // references to `packet` and `rest`. + /// let (packet, rest) = split.via_runtime_check().unwrap(); + /// + /// assert_eq!(packet.length, 4); + /// assert_eq!(packet.body, [0, 1, 0, 2]); + /// assert_eq!(rest, [0, 3, 0, 4]); + /// + /// // Attempt to split `packet` at `length - 1`. + /// let idx = packet.length.get() - 1; + /// let split = packet.split_at(idx as usize).unwrap(); + /// + /// // Attempt (and fail) to use a dynamic check to prove that it's okay + /// // to return concurrent references to `packet` and `rest`. Note that + /// // this is a case of `via_runtime_check` being overly conservative. + /// // Although the left and right parts indeed overlap, the `Immutable` + /// // bound ensures that concurrently referencing these overlapping + /// // parts is sound. 
+ /// assert!(split.via_runtime_check().is_err()); + /// ``` + #[must_use = "has no side effects"] + #[inline(always)] + pub fn via_runtime_check(self) -> Result<(&'a T, &'a [T::Elem]), Self> { + match self.into_ptr().via_runtime_check() { + Ok((l, r)) => Ok((l.as_ref(), r.as_ref())), + Err(s) => Err(s.into_ref()), + } + } + + /// Unsafely produces the split parts of `self`. + /// + /// # Safety + /// + /// If `T` permits interior mutation, the trailing padding bytes of the left + /// portion must not overlap the right portion. For some dynamically sized + /// types, the padding that appears after the trailing slice field [is a + /// dynamic function of the trailing slice + /// length](KnownLayout#slice-dst-layout). Thus, for some types, this + /// condition is dependent on the length of the left portion. + #[must_use = "has no side effects"] + #[inline(always)] + pub unsafe fn via_unchecked(self) -> (&'a T, &'a [T::Elem]) { + // SAFETY: The aliasing of `self.into_ptr()` is not `Exclusive`, but the + // caller has promised that if `T` permits interior mutation then the + // left and right portions of `self` split at `l_len` do not overlap. + let (l, r) = unsafe { self.into_ptr().via_unchecked() }; + (l.as_ref(), r.as_ref()) + } +} + +impl<'a, T> Split<&'a mut T> +where + T: ?Sized + SplitAt, +{ + #[inline(always)] + fn into_ptr(self) -> Split<Ptr<'a, T, (Exclusive, Aligned, Valid)>> { + let source = Ptr::from_mut(self.source); + // SAFETY: `Ptr::from_mut(self.source)` points to exactly `self.source`, + // and thus maintains the invariants of `self` with respect to `l_len`. + unsafe { Split::new(source, self.l_len) } + } + + /// Produces the split parts of `self`, using [`IntoBytes`] to ensure that + /// it is sound to have concurrent references to both parts. + /// + /// # Examples + /// + /// ``` + /// use zerocopy::{SplitAt, FromBytes}; + /// # use zerocopy_derive::*; + /// + /// #[derive(SplitAt, FromBytes, KnownLayout, IntoBytes)] + /// #[repr(C)] + /// struct Packet<B: ?Sized> { + /// length: u8, + /// body: B, + /// } + /// + /// // These bytes encode a `Packet`. + /// let mut bytes = &mut [4, 1, 2, 3, 4, 5, 6, 7, 8, 9][..]; + /// + /// let packet = Packet::<[u8]>::mut_from_bytes(bytes).unwrap(); + /// + /// assert_eq!(packet.length, 4); + /// assert_eq!(packet.body, [1, 2, 3, 4, 5, 6, 7, 8, 9]); + /// + /// { + /// // Attempt to split `packet` at `length`. + /// let split = packet.split_at_mut(packet.length as usize).unwrap(); + /// + /// // Use the `IntoBytes` bound on `Packet` to prove that it's okay to + /// // return concurrent references to `packet` and `rest`. + /// let (packet, rest) = split.via_into_bytes(); + /// + /// assert_eq!(packet.length, 4); + /// assert_eq!(packet.body, [1, 2, 3, 4]); + /// assert_eq!(rest, [5, 6, 7, 8, 9]); + /// + /// rest.fill(0); + /// } + /// + /// assert_eq!(packet.length, 4); + /// assert_eq!(packet.body, [1, 2, 3, 4, 0, 0, 0, 0, 0]); + /// ``` + #[must_use = "has no side effects"] + #[inline(always)] + pub fn via_into_bytes(self) -> (&'a mut T, &'a mut [T::Elem]) + where + T: IntoBytes, + { + let (l, r) = self.into_ptr().via_into_bytes(); + (l.as_mut(), r.as_mut()) + } + + /// Produces the split parts of `self`, using [`Unaligned`] to ensure that + /// it is sound to have concurrent references to both parts. 
+ /// + /// # Examples + /// + /// ``` + /// use zerocopy::{SplitAt, FromBytes}; + /// # use zerocopy_derive::*; + /// + /// #[derive(SplitAt, FromBytes, KnownLayout, IntoBytes, Unaligned)] + /// #[repr(C)] + /// struct Packet<B: ?Sized> { + /// length: u8, + /// body: B, + /// } + /// + /// // These bytes encode a `Packet`. + /// let mut bytes = &mut [4, 1, 2, 3, 4, 5, 6, 7, 8, 9][..]; + /// + /// let packet = Packet::<[u8]>::mut_from_bytes(bytes).unwrap(); + /// + /// assert_eq!(packet.length, 4); + /// assert_eq!(packet.body, [1, 2, 3, 4, 5, 6, 7, 8, 9]); + /// + /// { + /// // Attempt to split `packet` at `length`. + /// let split = packet.split_at_mut(packet.length as usize).unwrap(); + /// + /// // Use the `Unaligned` bound on `Packet` to prove that it's okay to + /// // return concurrent references to `packet` and `rest`. + /// let (packet, rest) = split.via_unaligned(); + /// + /// assert_eq!(packet.length, 4); + /// assert_eq!(packet.body, [1, 2, 3, 4]); + /// assert_eq!(rest, [5, 6, 7, 8, 9]); + /// + /// rest.fill(0); + /// } + /// + /// assert_eq!(packet.length, 4); + /// assert_eq!(packet.body, [1, 2, 3, 4, 0, 0, 0, 0, 0]); + /// ``` + #[must_use = "has no side effects"] + #[inline(always)] + pub fn via_unaligned(self) -> (&'a mut T, &'a mut [T::Elem]) + where + T: Unaligned, + { + let (l, r) = self.into_ptr().via_unaligned(); + (l.as_mut(), r.as_mut()) + } + + /// Produces the split parts of `self`, using a dynamic check to ensure that + /// it is sound to have concurrent references to both parts. You should + /// prefer using [`Self::via_into_bytes`] or [`Self::via_unaligned`], which + /// have no runtime cost. + /// + /// # Examples + /// + /// ``` + /// use zerocopy::{SplitAt, FromBytes}; + /// # use zerocopy_derive::*; + /// + /// #[derive(SplitAt, FromBytes, KnownLayout, IntoBytes, Debug)] + /// #[repr(C)] + /// struct Packet<B: ?Sized> { + /// length: u8, + /// body: B, + /// } + /// + /// // These bytes encode a `Packet`. + /// let mut bytes = &mut [4, 1, 2, 3, 4, 5, 6, 7, 8, 9][..]; + /// + /// let packet = Packet::<[u8]>::mut_from_bytes(bytes).unwrap(); + /// + /// assert_eq!(packet.length, 4); + /// assert_eq!(packet.body, [1, 2, 3, 4, 5, 6, 7, 8, 9]); + /// + /// { + /// // Attempt to split `packet` at `length`. + /// let split = packet.split_at_mut(packet.length as usize).unwrap(); + /// + /// // Use a dynamic check to prove that it's okay to return concurrent + /// // references to `packet` and `rest`. + /// let (packet, rest) = split.via_runtime_check().unwrap(); + /// + /// assert_eq!(packet.length, 4); + /// assert_eq!(packet.body, [1, 2, 3, 4]); + /// assert_eq!(rest, [5, 6, 7, 8, 9]); + /// + /// rest.fill(0); + /// } + /// + /// assert_eq!(packet.length, 4); + /// assert_eq!(packet.body, [1, 2, 3, 4, 0, 0, 0, 0, 0]); + /// ``` + #[must_use = "has no side effects"] + #[inline(always)] + pub fn via_runtime_check(self) -> Result<(&'a mut T, &'a mut [T::Elem]), Self> { + match self.into_ptr().via_runtime_check() { + Ok((l, r)) => Ok((l.as_mut(), r.as_mut())), + Err(s) => Err(s.into_mut()), + } + } + + /// Unsafely produces the split parts of `self`. + /// + /// # Safety + /// + /// The trailing padding bytes of the left portion must not overlap the + /// right portion. For some dynamically sized types, the padding that + /// appears after the trailing slice field [is a dynamic function of the + /// trailing slice length](KnownLayout#slice-dst-layout). Thus, for some + /// types, this condition is dependent on the length of the left portion. 
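+    // For a concrete illustration, see `tests::test_split_at_overlapping`
+    // below: a `#[repr(C, align(2))]` slice DST with a one-byte prefix needs a
+    // trailing padding byte exactly when the left portion holds an even number
+    // of `u8` elements, so only odd split indices satisfy this condition.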
+    #[must_use = "has no side effects"]
+    #[inline(always)]
+    pub unsafe fn via_unchecked(self) -> (&'a mut T, &'a mut [T::Elem]) {
+        // SAFETY: The aliasing of `self.into_ptr()` is `Exclusive`, and the
+        // caller has promised that the left and right portions of `self` split
+        // at `l_len` do not overlap.
+        let (l, r) = unsafe { self.into_ptr().via_unchecked() };
+        (l.as_mut(), r.as_mut())
+    }
+}
+
+impl<'a, T, I> Split<Ptr<'a, T, I>>
+where
+    T: ?Sized + SplitAt,
+    I: Invariants<Alignment = Aligned, Validity = Valid>,
+{
+    fn into_ref(self) -> Split<&'a T>
+    where
+        I: Invariants<Aliasing = Shared>,
+    {
+        // SAFETY: `self.source.as_ref()` points to exactly the same referent as
+        // `self.source` and thus maintains the invariants of `self` with
+        // respect to `l_len`.
+        unsafe { Split::new(self.source.as_ref(), self.l_len) }
+    }
+
+    fn into_mut(self) -> Split<&'a mut T>
+    where
+        I: Invariants<Aliasing = Exclusive>,
+    {
+        // SAFETY: `self.source.as_mut()` points to exactly the same referent as
+        // `self.source` and thus maintains the invariants of `self` with
+        // respect to `l_len`.
+        unsafe { Split::new(self.source.unify_invariants().as_mut(), self.l_len) }
+    }
+
+    /// Produces the length of `self`'s left part.
+    #[inline(always)]
+    fn l_len(&self) -> MetadataOf<T> {
+        // SAFETY: By invariant on `Split`, `self.l_len` is not greater than the
+        // length of `self.source`.
+        unsafe { MetadataOf::<T>::new_unchecked(self.l_len) }
+    }
+
+    /// Produces the split parts of `self`, using [`Immutable`] to ensure that
+    /// it is sound to have concurrent references to both parts.
+    #[inline(always)]
+    fn via_immutable(self) -> (Ptr<'a, T, I>, Ptr<'a, [T::Elem], I>)
+    where
+        T: Immutable,
+        I: Invariants<Aliasing = Shared>,
+    {
+        // SAFETY: `Aliasing = Shared` and `T: Immutable`.
+        unsafe { self.via_unchecked() }
+    }
+
+    /// Produces the split parts of `self`, using [`IntoBytes`] to ensure that
+    /// it is sound to have concurrent references to both parts.
+    #[inline(always)]
+    fn via_into_bytes(self) -> (Ptr<'a, T, I>, Ptr<'a, [T::Elem], I>)
+    where
+        T: IntoBytes,
+    {
+        // SAFETY: By `T: IntoBytes`, `T` has no padding for any length.
+        // Consequently, `T` can be split into non-overlapping parts at any
+        // index.
+        unsafe { self.via_unchecked() }
+    }
+
+    /// Produces the split parts of `self`, using [`Unaligned`] to ensure that
+    /// it is sound to have concurrent references to both parts.
+    #[inline(always)]
+    fn via_unaligned(self) -> (Ptr<'a, T, I>, Ptr<'a, [T::Elem], I>)
+    where
+        T: Unaligned,
+    {
+        // SAFETY: By `T: SplitAt + Unaligned`, `T` is either a slice or a
+        // `repr(C)` or `repr(transparent)` slice DST that is well-aligned at
+        // any address and length. If `T` is a slice DST with alignment 1,
+        // `repr(C)` or `repr(transparent)` ensures that no padding is placed
+        // after the final element of the trailing slice. Consequently, `T` can
+        // be split into strictly non-overlapping parts at any index.
+        unsafe { self.via_unchecked() }
+    }
+
+    /// Produces the split parts of `self`, using a dynamic check to ensure that
+    /// it is sound to have concurrent references to both parts. You should
+    /// prefer using [`Self::via_immutable`], [`Self::via_into_bytes`], or
+    /// [`Self::via_unaligned`], which have no runtime cost.
+ #[inline(always)] + fn via_runtime_check(self) -> Result<(Ptr<'a, T, I>, Ptr<'a, [T::Elem], I>), Self> { + let l_len = self.l_len(); + // FIXME(#1290): Once we require `KnownLayout` on all fields, add an + // `IS_IMMUTABLE` associated const, and add `T::IS_IMMUTABLE ||` to the + // below check. + if l_len.padding_needed_for() == 0 { + // SAFETY: By `T: SplitAt`, `T` is either `[T]`, or a `repr(C)` or + // `repr(transparent)` slice DST, for which the trailing padding + // needed to accommodate `l_len` trailing elements is + // `l_len.padding_needed_for()`. If no trailing padding is required, + // the left and right parts are strictly non-overlapping. + Ok(unsafe { self.via_unchecked() }) + } else { + Err(self) + } + } + + /// Unsafely produces the split parts of `self`. + /// + /// # Safety + /// + /// The caller promises that if `I::Aliasing` is [`Exclusive`] or `T` + /// permits interior mutation, then `l_len.padding_needed_for() == 0`. + #[inline(always)] + unsafe fn via_unchecked(self) -> (Ptr<'a, T, I>, Ptr<'a, [T::Elem], I>) { + let l_len = self.l_len(); + let inner = self.source.as_inner(); + + // SAFETY: By invariant on `Self::l_len`, `l_len` is not greater than + // the length of `inner`'s trailing slice. + let (left, right) = unsafe { inner.split_at_unchecked(l_len) }; + + // Lemma 0: `left` and `right` conform to the aliasing invariant + // `I::Aliasing`. Proof: If `I::Aliasing` is `Exclusive` or `T` permits + // interior mutation, the caller promises that `l_len.padding_needed_for() + // == 0`. Consequently, by post-condition on `PtrInner::split_at_unchecked`, + // there is no trailing padding after `left`'s final element that would + // overlap into `right`. If `I::Aliasing` is shared and `T` forbids interior + // mutation, then overlap between their referents is permissible. + + // SAFETY: + // 0. `left` conforms to the aliasing invariant of `I::Aliasing`, by Lemma 0. + // 1. `left` conforms to the alignment invariant of `I::Alignment, because + // the referents of `left` and `Self` have the same address and type + // (and, thus, alignment requirement). + // 2. `left` conforms to the validity invariant of `I::Validity`, neither + // the type nor bytes of `left`'s referent have been changed. + let left = unsafe { Ptr::from_inner(left) }; + + // SAFETY: + // 0. `right` conforms to the aliasing invariant of `I::Aliasing`, by Lemma + // 0. + // 1. `right` conforms to the alignment invariant of `I::Alignment, because + // if `ptr` with `I::Alignment = Aligned`, then by invariant on `T: + // SplitAt`, the trailing slice of `ptr` (from which `right` is derived) + // will also be well-aligned. + // 2. `right` conforms to the validity invariant of `I::Validity`, + // because `right: [T::Elem]` is derived from the trailing slice of + // `ptr`, which, by contract on `T: SplitAt::Elem`, has type + // `[T::Elem]`. The `left` part cannot be used to invalidate `right`, + // because the caller promises that if `I::Aliasing` is `Exclusive` + // or `T` permits interior mutation, then `l_len.padding_needed_for() + // == 0` and thus the parts will be non-overlapping. 
+        let right = unsafe { Ptr::from_inner(right) };
+
+        (left, right)
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    #[cfg(feature = "derive")]
+    #[test]
+    fn test_split_at() {
+        use crate::{FromBytes, Immutable, IntoBytes, KnownLayout, SplitAt};
+
+        #[derive(FromBytes, KnownLayout, SplitAt, IntoBytes, Immutable, Debug)]
+        #[repr(C)]
+        struct SliceDst<const OFFSET: usize> {
+            prefix: [u8; OFFSET],
+            trailing: [u8],
+        }
+
+        #[allow(clippy::as_conversions)]
+        fn test_split_at<const OFFSET: usize, const BUFFER_SIZE: usize>() {
+            // Test `split_at`
+            let n: usize = BUFFER_SIZE - OFFSET;
+            let arr = [1; BUFFER_SIZE];
+            let dst = SliceDst::<OFFSET>::ref_from_bytes(&arr[..]).unwrap();
+            for i in 0..=n {
+                let (l, r) = dst.split_at(i).unwrap().via_runtime_check().unwrap();
+                let l_sum: u8 = l.trailing.iter().sum();
+                let r_sum: u8 = r.iter().sum();
+                assert_eq!(l_sum, i as u8);
+                assert_eq!(r_sum, (n - i) as u8);
+                assert_eq!(l_sum + r_sum, n as u8);
+            }
+
+            // Test `split_at_mut`
+            let n: usize = BUFFER_SIZE - OFFSET;
+            let mut arr = [1; BUFFER_SIZE];
+            let dst = SliceDst::<OFFSET>::mut_from_bytes(&mut arr[..]).unwrap();
+            for i in 0..=n {
+                let (l, r) = dst.split_at_mut(i).unwrap().via_runtime_check().unwrap();
+                let l_sum: u8 = l.trailing.iter().sum();
+                let r_sum: u8 = r.iter().sum();
+                assert_eq!(l_sum, i as u8);
+                assert_eq!(r_sum, (n - i) as u8);
+                assert_eq!(l_sum + r_sum, n as u8);
+            }
+        }
+
+        test_split_at::<0, 16>();
+        test_split_at::<1, 17>();
+        test_split_at::<2, 18>();
+    }
+
+    #[cfg(feature = "derive")]
+    #[test]
+    #[allow(clippy::as_conversions)]
+    fn test_split_at_overlapping() {
+        use crate::{FromBytes, Immutable, IntoBytes, KnownLayout, SplitAt};
+
+        #[derive(FromBytes, KnownLayout, SplitAt, Immutable)]
+        #[repr(C, align(2))]
+        struct SliceDst {
+            prefix: u8,
+            trailing: [u8],
+        }
+
+        const N: usize = 16;
+
+        let arr = [1u16; N];
+        let dst = SliceDst::ref_from_bytes(arr.as_bytes()).unwrap();
+
+        for i in 0..N {
+            let split = dst.split_at(i).unwrap().via_runtime_check();
+            if i % 2 == 1 {
+                assert!(split.is_ok());
+            } else {
+                assert!(split.is_err());
+            }
+        }
+    }
+}
diff --git a/vendor/zerocopy/src/util/macro_util.rs b/vendor/zerocopy/src/util/macro_util.rs
new file mode 100644
index 00000000..3412b8a1
--- /dev/null
+++ b/vendor/zerocopy/src/util/macro_util.rs
@@ -0,0 +1,1346 @@
+// Copyright 2022 The Fuchsia Authors
+//
+// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0
+// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT
+// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option.
+// This file may not be copied, modified, or distributed except according to
+// those terms.
+
+//! Utilities used by macros and by `zerocopy-derive`.
+//!
+//! These are defined here in `zerocopy` rather than in code generated by macros or
+//! by `zerocopy-derive` so that they can be compiled once rather than
+//! recompiled for every invocation (e.g., if they were defined in generated
+//! code, then deriving `IntoBytes` and `FromBytes` on three different types
+//! would result in the code in question being emitted and compiled six
+//! different times).
+
+#![allow(missing_debug_implementations)]
+
+// FIXME(#29), FIXME(https://github.com/rust-lang/rust/issues/69835): Remove
+// this `cfg` when `size_of_val_raw` is stabilized.
+#[cfg(__ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS)] +#[cfg(not(target_pointer_width = "16"))] +use core::ptr::{self, NonNull}; +use core::{ + marker::PhantomData, + mem::{self, ManuallyDrop}, +}; + +use crate::{ + pointer::{ + invariant::{self, BecauseExclusive, BecauseImmutable, Invariants}, + BecauseInvariantsEq, InvariantsEq, SizeEq, TryTransmuteFromPtr, + }, + FromBytes, FromZeros, Immutable, IntoBytes, KnownLayout, Ptr, TryFromBytes, ValidityError, +}; + +/// Projects the type of the field at `Index` in `Self`. +/// +/// The `Index` parameter is any sort of handle that identifies the field; its +/// definition is the obligation of the implementer. +/// +/// # Safety +/// +/// Unsafe code may assume that this accurately reflects the definition of +/// `Self`. +pub unsafe trait Field<Index> { + /// The type of the field at `Index`. + type Type: ?Sized; +} + +#[cfg_attr( + not(no_zerocopy_diagnostic_on_unimplemented_1_78_0), + diagnostic::on_unimplemented( + message = "`{T}` has {PADDING_BYTES} total byte(s) of padding", + label = "types with padding cannot implement `IntoBytes`", + note = "consider using `zerocopy::Unalign` to lower the alignment of individual fields", + note = "consider adding explicit fields where padding would be", + note = "consider using `#[repr(packed)]` to remove padding" + ) +)] +pub trait PaddingFree<T: ?Sized, const PADDING_BYTES: usize> {} +impl<T: ?Sized> PaddingFree<T, 0> for () {} + +// FIXME(#1112): In the slice DST case, we should delegate to *both* +// `PaddingFree` *and* `DynamicPaddingFree` (and probably rename `PaddingFree` +// to `StaticPaddingFree` or something - or introduce a third trait with that +// name) so that we can have more clear error messages. + +#[cfg_attr( + not(no_zerocopy_diagnostic_on_unimplemented_1_78_0), + diagnostic::on_unimplemented( + message = "`{T}` has one or more padding bytes", + label = "types with padding cannot implement `IntoBytes`", + note = "consider using `zerocopy::Unalign` to lower the alignment of individual fields", + note = "consider adding explicit fields where padding would be", + note = "consider using `#[repr(packed)]` to remove padding" + ) +)] +pub trait DynamicPaddingFree<T: ?Sized, const HAS_PADDING: bool> {} +impl<T: ?Sized> DynamicPaddingFree<T, false> for () {} + +/// A type whose size is equal to `align_of::<T>()`. +#[repr(C)] +pub struct AlignOf<T> { + // This field ensures that: + // - The size is always at least 1 (the minimum possible alignment). + // - If the alignment is greater than 1, Rust has to round up to the next + // multiple of it in order to make sure that `Align`'s size is a multiple + // of that alignment. Without this field, its size could be 0, which is a + // valid multiple of any alignment. + _u: u8, + _a: [T; 0], +} + +impl<T> AlignOf<T> { + #[inline(never)] // Make `missing_inline_in_public_items` happy. + #[cfg_attr( + all(coverage_nightly, __ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS), + coverage(off) + )] + pub fn into_t(self) -> T { + unreachable!() + } +} + +/// A type whose size is equal to `max(align_of::<T>(), align_of::<U>())`. +#[repr(C)] +pub union MaxAlignsOf<T, U> { + _t: ManuallyDrop<AlignOf<T>>, + _u: ManuallyDrop<AlignOf<U>>, +} + +impl<T, U> MaxAlignsOf<T, U> { + #[inline(never)] // Make `missing_inline_in_public_items` happy. 
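+    // (This works because a `repr(C)` union is as large as its largest field,
+    // rounded up to the union's alignment, and each `AlignOf` field's size
+    // equals the corresponding type's alignment, so the union's size is the
+    // larger of the two alignments.)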
+ #[cfg_attr( + all(coverage_nightly, __ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS), + coverage(off) + )] + pub fn new(_t: T, _u: U) -> MaxAlignsOf<T, U> { + unreachable!() + } +} + +#[cfg(__ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS)] +#[cfg(not(target_pointer_width = "16"))] +const _64K: usize = 1 << 16; + +// FIXME(#29), FIXME(https://github.com/rust-lang/rust/issues/69835): Remove +// this `cfg` when `size_of_val_raw` is stabilized. +#[cfg(__ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS)] +#[cfg(not(target_pointer_width = "16"))] +#[repr(C, align(65536))] +struct Aligned64kAllocation([u8; _64K]); + +/// A pointer to an aligned allocation of size 2^16. +/// +/// # Safety +/// +/// `ALIGNED_64K_ALLOCATION` is guaranteed to point to the entirety of an +/// allocation with size and alignment 2^16, and to have valid provenance. +// FIXME(#29), FIXME(https://github.com/rust-lang/rust/issues/69835): Remove +// this `cfg` when `size_of_val_raw` is stabilized. +#[cfg(__ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS)] +#[cfg(not(target_pointer_width = "16"))] +pub const ALIGNED_64K_ALLOCATION: NonNull<[u8]> = { + const REF: &Aligned64kAllocation = &Aligned64kAllocation([0; _64K]); + let ptr: *const Aligned64kAllocation = REF; + let ptr: *const [u8] = ptr::slice_from_raw_parts(ptr.cast(), _64K); + // SAFETY: + // - `ptr` is derived from a Rust reference, which is guaranteed to be + // non-null. + // - `ptr` is derived from an `&Aligned64kAllocation`, which has size and + // alignment `_64K` as promised. Its length is initialized to `_64K`, + // which means that it refers to the entire allocation. + // - `ptr` is derived from a Rust reference, which is guaranteed to have + // valid provenance. + // + // FIXME(#429): Once `NonNull::new_unchecked` docs document that it + // preserves provenance, cite those docs. + // FIXME: Replace this `as` with `ptr.cast_mut()` once our MSRV >= 1.65 + #[allow(clippy::as_conversions)] + unsafe { + NonNull::new_unchecked(ptr as *mut _) + } +}; + +/// Computes the offset of the base of the field `$trailing_field_name` within +/// the type `$ty`. +/// +/// `trailing_field_offset!` produces code which is valid in a `const` context. +// FIXME(#29), FIXME(https://github.com/rust-lang/rust/issues/69835): Remove +// this `cfg` when `size_of_val_raw` is stabilized. +#[cfg(__ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS)] +#[doc(hidden)] // `#[macro_export]` bypasses this module's `#[doc(hidden)]`. +#[macro_export] +macro_rules! trailing_field_offset { + ($ty:ty, $trailing_field_name:tt) => {{ + let min_size = { + let zero_elems: *const [()] = + $crate::util::macro_util::core_reexport::ptr::slice_from_raw_parts( + $crate::util::macro_util::core_reexport::ptr::NonNull::<()>::dangling() + .as_ptr() + .cast_const(), + 0, + ); + // SAFETY: + // - If `$ty` is `Sized`, `size_of_val_raw` is always safe to call. + // - Otherwise: + // - If `$ty` is not a slice DST, this pointer conversion will + // fail due to "mismatched vtable kinds", and compilation will + // fail. + // - If `$ty` is a slice DST, we have constructed `zero_elems` to + // have zero trailing slice elements. Per the `size_of_val_raw` + // docs, "For the special case where the dynamic tail length is + // 0, this function is safe to call." 
[1] + // + // [1] https://doc.rust-lang.org/nightly/std/mem/fn.size_of_val_raw.html + unsafe { + #[allow(clippy::as_conversions)] + $crate::util::macro_util::core_reexport::mem::size_of_val_raw( + zero_elems as *const $ty, + ) + } + }; + + assert!(min_size <= _64K); + + #[allow(clippy::as_conversions)] + let ptr = ALIGNED_64K_ALLOCATION.as_ptr() as *const $ty; + + // SAFETY: + // - Thanks to the preceding `assert!`, we know that the value with zero + // elements fits in `_64K` bytes, and thus in the allocation addressed + // by `ALIGNED_64K_ALLOCATION`. The offset of the trailing field is + // guaranteed to be no larger than this size, so this field projection + // is guaranteed to remain in-bounds of its allocation. + // - Because the minimum size is no larger than `_64K` bytes, and + // because an object's size must always be a multiple of its alignment + // [1], we know that `$ty`'s alignment is no larger than `_64K`. The + // allocation addressed by `ALIGNED_64K_ALLOCATION` is guaranteed to + // be aligned to `_64K`, so `ptr` is guaranteed to satisfy `$ty`'s + // alignment. + // - As required by `addr_of!`, we do not write through `field`. + // + // Note that, as of [2], this requirement is technically unnecessary + // for Rust versions >= 1.75.0, but no harm in guaranteeing it anyway + // until we bump our MSRV. + // + // [1] Per https://doc.rust-lang.org/reference/type-layout.html: + // + // The size of a value is always a multiple of its alignment. + // + // [2] https://github.com/rust-lang/reference/pull/1387 + let field = unsafe { + $crate::util::macro_util::core_reexport::ptr::addr_of!((*ptr).$trailing_field_name) + }; + // SAFETY: + // - Both `ptr` and `field` are derived from the same allocated object. + // - By the preceding safety comment, `field` is in bounds of that + // allocated object. + // - The distance, in bytes, between `ptr` and `field` is required to be + // a multiple of the size of `u8`, which is trivially true because + // `u8`'s size is 1. + // - The distance, in bytes, cannot overflow `isize`. This is guaranteed + // because no allocated object can have a size larger than can fit in + // `isize`. [1] + // - The distance being in-bounds cannot rely on wrapping around the + // address space. This is guaranteed because the same is guaranteed of + // allocated objects. [1] + // + // [1] FIXME(#429), FIXME(https://github.com/rust-lang/rust/pull/116675): + // Once these are guaranteed in the Reference, cite it. + let offset = unsafe { field.cast::<u8>().offset_from(ptr.cast::<u8>()) }; + // Guaranteed not to be lossy: `field` comes after `ptr`, so the offset + // from `ptr` to `field` is guaranteed to be positive. + assert!(offset >= 0); + Some( + #[allow(clippy::as_conversions)] + { + offset as usize + }, + ) + }}; +} + +/// Computes alignment of `$ty: ?Sized`. +/// +/// `align_of!` produces code which is valid in a `const` context. +// FIXME(#29), FIXME(https://github.com/rust-lang/rust/issues/69835): Remove +// this `cfg` when `size_of_val_raw` is stabilized. +#[cfg(__ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS)] +#[doc(hidden)] // `#[macro_export]` bypasses this module's `#[doc(hidden)]`. +#[macro_export] +macro_rules! 
align_of {
+    ($ty:ty) => {{
+        // SAFETY: `OffsetOfTrailingIsAlignment` is `repr(C)`, and its layout is
+        // guaranteed [1] to begin with the single-byte layout for `_byte`,
+        // followed by the padding needed to align `_trailing`, then the layout
+        // for `_trailing`, and finally any trailing padding bytes needed to
+        // correctly-align the entire struct.
+        //
+        // This macro computes the alignment of `$ty` by counting the number of
+        // bytes preceding `_trailing`. For instance, if the alignment of `$ty`
+        // is `1`, then no padding is required to align `_trailing` and it will be
+        // located immediately after `_byte` at offset 1. If the alignment of
+        // `$ty` is 2, then a single padding byte is required before
+        // `_trailing`, and `_trailing` will be located at offset 2.
+
+        // This correspondence between offset and alignment holds for all valid
+        // Rust alignments, and we confirm this exhaustively (or, at least up to
+        // the maximum alignment supported by `trailing_field_offset!`) in
+        // `test_align_of_dst`.
+        //
+        // [1]: https://doc.rust-lang.org/nomicon/other-reprs.html#reprc
+
+        #[repr(C)]
+        struct OffsetOfTrailingIsAlignment {
+            _byte: u8,
+            _trailing: $ty,
+        }
+
+        trailing_field_offset!(OffsetOfTrailingIsAlignment, _trailing)
+    }};
+}
+
+mod size_to_tag {
+    pub trait SizeToTag<const SIZE: usize> {
+        type Tag;
+    }
+
+    impl SizeToTag<1> for () {
+        type Tag = u8;
+    }
+    impl SizeToTag<2> for () {
+        type Tag = u16;
+    }
+    impl SizeToTag<4> for () {
+        type Tag = u32;
+    }
+    impl SizeToTag<8> for () {
+        type Tag = u64;
+    }
+    impl SizeToTag<16> for () {
+        type Tag = u128;
+    }
+}
+
+/// An alias for the unsigned integer of the given size in bytes.
+#[doc(hidden)]
+pub type SizeToTag<const SIZE: usize> = <() as size_to_tag::SizeToTag<SIZE>>::Tag;
+
+// We put `Sized` in its own module so it can have the same name as the standard
+// library `Sized` without shadowing it in the parent module.
+#[cfg(not(no_zerocopy_diagnostic_on_unimplemented_1_78_0))]
+mod __size_of {
+    #[diagnostic::on_unimplemented(
+        message = "`{Self}` is unsized",
+        label = "`IntoBytes` needs all field types to be `Sized` in order to determine whether there is padding",
+        note = "consider using `#[repr(packed)]` to remove padding",
+        note = "`IntoBytes` does not require the fields of `#[repr(packed)]` types to be `Sized`"
+    )]
+    pub trait Sized: core::marker::Sized {}
+    impl<T: core::marker::Sized> Sized for T {}
+
+    #[inline(always)]
+    #[must_use]
+    #[allow(clippy::needless_maybe_sized)]
+    pub const fn size_of<T: Sized + ?core::marker::Sized>() -> usize {
+        core::mem::size_of::<T>()
+    }
+}
+
+#[cfg(no_zerocopy_diagnostic_on_unimplemented_1_78_0)]
+pub use core::mem::size_of;
+
+#[cfg(not(no_zerocopy_diagnostic_on_unimplemented_1_78_0))]
+pub use __size_of::size_of;
+
+/// How many padding bytes does the struct type `$t` have?
+///
+/// `$ts` is the list of the type of every field in `$t`. `$t` must be a struct
+/// type, or else `struct_padding!`'s result may be meaningless.
+///
+/// Note that `struct_padding!`'s results are independent of `repr` since they
+/// only consider the size of the type and the sizes of the fields. Whatever the
+/// repr, the size of the type already takes into account any padding that the
+/// compiler has decided to add. Structs with well-defined representations (such
+/// as `repr(C)`) can use this macro to check for padding. Note that while this
+/// may yield some consistent value for some `repr(Rust)` structs, it is not
+/// guaranteed across platforms or compilations.
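+// For example, for a `#[repr(C)] struct S { a: u8, b: u16 }`, `size_of::<S>()`
+// is 4 while the field sizes sum to 3, so `struct_padding!(S, [u8, u16])`
+// evaluates to 1, accounting for the single padding byte inserted after `a`.
+// (`S` here is purely illustrative.)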
+#[doc(hidden)] // `#[macro_export]` bypasses this module's `#[doc(hidden)]`. +#[macro_export] +macro_rules! struct_padding { + ($t:ty, [$($ts:ty),*]) => { + $crate::util::macro_util::size_of::<$t>() - (0 $(+ $crate::util::macro_util::size_of::<$ts>())*) + }; +} + +/// Does the `repr(C)` struct type `$t` have padding? +/// +/// `$ts` is the list of the type of every field in `$t`. `$t` must be a +/// `repr(C)` struct type, or else `struct_has_padding!`'s result may be +/// meaningless. +#[doc(hidden)] // `#[macro_export]` bypasses this module's `#[doc(hidden)]`. +#[macro_export] +macro_rules! repr_c_struct_has_padding { + ($t:ty, [$($ts:tt),*]) => {{ + let layout = $crate::DstLayout::for_repr_c_struct( + $crate::util::macro_util::core_reexport::option::Option::None, + $crate::util::macro_util::core_reexport::option::Option::None, + &[$($crate::repr_c_struct_has_padding!(@field $ts),)*] + ); + layout.requires_static_padding() || layout.requires_dynamic_padding() + }}; + (@field ([$t:ty])) => { + <[$t] as $crate::KnownLayout>::LAYOUT + }; + (@field ($t:ty)) => { + $crate::DstLayout::for_unpadded_type::<$t>() + }; + (@field [$t:ty]) => { + <[$t] as $crate::KnownLayout>::LAYOUT + }; + (@field $t:ty) => { + $crate::DstLayout::for_unpadded_type::<$t>() + }; +} + +/// Does the union type `$t` have padding? +/// +/// `$ts` is the list of the type of every field in `$t`. `$t` must be a union +/// type, or else `union_padding!`'s result may be meaningless. +/// +/// Note that `union_padding!`'s results are independent of `repr` since they +/// only consider the size of the type and the sizes of the fields. Whatever the +/// repr, the size of the type already takes into account any padding that the +/// compiler has decided to add. Unions with well-defined representations (such +/// as `repr(C)`) can use this macro to check for padding. Note that while this +/// may yield some consistent value for some `repr(Rust)` unions, it is not +/// guaranteed across platforms or compilations. +#[doc(hidden)] // `#[macro_export]` bypasses this module's `#[doc(hidden)]`. +#[macro_export] +macro_rules! union_padding { + ($t:ty, [$($ts:ty),*]) => {{ + let mut max = 0; + $({ + let padding = $crate::util::macro_util::size_of::<$t>() - $crate::util::macro_util::size_of::<$ts>(); + if padding > max { + max = padding; + } + })* + max + }}; +} + +/// How many padding bytes does the enum type `$t` have? +/// +/// `$disc` is the type of the enum tag, and `$ts` is a list of fields in each +/// square-bracket-delimited variant. `$t` must be an enum, or else +/// `enum_padding!`'s result may be meaningless. An enum has padding if any of +/// its variant structs [1][2] contain padding, and so all of the variants of an +/// enum must be "full" in order for the enum to not have padding. +/// +/// The results of `enum_padding!` require that the enum is not `repr(Rust)`, as +/// `repr(Rust)` enums may niche the enum's tag and reduce the total number of +/// bytes required to represent the enum as a result. As long as the enum is +/// `repr(C)`, `repr(int)`, or `repr(C, int)`, this will consistently return +/// whether the enum contains any padding bytes. +/// +/// [1]: https://doc.rust-lang.org/1.81.0/reference/type-layout.html#reprc-enums-with-fields +/// [2]: https://doc.rust-lang.org/1.81.0/reference/type-layout.html#primitive-representation-of-enums-with-fields +#[doc(hidden)] // `#[macro_export]` bypasses this module's `#[doc(hidden)]`. +#[macro_export] +macro_rules! 
enum_padding { + ($t:ty, $disc:ty, $([$($ts:ty),*]),*) => {{ + let mut max = 0; + $({ + let padding = $crate::util::macro_util::size_of::<$t>() + - ( + $crate::util::macro_util::size_of::<$disc>() + $(+ $crate::util::macro_util::size_of::<$ts>())* + ); + if padding > max { + max = padding; + } + })* + max + }}; +} + +/// Does `t` have alignment greater than or equal to `u`? If not, this macro +/// produces a compile error. It must be invoked in a dead codepath. This is +/// used in `transmute_ref!` and `transmute_mut!`. +#[doc(hidden)] // `#[macro_export]` bypasses this module's `#[doc(hidden)]`. +#[macro_export] +macro_rules! assert_align_gt_eq { + ($t:ident, $u: ident) => {{ + // The comments here should be read in the context of this macro's + // invocations in `transmute_ref!` and `transmute_mut!`. + if false { + // The type wildcard in this bound is inferred to be `T` because + // `align_of.into_t()` is assigned to `t` (which has type `T`). + let align_of: $crate::util::macro_util::AlignOf<_> = unreachable!(); + $t = align_of.into_t(); + // `max_aligns` is inferred to have type `MaxAlignsOf<T, U>` because + // of the inferred types of `t` and `u`. + let mut max_aligns = $crate::util::macro_util::MaxAlignsOf::new($t, $u); + + // This transmute will only compile successfully if + // `align_of::<T>() == max(align_of::<T>(), align_of::<U>())` - in + // other words, if `align_of::<T>() >= align_of::<U>()`. + // + // SAFETY: This code is never run. + max_aligns = unsafe { + // Clippy: We can't annotate the types; this macro is designed + // to infer the types from the calling context. + #[allow(clippy::missing_transmute_annotations)] + $crate::util::macro_util::core_reexport::mem::transmute(align_of) + }; + } else { + loop {} + } + }}; +} + +/// Do `t` and `u` have the same size? If not, this macro produces a compile +/// error. It must be invoked in a dead codepath. This is used in +/// `transmute_ref!` and `transmute_mut!`. +#[doc(hidden)] // `#[macro_export]` bypasses this module's `#[doc(hidden)]`. +#[macro_export] +macro_rules! assert_size_eq { + ($t:ident, $u: ident) => {{ + // The comments here should be read in the context of this macro's + // invocations in `transmute_ref!` and `transmute_mut!`. + if false { + // SAFETY: This code is never run. + $u = unsafe { + // Clippy: + // - It's okay to transmute a type to itself. + // - We can't annotate the types; this macro is designed to + // infer the types from the calling context. + #[allow(clippy::useless_transmute, clippy::missing_transmute_annotations)] + $crate::util::macro_util::core_reexport::mem::transmute($t) + }; + } else { + loop {} + } + }}; +} + +/// Is a given source a valid instance of `Dst`? +/// +/// If so, returns `src` casted to a `Ptr<Dst, _>`. Otherwise returns `None`. +/// +/// # Safety +/// +/// Unsafe code may assume that, if `try_cast_or_pme(src)` returns `Ok`, +/// `*src` is a bit-valid instance of `Dst`, and that the size of `Src` is +/// greater than or equal to the size of `Dst`. +/// +/// Unsafe code may assume that, if `try_cast_or_pme(src)` returns `Err`, the +/// encapsulated `Ptr` value is the original `src`. `try_cast_or_pme` cannot +/// guarantee that the referent has not been modified, as it calls user-defined +/// code (`TryFromBytes::is_bit_valid`). +/// +/// # Panics +/// +/// `try_cast_or_pme` may either produce a post-monomorphization error or a +/// panic if `Dst` not the same size as `Src`. Otherwise, `try_cast_or_pme` +/// panics under the same circumstances as [`is_bit_valid`]. 
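The `enum_padding!` documentation above restricts the macro to `repr(C)`, `repr(int)`, and `repr(C, int)` enums; the `repr(int)` and `repr(C, int)` forms place the tag differently, which is what the `test_enum_padding` cases later in this file exercise. A standalone sketch of that difference (type names are illustrative):

    use core::mem::size_of;

    // A 2-aligned field, mirroring the `U16` helper in the tests below.
    #[allow(dead_code)]
    #[repr(align(2))]
    struct U16(u16);

    // `repr(u8)`: the tag is folded into each variant's field struct, so `A`
    // is laid out as `{ tag: u8, u8, U16 }` with no padding.
    #[allow(dead_code)]
    #[repr(u8)]
    enum Packed {
        A(u8, U16),
    }

    // `repr(u8, C)`: the tag sits in an outer struct ahead of a union of the
    // variants, so padding appears after the tag and inside the variant.
    #[allow(dead_code)]
    #[repr(u8, C)]
    enum TagOutside {
        A(u8, U16),
    }

    fn main() {
        assert_eq!(size_of::<Packed>(), 4); // 1 (tag) + 1 + 2
        assert_eq!(size_of::<TagOutside>(), 6); // 1 (tag) + 1 (pad) + 4 (payload)
    }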
+/// +/// [`is_bit_valid`]: TryFromBytes::is_bit_valid +#[doc(hidden)] +#[inline] +fn try_cast_or_pme<Src, Dst, I, R, S>( + src: Ptr<'_, Src, I>, +) -> Result< + Ptr<'_, Dst, (I::Aliasing, invariant::Unaligned, invariant::Valid)>, + ValidityError<Ptr<'_, Src, I>, Dst>, +> +where + // FIXME(#2226): There should be a `Src: FromBytes` bound here, but doing so + // requires deeper surgery. + Src: invariant::Read<I::Aliasing, R>, + Dst: TryFromBytes + + invariant::Read<I::Aliasing, R> + + TryTransmuteFromPtr<Dst, I::Aliasing, invariant::Initialized, invariant::Valid, S>, + I: Invariants<Validity = invariant::Initialized>, + I::Aliasing: invariant::Reference, +{ + static_assert!(Src, Dst => mem::size_of::<Dst>() == mem::size_of::<Src>()); + + // SAFETY: This is a pointer cast, satisfying the following properties: + // - `p as *mut Dst` addresses a subset of the `bytes` addressed by `src`, + // because we assert above that the size of `Dst` equal to the size of + // `Src`. + // - `p as *mut Dst` is a provenance-preserving cast + #[allow(clippy::multiple_unsafe_ops_per_block)] + let c_ptr = unsafe { src.cast_unsized(|p| cast!(p)) }; + + match c_ptr.try_into_valid() { + Ok(ptr) => Ok(ptr), + Err(err) => { + // Re-cast `Ptr<Dst>` to `Ptr<Src>`. + let ptr = err.into_src(); + // SAFETY: This is a pointer cast, satisfying the following + // properties: + // - `p as *mut Src` addresses a subset of the `bytes` addressed by + // `ptr`, because we assert above that the size of `Dst` is equal + // to the size of `Src`. + // - `p as *mut Src` is a provenance-preserving cast + #[allow(clippy::multiple_unsafe_ops_per_block)] + let ptr = unsafe { ptr.cast_unsized(|p| cast!(p)) }; + // SAFETY: `ptr` is `src`, and has the same alignment invariant. + let ptr = unsafe { ptr.assume_alignment::<I::Alignment>() }; + // SAFETY: `ptr` is `src` and has the same validity invariant. + let ptr = unsafe { ptr.assume_validity::<I::Validity>() }; + Err(ValidityError::new(ptr.unify_invariants())) + } + } +} + +/// Attempts to transmute `Src` into `Dst`. +/// +/// A helper for `try_transmute!`. +/// +/// # Panics +/// +/// `try_transmute` may either produce a post-monomorphization error or a panic +/// if `Dst` is bigger than `Src`. Otherwise, `try_transmute` panics under the +/// same circumstances as [`is_bit_valid`]. +/// +/// [`is_bit_valid`]: TryFromBytes::is_bit_valid +#[inline(always)] +pub fn try_transmute<Src, Dst>(src: Src) -> Result<Dst, ValidityError<Src, Dst>> +where + Src: IntoBytes, + Dst: TryFromBytes, +{ + static_assert!(Src, Dst => mem::size_of::<Dst>() == mem::size_of::<Src>()); + + let mu_src = mem::MaybeUninit::new(src); + // SAFETY: By invariant on `&`, the following are satisfied: + // - `&mu_src` is valid for reads + // - `&mu_src` is properly aligned + // - `&mu_src`'s referent is bit-valid + let mu_src_copy = unsafe { core::ptr::read(&mu_src) }; + // SAFETY: `MaybeUninit` has no validity constraints. + let mut mu_dst: mem::MaybeUninit<Dst> = + unsafe { crate::util::transmute_unchecked(mu_src_copy) }; + + let ptr = Ptr::from_mut(&mut mu_dst); + + // SAFETY: Since `Src: IntoBytes`, and since `size_of::<Src>() == + // size_of::<Dst>()` by the preceding assertion, all of `mu_dst`'s bytes are + // initialized. + let ptr = unsafe { ptr.assume_validity::<invariant::Initialized>() }; + + // SAFETY: `MaybeUninit<T>` and `T` have the same size [1], so this cast + // preserves the referent's size. This cast preserves provenance. 
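The function being defined here is, per its doc comment, the helper behind the public `try_transmute!` macro. A minimal usage sketch of that macro, relying only on the standard `u8: IntoBytes` and `bool: TryFromBytes` impls:

    use zerocopy::try_transmute;

    fn main() {
        // 1 is a valid bit pattern for `bool`, so the conversion succeeds.
        let ok: Result<bool, _> = try_transmute!(1u8);
        assert!(matches!(ok, Ok(true)));

        // 3 is not a valid `bool`; the original value is handed back inside
        // the error.
        let bad: Result<bool, _> = try_transmute!(3u8);
        assert!(bad.is_err());
    }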
+ // + // [1] Per https://doc.rust-lang.org/1.81.0/std/mem/union.MaybeUninit.html#layout-1: + // + // `MaybeUninit<T>` is guaranteed to have the same size, alignment, and + // ABI as `T` + let ptr: Ptr<'_, Dst, _> = unsafe { + ptr.cast_unsized(|ptr: crate::pointer::PtrInner<'_, mem::MaybeUninit<Dst>>| { + ptr.cast_sized() + }) + }; + + if Dst::is_bit_valid(ptr.forget_aligned()) { + // SAFETY: Since `Dst::is_bit_valid`, we know that `ptr`'s referent is + // bit-valid for `Dst`. `ptr` points to `mu_dst`, and no intervening + // operations have mutated it, so it is a bit-valid `Dst`. + Ok(unsafe { mu_dst.assume_init() }) + } else { + // SAFETY: `mu_src` was constructed from `src` and never modified, so it + // is still bit-valid. + Err(ValidityError::new(unsafe { mu_src.assume_init() })) + } +} + +/// Attempts to transmute `&Src` into `&Dst`. +/// +/// A helper for `try_transmute_ref!`. +/// +/// # Panics +/// +/// `try_transmute_ref` may either produce a post-monomorphization error or a +/// panic if `Dst` is bigger or has a stricter alignment requirement than `Src`. +/// Otherwise, `try_transmute_ref` panics under the same circumstances as +/// [`is_bit_valid`]. +/// +/// [`is_bit_valid`]: TryFromBytes::is_bit_valid +#[inline(always)] +pub fn try_transmute_ref<Src, Dst>(src: &Src) -> Result<&Dst, ValidityError<&Src, Dst>> +where + Src: IntoBytes + Immutable, + Dst: TryFromBytes + Immutable, +{ + let ptr = Ptr::from_ref(src); + let ptr = ptr.bikeshed_recall_initialized_immutable(); + match try_cast_or_pme::<Src, Dst, _, BecauseImmutable, _>(ptr) { + Ok(ptr) => { + static_assert!(Src, Dst => mem::align_of::<Dst>() <= mem::align_of::<Src>()); + // SAFETY: We have checked that `Dst` does not have a stricter + // alignment requirement than `Src`. + let ptr = unsafe { ptr.assume_alignment::<invariant::Aligned>() }; + Ok(ptr.as_ref()) + } + Err(err) => Err(err.map_src(|ptr| { + // SAFETY: Because `Src: Immutable` and we create a `Ptr` via + // `Ptr::from_ref`, the resulting `Ptr` is a shared-and-`Immutable` + // `Ptr`, which does not permit mutation of its referent. Therefore, + // no mutation could have happened during the call to + // `try_cast_or_pme` (any such mutation would be unsound). + // + // `try_cast_or_pme` promises to return its original argument, and + // so we know that we are getting back the same `ptr` that we + // originally passed, and that `ptr` was a bit-valid `Src`. + let ptr = unsafe { ptr.assume_valid() }; + ptr.as_ref() + })), + } +} + +/// Attempts to transmute `&mut Src` into `&mut Dst`. +/// +/// A helper for `try_transmute_mut!`. +/// +/// # Panics +/// +/// `try_transmute_mut` may either produce a post-monomorphization error or a +/// panic if `Dst` is bigger or has a stricter alignment requirement than `Src`. +/// Otherwise, `try_transmute_mut` panics under the same circumstances as +/// [`is_bit_valid`]. +/// +/// [`is_bit_valid`]: TryFromBytes::is_bit_valid +#[inline(always)] +pub fn try_transmute_mut<Src, Dst>(src: &mut Src) -> Result<&mut Dst, ValidityError<&mut Src, Dst>> +where + Src: FromBytes + IntoBytes, + Dst: TryFromBytes + IntoBytes, +{ + let ptr = Ptr::from_mut(src); + let ptr = ptr.bikeshed_recall_initialized_from_bytes(); + match try_cast_or_pme::<Src, Dst, _, BecauseExclusive, _>(ptr) { + Ok(ptr) => { + static_assert!(Src, Dst => mem::align_of::<Dst>() <= mem::align_of::<Src>()); + // SAFETY: We have checked that `Dst` does not have a stricter + // alignment requirement than `Src`. 
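`try_transmute_ref` above is likewise the helper behind the public `try_transmute_ref!` macro, which validates borrowed bytes in place. A short sketch:

    use zerocopy::try_transmute_ref;

    fn main() {
        // Both bytes are valid `bool` bit patterns, so the same storage can be
        // reborrowed as `&[bool; 2]` without copying.
        let raw = [0u8, 1u8];
        let flags: Result<&[bool; 2], _> = try_transmute_ref!(&raw);
        assert_eq!(flags.ok(), Some(&[false, true]));

        // A byte with value 2 is rejected, and the error carries the original
        // reference back to the caller.
        let bad = [0u8, 2u8];
        let rejected: Result<&[bool; 2], _> = try_transmute_ref!(&bad);
        assert!(rejected.is_err());
    }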
+ let ptr = unsafe { ptr.assume_alignment::<invariant::Aligned>() }; + Ok(ptr.as_mut()) + } + Err(err) => { + Err(err.map_src(|ptr| ptr.recall_validity::<_, (_, BecauseInvariantsEq)>().as_mut())) + } + } +} + +// Used in `transmute_ref!` and friends. +// +// This permits us to use the autoref specialization trick to dispatch to +// associated functions for `transmute_ref` and `transmute_mut` when both `Src` +// and `Dst` are `Sized`, and to trait methods otherwise. The associated +// functions, unlike the trait methods, do not require a `KnownLayout` bound. +// This permits us to add support for transmuting references to unsized types +// without breaking backwards-compatibility (on v0.8.x) with the old +// implementation, which did not require a `KnownLayout` bound to transmute +// sized types. +#[derive(Copy, Clone)] +pub struct Wrap<Src, Dst>(pub Src, pub PhantomData<Dst>); + +impl<Src, Dst> Wrap<Src, Dst> { + #[inline(always)] + pub const fn new(src: Src) -> Self { + Wrap(src, PhantomData) + } +} + +impl<'a, Src, Dst> Wrap<&'a Src, &'a Dst> { + /// # Safety + /// The caller must guarantee that: + /// - `Src: IntoBytes + Immutable` + /// - `Dst: FromBytes + Immutable` + /// + /// # PME + /// + /// Instantiating this method PMEs unless both: + /// - `mem::size_of::<Dst>() == mem::size_of::<Src>()` + /// - `mem::align_of::<Dst>() <= mem::align_of::<Src>()` + #[inline(always)] + #[must_use] + pub const unsafe fn transmute_ref(self) -> &'a Dst { + static_assert!(Src, Dst => mem::size_of::<Dst>() == mem::size_of::<Src>()); + static_assert!(Src, Dst => mem::align_of::<Dst>() <= mem::align_of::<Src>()); + + let src: *const Src = self.0; + let dst = src.cast::<Dst>(); + // SAFETY: + // - We know that it is sound to view the target type of the input + // reference (`Src`) as the target type of the output reference + // (`Dst`) because the caller has guaranteed that `Src: IntoBytes`, + // `Dst: FromBytes`, and `size_of::<Src>() == size_of::<Dst>()`. + // - We know that there are no `UnsafeCell`s, and thus we don't have to + // worry about `UnsafeCell` overlap, because `Src: Immutable` and + // `Dst: Immutable`. + // - The caller has guaranteed that alignment is not increased. + // - We know that the returned lifetime will not outlive the input + // lifetime thanks to the lifetime bounds on this function. + // + // FIXME(#67): Once our MSRV is 1.58, replace this `transmute` with + // `&*dst`. + #[allow(clippy::transmute_ptr_to_ref)] + unsafe { + mem::transmute(dst) + } + } +} + +impl<'a, Src, Dst> Wrap<&'a mut Src, &'a mut Dst> { + /// Transmutes a mutable reference of one type to a mutable reference of + /// another type. + /// + /// # PME + /// + /// Instantiating this method PMEs unless both: + /// - `mem::size_of::<Dst>() == mem::size_of::<Src>()` + /// - `mem::align_of::<Dst>() <= mem::align_of::<Src>()` + #[inline(always)] + #[must_use] + pub fn transmute_mut(self) -> &'a mut Dst + where + Src: FromBytes + IntoBytes, + Dst: FromBytes + IntoBytes, + { + static_assert!(Src, Dst => mem::size_of::<Dst>() == mem::size_of::<Src>()); + static_assert!(Src, Dst => mem::align_of::<Dst>() <= mem::align_of::<Src>()); + + let src: *mut Src = self.0; + let dst = src.cast::<Dst>(); + // SAFETY: + // - We know that it is sound to view the target type of the input + // reference (`Src`) as the target type of the output reference + // (`Dst`) and vice-versa because `Src: FromBytes + IntoBytes`, `Dst: + // FromBytes + IntoBytes`, and (as asserted above) `size_of::<Src>() + // == size_of::<Dst>()`. 
+ // - We asserted above that alignment will not increase. + // - We know that the returned lifetime will not outlive the input + // lifetime thanks to the lifetime bounds on this function. + unsafe { &mut *dst } + } +} + +pub trait TransmuteRefDst<'a> { + type Dst: ?Sized; + + #[must_use] + fn transmute_ref(self) -> &'a Self::Dst; +} + +impl<'a, Src: ?Sized, Dst: ?Sized> TransmuteRefDst<'a> for Wrap<&'a Src, &'a Dst> +where + Src: KnownLayout<PointerMetadata = usize> + IntoBytes + Immutable, + Dst: KnownLayout<PointerMetadata = usize> + FromBytes + Immutable, +{ + type Dst = Dst; + + #[inline(always)] + fn transmute_ref(self) -> &'a Dst { + static_assert!(Src: ?Sized + KnownLayout, Dst: ?Sized + KnownLayout => { + Src::LAYOUT.align.get() >= Dst::LAYOUT.align.get() + }, "cannot transmute reference when destination type has higher alignment than source type"); + + // SAFETY: We only use `S` as `S<Src>` and `D` as `D<Dst>`. + #[allow(clippy::multiple_unsafe_ops_per_block)] + unsafe { + unsafe_with_size_eq!(<S<Src>, D<Dst>> { + let ptr = Ptr::from_ref(self.0) + .transmute::<S<Src>, invariant::Valid, BecauseImmutable>() + .recall_validity::<invariant::Initialized, _>() + .transmute::<D<Dst>, invariant::Initialized, (crate::pointer::BecauseMutationCompatible, _)>() + .recall_validity::<invariant::Valid, _>(); + + #[allow(unused_unsafe)] + // SAFETY: The preceding `static_assert!` ensures that + // `T::LAYOUT.align >= U::LAYOUT.align`. Since `self.0` is + // validly-aligned for `T`, it is also validly-aligned for `U`. + let ptr = unsafe { ptr.assume_alignment() }; + + &ptr.as_ref().0 + }) + } + } +} + +pub trait TransmuteMutDst<'a> { + type Dst: ?Sized; + #[must_use] + fn transmute_mut(self) -> &'a mut Self::Dst; +} + +impl<'a, Src: ?Sized, Dst: ?Sized> TransmuteMutDst<'a> for Wrap<&'a mut Src, &'a mut Dst> +where + Src: KnownLayout<PointerMetadata = usize> + FromBytes + IntoBytes, + Dst: KnownLayout<PointerMetadata = usize> + FromBytes + IntoBytes, +{ + type Dst = Dst; + + #[inline(always)] + fn transmute_mut(self) -> &'a mut Dst { + static_assert!(Src: ?Sized + KnownLayout, Dst: ?Sized + KnownLayout => { + Src::LAYOUT.align.get() >= Dst::LAYOUT.align.get() + }, "cannot transmute reference when destination type has higher alignment than source type"); + + // SAFETY: We only use `S` as `S<Src>` and `D` as `D<Dst>`. + #[allow(clippy::multiple_unsafe_ops_per_block)] + unsafe { + unsafe_with_size_eq!(<S<Src>, D<Dst>> { + let ptr = Ptr::from_mut(self.0) + .transmute::<S<Src>, invariant::Valid, _>() + .recall_validity::<invariant::Initialized, (_, (_, _))>() + .transmute::<D<Dst>, invariant::Initialized, _>() + .recall_validity::<invariant::Valid, (_, (_, _))>(); + + #[allow(unused_unsafe)] + // SAFETY: The preceding `static_assert!` ensures that + // `T::LAYOUT.align >= U::LAYOUT.align`. Since `self.0` is + // validly-aligned for `T`, it is also validly-aligned for `U`. + let ptr = unsafe { ptr.assume_alignment() }; + + &mut ptr.as_mut().0 + }) + } + } +} + +/// A function which emits a warning if its return value is not used. +#[must_use] +#[inline(always)] +pub const fn must_use<T>(t: T) -> T { + t +} + +// NOTE: We can't change this to a `pub use core as core_reexport` until [1] is +// fixed or we update to a semver-breaking version (as of this writing, 0.8.0) +// on the `main` branch. 
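The `Wrap` autoref-specialization machinery above is what the public `transmute_ref!` and `transmute_mut!` macros dispatch through; for sized types the size and alignment checks resolve entirely at compile time. A minimal sized-case sketch:

    use zerocopy::transmute_ref;

    fn main() {
        let words: [u16; 2] = [0x0102, 0x0304];
        // Sizes must match exactly and alignment may only decrease
        // (`[u16; 2]` -> `[u8; 4]`), both enforced by the `static_assert!`
        // calls above.
        let bytes: &[u8; 4] = transmute_ref!(&words);
        assert_eq!(u16::from_ne_bytes([bytes[0], bytes[1]]), 0x0102);
    }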
+// +// [1] https://github.com/obi1kenobi/cargo-semver-checks/issues/573 +pub mod core_reexport { + pub use core::*; + + pub mod mem { + pub use core::mem::*; + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::util::testutil::*; + + #[test] + fn test_align_of() { + macro_rules! test { + ($ty:ty) => { + assert_eq!(mem::size_of::<AlignOf<$ty>>(), mem::align_of::<$ty>()); + }; + } + + test!(()); + test!(u8); + test!(AU64); + test!([AU64; 2]); + } + + #[test] + fn test_max_aligns_of() { + macro_rules! test { + ($t:ty, $u:ty) => { + assert_eq!( + mem::size_of::<MaxAlignsOf<$t, $u>>(), + core::cmp::max(mem::align_of::<$t>(), mem::align_of::<$u>()) + ); + }; + } + + test!(u8, u8); + test!(u8, AU64); + test!(AU64, u8); + } + + #[test] + fn test_typed_align_check() { + // Test that the type-based alignment check used in + // `assert_align_gt_eq!` behaves as expected. + + macro_rules! assert_t_align_gteq_u_align { + ($t:ty, $u:ty, $gteq:expr) => { + assert_eq!( + mem::size_of::<MaxAlignsOf<$t, $u>>() == mem::size_of::<AlignOf<$t>>(), + $gteq + ); + }; + } + + assert_t_align_gteq_u_align!(u8, u8, true); + assert_t_align_gteq_u_align!(AU64, AU64, true); + assert_t_align_gteq_u_align!(AU64, u8, true); + assert_t_align_gteq_u_align!(u8, AU64, false); + } + + // FIXME(#29), FIXME(https://github.com/rust-lang/rust/issues/69835): Remove + // this `cfg` when `size_of_val_raw` is stabilized. + #[allow(clippy::decimal_literal_representation)] + #[cfg(__ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS)] + #[test] + fn test_trailing_field_offset() { + assert_eq!(mem::align_of::<Aligned64kAllocation>(), _64K); + + macro_rules! test { + (#[$cfg:meta] ($($ts:ty),* ; $trailing_field_ty:ty) => $expect:expr) => {{ + #[$cfg] + struct Test($(#[allow(dead_code)] $ts,)* #[allow(dead_code)] $trailing_field_ty); + assert_eq!(test!(@offset $($ts),* ; $trailing_field_ty), $expect); + }}; + (#[$cfg:meta] $(#[$cfgs:meta])* ($($ts:ty),* ; $trailing_field_ty:ty) => $expect:expr) => { + test!(#[$cfg] ($($ts),* ; $trailing_field_ty) => $expect); + test!($(#[$cfgs])* ($($ts),* ; $trailing_field_ty) => $expect); + }; + (@offset ; $_trailing:ty) => { trailing_field_offset!(Test, 0) }; + (@offset $_t:ty ; $_trailing:ty) => { trailing_field_offset!(Test, 1) }; + } + + test!(#[repr(C)] #[repr(transparent)] #[repr(packed)](; u8) => Some(0)); + test!(#[repr(C)] #[repr(transparent)] #[repr(packed)](; [u8]) => Some(0)); + test!(#[repr(C)] #[repr(C, packed)] (u8; u8) => Some(1)); + test!(#[repr(C)] (; AU64) => Some(0)); + test!(#[repr(C)] (; [AU64]) => Some(0)); + test!(#[repr(C)] (u8; AU64) => Some(8)); + test!(#[repr(C)] (u8; [AU64]) => Some(8)); + + #[derive( + Immutable, FromBytes, Eq, PartialEq, Ord, PartialOrd, Default, Debug, Copy, Clone, + )] + #[repr(C)] + pub(crate) struct Nested<T, U: ?Sized> { + _t: T, + _u: U, + } + + test!(#[repr(C)] (; Nested<u8, AU64>) => Some(0)); + test!(#[repr(C)] (; Nested<u8, [AU64]>) => Some(0)); + test!(#[repr(C)] (u8; Nested<u8, AU64>) => Some(8)); + test!(#[repr(C)] (u8; Nested<u8, [AU64]>) => Some(8)); + + // Test that `packed(N)` limits the offset of the trailing field. 
+ test!(#[repr(C, packed( 1))] (u8; elain::Align< 2>) => Some( 1)); + test!(#[repr(C, packed( 2))] (u8; elain::Align< 4>) => Some( 2)); + test!(#[repr(C, packed( 4))] (u8; elain::Align< 8>) => Some( 4)); + test!(#[repr(C, packed( 8))] (u8; elain::Align< 16>) => Some( 8)); + test!(#[repr(C, packed( 16))] (u8; elain::Align< 32>) => Some( 16)); + test!(#[repr(C, packed( 32))] (u8; elain::Align< 64>) => Some( 32)); + test!(#[repr(C, packed( 64))] (u8; elain::Align< 128>) => Some( 64)); + test!(#[repr(C, packed( 128))] (u8; elain::Align< 256>) => Some( 128)); + test!(#[repr(C, packed( 256))] (u8; elain::Align< 512>) => Some( 256)); + test!(#[repr(C, packed( 512))] (u8; elain::Align< 1024>) => Some( 512)); + test!(#[repr(C, packed( 1024))] (u8; elain::Align< 2048>) => Some( 1024)); + test!(#[repr(C, packed( 2048))] (u8; elain::Align< 4096>) => Some( 2048)); + test!(#[repr(C, packed( 4096))] (u8; elain::Align< 8192>) => Some( 4096)); + test!(#[repr(C, packed( 8192))] (u8; elain::Align< 16384>) => Some( 8192)); + test!(#[repr(C, packed( 16384))] (u8; elain::Align< 32768>) => Some( 16384)); + test!(#[repr(C, packed( 32768))] (u8; elain::Align< 65536>) => Some( 32768)); + test!(#[repr(C, packed( 65536))] (u8; elain::Align< 131072>) => Some( 65536)); + /* Alignments above 65536 are not yet supported. + test!(#[repr(C, packed( 131072))] (u8; elain::Align< 262144>) => Some( 131072)); + test!(#[repr(C, packed( 262144))] (u8; elain::Align< 524288>) => Some( 262144)); + test!(#[repr(C, packed( 524288))] (u8; elain::Align< 1048576>) => Some( 524288)); + test!(#[repr(C, packed( 1048576))] (u8; elain::Align< 2097152>) => Some( 1048576)); + test!(#[repr(C, packed( 2097152))] (u8; elain::Align< 4194304>) => Some( 2097152)); + test!(#[repr(C, packed( 4194304))] (u8; elain::Align< 8388608>) => Some( 4194304)); + test!(#[repr(C, packed( 8388608))] (u8; elain::Align< 16777216>) => Some( 8388608)); + test!(#[repr(C, packed( 16777216))] (u8; elain::Align< 33554432>) => Some( 16777216)); + test!(#[repr(C, packed( 33554432))] (u8; elain::Align< 67108864>) => Some( 33554432)); + test!(#[repr(C, packed( 67108864))] (u8; elain::Align< 33554432>) => Some( 67108864)); + test!(#[repr(C, packed( 33554432))] (u8; elain::Align<134217728>) => Some( 33554432)); + test!(#[repr(C, packed(134217728))] (u8; elain::Align<268435456>) => Some(134217728)); + test!(#[repr(C, packed(268435456))] (u8; elain::Align<268435456>) => Some(268435456)); + */ + + // Test that `align(N)` does not limit the offset of the trailing field. 
+ test!(#[repr(C, align( 1))] (u8; elain::Align< 2>) => Some( 2)); + test!(#[repr(C, align( 2))] (u8; elain::Align< 4>) => Some( 4)); + test!(#[repr(C, align( 4))] (u8; elain::Align< 8>) => Some( 8)); + test!(#[repr(C, align( 8))] (u8; elain::Align< 16>) => Some( 16)); + test!(#[repr(C, align( 16))] (u8; elain::Align< 32>) => Some( 32)); + test!(#[repr(C, align( 32))] (u8; elain::Align< 64>) => Some( 64)); + test!(#[repr(C, align( 64))] (u8; elain::Align< 128>) => Some( 128)); + test!(#[repr(C, align( 128))] (u8; elain::Align< 256>) => Some( 256)); + test!(#[repr(C, align( 256))] (u8; elain::Align< 512>) => Some( 512)); + test!(#[repr(C, align( 512))] (u8; elain::Align< 1024>) => Some( 1024)); + test!(#[repr(C, align( 1024))] (u8; elain::Align< 2048>) => Some( 2048)); + test!(#[repr(C, align( 2048))] (u8; elain::Align< 4096>) => Some( 4096)); + test!(#[repr(C, align( 4096))] (u8; elain::Align< 8192>) => Some( 8192)); + test!(#[repr(C, align( 8192))] (u8; elain::Align< 16384>) => Some( 16384)); + test!(#[repr(C, align( 16384))] (u8; elain::Align< 32768>) => Some( 32768)); + test!(#[repr(C, align( 32768))] (u8; elain::Align< 65536>) => Some( 65536)); + /* Alignments above 65536 are not yet supported. + test!(#[repr(C, align( 65536))] (u8; elain::Align< 131072>) => Some( 131072)); + test!(#[repr(C, align( 131072))] (u8; elain::Align< 262144>) => Some( 262144)); + test!(#[repr(C, align( 262144))] (u8; elain::Align< 524288>) => Some( 524288)); + test!(#[repr(C, align( 524288))] (u8; elain::Align< 1048576>) => Some( 1048576)); + test!(#[repr(C, align( 1048576))] (u8; elain::Align< 2097152>) => Some( 2097152)); + test!(#[repr(C, align( 2097152))] (u8; elain::Align< 4194304>) => Some( 4194304)); + test!(#[repr(C, align( 4194304))] (u8; elain::Align< 8388608>) => Some( 8388608)); + test!(#[repr(C, align( 8388608))] (u8; elain::Align< 16777216>) => Some( 16777216)); + test!(#[repr(C, align( 16777216))] (u8; elain::Align< 33554432>) => Some( 33554432)); + test!(#[repr(C, align( 33554432))] (u8; elain::Align< 67108864>) => Some( 67108864)); + test!(#[repr(C, align( 67108864))] (u8; elain::Align< 33554432>) => Some( 33554432)); + test!(#[repr(C, align( 33554432))] (u8; elain::Align<134217728>) => Some(134217728)); + test!(#[repr(C, align(134217728))] (u8; elain::Align<268435456>) => Some(268435456)); + */ + } + + // FIXME(#29), FIXME(https://github.com/rust-lang/rust/issues/69835): Remove + // this `cfg` when `size_of_val_raw` is stabilized. + #[allow(clippy::decimal_literal_representation)] + #[cfg(__ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS)] + #[test] + fn test_align_of_dst() { + // Test that `align_of!` correctly computes the alignment of DSTs. 
+ assert_eq!(align_of!([elain::Align<1>]), Some(1)); + assert_eq!(align_of!([elain::Align<2>]), Some(2)); + assert_eq!(align_of!([elain::Align<4>]), Some(4)); + assert_eq!(align_of!([elain::Align<8>]), Some(8)); + assert_eq!(align_of!([elain::Align<16>]), Some(16)); + assert_eq!(align_of!([elain::Align<32>]), Some(32)); + assert_eq!(align_of!([elain::Align<64>]), Some(64)); + assert_eq!(align_of!([elain::Align<128>]), Some(128)); + assert_eq!(align_of!([elain::Align<256>]), Some(256)); + assert_eq!(align_of!([elain::Align<512>]), Some(512)); + assert_eq!(align_of!([elain::Align<1024>]), Some(1024)); + assert_eq!(align_of!([elain::Align<2048>]), Some(2048)); + assert_eq!(align_of!([elain::Align<4096>]), Some(4096)); + assert_eq!(align_of!([elain::Align<8192>]), Some(8192)); + assert_eq!(align_of!([elain::Align<16384>]), Some(16384)); + assert_eq!(align_of!([elain::Align<32768>]), Some(32768)); + assert_eq!(align_of!([elain::Align<65536>]), Some(65536)); + /* Alignments above 65536 are not yet supported. + assert_eq!(align_of!([elain::Align<131072>]), Some(131072)); + assert_eq!(align_of!([elain::Align<262144>]), Some(262144)); + assert_eq!(align_of!([elain::Align<524288>]), Some(524288)); + assert_eq!(align_of!([elain::Align<1048576>]), Some(1048576)); + assert_eq!(align_of!([elain::Align<2097152>]), Some(2097152)); + assert_eq!(align_of!([elain::Align<4194304>]), Some(4194304)); + assert_eq!(align_of!([elain::Align<8388608>]), Some(8388608)); + assert_eq!(align_of!([elain::Align<16777216>]), Some(16777216)); + assert_eq!(align_of!([elain::Align<33554432>]), Some(33554432)); + assert_eq!(align_of!([elain::Align<67108864>]), Some(67108864)); + assert_eq!(align_of!([elain::Align<33554432>]), Some(33554432)); + assert_eq!(align_of!([elain::Align<134217728>]), Some(134217728)); + assert_eq!(align_of!([elain::Align<268435456>]), Some(268435456)); + */ + } + + #[test] + fn test_enum_casts() { + // Test that casting the variants of enums with signed integer reprs to + // unsigned integers obeys expected signed -> unsigned casting rules. + + #[repr(i8)] + enum ReprI8 { + MinusOne = -1, + Zero = 0, + Min = i8::MIN, + Max = i8::MAX, + } + + #[allow(clippy::as_conversions)] + let x = ReprI8::MinusOne as u8; + assert_eq!(x, u8::MAX); + + #[allow(clippy::as_conversions)] + let x = ReprI8::Zero as u8; + assert_eq!(x, 0); + + #[allow(clippy::as_conversions)] + let x = ReprI8::Min as u8; + assert_eq!(x, 128); + + #[allow(clippy::as_conversions)] + let x = ReprI8::Max as u8; + assert_eq!(x, 127); + } + + #[test] + fn test_struct_padding() { + // Test that, for each provided repr, `struct_padding!` reports the + // expected value. + macro_rules! test { + (#[$cfg:meta] ($($ts:ty),*) => $expect:expr) => {{ + #[$cfg] + #[allow(dead_code)] + struct Test($($ts),*); + assert_eq!(struct_padding!(Test, [$($ts),*]), $expect); + }}; + (#[$cfg:meta] $(#[$cfgs:meta])* ($($ts:ty),*) => $expect:expr) => { + test!(#[$cfg] ($($ts),*) => $expect); + test!($(#[$cfgs])* ($($ts),*) => $expect); + }; + } + + test!(#[repr(C)] #[repr(transparent)] #[repr(packed)] () => 0); + test!(#[repr(C)] #[repr(transparent)] #[repr(packed)] (u8) => 0); + test!(#[repr(C)] #[repr(transparent)] #[repr(packed)] (u8, ()) => 0); + test!(#[repr(C)] #[repr(packed)] (u8, u8) => 0); + + test!(#[repr(C)] (u8, AU64) => 7); + // Rust won't let you put `#[repr(packed)]` on a type which contains a + // `#[repr(align(n > 1))]` type (`AU64`), so we have to use `u64` here. 
+ // It's not ideal, but it definitely has align > 1 on /some/ of our CI + // targets, and this isn't a particularly complex macro we're testing + // anyway. + test!(#[repr(packed)] (u8, u64) => 0); + } + + #[test] + fn test_repr_c_struct_padding() { + // Test that, for each provided repr, `repr_c_struct_padding!` reports + // the expected value. + macro_rules! test { + (($($ts:tt),*) => $expect:expr) => {{ + #[repr(C)] + #[allow(dead_code)] + struct Test($($ts),*); + assert_eq!(repr_c_struct_has_padding!(Test, [$($ts),*]), $expect); + }}; + } + + // Test static padding + test!(() => false); + test!(([u8]) => false); + test!((u8) => false); + test!((u8, [u8]) => false); + test!((u8, ()) => false); + test!((u8, (), [u8]) => false); + test!((u8, u8) => false); + test!((u8, u8, [u8]) => false); + + test!((u8, AU64) => true); + test!((u8, AU64, [u8]) => true); + + // Test dynamic padding + test!((AU64, [AU64]) => false); + test!((u8, [AU64]) => true); + + #[repr(align(4))] + struct AU32(#[allow(unused)] u32); + test!((AU64, [AU64]) => false); + test!((AU64, [AU32]) => true); + } + + #[test] + fn test_union_padding() { + // Test that, for each provided repr, `union_padding!` reports the + // expected value. + macro_rules! test { + (#[$cfg:meta] {$($fs:ident: $ts:ty),*} => $expect:expr) => {{ + #[$cfg] + #[allow(unused)] // fields are never read + union Test{ $($fs: $ts),* } + assert_eq!(union_padding!(Test, [$($ts),*]), $expect); + }}; + (#[$cfg:meta] $(#[$cfgs:meta])* {$($fs:ident: $ts:ty),*} => $expect:expr) => { + test!(#[$cfg] {$($fs: $ts),*} => $expect); + test!($(#[$cfgs])* {$($fs: $ts),*} => $expect); + }; + } + + test!(#[repr(C)] #[repr(packed)] {a: u8} => 0); + test!(#[repr(C)] #[repr(packed)] {a: u8, b: u8} => 0); + + // Rust won't let you put `#[repr(packed)]` on a type which contains a + // `#[repr(align(n > 1))]` type (`AU64`), so we have to use `u64` here. + // It's not ideal, but it definitely has align > 1 on /some/ of our CI + // targets, and this isn't a particularly complex macro we're testing + // anyway. + test!(#[repr(C)] #[repr(packed)] {a: u8, b: u64} => 7); + } + + #[test] + fn test_enum_padding() { + // Test that, for each provided repr, `enum_has_padding!` reports the + // expected value. + macro_rules! test { + (#[repr($disc:ident $(, $c:ident)?)] { $($vs:ident ($($ts:ty),*),)* } => $expect:expr) => { + test!(@case #[repr($disc $(, $c)?)] { $($vs ($($ts),*),)* } => $expect); + }; + (#[repr($disc:ident $(, $c:ident)?)] #[$cfg:meta] $(#[$cfgs:meta])* { $($vs:ident ($($ts:ty),*),)* } => $expect:expr) => { + test!(@case #[repr($disc $(, $c)?)] #[$cfg] { $($vs ($($ts),*),)* } => $expect); + test!(#[repr($disc $(, $c)?)] $(#[$cfgs])* { $($vs ($($ts),*),)* } => $expect); + }; + (@case #[repr($disc:ident $(, $c:ident)?)] $(#[$cfg:meta])? { $($vs:ident ($($ts:ty),*),)* } => $expect:expr) => {{ + #[repr($disc $(, $c)?)] + $(#[$cfg])? 
+ #[allow(unused)] // variants and fields are never used + enum Test { + $($vs ($($ts),*),)* + } + assert_eq!( + enum_padding!(Test, $disc, $([$($ts),*]),*), + $expect + ); + }}; + } + + #[allow(unused)] + #[repr(align(2))] + struct U16(u16); + + #[allow(unused)] + #[repr(align(4))] + struct U32(u32); + + test!(#[repr(u8)] #[repr(C)] { + A(u8), + } => 0); + test!(#[repr(u16)] #[repr(C)] { + A(u8, u8), + B(U16), + } => 0); + test!(#[repr(u32)] #[repr(C)] { + A(u8, u8, u8, u8), + B(U16, u8, u8), + C(u8, u8, U16), + D(U16, U16), + E(U32), + } => 0); + + // `repr(int)` can pack the discriminant more efficiently + test!(#[repr(u8)] { + A(u8, U16), + } => 0); + test!(#[repr(u8)] { + A(u8, U16, U32), + } => 0); + + // `repr(C)` cannot + test!(#[repr(u8, C)] { + A(u8, U16), + } => 2); + test!(#[repr(u8, C)] { + A(u8, u8, u8, U32), + } => 4); + + // And field ordering can always cause problems + test!(#[repr(u8)] #[repr(C)] { + A(U16, u8), + } => 2); + test!(#[repr(u8)] #[repr(C)] { + A(U32, u8, u8, u8), + } => 4); + } +} diff --git a/vendor/zerocopy/src/util/macros.rs b/vendor/zerocopy/src/util/macros.rs new file mode 100644 index 00000000..2a5818d4 --- /dev/null +++ b/vendor/zerocopy/src/util/macros.rs @@ -0,0 +1,948 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +/// Unsafely implements trait(s) for a type. +/// +/// # Safety +/// +/// The trait impl must be sound. +/// +/// When implementing `TryFromBytes`: +/// - If no `is_bit_valid` impl is provided, then it must be valid for +/// `is_bit_valid` to unconditionally return `true`. In other words, it must +/// be the case that any initialized sequence of bytes constitutes a valid +/// instance of `$ty`. +/// - If an `is_bit_valid` impl is provided, then the impl of `is_bit_valid` +/// must only return `true` if its argument refers to a valid `$ty`. +macro_rules! unsafe_impl { + // Implement `$trait` for `$ty` with no bounds. + ($(#[$attr:meta])* $ty:ty: $trait:ident $(; |$candidate:ident| $is_bit_valid:expr)?) => {{ + crate::util::macros::__unsafe(); + + $(#[$attr])* + // SAFETY: The caller promises that this is sound. + unsafe impl $trait for $ty { + unsafe_impl!(@method $trait $(; |$candidate| $is_bit_valid)?); + } + }}; + + // Implement all `$traits` for `$ty` with no bounds. + // + // The 2 arms under this one are there so we can apply + // N attributes for each one of M trait implementations. + // The simple solution of: + // + // ($(#[$attrs:meta])* $ty:ty: $($traits:ident),*) => { + // $( unsafe_impl!( $(#[$attrs])* $ty: $traits ) );* + // } + // + // Won't work. The macro processor sees that the outer repetition + // contains both $attrs and $traits and expects them to match the same + // amount of fragments. + // + // To solve this we must: + // 1. Pack the attributes into a single token tree fragment we can match over. + // 2. Expand the traits. + // 3. Unpack and expand the attributes. 
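A standalone sketch (names are illustrative, not from this crate) of the pack-then-unpack trick described in the comment above: the attribute list is bundled into a single token tree so that the trait list can be expanded independently, then unpacked again once per trait.

    macro_rules! impl_markers {
        ($(#[$attrs:meta])* $ty:ty: $($traits:path),*) => {
            impl_markers!(@packed { $(#[$attrs])* } $ty: $($traits),*);
        };
        (@packed $attrs:tt $ty:ty: $($traits:path),*) => {
            $( impl_markers!(@unpack $attrs $ty: $traits); )*
        };
        (@unpack { $(#[$attrs:meta])* } $ty:ty: $trait:path) => {
            $(#[$attrs])*
            impl $trait for $ty {}
        };
    }

    #[allow(dead_code)]
    trait Red {}
    #[allow(dead_code)]
    trait Blue {}
    #[allow(dead_code)]
    struct Widget;

    // Expands to `impl Red for Widget {}` and `impl Blue for Widget {}`, each
    // carrying the packed attribute.
    impl_markers!(#[allow(dead_code)] Widget: Red, Blue);

    fn main() {}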
+ ($(#[$attrs:meta])* $ty:ty: $($traits:ident),*) => { + unsafe_impl!(@impl_traits_with_packed_attrs { $(#[$attrs])* } $ty: $($traits),*) + }; + + (@impl_traits_with_packed_attrs $attrs:tt $ty:ty: $($traits:ident),*) => {{ + $( unsafe_impl!(@unpack_attrs $attrs $ty: $traits); )* + }}; + + (@unpack_attrs { $(#[$attrs:meta])* } $ty:ty: $traits:ident) => { + unsafe_impl!($(#[$attrs])* $ty: $traits); + }; + + // This arm is identical to the following one, except it contains a + // preceding `const`. If we attempt to handle these with a single arm, there + // is an inherent ambiguity between `const` (the keyword) and `const` (the + // ident match for `$tyvar:ident`). + // + // To explain how this works, consider the following invocation: + // + // unsafe_impl!(const N: usize, T: ?Sized + Copy => Clone for Foo<T>); + // + // In this invocation, here are the assignments to meta-variables: + // + // |---------------|------------| + // | Meta-variable | Assignment | + // |---------------|------------| + // | $constname | N | + // | $constty | usize | + // | $tyvar | T | + // | $optbound | Sized | + // | $bound | Copy | + // | $trait | Clone | + // | $ty | Foo<T> | + // |---------------|------------| + // + // The following arm has the same behavior with the exception of the lack of + // support for a leading `const` parameter. + ( + $(#[$attr:meta])* + const $constname:ident : $constty:ident $(,)? + $($tyvar:ident $(: $(? $optbound:ident $(+)?)* $($bound:ident $(+)?)* )?),* + => $trait:ident for $ty:ty $(; |$candidate:ident| $is_bit_valid:expr)? + ) => { + unsafe_impl!( + @inner + $(#[$attr])* + @const $constname: $constty, + $($tyvar $(: $(? $optbound +)* + $($bound +)*)?,)* + => $trait for $ty $(; |$candidate| $is_bit_valid)? + ); + }; + ( + $(#[$attr:meta])* + $($tyvar:ident $(: $(? $optbound:ident $(+)?)* $($bound:ident $(+)?)* )?),* + => $trait:ident for $ty:ty $(; |$candidate:ident| $is_bit_valid:expr)? + ) => {{ + unsafe_impl!( + @inner + $(#[$attr])* + $($tyvar $(: $(? $optbound +)* + $($bound +)*)?,)* + => $trait for $ty $(; |$candidate| $is_bit_valid)? + ); + }}; + ( + @inner + $(#[$attr:meta])* + $(@const $constname:ident : $constty:ident,)* + $($tyvar:ident $(: $(? $optbound:ident +)* + $($bound:ident +)* )?,)* + => $trait:ident for $ty:ty $(; |$candidate:ident| $is_bit_valid:expr)? + ) => {{ + crate::util::macros::__unsafe(); + + $(#[$attr])* + #[allow(non_local_definitions)] + // SAFETY: The caller promises that this is sound. + unsafe impl<$($tyvar $(: $(? 
$optbound +)* $($bound +)*)?),* $(, const $constname: $constty,)*> $trait for $ty { + unsafe_impl!(@method $trait $(; |$candidate| $is_bit_valid)?); + } + }}; + + (@method TryFromBytes ; |$candidate:ident| $is_bit_valid:expr) => { + #[allow(clippy::missing_inline_in_public_items, dead_code)] + #[cfg_attr(all(coverage_nightly, __ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS), coverage(off))] + fn only_derive_is_allowed_to_implement_this_trait() {} + + #[inline] + fn is_bit_valid<AA: crate::pointer::invariant::Reference>($candidate: Maybe<'_, Self, AA>) -> bool { + $is_bit_valid + } + }; + (@method TryFromBytes) => { + #[allow(clippy::missing_inline_in_public_items)] + #[cfg_attr(all(coverage_nightly, __ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS), coverage(off))] + fn only_derive_is_allowed_to_implement_this_trait() {} + #[inline(always)] fn is_bit_valid<AA: crate::pointer::invariant::Reference>(_: Maybe<'_, Self, AA>) -> bool { true } + }; + (@method $trait:ident) => { + #[allow(clippy::missing_inline_in_public_items, dead_code)] + #[cfg_attr(all(coverage_nightly, __ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS), coverage(off))] + fn only_derive_is_allowed_to_implement_this_trait() {} + }; + (@method $trait:ident; |$_candidate:ident| $_is_bit_valid:expr) => { + compile_error!("Can't provide `is_bit_valid` impl for trait other than `TryFromBytes`"); + }; +} + +/// Implements `$trait` for `$ty` where `$ty: TransmuteFrom<$repr>` (and +/// vice-versa). +/// +/// Calling this macro is safe; the internals of the macro emit appropriate +/// trait bounds which ensure that the given impl is sound. +macro_rules! impl_for_transmute_from { + ( + $(#[$attr:meta])* + $($tyvar:ident $(: $(? $optbound:ident $(+)?)* $($bound:ident $(+)?)* )?)? + => $trait:ident for $ty:ty [$($unsafe_cell:ident)? <$repr:ty>] + ) => { + const _: () = { + $(#[$attr])* + #[allow(non_local_definitions)] + + // SAFETY: `is_trait<T, R>` (defined and used below) requires `T: + // TransmuteFrom<R>`, `R: TransmuteFrom<T>`, and `R: $trait`. It is + // called using `$ty` and `$repr`, ensuring that `$ty` and `$repr` + // have equivalent bit validity, and ensuring that `$repr: $trait`. + // The supported traits - `TryFromBytes`, `FromZeros`, `FromBytes`, + // and `IntoBytes` - are defined only in terms of the bit validity + // of a type. Therefore, `$repr: $trait` ensures that `$ty: $trait` + // is sound. + unsafe impl<$($tyvar $(: $(? $optbound +)* $($bound +)*)?)?> $trait for $ty { + #[allow(dead_code, clippy::missing_inline_in_public_items)] + #[cfg_attr(all(coverage_nightly, __ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS), coverage(off))] + fn only_derive_is_allowed_to_implement_this_trait() { + use crate::pointer::{*, invariant::Valid}; + + impl_for_transmute_from!(@assert_is_supported_trait $trait); + + fn is_trait<T, R>() + where + T: TransmuteFrom<R, Valid, Valid> + ?Sized, + R: TransmuteFrom<T, Valid, Valid> + ?Sized, + R: $trait, + { + } + + #[cfg_attr(all(coverage_nightly, __ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS), coverage(off))] + fn f<$($tyvar $(: $(? $optbound +)* $($bound +)*)?)?>() { + is_trait::<$ty, $repr>(); + } + } + + impl_for_transmute_from!( + @is_bit_valid + $(<$tyvar $(: $(? $optbound +)* $($bound +)*)?>)? + $trait for $ty [$($unsafe_cell)? 
<$repr>] + ); + } + }; + }; + (@assert_is_supported_trait TryFromBytes) => {}; + (@assert_is_supported_trait FromZeros) => {}; + (@assert_is_supported_trait FromBytes) => {}; + (@assert_is_supported_trait IntoBytes) => {}; + ( + @is_bit_valid + $(<$tyvar:ident $(: $(? $optbound:ident $(+)?)* $($bound:ident $(+)?)* )?>)? + TryFromBytes for $ty:ty [UnsafeCell<$repr:ty>] + ) => { + #[inline] + fn is_bit_valid<A: crate::pointer::invariant::Reference>(candidate: Maybe<'_, Self, A>) -> bool { + let c: Maybe<'_, Self, crate::pointer::invariant::Exclusive> = candidate.into_exclusive_or_pme(); + let c: Maybe<'_, $repr, _> = c.transmute::<_, _, (_, (_, (BecauseExclusive, BecauseExclusive)))>(); + // SAFETY: This macro ensures that `$repr` and `Self` have the same + // size and bit validity. Thus, a bit-valid instance of `$repr` is + // also a bit-valid instance of `Self`. + <$repr as TryFromBytes>::is_bit_valid(c) + } + }; + ( + @is_bit_valid + $(<$tyvar:ident $(: $(? $optbound:ident $(+)?)* $($bound:ident $(+)?)* )?>)? + TryFromBytes for $ty:ty [<$repr:ty>] + ) => { + #[inline] + fn is_bit_valid<A: crate::pointer::invariant::Reference>(candidate: $crate::Maybe<'_, Self, A>) -> bool { + // SAFETY: This macro ensures that `$repr` and `Self` have the same + // size and bit validity. Thus, a bit-valid instance of `$repr` is + // also a bit-valid instance of `Self`. + <$repr as TryFromBytes>::is_bit_valid(candidate.transmute()) + } + }; + ( + @is_bit_valid + $(<$tyvar:ident $(: $(? $optbound:ident $(+)?)* $($bound:ident $(+)?)* )?>)? + $trait:ident for $ty:ty [$($unsafe_cell:ident)? <$repr:ty>] + ) => { + // Trait other than `TryFromBytes`; no `is_bit_valid` impl. + }; +} + +/// Implements a trait for a type, bounding on each member of the power set of +/// a set of type variables. This is useful for implementing traits for tuples +/// or `fn` types. +/// +/// The last argument is the name of a macro which will be called in every +/// `impl` block, and is expected to expand to the name of the type for which to +/// implement the trait. +/// +/// For example, the invocation: +/// ```ignore +/// unsafe_impl_for_power_set!(A, B => Foo for type!(...)) +/// ``` +/// ...expands to: +/// ```ignore +/// unsafe impl Foo for type!() { ... } +/// unsafe impl<B> Foo for type!(B) { ... } +/// unsafe impl<A, B> Foo for type!(A, B) { ... } +/// ``` +macro_rules! unsafe_impl_for_power_set { + ( + $first:ident $(, $rest:ident)* $(-> $ret:ident)? => $trait:ident for $macro:ident!(...) + $(; |$candidate:ident| $is_bit_valid:expr)? + ) => { + unsafe_impl_for_power_set!( + $($rest),* $(-> $ret)? => $trait for $macro!(...) + $(; |$candidate| $is_bit_valid)? + ); + unsafe_impl_for_power_set!( + @impl $first $(, $rest)* $(-> $ret)? => $trait for $macro!(...) + $(; |$candidate| $is_bit_valid)? + ); + }; + ( + $(-> $ret:ident)? => $trait:ident for $macro:ident!(...) + $(; |$candidate:ident| $is_bit_valid:expr)? + ) => { + unsafe_impl_for_power_set!( + @impl $(-> $ret)? => $trait for $macro!(...) + $(; |$candidate| $is_bit_valid)? + ); + }; + ( + @impl $($vars:ident),* $(-> $ret:ident)? => $trait:ident for $macro:ident!(...) + $(; |$candidate:ident| $is_bit_valid:expr)? + ) => { + unsafe_impl!( + $($vars,)* $($ret)? => $trait for $macro!($($vars),* $(-> $ret)?) + $(; |$candidate| $is_bit_valid)? + ); + }; +} + +/// Expands to an `Option<extern "C" fn>` type with the given argument types and +/// return type. Designed for use with `unsafe_impl_for_power_set`. +macro_rules! 
opt_extern_c_fn { + ($($args:ident),* -> $ret:ident) => { Option<extern "C" fn($($args),*) -> $ret> }; +} + +/// Expands to an `Option<unsafe extern "C" fn>` type with the given argument +/// types and return type. Designed for use with `unsafe_impl_for_power_set`. +macro_rules! opt_unsafe_extern_c_fn { + ($($args:ident),* -> $ret:ident) => { Option<unsafe extern "C" fn($($args),*) -> $ret> }; +} + +/// Expands to an `Option<fn>` type with the given argument types and return +/// type. Designed for use with `unsafe_impl_for_power_set`. +macro_rules! opt_fn { + ($($args:ident),* -> $ret:ident) => { Option<fn($($args),*) -> $ret> }; +} + +/// Expands to an `Option<unsafe fn>` type with the given argument types and +/// return type. Designed for use with `unsafe_impl_for_power_set`. +macro_rules! opt_unsafe_fn { + ($($args:ident),* -> $ret:ident) => { Option<unsafe fn($($args),*) -> $ret> }; +} + +/// Implements trait(s) for a type or verifies the given implementation by +/// referencing an existing (derived) implementation. +/// +/// This macro exists so that we can provide zerocopy-derive as an optional +/// dependency and still get the benefit of using its derives to validate that +/// our trait impls are sound. +/// +/// When compiling without `--cfg 'feature = "derive"` and without `--cfg test`, +/// `impl_or_verify!` emits the provided trait impl. When compiling with either +/// of those cfgs, it is expected that the type in question is deriving the +/// traits instead. In this case, `impl_or_verify!` emits code which validates +/// that the given trait impl is at least as restrictive as the the impl emitted +/// by the custom derive. This has the effect of confirming that the impl which +/// is emitted when the `derive` feature is disabled is actually sound (on the +/// assumption that the impl emitted by the custom derive is sound). +/// +/// The caller is still required to provide a safety comment (e.g. using the +/// `const _: () = unsafe` macro). The reason for this restriction is that, +/// while `impl_or_verify!` can guarantee that the provided impl is sound when +/// it is compiled with the appropriate cfgs, there is no way to guarantee that +/// it is +/// ever compiled with those cfgs. In particular, it would be possible to +/// accidentally place an `impl_or_verify!` call in a context that is only ever +/// compiled when the `derive` feature is disabled. If that were to happen, +/// there would be nothing to prevent an unsound trait impl from being emitted. +/// Requiring a safety comment reduces the likelihood of emitting an unsound +/// impl in this case, and also provides useful documentation for readers of the +/// code. +/// +/// Finally, if a `TryFromBytes::is_bit_valid` impl is provided, it must adhere +/// to the safety preconditions of [`unsafe_impl!`]. +/// +/// ## Example +/// +/// ```rust,ignore +/// // Note that these derives are gated by `feature = "derive"` +/// #[cfg_attr(any(feature = "derive", test), derive(FromZeros, FromBytes, IntoBytes, Unaligned))] +/// #[repr(transparent)] +/// struct Wrapper<T>(T); +/// +/// const _: () = unsafe { +/// /// SAFETY: +/// /// `Wrapper<T>` is `repr(transparent)`, so it is sound to implement any +/// /// zerocopy trait if `T` implements that trait. 
+/// impl_or_verify!(T: FromZeros => FromZeros for Wrapper<T>); +/// impl_or_verify!(T: FromBytes => FromBytes for Wrapper<T>); +/// impl_or_verify!(T: IntoBytes => IntoBytes for Wrapper<T>); +/// impl_or_verify!(T: Unaligned => Unaligned for Wrapper<T>); +/// } +/// ``` +macro_rules! impl_or_verify { + // The following two match arms follow the same pattern as their + // counterparts in `unsafe_impl!`; see the documentation on those arms for + // more details. + ( + const $constname:ident : $constty:ident $(,)? + $($tyvar:ident $(: $(? $optbound:ident $(+)?)* $($bound:ident $(+)?)* )?),* + => $trait:ident for $ty:ty + ) => { + impl_or_verify!(@impl { unsafe_impl!( + const $constname: $constty, $($tyvar $(: $(? $optbound +)* $($bound +)*)?),* => $trait for $ty + ); }); + impl_or_verify!(@verify $trait, { + impl<const $constname: $constty, $($tyvar $(: $(? $optbound +)* $($bound +)*)?),*> Subtrait for $ty {} + }); + }; + ( + $($tyvar:ident $(: $(? $optbound:ident $(+)?)* $($bound:ident $(+)?)* )?),* + => $trait:ident for $ty:ty $(; |$candidate:ident| $is_bit_valid:expr)? + ) => { + impl_or_verify!(@impl { unsafe_impl!( + $($tyvar $(: $(? $optbound +)* $($bound +)*)?),* => $trait for $ty + $(; |$candidate| $is_bit_valid)? + ); }); + impl_or_verify!(@verify $trait, { + impl<$($tyvar $(: $(? $optbound +)* $($bound +)*)?),*> Subtrait for $ty {} + }); + }; + (@impl $impl_block:tt) => { + #[cfg(not(any(feature = "derive", test)))] + { $impl_block }; + }; + (@verify $trait:ident, $impl_block:tt) => { + #[cfg(any(feature = "derive", test))] + { + // On some toolchains, `Subtrait` triggers the `dead_code` lint + // because it is implemented but never used. + #[allow(dead_code)] + trait Subtrait: $trait {} + $impl_block + }; + }; +} + +/// Implements `KnownLayout` for a sized type. +macro_rules! impl_known_layout { + ($(const $constvar:ident : $constty:ty, $tyvar:ident $(: ?$optbound:ident)? => $ty:ty),* $(,)?) => { + $(impl_known_layout!(@inner const $constvar: $constty, $tyvar $(: ?$optbound)? => $ty);)* + }; + ($($tyvar:ident $(: ?$optbound:ident)? => $ty:ty),* $(,)?) => { + $(impl_known_layout!(@inner , $tyvar $(: ?$optbound)? => $ty);)* + }; + ($($(#[$attrs:meta])* $ty:ty),*) => { $(impl_known_layout!(@inner , => $(#[$attrs])* $ty);)* }; + (@inner $(const $constvar:ident : $constty:ty)? , $($tyvar:ident $(: ?$optbound:ident)?)? => $(#[$attrs:meta])* $ty:ty) => { + const _: () = { + use core::ptr::NonNull; + + #[allow(non_local_definitions)] + $(#[$attrs])* + // SAFETY: Delegates safety to `DstLayout::for_type`. + unsafe impl<$($tyvar $(: ?$optbound)?)? $(, const $constvar : $constty)?> KnownLayout for $ty { + #[allow(clippy::missing_inline_in_public_items)] + #[cfg_attr(all(coverage_nightly, __ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS), coverage(off))] + fn only_derive_is_allowed_to_implement_this_trait() where Self: Sized {} + + type PointerMetadata = (); + + // SAFETY: `CoreMaybeUninit<T>::LAYOUT` and `T::LAYOUT` are + // identical because `CoreMaybeUninit<T>` has the same size and + // alignment as `T` [1], and `CoreMaybeUninit` admits + // uninitialized bytes in all positions. + // + // [1] Per https://doc.rust-lang.org/1.81.0/std/mem/union.MaybeUninit.html#layout-1: + // + // `MaybeUninit<T>` is guaranteed to have the same size, + // alignment, and ABI as `T` + type MaybeUninit = core::mem::MaybeUninit<Self>; + + const LAYOUT: crate::DstLayout = crate::DstLayout::for_type::<$ty>(); + + // SAFETY: `.cast` preserves address and provenance. 
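The `MaybeUninit` layout guarantee cited in the SAFETY comment above for the `MaybeUninit` associated type is easy to spot-check with `core::mem` alone; a standalone sketch:

    use core::mem::{align_of, size_of, MaybeUninit};

    fn main() {
        // Documented guarantee: `MaybeUninit<T>` has the same size and
        // alignment as `T`, which is what lets the generated `KnownLayout`
        // impl reuse `T`'s layout for its `MaybeUninit` associated type.
        assert_eq!(size_of::<MaybeUninit<u64>>(), size_of::<u64>());
        assert_eq!(align_of::<MaybeUninit<u64>>(), align_of::<u64>());
        assert_eq!(size_of::<MaybeUninit<[u8; 3]>>(), 3);
    }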
+ // + // FIXME(#429): Add documentation to `.cast` that promises that + // it preserves provenance. + #[inline(always)] + fn raw_from_ptr_len(bytes: NonNull<u8>, _meta: ()) -> NonNull<Self> { + bytes.cast::<Self>() + } + + #[inline(always)] + fn pointer_to_metadata(_ptr: *mut Self) -> () { + } + } + }; + }; +} + +/// Implements `KnownLayout` for a type in terms of the implementation of +/// another type with the same representation. +/// +/// # Safety +/// +/// - `$ty` and `$repr` must have the same: +/// - Fixed prefix size +/// - Alignment +/// - (For DSTs) trailing slice element size +/// - It must be valid to perform an `as` cast from `*mut $repr` to `*mut $ty`, +/// and this operation must preserve referent size (ie, `size_of_val_raw`). +macro_rules! unsafe_impl_known_layout { + ($($tyvar:ident: ?Sized + KnownLayout =>)? #[repr($repr:ty)] $ty:ty) => {{ + use core::ptr::NonNull; + + crate::util::macros::__unsafe(); + + #[allow(non_local_definitions)] + // SAFETY: The caller promises that this is sound. + unsafe impl<$($tyvar: ?Sized + KnownLayout)?> KnownLayout for $ty { + #[allow(clippy::missing_inline_in_public_items, dead_code)] + #[cfg_attr(all(coverage_nightly, __ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS), coverage(off))] + fn only_derive_is_allowed_to_implement_this_trait() {} + + type PointerMetadata = <$repr as KnownLayout>::PointerMetadata; + type MaybeUninit = <$repr as KnownLayout>::MaybeUninit; + + const LAYOUT: DstLayout = <$repr as KnownLayout>::LAYOUT; + + // SAFETY: All operations preserve address and provenance. Caller + // has promised that the `as` cast preserves size. + // + // FIXME(#429): Add documentation to `NonNull::new_unchecked` that + // it preserves provenance. + #[inline(always)] + fn raw_from_ptr_len(bytes: NonNull<u8>, meta: <$repr as KnownLayout>::PointerMetadata) -> NonNull<Self> { + #[allow(clippy::as_conversions)] + let ptr = <$repr>::raw_from_ptr_len(bytes, meta).as_ptr() as *mut Self; + // SAFETY: `ptr` was converted from `bytes`, which is non-null. + unsafe { NonNull::new_unchecked(ptr) } + } + + #[inline(always)] + fn pointer_to_metadata(ptr: *mut Self) -> Self::PointerMetadata { + #[allow(clippy::as_conversions)] + let ptr = ptr as *mut $repr; + <$repr>::pointer_to_metadata(ptr) + } + } + }}; +} + +/// Uses `align_of` to confirm that a type or set of types have alignment 1. +/// +/// Note that `align_of<T>` requires `T: Sized`, so this macro doesn't work for +/// unsized types. +macro_rules! assert_unaligned { + ($($tys:ty),*) => { + $( + // We only compile this assertion under `cfg(test)` to avoid taking + // an extra non-dev dependency (and making this crate more expensive + // to compile for our dependents). + #[cfg(test)] + static_assertions::const_assert_eq!(core::mem::align_of::<$tys>(), 1); + )* + }; +} + +/// Emits a function definition as either `const fn` or `fn` depending on +/// whether the current toolchain version supports `const fn` with generic trait +/// bounds. +macro_rules! maybe_const_trait_bounded_fn { + // This case handles both `self` methods (where `self` is by value) and + // non-method functions. Each `$args` may optionally be followed by `: + // $arg_tys:ty`, which can be omitted for `self`. + ($(#[$attr:meta])* $vis:vis const fn $name:ident($($args:ident $(: $arg_tys:ty)?),* $(,)?) $(-> $ret_ty:ty)? $body:block) => { + #[cfg(not(no_zerocopy_generic_bounds_in_const_fn_1_61_0))] + $(#[$attr])* $vis const fn $name($($args $(: $arg_tys)?),*) $(-> $ret_ty)? 
$body + + #[cfg(no_zerocopy_generic_bounds_in_const_fn_1_61_0)] + $(#[$attr])* $vis fn $name($($args $(: $arg_tys)?),*) $(-> $ret_ty)? $body + }; +} + +/// Either panic (if the current Rust toolchain supports panicking in `const +/// fn`) or evaluate a constant that will cause an array indexing error whose +/// error message will include the format string. +/// +/// The type that this expression evaluates to must be `Copy`, or else the +/// non-panicking desugaring will fail to compile. +macro_rules! const_panic { + (@non_panic $($_arg:tt)+) => {{ + // This will type check to whatever type is expected based on the call + // site. + let panic: [_; 0] = []; + // This will always fail (since we're indexing into an array of size 0. + #[allow(unconditional_panic)] + panic[0] + }}; + ($($arg:tt)+) => {{ + #[cfg(not(no_zerocopy_panic_in_const_and_vec_try_reserve_1_57_0))] + panic!($($arg)+); + #[cfg(no_zerocopy_panic_in_const_and_vec_try_reserve_1_57_0)] + const_panic!(@non_panic $($arg)+) + }}; +} + +/// Either assert (if the current Rust toolchain supports panicking in `const +/// fn`) or evaluate the expression and, if it evaluates to `false`, call +/// `const_panic!`. This is used in place of `assert!` in const contexts to +/// accommodate old toolchains. +macro_rules! const_assert { + ($e:expr) => {{ + #[cfg(not(no_zerocopy_panic_in_const_and_vec_try_reserve_1_57_0))] + assert!($e); + #[cfg(no_zerocopy_panic_in_const_and_vec_try_reserve_1_57_0)] + { + let e = $e; + if !e { + let _: () = const_panic!(@non_panic concat!("assertion failed: ", stringify!($e))); + } + } + }}; + ($e:expr, $($args:tt)+) => {{ + #[cfg(not(no_zerocopy_panic_in_const_and_vec_try_reserve_1_57_0))] + assert!($e, $($args)+); + #[cfg(no_zerocopy_panic_in_const_and_vec_try_reserve_1_57_0)] + { + let e = $e; + if !e { + let _: () = const_panic!(@non_panic concat!("assertion failed: ", stringify!($e), ": ", stringify!($arg)), $($args)*); + } + } + }}; +} + +/// Like `const_assert!`, but relative to `debug_assert!`. +macro_rules! const_debug_assert { + ($e:expr $(, $msg:expr)?) => {{ + #[cfg(not(no_zerocopy_panic_in_const_and_vec_try_reserve_1_57_0))] + debug_assert!($e $(, $msg)?); + #[cfg(no_zerocopy_panic_in_const_and_vec_try_reserve_1_57_0)] + { + // Use this (rather than `#[cfg(debug_assertions)]`) to ensure that + // `$e` is always compiled even if it will never be evaluated at + // runtime. + if cfg!(debug_assertions) { + let e = $e; + if !e { + let _: () = const_panic!(@non_panic concat!("assertion failed: ", stringify!($e) $(, ": ", $msg)?)); + } + } + } + }} +} + +/// Either invoke `unreachable!()` or `loop {}` depending on whether the Rust +/// toolchain supports panicking in `const fn`. +macro_rules! const_unreachable { + () => {{ + #[cfg(not(no_zerocopy_panic_in_const_and_vec_try_reserve_1_57_0))] + unreachable!(); + + #[cfg(no_zerocopy_panic_in_const_and_vec_try_reserve_1_57_0)] + loop {} + }}; +} + +/// Asserts at compile time that `$condition` is true for `Self` or the given +/// `$tyvar`s. Unlike `const_assert`, this is *strictly* a compile-time check; +/// it cannot be evaluated in a runtime context. The condition is checked after +/// monomorphization and, upon failure, emits a compile error. +macro_rules! static_assert { + (Self $(: $(? $optbound:ident $(+)?)* $($bound:ident $(+)?)* )? => $condition:expr $(, $args:tt)*) => {{ + trait StaticAssert { + const ASSERT: bool; + } + + impl<T $(: $(? 
$optbound +)* $($bound +)*)?> StaticAssert for T { + const ASSERT: bool = { + const_assert!($condition $(, $args)*); + $condition + }; + } + + const_assert!(<Self as StaticAssert>::ASSERT); + }}; + ($($tyvar:ident $(: $(? $optbound:ident $(+)?)* $($bound:ident $(+)?)* )?),* => $condition:expr $(, $args:tt)*) => {{ + trait StaticAssert { + const ASSERT: bool; + } + + // NOTE: We use `PhantomData` so we can support unsized types. + impl<$($tyvar $(: $(? $optbound +)* $($bound +)*)?,)*> StaticAssert for ($(core::marker::PhantomData<$tyvar>,)*) { + const ASSERT: bool = { + const_assert!($condition $(, $args)*); + $condition + }; + } + + const_assert!(<($(core::marker::PhantomData<$tyvar>,)*) as StaticAssert>::ASSERT); + }}; +} + +/// Assert at compile time that `tyvar` does not have a zero-sized DST +/// component. +macro_rules! static_assert_dst_is_not_zst { + ($tyvar:ident) => {{ + use crate::KnownLayout; + static_assert!($tyvar: ?Sized + KnownLayout => { + let dst_is_zst = match $tyvar::LAYOUT.size_info { + crate::SizeInfo::Sized { .. } => false, + crate::SizeInfo::SliceDst(TrailingSliceLayout { elem_size, .. }) => { + elem_size == 0 + } + }; + !dst_is_zst + }, "cannot call this method on a dynamically-sized type whose trailing slice element is zero-sized"); + }} +} + +/// # Safety +/// +/// The caller must ensure that the cast does not grow the size of the referent. +/// Preserving or shrinking the size of the referent are both acceptable. +macro_rules! cast { + ($p:expr) => {{ + let ptr: crate::pointer::PtrInner<'_, _> = $p; + let ptr = ptr.as_non_null(); + let ptr = ptr.as_ptr(); + #[allow(clippy::as_conversions)] + let ptr = ptr as *mut _; + #[allow(unused_unsafe)] + // SAFETY: `NonNull::as_ptr` returns a non-null pointer, so the argument + // to `NonNull::new_unchecked` is also non-null. + let ptr = unsafe { core::ptr::NonNull::new_unchecked(ptr) }; + // SAFETY: The caller promises that the cast preserves or shrinks + // referent size. By invariant on `$p: PtrInner` (guaranteed by type + // annotation above), `$p` refers to a byte range entirely contained + // inside of a single allocation, has provenance for that whole byte + // range, and will not outlive the allocation. All of these conditions + // are preserved when preserving or shrinking referent size. + crate::pointer::PtrInner::new(ptr) + }}; +} + +/// Implements `TransmuteFrom` and `SizeEq` for `T` and `$wrapper<T>`. +/// +/// # Safety +/// +/// `T` and `$wrapper<T>` must have the same bit validity, and must have the +/// same size in the sense of `SizeEq`. +macro_rules! unsafe_impl_for_transparent_wrapper { + (T $(: ?$optbound:ident)? => $wrapper:ident<T>) => {{ + crate::util::macros::__unsafe(); + + use crate::pointer::{TransmuteFrom, PtrInner, SizeEq, invariant::Valid}; + + // SAFETY: The caller promises that `T` and `$wrapper<T>` have the same + // bit validity. + unsafe impl<T $(: ?$optbound)?> TransmuteFrom<T, Valid, Valid> for $wrapper<T> {} + // SAFETY: See previous safety comment. + unsafe impl<T $(: ?$optbound)?> TransmuteFrom<$wrapper<T>, Valid, Valid> for T {} + // SAFETY: The caller promises that `T` and `$wrapper<T>` satisfy + // `SizeEq`. + unsafe impl<T $(: ?$optbound)?> SizeEq<T> for $wrapper<T> { + #[inline(always)] + fn cast_from_raw(t: PtrInner<'_, T>) -> PtrInner<'_, $wrapper<T>> { + // SAFETY: See previous safety comment. + unsafe { cast!(t) } + } + } + // SAFETY: See previous safety comment. 
+ unsafe impl<T $(: ?$optbound)?> SizeEq<$wrapper<T>> for T { + #[inline(always)] + fn cast_from_raw(t: PtrInner<'_, $wrapper<T>>) -> PtrInner<'_, T> { + // SAFETY: See previous safety comment. + unsafe { cast!(t) } + } + } + }}; +} + +macro_rules! impl_transitive_transmute_from { + ($($tyvar:ident $(: ?$optbound:ident)?)? => $t:ty => $u:ty => $v:ty) => { + const _: () = { + use crate::pointer::{TransmuteFrom, PtrInner, SizeEq, invariant::Valid}; + + // SAFETY: Since `$u: SizeEq<$t>` and `$v: SizeEq<U>`, this impl is + // transitively sound. + unsafe impl<$($tyvar $(: ?$optbound)?)?> SizeEq<$t> for $v + where + $u: SizeEq<$t>, + $v: SizeEq<$u>, + { + #[inline(always)] + fn cast_from_raw(t: PtrInner<'_, $t>) -> PtrInner<'_, $v> { + let u = <$u as SizeEq<_>>::cast_from_raw(t); + <$v as SizeEq<_>>::cast_from_raw(u) + } + } + + // SAFETY: Since `$u: TransmuteFrom<$t, Valid, Valid>`, it is sound + // to transmute a bit-valid `$t` to a bit-valid `$u`. Since `$v: + // TransmuteFrom<$u, Valid, Valid>`, it is sound to transmute that + // bit-valid `$u` to a bit-valid `$v`. + unsafe impl<$($tyvar $(: ?$optbound)?)?> TransmuteFrom<$t, Valid, Valid> for $v + where + $u: TransmuteFrom<$t, Valid, Valid>, + $v: TransmuteFrom<$u, Valid, Valid>, + {} + }; + }; +} + +#[rustfmt::skip] +macro_rules! impl_size_eq { + ($t:ty, $u:ty) => { + const _: () = { + use crate::{KnownLayout, pointer::{PtrInner, SizeEq}}; + + static_assert!(=> { + let t = <$t as KnownLayout>::LAYOUT; + let u = <$u as KnownLayout>::LAYOUT; + t.align.get() >= u.align.get() && match (t.size_info, u.size_info) { + (SizeInfo::Sized { size: t }, SizeInfo::Sized { size: u }) => t == u, + ( + SizeInfo::SliceDst(TrailingSliceLayout { offset: t_offset, elem_size: t_elem_size }), + SizeInfo::SliceDst(TrailingSliceLayout { offset: u_offset, elem_size: u_elem_size }) + ) => t_offset == u_offset && t_elem_size == u_elem_size, + _ => false, + } + }); + + // SAFETY: See inline. + unsafe impl SizeEq<$t> for $u { + #[inline(always)] + fn cast_from_raw(t: PtrInner<'_, $t>) -> PtrInner<'_, $u> { + // SAFETY: We've asserted that their + // `KnownLayout::LAYOUT.size_info`s are equal, and so this + // cast is guaranteed to preserve address and referent size. + // It trivially preserves provenance. + #[allow(clippy::multiple_unsafe_ops_per_block)] + unsafe { cast!(t) } + } + } + // SAFETY: See previous safety comment. + unsafe impl SizeEq<$u> for $t { + #[inline(always)] + fn cast_from_raw(u: PtrInner<'_, $u>) -> PtrInner<'_, $t> { + // SAFETY: See previous safety comment. + #[allow(clippy::multiple_unsafe_ops_per_block)] + unsafe { cast!(u) } + } + } + }; + }; +} + +/// Invokes `$blk` in a context in which `$src<$t>` and `$dst<$u>` implement +/// `SizeEq`. +/// +/// This macro emits code which implements `SizeEq`, and ensures that the impl +/// is sound via PME. +/// +/// # Safety +/// +/// Inside of `$blk`, the caller must only use `$src` and `$dst` as `$src<$t>` +/// and `$dst<$u>`. The caller must not use `$src` or `$dst` to wrap any other +/// types. +macro_rules! unsafe_with_size_eq { + (<$src:ident<$t:ident>, $dst:ident<$u:ident>> $blk:expr) => {{ + crate::util::macros::__unsafe(); + + use crate::{KnownLayout, pointer::PtrInner}; + + #[repr(transparent)] + struct $src<T: ?Sized>(T); + + #[repr(transparent)] + struct $dst<U: ?Sized>(U); + + // SAFETY: Since `$src<T>` is a `#[repr(transparent)]` wrapper around + // `T`, it has the same bit validity and size as `T`. 
+ unsafe_impl_for_transparent_wrapper!(T: ?Sized => $src<T>); + + // SAFETY: Since `$dst<T>` is a `#[repr(transparent)]` wrapper around + // `T`, it has the same bit validity and size as `T`. + unsafe_impl_for_transparent_wrapper!(T: ?Sized => $dst<T>); + + // SAFETY: `$src<T>` is a `#[repr(transparent)]` wrapper around `T` with + // no added semantics. + unsafe impl<T: ?Sized> InvariantsEq<$src<T>> for T {} + + // SAFETY: `$dst<T>` is a `#[repr(transparent)]` wrapper around `T` with + // no added semantics. + unsafe impl<T: ?Sized> InvariantsEq<$dst<T>> for T {} + + // SAFETY: See inline for the soundness of this impl when + // `cast_from_raw` is actually instantiated (otherwise, PMEs may not be + // triggered). + // + // We manually instantiate `cast_from_raw` below to ensure that this PME + // can be triggered, and the caller promises not to use `$src` and + // `$dst` with any wrapped types other than `$t` and `$u` respectively. + unsafe impl<T: ?Sized, U: ?Sized> SizeEq<$src<T>> for $dst<U> + where + T: KnownLayout<PointerMetadata = usize>, + U: KnownLayout<PointerMetadata = usize>, + { + fn cast_from_raw(src: PtrInner<'_, $src<T>>) -> PtrInner<'_, Self> { + // SAFETY: `crate::layout::cast_from_raw` promises to satisfy + // the safety invariants of `SizeEq::cast_from_raw`, or to + // generate a PME. Since `$src<T>` and `$dst<U>` are + // `#[repr(transparent)]` wrappers around `T` and `U` + // respectively, a `cast_from_raw` impl which satisfies the + // conditions for casting from `NonNull<T>` to `NonNull<U>` also + // satisfies the conditions for casting from `NonNull<$src<T>>` + // to `NonNull<$dst<U>>`. + + // SAFETY: By the preceding safety comment, this cast preserves + // referent size. + #[allow(clippy::multiple_unsafe_ops_per_block)] + let src: PtrInner<'_, T> = unsafe { cast!(src) }; + let dst: PtrInner<'_, U> = crate::layout::cast_from_raw(src); + // SAFETY: By the preceding safety comment, this cast preserves + // referent size. + #[allow(clippy::multiple_unsafe_ops_per_block)] + unsafe { cast!(dst) } + } + } + + // See safety comment on the preceding `unsafe impl` block for an + // explanation of why we need this block. + if 1 == 0 { + let ptr = <$t as KnownLayout>::raw_dangling(); + #[allow(unused_unsafe)] + // SAFETY: This call is never executed. + #[allow(clippy::multiple_unsafe_ops_per_block)] + let ptr = unsafe { crate::pointer::PtrInner::new(ptr) }; + #[allow(unused_unsafe)] + // SAFETY: This call is never executed. + #[allow(clippy::multiple_unsafe_ops_per_block)] + let ptr = unsafe { cast!(ptr) }; + let _ = <$dst<$u> as SizeEq<$src<$t>>>::cast_from_raw(ptr); + } + + impl_for_transmute_from!(T: ?Sized + TryFromBytes => TryFromBytes for $src<T>[<T>]); + impl_for_transmute_from!(T: ?Sized + FromBytes => FromBytes for $src<T>[<T>]); + impl_for_transmute_from!(T: ?Sized + FromZeros => FromZeros for $src<T>[<T>]); + impl_for_transmute_from!(T: ?Sized + IntoBytes => IntoBytes for $src<T>[<T>]); + + impl_for_transmute_from!(U: ?Sized + TryFromBytes => TryFromBytes for $dst<U>[<U>]); + impl_for_transmute_from!(U: ?Sized + FromBytes => FromBytes for $dst<U>[<U>]); + impl_for_transmute_from!(U: ?Sized + FromZeros => FromZeros for $dst<U>[<U>]); + impl_for_transmute_from!(U: ?Sized + IntoBytes => IntoBytes for $dst<U>[<U>]); + + // SAFETY: `$src<T>` is a `#[repr(transparent)]` wrapper around `T`, and + // so permits interior mutation exactly when `T` does. 
+ unsafe_impl!(T: ?Sized + Immutable => Immutable for $src<T>); + + // SAFETY: `$dst<T>` is a `#[repr(transparent)]` wrapper around `T`, and + // so permits interior mutation exactly when `T` does. + unsafe_impl!(T: ?Sized + Immutable => Immutable for $dst<T>); + + $blk + }}; +} + +/// A no-op `unsafe fn` for use in macro expansions. +/// +/// Calling this function in a macro expansion ensures that the macro's caller +/// must wrap the call in `unsafe { ... }`. +pub(crate) const unsafe fn __unsafe() {} diff --git a/vendor/zerocopy/src/util/mod.rs b/vendor/zerocopy/src/util/mod.rs new file mode 100644 index 00000000..980c2700 --- /dev/null +++ b/vendor/zerocopy/src/util/mod.rs @@ -0,0 +1,856 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +#[macro_use] +pub(crate) mod macros; + +#[doc(hidden)] +pub mod macro_util; + +use core::{ + marker::PhantomData, + mem::{self, ManuallyDrop}, + num::NonZeroUsize, + ptr::NonNull, +}; + +use super::*; + +/// Like [`PhantomData`], but [`Send`] and [`Sync`] regardless of whether the +/// wrapped `T` is. +pub(crate) struct SendSyncPhantomData<T: ?Sized>(PhantomData<T>); + +// SAFETY: `SendSyncPhantomData` does not enable any behavior which isn't sound +// to be called from multiple threads. +unsafe impl<T: ?Sized> Send for SendSyncPhantomData<T> {} +// SAFETY: `SendSyncPhantomData` does not enable any behavior which isn't sound +// to be called from multiple threads. +unsafe impl<T: ?Sized> Sync for SendSyncPhantomData<T> {} + +impl<T: ?Sized> Default for SendSyncPhantomData<T> { + fn default() -> SendSyncPhantomData<T> { + SendSyncPhantomData(PhantomData) + } +} + +impl<T: ?Sized> PartialEq for SendSyncPhantomData<T> { + fn eq(&self, _other: &Self) -> bool { + true + } +} + +impl<T: ?Sized> Eq for SendSyncPhantomData<T> {} + +impl<T: ?Sized> Clone for SendSyncPhantomData<T> { + fn clone(&self) -> Self { + SendSyncPhantomData(PhantomData) + } +} + +#[cfg(miri)] +extern "Rust" { + /// Miri-provided intrinsic that marks the pointer `ptr` as aligned to + /// `align`. + /// + /// This intrinsic is used to inform Miri's symbolic alignment checker that + /// a pointer is aligned, even if Miri cannot statically deduce that fact. + /// This is often required when performing raw pointer arithmetic or casts + /// where the alignment is guaranteed by runtime checks or invariants that + /// Miri is not aware of. + pub(crate) fn miri_promise_symbolic_alignment(ptr: *const (), align: usize); +} + +pub(crate) trait AsAddress { + fn addr(self) -> usize; +} + +impl<T: ?Sized> AsAddress for &T { + #[inline(always)] + fn addr(self) -> usize { + let ptr: *const T = self; + AsAddress::addr(ptr) + } +} + +impl<T: ?Sized> AsAddress for &mut T { + #[inline(always)] + fn addr(self) -> usize { + let ptr: *const T = self; + AsAddress::addr(ptr) + } +} + +impl<T: ?Sized> AsAddress for NonNull<T> { + #[inline(always)] + fn addr(self) -> usize { + AsAddress::addr(self.as_ptr()) + } +} + +impl<T: ?Sized> AsAddress for *const T { + #[inline(always)] + fn addr(self) -> usize { + // FIXME(#181), FIXME(https://github.com/rust-lang/rust/issues/95228): + // Use `.addr()` instead of `as usize` once it's stable, and get rid of + // this `allow`. 
Currently, `as usize` is the only way to accomplish + // this. + #[allow(clippy::as_conversions)] + #[cfg_attr( + __ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS, + allow(lossy_provenance_casts) + )] + return self.cast::<()>() as usize; + } +} + +impl<T: ?Sized> AsAddress for *mut T { + #[inline(always)] + fn addr(self) -> usize { + let ptr: *const T = self; + AsAddress::addr(ptr) + } +} + +/// Validates that `t` is aligned to `align_of::<U>()`. +#[inline(always)] +pub(crate) fn validate_aligned_to<T: AsAddress, U>(t: T) -> Result<(), AlignmentError<(), U>> { + // `mem::align_of::<U>()` is guaranteed to return a non-zero value, which in + // turn guarantees that this mod operation will not panic. + #[allow(clippy::arithmetic_side_effects)] + let remainder = t.addr() % mem::align_of::<U>(); + if remainder == 0 { + Ok(()) + } else { + // SAFETY: We just confirmed that `t.addr() % align_of::<U>() != 0`. + // That's only possible if `align_of::<U>() > 1`. + Err(unsafe { AlignmentError::new_unchecked(()) }) + } +} + +/// Returns the bytes needed to pad `len` to the next multiple of `align`. +/// +/// This function assumes that align is a power of two; there are no guarantees +/// on the answer it gives if this is not the case. +#[cfg_attr( + kani, + kani::requires(len <= isize::MAX as usize), + kani::requires(align.is_power_of_two()), + kani::ensures(|&p| (len + p) % align.get() == 0), + // Ensures that we add the minimum required padding. + kani::ensures(|&p| p < align.get()), +)] +pub(crate) const fn padding_needed_for(len: usize, align: NonZeroUsize) -> usize { + #[cfg(kani)] + #[kani::proof_for_contract(padding_needed_for)] + fn proof() { + padding_needed_for(kani::any(), kani::any()); + } + + // Abstractly, we want to compute: + // align - (len % align). + // Handling the case where len%align is 0. + // Because align is a power of two, len % align = len & (align-1). + // Guaranteed not to underflow as align is nonzero. + #[allow(clippy::arithmetic_side_effects)] + let mask = align.get() - 1; + + // To efficiently subtract this value from align, we can use the bitwise + // complement. + // Note that ((!len) & (align-1)) gives us a number that with (len & + // (align-1)) sums to align-1. So subtracting 1 from x before taking the + // complement subtracts `len` from `align`. Some quick inspection of + // cases shows that this also handles the case where `len % align = 0` + // correctly too: len-1 % align then equals align-1, so the complement mod + // align will be 0, as desired. 
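+    // For example, with len = 300 and align = 32: mask = 31,
+    // !(300 - 1) & 31 = !299 & 31 = 20, and 300 + 20 = 320, which is a
+    // multiple of 32. The same values appear in the `padding1`/`padding2`
+    // queries in the SMT transcript below.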
+ // + // The following reasoning can be verified quickly by an SMT solver + // supporting the theory of bitvectors: + // ```smtlib + // ; Naive implementation of padding + // (define-fun padding1 ( + // (len (_ BitVec 32)) + // (align (_ BitVec 32))) (_ BitVec 32) + // (ite + // (= (_ bv0 32) (bvand len (bvsub align (_ bv1 32)))) + // (_ bv0 32) + // (bvsub align (bvand len (bvsub align (_ bv1 32)))))) + // + // ; The implementation below + // (define-fun padding2 ( + // (len (_ BitVec 32)) + // (align (_ BitVec 32))) (_ BitVec 32) + // (bvand (bvnot (bvsub len (_ bv1 32))) (bvsub align (_ bv1 32)))) + // + // (define-fun is-power-of-two ((x (_ BitVec 32))) Bool + // (= (_ bv0 32) (bvand x (bvsub x (_ bv1 32))))) + // + // (declare-const len (_ BitVec 32)) + // (declare-const align (_ BitVec 32)) + // ; Search for a case where align is a power of two and padding2 disagrees + // ; with padding1 + // (assert (and (is-power-of-two align) + // (not (= (padding1 len align) (padding2 len align))))) + // (simplify (padding1 (_ bv300 32) (_ bv32 32))) ; 20 + // (simplify (padding2 (_ bv300 32) (_ bv32 32))) ; 20 + // (simplify (padding1 (_ bv322 32) (_ bv32 32))) ; 30 + // (simplify (padding2 (_ bv322 32) (_ bv32 32))) ; 30 + // (simplify (padding1 (_ bv8 32) (_ bv8 32))) ; 0 + // (simplify (padding2 (_ bv8 32) (_ bv8 32))) ; 0 + // (check-sat) ; unsat, also works for 64-bit bitvectors + // ``` + !(len.wrapping_sub(1)) & mask +} + +/// Rounds `n` down to the largest value `m` such that `m <= n` and `m % align +/// == 0`. +/// +/// # Panics +/// +/// May panic if `align` is not a power of two. Even if it doesn't panic in this +/// case, it will produce nonsense results. +#[inline(always)] +#[cfg_attr( + kani, + kani::requires(align.is_power_of_two()), + kani::ensures(|&m| m <= n && m % align.get() == 0), + // Guarantees that `m` is the *largest* value such that `m % align == 0`. + kani::ensures(|&m| { + // If this `checked_add` fails, then the next multiple would wrap + // around, which trivially satisfies the "largest value" requirement. + m.checked_add(align.get()).map(|next_mul| next_mul > n).unwrap_or(true) + }) +)] +pub(crate) const fn round_down_to_next_multiple_of_alignment( + n: usize, + align: NonZeroUsize, +) -> usize { + #[cfg(kani)] + #[kani::proof_for_contract(round_down_to_next_multiple_of_alignment)] + fn proof() { + round_down_to_next_multiple_of_alignment(kani::any(), kani::any()); + } + + let align = align.get(); + #[cfg(not(no_zerocopy_panic_in_const_and_vec_try_reserve_1_57_0))] + debug_assert!(align.is_power_of_two()); + + // Subtraction can't underflow because `align.get() >= 1`. + #[allow(clippy::arithmetic_side_effects)] + let mask = !(align - 1); + n & mask +} + +pub(crate) const fn max(a: NonZeroUsize, b: NonZeroUsize) -> NonZeroUsize { + if a.get() < b.get() { + b + } else { + a + } +} + +pub(crate) const fn min(a: NonZeroUsize, b: NonZeroUsize) -> NonZeroUsize { + if a.get() > b.get() { + b + } else { + a + } +} + +/// Copies `src` into the prefix of `dst`. +/// +/// # Safety +/// +/// The caller guarantees that `src.len() <= dst.len()`. 
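+///
+/// # Example
+///
+/// A minimal sketch of the intended usage (illustrative; this is a
+/// crate-internal function, so the snippet is not a runnable doctest):
+///
+/// ```ignore
+/// let src = [1u8, 2, 3];
+/// let mut dst = [0u8; 8];
+/// // SAFETY: `src.len() <= dst.len()`.
+/// unsafe { copy_unchecked(&src, &mut dst) };
+/// assert_eq!(&dst[..3], &src[..]);
+/// ```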
+#[inline(always)] +pub(crate) unsafe fn copy_unchecked(src: &[u8], dst: &mut [u8]) { + debug_assert!(src.len() <= dst.len()); + // SAFETY: This invocation satisfies the safety contract of + // copy_nonoverlapping [1]: + // - `src.as_ptr()` is trivially valid for reads of `src.len()` bytes + // - `dst.as_ptr()` is valid for writes of `src.len()` bytes, because the + // caller has promised that `src.len() <= dst.len()` + // - `src` and `dst` are, trivially, properly aligned + // - the region of memory beginning at `src` with a size of `src.len()` + // bytes does not overlap with the region of memory beginning at `dst` + // with the same size, because `dst` is derived from an exclusive + // reference. + unsafe { + core::ptr::copy_nonoverlapping(src.as_ptr(), dst.as_mut_ptr(), src.len()); + }; +} + +/// Unsafely transmutes the given `src` into a type `Dst`. +/// +/// # Safety +/// +/// The value `src` must be a valid instance of `Dst`. +#[inline(always)] +pub(crate) const unsafe fn transmute_unchecked<Src, Dst>(src: Src) -> Dst { + static_assert!(Src, Dst => core::mem::size_of::<Src>() == core::mem::size_of::<Dst>()); + + #[repr(C)] + union Transmute<Src, Dst> { + src: ManuallyDrop<Src>, + dst: ManuallyDrop<Dst>, + } + + // SAFETY: Since `Transmute<Src, Dst>` is `#[repr(C)]`, its `src` and `dst` + // fields both start at the same offset and the types of those fields are + // transparent wrappers around `Src` and `Dst` [1]. Consequently, + // initializing `Transmute` with with `src` and then reading out `dst` is + // equivalent to transmuting from `Src` to `Dst` [2]. Transmuting from `src` + // to `Dst` is valid because — by contract on the caller — `src` is a valid + // instance of `Dst`. + // + // [1] Per https://doc.rust-lang.org/1.82.0/std/mem/struct.ManuallyDrop.html: + // + // `ManuallyDrop<T>` is guaranteed to have the same layout and bit + // validity as `T`, and is subject to the same layout optimizations as + // `T`. + // + // [2] Per https://doc.rust-lang.org/1.82.0/reference/items/unions.html#reading-and-writing-union-fields: + // + // Effectively, writing to and then reading from a union with the C + // representation is analogous to a transmute from the type used for + // writing to the type used for reading. + unsafe { ManuallyDrop::into_inner(Transmute { src: ManuallyDrop::new(src) }.dst) } +} + +/// Uses `allocate` to create a `Box<T>`. +/// +/// # Errors +/// +/// Returns an error on allocation failure. Allocation failure is guaranteed +/// never to cause a panic or an abort. +/// +/// # Safety +/// +/// `allocate` must be either `alloc::alloc::alloc` or +/// `alloc::alloc::alloc_zeroed`. The referent of the box returned by `new_box` +/// has the same bit-validity as the referent of the pointer returned by the +/// given `allocate` and sufficient size to store `T` with `meta`. +#[must_use = "has no side effects (other than allocation)"] +#[cfg(feature = "alloc")] +#[inline] +pub(crate) unsafe fn new_box<T>( + meta: T::PointerMetadata, + allocate: unsafe fn(core::alloc::Layout) -> *mut u8, +) -> Result<alloc::boxed::Box<T>, AllocError> +where + T: ?Sized + crate::KnownLayout, +{ + let size = match T::size_for_metadata(meta) { + Some(size) => size, + None => return Err(AllocError), + }; + + let align = T::LAYOUT.align.get(); + // On stable Rust versions <= 1.64.0, `Layout::from_size_align` has a bug in + // which sufficiently-large allocations (those which, when rounded up to the + // alignment, overflow `isize`) are not rejected, which can cause undefined + // behavior. 
See #64 for details. + // + // FIXME(#67): Once our MSRV is > 1.64.0, remove this assertion. + #[allow(clippy::as_conversions)] + let max_alloc = (isize::MAX as usize).saturating_sub(align); + if size > max_alloc { + return Err(AllocError); + } + + // FIXME(https://github.com/rust-lang/rust/issues/55724): Use + // `Layout::repeat` once it's stabilized. + let layout = Layout::from_size_align(size, align).or(Err(AllocError))?; + + let ptr = if layout.size() != 0 { + // SAFETY: By contract on the caller, `allocate` is either + // `alloc::alloc::alloc` or `alloc::alloc::alloc_zeroed`. The above + // check ensures their shared safety precondition: that the supplied + // layout is not zero-sized type [1]. + // + // [1] Per https://doc.rust-lang.org/1.81.0/std/alloc/trait.GlobalAlloc.html#tymethod.alloc: + // + // This function is unsafe because undefined behavior can result if + // the caller does not ensure that layout has non-zero size. + let ptr = unsafe { allocate(layout) }; + match NonNull::new(ptr) { + Some(ptr) => ptr, + None => return Err(AllocError), + } + } else { + let align = T::LAYOUT.align.get(); + + // We use `transmute` instead of an `as` cast since Miri (with strict + // provenance enabled) notices and complains that an `as` cast creates a + // pointer with no provenance. Miri isn't smart enough to realize that + // we're only executing this branch when we're constructing a zero-sized + // `Box`, which doesn't require provenance. + // + // SAFETY: any initialized bit sequence is a bit-valid `*mut u8`. All + // bits of a `usize` are initialized. + // + // `#[allow(unknown_lints)]` is for `integer_to_ptr_transmutes` + #[allow(unknown_lints)] + #[allow(clippy::useless_transmute, integer_to_ptr_transmutes)] + let dangling = unsafe { mem::transmute::<usize, *mut u8>(align) }; + // SAFETY: `dangling` is constructed from `T::LAYOUT.align`, which is a + // `NonZeroUsize`, which is guaranteed to be non-zero. + // + // `Box<[T]>` does not allocate when `T` is zero-sized or when `len` is + // zero, but it does require a non-null dangling pointer for its + // allocation. + // + // FIXME(https://github.com/rust-lang/rust/issues/95228): Use + // `std::ptr::without_provenance` once it's stable. That may optimize + // better. As written, Rust may assume that this consumes "exposed" + // provenance, and thus Rust may have to assume that this may consume + // provenance from any pointer whose provenance has been exposed. + unsafe { NonNull::new_unchecked(dangling) } + }; + + let ptr = T::raw_from_ptr_len(ptr, meta); + + // FIXME(#429): Add a "SAFETY" comment and remove this `allow`. Make sure to + // include a justification that `ptr.as_ptr()` is validly-aligned in the ZST + // case (in which we manually construct a dangling pointer) and to justify + // why `Box` is safe to drop (it's because `allocate` uses the system + // allocator). + #[allow(clippy::undocumented_unsafe_blocks)] + Ok(unsafe { alloc::boxed::Box::from_raw(ptr.as_ptr()) }) +} + +mod len_of { + use super::*; + + /// A witness type for metadata of a valid instance of `&T`. + pub(crate) struct MetadataOf<T: ?Sized + KnownLayout> { + /// # Safety + /// + /// The size of an instance of `&T` with the given metadata is not + /// larger than `isize::MAX`. 
+ meta: T::PointerMetadata, + _p: PhantomData<T>, + } + + impl<T: ?Sized + KnownLayout> Copy for MetadataOf<T> {} + impl<T: ?Sized + KnownLayout> Clone for MetadataOf<T> { + fn clone(&self) -> Self { + *self + } + } + + impl<T: ?Sized> MetadataOf<T> + where + T: KnownLayout, + { + /// Returns `None` if `meta` is greater than `t`'s metadata. + #[inline(always)] + pub(crate) fn new_in_bounds(t: &T, meta: usize) -> Option<Self> + where + T: KnownLayout<PointerMetadata = usize>, + { + if meta <= Ptr::from_ref(t).len() { + // SAFETY: We have checked that `meta` is not greater than `t`'s + // metadata, which, by invariant on `&T`, addresses no more than + // `isize::MAX` bytes [1][2]. + // + // [1] Per https://doc.rust-lang.org/1.85.0/std/primitive.reference.html#safety: + // + // For all types, `T: ?Sized`, and for all `t: &T` or `t: + // &mut T`, when such values cross an API boundary, the + // following invariants must generally be upheld: + // + // * `t` is non-null + // * `t` is aligned to `align_of_val(t)` + // * if `size_of_val(t) > 0`, then `t` is dereferenceable for + // `size_of_val(t)` many bytes + // + // If `t` points at address `a`, being "dereferenceable" for + // N bytes means that the memory range `[a, a + N)` is all + // contained within a single allocated object. + // + // [2] Per https://doc.rust-lang.org/1.85.0/std/ptr/index.html#allocated-object: + // + // For any allocated object with `base` address, `size`, and + // a set of `addresses`, the following are guaranteed: + // - For all addresses `a` in `addresses`, `a` is in the + // range `base .. (base + size)` (note that this requires + // `a < base + size`, not `a <= base + size`) + // - `base` is not equal to [`null()`] (i.e., the address + // with the numerical value 0) + // - `base + size <= usize::MAX` + // - `size <= isize::MAX` + Some(unsafe { Self::new_unchecked(meta) }) + } else { + None + } + } + + /// # Safety + /// + /// The size of an instance of `&T` with the given metadata is not + /// larger than `isize::MAX`. + pub(crate) unsafe fn new_unchecked(meta: T::PointerMetadata) -> Self { + // SAFETY: The caller has promised that the size of an instance of + // `&T` with the given metadata is not larger than `isize::MAX`. + Self { meta, _p: PhantomData } + } + + pub(crate) fn get(&self) -> T::PointerMetadata + where + T::PointerMetadata: Copy, + { + self.meta + } + + #[inline] + pub(crate) fn padding_needed_for(&self) -> usize + where + T: KnownLayout<PointerMetadata = usize>, + { + let trailing_slice_layout = crate::trailing_slice_layout::<T>(); + + // FIXME(#67): Remove this allow. See NumExt for more details. + #[allow( + unstable_name_collisions, + clippy::incompatible_msrv, + clippy::multiple_unsafe_ops_per_block + )] + // SAFETY: By invariant on `self`, a `&T` with metadata `self.meta` + // describes an object of size `<= isize::MAX`. This computes the + // size of such a `&T` without any trailing padding, and so neither + // the multiplication nor the addition will overflow. 
+ let unpadded_size = unsafe { + let trailing_size = self.meta.unchecked_mul(trailing_slice_layout.elem_size); + trailing_size.unchecked_add(trailing_slice_layout.offset) + }; + + util::padding_needed_for(unpadded_size, T::LAYOUT.align) + } + + #[inline(always)] + pub(crate) fn validate_cast_and_convert_metadata( + addr: usize, + bytes_len: MetadataOf<[u8]>, + cast_type: CastType, + meta: Option<T::PointerMetadata>, + ) -> Result<(MetadataOf<T>, MetadataOf<[u8]>), MetadataCastError> { + let layout = match meta { + None => T::LAYOUT, + // This can return `None` if the metadata describes an object + // which can't fit in an `isize`. + Some(meta) => { + let size = match T::size_for_metadata(meta) { + Some(size) => size, + None => return Err(MetadataCastError::Size), + }; + DstLayout { + align: T::LAYOUT.align, + size_info: crate::SizeInfo::Sized { size }, + statically_shallow_unpadded: false, + } + } + }; + // Lemma 0: By contract on `validate_cast_and_convert_metadata`, if + // the result is `Ok(..)`, then a `&T` with `elems` trailing slice + // elements is no larger in size than `bytes_len.get()`. + let (elems, split_at) = + layout.validate_cast_and_convert_metadata(addr, bytes_len.get(), cast_type)?; + let elems = T::PointerMetadata::from_elem_count(elems); + + // For a slice DST type, if `meta` is `Some(elems)`, then we + // synthesize `layout` to describe a sized type whose size is equal + // to the size of the instance that we are asked to cast. For sized + // types, `validate_cast_and_convert_metadata` returns `elems == 0`. + // Thus, in this case, we need to use the `elems` passed by the + // caller, not the one returned by + // `validate_cast_and_convert_metadata`. + // + // Lemma 1: A `&T` with `elems` trailing slice elements is no larger + // in size than `bytes_len.get()`. Proof: + // - If `meta` is `None`, then `elems` satisfies this condition by + // Lemma 0. + // - If `meta` is `Some(meta)`, then `layout` describes an object + // whose size is equal to the size of an `&T` with `meta` + // metadata. By Lemma 0, that size is not larger than + // `bytes_len.get()`. + // + // Lemma 2: A `&T` with `elems` trailing slice elements is no larger + // than `isize::MAX` bytes. Proof: By Lemma 1, a `&T` with metadata + // `elems` is not larger in size than `bytes_len.get()`. By + // invariant on `MetadataOf<[u8]>`, a `&[u8]` with metadata + // `bytes_len` is not larger than `isize::MAX`. Because + // `size_of::<u8>()` is `1`, a `&[u8]` with metadata `bytes_len` has + // size `bytes_len.get()` bytes. Therefore, a `&T` with metadata + // `elems` has size not larger than `isize::MAX`. + let elems = meta.unwrap_or(elems); + + // SAFETY: See Lemma 2. + let elems = unsafe { MetadataOf::new_unchecked(elems) }; + + // SAFETY: Let `size` be the size of a `&T` with metadata `elems`. + // By post-condition on `validate_cast_and_convert_metadata`, one of + // the following conditions holds: + // - `split_at == size`, in which case, by Lemma 2, `split_at <= + // isize::MAX`. Since `size_of::<u8>() == 1`, a `[u8]` with + // `split_at` elems has size not larger than `isize::MAX`. + // - `split_at == bytes_len - size`. Since `bytes_len: + // MetadataOf<u8>`, and since `size` is non-negative, `split_at` + // addresses no more bytes than `bytes_len` does. Since + // `bytes_len: MetadataOf<u8>`, `bytes_len` describes a `[u8]` + // which has no more than `isize::MAX` bytes, and thus so does + // `split_at`. 
+ let split_at = unsafe { MetadataOf::<[u8]>::new_unchecked(split_at) }; + Ok((elems, split_at)) + } + } +} + +pub(crate) use len_of::MetadataOf; + +/// Since we support multiple versions of Rust, there are often features which +/// have been stabilized in the most recent stable release which do not yet +/// exist (stably) on our MSRV. This module provides polyfills for those +/// features so that we can write more "modern" code, and just remove the +/// polyfill once our MSRV supports the corresponding feature. Without this, +/// we'd have to write worse/more verbose code and leave FIXME comments +/// sprinkled throughout the codebase to update to the new pattern once it's +/// stabilized. +/// +/// Each trait is imported as `_` at the crate root; each polyfill should "just +/// work" at usage sites. +pub(crate) mod polyfills { + use core::ptr::{self, NonNull}; + + // A polyfill for `NonNull::slice_from_raw_parts` that we can use before our + // MSRV is 1.70, when that function was stabilized. + // + // The `#[allow(unused)]` is necessary because, on sufficiently recent + // toolchain versions, `ptr.slice_from_raw_parts()` resolves to the inherent + // method rather than to this trait, and so this trait is considered unused. + // + // FIXME(#67): Once our MSRV is 1.70, remove this. + #[allow(unused)] + pub(crate) trait NonNullExt<T> { + fn slice_from_raw_parts(data: Self, len: usize) -> NonNull<[T]>; + } + + impl<T> NonNullExt<T> for NonNull<T> { + // NOTE on coverage: this will never be tested in nightly since it's a + // polyfill for a feature which has been stabilized on our nightly + // toolchain. + #[cfg_attr( + all(coverage_nightly, __ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS), + coverage(off) + )] + #[inline(always)] + fn slice_from_raw_parts(data: Self, len: usize) -> NonNull<[T]> { + let ptr = ptr::slice_from_raw_parts_mut(data.as_ptr(), len); + // SAFETY: `ptr` is converted from `data`, which is non-null. + unsafe { NonNull::new_unchecked(ptr) } + } + } + + // A polyfill for `Self::unchecked_sub` that we can use until methods like + // `usize::unchecked_sub` is stabilized. + // + // The `#[allow(unused)]` is necessary because, on sufficiently recent + // toolchain versions, `ptr.slice_from_raw_parts()` resolves to the inherent + // method rather than to this trait, and so this trait is considered unused. + // + // FIXME(#67): Once our MSRV is high enough, remove this. + #[allow(unused)] + pub(crate) trait NumExt { + /// Add without checking for overflow. + /// + /// # Safety + /// + /// The caller promises that the addition will not overflow. + unsafe fn unchecked_add(self, rhs: Self) -> Self; + + /// Subtract without checking for underflow. + /// + /// # Safety + /// + /// The caller promises that the subtraction will not underflow. + unsafe fn unchecked_sub(self, rhs: Self) -> Self; + + /// Multiply without checking for overflow. + /// + /// # Safety + /// + /// The caller promises that the multiplication will not overflow. + unsafe fn unchecked_mul(self, rhs: Self) -> Self; + } + + // NOTE on coverage: these will never be tested in nightly since they're + // polyfills for a feature which has been stabilized on our nightly + // toolchain. 
+ impl NumExt for usize { + #[cfg_attr( + all(coverage_nightly, __ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS), + coverage(off) + )] + #[inline(always)] + unsafe fn unchecked_add(self, rhs: usize) -> usize { + match self.checked_add(rhs) { + Some(x) => x, + None => { + // SAFETY: The caller promises that the addition will not + // underflow. + unsafe { core::hint::unreachable_unchecked() } + } + } + } + + #[cfg_attr( + all(coverage_nightly, __ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS), + coverage(off) + )] + #[inline(always)] + unsafe fn unchecked_sub(self, rhs: usize) -> usize { + match self.checked_sub(rhs) { + Some(x) => x, + None => { + // SAFETY: The caller promises that the subtraction will not + // underflow. + unsafe { core::hint::unreachable_unchecked() } + } + } + } + + #[cfg_attr( + all(coverage_nightly, __ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS), + coverage(off) + )] + #[inline(always)] + unsafe fn unchecked_mul(self, rhs: usize) -> usize { + match self.checked_mul(rhs) { + Some(x) => x, + None => { + // SAFETY: The caller promises that the multiplication will + // not overflow. + unsafe { core::hint::unreachable_unchecked() } + } + } + } + } +} + +#[cfg(test)] +pub(crate) mod testutil { + use crate::*; + + /// A `T` which is aligned to at least `align_of::<A>()`. + #[derive(Default)] + pub(crate) struct Align<T, A> { + pub(crate) t: T, + _a: [A; 0], + } + + impl<T: Default, A> Align<T, A> { + pub(crate) fn set_default(&mut self) { + self.t = T::default(); + } + } + + impl<T, A> Align<T, A> { + pub(crate) const fn new(t: T) -> Align<T, A> { + Align { t, _a: [] } + } + } + + /// A `T` which is guaranteed not to satisfy `align_of::<A>()`. + /// + /// It must be the case that `align_of::<T>() < align_of::<A>()` in order + /// for this type to work properly. + #[repr(C)] + pub(crate) struct ForceUnalign<T: Unaligned, A> { + // The outer struct is aligned to `A`, and, thanks to `repr(C)`, `t` is + // placed at the minimum offset that guarantees its alignment. If + // `align_of::<T>() < align_of::<A>()`, then that offset will be + // guaranteed *not* to satisfy `align_of::<A>()`. + // + // Note that we need `T: Unaligned` in order to guarantee that there is + // no padding between `_u` and `t`. + _u: u8, + pub(crate) t: T, + _a: [A; 0], + } + + impl<T: Unaligned, A> ForceUnalign<T, A> { + pub(crate) fn new(t: T) -> ForceUnalign<T, A> { + ForceUnalign { _u: 0, t, _a: [] } + } + } + // A `u64` with alignment 8. + // + // Though `u64` has alignment 8 on some platforms, it's not guaranteed. By + // contrast, `AU64` is guaranteed to have alignment 8 on all platforms. + #[derive( + KnownLayout, + Immutable, + FromBytes, + IntoBytes, + Eq, + PartialEq, + Ord, + PartialOrd, + Default, + Debug, + Copy, + Clone, + )] + #[repr(C, align(8))] + pub(crate) struct AU64(pub(crate) u64); + + impl AU64 { + // Converts this `AU64` to bytes using this platform's endianness. 
+ pub(crate) fn to_bytes(self) -> [u8; 8] { + crate::transmute!(self) + } + } + + impl Display for AU64 { + #[cfg_attr( + all(coverage_nightly, __ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS), + coverage(off) + )] + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + Display::fmt(&self.0, f) + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_round_down_to_next_multiple_of_alignment() { + fn alt_impl(n: usize, align: NonZeroUsize) -> usize { + let mul = n / align.get(); + mul * align.get() + } + + for align in [1, 2, 4, 8, 16] { + for n in 0..256 { + let align = NonZeroUsize::new(align).unwrap(); + let want = alt_impl(n, align); + let got = round_down_to_next_multiple_of_alignment(n, align); + assert_eq!(got, want, "round_down_to_next_multiple_of_alignment({}, {})", n, align); + } + } + } + + #[rustversion::since(1.57.0)] + #[test] + #[should_panic] + fn test_round_down_to_next_multiple_of_alignment_zerocopy_panic_in_const_and_vec_try_reserve() { + round_down_to_next_multiple_of_alignment(0, NonZeroUsize::new(3).unwrap()); + } +} diff --git a/vendor/zerocopy/src/wrappers.rs b/vendor/zerocopy/src/wrappers.rs new file mode 100644 index 00000000..f65025aa --- /dev/null +++ b/vendor/zerocopy/src/wrappers.rs @@ -0,0 +1,766 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +use core::{fmt, hash::Hash}; + +use super::*; + +/// A type with no alignment requirement. +/// +/// An `Unalign` wraps a `T`, removing any alignment requirement. `Unalign<T>` +/// has the same size and bit validity as `T`, but not necessarily the same +/// alignment [or ABI]. This is useful if a type with an alignment requirement +/// needs to be read from a chunk of memory which provides no alignment +/// guarantees. +/// +/// Since `Unalign` has no alignment requirement, the inner `T` may not be +/// properly aligned in memory. There are five ways to access the inner `T`: +/// - by value, using [`get`] or [`into_inner`] +/// - by reference inside of a callback, using [`update`] +/// - fallibly by reference, using [`try_deref`] or [`try_deref_mut`]; these can +/// fail if the `Unalign` does not satisfy `T`'s alignment requirement at +/// runtime +/// - unsafely by reference, using [`deref_unchecked`] or +/// [`deref_mut_unchecked`]; it is the caller's responsibility to ensure that +/// the `Unalign` satisfies `T`'s alignment requirement +/// - (where `T: Unaligned`) infallibly by reference, using [`Deref::deref`] or +/// [`DerefMut::deref_mut`] +/// +/// [or ABI]: https://github.com/google/zerocopy/issues/164 +/// [`get`]: Unalign::get +/// [`into_inner`]: Unalign::into_inner +/// [`update`]: Unalign::update +/// [`try_deref`]: Unalign::try_deref +/// [`try_deref_mut`]: Unalign::try_deref_mut +/// [`deref_unchecked`]: Unalign::deref_unchecked +/// [`deref_mut_unchecked`]: Unalign::deref_mut_unchecked +/// +/// # Example +/// +/// In this example, we need `EthernetFrame` to have no alignment requirement - +/// and thus implement [`Unaligned`]. `EtherType` is `#[repr(u16)]` and so +/// cannot implement `Unaligned`. 
We use `Unalign` to relax `EtherType`'s +/// alignment requirement so that `EthernetFrame` has no alignment requirement +/// and can implement `Unaligned`. +/// +/// ```rust +/// use zerocopy::*; +/// # use zerocopy_derive::*; +/// # #[derive(FromBytes, KnownLayout, Immutable, Unaligned)] #[repr(C)] struct Mac([u8; 6]); +/// +/// # #[derive(PartialEq, Copy, Clone, Debug)] +/// #[derive(TryFromBytes, KnownLayout, Immutable)] +/// #[repr(u16)] +/// enum EtherType { +/// Ipv4 = 0x0800u16.to_be(), +/// Arp = 0x0806u16.to_be(), +/// Ipv6 = 0x86DDu16.to_be(), +/// # /* +/// ... +/// # */ +/// } +/// +/// #[derive(TryFromBytes, KnownLayout, Immutable, Unaligned)] +/// #[repr(C)] +/// struct EthernetFrame { +/// src: Mac, +/// dst: Mac, +/// ethertype: Unalign<EtherType>, +/// payload: [u8], +/// } +/// +/// let bytes = &[ +/// # 0, 1, 2, 3, 4, 5, +/// # 6, 7, 8, 9, 10, 11, +/// # /* +/// ... +/// # */ +/// 0x86, 0xDD, // EtherType +/// 0xDE, 0xAD, 0xBE, 0xEF // Payload +/// ][..]; +/// +/// // PANICS: Guaranteed not to panic because `bytes` is of the right +/// // length, has the right contents, and `EthernetFrame` has no +/// // alignment requirement. +/// let packet = EthernetFrame::try_ref_from_bytes(&bytes).unwrap(); +/// +/// assert_eq!(packet.ethertype.get(), EtherType::Ipv6); +/// assert_eq!(packet.payload, [0xDE, 0xAD, 0xBE, 0xEF]); +/// ``` +/// +/// # Safety +/// +/// `Unalign<T>` is guaranteed to have the same size and bit validity as `T`, +/// and to have [`UnsafeCell`]s covering the same byte ranges as `T`. +/// `Unalign<T>` is guaranteed to have alignment 1. +// NOTE: This type is sound to use with types that need to be dropped. The +// reason is that the compiler-generated drop code automatically moves all +// values to aligned memory slots before dropping them in-place. This is not +// well-documented, but it's hinted at in places like [1] and [2]. However, this +// also means that `T` must be `Sized`; unless something changes, we can never +// support unsized `T`. [3] +// +// [1] https://github.com/rust-lang/rust/issues/54148#issuecomment-420529646 +// [2] https://github.com/google/zerocopy/pull/126#discussion_r1018512323 +// [3] https://github.com/google/zerocopy/issues/209 +#[allow(missing_debug_implementations)] +#[derive(Default, Copy)] +#[cfg_attr(any(feature = "derive", test), derive(Immutable, FromBytes, IntoBytes, Unaligned))] +#[repr(C, packed)] +pub struct Unalign<T>(T); + +// We do not use `derive(KnownLayout)` on `Unalign`, because the derive is not +// smart enough to realize that `Unalign<T>` is always sized and thus emits a +// `KnownLayout` impl bounded on `T: KnownLayout.` This is overly restrictive. +impl_known_layout!(T => Unalign<T>); + +// FIXME(https://github.com/rust-lang/rust-clippy/issues/16087): Move these +// attributes below the comment once this Clippy bug is fixed. +#[cfg_attr( + all(__ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS, any(feature = "derive", test)), + expect(unused_unsafe) +)] +#[cfg_attr( + all( + not(__ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS), + any(feature = "derive", test) + ), + allow(unused_unsafe) +)] +// SAFETY: +// - `Unalign<T>` promises to have alignment 1, and so we don't require that `T: +// Unaligned`. +// - `Unalign<T>` has the same bit validity as `T`, and so it is `FromZeros`, +// `FromBytes`, or `IntoBytes` exactly when `T` is as well. +// - `Immutable`: `Unalign<T>` has the same fields as `T`, so it contains +// `UnsafeCell`s exactly when `T` does. 
+// - `TryFromBytes`: `Unalign<T>` has the same the same bit validity as `T`, so +// `T::is_bit_valid` is a sound implementation of `is_bit_valid`. +// +#[allow(clippy::multiple_unsafe_ops_per_block)] +const _: () = unsafe { + impl_or_verify!(T => Unaligned for Unalign<T>); + impl_or_verify!(T: Immutable => Immutable for Unalign<T>); + impl_or_verify!( + T: TryFromBytes => TryFromBytes for Unalign<T>; + |c| T::is_bit_valid(c.transmute()) + ); + impl_or_verify!(T: FromZeros => FromZeros for Unalign<T>); + impl_or_verify!(T: FromBytes => FromBytes for Unalign<T>); + impl_or_verify!(T: IntoBytes => IntoBytes for Unalign<T>); +}; + +// Note that `Unalign: Clone` only if `T: Copy`. Since the inner `T` may not be +// aligned, there's no way to safely call `T::clone`, and so a `T: Clone` bound +// is not sufficient to implement `Clone` for `Unalign`. +impl<T: Copy> Clone for Unalign<T> { + #[inline(always)] + fn clone(&self) -> Unalign<T> { + *self + } +} + +impl<T> Unalign<T> { + /// Constructs a new `Unalign`. + #[inline(always)] + pub const fn new(val: T) -> Unalign<T> { + Unalign(val) + } + + /// Consumes `self`, returning the inner `T`. + #[inline(always)] + pub const fn into_inner(self) -> T { + // SAFETY: Since `Unalign` is `#[repr(C, packed)]`, it has the same size + // and bit validity as `T`. + // + // We do this instead of just destructuring in order to prevent + // `Unalign`'s `Drop::drop` from being run, since dropping is not + // supported in `const fn`s. + // + // FIXME(https://github.com/rust-lang/rust/issues/73255): Destructure + // instead of using unsafe. + unsafe { crate::util::transmute_unchecked(self) } + } + + /// Attempts to return a reference to the wrapped `T`, failing if `self` is + /// not properly aligned. + /// + /// If `self` does not satisfy `align_of::<T>()`, then `try_deref` returns + /// `Err`. + /// + /// If `T: Unaligned`, then `Unalign<T>` implements [`Deref`], and callers + /// may prefer [`Deref::deref`], which is infallible. + #[inline(always)] + pub fn try_deref(&self) -> Result<&T, AlignmentError<&Self, T>> { + let inner = Ptr::from_ref(self).transmute(); + match inner.try_into_aligned() { + Ok(aligned) => Ok(aligned.as_ref()), + Err(err) => Err(err.map_src(|src| src.into_unalign().as_ref())), + } + } + + /// Attempts to return a mutable reference to the wrapped `T`, failing if + /// `self` is not properly aligned. + /// + /// If `self` does not satisfy `align_of::<T>()`, then `try_deref` returns + /// `Err`. + /// + /// If `T: Unaligned`, then `Unalign<T>` implements [`DerefMut`], and + /// callers may prefer [`DerefMut::deref_mut`], which is infallible. + #[inline(always)] + pub fn try_deref_mut(&mut self) -> Result<&mut T, AlignmentError<&mut Self, T>> { + let inner = Ptr::from_mut(self).transmute::<_, _, (_, (_, _))>(); + match inner.try_into_aligned() { + Ok(aligned) => Ok(aligned.as_mut()), + Err(err) => Err(err.map_src(|src| src.into_unalign().as_mut())), + } + } + + /// Returns a reference to the wrapped `T` without checking alignment. + /// + /// If `T: Unaligned`, then `Unalign<T>` implements[ `Deref`], and callers + /// may prefer [`Deref::deref`], which is safe. + /// + /// # Safety + /// + /// The caller must guarantee that `self` satisfies `align_of::<T>()`. + #[inline(always)] + pub const unsafe fn deref_unchecked(&self) -> &T { + // SAFETY: `Unalign<T>` is `repr(transparent)`, so there is a valid `T` + // at the same memory location as `self`. 
It has no alignment guarantee, + // but the caller has promised that `self` is properly aligned, so we + // know that it is sound to create a reference to `T` at this memory + // location. + // + // We use `mem::transmute` instead of `&*self.get_ptr()` because + // dereferencing pointers is not stable in `const` on our current MSRV + // (1.56 as of this writing). + unsafe { mem::transmute(self) } + } + + /// Returns a mutable reference to the wrapped `T` without checking + /// alignment. + /// + /// If `T: Unaligned`, then `Unalign<T>` implements[ `DerefMut`], and + /// callers may prefer [`DerefMut::deref_mut`], which is safe. + /// + /// # Safety + /// + /// The caller must guarantee that `self` satisfies `align_of::<T>()`. + #[inline(always)] + pub unsafe fn deref_mut_unchecked(&mut self) -> &mut T { + // SAFETY: `self.get_mut_ptr()` returns a raw pointer to a valid `T` at + // the same memory location as `self`. It has no alignment guarantee, + // but the caller has promised that `self` is properly aligned, so we + // know that the pointer itself is aligned, and thus that it is sound to + // create a reference to a `T` at this memory location. + unsafe { &mut *self.get_mut_ptr() } + } + + /// Gets an unaligned raw pointer to the inner `T`. + /// + /// # Safety + /// + /// The returned raw pointer is not necessarily aligned to + /// `align_of::<T>()`. Most functions which operate on raw pointers require + /// those pointers to be aligned, so calling those functions with the result + /// of `get_ptr` will result in undefined behavior if alignment is not + /// guaranteed using some out-of-band mechanism. In general, the only + /// functions which are safe to call with this pointer are those which are + /// explicitly documented as being sound to use with an unaligned pointer, + /// such as [`read_unaligned`]. + /// + /// Even if the caller is permitted to mutate `self` (e.g. they have + /// ownership or a mutable borrow), it is not guaranteed to be sound to + /// write through the returned pointer. If writing is required, prefer + /// [`get_mut_ptr`] instead. + /// + /// [`read_unaligned`]: core::ptr::read_unaligned + /// [`get_mut_ptr`]: Unalign::get_mut_ptr + #[inline(always)] + pub const fn get_ptr(&self) -> *const T { + ptr::addr_of!(self.0) + } + + /// Gets an unaligned mutable raw pointer to the inner `T`. + /// + /// # Safety + /// + /// The returned raw pointer is not necessarily aligned to + /// `align_of::<T>()`. Most functions which operate on raw pointers require + /// those pointers to be aligned, so calling those functions with the result + /// of `get_ptr` will result in undefined behavior if alignment is not + /// guaranteed using some out-of-band mechanism. In general, the only + /// functions which are safe to call with this pointer are those which are + /// explicitly documented as being sound to use with an unaligned pointer, + /// such as [`read_unaligned`]. + /// + /// [`read_unaligned`]: core::ptr::read_unaligned + // FIXME(https://github.com/rust-lang/rust/issues/57349): Make this `const`. + #[inline(always)] + pub fn get_mut_ptr(&mut self) -> *mut T { + ptr::addr_of_mut!(self.0) + } + + /// Sets the inner `T`, dropping the previous value. + // FIXME(https://github.com/rust-lang/rust/issues/57349): Make this `const`. + #[inline(always)] + pub fn set(&mut self, t: T) { + *self = Unalign::new(t); + } + + /// Updates the inner `T` by calling a function on it. 
+ /// + /// If [`T: Unaligned`], then `Unalign<T>` implements [`DerefMut`], and that + /// impl should be preferred over this method when performing updates, as it + /// will usually be faster and more ergonomic. + /// + /// For large types, this method may be expensive, as it requires copying + /// `2 * size_of::<T>()` bytes. \[1\] + /// + /// \[1\] Since the inner `T` may not be aligned, it would not be sound to + /// invoke `f` on it directly. Instead, `update` moves it into a + /// properly-aligned location in the local stack frame, calls `f` on it, and + /// then moves it back to its original location in `self`. + /// + /// [`T: Unaligned`]: Unaligned + #[inline] + pub fn update<O, F: FnOnce(&mut T) -> O>(&mut self, f: F) -> O { + if mem::align_of::<T>() == 1 { + // While we advise callers to use `DerefMut` when `T: Unaligned`, + // not all callers will be able to guarantee `T: Unaligned` in all + // cases. In particular, callers who are themselves providing an API + // which is generic over `T` may sometimes be called by *their* + // callers with `T` such that `align_of::<T>() == 1`, but cannot + // guarantee this in the general case. Thus, this optimization may + // sometimes be helpful. + + // SAFETY: Since `T`'s alignment is 1, `self` satisfies its + // alignment by definition. + let t = unsafe { self.deref_mut_unchecked() }; + return f(t); + } + + // On drop, this moves `copy` out of itself and uses `ptr::write` to + // overwrite `slf`. + struct WriteBackOnDrop<T> { + copy: ManuallyDrop<T>, + slf: *mut Unalign<T>, + } + + impl<T> Drop for WriteBackOnDrop<T> { + fn drop(&mut self) { + // SAFETY: We never use `copy` again as required by + // `ManuallyDrop::take`. + let copy = unsafe { ManuallyDrop::take(&mut self.copy) }; + // SAFETY: `slf` is the raw pointer value of `self`. We know it + // is valid for writes and properly aligned because `self` is a + // mutable reference, which guarantees both of these properties. + unsafe { ptr::write(self.slf, Unalign::new(copy)) }; + } + } + + // SAFETY: We know that `self` is valid for reads, properly aligned, and + // points to an initialized `Unalign<T>` because it is a mutable + // reference, which guarantees all of these properties. + // + // Since `T: !Copy`, it would be unsound in the general case to allow + // both the original `Unalign<T>` and the copy to be used by safe code. + // We guarantee that the copy is used to overwrite the original in the + // `Drop::drop` impl of `WriteBackOnDrop`. So long as this `drop` is + // called before any other safe code executes, soundness is upheld. + // While this method can terminate in two ways (by returning normally or + // by unwinding due to a panic in `f`), in both cases, `write_back` is + // dropped - and its `drop` called - before any other safe code can + // execute. + let copy = unsafe { ptr::read(self) }.into_inner(); + let mut write_back = WriteBackOnDrop { copy: ManuallyDrop::new(copy), slf: self }; + + let ret = f(&mut write_back.copy); + + drop(write_back); + ret + } +} + +impl<T: Copy> Unalign<T> { + /// Gets a copy of the inner `T`. + // FIXME(https://github.com/rust-lang/rust/issues/57349): Make this `const`. 
+ #[inline(always)] + pub fn get(&self) -> T { + let Unalign(val) = *self; + val + } +} + +impl<T: Unaligned> Deref for Unalign<T> { + type Target = T; + + #[inline(always)] + fn deref(&self) -> &T { + Ptr::from_ref(self).transmute().bikeshed_recall_aligned().as_ref() + } +} + +impl<T: Unaligned> DerefMut for Unalign<T> { + #[inline(always)] + fn deref_mut(&mut self) -> &mut T { + Ptr::from_mut(self).transmute::<_, _, (_, (_, _))>().bikeshed_recall_aligned().as_mut() + } +} + +impl<T: Unaligned + PartialOrd> PartialOrd<Unalign<T>> for Unalign<T> { + #[inline(always)] + fn partial_cmp(&self, other: &Unalign<T>) -> Option<Ordering> { + PartialOrd::partial_cmp(self.deref(), other.deref()) + } +} + +impl<T: Unaligned + Ord> Ord for Unalign<T> { + #[inline(always)] + fn cmp(&self, other: &Unalign<T>) -> Ordering { + Ord::cmp(self.deref(), other.deref()) + } +} + +impl<T: Unaligned + PartialEq> PartialEq<Unalign<T>> for Unalign<T> { + #[inline(always)] + fn eq(&self, other: &Unalign<T>) -> bool { + PartialEq::eq(self.deref(), other.deref()) + } +} + +impl<T: Unaligned + Eq> Eq for Unalign<T> {} + +impl<T: Unaligned + Hash> Hash for Unalign<T> { + #[inline(always)] + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.deref().hash(state); + } +} + +impl<T: Unaligned + Debug> Debug for Unalign<T> { + #[inline(always)] + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + Debug::fmt(self.deref(), f) + } +} + +impl<T: Unaligned + Display> Display for Unalign<T> { + #[inline(always)] + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + Display::fmt(self.deref(), f) + } +} + +/// A wrapper type to construct uninitialized instances of `T`. +/// +/// `MaybeUninit` is identical to the [standard library +/// `MaybeUninit`][core-maybe-uninit] type except that it supports unsized +/// types. +/// +/// # Layout +/// +/// The same layout guarantees and caveats apply to `MaybeUninit<T>` as apply to +/// the [standard library `MaybeUninit`][core-maybe-uninit] with one exception: +/// for `T: !Sized`, there is no single value for `T`'s size. Instead, for such +/// types, the following are guaranteed: +/// - Every [valid size][valid-size] for `T` is a valid size for +/// `MaybeUninit<T>` and vice versa +/// - Given `t: *const T` and `m: *const MaybeUninit<T>` with identical fat +/// pointer metadata, `t` and `m` address the same number of bytes (and +/// likewise for `*mut`) +/// +/// [core-maybe-uninit]: core::mem::MaybeUninit +/// [valid-size]: crate::KnownLayout#what-is-a-valid-size +#[repr(transparent)] +#[doc(hidden)] +pub struct MaybeUninit<T: ?Sized + KnownLayout>( + // SAFETY: `MaybeUninit<T>` has the same size as `T`, because (by invariant + // on `T::MaybeUninit`) `T::MaybeUninit` has `T::LAYOUT` identical to `T`, + // and because (invariant on `T::LAYOUT`) we can trust that `LAYOUT` + // accurately reflects the layout of `T`. By invariant on `T::MaybeUninit`, + // it admits uninitialized bytes in all positions. Because `MaybeUninit` is + // marked `repr(transparent)`, these properties additionally hold true for + // `Self`. + T::MaybeUninit, +); + +#[doc(hidden)] +impl<T: ?Sized + KnownLayout> MaybeUninit<T> { + /// Constructs a `MaybeUninit<T>` initialized with the given value. + #[inline(always)] + pub fn new(val: T) -> Self + where + T: Sized, + Self: Sized, + { + // SAFETY: It is valid to transmute `val` to `MaybeUninit<T>` because it + // is both valid to transmute `val` to `T::MaybeUninit`, and it is valid + // to transmute from `T::MaybeUninit` to `MaybeUninit<T>`. 
+ // + // First, it is valid to transmute `val` to `T::MaybeUninit` because, by + // invariant on `T::MaybeUninit`: + // - For `T: Sized`, `T` and `T::MaybeUninit` have the same size. + // - All byte sequences of the correct size are valid values of + // `T::MaybeUninit`. + // + // Second, it is additionally valid to transmute from `T::MaybeUninit` + // to `MaybeUninit<T>`, because `MaybeUninit<T>` is a + // `repr(transparent)` wrapper around `T::MaybeUninit`. + // + // These two transmutes are collapsed into one so we don't need to add a + // `T::MaybeUninit: Sized` bound to this function's `where` clause. + unsafe { crate::util::transmute_unchecked(val) } + } + + /// Constructs an uninitialized `MaybeUninit<T>`. + #[must_use] + #[inline(always)] + pub fn uninit() -> Self + where + T: Sized, + Self: Sized, + { + let uninit = CoreMaybeUninit::<T>::uninit(); + // SAFETY: It is valid to transmute from `CoreMaybeUninit<T>` to + // `MaybeUninit<T>` since they both admit uninitialized bytes in all + // positions, and they have the same size (i.e., that of `T`). + // + // `MaybeUninit<T>` has the same size as `T`, because (by invariant on + // `T::MaybeUninit`) `T::MaybeUninit` has `T::LAYOUT` identical to `T`, + // and because (invariant on `T::LAYOUT`) we can trust that `LAYOUT` + // accurately reflects the layout of `T`. + // + // `CoreMaybeUninit<T>` has the same size as `T` [1] and admits + // uninitialized bytes in all positions. + // + // [1] Per https://doc.rust-lang.org/1.81.0/std/mem/union.MaybeUninit.html#layout-1: + // + // `MaybeUninit<T>` is guaranteed to have the same size, alignment, + // and ABI as `T` + unsafe { crate::util::transmute_unchecked(uninit) } + } + + /// Creates a `Box<MaybeUninit<T>>`. + /// + /// This function is useful for allocating large, uninit values on the heap + /// without ever creating a temporary instance of `Self` on the stack. + /// + /// # Errors + /// + /// Returns an error on allocation failure. Allocation failure is guaranteed + /// never to cause a panic or an abort. + #[cfg(feature = "alloc")] + #[inline] + pub fn new_boxed_uninit(meta: T::PointerMetadata) -> Result<Box<Self>, AllocError> { + // SAFETY: `alloc::alloc::alloc_zeroed` is a valid argument of + // `new_box`. The referent of the pointer returned by `alloc` (and, + // consequently, the `Box` derived from it) is a valid instance of + // `Self`, because `Self` is `MaybeUninit` and thus admits arbitrary + // (un)initialized bytes. + unsafe { crate::util::new_box(meta, alloc::alloc::alloc) } + } + + /// Extracts the value from the `MaybeUninit<T>` container. + /// + /// # Safety + /// + /// The caller must ensure that `self` is in an bit-valid state. Depending + /// on subsequent use, it may also need to be in a library-valid state. + #[inline(always)] + pub unsafe fn assume_init(self) -> T + where + T: Sized, + Self: Sized, + { + // SAFETY: The caller guarantees that `self` is in an bit-valid state. + unsafe { crate::util::transmute_unchecked(self) } + } +} + +impl<T: ?Sized + KnownLayout> fmt::Debug for MaybeUninit<T> { + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.pad(core::any::type_name::<Self>()) + } +} + +#[cfg(test)] +mod tests { + use core::panic::AssertUnwindSafe; + + use super::*; + use crate::util::testutil::*; + + #[test] + fn test_unalign() { + // Test methods that don't depend on alignment. 
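// Illustrative sketch, not part of the vendored sources: the `new`/
// `assume_init` contract spelled out in the safety comments above mirrors the
// standard-library type this wrapper generalizes. A minimal round trip with
// `core::mem::MaybeUninit` (std API only; the test body this aside sits in
// resumes immediately below):
fn core_maybe_uninit_sketch() {
    use core::mem::MaybeUninit;

    let m = MaybeUninit::new(7u32);
    // SAFETY: `m` was constructed from a fully initialized `u32`, so it is in
    // a bit-valid state, as `assume_init` requires.
    let v = unsafe { m.assume_init() };
    assert_eq!(v, 7);
}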
+ let mut u = Unalign::new(AU64(123)); + assert_eq!(u.get(), AU64(123)); + assert_eq!(u.into_inner(), AU64(123)); + assert_eq!(u.get_ptr(), <*const _>::cast::<AU64>(&u)); + assert_eq!(u.get_mut_ptr(), <*mut _>::cast::<AU64>(&mut u)); + u.set(AU64(321)); + assert_eq!(u.get(), AU64(321)); + + // Test methods that depend on alignment (when alignment is satisfied). + let mut u: Align<_, AU64> = Align::new(Unalign::new(AU64(123))); + assert_eq!(u.t.try_deref().unwrap(), &AU64(123)); + assert_eq!(u.t.try_deref_mut().unwrap(), &mut AU64(123)); + // SAFETY: The `Align<_, AU64>` guarantees proper alignment. + assert_eq!(unsafe { u.t.deref_unchecked() }, &AU64(123)); + // SAFETY: The `Align<_, AU64>` guarantees proper alignment. + assert_eq!(unsafe { u.t.deref_mut_unchecked() }, &mut AU64(123)); + *u.t.try_deref_mut().unwrap() = AU64(321); + assert_eq!(u.t.get(), AU64(321)); + + // Test methods that depend on alignment (when alignment is not + // satisfied). + let mut u: ForceUnalign<_, AU64> = ForceUnalign::new(Unalign::new(AU64(123))); + assert!(matches!(u.t.try_deref(), Err(AlignmentError { .. }))); + assert!(matches!(u.t.try_deref_mut(), Err(AlignmentError { .. }))); + + // Test methods that depend on `T: Unaligned`. + let mut u = Unalign::new(123u8); + assert_eq!(u.try_deref(), Ok(&123)); + assert_eq!(u.try_deref_mut(), Ok(&mut 123)); + assert_eq!(u.deref(), &123); + assert_eq!(u.deref_mut(), &mut 123); + *u = 21; + assert_eq!(u.get(), 21); + + // Test that some `Unalign` functions and methods are `const`. + const _UNALIGN: Unalign<u64> = Unalign::new(0); + const _UNALIGN_PTR: *const u64 = _UNALIGN.get_ptr(); + const _U64: u64 = _UNALIGN.into_inner(); + // Make sure all code is considered "used". + // + // FIXME(https://github.com/rust-lang/rust/issues/104084): Remove this + // attribute. + #[allow(dead_code)] + const _: () = { + let x: Align<_, AU64> = Align::new(Unalign::new(AU64(123))); + // Make sure that `deref_unchecked` is `const`. + // + // SAFETY: The `Align<_, AU64>` guarantees proper alignment. + let au64 = unsafe { x.t.deref_unchecked() }; + match au64 { + AU64(123) => {} + _ => const_unreachable!(), + } + }; + } + + #[test] + fn test_unalign_update() { + let mut u = Unalign::new(AU64(123)); + u.update(|a| a.0 += 1); + assert_eq!(u.get(), AU64(124)); + + // Test that, even if the callback panics, the original is still + // correctly overwritten. Use a `Box` so that Miri is more likely to + // catch any unsoundness (which would likely result in two `Box`es for + // the same heap object, which is the sort of thing that Miri would + // probably catch). + let mut u = Unalign::new(Box::new(AU64(123))); + let res = std::panic::catch_unwind(AssertUnwindSafe(|| { + u.update(|a| { + a.0 += 1; + panic!(); + }) + })); + assert!(res.is_err()); + assert_eq!(u.into_inner(), Box::new(AU64(124))); + + // Test the align_of::<T>() == 1 optimization. + let mut u = Unalign::new([0u8, 1]); + u.update(|a| a[0] += 1); + assert_eq!(u.get(), [1u8, 1]); + } + + #[test] + fn test_unalign_copy_clone() { + // Test that `Copy` and `Clone` do not cause soundness issues. This test + // is mainly meant to exercise UB that would be caught by Miri. + + // `u.t` is definitely not validly-aligned for `AU64`'s alignment of 8. 
+ let u = ForceUnalign::<_, AU64>::new(Unalign::new(AU64(123))); + #[allow(clippy::clone_on_copy)] + let v = u.t.clone(); + let w = u.t; + assert_eq!(u.t.get(), v.get()); + assert_eq!(u.t.get(), w.get()); + assert_eq!(v.get(), w.get()); + } + + #[test] + fn test_unalign_trait_impls() { + let zero = Unalign::new(0u8); + let one = Unalign::new(1u8); + + assert!(zero < one); + assert_eq!(PartialOrd::partial_cmp(&zero, &one), Some(Ordering::Less)); + assert_eq!(Ord::cmp(&zero, &one), Ordering::Less); + + assert_ne!(zero, one); + assert_eq!(zero, zero); + assert!(!PartialEq::eq(&zero, &one)); + assert!(PartialEq::eq(&zero, &zero)); + + fn hash<T: Hash>(t: &T) -> u64 { + let mut h = std::collections::hash_map::DefaultHasher::new(); + t.hash(&mut h); + h.finish() + } + + assert_eq!(hash(&zero), hash(&0u8)); + assert_eq!(hash(&one), hash(&1u8)); + + assert_eq!(format!("{:?}", zero), format!("{:?}", 0u8)); + assert_eq!(format!("{:?}", one), format!("{:?}", 1u8)); + assert_eq!(format!("{}", zero), format!("{}", 0u8)); + assert_eq!(format!("{}", one), format!("{}", 1u8)); + } + + #[test] + #[allow(clippy::as_conversions)] + fn test_maybe_uninit() { + // int + { + let input = 42; + let uninit = MaybeUninit::new(input); + // SAFETY: `uninit` is in an initialized state + let output = unsafe { uninit.assume_init() }; + assert_eq!(input, output); + } + + // thin ref + { + let input = 42; + let uninit = MaybeUninit::new(&input); + // SAFETY: `uninit` is in an initialized state + let output = unsafe { uninit.assume_init() }; + assert_eq!(&input as *const _, output as *const _); + assert_eq!(input, *output); + } + + // wide ref + { + let input = [1, 2, 3, 4]; + let uninit = MaybeUninit::new(&input[..]); + // SAFETY: `uninit` is in an initialized state + let output = unsafe { uninit.assume_init() }; + assert_eq!(&input[..] as *const _, output as *const _); + assert_eq!(input, *output); + } + } +} diff --git a/vendor/zerocopy/testdata/include_value/data b/vendor/zerocopy/testdata/include_value/data new file mode 100644 index 00000000..85df5078 --- /dev/null +++ b/vendor/zerocopy/testdata/include_value/data @@ -0,0 +1 @@ +abcd \ No newline at end of file diff --git a/vendor/zerocopy/tests/trybuild.rs b/vendor/zerocopy/tests/trybuild.rs new file mode 100644 index 00000000..7954c2b0 --- /dev/null +++ b/vendor/zerocopy/tests/trybuild.rs @@ -0,0 +1,55 @@ +// Copyright 2019 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +// Many of our UI tests require the "derive" feature to function properly. In +// particular: +// - Some tests directly include `zerocopy-derive/tests/include.rs`, which +// derives traits on the `AU16` type. +// - The file `invalid-impls.rs` directly includes `src/util/macros.rs` in order +// to test the `impl_or_verify!` macro which is defined in that file. +// Specifically, it tests the verification portion of that macro, which is +// enabled when `cfg(any(feature = "derive", test))`. While `--cfg test` is of +// course passed to the code in the file you're reading right now, `trybuild` +// does not pass `--cfg test` when it invokes Cargo. As a result, this +// `trybuild` test only tests the correct behavior when the "derive" feature +// is enabled. 
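// Illustrative sketch, not part of the vendored sources: the cfg gating that
// the trybuild comment above relies on, in isolation. An item guarded by
// `cfg(any(feature = "derive", test))` is compiled when the crate's own tests
// run, but a build spawned by `trybuild` sees it only when the "derive"
// feature is enabled, because trybuild does not pass `--cfg test`. The module
// and function names here are hypothetical.
#[cfg(any(feature = "derive", test))]
mod verification_only {
    /// Stand-in for the verification half of `impl_or_verify!`.
    pub fn verify() {}
}

#[cfg(any(feature = "derive", test))]
fn run_verification_sketch() {
    verification_only::verify();
}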
+#![cfg(feature = "derive")] + +use testutil::{set_rustflags_w_warnings, ToolchainVersion}; + +#[test] +#[cfg_attr(miri, ignore)] +fn ui() { + let version = ToolchainVersion::extract_from_pwd().unwrap(); + // See the doc comment on this method for an explanation of what this does + // and why we store source files in different directories. + let source_files_dirname = version.get_ui_source_files_dirname_and_maybe_print_warning(); + + // Set `-Wwarnings` in the `RUSTFLAGS` environment variable to ensure that + // `.stderr` files reflect what the typical user would encounter. + set_rustflags_w_warnings(); + + let t = trybuild::TestCases::new(); + t.compile_fail(format!("tests/{}/*.rs", source_files_dirname)); +} + +#[test] +#[cfg_attr(miri, ignore)] +fn ui_invalid_impls() { + let version = ToolchainVersion::extract_from_pwd().unwrap(); + // See the doc comment on this method for an explanation of what this does + // and why we store source files in different directories. + let source_files_dirname = version.get_ui_source_files_dirname_and_maybe_print_warning(); + + // Set `-Wwarnings` in the `RUSTFLAGS` environment variable to ensure that + // `.stderr` files reflect what the typical user would encounter. + set_rustflags_w_warnings(); + + let t = trybuild::TestCases::new(); + t.compile_fail(format!("tests/{}/invalid-impls/*.rs", source_files_dirname)); +} diff --git a/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-from-bytes.rs b/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-from-bytes.rs new file mode 100644 index 00000000..327abca3 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-from-bytes.rs @@ -0,0 +1,21 @@ +// Copyright 2022 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::NotZerocopy; +use zerocopy::FromBytes; + +fn main() { + // We expect the proper diagnostic to be emitted on Rust 1.78.0 and later. 
+ takes_from_bytes::<NotZerocopy>(); +} + +fn takes_from_bytes<T: FromBytes>() {} diff --git a/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-from-bytes.stderr b/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-from-bytes.stderr new file mode 100644 index 00000000..8c7294f7 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-from-bytes.stderr @@ -0,0 +1,11 @@ +error[E0277]: the trait bound `NotZerocopy: zerocopy::FromBytes` is not satisfied + --> tests/ui-msrv/diagnostic-not-implemented-from-bytes.rs:18:24 + | +18 | takes_from_bytes::<NotZerocopy>(); + | ^^^^^^^^^^^ the trait `zerocopy::FromBytes` is not implemented for `NotZerocopy` + | +note: required by a bound in `takes_from_bytes` + --> tests/ui-msrv/diagnostic-not-implemented-from-bytes.rs:21:24 + | +21 | fn takes_from_bytes<T: FromBytes>() {} + | ^^^^^^^^^ required by this bound in `takes_from_bytes` diff --git a/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-from-zeros.rs b/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-from-zeros.rs new file mode 100644 index 00000000..6f7f41c2 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-from-zeros.rs @@ -0,0 +1,21 @@ +// Copyright 2022 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::NotZerocopy; +use zerocopy::FromZeros; + +fn main() { + // We expect the proper diagnostic to be emitted on Rust 1.78.0 and later. + takes_from_zeros::<NotZerocopy>(); +} + +fn takes_from_zeros<T: FromZeros>() {} diff --git a/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-from-zeros.stderr b/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-from-zeros.stderr new file mode 100644 index 00000000..894701e1 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-from-zeros.stderr @@ -0,0 +1,11 @@ +error[E0277]: the trait bound `NotZerocopy: FromZeros` is not satisfied + --> tests/ui-msrv/diagnostic-not-implemented-from-zeros.rs:18:24 + | +18 | takes_from_zeros::<NotZerocopy>(); + | ^^^^^^^^^^^ the trait `FromZeros` is not implemented for `NotZerocopy` + | +note: required by a bound in `takes_from_zeros` + --> tests/ui-msrv/diagnostic-not-implemented-from-zeros.rs:21:24 + | +21 | fn takes_from_zeros<T: FromZeros>() {} + | ^^^^^^^^^ required by this bound in `takes_from_zeros` diff --git a/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-immutable.rs b/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-immutable.rs new file mode 100644 index 00000000..0432ed44 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-immutable.rs @@ -0,0 +1,21 @@ +// Copyright 2022 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::NotZerocopy; +use zerocopy::Immutable; + +fn main() { + // We expect the proper diagnostic to be emitted on Rust 1.78.0 and later. + takes_immutable::<NotZerocopy>(); +} + +fn takes_immutable<T: Immutable>() {} diff --git a/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-immutable.stderr b/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-immutable.stderr new file mode 100644 index 00000000..d0093ad8 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-immutable.stderr @@ -0,0 +1,11 @@ +error[E0277]: the trait bound `NotZerocopy: zerocopy::Immutable` is not satisfied + --> tests/ui-msrv/diagnostic-not-implemented-immutable.rs:18:23 + | +18 | takes_immutable::<NotZerocopy>(); + | ^^^^^^^^^^^ the trait `zerocopy::Immutable` is not implemented for `NotZerocopy` + | +note: required by a bound in `takes_immutable` + --> tests/ui-msrv/diagnostic-not-implemented-immutable.rs:21:23 + | +21 | fn takes_immutable<T: Immutable>() {} + | ^^^^^^^^^ required by this bound in `takes_immutable` diff --git a/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-into-bytes.rs b/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-into-bytes.rs new file mode 100644 index 00000000..f9435536 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-into-bytes.rs @@ -0,0 +1,21 @@ +// Copyright 2022 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::NotZerocopy; +use zerocopy::IntoBytes; + +fn main() { + // We expect the proper diagnostic to be emitted on Rust 1.78.0 and later. 
+ takes_into_bytes::<NotZerocopy>(); +} + +fn takes_into_bytes<T: IntoBytes>() {} diff --git a/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-into-bytes.stderr b/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-into-bytes.stderr new file mode 100644 index 00000000..c2959ef8 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-into-bytes.stderr @@ -0,0 +1,11 @@ +error[E0277]: the trait bound `NotZerocopy: zerocopy::IntoBytes` is not satisfied + --> tests/ui-msrv/diagnostic-not-implemented-into-bytes.rs:18:24 + | +18 | takes_into_bytes::<NotZerocopy>(); + | ^^^^^^^^^^^ the trait `zerocopy::IntoBytes` is not implemented for `NotZerocopy` + | +note: required by a bound in `takes_into_bytes` + --> tests/ui-msrv/diagnostic-not-implemented-into-bytes.rs:21:24 + | +21 | fn takes_into_bytes<T: IntoBytes>() {} + | ^^^^^^^^^ required by this bound in `takes_into_bytes` diff --git a/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-issue-1296.rs b/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-issue-1296.rs new file mode 100644 index 00000000..e12737a6 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-issue-1296.rs @@ -0,0 +1,59 @@ +// Copyright 2022 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::NotZerocopy; +use zerocopy::{Immutable, IntoBytes}; + +fn main() { + // This is adapted from #1296, which includes the following text: + // + // The compiler errors when a type is missing Immutable are somewhat + // misleading, although I'm not sure there's much zerocopy can do about + // this. An example where the compiler recommends adding a reference + // rather than implementing Immutable (some were even more confusing than + // this): + // + // error[E0277]: the trait bound `virtio::wl::CtrlVfdNewDmabuf: zerocopy::Immutable` is not satisfied + // --> devices/src/virtio/wl.rs:317:20 + // | + // 317 | .write_obj(ctrl_vfd_new_dmabuf) + // | --------- ^^^^^^^^^^^^^^^^^^^ the trait `zerocopy::Immutable` is not implemented for `virtio::wl::CtrlVfdNewDmabuf` + // | | + // | required by a bound introduced by this call + // | + // note: required by a bound in `virtio::descriptor_utils::Writer::write_obj` + // --> devices/src/virtio/descriptor_utils.rs:536:25 + // | + // 536 | pub fn write_obj<T: Immutable + IntoBytes>(&mut self, val: T) -> io::Result<()> { + // | ^^^^^^^^^ required by this bound in `Writer::write_obj` + // help: consider borrowing here + // | + // 317 | .write_obj(&ctrl_vfd_new_dmabuf) + // | + + // 317 | .write_obj(&mut ctrl_vfd_new_dmabuf) + // | ++++ + // + // Taking the compiler's suggestion results in a different error with a + // recommendation to remove the reference (back to the original code). + // + // As of this writing, the described problem is still happening thanks to + // https://github.com/rust-lang/rust/issues/130563. We include this test so + // that we can capture the current behavior, but we will update it once that + // Rust issue is fixed. 
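// Illustrative sketch, not part of the vendored sources: the fix that the
// misleading diagnostic discussed above points away from. Deriving the missing
// traits on the value type satisfies `write_obj`-style bounds directly, with
// no borrowing involved. The type and method names below are hypothetical; the
// derives are zerocopy's own, assuming the "derive" feature is enabled. (The
// failing `write_obj` call that the comment above introduces follows
// immediately below this aside.)
#[derive(zerocopy::IntoBytes, zerocopy::Immutable)]
#[repr(C)]
struct CtrlMessage {
    kind: u32,
    len: u32,
}

struct Writer;

impl Writer {
    fn write_obj<T: zerocopy::Immutable + zerocopy::IntoBytes>(&mut self, _val: T) {}
}

fn writes_without_borrowing() {
    // Compiles as written; no "consider borrowing here" suggestion arises.
    Writer.write_obj(CtrlMessage { kind: 1, len: 0 });
}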
+ Foo.write_obj(NotZerocopy(())); +} + +struct Foo; + +impl Foo { + fn write_obj<T: Immutable + IntoBytes>(&mut self, _val: T) {} +} diff --git a/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-issue-1296.stderr b/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-issue-1296.stderr new file mode 100644 index 00000000..0475a649 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-issue-1296.stderr @@ -0,0 +1,11 @@ +error[E0277]: the trait bound `NotZerocopy: zerocopy::Immutable` is not satisfied + --> tests/ui-msrv/diagnostic-not-implemented-issue-1296.rs:52:19 + | +52 | Foo.write_obj(NotZerocopy(())); + | ^^^^^^^^^^^^^^^ the trait `zerocopy::Immutable` is not implemented for `NotZerocopy` + +error[E0277]: the trait bound `NotZerocopy: zerocopy::IntoBytes` is not satisfied + --> tests/ui-msrv/diagnostic-not-implemented-issue-1296.rs:52:19 + | +52 | Foo.write_obj(NotZerocopy(())); + | ^^^^^^^^^^^^^^^ the trait `zerocopy::IntoBytes` is not implemented for `NotZerocopy` diff --git a/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-known-layout.rs b/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-known-layout.rs new file mode 100644 index 00000000..2e306c25 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-known-layout.rs @@ -0,0 +1,21 @@ +// Copyright 2022 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::NotZerocopy; +use zerocopy::KnownLayout; + +fn main() { + // We expect the proper diagnostic to be emitted on Rust 1.78.0 and later. + takes_known_layout::<NotZerocopy>(); +} + +fn takes_known_layout<T: KnownLayout>() {} diff --git a/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-known-layout.stderr b/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-known-layout.stderr new file mode 100644 index 00000000..d3cfd29c --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-known-layout.stderr @@ -0,0 +1,11 @@ +error[E0277]: the trait bound `NotZerocopy: zerocopy::KnownLayout` is not satisfied + --> tests/ui-msrv/diagnostic-not-implemented-known-layout.rs:18:26 + | +18 | takes_known_layout::<NotZerocopy>(); + | ^^^^^^^^^^^ the trait `zerocopy::KnownLayout` is not implemented for `NotZerocopy` + | +note: required by a bound in `takes_known_layout` + --> tests/ui-msrv/diagnostic-not-implemented-known-layout.rs:21:26 + | +21 | fn takes_known_layout<T: KnownLayout>() {} + | ^^^^^^^^^^^ required by this bound in `takes_known_layout` diff --git a/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-try-from-bytes.rs b/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-try-from-bytes.rs new file mode 100644 index 00000000..72e72139 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-try-from-bytes.rs @@ -0,0 +1,21 @@ +// Copyright 2022 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. 
+// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::NotZerocopy; +use zerocopy::TryFromBytes; + +fn main() { + // We expect the proper diagnostic to be emitted on Rust 1.78.0 and later. + takes_try_from_bytes::<NotZerocopy>(); +} + +fn takes_try_from_bytes<T: TryFromBytes>() {} diff --git a/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-try-from-bytes.stderr b/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-try-from-bytes.stderr new file mode 100644 index 00000000..8e27c9c8 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-try-from-bytes.stderr @@ -0,0 +1,11 @@ +error[E0277]: the trait bound `NotZerocopy: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-msrv/diagnostic-not-implemented-try-from-bytes.rs:18:28 + | +18 | takes_try_from_bytes::<NotZerocopy>(); + | ^^^^^^^^^^^ the trait `zerocopy::TryFromBytes` is not implemented for `NotZerocopy` + | +note: required by a bound in `takes_try_from_bytes` + --> tests/ui-msrv/diagnostic-not-implemented-try-from-bytes.rs:21:28 + | +21 | fn takes_try_from_bytes<T: TryFromBytes>() {} + | ^^^^^^^^^^^^ required by this bound in `takes_try_from_bytes` diff --git a/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-unaligned.rs b/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-unaligned.rs new file mode 100644 index 00000000..95ce5bd5 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-unaligned.rs @@ -0,0 +1,21 @@ +// Copyright 2022 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::NotZerocopy; +use zerocopy::Unaligned; + +fn main() { + // We expect the proper diagnostic to be emitted on Rust 1.78.0 and later. 
+ takes_unaligned::<NotZerocopy>(); +} + +fn takes_unaligned<T: Unaligned>() {} diff --git a/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-unaligned.stderr b/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-unaligned.stderr new file mode 100644 index 00000000..73ad6d9b --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/diagnostic-not-implemented-unaligned.stderr @@ -0,0 +1,11 @@ +error[E0277]: the trait bound `NotZerocopy: zerocopy::Unaligned` is not satisfied + --> tests/ui-msrv/diagnostic-not-implemented-unaligned.rs:18:23 + | +18 | takes_unaligned::<NotZerocopy>(); + | ^^^^^^^^^^^ the trait `zerocopy::Unaligned` is not implemented for `NotZerocopy` + | +note: required by a bound in `takes_unaligned` + --> tests/ui-msrv/diagnostic-not-implemented-unaligned.rs:21:23 + | +21 | fn takes_unaligned<T: Unaligned>() {} + | ^^^^^^^^^ required by this bound in `takes_unaligned` diff --git a/vendor/zerocopy/tests/ui-msrv/include_value_not_from_bytes.rs b/vendor/zerocopy/tests/ui-msrv/include_value_not_from_bytes.rs new file mode 100644 index 00000000..e6289336 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/include_value_not_from_bytes.rs @@ -0,0 +1,19 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +#[macro_use] +extern crate zerocopy; + +use util::NotZerocopy; + +fn main() {} + +// Should fail because `NotZerocopy<u32>: !FromBytes`. +const NOT_FROM_BYTES: NotZerocopy<u32> = include_value!("../../testdata/include_value/data"); diff --git a/vendor/zerocopy/tests/ui-msrv/include_value_not_from_bytes.stderr b/vendor/zerocopy/tests/ui-msrv/include_value_not_from_bytes.stderr new file mode 100644 index 00000000..dea5dd1f --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/include_value_not_from_bytes.stderr @@ -0,0 +1,15 @@ +error[E0277]: the trait bound `NotZerocopy<u32>: zerocopy::FromBytes` is not satisfied + --> tests/ui-msrv/include_value_not_from_bytes.rs:19:42 + | +19 | const NOT_FROM_BYTES: NotZerocopy<u32> = include_value!("../../testdata/include_value/data"); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `zerocopy::FromBytes` is not implemented for `NotZerocopy<u32>` + | +note: required by a bound in `NOT_FROM_BYTES::transmute` + --> tests/ui-msrv/include_value_not_from_bytes.rs:19:42 + | +19 | const NOT_FROM_BYTES: NotZerocopy<u32> = include_value!("../../testdata/include_value/data"); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | | + | required by a bound in this + | required by this bound in `NOT_FROM_BYTES::transmute` + = note: this error originates in the macro `$crate::transmute` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-msrv/include_value_wrong_size.rs b/vendor/zerocopy/tests/ui-msrv/include_value_wrong_size.rs new file mode 100644 index 00000000..e02b2be8 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/include_value_wrong_size.rs @@ -0,0 +1,15 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or 
https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +#[macro_use] +extern crate zerocopy; + +fn main() {} + +// Should fail because the file is 4 bytes long, not 8. +const WRONG_SIZE: u64 = include_value!("../../testdata/include_value/data"); diff --git a/vendor/zerocopy/tests/ui-msrv/include_value_wrong_size.stderr b/vendor/zerocopy/tests/ui-msrv/include_value_wrong_size.stderr new file mode 100644 index 00000000..b4531c7f --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/include_value_wrong_size.stderr @@ -0,0 +1,9 @@ +error[E0512]: cannot transmute between types of different sizes, or dependently-sized types + --> tests/ui-msrv/include_value_wrong_size.rs:15:25 + | +15 | const WRONG_SIZE: u64 = include_value!("../../testdata/include_value/data"); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: source type: `[u8; 4]` (32 bits) + = note: target type: `u64` (64 bits) + = note: this error originates in the macro `$crate::transmute` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-msrv/invalid-impls/invalid-impls.rs b/vendor/zerocopy/tests/ui-msrv/invalid-impls/invalid-impls.rs new file mode 100644 index 00000000..78f40e84 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/invalid-impls/invalid-impls.rs @@ -0,0 +1,32 @@ +// Copyright 2022 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +// Since some macros from `macros.rs` are unused. +#![allow(unused)] + +extern crate zerocopy; +extern crate zerocopy_derive; + +include!("../../../src/util/macros.rs"); + +use zerocopy::*; +use zerocopy_derive::*; + +fn main() {} + +#[derive(FromBytes, IntoBytes, Unaligned)] +#[repr(transparent)] +struct Foo<T>(T); + +const _: () = unsafe { + impl_or_verify!(T => TryFromBytes for Foo<T>); + impl_or_verify!(T => FromZeros for Foo<T>); + impl_or_verify!(T => FromBytes for Foo<T>); + impl_or_verify!(T => IntoBytes for Foo<T>); + impl_or_verify!(T => Unaligned for Foo<T>); +}; diff --git a/vendor/zerocopy/tests/ui-msrv/invalid-impls/invalid-impls.stderr b/vendor/zerocopy/tests/ui-msrv/invalid-impls/invalid-impls.stderr new file mode 100644 index 00000000..437864e9 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/invalid-impls/invalid-impls.stderr @@ -0,0 +1,159 @@ +error[E0277]: the trait bound `T: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-msrv/invalid-impls/../../../src/util/macros.rs + | + | impl<$($tyvar $(: $(? 
$optbound +)* $($bound +)*)?),*> Subtrait for $ty {} + | ^^^^^^^^ the trait `zerocopy::TryFromBytes` is not implemented for `T` + | + ::: tests/ui-msrv/invalid-impls/invalid-impls.rs:27:5 + | +27 | impl_or_verify!(T => TryFromBytes for Foo<T>); + | ---------------------------------------------- in this macro invocation + | +note: required because of the requirements on the impl of `zerocopy::TryFromBytes` for `Foo<T>` + --> tests/ui-msrv/invalid-impls/invalid-impls.rs:22:10 + | +22 | #[derive(FromBytes, IntoBytes, Unaligned)] + | ^^^^^^^^^ +note: required by a bound in `_::Subtrait` + --> tests/ui-msrv/invalid-impls/../../../src/util/macros.rs + | + | trait Subtrait: $trait {} + | ^^^^^^ required by this bound in `_::Subtrait` + | + ::: tests/ui-msrv/invalid-impls/invalid-impls.rs:27:5 + | +27 | impl_or_verify!(T => TryFromBytes for Foo<T>); + | ---------------------------------------------- in this macro invocation + = note: this error originates in the macro `impl_or_verify` (in Nightly builds, run with -Z macro-backtrace for more info) +help: consider restricting type parameter `T` + | +27 | impl_or_verify!(T: zerocopy::TryFromBytes => TryFromBytes for Foo<T>); + | ++++++++++++++++++++++++ + +error[E0277]: the trait bound `T: zerocopy::FromZeros` is not satisfied + --> tests/ui-msrv/invalid-impls/../../../src/util/macros.rs + | + | impl<$($tyvar $(: $(? $optbound +)* $($bound +)*)?),*> Subtrait for $ty {} + | ^^^^^^^^ the trait `zerocopy::FromZeros` is not implemented for `T` + | + ::: tests/ui-msrv/invalid-impls/invalid-impls.rs:28:5 + | +28 | impl_or_verify!(T => FromZeros for Foo<T>); + | ------------------------------------------- in this macro invocation + | +note: required because of the requirements on the impl of `zerocopy::FromZeros` for `Foo<T>` + --> tests/ui-msrv/invalid-impls/invalid-impls.rs:22:10 + | +22 | #[derive(FromBytes, IntoBytes, Unaligned)] + | ^^^^^^^^^ +note: required by a bound in `_::Subtrait` + --> tests/ui-msrv/invalid-impls/../../../src/util/macros.rs + | + | trait Subtrait: $trait {} + | ^^^^^^ required by this bound in `_::Subtrait` + | + ::: tests/ui-msrv/invalid-impls/invalid-impls.rs:28:5 + | +28 | impl_or_verify!(T => FromZeros for Foo<T>); + | ------------------------------------------- in this macro invocation + = note: this error originates in the macro `impl_or_verify` (in Nightly builds, run with -Z macro-backtrace for more info) +help: consider restricting type parameter `T` + | +28 | impl_or_verify!(T: zerocopy::FromZeros => FromZeros for Foo<T>); + | +++++++++++++++++++++ + +error[E0277]: the trait bound `T: zerocopy::FromBytes` is not satisfied + --> tests/ui-msrv/invalid-impls/../../../src/util/macros.rs + | + | impl<$($tyvar $(: $(? 
$optbound +)* $($bound +)*)?),*> Subtrait for $ty {} + | ^^^^^^^^ the trait `zerocopy::FromBytes` is not implemented for `T` + | + ::: tests/ui-msrv/invalid-impls/invalid-impls.rs:29:5 + | +29 | impl_or_verify!(T => FromBytes for Foo<T>); + | ------------------------------------------- in this macro invocation + | +note: required because of the requirements on the impl of `zerocopy::FromBytes` for `Foo<T>` + --> tests/ui-msrv/invalid-impls/invalid-impls.rs:22:10 + | +22 | #[derive(FromBytes, IntoBytes, Unaligned)] + | ^^^^^^^^^ +note: required by a bound in `_::Subtrait` + --> tests/ui-msrv/invalid-impls/../../../src/util/macros.rs + | + | trait Subtrait: $trait {} + | ^^^^^^ required by this bound in `_::Subtrait` + | + ::: tests/ui-msrv/invalid-impls/invalid-impls.rs:29:5 + | +29 | impl_or_verify!(T => FromBytes for Foo<T>); + | ------------------------------------------- in this macro invocation + = note: this error originates in the macro `impl_or_verify` (in Nightly builds, run with -Z macro-backtrace for more info) +help: consider restricting type parameter `T` + | +29 | impl_or_verify!(T: zerocopy::FromBytes => FromBytes for Foo<T>); + | +++++++++++++++++++++ + +error[E0277]: the trait bound `T: zerocopy::IntoBytes` is not satisfied + --> tests/ui-msrv/invalid-impls/../../../src/util/macros.rs + | + | impl<$($tyvar $(: $(? $optbound +)* $($bound +)*)?),*> Subtrait for $ty {} + | ^^^^^^^^ the trait `zerocopy::IntoBytes` is not implemented for `T` + | + ::: tests/ui-msrv/invalid-impls/invalid-impls.rs:30:5 + | +30 | impl_or_verify!(T => IntoBytes for Foo<T>); + | ------------------------------------------- in this macro invocation + | +note: required because of the requirements on the impl of `zerocopy::IntoBytes` for `Foo<T>` + --> tests/ui-msrv/invalid-impls/invalid-impls.rs:22:21 + | +22 | #[derive(FromBytes, IntoBytes, Unaligned)] + | ^^^^^^^^^ +note: required by a bound in `_::Subtrait` + --> tests/ui-msrv/invalid-impls/../../../src/util/macros.rs + | + | trait Subtrait: $trait {} + | ^^^^^^ required by this bound in `_::Subtrait` + | + ::: tests/ui-msrv/invalid-impls/invalid-impls.rs:30:5 + | +30 | impl_or_verify!(T => IntoBytes for Foo<T>); + | ------------------------------------------- in this macro invocation + = note: this error originates in the macro `impl_or_verify` (in Nightly builds, run with -Z macro-backtrace for more info) +help: consider restricting type parameter `T` + | +30 | impl_or_verify!(T: zerocopy::IntoBytes => IntoBytes for Foo<T>); + | +++++++++++++++++++++ + +error[E0277]: the trait bound `T: zerocopy::Unaligned` is not satisfied + --> tests/ui-msrv/invalid-impls/../../../src/util/macros.rs + | + | impl<$($tyvar $(: $(? 
$optbound +)* $($bound +)*)?),*> Subtrait for $ty {} + | ^^^^^^^^ the trait `zerocopy::Unaligned` is not implemented for `T` + | + ::: tests/ui-msrv/invalid-impls/invalid-impls.rs:31:5 + | +31 | impl_or_verify!(T => Unaligned for Foo<T>); + | ------------------------------------------- in this macro invocation + | +note: required because of the requirements on the impl of `zerocopy::Unaligned` for `Foo<T>` + --> tests/ui-msrv/invalid-impls/invalid-impls.rs:22:32 + | +22 | #[derive(FromBytes, IntoBytes, Unaligned)] + | ^^^^^^^^^ +note: required by a bound in `_::Subtrait` + --> tests/ui-msrv/invalid-impls/../../../src/util/macros.rs + | + | trait Subtrait: $trait {} + | ^^^^^^ required by this bound in `_::Subtrait` + | + ::: tests/ui-msrv/invalid-impls/invalid-impls.rs:31:5 + | +31 | impl_or_verify!(T => Unaligned for Foo<T>); + | ------------------------------------------- in this macro invocation + = note: this error originates in the macro `impl_or_verify` (in Nightly builds, run with -Z macro-backtrace for more info) +help: consider restricting type parameter `T` + | +31 | impl_or_verify!(T: zerocopy::Unaligned => Unaligned for Foo<T>); + | +++++++++++++++++++++ diff --git a/vendor/zerocopy/tests/ui-msrv/max-align.rs b/vendor/zerocopy/tests/ui-msrv/max-align.rs new file mode 100644 index 00000000..53e3eb9b --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/max-align.rs @@ -0,0 +1,99 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +#[repr(C, align(1))] +struct Align1; + +#[repr(C, align(2))] +struct Align2; + +#[repr(C, align(4))] +struct Align4; + +#[repr(C, align(8))] +struct Align8; + +#[repr(C, align(16))] +struct Align16; + +#[repr(C, align(32))] +struct Align32; + +#[repr(C, align(64))] +struct Align64; + +#[repr(C, align(128))] +struct Align128; + +#[repr(C, align(256))] +struct Align256; + +#[repr(C, align(512))] +struct Align512; + +#[repr(C, align(1024))] +struct Align1024; + +#[repr(C, align(2048))] +struct Align2048; + +#[repr(C, align(4096))] +struct Align4096; + +#[repr(C, align(8192))] +struct Align8192; + +#[repr(C, align(16384))] +struct Align16384; + +#[repr(C, align(32768))] +struct Align32768; + +#[repr(C, align(65536))] +struct Align65536; + +#[repr(C, align(131072))] +struct Align131072; + +#[repr(C, align(262144))] +struct Align262144; + +#[repr(C, align(524288))] +struct Align524288; + +#[repr(C, align(1048576))] +struct Align1048576; + +#[repr(C, align(2097152))] +struct Align2097152; + +#[repr(C, align(4194304))] +struct Align4194304; + +#[repr(C, align(8388608))] +struct Align8388608; + +#[repr(C, align(16777216))] +struct Align16777216; + +#[repr(C, align(33554432))] +struct Align33554432; + +#[repr(C, align(67108864))] +struct Align67108864; + +#[repr(C, align(134217728))] +struct Align13421772; + +#[repr(C, align(268435456))] +struct Align26843545; + +#[repr(C, align(1073741824))] +struct Align1073741824; + +fn main() {} diff --git a/vendor/zerocopy/tests/ui-msrv/max-align.stderr b/vendor/zerocopy/tests/ui-msrv/max-align.stderr new file mode 100644 index 00000000..6ab6e47e --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/max-align.stderr @@ -0,0 +1,5 @@ +error[E0589]: invalid `repr(align)` attribute: larger than 2^29 + --> tests/ui-msrv/max-align.rs:96:11 + | +96 | #[repr(C, align(1073741824))] + | ^^^^^^^^^^^^^^^^^ diff --git a/vendor/zerocopy/tests/ui-msrv/ptr-is-invariant-over-v.rs b/vendor/zerocopy/tests/ui-msrv/ptr-is-invariant-over-v.rs new file mode 100644 index 00000000..b9a76948 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/ptr-is-invariant-over-v.rs @@ -0,0 +1,29 @@ +// Copyright 2025 The Fuchsia Authors +// +// Licensed under the 2-Clause BSD License <LICENSE-BSD or +// https://opensource.org/license/bsd-2-clause>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +use zerocopy::pointer::{ + invariant::{Aligned, Exclusive, Shared, Valid}, + Ptr, +}; + +fn _when_exclusive<'big: 'small, 'small>( + big: Ptr<'small, &'big u32, (Exclusive, Aligned, Valid)>, + mut _small: Ptr<'small, &'small u32, (Exclusive, Aligned, Valid)>, +) { + _small = big; +} + +fn _when_shared<'big: 'small, 'small>( + big: Ptr<'small, &'big u32, (Shared, Aligned, Valid)>, + mut _small: Ptr<'small, &'small u32, (Shared, Aligned, Valid)>, +) { + _small = big; +} + +fn main() {} diff --git a/vendor/zerocopy/tests/ui-msrv/ptr-is-invariant-over-v.stderr b/vendor/zerocopy/tests/ui-msrv/ptr-is-invariant-over-v.stderr new file mode 100644 index 00000000..f628f7b4 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/ptr-is-invariant-over-v.stderr @@ -0,0 +1,17 @@ +error[E0623]: lifetime mismatch + --> tests/ui-msrv/ptr-is-invariant-over-v.rs:19:14 + | +16 | big: Ptr<'small, &'big u32, (Exclusive, Aligned, Valid)>, + | --------------------------------------------------- these two types are declared with different lifetimes... +... +19 | _small = big; + | ^^^ ...but data from `big` flows into `big` here + +error[E0623]: lifetime mismatch + --> tests/ui-msrv/ptr-is-invariant-over-v.rs:26:14 + | +23 | big: Ptr<'small, &'big u32, (Shared, Aligned, Valid)>, + | ------------------------------------------------ these two types are declared with different lifetimes... +... +26 | _small = big; + | ^^^ ...but data from `big` flows into `big` here diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-dst-not-frombytes.rs b/vendor/zerocopy/tests/ui-msrv/transmute-dst-not-frombytes.rs new file mode 100644 index 00000000..7b5098e5 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-dst-not-frombytes.rs @@ -0,0 +1,19 @@ +// Copyright 2022 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
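// Illustrative sketch, not part of the vendored sources: the property the
// `ptr-is-invariant-over-v` UI test above pins down, restated with a standard
// type. `&mut T` is invariant in `T`, just as `Ptr` is invariant over `V`.
// (The `transmute-dst-not-frombytes` test whose header appears just above
// continues below this aside.)
fn _invariance_sketch<'big: 'small, 'small>(
    big: &'small mut &'big u32,
    small: &'small mut &'small u32,
) {
    // Writing `let shrunk: &'small mut &'small u32 = big;` here would be
    // rejected with the same kind of lifetime-mismatch error the expected
    // .stderr above records for `Ptr`, because invariance forbids shrinking
    // the inner `&'big u32` to `&'small u32` behind a mutable reference.
    // Reading through either reference is fine:
    let _ = (**big, **small);
}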
+ +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::{NotZerocopy, AU16}; +use zerocopy::transmute; + +fn main() {} + +// `transmute` requires that the destination type implements `FromBytes` +const DST_NOT_FROM_BYTES: NotZerocopy = transmute!(AU16(0)); diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-dst-not-frombytes.stderr b/vendor/zerocopy/tests/ui-msrv/transmute-dst-not-frombytes.stderr new file mode 100644 index 00000000..e3f7f5d6 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-dst-not-frombytes.stderr @@ -0,0 +1,15 @@ +error[E0277]: the trait bound `NotZerocopy: zerocopy::FromBytes` is not satisfied + --> tests/ui-msrv/transmute-dst-not-frombytes.rs:19:41 + | +19 | const DST_NOT_FROM_BYTES: NotZerocopy = transmute!(AU16(0)); + | ^^^^^^^^^^^^^^^^^^^ the trait `zerocopy::FromBytes` is not implemented for `NotZerocopy` + | +note: required by a bound in `DST_NOT_FROM_BYTES::transmute` + --> tests/ui-msrv/transmute-dst-not-frombytes.rs:19:41 + | +19 | const DST_NOT_FROM_BYTES: NotZerocopy = transmute!(AU16(0)); + | ^^^^^^^^^^^^^^^^^^^ + | | + | required by a bound in this + | required by this bound in `DST_NOT_FROM_BYTES::transmute` + = note: this error originates in the macro `transmute` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-mut-const.rs b/vendor/zerocopy/tests/ui-msrv/transmute-mut-const.rs new file mode 100644 index 00000000..47372b1b --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-mut-const.rs @@ -0,0 +1,20 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use zerocopy::transmute_mut; + +fn main() {} + +const ARRAY_OF_U8S: [u8; 2] = [0u8; 2]; + +// `transmute_mut!` cannot, generally speaking, be used in const contexts. 
+const CONST_CONTEXT: &mut [u8; 2] = transmute_mut!(&mut ARRAY_OF_U8S); diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-mut-const.stderr b/vendor/zerocopy/tests/ui-msrv/transmute-mut-const.stderr new file mode 100644 index 00000000..8578fa19 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-mut-const.stderr @@ -0,0 +1,40 @@ +warning: taking a mutable reference to a `const` item + --> tests/ui-msrv/transmute-mut-const.rs:20:52 + | +20 | const CONST_CONTEXT: &mut [u8; 2] = transmute_mut!(&mut ARRAY_OF_U8S); + | ^^^^^^^^^^^^^^^^^ + | + = note: `#[warn(const_item_mutation)]` on by default + = note: each usage of a `const` item creates a new temporary + = note: the mutable reference will refer to this temporary, not the original `const` item +note: `const` item defined here + --> tests/ui-msrv/transmute-mut-const.rs:17:1 + | +17 | const ARRAY_OF_U8S: [u8; 2] = [0u8; 2]; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +error[E0658]: mutable references are not allowed in constants + --> tests/ui-msrv/transmute-mut-const.rs:20:52 + | +20 | const CONST_CONTEXT: &mut [u8; 2] = transmute_mut!(&mut ARRAY_OF_U8S); + | ^^^^^^^^^^^^^^^^^ + | + = note: see issue #57349 <https://github.com/rust-lang/rust/issues/57349> for more information + +error[E0015]: calls in constants are limited to constant functions, tuple structs and tuple variants + --> tests/ui-msrv/transmute-mut-const.rs:20:37 + | +20 | const CONST_CONTEXT: &mut [u8; 2] = transmute_mut!(&mut ARRAY_OF_U8S); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0716]: temporary value dropped while borrowed + --> tests/ui-msrv/transmute-mut-const.rs:20:57 + | +20 | const CONST_CONTEXT: &mut [u8; 2] = transmute_mut!(&mut ARRAY_OF_U8S); + | --------------------^^^^^^^^^^^^- + | | | + | | creates a temporary which is freed while still in use + | temporary value is freed at the end of this statement + | using this value as a constant requires that borrow lasts for `'static` diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-mut-dst-not-a-reference.rs b/vendor/zerocopy/tests/ui-msrv/transmute-mut-dst-not-a-reference.rs new file mode 100644 index 00000000..33a9ecd9 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-mut-dst-not-a-reference.rs @@ -0,0 +1,17 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute_mut; + +fn main() {} + +// `transmute_mut!` does not support transmuting into a non-reference +// destination type. 
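// Illustrative sketch, not part of the vendored sources: a well-formed use of
// `transmute_mut!`, for contrast with the restrictions the surrounding UI
// tests pin down -- it must run outside `const` contexts, convert `&mut` to
// `&mut`, and (as the neighbouring `*-not-frombytes`/`*-not-intobytes` cases
// suggest) both sides must be `FromBytes + IntoBytes` with compatible size and
// alignment. (The non-reference-destination case introduced just above
// continues immediately below.)
fn transmute_mut_ok() {
    let mut word = 0x0102_0304u32;
    let bytes: &mut [u8; 4] = zerocopy::transmute_mut!(&mut word);
    *bytes = [0u8; 4];
    assert_eq!(word, 0);
}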
+const DST_NOT_A_REFERENCE: usize = transmute_mut!(&mut 0u8); diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-mut-dst-not-a-reference.stderr b/vendor/zerocopy/tests/ui-msrv/transmute-mut-dst-not-a-reference.stderr new file mode 100644 index 00000000..295aa211 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-mut-dst-not-a-reference.stderr @@ -0,0 +1,9 @@ +error[E0308]: mismatched types + --> tests/ui-msrv/transmute-mut-dst-not-a-reference.rs:17:36 + | +17 | const DST_NOT_A_REFERENCE: usize = transmute_mut!(&mut 0u8); + | ^^^^^^^^^^^^^^^^^^^^^^^^ expected `usize`, found `&mut _` + | + = note: expected type `usize` + found mutable reference `&mut _` + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-mut-dst-not-frombytes.rs b/vendor/zerocopy/tests/ui-msrv/transmute-mut-dst-not-frombytes.rs new file mode 100644 index 00000000..00390755 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-mut-dst-not-frombytes.rs @@ -0,0 +1,24 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute_mut; + +fn main() {} + +#[derive(zerocopy::FromBytes, zerocopy::IntoBytes, zerocopy::Immutable)] +#[repr(C)] +struct Src; + +#[derive(zerocopy::IntoBytes, zerocopy::Immutable)] +#[repr(C)] +struct Dst; + +// `transmute_mut` requires that the destination type implements `FromBytes` +const DST_NOT_FROM_BYTES: &mut Dst = transmute_mut!(&mut Src); diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-mut-dst-not-frombytes.stderr b/vendor/zerocopy/tests/ui-msrv/transmute-mut-dst-not-frombytes.stderr new file mode 100644 index 00000000..abb87e63 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-mut-dst-not-frombytes.stderr @@ -0,0 +1,7 @@ +error[E0277]: the trait bound `Dst: FromBytes` is not satisfied + --> tests/ui-msrv/transmute-mut-dst-not-frombytes.rs:24:38 + | +24 | const DST_NOT_FROM_BYTES: &mut Dst = transmute_mut!(&mut Src); + | ^^^^^^^^^^^^^^^^^^^^^^^^ the trait `FromBytes` is not implemented for `Dst` + | + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-mut-dst-not-intobytes.rs b/vendor/zerocopy/tests/ui-msrv/transmute-mut-dst-not-intobytes.rs new file mode 100644 index 00000000..27cf93ae --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-mut-dst-not-intobytes.rs @@ -0,0 +1,24 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +extern crate zerocopy; + +use zerocopy::transmute_mut; + +fn main() {} + +#[derive(zerocopy::FromBytes, zerocopy::IntoBytes, zerocopy::Immutable)] +#[repr(C)] +struct Src; + +#[derive(zerocopy::FromBytes, zerocopy::Immutable)] +#[repr(C)] +struct Dst; + +// `transmute_mut` requires that the destination type implements `IntoBytes` +const DST_NOT_AS_BYTES: &mut Dst = transmute_mut!(&mut Src); diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-mut-dst-not-intobytes.stderr b/vendor/zerocopy/tests/ui-msrv/transmute-mut-dst-not-intobytes.stderr new file mode 100644 index 00000000..2f4da83a --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-mut-dst-not-intobytes.stderr @@ -0,0 +1,7 @@ +error[E0277]: the trait bound `Dst: IntoBytes` is not satisfied + --> tests/ui-msrv/transmute-mut-dst-not-intobytes.rs:24:36 + | +24 | const DST_NOT_AS_BYTES: &mut Dst = transmute_mut!(&mut Src); + | ^^^^^^^^^^^^^^^^^^^^^^^^ the trait `IntoBytes` is not implemented for `Dst` + | + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-mut-dst-unsized.rs b/vendor/zerocopy/tests/ui-msrv/transmute-mut-dst-unsized.rs new file mode 100644 index 00000000..693ccda5 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-mut-dst-unsized.rs @@ -0,0 +1,17 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute_mut; + +fn main() {} + +// `transmute_mut!` does not support transmuting into an unsized destination +// type. +const DST_UNSIZED: &mut [u8] = transmute_mut!(&mut [0u8; 1]); diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-mut-dst-unsized.stderr b/vendor/zerocopy/tests/ui-msrv/transmute-mut-dst-unsized.stderr new file mode 100644 index 00000000..087d7b81 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-mut-dst-unsized.stderr @@ -0,0 +1,8 @@ +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> tests/ui-msrv/transmute-mut-dst-unsized.rs:17:32 + | +17 | const DST_UNSIZED: &mut [u8] = transmute_mut!(&mut [0u8; 1]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time + | + = help: the trait `Sized` is not implemented for `[u8]` + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-mut-illegal-lifetime.rs b/vendor/zerocopy/tests/ui-msrv/transmute-mut-illegal-lifetime.rs new file mode 100644 index 00000000..c31765e4 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-mut-illegal-lifetime.rs @@ -0,0 +1,15 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +fn main() {} + +fn increase_lifetime() { + let mut x = 0u64; + // It is illegal to increase the lifetime scope. 
+ let _: &'static mut u64 = zerocopy::transmute_mut!(&mut x); +} diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-mut-illegal-lifetime.stderr b/vendor/zerocopy/tests/ui-msrv/transmute-mut-illegal-lifetime.stderr new file mode 100644 index 00000000..5ff71459 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-mut-illegal-lifetime.stderr @@ -0,0 +1,9 @@ +error[E0597]: `x` does not live long enough + --> tests/ui-msrv/transmute-mut-illegal-lifetime.rs:14:56 + | +14 | let _: &'static mut u64 = zerocopy::transmute_mut!(&mut x); + | ---------------- ^^^^^^ borrowed value does not live long enough + | | + | type annotation requires that `x` is borrowed for `'static` +15 | } + | - `x` dropped here while still borrowed diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-mut-src-dst-not-references.rs b/vendor/zerocopy/tests/ui-msrv/transmute-mut-src-dst-not-references.rs new file mode 100644 index 00000000..98cc5208 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-mut-src-dst-not-references.rs @@ -0,0 +1,17 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute_mut; + +fn main() {} + +// `transmute_mut!` does not support transmuting between non-reference source +// and destination types. +const SRC_DST_NOT_REFERENCES: &mut usize = transmute_mut!(0usize); diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-mut-src-dst-not-references.stderr b/vendor/zerocopy/tests/ui-msrv/transmute-mut-src-dst-not-references.stderr new file mode 100644 index 00000000..c500a93a --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-mut-src-dst-not-references.stderr @@ -0,0 +1,12 @@ +error[E0308]: mismatched types + --> tests/ui-msrv/transmute-mut-src-dst-not-references.rs:17:59 + | +17 | const SRC_DST_NOT_REFERENCES: &mut usize = transmute_mut!(0usize); + | ---------------^^^^^^- + | | | + | | expected `&mut _`, found `usize` + | | help: consider mutably borrowing here: `&mut 0usize` + | expected due to this + | + = note: expected mutable reference `&mut _` + found type `usize` diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-mut-src-immutable.rs b/vendor/zerocopy/tests/ui-msrv/transmute-mut-src-immutable.rs new file mode 100644 index 00000000..08088d0d --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-mut-src-immutable.rs @@ -0,0 +1,18 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute_mut; + +fn main() {} + +fn ref_src_immutable() { + // `transmute_mut!` requires that its source type be a mutable reference. 
+ let _: &mut u8 = transmute_mut!(&0u8); +} diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-mut-src-immutable.stderr b/vendor/zerocopy/tests/ui-msrv/transmute-mut-src-immutable.stderr new file mode 100644 index 00000000..8262f169 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-mut-src-immutable.stderr @@ -0,0 +1,11 @@ +error[E0308]: mismatched types + --> tests/ui-msrv/transmute-mut-src-immutable.rs:17:37 + | +17 | let _: &mut u8 = transmute_mut!(&0u8); + | ---------------^^^^- + | | | + | | types differ in mutability + | expected due to this + | + = note: expected mutable reference `&mut _` + found reference `&u8` diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-mut-src-not-a-reference.rs b/vendor/zerocopy/tests/ui-msrv/transmute-mut-src-not-a-reference.rs new file mode 100644 index 00000000..bf8bc325 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-mut-src-not-a-reference.rs @@ -0,0 +1,17 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute_mut; + +fn main() {} + +// `transmute_mut!` does not support transmuting from a non-reference source +// type. +const SRC_NOT_A_REFERENCE: &mut u8 = transmute_mut!(0usize); diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-mut-src-not-a-reference.stderr b/vendor/zerocopy/tests/ui-msrv/transmute-mut-src-not-a-reference.stderr new file mode 100644 index 00000000..3a6bdf78 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-mut-src-not-a-reference.stderr @@ -0,0 +1,12 @@ +error[E0308]: mismatched types + --> tests/ui-msrv/transmute-mut-src-not-a-reference.rs:17:53 + | +17 | const SRC_NOT_A_REFERENCE: &mut u8 = transmute_mut!(0usize); + | ---------------^^^^^^- + | | | + | | expected `&mut _`, found `usize` + | | help: consider mutably borrowing here: `&mut 0usize` + | expected due to this + | + = note: expected mutable reference `&mut _` + found type `usize` diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-mut-src-not-frombytes.rs b/vendor/zerocopy/tests/ui-msrv/transmute-mut-src-not-frombytes.rs new file mode 100644 index 00000000..0fc6f984 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-mut-src-not-frombytes.rs @@ -0,0 +1,24 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +extern crate zerocopy; + +use zerocopy::transmute_mut; + +fn main() {} + +#[derive(zerocopy::IntoBytes, zerocopy::Immutable)] +#[repr(C)] +struct Src; + +#[derive(zerocopy::FromBytes, zerocopy::IntoBytes, zerocopy::Immutable)] +#[repr(C)] +struct Dst; + +// `transmute_mut` requires that the source type implements `FromBytes` +const SRC_NOT_FROM_BYTES: &mut Dst = transmute_mut!(&mut Src); diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-mut-src-not-frombytes.stderr b/vendor/zerocopy/tests/ui-msrv/transmute-mut-src-not-frombytes.stderr new file mode 100644 index 00000000..01dd91fa --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-mut-src-not-frombytes.stderr @@ -0,0 +1,7 @@ +error[E0277]: the trait bound `Src: FromBytes` is not satisfied + --> tests/ui-msrv/transmute-mut-src-not-frombytes.rs:24:38 + | +24 | const SRC_NOT_FROM_BYTES: &mut Dst = transmute_mut!(&mut Src); + | ^^^^^^^^^^^^^^^^^^^^^^^^ the trait `FromBytes` is not implemented for `Src` + | + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-mut-src-not-intobytes.rs b/vendor/zerocopy/tests/ui-msrv/transmute-mut-src-not-intobytes.rs new file mode 100644 index 00000000..518402df --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-mut-src-not-intobytes.rs @@ -0,0 +1,24 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute_mut; + +fn main() {} + +#[derive(zerocopy::FromBytes, zerocopy::Immutable)] +#[repr(C)] +struct Src; + +#[derive(zerocopy::FromBytes, zerocopy::IntoBytes, zerocopy::Immutable)] +#[repr(C)] +struct Dst; + +// `transmute_mut` requires that the source type implements `IntoBytes` +const SRC_NOT_AS_BYTES: &mut Dst = transmute_mut!(&mut Src); diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-mut-src-not-intobytes.stderr b/vendor/zerocopy/tests/ui-msrv/transmute-mut-src-not-intobytes.stderr new file mode 100644 index 00000000..5b873562 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-mut-src-not-intobytes.stderr @@ -0,0 +1,7 @@ +error[E0277]: the trait bound `Src: IntoBytes` is not satisfied + --> tests/ui-msrv/transmute-mut-src-not-intobytes.rs:24:36 + | +24 | const SRC_NOT_AS_BYTES: &mut Dst = transmute_mut!(&mut Src); + | ^^^^^^^^^^^^^^^^^^^^^^^^ the trait `IntoBytes` is not implemented for `Src` + | + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-mut-src-unsized.rs b/vendor/zerocopy/tests/ui-msrv/transmute-mut-src-unsized.rs new file mode 100644 index 00000000..473070ae --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-mut-src-unsized.rs @@ -0,0 +1,17 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +extern crate zerocopy; + +use zerocopy::transmute_mut; + +fn main() {} + +// `transmute_mut!` does not support transmuting from an unsized source type to +// a sized destination type. +const SRC_UNSIZED: &mut [u8; 1] = transmute_mut!(&mut [0u8][..]); diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-mut-src-unsized.stderr b/vendor/zerocopy/tests/ui-msrv/transmute-mut-src-unsized.stderr new file mode 100644 index 00000000..bacd62f3 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-mut-src-unsized.stderr @@ -0,0 +1,8 @@ +error[E0271]: type mismatch resolving `<[u8; 1] as KnownLayout>::PointerMetadata == usize` + --> tests/ui-msrv/transmute-mut-src-unsized.rs:17:35 + | +17 | const SRC_UNSIZED: &mut [u8; 1] = transmute_mut!(&mut [0u8][..]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected `()`, found `usize` + | + = note: required because of the requirements on the impl of `TransmuteMutDst<'_>` for `Wrap<&mut [u8], &mut [u8; 1]>` + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-ptr-to-usize.rs b/vendor/zerocopy/tests/ui-msrv/transmute-ptr-to-usize.rs new file mode 100644 index 00000000..5af88593 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-ptr-to-usize.rs @@ -0,0 +1,20 @@ +// Copyright 2022 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute; + +fn main() {} + +// It is unclear whether we can or should support this transmutation, especially +// in a const context. This test ensures that even if such a transmutation +// becomes valid due to the requisite implementations of `FromBytes` being +// added, that we re-examine whether it should specifically be valid in a const +// context. 
+const POINTER_VALUE: usize = transmute!(&0usize as *const usize); diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-ptr-to-usize.stderr b/vendor/zerocopy/tests/ui-msrv/transmute-ptr-to-usize.stderr new file mode 100644 index 00000000..c57f7cdc --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-ptr-to-usize.stderr @@ -0,0 +1,15 @@ +error[E0277]: the trait bound `*const usize: IntoBytes` is not satisfied + --> tests/ui-msrv/transmute-ptr-to-usize.rs:20:30 + | +20 | const POINTER_VALUE: usize = transmute!(&0usize as *const usize); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `IntoBytes` is not implemented for `*const usize` + | +note: required by a bound in `POINTER_VALUE::transmute` + --> tests/ui-msrv/transmute-ptr-to-usize.rs:20:30 + | +20 | const POINTER_VALUE: usize = transmute!(&0usize as *const usize); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | | + | required by a bound in this + | required by this bound in `POINTER_VALUE::transmute` + = note: this error originates in the macro `transmute` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-ref-dst-mutable.rs b/vendor/zerocopy/tests/ui-msrv/transmute-ref-dst-mutable.rs new file mode 100644 index 00000000..fa0e6e4c --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-ref-dst-mutable.rs @@ -0,0 +1,19 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute_ref; + +fn main() {} + +fn ref_dst_mutable() { + // `transmute_ref!` requires that its destination type be an immutable + // reference. 
+ let _: &mut u8 = transmute_ref!(&0u8); +} diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-ref-dst-mutable.stderr b/vendor/zerocopy/tests/ui-msrv/transmute-ref-dst-mutable.stderr new file mode 100644 index 00000000..5ccf2cd2 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-ref-dst-mutable.stderr @@ -0,0 +1,29 @@ +error[E0308]: mismatched types + --> tests/ui-msrv/transmute-ref-dst-mutable.rs:18:22 + | +18 | let _: &mut u8 = transmute_ref!(&0u8); + | ^^^^^^^^^^^^^^^^^^^^ types differ in mutability + | + = note: expected mutable reference `&mut u8` + found reference `&_` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0308]: mismatched types + --> tests/ui-msrv/transmute-ref-dst-mutable.rs:18:22 + | +18 | let _: &mut u8 = transmute_ref!(&0u8); + | ^^^^^^^^^^^^^^^^^^^^ types differ in mutability + | + = note: expected mutable reference `&mut u8` + found reference `&_` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0308]: mismatched types + --> tests/ui-msrv/transmute-ref-dst-mutable.rs:18:22 + | +18 | let _: &mut u8 = transmute_ref!(&0u8); + | ^^^^^^^^^^^^^^^^^^^^ types differ in mutability + | + = note: expected mutable reference `&mut u8` + found reference `&_` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-ref-dst-not-a-reference.rs b/vendor/zerocopy/tests/ui-msrv/transmute-ref-dst-not-a-reference.rs new file mode 100644 index 00000000..de55f9ac --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-ref-dst-not-a-reference.rs @@ -0,0 +1,17 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute_ref; + +fn main() {} + +// `transmute_ref!` does not support transmuting into a non-reference +// destination type. 
+const DST_NOT_A_REFERENCE: usize = transmute_ref!(&0u8); diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-ref-dst-not-a-reference.stderr b/vendor/zerocopy/tests/ui-msrv/transmute-ref-dst-not-a-reference.stderr new file mode 100644 index 00000000..9a61c4c7 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-ref-dst-not-a-reference.stderr @@ -0,0 +1,29 @@ +error[E0308]: mismatched types + --> tests/ui-msrv/transmute-ref-dst-not-a-reference.rs:17:36 + | +17 | const DST_NOT_A_REFERENCE: usize = transmute_ref!(&0u8); + | ^^^^^^^^^^^^^^^^^^^^ expected `usize`, found reference + | + = note: expected type `usize` + found reference `&_` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0308]: mismatched types + --> tests/ui-msrv/transmute-ref-dst-not-a-reference.rs:17:36 + | +17 | const DST_NOT_A_REFERENCE: usize = transmute_ref!(&0u8); + | ^^^^^^^^^^^^^^^^^^^^ expected `usize`, found reference + | + = note: expected type `usize` + found reference `&_` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0308]: mismatched types + --> tests/ui-msrv/transmute-ref-dst-not-a-reference.rs:17:36 + | +17 | const DST_NOT_A_REFERENCE: usize = transmute_ref!(&0u8); + | ^^^^^^^^^^^^^^^^^^^^ expected `usize`, found reference + | + = note: expected type `usize` + found reference `&_` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-ref-dst-not-frombytes.rs b/vendor/zerocopy/tests/ui-msrv/transmute-ref-dst-not-frombytes.rs new file mode 100644 index 00000000..f7619d34 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-ref-dst-not-frombytes.rs @@ -0,0 +1,23 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::transmute_ref; + +fn main() {} + +#[derive(zerocopy::Immutable)] +#[repr(transparent)] +struct Dst(AU16); + +// `transmute_ref` requires that the destination type implements `FromBytes` +const DST_NOT_FROM_BYTES: &Dst = transmute_ref!(&AU16(0)); diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-ref-dst-not-frombytes.stderr b/vendor/zerocopy/tests/ui-msrv/transmute-ref-dst-not-frombytes.stderr new file mode 100644 index 00000000..9cdc03ef --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-ref-dst-not-frombytes.stderr @@ -0,0 +1,12 @@ +error[E0277]: the trait bound `Dst: zerocopy::FromBytes` is not satisfied + --> tests/ui-msrv/transmute-ref-dst-not-frombytes.rs:23:34 + | +23 | const DST_NOT_FROM_BYTES: &Dst = transmute_ref!(&AU16(0)); + | ^^^^^^^^^^^^^^^^^^^^^^^^ the trait `zerocopy::FromBytes` is not implemented for `Dst` + | +note: required by `AssertDstIsFromBytes` + --> tests/ui-msrv/transmute-ref-dst-not-frombytes.rs:23:34 + | +23 | const DST_NOT_FROM_BYTES: &Dst = transmute_ref!(&AU16(0)); + | ^^^^^^^^^^^^^^^^^^^^^^^^ + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-ref-dst-not-nocell.rs b/vendor/zerocopy/tests/ui-msrv/transmute-ref-dst-not-nocell.rs new file mode 100644 index 00000000..f1f63bfc --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-ref-dst-not-nocell.rs @@ -0,0 +1,23 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::transmute_ref; + +fn main() {} + +#[derive(zerocopy::FromBytes)] +#[repr(transparent)] +struct Dst(AU16); + +// `transmute_ref` requires that the destination type implements `Immutable` +const DST_NOT_IMMUTABLE: &Dst = transmute_ref!(&AU16(0)); diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-ref-dst-not-nocell.stderr b/vendor/zerocopy/tests/ui-msrv/transmute-ref-dst-not-nocell.stderr new file mode 100644 index 00000000..899805b0 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-ref-dst-not-nocell.stderr @@ -0,0 +1,12 @@ +error[E0277]: the trait bound `Dst: zerocopy::Immutable` is not satisfied + --> tests/ui-msrv/transmute-ref-dst-not-nocell.rs:23:33 + | +23 | const DST_NOT_IMMUTABLE: &Dst = transmute_ref!(&AU16(0)); + | ^^^^^^^^^^^^^^^^^^^^^^^^ the trait `zerocopy::Immutable` is not implemented for `Dst` + | +note: required by `AssertDstIsImmutable` + --> tests/ui-msrv/transmute-ref-dst-not-nocell.rs:23:33 + | +23 | const DST_NOT_IMMUTABLE: &Dst = transmute_ref!(&AU16(0)); + | ^^^^^^^^^^^^^^^^^^^^^^^^ + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-ref-dst-unsized.rs b/vendor/zerocopy/tests/ui-msrv/transmute-ref-dst-unsized.rs new file mode 100644 index 00000000..625f1fac --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-ref-dst-unsized.rs @@ -0,0 +1,17 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute_ref; + +fn main() {} + +// `transmute_ref!` does not support transmuting into an unsized destination +// type. +const DST_UNSIZED: &[u8] = transmute_ref!(&[0u8; 1]); diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-ref-dst-unsized.stderr b/vendor/zerocopy/tests/ui-msrv/transmute-ref-dst-unsized.stderr new file mode 100644 index 00000000..11b38acb --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-ref-dst-unsized.stderr @@ -0,0 +1,8 @@ +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> tests/ui-msrv/transmute-ref-dst-unsized.rs:17:28 + | +17 | const DST_UNSIZED: &[u8] = transmute_ref!(&[0u8; 1]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time + | + = help: the trait `Sized` is not implemented for `[u8]` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-ref-illegal-lifetime.rs b/vendor/zerocopy/tests/ui-msrv/transmute-ref-illegal-lifetime.rs new file mode 100644 index 00000000..8dd191e6 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-ref-illegal-lifetime.rs @@ -0,0 +1,15 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. 
+// This file may not be copied, modified, or distributed except according to +// those terms. + +fn main() {} + +fn increase_lifetime() { + let x = 0u64; + // It is illegal to increase the lifetime scope. + let _: &'static u64 = zerocopy::transmute_ref!(&x); +} diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-ref-illegal-lifetime.stderr b/vendor/zerocopy/tests/ui-msrv/transmute-ref-illegal-lifetime.stderr new file mode 100644 index 00000000..866ea56a --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-ref-illegal-lifetime.stderr @@ -0,0 +1,9 @@ +error[E0597]: `x` does not live long enough + --> tests/ui-msrv/transmute-ref-illegal-lifetime.rs:14:52 + | +14 | let _: &'static u64 = zerocopy::transmute_ref!(&x); + | ------------ ^^ borrowed value does not live long enough + | | + | type annotation requires that `x` is borrowed for `'static` +15 | } + | - `x` dropped here while still borrowed diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-ref-src-dst-not-references.rs b/vendor/zerocopy/tests/ui-msrv/transmute-ref-src-dst-not-references.rs new file mode 100644 index 00000000..114e917b --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-ref-src-dst-not-references.rs @@ -0,0 +1,17 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute_ref; + +fn main() {} + +// `transmute_ref!` does not support transmuting between non-reference source +// and destination types. 
+const SRC_DST_NOT_REFERENCES: usize = transmute_ref!(0usize); diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-ref-src-dst-not-references.stderr b/vendor/zerocopy/tests/ui-msrv/transmute-ref-src-dst-not-references.stderr new file mode 100644 index 00000000..2c5e23b6 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-ref-src-dst-not-references.stderr @@ -0,0 +1,42 @@ +error[E0308]: mismatched types + --> tests/ui-msrv/transmute-ref-src-dst-not-references.rs:17:54 + | +17 | const SRC_DST_NOT_REFERENCES: usize = transmute_ref!(0usize); + | ---------------^^^^^^- + | | | + | | expected reference, found `usize` + | | help: consider borrowing here: `&0usize` + | expected due to this + | + = note: expected reference `&_` + found type `usize` + +error[E0308]: mismatched types + --> tests/ui-msrv/transmute-ref-src-dst-not-references.rs:17:39 + | +17 | const SRC_DST_NOT_REFERENCES: usize = transmute_ref!(0usize); + | ^^^^^^^^^^^^^^^^^^^^^^ expected `usize`, found reference + | + = note: expected type `usize` + found reference `&_` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0308]: mismatched types + --> tests/ui-msrv/transmute-ref-src-dst-not-references.rs:17:39 + | +17 | const SRC_DST_NOT_REFERENCES: usize = transmute_ref!(0usize); + | ^^^^^^^^^^^^^^^^^^^^^^ expected `usize`, found reference + | + = note: expected type `usize` + found reference `&_` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0308]: mismatched types + --> tests/ui-msrv/transmute-ref-src-dst-not-references.rs:17:39 + | +17 | const SRC_DST_NOT_REFERENCES: usize = transmute_ref!(0usize); + | ^^^^^^^^^^^^^^^^^^^^^^ expected `usize`, found reference + | + = note: expected type `usize` + found reference `&_` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-ref-src-not-a-reference.rs b/vendor/zerocopy/tests/ui-msrv/transmute-ref-src-not-a-reference.rs new file mode 100644 index 00000000..90661b3e --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-ref-src-not-a-reference.rs @@ -0,0 +1,17 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute_ref; + +fn main() {} + +// `transmute_ref!` does not support transmuting from a non-reference source +// type. 
+const SRC_NOT_A_REFERENCE: &u8 = transmute_ref!(0usize); diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-ref-src-not-a-reference.stderr b/vendor/zerocopy/tests/ui-msrv/transmute-ref-src-not-a-reference.stderr new file mode 100644 index 00000000..0f4aeec9 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-ref-src-not-a-reference.stderr @@ -0,0 +1,12 @@ +error[E0308]: mismatched types + --> tests/ui-msrv/transmute-ref-src-not-a-reference.rs:17:49 + | +17 | const SRC_NOT_A_REFERENCE: &u8 = transmute_ref!(0usize); + | ---------------^^^^^^- + | | | + | | expected reference, found `usize` + | | help: consider borrowing here: `&0usize` + | expected due to this + | + = note: expected reference `&_` + found type `usize` diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-ref-src-not-intobytes.rs b/vendor/zerocopy/tests/ui-msrv/transmute-ref-src-not-intobytes.rs new file mode 100644 index 00000000..a5146fa8 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-ref-src-not-intobytes.rs @@ -0,0 +1,23 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::transmute_ref; + +fn main() {} + +#[derive(zerocopy::Immutable)] +#[repr(transparent)] +struct Src(AU16); + +// `transmute_ref` requires that the source type implements `IntoBytes` +const SRC_NOT_AS_BYTES: &AU16 = transmute_ref!(&Src(AU16(0))); diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-ref-src-not-intobytes.stderr b/vendor/zerocopy/tests/ui-msrv/transmute-ref-src-not-intobytes.stderr new file mode 100644 index 00000000..84036b70 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-ref-src-not-intobytes.stderr @@ -0,0 +1,25 @@ +error[E0277]: the trait bound `Src: zerocopy::IntoBytes` is not satisfied + --> tests/ui-msrv/transmute-ref-src-not-intobytes.rs:23:33 + | +23 | const SRC_NOT_AS_BYTES: &AU16 = transmute_ref!(&Src(AU16(0))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `zerocopy::IntoBytes` is not implemented for `Src` + | +note: required by `AssertSrcIsIntoBytes` + --> tests/ui-msrv/transmute-ref-src-not-intobytes.rs:23:33 + | +23 | const SRC_NOT_AS_BYTES: &AU16 = transmute_ref!(&Src(AU16(0))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `Src: zerocopy::IntoBytes` is not satisfied + --> tests/ui-msrv/transmute-ref-src-not-intobytes.rs:23:33 + | +23 | const SRC_NOT_AS_BYTES: &AU16 = transmute_ref!(&Src(AU16(0))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `zerocopy::IntoBytes` is not implemented for `Src` + | +note: required by a bound in `AssertSrcIsIntoBytes` + --> tests/ui-msrv/transmute-ref-src-not-intobytes.rs:23:33 + | +23 | const SRC_NOT_AS_BYTES: &AU16 = transmute_ref!(&Src(AU16(0))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `AssertSrcIsIntoBytes` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-ref-src-not-nocell.rs 
b/vendor/zerocopy/tests/ui-msrv/transmute-ref-src-not-nocell.rs new file mode 100644 index 00000000..ee28a98d --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-ref-src-not-nocell.rs @@ -0,0 +1,23 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::transmute_ref; + +fn main() {} + +#[derive(zerocopy::IntoBytes)] +#[repr(transparent)] +struct Src(AU16); + +// `transmute_ref` requires that the source type implements `Immutable` +const SRC_NOT_IMMUTABLE: &AU16 = transmute_ref!(&Src(AU16(0))); diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-ref-src-not-nocell.stderr b/vendor/zerocopy/tests/ui-msrv/transmute-ref-src-not-nocell.stderr new file mode 100644 index 00000000..2e94e806 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-ref-src-not-nocell.stderr @@ -0,0 +1,25 @@ +error[E0277]: the trait bound `Src: zerocopy::Immutable` is not satisfied + --> tests/ui-msrv/transmute-ref-src-not-nocell.rs:23:34 + | +23 | const SRC_NOT_IMMUTABLE: &AU16 = transmute_ref!(&Src(AU16(0))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected an implementor of trait `zerocopy::Immutable` + | +note: required by `AssertSrcIsImmutable` + --> tests/ui-msrv/transmute-ref-src-not-nocell.rs:23:34 + | +23 | const SRC_NOT_IMMUTABLE: &AU16 = transmute_ref!(&Src(AU16(0))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `Src: zerocopy::Immutable` is not satisfied + --> tests/ui-msrv/transmute-ref-src-not-nocell.rs:23:34 + | +23 | const SRC_NOT_IMMUTABLE: &AU16 = transmute_ref!(&Src(AU16(0))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `zerocopy::Immutable` is not implemented for `Src` + | +note: required by a bound in `AssertSrcIsImmutable` + --> tests/ui-msrv/transmute-ref-src-not-nocell.rs:23:34 + | +23 | const SRC_NOT_IMMUTABLE: &AU16 = transmute_ref!(&Src(AU16(0))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `AssertSrcIsImmutable` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-ref-src-unsized.rs b/vendor/zerocopy/tests/ui-msrv/transmute-ref-src-unsized.rs new file mode 100644 index 00000000..14e72b4d --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-ref-src-unsized.rs @@ -0,0 +1,16 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute_ref; + +fn main() {} + +// `transmute_ref!` does not support transmuting from an unsized source type. 
+const SRC_UNSIZED: &[u8; 1] = transmute_ref!(&[0u8][..]); diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-ref-src-unsized.stderr b/vendor/zerocopy/tests/ui-msrv/transmute-ref-src-unsized.stderr new file mode 100644 index 00000000..736a6530 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-ref-src-unsized.stderr @@ -0,0 +1,8 @@ +error[E0271]: type mismatch resolving `<[u8; 1] as KnownLayout>::PointerMetadata == usize` + --> tests/ui-msrv/transmute-ref-src-unsized.rs:16:31 + | +16 | const SRC_UNSIZED: &[u8; 1] = transmute_ref!(&[0u8][..]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ expected `()`, found `usize` + | + = note: required because of the requirements on the impl of `TransmuteRefDst<'_>` for `Wrap<&[u8], &[u8; 1]>` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-size-decrease.rs b/vendor/zerocopy/tests/ui-msrv/transmute-size-decrease.rs new file mode 100644 index 00000000..567b6733 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-size-decrease.rs @@ -0,0 +1,20 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::transmute; + +fn main() {} + +// Although this is not a soundness requirement, we currently require that the +// size of the destination type is not smaller than the size of the source type. +const DECREASE_SIZE: u8 = transmute!(AU16(0)); diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-size-decrease.stderr b/vendor/zerocopy/tests/ui-msrv/transmute-size-decrease.stderr new file mode 100644 index 00000000..33f9cf2e --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-size-decrease.stderr @@ -0,0 +1,9 @@ +error[E0512]: cannot transmute between types of different sizes, or dependently-sized types + --> tests/ui-msrv/transmute-size-decrease.rs:20:27 + | +20 | const DECREASE_SIZE: u8 = transmute!(AU16(0)); + | ^^^^^^^^^^^^^^^^^^^ + | + = note: source type: `AU16` (16 bits) + = note: target type: `u8` (8 bits) + = note: this error originates in the macro `transmute` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-size-increase-allow-shrink.rs b/vendor/zerocopy/tests/ui-msrv/transmute-size-increase-allow-shrink.rs new file mode 100644 index 00000000..4922373f --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-size-increase-allow-shrink.rs @@ -0,0 +1,20 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::transmute; + +fn main() {} + +// `transmute!` does not support transmuting from a smaller type to a larger +// one. 
+const INCREASE_SIZE: AU16 = transmute!(#![allow(shrink)] 0u8); diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-size-increase-allow-shrink.stderr b/vendor/zerocopy/tests/ui-msrv/transmute-size-increase-allow-shrink.stderr new file mode 100644 index 00000000..9d03a363 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-size-increase-allow-shrink.stderr @@ -0,0 +1,9 @@ +error[E0512]: cannot transmute between types of different sizes, or dependently-sized types + --> tests/ui-msrv/transmute-size-increase-allow-shrink.rs:20:29 + | +20 | const INCREASE_SIZE: AU16 = transmute!(#![allow(shrink)] 0u8); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: source type: `u8` (8 bits) + = note: target type: `Transmute<u8, AU16>` (16 bits) + = note: this error originates in the macro `transmute` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-size-increase.rs b/vendor/zerocopy/tests/ui-msrv/transmute-size-increase.rs new file mode 100644 index 00000000..35dc780e --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-size-increase.rs @@ -0,0 +1,20 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::transmute; + +fn main() {} + +// `transmute!` does not support transmuting from a smaller type to a larger +// one. +const INCREASE_SIZE: AU16 = transmute!(0u8); diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-size-increase.stderr b/vendor/zerocopy/tests/ui-msrv/transmute-size-increase.stderr new file mode 100644 index 00000000..64aa798f --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-size-increase.stderr @@ -0,0 +1,9 @@ +error[E0512]: cannot transmute between types of different sizes, or dependently-sized types + --> tests/ui-msrv/transmute-size-increase.rs:20:29 + | +20 | const INCREASE_SIZE: AU16 = transmute!(0u8); + | ^^^^^^^^^^^^^^^ + | + = note: source type: `u8` (8 bits) + = note: target type: `AU16` (16 bits) + = note: this error originates in the macro `transmute` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-src-not-intobytes.rs b/vendor/zerocopy/tests/ui-msrv/transmute-src-not-intobytes.rs new file mode 100644 index 00000000..73be6c1b --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-src-not-intobytes.rs @@ -0,0 +1,19 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::{NotZerocopy, AU16}; +use zerocopy::transmute; + +fn main() {} + +// `transmute` requires that the source type implements `IntoBytes` +const SRC_NOT_AS_BYTES: AU16 = transmute!(NotZerocopy(AU16(0))); diff --git a/vendor/zerocopy/tests/ui-msrv/transmute-src-not-intobytes.stderr b/vendor/zerocopy/tests/ui-msrv/transmute-src-not-intobytes.stderr new file mode 100644 index 00000000..1a96a136 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/transmute-src-not-intobytes.stderr @@ -0,0 +1,15 @@ +error[E0277]: the trait bound `NotZerocopy<AU16>: zerocopy::IntoBytes` is not satisfied + --> tests/ui-msrv/transmute-src-not-intobytes.rs:19:32 + | +19 | const SRC_NOT_AS_BYTES: AU16 = transmute!(NotZerocopy(AU16(0))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `zerocopy::IntoBytes` is not implemented for `NotZerocopy<AU16>` + | +note: required by a bound in `SRC_NOT_AS_BYTES::transmute` + --> tests/ui-msrv/transmute-src-not-intobytes.rs:19:32 + | +19 | const SRC_NOT_AS_BYTES: AU16 = transmute!(NotZerocopy(AU16(0))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | | + | required by a bound in this + | required by this bound in `SRC_NOT_AS_BYTES::transmute` + = note: this error originates in the macro `transmute` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-msrv/try_transmute-dst-not-tryfrombytes.rs b/vendor/zerocopy/tests/ui-msrv/try_transmute-dst-not-tryfrombytes.rs new file mode 100644 index 00000000..0658bccf --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/try_transmute-dst-not-tryfrombytes.rs @@ -0,0 +1,18 @@ +// Copyright 2022 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::{NotZerocopy, AU16}; +use zerocopy::try_transmute; + +fn main() { + let dst_not_try_from_bytes: Result<NotZerocopy, _> = try_transmute!(AU16(0)); +} diff --git a/vendor/zerocopy/tests/ui-msrv/try_transmute-dst-not-tryfrombytes.stderr b/vendor/zerocopy/tests/ui-msrv/try_transmute-dst-not-tryfrombytes.stderr new file mode 100644 index 00000000..5536f612 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/try_transmute-dst-not-tryfrombytes.stderr @@ -0,0 +1,37 @@ +error[E0277]: the trait bound `NotZerocopy: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-msrv/try_transmute-dst-not-tryfrombytes.rs:17:58 + | +17 | let dst_not_try_from_bytes: Result<NotZerocopy, _> = try_transmute!(AU16(0)); + | ^^^^^^^^^^^^^^^^^^^^^^^ the trait `zerocopy::TryFromBytes` is not implemented for `NotZerocopy` + | +note: required by a bound in `try_transmute` + --> src/util/macro_util.rs + | + | Dst: TryFromBytes, + | ^^^^^^^^^^^^ required by this bound in `try_transmute` + = note: this error originates in the macro `try_transmute` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-msrv/try_transmute-dst-not-tryfrombytes.rs:17:33 + | +17 | let dst_not_try_from_bytes: Result<NotZerocopy, _> = try_transmute!(AU16(0)); + | ^^^^^^^^^^^^^^^^^^^^^^ the trait `zerocopy::TryFromBytes` is not implemented for `NotZerocopy` + | +note: required by a bound in `ValidityError` + --> src/error.rs + | + | pub struct ValidityError<Src, Dst: ?Sized + TryFromBytes> { + | ^^^^^^^^^^^^ required by this bound in `ValidityError` + +error[E0277]: the trait bound `NotZerocopy: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-msrv/try_transmute-dst-not-tryfrombytes.rs:17:58 + | +17 | let dst_not_try_from_bytes: Result<NotZerocopy, _> = try_transmute!(AU16(0)); + | ^^^^^^^^^^^^^^^^^^^^^^^ the trait `zerocopy::TryFromBytes` is not implemented for `NotZerocopy` + | +note: required by a bound in `ValidityError` + --> src/error.rs + | + | pub struct ValidityError<Src, Dst: ?Sized + TryFromBytes> { + | ^^^^^^^^^^^^ required by this bound in `ValidityError` + = note: this error originates in the macro `try_transmute` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-msrv/try_transmute-size-decrease.rs b/vendor/zerocopy/tests/ui-msrv/try_transmute-size-decrease.rs new file mode 100644 index 00000000..097623c8 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/try_transmute-size-decrease.rs @@ -0,0 +1,20 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::try_transmute; + +// Although this is not a soundness requirement, we currently require that the +// size of the destination type is not smaller than the size of the source type. 
+fn main() { + let _decrease_size: Result<u8, _> = try_transmute!(AU16(0)); +} diff --git a/vendor/zerocopy/tests/ui-msrv/try_transmute-size-decrease.stderr b/vendor/zerocopy/tests/ui-msrv/try_transmute-size-decrease.stderr new file mode 100644 index 00000000..3dcc4286 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/try_transmute-size-decrease.stderr @@ -0,0 +1,9 @@ +error[E0512]: cannot transmute between types of different sizes, or dependently-sized types + --> tests/ui-msrv/try_transmute-size-decrease.rs:19:41 + | +19 | let _decrease_size: Result<u8, _> = try_transmute!(AU16(0)); + | ^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: source type: `AU16` (16 bits) + = note: target type: `u8` (8 bits) + = note: this error originates in the macro `try_transmute` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-msrv/try_transmute-size-increase.rs b/vendor/zerocopy/tests/ui-msrv/try_transmute-size-increase.rs new file mode 100644 index 00000000..4b40a566 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/try_transmute-size-increase.rs @@ -0,0 +1,20 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::try_transmute; + +// `try_transmute!` does not support transmuting from a smaller type to a larger +// one. +fn main() { + let _increase_size: Result<AU16, _> = try_transmute!(0u8); +} diff --git a/vendor/zerocopy/tests/ui-msrv/try_transmute-size-increase.stderr b/vendor/zerocopy/tests/ui-msrv/try_transmute-size-increase.stderr new file mode 100644 index 00000000..5b875e09 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/try_transmute-size-increase.stderr @@ -0,0 +1,9 @@ +error[E0512]: cannot transmute between types of different sizes, or dependently-sized types + --> tests/ui-msrv/try_transmute-size-increase.rs:19:43 + | +19 | let _increase_size: Result<AU16, _> = try_transmute!(0u8); + | ^^^^^^^^^^^^^^^^^^^ + | + = note: source type: `u8` (8 bits) + = note: target type: `AU16` (16 bits) + = note: this error originates in the macro `try_transmute` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-msrv/try_transmute-src-not-intobytes.rs b/vendor/zerocopy/tests/ui-msrv/try_transmute-src-not-intobytes.rs new file mode 100644 index 00000000..c2a7b417 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/try_transmute-src-not-intobytes.rs @@ -0,0 +1,19 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::{NotZerocopy, AU16}; +use zerocopy::try_transmute; + +fn main() { + // `try_transmute` requires that the source type implements `IntoBytes` + let src_not_into_bytes: Result<AU16, _> = try_transmute!(NotZerocopy(AU16(0))); +} diff --git a/vendor/zerocopy/tests/ui-msrv/try_transmute-src-not-intobytes.stderr b/vendor/zerocopy/tests/ui-msrv/try_transmute-src-not-intobytes.stderr new file mode 100644 index 00000000..589f8931 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/try_transmute-src-not-intobytes.stderr @@ -0,0 +1,12 @@ +error[E0277]: the trait bound `NotZerocopy<AU16>: zerocopy::IntoBytes` is not satisfied + --> tests/ui-msrv/try_transmute-src-not-intobytes.rs:18:47 + | +18 | let src_not_into_bytes: Result<AU16, _> = try_transmute!(NotZerocopy(AU16(0))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `zerocopy::IntoBytes` is not implemented for `NotZerocopy<AU16>` + | +note: required by a bound in `try_transmute` + --> src/util/macro_util.rs + | + | Src: IntoBytes, + | ^^^^^^^^^ required by this bound in `try_transmute` + = note: this error originates in the macro `try_transmute` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-msrv/try_transmute_mut-alignment-increase.rs b/vendor/zerocopy/tests/ui-msrv/try_transmute_mut-alignment-increase.rs new file mode 100644 index 00000000..d9c9a9dc --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/try_transmute_mut-alignment-increase.rs @@ -0,0 +1,21 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::try_transmute_mut; + +// `try_transmute_mut!` does not support transmuting from a type of smaller +// alignment to one of larger alignment. 
+fn main() { + let src = &mut [0u8; 2]; + let _increase_size: Result<&mut AU16, _> = try_transmute_mut!(src); +} diff --git a/vendor/zerocopy/tests/ui-msrv/try_transmute_mut-alignment-increase.stderr b/vendor/zerocopy/tests/ui-msrv/try_transmute_mut-alignment-increase.stderr new file mode 100644 index 00000000..640936cc --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/try_transmute_mut-alignment-increase.stderr @@ -0,0 +1,9 @@ +error[E0512]: cannot transmute between types of different sizes, or dependently-sized types + --> tests/ui-msrv/try_transmute_mut-alignment-increase.rs:20:48 + | +20 | let _increase_size: Result<&mut AU16, _> = try_transmute_mut!(src); + | ^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: source type: `AlignOf<[u8; 2]>` (8 bits) + = note: target type: `MaxAlignsOf<[u8; 2], AU16>` (16 bits) + = note: this error originates in the macro `$crate::assert_align_gt_eq` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-msrv/try_transmute_mut-dst-not-tryfrombytes.rs b/vendor/zerocopy/tests/ui-msrv/try_transmute_mut-dst-not-tryfrombytes.rs new file mode 100644 index 00000000..89096cd1 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/try_transmute_mut-dst-not-tryfrombytes.rs @@ -0,0 +1,21 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::{NotZerocopy, AU16}; +use zerocopy::try_transmute_mut; + +fn main() { + // `try_transmute_mut` requires that the destination type implements + // `IntoBytes` + let src = &mut AU16(0); + let dst_not_try_from_bytes: Result<&mut NotZerocopy, _> = try_transmute_mut!(src); +} diff --git a/vendor/zerocopy/tests/ui-msrv/try_transmute_mut-dst-not-tryfrombytes.stderr b/vendor/zerocopy/tests/ui-msrv/try_transmute_mut-dst-not-tryfrombytes.stderr new file mode 100644 index 00000000..a3b496b4 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/try_transmute_mut-dst-not-tryfrombytes.stderr @@ -0,0 +1,50 @@ +error[E0277]: the trait bound `NotZerocopy: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-msrv/try_transmute_mut-dst-not-tryfrombytes.rs:20:63 + | +20 | let dst_not_try_from_bytes: Result<&mut NotZerocopy, _> = try_transmute_mut!(src); + | ^^^^^^^^^^^^^^^^^^^^^^^ the trait `zerocopy::TryFromBytes` is not implemented for `NotZerocopy` + | +note: required by a bound in `try_transmute_mut` + --> src/util/macro_util.rs + | + | Dst: TryFromBytes + IntoBytes, + | ^^^^^^^^^^^^ required by this bound in `try_transmute_mut` + = note: this error originates in the macro `try_transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy: zerocopy::IntoBytes` is not satisfied + --> tests/ui-msrv/try_transmute_mut-dst-not-tryfrombytes.rs:20:63 + | +20 | let dst_not_try_from_bytes: Result<&mut NotZerocopy, _> = try_transmute_mut!(src); + | ^^^^^^^^^^^^^^^^^^^^^^^ the trait `zerocopy::IntoBytes` is not implemented for `NotZerocopy` + | +note: required by a bound in `try_transmute_mut` + --> src/util/macro_util.rs + | + | Dst: TryFromBytes + IntoBytes, + | ^^^^^^^^^ required by this bound in `try_transmute_mut` + = note: this error 
originates in the macro `try_transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-msrv/try_transmute_mut-dst-not-tryfrombytes.rs:20:33 + | +20 | let dst_not_try_from_bytes: Result<&mut NotZerocopy, _> = try_transmute_mut!(src); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `zerocopy::TryFromBytes` is not implemented for `NotZerocopy` + | +note: required by a bound in `ValidityError` + --> src/error.rs + | + | pub struct ValidityError<Src, Dst: ?Sized + TryFromBytes> { + | ^^^^^^^^^^^^ required by this bound in `ValidityError` + +error[E0277]: the trait bound `NotZerocopy: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-msrv/try_transmute_mut-dst-not-tryfrombytes.rs:20:63 + | +20 | let dst_not_try_from_bytes: Result<&mut NotZerocopy, _> = try_transmute_mut!(src); + | ^^^^^^^^^^^^^^^^^^^^^^^ the trait `zerocopy::TryFromBytes` is not implemented for `NotZerocopy` + | +note: required by a bound in `ValidityError` + --> src/error.rs + | + | pub struct ValidityError<Src, Dst: ?Sized + TryFromBytes> { + | ^^^^^^^^^^^^ required by this bound in `ValidityError` + = note: this error originates in the macro `try_transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-msrv/try_transmute_mut-size-decrease.rs b/vendor/zerocopy/tests/ui-msrv/try_transmute_mut-size-decrease.rs new file mode 100644 index 00000000..3d522efd --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/try_transmute_mut-size-decrease.rs @@ -0,0 +1,21 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::try_transmute_mut; + +// Although this is not a soundness requirement, we currently require that the +// size of the destination type is not smaller than the size of the source type. 
+fn main() { + let src = &mut AU16(0); + let _decrease_size: Result<&mut u8, _> = try_transmute_mut!(src); +} diff --git a/vendor/zerocopy/tests/ui-msrv/try_transmute_mut-size-decrease.stderr b/vendor/zerocopy/tests/ui-msrv/try_transmute_mut-size-decrease.stderr new file mode 100644 index 00000000..cd0e3b54 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/try_transmute_mut-size-decrease.stderr @@ -0,0 +1,9 @@ +error[E0512]: cannot transmute between types of different sizes, or dependently-sized types + --> tests/ui-msrv/try_transmute_mut-size-decrease.rs:20:46 + | +20 | let _decrease_size: Result<&mut u8, _> = try_transmute_mut!(src); + | ^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: source type: `AU16` (16 bits) + = note: target type: `u8` (8 bits) + = note: this error originates in the macro `$crate::assert_size_eq` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-msrv/try_transmute_mut-size-increase.rs b/vendor/zerocopy/tests/ui-msrv/try_transmute_mut-size-increase.rs new file mode 100644 index 00000000..526d465c --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/try_transmute_mut-size-increase.rs @@ -0,0 +1,21 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::try_transmute_mut; + +// `try_transmute_mut!` does not support transmuting from a smaller type to a +// larger one. +fn main() { + let src = &mut 0u8; + let _increase_size: Result<&mut [u8; 2], _> = try_transmute_mut!(src); +} diff --git a/vendor/zerocopy/tests/ui-msrv/try_transmute_mut-size-increase.stderr b/vendor/zerocopy/tests/ui-msrv/try_transmute_mut-size-increase.stderr new file mode 100644 index 00000000..c1d78167 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/try_transmute_mut-size-increase.stderr @@ -0,0 +1,17 @@ +warning: unused import: `util::AU16` + --> tests/ui-msrv/try_transmute_mut-size-increase.rs:13:5 + | +13 | use util::AU16; + | ^^^^^^^^^^ + | + = note: `#[warn(unused_imports)]` on by default + +error[E0512]: cannot transmute between types of different sizes, or dependently-sized types + --> tests/ui-msrv/try_transmute_mut-size-increase.rs:20:51 + | +20 | let _increase_size: Result<&mut [u8; 2], _> = try_transmute_mut!(src); + | ^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: source type: `u8` (8 bits) + = note: target type: `[u8; 2]` (16 bits) + = note: this error originates in the macro `$crate::assert_size_eq` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-msrv/try_transmute_mut-src-not-frombytes.rs b/vendor/zerocopy/tests/ui-msrv/try_transmute_mut-src-not-frombytes.rs new file mode 100644 index 00000000..12b2e0d3 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/try_transmute_mut-src-not-frombytes.rs @@ -0,0 +1,24 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. 
+// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute_mut; + +#[derive(zerocopy::IntoBytes)] +#[repr(C)] +struct Src; + +#[derive(zerocopy::TryFromBytes)] +#[repr(C)] +struct Dst; + +fn main() { + // `try_transmute_mut` requires that the source type implements `FromBytes` + let src_not_from_bytes: &mut Dst = transmute_mut!(&mut Src); +} diff --git a/vendor/zerocopy/tests/ui-msrv/try_transmute_mut-src-not-frombytes.stderr b/vendor/zerocopy/tests/ui-msrv/try_transmute_mut-src-not-frombytes.stderr new file mode 100644 index 00000000..f088592e --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/try_transmute_mut-src-not-frombytes.stderr @@ -0,0 +1,23 @@ +error[E0277]: the trait bound `Src: FromBytes` is not satisfied + --> tests/ui-msrv/try_transmute_mut-src-not-frombytes.rs:23:40 + | +23 | let src_not_from_bytes: &mut Dst = transmute_mut!(&mut Src); + | ^^^^^^^^^^^^^^^^^^^^^^^^ the trait `FromBytes` is not implemented for `Src` + | + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `Dst: FromBytes` is not satisfied + --> tests/ui-msrv/try_transmute_mut-src-not-frombytes.rs:23:40 + | +23 | let src_not_from_bytes: &mut Dst = transmute_mut!(&mut Src); + | ^^^^^^^^^^^^^^^^^^^^^^^^ the trait `FromBytes` is not implemented for `Dst` + | + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `Dst: IntoBytes` is not satisfied + --> tests/ui-msrv/try_transmute_mut-src-not-frombytes.rs:23:40 + | +23 | let src_not_from_bytes: &mut Dst = transmute_mut!(&mut Src); + | ^^^^^^^^^^^^^^^^^^^^^^^^ the trait `IntoBytes` is not implemented for `Dst` + | + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-msrv/try_transmute_mut-src-not-intobytes.rs b/vendor/zerocopy/tests/ui-msrv/try_transmute_mut-src-not-intobytes.rs new file mode 100644 index 00000000..fa3b7032 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/try_transmute_mut-src-not-intobytes.rs @@ -0,0 +1,24 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +extern crate zerocopy; + +use zerocopy::transmute_mut; + +#[derive(zerocopy::FromBytes)] +#[repr(C)] +struct Src; + +#[derive(zerocopy::TryFromBytes)] +#[repr(C)] +struct Dst; + +fn main() { + // `try_transmute_mut` requires that the source type implements `IntoBytes` + let src_not_from_bytes: &mut Dst = transmute_mut!(&mut Src); +} diff --git a/vendor/zerocopy/tests/ui-msrv/try_transmute_mut-src-not-intobytes.stderr b/vendor/zerocopy/tests/ui-msrv/try_transmute_mut-src-not-intobytes.stderr new file mode 100644 index 00000000..76ace191 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/try_transmute_mut-src-not-intobytes.stderr @@ -0,0 +1,23 @@ +error[E0277]: the trait bound `Src: IntoBytes` is not satisfied + --> tests/ui-msrv/try_transmute_mut-src-not-intobytes.rs:23:40 + | +23 | let src_not_from_bytes: &mut Dst = transmute_mut!(&mut Src); + | ^^^^^^^^^^^^^^^^^^^^^^^^ the trait `IntoBytes` is not implemented for `Src` + | + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `Dst: FromBytes` is not satisfied + --> tests/ui-msrv/try_transmute_mut-src-not-intobytes.rs:23:40 + | +23 | let src_not_from_bytes: &mut Dst = transmute_mut!(&mut Src); + | ^^^^^^^^^^^^^^^^^^^^^^^^ the trait `FromBytes` is not implemented for `Dst` + | + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `Dst: IntoBytes` is not satisfied + --> tests/ui-msrv/try_transmute_mut-src-not-intobytes.rs:23:40 + | +23 | let src_not_from_bytes: &mut Dst = transmute_mut!(&mut Src); + | ^^^^^^^^^^^^^^^^^^^^^^^^ the trait `IntoBytes` is not implemented for `Dst` + | + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-msrv/try_transmute_ref-alignment-increase.rs b/vendor/zerocopy/tests/ui-msrv/try_transmute_ref-alignment-increase.rs new file mode 100644 index 00000000..ad1062fb --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/try_transmute_ref-alignment-increase.rs @@ -0,0 +1,20 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::try_transmute_ref; + +// `try_transmute_ref!` does not support transmuting from a type of smaller +// alignment to one of larger alignment. 
+fn main() { + let _increase_size: Result<&AU16, _> = try_transmute_ref!(&[0u8; 2]); +} diff --git a/vendor/zerocopy/tests/ui-msrv/try_transmute_ref-alignment-increase.stderr b/vendor/zerocopy/tests/ui-msrv/try_transmute_ref-alignment-increase.stderr new file mode 100644 index 00000000..631aec10 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/try_transmute_ref-alignment-increase.stderr @@ -0,0 +1,9 @@ +error[E0512]: cannot transmute between types of different sizes, or dependently-sized types + --> tests/ui-msrv/try_transmute_ref-alignment-increase.rs:19:44 + | +19 | let _increase_size: Result<&AU16, _> = try_transmute_ref!(&[0u8; 2]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: source type: `AlignOf<[u8; 2]>` (8 bits) + = note: target type: `MaxAlignsOf<[u8; 2], AU16>` (16 bits) + = note: this error originates in the macro `$crate::assert_align_gt_eq` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-msrv/try_transmute_ref-dst-mutable.rs b/vendor/zerocopy/tests/ui-msrv/try_transmute_ref-dst-mutable.rs new file mode 100644 index 00000000..e27a1294 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/try_transmute_ref-dst-mutable.rs @@ -0,0 +1,19 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::try_transmute_ref; + +fn main() {} + +fn ref_dst_mutable() { + // `try_transmute_ref!` requires that its destination type be an immutable + // reference. 
+ let _: Result<&mut u8, _> = try_transmute_ref!(&0u8); +} diff --git a/vendor/zerocopy/tests/ui-msrv/try_transmute_ref-dst-mutable.stderr b/vendor/zerocopy/tests/ui-msrv/try_transmute_ref-dst-mutable.stderr new file mode 100644 index 00000000..2c4ca40c --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/try_transmute_ref-dst-mutable.stderr @@ -0,0 +1,22 @@ +error[E0308]: mismatched types + --> tests/ui-msrv/try_transmute_ref-dst-mutable.rs:18:33 + | +18 | let _: Result<&mut u8, _> = try_transmute_ref!(&0u8); + | ^^^^^^^^^^^^^^^^^^^^^^^^ types differ in mutability + | + = note: expected mutable reference `&mut u8` + found reference `&_` + = note: this error originates in the macro `try_transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0308]: mismatched types + --> tests/ui-msrv/try_transmute_ref-dst-mutable.rs:18:33 + | +18 | let _: Result<&mut u8, _> = try_transmute_ref!(&0u8); + | ^^^^^^^^^^^^^^^^^^^^^^^^ + | | + | types differ in mutability + | help: try using a variant of the expected enum: `Err($crate::util::macro_util::try_transmute_ref::<_, _>(e))` + | + = note: expected enum `Result<&mut u8, _>` + found enum `Result<&_, ValidityError<&u8, _>>` + = note: this error originates in the macro `try_transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-msrv/try_transmute_ref-dst-not-immutable-tryfrombytes.rs b/vendor/zerocopy/tests/ui-msrv/try_transmute_ref-dst-not-immutable-tryfrombytes.rs new file mode 100644 index 00000000..3928a1cb --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/try_transmute_ref-dst-not-immutable-tryfrombytes.rs @@ -0,0 +1,20 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::{NotZerocopy, AU16}; +use zerocopy::try_transmute_ref; + +fn main() { + // `try_transmute_ref` requires that the source type implements `Immutable` + // and `IntoBytes` + let dst_not_try_from_bytes: Result<&NotZerocopy, _> = try_transmute_ref!(&AU16(0)); +} diff --git a/vendor/zerocopy/tests/ui-msrv/try_transmute_ref-dst-not-immutable-tryfrombytes.stderr b/vendor/zerocopy/tests/ui-msrv/try_transmute_ref-dst-not-immutable-tryfrombytes.stderr new file mode 100644 index 00000000..fb6e75f2 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/try_transmute_ref-dst-not-immutable-tryfrombytes.stderr @@ -0,0 +1,50 @@ +error[E0277]: the trait bound `NotZerocopy: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-msrv/try_transmute_ref-dst-not-immutable-tryfrombytes.rs:19:59 + | +19 | let dst_not_try_from_bytes: Result<&NotZerocopy, _> = try_transmute_ref!(&AU16(0)); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `zerocopy::TryFromBytes` is not implemented for `NotZerocopy` + | +note: required by a bound in `try_transmute_ref` + --> src/util/macro_util.rs + | + | Dst: TryFromBytes + Immutable, + | ^^^^^^^^^^^^ required by this bound in `try_transmute_ref` + = note: this error originates in the macro `try_transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy: zerocopy::Immutable` is not satisfied + --> tests/ui-msrv/try_transmute_ref-dst-not-immutable-tryfrombytes.rs:19:59 + | +19 | let dst_not_try_from_bytes: Result<&NotZerocopy, _> = try_transmute_ref!(&AU16(0)); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `zerocopy::Immutable` is not implemented for `NotZerocopy` + | +note: required by a bound in `try_transmute_ref` + --> src/util/macro_util.rs + | + | Dst: TryFromBytes + Immutable, + | ^^^^^^^^^ required by this bound in `try_transmute_ref` + = note: this error originates in the macro `try_transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-msrv/try_transmute_ref-dst-not-immutable-tryfrombytes.rs:19:33 + | +19 | let dst_not_try_from_bytes: Result<&NotZerocopy, _> = try_transmute_ref!(&AU16(0)); + | ^^^^^^^^^^^^^^^^^^^^^^^ the trait `zerocopy::TryFromBytes` is not implemented for `NotZerocopy` + | +note: required by a bound in `ValidityError` + --> src/error.rs + | + | pub struct ValidityError<Src, Dst: ?Sized + TryFromBytes> { + | ^^^^^^^^^^^^ required by this bound in `ValidityError` + +error[E0277]: the trait bound `NotZerocopy: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-msrv/try_transmute_ref-dst-not-immutable-tryfrombytes.rs:19:59 + | +19 | let dst_not_try_from_bytes: Result<&NotZerocopy, _> = try_transmute_ref!(&AU16(0)); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `zerocopy::TryFromBytes` is not implemented for `NotZerocopy` + | +note: required by a bound in `ValidityError` + --> src/error.rs + | + | pub struct ValidityError<Src, Dst: ?Sized + TryFromBytes> { + | ^^^^^^^^^^^^ required by this bound in `ValidityError` + = note: this error originates in the macro `try_transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-msrv/try_transmute_ref-size-decrease.rs b/vendor/zerocopy/tests/ui-msrv/try_transmute_ref-size-decrease.rs new file mode 100644 index 00000000..60bd70c2 --- /dev/null +++ 
b/vendor/zerocopy/tests/ui-msrv/try_transmute_ref-size-decrease.rs @@ -0,0 +1,20 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::try_transmute_ref; + +// Although this is not a soundness requirement, we currently require that the +// size of the destination type is not smaller than the size of the source type. +fn main() { + let _decrease_size: Result<&u8, _> = try_transmute_ref!(&AU16(0)); +} diff --git a/vendor/zerocopy/tests/ui-msrv/try_transmute_ref-size-decrease.stderr b/vendor/zerocopy/tests/ui-msrv/try_transmute_ref-size-decrease.stderr new file mode 100644 index 00000000..bbee5e13 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/try_transmute_ref-size-decrease.stderr @@ -0,0 +1,9 @@ +error[E0512]: cannot transmute between types of different sizes, or dependently-sized types + --> tests/ui-msrv/try_transmute_ref-size-decrease.rs:19:42 + | +19 | let _decrease_size: Result<&u8, _> = try_transmute_ref!(&AU16(0)); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: source type: `AU16` (16 bits) + = note: target type: `u8` (8 bits) + = note: this error originates in the macro `$crate::assert_size_eq` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-msrv/try_transmute_ref-size-increase.rs b/vendor/zerocopy/tests/ui-msrv/try_transmute_ref-size-increase.rs new file mode 100644 index 00000000..ee5a5fd4 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/try_transmute_ref-size-increase.rs @@ -0,0 +1,20 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::try_transmute_ref; + +// `try_transmute_ref!` does not support transmuting from a smaller type to a +// larger one. 
+fn main() { + let _increase_size: Result<&AU16, _> = try_transmute_ref!(&[0u8; 2]); +} diff --git a/vendor/zerocopy/tests/ui-msrv/try_transmute_ref-size-increase.stderr b/vendor/zerocopy/tests/ui-msrv/try_transmute_ref-size-increase.stderr new file mode 100644 index 00000000..66362f08 --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/try_transmute_ref-size-increase.stderr @@ -0,0 +1,9 @@ +error[E0512]: cannot transmute between types of different sizes, or dependently-sized types + --> tests/ui-msrv/try_transmute_ref-size-increase.rs:19:44 + | +19 | let _increase_size: Result<&AU16, _> = try_transmute_ref!(&[0u8; 2]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: source type: `AlignOf<[u8; 2]>` (8 bits) + = note: target type: `MaxAlignsOf<[u8; 2], AU16>` (16 bits) + = note: this error originates in the macro `$crate::assert_align_gt_eq` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-msrv/try_transmute_ref-src-not-immutable-intobytes.rs b/vendor/zerocopy/tests/ui-msrv/try_transmute_ref-src-not-immutable-intobytes.rs new file mode 100644 index 00000000..2aec95ae --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/try_transmute_ref-src-not-immutable-intobytes.rs @@ -0,0 +1,20 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::{NotZerocopy, AU16}; +use zerocopy::try_transmute_ref; + +fn main() { + // `try_transmute_ref` requires that the source type implements `Immutable` + // and `IntoBytes` + let src_not_into_bytes: Result<&AU16, _> = try_transmute_ref!(&NotZerocopy(AU16(0))); +} diff --git a/vendor/zerocopy/tests/ui-msrv/try_transmute_ref-src-not-immutable-intobytes.stderr b/vendor/zerocopy/tests/ui-msrv/try_transmute_ref-src-not-immutable-intobytes.stderr new file mode 100644 index 00000000..ca5a0daf --- /dev/null +++ b/vendor/zerocopy/tests/ui-msrv/try_transmute_ref-src-not-immutable-intobytes.stderr @@ -0,0 +1,25 @@ +error[E0277]: the trait bound `NotZerocopy<AU16>: zerocopy::IntoBytes` is not satisfied + --> tests/ui-msrv/try_transmute_ref-src-not-immutable-intobytes.rs:19:48 + | +19 | let src_not_into_bytes: Result<&AU16, _> = try_transmute_ref!(&NotZerocopy(AU16(0))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `zerocopy::IntoBytes` is not implemented for `NotZerocopy<AU16>` + | +note: required by a bound in `try_transmute_ref` + --> src/util/macro_util.rs + | + | Src: IntoBytes + Immutable, + | ^^^^^^^^^ required by this bound in `try_transmute_ref` + = note: this error originates in the macro `try_transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy<AU16>: zerocopy::Immutable` is not satisfied + --> tests/ui-msrv/try_transmute_ref-src-not-immutable-intobytes.rs:19:48 + | +19 | let src_not_into_bytes: Result<&AU16, _> = try_transmute_ref!(&NotZerocopy(AU16(0))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected an implementor of trait `zerocopy::Immutable` + | +note: required by a bound in `try_transmute_ref` + --> src/util/macro_util.rs + | + | Src: IntoBytes + Immutable, + | ^^^^^^^^^ required by this bound in 
`try_transmute_ref` + = note: this error originates in the macro `try_transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-from-bytes.rs b/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-from-bytes.rs new file mode 100644 index 00000000..327abca3 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-from-bytes.rs @@ -0,0 +1,21 @@ +// Copyright 2022 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::NotZerocopy; +use zerocopy::FromBytes; + +fn main() { + // We expect the proper diagnostic to be emitted on Rust 1.78.0 and later. + takes_from_bytes::<NotZerocopy>(); +} + +fn takes_from_bytes<T: FromBytes>() {} diff --git a/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-from-bytes.stderr b/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-from-bytes.stderr new file mode 100644 index 00000000..96aedb28 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-from-bytes.stderr @@ -0,0 +1,27 @@ +error[E0277]: the trait bound `NotZerocopy: zerocopy::FromBytes` is not satisfied + --> tests/ui-nightly/diagnostic-not-implemented-from-bytes.rs:18:24 + | +18 | takes_from_bytes::<NotZerocopy>(); + | ^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::FromBytes` is not implemented for `NotZerocopy` + --> tests/ui-nightly/../../zerocopy-derive/tests/include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(FromBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::FromBytes`: + () + AU16 + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + AtomicU16 + and $N others +note: required by a bound in `takes_from_bytes` + --> tests/ui-nightly/diagnostic-not-implemented-from-bytes.rs:21:24 + | +21 | fn takes_from_bytes<T: FromBytes>() {} + | ^^^^^^^^^ required by this bound in `takes_from_bytes` diff --git a/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-from-zeros.rs b/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-from-zeros.rs new file mode 100644 index 00000000..6f7f41c2 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-from-zeros.rs @@ -0,0 +1,21 @@ +// Copyright 2022 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::NotZerocopy; +use zerocopy::FromZeros; + +fn main() { + // We expect the proper diagnostic to be emitted on Rust 1.78.0 and later. 
+ takes_from_zeros::<NotZerocopy>(); +} + +fn takes_from_zeros<T: FromZeros>() {} diff --git a/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-from-zeros.stderr b/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-from-zeros.stderr new file mode 100644 index 00000000..00d4d841 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-from-zeros.stderr @@ -0,0 +1,27 @@ +error[E0277]: the trait bound `NotZerocopy: FromZeros` is not satisfied + --> tests/ui-nightly/diagnostic-not-implemented-from-zeros.rs:18:24 + | +18 | takes_from_zeros::<NotZerocopy>(); + | ^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `FromZeros` is not implemented for `NotZerocopy` + --> tests/ui-nightly/../../zerocopy-derive/tests/include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(FromZeros)]` to `NotZerocopy` + = help: the following other types implement trait `FromZeros`: + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + and $N others +note: required by a bound in `takes_from_zeros` + --> tests/ui-nightly/diagnostic-not-implemented-from-zeros.rs:21:24 + | +21 | fn takes_from_zeros<T: FromZeros>() {} + | ^^^^^^^^^ required by this bound in `takes_from_zeros` diff --git a/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-immutable.rs b/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-immutable.rs new file mode 100644 index 00000000..0432ed44 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-immutable.rs @@ -0,0 +1,21 @@ +// Copyright 2022 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::NotZerocopy; +use zerocopy::Immutable; + +fn main() { + // We expect the proper diagnostic to be emitted on Rust 1.78.0 and later. 
+ takes_immutable::<NotZerocopy>(); +} + +fn takes_immutable<T: Immutable>() {} diff --git a/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-immutable.stderr b/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-immutable.stderr new file mode 100644 index 00000000..6a7dfb9b --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-immutable.stderr @@ -0,0 +1,27 @@ +error[E0277]: the trait bound `NotZerocopy: zerocopy::Immutable` is not satisfied + --> tests/ui-nightly/diagnostic-not-implemented-immutable.rs:18:23 + | +18 | takes_immutable::<NotZerocopy>(); + | ^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::Immutable` is not implemented for `NotZerocopy` + --> tests/ui-nightly/../../zerocopy-derive/tests/include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(Immutable)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::Immutable`: + &T + &mut T + () + *const T + *mut T + AU16 + Box<T> + F32<O> + and $N others +note: required by a bound in `takes_immutable` + --> tests/ui-nightly/diagnostic-not-implemented-immutable.rs:21:23 + | +21 | fn takes_immutable<T: Immutable>() {} + | ^^^^^^^^^ required by this bound in `takes_immutable` diff --git a/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-into-bytes.rs b/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-into-bytes.rs new file mode 100644 index 00000000..f9435536 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-into-bytes.rs @@ -0,0 +1,21 @@ +// Copyright 2022 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::NotZerocopy; +use zerocopy::IntoBytes; + +fn main() { + // We expect the proper diagnostic to be emitted on Rust 1.78.0 and later. 
+ takes_into_bytes::<NotZerocopy>(); +} + +fn takes_into_bytes<T: IntoBytes>() {} diff --git a/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-into-bytes.stderr b/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-into-bytes.stderr new file mode 100644 index 00000000..07031fe3 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-into-bytes.stderr @@ -0,0 +1,27 @@ +error[E0277]: the trait bound `NotZerocopy: zerocopy::IntoBytes` is not satisfied + --> tests/ui-nightly/diagnostic-not-implemented-into-bytes.rs:18:24 + | +18 | takes_into_bytes::<NotZerocopy>(); + | ^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::IntoBytes` is not implemented for `NotZerocopy` + --> tests/ui-nightly/../../zerocopy-derive/tests/include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(IntoBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::IntoBytes`: + () + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + and $N others +note: required by a bound in `takes_into_bytes` + --> tests/ui-nightly/diagnostic-not-implemented-into-bytes.rs:21:24 + | +21 | fn takes_into_bytes<T: IntoBytes>() {} + | ^^^^^^^^^ required by this bound in `takes_into_bytes` diff --git a/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-issue-1296.rs b/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-issue-1296.rs new file mode 100644 index 00000000..e12737a6 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-issue-1296.rs @@ -0,0 +1,59 @@ +// Copyright 2022 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::NotZerocopy; +use zerocopy::{Immutable, IntoBytes}; + +fn main() { + // This is adapted from #1296, which includes the following text: + // + // The compiler errors when a type is missing Immutable are somewhat + // misleading, although I'm not sure there's much zerocopy can do about + // this. 
An example where the compiler recommends adding a reference + // rather than implementing Immutable (some were even more confusing than + // this): + // + // error[E0277]: the trait bound `virtio::wl::CtrlVfdNewDmabuf: zerocopy::Immutable` is not satisfied + // --> devices/src/virtio/wl.rs:317:20 + // | + // 317 | .write_obj(ctrl_vfd_new_dmabuf) + // | --------- ^^^^^^^^^^^^^^^^^^^ the trait `zerocopy::Immutable` is not implemented for `virtio::wl::CtrlVfdNewDmabuf` + // | | + // | required by a bound introduced by this call + // | + // note: required by a bound in `virtio::descriptor_utils::Writer::write_obj` + // --> devices/src/virtio/descriptor_utils.rs:536:25 + // | + // 536 | pub fn write_obj<T: Immutable + IntoBytes>(&mut self, val: T) -> io::Result<()> { + // | ^^^^^^^^^ required by this bound in `Writer::write_obj` + // help: consider borrowing here + // | + // 317 | .write_obj(&ctrl_vfd_new_dmabuf) + // | + + // 317 | .write_obj(&mut ctrl_vfd_new_dmabuf) + // | ++++ + // + // Taking the compiler's suggestion results in a different error with a + // recommendation to remove the reference (back to the original code). + // + // As of this writing, the described problem is still happening thanks to + // https://github.com/rust-lang/rust/issues/130563. We include this test so + // that we can capture the current behavior, but we will update it once that + // Rust issue is fixed. + Foo.write_obj(NotZerocopy(())); +} + +struct Foo; + +impl Foo { + fn write_obj<T: Immutable + IntoBytes>(&mut self, _val: T) {} +} diff --git a/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-issue-1296.stderr b/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-issue-1296.stderr new file mode 100644 index 00000000..a6c817b7 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-issue-1296.stderr @@ -0,0 +1,49 @@ +error[E0277]: the trait bound `NotZerocopy: zerocopy::Immutable` is not satisfied + --> tests/ui-nightly/diagnostic-not-implemented-issue-1296.rs:52:19 + | +52 | Foo.write_obj(NotZerocopy(())); + | --------- ^^^^^^^^^^^^^^^ the trait `zerocopy::Immutable` is not implemented for `NotZerocopy` + | | + | required by a bound introduced by this call + | +note: required by a bound in `Foo::write_obj` + --> tests/ui-nightly/diagnostic-not-implemented-issue-1296.rs:58:21 + | +58 | fn write_obj<T: Immutable + IntoBytes>(&mut self, _val: T) {} + | ^^^^^^^^^ required by this bound in `Foo::write_obj` +help: consider borrowing here + | +52 | Foo.write_obj(&NotZerocopy(())); + | + +52 | Foo.write_obj(&mut NotZerocopy(())); + | ++++ + +error[E0277]: the trait bound `NotZerocopy: zerocopy::IntoBytes` is not satisfied + --> tests/ui-nightly/diagnostic-not-implemented-issue-1296.rs:52:19 + | +52 | Foo.write_obj(NotZerocopy(())); + | --------- ^^^^^^^^^^^^^^^ unsatisfied trait bound + | | + | required by a bound introduced by this call + | +help: the trait `zerocopy::IntoBytes` is not implemented for `NotZerocopy` + --> tests/ui-nightly/../../zerocopy-derive/tests/include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(IntoBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::IntoBytes`: + () + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + and $N others +note: required by a bound in `Foo::write_obj` + --> tests/ui-nightly/diagnostic-not-implemented-issue-1296.rs:58:33 + | +58 | fn write_obj<T: Immutable + IntoBytes>(&mut self, 
_val: T) {} + | ^^^^^^^^^ required by this bound in `Foo::write_obj` diff --git a/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-known-layout.rs b/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-known-layout.rs new file mode 100644 index 00000000..2e306c25 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-known-layout.rs @@ -0,0 +1,21 @@ +// Copyright 2022 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::NotZerocopy; +use zerocopy::KnownLayout; + +fn main() { + // We expect the proper diagnostic to be emitted on Rust 1.78.0 and later. + takes_known_layout::<NotZerocopy>(); +} + +fn takes_known_layout<T: KnownLayout>() {} diff --git a/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-known-layout.stderr b/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-known-layout.stderr new file mode 100644 index 00000000..3047a38c --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-known-layout.stderr @@ -0,0 +1,27 @@ +error[E0277]: the trait bound `NotZerocopy: zerocopy::KnownLayout` is not satisfied + --> tests/ui-nightly/diagnostic-not-implemented-known-layout.rs:18:26 + | +18 | takes_known_layout::<NotZerocopy>(); + | ^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::KnownLayout` is not implemented for `NotZerocopy` + --> tests/ui-nightly/../../zerocopy-derive/tests/include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(KnownLayout)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::KnownLayout`: + &T + &mut T + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + and $N others +note: required by a bound in `takes_known_layout` + --> tests/ui-nightly/diagnostic-not-implemented-known-layout.rs:21:26 + | +21 | fn takes_known_layout<T: KnownLayout>() {} + | ^^^^^^^^^^^ required by this bound in `takes_known_layout` diff --git a/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-try-from-bytes.rs b/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-try-from-bytes.rs new file mode 100644 index 00000000..72e72139 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-try-from-bytes.rs @@ -0,0 +1,21 @@ +// Copyright 2022 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::NotZerocopy; +use zerocopy::TryFromBytes; + +fn main() { + // We expect the proper diagnostic to be emitted on Rust 1.78.0 and later. 
+ takes_try_from_bytes::<NotZerocopy>(); +} + +fn takes_try_from_bytes<T: TryFromBytes>() {} diff --git a/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-try-from-bytes.stderr b/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-try-from-bytes.stderr new file mode 100644 index 00000000..f080bb44 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-try-from-bytes.stderr @@ -0,0 +1,27 @@ +error[E0277]: the trait bound `NotZerocopy: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-nightly/diagnostic-not-implemented-try-from-bytes.rs:18:28 + | +18 | takes_try_from_bytes::<NotZerocopy>(); + | ^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::TryFromBytes` is not implemented for `NotZerocopy` + --> tests/ui-nightly/../../zerocopy-derive/tests/include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(TryFromBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::TryFromBytes`: + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + and $N others +note: required by a bound in `takes_try_from_bytes` + --> tests/ui-nightly/diagnostic-not-implemented-try-from-bytes.rs:21:28 + | +21 | fn takes_try_from_bytes<T: TryFromBytes>() {} + | ^^^^^^^^^^^^ required by this bound in `takes_try_from_bytes` diff --git a/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-unaligned.rs b/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-unaligned.rs new file mode 100644 index 00000000..95ce5bd5 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-unaligned.rs @@ -0,0 +1,21 @@ +// Copyright 2022 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::NotZerocopy; +use zerocopy::Unaligned; + +fn main() { + // We expect the proper diagnostic to be emitted on Rust 1.78.0 and later. 
+ takes_unaligned::<NotZerocopy>(); +} + +fn takes_unaligned<T: Unaligned>() {} diff --git a/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-unaligned.stderr b/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-unaligned.stderr new file mode 100644 index 00000000..6dc71104 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/diagnostic-not-implemented-unaligned.stderr @@ -0,0 +1,27 @@ +error[E0277]: the trait bound `NotZerocopy: zerocopy::Unaligned` is not satisfied + --> tests/ui-nightly/diagnostic-not-implemented-unaligned.rs:18:23 + | +18 | takes_unaligned::<NotZerocopy>(); + | ^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::Unaligned` is not implemented for `NotZerocopy` + --> tests/ui-nightly/../../zerocopy-derive/tests/include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(Unaligned)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::Unaligned`: + () + AtomicBool + AtomicI8 + AtomicU8 + Cell<T> + F32<O> + F64<O> + I128<O> + and $N others +note: required by a bound in `takes_unaligned` + --> tests/ui-nightly/diagnostic-not-implemented-unaligned.rs:21:23 + | +21 | fn takes_unaligned<T: Unaligned>() {} + | ^^^^^^^^^ required by this bound in `takes_unaligned` diff --git a/vendor/zerocopy/tests/ui-nightly/include_value_not_from_bytes.rs b/vendor/zerocopy/tests/ui-nightly/include_value_not_from_bytes.rs new file mode 100644 index 00000000..e6289336 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/include_value_not_from_bytes.rs @@ -0,0 +1,19 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +#[macro_use] +extern crate zerocopy; + +use util::NotZerocopy; + +fn main() {} + +// Should fail because `NotZerocopy<u32>: !FromBytes`. 
+const NOT_FROM_BYTES: NotZerocopy<u32> = include_value!("../../testdata/include_value/data"); diff --git a/vendor/zerocopy/tests/ui-nightly/include_value_not_from_bytes.stderr b/vendor/zerocopy/tests/ui-nightly/include_value_not_from_bytes.stderr new file mode 100644 index 00000000..f7999508 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/include_value_not_from_bytes.stderr @@ -0,0 +1,31 @@ +error[E0277]: the trait bound `NotZerocopy<u32>: zerocopy::FromBytes` is not satisfied + --> tests/ui-nightly/include_value_not_from_bytes.rs:19:42 + | +19 | const NOT_FROM_BYTES: NotZerocopy<u32> = include_value!("../../testdata/include_value/data"); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::FromBytes` is not implemented for `NotZerocopy<u32>` + --> tests/ui-nightly/../../zerocopy-derive/tests/include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(FromBytes)]` to `NotZerocopy<u32>` + = help: the following other types implement trait `zerocopy::FromBytes`: + () + AU16 + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + AtomicU16 + and $N others +note: required by a bound in `NOT_FROM_BYTES::transmute` + --> tests/ui-nightly/include_value_not_from_bytes.rs:19:42 + | +19 | const NOT_FROM_BYTES: NotZerocopy<u32> = include_value!("../../testdata/include_value/data"); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | | + | required by a bound in this function + | required by this bound in `transmute` + = note: this error originates in the macro `$crate::transmute` which comes from the expansion of the macro `include_value` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-nightly/include_value_wrong_size.rs b/vendor/zerocopy/tests/ui-nightly/include_value_wrong_size.rs new file mode 100644 index 00000000..e02b2be8 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/include_value_wrong_size.rs @@ -0,0 +1,15 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +#[macro_use] +extern crate zerocopy; + +fn main() {} + +// Should fail because the file is 4 bytes long, not 8. 
+const WRONG_SIZE: u64 = include_value!("../../testdata/include_value/data"); diff --git a/vendor/zerocopy/tests/ui-nightly/include_value_wrong_size.stderr b/vendor/zerocopy/tests/ui-nightly/include_value_wrong_size.stderr new file mode 100644 index 00000000..4f9472ea --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/include_value_wrong_size.stderr @@ -0,0 +1,17 @@ +error[E0080]: transmuting from 4-byte type to 8-byte type: `[u8; 4]` -> `u64` + --> tests/ui-nightly/include_value_wrong_size.rs:15:25 + | +15 | const WRONG_SIZE: u64 = include_value!("../../testdata/include_value/data"); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ evaluation of `WRONG_SIZE` failed here + | + = note: this error originates in the macro `$crate::transmute` which comes from the expansion of the macro `include_value` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0512]: cannot transmute between types of different sizes, or dependently-sized types + --> tests/ui-nightly/include_value_wrong_size.rs:15:25 + | +15 | const WRONG_SIZE: u64 = include_value!("../../testdata/include_value/data"); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: source type: `[u8; 4]` (32 bits) + = note: target type: `u64` (64 bits) + = note: this error originates in the macro `$crate::transmute` which comes from the expansion of the macro `include_value` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-nightly/invalid-impls/invalid-impls.rs b/vendor/zerocopy/tests/ui-nightly/invalid-impls/invalid-impls.rs new file mode 100644 index 00000000..78f40e84 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/invalid-impls/invalid-impls.rs @@ -0,0 +1,32 @@ +// Copyright 2022 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +// Since some macros from `macros.rs` are unused. 
+#![allow(unused)] + +extern crate zerocopy; +extern crate zerocopy_derive; + +include!("../../../src/util/macros.rs"); + +use zerocopy::*; +use zerocopy_derive::*; + +fn main() {} + +#[derive(FromBytes, IntoBytes, Unaligned)] +#[repr(transparent)] +struct Foo<T>(T); + +const _: () = unsafe { + impl_or_verify!(T => TryFromBytes for Foo<T>); + impl_or_verify!(T => FromZeros for Foo<T>); + impl_or_verify!(T => FromBytes for Foo<T>); + impl_or_verify!(T => IntoBytes for Foo<T>); + impl_or_verify!(T => Unaligned for Foo<T>); +}; diff --git a/vendor/zerocopy/tests/ui-nightly/invalid-impls/invalid-impls.stderr b/vendor/zerocopy/tests/ui-nightly/invalid-impls/invalid-impls.stderr new file mode 100644 index 00000000..169f5d19 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/invalid-impls/invalid-impls.stderr @@ -0,0 +1,139 @@ +error[E0277]: the trait bound `T: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-nightly/invalid-impls/invalid-impls.rs:27:43 + | + 27 | impl_or_verify!(T => TryFromBytes for Foo<T>); + | ^^^^^^ the trait `zerocopy::TryFromBytes` is not implemented for `T` + | + = note: Consider adding `#[derive(TryFromBytes)]` to `T` +note: required for `Foo<T>` to implement `zerocopy::TryFromBytes` + --> tests/ui-nightly/invalid-impls/invalid-impls.rs:22:10 + | + 22 | #[derive(FromBytes, IntoBytes, Unaligned)] + | ^^^^^^^^^ unsatisfied trait bound introduced in this `derive` macro +note: required by a bound in `_::Subtrait` + --> tests/ui-nightly/invalid-impls/../../../src/util/macros.rs + | + | trait Subtrait: $trait {} + | ^^^^^^ required by this bound in `Subtrait` + | + ::: tests/ui-nightly/invalid-impls/invalid-impls.rs:27:5 + | + 27 | impl_or_verify!(T => TryFromBytes for Foo<T>); + | --------------------------------------------- in this macro invocation + = note: this error originates in the derive macro `FromBytes` which comes from the expansion of the macro `impl_or_verify` (in Nightly builds, run with -Z macro-backtrace for more info) +help: consider restricting type parameter `T` with trait `TryFromBytes` + | + 27 | impl_or_verify!(T: zerocopy::TryFromBytes => TryFromBytes for Foo<T>); + | ++++++++++++++++++++++++ + +error[E0277]: the trait bound `T: zerocopy::FromZeros` is not satisfied + --> tests/ui-nightly/invalid-impls/invalid-impls.rs:28:40 + | + 28 | impl_or_verify!(T => FromZeros for Foo<T>); + | ^^^^^^ the trait `zerocopy::FromZeros` is not implemented for `T` + | + = note: Consider adding `#[derive(FromZeros)]` to `T` +note: required for `Foo<T>` to implement `zerocopy::FromZeros` + --> tests/ui-nightly/invalid-impls/invalid-impls.rs:22:10 + | + 22 | #[derive(FromBytes, IntoBytes, Unaligned)] + | ^^^^^^^^^ unsatisfied trait bound introduced in this `derive` macro +note: required by a bound in `_::Subtrait` + --> tests/ui-nightly/invalid-impls/../../../src/util/macros.rs + | + | trait Subtrait: $trait {} + | ^^^^^^ required by this bound in `Subtrait` + | + ::: tests/ui-nightly/invalid-impls/invalid-impls.rs:28:5 + | + 28 | impl_or_verify!(T => FromZeros for Foo<T>); + | ------------------------------------------ in this macro invocation + = note: this error originates in the derive macro `FromBytes` which comes from the expansion of the macro `impl_or_verify` (in Nightly builds, run with -Z macro-backtrace for more info) +help: consider restricting type parameter `T` with trait `FromZeros` + | + 28 | impl_or_verify!(T: zerocopy::FromZeros => FromZeros for Foo<T>); + | +++++++++++++++++++++ + +error[E0277]: the trait bound `T: zerocopy::FromBytes` is not 
satisfied + --> tests/ui-nightly/invalid-impls/invalid-impls.rs:29:40 + | + 29 | impl_or_verify!(T => FromBytes for Foo<T>); + | ^^^^^^ the trait `zerocopy::FromBytes` is not implemented for `T` + | + = note: Consider adding `#[derive(FromBytes)]` to `T` +note: required for `Foo<T>` to implement `zerocopy::FromBytes` + --> tests/ui-nightly/invalid-impls/invalid-impls.rs:22:10 + | + 22 | #[derive(FromBytes, IntoBytes, Unaligned)] + | ^^^^^^^^^ unsatisfied trait bound introduced in this `derive` macro +note: required by a bound in `_::Subtrait` + --> tests/ui-nightly/invalid-impls/../../../src/util/macros.rs + | + | trait Subtrait: $trait {} + | ^^^^^^ required by this bound in `Subtrait` + | + ::: tests/ui-nightly/invalid-impls/invalid-impls.rs:29:5 + | + 29 | impl_or_verify!(T => FromBytes for Foo<T>); + | ------------------------------------------ in this macro invocation + = note: this error originates in the derive macro `FromBytes` which comes from the expansion of the macro `impl_or_verify` (in Nightly builds, run with -Z macro-backtrace for more info) +help: consider restricting type parameter `T` with trait `FromBytes` + | + 29 | impl_or_verify!(T: zerocopy::FromBytes => FromBytes for Foo<T>); + | +++++++++++++++++++++ + +error[E0277]: the trait bound `T: zerocopy::IntoBytes` is not satisfied + --> tests/ui-nightly/invalid-impls/invalid-impls.rs:30:40 + | + 30 | impl_or_verify!(T => IntoBytes for Foo<T>); + | ^^^^^^ the trait `zerocopy::IntoBytes` is not implemented for `T` + | + = note: Consider adding `#[derive(IntoBytes)]` to `T` +note: required for `Foo<T>` to implement `zerocopy::IntoBytes` + --> tests/ui-nightly/invalid-impls/invalid-impls.rs:22:21 + | + 22 | #[derive(FromBytes, IntoBytes, Unaligned)] + | ^^^^^^^^^ unsatisfied trait bound introduced in this `derive` macro +note: required by a bound in `_::Subtrait` + --> tests/ui-nightly/invalid-impls/../../../src/util/macros.rs + | + | trait Subtrait: $trait {} + | ^^^^^^ required by this bound in `Subtrait` + | + ::: tests/ui-nightly/invalid-impls/invalid-impls.rs:30:5 + | + 30 | impl_or_verify!(T => IntoBytes for Foo<T>); + | ------------------------------------------ in this macro invocation + = note: this error originates in the derive macro `IntoBytes` which comes from the expansion of the macro `impl_or_verify` (in Nightly builds, run with -Z macro-backtrace for more info) +help: consider restricting type parameter `T` with trait `IntoBytes` + | + 30 | impl_or_verify!(T: zerocopy::IntoBytes => IntoBytes for Foo<T>); + | +++++++++++++++++++++ + +error[E0277]: the trait bound `T: zerocopy::Unaligned` is not satisfied + --> tests/ui-nightly/invalid-impls/invalid-impls.rs:31:40 + | + 31 | impl_or_verify!(T => Unaligned for Foo<T>); + | ^^^^^^ the trait `zerocopy::Unaligned` is not implemented for `T` + | + = note: Consider adding `#[derive(Unaligned)]` to `T` +note: required for `Foo<T>` to implement `zerocopy::Unaligned` + --> tests/ui-nightly/invalid-impls/invalid-impls.rs:22:32 + | + 22 | #[derive(FromBytes, IntoBytes, Unaligned)] + | ^^^^^^^^^ unsatisfied trait bound introduced in this `derive` macro +note: required by a bound in `_::Subtrait` + --> tests/ui-nightly/invalid-impls/../../../src/util/macros.rs + | + | trait Subtrait: $trait {} + | ^^^^^^ required by this bound in `Subtrait` + | + ::: tests/ui-nightly/invalid-impls/invalid-impls.rs:31:5 + | + 31 | impl_or_verify!(T => Unaligned for Foo<T>); + | ------------------------------------------ in this macro invocation + = note: this error originates in the derive 
macro `Unaligned` which comes from the expansion of the macro `impl_or_verify` (in Nightly builds, run with -Z macro-backtrace for more info) +help: consider restricting type parameter `T` with trait `Unaligned` + | + 31 | impl_or_verify!(T: zerocopy::Unaligned => Unaligned for Foo<T>); + | +++++++++++++++++++++ diff --git a/vendor/zerocopy/tests/ui-nightly/max-align.rs b/vendor/zerocopy/tests/ui-nightly/max-align.rs new file mode 100644 index 00000000..53e3eb9b --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/max-align.rs @@ -0,0 +1,99 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +#[repr(C, align(1))] +struct Align1; + +#[repr(C, align(2))] +struct Align2; + +#[repr(C, align(4))] +struct Align4; + +#[repr(C, align(8))] +struct Align8; + +#[repr(C, align(16))] +struct Align16; + +#[repr(C, align(32))] +struct Align32; + +#[repr(C, align(64))] +struct Align64; + +#[repr(C, align(128))] +struct Align128; + +#[repr(C, align(256))] +struct Align256; + +#[repr(C, align(512))] +struct Align512; + +#[repr(C, align(1024))] +struct Align1024; + +#[repr(C, align(2048))] +struct Align2048; + +#[repr(C, align(4096))] +struct Align4096; + +#[repr(C, align(8192))] +struct Align8192; + +#[repr(C, align(16384))] +struct Align16384; + +#[repr(C, align(32768))] +struct Align32768; + +#[repr(C, align(65536))] +struct Align65536; + +#[repr(C, align(131072))] +struct Align131072; + +#[repr(C, align(262144))] +struct Align262144; + +#[repr(C, align(524288))] +struct Align524288; + +#[repr(C, align(1048576))] +struct Align1048576; + +#[repr(C, align(2097152))] +struct Align2097152; + +#[repr(C, align(4194304))] +struct Align4194304; + +#[repr(C, align(8388608))] +struct Align8388608; + +#[repr(C, align(16777216))] +struct Align16777216; + +#[repr(C, align(33554432))] +struct Align33554432; + +#[repr(C, align(67108864))] +struct Align67108864; + +#[repr(C, align(134217728))] +struct Align13421772; + +#[repr(C, align(268435456))] +struct Align26843545; + +#[repr(C, align(1073741824))] +struct Align1073741824; + +fn main() {} diff --git a/vendor/zerocopy/tests/ui-nightly/max-align.stderr b/vendor/zerocopy/tests/ui-nightly/max-align.stderr new file mode 100644 index 00000000..c11eed53 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/max-align.stderr @@ -0,0 +1,5 @@ +error[E0589]: invalid `repr(align)` attribute: larger than 2^29 + --> tests/ui-nightly/max-align.rs:96:17 + | +96 | #[repr(C, align(1073741824))] + | ^^^^^^^^^^ diff --git a/vendor/zerocopy/tests/ui-nightly/ptr-is-invariant-over-v.rs b/vendor/zerocopy/tests/ui-nightly/ptr-is-invariant-over-v.rs new file mode 100644 index 00000000..b9a76948 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/ptr-is-invariant-over-v.rs @@ -0,0 +1,29 @@ +// Copyright 2025 The Fuchsia Authors +// +// Licensed under the 2-Clause BSD License <LICENSE-BSD or +// https://opensource.org/license/bsd-2-clause>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +use zerocopy::pointer::{ + invariant::{Aligned, Exclusive, Shared, Valid}, + Ptr, +}; + +fn _when_exclusive<'big: 'small, 'small>( + big: Ptr<'small, &'big u32, (Exclusive, Aligned, Valid)>, + mut _small: Ptr<'small, &'small u32, (Exclusive, Aligned, Valid)>, +) { + _small = big; +} + +fn _when_shared<'big: 'small, 'small>( + big: Ptr<'small, &'big u32, (Shared, Aligned, Valid)>, + mut _small: Ptr<'small, &'small u32, (Shared, Aligned, Valid)>, +) { + _small = big; +} + +fn main() {} diff --git a/vendor/zerocopy/tests/ui-nightly/ptr-is-invariant-over-v.stderr b/vendor/zerocopy/tests/ui-nightly/ptr-is-invariant-over-v.stderr new file mode 100644 index 00000000..80cc65a5 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/ptr-is-invariant-over-v.stderr @@ -0,0 +1,31 @@ +error: lifetime may not live long enough + --> tests/ui-nightly/ptr-is-invariant-over-v.rs:19:5 + | +15 | fn _when_exclusive<'big: 'small, 'small>( + | ---- ------ lifetime `'small` defined here + | | + | lifetime `'big` defined here +... +19 | _small = big; + | ^^^^^^^^^^^^ assignment requires that `'small` must outlive `'big` + | + = help: consider adding the following bound: `'small: 'big` + = note: requirement occurs because of the type `Ptr<'_, &u32, (invariant::Exclusive, Aligned, Valid)>`, which makes the generic argument `&u32` invariant + = note: the struct `Ptr<'a, T, I>` is invariant over the parameter `T` + = help: see <https://doc.rust-lang.org/nomicon/subtyping.html> for more information about variance + +error: lifetime may not live long enough + --> tests/ui-nightly/ptr-is-invariant-over-v.rs:26:5 + | +22 | fn _when_shared<'big: 'small, 'small>( + | ---- ------ lifetime `'small` defined here + | | + | lifetime `'big` defined here +... +26 | _small = big; + | ^^^^^^^^^^^^ assignment requires that `'small` must outlive `'big` + | + = help: consider adding the following bound: `'small: 'big` + = note: requirement occurs because of the type `Ptr<'_, &u32, (Shared, Aligned, Valid)>`, which makes the generic argument `&u32` invariant + = note: the struct `Ptr<'a, T, I>` is invariant over the parameter `T` + = help: see <https://doc.rust-lang.org/nomicon/subtyping.html> for more information about variance diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-dst-not-frombytes.rs b/vendor/zerocopy/tests/ui-nightly/transmute-dst-not-frombytes.rs new file mode 100644 index 00000000..7b5098e5 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-dst-not-frombytes.rs @@ -0,0 +1,19 @@ +// Copyright 2022 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::{NotZerocopy, AU16}; +use zerocopy::transmute; + +fn main() {} + +// `transmute` requires that the destination type implements `FromBytes` +const DST_NOT_FROM_BYTES: NotZerocopy = transmute!(AU16(0)); diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-dst-not-frombytes.stderr b/vendor/zerocopy/tests/ui-nightly/transmute-dst-not-frombytes.stderr new file mode 100644 index 00000000..ade6fad6 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-dst-not-frombytes.stderr @@ -0,0 +1,31 @@ +error[E0277]: the trait bound `NotZerocopy: zerocopy::FromBytes` is not satisfied + --> tests/ui-nightly/transmute-dst-not-frombytes.rs:19:41 + | +19 | const DST_NOT_FROM_BYTES: NotZerocopy = transmute!(AU16(0)); + | ^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::FromBytes` is not implemented for `NotZerocopy` + --> tests/ui-nightly/../../zerocopy-derive/tests/include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(FromBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::FromBytes`: + () + AU16 + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + AtomicU16 + and $N others +note: required by a bound in `DST_NOT_FROM_BYTES::transmute` + --> tests/ui-nightly/transmute-dst-not-frombytes.rs:19:41 + | +19 | const DST_NOT_FROM_BYTES: NotZerocopy = transmute!(AU16(0)); + | ^^^^^^^^^^^^^^^^^^^ + | | + | required by a bound in this function + | required by this bound in `transmute` + = note: this error originates in the macro `transmute` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-mut-const.rs b/vendor/zerocopy/tests/ui-nightly/transmute-mut-const.rs new file mode 100644 index 00000000..47372b1b --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-mut-const.rs @@ -0,0 +1,20 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use zerocopy::transmute_mut; + +fn main() {} + +const ARRAY_OF_U8S: [u8; 2] = [0u8; 2]; + +// `transmute_mut!` cannot, generally speaking, be used in const contexts. 
+const CONST_CONTEXT: &mut [u8; 2] = transmute_mut!(&mut ARRAY_OF_U8S); diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-mut-const.stderr b/vendor/zerocopy/tests/ui-nightly/transmute-mut-const.stderr new file mode 100644 index 00000000..9396f611 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-mut-const.stderr @@ -0,0 +1,23 @@ +warning: taking a mutable reference to a `const` item + --> tests/ui-nightly/transmute-mut-const.rs:20:52 + | +20 | const CONST_CONTEXT: &mut [u8; 2] = transmute_mut!(&mut ARRAY_OF_U8S); + | ^^^^^^^^^^^^^^^^^ + | + = note: each usage of a `const` item creates a new temporary + = note: the mutable reference will refer to this temporary, not the original `const` item +note: `const` item defined here + --> tests/ui-nightly/transmute-mut-const.rs:17:1 + | +17 | const ARRAY_OF_U8S: [u8; 2] = [0u8; 2]; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: `#[warn(const_item_mutation)]` on by default + +error[E0015]: cannot call non-const method `Wrap::<&mut [u8; 2], &mut [u8; 2]>::transmute_mut` in constants + --> tests/ui-nightly/transmute-mut-const.rs:20:37 + | +20 | const CONST_CONTEXT: &mut [u8; 2] = transmute_mut!(&mut ARRAY_OF_U8S); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: calls in constants are limited to constant functions, tuple structs and tuple variants + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-mut-dst-not-a-reference.rs b/vendor/zerocopy/tests/ui-nightly/transmute-mut-dst-not-a-reference.rs new file mode 100644 index 00000000..33a9ecd9 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-mut-dst-not-a-reference.rs @@ -0,0 +1,17 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute_mut; + +fn main() {} + +// `transmute_mut!` does not support transmuting into a non-reference +// destination type. 
+const DST_NOT_A_REFERENCE: usize = transmute_mut!(&mut 0u8); diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-mut-dst-not-a-reference.stderr b/vendor/zerocopy/tests/ui-nightly/transmute-mut-dst-not-a-reference.stderr new file mode 100644 index 00000000..f1c44581 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-mut-dst-not-a-reference.stderr @@ -0,0 +1,9 @@ +error[E0308]: mismatched types + --> tests/ui-nightly/transmute-mut-dst-not-a-reference.rs:17:36 + | +17 | const DST_NOT_A_REFERENCE: usize = transmute_mut!(&mut 0u8); + | ^^^^^^^^^^^^^^^^^^^^^^^^ expected `usize`, found `&mut _` + | + = note: expected type `usize` + found mutable reference `&mut _` + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-mut-dst-not-frombytes.rs b/vendor/zerocopy/tests/ui-nightly/transmute-mut-dst-not-frombytes.rs new file mode 100644 index 00000000..00390755 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-mut-dst-not-frombytes.rs @@ -0,0 +1,24 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute_mut; + +fn main() {} + +#[derive(zerocopy::FromBytes, zerocopy::IntoBytes, zerocopy::Immutable)] +#[repr(C)] +struct Src; + +#[derive(zerocopy::IntoBytes, zerocopy::Immutable)] +#[repr(C)] +struct Dst; + +// `transmute_mut` requires that the destination type implements `FromBytes` +const DST_NOT_FROM_BYTES: &mut Dst = transmute_mut!(&mut Src); diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-mut-dst-not-frombytes.stderr b/vendor/zerocopy/tests/ui-nightly/transmute-mut-dst-not-frombytes.stderr new file mode 100644 index 00000000..6d117e21 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-mut-dst-not-frombytes.stderr @@ -0,0 +1,31 @@ +error[E0277]: the trait bound `Dst: FromBytes` is not satisfied + --> tests/ui-nightly/transmute-mut-dst-not-frombytes.rs:24:38 + | + 24 | const DST_NOT_FROM_BYTES: &mut Dst = transmute_mut!(&mut Src); + | ^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `FromBytes` is not implemented for `Dst` + --> tests/ui-nightly/transmute-mut-dst-not-frombytes.rs:21:1 + | + 21 | struct Dst; + | ^^^^^^^^^^ + = note: Consider adding `#[derive(FromBytes)]` to `Dst` + = help: the following other types implement trait `FromBytes`: + () + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + AtomicU16 + AtomicU32 + and $N others +note: required by a bound in `Wrap::<&'a mut Src, &'a mut Dst>::transmute_mut` + --> src/util/macro_util.rs + | + | pub fn transmute_mut(self) -> &'a mut Dst + | ------------- required by a bound in this associated function +... 
+ | Dst: FromBytes + IntoBytes, + | ^^^^^^^^^ required by this bound in `Wrap::<&mut Src, &mut Dst>::transmute_mut` + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-mut-dst-not-intobytes.rs b/vendor/zerocopy/tests/ui-nightly/transmute-mut-dst-not-intobytes.rs new file mode 100644 index 00000000..27cf93ae --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-mut-dst-not-intobytes.rs @@ -0,0 +1,24 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute_mut; + +fn main() {} + +#[derive(zerocopy::FromBytes, zerocopy::IntoBytes, zerocopy::Immutable)] +#[repr(C)] +struct Src; + +#[derive(zerocopy::FromBytes, zerocopy::Immutable)] +#[repr(C)] +struct Dst; + +// `transmute_mut` requires that the destination type implements `IntoBytes` +const DST_NOT_AS_BYTES: &mut Dst = transmute_mut!(&mut Src); diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-mut-dst-not-intobytes.stderr b/vendor/zerocopy/tests/ui-nightly/transmute-mut-dst-not-intobytes.stderr new file mode 100644 index 00000000..30bd62ac --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-mut-dst-not-intobytes.stderr @@ -0,0 +1,31 @@ +error[E0277]: the trait bound `Dst: IntoBytes` is not satisfied + --> tests/ui-nightly/transmute-mut-dst-not-intobytes.rs:24:36 + | + 24 | const DST_NOT_AS_BYTES: &mut Dst = transmute_mut!(&mut Src); + | ^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `IntoBytes` is not implemented for `Dst` + --> tests/ui-nightly/transmute-mut-dst-not-intobytes.rs:21:1 + | + 21 | struct Dst; + | ^^^^^^^^^^ + = note: Consider adding `#[derive(IntoBytes)]` to `Dst` + = help: the following other types implement trait `IntoBytes`: + () + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + AtomicU16 + and $N others +note: required by a bound in `Wrap::<&'a mut Src, &'a mut Dst>::transmute_mut` + --> src/util/macro_util.rs + | + | pub fn transmute_mut(self) -> &'a mut Dst + | ------------- required by a bound in this associated function +... + | Dst: FromBytes + IntoBytes, + | ^^^^^^^^^ required by this bound in `Wrap::<&mut Src, &mut Dst>::transmute_mut` + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-mut-dst-unsized.rs b/vendor/zerocopy/tests/ui-nightly/transmute-mut-dst-unsized.rs new file mode 100644 index 00000000..693ccda5 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-mut-dst-unsized.rs @@ -0,0 +1,17 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +extern crate zerocopy; + +use zerocopy::transmute_mut; + +fn main() {} + +// `transmute_mut!` does not support transmuting into an unsized destination +// type. +const DST_UNSIZED: &mut [u8] = transmute_mut!(&mut [0u8; 1]); diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-mut-dst-unsized.stderr b/vendor/zerocopy/tests/ui-nightly/transmute-mut-dst-unsized.stderr new file mode 100644 index 00000000..225498a0 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-mut-dst-unsized.stderr @@ -0,0 +1,16 @@ +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> tests/ui-nightly/transmute-mut-dst-unsized.rs:17:32 + | + 17 | const DST_UNSIZED: &mut [u8] = transmute_mut!(&mut [0u8; 1]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time + | + = help: the trait `Sized` is not implemented for `[u8]` +note: required by a bound in `Wrap::<&'a mut Src, &'a mut Dst>::transmute_mut` + --> src/util/macro_util.rs + | + | impl<'a, Src, Dst> Wrap<&'a mut Src, &'a mut Dst> { + | ^^^ required by this bound in `Wrap::<&mut Src, &mut Dst>::transmute_mut` +... + | pub fn transmute_mut(self) -> &'a mut Dst + | ------------- required by a bound in this associated function + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-mut-illegal-lifetime.rs b/vendor/zerocopy/tests/ui-nightly/transmute-mut-illegal-lifetime.rs new file mode 100644 index 00000000..c31765e4 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-mut-illegal-lifetime.rs @@ -0,0 +1,15 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +fn main() {} + +fn increase_lifetime() { + let mut x = 0u64; + // It is illegal to increase the lifetime scope. + let _: &'static mut u64 = zerocopy::transmute_mut!(&mut x); +} diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-mut-illegal-lifetime.stderr b/vendor/zerocopy/tests/ui-nightly/transmute-mut-illegal-lifetime.stderr new file mode 100644 index 00000000..b826fcc7 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-mut-illegal-lifetime.stderr @@ -0,0 +1,12 @@ +error[E0597]: `x` does not live long enough + --> tests/ui-nightly/transmute-mut-illegal-lifetime.rs:14:56 + | +12 | let mut x = 0u64; + | ----- binding `x` declared here +13 | // It is illegal to increase the lifetime scope. 
+14 | let _: &'static mut u64 = zerocopy::transmute_mut!(&mut x); + | ---------------- ^^^^^^ borrowed value does not live long enough + | | + | type annotation requires that `x` is borrowed for `'static` +15 | } + | - `x` dropped here while still borrowed diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-mut-src-dst-not-references.rs b/vendor/zerocopy/tests/ui-nightly/transmute-mut-src-dst-not-references.rs new file mode 100644 index 00000000..98cc5208 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-mut-src-dst-not-references.rs @@ -0,0 +1,17 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute_mut; + +fn main() {} + +// `transmute_mut!` does not support transmuting between non-reference source +// and destination types. +const SRC_DST_NOT_REFERENCES: &mut usize = transmute_mut!(0usize); diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-mut-src-dst-not-references.stderr b/vendor/zerocopy/tests/ui-nightly/transmute-mut-src-dst-not-references.stderr new file mode 100644 index 00000000..df3cf2db --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-mut-src-dst-not-references.stderr @@ -0,0 +1,15 @@ +error[E0308]: mismatched types + --> tests/ui-nightly/transmute-mut-src-dst-not-references.rs:17:59 + | +17 | const SRC_DST_NOT_REFERENCES: &mut usize = transmute_mut!(0usize); + | ---------------^^^^^^- + | | | + | | expected `&mut _`, found `usize` + | expected due to this + | + = note: expected mutable reference `&mut _` + found type `usize` +help: consider mutably borrowing here + | +17 | const SRC_DST_NOT_REFERENCES: &mut usize = transmute_mut!(&mut 0usize); + | ++++ diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-mut-src-immutable.rs b/vendor/zerocopy/tests/ui-nightly/transmute-mut-src-immutable.rs new file mode 100644 index 00000000..08088d0d --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-mut-src-immutable.rs @@ -0,0 +1,18 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute_mut; + +fn main() {} + +fn ref_src_immutable() { + // `transmute_mut!` requires that its source type be a mutable reference. 
+ let _: &mut u8 = transmute_mut!(&0u8); +} diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-mut-src-immutable.stderr b/vendor/zerocopy/tests/ui-nightly/transmute-mut-src-immutable.stderr new file mode 100644 index 00000000..7b7969d5 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-mut-src-immutable.stderr @@ -0,0 +1,11 @@ +error[E0308]: mismatched types + --> tests/ui-nightly/transmute-mut-src-immutable.rs:17:37 + | +17 | let _: &mut u8 = transmute_mut!(&0u8); + | ---------------^^^^- + | | | + | | types differ in mutability + | expected due to this + | + = note: expected mutable reference `&mut _` + found reference `&u8` diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-mut-src-not-a-reference.rs b/vendor/zerocopy/tests/ui-nightly/transmute-mut-src-not-a-reference.rs new file mode 100644 index 00000000..bf8bc325 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-mut-src-not-a-reference.rs @@ -0,0 +1,17 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute_mut; + +fn main() {} + +// `transmute_mut!` does not support transmuting from a non-reference source +// type. +const SRC_NOT_A_REFERENCE: &mut u8 = transmute_mut!(0usize); diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-mut-src-not-a-reference.stderr b/vendor/zerocopy/tests/ui-nightly/transmute-mut-src-not-a-reference.stderr new file mode 100644 index 00000000..12b7674f --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-mut-src-not-a-reference.stderr @@ -0,0 +1,15 @@ +error[E0308]: mismatched types + --> tests/ui-nightly/transmute-mut-src-not-a-reference.rs:17:53 + | +17 | const SRC_NOT_A_REFERENCE: &mut u8 = transmute_mut!(0usize); + | ---------------^^^^^^- + | | | + | | expected `&mut _`, found `usize` + | expected due to this + | + = note: expected mutable reference `&mut _` + found type `usize` +help: consider mutably borrowing here + | +17 | const SRC_NOT_A_REFERENCE: &mut u8 = transmute_mut!(&mut 0usize); + | ++++ diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-mut-src-not-frombytes.rs b/vendor/zerocopy/tests/ui-nightly/transmute-mut-src-not-frombytes.rs new file mode 100644 index 00000000..0fc6f984 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-mut-src-not-frombytes.rs @@ -0,0 +1,24 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +extern crate zerocopy; + +use zerocopy::transmute_mut; + +fn main() {} + +#[derive(zerocopy::IntoBytes, zerocopy::Immutable)] +#[repr(C)] +struct Src; + +#[derive(zerocopy::FromBytes, zerocopy::IntoBytes, zerocopy::Immutable)] +#[repr(C)] +struct Dst; + +// `transmute_mut` requires that the source type implements `FromBytes` +const SRC_NOT_FROM_BYTES: &mut Dst = transmute_mut!(&mut Src); diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-mut-src-not-frombytes.stderr b/vendor/zerocopy/tests/ui-nightly/transmute-mut-src-not-frombytes.stderr new file mode 100644 index 00000000..fc626273 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-mut-src-not-frombytes.stderr @@ -0,0 +1,31 @@ +error[E0277]: the trait bound `Src: FromBytes` is not satisfied + --> tests/ui-nightly/transmute-mut-src-not-frombytes.rs:24:38 + | + 24 | const SRC_NOT_FROM_BYTES: &mut Dst = transmute_mut!(&mut Src); + | ^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `FromBytes` is not implemented for `Src` + --> tests/ui-nightly/transmute-mut-src-not-frombytes.rs:17:1 + | + 17 | struct Src; + | ^^^^^^^^^^ + = note: Consider adding `#[derive(FromBytes)]` to `Src` + = help: the following other types implement trait `FromBytes`: + () + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + AtomicU16 + AtomicU32 + and $N others +note: required by a bound in `Wrap::<&'a mut Src, &'a mut Dst>::transmute_mut` + --> src/util/macro_util.rs + | + | pub fn transmute_mut(self) -> &'a mut Dst + | ------------- required by a bound in this associated function + | where + | Src: FromBytes + IntoBytes, + | ^^^^^^^^^ required by this bound in `Wrap::<&mut Src, &mut Dst>::transmute_mut` + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-mut-src-not-intobytes.rs b/vendor/zerocopy/tests/ui-nightly/transmute-mut-src-not-intobytes.rs new file mode 100644 index 00000000..518402df --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-mut-src-not-intobytes.rs @@ -0,0 +1,24 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +extern crate zerocopy; + +use zerocopy::transmute_mut; + +fn main() {} + +#[derive(zerocopy::FromBytes, zerocopy::Immutable)] +#[repr(C)] +struct Src; + +#[derive(zerocopy::FromBytes, zerocopy::IntoBytes, zerocopy::Immutable)] +#[repr(C)] +struct Dst; + +// `transmute_mut` requires that the source type implements `IntoBytes` +const SRC_NOT_AS_BYTES: &mut Dst = transmute_mut!(&mut Src); diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-mut-src-not-intobytes.stderr b/vendor/zerocopy/tests/ui-nightly/transmute-mut-src-not-intobytes.stderr new file mode 100644 index 00000000..eef8bd1d --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-mut-src-not-intobytes.stderr @@ -0,0 +1,31 @@ +error[E0277]: the trait bound `Src: IntoBytes` is not satisfied + --> tests/ui-nightly/transmute-mut-src-not-intobytes.rs:24:36 + | + 24 | const SRC_NOT_AS_BYTES: &mut Dst = transmute_mut!(&mut Src); + | ^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `IntoBytes` is not implemented for `Src` + --> tests/ui-nightly/transmute-mut-src-not-intobytes.rs:17:1 + | + 17 | struct Src; + | ^^^^^^^^^^ + = note: Consider adding `#[derive(IntoBytes)]` to `Src` + = help: the following other types implement trait `IntoBytes`: + () + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + AtomicU16 + and $N others +note: required by a bound in `Wrap::<&'a mut Src, &'a mut Dst>::transmute_mut` + --> src/util/macro_util.rs + | + | pub fn transmute_mut(self) -> &'a mut Dst + | ------------- required by a bound in this associated function + | where + | Src: FromBytes + IntoBytes, + | ^^^^^^^^^ required by this bound in `Wrap::<&mut Src, &mut Dst>::transmute_mut` + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-mut-src-unsized.rs b/vendor/zerocopy/tests/ui-nightly/transmute-mut-src-unsized.rs new file mode 100644 index 00000000..473070ae --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-mut-src-unsized.rs @@ -0,0 +1,17 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute_mut; + +fn main() {} + +// `transmute_mut!` does not support transmuting from an unsized source type to +// a sized destination type. 
+const SRC_UNSIZED: &mut [u8; 1] = transmute_mut!(&mut [0u8][..]); diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-mut-src-unsized.stderr b/vendor/zerocopy/tests/ui-nightly/transmute-mut-src-unsized.stderr new file mode 100644 index 00000000..4b99fa6c --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-mut-src-unsized.stderr @@ -0,0 +1,8 @@ +error[E0271]: type mismatch resolving `<[u8; 1] as KnownLayout>::PointerMetadata == usize` + --> tests/ui-nightly/transmute-mut-src-unsized.rs:17:35 + | +17 | const SRC_UNSIZED: &mut [u8; 1] = transmute_mut!(&mut [0u8][..]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected `usize`, found `()` + | + = note: required for `Wrap<&mut [u8], &mut [u8; 1]>` to implement `TransmuteMutDst<'_>` + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-ptr-to-usize.rs b/vendor/zerocopy/tests/ui-nightly/transmute-ptr-to-usize.rs new file mode 100644 index 00000000..5af88593 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-ptr-to-usize.rs @@ -0,0 +1,20 @@ +// Copyright 2022 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute; + +fn main() {} + +// It is unclear whether we can or should support this transmutation, especially +// in a const context. This test ensures that even if such a transmutation +// becomes valid due to the requisite implementations of `FromBytes` being +// added, that we re-examine whether it should specifically be valid in a const +// context. 
+const POINTER_VALUE: usize = transmute!(&0usize as *const usize); diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-ptr-to-usize.stderr b/vendor/zerocopy/tests/ui-nightly/transmute-ptr-to-usize.stderr new file mode 100644 index 00000000..f4b0491b --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-ptr-to-usize.stderr @@ -0,0 +1,24 @@ +error[E0277]: the trait bound `*const usize: IntoBytes` is not satisfied + --> tests/ui-nightly/transmute-ptr-to-usize.rs:20:30 + | +20 | const POINTER_VALUE: usize = transmute!(&0usize as *const usize); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | | + | the trait `IntoBytes` is not implemented for `*const usize` + | required by a bound introduced by this call + | + = note: Consider adding `#[derive(IntoBytes)]` to `*const usize` +help: the trait `IntoBytes` is implemented for `usize` + --> src/impls.rs + | + | unsafe_impl!(usize: Immutable, TryFromBytes, FromZeros, FromBytes, IntoBytes); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +note: required by a bound in `POINTER_VALUE::transmute` + --> tests/ui-nightly/transmute-ptr-to-usize.rs:20:30 + | +20 | const POINTER_VALUE: usize = transmute!(&0usize as *const usize); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | | + | required by a bound in this function + | required by this bound in `transmute` + = note: this error originates in the macro `transmute` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-ref-dst-mutable.rs b/vendor/zerocopy/tests/ui-nightly/transmute-ref-dst-mutable.rs new file mode 100644 index 00000000..fa0e6e4c --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-ref-dst-mutable.rs @@ -0,0 +1,19 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute_ref; + +fn main() {} + +fn ref_dst_mutable() { + // `transmute_ref!` requires that its destination type be an immutable + // reference. 
+ let _: &mut u8 = transmute_ref!(&0u8); +} diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-ref-dst-mutable.stderr b/vendor/zerocopy/tests/ui-nightly/transmute-ref-dst-mutable.stderr new file mode 100644 index 00000000..0cbdd176 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-ref-dst-mutable.stderr @@ -0,0 +1,29 @@ +error[E0308]: mismatched types + --> tests/ui-nightly/transmute-ref-dst-mutable.rs:18:22 + | +18 | let _: &mut u8 = transmute_ref!(&0u8); + | ^^^^^^^^^^^^^^^^^^^^ types differ in mutability + | + = note: expected mutable reference `&mut u8` + found reference `&_` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0308]: mismatched types + --> tests/ui-nightly/transmute-ref-dst-mutable.rs:18:22 + | +18 | let _: &mut u8 = transmute_ref!(&0u8); + | ^^^^^^^^^^^^^^^^^^^^ types differ in mutability + | + = note: expected mutable reference `&mut u8` + found reference `&_` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0308]: mismatched types + --> tests/ui-nightly/transmute-ref-dst-mutable.rs:18:22 + | +18 | let _: &mut u8 = transmute_ref!(&0u8); + | ^^^^^^^^^^^^^^^^^^^^ types differ in mutability + | + = note: expected mutable reference `&mut u8` + found reference `&_` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-ref-dst-not-a-reference.rs b/vendor/zerocopy/tests/ui-nightly/transmute-ref-dst-not-a-reference.rs new file mode 100644 index 00000000..de55f9ac --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-ref-dst-not-a-reference.rs @@ -0,0 +1,17 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute_ref; + +fn main() {} + +// `transmute_ref!` does not support transmuting into a non-reference +// destination type. 
+const DST_NOT_A_REFERENCE: usize = transmute_ref!(&0u8); diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-ref-dst-not-a-reference.stderr b/vendor/zerocopy/tests/ui-nightly/transmute-ref-dst-not-a-reference.stderr new file mode 100644 index 00000000..847d5473 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-ref-dst-not-a-reference.stderr @@ -0,0 +1,29 @@ +error[E0308]: mismatched types + --> tests/ui-nightly/transmute-ref-dst-not-a-reference.rs:17:36 + | +17 | const DST_NOT_A_REFERENCE: usize = transmute_ref!(&0u8); + | ^^^^^^^^^^^^^^^^^^^^ expected `usize`, found `&_` + | + = note: expected type `usize` + found reference `&_` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0308]: mismatched types + --> tests/ui-nightly/transmute-ref-dst-not-a-reference.rs:17:36 + | +17 | const DST_NOT_A_REFERENCE: usize = transmute_ref!(&0u8); + | ^^^^^^^^^^^^^^^^^^^^ expected `usize`, found `&_` + | + = note: expected type `usize` + found reference `&_` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0308]: mismatched types + --> tests/ui-nightly/transmute-ref-dst-not-a-reference.rs:17:36 + | +17 | const DST_NOT_A_REFERENCE: usize = transmute_ref!(&0u8); + | ^^^^^^^^^^^^^^^^^^^^ expected `usize`, found `&_` + | + = note: expected type `usize` + found reference `&_` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-ref-dst-not-frombytes.rs b/vendor/zerocopy/tests/ui-nightly/transmute-ref-dst-not-frombytes.rs new file mode 100644 index 00000000..f7619d34 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-ref-dst-not-frombytes.rs @@ -0,0 +1,23 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::transmute_ref; + +fn main() {} + +#[derive(zerocopy::Immutable)] +#[repr(transparent)] +struct Dst(AU16); + +// `transmute_ref` requires that the destination type implements `FromBytes` +const DST_NOT_FROM_BYTES: &Dst = transmute_ref!(&AU16(0)); diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-ref-dst-not-frombytes.stderr b/vendor/zerocopy/tests/ui-nightly/transmute-ref-dst-not-frombytes.stderr new file mode 100644 index 00000000..8d0fc363 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-ref-dst-not-frombytes.stderr @@ -0,0 +1,31 @@ +error[E0277]: the trait bound `Dst: zerocopy::FromBytes` is not satisfied + --> tests/ui-nightly/transmute-ref-dst-not-frombytes.rs:23:34 + | +23 | const DST_NOT_FROM_BYTES: &Dst = transmute_ref!(&AU16(0)); + | ^^^^^^^^^^^^^^^^^^^^^^^^ + | | + | unsatisfied trait bound + | required by a bound introduced by this call + | +help: the trait `zerocopy::FromBytes` is not implemented for `Dst` + --> tests/ui-nightly/transmute-ref-dst-not-frombytes.rs:20:1 + | +20 | struct Dst(AU16); + | ^^^^^^^^^^ + = note: Consider adding `#[derive(FromBytes)]` to `Dst` + = help: the following other types implement trait `zerocopy::FromBytes`: + () + AU16 + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + AtomicU16 + and $N others +note: required by a bound in `AssertDstIsFromBytes` + --> tests/ui-nightly/transmute-ref-dst-not-frombytes.rs:23:34 + | +23 | const DST_NOT_FROM_BYTES: &Dst = transmute_ref!(&AU16(0)); + | ^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `AssertDstIsFromBytes` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-ref-dst-not-nocell.rs b/vendor/zerocopy/tests/ui-nightly/transmute-ref-dst-not-nocell.rs new file mode 100644 index 00000000..f1f63bfc --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-ref-dst-not-nocell.rs @@ -0,0 +1,23 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::transmute_ref; + +fn main() {} + +#[derive(zerocopy::FromBytes)] +#[repr(transparent)] +struct Dst(AU16); + +// `transmute_ref` requires that the destination type implements `Immutable` +const DST_NOT_IMMUTABLE: &Dst = transmute_ref!(&AU16(0)); diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-ref-dst-not-nocell.stderr b/vendor/zerocopy/tests/ui-nightly/transmute-ref-dst-not-nocell.stderr new file mode 100644 index 00000000..62bec102 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-ref-dst-not-nocell.stderr @@ -0,0 +1,31 @@ +error[E0277]: the trait bound `Dst: zerocopy::Immutable` is not satisfied + --> tests/ui-nightly/transmute-ref-dst-not-nocell.rs:23:33 + | +23 | const DST_NOT_IMMUTABLE: &Dst = transmute_ref!(&AU16(0)); + | ^^^^^^^^^^^^^^^^^^^^^^^^ + | | + | unsatisfied trait bound + | required by a bound introduced by this call + | +help: the trait `zerocopy::Immutable` is not implemented for `Dst` + --> tests/ui-nightly/transmute-ref-dst-not-nocell.rs:20:1 + | +20 | struct Dst(AU16); + | ^^^^^^^^^^ + = note: Consider adding `#[derive(Immutable)]` to `Dst` + = help: the following other types implement trait `zerocopy::Immutable`: + &T + &mut T + () + *const T + *mut T + AU16 + Box<T> + F32<O> + and $N others +note: required by a bound in `AssertDstIsImmutable` + --> tests/ui-nightly/transmute-ref-dst-not-nocell.rs:23:33 + | +23 | const DST_NOT_IMMUTABLE: &Dst = transmute_ref!(&AU16(0)); + | ^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `AssertDstIsImmutable` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-ref-dst-unsized.rs b/vendor/zerocopy/tests/ui-nightly/transmute-ref-dst-unsized.rs new file mode 100644 index 00000000..625f1fac --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-ref-dst-unsized.rs @@ -0,0 +1,17 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute_ref; + +fn main() {} + +// `transmute_ref!` does not support transmuting into an unsized destination +// type. +const DST_UNSIZED: &[u8] = transmute_ref!(&[0u8; 1]); diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-ref-dst-unsized.stderr b/vendor/zerocopy/tests/ui-nightly/transmute-ref-dst-unsized.stderr new file mode 100644 index 00000000..bc7d1b99 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-ref-dst-unsized.stderr @@ -0,0 +1,16 @@ +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> tests/ui-nightly/transmute-ref-dst-unsized.rs:17:28 + | + 17 | const DST_UNSIZED: &[u8] = transmute_ref!(&[0u8; 1]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time + | + = help: the trait `Sized` is not implemented for `[u8]` +note: required by a bound in `Wrap::<&'a Src, &'a Dst>::transmute_ref` + --> src/util/macro_util.rs + | + | impl<'a, Src, Dst> Wrap<&'a Src, &'a Dst> { + | ^^^ required by this bound in `Wrap::<&Src, &Dst>::transmute_ref` +... 
+ | pub const unsafe fn transmute_ref(self) -> &'a Dst { + | ------------- required by a bound in this associated function + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-ref-illegal-lifetime.rs b/vendor/zerocopy/tests/ui-nightly/transmute-ref-illegal-lifetime.rs new file mode 100644 index 00000000..8dd191e6 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-ref-illegal-lifetime.rs @@ -0,0 +1,15 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +fn main() {} + +fn increase_lifetime() { + let x = 0u64; + // It is illegal to increase the lifetime scope. + let _: &'static u64 = zerocopy::transmute_ref!(&x); +} diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-ref-illegal-lifetime.stderr b/vendor/zerocopy/tests/ui-nightly/transmute-ref-illegal-lifetime.stderr new file mode 100644 index 00000000..e16a5576 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-ref-illegal-lifetime.stderr @@ -0,0 +1,12 @@ +error[E0597]: `x` does not live long enough + --> tests/ui-nightly/transmute-ref-illegal-lifetime.rs:14:52 + | +12 | let x = 0u64; + | - binding `x` declared here +13 | // It is illegal to increase the lifetime scope. +14 | let _: &'static u64 = zerocopy::transmute_ref!(&x); + | ------------ ^^ borrowed value does not live long enough + | | + | type annotation requires that `x` is borrowed for `'static` +15 | } + | - `x` dropped here while still borrowed diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-ref-src-dst-not-references.rs b/vendor/zerocopy/tests/ui-nightly/transmute-ref-src-dst-not-references.rs new file mode 100644 index 00000000..114e917b --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-ref-src-dst-not-references.rs @@ -0,0 +1,17 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute_ref; + +fn main() {} + +// `transmute_ref!` does not support transmuting between non-reference source +// and destination types. 
+const SRC_DST_NOT_REFERENCES: usize = transmute_ref!(0usize); diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-ref-src-dst-not-references.stderr b/vendor/zerocopy/tests/ui-nightly/transmute-ref-src-dst-not-references.stderr new file mode 100644 index 00000000..0f1f7fc7 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-ref-src-dst-not-references.stderr @@ -0,0 +1,45 @@ +error[E0308]: mismatched types + --> tests/ui-nightly/transmute-ref-src-dst-not-references.rs:17:54 + | +17 | const SRC_DST_NOT_REFERENCES: usize = transmute_ref!(0usize); + | ---------------^^^^^^- + | | | + | | expected `&_`, found `usize` + | expected due to this + | + = note: expected reference `&_` + found type `usize` +help: consider borrowing here + | +17 | const SRC_DST_NOT_REFERENCES: usize = transmute_ref!(&0usize); + | + + +error[E0308]: mismatched types + --> tests/ui-nightly/transmute-ref-src-dst-not-references.rs:17:39 + | +17 | const SRC_DST_NOT_REFERENCES: usize = transmute_ref!(0usize); + | ^^^^^^^^^^^^^^^^^^^^^^ expected `usize`, found `&_` + | + = note: expected type `usize` + found reference `&_` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0308]: mismatched types + --> tests/ui-nightly/transmute-ref-src-dst-not-references.rs:17:39 + | +17 | const SRC_DST_NOT_REFERENCES: usize = transmute_ref!(0usize); + | ^^^^^^^^^^^^^^^^^^^^^^ expected `usize`, found `&_` + | + = note: expected type `usize` + found reference `&_` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0308]: mismatched types + --> tests/ui-nightly/transmute-ref-src-dst-not-references.rs:17:39 + | +17 | const SRC_DST_NOT_REFERENCES: usize = transmute_ref!(0usize); + | ^^^^^^^^^^^^^^^^^^^^^^ expected `usize`, found `&_` + | + = note: expected type `usize` + found reference `&_` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-ref-src-not-a-reference.rs b/vendor/zerocopy/tests/ui-nightly/transmute-ref-src-not-a-reference.rs new file mode 100644 index 00000000..90661b3e --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-ref-src-not-a-reference.rs @@ -0,0 +1,17 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute_ref; + +fn main() {} + +// `transmute_ref!` does not support transmuting from a non-reference source +// type. 
+const SRC_NOT_A_REFERENCE: &u8 = transmute_ref!(0usize); diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-ref-src-not-a-reference.stderr b/vendor/zerocopy/tests/ui-nightly/transmute-ref-src-not-a-reference.stderr new file mode 100644 index 00000000..be477c6c --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-ref-src-not-a-reference.stderr @@ -0,0 +1,15 @@ +error[E0308]: mismatched types + --> tests/ui-nightly/transmute-ref-src-not-a-reference.rs:17:49 + | +17 | const SRC_NOT_A_REFERENCE: &u8 = transmute_ref!(0usize); + | ---------------^^^^^^- + | | | + | | expected `&_`, found `usize` + | expected due to this + | + = note: expected reference `&_` + found type `usize` +help: consider borrowing here + | +17 | const SRC_NOT_A_REFERENCE: &u8 = transmute_ref!(&0usize); + | + diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-ref-src-not-intobytes.rs b/vendor/zerocopy/tests/ui-nightly/transmute-ref-src-not-intobytes.rs new file mode 100644 index 00000000..a5146fa8 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-ref-src-not-intobytes.rs @@ -0,0 +1,23 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::transmute_ref; + +fn main() {} + +#[derive(zerocopy::Immutable)] +#[repr(transparent)] +struct Src(AU16); + +// `transmute_ref` requires that the source type implements `IntoBytes` +const SRC_NOT_AS_BYTES: &AU16 = transmute_ref!(&Src(AU16(0))); diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-ref-src-not-intobytes.stderr b/vendor/zerocopy/tests/ui-nightly/transmute-ref-src-not-intobytes.stderr new file mode 100644 index 00000000..1e874d65 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-ref-src-not-intobytes.stderr @@ -0,0 +1,60 @@ +error[E0277]: the trait bound `Src: zerocopy::IntoBytes` is not satisfied + --> tests/ui-nightly/transmute-ref-src-not-intobytes.rs:23:33 + | +23 | const SRC_NOT_AS_BYTES: &AU16 = transmute_ref!(&Src(AU16(0))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | | + | unsatisfied trait bound + | required by a bound introduced by this call + | +help: the trait `zerocopy::IntoBytes` is not implemented for `Src` + --> tests/ui-nightly/transmute-ref-src-not-intobytes.rs:20:1 + | +20 | struct Src(AU16); + | ^^^^^^^^^^ + = note: Consider adding `#[derive(IntoBytes)]` to `Src` + = help: the following other types implement trait `zerocopy::IntoBytes`: + () + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + and $N others +note: required by a bound in `AssertSrcIsIntoBytes` + --> tests/ui-nightly/transmute-ref-src-not-intobytes.rs:23:33 + | +23 | const SRC_NOT_AS_BYTES: &AU16 = transmute_ref!(&Src(AU16(0))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `AssertSrcIsIntoBytes` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `Src: zerocopy::IntoBytes` is not satisfied + --> tests/ui-nightly/transmute-ref-src-not-intobytes.rs:23:33 + | +23 | const SRC_NOT_AS_BYTES: &AU16 = transmute_ref!(&Src(AU16(0))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 
unsatisfied trait bound + | +help: the trait `zerocopy::IntoBytes` is not implemented for `Src` + --> tests/ui-nightly/transmute-ref-src-not-intobytes.rs:20:1 + | +20 | struct Src(AU16); + | ^^^^^^^^^^ + = note: Consider adding `#[derive(IntoBytes)]` to `Src` + = help: the following other types implement trait `zerocopy::IntoBytes`: + () + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + and $N others +note: required by a bound in `AssertSrcIsIntoBytes` + --> tests/ui-nightly/transmute-ref-src-not-intobytes.rs:23:33 + | +23 | const SRC_NOT_AS_BYTES: &AU16 = transmute_ref!(&Src(AU16(0))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `AssertSrcIsIntoBytes` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-ref-src-not-nocell.rs b/vendor/zerocopy/tests/ui-nightly/transmute-ref-src-not-nocell.rs new file mode 100644 index 00000000..ee28a98d --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-ref-src-not-nocell.rs @@ -0,0 +1,23 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::transmute_ref; + +fn main() {} + +#[derive(zerocopy::IntoBytes)] +#[repr(transparent)] +struct Src(AU16); + +// `transmute_ref` requires that the source type implements `Immutable` +const SRC_NOT_IMMUTABLE: &AU16 = transmute_ref!(&Src(AU16(0))); diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-ref-src-not-nocell.stderr b/vendor/zerocopy/tests/ui-nightly/transmute-ref-src-not-nocell.stderr new file mode 100644 index 00000000..33240d5a --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-ref-src-not-nocell.stderr @@ -0,0 +1,60 @@ +error[E0277]: the trait bound `Src: zerocopy::Immutable` is not satisfied + --> tests/ui-nightly/transmute-ref-src-not-nocell.rs:23:34 + | +23 | const SRC_NOT_IMMUTABLE: &AU16 = transmute_ref!(&Src(AU16(0))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | | + | unsatisfied trait bound + | required by a bound introduced by this call + | +help: the trait `zerocopy::Immutable` is not implemented for `Src` + --> tests/ui-nightly/transmute-ref-src-not-nocell.rs:20:1 + | +20 | struct Src(AU16); + | ^^^^^^^^^^ + = note: Consider adding `#[derive(Immutable)]` to `Src` + = help: the following other types implement trait `zerocopy::Immutable`: + &T + &mut T + () + *const T + *mut T + AU16 + Box<T> + F32<O> + and $N others +note: required by a bound in `AssertSrcIsImmutable` + --> tests/ui-nightly/transmute-ref-src-not-nocell.rs:23:34 + | +23 | const SRC_NOT_IMMUTABLE: &AU16 = transmute_ref!(&Src(AU16(0))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `AssertSrcIsImmutable` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `Src: zerocopy::Immutable` is not satisfied + --> tests/ui-nightly/transmute-ref-src-not-nocell.rs:23:34 + | +23 | const SRC_NOT_IMMUTABLE: &AU16 = transmute_ref!(&Src(AU16(0))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound 
+ | +help: the trait `zerocopy::Immutable` is not implemented for `Src` + --> tests/ui-nightly/transmute-ref-src-not-nocell.rs:20:1 + | +20 | struct Src(AU16); + | ^^^^^^^^^^ + = note: Consider adding `#[derive(Immutable)]` to `Src` + = help: the following other types implement trait `zerocopy::Immutable`: + &T + &mut T + () + *const T + *mut T + AU16 + Box<T> + F32<O> + and $N others +note: required by a bound in `AssertSrcIsImmutable` + --> tests/ui-nightly/transmute-ref-src-not-nocell.rs:23:34 + | +23 | const SRC_NOT_IMMUTABLE: &AU16 = transmute_ref!(&Src(AU16(0))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `AssertSrcIsImmutable` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-ref-src-unsized.rs b/vendor/zerocopy/tests/ui-nightly/transmute-ref-src-unsized.rs new file mode 100644 index 00000000..14e72b4d --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-ref-src-unsized.rs @@ -0,0 +1,16 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute_ref; + +fn main() {} + +// `transmute_ref!` does not support transmuting from an unsized source type. +const SRC_UNSIZED: &[u8; 1] = transmute_ref!(&[0u8][..]); diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-ref-src-unsized.stderr b/vendor/zerocopy/tests/ui-nightly/transmute-ref-src-unsized.stderr new file mode 100644 index 00000000..cd4d16a0 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-ref-src-unsized.stderr @@ -0,0 +1,8 @@ +error[E0271]: type mismatch resolving `<[u8; 1] as KnownLayout>::PointerMetadata == usize` + --> tests/ui-nightly/transmute-ref-src-unsized.rs:16:31 + | +16 | const SRC_UNSIZED: &[u8; 1] = transmute_ref!(&[0u8][..]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ expected `usize`, found `()` + | + = note: required for `Wrap<&[u8], &[u8; 1]>` to implement `TransmuteRefDst<'_>` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-size-decrease.rs b/vendor/zerocopy/tests/ui-nightly/transmute-size-decrease.rs new file mode 100644 index 00000000..567b6733 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-size-decrease.rs @@ -0,0 +1,20 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::transmute; + +fn main() {} + +// Although this is not a soundness requirement, we currently require that the +// size of the destination type is not smaller than the size of the source type. 
+const DECREASE_SIZE: u8 = transmute!(AU16(0)); diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-size-decrease.stderr b/vendor/zerocopy/tests/ui-nightly/transmute-size-decrease.stderr new file mode 100644 index 00000000..c5345e37 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-size-decrease.stderr @@ -0,0 +1,17 @@ +error[E0080]: transmuting from 2-byte type to 1-byte type: `AU16` -> `u8` + --> tests/ui-nightly/transmute-size-decrease.rs:20:27 + | +20 | const DECREASE_SIZE: u8 = transmute!(AU16(0)); + | ^^^^^^^^^^^^^^^^^^^ evaluation of `DECREASE_SIZE` failed here + | + = note: this error originates in the macro `transmute` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0512]: cannot transmute between types of different sizes, or dependently-sized types + --> tests/ui-nightly/transmute-size-decrease.rs:20:27 + | +20 | const DECREASE_SIZE: u8 = transmute!(AU16(0)); + | ^^^^^^^^^^^^^^^^^^^ + | + = note: source type: `AU16` (16 bits) + = note: target type: `u8` (8 bits) + = note: this error originates in the macro `transmute` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-size-increase-allow-shrink.rs b/vendor/zerocopy/tests/ui-nightly/transmute-size-increase-allow-shrink.rs new file mode 100644 index 00000000..4922373f --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-size-increase-allow-shrink.rs @@ -0,0 +1,20 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::transmute; + +fn main() {} + +// `transmute!` does not support transmuting from a smaller type to a larger +// one. 
+const INCREASE_SIZE: AU16 = transmute!(#![allow(shrink)] 0u8); diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-size-increase-allow-shrink.stderr b/vendor/zerocopy/tests/ui-nightly/transmute-size-increase-allow-shrink.stderr new file mode 100644 index 00000000..4602bdb6 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-size-increase-allow-shrink.stderr @@ -0,0 +1,17 @@ +error[E0080]: transmuting from 1-byte type to 2-byte type: `u8` -> `Transmute<u8, AU16>` + --> tests/ui-nightly/transmute-size-increase-allow-shrink.rs:20:29 + | +20 | const INCREASE_SIZE: AU16 = transmute!(#![allow(shrink)] 0u8); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ evaluation of `INCREASE_SIZE` failed here + | + = note: this error originates in the macro `transmute` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0512]: cannot transmute between types of different sizes, or dependently-sized types + --> tests/ui-nightly/transmute-size-increase-allow-shrink.rs:20:29 + | +20 | const INCREASE_SIZE: AU16 = transmute!(#![allow(shrink)] 0u8); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: source type: `u8` (8 bits) + = note: target type: `Transmute<u8, AU16>` (16 bits) + = note: this error originates in the macro `transmute` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-size-increase.rs b/vendor/zerocopy/tests/ui-nightly/transmute-size-increase.rs new file mode 100644 index 00000000..35dc780e --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-size-increase.rs @@ -0,0 +1,20 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::transmute; + +fn main() {} + +// `transmute!` does not support transmuting from a smaller type to a larger +// one. 
+const INCREASE_SIZE: AU16 = transmute!(0u8); diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-size-increase.stderr b/vendor/zerocopy/tests/ui-nightly/transmute-size-increase.stderr new file mode 100644 index 00000000..5758a095 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-size-increase.stderr @@ -0,0 +1,17 @@ +error[E0080]: transmuting from 1-byte type to 2-byte type: `u8` -> `AU16` + --> tests/ui-nightly/transmute-size-increase.rs:20:29 + | +20 | const INCREASE_SIZE: AU16 = transmute!(0u8); + | ^^^^^^^^^^^^^^^ evaluation of `INCREASE_SIZE` failed here + | + = note: this error originates in the macro `transmute` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0512]: cannot transmute between types of different sizes, or dependently-sized types + --> tests/ui-nightly/transmute-size-increase.rs:20:29 + | +20 | const INCREASE_SIZE: AU16 = transmute!(0u8); + | ^^^^^^^^^^^^^^^ + | + = note: source type: `u8` (8 bits) + = note: target type: `AU16` (16 bits) + = note: this error originates in the macro `transmute` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-src-not-intobytes.rs b/vendor/zerocopy/tests/ui-nightly/transmute-src-not-intobytes.rs new file mode 100644 index 00000000..73be6c1b --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-src-not-intobytes.rs @@ -0,0 +1,19 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::{NotZerocopy, AU16}; +use zerocopy::transmute; + +fn main() {} + +// `transmute` requires that the source type implements `IntoBytes` +const SRC_NOT_AS_BYTES: AU16 = transmute!(NotZerocopy(AU16(0))); diff --git a/vendor/zerocopy/tests/ui-nightly/transmute-src-not-intobytes.stderr b/vendor/zerocopy/tests/ui-nightly/transmute-src-not-intobytes.stderr new file mode 100644 index 00000000..aae39238 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/transmute-src-not-intobytes.stderr @@ -0,0 +1,34 @@ +error[E0277]: the trait bound `NotZerocopy<AU16>: zerocopy::IntoBytes` is not satisfied + --> tests/ui-nightly/transmute-src-not-intobytes.rs:19:32 + | +19 | const SRC_NOT_AS_BYTES: AU16 = transmute!(NotZerocopy(AU16(0))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | | + | unsatisfied trait bound + | required by a bound introduced by this call + | +help: the trait `zerocopy::IntoBytes` is not implemented for `NotZerocopy<AU16>` + --> tests/ui-nightly/../../zerocopy-derive/tests/include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(IntoBytes)]` to `NotZerocopy<AU16>` + = help: the following other types implement trait `zerocopy::IntoBytes`: + () + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + and $N others +note: required by a bound in `SRC_NOT_AS_BYTES::transmute` + --> tests/ui-nightly/transmute-src-not-intobytes.rs:19:32 + | +19 | const SRC_NOT_AS_BYTES: AU16 = transmute!(NotZerocopy(AU16(0))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | | + | required by a bound in this function + | required by this bound in `transmute` + = note: this error originates in the macro `transmute` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-nightly/try_transmute-dst-not-tryfrombytes.rs b/vendor/zerocopy/tests/ui-nightly/try_transmute-dst-not-tryfrombytes.rs new file mode 100644 index 00000000..0658bccf --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/try_transmute-dst-not-tryfrombytes.rs @@ -0,0 +1,18 @@ +// Copyright 2022 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::{NotZerocopy, AU16}; +use zerocopy::try_transmute; + +fn main() { + let dst_not_try_from_bytes: Result<NotZerocopy, _> = try_transmute!(AU16(0)); +} diff --git a/vendor/zerocopy/tests/ui-nightly/try_transmute-dst-not-tryfrombytes.stderr b/vendor/zerocopy/tests/ui-nightly/try_transmute-dst-not-tryfrombytes.stderr new file mode 100644 index 00000000..86ded303 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/try_transmute-dst-not-tryfrombytes.stderr @@ -0,0 +1,88 @@ +error[E0277]: the trait bound `NotZerocopy: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-nightly/try_transmute-dst-not-tryfrombytes.rs:17:33 + | + 17 | let dst_not_try_from_bytes: Result<NotZerocopy, _> = try_transmute!(AU16(0)); + | ^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::TryFromBytes` is not implemented for `NotZerocopy` + --> tests/ui-nightly/../../zerocopy-derive/tests/include.rs + | + 48 | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(TryFromBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::TryFromBytes`: + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + and $N others +note: required by a bound in `ValidityError` + --> src/error.rs + | + | pub struct ValidityError<Src, Dst: ?Sized + TryFromBytes> { + | ^^^^^^^^^^^^ required by this bound in `ValidityError` + +error[E0277]: the trait bound `NotZerocopy: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-nightly/try_transmute-dst-not-tryfrombytes.rs:17:58 + | + 17 | let dst_not_try_from_bytes: Result<NotZerocopy, _> = try_transmute!(AU16(0)); + | ^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::TryFromBytes` is not implemented for `NotZerocopy` + --> tests/ui-nightly/../../zerocopy-derive/tests/include.rs + | + 48 | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(TryFromBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::TryFromBytes`: + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + and $N others +note: required by a bound in `try_transmute` + --> src/util/macro_util.rs + | + | pub fn try_transmute<Src, Dst>(src: Src) -> Result<Dst, ValidityError<Src, Dst>> + | ------------- required by a bound in this function +... 
+ | Dst: TryFromBytes, + | ^^^^^^^^^^^^ required by this bound in `try_transmute` + = note: this error originates in the macro `try_transmute` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-nightly/try_transmute-dst-not-tryfrombytes.rs:17:58 + | + 17 | let dst_not_try_from_bytes: Result<NotZerocopy, _> = try_transmute!(AU16(0)); + | ^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::TryFromBytes` is not implemented for `NotZerocopy` + --> tests/ui-nightly/../../zerocopy-derive/tests/include.rs + | + 48 | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(TryFromBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::TryFromBytes`: + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + and $N others +note: required by a bound in `ValidityError` + --> src/error.rs + | + | pub struct ValidityError<Src, Dst: ?Sized + TryFromBytes> { + | ^^^^^^^^^^^^ required by this bound in `ValidityError` + = note: this error originates in the macro `try_transmute` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-nightly/try_transmute-size-decrease.rs b/vendor/zerocopy/tests/ui-nightly/try_transmute-size-decrease.rs new file mode 100644 index 00000000..097623c8 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/try_transmute-size-decrease.rs @@ -0,0 +1,20 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::try_transmute; + +// Although this is not a soundness requirement, we currently require that the +// size of the destination type is not smaller than the size of the source type. 
+fn main() { + let _decrease_size: Result<u8, _> = try_transmute!(AU16(0)); +} diff --git a/vendor/zerocopy/tests/ui-nightly/try_transmute-size-decrease.stderr b/vendor/zerocopy/tests/ui-nightly/try_transmute-size-decrease.stderr new file mode 100644 index 00000000..4cb92372 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/try_transmute-size-decrease.stderr @@ -0,0 +1,9 @@ +error[E0512]: cannot transmute between types of different sizes, or dependently-sized types + --> tests/ui-nightly/try_transmute-size-decrease.rs:19:41 + | +19 | let _decrease_size: Result<u8, _> = try_transmute!(AU16(0)); + | ^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: source type: `AU16` (16 bits) + = note: target type: `u8` (8 bits) + = note: this error originates in the macro `try_transmute` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-nightly/try_transmute-size-increase.rs b/vendor/zerocopy/tests/ui-nightly/try_transmute-size-increase.rs new file mode 100644 index 00000000..4b40a566 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/try_transmute-size-increase.rs @@ -0,0 +1,20 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::try_transmute; + +// `try_transmute!` does not support transmuting from a smaller type to a larger +// one. +fn main() { + let _increase_size: Result<AU16, _> = try_transmute!(0u8); +} diff --git a/vendor/zerocopy/tests/ui-nightly/try_transmute-size-increase.stderr b/vendor/zerocopy/tests/ui-nightly/try_transmute-size-increase.stderr new file mode 100644 index 00000000..26d28766 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/try_transmute-size-increase.stderr @@ -0,0 +1,9 @@ +error[E0512]: cannot transmute between types of different sizes, or dependently-sized types + --> tests/ui-nightly/try_transmute-size-increase.rs:19:43 + | +19 | let _increase_size: Result<AU16, _> = try_transmute!(0u8); + | ^^^^^^^^^^^^^^^^^^^ + | + = note: source type: `u8` (8 bits) + = note: target type: `AU16` (16 bits) + = note: this error originates in the macro `try_transmute` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-nightly/try_transmute-src-not-intobytes.rs b/vendor/zerocopy/tests/ui-nightly/try_transmute-src-not-intobytes.rs new file mode 100644 index 00000000..c2a7b417 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/try_transmute-src-not-intobytes.rs @@ -0,0 +1,19 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::{NotZerocopy, AU16}; +use zerocopy::try_transmute; + +fn main() { + // `try_transmute` requires that the source type implements `IntoBytes` + let src_not_into_bytes: Result<AU16, _> = try_transmute!(NotZerocopy(AU16(0))); +} diff --git a/vendor/zerocopy/tests/ui-nightly/try_transmute-src-not-intobytes.stderr b/vendor/zerocopy/tests/ui-nightly/try_transmute-src-not-intobytes.stderr new file mode 100644 index 00000000..6c29ac4e --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/try_transmute-src-not-intobytes.stderr @@ -0,0 +1,31 @@ +error[E0277]: the trait bound `NotZerocopy<AU16>: zerocopy::IntoBytes` is not satisfied + --> tests/ui-nightly/try_transmute-src-not-intobytes.rs:18:47 + | + 18 | let src_not_into_bytes: Result<AU16, _> = try_transmute!(NotZerocopy(AU16(0))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::IntoBytes` is not implemented for `NotZerocopy<AU16>` + --> tests/ui-nightly/../../zerocopy-derive/tests/include.rs + | + 48 | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(IntoBytes)]` to `NotZerocopy<AU16>` + = help: the following other types implement trait `zerocopy::IntoBytes`: + () + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + and $N others +note: required by a bound in `try_transmute` + --> src/util/macro_util.rs + | + | pub fn try_transmute<Src, Dst>(src: Src) -> Result<Dst, ValidityError<Src, Dst>> + | ------------- required by a bound in this function + | where + | Src: IntoBytes, + | ^^^^^^^^^ required by this bound in `try_transmute` + = note: this error originates in the macro `try_transmute` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-nightly/try_transmute_mut-alignment-increase.rs b/vendor/zerocopy/tests/ui-nightly/try_transmute_mut-alignment-increase.rs new file mode 100644 index 00000000..d9c9a9dc --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/try_transmute_mut-alignment-increase.rs @@ -0,0 +1,21 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::try_transmute_mut; + +// `try_transmute_mut!` does not support transmuting from a type of smaller +// alignment to one of larger alignment. 
+fn main() { + let src = &mut [0u8; 2]; + let _increase_size: Result<&mut AU16, _> = try_transmute_mut!(src); +} diff --git a/vendor/zerocopy/tests/ui-nightly/try_transmute_mut-alignment-increase.stderr b/vendor/zerocopy/tests/ui-nightly/try_transmute_mut-alignment-increase.stderr new file mode 100644 index 00000000..50a3f0c9 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/try_transmute_mut-alignment-increase.stderr @@ -0,0 +1,9 @@ +error[E0512]: cannot transmute between types of different sizes, or dependently-sized types + --> tests/ui-nightly/try_transmute_mut-alignment-increase.rs:20:48 + | +20 | let _increase_size: Result<&mut AU16, _> = try_transmute_mut!(src); + | ^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: source type: `AlignOf<[u8; 2]>` (8 bits) + = note: target type: `MaxAlignsOf<[u8; 2], AU16>` (16 bits) + = note: this error originates in the macro `$crate::assert_align_gt_eq` which comes from the expansion of the macro `try_transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-nightly/try_transmute_mut-dst-not-tryfrombytes.rs b/vendor/zerocopy/tests/ui-nightly/try_transmute_mut-dst-not-tryfrombytes.rs new file mode 100644 index 00000000..89096cd1 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/try_transmute_mut-dst-not-tryfrombytes.rs @@ -0,0 +1,21 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::{NotZerocopy, AU16}; +use zerocopy::try_transmute_mut; + +fn main() { + // `try_transmute_mut` requires that the destination type implements + // `IntoBytes` + let src = &mut AU16(0); + let dst_not_try_from_bytes: Result<&mut NotZerocopy, _> = try_transmute_mut!(src); +} diff --git a/vendor/zerocopy/tests/ui-nightly/try_transmute_mut-dst-not-tryfrombytes.stderr b/vendor/zerocopy/tests/ui-nightly/try_transmute_mut-dst-not-tryfrombytes.stderr new file mode 100644 index 00000000..390c6f2c --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/try_transmute_mut-dst-not-tryfrombytes.stderr @@ -0,0 +1,120 @@ +error[E0277]: the trait bound `NotZerocopy: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-nightly/try_transmute_mut-dst-not-tryfrombytes.rs:20:33 + | + 20 | let dst_not_try_from_bytes: Result<&mut NotZerocopy, _> = try_transmute_mut!(src); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::TryFromBytes` is not implemented for `NotZerocopy` + --> tests/ui-nightly/../../zerocopy-derive/tests/include.rs + | + 48 | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(TryFromBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::TryFromBytes`: + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + and $N others +note: required by a bound in `ValidityError` + --> src/error.rs + | + | pub struct ValidityError<Src, Dst: ?Sized + TryFromBytes> { + | ^^^^^^^^^^^^ required by this bound in `ValidityError` + +error[E0277]: the trait bound `NotZerocopy: zerocopy::TryFromBytes` is not satisfied + --> 
tests/ui-nightly/try_transmute_mut-dst-not-tryfrombytes.rs:20:63 + | + 20 | let dst_not_try_from_bytes: Result<&mut NotZerocopy, _> = try_transmute_mut!(src); + | ^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::TryFromBytes` is not implemented for `NotZerocopy` + --> tests/ui-nightly/../../zerocopy-derive/tests/include.rs + | + 48 | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(TryFromBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::TryFromBytes`: + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + and $N others +note: required by a bound in `try_transmute_mut` + --> src/util/macro_util.rs + | + | pub fn try_transmute_mut<Src, Dst>(src: &mut Src) -> Result<&mut Dst, ValidityError<&mut Src, Dst>> + | ----------------- required by a bound in this function +... + | Dst: TryFromBytes + IntoBytes, + | ^^^^^^^^^^^^ required by this bound in `try_transmute_mut` + = note: this error originates in the macro `try_transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy: zerocopy::IntoBytes` is not satisfied + --> tests/ui-nightly/try_transmute_mut-dst-not-tryfrombytes.rs:20:63 + | + 20 | let dst_not_try_from_bytes: Result<&mut NotZerocopy, _> = try_transmute_mut!(src); + | ^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::IntoBytes` is not implemented for `NotZerocopy` + --> tests/ui-nightly/../../zerocopy-derive/tests/include.rs + | + 48 | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(IntoBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::IntoBytes`: + () + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + and $N others +note: required by a bound in `try_transmute_mut` + --> src/util/macro_util.rs + | + | pub fn try_transmute_mut<Src, Dst>(src: &mut Src) -> Result<&mut Dst, ValidityError<&mut Src, Dst>> + | ----------------- required by a bound in this function +... 
+ | Dst: TryFromBytes + IntoBytes, + | ^^^^^^^^^ required by this bound in `try_transmute_mut` + = note: this error originates in the macro `try_transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-nightly/try_transmute_mut-dst-not-tryfrombytes.rs:20:63 + | + 20 | let dst_not_try_from_bytes: Result<&mut NotZerocopy, _> = try_transmute_mut!(src); + | ^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::TryFromBytes` is not implemented for `NotZerocopy` + --> tests/ui-nightly/../../zerocopy-derive/tests/include.rs + | + 48 | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(TryFromBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::TryFromBytes`: + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + and $N others +note: required by a bound in `ValidityError` + --> src/error.rs + | + | pub struct ValidityError<Src, Dst: ?Sized + TryFromBytes> { + | ^^^^^^^^^^^^ required by this bound in `ValidityError` + = note: this error originates in the macro `try_transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-nightly/try_transmute_mut-size-decrease.rs b/vendor/zerocopy/tests/ui-nightly/try_transmute_mut-size-decrease.rs new file mode 100644 index 00000000..3d522efd --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/try_transmute_mut-size-decrease.rs @@ -0,0 +1,21 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::try_transmute_mut; + +// Although this is not a soundness requirement, we currently require that the +// size of the destination type is not smaller than the size of the source type. 
+fn main() { + let src = &mut AU16(0); + let _decrease_size: Result<&mut u8, _> = try_transmute_mut!(src); +} diff --git a/vendor/zerocopy/tests/ui-nightly/try_transmute_mut-size-decrease.stderr b/vendor/zerocopy/tests/ui-nightly/try_transmute_mut-size-decrease.stderr new file mode 100644 index 00000000..fe0494ae --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/try_transmute_mut-size-decrease.stderr @@ -0,0 +1,9 @@ +error[E0512]: cannot transmute between types of different sizes, or dependently-sized types + --> tests/ui-nightly/try_transmute_mut-size-decrease.rs:20:46 + | +20 | let _decrease_size: Result<&mut u8, _> = try_transmute_mut!(src); + | ^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: source type: `AU16` (16 bits) + = note: target type: `u8` (8 bits) + = note: this error originates in the macro `$crate::assert_size_eq` which comes from the expansion of the macro `try_transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-nightly/try_transmute_mut-size-increase.rs b/vendor/zerocopy/tests/ui-nightly/try_transmute_mut-size-increase.rs new file mode 100644 index 00000000..526d465c --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/try_transmute_mut-size-increase.rs @@ -0,0 +1,21 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::try_transmute_mut; + +// `try_transmute_mut!` does not support transmuting from a smaller type to a +// larger one. 
+fn main() { + let src = &mut 0u8; + let _increase_size: Result<&mut [u8; 2], _> = try_transmute_mut!(src); +} diff --git a/vendor/zerocopy/tests/ui-nightly/try_transmute_mut-size-increase.stderr b/vendor/zerocopy/tests/ui-nightly/try_transmute_mut-size-increase.stderr new file mode 100644 index 00000000..1e33cef8 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/try_transmute_mut-size-increase.stderr @@ -0,0 +1,17 @@ +warning: unused import: `util::AU16` + --> tests/ui-nightly/try_transmute_mut-size-increase.rs:13:5 + | +13 | use util::AU16; + | ^^^^^^^^^^ + | + = note: `#[warn(unused_imports)]` (part of `#[warn(unused)]`) on by default + +error[E0512]: cannot transmute between types of different sizes, or dependently-sized types + --> tests/ui-nightly/try_transmute_mut-size-increase.rs:20:51 + | +20 | let _increase_size: Result<&mut [u8; 2], _> = try_transmute_mut!(src); + | ^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: source type: `u8` (8 bits) + = note: target type: `[u8; 2]` (16 bits) + = note: this error originates in the macro `$crate::assert_size_eq` which comes from the expansion of the macro `try_transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-nightly/try_transmute_mut-src-not-frombytes.rs b/vendor/zerocopy/tests/ui-nightly/try_transmute_mut-src-not-frombytes.rs new file mode 100644 index 00000000..12b2e0d3 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/try_transmute_mut-src-not-frombytes.rs @@ -0,0 +1,24 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +extern crate zerocopy; + +use zerocopy::transmute_mut; + +#[derive(zerocopy::IntoBytes)] +#[repr(C)] +struct Src; + +#[derive(zerocopy::TryFromBytes)] +#[repr(C)] +struct Dst; + +fn main() { + // `try_transmute_mut` requires that the source type implements `FromBytes` + let src_not_from_bytes: &mut Dst = transmute_mut!(&mut Src); +} diff --git a/vendor/zerocopy/tests/ui-nightly/try_transmute_mut-src-not-frombytes.stderr b/vendor/zerocopy/tests/ui-nightly/try_transmute_mut-src-not-frombytes.stderr new file mode 100644 index 00000000..1cb57173 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/try_transmute_mut-src-not-frombytes.stderr @@ -0,0 +1,95 @@ +error[E0277]: the trait bound `Src: FromBytes` is not satisfied + --> tests/ui-nightly/try_transmute_mut-src-not-frombytes.rs:23:40 + | + 23 | let src_not_from_bytes: &mut Dst = transmute_mut!(&mut Src); + | ^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `FromBytes` is not implemented for `Src` + --> tests/ui-nightly/try_transmute_mut-src-not-frombytes.rs:15:1 + | + 15 | struct Src; + | ^^^^^^^^^^ + = note: Consider adding `#[derive(FromBytes)]` to `Src` + = help: the following other types implement trait `FromBytes`: + () + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + AtomicU16 + AtomicU32 + and $N others +note: required by a bound in `Wrap::<&'a mut Src, &'a mut Dst>::transmute_mut` + --> src/util/macro_util.rs + | + | pub fn transmute_mut(self) -> &'a mut Dst + | ------------- required by a bound in this associated function + | where + | Src: FromBytes + IntoBytes, + | ^^^^^^^^^ required by this bound in `Wrap::<&mut Src, &mut Dst>::transmute_mut` + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `Dst: FromBytes` is not satisfied + --> tests/ui-nightly/try_transmute_mut-src-not-frombytes.rs:23:40 + | + 23 | let src_not_from_bytes: &mut Dst = transmute_mut!(&mut Src); + | ^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `FromBytes` is not implemented for `Dst` + --> tests/ui-nightly/try_transmute_mut-src-not-frombytes.rs:19:1 + | + 19 | struct Dst; + | ^^^^^^^^^^ + = note: Consider adding `#[derive(FromBytes)]` to `Dst` + = help: the following other types implement trait `FromBytes`: + () + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + AtomicU16 + AtomicU32 + and $N others +note: required by a bound in `Wrap::<&'a mut Src, &'a mut Dst>::transmute_mut` + --> src/util/macro_util.rs + | + | pub fn transmute_mut(self) -> &'a mut Dst + | ------------- required by a bound in this associated function +... 
+ | Dst: FromBytes + IntoBytes, + | ^^^^^^^^^ required by this bound in `Wrap::<&mut Src, &mut Dst>::transmute_mut` + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `Dst: IntoBytes` is not satisfied + --> tests/ui-nightly/try_transmute_mut-src-not-frombytes.rs:23:40 + | + 23 | let src_not_from_bytes: &mut Dst = transmute_mut!(&mut Src); + | ^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `IntoBytes` is not implemented for `Dst` + --> tests/ui-nightly/try_transmute_mut-src-not-frombytes.rs:19:1 + | + 19 | struct Dst; + | ^^^^^^^^^^ + = note: Consider adding `#[derive(IntoBytes)]` to `Dst` + = help: the following other types implement trait `IntoBytes`: + () + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + AtomicU16 + and $N others +note: required by a bound in `Wrap::<&'a mut Src, &'a mut Dst>::transmute_mut` + --> src/util/macro_util.rs + | + | pub fn transmute_mut(self) -> &'a mut Dst + | ------------- required by a bound in this associated function +... + | Dst: FromBytes + IntoBytes, + | ^^^^^^^^^ required by this bound in `Wrap::<&mut Src, &mut Dst>::transmute_mut` + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-nightly/try_transmute_mut-src-not-intobytes.rs b/vendor/zerocopy/tests/ui-nightly/try_transmute_mut-src-not-intobytes.rs new file mode 100644 index 00000000..fa3b7032 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/try_transmute_mut-src-not-intobytes.rs @@ -0,0 +1,24 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +extern crate zerocopy; + +use zerocopy::transmute_mut; + +#[derive(zerocopy::FromBytes)] +#[repr(C)] +struct Src; + +#[derive(zerocopy::TryFromBytes)] +#[repr(C)] +struct Dst; + +fn main() { + // `try_transmute_mut` requires that the source type implements `IntoBytes` + let src_not_from_bytes: &mut Dst = transmute_mut!(&mut Src); +} diff --git a/vendor/zerocopy/tests/ui-nightly/try_transmute_mut-src-not-intobytes.stderr b/vendor/zerocopy/tests/ui-nightly/try_transmute_mut-src-not-intobytes.stderr new file mode 100644 index 00000000..4c5b59fb --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/try_transmute_mut-src-not-intobytes.stderr @@ -0,0 +1,95 @@ +error[E0277]: the trait bound `Src: IntoBytes` is not satisfied + --> tests/ui-nightly/try_transmute_mut-src-not-intobytes.rs:23:40 + | + 23 | let src_not_from_bytes: &mut Dst = transmute_mut!(&mut Src); + | ^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `IntoBytes` is not implemented for `Src` + --> tests/ui-nightly/try_transmute_mut-src-not-intobytes.rs:15:1 + | + 15 | struct Src; + | ^^^^^^^^^^ + = note: Consider adding `#[derive(IntoBytes)]` to `Src` + = help: the following other types implement trait `IntoBytes`: + () + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + AtomicU16 + and $N others +note: required by a bound in `Wrap::<&'a mut Src, &'a mut Dst>::transmute_mut` + --> src/util/macro_util.rs + | + | pub fn transmute_mut(self) -> &'a mut Dst + | ------------- required by a bound in this associated function + | where + | Src: FromBytes + IntoBytes, + | ^^^^^^^^^ required by this bound in `Wrap::<&mut Src, &mut Dst>::transmute_mut` + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `Dst: FromBytes` is not satisfied + --> tests/ui-nightly/try_transmute_mut-src-not-intobytes.rs:23:40 + | + 23 | let src_not_from_bytes: &mut Dst = transmute_mut!(&mut Src); + | ^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `FromBytes` is not implemented for `Dst` + --> tests/ui-nightly/try_transmute_mut-src-not-intobytes.rs:19:1 + | + 19 | struct Dst; + | ^^^^^^^^^^ + = note: Consider adding `#[derive(FromBytes)]` to `Dst` + = help: the following other types implement trait `FromBytes`: + () + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + AtomicU16 + AtomicU32 + and $N others +note: required by a bound in `Wrap::<&'a mut Src, &'a mut Dst>::transmute_mut` + --> src/util/macro_util.rs + | + | pub fn transmute_mut(self) -> &'a mut Dst + | ------------- required by a bound in this associated function +... 
+ | Dst: FromBytes + IntoBytes, + | ^^^^^^^^^ required by this bound in `Wrap::<&mut Src, &mut Dst>::transmute_mut` + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `Dst: IntoBytes` is not satisfied + --> tests/ui-nightly/try_transmute_mut-src-not-intobytes.rs:23:40 + | + 23 | let src_not_from_bytes: &mut Dst = transmute_mut!(&mut Src); + | ^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `IntoBytes` is not implemented for `Dst` + --> tests/ui-nightly/try_transmute_mut-src-not-intobytes.rs:19:1 + | + 19 | struct Dst; + | ^^^^^^^^^^ + = note: Consider adding `#[derive(IntoBytes)]` to `Dst` + = help: the following other types implement trait `IntoBytes`: + () + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + AtomicU16 + and $N others +note: required by a bound in `Wrap::<&'a mut Src, &'a mut Dst>::transmute_mut` + --> src/util/macro_util.rs + | + | pub fn transmute_mut(self) -> &'a mut Dst + | ------------- required by a bound in this associated function +... + | Dst: FromBytes + IntoBytes, + | ^^^^^^^^^ required by this bound in `Wrap::<&mut Src, &mut Dst>::transmute_mut` + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-nightly/try_transmute_ref-alignment-increase.rs b/vendor/zerocopy/tests/ui-nightly/try_transmute_ref-alignment-increase.rs new file mode 100644 index 00000000..ad1062fb --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/try_transmute_ref-alignment-increase.rs @@ -0,0 +1,20 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::try_transmute_ref; + +// `try_transmute_ref!` does not support transmuting from a type of smaller +// alignment to one of larger alignment. 
+fn main() { + let _increase_size: Result<&AU16, _> = try_transmute_ref!(&[0u8; 2]); +} diff --git a/vendor/zerocopy/tests/ui-nightly/try_transmute_ref-alignment-increase.stderr b/vendor/zerocopy/tests/ui-nightly/try_transmute_ref-alignment-increase.stderr new file mode 100644 index 00000000..790a0e4e --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/try_transmute_ref-alignment-increase.stderr @@ -0,0 +1,9 @@ +error[E0512]: cannot transmute between types of different sizes, or dependently-sized types + --> tests/ui-nightly/try_transmute_ref-alignment-increase.rs:19:44 + | +19 | let _increase_size: Result<&AU16, _> = try_transmute_ref!(&[0u8; 2]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: source type: `AlignOf<[u8; 2]>` (8 bits) + = note: target type: `MaxAlignsOf<[u8; 2], AU16>` (16 bits) + = note: this error originates in the macro `$crate::assert_align_gt_eq` which comes from the expansion of the macro `try_transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-nightly/try_transmute_ref-dst-mutable.rs b/vendor/zerocopy/tests/ui-nightly/try_transmute_ref-dst-mutable.rs new file mode 100644 index 00000000..e27a1294 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/try_transmute_ref-dst-mutable.rs @@ -0,0 +1,19 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::try_transmute_ref; + +fn main() {} + +fn ref_dst_mutable() { + // `try_transmute_ref!` requires that its destination type be an immutable + // reference. 
+ let _: Result<&mut u8, _> = try_transmute_ref!(&0u8); +} diff --git a/vendor/zerocopy/tests/ui-nightly/try_transmute_ref-dst-mutable.stderr b/vendor/zerocopy/tests/ui-nightly/try_transmute_ref-dst-mutable.stderr new file mode 100644 index 00000000..29d51281 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/try_transmute_ref-dst-mutable.stderr @@ -0,0 +1,32 @@ +error[E0308]: mismatched types + --> tests/ui-nightly/try_transmute_ref-dst-mutable.rs:18:33 + | + 18 | let _: Result<&mut u8, _> = try_transmute_ref!(&0u8); + | ^^^^^^^^^^^^^^^^^^^^^^^^ + | | + | types differ in mutability + | arguments to this enum variant are incorrect + | + = note: expected mutable reference `&mut u8` + found reference `&_` +help: the type constructed contains `&_` due to the type of the argument passed + --> tests/ui-nightly/try_transmute_ref-dst-mutable.rs:18:33 + | + 18 | let _: Result<&mut u8, _> = try_transmute_ref!(&0u8); + | ^^^^^^^^^^^^^^^^^^^^^^^^ this argument influences the type of `Ok` +note: tuple variant defined here + --> $RUST/core/src/result.rs + | + | Ok(#[stable(feature = "rust1", since = "1.0.0")] T), + | ^^ + = note: this error originates in the macro `try_transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0308]: mismatched types + --> tests/ui-nightly/try_transmute_ref-dst-mutable.rs:18:33 + | +18 | let _: Result<&mut u8, _> = try_transmute_ref!(&0u8); + | ^^^^^^^^^^^^^^^^^^^^^^^^ types differ in mutability + | + = note: expected enum `Result<&mut u8, _>` + found enum `Result<&_, ValidityError<&u8, _>>` + = note: this error originates in the macro `try_transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-nightly/try_transmute_ref-dst-not-immutable-tryfrombytes.rs b/vendor/zerocopy/tests/ui-nightly/try_transmute_ref-dst-not-immutable-tryfrombytes.rs new file mode 100644 index 00000000..3928a1cb --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/try_transmute_ref-dst-not-immutable-tryfrombytes.rs @@ -0,0 +1,20 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::{NotZerocopy, AU16}; +use zerocopy::try_transmute_ref; + +fn main() { + // `try_transmute_ref` requires that the source type implements `Immutable` + // and `IntoBytes` + let dst_not_try_from_bytes: Result<&NotZerocopy, _> = try_transmute_ref!(&AU16(0)); +} diff --git a/vendor/zerocopy/tests/ui-nightly/try_transmute_ref-dst-not-immutable-tryfrombytes.stderr b/vendor/zerocopy/tests/ui-nightly/try_transmute_ref-dst-not-immutable-tryfrombytes.stderr new file mode 100644 index 00000000..9454a1c8 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/try_transmute_ref-dst-not-immutable-tryfrombytes.stderr @@ -0,0 +1,120 @@ +error[E0277]: the trait bound `NotZerocopy: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-nightly/try_transmute_ref-dst-not-immutable-tryfrombytes.rs:19:33 + | + 19 | let dst_not_try_from_bytes: Result<&NotZerocopy, _> = try_transmute_ref!(&AU16(0)); + | ^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::TryFromBytes` is not implemented for `NotZerocopy` + --> tests/ui-nightly/../../zerocopy-derive/tests/include.rs + | + 48 | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(TryFromBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::TryFromBytes`: + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + and $N others +note: required by a bound in `ValidityError` + --> src/error.rs + | + | pub struct ValidityError<Src, Dst: ?Sized + TryFromBytes> { + | ^^^^^^^^^^^^ required by this bound in `ValidityError` + +error[E0277]: the trait bound `NotZerocopy: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-nightly/try_transmute_ref-dst-not-immutable-tryfrombytes.rs:19:59 + | + 19 | let dst_not_try_from_bytes: Result<&NotZerocopy, _> = try_transmute_ref!(&AU16(0)); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::TryFromBytes` is not implemented for `NotZerocopy` + --> tests/ui-nightly/../../zerocopy-derive/tests/include.rs + | + 48 | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(TryFromBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::TryFromBytes`: + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + and $N others +note: required by a bound in `try_transmute_ref` + --> src/util/macro_util.rs + | + | pub fn try_transmute_ref<Src, Dst>(src: &Src) -> Result<&Dst, ValidityError<&Src, Dst>> + | ----------------- required by a bound in this function +... 
+ | Dst: TryFromBytes + Immutable, + | ^^^^^^^^^^^^ required by this bound in `try_transmute_ref` + = note: this error originates in the macro `try_transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy: zerocopy::Immutable` is not satisfied + --> tests/ui-nightly/try_transmute_ref-dst-not-immutable-tryfrombytes.rs:19:59 + | + 19 | let dst_not_try_from_bytes: Result<&NotZerocopy, _> = try_transmute_ref!(&AU16(0)); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::Immutable` is not implemented for `NotZerocopy` + --> tests/ui-nightly/../../zerocopy-derive/tests/include.rs + | + 48 | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(Immutable)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::Immutable`: + &T + &mut T + () + *const T + *mut T + AU16 + Box<T> + F32<O> + and $N others +note: required by a bound in `try_transmute_ref` + --> src/util/macro_util.rs + | + | pub fn try_transmute_ref<Src, Dst>(src: &Src) -> Result<&Dst, ValidityError<&Src, Dst>> + | ----------------- required by a bound in this function +... + | Dst: TryFromBytes + Immutable, + | ^^^^^^^^^ required by this bound in `try_transmute_ref` + = note: this error originates in the macro `try_transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-nightly/try_transmute_ref-dst-not-immutable-tryfrombytes.rs:19:59 + | + 19 | let dst_not_try_from_bytes: Result<&NotZerocopy, _> = try_transmute_ref!(&AU16(0)); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::TryFromBytes` is not implemented for `NotZerocopy` + --> tests/ui-nightly/../../zerocopy-derive/tests/include.rs + | + 48 | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(TryFromBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::TryFromBytes`: + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + and $N others +note: required by a bound in `ValidityError` + --> src/error.rs + | + | pub struct ValidityError<Src, Dst: ?Sized + TryFromBytes> { + | ^^^^^^^^^^^^ required by this bound in `ValidityError` + = note: this error originates in the macro `try_transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-nightly/try_transmute_ref-size-decrease.rs b/vendor/zerocopy/tests/ui-nightly/try_transmute_ref-size-decrease.rs new file mode 100644 index 00000000..60bd70c2 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/try_transmute_ref-size-decrease.rs @@ -0,0 +1,20 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::try_transmute_ref; + +// Although this is not a soundness requirement, we currently require that the +// size of the destination type is not smaller than the size of the source type. 
+fn main() { + let _decrease_size: Result<&u8, _> = try_transmute_ref!(&AU16(0)); +} diff --git a/vendor/zerocopy/tests/ui-nightly/try_transmute_ref-size-decrease.stderr b/vendor/zerocopy/tests/ui-nightly/try_transmute_ref-size-decrease.stderr new file mode 100644 index 00000000..9b932438 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/try_transmute_ref-size-decrease.stderr @@ -0,0 +1,9 @@ +error[E0512]: cannot transmute between types of different sizes, or dependently-sized types + --> tests/ui-nightly/try_transmute_ref-size-decrease.rs:19:42 + | +19 | let _decrease_size: Result<&u8, _> = try_transmute_ref!(&AU16(0)); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: source type: `AU16` (16 bits) + = note: target type: `u8` (8 bits) + = note: this error originates in the macro `$crate::assert_size_eq` which comes from the expansion of the macro `try_transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-nightly/try_transmute_ref-size-increase.rs b/vendor/zerocopy/tests/ui-nightly/try_transmute_ref-size-increase.rs new file mode 100644 index 00000000..ee5a5fd4 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/try_transmute_ref-size-increase.rs @@ -0,0 +1,20 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::try_transmute_ref; + +// `try_transmute_ref!` does not support transmuting from a smaller type to a +// larger one. +fn main() { + let _increase_size: Result<&AU16, _> = try_transmute_ref!(&[0u8; 2]); +} diff --git a/vendor/zerocopy/tests/ui-nightly/try_transmute_ref-size-increase.stderr b/vendor/zerocopy/tests/ui-nightly/try_transmute_ref-size-increase.stderr new file mode 100644 index 00000000..08e511fc --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/try_transmute_ref-size-increase.stderr @@ -0,0 +1,9 @@ +error[E0512]: cannot transmute between types of different sizes, or dependently-sized types + --> tests/ui-nightly/try_transmute_ref-size-increase.rs:19:44 + | +19 | let _increase_size: Result<&AU16, _> = try_transmute_ref!(&[0u8; 2]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: source type: `AlignOf<[u8; 2]>` (8 bits) + = note: target type: `MaxAlignsOf<[u8; 2], AU16>` (16 bits) + = note: this error originates in the macro `$crate::assert_align_gt_eq` which comes from the expansion of the macro `try_transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-nightly/try_transmute_ref-src-not-immutable-intobytes.rs b/vendor/zerocopy/tests/ui-nightly/try_transmute_ref-src-not-immutable-intobytes.rs new file mode 100644 index 00000000..2aec95ae --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/try_transmute_ref-src-not-immutable-intobytes.rs @@ -0,0 +1,20 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. 
+// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::{NotZerocopy, AU16}; +use zerocopy::try_transmute_ref; + +fn main() { + // `try_transmute_ref` requires that the source type implements `Immutable` + // and `IntoBytes` + let src_not_into_bytes: Result<&AU16, _> = try_transmute_ref!(&NotZerocopy(AU16(0))); +} diff --git a/vendor/zerocopy/tests/ui-nightly/try_transmute_ref-src-not-immutable-intobytes.stderr b/vendor/zerocopy/tests/ui-nightly/try_transmute_ref-src-not-immutable-intobytes.stderr new file mode 100644 index 00000000..3c74c381 --- /dev/null +++ b/vendor/zerocopy/tests/ui-nightly/try_transmute_ref-src-not-immutable-intobytes.stderr @@ -0,0 +1,63 @@ +error[E0277]: the trait bound `NotZerocopy<AU16>: zerocopy::IntoBytes` is not satisfied + --> tests/ui-nightly/try_transmute_ref-src-not-immutable-intobytes.rs:19:48 + | + 19 | let src_not_into_bytes: Result<&AU16, _> = try_transmute_ref!(&NotZerocopy(AU16(0))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::IntoBytes` is not implemented for `NotZerocopy<AU16>` + --> tests/ui-nightly/../../zerocopy-derive/tests/include.rs + | + 48 | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(IntoBytes)]` to `NotZerocopy<AU16>` + = help: the following other types implement trait `zerocopy::IntoBytes`: + () + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + and $N others +note: required by a bound in `try_transmute_ref` + --> src/util/macro_util.rs + | + | pub fn try_transmute_ref<Src, Dst>(src: &Src) -> Result<&Dst, ValidityError<&Src, Dst>> + | ----------------- required by a bound in this function + | where + | Src: IntoBytes + Immutable, + | ^^^^^^^^^ required by this bound in `try_transmute_ref` + = note: this error originates in the macro `try_transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy<AU16>: zerocopy::Immutable` is not satisfied + --> tests/ui-nightly/try_transmute_ref-src-not-immutable-intobytes.rs:19:48 + | + 19 | let src_not_into_bytes: Result<&AU16, _> = try_transmute_ref!(&NotZerocopy(AU16(0))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::Immutable` is not implemented for `NotZerocopy<AU16>` + --> tests/ui-nightly/../../zerocopy-derive/tests/include.rs + | + 48 | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(Immutable)]` to `NotZerocopy<AU16>` + = help: the following other types implement trait `zerocopy::Immutable`: + &T + &mut T + () + *const T + *mut T + AU16 + Box<T> + F32<O> + and $N others +note: required by a bound in `try_transmute_ref` + --> src/util/macro_util.rs + | + | pub fn try_transmute_ref<Src, Dst>(src: &Src) -> Result<&Dst, ValidityError<&Src, Dst>> + | ----------------- required by a bound in this function + | where + | Src: IntoBytes + Immutable, + | ^^^^^^^^^ required by this bound in `try_transmute_ref` + = note: this error originates in the macro `try_transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-from-bytes.rs b/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-from-bytes.rs new file mode 100644 index 00000000..327abca3 
--- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-from-bytes.rs @@ -0,0 +1,21 @@ +// Copyright 2022 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::NotZerocopy; +use zerocopy::FromBytes; + +fn main() { + // We expect the proper diagnostic to be emitted on Rust 1.78.0 and later. + takes_from_bytes::<NotZerocopy>(); +} + +fn takes_from_bytes<T: FromBytes>() {} diff --git a/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-from-bytes.stderr b/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-from-bytes.stderr new file mode 100644 index 00000000..a8af9c7c --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-from-bytes.stderr @@ -0,0 +1,27 @@ +error[E0277]: the trait bound `NotZerocopy: zerocopy::FromBytes` is not satisfied + --> tests/ui-stable/diagnostic-not-implemented-from-bytes.rs:18:24 + | +18 | takes_from_bytes::<NotZerocopy>(); + | ^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::FromBytes` is not implemented for `NotZerocopy` + --> tests/ui-stable/../../zerocopy-derive/tests/include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(FromBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::FromBytes`: + () + AU16 + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + AtomicU16 + and $N others +note: required by a bound in `takes_from_bytes` + --> tests/ui-stable/diagnostic-not-implemented-from-bytes.rs:21:24 + | +21 | fn takes_from_bytes<T: FromBytes>() {} + | ^^^^^^^^^ required by this bound in `takes_from_bytes` diff --git a/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-from-zeros.rs b/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-from-zeros.rs new file mode 100644 index 00000000..6f7f41c2 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-from-zeros.rs @@ -0,0 +1,21 @@ +// Copyright 2022 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::NotZerocopy; +use zerocopy::FromZeros; + +fn main() { + // We expect the proper diagnostic to be emitted on Rust 1.78.0 and later. 
+ takes_from_zeros::<NotZerocopy>(); +} + +fn takes_from_zeros<T: FromZeros>() {} diff --git a/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-from-zeros.stderr b/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-from-zeros.stderr new file mode 100644 index 00000000..6da6fa29 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-from-zeros.stderr @@ -0,0 +1,27 @@ +error[E0277]: the trait bound `NotZerocopy: FromZeros` is not satisfied + --> tests/ui-stable/diagnostic-not-implemented-from-zeros.rs:18:24 + | +18 | takes_from_zeros::<NotZerocopy>(); + | ^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `FromZeros` is not implemented for `NotZerocopy` + --> tests/ui-stable/../../zerocopy-derive/tests/include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(FromZeros)]` to `NotZerocopy` + = help: the following other types implement trait `FromZeros`: + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + and $N others +note: required by a bound in `takes_from_zeros` + --> tests/ui-stable/diagnostic-not-implemented-from-zeros.rs:21:24 + | +21 | fn takes_from_zeros<T: FromZeros>() {} + | ^^^^^^^^^ required by this bound in `takes_from_zeros` diff --git a/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-immutable.rs b/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-immutable.rs new file mode 100644 index 00000000..0432ed44 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-immutable.rs @@ -0,0 +1,21 @@ +// Copyright 2022 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::NotZerocopy; +use zerocopy::Immutable; + +fn main() { + // We expect the proper diagnostic to be emitted on Rust 1.78.0 and later. 
+ takes_immutable::<NotZerocopy>(); +} + +fn takes_immutable<T: Immutable>() {} diff --git a/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-immutable.stderr b/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-immutable.stderr new file mode 100644 index 00000000..47c4e839 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-immutable.stderr @@ -0,0 +1,27 @@ +error[E0277]: the trait bound `NotZerocopy: zerocopy::Immutable` is not satisfied + --> tests/ui-stable/diagnostic-not-implemented-immutable.rs:18:23 + | +18 | takes_immutable::<NotZerocopy>(); + | ^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::Immutable` is not implemented for `NotZerocopy` + --> tests/ui-stable/../../zerocopy-derive/tests/include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(Immutable)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::Immutable`: + &T + &mut T + () + *const T + *mut T + AU16 + Box<T> + F32<O> + and $N others +note: required by a bound in `takes_immutable` + --> tests/ui-stable/diagnostic-not-implemented-immutable.rs:21:23 + | +21 | fn takes_immutable<T: Immutable>() {} + | ^^^^^^^^^ required by this bound in `takes_immutable` diff --git a/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-into-bytes.rs b/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-into-bytes.rs new file mode 100644 index 00000000..f9435536 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-into-bytes.rs @@ -0,0 +1,21 @@ +// Copyright 2022 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::NotZerocopy; +use zerocopy::IntoBytes; + +fn main() { + // We expect the proper diagnostic to be emitted on Rust 1.78.0 and later. 
+ takes_into_bytes::<NotZerocopy>(); +} + +fn takes_into_bytes<T: IntoBytes>() {} diff --git a/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-into-bytes.stderr b/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-into-bytes.stderr new file mode 100644 index 00000000..dc84aacc --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-into-bytes.stderr @@ -0,0 +1,27 @@ +error[E0277]: the trait bound `NotZerocopy: zerocopy::IntoBytes` is not satisfied + --> tests/ui-stable/diagnostic-not-implemented-into-bytes.rs:18:24 + | +18 | takes_into_bytes::<NotZerocopy>(); + | ^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::IntoBytes` is not implemented for `NotZerocopy` + --> tests/ui-stable/../../zerocopy-derive/tests/include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(IntoBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::IntoBytes`: + () + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + and $N others +note: required by a bound in `takes_into_bytes` + --> tests/ui-stable/diagnostic-not-implemented-into-bytes.rs:21:24 + | +21 | fn takes_into_bytes<T: IntoBytes>() {} + | ^^^^^^^^^ required by this bound in `takes_into_bytes` diff --git a/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-issue-1296.rs b/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-issue-1296.rs new file mode 100644 index 00000000..e12737a6 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-issue-1296.rs @@ -0,0 +1,59 @@ +// Copyright 2022 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::NotZerocopy; +use zerocopy::{Immutable, IntoBytes}; + +fn main() { + // This is adapted from #1296, which includes the following text: + // + // The compiler errors when a type is missing Immutable are somewhat + // misleading, although I'm not sure there's much zerocopy can do about + // this. 
An example where the compiler recommends adding a reference + // rather than implementing Immutable (some were even more confusing than + // this): + // + // error[E0277]: the trait bound `virtio::wl::CtrlVfdNewDmabuf: zerocopy::Immutable` is not satisfied + // --> devices/src/virtio/wl.rs:317:20 + // | + // 317 | .write_obj(ctrl_vfd_new_dmabuf) + // | --------- ^^^^^^^^^^^^^^^^^^^ the trait `zerocopy::Immutable` is not implemented for `virtio::wl::CtrlVfdNewDmabuf` + // | | + // | required by a bound introduced by this call + // | + // note: required by a bound in `virtio::descriptor_utils::Writer::write_obj` + // --> devices/src/virtio/descriptor_utils.rs:536:25 + // | + // 536 | pub fn write_obj<T: Immutable + IntoBytes>(&mut self, val: T) -> io::Result<()> { + // | ^^^^^^^^^ required by this bound in `Writer::write_obj` + // help: consider borrowing here + // | + // 317 | .write_obj(&ctrl_vfd_new_dmabuf) + // | + + // 317 | .write_obj(&mut ctrl_vfd_new_dmabuf) + // | ++++ + // + // Taking the compiler's suggestion results in a different error with a + // recommendation to remove the reference (back to the original code). + // + // As of this writing, the described problem is still happening thanks to + // https://github.com/rust-lang/rust/issues/130563. We include this test so + // that we can capture the current behavior, but we will update it once that + // Rust issue is fixed. + Foo.write_obj(NotZerocopy(())); +} + +struct Foo; + +impl Foo { + fn write_obj<T: Immutable + IntoBytes>(&mut self, _val: T) {} +} diff --git a/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-issue-1296.stderr b/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-issue-1296.stderr new file mode 100644 index 00000000..cb4476e0 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-issue-1296.stderr @@ -0,0 +1,49 @@ +error[E0277]: the trait bound `NotZerocopy: zerocopy::Immutable` is not satisfied + --> tests/ui-stable/diagnostic-not-implemented-issue-1296.rs:52:19 + | +52 | Foo.write_obj(NotZerocopy(())); + | --------- ^^^^^^^^^^^^^^^ the trait `zerocopy::Immutable` is not implemented for `NotZerocopy` + | | + | required by a bound introduced by this call + | +note: required by a bound in `Foo::write_obj` + --> tests/ui-stable/diagnostic-not-implemented-issue-1296.rs:58:21 + | +58 | fn write_obj<T: Immutable + IntoBytes>(&mut self, _val: T) {} + | ^^^^^^^^^ required by this bound in `Foo::write_obj` +help: consider borrowing here + | +52 | Foo.write_obj(&NotZerocopy(())); + | + +52 | Foo.write_obj(&mut NotZerocopy(())); + | ++++ + +error[E0277]: the trait bound `NotZerocopy: zerocopy::IntoBytes` is not satisfied + --> tests/ui-stable/diagnostic-not-implemented-issue-1296.rs:52:19 + | +52 | Foo.write_obj(NotZerocopy(())); + | --------- ^^^^^^^^^^^^^^^ unsatisfied trait bound + | | + | required by a bound introduced by this call + | +help: the trait `zerocopy::IntoBytes` is not implemented for `NotZerocopy` + --> tests/ui-stable/../../zerocopy-derive/tests/include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(IntoBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::IntoBytes`: + () + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + and $N others +note: required by a bound in `Foo::write_obj` + --> tests/ui-stable/diagnostic-not-implemented-issue-1296.rs:58:33 + | +58 | fn write_obj<T: Immutable + IntoBytes>(&mut self, _val: T) {} 
+ | ^^^^^^^^^ required by this bound in `Foo::write_obj` diff --git a/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-known-layout.rs b/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-known-layout.rs new file mode 100644 index 00000000..2e306c25 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-known-layout.rs @@ -0,0 +1,21 @@ +// Copyright 2022 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::NotZerocopy; +use zerocopy::KnownLayout; + +fn main() { + // We expect the proper diagnostic to be emitted on Rust 1.78.0 and later. + takes_known_layout::<NotZerocopy>(); +} + +fn takes_known_layout<T: KnownLayout>() {} diff --git a/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-known-layout.stderr b/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-known-layout.stderr new file mode 100644 index 00000000..8ee5a62b --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-known-layout.stderr @@ -0,0 +1,27 @@ +error[E0277]: the trait bound `NotZerocopy: zerocopy::KnownLayout` is not satisfied + --> tests/ui-stable/diagnostic-not-implemented-known-layout.rs:18:26 + | +18 | takes_known_layout::<NotZerocopy>(); + | ^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::KnownLayout` is not implemented for `NotZerocopy` + --> tests/ui-stable/../../zerocopy-derive/tests/include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(KnownLayout)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::KnownLayout`: + &T + &mut T + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + and $N others +note: required by a bound in `takes_known_layout` + --> tests/ui-stable/diagnostic-not-implemented-known-layout.rs:21:26 + | +21 | fn takes_known_layout<T: KnownLayout>() {} + | ^^^^^^^^^^^ required by this bound in `takes_known_layout` diff --git a/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-try-from-bytes.rs b/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-try-from-bytes.rs new file mode 100644 index 00000000..72e72139 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-try-from-bytes.rs @@ -0,0 +1,21 @@ +// Copyright 2022 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::NotZerocopy; +use zerocopy::TryFromBytes; + +fn main() { + // We expect the proper diagnostic to be emitted on Rust 1.78.0 and later. 
+ takes_try_from_bytes::<NotZerocopy>(); +} + +fn takes_try_from_bytes<T: TryFromBytes>() {} diff --git a/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-try-from-bytes.stderr b/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-try-from-bytes.stderr new file mode 100644 index 00000000..8b5b3a8f --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-try-from-bytes.stderr @@ -0,0 +1,27 @@ +error[E0277]: the trait bound `NotZerocopy: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-stable/diagnostic-not-implemented-try-from-bytes.rs:18:28 + | +18 | takes_try_from_bytes::<NotZerocopy>(); + | ^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::TryFromBytes` is not implemented for `NotZerocopy` + --> tests/ui-stable/../../zerocopy-derive/tests/include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(TryFromBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::TryFromBytes`: + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + and $N others +note: required by a bound in `takes_try_from_bytes` + --> tests/ui-stable/diagnostic-not-implemented-try-from-bytes.rs:21:28 + | +21 | fn takes_try_from_bytes<T: TryFromBytes>() {} + | ^^^^^^^^^^^^ required by this bound in `takes_try_from_bytes` diff --git a/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-unaligned.rs b/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-unaligned.rs new file mode 100644 index 00000000..95ce5bd5 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-unaligned.rs @@ -0,0 +1,21 @@ +// Copyright 2022 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::NotZerocopy; +use zerocopy::Unaligned; + +fn main() { + // We expect the proper diagnostic to be emitted on Rust 1.78.0 and later. 
+ takes_unaligned::<NotZerocopy>(); +} + +fn takes_unaligned<T: Unaligned>() {} diff --git a/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-unaligned.stderr b/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-unaligned.stderr new file mode 100644 index 00000000..ae638f0e --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/diagnostic-not-implemented-unaligned.stderr @@ -0,0 +1,27 @@ +error[E0277]: the trait bound `NotZerocopy: zerocopy::Unaligned` is not satisfied + --> tests/ui-stable/diagnostic-not-implemented-unaligned.rs:18:23 + | +18 | takes_unaligned::<NotZerocopy>(); + | ^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::Unaligned` is not implemented for `NotZerocopy` + --> tests/ui-stable/../../zerocopy-derive/tests/include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(Unaligned)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::Unaligned`: + () + AtomicBool + AtomicI8 + AtomicU8 + Cell<T> + F32<O> + F64<O> + I128<O> + and $N others +note: required by a bound in `takes_unaligned` + --> tests/ui-stable/diagnostic-not-implemented-unaligned.rs:21:23 + | +21 | fn takes_unaligned<T: Unaligned>() {} + | ^^^^^^^^^ required by this bound in `takes_unaligned` diff --git a/vendor/zerocopy/tests/ui-stable/include_value_not_from_bytes.rs b/vendor/zerocopy/tests/ui-stable/include_value_not_from_bytes.rs new file mode 100644 index 00000000..e6289336 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/include_value_not_from_bytes.rs @@ -0,0 +1,19 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +#[macro_use] +extern crate zerocopy; + +use util::NotZerocopy; + +fn main() {} + +// Should fail because `NotZerocopy<u32>: !FromBytes`. 
+const NOT_FROM_BYTES: NotZerocopy<u32> = include_value!("../../testdata/include_value/data"); diff --git a/vendor/zerocopy/tests/ui-stable/include_value_not_from_bytes.stderr b/vendor/zerocopy/tests/ui-stable/include_value_not_from_bytes.stderr new file mode 100644 index 00000000..8f2bcbea --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/include_value_not_from_bytes.stderr @@ -0,0 +1,31 @@ +error[E0277]: the trait bound `NotZerocopy<u32>: zerocopy::FromBytes` is not satisfied + --> tests/ui-stable/include_value_not_from_bytes.rs:19:42 + | +19 | const NOT_FROM_BYTES: NotZerocopy<u32> = include_value!("../../testdata/include_value/data"); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::FromBytes` is not implemented for `NotZerocopy<u32>` + --> tests/ui-stable/../../zerocopy-derive/tests/include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(FromBytes)]` to `NotZerocopy<u32>` + = help: the following other types implement trait `zerocopy::FromBytes`: + () + AU16 + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + AtomicU16 + and $N others +note: required by a bound in `NOT_FROM_BYTES::transmute` + --> tests/ui-stable/include_value_not_from_bytes.rs:19:42 + | +19 | const NOT_FROM_BYTES: NotZerocopy<u32> = include_value!("../../testdata/include_value/data"); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | | + | required by a bound in this function + | required by this bound in `transmute` + = note: this error originates in the macro `$crate::transmute` which comes from the expansion of the macro `include_value` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/include_value_wrong_size.rs b/vendor/zerocopy/tests/ui-stable/include_value_wrong_size.rs new file mode 100644 index 00000000..e02b2be8 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/include_value_wrong_size.rs @@ -0,0 +1,15 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +#[macro_use] +extern crate zerocopy; + +fn main() {} + +// Should fail because the file is 4 bytes long, not 8. 
+const WRONG_SIZE: u64 = include_value!("../../testdata/include_value/data"); diff --git a/vendor/zerocopy/tests/ui-stable/include_value_wrong_size.stderr b/vendor/zerocopy/tests/ui-stable/include_value_wrong_size.stderr new file mode 100644 index 00000000..9782163f --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/include_value_wrong_size.stderr @@ -0,0 +1,17 @@ +error[E0080]: transmuting from 4-byte type to 8-byte type: `[u8; 4]` -> `u64` + --> tests/ui-stable/include_value_wrong_size.rs:15:25 + | +15 | const WRONG_SIZE: u64 = include_value!("../../testdata/include_value/data"); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ evaluation of `WRONG_SIZE` failed here + | + = note: this error originates in the macro `$crate::transmute` which comes from the expansion of the macro `include_value` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0512]: cannot transmute between types of different sizes, or dependently-sized types + --> tests/ui-stable/include_value_wrong_size.rs:15:25 + | +15 | const WRONG_SIZE: u64 = include_value!("../../testdata/include_value/data"); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: source type: `[u8; 4]` (32 bits) + = note: target type: `u64` (64 bits) + = note: this error originates in the macro `$crate::transmute` which comes from the expansion of the macro `include_value` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/invalid-impls/invalid-impls.rs b/vendor/zerocopy/tests/ui-stable/invalid-impls/invalid-impls.rs new file mode 100644 index 00000000..78f40e84 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/invalid-impls/invalid-impls.rs @@ -0,0 +1,32 @@ +// Copyright 2022 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +// Since some macros from `macros.rs` are unused. 
+#![allow(unused)] + +extern crate zerocopy; +extern crate zerocopy_derive; + +include!("../../../src/util/macros.rs"); + +use zerocopy::*; +use zerocopy_derive::*; + +fn main() {} + +#[derive(FromBytes, IntoBytes, Unaligned)] +#[repr(transparent)] +struct Foo<T>(T); + +const _: () = unsafe { + impl_or_verify!(T => TryFromBytes for Foo<T>); + impl_or_verify!(T => FromZeros for Foo<T>); + impl_or_verify!(T => FromBytes for Foo<T>); + impl_or_verify!(T => IntoBytes for Foo<T>); + impl_or_verify!(T => Unaligned for Foo<T>); +}; diff --git a/vendor/zerocopy/tests/ui-stable/invalid-impls/invalid-impls.stderr b/vendor/zerocopy/tests/ui-stable/invalid-impls/invalid-impls.stderr new file mode 100644 index 00000000..45f473cb --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/invalid-impls/invalid-impls.stderr @@ -0,0 +1,139 @@ +error[E0277]: the trait bound `T: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-stable/invalid-impls/invalid-impls.rs:27:43 + | + 27 | impl_or_verify!(T => TryFromBytes for Foo<T>); + | ^^^^^^ the trait `zerocopy::TryFromBytes` is not implemented for `T` + | + = note: Consider adding `#[derive(TryFromBytes)]` to `T` +note: required for `Foo<T>` to implement `zerocopy::TryFromBytes` + --> tests/ui-stable/invalid-impls/invalid-impls.rs:22:10 + | + 22 | #[derive(FromBytes, IntoBytes, Unaligned)] + | ^^^^^^^^^ unsatisfied trait bound introduced in this `derive` macro +note: required by a bound in `_::Subtrait` + --> tests/ui-stable/invalid-impls/../../../src/util/macros.rs + | + | trait Subtrait: $trait {} + | ^^^^^^ required by this bound in `Subtrait` + | + ::: tests/ui-stable/invalid-impls/invalid-impls.rs:27:5 + | + 27 | impl_or_verify!(T => TryFromBytes for Foo<T>); + | --------------------------------------------- in this macro invocation + = note: this error originates in the derive macro `FromBytes` which comes from the expansion of the macro `impl_or_verify` (in Nightly builds, run with -Z macro-backtrace for more info) +help: consider restricting type parameter `T` with trait `TryFromBytes` + | + 27 | impl_or_verify!(T: zerocopy::TryFromBytes => TryFromBytes for Foo<T>); + | ++++++++++++++++++++++++ + +error[E0277]: the trait bound `T: zerocopy::FromZeros` is not satisfied + --> tests/ui-stable/invalid-impls/invalid-impls.rs:28:40 + | + 28 | impl_or_verify!(T => FromZeros for Foo<T>); + | ^^^^^^ the trait `zerocopy::FromZeros` is not implemented for `T` + | + = note: Consider adding `#[derive(FromZeros)]` to `T` +note: required for `Foo<T>` to implement `zerocopy::FromZeros` + --> tests/ui-stable/invalid-impls/invalid-impls.rs:22:10 + | + 22 | #[derive(FromBytes, IntoBytes, Unaligned)] + | ^^^^^^^^^ unsatisfied trait bound introduced in this `derive` macro +note: required by a bound in `_::Subtrait` + --> tests/ui-stable/invalid-impls/../../../src/util/macros.rs + | + | trait Subtrait: $trait {} + | ^^^^^^ required by this bound in `Subtrait` + | + ::: tests/ui-stable/invalid-impls/invalid-impls.rs:28:5 + | + 28 | impl_or_verify!(T => FromZeros for Foo<T>); + | ------------------------------------------ in this macro invocation + = note: this error originates in the derive macro `FromBytes` which comes from the expansion of the macro `impl_or_verify` (in Nightly builds, run with -Z macro-backtrace for more info) +help: consider restricting type parameter `T` with trait `FromZeros` + | + 28 | impl_or_verify!(T: zerocopy::FromZeros => FromZeros for Foo<T>); + | +++++++++++++++++++++ + +error[E0277]: the trait bound `T: zerocopy::FromBytes` is not satisfied + 
--> tests/ui-stable/invalid-impls/invalid-impls.rs:29:40 + | + 29 | impl_or_verify!(T => FromBytes for Foo<T>); + | ^^^^^^ the trait `zerocopy::FromBytes` is not implemented for `T` + | + = note: Consider adding `#[derive(FromBytes)]` to `T` +note: required for `Foo<T>` to implement `zerocopy::FromBytes` + --> tests/ui-stable/invalid-impls/invalid-impls.rs:22:10 + | + 22 | #[derive(FromBytes, IntoBytes, Unaligned)] + | ^^^^^^^^^ unsatisfied trait bound introduced in this `derive` macro +note: required by a bound in `_::Subtrait` + --> tests/ui-stable/invalid-impls/../../../src/util/macros.rs + | + | trait Subtrait: $trait {} + | ^^^^^^ required by this bound in `Subtrait` + | + ::: tests/ui-stable/invalid-impls/invalid-impls.rs:29:5 + | + 29 | impl_or_verify!(T => FromBytes for Foo<T>); + | ------------------------------------------ in this macro invocation + = note: this error originates in the derive macro `FromBytes` which comes from the expansion of the macro `impl_or_verify` (in Nightly builds, run with -Z macro-backtrace for more info) +help: consider restricting type parameter `T` with trait `FromBytes` + | + 29 | impl_or_verify!(T: zerocopy::FromBytes => FromBytes for Foo<T>); + | +++++++++++++++++++++ + +error[E0277]: the trait bound `T: zerocopy::IntoBytes` is not satisfied + --> tests/ui-stable/invalid-impls/invalid-impls.rs:30:40 + | + 30 | impl_or_verify!(T => IntoBytes for Foo<T>); + | ^^^^^^ the trait `zerocopy::IntoBytes` is not implemented for `T` + | + = note: Consider adding `#[derive(IntoBytes)]` to `T` +note: required for `Foo<T>` to implement `zerocopy::IntoBytes` + --> tests/ui-stable/invalid-impls/invalid-impls.rs:22:21 + | + 22 | #[derive(FromBytes, IntoBytes, Unaligned)] + | ^^^^^^^^^ unsatisfied trait bound introduced in this `derive` macro +note: required by a bound in `_::Subtrait` + --> tests/ui-stable/invalid-impls/../../../src/util/macros.rs + | + | trait Subtrait: $trait {} + | ^^^^^^ required by this bound in `Subtrait` + | + ::: tests/ui-stable/invalid-impls/invalid-impls.rs:30:5 + | + 30 | impl_or_verify!(T => IntoBytes for Foo<T>); + | ------------------------------------------ in this macro invocation + = note: this error originates in the derive macro `IntoBytes` which comes from the expansion of the macro `impl_or_verify` (in Nightly builds, run with -Z macro-backtrace for more info) +help: consider restricting type parameter `T` with trait `IntoBytes` + | + 30 | impl_or_verify!(T: zerocopy::IntoBytes => IntoBytes for Foo<T>); + | +++++++++++++++++++++ + +error[E0277]: the trait bound `T: zerocopy::Unaligned` is not satisfied + --> tests/ui-stable/invalid-impls/invalid-impls.rs:31:40 + | + 31 | impl_or_verify!(T => Unaligned for Foo<T>); + | ^^^^^^ the trait `zerocopy::Unaligned` is not implemented for `T` + | + = note: Consider adding `#[derive(Unaligned)]` to `T` +note: required for `Foo<T>` to implement `zerocopy::Unaligned` + --> tests/ui-stable/invalid-impls/invalid-impls.rs:22:32 + | + 22 | #[derive(FromBytes, IntoBytes, Unaligned)] + | ^^^^^^^^^ unsatisfied trait bound introduced in this `derive` macro +note: required by a bound in `_::Subtrait` + --> tests/ui-stable/invalid-impls/../../../src/util/macros.rs + | + | trait Subtrait: $trait {} + | ^^^^^^ required by this bound in `Subtrait` + | + ::: tests/ui-stable/invalid-impls/invalid-impls.rs:31:5 + | + 31 | impl_or_verify!(T => Unaligned for Foo<T>); + | ------------------------------------------ in this macro invocation + = note: this error originates in the derive macro `Unaligned` which 
comes from the expansion of the macro `impl_or_verify` (in Nightly builds, run with -Z macro-backtrace for more info) +help: consider restricting type parameter `T` with trait `Unaligned` + | + 31 | impl_or_verify!(T: zerocopy::Unaligned => Unaligned for Foo<T>); + | +++++++++++++++++++++ diff --git a/vendor/zerocopy/tests/ui-stable/max-align.rs b/vendor/zerocopy/tests/ui-stable/max-align.rs new file mode 100644 index 00000000..53e3eb9b --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/max-align.rs @@ -0,0 +1,99 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +#[repr(C, align(1))] +struct Align1; + +#[repr(C, align(2))] +struct Align2; + +#[repr(C, align(4))] +struct Align4; + +#[repr(C, align(8))] +struct Align8; + +#[repr(C, align(16))] +struct Align16; + +#[repr(C, align(32))] +struct Align32; + +#[repr(C, align(64))] +struct Align64; + +#[repr(C, align(128))] +struct Align128; + +#[repr(C, align(256))] +struct Align256; + +#[repr(C, align(512))] +struct Align512; + +#[repr(C, align(1024))] +struct Align1024; + +#[repr(C, align(2048))] +struct Align2048; + +#[repr(C, align(4096))] +struct Align4096; + +#[repr(C, align(8192))] +struct Align8192; + +#[repr(C, align(16384))] +struct Align16384; + +#[repr(C, align(32768))] +struct Align32768; + +#[repr(C, align(65536))] +struct Align65536; + +#[repr(C, align(131072))] +struct Align131072; + +#[repr(C, align(262144))] +struct Align262144; + +#[repr(C, align(524288))] +struct Align524288; + +#[repr(C, align(1048576))] +struct Align1048576; + +#[repr(C, align(2097152))] +struct Align2097152; + +#[repr(C, align(4194304))] +struct Align4194304; + +#[repr(C, align(8388608))] +struct Align8388608; + +#[repr(C, align(16777216))] +struct Align16777216; + +#[repr(C, align(33554432))] +struct Align33554432; + +#[repr(C, align(67108864))] +struct Align67108864; + +#[repr(C, align(134217728))] +struct Align13421772; + +#[repr(C, align(268435456))] +struct Align26843545; + +#[repr(C, align(1073741824))] +struct Align1073741824; + +fn main() {} diff --git a/vendor/zerocopy/tests/ui-stable/max-align.stderr b/vendor/zerocopy/tests/ui-stable/max-align.stderr new file mode 100644 index 00000000..7e83b2f5 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/max-align.stderr @@ -0,0 +1,5 @@ +error[E0589]: invalid `repr(align)` attribute: larger than 2^29 + --> tests/ui-stable/max-align.rs:96:17 + | +96 | #[repr(C, align(1073741824))] + | ^^^^^^^^^^ diff --git a/vendor/zerocopy/tests/ui-stable/ptr-is-invariant-over-v.rs b/vendor/zerocopy/tests/ui-stable/ptr-is-invariant-over-v.rs new file mode 100644 index 00000000..b9a76948 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/ptr-is-invariant-over-v.rs @@ -0,0 +1,29 @@ +// Copyright 2025 The Fuchsia Authors +// +// Licensed under the 2-Clause BSD License <LICENSE-BSD or +// https://opensource.org/license/bsd-2-clause>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +use zerocopy::pointer::{ + invariant::{Aligned, Exclusive, Shared, Valid}, + Ptr, +}; + +fn _when_exclusive<'big: 'small, 'small>( + big: Ptr<'small, &'big u32, (Exclusive, Aligned, Valid)>, + mut _small: Ptr<'small, &'small u32, (Exclusive, Aligned, Valid)>, +) { + _small = big; +} + +fn _when_shared<'big: 'small, 'small>( + big: Ptr<'small, &'big u32, (Shared, Aligned, Valid)>, + mut _small: Ptr<'small, &'small u32, (Shared, Aligned, Valid)>, +) { + _small = big; +} + +fn main() {} diff --git a/vendor/zerocopy/tests/ui-stable/ptr-is-invariant-over-v.stderr b/vendor/zerocopy/tests/ui-stable/ptr-is-invariant-over-v.stderr new file mode 100644 index 00000000..b3559596 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/ptr-is-invariant-over-v.stderr @@ -0,0 +1,31 @@ +error: lifetime may not live long enough + --> tests/ui-stable/ptr-is-invariant-over-v.rs:19:5 + | +15 | fn _when_exclusive<'big: 'small, 'small>( + | ---- ------ lifetime `'small` defined here + | | + | lifetime `'big` defined here +... +19 | _small = big; + | ^^^^^^^^^^^^ assignment requires that `'small` must outlive `'big` + | + = help: consider adding the following bound: `'small: 'big` + = note: requirement occurs because of the type `Ptr<'_, &u32, (invariant::Exclusive, Aligned, Valid)>`, which makes the generic argument `&u32` invariant + = note: the struct `Ptr<'a, T, I>` is invariant over the parameter `T` + = help: see <https://doc.rust-lang.org/nomicon/subtyping.html> for more information about variance + +error: lifetime may not live long enough + --> tests/ui-stable/ptr-is-invariant-over-v.rs:26:5 + | +22 | fn _when_shared<'big: 'small, 'small>( + | ---- ------ lifetime `'small` defined here + | | + | lifetime `'big` defined here +... +26 | _small = big; + | ^^^^^^^^^^^^ assignment requires that `'small` must outlive `'big` + | + = help: consider adding the following bound: `'small: 'big` + = note: requirement occurs because of the type `Ptr<'_, &u32, (Shared, Aligned, Valid)>`, which makes the generic argument `&u32` invariant + = note: the struct `Ptr<'a, T, I>` is invariant over the parameter `T` + = help: see <https://doc.rust-lang.org/nomicon/subtyping.html> for more information about variance diff --git a/vendor/zerocopy/tests/ui-stable/transmute-dst-not-frombytes.rs b/vendor/zerocopy/tests/ui-stable/transmute-dst-not-frombytes.rs new file mode 100644 index 00000000..7b5098e5 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-dst-not-frombytes.rs @@ -0,0 +1,19 @@ +// Copyright 2022 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::{NotZerocopy, AU16}; +use zerocopy::transmute; + +fn main() {} + +// `transmute` requires that the destination type implements `FromBytes` +const DST_NOT_FROM_BYTES: NotZerocopy = transmute!(AU16(0)); diff --git a/vendor/zerocopy/tests/ui-stable/transmute-dst-not-frombytes.stderr b/vendor/zerocopy/tests/ui-stable/transmute-dst-not-frombytes.stderr new file mode 100644 index 00000000..bd31ed0c --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-dst-not-frombytes.stderr @@ -0,0 +1,31 @@ +error[E0277]: the trait bound `NotZerocopy: zerocopy::FromBytes` is not satisfied + --> tests/ui-stable/transmute-dst-not-frombytes.rs:19:41 + | +19 | const DST_NOT_FROM_BYTES: NotZerocopy = transmute!(AU16(0)); + | ^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::FromBytes` is not implemented for `NotZerocopy` + --> tests/ui-stable/../../zerocopy-derive/tests/include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(FromBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::FromBytes`: + () + AU16 + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + AtomicU16 + and $N others +note: required by a bound in `DST_NOT_FROM_BYTES::transmute` + --> tests/ui-stable/transmute-dst-not-frombytes.rs:19:41 + | +19 | const DST_NOT_FROM_BYTES: NotZerocopy = transmute!(AU16(0)); + | ^^^^^^^^^^^^^^^^^^^ + | | + | required by a bound in this function + | required by this bound in `transmute` + = note: this error originates in the macro `transmute` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/transmute-mut-const.rs b/vendor/zerocopy/tests/ui-stable/transmute-mut-const.rs new file mode 100644 index 00000000..47372b1b --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-mut-const.rs @@ -0,0 +1,20 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use zerocopy::transmute_mut; + +fn main() {} + +const ARRAY_OF_U8S: [u8; 2] = [0u8; 2]; + +// `transmute_mut!` cannot, generally speaking, be used in const contexts. 
+const CONST_CONTEXT: &mut [u8; 2] = transmute_mut!(&mut ARRAY_OF_U8S); diff --git a/vendor/zerocopy/tests/ui-stable/transmute-mut-const.stderr b/vendor/zerocopy/tests/ui-stable/transmute-mut-const.stderr new file mode 100644 index 00000000..d5c2aabf --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-mut-const.stderr @@ -0,0 +1,23 @@ +warning: taking a mutable reference to a `const` item + --> tests/ui-stable/transmute-mut-const.rs:20:52 + | +20 | const CONST_CONTEXT: &mut [u8; 2] = transmute_mut!(&mut ARRAY_OF_U8S); + | ^^^^^^^^^^^^^^^^^ + | + = note: each usage of a `const` item creates a new temporary + = note: the mutable reference will refer to this temporary, not the original `const` item +note: `const` item defined here + --> tests/ui-stable/transmute-mut-const.rs:17:1 + | +17 | const ARRAY_OF_U8S: [u8; 2] = [0u8; 2]; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: `#[warn(const_item_mutation)]` on by default + +error[E0015]: cannot call non-const method `Wrap::<&mut [u8; 2], &mut [u8; 2]>::transmute_mut` in constants + --> tests/ui-stable/transmute-mut-const.rs:20:37 + | +20 | const CONST_CONTEXT: &mut [u8; 2] = transmute_mut!(&mut ARRAY_OF_U8S); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: calls in constants are limited to constant functions, tuple structs and tuple variants + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/transmute-mut-dst-not-a-reference.rs b/vendor/zerocopy/tests/ui-stable/transmute-mut-dst-not-a-reference.rs new file mode 100644 index 00000000..33a9ecd9 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-mut-dst-not-a-reference.rs @@ -0,0 +1,17 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute_mut; + +fn main() {} + +// `transmute_mut!` does not support transmuting into a non-reference +// destination type. 
+const DST_NOT_A_REFERENCE: usize = transmute_mut!(&mut 0u8); diff --git a/vendor/zerocopy/tests/ui-stable/transmute-mut-dst-not-a-reference.stderr b/vendor/zerocopy/tests/ui-stable/transmute-mut-dst-not-a-reference.stderr new file mode 100644 index 00000000..1f438bad --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-mut-dst-not-a-reference.stderr @@ -0,0 +1,9 @@ +error[E0308]: mismatched types + --> tests/ui-stable/transmute-mut-dst-not-a-reference.rs:17:36 + | +17 | const DST_NOT_A_REFERENCE: usize = transmute_mut!(&mut 0u8); + | ^^^^^^^^^^^^^^^^^^^^^^^^ expected `usize`, found `&mut _` + | + = note: expected type `usize` + found mutable reference `&mut _` + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/transmute-mut-dst-not-frombytes.rs b/vendor/zerocopy/tests/ui-stable/transmute-mut-dst-not-frombytes.rs new file mode 100644 index 00000000..00390755 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-mut-dst-not-frombytes.rs @@ -0,0 +1,24 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute_mut; + +fn main() {} + +#[derive(zerocopy::FromBytes, zerocopy::IntoBytes, zerocopy::Immutable)] +#[repr(C)] +struct Src; + +#[derive(zerocopy::IntoBytes, zerocopy::Immutable)] +#[repr(C)] +struct Dst; + +// `transmute_mut` requires that the destination type implements `FromBytes` +const DST_NOT_FROM_BYTES: &mut Dst = transmute_mut!(&mut Src); diff --git a/vendor/zerocopy/tests/ui-stable/transmute-mut-dst-not-frombytes.stderr b/vendor/zerocopy/tests/ui-stable/transmute-mut-dst-not-frombytes.stderr new file mode 100644 index 00000000..cfd6c214 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-mut-dst-not-frombytes.stderr @@ -0,0 +1,31 @@ +error[E0277]: the trait bound `Dst: FromBytes` is not satisfied + --> tests/ui-stable/transmute-mut-dst-not-frombytes.rs:24:38 + | + 24 | const DST_NOT_FROM_BYTES: &mut Dst = transmute_mut!(&mut Src); + | ^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `FromBytes` is not implemented for `Dst` + --> tests/ui-stable/transmute-mut-dst-not-frombytes.rs:21:1 + | + 21 | struct Dst; + | ^^^^^^^^^^ + = note: Consider adding `#[derive(FromBytes)]` to `Dst` + = help: the following other types implement trait `FromBytes`: + () + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + AtomicU16 + AtomicU32 + and $N others +note: required by a bound in `Wrap::<&'a mut Src, &'a mut Dst>::transmute_mut` + --> src/util/macro_util.rs + | + | pub fn transmute_mut(self) -> &'a mut Dst + | ------------- required by a bound in this associated function +... 
+ | Dst: FromBytes + IntoBytes, + | ^^^^^^^^^ required by this bound in `Wrap::<&mut Src, &mut Dst>::transmute_mut` + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/transmute-mut-dst-not-intobytes.rs b/vendor/zerocopy/tests/ui-stable/transmute-mut-dst-not-intobytes.rs new file mode 100644 index 00000000..27cf93ae --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-mut-dst-not-intobytes.rs @@ -0,0 +1,24 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute_mut; + +fn main() {} + +#[derive(zerocopy::FromBytes, zerocopy::IntoBytes, zerocopy::Immutable)] +#[repr(C)] +struct Src; + +#[derive(zerocopy::FromBytes, zerocopy::Immutable)] +#[repr(C)] +struct Dst; + +// `transmute_mut` requires that the destination type implements `IntoBytes` +const DST_NOT_AS_BYTES: &mut Dst = transmute_mut!(&mut Src); diff --git a/vendor/zerocopy/tests/ui-stable/transmute-mut-dst-not-intobytes.stderr b/vendor/zerocopy/tests/ui-stable/transmute-mut-dst-not-intobytes.stderr new file mode 100644 index 00000000..90088e26 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-mut-dst-not-intobytes.stderr @@ -0,0 +1,31 @@ +error[E0277]: the trait bound `Dst: IntoBytes` is not satisfied + --> tests/ui-stable/transmute-mut-dst-not-intobytes.rs:24:36 + | + 24 | const DST_NOT_AS_BYTES: &mut Dst = transmute_mut!(&mut Src); + | ^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `IntoBytes` is not implemented for `Dst` + --> tests/ui-stable/transmute-mut-dst-not-intobytes.rs:21:1 + | + 21 | struct Dst; + | ^^^^^^^^^^ + = note: Consider adding `#[derive(IntoBytes)]` to `Dst` + = help: the following other types implement trait `IntoBytes`: + () + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + AtomicU16 + and $N others +note: required by a bound in `Wrap::<&'a mut Src, &'a mut Dst>::transmute_mut` + --> src/util/macro_util.rs + | + | pub fn transmute_mut(self) -> &'a mut Dst + | ------------- required by a bound in this associated function +... + | Dst: FromBytes + IntoBytes, + | ^^^^^^^^^ required by this bound in `Wrap::<&mut Src, &mut Dst>::transmute_mut` + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/transmute-mut-dst-unsized.rs b/vendor/zerocopy/tests/ui-stable/transmute-mut-dst-unsized.rs new file mode 100644 index 00000000..693ccda5 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-mut-dst-unsized.rs @@ -0,0 +1,17 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +extern crate zerocopy; + +use zerocopy::transmute_mut; + +fn main() {} + +// `transmute_mut!` does not support transmuting into an unsized destination +// type. +const DST_UNSIZED: &mut [u8] = transmute_mut!(&mut [0u8; 1]); diff --git a/vendor/zerocopy/tests/ui-stable/transmute-mut-dst-unsized.stderr b/vendor/zerocopy/tests/ui-stable/transmute-mut-dst-unsized.stderr new file mode 100644 index 00000000..5c824711 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-mut-dst-unsized.stderr @@ -0,0 +1,16 @@ +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> tests/ui-stable/transmute-mut-dst-unsized.rs:17:32 + | + 17 | const DST_UNSIZED: &mut [u8] = transmute_mut!(&mut [0u8; 1]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time + | + = help: the trait `Sized` is not implemented for `[u8]` +note: required by a bound in `Wrap::<&'a mut Src, &'a mut Dst>::transmute_mut` + --> src/util/macro_util.rs + | + | impl<'a, Src, Dst> Wrap<&'a mut Src, &'a mut Dst> { + | ^^^ required by this bound in `Wrap::<&mut Src, &mut Dst>::transmute_mut` +... + | pub fn transmute_mut(self) -> &'a mut Dst + | ------------- required by a bound in this associated function + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/transmute-mut-illegal-lifetime.rs b/vendor/zerocopy/tests/ui-stable/transmute-mut-illegal-lifetime.rs new file mode 100644 index 00000000..c31765e4 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-mut-illegal-lifetime.rs @@ -0,0 +1,15 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +fn main() {} + +fn increase_lifetime() { + let mut x = 0u64; + // It is illegal to increase the lifetime scope. + let _: &'static mut u64 = zerocopy::transmute_mut!(&mut x); +} diff --git a/vendor/zerocopy/tests/ui-stable/transmute-mut-illegal-lifetime.stderr b/vendor/zerocopy/tests/ui-stable/transmute-mut-illegal-lifetime.stderr new file mode 100644 index 00000000..7f128138 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-mut-illegal-lifetime.stderr @@ -0,0 +1,12 @@ +error[E0597]: `x` does not live long enough + --> tests/ui-stable/transmute-mut-illegal-lifetime.rs:14:56 + | +12 | let mut x = 0u64; + | ----- binding `x` declared here +13 | // It is illegal to increase the lifetime scope. 
+14 | let _: &'static mut u64 = zerocopy::transmute_mut!(&mut x); + | ---------------- ^^^^^^ borrowed value does not live long enough + | | + | type annotation requires that `x` is borrowed for `'static` +15 | } + | - `x` dropped here while still borrowed diff --git a/vendor/zerocopy/tests/ui-stable/transmute-mut-src-dst-not-references.rs b/vendor/zerocopy/tests/ui-stable/transmute-mut-src-dst-not-references.rs new file mode 100644 index 00000000..98cc5208 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-mut-src-dst-not-references.rs @@ -0,0 +1,17 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute_mut; + +fn main() {} + +// `transmute_mut!` does not support transmuting between non-reference source +// and destination types. +const SRC_DST_NOT_REFERENCES: &mut usize = transmute_mut!(0usize); diff --git a/vendor/zerocopy/tests/ui-stable/transmute-mut-src-dst-not-references.stderr b/vendor/zerocopy/tests/ui-stable/transmute-mut-src-dst-not-references.stderr new file mode 100644 index 00000000..c0d9e0f0 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-mut-src-dst-not-references.stderr @@ -0,0 +1,15 @@ +error[E0308]: mismatched types + --> tests/ui-stable/transmute-mut-src-dst-not-references.rs:17:59 + | +17 | const SRC_DST_NOT_REFERENCES: &mut usize = transmute_mut!(0usize); + | ---------------^^^^^^- + | | | + | | expected `&mut _`, found `usize` + | expected due to this + | + = note: expected mutable reference `&mut _` + found type `usize` +help: consider mutably borrowing here + | +17 | const SRC_DST_NOT_REFERENCES: &mut usize = transmute_mut!(&mut 0usize); + | ++++ diff --git a/vendor/zerocopy/tests/ui-stable/transmute-mut-src-dst-unsized.stderr b/vendor/zerocopy/tests/ui-stable/transmute-mut-src-dst-unsized.stderr new file mode 100644 index 00000000..0c0e1009 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-mut-src-dst-unsized.stderr @@ -0,0 +1,183 @@ +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> tests/ui-stable/transmute-mut-src-dst-unsized.rs:17:36 + | +17 | const SRC_DST_UNSIZED: &mut [u8] = transmute_mut!(&mut [0u8][..]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | | + | doesn't have a size known at compile-time + | required by a bound introduced by this call + | + = help: the trait `Sized` is not implemented for `[u8]` +note: required by a bound in `AssertSrcIsSized` + --> tests/ui-stable/transmute-mut-src-dst-unsized.rs:17:36 + | +17 | const SRC_DST_UNSIZED: &mut [u8] = transmute_mut!(&mut [0u8][..]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `AssertSrcIsSized` + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> tests/ui-stable/transmute-mut-src-dst-unsized.rs:17:36 + | +17 | const SRC_DST_UNSIZED: &mut [u8] = transmute_mut!(&mut [0u8][..]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time + | + = help: the trait `Sized` is not implemented for `[u8]` +note: required by a bound in `AssertSrcIsSized` + --> 
tests/ui-stable/transmute-mut-src-dst-unsized.rs:17:36 + | +17 | const SRC_DST_UNSIZED: &mut [u8] = transmute_mut!(&mut [0u8][..]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `AssertSrcIsSized` + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> tests/ui-stable/transmute-mut-src-dst-unsized.rs:17:36 + | +17 | const SRC_DST_UNSIZED: &mut [u8] = transmute_mut!(&mut [0u8][..]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | | + | doesn't have a size known at compile-time + | required by a bound introduced by this call + | + = help: the trait `Sized` is not implemented for `[u8]` +note: required by a bound in `AssertDstIsSized` + --> tests/ui-stable/transmute-mut-src-dst-unsized.rs:17:36 + | +17 | const SRC_DST_UNSIZED: &mut [u8] = transmute_mut!(&mut [0u8][..]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `AssertDstIsSized` + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> tests/ui-stable/transmute-mut-src-dst-unsized.rs:17:36 + | +17 | const SRC_DST_UNSIZED: &mut [u8] = transmute_mut!(&mut [0u8][..]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time + | + = help: the trait `Sized` is not implemented for `[u8]` + = note: all local variables must have a statically known size + = help: unsized locals are gated as an unstable feature + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> tests/ui-stable/transmute-mut-src-dst-unsized.rs:17:36 + | +17 | const SRC_DST_UNSIZED: &mut [u8] = transmute_mut!(&mut [0u8][..]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time + | + = help: the trait `Sized` is not implemented for `[u8]` +note: required by a bound in `AlignOf::<T>::into_t` + --> src/util/macro_util.rs + | + | impl<T> AlignOf<T> { + | ^ required by this bound in `AlignOf::<T>::into_t` +... 
+ | pub fn into_t(self) -> T { + | ------ required by a bound in this associated function + = note: this error originates in the macro `$crate::assert_align_gt_eq` which comes from the expansion of the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> tests/ui-stable/transmute-mut-src-dst-unsized.rs:17:36 + | +17 | const SRC_DST_UNSIZED: &mut [u8] = transmute_mut!(&mut [0u8][..]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time + | + = help: the trait `Sized` is not implemented for `[u8]` + = note: the left-hand-side of an assignment must have a statically known size + = note: this error originates in the macro `$crate::assert_align_gt_eq` which comes from the expansion of the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> tests/ui-stable/transmute-mut-src-dst-unsized.rs:17:36 + | +17 | const SRC_DST_UNSIZED: &mut [u8] = transmute_mut!(&mut [0u8][..]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time + | + = help: the trait `Sized` is not implemented for `[u8]` +note: required by an implicit `Sized` bound in `AlignOf` + --> src/util/macro_util.rs + | + | pub struct AlignOf<T> { + | ^ required by the implicit `Sized` requirement on this type parameter in `AlignOf` + = note: this error originates in the macro `$crate::assert_align_gt_eq` which comes from the expansion of the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> tests/ui-stable/transmute-mut-src-dst-unsized.rs:17:36 + | +17 | const SRC_DST_UNSIZED: &mut [u8] = transmute_mut!(&mut [0u8][..]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time + | + = help: the trait `Sized` is not implemented for `[u8]` +note: required by an implicit `Sized` bound in `MaxAlignsOf` + --> src/util/macro_util.rs + | + | pub union MaxAlignsOf<T, U> { + | ^ required by the implicit `Sized` requirement on this type parameter in `MaxAlignsOf` + = note: this error originates in the macro `$crate::assert_align_gt_eq` which comes from the expansion of the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> tests/ui-stable/transmute-mut-src-dst-unsized.rs:17:36 + | +17 | const SRC_DST_UNSIZED: &mut [u8] = transmute_mut!(&mut [0u8][..]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time + | + = help: the trait `Sized` is not implemented for `[u8]` +note: required by an implicit `Sized` bound in `AlignOf` + --> src/util/macro_util.rs + | + | pub struct AlignOf<T> { + | ^ required by the implicit `Sized` requirement on this type parameter in `AlignOf` + = note: this error originates in the macro `$crate::assert_align_gt_eq` which comes from the expansion of the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> tests/ui-stable/transmute-mut-src-dst-unsized.rs:17:36 + | +17 | const SRC_DST_UNSIZED: &mut [u8] = transmute_mut!(&mut [0u8][..]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time + | 
+ = help: the trait `Sized` is not implemented for `[u8]` + = note: all local variables must have a statically known size + = help: unsized locals are gated as an unstable feature + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> tests/ui-stable/transmute-mut-src-dst-unsized.rs:17:36 + | +17 | const SRC_DST_UNSIZED: &mut [u8] = transmute_mut!(&mut [0u8][..]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time + | + = help: the trait `Sized` is not implemented for `[u8]` +note: required by an implicit `Sized` bound in `std::intrinsics::transmute` + --> $RUST/core/src/intrinsics/mod.rs + | + | pub const unsafe fn transmute<Src, Dst>(src: Src) -> Dst; + | ^^^ required by the implicit `Sized` requirement on this type parameter in `transmute` + = note: this error originates in the macro `$crate::assert_size_eq` which comes from the expansion of the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> tests/ui-stable/transmute-mut-src-dst-unsized.rs:17:36 + | +17 | const SRC_DST_UNSIZED: &mut [u8] = transmute_mut!(&mut [0u8][..]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | | + | doesn't have a size known at compile-time + | required by a bound introduced by this call + | + = help: the trait `Sized` is not implemented for `[u8]` +note: required by an implicit `Sized` bound in `transmute_mut` + --> src/util/macro_util.rs + | + | pub unsafe fn transmute_mut<'dst, 'src: 'dst, Src: 'src, Dst: 'dst>( + | ^^^ required by the implicit `Sized` requirement on this type parameter in `transmute_mut` + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> tests/ui-stable/transmute-mut-src-dst-unsized.rs:17:36 + | +17 | const SRC_DST_UNSIZED: &mut [u8] = transmute_mut!(&mut [0u8][..]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time + | + = help: the trait `Sized` is not implemented for `[u8]` +note: required by an implicit `Sized` bound in `transmute_mut` + --> src/util/macro_util.rs + | + | pub unsafe fn transmute_mut<'dst, 'src: 'dst, Src: 'src, Dst: 'dst>( + | ^^^ required by the implicit `Sized` requirement on this type parameter in `transmute_mut` + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/transmute-mut-src-immutable.rs b/vendor/zerocopy/tests/ui-stable/transmute-mut-src-immutable.rs new file mode 100644 index 00000000..08088d0d --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-mut-src-immutable.rs @@ -0,0 +1,18 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute_mut; + +fn main() {} + +fn ref_src_immutable() { + // `transmute_mut!` requires that its source type be a mutable reference. 
+ let _: &mut u8 = transmute_mut!(&0u8); +} diff --git a/vendor/zerocopy/tests/ui-stable/transmute-mut-src-immutable.stderr b/vendor/zerocopy/tests/ui-stable/transmute-mut-src-immutable.stderr new file mode 100644 index 00000000..0115c791 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-mut-src-immutable.stderr @@ -0,0 +1,11 @@ +error[E0308]: mismatched types + --> tests/ui-stable/transmute-mut-src-immutable.rs:17:37 + | +17 | let _: &mut u8 = transmute_mut!(&0u8); + | ---------------^^^^- + | | | + | | types differ in mutability + | expected due to this + | + = note: expected mutable reference `&mut _` + found reference `&u8` diff --git a/vendor/zerocopy/tests/ui-stable/transmute-mut-src-not-a-reference.rs b/vendor/zerocopy/tests/ui-stable/transmute-mut-src-not-a-reference.rs new file mode 100644 index 00000000..bf8bc325 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-mut-src-not-a-reference.rs @@ -0,0 +1,17 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute_mut; + +fn main() {} + +// `transmute_mut!` does not support transmuting from a non-reference source +// type. +const SRC_NOT_A_REFERENCE: &mut u8 = transmute_mut!(0usize); diff --git a/vendor/zerocopy/tests/ui-stable/transmute-mut-src-not-a-reference.stderr b/vendor/zerocopy/tests/ui-stable/transmute-mut-src-not-a-reference.stderr new file mode 100644 index 00000000..8c1d9b47 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-mut-src-not-a-reference.stderr @@ -0,0 +1,15 @@ +error[E0308]: mismatched types + --> tests/ui-stable/transmute-mut-src-not-a-reference.rs:17:53 + | +17 | const SRC_NOT_A_REFERENCE: &mut u8 = transmute_mut!(0usize); + | ---------------^^^^^^- + | | | + | | expected `&mut _`, found `usize` + | expected due to this + | + = note: expected mutable reference `&mut _` + found type `usize` +help: consider mutably borrowing here + | +17 | const SRC_NOT_A_REFERENCE: &mut u8 = transmute_mut!(&mut 0usize); + | ++++ diff --git a/vendor/zerocopy/tests/ui-stable/transmute-mut-src-not-frombytes.rs b/vendor/zerocopy/tests/ui-stable/transmute-mut-src-not-frombytes.rs new file mode 100644 index 00000000..0fc6f984 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-mut-src-not-frombytes.rs @@ -0,0 +1,24 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +extern crate zerocopy; + +use zerocopy::transmute_mut; + +fn main() {} + +#[derive(zerocopy::IntoBytes, zerocopy::Immutable)] +#[repr(C)] +struct Src; + +#[derive(zerocopy::FromBytes, zerocopy::IntoBytes, zerocopy::Immutable)] +#[repr(C)] +struct Dst; + +// `transmute_mut` requires that the source type implements `FromBytes` +const SRC_NOT_FROM_BYTES: &mut Dst = transmute_mut!(&mut Src); diff --git a/vendor/zerocopy/tests/ui-stable/transmute-mut-src-not-frombytes.stderr b/vendor/zerocopy/tests/ui-stable/transmute-mut-src-not-frombytes.stderr new file mode 100644 index 00000000..5d7e7fb4 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-mut-src-not-frombytes.stderr @@ -0,0 +1,31 @@ +error[E0277]: the trait bound `Src: FromBytes` is not satisfied + --> tests/ui-stable/transmute-mut-src-not-frombytes.rs:24:38 + | + 24 | const SRC_NOT_FROM_BYTES: &mut Dst = transmute_mut!(&mut Src); + | ^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `FromBytes` is not implemented for `Src` + --> tests/ui-stable/transmute-mut-src-not-frombytes.rs:17:1 + | + 17 | struct Src; + | ^^^^^^^^^^ + = note: Consider adding `#[derive(FromBytes)]` to `Src` + = help: the following other types implement trait `FromBytes`: + () + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + AtomicU16 + AtomicU32 + and $N others +note: required by a bound in `Wrap::<&'a mut Src, &'a mut Dst>::transmute_mut` + --> src/util/macro_util.rs + | + | pub fn transmute_mut(self) -> &'a mut Dst + | ------------- required by a bound in this associated function + | where + | Src: FromBytes + IntoBytes, + | ^^^^^^^^^ required by this bound in `Wrap::<&mut Src, &mut Dst>::transmute_mut` + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/transmute-mut-src-not-intobytes.rs b/vendor/zerocopy/tests/ui-stable/transmute-mut-src-not-intobytes.rs new file mode 100644 index 00000000..518402df --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-mut-src-not-intobytes.rs @@ -0,0 +1,24 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +extern crate zerocopy; + +use zerocopy::transmute_mut; + +fn main() {} + +#[derive(zerocopy::FromBytes, zerocopy::Immutable)] +#[repr(C)] +struct Src; + +#[derive(zerocopy::FromBytes, zerocopy::IntoBytes, zerocopy::Immutable)] +#[repr(C)] +struct Dst; + +// `transmute_mut` requires that the source type implements `IntoBytes` +const SRC_NOT_AS_BYTES: &mut Dst = transmute_mut!(&mut Src); diff --git a/vendor/zerocopy/tests/ui-stable/transmute-mut-src-not-intobytes.stderr b/vendor/zerocopy/tests/ui-stable/transmute-mut-src-not-intobytes.stderr new file mode 100644 index 00000000..ac96b53c --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-mut-src-not-intobytes.stderr @@ -0,0 +1,31 @@ +error[E0277]: the trait bound `Src: IntoBytes` is not satisfied + --> tests/ui-stable/transmute-mut-src-not-intobytes.rs:24:36 + | + 24 | const SRC_NOT_AS_BYTES: &mut Dst = transmute_mut!(&mut Src); + | ^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `IntoBytes` is not implemented for `Src` + --> tests/ui-stable/transmute-mut-src-not-intobytes.rs:17:1 + | + 17 | struct Src; + | ^^^^^^^^^^ + = note: Consider adding `#[derive(IntoBytes)]` to `Src` + = help: the following other types implement trait `IntoBytes`: + () + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + AtomicU16 + and $N others +note: required by a bound in `Wrap::<&'a mut Src, &'a mut Dst>::transmute_mut` + --> src/util/macro_util.rs + | + | pub fn transmute_mut(self) -> &'a mut Dst + | ------------- required by a bound in this associated function + | where + | Src: FromBytes + IntoBytes, + | ^^^^^^^^^ required by this bound in `Wrap::<&mut Src, &mut Dst>::transmute_mut` + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/transmute-mut-src-unsized.rs b/vendor/zerocopy/tests/ui-stable/transmute-mut-src-unsized.rs new file mode 100644 index 00000000..473070ae --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-mut-src-unsized.rs @@ -0,0 +1,17 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute_mut; + +fn main() {} + +// `transmute_mut!` does not support transmuting from an unsized source type to +// a sized destination type. 
+const SRC_UNSIZED: &mut [u8; 1] = transmute_mut!(&mut [0u8][..]); diff --git a/vendor/zerocopy/tests/ui-stable/transmute-mut-src-unsized.stderr b/vendor/zerocopy/tests/ui-stable/transmute-mut-src-unsized.stderr new file mode 100644 index 00000000..4ef157ea --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-mut-src-unsized.stderr @@ -0,0 +1,8 @@ +error[E0271]: type mismatch resolving `<[u8; 1] as KnownLayout>::PointerMetadata == usize` + --> tests/ui-stable/transmute-mut-src-unsized.rs:17:35 + | +17 | const SRC_UNSIZED: &mut [u8; 1] = transmute_mut!(&mut [0u8][..]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected `usize`, found `()` + | + = note: required for `Wrap<&mut [u8], &mut [u8; 1]>` to implement `TransmuteMutDst<'_>` + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/transmute-ptr-to-usize.rs b/vendor/zerocopy/tests/ui-stable/transmute-ptr-to-usize.rs new file mode 100644 index 00000000..5af88593 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-ptr-to-usize.rs @@ -0,0 +1,20 @@ +// Copyright 2022 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute; + +fn main() {} + +// It is unclear whether we can or should support this transmutation, especially +// in a const context. This test ensures that even if such a transmutation +// becomes valid due to the requisite implementations of `FromBytes` being +// added, that we re-examine whether it should specifically be valid in a const +// context. 
+const POINTER_VALUE: usize = transmute!(&0usize as *const usize); diff --git a/vendor/zerocopy/tests/ui-stable/transmute-ptr-to-usize.stderr b/vendor/zerocopy/tests/ui-stable/transmute-ptr-to-usize.stderr new file mode 100644 index 00000000..a3b9f995 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-ptr-to-usize.stderr @@ -0,0 +1,20 @@ +error[E0277]: the trait bound `*const usize: IntoBytes` is not satisfied + --> tests/ui-stable/transmute-ptr-to-usize.rs:20:30 + | +20 | const POINTER_VALUE: usize = transmute!(&0usize as *const usize); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | | + | the trait `IntoBytes` is not implemented for `*const usize` + | required by a bound introduced by this call + | + = note: Consider adding `#[derive(IntoBytes)]` to `*const usize` + = help: the trait `IntoBytes` is implemented for `usize` +note: required by a bound in `POINTER_VALUE::transmute` + --> tests/ui-stable/transmute-ptr-to-usize.rs:20:30 + | +20 | const POINTER_VALUE: usize = transmute!(&0usize as *const usize); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | | + | required by a bound in this function + | required by this bound in `transmute` + = note: this error originates in the macro `transmute` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/transmute-ref-dst-mutable.rs b/vendor/zerocopy/tests/ui-stable/transmute-ref-dst-mutable.rs new file mode 100644 index 00000000..fa0e6e4c --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-ref-dst-mutable.rs @@ -0,0 +1,19 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute_ref; + +fn main() {} + +fn ref_dst_mutable() { + // `transmute_ref!` requires that its destination type be an immutable + // reference. 
+ let _: &mut u8 = transmute_ref!(&0u8); +} diff --git a/vendor/zerocopy/tests/ui-stable/transmute-ref-dst-mutable.stderr b/vendor/zerocopy/tests/ui-stable/transmute-ref-dst-mutable.stderr new file mode 100644 index 00000000..c70f6ea6 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-ref-dst-mutable.stderr @@ -0,0 +1,29 @@ +error[E0308]: mismatched types + --> tests/ui-stable/transmute-ref-dst-mutable.rs:18:22 + | +18 | let _: &mut u8 = transmute_ref!(&0u8); + | ^^^^^^^^^^^^^^^^^^^^ types differ in mutability + | + = note: expected mutable reference `&mut u8` + found reference `&_` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0308]: mismatched types + --> tests/ui-stable/transmute-ref-dst-mutable.rs:18:22 + | +18 | let _: &mut u8 = transmute_ref!(&0u8); + | ^^^^^^^^^^^^^^^^^^^^ types differ in mutability + | + = note: expected mutable reference `&mut u8` + found reference `&_` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0308]: mismatched types + --> tests/ui-stable/transmute-ref-dst-mutable.rs:18:22 + | +18 | let _: &mut u8 = transmute_ref!(&0u8); + | ^^^^^^^^^^^^^^^^^^^^ types differ in mutability + | + = note: expected mutable reference `&mut u8` + found reference `&_` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/transmute-ref-dst-not-a-reference.rs b/vendor/zerocopy/tests/ui-stable/transmute-ref-dst-not-a-reference.rs new file mode 100644 index 00000000..de55f9ac --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-ref-dst-not-a-reference.rs @@ -0,0 +1,17 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute_ref; + +fn main() {} + +// `transmute_ref!` does not support transmuting into a non-reference +// destination type. 
+const DST_NOT_A_REFERENCE: usize = transmute_ref!(&0u8); diff --git a/vendor/zerocopy/tests/ui-stable/transmute-ref-dst-not-a-reference.stderr b/vendor/zerocopy/tests/ui-stable/transmute-ref-dst-not-a-reference.stderr new file mode 100644 index 00000000..ab3f90c2 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-ref-dst-not-a-reference.stderr @@ -0,0 +1,29 @@ +error[E0308]: mismatched types + --> tests/ui-stable/transmute-ref-dst-not-a-reference.rs:17:36 + | +17 | const DST_NOT_A_REFERENCE: usize = transmute_ref!(&0u8); + | ^^^^^^^^^^^^^^^^^^^^ expected `usize`, found `&_` + | + = note: expected type `usize` + found reference `&_` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0308]: mismatched types + --> tests/ui-stable/transmute-ref-dst-not-a-reference.rs:17:36 + | +17 | const DST_NOT_A_REFERENCE: usize = transmute_ref!(&0u8); + | ^^^^^^^^^^^^^^^^^^^^ expected `usize`, found `&_` + | + = note: expected type `usize` + found reference `&_` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0308]: mismatched types + --> tests/ui-stable/transmute-ref-dst-not-a-reference.rs:17:36 + | +17 | const DST_NOT_A_REFERENCE: usize = transmute_ref!(&0u8); + | ^^^^^^^^^^^^^^^^^^^^ expected `usize`, found `&_` + | + = note: expected type `usize` + found reference `&_` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/transmute-ref-dst-not-frombytes.rs b/vendor/zerocopy/tests/ui-stable/transmute-ref-dst-not-frombytes.rs new file mode 100644 index 00000000..f7619d34 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-ref-dst-not-frombytes.rs @@ -0,0 +1,23 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::transmute_ref; + +fn main() {} + +#[derive(zerocopy::Immutable)] +#[repr(transparent)] +struct Dst(AU16); + +// `transmute_ref` requires that the destination type implements `FromBytes` +const DST_NOT_FROM_BYTES: &Dst = transmute_ref!(&AU16(0)); diff --git a/vendor/zerocopy/tests/ui-stable/transmute-ref-dst-not-frombytes.stderr b/vendor/zerocopy/tests/ui-stable/transmute-ref-dst-not-frombytes.stderr new file mode 100644 index 00000000..4e775bf3 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-ref-dst-not-frombytes.stderr @@ -0,0 +1,31 @@ +error[E0277]: the trait bound `Dst: zerocopy::FromBytes` is not satisfied + --> tests/ui-stable/transmute-ref-dst-not-frombytes.rs:23:34 + | +23 | const DST_NOT_FROM_BYTES: &Dst = transmute_ref!(&AU16(0)); + | ^^^^^^^^^^^^^^^^^^^^^^^^ + | | + | unsatisfied trait bound + | required by a bound introduced by this call + | +help: the trait `zerocopy::FromBytes` is not implemented for `Dst` + --> tests/ui-stable/transmute-ref-dst-not-frombytes.rs:20:1 + | +20 | struct Dst(AU16); + | ^^^^^^^^^^ + = note: Consider adding `#[derive(FromBytes)]` to `Dst` + = help: the following other types implement trait `zerocopy::FromBytes`: + () + AU16 + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + AtomicU16 + and $N others +note: required by a bound in `AssertDstIsFromBytes` + --> tests/ui-stable/transmute-ref-dst-not-frombytes.rs:23:34 + | +23 | const DST_NOT_FROM_BYTES: &Dst = transmute_ref!(&AU16(0)); + | ^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `AssertDstIsFromBytes` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/transmute-ref-dst-not-nocell.rs b/vendor/zerocopy/tests/ui-stable/transmute-ref-dst-not-nocell.rs new file mode 100644 index 00000000..f1f63bfc --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-ref-dst-not-nocell.rs @@ -0,0 +1,23 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::transmute_ref; + +fn main() {} + +#[derive(zerocopy::FromBytes)] +#[repr(transparent)] +struct Dst(AU16); + +// `transmute_ref` requires that the destination type implements `Immutable` +const DST_NOT_IMMUTABLE: &Dst = transmute_ref!(&AU16(0)); diff --git a/vendor/zerocopy/tests/ui-stable/transmute-ref-dst-not-nocell.stderr b/vendor/zerocopy/tests/ui-stable/transmute-ref-dst-not-nocell.stderr new file mode 100644 index 00000000..3f7edc29 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-ref-dst-not-nocell.stderr @@ -0,0 +1,31 @@ +error[E0277]: the trait bound `Dst: zerocopy::Immutable` is not satisfied + --> tests/ui-stable/transmute-ref-dst-not-nocell.rs:23:33 + | +23 | const DST_NOT_IMMUTABLE: &Dst = transmute_ref!(&AU16(0)); + | ^^^^^^^^^^^^^^^^^^^^^^^^ + | | + | unsatisfied trait bound + | required by a bound introduced by this call + | +help: the trait `zerocopy::Immutable` is not implemented for `Dst` + --> tests/ui-stable/transmute-ref-dst-not-nocell.rs:20:1 + | +20 | struct Dst(AU16); + | ^^^^^^^^^^ + = note: Consider adding `#[derive(Immutable)]` to `Dst` + = help: the following other types implement trait `zerocopy::Immutable`: + &T + &mut T + () + *const T + *mut T + AU16 + Box<T> + F32<O> + and $N others +note: required by a bound in `AssertDstIsImmutable` + --> tests/ui-stable/transmute-ref-dst-not-nocell.rs:23:33 + | +23 | const DST_NOT_IMMUTABLE: &Dst = transmute_ref!(&AU16(0)); + | ^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `AssertDstIsImmutable` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/transmute-ref-dst-unsized.rs b/vendor/zerocopy/tests/ui-stable/transmute-ref-dst-unsized.rs new file mode 100644 index 00000000..625f1fac --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-ref-dst-unsized.rs @@ -0,0 +1,17 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute_ref; + +fn main() {} + +// `transmute_ref!` does not support transmuting into an unsized destination +// type. +const DST_UNSIZED: &[u8] = transmute_ref!(&[0u8; 1]); diff --git a/vendor/zerocopy/tests/ui-stable/transmute-ref-dst-unsized.stderr b/vendor/zerocopy/tests/ui-stable/transmute-ref-dst-unsized.stderr new file mode 100644 index 00000000..2da70424 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-ref-dst-unsized.stderr @@ -0,0 +1,16 @@ +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> tests/ui-stable/transmute-ref-dst-unsized.rs:17:28 + | + 17 | const DST_UNSIZED: &[u8] = transmute_ref!(&[0u8; 1]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time + | + = help: the trait `Sized` is not implemented for `[u8]` +note: required by a bound in `Wrap::<&'a Src, &'a Dst>::transmute_ref` + --> src/util/macro_util.rs + | + | impl<'a, Src, Dst> Wrap<&'a Src, &'a Dst> { + | ^^^ required by this bound in `Wrap::<&Src, &Dst>::transmute_ref` +... 
+ | pub const unsafe fn transmute_ref(self) -> &'a Dst { + | ------------- required by a bound in this associated function + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/transmute-ref-illegal-lifetime.rs b/vendor/zerocopy/tests/ui-stable/transmute-ref-illegal-lifetime.rs new file mode 100644 index 00000000..8dd191e6 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-ref-illegal-lifetime.rs @@ -0,0 +1,15 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +fn main() {} + +fn increase_lifetime() { + let x = 0u64; + // It is illegal to increase the lifetime scope. + let _: &'static u64 = zerocopy::transmute_ref!(&x); +} diff --git a/vendor/zerocopy/tests/ui-stable/transmute-ref-illegal-lifetime.stderr b/vendor/zerocopy/tests/ui-stable/transmute-ref-illegal-lifetime.stderr new file mode 100644 index 00000000..1ef34feb --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-ref-illegal-lifetime.stderr @@ -0,0 +1,12 @@ +error[E0597]: `x` does not live long enough + --> tests/ui-stable/transmute-ref-illegal-lifetime.rs:14:52 + | +12 | let x = 0u64; + | - binding `x` declared here +13 | // It is illegal to increase the lifetime scope. +14 | let _: &'static u64 = zerocopy::transmute_ref!(&x); + | ------------ ^^ borrowed value does not live long enough + | | + | type annotation requires that `x` is borrowed for `'static` +15 | } + | - `x` dropped here while still borrowed diff --git a/vendor/zerocopy/tests/ui-stable/transmute-ref-src-dst-not-references.rs b/vendor/zerocopy/tests/ui-stable/transmute-ref-src-dst-not-references.rs new file mode 100644 index 00000000..114e917b --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-ref-src-dst-not-references.rs @@ -0,0 +1,17 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute_ref; + +fn main() {} + +// `transmute_ref!` does not support transmuting between non-reference source +// and destination types. 
+const SRC_DST_NOT_REFERENCES: usize = transmute_ref!(0usize); diff --git a/vendor/zerocopy/tests/ui-stable/transmute-ref-src-dst-not-references.stderr b/vendor/zerocopy/tests/ui-stable/transmute-ref-src-dst-not-references.stderr new file mode 100644 index 00000000..8a80e991 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-ref-src-dst-not-references.stderr @@ -0,0 +1,45 @@ +error[E0308]: mismatched types + --> tests/ui-stable/transmute-ref-src-dst-not-references.rs:17:54 + | +17 | const SRC_DST_NOT_REFERENCES: usize = transmute_ref!(0usize); + | ---------------^^^^^^- + | | | + | | expected `&_`, found `usize` + | expected due to this + | + = note: expected reference `&_` + found type `usize` +help: consider borrowing here + | +17 | const SRC_DST_NOT_REFERENCES: usize = transmute_ref!(&0usize); + | + + +error[E0308]: mismatched types + --> tests/ui-stable/transmute-ref-src-dst-not-references.rs:17:39 + | +17 | const SRC_DST_NOT_REFERENCES: usize = transmute_ref!(0usize); + | ^^^^^^^^^^^^^^^^^^^^^^ expected `usize`, found `&_` + | + = note: expected type `usize` + found reference `&_` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0308]: mismatched types + --> tests/ui-stable/transmute-ref-src-dst-not-references.rs:17:39 + | +17 | const SRC_DST_NOT_REFERENCES: usize = transmute_ref!(0usize); + | ^^^^^^^^^^^^^^^^^^^^^^ expected `usize`, found `&_` + | + = note: expected type `usize` + found reference `&_` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0308]: mismatched types + --> tests/ui-stable/transmute-ref-src-dst-not-references.rs:17:39 + | +17 | const SRC_DST_NOT_REFERENCES: usize = transmute_ref!(0usize); + | ^^^^^^^^^^^^^^^^^^^^^^ expected `usize`, found `&_` + | + = note: expected type `usize` + found reference `&_` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/transmute-ref-src-dst-unsized.stderr b/vendor/zerocopy/tests/ui-stable/transmute-ref-src-dst-unsized.stderr new file mode 100644 index 00000000..59172e43 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-ref-src-dst-unsized.stderr @@ -0,0 +1,183 @@ +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> tests/ui-stable/transmute-ref-src-dst-unsized.rs:17:32 + | +17 | const SRC_DST_UNSIZED: &[u8] = transmute_ref!(&[0u8][..]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ + | | + | doesn't have a size known at compile-time + | required by a bound introduced by this call + | + = help: the trait `Sized` is not implemented for `[u8]` +note: required by a bound in `AssertSrcIsSized` + --> tests/ui-stable/transmute-ref-src-dst-unsized.rs:17:32 + | +17 | const SRC_DST_UNSIZED: &[u8] = transmute_ref!(&[0u8][..]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `AssertSrcIsSized` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> tests/ui-stable/transmute-ref-src-dst-unsized.rs:17:32 + | +17 | const SRC_DST_UNSIZED: &[u8] = transmute_ref!(&[0u8][..]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time + | + = help: the trait `Sized` is not implemented for `[u8]` +note: required by a bound in 
`AssertSrcIsSized` + --> tests/ui-stable/transmute-ref-src-dst-unsized.rs:17:32 + | +17 | const SRC_DST_UNSIZED: &[u8] = transmute_ref!(&[0u8][..]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `AssertSrcIsSized` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> tests/ui-stable/transmute-ref-src-dst-unsized.rs:17:32 + | +17 | const SRC_DST_UNSIZED: &[u8] = transmute_ref!(&[0u8][..]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ + | | + | doesn't have a size known at compile-time + | required by a bound introduced by this call + | + = help: the trait `Sized` is not implemented for `[u8]` +note: required by a bound in `AssertDstIsSized` + --> tests/ui-stable/transmute-ref-src-dst-unsized.rs:17:32 + | +17 | const SRC_DST_UNSIZED: &[u8] = transmute_ref!(&[0u8][..]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `AssertDstIsSized` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> tests/ui-stable/transmute-ref-src-dst-unsized.rs:17:32 + | +17 | const SRC_DST_UNSIZED: &[u8] = transmute_ref!(&[0u8][..]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time + | + = help: the trait `Sized` is not implemented for `[u8]` + = note: all local variables must have a statically known size + = help: unsized locals are gated as an unstable feature + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> tests/ui-stable/transmute-ref-src-dst-unsized.rs:17:32 + | +17 | const SRC_DST_UNSIZED: &[u8] = transmute_ref!(&[0u8][..]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time + | + = help: the trait `Sized` is not implemented for `[u8]` +note: required by a bound in `AlignOf::<T>::into_t` + --> src/util/macro_util.rs + | + | impl<T> AlignOf<T> { + | ^ required by this bound in `AlignOf::<T>::into_t` +... 
+ | pub fn into_t(self) -> T { + | ------ required by a bound in this associated function + = note: this error originates in the macro `$crate::assert_align_gt_eq` which comes from the expansion of the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> tests/ui-stable/transmute-ref-src-dst-unsized.rs:17:32 + | +17 | const SRC_DST_UNSIZED: &[u8] = transmute_ref!(&[0u8][..]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time + | + = help: the trait `Sized` is not implemented for `[u8]` + = note: the left-hand-side of an assignment must have a statically known size + = note: this error originates in the macro `$crate::assert_align_gt_eq` which comes from the expansion of the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> tests/ui-stable/transmute-ref-src-dst-unsized.rs:17:32 + | +17 | const SRC_DST_UNSIZED: &[u8] = transmute_ref!(&[0u8][..]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time + | + = help: the trait `Sized` is not implemented for `[u8]` +note: required by an implicit `Sized` bound in `AlignOf` + --> src/util/macro_util.rs + | + | pub struct AlignOf<T> { + | ^ required by the implicit `Sized` requirement on this type parameter in `AlignOf` + = note: this error originates in the macro `$crate::assert_align_gt_eq` which comes from the expansion of the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> tests/ui-stable/transmute-ref-src-dst-unsized.rs:17:32 + | +17 | const SRC_DST_UNSIZED: &[u8] = transmute_ref!(&[0u8][..]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time + | + = help: the trait `Sized` is not implemented for `[u8]` +note: required by an implicit `Sized` bound in `MaxAlignsOf` + --> src/util/macro_util.rs + | + | pub union MaxAlignsOf<T, U> { + | ^ required by the implicit `Sized` requirement on this type parameter in `MaxAlignsOf` + = note: this error originates in the macro `$crate::assert_align_gt_eq` which comes from the expansion of the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> tests/ui-stable/transmute-ref-src-dst-unsized.rs:17:32 + | +17 | const SRC_DST_UNSIZED: &[u8] = transmute_ref!(&[0u8][..]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time + | + = help: the trait `Sized` is not implemented for `[u8]` +note: required by an implicit `Sized` bound in `AlignOf` + --> src/util/macro_util.rs + | + | pub struct AlignOf<T> { + | ^ required by the implicit `Sized` requirement on this type parameter in `AlignOf` + = note: this error originates in the macro `$crate::assert_align_gt_eq` which comes from the expansion of the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> tests/ui-stable/transmute-ref-src-dst-unsized.rs:17:32 + | +17 | const SRC_DST_UNSIZED: &[u8] = transmute_ref!(&[0u8][..]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time + | + = help: the trait `Sized` is not implemented for `[u8]` + 
= note: all local variables must have a statically known size + = help: unsized locals are gated as an unstable feature + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> tests/ui-stable/transmute-ref-src-dst-unsized.rs:17:32 + | +17 | const SRC_DST_UNSIZED: &[u8] = transmute_ref!(&[0u8][..]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time + | + = help: the trait `Sized` is not implemented for `[u8]` +note: required by an implicit `Sized` bound in `std::intrinsics::transmute` + --> $RUST/core/src/intrinsics/mod.rs + | + | pub const unsafe fn transmute<Src, Dst>(src: Src) -> Dst; + | ^^^ required by the implicit `Sized` requirement on this type parameter in `transmute` + = note: this error originates in the macro `$crate::assert_size_eq` which comes from the expansion of the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> tests/ui-stable/transmute-ref-src-dst-unsized.rs:17:32 + | +17 | const SRC_DST_UNSIZED: &[u8] = transmute_ref!(&[0u8][..]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ + | | + | doesn't have a size known at compile-time + | required by a bound introduced by this call + | + = help: the trait `Sized` is not implemented for `[u8]` +note: required by an implicit `Sized` bound in `transmute_ref` + --> src/util/macro_util.rs + | + | pub const unsafe fn transmute_ref<'dst, 'src: 'dst, Src: 'src, Dst: 'dst>( + | ^^^ required by the implicit `Sized` requirement on this type parameter in `transmute_ref` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> tests/ui-stable/transmute-ref-src-dst-unsized.rs:17:32 + | +17 | const SRC_DST_UNSIZED: &[u8] = transmute_ref!(&[0u8][..]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time + | + = help: the trait `Sized` is not implemented for `[u8]` +note: required by an implicit `Sized` bound in `transmute_ref` + --> src/util/macro_util.rs + | + | pub const unsafe fn transmute_ref<'dst, 'src: 'dst, Src: 'src, Dst: 'dst>( + | ^^^ required by the implicit `Sized` requirement on this type parameter in `transmute_ref` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/transmute-ref-src-not-a-reference.rs b/vendor/zerocopy/tests/ui-stable/transmute-ref-src-not-a-reference.rs new file mode 100644 index 00000000..90661b3e --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-ref-src-not-a-reference.rs @@ -0,0 +1,17 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute_ref; + +fn main() {} + +// `transmute_ref!` does not support transmuting from a non-reference source +// type. 
+const SRC_NOT_A_REFERENCE: &u8 = transmute_ref!(0usize); diff --git a/vendor/zerocopy/tests/ui-stable/transmute-ref-src-not-a-reference.stderr b/vendor/zerocopy/tests/ui-stable/transmute-ref-src-not-a-reference.stderr new file mode 100644 index 00000000..622c3db9 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-ref-src-not-a-reference.stderr @@ -0,0 +1,15 @@ +error[E0308]: mismatched types + --> tests/ui-stable/transmute-ref-src-not-a-reference.rs:17:49 + | +17 | const SRC_NOT_A_REFERENCE: &u8 = transmute_ref!(0usize); + | ---------------^^^^^^- + | | | + | | expected `&_`, found `usize` + | expected due to this + | + = note: expected reference `&_` + found type `usize` +help: consider borrowing here + | +17 | const SRC_NOT_A_REFERENCE: &u8 = transmute_ref!(&0usize); + | + diff --git a/vendor/zerocopy/tests/ui-stable/transmute-ref-src-not-intobytes.rs b/vendor/zerocopy/tests/ui-stable/transmute-ref-src-not-intobytes.rs new file mode 100644 index 00000000..a5146fa8 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-ref-src-not-intobytes.rs @@ -0,0 +1,23 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::transmute_ref; + +fn main() {} + +#[derive(zerocopy::Immutable)] +#[repr(transparent)] +struct Src(AU16); + +// `transmute_ref` requires that the source type implements `IntoBytes` +const SRC_NOT_AS_BYTES: &AU16 = transmute_ref!(&Src(AU16(0))); diff --git a/vendor/zerocopy/tests/ui-stable/transmute-ref-src-not-intobytes.stderr b/vendor/zerocopy/tests/ui-stable/transmute-ref-src-not-intobytes.stderr new file mode 100644 index 00000000..11556f74 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-ref-src-not-intobytes.stderr @@ -0,0 +1,60 @@ +error[E0277]: the trait bound `Src: zerocopy::IntoBytes` is not satisfied + --> tests/ui-stable/transmute-ref-src-not-intobytes.rs:23:33 + | +23 | const SRC_NOT_AS_BYTES: &AU16 = transmute_ref!(&Src(AU16(0))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | | + | unsatisfied trait bound + | required by a bound introduced by this call + | +help: the trait `zerocopy::IntoBytes` is not implemented for `Src` + --> tests/ui-stable/transmute-ref-src-not-intobytes.rs:20:1 + | +20 | struct Src(AU16); + | ^^^^^^^^^^ + = note: Consider adding `#[derive(IntoBytes)]` to `Src` + = help: the following other types implement trait `zerocopy::IntoBytes`: + () + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + and $N others +note: required by a bound in `AssertSrcIsIntoBytes` + --> tests/ui-stable/transmute-ref-src-not-intobytes.rs:23:33 + | +23 | const SRC_NOT_AS_BYTES: &AU16 = transmute_ref!(&Src(AU16(0))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `AssertSrcIsIntoBytes` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `Src: zerocopy::IntoBytes` is not satisfied + --> tests/ui-stable/transmute-ref-src-not-intobytes.rs:23:33 + | +23 | const SRC_NOT_AS_BYTES: &AU16 = transmute_ref!(&Src(AU16(0))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait 
bound + | +help: the trait `zerocopy::IntoBytes` is not implemented for `Src` + --> tests/ui-stable/transmute-ref-src-not-intobytes.rs:20:1 + | +20 | struct Src(AU16); + | ^^^^^^^^^^ + = note: Consider adding `#[derive(IntoBytes)]` to `Src` + = help: the following other types implement trait `zerocopy::IntoBytes`: + () + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + and $N others +note: required by a bound in `AssertSrcIsIntoBytes` + --> tests/ui-stable/transmute-ref-src-not-intobytes.rs:23:33 + | +23 | const SRC_NOT_AS_BYTES: &AU16 = transmute_ref!(&Src(AU16(0))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `AssertSrcIsIntoBytes` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/transmute-ref-src-not-nocell.rs b/vendor/zerocopy/tests/ui-stable/transmute-ref-src-not-nocell.rs new file mode 100644 index 00000000..ee28a98d --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-ref-src-not-nocell.rs @@ -0,0 +1,23 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::transmute_ref; + +fn main() {} + +#[derive(zerocopy::IntoBytes)] +#[repr(transparent)] +struct Src(AU16); + +// `transmute_ref` requires that the source type implements `Immutable` +const SRC_NOT_IMMUTABLE: &AU16 = transmute_ref!(&Src(AU16(0))); diff --git a/vendor/zerocopy/tests/ui-stable/transmute-ref-src-not-nocell.stderr b/vendor/zerocopy/tests/ui-stable/transmute-ref-src-not-nocell.stderr new file mode 100644 index 00000000..46bf46ae --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-ref-src-not-nocell.stderr @@ -0,0 +1,60 @@ +error[E0277]: the trait bound `Src: zerocopy::Immutable` is not satisfied + --> tests/ui-stable/transmute-ref-src-not-nocell.rs:23:34 + | +23 | const SRC_NOT_IMMUTABLE: &AU16 = transmute_ref!(&Src(AU16(0))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | | + | unsatisfied trait bound + | required by a bound introduced by this call + | +help: the trait `zerocopy::Immutable` is not implemented for `Src` + --> tests/ui-stable/transmute-ref-src-not-nocell.rs:20:1 + | +20 | struct Src(AU16); + | ^^^^^^^^^^ + = note: Consider adding `#[derive(Immutable)]` to `Src` + = help: the following other types implement trait `zerocopy::Immutable`: + &T + &mut T + () + *const T + *mut T + AU16 + Box<T> + F32<O> + and $N others +note: required by a bound in `AssertSrcIsImmutable` + --> tests/ui-stable/transmute-ref-src-not-nocell.rs:23:34 + | +23 | const SRC_NOT_IMMUTABLE: &AU16 = transmute_ref!(&Src(AU16(0))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `AssertSrcIsImmutable` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `Src: zerocopy::Immutable` is not satisfied + --> tests/ui-stable/transmute-ref-src-not-nocell.rs:23:34 + | +23 | const SRC_NOT_IMMUTABLE: &AU16 = transmute_ref!(&Src(AU16(0))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait 
`zerocopy::Immutable` is not implemented for `Src` + --> tests/ui-stable/transmute-ref-src-not-nocell.rs:20:1 + | +20 | struct Src(AU16); + | ^^^^^^^^^^ + = note: Consider adding `#[derive(Immutable)]` to `Src` + = help: the following other types implement trait `zerocopy::Immutable`: + &T + &mut T + () + *const T + *mut T + AU16 + Box<T> + F32<O> + and $N others +note: required by a bound in `AssertSrcIsImmutable` + --> tests/ui-stable/transmute-ref-src-not-nocell.rs:23:34 + | +23 | const SRC_NOT_IMMUTABLE: &AU16 = transmute_ref!(&Src(AU16(0))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `AssertSrcIsImmutable` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/transmute-ref-src-unsized.rs b/vendor/zerocopy/tests/ui-stable/transmute-ref-src-unsized.rs new file mode 100644 index 00000000..14e72b4d --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-ref-src-unsized.rs @@ -0,0 +1,16 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::transmute_ref; + +fn main() {} + +// `transmute_ref!` does not support transmuting from an unsized source type. +const SRC_UNSIZED: &[u8; 1] = transmute_ref!(&[0u8][..]); diff --git a/vendor/zerocopy/tests/ui-stable/transmute-ref-src-unsized.stderr b/vendor/zerocopy/tests/ui-stable/transmute-ref-src-unsized.stderr new file mode 100644 index 00000000..30937fda --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-ref-src-unsized.stderr @@ -0,0 +1,8 @@ +error[E0271]: type mismatch resolving `<[u8; 1] as KnownLayout>::PointerMetadata == usize` + --> tests/ui-stable/transmute-ref-src-unsized.rs:16:31 + | +16 | const SRC_UNSIZED: &[u8; 1] = transmute_ref!(&[0u8][..]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ expected `usize`, found `()` + | + = note: required for `Wrap<&[u8], &[u8; 1]>` to implement `TransmuteRefDst<'_>` + = note: this error originates in the macro `transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/transmute-size-decrease.rs b/vendor/zerocopy/tests/ui-stable/transmute-size-decrease.rs new file mode 100644 index 00000000..567b6733 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-size-decrease.rs @@ -0,0 +1,20 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::transmute; + +fn main() {} + +// Although this is not a soundness requirement, we currently require that the +// size of the destination type is not smaller than the size of the source type. 
+const DECREASE_SIZE: u8 = transmute!(AU16(0)); diff --git a/vendor/zerocopy/tests/ui-stable/transmute-size-decrease.stderr b/vendor/zerocopy/tests/ui-stable/transmute-size-decrease.stderr new file mode 100644 index 00000000..2852cb33 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-size-decrease.stderr @@ -0,0 +1,17 @@ +error[E0080]: transmuting from 2-byte type to 1-byte type: `AU16` -> `u8` + --> tests/ui-stable/transmute-size-decrease.rs:20:27 + | +20 | const DECREASE_SIZE: u8 = transmute!(AU16(0)); + | ^^^^^^^^^^^^^^^^^^^ evaluation of `DECREASE_SIZE` failed here + | + = note: this error originates in the macro `transmute` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0512]: cannot transmute between types of different sizes, or dependently-sized types + --> tests/ui-stable/transmute-size-decrease.rs:20:27 + | +20 | const DECREASE_SIZE: u8 = transmute!(AU16(0)); + | ^^^^^^^^^^^^^^^^^^^ + | + = note: source type: `AU16` (16 bits) + = note: target type: `u8` (8 bits) + = note: this error originates in the macro `transmute` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/transmute-size-increase-allow-shrink.rs b/vendor/zerocopy/tests/ui-stable/transmute-size-increase-allow-shrink.rs new file mode 100644 index 00000000..4922373f --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-size-increase-allow-shrink.rs @@ -0,0 +1,20 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::transmute; + +fn main() {} + +// `transmute!` does not support transmuting from a smaller type to a larger +// one. 
+const INCREASE_SIZE: AU16 = transmute!(#![allow(shrink)] 0u8); diff --git a/vendor/zerocopy/tests/ui-stable/transmute-size-increase-allow-shrink.stderr b/vendor/zerocopy/tests/ui-stable/transmute-size-increase-allow-shrink.stderr new file mode 100644 index 00000000..9050a73b --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-size-increase-allow-shrink.stderr @@ -0,0 +1,17 @@ +error[E0080]: transmuting from 1-byte type to 2-byte type: `u8` -> `Transmute<u8, AU16>` + --> tests/ui-stable/transmute-size-increase-allow-shrink.rs:20:29 + | +20 | const INCREASE_SIZE: AU16 = transmute!(#![allow(shrink)] 0u8); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ evaluation of `INCREASE_SIZE` failed here + | + = note: this error originates in the macro `transmute` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0512]: cannot transmute between types of different sizes, or dependently-sized types + --> tests/ui-stable/transmute-size-increase-allow-shrink.rs:20:29 + | +20 | const INCREASE_SIZE: AU16 = transmute!(#![allow(shrink)] 0u8); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: source type: `u8` (8 bits) + = note: target type: `Transmute<u8, AU16>` (16 bits) + = note: this error originates in the macro `transmute` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/transmute-size-increase.rs b/vendor/zerocopy/tests/ui-stable/transmute-size-increase.rs new file mode 100644 index 00000000..35dc780e --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-size-increase.rs @@ -0,0 +1,20 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::transmute; + +fn main() {} + +// `transmute!` does not support transmuting from a smaller type to a larger +// one. 
+const INCREASE_SIZE: AU16 = transmute!(0u8); diff --git a/vendor/zerocopy/tests/ui-stable/transmute-size-increase.stderr b/vendor/zerocopy/tests/ui-stable/transmute-size-increase.stderr new file mode 100644 index 00000000..40be466f --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-size-increase.stderr @@ -0,0 +1,17 @@ +error[E0080]: transmuting from 1-byte type to 2-byte type: `u8` -> `AU16` + --> tests/ui-stable/transmute-size-increase.rs:20:29 + | +20 | const INCREASE_SIZE: AU16 = transmute!(0u8); + | ^^^^^^^^^^^^^^^ evaluation of `INCREASE_SIZE` failed here + | + = note: this error originates in the macro `transmute` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0512]: cannot transmute between types of different sizes, or dependently-sized types + --> tests/ui-stable/transmute-size-increase.rs:20:29 + | +20 | const INCREASE_SIZE: AU16 = transmute!(0u8); + | ^^^^^^^^^^^^^^^ + | + = note: source type: `u8` (8 bits) + = note: target type: `AU16` (16 bits) + = note: this error originates in the macro `transmute` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/transmute-src-not-intobytes.rs b/vendor/zerocopy/tests/ui-stable/transmute-src-not-intobytes.rs new file mode 100644 index 00000000..73be6c1b --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-src-not-intobytes.rs @@ -0,0 +1,19 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::{NotZerocopy, AU16}; +use zerocopy::transmute; + +fn main() {} + +// `transmute` requires that the source type implements `IntoBytes` +const SRC_NOT_AS_BYTES: AU16 = transmute!(NotZerocopy(AU16(0))); diff --git a/vendor/zerocopy/tests/ui-stable/transmute-src-not-intobytes.stderr b/vendor/zerocopy/tests/ui-stable/transmute-src-not-intobytes.stderr new file mode 100644 index 00000000..a3cfa394 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/transmute-src-not-intobytes.stderr @@ -0,0 +1,34 @@ +error[E0277]: the trait bound `NotZerocopy<AU16>: zerocopy::IntoBytes` is not satisfied + --> tests/ui-stable/transmute-src-not-intobytes.rs:19:32 + | +19 | const SRC_NOT_AS_BYTES: AU16 = transmute!(NotZerocopy(AU16(0))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | | + | unsatisfied trait bound + | required by a bound introduced by this call + | +help: the trait `zerocopy::IntoBytes` is not implemented for `NotZerocopy<AU16>` + --> tests/ui-stable/../../zerocopy-derive/tests/include.rs + | + | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(IntoBytes)]` to `NotZerocopy<AU16>` + = help: the following other types implement trait `zerocopy::IntoBytes`: + () + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + and $N others +note: required by a bound in `SRC_NOT_AS_BYTES::transmute` + --> tests/ui-stable/transmute-src-not-intobytes.rs:19:32 + | +19 | const SRC_NOT_AS_BYTES: AU16 = transmute!(NotZerocopy(AU16(0))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | | + | required by a bound in this function + | required by this bound in `transmute` + = note: this error originates in the macro `transmute` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/try_transmute-dst-not-tryfrombytes.rs b/vendor/zerocopy/tests/ui-stable/try_transmute-dst-not-tryfrombytes.rs new file mode 100644 index 00000000..0658bccf --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/try_transmute-dst-not-tryfrombytes.rs @@ -0,0 +1,18 @@ +// Copyright 2022 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::{NotZerocopy, AU16}; +use zerocopy::try_transmute; + +fn main() { + let dst_not_try_from_bytes: Result<NotZerocopy, _> = try_transmute!(AU16(0)); +} diff --git a/vendor/zerocopy/tests/ui-stable/try_transmute-dst-not-tryfrombytes.stderr b/vendor/zerocopy/tests/ui-stable/try_transmute-dst-not-tryfrombytes.stderr new file mode 100644 index 00000000..3d96eaf3 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/try_transmute-dst-not-tryfrombytes.stderr @@ -0,0 +1,88 @@ +error[E0277]: the trait bound `NotZerocopy: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-stable/try_transmute-dst-not-tryfrombytes.rs:17:33 + | + 17 | let dst_not_try_from_bytes: Result<NotZerocopy, _> = try_transmute!(AU16(0)); + | ^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::TryFromBytes` is not implemented for `NotZerocopy` + --> tests/ui-stable/../../zerocopy-derive/tests/include.rs + | + 48 | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(TryFromBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::TryFromBytes`: + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + and $N others +note: required by a bound in `ValidityError` + --> src/error.rs + | + | pub struct ValidityError<Src, Dst: ?Sized + TryFromBytes> { + | ^^^^^^^^^^^^ required by this bound in `ValidityError` + +error[E0277]: the trait bound `NotZerocopy: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-stable/try_transmute-dst-not-tryfrombytes.rs:17:58 + | + 17 | let dst_not_try_from_bytes: Result<NotZerocopy, _> = try_transmute!(AU16(0)); + | ^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::TryFromBytes` is not implemented for `NotZerocopy` + --> tests/ui-stable/../../zerocopy-derive/tests/include.rs + | + 48 | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(TryFromBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::TryFromBytes`: + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + and $N others +note: required by a bound in `try_transmute` + --> src/util/macro_util.rs + | + | pub fn try_transmute<Src, Dst>(src: Src) -> Result<Dst, ValidityError<Src, Dst>> + | ------------- required by a bound in this function +... 
+ | Dst: TryFromBytes, + | ^^^^^^^^^^^^ required by this bound in `try_transmute` + = note: this error originates in the macro `try_transmute` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-stable/try_transmute-dst-not-tryfrombytes.rs:17:58 + | + 17 | let dst_not_try_from_bytes: Result<NotZerocopy, _> = try_transmute!(AU16(0)); + | ^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::TryFromBytes` is not implemented for `NotZerocopy` + --> tests/ui-stable/../../zerocopy-derive/tests/include.rs + | + 48 | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(TryFromBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::TryFromBytes`: + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + and $N others +note: required by a bound in `ValidityError` + --> src/error.rs + | + | pub struct ValidityError<Src, Dst: ?Sized + TryFromBytes> { + | ^^^^^^^^^^^^ required by this bound in `ValidityError` + = note: this error originates in the macro `try_transmute` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/try_transmute-size-decrease.rs b/vendor/zerocopy/tests/ui-stable/try_transmute-size-decrease.rs new file mode 100644 index 00000000..097623c8 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/try_transmute-size-decrease.rs @@ -0,0 +1,20 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::try_transmute; + +// Although this is not a soundness requirement, we currently require that the +// size of the destination type is not smaller than the size of the source type. 
+fn main() { + let _decrease_size: Result<u8, _> = try_transmute!(AU16(0)); +} diff --git a/vendor/zerocopy/tests/ui-stable/try_transmute-size-decrease.stderr b/vendor/zerocopy/tests/ui-stable/try_transmute-size-decrease.stderr new file mode 100644 index 00000000..d52462d5 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/try_transmute-size-decrease.stderr @@ -0,0 +1,9 @@ +error[E0512]: cannot transmute between types of different sizes, or dependently-sized types + --> tests/ui-stable/try_transmute-size-decrease.rs:19:41 + | +19 | let _decrease_size: Result<u8, _> = try_transmute!(AU16(0)); + | ^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: source type: `AU16` (16 bits) + = note: target type: `u8` (8 bits) + = note: this error originates in the macro `try_transmute` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/try_transmute-size-increase.rs b/vendor/zerocopy/tests/ui-stable/try_transmute-size-increase.rs new file mode 100644 index 00000000..4b40a566 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/try_transmute-size-increase.rs @@ -0,0 +1,20 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::try_transmute; + +// `try_transmute!` does not support transmuting from a smaller type to a larger +// one. +fn main() { + let _increase_size: Result<AU16, _> = try_transmute!(0u8); +} diff --git a/vendor/zerocopy/tests/ui-stable/try_transmute-size-increase.stderr b/vendor/zerocopy/tests/ui-stable/try_transmute-size-increase.stderr new file mode 100644 index 00000000..076dc26b --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/try_transmute-size-increase.stderr @@ -0,0 +1,9 @@ +error[E0512]: cannot transmute between types of different sizes, or dependently-sized types + --> tests/ui-stable/try_transmute-size-increase.rs:19:43 + | +19 | let _increase_size: Result<AU16, _> = try_transmute!(0u8); + | ^^^^^^^^^^^^^^^^^^^ + | + = note: source type: `u8` (8 bits) + = note: target type: `AU16` (16 bits) + = note: this error originates in the macro `try_transmute` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/try_transmute-src-not-intobytes.rs b/vendor/zerocopy/tests/ui-stable/try_transmute-src-not-intobytes.rs new file mode 100644 index 00000000..c2a7b417 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/try_transmute-src-not-intobytes.rs @@ -0,0 +1,19 @@ +// Copyright 2023 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::{NotZerocopy, AU16}; +use zerocopy::try_transmute; + +fn main() { + // `try_transmute` requires that the source type implements `IntoBytes` + let src_not_into_bytes: Result<AU16, _> = try_transmute!(NotZerocopy(AU16(0))); +} diff --git a/vendor/zerocopy/tests/ui-stable/try_transmute-src-not-intobytes.stderr b/vendor/zerocopy/tests/ui-stable/try_transmute-src-not-intobytes.stderr new file mode 100644 index 00000000..5d6a48a1 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/try_transmute-src-not-intobytes.stderr @@ -0,0 +1,31 @@ +error[E0277]: the trait bound `NotZerocopy<AU16>: zerocopy::IntoBytes` is not satisfied + --> tests/ui-stable/try_transmute-src-not-intobytes.rs:18:47 + | + 18 | let src_not_into_bytes: Result<AU16, _> = try_transmute!(NotZerocopy(AU16(0))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::IntoBytes` is not implemented for `NotZerocopy<AU16>` + --> tests/ui-stable/../../zerocopy-derive/tests/include.rs + | + 48 | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(IntoBytes)]` to `NotZerocopy<AU16>` + = help: the following other types implement trait `zerocopy::IntoBytes`: + () + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + and $N others +note: required by a bound in `try_transmute` + --> src/util/macro_util.rs + | + | pub fn try_transmute<Src, Dst>(src: Src) -> Result<Dst, ValidityError<Src, Dst>> + | ------------- required by a bound in this function + | where + | Src: IntoBytes, + | ^^^^^^^^^ required by this bound in `try_transmute` + = note: this error originates in the macro `try_transmute` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/try_transmute_mut-alignment-increase.rs b/vendor/zerocopy/tests/ui-stable/try_transmute_mut-alignment-increase.rs new file mode 100644 index 00000000..d9c9a9dc --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/try_transmute_mut-alignment-increase.rs @@ -0,0 +1,21 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::try_transmute_mut; + +// `try_transmute_mut!` does not support transmuting from a type of smaller +// alignment to one of larger alignment. 
+fn main() { + let src = &mut [0u8; 2]; + let _increase_size: Result<&mut AU16, _> = try_transmute_mut!(src); +} diff --git a/vendor/zerocopy/tests/ui-stable/try_transmute_mut-alignment-increase.stderr b/vendor/zerocopy/tests/ui-stable/try_transmute_mut-alignment-increase.stderr new file mode 100644 index 00000000..800bfae2 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/try_transmute_mut-alignment-increase.stderr @@ -0,0 +1,9 @@ +error[E0512]: cannot transmute between types of different sizes, or dependently-sized types + --> tests/ui-stable/try_transmute_mut-alignment-increase.rs:20:48 + | +20 | let _increase_size: Result<&mut AU16, _> = try_transmute_mut!(src); + | ^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: source type: `AlignOf<[u8; 2]>` (8 bits) + = note: target type: `MaxAlignsOf<[u8; 2], AU16>` (16 bits) + = note: this error originates in the macro `$crate::assert_align_gt_eq` which comes from the expansion of the macro `try_transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/try_transmute_mut-dst-not-tryfrombytes.rs b/vendor/zerocopy/tests/ui-stable/try_transmute_mut-dst-not-tryfrombytes.rs new file mode 100644 index 00000000..89096cd1 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/try_transmute_mut-dst-not-tryfrombytes.rs @@ -0,0 +1,21 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::{NotZerocopy, AU16}; +use zerocopy::try_transmute_mut; + +fn main() { + // `try_transmute_mut` requires that the destination type implements + // `IntoBytes` + let src = &mut AU16(0); + let dst_not_try_from_bytes: Result<&mut NotZerocopy, _> = try_transmute_mut!(src); +} diff --git a/vendor/zerocopy/tests/ui-stable/try_transmute_mut-dst-not-tryfrombytes.stderr b/vendor/zerocopy/tests/ui-stable/try_transmute_mut-dst-not-tryfrombytes.stderr new file mode 100644 index 00000000..139dcaa5 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/try_transmute_mut-dst-not-tryfrombytes.stderr @@ -0,0 +1,120 @@ +error[E0277]: the trait bound `NotZerocopy: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-stable/try_transmute_mut-dst-not-tryfrombytes.rs:20:33 + | + 20 | let dst_not_try_from_bytes: Result<&mut NotZerocopy, _> = try_transmute_mut!(src); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::TryFromBytes` is not implemented for `NotZerocopy` + --> tests/ui-stable/../../zerocopy-derive/tests/include.rs + | + 48 | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(TryFromBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::TryFromBytes`: + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + and $N others +note: required by a bound in `ValidityError` + --> src/error.rs + | + | pub struct ValidityError<Src, Dst: ?Sized + TryFromBytes> { + | ^^^^^^^^^^^^ required by this bound in `ValidityError` + +error[E0277]: the trait bound `NotZerocopy: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-stable/try_transmute_mut-dst-not-tryfrombytes.rs:20:63 + 
| + 20 | let dst_not_try_from_bytes: Result<&mut NotZerocopy, _> = try_transmute_mut!(src); + | ^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::TryFromBytes` is not implemented for `NotZerocopy` + --> tests/ui-stable/../../zerocopy-derive/tests/include.rs + | + 48 | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(TryFromBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::TryFromBytes`: + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + and $N others +note: required by a bound in `try_transmute_mut` + --> src/util/macro_util.rs + | + | pub fn try_transmute_mut<Src, Dst>(src: &mut Src) -> Result<&mut Dst, ValidityError<&mut Src, Dst>> + | ----------------- required by a bound in this function +... + | Dst: TryFromBytes + IntoBytes, + | ^^^^^^^^^^^^ required by this bound in `try_transmute_mut` + = note: this error originates in the macro `try_transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy: zerocopy::IntoBytes` is not satisfied + --> tests/ui-stable/try_transmute_mut-dst-not-tryfrombytes.rs:20:63 + | + 20 | let dst_not_try_from_bytes: Result<&mut NotZerocopy, _> = try_transmute_mut!(src); + | ^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::IntoBytes` is not implemented for `NotZerocopy` + --> tests/ui-stable/../../zerocopy-derive/tests/include.rs + | + 48 | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(IntoBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::IntoBytes`: + () + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + and $N others +note: required by a bound in `try_transmute_mut` + --> src/util/macro_util.rs + | + | pub fn try_transmute_mut<Src, Dst>(src: &mut Src) -> Result<&mut Dst, ValidityError<&mut Src, Dst>> + | ----------------- required by a bound in this function +... 
+ | Dst: TryFromBytes + IntoBytes, + | ^^^^^^^^^ required by this bound in `try_transmute_mut` + = note: this error originates in the macro `try_transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-stable/try_transmute_mut-dst-not-tryfrombytes.rs:20:63 + | + 20 | let dst_not_try_from_bytes: Result<&mut NotZerocopy, _> = try_transmute_mut!(src); + | ^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::TryFromBytes` is not implemented for `NotZerocopy` + --> tests/ui-stable/../../zerocopy-derive/tests/include.rs + | + 48 | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(TryFromBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::TryFromBytes`: + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + and $N others +note: required by a bound in `ValidityError` + --> src/error.rs + | + | pub struct ValidityError<Src, Dst: ?Sized + TryFromBytes> { + | ^^^^^^^^^^^^ required by this bound in `ValidityError` + = note: this error originates in the macro `try_transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/try_transmute_mut-size-decrease.rs b/vendor/zerocopy/tests/ui-stable/try_transmute_mut-size-decrease.rs new file mode 100644 index 00000000..3d522efd --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/try_transmute_mut-size-decrease.rs @@ -0,0 +1,21 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::try_transmute_mut; + +// Although this is not a soundness requirement, we currently require that the +// size of the destination type is not smaller than the size of the source type. 
+fn main() { + let src = &mut AU16(0); + let _decrease_size: Result<&mut u8, _> = try_transmute_mut!(src); +} diff --git a/vendor/zerocopy/tests/ui-stable/try_transmute_mut-size-decrease.stderr b/vendor/zerocopy/tests/ui-stable/try_transmute_mut-size-decrease.stderr new file mode 100644 index 00000000..8876faef --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/try_transmute_mut-size-decrease.stderr @@ -0,0 +1,9 @@ +error[E0512]: cannot transmute between types of different sizes, or dependently-sized types + --> tests/ui-stable/try_transmute_mut-size-decrease.rs:20:46 + | +20 | let _decrease_size: Result<&mut u8, _> = try_transmute_mut!(src); + | ^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: source type: `AU16` (16 bits) + = note: target type: `u8` (8 bits) + = note: this error originates in the macro `$crate::assert_size_eq` which comes from the expansion of the macro `try_transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/try_transmute_mut-size-increase.rs b/vendor/zerocopy/tests/ui-stable/try_transmute_mut-size-increase.rs new file mode 100644 index 00000000..526d465c --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/try_transmute_mut-size-increase.rs @@ -0,0 +1,21 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::try_transmute_mut; + +// `try_transmute_mut!` does not support transmuting from a smaller type to a +// larger one. 
+fn main() { + let src = &mut 0u8; + let _increase_size: Result<&mut [u8; 2], _> = try_transmute_mut!(src); +} diff --git a/vendor/zerocopy/tests/ui-stable/try_transmute_mut-size-increase.stderr b/vendor/zerocopy/tests/ui-stable/try_transmute_mut-size-increase.stderr new file mode 100644 index 00000000..8be07db4 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/try_transmute_mut-size-increase.stderr @@ -0,0 +1,17 @@ +warning: unused import: `util::AU16` + --> tests/ui-stable/try_transmute_mut-size-increase.rs:13:5 + | +13 | use util::AU16; + | ^^^^^^^^^^ + | + = note: `#[warn(unused_imports)]` (part of `#[warn(unused)]`) on by default + +error[E0512]: cannot transmute between types of different sizes, or dependently-sized types + --> tests/ui-stable/try_transmute_mut-size-increase.rs:20:51 + | +20 | let _increase_size: Result<&mut [u8; 2], _> = try_transmute_mut!(src); + | ^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: source type: `u8` (8 bits) + = note: target type: `[u8; 2]` (16 bits) + = note: this error originates in the macro `$crate::assert_size_eq` which comes from the expansion of the macro `try_transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/try_transmute_mut-src-not-frombytes.rs b/vendor/zerocopy/tests/ui-stable/try_transmute_mut-src-not-frombytes.rs new file mode 100644 index 00000000..12b2e0d3 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/try_transmute_mut-src-not-frombytes.rs @@ -0,0 +1,24 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +extern crate zerocopy; + +use zerocopy::transmute_mut; + +#[derive(zerocopy::IntoBytes)] +#[repr(C)] +struct Src; + +#[derive(zerocopy::TryFromBytes)] +#[repr(C)] +struct Dst; + +fn main() { + // `try_transmute_mut` requires that the source type implements `FromBytes` + let src_not_from_bytes: &mut Dst = transmute_mut!(&mut Src); +} diff --git a/vendor/zerocopy/tests/ui-stable/try_transmute_mut-src-not-frombytes.stderr b/vendor/zerocopy/tests/ui-stable/try_transmute_mut-src-not-frombytes.stderr new file mode 100644 index 00000000..e99b2f09 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/try_transmute_mut-src-not-frombytes.stderr @@ -0,0 +1,95 @@ +error[E0277]: the trait bound `Src: FromBytes` is not satisfied + --> tests/ui-stable/try_transmute_mut-src-not-frombytes.rs:23:40 + | + 23 | let src_not_from_bytes: &mut Dst = transmute_mut!(&mut Src); + | ^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `FromBytes` is not implemented for `Src` + --> tests/ui-stable/try_transmute_mut-src-not-frombytes.rs:15:1 + | + 15 | struct Src; + | ^^^^^^^^^^ + = note: Consider adding `#[derive(FromBytes)]` to `Src` + = help: the following other types implement trait `FromBytes`: + () + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + AtomicU16 + AtomicU32 + and $N others +note: required by a bound in `Wrap::<&'a mut Src, &'a mut Dst>::transmute_mut` + --> src/util/macro_util.rs + | + | pub fn transmute_mut(self) -> &'a mut Dst + | ------------- required by a bound in this associated function + | where + | Src: FromBytes + IntoBytes, + | ^^^^^^^^^ required by this bound in `Wrap::<&mut Src, &mut Dst>::transmute_mut` + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `Dst: FromBytes` is not satisfied + --> tests/ui-stable/try_transmute_mut-src-not-frombytes.rs:23:40 + | + 23 | let src_not_from_bytes: &mut Dst = transmute_mut!(&mut Src); + | ^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `FromBytes` is not implemented for `Dst` + --> tests/ui-stable/try_transmute_mut-src-not-frombytes.rs:19:1 + | + 19 | struct Dst; + | ^^^^^^^^^^ + = note: Consider adding `#[derive(FromBytes)]` to `Dst` + = help: the following other types implement trait `FromBytes`: + () + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + AtomicU16 + AtomicU32 + and $N others +note: required by a bound in `Wrap::<&'a mut Src, &'a mut Dst>::transmute_mut` + --> src/util/macro_util.rs + | + | pub fn transmute_mut(self) -> &'a mut Dst + | ------------- required by a bound in this associated function +... 
+ | Dst: FromBytes + IntoBytes, + | ^^^^^^^^^ required by this bound in `Wrap::<&mut Src, &mut Dst>::transmute_mut` + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `Dst: IntoBytes` is not satisfied + --> tests/ui-stable/try_transmute_mut-src-not-frombytes.rs:23:40 + | + 23 | let src_not_from_bytes: &mut Dst = transmute_mut!(&mut Src); + | ^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `IntoBytes` is not implemented for `Dst` + --> tests/ui-stable/try_transmute_mut-src-not-frombytes.rs:19:1 + | + 19 | struct Dst; + | ^^^^^^^^^^ + = note: Consider adding `#[derive(IntoBytes)]` to `Dst` + = help: the following other types implement trait `IntoBytes`: + () + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + AtomicU16 + and $N others +note: required by a bound in `Wrap::<&'a mut Src, &'a mut Dst>::transmute_mut` + --> src/util/macro_util.rs + | + | pub fn transmute_mut(self) -> &'a mut Dst + | ------------- required by a bound in this associated function +... + | Dst: FromBytes + IntoBytes, + | ^^^^^^^^^ required by this bound in `Wrap::<&mut Src, &mut Dst>::transmute_mut` + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/try_transmute_mut-src-not-intobytes.rs b/vendor/zerocopy/tests/ui-stable/try_transmute_mut-src-not-intobytes.rs new file mode 100644 index 00000000..fa3b7032 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/try_transmute_mut-src-not-intobytes.rs @@ -0,0 +1,24 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +extern crate zerocopy; + +use zerocopy::transmute_mut; + +#[derive(zerocopy::FromBytes)] +#[repr(C)] +struct Src; + +#[derive(zerocopy::TryFromBytes)] +#[repr(C)] +struct Dst; + +fn main() { + // `try_transmute_mut` requires that the source type implements `IntoBytes` + let src_not_from_bytes: &mut Dst = transmute_mut!(&mut Src); +} diff --git a/vendor/zerocopy/tests/ui-stable/try_transmute_mut-src-not-intobytes.stderr b/vendor/zerocopy/tests/ui-stable/try_transmute_mut-src-not-intobytes.stderr new file mode 100644 index 00000000..cd2c51f8 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/try_transmute_mut-src-not-intobytes.stderr @@ -0,0 +1,95 @@ +error[E0277]: the trait bound `Src: IntoBytes` is not satisfied + --> tests/ui-stable/try_transmute_mut-src-not-intobytes.rs:23:40 + | + 23 | let src_not_from_bytes: &mut Dst = transmute_mut!(&mut Src); + | ^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `IntoBytes` is not implemented for `Src` + --> tests/ui-stable/try_transmute_mut-src-not-intobytes.rs:15:1 + | + 15 | struct Src; + | ^^^^^^^^^^ + = note: Consider adding `#[derive(IntoBytes)]` to `Src` + = help: the following other types implement trait `IntoBytes`: + () + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + AtomicU16 + and $N others +note: required by a bound in `Wrap::<&'a mut Src, &'a mut Dst>::transmute_mut` + --> src/util/macro_util.rs + | + | pub fn transmute_mut(self) -> &'a mut Dst + | ------------- required by a bound in this associated function + | where + | Src: FromBytes + IntoBytes, + | ^^^^^^^^^ required by this bound in `Wrap::<&mut Src, &mut Dst>::transmute_mut` + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `Dst: FromBytes` is not satisfied + --> tests/ui-stable/try_transmute_mut-src-not-intobytes.rs:23:40 + | + 23 | let src_not_from_bytes: &mut Dst = transmute_mut!(&mut Src); + | ^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `FromBytes` is not implemented for `Dst` + --> tests/ui-stable/try_transmute_mut-src-not-intobytes.rs:19:1 + | + 19 | struct Dst; + | ^^^^^^^^^^ + = note: Consider adding `#[derive(FromBytes)]` to `Dst` + = help: the following other types implement trait `FromBytes`: + () + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + AtomicU16 + AtomicU32 + and $N others +note: required by a bound in `Wrap::<&'a mut Src, &'a mut Dst>::transmute_mut` + --> src/util/macro_util.rs + | + | pub fn transmute_mut(self) -> &'a mut Dst + | ------------- required by a bound in this associated function +... 
+ | Dst: FromBytes + IntoBytes, + | ^^^^^^^^^ required by this bound in `Wrap::<&mut Src, &mut Dst>::transmute_mut` + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `Dst: IntoBytes` is not satisfied + --> tests/ui-stable/try_transmute_mut-src-not-intobytes.rs:23:40 + | + 23 | let src_not_from_bytes: &mut Dst = transmute_mut!(&mut Src); + | ^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `IntoBytes` is not implemented for `Dst` + --> tests/ui-stable/try_transmute_mut-src-not-intobytes.rs:19:1 + | + 19 | struct Dst; + | ^^^^^^^^^^ + = note: Consider adding `#[derive(IntoBytes)]` to `Dst` + = help: the following other types implement trait `IntoBytes`: + () + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + AtomicU16 + and $N others +note: required by a bound in `Wrap::<&'a mut Src, &'a mut Dst>::transmute_mut` + --> src/util/macro_util.rs + | + | pub fn transmute_mut(self) -> &'a mut Dst + | ------------- required by a bound in this associated function +... + | Dst: FromBytes + IntoBytes, + | ^^^^^^^^^ required by this bound in `Wrap::<&mut Src, &mut Dst>::transmute_mut` + = note: this error originates in the macro `transmute_mut` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/try_transmute_ref-alignment-increase.rs b/vendor/zerocopy/tests/ui-stable/try_transmute_ref-alignment-increase.rs new file mode 100644 index 00000000..ad1062fb --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/try_transmute_ref-alignment-increase.rs @@ -0,0 +1,20 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::try_transmute_ref; + +// `try_transmute_ref!` does not support transmuting from a type of smaller +// alignment to one of larger alignment. 
+fn main() { + let _increase_size: Result<&AU16, _> = try_transmute_ref!(&[0u8; 2]); +} diff --git a/vendor/zerocopy/tests/ui-stable/try_transmute_ref-alignment-increase.stderr b/vendor/zerocopy/tests/ui-stable/try_transmute_ref-alignment-increase.stderr new file mode 100644 index 00000000..d009ee76 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/try_transmute_ref-alignment-increase.stderr @@ -0,0 +1,9 @@ +error[E0512]: cannot transmute between types of different sizes, or dependently-sized types + --> tests/ui-stable/try_transmute_ref-alignment-increase.rs:19:44 + | +19 | let _increase_size: Result<&AU16, _> = try_transmute_ref!(&[0u8; 2]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: source type: `AlignOf<[u8; 2]>` (8 bits) + = note: target type: `MaxAlignsOf<[u8; 2], AU16>` (16 bits) + = note: this error originates in the macro `$crate::assert_align_gt_eq` which comes from the expansion of the macro `try_transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/try_transmute_ref-dst-mutable.rs b/vendor/zerocopy/tests/ui-stable/try_transmute_ref-dst-mutable.rs new file mode 100644 index 00000000..e27a1294 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/try_transmute_ref-dst-mutable.rs @@ -0,0 +1,19 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +extern crate zerocopy; + +use zerocopy::try_transmute_ref; + +fn main() {} + +fn ref_dst_mutable() { + // `try_transmute_ref!` requires that its destination type be an immutable + // reference. 
+ let _: Result<&mut u8, _> = try_transmute_ref!(&0u8); +} diff --git a/vendor/zerocopy/tests/ui-stable/try_transmute_ref-dst-mutable.stderr b/vendor/zerocopy/tests/ui-stable/try_transmute_ref-dst-mutable.stderr new file mode 100644 index 00000000..2caaf36e --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/try_transmute_ref-dst-mutable.stderr @@ -0,0 +1,32 @@ +error[E0308]: mismatched types + --> tests/ui-stable/try_transmute_ref-dst-mutable.rs:18:33 + | + 18 | let _: Result<&mut u8, _> = try_transmute_ref!(&0u8); + | ^^^^^^^^^^^^^^^^^^^^^^^^ + | | + | types differ in mutability + | arguments to this enum variant are incorrect + | + = note: expected mutable reference `&mut u8` + found reference `&_` +help: the type constructed contains `&_` due to the type of the argument passed + --> tests/ui-stable/try_transmute_ref-dst-mutable.rs:18:33 + | + 18 | let _: Result<&mut u8, _> = try_transmute_ref!(&0u8); + | ^^^^^^^^^^^^^^^^^^^^^^^^ this argument influences the type of `Ok` +note: tuple variant defined here + --> $RUST/core/src/result.rs + | + | Ok(#[stable(feature = "rust1", since = "1.0.0")] T), + | ^^ + = note: this error originates in the macro `try_transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0308]: mismatched types + --> tests/ui-stable/try_transmute_ref-dst-mutable.rs:18:33 + | +18 | let _: Result<&mut u8, _> = try_transmute_ref!(&0u8); + | ^^^^^^^^^^^^^^^^^^^^^^^^ types differ in mutability + | + = note: expected enum `Result<&mut u8, _>` + found enum `Result<&_, ValidityError<&u8, _>>` + = note: this error originates in the macro `try_transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/try_transmute_ref-dst-not-immutable-tryfrombytes.rs b/vendor/zerocopy/tests/ui-stable/try_transmute_ref-dst-not-immutable-tryfrombytes.rs new file mode 100644 index 00000000..3928a1cb --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/try_transmute_ref-dst-not-immutable-tryfrombytes.rs @@ -0,0 +1,20 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. 
+ +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::{NotZerocopy, AU16}; +use zerocopy::try_transmute_ref; + +fn main() { + // `try_transmute_ref` requires that the source type implements `Immutable` + // and `IntoBytes` + let dst_not_try_from_bytes: Result<&NotZerocopy, _> = try_transmute_ref!(&AU16(0)); +} diff --git a/vendor/zerocopy/tests/ui-stable/try_transmute_ref-dst-not-immutable-tryfrombytes.stderr b/vendor/zerocopy/tests/ui-stable/try_transmute_ref-dst-not-immutable-tryfrombytes.stderr new file mode 100644 index 00000000..0a004a60 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/try_transmute_ref-dst-not-immutable-tryfrombytes.stderr @@ -0,0 +1,120 @@ +error[E0277]: the trait bound `NotZerocopy: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-stable/try_transmute_ref-dst-not-immutable-tryfrombytes.rs:19:33 + | + 19 | let dst_not_try_from_bytes: Result<&NotZerocopy, _> = try_transmute_ref!(&AU16(0)); + | ^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::TryFromBytes` is not implemented for `NotZerocopy` + --> tests/ui-stable/../../zerocopy-derive/tests/include.rs + | + 48 | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(TryFromBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::TryFromBytes`: + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + and $N others +note: required by a bound in `ValidityError` + --> src/error.rs + | + | pub struct ValidityError<Src, Dst: ?Sized + TryFromBytes> { + | ^^^^^^^^^^^^ required by this bound in `ValidityError` + +error[E0277]: the trait bound `NotZerocopy: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-stable/try_transmute_ref-dst-not-immutable-tryfrombytes.rs:19:59 + | + 19 | let dst_not_try_from_bytes: Result<&NotZerocopy, _> = try_transmute_ref!(&AU16(0)); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::TryFromBytes` is not implemented for `NotZerocopy` + --> tests/ui-stable/../../zerocopy-derive/tests/include.rs + | + 48 | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(TryFromBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::TryFromBytes`: + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + and $N others +note: required by a bound in `try_transmute_ref` + --> src/util/macro_util.rs + | + | pub fn try_transmute_ref<Src, Dst>(src: &Src) -> Result<&Dst, ValidityError<&Src, Dst>> + | ----------------- required by a bound in this function +... 
+ | Dst: TryFromBytes + Immutable, + | ^^^^^^^^^^^^ required by this bound in `try_transmute_ref` + = note: this error originates in the macro `try_transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy: zerocopy::Immutable` is not satisfied + --> tests/ui-stable/try_transmute_ref-dst-not-immutable-tryfrombytes.rs:19:59 + | + 19 | let dst_not_try_from_bytes: Result<&NotZerocopy, _> = try_transmute_ref!(&AU16(0)); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::Immutable` is not implemented for `NotZerocopy` + --> tests/ui-stable/../../zerocopy-derive/tests/include.rs + | + 48 | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(Immutable)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::Immutable`: + &T + &mut T + () + *const T + *mut T + AU16 + Box<T> + F32<O> + and $N others +note: required by a bound in `try_transmute_ref` + --> src/util/macro_util.rs + | + | pub fn try_transmute_ref<Src, Dst>(src: &Src) -> Result<&Dst, ValidityError<&Src, Dst>> + | ----------------- required by a bound in this function +... + | Dst: TryFromBytes + Immutable, + | ^^^^^^^^^ required by this bound in `try_transmute_ref` + = note: this error originates in the macro `try_transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy: zerocopy::TryFromBytes` is not satisfied + --> tests/ui-stable/try_transmute_ref-dst-not-immutable-tryfrombytes.rs:19:59 + | + 19 | let dst_not_try_from_bytes: Result<&NotZerocopy, _> = try_transmute_ref!(&AU16(0)); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::TryFromBytes` is not implemented for `NotZerocopy` + --> tests/ui-stable/../../zerocopy-derive/tests/include.rs + | + 48 | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(TryFromBytes)]` to `NotZerocopy` + = help: the following other types implement trait `zerocopy::TryFromBytes`: + () + *const T + *mut T + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + and $N others +note: required by a bound in `ValidityError` + --> src/error.rs + | + | pub struct ValidityError<Src, Dst: ?Sized + TryFromBytes> { + | ^^^^^^^^^^^^ required by this bound in `ValidityError` + = note: this error originates in the macro `try_transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/try_transmute_ref-size-decrease.rs b/vendor/zerocopy/tests/ui-stable/try_transmute_ref-size-decrease.rs new file mode 100644 index 00000000..60bd70c2 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/try_transmute_ref-size-decrease.rs @@ -0,0 +1,20 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::try_transmute_ref; + +// Although this is not a soundness requirement, we currently require that the +// size of the destination type is not smaller than the size of the source type. 
+fn main() { + let _decrease_size: Result<&u8, _> = try_transmute_ref!(&AU16(0)); +} diff --git a/vendor/zerocopy/tests/ui-stable/try_transmute_ref-size-decrease.stderr b/vendor/zerocopy/tests/ui-stable/try_transmute_ref-size-decrease.stderr new file mode 100644 index 00000000..ae3f68d3 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/try_transmute_ref-size-decrease.stderr @@ -0,0 +1,9 @@ +error[E0512]: cannot transmute between types of different sizes, or dependently-sized types + --> tests/ui-stable/try_transmute_ref-size-decrease.rs:19:42 + | +19 | let _decrease_size: Result<&u8, _> = try_transmute_ref!(&AU16(0)); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: source type: `AU16` (16 bits) + = note: target type: `u8` (8 bits) + = note: this error originates in the macro `$crate::assert_size_eq` which comes from the expansion of the macro `try_transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/try_transmute_ref-size-increase.rs b/vendor/zerocopy/tests/ui-stable/try_transmute_ref-size-increase.rs new file mode 100644 index 00000000..ee5a5fd4 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/try_transmute_ref-size-increase.rs @@ -0,0 +1,20 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::AU16; +use zerocopy::try_transmute_ref; + +// `try_transmute_ref!` does not support transmuting from a smaller type to a +// larger one. +fn main() { + let _increase_size: Result<&AU16, _> = try_transmute_ref!(&[0u8; 2]); +} diff --git a/vendor/zerocopy/tests/ui-stable/try_transmute_ref-size-increase.stderr b/vendor/zerocopy/tests/ui-stable/try_transmute_ref-size-increase.stderr new file mode 100644 index 00000000..8d71ab36 --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/try_transmute_ref-size-increase.stderr @@ -0,0 +1,9 @@ +error[E0512]: cannot transmute between types of different sizes, or dependently-sized types + --> tests/ui-stable/try_transmute_ref-size-increase.rs:19:44 + | +19 | let _increase_size: Result<&AU16, _> = try_transmute_ref!(&[0u8; 2]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: source type: `AlignOf<[u8; 2]>` (8 bits) + = note: target type: `MaxAlignsOf<[u8; 2], AU16>` (16 bits) + = note: this error originates in the macro `$crate::assert_align_gt_eq` which comes from the expansion of the macro `try_transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/tests/ui-stable/try_transmute_ref-src-not-immutable-intobytes.rs b/vendor/zerocopy/tests/ui-stable/try_transmute_ref-src-not-immutable-intobytes.rs new file mode 100644 index 00000000..2aec95ae --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/try_transmute_ref-src-not-immutable-intobytes.rs @@ -0,0 +1,20 @@ +// Copyright 2024 The Fuchsia Authors +// +// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. 
+// This file may not be copied, modified, or distributed except according to +// those terms. + +include!("../../zerocopy-derive/tests/include.rs"); + +extern crate zerocopy; + +use util::{NotZerocopy, AU16}; +use zerocopy::try_transmute_ref; + +fn main() { + // `try_transmute_ref` requires that the source type implements `Immutable` + // and `IntoBytes` + let src_not_into_bytes: Result<&AU16, _> = try_transmute_ref!(&NotZerocopy(AU16(0))); +} diff --git a/vendor/zerocopy/tests/ui-stable/try_transmute_ref-src-not-immutable-intobytes.stderr b/vendor/zerocopy/tests/ui-stable/try_transmute_ref-src-not-immutable-intobytes.stderr new file mode 100644 index 00000000..fcd3836c --- /dev/null +++ b/vendor/zerocopy/tests/ui-stable/try_transmute_ref-src-not-immutable-intobytes.stderr @@ -0,0 +1,63 @@ +error[E0277]: the trait bound `NotZerocopy<AU16>: zerocopy::IntoBytes` is not satisfied + --> tests/ui-stable/try_transmute_ref-src-not-immutable-intobytes.rs:19:48 + | + 19 | let src_not_into_bytes: Result<&AU16, _> = try_transmute_ref!(&NotZerocopy(AU16(0))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::IntoBytes` is not implemented for `NotZerocopy<AU16>` + --> tests/ui-stable/../../zerocopy-derive/tests/include.rs + | + 48 | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(IntoBytes)]` to `NotZerocopy<AU16>` + = help: the following other types implement trait `zerocopy::IntoBytes`: + () + AU16 + AtomicBool + AtomicI16 + AtomicI32 + AtomicI64 + AtomicI8 + AtomicIsize + and $N others +note: required by a bound in `try_transmute_ref` + --> src/util/macro_util.rs + | + | pub fn try_transmute_ref<Src, Dst>(src: &Src) -> Result<&Dst, ValidityError<&Src, Dst>> + | ----------------- required by a bound in this function + | where + | Src: IntoBytes + Immutable, + | ^^^^^^^^^ required by this bound in `try_transmute_ref` + = note: this error originates in the macro `try_transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `NotZerocopy<AU16>: zerocopy::Immutable` is not satisfied + --> tests/ui-stable/try_transmute_ref-src-not-immutable-intobytes.rs:19:48 + | + 19 | let src_not_into_bytes: Result<&AU16, _> = try_transmute_ref!(&NotZerocopy(AU16(0))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ unsatisfied trait bound + | +help: the trait `zerocopy::Immutable` is not implemented for `NotZerocopy<AU16>` + --> tests/ui-stable/../../zerocopy-derive/tests/include.rs + | + 48 | pub struct NotZerocopy<T = ()>(pub T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: Consider adding `#[derive(Immutable)]` to `NotZerocopy<AU16>` + = help: the following other types implement trait `zerocopy::Immutable`: + &T + &mut T + () + *const T + *mut T + AU16 + Box<T> + F32<O> + and $N others +note: required by a bound in `try_transmute_ref` + --> src/util/macro_util.rs + | + | pub fn try_transmute_ref<Src, Dst>(src: &Src) -> Result<&Dst, ValidityError<&Src, Dst>> + | ----------------- required by a bound in this function + | where + | Src: IntoBytes + Immutable, + | ^^^^^^^^^ required by this bound in `try_transmute_ref` + = note: this error originates in the macro `try_transmute_ref` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/zerocopy/win-cargo.bat b/vendor/zerocopy/win-cargo.bat new file mode 100644 index 00000000..c0e3b044 --- /dev/null +++ b/vendor/zerocopy/win-cargo.bat @@ -0,0 +1,16 @@ +@rem Copyright 2024 The 
Fuchsia Authors + +@rem Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0 +@rem <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT +@rem license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option. +@rem This file may not be copied, modified, or distributed except according to +@rem those terms. + +@rem Build `cargo-zerocopy` without any RUSTFLAGS set in the environment +@set TEMP_RUSTFLAGS=%RUSTFLAGS% +@set RUSTFLAGS= +@cargo +stable build --manifest-path tools/Cargo.toml -p cargo-zerocopy -q +@set RUSTFLAGS=%TEMP_RUSTFLAGS% +@set TEMP_RUSTFLAGS= +@rem Thin wrapper around the `cargo-zerocopy` binary in `tools/cargo-zerocopy` +@tools\target\debug\cargo-zerocopy %*