Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
102 changes: 66 additions & 36 deletions crates/class-hash/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,12 @@

use anyhow::{Context, Error, Result};
use pathfinder_common::class_definition::EntryPointType::*;
use pathfinder_common::class_definition::{
SerializedCairoDefinition,
SerializedClassDefinition,
SerializedOpaqueClassDefinition,
SerializedSierraDefinition,
};
use pathfinder_common::{felt_bytes, ClassHash};
use pathfinder_crypto::hash::{HashChain, PoseidonHasher};
use pathfinder_crypto::Felt;
Expand All @@ -80,22 +86,47 @@ impl ComputedClassHash {
}
}

/// Computes the starknet class hash for given class definition JSON blob.
/// Consumes an opaque serialized class definition and outputs the computed
/// class hash as well as the definition reinterpreted as either a serialized
/// Cairo or Sierra definition.
///
/// This function first parses the JSON blob to decide if it's a Cairo or Sierra
/// class definition and then calls the appropriate function to compute the
/// class hash with the parsed definition.
pub fn compute_class_hash(contract_definition_dump: &[u8]) -> Result<ComputedClassHash> {
let contract_definition = parse_contract_definition(contract_definition_dump)
pub fn compute_class_hash(
serialized_definition: SerializedOpaqueClassDefinition,
) -> Result<(ComputedClassHash, SerializedClassDefinition)> {
let contract_definition = parse_contract_definition(&serialized_definition)
.context("Failed to parse contract definition")?;

match contract_definition {
json::ContractDefinition::Sierra(definition) => compute_sierra_class_hash(definition)
.map(ComputedClassHash::Sierra)
.context("Compute class hash"),
.context("Compute class hash")
.map(|hash| {
(
hash,
// It is safe to reinterpret the serialized definition as a Sierra definition
// since the parsing step succeeded and confirmed it is a
// Sierra definition.
SerializedClassDefinition::Sierra(SerializedSierraDefinition::from_bytes(
serialized_definition.into_bytes(),
)),
)
}),
json::ContractDefinition::Cairo(definition) => compute_cairo_class_hash(definition.into())
.map(ComputedClassHash::Cairo)
.context("Compute class hash"),
.context("Compute class hash")
.map(|hash| {
(
hash,
// It is safe to reinterpret the serialized definition as a Cairo definition
// since the parsing step succeeded and confirmed it is a Cairo definition.
SerializedClassDefinition::Cairo(SerializedCairoDefinition::from_bytes(
serialized_definition.into_bytes(),
)),
)
}),
}
}

Expand Down Expand Up @@ -132,14 +163,16 @@ pub fn compute_cairo_hinted_class_hash(
///
/// Due to an issue in serde_json we can't use an untagged enum and simply
/// derive a Deserialize implementation: <https://github.com/serde-rs/json/issues/559>
pub fn parse_contract_definition(
contract_definition_dump: &[u8],
fn parse_contract_definition(
serialized_definition: &SerializedOpaqueClassDefinition,
) -> serde_json::Result<json::ContractDefinition<'_>> {
serde_json::from_slice::<json::SierraContractDefinition<'_>>(contract_definition_dump)
serde_json::from_slice::<json::SierraContractDefinition<'_>>(serialized_definition.as_bytes())
.map(json::ContractDefinition::Sierra)
.or_else(|_| {
serde_json::from_slice::<json::CairoContractDefinition<'_>>(contract_definition_dump)
.map(json::ContractDefinition::Cairo)
serde_json::from_slice::<json::CairoContractDefinition<'_>>(
serialized_definition.as_bytes(),
)
.map(json::ContractDefinition::Cairo)
})
}

Expand Down Expand Up @@ -799,17 +832,22 @@ pub mod json {

#[cfg(test)]
mod test_vectors {
use pathfinder_common::class_definition::SerializedOpaqueClassDefinition;
use pathfinder_common::macro_prelude::*;
use starknet_gateway_test_fixtures::class_definitions::*;

use super::super::{compute_class_hash, ComputedClassHash};

/// Test convenience: computes the class hash for a raw definition blob,
/// discarding the reinterpreted serialized definition.
fn hash(data: &[u8]) -> ComputedClassHash {
    let definition = SerializedOpaqueClassDefinition::from_slice(data);
    let (computed_hash, _definition) = compute_class_hash(definition).unwrap();
    computed_hash
}

#[tokio::test]
async fn first() {
let hash = compute_class_hash(INTEGRATION_TEST).unwrap();

assert_eq!(
hash,
hash(INTEGRATION_TEST),
ComputedClassHash::Cairo(class_hash!(
"0x031da92cf5f54bcb81b447e219e2b791b23f3052d12b6c9abd04ff2e5626576"
))
Expand All @@ -818,10 +856,8 @@ pub mod json {

#[test]
fn second() {
let hash = super::super::compute_class_hash(CONTRACT_DEFINITION).unwrap();

assert_eq!(
hash,
hash(CONTRACT_DEFINITION),
ComputedClassHash::Cairo(class_hash!(
"0x50b2148c0d782914e0b12a1a32abe5e398930b7e914f82c65cb7afce0a0ab9b"
))
Expand All @@ -830,10 +866,8 @@ pub mod json {

#[tokio::test]
async fn genesis_contract() {
let hash = compute_class_hash(GOERLI_GENESIS).unwrap();

assert_eq!(
hash,
hash(GOERLI_GENESIS),
ComputedClassHash::Cairo(class_hash!(
"0x10455c752b86932ce552f2b0fe81a880746649b9aee7e0d842bf3f52378f9f8"
))
Expand All @@ -851,10 +885,11 @@ pub mod json {

// Known contract which triggered a hash mismatch failure.
let extract = tokio::task::spawn_blocking(move || -> anyhow::Result<_> {
let hash = compute_class_hash(CAIRO_0_8_NEW_ATTRIBUTES)?;
Ok(hash)
compute_class_hash(SerializedOpaqueClassDefinition::from_slice(
CAIRO_0_8_NEW_ATTRIBUTES,
))
});
let calculated_hash = extract.await.unwrap().unwrap();
let (calculated_hash, _) = extract.await.unwrap().unwrap();

assert_eq!(calculated_hash, expected);
}
Expand All @@ -863,10 +898,8 @@ pub mod json {
async fn cairo_0_10() {
// Contract whose class triggered a deserialization issue because of the new
// `compiler_version` property.
let hash = compute_class_hash(CAIRO_0_10_COMPILER_VERSION).unwrap();

assert_eq!(
hash,
hash(CAIRO_0_10_COMPILER_VERSION),
ComputedClassHash::Cairo(class_hash!(
"0xa69700a89b1fa3648adff91c438b79c75f7dcb0f4798938a144cce221639d6"
))
Expand All @@ -878,10 +911,8 @@ pub mod json {
// Contract who's class contains `compiler_version` property as well as
// `cairo_type` with tuple values. These tuple values require a
// space to be injected in order to achieve the correct hash.
let hash = compute_class_hash(CAIRO_0_10_TUPLES_INTEGRATION).unwrap();

assert_eq!(
hash,
hash(CAIRO_0_10_TUPLES_INTEGRATION),
ComputedClassHash::Cairo(class_hash!(
"0x542460935cea188d21e752d8459d82d60497866aaad21f873cbb61621d34f7f"
))
Expand All @@ -893,10 +924,8 @@ pub mod json {
// Contract who's class contains `compiler_version` property as well as
// `cairo_type` with tuple values. These tuple values require a
// space to be injected in order to achieve the correct hash.
let hash = compute_class_hash(CAIRO_0_10_TUPLES_GOERLI).unwrap();

assert_eq!(
hash,
hash(CAIRO_0_10_TUPLES_GOERLI),
ComputedClassHash::Cairo(class_hash!(
"0x66af14b94491ba4e2aea1117acf0a3155c53d92fdfd9c1f1dcac90dc2d30157"
))
Expand All @@ -905,10 +934,8 @@ pub mod json {

#[tokio::test]
async fn cairo_0_11_sierra() {
let hash = compute_class_hash(CAIRO_0_11_SIERRA).unwrap();

assert_eq!(
hash,
hash(CAIRO_0_11_SIERRA),
ComputedClassHash::Sierra(class_hash!(
"0x4e70b19333ae94bd958625f7b61ce9eec631653597e68645e13780061b2136c"
))
Expand All @@ -917,14 +944,17 @@ pub mod json {

#[tokio::test]
async fn cairo_0_11_with_decimal_entry_point_offset() {
let hash = compute_class_hash(CAIRO_0_11_WITH_DECIMAL_ENTRY_POINT_OFFSET).unwrap();
let (hash, _) = compute_class_hash(SerializedOpaqueClassDefinition::from_slice(
CAIRO_0_11_WITH_DECIMAL_ENTRY_POINT_OFFSET,
))
.unwrap();

assert_eq!(
hash,
ComputedClassHash::Cairo(class_hash!(
"0x0484c163658bcce5f9916f486171ac60143a92897533aa7ff7ac800b16c63311"
))
)
);
}
}

Expand Down
10 changes: 5 additions & 5 deletions crates/common/src/casm_class.rs
Original file line number Diff line number Diff line change
Expand Up @@ -90,10 +90,10 @@ pub enum NestedIntList {
Node(Vec<NestedIntList>),
}

impl TryFrom<&str> for CasmContractClass {
type Error = serde_json::Error;

fn try_from(value: &str) -> Result<Self, Self::Error> {
serde_json::from_str(value)
impl CasmContractClass {
    /// Deserializes a [`CasmContractClass`] from its serialized (JSON) CASM
    /// definition bytes.
    ///
    /// Returns a `serde_json::Error` if the bytes are not valid JSON matching
    /// this type's schema.
    pub fn try_from_serialized_definition(
        definition: &crate::class_definition::SerializedCasmDefinition,
    ) -> Result<Self, serde_json::Error> {
        serde_json::from_slice(definition.as_bytes())
    }
}
108 changes: 108 additions & 0 deletions crates/common/src/class_definition.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,28 @@ use crate::{ByteCodeOffset, EntryPoint};

pub const CLASS_DEFINITION_MAX_ALLOWED_SIZE: u64 = 4 * 1024 * 1024;

/// Raw bytes of a serialized Sierra class definition.
#[derive(Clone, Debug, Default, PartialEq, Eq, Hash, Dummy)]
pub struct SerializedSierraDefinition(Vec<u8>);

/// Raw bytes of a serialized CASM (compiled class) definition.
#[derive(Clone, Debug, Default, PartialEq, Eq, Hash, Dummy)]
pub struct SerializedCasmDefinition(Vec<u8>);

/// Raw bytes of a serialized Cairo class definition.
#[derive(Clone, Debug, Default, PartialEq, Eq, Hash, Dummy)]
pub struct SerializedCairoDefinition(Vec<u8>);

/// Carries the definition of a serialized contract class, either Sierra or
/// Cairo, without distinguishing between the two; callers that need to know
/// the concrete kind must parse the bytes.
#[derive(Clone, Debug, Default, PartialEq, Eq, Hash, Dummy)]
pub struct SerializedOpaqueClassDefinition(Vec<u8>);

/// Carries the definition of a serialized contract class, either Sierra or
/// Cairo, with the concrete kind known.
#[derive(Clone, Debug)]
pub enum SerializedClassDefinition {
    /// The definition is a Sierra class.
    Sierra(SerializedSierraDefinition),
    /// The definition is a Cairo class.
    Cairo(SerializedCairoDefinition),
}

#[derive(Debug, Deserialize, Dummy)]
pub enum ClassDefinition<'a> {
Sierra(Sierra<'a>),
Expand Down Expand Up @@ -190,3 +212,89 @@ pub struct SelectorAndFunctionIndex {
pub selector: EntryPoint,
pub function_idx: u64,
}

impl SerializedSierraDefinition {
    /// Wraps an owned byte buffer without copying.
    pub fn from_bytes(raw: Vec<u8>) -> Self {
        Self(raw)
    }

    /// Creates a definition by copying the given byte slice.
    pub fn from_slice(raw: &[u8]) -> Self {
        Self(Vec::from(raw))
    }

    /// Consumes the definition, yielding the underlying byte buffer.
    pub fn into_bytes(self) -> Vec<u8> {
        self.0
    }

    /// Returns a borrowed view of the underlying bytes.
    pub fn as_bytes(&self) -> &[u8] {
        self.0.as_slice()
    }
}

impl SerializedCasmDefinition {
    /// Wraps an owned byte buffer without copying.
    pub fn from_bytes(raw: Vec<u8>) -> Self {
        Self(raw)
    }

    /// Creates a definition by copying the given byte slice.
    pub fn from_slice(raw: &[u8]) -> Self {
        Self(Vec::from(raw))
    }

    /// Consumes the definition, yielding the underlying byte buffer.
    pub fn into_bytes(self) -> Vec<u8> {
        self.0
    }

    /// Returns a borrowed view of the underlying bytes.
    pub fn as_bytes(&self) -> &[u8] {
        self.0.as_slice()
    }
}

impl SerializedCairoDefinition {
    /// Wraps an owned byte buffer without copying.
    pub fn from_bytes(raw: Vec<u8>) -> Self {
        Self(raw)
    }

    /// Creates a definition by copying the given byte slice.
    pub fn from_slice(raw: &[u8]) -> Self {
        Self(Vec::from(raw))
    }

    /// Consumes the definition, yielding the underlying byte buffer.
    pub fn into_bytes(self) -> Vec<u8> {
        self.0
    }

    /// Returns a borrowed view of the underlying bytes.
    pub fn as_bytes(&self) -> &[u8] {
        self.0.as_slice()
    }
}

impl SerializedOpaqueClassDefinition {
    /// Wraps an owned byte buffer without copying.
    pub fn from_bytes(raw: Vec<u8>) -> Self {
        Self(raw)
    }

    /// Creates a definition by copying the given byte slice.
    pub fn from_slice(raw: &[u8]) -> Self {
        Self(Vec::from(raw))
    }

    /// Consumes the definition, yielding the underlying byte buffer.
    pub fn into_bytes(self) -> Vec<u8> {
        self.0
    }

    /// Returns a borrowed view of the underlying bytes.
    pub fn as_bytes(&self) -> &[u8] {
        self.0.as_slice()
    }
}

/// Infallible widening conversion: every Sierra definition is also a valid
/// opaque class definition, so `From` (rather than `TryFrom`) applies.
impl From<SerializedSierraDefinition> for SerializedOpaqueClassDefinition {
    fn from(definition: SerializedSierraDefinition) -> Self {
        Self::from_bytes(definition.into_bytes())
    }
}

/// Infallible widening conversion: every Cairo definition is also a valid
/// opaque class definition, so `From` (rather than `TryFrom`) applies.
impl From<SerializedCairoDefinition> for SerializedOpaqueClassDefinition {
    fn from(definition: SerializedCairoDefinition) -> Self {
        Self::from_bytes(definition.into_bytes())
    }
}
5 changes: 3 additions & 2 deletions crates/common/src/l2.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ use std::sync::{Arc, RwLock};

use fake::Dummy;

use crate::class_definition::{SerializedCasmDefinition, SerializedSierraDefinition};
use crate::event::Event;
use crate::receipt::Receipt;
use crate::state_update::StateUpdateData;
Expand Down Expand Up @@ -97,8 +98,8 @@ pub struct ConsensusFinalizedBlockHeader {
pub struct DeclaredClass {
pub sierra_hash: SierraHash,
pub casm_hash_v2: CasmHash,
pub sierra_def: Vec<u8>,
pub casm_def: Vec<u8>,
pub sierra_def: SerializedSierraDefinition,
pub casm_def: SerializedCasmDefinition,
}

impl From<L2Block> for L2BlockToCommit {
Expand Down
Loading
Loading