From 188617516a53ee5180d804cac99bff85d77b8899 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tomasz=20Drwi=C4=99ga?= Date: Fri, 3 Apr 2026 17:55:03 +0200 Subject: [PATCH 1/6] IsAuthorized invocation --- bin/test-runner/w3f/codec/work-package.ts | 4 +- packages/jam/block/work-package.ts | 18 +-- packages/jam/executor/pvm-executor.ts | 27 ++++ packages/jam/in-core/in-core.test.ts | 4 +- packages/jam/in-core/in-core.ts | 120 +++++++++++++++--- .../jam/jam-host-calls/general/fetch.test.ts | 32 ++--- packages/jam/jam-host-calls/general/fetch.ts | 120 +++++++++--------- .../jam/transition/externalities/index.ts | 1 + .../is-authorized-fetch-externalities.ts | 54 ++++++++ .../refine-fetch-externalities.ts | 34 +++-- 10 files changed, 292 insertions(+), 122 deletions(-) create mode 100644 packages/jam/transition/externalities/is-authorized-fetch-externalities.ts diff --git a/bin/test-runner/w3f/codec/work-package.ts b/bin/test-runner/w3f/codec/work-package.ts index 3bc91d571..9875b5666 100644 --- a/bin/test-runner/w3f/codec/work-package.ts +++ b/bin/test-runner/w3f/codec/work-package.ts @@ -21,10 +21,10 @@ export const workPackageFromJson = json.object( }, ({ authorization, auth_code_host, auth_code_hash, authorizer_config, context, items }) => WorkPackage.create({ - authorization, + authToken: authorization, authCodeHost: auth_code_host, authCodeHash: auth_code_hash, - parametrization: authorizer_config, + authConfiguration: authorizer_config, context, items: FixedSizeArray.new(items, tryAsWorkItemsCount(items.length)), }), diff --git a/packages/jam/block/work-package.ts b/packages/jam/block/work-package.ts index 1423e5946..2a3519d5c 100644 --- a/packages/jam/block/work-package.ts +++ b/packages/jam/block/work-package.ts @@ -31,7 +31,7 @@ export const MAX_NUMBER_OF_WORK_ITEMS = 16; /** * A piece of work done within a core. 
* - * `P = (j ∈ Y, h ∈ NS, u ∈ H, p ∈ Y, x ∈ X, w ∈ ⟦I⟧1∶I) + * `P = (j ∈ Y, h ∈ NS, u ∈ H, f ∈ Y, x ∈ X, w ∈ ⟦I⟧1∶I) * * https://graypaper.fluffylabs.dev/#/579bd12/197000197200 */ @@ -40,8 +40,8 @@ export class WorkPackage extends WithDebug { authCodeHost: codec.u32.asOpaque(), authCodeHash: codec.bytes(HASH_SIZE).asOpaque(), context: RefineContext.Codec, - authorization: codec.blob, - parametrization: codec.blob, + authToken: codec.blob, + authConfiguration: codec.blob, items: codec.sequenceVarLen(WorkItem.Codec).convert( (x) => x, (items) => FixedSizeArray.new(items, tryAsWorkItemsCount(items.length)), @@ -49,25 +49,25 @@ export class WorkPackage extends WithDebug { }); static create({ - authorization, + authToken, authCodeHost, authCodeHash, - parametrization, + authConfiguration, context, items, }: CodecRecord) { - return new WorkPackage(authorization, authCodeHost, authCodeHash, parametrization, context, items); + return new WorkPackage(authToken, authCodeHost, authCodeHash, authConfiguration, context, items); } private constructor( /** `j`: simple blob acting as an authorization token */ - public readonly authorization: BytesBlob, + public readonly authToken: BytesBlob, /** `h`: index of the service that hosts the authorization code */ public readonly authCodeHost: ServiceId, /** `u`: authorization code hash */ public readonly authCodeHash: CodeHash, - /** `p`: authorization parametrization blob */ - public readonly parametrization: BytesBlob, + /** `f`: authorization configuration blob */ + public readonly authConfiguration: BytesBlob, /** `x`: context in which the refine function should run */ public readonly context: RefineContext, /** diff --git a/packages/jam/executor/pvm-executor.ts b/packages/jam/executor/pvm-executor.ts index 2cac1314a..45c0db4f4 100644 --- a/packages/jam/executor/pvm-executor.ts +++ b/packages/jam/executor/pvm-executor.ts @@ -57,6 +57,10 @@ export type AccumulateHostCallExternalities = { serviceExternalities: general.AccountsInfo & 
general.AccountsLookup & general.AccountsWrite & general.AccountsRead; }; +export type IsAuthorizedHostCallExternalities = { + fetchExternalities: general.IIsAuthorizedFetch; +}; + type OnTransferHostCallExternalities = { partialState: general.AccountsInfo & general.AccountsLookup & general.AccountsWrite & general.AccountsRead; fetchExternalities: general.IFetchExternalities; @@ -134,6 +138,17 @@ export class PvmExecutor { return accumulateHandlers.concat(generalHandlers); } + /** Prepare is-authorized host call handlers */ + private static prepareIsAuthorizedHostCalls(serviceId: ServiceId, externalities: IsAuthorizedHostCallExternalities) { + const generalHandlers: HostCallHandler[] = [ + new general.LogHostCall(serviceId), + new general.GasHostCall(serviceId), + new general.Fetch(serviceId, externalities.fetchExternalities), + ]; + + return generalHandlers; + } + /** Prepare on transfer host call handlers */ private static prepareOnTransferHostCalls(serviceId: ServiceId, externalities: OnTransferHostCallExternalities) { const generalHandlers: HostCallHandler[] = [ @@ -175,6 +190,18 @@ export class PvmExecutor { return new PvmExecutor(serviceCode, hostCallHandlers, entrypoint.REFINE, instances); } + /** A utility function that can be used to prepare is-authorized executor */ + static async createIsAuthorizedExecutor( + serviceId: ServiceId, + serviceCode: BytesBlob, + externalities: IsAuthorizedHostCallExternalities, + pvm: PvmBackend, + ) { + const hostCallHandlers = PvmExecutor.prepareIsAuthorizedHostCalls(serviceId, externalities); + const instances = await PvmExecutor.prepareBackend(pvm); + return new PvmExecutor(serviceCode, hostCallHandlers, entrypoint.IS_AUTHORIZED, instances); + } + /** A utility function that can be used to prepare accumulate executor */ static async createAccumulateExecutor( serviceId: ServiceId, diff --git a/packages/jam/in-core/in-core.test.ts b/packages/jam/in-core/in-core.test.ts index 397e23c97..7cda9f7f7 100644 --- 
a/packages/jam/in-core/in-core.test.ts +++ b/packages/jam/in-core/in-core.test.ts @@ -37,10 +37,10 @@ function createWorkItem(serviceId = 1) { function createWorkPackage(anchorHash: HeaderHash, stateRoot: StateRootHash, lookupAnchorSlot = 0) { return WorkPackage.create({ - authorization: BytesBlob.empty(), + authToken: BytesBlob.empty(), authCodeHost: tryAsServiceId(1), authCodeHash: Bytes.zero(HASH_SIZE).asOpaque(), - parametrization: BytesBlob.empty(), + authConfiguration: BytesBlob.empty(), context: RefineContext.create({ anchor: anchorHash, stateRoot, diff --git a/packages/jam/in-core/in-core.ts b/packages/jam/in-core/in-core.ts index c53157807..06ab1c7dc 100644 --- a/packages/jam/in-core/in-core.ts +++ b/packages/jam/in-core/in-core.ts @@ -8,7 +8,7 @@ import { tryAsCoreIndex, tryAsServiceGas, } from "@typeberry/block"; -import { W_C } from "@typeberry/block/gp-constants.js"; +import { G_I, W_A, W_C } from "@typeberry/block/gp-constants.js"; import { type AuthorizerHash, type RefineContext, @@ -29,6 +29,7 @@ import { type Blake2b, HASH_SIZE, type WithHash } from "@typeberry/hash"; import { Logger } from "@typeberry/logger"; import { tryAsU8, tryAsU16, tryAsU32 } from "@typeberry/numbers"; import type { State } from "@typeberry/state"; +import { IsAuthorizedFetchExternalities } from "@typeberry/transition/externalities/is-authorized-fetch-externalities.js"; import { RefineFetchExternalities } from "@typeberry/transition/externalities/refine-fetch-externalities.js"; import { assertEmpty, assertNever, Result } from "@typeberry/utils"; import { RefineExternalitiesImpl } from "./externalities/refine.js"; @@ -65,7 +66,14 @@ enum ServiceCodeError { ServiceCodeTooBig = 3, } -enum AuthorizationError {} +enum AuthorizationError { + /** BAD: authorizer code not found (service or preimage missing). */ + CodeNotFound = 0, + /** BIG: authorizer code exceeds W_A limit. */ + CodeTooBig = 1, + /** PANIC/OOG: PVM execution failed. 
*/ + PvmFailed = 2, +} type AuthorizationOk = { authorizerHash: AuthorizerHash; @@ -83,7 +91,7 @@ export type ImportedSegment = { const logger = Logger.new(import.meta.filename, "refine"); /** https://graypaper.fluffylabs.dev/#/ab2cdbd/2ffe002ffe00?v=0.7.2 */ -const ARGS_CODEC = codec.object({ +const REFINE_ARGS_CODEC = codec.object({ core: codec.varU32.convert( (x) => tryAsU32(x), (x) => tryAsCoreIndex(x), @@ -93,6 +101,9 @@ const ARGS_CODEC = codec.object({ payloadLength: codec.varU32, packageHash: codec.bytes(HASH_SIZE).asOpaque(), }); +const AUTH_ARGS_CODEC = codec.object({ + coreIndex: codec.u16, +}); export class InCore { constructor( @@ -119,7 +130,7 @@ export class InCore { extrinsics: PerWorkItem, ): Promise> { const workPackageHash = workPackageAndHash.hash; - const { context, authorization, authCodeHash, authCodeHost, parametrization, items, ...rest } = + const { context, authToken, authCodeHash, authCodeHost, authConfiguration, items, ...rest } = workPackageAndHash.data; assertEmpty(rest); @@ -127,8 +138,9 @@ export class InCore { // TODO [ToDr] Verify prerequisites logger.log`[core:${core}] Attempting to refine work package with ${items.length} items.`; - // TODO [ToDr] GP link // Verify anchor block + // https://graypaper.fluffylabs.dev/#/ab2cdbd/15cd0215cd02?v=0.7.2 + // TODO [ToDr] Validation const state = this.states.getState(context.anchor); if (state === null) { return Result.error(RefineError.StateMissing, () => `State at anchor block ${context.anchor} is missing.`); @@ -160,7 +172,14 @@ export class InCore { } // Check authorization - const authResult = await this.authorizePackage(authorization, authCodeHost, authCodeHash, parametrization); + const authResult = await this.authorizePackage( + state, + core, + authToken, + authCodeHost, + authCodeHash, + authConfiguration, + ); if (authResult.isError) { return Result.error( RefineError.AuthorizationError, @@ -252,22 +271,83 @@ export class InCore { }; } + /** + * IsAuthorized invocation. 
+ * + * https://graypaper.fluffylabs.dev/#/ab2cdbd/2e64002e6400?v=0.7.2 + */ private async authorizePackage( - _authorization: BytesBlob, - _authCodeHost: ServiceId, - _authCodeHash: CodeHash, - _parametrization: BytesBlob, + state: State, + coreIndex: CoreIndex, + authToken: BytesBlob, + authCodeHost: ServiceId, + authCodeHash: CodeHash, + authConfiguration: BytesBlob, ): Promise> { - // TODO [ToDr] Check authorization? - const authorizerHash = Bytes.zero(HASH_SIZE).asOpaque(); - const authorizationGasUsed = tryAsServiceGas(0); - const authorizationOutput = BytesBlob.empty(); - - return Result.ok({ - authorizerHash, - authorizationGasUsed, - authorizationOutput, + // Look up the authorizer code from the auth code host service + const service = state.getService(authCodeHost); + // https://graypaper.fluffylabs.dev/#/ab2cdbd/2eca002eca00?v=0.7.2 + if (service === null) { + return Result.error( + AuthorizationError.CodeNotFound, + () => `Auth code host service ${authCodeHost} not found in state.`, + ); + } + + const code = service.getPreimage(authCodeHash.asOpaque()); + if (code === null) { + return Result.error( + AuthorizationError.CodeNotFound, + () => `Auth code preimage ${authCodeHash} not found in service ${authCodeHost}.`, + ); + } + + // BIG: code exceeds W_A + // https://graypaper.fluffylabs.dev/#/ab2cdbd/2ed6002ed600?v=0.7.2 + if (code.length > W_A) { + return Result.error( + AuthorizationError.CodeTooBig, + () => `Auth code is too big: ${code.length} bytes vs ${W_A} max.`, + ); + } + + // Prepare fetch externalities and executor + const fetchExternalities = new IsAuthorizedFetchExternalities( + this.chainSpec, { + authToken: authToken, + authConfiguration: authConfiguration, + } + ); + const executor = await PvmExecutor.createIsAuthorizedExecutor( + authCodeHost, + code, + { fetchExternalities }, + this.pvmBackend, + ); + + const args = Encoder.encodeObject(AUTH_ARGS_CODEC, { + coreIndex, }); + + // Run PVM with gas budget G_I + const gasLimit = 
tryAsServiceGas(G_I); + const execResult = await executor.run(args, gasLimit); + + if (execResult.status !== ReturnStatus.OK) { + return Result.error( + AuthorizationError.PvmFailed, + () => + `IsAuthorized PVM ${ReturnStatus[execResult.status]} (gas used: ${execResult.consumedGas}).`, + ); + } + + // Compute authorizer hash: blake2b(codeHash ++ configuration) + // https://graypaper-reader.netlify.app/#/ab2cdbd/1b81011b8401?v=0.7.2 + const authorizerHash = this.blake2b.hashBlobs([authCodeHash, authConfiguration]); + const authorizationOutput = BytesBlob.blobFrom(execResult.memorySlice); + const authorizationGasUsed = tryAsServiceGas(execResult.consumedGas); + + return Result.ok({ authorizerHash, authorizationGasUsed, authorizationOutput }); } private async refineItem( @@ -328,7 +408,7 @@ export class InCore { const executor = await PvmExecutor.createRefineExecutor(item.service, code, externalities, this.pvmBackend); - const args = Encoder.encodeObject(ARGS_CODEC, { + const args = Encoder.encodeObject(REFINE_ARGS_CODEC, { serviceId: item.service, core: coreIndex, workItemIndex: tryAsU32(idx), diff --git a/packages/jam/jam-host-calls/general/fetch.test.ts b/packages/jam/jam-host-calls/general/fetch.test.ts index 1c84497c9..27c16bc2a 100644 --- a/packages/jam/jam-host-calls/general/fetch.test.ts +++ b/packages/jam/jam-host-calls/general/fetch.test.ts @@ -269,7 +269,7 @@ describe("Fetch", () => { const fetchMock = new RefineFetchMock(); fetchMock.authorizerResponse = blob; - const { registers, memory, readBack, expectedLength } = prepareRegsAndMemory(blob, FetchKind.Authorizer); + const { registers, memory, readBack, expectedLength } = prepareRegsAndMemory(blob, FetchKind.AuthConfiguration); const fetch = new Fetch(currentServiceId, fetchMock); const result = await fetch.execute(gas, registers, memory); @@ -285,7 +285,7 @@ describe("Fetch", () => { const fetchMock = new RefineFetchMock(); fetchMock.authorizationTokenResponse = blob; - const { registers, memory, 
readBack, expectedLength } = prepareRegsAndMemory(blob, FetchKind.AuthorizationToken); + const { registers, memory, readBack, expectedLength } = prepareRegsAndMemory(blob, FetchKind.AuthToken); const fetch = new Fetch(currentServiceId, fetchMock); const result = await fetch.execute(gas, registers, memory); @@ -418,8 +418,8 @@ describe("Fetch", () => { FetchKind.OtherWorkItemImports, FetchKind.MyImports, FetchKind.WorkPackage, - FetchKind.Authorizer, - FetchKind.AuthorizationToken, + FetchKind.AuthConfiguration, + FetchKind.AuthToken, FetchKind.RefineContext, FetchKind.AllWorkItems, FetchKind.OneWorkItem, @@ -491,14 +491,14 @@ class RefineFetchMock implements IRefineFetch { public constantsResponse: BytesBlob | null = null; public entropyResponse: EntropyHash | null = null; - public authorizerTraceResponse: BytesBlob | null = null; + public authorizerTraceResponse: BytesBlob = BytesBlob.empty(); public workItemExtrinsicResponses: Map = new Map(); public workItemImportResponses: Map = new Map(); - public workPackageResponse: BytesBlob | null = null; - public authorizerResponse: BytesBlob | null = null; - public authorizationTokenResponse: BytesBlob | null = null; - public refineContextResponse: BytesBlob | null = null; - public allWorkItemsResponse: BytesBlob | null = null; + public workPackageResponse: BytesBlob = BytesBlob.empty(); + public authorizerResponse: BytesBlob = BytesBlob.empty(); + public authorizationTokenResponse: BytesBlob = BytesBlob.empty(); + public refineContextResponse: BytesBlob = BytesBlob.empty(); + public allWorkItemsResponse: BytesBlob = BytesBlob.empty(); public oneWorkItemResponses: Map = new Map(); public workItemPayloadResponses: Map = new Map(); @@ -516,7 +516,7 @@ class RefineFetchMock implements IRefineFetch { return this.entropyResponse; } - authorizerTrace(): BytesBlob | null { + authorizerTrace(): BytesBlob { return this.authorizerTraceResponse; } @@ -538,23 +538,23 @@ class RefineFetchMock implements IRefineFetch { return 
this.workItemImportResponses.get(key) ?? null; } - workPackage(): BytesBlob | null { + workPackage(): BytesBlob { return this.workPackageResponse; } - authorizer(): BytesBlob | null { + authConfiguration(): BytesBlob { return this.authorizerResponse; } - authorizationToken(): BytesBlob | null { + authToken(): BytesBlob { return this.authorizationTokenResponse; } - refineContext(): BytesBlob | null { + refineContext(): BytesBlob { return this.refineContextResponse; } - allWorkItems(): BytesBlob | null { + allWorkItems(): BytesBlob { return this.allWorkItemsResponse; } diff --git a/packages/jam/jam-host-calls/general/fetch.ts b/packages/jam/jam-host-calls/general/fetch.ts index 07153717f..dccc36d11 100644 --- a/packages/jam/jam-host-calls/general/fetch.ts +++ b/packages/jam/jam-host-calls/general/fetch.ts @@ -18,7 +18,7 @@ import { HostCallResult } from "./results.js"; * Ω_Y signature: Ω_Y(ρ, φ, μ, p, n, r, i, ī, x̄, 𝐢, ...) * * Context parameter mapping - * Is-Authorized: Ω_Y(ρ, φ, μ, 𝐩, ∅, ∅, ∅, ∅, ∅, ∅, ∅) + * IsAuthorized: Ω_Y(ρ, φ, μ, 𝐩, ∅, ∅, ∅, ∅, ∅, ∅, ∅) * https://graypaper.fluffylabs.dev/#/ab2cdbd/2e43012e4301?v=0.7.2 * Refine: Ω_Y(ρ, φ, μ, p, H₀, r, i, ī, x̄, ∅, (m,e)) * https://graypaper.fluffylabs.dev/#/ab2cdbd/2fe0012fe001?v=0.7.2 @@ -26,13 +26,13 @@ import { HostCallResult } from "./results.js"; * https://graypaper.fluffylabs.dev/#/ab2cdbd/30c00030c000?v=0.7.2 * * Kind availability per context: - * Kind 0 (constants) — all contexts - * Kind 1 (n) — Refine (H₀), Accumulate (η'₀) - * Kind 2 (r) — Refine only - * Kind 3-4 (x̄ extrinsics) — Refine only - * Kind 5-6 (ī imports) — Refine only - * Kind 7-13 (p work pkg) — Is-Authorized, Refine - * Kind 14-15 (𝐢 acc items) — Accumulate only + * Kind 0 (constants) - all contexts + * Kind 1 (n) - Refine (H₀), Accumulate (η'₀) + * Kind 2 (r) - Refine only + * Kind 3-4 (x̄ extrinsics) - Refine only + * Kind 5-6 (ī imports) - Refine only + * Kind 7-13 (p work pkg) - IsAuthorized, Refine + * Kind 14-15 (𝐢 acc items) 
- Accumulate only */ export enum FetchContext { IsAuthorized = "isAuthorized", @@ -41,7 +41,7 @@ export enum FetchContext { } /** - * Fetch externalities for the Is-Authorized context. + * Fetch externalities for the IsAuthorized context. * * Ω_Y(ρ, φ, μ, 𝐩, ∅, ∅, ∅, ∅, ∅, ∅, ∅) * https://graypaper.fluffylabs.dev/#/ab2cdbd/2e43012e4301?v=0.7.2 @@ -59,49 +59,49 @@ export interface IIsAuthorizedFetch { constants(): BytesBlob; /** - * Kind 7: Encoded work package — E(𝐩). + * Kind 7: Encoded work package - E(𝐩). * * https://graypaper.fluffylabs.dev/#/ab2cdbd/31c10231c102?v=0.7.2 */ - workPackage(): BytesBlob | null; + workPackage(): BytesBlob; /** - * Kind 8: Authorizer code hash and config — p_f. + * Kind 8: Authorizer configuration - p_f. * * https://graypaper.fluffylabs.dev/#/ab2cdbd/31c80231c802?v=0.7.2 */ - authorizer(): BytesBlob | null; + authConfiguration(): BytesBlob; /** - * Kind 9: Authorization token — p_j. + * Kind 9: Authorization token - p_j. * * https://graypaper.fluffylabs.dev/#/ab2cdbd/31cf0231cf02?v=0.7.2 */ - authorizationToken(): BytesBlob | null; + authToken(): BytesBlob; /** - * Kind 10: Refinement context — E(p_x). + * Kind 10: Refinement context - E(p_x). * * https://graypaper.fluffylabs.dev/#/ab2cdbd/31da0231da02?v=0.7.2 */ - refineContext(): BytesBlob | null; + refineContext(): BytesBlob; /** - * Kind 11: All work-item summaries — E(↕[S(w) | w ← p_w]). + * Kind 11: All work-item summaries - E(↕[S(w) | w ← p_w]). * * https://graypaper.fluffylabs.dev/#/ab2cdbd/31f40231f402?v=0.7.2 */ - allWorkItems(): BytesBlob | null; + allWorkItems(): BytesBlob; /** - * Kind 12: Single work-item summary — S(p_w[φ₁₁]). + * Kind 12: Single work-item summary - S(p_w[φ₁₁]). * * https://graypaper.fluffylabs.dev/#/ab2cdbd/31fc0231fc02?v=0.7.2 */ oneWorkItem(workItem: U64): BytesBlob | null; /** - * Kind 13: Work-item payload — p_w[φ₁₁]_y. + * Kind 13: Work-item payload - p_w[φ₁₁]_y. 
* * https://graypaper.fluffylabs.dev/#/ab2cdbd/313b03313b03?v=0.7.2 */ @@ -127,7 +127,7 @@ export interface IRefineFetch { constants(): BytesBlob; /** - * Kind 1: Entropy pool — H₀ (zero hash). + * Kind 1: Entropy pool - H₀ (zero hash). * * https://graypaper.fluffylabs.dev/#/ab2cdbd/2fe0012fe201?v=0.7.2 */ @@ -138,7 +138,7 @@ export interface IRefineFetch { * * https://graypaper.fluffylabs.dev/#/ab2cdbd/314902314902?v=0.7.2 */ - authorizerTrace(): BytesBlob | null; + authorizerTrace(): BytesBlob; /** * Kind 3 (other) / Kind 4 (my): Work-item extrinsics (x̄). @@ -163,49 +163,49 @@ export interface IRefineFetch { workItemImport(workItem: U64 | null, index: U64): BytesBlob | null; /** - * Kind 7: Encoded work package — E(p). + * Kind 7: Encoded work package - E(p). * * https://graypaper.fluffylabs.dev/#/ab2cdbd/31c10231c102?v=0.7.2 */ - workPackage(): BytesBlob | null; + workPackage(): BytesBlob; /** - * Kind 8: Authorizer code hash and config — p_f. + * Kind 8: Authorizer configuration - p_f. * * https://graypaper.fluffylabs.dev/#/ab2cdbd/31c80231c802?v=0.7.2 */ - authorizer(): BytesBlob | null; + authConfiguration(): BytesBlob; /** - * Kind 9: Authorization token — p_j. + * Kind 9: Authorization token - p_j. * * https://graypaper.fluffylabs.dev/#/ab2cdbd/31cf0231cf02?v=0.7.2 */ - authorizationToken(): BytesBlob | null; + authToken(): BytesBlob; /** - * Kind 10: Refinement context — E(p_x). + * Kind 10: Refinement context - E(p_x). * * https://graypaper.fluffylabs.dev/#/ab2cdbd/31da0231da02?v=0.7.2 */ - refineContext(): BytesBlob | null; + refineContext(): BytesBlob; /** - * Kind 11: All work-item summaries — E(↕[S(w) | w ← p_w]). + * Kind 11: All work-item summaries - E(↕[S(w) | w ← p_w]). * * https://graypaper.fluffylabs.dev/#/ab2cdbd/31f40231f402?v=0.7.2 */ - allWorkItems(): BytesBlob | null; + allWorkItems(): BytesBlob; /** - * Kind 12: Single work-item summary — S(p_w[φ₁₁]). + * Kind 12: Single work-item summary - S(p_w[φ₁₁]). 
* * https://graypaper.fluffylabs.dev/#/ab2cdbd/31fc0231fc02?v=0.7.2 */ oneWorkItem(workItem: U64): BytesBlob | null; /** - * Kind 13: Work-item payload — p_w[φ₁₁]_y. + * Kind 13: Work-item payload - p_w[φ₁₁]_y. * * https://graypaper.fluffylabs.dev/#/ab2cdbd/313b03313b03?v=0.7.2 */ @@ -231,21 +231,21 @@ export interface IAccumulateFetch { constants(): BytesBlob; /** - * Kind 1: Entropy pool — η'₀ (posterior entropy). + * Kind 1: Entropy pool - η'₀ (posterior entropy). * * https://graypaper.fluffylabs.dev/#/ab2cdbd/314302314602?v=0.7.2 */ entropy(): EntropyHash; /** - * Kind 14: All accumulation operands and transfers — E(↕𝐢). + * Kind 14: All accumulation operands and transfers - E(↕𝐢). * * https://graypaper.fluffylabs.dev/#/ab2cdbd/314e03314e03?v=0.7.2 */ allTransfersAndOperands(): BytesBlob | null; /** - * Kind 15: Single accumulation operand or transfer — E(𝐢[φ₁₁]). + * Kind 15: Single accumulation operand or transfer - E(𝐢[φ₁₁]). * * https://graypaper.fluffylabs.dev/#/ab2cdbd/315903315903?v=0.7.2 */ @@ -304,12 +304,12 @@ export class Fetch implements HostCallHandler { private getValue(kind: U32, regs: HostCallRegisters): BytesBlob | null { const ext = this.fetch; - // Kind 0: constants — all contexts + // Kind 0: constants - all contexts if (kind === FetchKind.Constants) { return ext.constants(); } - // Kind 1: entropy — Refine, Accumulate + // Kind 1: entropy - Refine, Accumulate if (kind === FetchKind.Entropy) { if (ext.context === FetchContext.IsAuthorized) { return null; @@ -317,7 +317,7 @@ export class Fetch implements HostCallHandler { return ext.entropy(); } - // Kind 2: authorizer trace — Refine only + // Kind 2: authorizer trace - Refine only if (kind === FetchKind.AuthorizerTrace) { if (ext.context !== FetchContext.Refine) { return null; @@ -325,7 +325,7 @@ export class Fetch implements HostCallHandler { return ext.authorizerTrace(); } - // Kind 3: other work item extrinsics — Refine only + // Kind 3: other work item extrinsics - Refine only if (kind 
=== FetchKind.OtherWorkItemExtrinsics) { if (ext.context !== FetchContext.Refine) { return null; @@ -335,7 +335,7 @@ export class Fetch implements HostCallHandler { return ext.workItemExtrinsic(workItem, index); } - // Kind 4: my extrinsics — Refine only + // Kind 4: my extrinsics - Refine only if (kind === FetchKind.MyExtrinsics) { if (ext.context !== FetchContext.Refine) { return null; @@ -344,7 +344,7 @@ export class Fetch implements HostCallHandler { return ext.workItemExtrinsic(null, index); } - // Kind 5: other work item imports — Refine only + // Kind 5: other work item imports - Refine only if (kind === FetchKind.OtherWorkItemImports) { if (ext.context !== FetchContext.Refine) { return null; @@ -354,7 +354,7 @@ export class Fetch implements HostCallHandler { return ext.workItemImport(workItem, index); } - // Kind 6: my imports — Refine only + // Kind 6: my imports - Refine only if (kind === FetchKind.MyImports) { if (ext.context !== FetchContext.Refine) { return null; @@ -363,7 +363,7 @@ export class Fetch implements HostCallHandler { return ext.workItemImport(null, index); } - // Kind 7: work package — Is-Authorized, Refine + // Kind 7: work package - IsAuthorized, Refine if (kind === FetchKind.WorkPackage) { if (ext.context === FetchContext.Accumulate) { return null; @@ -371,23 +371,23 @@ export class Fetch implements HostCallHandler { return ext.workPackage(); } - // Kind 8: authorizer — Is-Authorized, Refine - if (kind === FetchKind.Authorizer) { + // Kind 8: auth configuration - IsAuthorized, Refine + if (kind === FetchKind.AuthConfiguration) { if (ext.context === FetchContext.Accumulate) { return null; } - return ext.authorizer(); + return ext.authConfiguration(); } - // Kind 9: authorization token — Is-Authorized, Refine - if (kind === FetchKind.AuthorizationToken) { + // Kind 9: authorization token - IsAuthorized, Refine + if (kind === FetchKind.AuthToken) { if (ext.context === FetchContext.Accumulate) { return null; } - return 
ext.authorizationToken(); + return ext.authToken(); } - // Kind 10: refine context — Is-Authorized, Refine + // Kind 10: refine context - IsAuthorized, Refine if (kind === FetchKind.RefineContext) { if (ext.context === FetchContext.Accumulate) { return null; @@ -395,7 +395,7 @@ export class Fetch implements HostCallHandler { return ext.refineContext(); } - // Kind 11: all work items — Is-Authorized, Refine + // Kind 11: all work items - IsAuthorized, Refine if (kind === FetchKind.AllWorkItems) { if (ext.context === FetchContext.Accumulate) { return null; @@ -403,7 +403,7 @@ export class Fetch implements HostCallHandler { return ext.allWorkItems(); } - // Kind 12: one work item — Is-Authorized, Refine + // Kind 12: one work item - IsAuthorized, Refine if (kind === FetchKind.OneWorkItem) { if (ext.context === FetchContext.Accumulate) { return null; @@ -412,7 +412,7 @@ export class Fetch implements HostCallHandler { return ext.oneWorkItem(workItem); } - // Kind 13: work item payload — Is-Authorized, Refine + // Kind 13: work item payload - IsAuthorized, Refine if (kind === FetchKind.WorkItemPayload) { if (ext.context === FetchContext.Accumulate) { return null; @@ -421,7 +421,7 @@ export class Fetch implements HostCallHandler { return ext.workItemPayload(workItem); } - // Kind 14: all transfers and operands — Accumulate only + // Kind 14: all transfers and operands - Accumulate only if (kind === FetchKind.AllTransfersAndOperands) { if (ext.context !== FetchContext.Accumulate) { return null; @@ -429,7 +429,7 @@ export class Fetch implements HostCallHandler { return ext.allTransfersAndOperands(); } - // Kind 15: one transfer or operand — Accumulate only + // Kind 15: one transfer or operand - Accumulate only if (kind === FetchKind.OneTransferOrOperand) { if (ext.context !== FetchContext.Accumulate) { return null; @@ -451,8 +451,8 @@ export enum FetchKind { OtherWorkItemImports = 5, MyImports = 6, WorkPackage = 7, - Authorizer = 8, - AuthorizationToken = 9, + 
AuthConfiguration = 8, + AuthToken = 9, RefineContext = 10, AllWorkItems = 11, OneWorkItem = 12, diff --git a/packages/jam/transition/externalities/index.ts b/packages/jam/transition/externalities/index.ts index 189228ba8..c2b85dc83 100644 --- a/packages/jam/transition/externalities/index.ts +++ b/packages/jam/transition/externalities/index.ts @@ -1,4 +1,5 @@ export * from "./accumulate-externalities.js"; export * from "./accumulate-fetch-externalities.js"; export * from "./fetch-externalities.js"; +export * from "./is-authorized-fetch-externalities.js"; export * from "./refine-fetch-externalities.js"; diff --git a/packages/jam/transition/externalities/is-authorized-fetch-externalities.ts b/packages/jam/transition/externalities/is-authorized-fetch-externalities.ts new file mode 100644 index 000000000..b3df06d17 --- /dev/null +++ b/packages/jam/transition/externalities/is-authorized-fetch-externalities.ts @@ -0,0 +1,54 @@ +import { BytesBlob } from "@typeberry/bytes"; +import type { ChainSpec } from "@typeberry/config"; +import { general } from "@typeberry/jam-host-calls"; +import type { U64 } from "@typeberry/numbers"; +import { getEncodedConstants } from "./fetch-externalities.js"; + +export class IsAuthorizedFetchExternalities implements general.IIsAuthorizedFetch { + readonly context = general.FetchContext.IsAuthorized; + + constructor( + private readonly chainSpec: ChainSpec, + private readonly params: { + authToken: BytesBlob, + authConfiguration: BytesBlob, + } + ) {} + + constants(): BytesBlob { + return getEncodedConstants(this.chainSpec); + } + + // TODO [ToDr] Return encoded work package E(p) + workPackage(): BytesBlob { + return BytesBlob.empty(); + } + + authConfiguration(): BytesBlob { + return this.params.authConfiguration; + } + + authToken(): BytesBlob { + return this.params.authToken; + } + + // TODO [ToDr] Return encoded refinement context + refineContext(): BytesBlob { + return BytesBlob.empty(); + } + + // TODO [ToDr] Return encoded work items + 
allWorkItems(): BytesBlob { + return BytesBlob.empty(); + } + + // TODO [ToDr] Return single work item summary + oneWorkItem(_workItem: U64): BytesBlob | null { + return null; + } + + // TODO [ToDr] Return work item payload + workItemPayload(_workItem: U64): BytesBlob | null { + return null; + } +} diff --git a/packages/jam/transition/externalities/refine-fetch-externalities.ts b/packages/jam/transition/externalities/refine-fetch-externalities.ts index 2d5d50adc..ea5a05627 100644 --- a/packages/jam/transition/externalities/refine-fetch-externalities.ts +++ b/packages/jam/transition/externalities/refine-fetch-externalities.ts @@ -1,5 +1,5 @@ import type { EntropyHash } from "@typeberry/block"; -import { Bytes, type BytesBlob } from "@typeberry/bytes"; +import { Bytes, BytesBlob } from "@typeberry/bytes"; import type { ChainSpec } from "@typeberry/config"; import { HASH_SIZE } from "@typeberry/hash"; import { general } from "@typeberry/jam-host-calls"; @@ -20,36 +20,44 @@ export class RefineFetchExternalities implements general.IRefineFetch { return Bytes.zero(HASH_SIZE).asOpaque(); } - authorizerTrace(): BytesBlob | null { - return null; + // TODO [ToDr] implement + authorizerTrace(): BytesBlob { + return BytesBlob.empty(); } + // TODO [ToDr] implement workItemExtrinsic(_workItem: U64 | null, _index: U64): BytesBlob | null { return null; } + // TODO [ToDr] implement workItemImport(_workItem: U64 | null, _index: U64): BytesBlob | null { return null; } - workPackage(): BytesBlob | null { - return null; + // TODO [ToDr] implement + workPackage(): BytesBlob { + return BytesBlob.empty(); } - authorizer(): BytesBlob | null { - return null; + // TODO [ToDr] implement + authConfiguration(): BytesBlob { + return BytesBlob.empty(); } - authorizationToken(): BytesBlob | null { - return null; + // TODO [ToDr] implement + authToken(): BytesBlob { + return BytesBlob.empty(); } - refineContext(): BytesBlob | null { - return null; + // TODO [ToDr] implement + refineContext(): 
BytesBlob { + return BytesBlob.empty(); } - allWorkItems(): BytesBlob | null { - return null; + // TODO [ToDr] implement + allWorkItems(): BytesBlob { + return BytesBlob.empty(); } oneWorkItem(_workItem: U64): BytesBlob | null { From 7426efd01d911449b35a58eee0231a6c85121b2a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tomasz=20Drwi=C4=99ga?= Date: Mon, 6 Apr 2026 15:33:22 +0200 Subject: [PATCH 2/6] Is authorized tests --- packages/jam/in-core/authorize.test.ts | 268 ++++++++++++++++++ packages/jam/in-core/fixtures/authorizer.pvm | Bin 0 -> 18299 bytes packages/jam/in-core/in-core.ts | 14 +- .../is-authorized-fetch-externalities.ts | 6 +- 4 files changed, 277 insertions(+), 11 deletions(-) create mode 100644 packages/jam/in-core/authorize.test.ts create mode 100644 packages/jam/in-core/fixtures/authorizer.pvm diff --git a/packages/jam/in-core/authorize.test.ts b/packages/jam/in-core/authorize.test.ts new file mode 100644 index 000000000..10bc22e09 --- /dev/null +++ b/packages/jam/in-core/authorize.test.ts @@ -0,0 +1,268 @@ +import { readFileSync } from "node:fs"; +import { resolve } from "node:path"; +import assert from "node:assert"; +import { before, describe, it } from "node:test"; +import type { CodeHash, HeaderHash, StateRootHash } from "@typeberry/block"; +import { tryAsCoreIndex, tryAsServiceGas, tryAsServiceId, tryAsTimeSlot } from "@typeberry/block"; +import type { WorkPackageHash } from "@typeberry/block/refine-context.js"; +import { RefineContext } from "@typeberry/block/refine-context.js"; +import { WorkItem } from "@typeberry/block/work-item.js"; +import { tryAsWorkItemsCount, WorkPackage } from "@typeberry/block/work-package.js"; +import { Bytes, BytesBlob } from "@typeberry/bytes"; +import { Encoder } from "@typeberry/codec"; +import { asKnownSize, FixedSizeArray, HashDictionary } from "@typeberry/collections"; +import { type ChainSpec, PvmBackend, tinyChainSpec } from "@typeberry/config"; +import { InMemoryStates } from "@typeberry/database"; +import { 
Blake2b, HASH_SIZE, type OpaqueHash, WithHash } from "@typeberry/hash"; +import { tryAsU16, tryAsU32, tryAsU64 } from "@typeberry/numbers"; +import { InMemoryService, InMemoryState, PreimageItem, ServiceAccountInfo } from "@typeberry/state"; +import { InCore, RefineError } from "./in-core.js"; + +let blake2b: Blake2b; + +before(async () => { + blake2b = await Blake2b.createHasher(); +}); + +// Load the authorizer PVM fixture. +// This authorizer checks that authToken === authConfiguration and returns "Auth=". +// Source: https://github.com/tomusdrw/as-lan/blob/12bd8fd/examples/authorizer/assembly/authorize.ts#L25 +const AUTHORIZER_PVM = BytesBlob.blobFrom(readFileSync(resolve(import.meta.dirname, "fixtures/authorizer.pvm"))); + +const AUTH_SERVICE_ID = tryAsServiceId(42); + +function createService(serviceId: typeof AUTH_SERVICE_ID, codeHash: OpaqueHash, code: BytesBlob): InMemoryService { + return new InMemoryService(serviceId, { + info: ServiceAccountInfo.create({ + codeHash: codeHash.asOpaque(), + balance: tryAsU64(10_000_000_000), + accumulateMinGas: tryAsServiceGas(0n), + onTransferMinGas: tryAsServiceGas(0n), + storageUtilisationBytes: tryAsU64(0), + storageUtilisationCount: tryAsU32(0), + gratisStorage: tryAsU64(0), + created: tryAsTimeSlot(0), + lastAccumulation: tryAsTimeSlot(0), + parentService: tryAsServiceId(0), + }), + preimages: HashDictionary.fromEntries( + [PreimageItem.create({ hash: codeHash.asOpaque(), blob: code })].map((x) => [x.hash, x]), + ), + lookupHistory: HashDictionary.fromEntries([]), + storage: new Map(), + }); +} + +function createWorkItem(serviceId = AUTH_SERVICE_ID) { + return WorkItem.create({ + service: serviceId, + codeHash: Bytes.zero(HASH_SIZE).asOpaque(), + payload: BytesBlob.empty(), + refineGasLimit: tryAsServiceGas(1_000_000), + accumulateGasLimit: tryAsServiceGas(1_000_000), + importSegments: asKnownSize([]), + extrinsic: [], + exportCount: tryAsU16(0), + }); +} + +function createWorkPackage(opts: { + anchorHash: 
HeaderHash; + stateRoot: StateRootHash; + lookupAnchorSlot: number; + authCodeHash: CodeHash; + authToken: BytesBlob; + authConfiguration: BytesBlob; +}) { + return WorkPackage.create({ + authToken: opts.authToken, + authCodeHost: AUTH_SERVICE_ID, + authCodeHash: opts.authCodeHash, + authConfiguration: opts.authConfiguration, + context: RefineContext.create({ + anchor: opts.anchorHash, + stateRoot: opts.stateRoot, + beefyRoot: Bytes.zero(HASH_SIZE).asOpaque(), + lookupAnchor: opts.anchorHash, + lookupAnchorSlot: tryAsTimeSlot(opts.lookupAnchorSlot), + prerequisites: [], + }), + items: FixedSizeArray.new([createWorkItem()], tryAsWorkItemsCount(1)), + }); +} + +function hashWorkPackage(spec: ChainSpec, workPackage: WorkPackage): WithHash { + const workPackageHash = blake2b + .hashBytes(Encoder.encodeObject(WorkPackage.Codec, workPackage, spec)) + .asOpaque(); + return new WithHash(workPackageHash, workPackage); +} + +describe("InCore authorization", () => { + const spec = tinyChainSpec; + + function getAuthCodeHash() { + return blake2b.hashBytes(AUTHORIZER_PVM).asOpaque(); + } + + async function setup() { + const authCodeHash = getAuthCodeHash(); + const states = new InMemoryStates(spec); + const anchorHash = Bytes.fill(HASH_SIZE, 1).asOpaque(); + + const authService = createService(AUTH_SERVICE_ID, authCodeHash, AUTHORIZER_PVM); + + const state = InMemoryState.partial(spec, { + timeslot: tryAsTimeSlot(16), + services: new Map([[AUTH_SERVICE_ID, authService]]), + }); + await states.insertInitialState(anchorHash, state); + const stateRoot = await states.getStateRoot(state); + + const inCore = new InCore(spec, states, PvmBackend.BuiltIn, blake2b); + return { states, anchorHash, stateRoot, inCore, state, authCodeHash }; + } + + it("should authorize when token matches configuration", async () => { + const { anchorHash, stateRoot, inCore, state, authCodeHash } = await setup(); + const token = BytesBlob.blobFromString("hello"); + + const wp = createWorkPackage({ + 
anchorHash, + stateRoot, + lookupAnchorSlot: state.timeslot, + authCodeHash, + authToken: token, + authConfiguration: token, // same as token -> auth succeeds + }); + + const result = await inCore.refine(hashWorkPackage(spec, wp), tryAsCoreIndex(0), asKnownSize([[]]), asKnownSize([[]])); + + assert.strictEqual(result.isOk, true, `Expected OK but got error: ${result.isError ? result.details() : ""}`); + const report = result.ok.report; + + // Verify the authorization output starts with "Auth=" + const outputStr = Buffer.from(report.authorizationOutput.raw).toString("utf8"); + assert.ok(outputStr.startsWith("Auth="), `Expected output to start with "Auth=" but got "${outputStr.slice(0, 30)}"`); + + // Verify the authorizer hash is H(code_hash ++ configuration) + const expectedHash = blake2b.hashBlobs([authCodeHash, token]); + assert.ok(report.authorizerHash.isEqualTo(expectedHash), "authorizerHash should be H(code_hash || config)"); + + // Verify gas was consumed (should be > 0) + assert.ok(Number(report.authorizationGasUsed) > 0, "should have consumed some gas"); + }); + + it("should authorize with empty token and configuration", async () => { + const { anchorHash, stateRoot, inCore, state, authCodeHash } = await setup(); + + const wp = createWorkPackage({ + anchorHash, + stateRoot, + lookupAnchorSlot: state.timeslot, + authCodeHash, + authToken: BytesBlob.empty(), + authConfiguration: BytesBlob.empty(), + }); + + const result = await inCore.refine(hashWorkPackage(spec, wp), tryAsCoreIndex(0), asKnownSize([[]]), asKnownSize([[]])); + + assert.strictEqual(result.isOk, true, `Expected OK but got error: ${result.isError ? 
result.details() : ""}`); + const outputStr = Buffer.from(result.ok.report.authorizationOutput.raw).toString("utf8"); + assert.ok(outputStr.startsWith("Auth=<>"), `Expected output to start with "Auth=<>" but got "${outputStr.slice(0, 30)}"`); + }); + + it("should fail authorization when token does not match configuration", async () => { + const { anchorHash, stateRoot, inCore, state, authCodeHash } = await setup(); + + const wp = createWorkPackage({ + anchorHash, + stateRoot, + lookupAnchorSlot: state.timeslot, + authCodeHash, + authToken: BytesBlob.blobFromString("wrong"), + authConfiguration: BytesBlob.blobFromString("right"), + }); + + const result = await inCore.refine(hashWorkPackage(spec, wp), tryAsCoreIndex(0), asKnownSize([[]]), asKnownSize([[]])); + + assert.strictEqual(result.isError, true); + assert.strictEqual(result.error, RefineError.AuthorizationError); + }); + + it("should fail when auth code host service is missing", async () => { + const authCodeHash = getAuthCodeHash(); + const states = new InMemoryStates(spec); + const anchorHash = Bytes.fill(HASH_SIZE, 1).asOpaque(); + + // State with no services at all + const state = InMemoryState.partial(spec, { + timeslot: tryAsTimeSlot(16), + services: new Map(), + }); + await states.insertInitialState(anchorHash, state); + const stateRoot = await states.getStateRoot(state); + const inCore = new InCore(spec, states, PvmBackend.BuiltIn, blake2b); + + const wp = createWorkPackage({ + anchorHash, + stateRoot, + lookupAnchorSlot: state.timeslot, + authCodeHash, + authToken: BytesBlob.empty(), + authConfiguration: BytesBlob.empty(), + }); + + const result = await inCore.refine(hashWorkPackage(spec, wp), tryAsCoreIndex(0), asKnownSize([[]]), asKnownSize([[]])); + + assert.strictEqual(result.isError, true); + assert.strictEqual(result.error, RefineError.AuthorizationError); + }); + + it("should fail when auth code preimage is missing", async () => { + const authCodeHash = getAuthCodeHash(); + const states = new 
InMemoryStates(spec); + const anchorHash = Bytes.fill(HASH_SIZE, 1).asOpaque(); + + // Service exists but has no preimages + const emptyService = new InMemoryService(AUTH_SERVICE_ID, { + info: ServiceAccountInfo.create({ + codeHash: Bytes.zero(HASH_SIZE).asOpaque(), + balance: tryAsU64(0), + accumulateMinGas: tryAsServiceGas(0n), + onTransferMinGas: tryAsServiceGas(0n), + storageUtilisationBytes: tryAsU64(0), + storageUtilisationCount: tryAsU32(0), + gratisStorage: tryAsU64(0), + created: tryAsTimeSlot(0), + lastAccumulation: tryAsTimeSlot(0), + parentService: tryAsServiceId(0), + }), + preimages: HashDictionary.fromEntries([]), + lookupHistory: HashDictionary.fromEntries([]), + storage: new Map(), + }); + + const state = InMemoryState.partial(spec, { + timeslot: tryAsTimeSlot(16), + services: new Map([[AUTH_SERVICE_ID, emptyService]]), + }); + await states.insertInitialState(anchorHash, state); + const stateRoot = await states.getStateRoot(state); + const inCore = new InCore(spec, states, PvmBackend.BuiltIn, blake2b); + + const wp = createWorkPackage({ + anchorHash, + stateRoot, + lookupAnchorSlot: state.timeslot, + authCodeHash, + authToken: BytesBlob.empty(), + authConfiguration: BytesBlob.empty(), + }); + + const result = await inCore.refine(hashWorkPackage(spec, wp), tryAsCoreIndex(0), asKnownSize([[]]), asKnownSize([[]])); + + assert.strictEqual(result.isError, true); + assert.strictEqual(result.error, RefineError.AuthorizationError); + }); +}); diff --git a/packages/jam/in-core/fixtures/authorizer.pvm b/packages/jam/in-core/fixtures/authorizer.pvm new file mode 100644 index 0000000000000000000000000000000000000000..6642a755ddbbbcca9bef9084fb38af4ea157d5c0 GIT binary patch literal 18299 zcmeHN4Rl*)dH${}Iag{*udbB%7m(#^_c{qhwMc773W}m+lF8|=R5vwcFooPCPFq^n z`Kjxhb?TMfDNg8IVKFl}o0F!&g|K-AS{HhPffUjosA)SeE93YK*eM(9@y5mir>*zA z-@TG0C2cu}bGEZ{?Btd1_xFD9|M!0H)!PU5+_q=efjw8>fufq2MzH0fSoFCes z=uKe1!`Owe=zPaL=-G$WeXN?^cFbSik0=|!%Db`KA+{I)_Mlz9Z}s{>#d@7{ybHVU 
z$I81ZR$?Ols@E&%BYJhr9|Wy;aM{xLUbKi7qTG@dtv^B^_8;+^2J~ar03@@AZN<6) zd@o~sAF%8PjsrG7`=J%0?RH2b050#w_a2M}upg}rTmpx^9sg=BYta540PTM}uX?%lutRpUe6oOBmq%?6YNCEstA318MCbsOe)VV84s~ zA=YRm8c8C3*qLyVglR5Ob32}hw(BwfdbSNuMTwAR`Z2m2Gp#vAJztvN>w%9vAOKIW zJcT5?pABL406Z|jJ`5_YziN5bqWdF!&ZWm!qdUN*c7V^il)RVabrAX`%8A~hWtUrX zWjr8#QalK7ZISJLyo{1JqlbL^R_t^8Mf$hmL8TPRqrzV7vLCV{8tJcE9>i}^iq`>q z^*GV|{^RdrEHmOf{nu;IqG%ig{1dJ`I=#_@R~=z_Wn*Rq$BAO@QA5>JbwE*v(i6Nd3YUun357Ki~&|R{>kS z@RGHRH2~HD(shg-1ekz4U>fiWU`IV;{|a~qUQ3yPd4SQ#*xQ>J`vo8Y)^7vk06zsx zzMZj+2$ut`j6Da~seli_);7jcfL(xoz@Gx{2b==@5bz4%H9${0WCOaN28eLuZvwsr zI0twV@NK|%0OtWu0iFgt19%qjUBLGM&jIED-v>MocmePt;O_xH0Q?Xz5BL$_9{?`_ z3YRhVZ-8Tv*9(BV-iEyamqT{F04HSjSAd^DZtG>n4g+>Wv7M07-vJsRqdNhsA%lkj zuRsnvAcwO7#;$FFOaY&R9H1(8IrvoRffc-qMx9YFYLa6KgJzvsFBZH~!OMPAxQi{m zYSgJleMktq!=6=|-Jc z*)Tqm&fBojnbrh8!?iVYdTp{6I%MWXqpc7W3t_RacPUl-tEHu-aP6w+b!XDK3ByM= z4q{j+goQ$EDdnVLHHiMg_NDV5bDZB@BUm$*3gM;FjIdh@d)%5N{s|-p>e57=X!!ex ztjN#-5d@-Vwc#Jsqw!8qrw`a)oQpB0N4N9t&dhr-YWOFaDaSJkf1YBx)osdCEE_cB z8BEVc4S5!mW|--lWnb@vEY71DDJUXD65|W!S=hZg?D1%l=VzC5!WLNN#*4sIA_N38 z!ET!klT4FVhe_I|JkPhS#}oGO)eC1ko%%p_z>rnUhv0?zR_q_bc)f7H^6Jvk2TVC6 z=v!aJyy%FsWXcOzH_3)tiS^EGry-|pWND5p8mU*OYC85H>D zt+l3{Hhl}l=RPhVJz9t@^E`>wgS_9ACk2w$G-xDg&0u62sAdF6OONi9g%3cl>JZt4 zJjRZYuxVz}u@*5(b>ahr2uXPfZoyPau)-j3PEenRQ51K9%p5iu*=RQA(92l~I55rh z1sp|A97NEfpiXfNV~i_Z2s@T|Z=Uz2ZM8>$tDG|p$QFgHf-jBZNn?WPOFMoTr-6m&v`SC;-mmE3d;Z+_SZs^fuE6BM7^lVb5in zB>g3V1x$R-VOvtzT_5(W)+G5}Yha-`z&*6wyvm+empw~m&wl%v?{>K+`8QZdBSnV^ zyO~py)_xc&fjZHBz!vR-1D;l6H|Ee-u!L^w#^OH__8{ccA4A*%3z-^Io+~P^Gy!hh zc#Ab*qNdB~WaN!745-5JPvN07QvsNsdsBq`zA!+%6%m8qHd~M+J&VP z#id<*={h>cE0;#FbOW=NPFk}Kf7-chONPH6h-|h?5hqjLSBMo2FYI=hjcKRZgf-0> zZgdjl+Nnubea~63HN=cz9+FCA8va@ImAk^+GW&*qzG4t6b#f;|FM0h1OkH%Ez6;LJ zx5Ac~`U%^QMVIQrqbZ9nnlHIb-=g!2oqWzA8fchxnvJ4MlR}r{+=smcTxP?(-PidW zOHl#7IOMXzVV4VW4~V*4_GSUTgXGF82tp;pFtDeZgR_U-X~_9KEmoAZwNlP1@sN$J zDqE{}Xwo&`M3N)ucZWTbSl;y^9LFGU0-M_?V6m=pG=_iLMX_tf1uf5Tl4mh{nm?V# 
z6I#CWt}_<*bYcRP3(AIcfrwkY;a{K)&|3_)i0T}{1xz7PvszQW07R_zOeg)K$84Bz zg*~$%@qLf_If+uOeQej)0xLha6PmR76T}XV{NiY+IBNL2fK#e1Mnp8~t%fOM#22ak zzd7%T4BJLSMdH#RPOatAxz~!Mf1w0$Ue(&iAhbW%S(E((oHFOH6~oAsmpT!NCtNmj zIjoI<(eE%`y}Yp=s5Siv{@H?7FADSw+~<^6M|{y9T38bqT} zGMeg*=78YQkLceq8$`2FGMnnn=75PTFVtDFx^Jnpyl<&;z5XeqF=RA#8O@Qf12Myd zSmI$vz4fFSYgJgZ>aT$_EfHbQce?bRup{uC;+#{Q1bcFUN!4bP zW;XX0yy;53gUb>byp%-Ns1O1Ku4+=`0JJISM)ru=6v}?>%rh`-&{)*5-mc=}!c^SC zn7*%?pB(rZeS)ka8`N9RZpJxK3cPlT0|ZY2Svl9|OI)7^*Nvjlq#Dg3{j>TPxE)Kt zCn58-e5C>Bg4v{+&7p#~tCGw4*Gq`zUnfkGU^E2;qq$ztKVvjOip^s7+54ZN9C_rc zOI={3Ci{i#cYMP~7SBGhs>`{W@{L~0@6)4-WYlTo0nKrkjK1~E0e(JZiSw;rzn0Fp z7z|&J9#bSePJDKWb6p6~rZ4@YP8@r{WYmwbms&-Agd;!OQ+sARu3RkalFsZQ5nfk| zJ-@!)6|R+@+~eAFqlz0SuH{fb)Q<}~%K+e4*emJayYEID4pWkF^QkU zj9GRaLI=W7$n?z>bM~$*rK6>_`js#k2+4$yObDsqRfSGwt{G#$H?`rK7doA}n4sQR zrR%aFX6qW^#nN>%g)P!`;YtV(>c-WQ2!y)3xVj^i>i*qOi5wL!fxkbh)g0C8H%Fx| zyP8_!p-nx|CDUnL9am{UX=^xUh=(`d41^saLQ1h^U1}Mf)4%goeHZate>aJ=CEuQ8+kUhU=zk_nfZ^H<^;ZWU3-R+(pH;0~c6a zLSn5vBpiuvrv2&WAFu}0(ZWBZ+OFHjH58*0Z8BBXsspp#SCtfv2AeYE=JBJrDGBq#Fufo=r1O|%_>5~ISLWDJ#Sb-5C&UekDy6Ank_U&^CUJ3*O-1aI{;)! z=k?ez>{?>a-{A7s3ijXAd*Ibp=>rKEvkD(n^|%0O7ONgKGT2Hr4=Q-j2%NigTjw`! 
z`{r)S{mb$LO_gWk`6~I%lr%d-2T8}DMlw9D2g$41I@64iDY8G zf_KKz$p-%n3$kW*81dGSrx~{$1YRhj48e7Tz#s&eQi`5b3d-sz0~Et9m4K)sbe?e< z)sKLT?4e^lwMgXIXb%pCVTGYMGmqng*9h)YW-~ZGN*MLlyR$YuK+Ea*bHZ^XcaHs- z8GcpZ`(P{Ta;Thv-hu+$_fT$zx&_z< zO|N2UvJ+d}i7Cgs^=in;$DHTJ8d|oIF0|mq(3}zjc zlyQOQTs;m)8a?N+Hot<-QM!Ymm}AO)ylf;ohLu@_4(8yZGW_)z5*?#TJzvAv2Ote} z)uYQoLxNR{vm+Q{j$?G#SuCKOLn6oFZjZ>Y7ORg)U(94O6?zyd>Y@C&#GSyA)zMQ_ z_!N*$pxQCzuoQkZ1aV40kUedr7!?M7 zRd3Tb3)ycQYmqglZIfRIUEOnzt1$=tm07)&Eqn;$>Mnecd2J;0dIt|<*WdUX#7v`Z z3jTn6Ps{!nnt0TQ%%&-(NvRtsfSY{0GRUNn9b|jBtIRVMb#@qhj+MEKwe!`q^LzQ| zASz~Mf;Hrqv1A@T@-icy)OV42_h5>FUiJHE5k}i2!!;#qrsAk=`)O|bxPg=HmyEjw z+ZD)U?>Prsvka?f)s(4%NA5woF7lWx9&w-1Hg zW@E^Tym~5JW7lT>s1w!iiYQ#ReWfkwbf>X0_^?|Hd%~LZ(L4W}_BsOF89V?RDI7vZ z6&gA~#d_QlD%ILs8STrDj>_v~5qsKaVc*X4fkRH70$hx@>J*t+4*e6aRdN7tKxRbO z27Vu>>IpPsS3W52m%2;kkA`^Iqkj--Vc22IEL?+&DV4Q8ru;iGKX9|WLcaVcPSKRkkBhD; zJn)1-LQ%N4{Omv9Ot^dDI9A*@eO<*D64_`r{8RF8_^hB_#TlZbfu`VYg{q`Qsjm+- z&I&KJVk%W@<^{Cj*+D~Iz|%audBJ++5UwfVEFHM0PzqGjyDdjC(5S<4W9gok$581- zfDZ`EbywPh5LQ=$x-buOefaiYz~*#_|L?yBvt0l7!w4KO2J7vH%@68=4$J$wJ^u<3 zzC5zM@jYNUv>>7(PH9sAyLo?!?VFS<7=Ua)YpY4zY0HyA;_>xvS9b%xpl+ zF`E1MO9j6s9sJ>c5P649U*8Z#ri*I`)mMSVJ-JTz7Yi~9HsPn^6=xYa4mL) z9eSIP-3bC<2W#Vc>zQjfg;8!sYGH#jqYzbolyB7Xl4>8>4dpRpN;zgUr%~z0X_aGV zIw^}Z>(uHfBGHZ6c3Q(f!7_9tBQ4-B?eOxDqngGP9Jjt{R&6=6ID|v`&6+gWL&Vd) zy;ML<+YthG8-t^jSuNhs(Yv_C;=LZ%|MCfXn@Tmgat%+H&aUao(2KnLA!7~lc?c20 zXcsf<>4oXzT-QSe9cq{0TB|%V#AOwUyzqh&&daE~EDE@EG@=wqC7k-C1Mjksj!?36 zo~ciwq2lFrF)e=?HHbyjATDr;O1uEBmQH~X)dk9`-+0-KVx%Qr)al};PqQoNkMLlP zObUwNr8+D2L1MH7_dHTA5-D7#1}oLUT?QK!L$EFQV!B6K#0_NHlDgWBeUc(2JM?B2 z8lW^^H^ViaR1L4wK11<~-k#bFzi1IwXtT`EBIE*JZTc>dX;7L|p=c3uD`VU6BlL3m zaY5p*W&;A!A-Wlu7&3h{U!`bBFNCd`T_k!6oBDeFTGYcLLh>^14S4Ut->D7?DqgX$ zOb>Y@kn64U2YBEeq*yvABpr11$sR_KwuEX;I(rQ!mD%K!pg*$A`*(@=EK=)G#iaAV zI0ZU!C$Ng-5xlXhl2?vXs@_D>@`Z$<8$g%(k5#F4Wr^1+8fM_INQJAerD8^<;!jOsA`3!?D?~UtBHlpRJS{FoCH8MRH`^-m8#SY;7xh8=ZY_($ 
z>tAQNuX^lqZ7BQLO)s5Ur5m)#rd4y-t|IK3+0<(`$1f_5i@G_7G!38@AQgXAvQ-7PBbOkvVm`%?Gz6$vaH*|^|uxg)F zb?9fg>9L46{5gvwn_FB< zcv1l?hf7ZhXFADestpIBGhlw6XCyV z&~SsI@JQEW4GB$e)z1_lK#a@?!^#y%X#*Md3w~Eb@9#jyq(#PLk&J%)o&x-r5!xrH zRH`dKxZe2IbIF4XO3zT$QGZ;&MGKhhly#TA=-95h^Sr!#fW2ImW8i)lz^^Z~z%YB{ zpCNNx_jvO@_Mp{F^Jb2XTg?n_jioLuhiv+1%$lhwFDNQpBKCp@Vz)tQ1z1%3s6i|lPR2k=S7r`DR-3fMr zcEK$Z*ZRQ?JeZ&)*2aH?VO1R3)z9!lug~;N6qA+S>m2x9N44wJ6h@g6-Hk8SHdrFkF7506UsCer@= ztU4a;;QxVEJRXhZ#1opMHG0CiJtvO@Ls8}7S#jHfxTgMucwF+HL?b<%lU&e7;6c}fVh(T=y1;8J)R%aFaqYgW$(SooYs+u4aXBf=UBRS zEZv=7pXe51q3(QsSnh7mj4R_xA|Fd7dh&jRiYo$UGT$S2cM3`(5zQ-Y?zY6|^QY8| z^M=%EC7~wTIx^z+MCvK$PLD$!#>m6tN{<-$#`vi`Mm<;_8}Ep#iFjKgC3T+|Kc#H* zxaDDGWJAV3tQ8Cy**LM z9Y|=hvL!Jl9<}DVPoLV6$ZH^#(;Q8tXz5gb2N0ijwzPoEd@kA%WgX%JN=o*&jmNzY zX?gUtq~x|}%h8mSkNQ1hR_|y&HmZ$|MO#khRBuNv-qIZ#RlP@3v>*XyyG0VH-kl#_ z*YOoaZ1DtKEeZE==V|e{($Vb=VmH`gvSnLNiwE11o~UQUaY{Tot_0UXUrz@dJ|*Pd z;69^_IBMddwnW<}XXU(>A9tYf6eK{8x%9a5)Nm>u?KqWE{k;JtF`A!+m5yniHU1WV z#IvIXc0L~M0qzGQIWq0!136XhZt2EgRPI$rD z@=?i*kEch2%86hwnU8zmskvNi+sW)25+m1Yb4MRK73zpx zJ$e#7Q?aBoks22_Ae2ny;)#>vv3ry8sc}Vav3r84Syk!j2Av%VZpAQOrDs-2a3AUP r)Vq#5whIqtlCu#xG?I_Hk7n9pvufJ?71a|Z;{DUGv5{Cjo)7*HNJrk3 literal 0 HcmV?d00001 diff --git a/packages/jam/in-core/in-core.ts b/packages/jam/in-core/in-core.ts index 06ab1c7dc..4f081e911 100644 --- a/packages/jam/in-core/in-core.ts +++ b/packages/jam/in-core/in-core.ts @@ -312,12 +312,10 @@ export class InCore { } // Prepare fetch externalities and executor - const fetchExternalities = new IsAuthorizedFetchExternalities( - this.chainSpec, { - authToken: authToken, - authConfiguration: authConfiguration, - } - ); + const fetchExternalities = new IsAuthorizedFetchExternalities(this.chainSpec, { + authToken, + authConfiguration, + }); const executor = await PvmExecutor.createIsAuthorizedExecutor( authCodeHost, code, @@ -341,8 +339,8 @@ export class InCore { ); } - // Compute authorizer hash: blake2b(codeHash ++ configuration) - // 
https://graypaper-reader.netlify.app/#/ab2cdbd/1b81011b8401?v=0.7.2 + // Compute authorizer hash: H(code_hash ++ configuration) + // https://graypaper.fluffylabs.dev/#/ab2cdbd/1b81011b8401?v=0.7.2 const authorizerHash = this.blake2b.hashBlobs([authCodeHash, authConfiguration]); const authorizationOutput = BytesBlob.blobFrom(execResult.memorySlice); const authorizationGasUsed = tryAsServiceGas(execResult.consumedGas); diff --git a/packages/jam/transition/externalities/is-authorized-fetch-externalities.ts b/packages/jam/transition/externalities/is-authorized-fetch-externalities.ts index b3df06d17..effefbfc9 100644 --- a/packages/jam/transition/externalities/is-authorized-fetch-externalities.ts +++ b/packages/jam/transition/externalities/is-authorized-fetch-externalities.ts @@ -10,9 +10,9 @@ export class IsAuthorizedFetchExternalities implements general.IIsAuthorizedFetc constructor( private readonly chainSpec: ChainSpec, private readonly params: { - authToken: BytesBlob, - authConfiguration: BytesBlob, - } + authToken: BytesBlob; + authConfiguration: BytesBlob; + }, ) {} constants(): BytesBlob { From 141dfe988dfc74e08d990b027f1b5b781ea5efbb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tomasz=20Drwi=C4=99ga?= Date: Tue, 7 Apr 2026 07:07:51 +0200 Subject: [PATCH 3/6] Split in-core into refine and is-authorized --- packages/jam/in-core/authorize.test.ts | 268 ------------- packages/jam/in-core/in-core.test.ts | 72 +++- packages/jam/in-core/in-core.ts | 359 ++---------------- packages/jam/in-core/is-authorized.test.ts | 184 +++++++++ packages/jam/in-core/is-authorized.ts | 114 ++++++ packages/jam/in-core/refine.test.ts | 7 + packages/jam/in-core/refine.ts | 233 ++++++++++++ .../jam/jam-host-calls/general/fetch.test.ts | 7 +- 8 files changed, 624 insertions(+), 620 deletions(-) delete mode 100644 packages/jam/in-core/authorize.test.ts create mode 100644 packages/jam/in-core/is-authorized.test.ts create mode 100644 packages/jam/in-core/is-authorized.ts create mode 100644 
packages/jam/in-core/refine.test.ts create mode 100644 packages/jam/in-core/refine.ts diff --git a/packages/jam/in-core/authorize.test.ts b/packages/jam/in-core/authorize.test.ts deleted file mode 100644 index 10bc22e09..000000000 --- a/packages/jam/in-core/authorize.test.ts +++ /dev/null @@ -1,268 +0,0 @@ -import { readFileSync } from "node:fs"; -import { resolve } from "node:path"; -import assert from "node:assert"; -import { before, describe, it } from "node:test"; -import type { CodeHash, HeaderHash, StateRootHash } from "@typeberry/block"; -import { tryAsCoreIndex, tryAsServiceGas, tryAsServiceId, tryAsTimeSlot } from "@typeberry/block"; -import type { WorkPackageHash } from "@typeberry/block/refine-context.js"; -import { RefineContext } from "@typeberry/block/refine-context.js"; -import { WorkItem } from "@typeberry/block/work-item.js"; -import { tryAsWorkItemsCount, WorkPackage } from "@typeberry/block/work-package.js"; -import { Bytes, BytesBlob } from "@typeberry/bytes"; -import { Encoder } from "@typeberry/codec"; -import { asKnownSize, FixedSizeArray, HashDictionary } from "@typeberry/collections"; -import { type ChainSpec, PvmBackend, tinyChainSpec } from "@typeberry/config"; -import { InMemoryStates } from "@typeberry/database"; -import { Blake2b, HASH_SIZE, type OpaqueHash, WithHash } from "@typeberry/hash"; -import { tryAsU16, tryAsU32, tryAsU64 } from "@typeberry/numbers"; -import { InMemoryService, InMemoryState, PreimageItem, ServiceAccountInfo } from "@typeberry/state"; -import { InCore, RefineError } from "./in-core.js"; - -let blake2b: Blake2b; - -before(async () => { - blake2b = await Blake2b.createHasher(); -}); - -// Load the authorizer PVM fixture. -// This authorizer checks that authToken === authConfiguration and returns "Auth=". 
-// Source: https://github.com/tomusdrw/as-lan/blob/12bd8fd/examples/authorizer/assembly/authorize.ts#L25 -const AUTHORIZER_PVM = BytesBlob.blobFrom(readFileSync(resolve(import.meta.dirname, "fixtures/authorizer.pvm"))); - -const AUTH_SERVICE_ID = tryAsServiceId(42); - -function createService(serviceId: typeof AUTH_SERVICE_ID, codeHash: OpaqueHash, code: BytesBlob): InMemoryService { - return new InMemoryService(serviceId, { - info: ServiceAccountInfo.create({ - codeHash: codeHash.asOpaque(), - balance: tryAsU64(10_000_000_000), - accumulateMinGas: tryAsServiceGas(0n), - onTransferMinGas: tryAsServiceGas(0n), - storageUtilisationBytes: tryAsU64(0), - storageUtilisationCount: tryAsU32(0), - gratisStorage: tryAsU64(0), - created: tryAsTimeSlot(0), - lastAccumulation: tryAsTimeSlot(0), - parentService: tryAsServiceId(0), - }), - preimages: HashDictionary.fromEntries( - [PreimageItem.create({ hash: codeHash.asOpaque(), blob: code })].map((x) => [x.hash, x]), - ), - lookupHistory: HashDictionary.fromEntries([]), - storage: new Map(), - }); -} - -function createWorkItem(serviceId = AUTH_SERVICE_ID) { - return WorkItem.create({ - service: serviceId, - codeHash: Bytes.zero(HASH_SIZE).asOpaque(), - payload: BytesBlob.empty(), - refineGasLimit: tryAsServiceGas(1_000_000), - accumulateGasLimit: tryAsServiceGas(1_000_000), - importSegments: asKnownSize([]), - extrinsic: [], - exportCount: tryAsU16(0), - }); -} - -function createWorkPackage(opts: { - anchorHash: HeaderHash; - stateRoot: StateRootHash; - lookupAnchorSlot: number; - authCodeHash: CodeHash; - authToken: BytesBlob; - authConfiguration: BytesBlob; -}) { - return WorkPackage.create({ - authToken: opts.authToken, - authCodeHost: AUTH_SERVICE_ID, - authCodeHash: opts.authCodeHash, - authConfiguration: opts.authConfiguration, - context: RefineContext.create({ - anchor: opts.anchorHash, - stateRoot: opts.stateRoot, - beefyRoot: Bytes.zero(HASH_SIZE).asOpaque(), - lookupAnchor: opts.anchorHash, - lookupAnchorSlot: 
tryAsTimeSlot(opts.lookupAnchorSlot), - prerequisites: [], - }), - items: FixedSizeArray.new([createWorkItem()], tryAsWorkItemsCount(1)), - }); -} - -function hashWorkPackage(spec: ChainSpec, workPackage: WorkPackage): WithHash { - const workPackageHash = blake2b - .hashBytes(Encoder.encodeObject(WorkPackage.Codec, workPackage, spec)) - .asOpaque(); - return new WithHash(workPackageHash, workPackage); -} - -describe("InCore authorization", () => { - const spec = tinyChainSpec; - - function getAuthCodeHash() { - return blake2b.hashBytes(AUTHORIZER_PVM).asOpaque(); - } - - async function setup() { - const authCodeHash = getAuthCodeHash(); - const states = new InMemoryStates(spec); - const anchorHash = Bytes.fill(HASH_SIZE, 1).asOpaque(); - - const authService = createService(AUTH_SERVICE_ID, authCodeHash, AUTHORIZER_PVM); - - const state = InMemoryState.partial(spec, { - timeslot: tryAsTimeSlot(16), - services: new Map([[AUTH_SERVICE_ID, authService]]), - }); - await states.insertInitialState(anchorHash, state); - const stateRoot = await states.getStateRoot(state); - - const inCore = new InCore(spec, states, PvmBackend.BuiltIn, blake2b); - return { states, anchorHash, stateRoot, inCore, state, authCodeHash }; - } - - it("should authorize when token matches configuration", async () => { - const { anchorHash, stateRoot, inCore, state, authCodeHash } = await setup(); - const token = BytesBlob.blobFromString("hello"); - - const wp = createWorkPackage({ - anchorHash, - stateRoot, - lookupAnchorSlot: state.timeslot, - authCodeHash, - authToken: token, - authConfiguration: token, // same as token -> auth succeeds - }); - - const result = await inCore.refine(hashWorkPackage(spec, wp), tryAsCoreIndex(0), asKnownSize([[]]), asKnownSize([[]])); - - assert.strictEqual(result.isOk, true, `Expected OK but got error: ${result.isError ? 
result.details() : ""}`); - const report = result.ok.report; - - // Verify the authorization output starts with "Auth=" - const outputStr = Buffer.from(report.authorizationOutput.raw).toString("utf8"); - assert.ok(outputStr.startsWith("Auth="), `Expected output to start with "Auth=" but got "${outputStr.slice(0, 30)}"`); - - // Verify the authorizer hash is H(code_hash ++ configuration) - const expectedHash = blake2b.hashBlobs([authCodeHash, token]); - assert.ok(report.authorizerHash.isEqualTo(expectedHash), "authorizerHash should be H(code_hash || config)"); - - // Verify gas was consumed (should be > 0) - assert.ok(Number(report.authorizationGasUsed) > 0, "should have consumed some gas"); - }); - - it("should authorize with empty token and configuration", async () => { - const { anchorHash, stateRoot, inCore, state, authCodeHash } = await setup(); - - const wp = createWorkPackage({ - anchorHash, - stateRoot, - lookupAnchorSlot: state.timeslot, - authCodeHash, - authToken: BytesBlob.empty(), - authConfiguration: BytesBlob.empty(), - }); - - const result = await inCore.refine(hashWorkPackage(spec, wp), tryAsCoreIndex(0), asKnownSize([[]]), asKnownSize([[]])); - - assert.strictEqual(result.isOk, true, `Expected OK but got error: ${result.isError ? 
result.details() : ""}`); - const outputStr = Buffer.from(result.ok.report.authorizationOutput.raw).toString("utf8"); - assert.ok(outputStr.startsWith("Auth=<>"), `Expected output to start with "Auth=<>" but got "${outputStr.slice(0, 30)}"`); - }); - - it("should fail authorization when token does not match configuration", async () => { - const { anchorHash, stateRoot, inCore, state, authCodeHash } = await setup(); - - const wp = createWorkPackage({ - anchorHash, - stateRoot, - lookupAnchorSlot: state.timeslot, - authCodeHash, - authToken: BytesBlob.blobFromString("wrong"), - authConfiguration: BytesBlob.blobFromString("right"), - }); - - const result = await inCore.refine(hashWorkPackage(spec, wp), tryAsCoreIndex(0), asKnownSize([[]]), asKnownSize([[]])); - - assert.strictEqual(result.isError, true); - assert.strictEqual(result.error, RefineError.AuthorizationError); - }); - - it("should fail when auth code host service is missing", async () => { - const authCodeHash = getAuthCodeHash(); - const states = new InMemoryStates(spec); - const anchorHash = Bytes.fill(HASH_SIZE, 1).asOpaque(); - - // State with no services at all - const state = InMemoryState.partial(spec, { - timeslot: tryAsTimeSlot(16), - services: new Map(), - }); - await states.insertInitialState(anchorHash, state); - const stateRoot = await states.getStateRoot(state); - const inCore = new InCore(spec, states, PvmBackend.BuiltIn, blake2b); - - const wp = createWorkPackage({ - anchorHash, - stateRoot, - lookupAnchorSlot: state.timeslot, - authCodeHash, - authToken: BytesBlob.empty(), - authConfiguration: BytesBlob.empty(), - }); - - const result = await inCore.refine(hashWorkPackage(spec, wp), tryAsCoreIndex(0), asKnownSize([[]]), asKnownSize([[]])); - - assert.strictEqual(result.isError, true); - assert.strictEqual(result.error, RefineError.AuthorizationError); - }); - - it("should fail when auth code preimage is missing", async () => { - const authCodeHash = getAuthCodeHash(); - const states = new 
InMemoryStates(spec); - const anchorHash = Bytes.fill(HASH_SIZE, 1).asOpaque(); - - // Service exists but has no preimages - const emptyService = new InMemoryService(AUTH_SERVICE_ID, { - info: ServiceAccountInfo.create({ - codeHash: Bytes.zero(HASH_SIZE).asOpaque(), - balance: tryAsU64(0), - accumulateMinGas: tryAsServiceGas(0n), - onTransferMinGas: tryAsServiceGas(0n), - storageUtilisationBytes: tryAsU64(0), - storageUtilisationCount: tryAsU32(0), - gratisStorage: tryAsU64(0), - created: tryAsTimeSlot(0), - lastAccumulation: tryAsTimeSlot(0), - parentService: tryAsServiceId(0), - }), - preimages: HashDictionary.fromEntries([]), - lookupHistory: HashDictionary.fromEntries([]), - storage: new Map(), - }); - - const state = InMemoryState.partial(spec, { - timeslot: tryAsTimeSlot(16), - services: new Map([[AUTH_SERVICE_ID, emptyService]]), - }); - await states.insertInitialState(anchorHash, state); - const stateRoot = await states.getStateRoot(state); - const inCore = new InCore(spec, states, PvmBackend.BuiltIn, blake2b); - - const wp = createWorkPackage({ - anchorHash, - stateRoot, - lookupAnchorSlot: state.timeslot, - authCodeHash, - authToken: BytesBlob.empty(), - authConfiguration: BytesBlob.empty(), - }); - - const result = await inCore.refine(hashWorkPackage(spec, wp), tryAsCoreIndex(0), asKnownSize([[]]), asKnownSize([[]])); - - assert.strictEqual(result.isError, true); - assert.strictEqual(result.error, RefineError.AuthorizationError); - }); -}); diff --git a/packages/jam/in-core/in-core.test.ts b/packages/jam/in-core/in-core.test.ts index 7cda9f7f7..ba4bccbe1 100644 --- a/packages/jam/in-core/in-core.test.ts +++ b/packages/jam/in-core/in-core.test.ts @@ -1,6 +1,8 @@ import assert from "node:assert"; +import { readFileSync } from "node:fs"; +import { resolve } from "node:path"; import { before, describe, it } from "node:test"; -import type { HeaderHash, StateRootHash } from "@typeberry/block"; +import type { CodeHash, HeaderHash, StateRootHash } from 
"@typeberry/block"; import { tryAsCoreIndex, tryAsServiceGas, tryAsServiceId, tryAsTimeSlot } from "@typeberry/block"; import type { WorkPackageHash } from "@typeberry/block/refine-context.js"; import { RefineContext } from "@typeberry/block/refine-context.js"; @@ -8,24 +10,54 @@ import { WorkItem } from "@typeberry/block/work-item.js"; import { tryAsWorkItemsCount, WorkPackage } from "@typeberry/block/work-package.js"; import { Bytes, BytesBlob } from "@typeberry/bytes"; import { Encoder } from "@typeberry/codec"; -import { asKnownSize, FixedSizeArray } from "@typeberry/collections"; +import { asKnownSize, FixedSizeArray, HashDictionary } from "@typeberry/collections"; import { type ChainSpec, PvmBackend, tinyChainSpec } from "@typeberry/config"; import { InMemoryStates } from "@typeberry/database"; -import { Blake2b, HASH_SIZE, WithHash } from "@typeberry/hash"; -import { tryAsU16 } from "@typeberry/numbers"; -import { testState } from "@typeberry/state/test.utils.js"; +import { Blake2b, HASH_SIZE, type OpaqueHash, WithHash } from "@typeberry/hash"; +import { tryAsU16, tryAsU32, tryAsU64 } from "@typeberry/numbers"; +import { InMemoryService, InMemoryState, PreimageItem, ServiceAccountInfo } from "@typeberry/state"; import { InCore, RefineError } from "./in-core.js"; +// Load the authorizer PVM fixture (checks authToken === authConfiguration). 
+const AUTHORIZER_PVM = BytesBlob.blobFrom(readFileSync(resolve(import.meta.dirname, "fixtures/authorizer.pvm"))); +const AUTH_SERVICE_ID = tryAsServiceId(1); + let blake2b: Blake2b; before(async () => { blake2b = await Blake2b.createHasher(); }); -function createWorkItem(serviceId = 1) { +function getAuthCodeHash() { + return blake2b.hashBytes(AUTHORIZER_PVM).asOpaque(); +} + +function createService(serviceId: typeof AUTH_SERVICE_ID, codeHash: OpaqueHash, code: BytesBlob): InMemoryService { + return new InMemoryService(serviceId, { + info: ServiceAccountInfo.create({ + codeHash: codeHash.asOpaque(), + balance: tryAsU64(10_000_000_000), + accumulateMinGas: tryAsServiceGas(0n), + onTransferMinGas: tryAsServiceGas(0n), + storageUtilisationBytes: tryAsU64(0), + storageUtilisationCount: tryAsU32(0), + gratisStorage: tryAsU64(0), + created: tryAsTimeSlot(0), + lastAccumulation: tryAsTimeSlot(0), + parentService: tryAsServiceId(0), + }), + preimages: HashDictionary.fromEntries( + [PreimageItem.create({ hash: codeHash.asOpaque(), blob: code })].map((x) => [x.hash, x]), + ), + lookupHistory: HashDictionary.fromEntries([]), + storage: new Map(), + }); +} + +function createWorkItem(codeHash: CodeHash, serviceId = 1) { return WorkItem.create({ service: tryAsServiceId(serviceId), - codeHash: Bytes.zero(HASH_SIZE).asOpaque(), + codeHash, payload: BytesBlob.empty(), refineGasLimit: tryAsServiceGas(1_000_000), accumulateGasLimit: tryAsServiceGas(1_000_000), @@ -35,11 +67,16 @@ function createWorkItem(serviceId = 1) { }); } -function createWorkPackage(anchorHash: HeaderHash, stateRoot: StateRootHash, lookupAnchorSlot = 0) { +function createWorkPackage( + anchorHash: HeaderHash, + stateRoot: StateRootHash, + authCodeHash: CodeHash, + lookupAnchorSlot = 0, +) { return WorkPackage.create({ authToken: BytesBlob.empty(), - authCodeHost: tryAsServiceId(1), - authCodeHash: Bytes.zero(HASH_SIZE).asOpaque(), + authCodeHost: AUTH_SERVICE_ID, + authCodeHash, authConfiguration: 
BytesBlob.empty(), context: RefineContext.create({ anchor: anchorHash, @@ -49,7 +86,7 @@ function createWorkPackage(anchorHash: HeaderHash, stateRoot: StateRootHash, loo lookupAnchorSlot: tryAsTimeSlot(lookupAnchorSlot), prerequisites: [], }), - items: FixedSizeArray.new([createWorkItem()], tryAsWorkItemsCount(1)), + items: FixedSizeArray.new([createWorkItem(authCodeHash)], tryAsWorkItemsCount(1)), }); } @@ -68,7 +105,8 @@ describe("InCore", () => { const anchorHash = Bytes.fill(HASH_SIZE, 1).asOpaque(); const stateRoot = Bytes.zero(HASH_SIZE).asOpaque(); - const workPackage = createWorkPackage(anchorHash, stateRoot); + const authCodeHash = getAuthCodeHash(); + const workPackage = createWorkPackage(anchorHash, stateRoot, authCodeHash); const result = await inCore.refine( hashWorkPackage(spec, workPackage), @@ -86,12 +124,16 @@ describe("InCore", () => { const states = new InMemoryStates(spec); const inCore = new InCore(spec, states, PvmBackend.BuiltIn, blake2b); + const authCodeHash = getAuthCodeHash(); const anchorHash = Bytes.fill(HASH_SIZE, 1).asOpaque(); - const state = testState(); + const state = InMemoryState.partial(spec, { + timeslot: tryAsTimeSlot(16), + services: new Map([[AUTH_SERVICE_ID, createService(AUTH_SERVICE_ID, authCodeHash, AUTHORIZER_PVM)]]), + }); await states.insertInitialState(anchorHash, state); const correctStateRoot = await states.getStateRoot(state); - const workPackage = createWorkPackage(anchorHash, correctStateRoot, state.timeslot); + const workPackage = createWorkPackage(anchorHash, correctStateRoot, authCodeHash, state.timeslot); const result = await inCore.refine( hashWorkPackage(spec, workPackage), @@ -100,7 +142,7 @@ describe("InCore", () => { asKnownSize([[]]), ); - assert.strictEqual(result.isOk, true); + assert.strictEqual(result.isOk, true, `Expected OK but got error: ${result.isError ? 
result.details() : ""}`); assert.strictEqual(result.ok.report.coreIndex, 0); assert.strictEqual(result.ok.report.results.length, 1); }); diff --git a/packages/jam/in-core/in-core.ts b/packages/jam/in-core/in-core.ts index 4f081e911..5e8a46531 100644 --- a/packages/jam/in-core/in-core.ts +++ b/packages/jam/in-core/in-core.ts @@ -1,38 +1,20 @@ -import { - type CodeHash, - type CoreIndex, - type Segment, - type SegmentIndex, - type ServiceGas, - type ServiceId, - tryAsCoreIndex, - tryAsServiceGas, -} from "@typeberry/block"; -import { G_I, W_A, W_C } from "@typeberry/block/gp-constants.js"; -import { - type AuthorizerHash, - type RefineContext, - type WorkPackageHash, - WorkPackageInfo, -} from "@typeberry/block/refine-context.js"; -import type { WorkItem, WorkItemExtrinsic } from "@typeberry/block/work-item.js"; +import type { CoreIndex, Segment, SegmentIndex } from "@typeberry/block"; +import { type RefineContext, type WorkPackageHash, WorkPackageInfo } from "@typeberry/block/refine-context.js"; +import type { WorkItemExtrinsic } from "@typeberry/block/work-item.js"; import type { WorkPackage } from "@typeberry/block/work-package.js"; import { WorkPackageSpec, WorkReport } from "@typeberry/block/work-report.js"; -import { WorkExecResult, WorkExecResultKind, WorkRefineLoad, WorkResult } from "@typeberry/block/work-result.js"; -import { Bytes, BytesBlob } from "@typeberry/bytes"; -import { codec, Encoder } from "@typeberry/codec"; +import type { WorkResult } from "@typeberry/block/work-result.js"; +import { Bytes } from "@typeberry/bytes"; import { asKnownSize, FixedSizeArray, type KnownSizeArray } from "@typeberry/collections"; import type { ChainSpec, PvmBackend } from "@typeberry/config"; import type { StatesDb } from "@typeberry/database"; -import { PvmExecutor, type RefineHostCallExternalities, ReturnStatus, type ReturnValue } from "@typeberry/executor"; -import { type Blake2b, HASH_SIZE, type WithHash } from "@typeberry/hash"; +import type { Blake2b, WithHash } 
from "@typeberry/hash"; +import { HASH_SIZE } from "@typeberry/hash"; import { Logger } from "@typeberry/logger"; import { tryAsU8, tryAsU16, tryAsU32 } from "@typeberry/numbers"; -import type { State } from "@typeberry/state"; -import { IsAuthorizedFetchExternalities } from "@typeberry/transition/externalities/is-authorized-fetch-externalities.js"; -import { RefineFetchExternalities } from "@typeberry/transition/externalities/refine-fetch-externalities.js"; -import { assertEmpty, assertNever, Result } from "@typeberry/utils"; -import { RefineExternalitiesImpl } from "./externalities/refine.js"; +import { assertEmpty, Result } from "@typeberry/utils"; +import { AuthorizationError, type AuthorizationOk, IsAuthorized } from "./is-authorized.js"; +import { Refine } from "./refine.js"; export type RefineResult = { report: WorkReport; @@ -55,32 +37,6 @@ export enum RefineError { AuthorizationError = 3, } -enum ServiceCodeError { - /** Service id is not found in the state. */ - ServiceNotFound = 0, - /** Expected service code does not match the state one. */ - ServiceCodeMismatch = 1, - /** Code preimage missing. */ - ServiceCodeMissing = 2, - /** Code blob is too big. */ - ServiceCodeTooBig = 3, -} - -enum AuthorizationError { - /** BAD: authorizer code not found (service or preimage missing). */ - CodeNotFound = 0, - /** BIG: authorizer code exceeds W_A limit. */ - CodeTooBig = 1, - /** PANIC/OOG: PVM execution failed. 
*/ - PvmFailed = 2, -} - -type AuthorizationOk = { - authorizerHash: AuthorizerHash; - authorizationGasUsed: ServiceGas; - authorizationOutput: BytesBlob; -}; - export type PerWorkItem = KnownSizeArray; export type ImportedSegment = { @@ -90,28 +46,19 @@ export type ImportedSegment = { const logger = Logger.new(import.meta.filename, "refine"); -/** https://graypaper.fluffylabs.dev/#/ab2cdbd/2ffe002ffe00?v=0.7.2 */ -const REFINE_ARGS_CODEC = codec.object({ - core: codec.varU32.convert( - (x) => tryAsU32(x), - (x) => tryAsCoreIndex(x), - ), - workItemIndex: codec.varU32, - serviceId: codec.varU32.asOpaque(), - payloadLength: codec.varU32, - packageHash: codec.bytes(HASH_SIZE).asOpaque(), -}); -const AUTH_ARGS_CODEC = codec.object({ - coreIndex: codec.u16, -}); - export class InCore { + private readonly isAuthorized: IsAuthorized; + private readonly refineItem: Refine; + constructor( public readonly chainSpec: ChainSpec, private readonly states: StatesDb, - private readonly pvmBackend: PvmBackend, - private readonly blake2b: Blake2b, - ) {} + pvmBackend: PvmBackend, + blake2b: Blake2b, + ) { + this.isAuthorized = new IsAuthorized(chainSpec, pvmBackend, blake2b); + this.refineItem = new Refine(chainSpec, pvmBackend, blake2b); + } /** * Work-report computation function. 
@@ -172,7 +119,7 @@ export class InCore { } // Check authorization - const authResult = await this.authorizePackage( + const authResult = await this.isAuthorized.invoke( state, core, authToken, @@ -191,11 +138,11 @@ export class InCore { // Verify the work items let exportOffset = 0; - const refineResults: Awaited>[] = []; + const refineResults: RefineItemResult[] = []; for (const [idx, item] of items.entries()) { logger.info`[core:${core}][i:${idx}] Refining item for service ${item.service}.`; - const result = await this.refineItem( + const result = await this.refineItem.invoke( state, lookupState, idx, @@ -212,11 +159,11 @@ export class InCore { // amalgamate the work report now return Result.ok( - this.amalgamateWorkReport(asKnownSize(refineResults), authResult.ok, workPackageHash, context, core), + InCore.amalgamateWorkReport(asKnownSize(refineResults), authResult.ok, workPackageHash, context, core), ); } - private amalgamateWorkReport( + private static amalgamateWorkReport( refineResults: PerWorkItem, authResult: AuthorizationOk, workPackageHash: WorkPackageHash, @@ -270,262 +217,4 @@ export class InCore { exports: asKnownSize(exports), }; } - - /** - * IsAuthorized invocation. 
- * - * https://graypaper.fluffylabs.dev/#/ab2cdbd/2e64002e6400?v=0.7.2 - */ - private async authorizePackage( - state: State, - coreIndex: CoreIndex, - authToken: BytesBlob, - authCodeHost: ServiceId, - authCodeHash: CodeHash, - authConfiguration: BytesBlob, - ): Promise> { - // Look up the authorizer code from the auth code host service - const service = state.getService(authCodeHost); - // https://graypaper.fluffylabs.dev/#/ab2cdbd/2eca002eca00?v=0.7.2 - if (service === null) { - return Result.error( - AuthorizationError.CodeNotFound, - () => `Auth code host service ${authCodeHost} not found in state.`, - ); - } - - const code = service.getPreimage(authCodeHash.asOpaque()); - if (code === null) { - return Result.error( - AuthorizationError.CodeNotFound, - () => `Auth code preimage ${authCodeHash} not found in service ${authCodeHost}.`, - ); - } - - // BIG: code exceeds W_A - // https://graypaper.fluffylabs.dev/#/ab2cdbd/2ed6002ed600?v=0.7.2 - if (code.length > W_A) { - return Result.error( - AuthorizationError.CodeTooBig, - () => `Auth code is too big: ${code.length} bytes vs ${W_A} max.`, - ); - } - - // Prepare fetch externalities and executor - const fetchExternalities = new IsAuthorizedFetchExternalities(this.chainSpec, { - authToken, - authConfiguration, - }); - const executor = await PvmExecutor.createIsAuthorizedExecutor( - authCodeHost, - code, - { fetchExternalities }, - this.pvmBackend, - ); - - const args = Encoder.encodeObject(AUTH_ARGS_CODEC, { - coreIndex, - }); - - // Run PVM with gas budget G_I - const gasLimit = tryAsServiceGas(G_I); - const execResult = await executor.run(args, gasLimit); - - if (execResult.status !== ReturnStatus.OK) { - return Result.error( - AuthorizationError.PvmFailed, - () => - `IsAuthorized PVM ${ReturnStatus[execResult.status]} (gas used: ${execResult.consumedGas}).`, - ); - } - - // Compute authorizer hash: H(code_hash ++ configuration) - // https://graypaper.fluffylabs.dev/#/ab2cdbd/1b81011b8401?v=0.7.2 - const 
authorizerHash = this.blake2b.hashBlobs([authCodeHash, authConfiguration]); - const authorizationOutput = BytesBlob.blobFrom(execResult.memorySlice); - const authorizationGasUsed = tryAsServiceGas(execResult.consumedGas); - - return Result.ok({ authorizerHash, authorizationGasUsed, authorizationOutput }); - } - - private async refineItem( - state: State, - lookupState: State, - idx: number, - item: WorkItem, - allImports: PerWorkItem, - allExtrinsics: PerWorkItem, - coreIndex: CoreIndex, - workPackageHash: WorkPackageHash, - exportOffset: number, - ): Promise { - const payloadHash = this.blake2b.hashBytes(item.payload); - const baseResult = { - serviceId: item.service, - codeHash: item.codeHash, - payloadHash, - gas: item.refineGasLimit, - }; - const imports = allImports[idx]; - const extrinsics = allExtrinsics[idx]; - const baseLoad = { - importedSegments: tryAsU32(imports.length), - extrinsicCount: tryAsU32(extrinsics.length), - extrinsicSize: tryAsU32(extrinsics.reduce((acc, x) => acc + x.length, 0)), - }; - const maybeCode = this.getServiceCode(state, idx, item); - - if (maybeCode.isError) { - const error = - maybeCode.error === ServiceCodeError.ServiceCodeTooBig - ? 
WorkExecResultKind.codeOversize - : WorkExecResultKind.badCode; - return { - exports: [], - result: WorkResult.create({ - ...baseResult, - result: WorkExecResult.error(error), - load: WorkRefineLoad.create({ - ...baseLoad, - gasUsed: tryAsServiceGas(item.refineGasLimit), - exportedSegments: tryAsU32(0), - }), - }), - }; - } - - const code = maybeCode.ok; - const externalities = this.createRefineExternalities({ - payload: item.payload, - imports: allImports, - extrinsics: allExtrinsics, - currentServiceId: item.service, - lookupState, - exportOffset, - }); - - const executor = await PvmExecutor.createRefineExecutor(item.service, code, externalities, this.pvmBackend); - - const args = Encoder.encodeObject(REFINE_ARGS_CODEC, { - serviceId: item.service, - core: coreIndex, - workItemIndex: tryAsU32(idx), - payloadLength: tryAsU32(item.payload.length), - packageHash: workPackageHash, - }); - - const execResult = await executor.run(args, item.refineGasLimit); - - const exports = externalities.refine.getExportedSegments(); - if (exports.length !== item.exportCount) { - return { - exports, - result: WorkResult.create({ - ...baseResult, - result: WorkExecResult.error(WorkExecResultKind.incorrectNumberOfExports), - load: WorkRefineLoad.create({ - ...baseLoad, - gasUsed: tryAsServiceGas(item.refineGasLimit), - exportedSegments: tryAsU32(0), - }), - }), - }; - } - - const result = this.extractWorkResult(execResult); - - return { - exports, - result: WorkResult.create({ - ...baseResult, - result, - load: WorkRefineLoad.create({ - ...baseLoad, - gasUsed: tryAsServiceGas(execResult.consumedGas), - exportedSegments: tryAsU32(exports.length), - }), - }), - }; - } - - extractWorkResult(execResult: ReturnValue) { - if (execResult.status === ReturnStatus.OK) { - const slice = execResult.memorySlice; - // TODO [ToDr] Verify the output size and change digestTooBig? 
- return WorkExecResult.ok(BytesBlob.blobFrom(slice)); - } - - switch (execResult.status) { - case ReturnStatus.OOG: - return WorkExecResult.error(WorkExecResultKind.outOfGas); - case ReturnStatus.PANIC: - return WorkExecResult.error(WorkExecResultKind.panic); - default: - assertNever(execResult); - } - } - - private getServiceCode(state: State, idx: number, item: WorkItem) { - const serviceId = item.service; - const service = state.getService(serviceId); - // TODO [ToDr] GP link - // missing service - if (service === null) { - return Result.error( - ServiceCodeError.ServiceNotFound, - () => `[i:${idx}] Service ${serviceId} is missing in state.`, - ); - } - - // TODO [ToDr] GP link - // TODO [ToDr] shall we rather use the old codehash instead - if (!service.getInfo().codeHash.isEqualTo(item.codeHash)) { - return Result.error( - ServiceCodeError.ServiceCodeMismatch, - () => - `[i:${idx}] Service ${serviceId} has invalid code hash. Ours: ${service.getInfo().codeHash}, expected: ${item.codeHash}`, - ); - } - - const code = service.getPreimage(item.codeHash.asOpaque()); - if (code === null) { - return Result.error( - ServiceCodeError.ServiceCodeMissing, - () => `[i:${idx}] Code ${item.codeHash} for service ${serviceId} was not found.`, - ); - } - - if (code.length > W_C) { - return Result.error( - ServiceCodeError.ServiceCodeTooBig, - () => - `[i:${idx}] Code ${item.codeHash} for service ${serviceId} is too big! 
${code.length} bytes vs ${W_C} bytes max.`, - ); - } - - return Result.ok(code); - } - - private createRefineExternalities(args: { - payload: BytesBlob; - imports: PerWorkItem; - extrinsics: PerWorkItem; - currentServiceId: ServiceId; - lookupState: State; - exportOffset: number; - }): RefineHostCallExternalities { - // TODO [ToDr] Pass all required fetch data - const fetchExternalities = new RefineFetchExternalities(this.chainSpec); - const refine = RefineExternalitiesImpl.create({ - currentServiceId: args.currentServiceId, - lookupState: args.lookupState, - exportOffset: args.exportOffset, - pvmBackend: this.pvmBackend, - }); - - return { - fetchExternalities, - refine, - }; - } } diff --git a/packages/jam/in-core/is-authorized.test.ts b/packages/jam/in-core/is-authorized.test.ts new file mode 100644 index 000000000..f9bb1d4e5 --- /dev/null +++ b/packages/jam/in-core/is-authorized.test.ts @@ -0,0 +1,184 @@ +import assert from "node:assert"; +import { readFileSync } from "node:fs"; +import { resolve } from "node:path"; +import { before, describe, it } from "node:test"; +import type { CodeHash } from "@typeberry/block"; +import { tryAsCoreIndex, tryAsServiceGas, tryAsServiceId, tryAsTimeSlot } from "@typeberry/block"; +import { Bytes, BytesBlob } from "@typeberry/bytes"; +import { HashDictionary } from "@typeberry/collections"; +import { PvmBackend, tinyChainSpec } from "@typeberry/config"; +import { Blake2b, HASH_SIZE, type OpaqueHash } from "@typeberry/hash"; +import { tryAsU32, tryAsU64 } from "@typeberry/numbers"; +import { InMemoryService, InMemoryState, PreimageItem, ServiceAccountInfo } from "@typeberry/state"; +import { AuthorizationError, IsAuthorized } from "./is-authorized.js"; + +let blake2b: Blake2b; + +before(async () => { + blake2b = await Blake2b.createHasher(); +}); + +// Load the authorizer PVM fixture. +// This authorizer checks that authToken === authConfiguration and returns "Auth=". 
+const AUTHORIZER_PVM = BytesBlob.blobFrom(readFileSync(resolve(import.meta.dirname, "fixtures/authorizer.pvm"))); + +const AUTH_SERVICE_ID = tryAsServiceId(42); + +function createService(serviceId: typeof AUTH_SERVICE_ID, codeHash: OpaqueHash, code: BytesBlob): InMemoryService { + return new InMemoryService(serviceId, { + info: ServiceAccountInfo.create({ + codeHash: codeHash.asOpaque(), + balance: tryAsU64(10_000_000_000), + accumulateMinGas: tryAsServiceGas(0n), + onTransferMinGas: tryAsServiceGas(0n), + storageUtilisationBytes: tryAsU64(0), + storageUtilisationCount: tryAsU32(0), + gratisStorage: tryAsU64(0), + created: tryAsTimeSlot(0), + lastAccumulation: tryAsTimeSlot(0), + parentService: tryAsServiceId(0), + }), + preimages: HashDictionary.fromEntries( + [PreimageItem.create({ hash: codeHash.asOpaque(), blob: code })].map((x) => [x.hash, x]), + ), + lookupHistory: HashDictionary.fromEntries([]), + storage: new Map(), + }); +} + +describe("IsAuthorized", () => { + const spec = tinyChainSpec; + + function getAuthCodeHash() { + return blake2b.hashBytes(AUTHORIZER_PVM).asOpaque(); + } + + function createStateWithService(codeHash: OpaqueHash, code: BytesBlob) { + return InMemoryState.partial(spec, { + timeslot: tryAsTimeSlot(16), + services: new Map([[AUTH_SERVICE_ID, createService(AUTH_SERVICE_ID, codeHash, code)]]), + }); + } + + it("should authorize when token matches configuration", async () => { + const authCodeHash = getAuthCodeHash(); + const state = createStateWithService(authCodeHash, AUTHORIZER_PVM); + const isAuthorized = new IsAuthorized(spec, PvmBackend.BuiltIn, blake2b); + const token = BytesBlob.blobFromString("hello"); + + const result = await isAuthorized.invoke(state, tryAsCoreIndex(0), token, AUTH_SERVICE_ID, authCodeHash, token); + + assert.strictEqual(result.isOk, true, `Expected OK but got error: ${result.isError ? 
result.details() : ""}`); + + // Verify the authorization output starts with "Auth=" + const outputStr = Buffer.from(result.ok.authorizationOutput.raw).toString("utf8"); + assert.ok( + outputStr.startsWith("Auth="), + `Expected "Auth=" prefix but got "${outputStr.slice(0, 30)}"`, + ); + + // Verify the authorizer hash is H(code_hash ++ configuration) + const expectedHash = blake2b.hashBlobs([authCodeHash, token]); + assert.ok(result.ok.authorizerHash.isEqualTo(expectedHash), "authorizerHash should be H(code_hash || config)"); + + // Verify gas was consumed + assert.ok(Number(result.ok.authorizationGasUsed) > 0, "should have consumed some gas"); + }); + + it("should authorize with empty token and configuration", async () => { + const authCodeHash = getAuthCodeHash(); + const state = createStateWithService(authCodeHash, AUTHORIZER_PVM); + const isAuthorized = new IsAuthorized(spec, PvmBackend.BuiltIn, blake2b); + + const result = await isAuthorized.invoke( + state, + tryAsCoreIndex(0), + BytesBlob.empty(), + AUTH_SERVICE_ID, + authCodeHash, + BytesBlob.empty(), + ); + + assert.strictEqual(result.isOk, true, `Expected OK but got error: ${result.isError ? 
result.details() : ""}`); + const outputStr = Buffer.from(result.ok.authorizationOutput.raw).toString("utf8"); + assert.ok(outputStr.startsWith("Auth=<>"), `Expected "Auth=<>" prefix but got "${outputStr.slice(0, 30)}"`); + }); + + it("should fail when token does not match configuration", async () => { + const authCodeHash = getAuthCodeHash(); + const state = createStateWithService(authCodeHash, AUTHORIZER_PVM); + const isAuthorized = new IsAuthorized(spec, PvmBackend.BuiltIn, blake2b); + + const result = await isAuthorized.invoke( + state, + tryAsCoreIndex(0), + BytesBlob.blobFromString("wrong"), + AUTH_SERVICE_ID, + authCodeHash, + BytesBlob.blobFromString("right"), + ); + + assert.strictEqual(result.isError, true); + assert.strictEqual(result.error, AuthorizationError.PvmFailed); + }); + + it("should fail when auth code host service is missing", async () => { + const authCodeHash = getAuthCodeHash(); + const state = InMemoryState.partial(spec, { + timeslot: tryAsTimeSlot(16), + services: new Map(), + }); + const isAuthorized = new IsAuthorized(spec, PvmBackend.BuiltIn, blake2b); + + const result = await isAuthorized.invoke( + state, + tryAsCoreIndex(0), + BytesBlob.empty(), + AUTH_SERVICE_ID, + authCodeHash, + BytesBlob.empty(), + ); + + assert.strictEqual(result.isError, true); + assert.strictEqual(result.error, AuthorizationError.CodeNotFound); + }); + + it("should fail when auth code preimage is missing", async () => { + const authCodeHash = getAuthCodeHash(); + // Service exists but with no preimages + const emptyService = new InMemoryService(AUTH_SERVICE_ID, { + info: ServiceAccountInfo.create({ + codeHash: Bytes.zero(HASH_SIZE).asOpaque(), + balance: tryAsU64(0), + accumulateMinGas: tryAsServiceGas(0n), + onTransferMinGas: tryAsServiceGas(0n), + storageUtilisationBytes: tryAsU64(0), + storageUtilisationCount: tryAsU32(0), + gratisStorage: tryAsU64(0), + created: tryAsTimeSlot(0), + lastAccumulation: tryAsTimeSlot(0), + parentService: tryAsServiceId(0), + 
}), + preimages: HashDictionary.fromEntries([]), + lookupHistory: HashDictionary.fromEntries([]), + storage: new Map(), + }); + const state = InMemoryState.partial(spec, { + timeslot: tryAsTimeSlot(16), + services: new Map([[AUTH_SERVICE_ID, emptyService]]), + }); + const isAuthorized = new IsAuthorized(spec, PvmBackend.BuiltIn, blake2b); + + const result = await isAuthorized.invoke( + state, + tryAsCoreIndex(0), + BytesBlob.empty(), + AUTH_SERVICE_ID, + authCodeHash, + BytesBlob.empty(), + ); + + assert.strictEqual(result.isError, true); + assert.strictEqual(result.error, AuthorizationError.CodeNotFound); + }); +}); diff --git a/packages/jam/in-core/is-authorized.ts b/packages/jam/in-core/is-authorized.ts new file mode 100644 index 000000000..1469c09dc --- /dev/null +++ b/packages/jam/in-core/is-authorized.ts @@ -0,0 +1,114 @@ +import { type CodeHash, type CoreIndex, type ServiceGas, type ServiceId, tryAsServiceGas } from "@typeberry/block"; +import { G_I, W_A } from "@typeberry/block/gp-constants.js"; +import type { AuthorizerHash } from "@typeberry/block/refine-context.js"; +import { BytesBlob } from "@typeberry/bytes"; +import { codec, Encoder } from "@typeberry/codec"; +import type { ChainSpec, PvmBackend } from "@typeberry/config"; +import { PvmExecutor, ReturnStatus } from "@typeberry/executor"; +import type { Blake2b } from "@typeberry/hash"; +import type { State } from "@typeberry/state"; +import { IsAuthorizedFetchExternalities } from "@typeberry/transition/externalities/is-authorized-fetch-externalities.js"; +import { Result } from "@typeberry/utils"; + +export enum AuthorizationError { + /** BAD: authorizer code not found (service or preimage missing). */ + CodeNotFound = 0, + /** BIG: authorizer code exceeds W_A limit. */ + CodeTooBig = 1, + /** PANIC/OOG: PVM execution failed. 
*/ + PvmFailed = 2, +} + +export type AuthorizationOk = { + authorizerHash: AuthorizerHash; + authorizationGasUsed: ServiceGas; + authorizationOutput: BytesBlob; +}; + +const AUTH_ARGS_CODEC = codec.object({ + coreIndex: codec.u16, +}); + +/** + * IsAuthorized PVM invocation (Psi_I). + * + * https://graypaper.fluffylabs.dev/#/ab2cdbd/2e64002e6400?v=0.7.2 + */ +export class IsAuthorized { + constructor( + private readonly chainSpec: ChainSpec, + private readonly pvmBackend: PvmBackend, + private readonly blake2b: Blake2b, + ) {} + + async invoke( + state: State, + coreIndex: CoreIndex, + authToken: BytesBlob, + authCodeHost: ServiceId, + authCodeHash: CodeHash, + authConfiguration: BytesBlob, + ): Promise> { + // Look up the authorizer code from the auth code host service + const service = state.getService(authCodeHost); + // https://graypaper.fluffylabs.dev/#/ab2cdbd/2eca002eca00?v=0.7.2 + if (service === null) { + return Result.error( + AuthorizationError.CodeNotFound, + () => `Auth code host service ${authCodeHost} not found in state.`, + ); + } + + const code = service.getPreimage(authCodeHash.asOpaque()); + if (code === null) { + return Result.error( + AuthorizationError.CodeNotFound, + () => `Auth code preimage ${authCodeHash} not found in service ${authCodeHost}.`, + ); + } + + // BIG: code exceeds W_A + // https://graypaper.fluffylabs.dev/#/ab2cdbd/2ed6002ed600?v=0.7.2 + if (code.length > W_A) { + return Result.error( + AuthorizationError.CodeTooBig, + () => `Auth code is too big: ${code.length} bytes vs ${W_A} max.`, + ); + } + + // Prepare fetch externalities and executor + const fetchExternalities = new IsAuthorizedFetchExternalities(this.chainSpec, { + authToken, + authConfiguration, + }); + const executor = await PvmExecutor.createIsAuthorizedExecutor( + authCodeHost, + code, + { fetchExternalities }, + this.pvmBackend, + ); + + const args = Encoder.encodeObject(AUTH_ARGS_CODEC, { + coreIndex, + }); + + // Run PVM with gas budget G_I + const gasLimit = 
tryAsServiceGas(G_I); + const execResult = await executor.run(args, gasLimit); + + if (execResult.status !== ReturnStatus.OK) { + return Result.error( + AuthorizationError.PvmFailed, + () => `IsAuthorized PVM ${ReturnStatus[execResult.status]} (gas used: ${execResult.consumedGas}).`, + ); + } + + // Compute authorizer hash: H(code_hash ++ configuration) + // https://graypaper.fluffylabs.dev/#/ab2cdbd/1b81011b8401?v=0.7.2 + const authorizerHash = this.blake2b.hashBlobs([authCodeHash, authConfiguration]); + const authorizationOutput = BytesBlob.blobFrom(execResult.memorySlice); + const authorizationGasUsed = tryAsServiceGas(execResult.consumedGas); + + return Result.ok({ authorizerHash, authorizationGasUsed, authorizationOutput }); + } +} diff --git a/packages/jam/in-core/refine.test.ts b/packages/jam/in-core/refine.test.ts new file mode 100644 index 000000000..7d294828e --- /dev/null +++ b/packages/jam/in-core/refine.test.ts @@ -0,0 +1,7 @@ +import { describe } from "node:test"; + +describe("Refine", () => { + // TODO [ToDr] Add refine-specific PVM invocation tests. + // These should test Refine.invoke() directly, similar to + // how is-authorized.test.ts tests IsAuthorized.invoke(). 
+}); diff --git a/packages/jam/in-core/refine.ts b/packages/jam/in-core/refine.ts new file mode 100644 index 000000000..db5c6e1ae --- /dev/null +++ b/packages/jam/in-core/refine.ts @@ -0,0 +1,233 @@ +import { type CoreIndex, type ServiceGas, type ServiceId, tryAsCoreIndex, tryAsServiceGas } from "@typeberry/block"; +import { W_C } from "@typeberry/block/gp-constants.js"; +import type { WorkPackageHash } from "@typeberry/block/refine-context.js"; +import type { WorkItem, WorkItemExtrinsic } from "@typeberry/block/work-item.js"; +import { WorkExecResult, WorkExecResultKind, WorkRefineLoad, WorkResult } from "@typeberry/block/work-result.js"; +import { BytesBlob } from "@typeberry/bytes"; +import { codec, Encoder } from "@typeberry/codec"; +import type { ChainSpec, PvmBackend } from "@typeberry/config"; +import { PvmExecutor, type RefineHostCallExternalities, ReturnStatus, type ReturnValue } from "@typeberry/executor"; +import { type Blake2b, HASH_SIZE } from "@typeberry/hash"; +import { tryAsU32 } from "@typeberry/numbers"; +import type { State } from "@typeberry/state"; +import { RefineFetchExternalities } from "@typeberry/transition/externalities/refine-fetch-externalities.js"; +import { assertNever, Result } from "@typeberry/utils"; +import { RefineExternalitiesImpl } from "./externalities/refine.js"; +import type { ImportedSegment, PerWorkItem, RefineItemResult } from "./in-core.js"; + +enum ServiceCodeError { + /** Service id is not found in the state. */ + ServiceNotFound = 0, + /** Expected service code does not match the state one. */ + ServiceCodeMismatch = 1, + /** Code preimage missing. */ + ServiceCodeMissing = 2, + /** Code blob is too big. 
*/ + ServiceCodeTooBig = 3, +} + +/** https://graypaper.fluffylabs.dev/#/ab2cdbd/2ffe002ffe00?v=0.7.2 */ +const REFINE_ARGS_CODEC = codec.object({ + core: codec.varU32.convert( + (x) => tryAsU32(x), + (x) => tryAsCoreIndex(x), + ), + workItemIndex: codec.varU32, + serviceId: codec.varU32.asOpaque(), + payloadLength: codec.varU32, + packageHash: codec.bytes(HASH_SIZE).asOpaque(), +}); + +/** + * Refine PVM invocation (Psi_R). + * + * Executes a single work item's refinement logic. + */ +export class Refine { + constructor( + private readonly chainSpec: ChainSpec, + private readonly pvmBackend: PvmBackend, + private readonly blake2b: Blake2b, + ) {} + + async invoke( + state: State, + lookupState: State, + idx: number, + item: WorkItem, + allImports: PerWorkItem, + allExtrinsics: PerWorkItem, + coreIndex: CoreIndex, + workPackageHash: WorkPackageHash, + exportOffset: number, + ): Promise { + const payloadHash = this.blake2b.hashBytes(item.payload); + const baseResult = { + serviceId: item.service, + codeHash: item.codeHash, + payloadHash, + gas: item.refineGasLimit, + }; + const imports = allImports[idx]; + const extrinsics = allExtrinsics[idx]; + const baseLoad = { + importedSegments: tryAsU32(imports.length), + extrinsicCount: tryAsU32(extrinsics.length), + extrinsicSize: tryAsU32(extrinsics.reduce((acc, x) => acc + x.length, 0)), + }; + const maybeCode = this.getServiceCode(state, idx, item); + + if (maybeCode.isError) { + const error = + maybeCode.error === ServiceCodeError.ServiceCodeTooBig + ? 
WorkExecResultKind.codeOversize + : WorkExecResultKind.badCode; + return { + exports: [], + result: WorkResult.create({ + ...baseResult, + result: WorkExecResult.error(error), + load: WorkRefineLoad.create({ + ...baseLoad, + gasUsed: tryAsServiceGas(item.refineGasLimit), + exportedSegments: tryAsU32(0), + }), + }), + }; + } + + const code = maybeCode.ok; + const externalities = this.createRefineExternalities({ + payload: item.payload, + imports: allImports, + extrinsics: allExtrinsics, + currentServiceId: item.service, + lookupState, + exportOffset, + }); + + const executor = await PvmExecutor.createRefineExecutor(item.service, code, externalities, this.pvmBackend); + + const args = Encoder.encodeObject(REFINE_ARGS_CODEC, { + serviceId: item.service, + core: coreIndex, + workItemIndex: tryAsU32(idx), + payloadLength: tryAsU32(item.payload.length), + packageHash: workPackageHash, + }); + + const execResult = await executor.run(args, item.refineGasLimit); + + const exports = externalities.refine.getExportedSegments(); + if (exports.length !== item.exportCount) { + return { + exports, + result: WorkResult.create({ + ...baseResult, + result: WorkExecResult.error(WorkExecResultKind.incorrectNumberOfExports), + load: WorkRefineLoad.create({ + ...baseLoad, + gasUsed: tryAsServiceGas(item.refineGasLimit), + exportedSegments: tryAsU32(0), + }), + }), + }; + } + + const result = Refine.extractWorkResult(execResult); + + return { + exports, + result: WorkResult.create({ + ...baseResult, + result, + load: WorkRefineLoad.create({ + ...baseLoad, + gasUsed: tryAsServiceGas(execResult.consumedGas), + exportedSegments: tryAsU32(exports.length), + }), + }), + }; + } + + static extractWorkResult(execResult: ReturnValue) { + if (execResult.status === ReturnStatus.OK) { + const slice = execResult.memorySlice; + // TODO [ToDr] Verify the output size and change digestTooBig? 
+ return WorkExecResult.ok(BytesBlob.blobFrom(slice)); + } + + switch (execResult.status) { + case ReturnStatus.OOG: + return WorkExecResult.error(WorkExecResultKind.outOfGas); + case ReturnStatus.PANIC: + return WorkExecResult.error(WorkExecResultKind.panic); + default: + assertNever(execResult); + } + } + + private getServiceCode(state: State, idx: number, item: WorkItem) { + const serviceId = item.service; + const service = state.getService(serviceId); + // TODO [ToDr] GP link + // missing service + if (service === null) { + return Result.error( + ServiceCodeError.ServiceNotFound, + () => `[i:${idx}] Service ${serviceId} is missing in state.`, + ); + } + + // TODO [ToDr] GP link + // TODO [ToDr] shall we rather use the old codehash instead + if (!service.getInfo().codeHash.isEqualTo(item.codeHash)) { + return Result.error( + ServiceCodeError.ServiceCodeMismatch, + () => + `[i:${idx}] Service ${serviceId} has invalid code hash. Ours: ${service.getInfo().codeHash}, expected: ${item.codeHash}`, + ); + } + + const code = service.getPreimage(item.codeHash.asOpaque()); + if (code === null) { + return Result.error( + ServiceCodeError.ServiceCodeMissing, + () => `[i:${idx}] Code ${item.codeHash} for service ${serviceId} was not found.`, + ); + } + + if (code.length > W_C) { + return Result.error( + ServiceCodeError.ServiceCodeTooBig, + () => + `[i:${idx}] Code ${item.codeHash} for service ${serviceId} is too big! 
${code.length} bytes vs ${W_C} bytes max.`, + ); + } + + return Result.ok(code); + } + + private createRefineExternalities(args: { + payload: BytesBlob; + imports: PerWorkItem; + extrinsics: PerWorkItem; + currentServiceId: ServiceId; + lookupState: State; + exportOffset: number; + }): RefineHostCallExternalities { + // TODO [ToDr] Pass all required fetch data + const fetchExternalities = new RefineFetchExternalities(this.chainSpec); + const refine = RefineExternalitiesImpl.create({ + currentServiceId: args.currentServiceId, + lookupState: args.lookupState, + exportOffset: args.exportOffset, + pvmBackend: this.pvmBackend, + }); + + return { + fetchExternalities, + refine, + }; + } +} diff --git a/packages/jam/jam-host-calls/general/fetch.test.ts b/packages/jam/jam-host-calls/general/fetch.test.ts index 27c16bc2a..1f5f7eea3 100644 --- a/packages/jam/jam-host-calls/general/fetch.test.ts +++ b/packages/jam/jam-host-calls/general/fetch.test.ts @@ -44,10 +44,13 @@ describe("Fetch", () => { it("should write empty result and set IN_OUT_REG to NONE if fetch returns null", async () => { const currentServiceId = tryAsServiceId(10_000); const fetchMock = new RefineFetchMock(); - // authorizerTraceResponse is null by default — Kind 2 legitimately returns null + // oneWorkItem returns null when the work item index has no mock response registered const blob = BytesBlob.blobFromNumbers([]); - const { registers, memory, readBack } = prepareRegsAndMemory(blob, FetchKind.AuthorizerTrace); + const { registers, memory, readBack } = prepareRegsAndMemory(blob, FetchKind.OneWorkItem); + // set work item index to one that has no response → oneWorkItem returns null + registers.set(11, tryAsU64(999)); + fetchMock.oneWorkItemResponses.set("999", null); const fetch = new Fetch(currentServiceId, fetchMock); const result = await fetch.execute(gas, registers, memory); From 842f693f9182cc734258ab74da24ed155b9bc0cc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tomasz=20Drwi=C4=99ga?= Date: Tue, 7 Apr 
2026 15:56:36 +0200 Subject: [PATCH 4/6] add doc --- packages/jam/in-core/is-authorized.test.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/jam/in-core/is-authorized.test.ts b/packages/jam/in-core/is-authorized.test.ts index f9bb1d4e5..866e6eba8 100644 --- a/packages/jam/in-core/is-authorized.test.ts +++ b/packages/jam/in-core/is-authorized.test.ts @@ -20,6 +20,7 @@ before(async () => { // Load the authorizer PVM fixture. // This authorizer checks that authToken === authConfiguration and returns "Auth=". +// https://github.com/tomusdrw/as-lan/blob/main/examples/authorizer/assembly/authorize.ts const AUTHORIZER_PVM = BytesBlob.blobFrom(readFileSync(resolve(import.meta.dirname, "fixtures/authorizer.pvm"))); const AUTH_SERVICE_ID = tryAsServiceId(42); From a6247f30e1cd123c56dda4566cb47b1e9b2736b1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tomasz=20Drwi=C4=99ga?= Date: Tue, 7 Apr 2026 16:11:36 +0200 Subject: [PATCH 5/6] Fix circular dependency --- packages/jam/in-core/in-core.ts | 21 +++++---------------- packages/jam/in-core/refine.ts | 24 ++++++++++++++++++++++-- 2 files changed, 27 insertions(+), 18 deletions(-) diff --git a/packages/jam/in-core/in-core.ts b/packages/jam/in-core/in-core.ts index 5e8a46531..e4bf46286 100644 --- a/packages/jam/in-core/in-core.ts +++ b/packages/jam/in-core/in-core.ts @@ -1,11 +1,10 @@ -import type { CoreIndex, Segment, SegmentIndex } from "@typeberry/block"; +import type { CoreIndex, Segment } from "@typeberry/block"; import { type RefineContext, type WorkPackageHash, WorkPackageInfo } from "@typeberry/block/refine-context.js"; import type { WorkItemExtrinsic } from "@typeberry/block/work-item.js"; import type { WorkPackage } from "@typeberry/block/work-package.js"; import { WorkPackageSpec, WorkReport } from "@typeberry/block/work-report.js"; -import type { WorkResult } from "@typeberry/block/work-result.js"; import { Bytes } from "@typeberry/bytes"; -import { asKnownSize, FixedSizeArray, type KnownSizeArray } from 
"@typeberry/collections"; +import { asKnownSize, FixedSizeArray } from "@typeberry/collections"; import type { ChainSpec, PvmBackend } from "@typeberry/config"; import type { StatesDb } from "@typeberry/database"; import type { Blake2b, WithHash } from "@typeberry/hash"; @@ -14,18 +13,15 @@ import { Logger } from "@typeberry/logger"; import { tryAsU8, tryAsU16, tryAsU32 } from "@typeberry/numbers"; import { assertEmpty, Result } from "@typeberry/utils"; import { AuthorizationError, type AuthorizationOk, IsAuthorized } from "./is-authorized.js"; -import { Refine } from "./refine.js"; +import { type ImportedSegment, type PerWorkItem, Refine, type RefineItemResult } from "./refine.js"; + +export type { ImportedSegment, PerWorkItem, RefineItemResult } from "./refine.js"; export type RefineResult = { report: WorkReport; exports: PerWorkItem; }; -export type RefineItemResult = { - result: WorkResult; - exports: readonly Segment[]; -}; - export enum RefineError { /** State for context anchor block or lookup anchor is not found in the DB. 
*/ StateMissing = 0, @@ -37,13 +33,6 @@ export enum RefineError { AuthorizationError = 3, } -export type PerWorkItem = KnownSizeArray; - -export type ImportedSegment = { - index: SegmentIndex; - data: Segment; -}; - const logger = Logger.new(import.meta.filename, "refine"); export class InCore { diff --git a/packages/jam/in-core/refine.ts b/packages/jam/in-core/refine.ts index db5c6e1ae..ecd7e9b10 100644 --- a/packages/jam/in-core/refine.ts +++ b/packages/jam/in-core/refine.ts @@ -1,10 +1,19 @@ -import { type CoreIndex, type ServiceGas, type ServiceId, tryAsCoreIndex, tryAsServiceGas } from "@typeberry/block"; +import { + type CoreIndex, + type Segment, + type SegmentIndex, + type ServiceGas, + type ServiceId, + tryAsCoreIndex, + tryAsServiceGas, +} from "@typeberry/block"; import { W_C } from "@typeberry/block/gp-constants.js"; import type { WorkPackageHash } from "@typeberry/block/refine-context.js"; import type { WorkItem, WorkItemExtrinsic } from "@typeberry/block/work-item.js"; import { WorkExecResult, WorkExecResultKind, WorkRefineLoad, WorkResult } from "@typeberry/block/work-result.js"; import { BytesBlob } from "@typeberry/bytes"; import { codec, Encoder } from "@typeberry/codec"; +import type { KnownSizeArray } from "@typeberry/collections"; import type { ChainSpec, PvmBackend } from "@typeberry/config"; import { PvmExecutor, type RefineHostCallExternalities, ReturnStatus, type ReturnValue } from "@typeberry/executor"; import { type Blake2b, HASH_SIZE } from "@typeberry/hash"; @@ -13,7 +22,18 @@ import type { State } from "@typeberry/state"; import { RefineFetchExternalities } from "@typeberry/transition/externalities/refine-fetch-externalities.js"; import { assertNever, Result } from "@typeberry/utils"; import { RefineExternalitiesImpl } from "./externalities/refine.js"; -import type { ImportedSegment, PerWorkItem, RefineItemResult } from "./in-core.js"; + +export type RefineItemResult = { + result: WorkResult; + exports: readonly Segment[]; +}; + 
+export type PerWorkItem = KnownSizeArray; + +export type ImportedSegment = { + index: SegmentIndex; + data: Segment; +}; enum ServiceCodeError { /** Service id is not found in the state. */ From 8b98af35d26f89d049ef3fc1e30507541efd886a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tomasz=20Drwi=C4=99ga?= Date: Tue, 7 Apr 2026 16:36:55 +0200 Subject: [PATCH 6/6] no exports on failure --- packages/jam/in-core/refine.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/jam/in-core/refine.ts b/packages/jam/in-core/refine.ts index ecd7e9b10..0c76c925d 100644 --- a/packages/jam/in-core/refine.ts +++ b/packages/jam/in-core/refine.ts @@ -141,7 +141,7 @@ export class Refine { const exports = externalities.refine.getExportedSegments(); if (exports.length !== item.exportCount) { return { - exports, + exports: [], result: WorkResult.create({ ...baseResult, result: WorkExecResult.error(WorkExecResultKind.incorrectNumberOfExports),