diff --git a/packages/cli/package.json b/packages/cli/package.json index ccbbfd1..4b42f08 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -11,6 +11,7 @@ "dependencies": { "@b64/sfpm-core": "workspace:^0.1.0", "@b64/sfpm-orgs": "workspace:^", + "@b64/sfpm-sfdmu": "workspace:^", "@oclif/core": "^4", "@oclif/plugin-help": "^6", "@oclif/plugin-plugins": "^5", diff --git a/packages/cli/src/commands/build.ts b/packages/cli/src/commands/build.ts index 1a7b299..c0c8b6b 100644 --- a/packages/cli/src/commands/build.ts +++ b/packages/cli/src/commands/build.ts @@ -4,6 +4,8 @@ import { import { Args, Flags, } from '@oclif/core' +// Register SFDMU data builder (side-effect import triggers decorator registration) +import '@b64/sfpm-sfdmu' import SfpmCommand from '../sfpm-command.js' import {BuildProgressRenderer, OutputMode} from '../ui/build-progress-renderer.js' diff --git a/packages/cli/src/commands/install.ts b/packages/cli/src/commands/install.ts index 87bca00..66eacf5 100644 --- a/packages/cli/src/commands/install.ts +++ b/packages/cli/src/commands/install.ts @@ -2,6 +2,8 @@ import { InstallationMode, InstallationSource, InstallOrchestrator, Logger, PackageInstaller, ProjectService, } from '@b64/sfpm-core' import {Args, Flags} from '@oclif/core' +// Register SFDMU data installer (side-effect import triggers decorator registration) +import '@b64/sfpm-sfdmu' import SfpmCommand from '../sfpm-command.js' import {InstallProgressRenderer, OutputMode} from '../ui/install-progress-renderer.js' diff --git a/packages/core/src/artifacts/artifact-assembler.ts b/packages/core/src/artifacts/artifact-assembler.ts index a9ec2d3..9d70f0f 100644 --- a/packages/core/src/artifacts/artifact-assembler.ts +++ b/packages/core/src/artifacts/artifact-assembler.ts @@ -1,55 +1,56 @@ -import path from 'path'; import fs from 'fs-extra'; -import crypto from 'crypto'; -import { execSync } from 'child_process'; -import { EventEmitter } from 'events'; -import { Logger } from 
'../types/logger.js'; -import SfpmPackage, { SfpmMetadataPackage } from '../package/sfpm-package.js'; -import { VersionManager } from '../project/version-manager.js'; -import { ArtifactRepository } from './artifact-repository.js'; -import { NpmPackageJson } from '../types/npm.js'; -import { ArtifactError } from '../types/errors.js'; +import {execSync} from 'node:child_process'; +import crypto from 'node:crypto'; +import {EventEmitter} from 'node:events'; +import path from 'node:path'; + +import SfpmPackage, {SfpmDataPackage, SfpmMetadataPackage} from '../package/sfpm-package.js'; +import {VersionManager} from '../project/version-manager.js'; +import {ArtifactError} from '../types/errors.js'; +import {Logger} from '../types/logger.js'; +import {NpmPackageJson} from '../types/npm.js'; +import {ArtifactRepository} from './artifact-repository.js'; /** * Interface for providing changelogs. * Can be implemented later with Git or other providers. */ export interface ChangelogProvider { - generateChangelog(pkg: SfpmPackage, projectDirectory: string): Promise; + generateChangelog(pkg: SfpmPackage, projectDirectory: string): Promise; } /** * Stub implementation of the ChangelogProvider. 
*/ class StubChangelogProvider implements ChangelogProvider { - async generateChangelog(_pkg: SfpmPackage, _projectDirectory: string): Promise { - return { - message: 'Changelog generation is currently disabled.', - timestamp: Date.now(), - }; - } + async generateChangelog(_pkg: SfpmPackage, _projectDirectory: string): Promise { + return { + message: 'Changelog generation is currently disabled.', + timestamp: Date.now(), + }; + } } /** * Options for artifact assembly */ export interface ArtifactAssemblerOptions { - /** npm scope for the package (e.g., "@myorg") - required */ - npmScope: string; - /** Changelog provider for generating changelog.json */ - changelogProvider?: ChangelogProvider; - /** Additional keywords for package.json */ - additionalKeywords?: string[]; - /** Author string for package.json */ - author?: string; - /** License identifier for package.json */ - license?: string; - /** Homepage URL (e.g., AppExchange listing, project docs) */ - homepage?: string; - /** Pre-classified versioned dependencies (scoped npm name → semver range) */ - versionedDependencies?: Record; - /** Pre-classified managed dependencies (alias → packageVersionId 04t...) */ - managedDependencies?: Record; + /** Additional keywords for package.json */ + additionalKeywords?: string[]; + /** Author string for package.json */ + author?: string; + /** Changelog provider for generating changelog.json */ + changelogProvider?: ChangelogProvider; + /** Homepage URL (e.g., AppExchange listing, project docs) */ + homepage?: string; + /** License identifier for package.json */ + license?: string; + /** Pre-classified managed dependencies (alias → packageVersionId 04t...) */ + managedDependencies?: Record; + /** npm scope for the package (e.g., "@myorg") - required */ + npmScope: string; + /** Pre-classified versioned dependencies (scoped npm name → semver range) */ + versionedDependencies?: Record; } /** @@ -65,331 +66,336 @@ export interface ArtifactAssemblerOptions { * 7. 
Clean up staging directory */ export default class ArtifactAssembler extends EventEmitter { - private repository: ArtifactRepository; - private versionDirectory: string; - private packageVersionNumber: string; - private changelogProvider: ChangelogProvider; - private options: ArtifactAssemblerOptions; - - constructor( - private sfpmPackage: SfpmPackage, - private projectDirectory: string, - options: ArtifactAssemblerOptions, - private logger?: Logger, - ) { - super(); - this.options = options; - this.packageVersionNumber = VersionManager.normalizeVersion(sfpmPackage.version || '0.0.0.1'); - - // Create repository for artifact operations - this.repository = new ArtifactRepository(projectDirectory, logger); - - // artifacts// - this.versionDirectory = this.repository.getVersionPath(sfpmPackage.packageName, this.packageVersionNumber); - - this.changelogProvider = options.changelogProvider || new StubChangelogProvider(); + private changelogProvider: ChangelogProvider; + private options: ArtifactAssemblerOptions; + private packageVersionNumber: string; + private repository: ArtifactRepository; + private versionDirectory: string; + + constructor( + private sfpmPackage: SfpmPackage, + private projectDirectory: string, + options: ArtifactAssemblerOptions, + private logger?: Logger, + ) { + super(); + this.options = options; + this.packageVersionNumber = VersionManager.normalizeVersion(sfpmPackage.version || '0.0.0.1'); + + // Create repository for artifact operations + this.repository = new ArtifactRepository(projectDirectory, logger); + + // artifacts// + this.versionDirectory = this.repository.getVersionPath(sfpmPackage.packageName, this.packageVersionNumber); + + this.changelogProvider = options.changelogProvider || new StubChangelogProvider(); + } + + /** + * @description Orchestrates the artifact assembly process using npm pack. + * @returns {Promise} The path to the generated artifact.tgz. 
+ */ + public async assemble(): Promise { + const startTime = Date.now(); + try { + this.emitStart(); + + // 1. Calculate sourceHash from current package state + const currentSourceHash = await this.calculateSourceHash(); + + // 2. Prepare staging directory with source files + const stagingDir = await this.prepareStagingDirectory(); + + // 3. Generate package.json with sfpm metadata + await this.generatePackageJson(stagingDir); + + // 4. Generate changelog + await this.generateChangelog(stagingDir); + + // 5. Create an empty index.js (npm requires a main entry point) + await this.createStubEntryPoint(stagingDir); + + // 6. Run npm pack in staging directory + const tarballName = await this.runNpmPack(stagingDir); + + // 7. Move tarball to version directory + const artifactPath = await this.moveTarball(stagingDir, tarballName); + + // 8. Calculate artifact hash and finalize + const artifactHash = await this.finalizeArtifact(artifactPath, currentSourceHash); + + // 9. Cleanup staging directory + await fs.remove(stagingDir); + + this.emitComplete(artifactPath, currentSourceHash, artifactHash, startTime); + return artifactPath; + } catch (error: any) { + this.emitError(error); + throw new ArtifactError(this.sfpmPackage.packageName, 'assembly', 'Failed to assemble artifact', { + cause: error instanceof Error ? error : new Error(String(error)), + version: this.packageVersionNumber, + }); } - - /** - * @description Orchestrates the artifact assembly process using npm pack. - * @returns {Promise} The path to the generated artifact.tgz. - */ - public async assemble(): Promise { - const startTime = Date.now(); - try { - this.emitStart(); - - // 1. Calculate sourceHash from current package state - const currentSourceHash = await this.calculateSourceHash(); - - // 2. Prepare staging directory with source files - const stagingDir = await this.prepareStagingDirectory(); - - // 3. Generate package.json with sfpm metadata - await this.generatePackageJson(stagingDir); - - // 4. 
Generate changelog - await this.generateChangelog(stagingDir); - - // 5. Create an empty index.js (npm requires a main entry point) - await this.createStubEntryPoint(stagingDir); - - // 6. Run npm pack in staging directory - const tarballName = await this.runNpmPack(stagingDir); - - // 7. Move tarball to version directory - const artifactPath = await this.moveTarball(stagingDir, tarballName); - - // 8. Calculate artifact hash and finalize - const artifactHash = await this.finalizeArtifact(artifactPath, currentSourceHash); - - // 9. Cleanup staging directory - await fs.remove(stagingDir); - - this.emitComplete(artifactPath, currentSourceHash, artifactHash, startTime); - return artifactPath; - } catch (error: any) { - this.emitError(error); - throw new ArtifactError(this.sfpmPackage.packageName, 'assembly', 'Failed to assemble artifact', { - version: this.packageVersionNumber, - cause: error instanceof Error ? error : new Error(String(error)), - }); - } + } + + /** + * Get or calculate the source hash for the package. + * Prefers the package's existing sourceHash if already set. + * For metadata packages, calculates and sets the hash on the package. + */ + private async calculateSourceHash(): Promise { + // If sourceHash is already set on the package, use it + if (this.sfpmPackage.sourceHash) { + this.logger?.debug(`Using existing source hash: ${this.sfpmPackage.sourceHash}`); + return this.sfpmPackage.sourceHash; } - /** - * Prepare staging directory with source files. - * Uses the package's staging directory from PackageAssembler. 
- */ - private async prepareStagingDirectory(): Promise { - if (this.sfpmPackage.stagingDirectory) { - this.logger?.debug(`Using staging directory: ${this.sfpmPackage.stagingDirectory}`); - - // Cleanup noise from staging directory - const noise = ['.sfpm', '.sfdx', 'node_modules']; - for (const dir of noise) { - const noiseDir = path.join(this.sfpmPackage.stagingDirectory, dir); - if (await fs.pathExists(noiseDir)) { - await fs.remove(noiseDir); - } - } - - return this.sfpmPackage.stagingDirectory; - } - - throw new ArtifactError( - this.sfpmPackage.packageName, - 'assembly', - 'No staging directory available - package must be staged before assembly', - { version: this.packageVersionNumber }, - ); + let hash: string; + if (this.sfpmPackage instanceof SfpmMetadataPackage) { + // Calculate and set the hash on the package + hash = await this.sfpmPackage.calculateSourceHash(); + } else if (this.sfpmPackage instanceof SfpmDataPackage) { + // Data packages: deterministic hash of all files in the data directory + hash = await this.sfpmPackage.calculateSourceHash(); + } else { + // For non-metadata packages, use a simple timestamp-based hash + hash = crypto.createHash('sha256').update(Date.now().toString()).digest('hex'); + this.sfpmPackage.sourceHash = hash; } - /** - * Generate package.json in the staging directory. - * Constructs the full npm package.json with sfpm metadata from the package. - */ - private async generatePackageJson(stagingDir: string): Promise { - const { npmScope, additionalKeywords, author, license, homepage } = this.options; - const pkg = this.sfpmPackage; - - // Get sfpm metadata from the package and strip empty properties - const sfpmMeta = removeEmptyValues(await pkg.toJson()); - - // Use pre-classified dependency maps (resolved by caller via ProjectConfig) - const optionalDependencies = this.options.versionedDependencies ?? {}; - const managedDependencies = this.options.managedDependencies ?? 
{}; - - // Build keywords - const keywords = ['sfpm', 'salesforce', String(pkg.type), ...(additionalKeywords || [])]; - - // Get the package source path (e.g., "force-app", "src", etc.) - const packageSourcePath = pkg.packageDefinition?.path || 'force-app'; - - // Construct package.json - const packageJson: NpmPackageJson = { - name: `${npmScope}/${pkg.packageName}`, - version: this.packageVersionNumber, - description: pkg.packageDefinition?.versionDescription || `SFPM ${pkg.type} package: ${pkg.packageName}`, - main: 'index.js', - keywords, - license: license || 'UNLICENSED', - files: [ - `${packageSourcePath}/**`, - 'scripts/**', - 'manifest/**', - 'config/**', - 'sfdx-project.json', - '.forceignore', - 'changelog.json', - ], - sfpm: sfpmMeta, - }; - - // Add optional fields - if (author) { - packageJson.author = author; - } - - if (homepage) { - packageJson.homepage = homepage; - } - - if (Object.keys(optionalDependencies).length > 0) { - packageJson.optionalDependencies = optionalDependencies; - } - - if (Object.keys(managedDependencies).length > 0) { - packageJson.managedDependencies = managedDependencies; - } - - // Add repository if available - if (pkg.metadata?.source?.repositoryUrl) { - packageJson.repository = { - type: 'git', - url: pkg.metadata.source.repositoryUrl, - }; - } - - // Write package.json - const packageJsonPath = path.join(stagingDir, 'package.json'); - await fs.writeJson(packageJsonPath, packageJson, { spaces: 2 }); - this.logger?.debug(`Generated package.json at ${packageJsonPath}`); + this.logger?.debug(`Calculated source hash: ${hash}`); + return hash; + } + + /** + * Create a stub index.js file (npm pack requires main entry point). 
+ */ + private async createStubEntryPoint(stagingDir: string): Promise { + const indexPath = path.join(stagingDir, 'index.js'); + await fs.writeFile(indexPath, '// SFPM Package - See sfpm metadata in package.json\n'); + } + + private emitComplete(artifactPath: string, sourceHash: string, artifactHash: string, startTime: number): void { + this.logger?.info(`Artifact successfully stored at ${artifactPath}`); + this.emit('assembly:complete', { + artifactHash, + artifactPath, + duration: Date.now() - startTime, + packageName: this.sfpmPackage.packageName, + sourceHash, + timestamp: new Date(), + version: this.packageVersionNumber, + }); + } + + private emitError(error: any): void { + this.logger?.error(`Failed to assemble artifact: ${error.message}`); + this.emit('assembly:error', { + error: error instanceof Error ? error : new Error(String(error)), + packageName: this.sfpmPackage.packageName, + timestamp: new Date(), + version: this.packageVersionNumber, + }); + } + + private emitStart(): void { + this.logger?.info(`Assembling artifact for ${this.sfpmPackage.packageName}@${this.packageVersionNumber}`); + this.emit('assembly:start', { + packageName: this.sfpmPackage.packageName, + timestamp: new Date(), + version: this.packageVersionNumber, + }); + } + + /** + * Calculate artifact hash and update manifest. + */ + private async finalizeArtifact(artifactPath: string, sourceHash: string): Promise { + const artifactHash = await this.repository.calculateFileHash(artifactPath); + this.logger?.debug(`Artifact hash: ${artifactHash}`); + + await this.repository.finalizeArtifact(this.sfpmPackage.packageName, this.packageVersionNumber, { + artifactHash, + commit: this.sfpmPackage.commitId, + generatedAt: Date.now(), + path: this.repository.getRelativeArtifactPath(this.sfpmPackage.packageName, this.packageVersionNumber), + sourceHash, + }); + + return artifactHash; + } + + /** + * Generate changelog.json in the staging directory. 
+ */ + private async generateChangelog(stagingDir: string): Promise { + const changelog = await this.changelogProvider.generateChangelog(this.sfpmPackage, this.projectDirectory); + const changelogPath = path.join(stagingDir, 'changelog.json'); + await fs.writeJson(changelogPath, changelog, {spaces: 4}); + } + + /** + * Generate package.json in the staging directory. + * Constructs the full npm package.json with sfpm metadata from the package. + */ + private async generatePackageJson(stagingDir: string): Promise { + const {additionalKeywords, author, homepage, license, npmScope} = this.options; + const pkg = this.sfpmPackage; + + // Get sfpm metadata from the package and strip empty properties + const sfpmMeta = removeEmptyValues(await pkg.toJson()); + + // Use pre-classified dependency maps (resolved by caller via ProjectConfig) + const optionalDependencies = this.options.versionedDependencies ?? {}; + const managedDependencies = this.options.managedDependencies ?? {}; + + // Build keywords + const keywords = ['sfpm', 'salesforce', String(pkg.type), ...(additionalKeywords || [])]; + + // Get the package source path (e.g., "force-app", "src", etc.) + const packageSourcePath = pkg.packageDefinition?.path || 'force-app'; + + // Construct package.json + const packageJson: NpmPackageJson = { + description: pkg.packageDefinition?.versionDescription || `SFPM ${pkg.type} package: ${pkg.packageName}`, + files: [ + `${packageSourcePath}/**`, + 'scripts/**', + 'manifest/**', + 'config/**', + 'sfdx-project.json', + '.forceignore', + 'changelog.json', + ], + keywords, + license: license || 'UNLICENSED', + main: 'index.js', + name: `${npmScope}/${pkg.packageName}`, + sfpm: sfpmMeta, + version: this.packageVersionNumber, + }; + + // Add optional fields + if (author) { + packageJson.author = author; } - /** - * Generate changelog.json in the staging directory. 
- */ - private async generateChangelog(stagingDir: string): Promise { - const changelog = await this.changelogProvider.generateChangelog(this.sfpmPackage, this.projectDirectory); - const changelogPath = path.join(stagingDir, 'changelog.json'); - await fs.writeJson(changelogPath, changelog, { spaces: 4 }); + if (homepage) { + packageJson.homepage = homepage; } - /** - * Create a stub index.js file (npm pack requires main entry point). - */ - private async createStubEntryPoint(stagingDir: string): Promise { - const indexPath = path.join(stagingDir, 'index.js'); - await fs.writeFile(indexPath, '// SFPM Package - See sfpm metadata in package.json\n'); + if (Object.keys(optionalDependencies).length > 0) { + packageJson.optionalDependencies = optionalDependencies; } - /** - * Run npm pack in the staging directory. - * @returns The name of the generated tarball file. - */ - private async runNpmPack(stagingDir: string): Promise { - this.logger?.debug(`Running npm pack in ${stagingDir}`); - - try { - // npm pack outputs the filename of the created tarball - const output = execSync('npm pack', { - cwd: stagingDir, - encoding: 'utf-8', - timeout: 60000, - }).trim(); - - // The output is the tarball filename (e.g., "myorg-my-package-1.0.0-1.tgz") - const tarballName = output.split('\n').pop()?.trim(); - - if (!tarballName || !tarballName.endsWith('.tgz')) { - throw new Error(`Unexpected npm pack output: ${output}`); - } - - this.logger?.debug(`npm pack created: ${tarballName}`); - - this.emit('assembly:pack', { - timestamp: new Date(), - packageName: this.sfpmPackage.packageName, - tarballName, - }); - - return tarballName; - } catch (error) { - throw new ArtifactError(this.sfpmPackage.packageName, 'pack', 'npm pack failed', { - version: this.packageVersionNumber, - context: { stagingDir }, - cause: error instanceof Error ? 
error : new Error(String(error)), - }); - } + if (Object.keys(managedDependencies).length > 0) { + packageJson.managedDependencies = managedDependencies; } - /** - * Move the tarball from staging to the version directory. - */ - private async moveTarball(stagingDir: string, tarballName: string): Promise { - const sourcePath = path.join(stagingDir, tarballName); - const targetPath = this.repository.getArtifactPath(this.sfpmPackage.packageName, this.packageVersionNumber); - - // Ensure version directory exists - await fs.ensureDir(path.dirname(targetPath)); - - // Move the tarball - await fs.move(sourcePath, targetPath, { overwrite: true }); - this.logger?.debug(`Moved tarball to ${targetPath}`); - - return targetPath; + // Add repository if available + if (pkg.metadata?.source?.repositoryUrl) { + packageJson.repository = { + type: 'git', + url: pkg.metadata.source.repositoryUrl, + }; } - /** - * Get or calculate the source hash for the package. - * Prefers the package's existing sourceHash if already set. - * For metadata packages, calculates and sets the hash on the package. 
- */ - private async calculateSourceHash(): Promise { - // If sourceHash is already set on the package, use it - if (this.sfpmPackage.sourceHash) { - this.logger?.debug(`Using existing source hash: ${this.sfpmPackage.sourceHash}`); - return this.sfpmPackage.sourceHash; - } - - let hash: string; - if (this.sfpmPackage instanceof SfpmMetadataPackage) { - // Calculate and set the hash on the package - hash = await this.sfpmPackage.calculateSourceHash(); - } else { - // For non-metadata packages, use a simple timestamp-based hash - hash = crypto.createHash('sha256').update(Date.now().toString()).digest('hex'); - this.sfpmPackage.sourceHash = hash; + // Write package.json + const packageJsonPath = path.join(stagingDir, 'package.json'); + await fs.writeJson(packageJsonPath, packageJson, {spaces: 2}); + this.logger?.debug(`Generated package.json at ${packageJsonPath}`); + } + + // ========================================================================= + // Event Emission Helpers + // ========================================================================= + + /** + * Move the tarball from staging to the version directory. + */ + private async moveTarball(stagingDir: string, tarballName: string): Promise { + const sourcePath = path.join(stagingDir, tarballName); + const targetPath = this.repository.getArtifactPath(this.sfpmPackage.packageName, this.packageVersionNumber); + + // Ensure version directory exists + await fs.ensureDir(path.dirname(targetPath)); + + // Move the tarball + await fs.move(sourcePath, targetPath, {overwrite: true}); + this.logger?.debug(`Moved tarball to ${targetPath}`); + + return targetPath; + } + + /** + * Prepare staging directory with source files. + * Uses the package's staging directory from PackageAssembler. 
+ */ + private async prepareStagingDirectory(): Promise { + if (this.sfpmPackage.stagingDirectory) { + this.logger?.debug(`Using staging directory: ${this.sfpmPackage.stagingDirectory}`); + + // Cleanup noise from staging directory + const noise = ['.sfpm', '.sfdx', 'node_modules']; + for (const dir of noise) { + const noiseDir = path.join(this.sfpmPackage.stagingDirectory, dir); + // eslint-disable-next-line no-await-in-loop + if (await fs.pathExists(noiseDir)) { + // eslint-disable-next-line no-await-in-loop + await fs.remove(noiseDir); } + } - this.logger?.debug(`Calculated source hash: ${hash}`); - return hash; - } - - /** - * Calculate artifact hash and update manifest. - */ - private async finalizeArtifact(artifactPath: string, sourceHash: string): Promise { - const artifactHash = await this.repository.calculateFileHash(artifactPath); - this.logger?.debug(`Artifact hash: ${artifactHash}`); - - await this.repository.finalizeArtifact(this.sfpmPackage.packageName, this.packageVersionNumber, { - path: this.repository.getRelativeArtifactPath(this.sfpmPackage.packageName, this.packageVersionNumber), - sourceHash, - artifactHash, - generatedAt: Date.now(), - commit: this.sfpmPackage.commitId, - }); - - return artifactHash; + return this.sfpmPackage.stagingDirectory; } - // ========================================================================= - // Event Emission Helpers - // ========================================================================= - - private emitStart(): void { - this.logger?.info(`Assembling artifact for ${this.sfpmPackage.packageName}@${this.packageVersionNumber}`); - this.emit('assembly:start', { - timestamp: new Date(), - packageName: this.sfpmPackage.packageName, - version: this.packageVersionNumber, - }); - } - - private emitComplete(artifactPath: string, sourceHash: string, artifactHash: string, startTime: number): void { - this.logger?.info(`Artifact successfully stored at ${artifactPath}`); - this.emit('assembly:complete', { - 
timestamp: new Date(), - packageName: this.sfpmPackage.packageName, - version: this.packageVersionNumber, - artifactPath, - sourceHash, - artifactHash, - duration: Date.now() - startTime, - }); - } - - private emitError(error: any): void { - this.logger?.error(`Failed to assemble artifact: ${error.message}`); - this.emit('assembly:error', { - timestamp: new Date(), - packageName: this.sfpmPackage.packageName, - version: this.packageVersionNumber, - error: error instanceof Error ? error : new Error(String(error)), - }); + throw new ArtifactError( + this.sfpmPackage.packageName, + 'assembly', + 'No staging directory available - package must be staged before assembly', + {version: this.packageVersionNumber}, + ); + } + + /** + * Run npm pack in the staging directory. + * @returns The name of the generated tarball file. + */ + private async runNpmPack(stagingDir: string): Promise { + this.logger?.debug(`Running npm pack in ${stagingDir}`); + + try { + // npm pack outputs the filename of the created tarball + const output = execSync('npm pack', { + cwd: stagingDir, + encoding: 'utf8', + timeout: 60_000, + }).trim(); + + // The output is the tarball filename (e.g., "myorg-my-package-1.0.0-1.tgz") + const tarballName = output.split('\n').pop()?.trim(); + + if (!tarballName || !tarballName.endsWith('.tgz')) { + throw new Error(`Unexpected npm pack output: ${output}`); + } + + this.logger?.debug(`npm pack created: ${tarballName}`); + + this.emit('assembly:pack', { + packageName: this.sfpmPackage.packageName, + tarballName, + timestamp: new Date(), + }); + + return tarballName; + } catch (error) { + throw new ArtifactError(this.sfpmPackage.packageName, 'pack', 'npm pack failed', { + cause: error instanceof Error ? 
error : new Error(String(error)), + context: {stagingDir}, + version: this.packageVersionNumber, + }); } + } } /** @@ -398,36 +404,38 @@ export default class ArtifactAssembler extends EventEmitter { * Preserves: non-empty values, booleans, numbers (including 0), and non-empty strings. */ function removeEmptyValues(obj: T): T { - if (obj === null || obj === undefined) { - return obj; - } + if (obj === null || obj === undefined) { + return obj; + } - if (Array.isArray(obj)) { - return obj.length === 0 ? undefined as unknown as T : obj; - } + if (Array.isArray(obj)) { + return obj.length === 0 ? undefined as unknown as T : obj; + } - if (typeof obj === 'object') { - const cleaned: Record = {}; + if (typeof obj === 'object') { + const cleaned: Record = {}; - for (const [key, value] of Object.entries(obj as Record)) { - const cleanedValue = removeEmptyValues(value); + for (const [key, value] of Object.entries(obj as Record)) { + const cleanedValue = removeEmptyValues(value); - // Skip undefined, null, empty arrays, and empty objects - if (cleanedValue === undefined || cleanedValue === null) { - continue; - } - if (Array.isArray(cleanedValue) && cleanedValue.length === 0) { - continue; - } - if (typeof cleanedValue === 'object' && !Array.isArray(cleanedValue) && Object.keys(cleanedValue).length === 0) { - continue; - } + // Skip undefined, null, empty arrays, and empty objects + if (cleanedValue === undefined || cleanedValue === null) { + continue; + } - cleaned[key] = cleanedValue; - } + if (Array.isArray(cleanedValue) && cleanedValue.length === 0) { + continue; + } + + if (typeof cleanedValue === 'object' && !Array.isArray(cleanedValue) && Object.keys(cleanedValue).length === 0) { + continue; + } - return (Object.keys(cleaned).length === 0 ? {} : cleaned) as T; + cleaned[key] = cleanedValue; } - return obj; + return (Object.keys(cleaned).length === 0 ? 
{} : cleaned) as T; + } + + return obj; } diff --git a/packages/core/src/artifacts/artifact-repository.ts b/packages/core/src/artifacts/artifact-repository.ts index da59b64..65e84b1 100644 --- a/packages/core/src/artifacts/artifact-repository.ts +++ b/packages/core/src/artifacts/artifact-repository.ts @@ -1,12 +1,13 @@ -import path from 'path'; import fs from 'fs-extra'; -import crypto from 'crypto'; -import { execSync } from 'child_process'; -import { Logger } from '../types/logger.js'; -import { ArtifactManifest, ArtifactVersionEntry } from '../types/artifact.js'; -import { SfpmPackageMetadata } from '../types/package.js'; -import { NpmPackageJson } from '../types/npm.js'; -import { ArtifactError } from '../types/errors.js'; +import {execSync} from 'node:child_process'; +import crypto from 'node:crypto'; +import path from 'node:path'; + +import {ArtifactManifest, ArtifactVersionEntry} from '../types/artifact.js'; +import {ArtifactError} from '../types/errors.js'; +import {Logger} from '../types/logger.js'; +import {NpmPackageJson} from '../types/npm.js'; +import {SfpmPackageMetadataBase} from '../types/package.js'; /** * The hidden folder for SFPM configuration and temporary files @@ -15,576 +16,572 @@ const DOT_FOLDER = '.sfpm'; /** * ArtifactRepository handles all filesystem operations for local artifact storage. 
- * + * * Responsibilities: * - Reading and writing artifact manifests * - Reading artifact metadata from zip files * - Calculating file and source hashes * - Managing 'latest' symlinks * - Path resolution for artifacts - * + * * This class provides the low-level storage abstraction used by: * - ArtifactAssembler (for writing) * - ArtifactResolver (for reading and remote localization) */ export class ArtifactRepository { - private logger?: Logger; - private projectDirectory: string; - private artifactsRootDir: string; - - constructor(projectDirectory: string, logger?: Logger) { - this.logger = logger; - this.projectDirectory = projectDirectory; - this.artifactsRootDir = path.join(projectDirectory, 'artifacts'); - } - - /** - * Get the project directory - */ - public getProjectDirectory(): string { - return this.projectDirectory; - } - - /** - * Get the root directory for all artifacts - */ - public getArtifactsRoot(): string { - return this.artifactsRootDir; - } - - /** - * Get the path to a package's artifact directory - */ - public getPackageArtifactPath(packageName: string): string { - return path.join(this.artifactsRootDir, packageName); - } - - /** - * Get the path to a specific version's directory - */ - public getVersionPath(packageName: string, version: string): string { - return path.join(this.getPackageArtifactPath(packageName), version); - } - - /** - * Get the absolute path to the artifact file - */ - public getArtifactPath(packageName: string, version: string): string { - return path.join(this.getVersionPath(packageName, version), 'artifact.tgz'); - } - - /** - * Get the relative path to the artifact file (for storage in manifest) - */ - public getRelativeArtifactPath(packageName: string, version: string): string { - return `${packageName}/${version}/artifact.tgz`; - } - - /** - * Get the path to the manifest file for a package - */ - private getManifestPath(packageName: string): string { - return path.join(this.getPackageArtifactPath(packageName), 
'manifest.json'); - } - - /** - * Create a unique temporary directory for downloads/extraction. - * Pattern: .sfpm/tmp/downloads/[timestamp]-[packageName]-[hash] - */ - private async createTempDir(packageName: string): Promise { - const timestamp = new Date().toISOString() - .replace(/T/, '-') - .replace(/\..+/, '') - .replace(/[:-]/g, ''); - const hash = crypto.randomBytes(4).toString('hex'); - const tempDirName = `${timestamp}-${packageName}-${hash}`; - const tempDir = path.join(this.projectDirectory, DOT_FOLDER, 'tmp', 'downloads', tempDirName); - await fs.ensureDir(tempDir); - return tempDir; - } - - // ========================================================================= - // Existence Checks - // ========================================================================= - - /** - * Check if any local artifacts exist for a package - */ - public hasArtifacts(packageName: string): boolean { - const manifestPath = this.getManifestPath(packageName); - return fs.existsSync(manifestPath); - } - - /** - * Check if a specific version exists locally - */ - private hasVersion(packageName: string, version: string): boolean { - const manifest = this.getManifestSync(packageName); - return manifest?.versions[version] !== undefined; - } - - /** - * Check if an artifact exists for a version - */ - private artifactExists(packageName: string, version: string): boolean { - const tgzPath = this.getArtifactPath(packageName, version); - return fs.existsSync(tgzPath); - } - - // ========================================================================= - // Manifest Operations - // ========================================================================= - - /** - * Load the manifest for a package (async) - */ - public async getManifest(packageName: string): Promise { - const manifestPath = this.getManifestPath(packageName); - - try { - if (await fs.pathExists(manifestPath)) { - return await fs.readJson(manifestPath); - } - } catch (error) { - this.logger?.warn(`Failed to load 
manifest for ${packageName}: ${error instanceof Error ? error.message : String(error)}`); - } - + private artifactsRootDir: string; + private logger?: Logger; + private projectDirectory: string; + + constructor(projectDirectory: string, logger?: Logger) { + this.logger = logger; + this.projectDirectory = projectDirectory; + this.artifactsRootDir = path.join(projectDirectory, 'artifacts'); + } + + /** + * Calculate SHA-256 hash of a file + */ + public async calculateFileHash(filePath: string): Promise { + return new Promise((resolve, reject) => { + const hash = crypto.createHash('sha256'); + const stream = fs.createReadStream(filePath); + + stream.on('data', data => hash.update(data)); + stream.on('end', () => resolve(hash.digest('hex'))); + stream.on('error', reject); + }); + } + + /** + * Ensure version directory exists + */ + public async ensureVersionDir(packageName: string, version: string): Promise { + const versionPath = this.getVersionPath(packageName, version); + await fs.ensureDir(versionPath); + return versionPath; + } + + /** + * Extract packageVersionId from artifact metadata + */ + public extractPackageVersionId(packageName: string, version?: string): string | undefined { + const metadata = this.getMetadata(packageName, version); + if (!metadata?.identity) { + return undefined; + } + + // Check for unlocked package identity with versionId + const identity = metadata.identity as any; + return identity.packageVersionId; + } + + /** + * Finalize an artifact by updating the manifest and symlink. + * + * This is a convenience method that combines: + * 1. Adding/updating the version entry in manifest + * 2. 
Updating the latest symlink + * + * @param packageName - Name of the package + * @param version - Version being finalized + * @param entry - Version entry data for the manifest + */ + public async finalizeArtifact( + packageName: string, + version: string, + entry: ArtifactVersionEntry, + ): Promise { + await this.addVersionEntry(packageName, version, entry, true); + await this.updateLatestSymlink(packageName, version); + } + + /** + * Get comprehensive artifact info for a package + */ + public getArtifactInfo( + packageName: string, + version?: string, + ): { + manifest?: ArtifactManifest; + metadata?: SfpmPackageMetadataBase; + version?: string; + versionInfo?: ArtifactVersionEntry; + } { + const manifest = this.getManifestSync(packageName); + + if (!manifest) { + return {}; + } + + const targetVersion = version || manifest.latest; + const versionInfo = targetVersion ? manifest.versions[targetVersion] : undefined; + const metadata = this.getMetadata(packageName, targetVersion); + + return { + manifest, + metadata, + version: targetVersion, + versionInfo, + }; + } + + /** + * Get the absolute path to the artifact file + */ + public getArtifactPath(packageName: string, version: string): string { + return path.join(this.getVersionPath(packageName, version), 'artifact.tgz'); + } + + /** + * Get the root directory for all artifacts + */ + public getArtifactsRoot(): string { + return this.artifactsRootDir; + } + + /** + * Get the latest version from a package's manifest + */ + public getLatestVersion(packageName: string): string | undefined { + const manifest = this.getManifestSync(packageName); + return manifest?.latest; + } + + // ========================================================================= + // Existence Checks + // ========================================================================= + + /** + * Load the manifest for a package (async) + */ + public async getManifest(packageName: string): Promise { + const manifestPath = 
this.getManifestPath(packageName); + + try { + if (await fs.pathExists(manifestPath)) { + return await fs.readJson(manifestPath); + } + } catch (error) { + this.logger?.warn(`Failed to load manifest for ${packageName}: ${error instanceof Error ? error.message : String(error)}`); + } + + return undefined; + } + + /** + * Load the manifest for a package (sync) + */ + public getManifestSync(packageName: string): ArtifactManifest | undefined { + const manifestPath = this.getManifestPath(packageName); + + try { + if (fs.existsSync(manifestPath)) { + return fs.readJsonSync(manifestPath); + } + } catch (error) { + this.logger?.warn(`Failed to load manifest for ${packageName}: ${error instanceof Error ? error.message : String(error)}`); + } + + return undefined; + } + + /** + * Read artifact metadata from a specific version. + * Reads the sfpm property from package.json inside the tarball. + */ + public getMetadata(packageName: string, version?: string): SfpmPackageMetadataBase | undefined { + try { + const manifest = this.getManifestSync(packageName); + if (!manifest) { return undefined; - } - - /** - * Load the manifest for a package (sync) - */ - public getManifestSync(packageName: string): ArtifactManifest | undefined { - const manifestPath = this.getManifestPath(packageName); - - try { - if (fs.existsSync(manifestPath)) { - return fs.readJsonSync(manifestPath); - } - } catch (error) { - this.logger?.warn(`Failed to load manifest for ${packageName}: ${error instanceof Error ? 
error.message : String(error)}`); - } + } + const targetVersion = version || manifest.latest; + if (!targetVersion) { + this.logger?.warn(`No version specified and no latest version in manifest for ${packageName}`); return undefined; - } - - /** - * Save the manifest for a package (atomic write) - */ - private async saveManifest(packageName: string, manifest: ArtifactManifest): Promise { - const manifestPath = this.getManifestPath(packageName); - const tempPath = `${manifestPath}.tmp`; + } - await fs.ensureDir(path.dirname(manifestPath)); - - // Atomic write: write to temp file first, then rename - await fs.writeJson(tempPath, manifest, { spaces: 4 }); - await fs.move(tempPath, manifestPath, { overwrite: true }); - } - - /** - * Get the latest version from a package's manifest - */ - public getLatestVersion(packageName: string): string | undefined { - const manifest = this.getManifestSync(packageName); - return manifest?.latest; - } - - /** - * Get all local versions for a package - */ - private getVersions(packageName: string): string[] { - const manifest = this.getManifestSync(packageName); - return manifest ? 
Object.keys(manifest.versions) : []; - } - - /** - * Get version entry from manifest - */ - private getVersionEntry(packageName: string, version: string): ArtifactVersionEntry | undefined { - const manifest = this.getManifestSync(packageName); - return manifest?.versions[version]; - } - - /** - * Add or update a version entry in the manifest - */ - private async addVersionEntry( - packageName: string, - version: string, - entry: ArtifactVersionEntry, - updateLatest: boolean = true - ): Promise { - let manifest = await this.getManifest(packageName); - - if (!manifest) { - manifest = { - name: packageName, - latest: version, - versions: {}, - }; - } - - manifest.versions[version] = entry; - - if (updateLatest) { - manifest.latest = version; - } - - await this.saveManifest(packageName, manifest); - } - - /** - * Update lastCheckedRemote timestamp in manifest - */ - public async updateLastCheckedRemote(packageName: string): Promise { - const manifest = await this.getManifest(packageName); - if (manifest) { - manifest.lastCheckedRemote = Date.now(); - await this.saveManifest(packageName, manifest); - } - } - - // ========================================================================= - // Metadata Operations - // ========================================================================= - - /** - * Read artifact metadata from a specific version. - * Reads the sfpm property from package.json inside the tarball. 
- */ - public getMetadata(packageName: string, version?: string): SfpmPackageMetadata | undefined { - try { - const manifest = this.getManifestSync(packageName); - if (!manifest) { - return undefined; - } - - const targetVersion = version || manifest.latest; - if (!targetVersion) { - this.logger?.warn(`No version specified and no latest version in manifest for ${packageName}`); - return undefined; - } - - // Check if version exists in manifest - if (!manifest.versions[targetVersion]) { - this.logger?.warn(`Version ${targetVersion} not found in manifest for ${packageName}`); - return undefined; - } - - const tgzPath = this.getArtifactPath(packageName, targetVersion); - return this.extractMetadataFromTarball(tgzPath); - } catch (error) { - this.logger?.warn(`Failed to read artifact metadata: ${error instanceof Error ? error.message : String(error)}`); - return undefined; - } - } - - /** - * Extract metadata from a tarball (npm package format). - * Reads the sfpm property from package.json and converts to SfpmPackageMetadata. - */ - private extractMetadataFromTarball(tarballPath: string): SfpmPackageMetadata | undefined { - try { - if (!fs.existsSync(tarballPath)) { - this.logger?.debug(`No artifact.tgz found at ${tarballPath}`); - return undefined; - } - - const packageJson = this.extractPackageJsonFromTarball(tarballPath); - if (!packageJson?.sfpm) { - this.logger?.debug(`No sfpm metadata found in package.json inside ${tarballPath}`); - return undefined; - } - - // Convert NpmPackageSfpmMetadata to SfpmPackageMetadata - return this.convertNpmMetadataToSfpm(packageJson); - } catch (error) { - this.logger?.debug(`Failed to extract metadata from tarball ${tarballPath}: ${error instanceof Error ? 
error.message : String(error)}`); - return undefined; - } - } - - /** - * Extract package.json from a tarball - */ - private extractPackageJsonFromTarball(tarballPath: string): NpmPackageJson | undefined { - try { - // Extract package.json content from tarball without fully extracting - const packageJsonContent = execSync( - `tar -xOzf "${tarballPath}" package/package.json`, - { encoding: 'utf-8', timeout: 30000 } - ); - return JSON.parse(packageJsonContent); - } catch (error) { - this.logger?.debug(`Failed to extract package.json from ${tarballPath}: ${error instanceof Error ? error.message : String(error)}`); - return undefined; - } - } - - /** - * Convert npm package.json with sfpm metadata to SfpmPackageMetadata - */ - private convertNpmMetadataToSfpm(packageJson: NpmPackageJson): SfpmPackageMetadata { - const sfpm = packageJson.sfpm; - - // Parse name to get package name (remove scope) - const packageName = packageJson.name.includes('/') - ? packageJson.name.split('/')[1] - : packageJson.name; - - // If full metadata is embedded, use it directly - if (sfpm.metadata) { - return sfpm.metadata; - } - - // Otherwise, reconstruct from sfpm properties - return { - identity: { - packageName, - packageType: sfpm.packageType as any, - versionNumber: packageJson.version, - apiVersion: sfpm.apiVersion, - ...(sfpm.packageId && { packageId: sfpm.packageId }), - ...(sfpm.packageVersionId && { packageVersionId: sfpm.packageVersionId }), - ...(sfpm.isOrgDependent !== undefined && { isOrgDependent: sfpm.isOrgDependent }), - }, - source: { - commitSHA: sfpm.commitId, - }, - content: {}, - validation: {}, - orchestration: {}, - } as SfpmPackageMetadata; - } - - /** - * Extract packageVersionId from artifact metadata - */ - public extractPackageVersionId(packageName: string, version?: string): string | undefined { - const metadata = this.getMetadata(packageName, version); - if (!metadata?.identity) { - return undefined; - } - - // Check for unlocked package identity with 
versionId - const identity = metadata.identity as any; - return identity.packageVersionId; - } - - /** - * Get comprehensive artifact info for a package - */ - public getArtifactInfo( - packageName: string, - version?: string - ): { - version?: string; - manifest?: ArtifactManifest; - metadata?: SfpmPackageMetadata; - versionInfo?: ArtifactVersionEntry; - } { - const manifest = this.getManifestSync(packageName); - - if (!manifest) { - return {}; - } - - const targetVersion = version || manifest.latest; - const versionInfo = targetVersion ? manifest.versions[targetVersion] : undefined; - const metadata = this.getMetadata(packageName, targetVersion); - - return { - version: targetVersion, - manifest, - metadata, - versionInfo, - }; - } - - // ========================================================================= - // Hash Calculation - // ========================================================================= - - /** - * Calculate SHA-256 hash of a file - */ - public async calculateFileHash(filePath: string): Promise { - return new Promise((resolve, reject) => { - const hash = crypto.createHash('sha256'); - const stream = fs.createReadStream(filePath); - - stream.on('data', (data) => hash.update(data)); - stream.on('end', () => resolve(hash.digest('hex'))); - stream.on('error', reject); - }); - } - - /** - * Calculate SHA-256 hash of a file (sync) - */ - private calculateFileHashSync(filePath: string): string { - const content = fs.readFileSync(filePath); - return crypto.createHash('sha256').update(content).digest('hex'); - } - - // ========================================================================= - // Symlink Management - // ========================================================================= - - /** - * Update the 'latest' symlink to point to a version directory - */ - private async updateLatestSymlink(packageName: string, version: string): Promise { - const packageArtifactRoot = this.getPackageArtifactPath(packageName); - const symlinkPath = 
path.join(packageArtifactRoot, 'latest'); - - try { - // Remove existing symlink if present - if (await fs.pathExists(symlinkPath)) { - await fs.remove(symlinkPath); - } - - // Create relative symlink (version directory name is relative to package root) - // Use 'junction' for Windows compatibility - await fs.symlink(version, symlinkPath, 'junction'); - } catch (error) { - // Symlinks might fail on some systems (Windows without admin) - this.logger?.warn(`Symlink failed: ${error instanceof Error ? error.message : String(error)}. Falling back to latest.version identifier.`); - - // Fallback: write version to a file - const versionFilePath = path.join(packageArtifactRoot, 'latest.version'); - await fs.writeFile(versionFilePath, version); - } - } - - // ========================================================================= - // Artifact Finalization - // ========================================================================= - - /** - * Finalize an artifact by updating the manifest and symlink. - * - * This is a convenience method that combines: - * 1. Adding/updating the version entry in manifest - * 2. 
Updating the latest symlink - * - * @param packageName - Name of the package - * @param version - Version being finalized - * @param entry - Version entry data for the manifest - */ - public async finalizeArtifact( - packageName: string, - version: string, - entry: ArtifactVersionEntry - ): Promise { - await this.addVersionEntry(packageName, version, entry, true); - await this.updateLatestSymlink(packageName, version); - } - - // ========================================================================= - // Directory Management - // ========================================================================= - - /** - * Ensure version directory exists - */ - public async ensureVersionDir(packageName: string, version: string): Promise { - const versionPath = this.getVersionPath(packageName, version); - await fs.ensureDir(versionPath); - return versionPath; - } - - /** - * Remove a version directory - */ - public async removeVersion(packageName: string, version: string): Promise { - const versionPath = this.getVersionPath(packageName, version); - await fs.remove(versionPath); - } - - /** - * Localize a downloaded tarball into the artifact repository. - * - * This method owns the full responsibility of "localization": - * 1. Read package.json from tarball to extract sfpm metadata - * 2. Move tarball to artifacts///artifact.tgz - * 3. Calculate artifact hash - * 4. Build and save version entry in manifest - * 5. Update 'latest' symlink - * 6. 
Update lastCheckedRemote timestamp - * - * @param tarballPath - Path to the downloaded .tgz file - * @param packageName - Name of the package - * @param version - Version being localized - * @returns Localized artifact info including version entry - */ - public async localizeTarball( - tarballPath: string, - packageName: string, - version: string - ): Promise<{ - artifactPath: string; - versionEntry: ArtifactVersionEntry; - metadata?: SfpmPackageMetadata; - packageVersionId?: string; - }> { - const versionDir = this.getVersionPath(packageName, version); - const artifactPath = this.getArtifactPath(packageName, version); - - try { - // Ensure version directory exists - await fs.ensureDir(versionDir); - - // Read sfpm metadata from the tarball's package.json - const packageJson = this.extractPackageJsonFromTarball(tarballPath); - - // Move tarball to the artifacts folder - await fs.move(tarballPath, artifactPath, { overwrite: true }); - - const artifactHash = await this.calculateFileHash(artifactPath); - - let metadata: SfpmPackageMetadata | undefined; - let packageVersionId: string | undefined; - - if (packageJson?.sfpm) { - metadata = this.convertNpmMetadataToSfpm(packageJson); - packageVersionId = packageJson.sfpm.packageVersionId; - } - - // Use sourceHash from metadata if available, otherwise fall back to artifactHash - const sourceHash = metadata?.source?.sourceHash || artifactHash; - - // Build version entry - const versionEntry: ArtifactVersionEntry = { - path: `${packageName}/${version}/artifact.tgz`, - artifactHash, - sourceHash, - generatedAt: Date.now(), - packageVersionId, - }; - - // Finalize: update manifest and symlink - await this.finalizeArtifact(packageName, version, versionEntry); - - // Update last checked remote timestamp - await this.updateLastCheckedRemote(packageName); - - return { - artifactPath, - versionEntry, - metadata, - packageVersionId, - }; - - } catch (error) { - throw new ArtifactError(packageName, 'extract', 'Failed to localize 
tarball', { - version, - context: { tarballPath, artifactPath }, - cause: error instanceof Error ? error : new Error(String(error)), - }); - } - } + // Check if version exists in manifest + if (!manifest.versions[targetVersion]) { + this.logger?.warn(`Version ${targetVersion} not found in manifest for ${packageName}`); + return undefined; + } + + const tgzPath = this.getArtifactPath(packageName, targetVersion); + return this.extractMetadataFromTarball(tgzPath); + } catch (error) { + this.logger?.warn(`Failed to read artifact metadata: ${error instanceof Error ? error.message : String(error)}`); + return undefined; + } + } + + // ========================================================================= + // Manifest Operations + // ========================================================================= + + /** + * Get the path to a package's artifact directory + */ + public getPackageArtifactPath(packageName: string): string { + return path.join(this.artifactsRootDir, packageName); + } + + /** + * Get the project directory + */ + public getProjectDirectory(): string { + return this.projectDirectory; + } + + /** + * Get the relative path to the artifact file (for storage in manifest) + */ + public getRelativeArtifactPath(packageName: string, version: string): string { + return `${packageName}/${version}/artifact.tgz`; + } + + /** + * Get the path to a specific version's directory + */ + public getVersionPath(packageName: string, version: string): string { + return path.join(this.getPackageArtifactPath(packageName), version); + } + + /** + * Check if any local artifacts exist for a package + */ + public hasArtifacts(packageName: string): boolean { + const manifestPath = this.getManifestPath(packageName); + return fs.existsSync(manifestPath); + } + + /** + * Localize a downloaded tarball into the artifact repository. + * + * This method owns the full responsibility of "localization": + * 1. Read package.json from tarball to extract sfpm metadata + * 2. 
Move tarball to artifacts///artifact.tgz + * 3. Calculate artifact hash + * 4. Build and save version entry in manifest + * 5. Update 'latest' symlink + * 6. Update lastCheckedRemote timestamp + * + * @param tarballPath - Path to the downloaded .tgz file + * @param packageName - Name of the package + * @param version - Version being localized + * @returns Localized artifact info including version entry + */ + public async localizeTarball( + tarballPath: string, + packageName: string, + version: string, + ): Promise<{ + artifactPath: string; + metadata?: SfpmPackageMetadataBase; + packageVersionId?: string; + versionEntry: ArtifactVersionEntry; + }> { + const versionDir = this.getVersionPath(packageName, version); + const artifactPath = this.getArtifactPath(packageName, version); + + try { + // Ensure version directory exists + await fs.ensureDir(versionDir); + + // Read sfpm metadata from the tarball's package.json + const packageJson = this.extractPackageJsonFromTarball(tarballPath); + + // Move tarball to the artifacts folder + await fs.move(tarballPath, artifactPath, {overwrite: true}); + + const artifactHash = await this.calculateFileHash(artifactPath); + + let metadata: SfpmPackageMetadataBase | undefined; + let packageVersionId: string | undefined; + + if (packageJson?.sfpm) { + metadata = this.convertNpmMetadataToSfpm(packageJson); + packageVersionId = packageJson.sfpm.packageVersionId; + } + + // Use sourceHash from metadata if available, otherwise fall back to artifactHash + const sourceHash = metadata?.source?.sourceHash || artifactHash; + + // Build version entry + const versionEntry: ArtifactVersionEntry = { + artifactHash, + generatedAt: Date.now(), + packageVersionId, + path: `${packageName}/${version}/artifact.tgz`, + sourceHash, + }; + + // Finalize: update manifest and symlink + await this.finalizeArtifact(packageName, version, versionEntry); + + // Update last checked remote timestamp + await this.updateLastCheckedRemote(packageName); + + return { 
+ artifactPath, + metadata, + packageVersionId, + versionEntry, + }; + } catch (error) { + throw new ArtifactError(packageName, 'extract', 'Failed to localize tarball', { + cause: error instanceof Error ? error : new Error(String(error)), + context: {artifactPath, tarballPath}, + version, + }); + } + } + + /** + * Remove a version directory + */ + public async removeVersion(packageName: string, version: string): Promise { + const versionPath = this.getVersionPath(packageName, version); + await fs.remove(versionPath); + } + + /** + * Update lastCheckedRemote timestamp in manifest + */ + public async updateLastCheckedRemote(packageName: string): Promise { + const manifest = await this.getManifest(packageName); + if (manifest) { + manifest.lastCheckedRemote = Date.now(); + await this.saveManifest(packageName, manifest); + } + } + + // ========================================================================= + // Metadata Operations + // ========================================================================= + + /** + * Add or update a version entry in the manifest + */ + private async addVersionEntry( + packageName: string, + version: string, + entry: ArtifactVersionEntry, + updateLatest: boolean = true, + ): Promise { + let manifest = await this.getManifest(packageName); + + if (!manifest) { + manifest = { + latest: version, + name: packageName, + versions: {}, + }; + } + + manifest.versions[version] = entry; + + if (updateLatest) { + manifest.latest = version; + } + + await this.saveManifest(packageName, manifest); + } + + /** + * Check if an artifact exists for a version + */ + private artifactExists(packageName: string, version: string): boolean { + const tgzPath = this.getArtifactPath(packageName, version); + return fs.existsSync(tgzPath); + } + + /** + * Calculate SHA-256 hash of a file (sync) + */ + private calculateFileHashSync(filePath: string): string { + const content = fs.readFileSync(filePath); + return 
crypto.createHash('sha256').update(content).digest('hex'); + } + + /** + * Convert npm package.json with sfpm metadata to SfpmPackageMetadataBase + */ + private convertNpmMetadataToSfpm(packageJson: NpmPackageJson): SfpmPackageMetadataBase { + const {sfpm} = packageJson; + + // Parse name to get package name (remove scope) + const packageName = packageJson.name.includes('/') + ? packageJson.name.split('/')[1] + : packageJson.name; + + // If full metadata is embedded, use it directly + if (sfpm.metadata) { + return sfpm.metadata; + } + + // Otherwise, reconstruct base metadata from sfpm properties + return { + identity: { + apiVersion: sfpm.apiVersion, + packageName, + packageType: sfpm.packageType as any, + versionNumber: packageJson.version, + ...(sfpm.packageId && {packageId: sfpm.packageId}), + ...(sfpm.packageVersionId && {packageVersionId: sfpm.packageVersionId}), + ...(sfpm.isOrgDependent !== undefined && {isOrgDependent: sfpm.isOrgDependent}), + }, + orchestration: {}, + source: { + commitSHA: sfpm.commitId, + }, + } as SfpmPackageMetadataBase; + } + + /** + * Create a unique temporary directory for downloads/extraction. + * Pattern: .sfpm/tmp/downloads/[timestamp]-[packageName]-[hash] + */ + private async createTempDir(packageName: string): Promise { + const timestamp = new Date().toISOString() + .replace(/T/, '-') + .replace(/\..+/, '') + .replaceAll(/[:-]/g, ''); + const hash = crypto.randomBytes(4).toString('hex'); + const tempDirName = `${timestamp}-${packageName}-${hash}`; + const tempDir = path.join(this.projectDirectory, DOT_FOLDER, 'tmp', 'downloads', tempDirName); + await fs.ensureDir(tempDir); + return tempDir; + } + + /** + * Extract metadata from a tarball (npm package format). + * Reads the sfpm property from package.json and converts to SfpmPackageMetadataBase. 
+ */ + private extractMetadataFromTarball(tarballPath: string): SfpmPackageMetadataBase | undefined { + try { + if (!fs.existsSync(tarballPath)) { + this.logger?.debug(`No artifact.tgz found at ${tarballPath}`); + return undefined; + } + const packageJson = this.extractPackageJsonFromTarball(tarballPath); + if (!packageJson?.sfpm) { + this.logger?.debug(`No sfpm metadata found in package.json inside ${tarballPath}`); + return undefined; + } + + // Convert NpmPackageSfpmMetadata to SfpmPackageMetadataBase + return this.convertNpmMetadataToSfpm(packageJson); + } catch (error) { + this.logger?.debug(`Failed to extract metadata from tarball ${tarballPath}: ${error instanceof Error ? error.message : String(error)}`); + return undefined; + } + } + + // ========================================================================= + // Hash Calculation + // ========================================================================= + + /** + * Extract package.json from a tarball + */ + private extractPackageJsonFromTarball(tarballPath: string): NpmPackageJson | undefined { + try { + // Extract package.json content from tarball without fully extracting + const packageJsonContent = execSync( + `tar -xOzf "${tarballPath}" package/package.json`, + {encoding: 'utf8', timeout: 30_000}, + ); + return JSON.parse(packageJsonContent); + } catch (error) { + this.logger?.debug(`Failed to extract package.json from ${tarballPath}: ${error instanceof Error ? 
error.message : String(error)}`); + return undefined; + } + } + + /** + * Get the path to the manifest file for a package + */ + private getManifestPath(packageName: string): string { + return path.join(this.getPackageArtifactPath(packageName), 'manifest.json'); + } + + // ========================================================================= + // Symlink Management + // ========================================================================= + + /** + * Get version entry from manifest + */ + private getVersionEntry(packageName: string, version: string): ArtifactVersionEntry | undefined { + const manifest = this.getManifestSync(packageName); + return manifest?.versions[version]; + } + + // ========================================================================= + // Artifact Finalization + // ========================================================================= + + /** + * Get all local versions for a package + */ + private getVersions(packageName: string): string[] { + const manifest = this.getManifestSync(packageName); + return manifest ? 
Object.keys(manifest.versions) : []; + } + + // ========================================================================= + // Directory Management + // ========================================================================= + + /** + * Check if a specific version exists locally + */ + private hasVersion(packageName: string, version: string): boolean { + const manifest = this.getManifestSync(packageName); + return manifest?.versions[version] !== undefined; + } + + /** + * Save the manifest for a package (atomic write) + */ + private async saveManifest(packageName: string, manifest: ArtifactManifest): Promise { + const manifestPath = this.getManifestPath(packageName); + const tempPath = `${manifestPath}.tmp`; + + await fs.ensureDir(path.dirname(manifestPath)); + + // Atomic write: write to temp file first, then rename + await fs.writeJson(tempPath, manifest, {spaces: 4}); + await fs.move(tempPath, manifestPath, {overwrite: true}); + } + + /** + * Update the 'latest' symlink to point to a version directory + */ + private async updateLatestSymlink(packageName: string, version: string): Promise { + const packageArtifactRoot = this.getPackageArtifactPath(packageName); + const symlinkPath = path.join(packageArtifactRoot, 'latest'); + + try { + // Remove existing symlink if present + if (await fs.pathExists(symlinkPath)) { + await fs.remove(symlinkPath); + } + + // Create relative symlink (version directory name is relative to package root) + // Use 'junction' for Windows compatibility + await fs.symlink(version, symlinkPath, 'junction'); + } catch (error) { + // Symlinks might fail on some systems (Windows without admin) + this.logger?.warn(`Symlink failed: ${error instanceof Error ? error.message : String(error)}. 
Falling back to latest.version identifier.`); + + // Fallback: write version to a file + const versionFilePath = path.join(packageArtifactRoot, 'latest.version'); + await fs.writeFile(versionFilePath, version); + } + } } diff --git a/packages/core/src/index.ts b/packages/core/src/index.ts index adde6b2..e406b52 100644 --- a/packages/core/src/index.ts +++ b/packages/core/src/index.ts @@ -55,18 +55,22 @@ export {default as Git} from './git/git.js'; export {LifecycleEngine} from './lifecycle/lifecycle-engine.js'; export {AnalyzerRegistry, type PackageAnalyzer} from './package/analyzers/analyzer-registry.js'; export {BuildOrchestrationTask, BuildOrchestrator, type BuildOrchestratorOptions} from './package/build-orchestrator.js'; -export {BuilderRegistry} from './package/builders/builder-registry.js'; +export { + type Builder, type BuilderConstructor, BuilderRegistry, RegisterBuilder, +} from './package/builders/builder-registry.js'; export {InstallOrchestrationTask, InstallOrchestrator, type InstallOrchestratorOptions} from './package/install-orchestrator.js'; -export {InstallerRegistry} from './package/installers/installer-registry.js'; export { - ManagedPackageRef, type SourceDeployable, type VersionInstallable, + type Installer, type InstallerConstructor, InstallerRegistry, RegisterInstaller, +} from './package/installers/installer-registry.js'; +export { + type DataDeployable, ManagedPackageRef, type SourceDeployable, type VersionInstallable, } from './package/installers/types.js'; export { type OrchestrationTask, Orchestrator, type OrchestratorEmitter, type OrchestratorOptions, } from './package/orchestrator.js'; -export {PackageBuilder} from './package/package-builder.js'; // Avoid export * due to BuildOptions name conflict with types/project.ts +export {type BuildTask, PackageBuilder} from './package/package-builder.js'; // Avoid export * due to BuildOptions name conflict with types/project.ts export {type InstallOptions, type InstallResult, default as 
PackageInstaller} from './package/package-installer.js'; -export {PackageFactory, default as SfpmPackage} from './package/sfpm-package.js'; +export {PackageFactory, SfpmDataPackage, default as SfpmPackage} from './package/sfpm-package.js'; export {loadSfpmConfig, resolveConfigPath} from './project/config-loader.js'; export {default as ProjectConfig} from './project/project-config.js'; export * from './project/project-graph.js'; @@ -81,4 +85,5 @@ export * from './types/logger.js'; export * from './types/npm.js'; export * from './types/package.js'; export * from './types/project.js'; +export {DirectoryHasher} from './utils/directory-hasher.js'; export {escapeSOQL, soql} from './utils/soql.js'; diff --git a/packages/core/src/package/analyzers/picklist-analyzer.ts b/packages/core/src/package/analyzers/picklist-analyzer.ts index b25e124..93fad5b 100644 --- a/packages/core/src/package/analyzers/picklist-analyzer.ts +++ b/packages/core/src/package/analyzers/picklist-analyzer.ts @@ -1,46 +1,45 @@ -import { ComponentSet, MetadataComponent, registry } from '@salesforce/source-deploy-retrieve'; -import SfpmPackage, { SfpmMetadataPackage } from '../sfpm-package.js'; -import { PackageType, SfpmPackageContent, SfpmPackageMetadata } from '../../types/package.js'; -import { PackageAnalyzer } from './analyzer-registry.js'; +import {ComponentSet, MetadataComponent, registry} from '@salesforce/source-deploy-retrieve'; -import { Logger } from '../../types/logger.js'; +import {Logger} from '../../types/logger.js'; +import {PackageType, SfpmPackageContent} from '../../types/package.js'; +import SfpmPackage, {SfpmMetadataPackage} from '../sfpm-package.js'; +import {PackageAnalyzer} from './analyzer-registry.js'; -const PICKLIST_TYPES = ['Picklist', 'MultiselectPicklist']; +const PICKLIST_TYPES = new Set(['MultiselectPicklist', 'Picklist']); export default class PicklistAnalyzer implements PackageAnalyzer { - private logger?: Logger; + private logger?: Logger; - constructor(logger?: 
Logger) { - this.logger = logger; - } + constructor(logger?: Logger) { + this.logger = logger; + } - public isEnabled(sfpmPackage: SfpmMetadataPackage): boolean { - return sfpmPackage.type != PackageType.Data; + public async analyze(sfpmPackage: SfpmMetadataPackage): Promise> { + if (!sfpmPackage.customFields) { + return {}; } - - public async analyze(sfpmPackage: SfpmMetadataPackage): Promise> { - if (!sfpmPackage.customFields) { - return {}; - } + const picklists: MetadataComponent[] = []; + + try { + for (const field of sfpmPackage.customFields) { + // eslint-disable-next-line no-await-in-loop + const customField = (await field.parseXml()).CustomField as any; - const picklists: MetadataComponent[] = []; - - try { - - for (const field of sfpmPackage.customFields) { - let customField = (await field.parseXml()).CustomField as any; - - if (customField && PICKLIST_TYPES.includes(customField.type)) { - picklists.push(field); - } - } - } catch (error) { - this.logger?.trace(`Unable to process Picklist update due to ${error}`); + if (customField && PICKLIST_TYPES.has(customField.type)) { + picklists.push(field); } + } + } catch (error) { + this.logger?.trace(`Unable to process Picklist update due to ${error}`); + } - sfpmPackage.setPicklists(picklists.map(p => p.fullName)); + sfpmPackage.setPicklists(picklists.map(p => p.fullName)); - return {}; - } + return {}; + } + + public isEnabled(sfpmPackage: SfpmMetadataPackage): boolean { + return sfpmPackage.type !== PackageType.Data; + } } diff --git a/packages/core/src/package/assemblers/package-assembler.ts b/packages/core/src/package/assemblers/package-assembler.ts index 0f3d07a..846786d 100644 --- a/packages/core/src/package/assemblers/package-assembler.ts +++ b/packages/core/src/package/assemblers/package-assembler.ts @@ -1,31 +1,30 @@ import * as fs from 'fs-extra'; -import path from 'path'; -import crypto from 'crypto'; - -import ProjectConfig from "../../project/project-config.js"; -import { PackageType } from 
'../../types/package.js'; -import { Logger } from '../../types/logger.js'; - -import { AssemblyOptions, AssemblyStep, AssemblyOutput } from './types.js'; -import { SourceCopyStep } from './steps/source-copy-step.js'; -import { UnpackagedMetadataStep } from './steps/unpackaged-metadata-step.js'; -import { ScriptAssemblyStep } from './steps/script-assembly-step.js'; -import { ForceIgnoreStep } from './steps/force-ignore-step.js'; -import { DestructiveManifestStep } from './steps/destructive-manifest-step.js'; -import { OrgDefinitionStep } from './steps/org-definition-step.js'; -import { ProjectJsonAssemblyStep } from './steps/project-json-assembly-step.js'; +import crypto from 'node:crypto'; +import path from 'node:path'; + +import ProjectConfig from '../../project/project-config.js'; +import {Logger} from '../../types/logger.js'; +import {PackageType} from '../../types/package.js'; +import {DestructiveManifestStep} from './steps/destructive-manifest-step.js'; +import {ForceIgnoreStep} from './steps/force-ignore-step.js'; +import {OrgDefinitionStep} from './steps/org-definition-step.js'; +import {ProjectJsonAssemblyStep} from './steps/project-json-assembly-step.js'; +import {ScriptAssemblyStep} from './steps/script-assembly-step.js'; +import {SourceCopyStep} from './steps/source-copy-step.js'; +import {UnpackagedMetadataStep} from './steps/unpackaged-metadata-step.js'; +import {AssemblyOptions, AssemblyOutput, AssemblyStep} from './types.js'; // import { MDAPIConversionStep } from './steps/mdapi-conversion-step.js'; -const DOT_FOLDER = ".sfpm"; +const DOT_FOLDER = '.sfpm'; /** * @description Assembles package contents from a project configuration in a fluent, instance-based manner. - * + * * ### Staging Area ("The Why") * The `PackageAssembler` creates a temporary, isolated "staging area" for each build. This isolation: * 1. **Prevents Interference**: Multiple concurrent builds won't corrupt each other's files. * 2. 
**Ensures Determinism**: The resulting artifact contains exactly what is specified, with no leftover files from previous runs. * 3. **Simplifies Packaging**: Tools can simply zip or upload the entire contents of the staging directory. - * + * * ### Staging Structure ("The How") * The staging area follows a standardized layout: * - `/[packagePath]`: Primary source metadata. @@ -34,7 +33,7 @@ const DOT_FOLDER = ".sfpm"; * - `/forceignores`: Stage-specific ignore files (e.g., `.prepareignore`). * - `.forceignore`: The root ignore file used for the final artifact. * - `sfdx-project.json`: A pruned version of the original manifest, specifically for this package. - * + * * @example * ```typescript * const stagingPath = await new PackageAssembler(projectConfig, 'my-package', logger) @@ -44,175 +43,203 @@ const DOT_FOLDER = ".sfpm"; * ``` */ export default class PackageAssembler { - private stagingDirectory: string; - - constructor( - private packageName: string, - private projectConfig: ProjectConfig, - private options: AssemblyOptions = {}, - private logger?: Logger - ) { - this.stagingDirectory = this.initializeStagingArea(); + private stagingDirectory: string; + + constructor( + private packageName: string, + private projectConfig: ProjectConfig, + private options: AssemblyOptions = {}, + private logger?: Logger, + ) { + this.stagingDirectory = this.initializeStagingArea(); + } + + /** + * @description Orchestrates the package assembly process. This method executes all necessary file I/O operations, + * including copying source code, handling scripts, generating manifests, and managing + * the staging area lifecycle. + * + * @returns {Promise} A promise that resolves to the absolute path of the created staging directory. + * @throws {Error} if any step of the assembly process fails. 
+ * + * @example + * ```typescript + * const stagingPath = await assembler.assemble(); + * console.log(`Package assembled at: ${stagingPath}`); + * ``` + */ + public async assemble(): Promise { + try { + await this.ensureStagingDirectoryExists(); + + const output: AssemblyOutput = { + projectDefinitionPath: path.join(this.stagingDirectory, 'sfdx-project.json'), + stagingDirectory: this.stagingDirectory, + }; + + const packageDefinition = this.projectConfig.getPackageDefinition(this.packageName); + const packageType = packageDefinition.type?.toLowerCase(); + + const steps = packageType === PackageType.Data + ? this.buildDataAssemblySteps() + : this.buildMetadataAssemblySteps(); + + for (const step of steps) { + this.logger?.debug(`Executing step: ${step.constructor.name}`); + // eslint-disable-next-line no-await-in-loop -- steps must be executed sequentially as they depend on the output of previous steps + await step.execute(this.options, output); + } + + return output; + } catch (error) { + // Error Handling: attempt to delete the stagingDirectory before re-throwing + if (process.env.DEBUG !== 'true' && this.stagingDirectory) { + await fs.remove(this.stagingDirectory).catch(() => { }); + } + + throw error; } - - /** - * @description Sets the package version number to be injected into the assembly's sfdx-project.json. - * - * @param {string | undefined} version The version string (e.g., "1.2.0.1" or "1.2.0.NEXT"). - * @returns {this} The PackageAssembler instance for chaining. - * - * @example - * ```typescript - * assembler.withVersion('1.0.0.NEXT'); - * ``` - */ - public withVersion(version: string | undefined): this { - this.options.versionNumber = version; - return this; - } - - /** - * @description Specifies the path to an organization definition file (e.g., scratch org definition) - * to be included in the package assembly. - * - * @param {string | undefined} path Relative or absolute path to the org definition JSON. 
- * @returns {this} The PackageAssembler instance for chaining. - * - * @example - * ```typescript - * assembler.withOrgDefinition('config/project-scratch-def.json'); - * ``` - */ - public withOrgDefinition(path: string | undefined): this { - this.options.orgDefinitionPath = path; - return this; - } - - /** - * @description Specifies the path to a destructive changes manifest (e.g., destructiveChanges.xml) - * to be included in the package assembly. - * - * @param {string | undefined} path Relative or absolute path to the destructive changes manifest. - * @returns {this} The PackageAssembler instance for chaining. - * - * @example - * ```typescript - * assembler.withDestructiveManifest('manifest/destructiveChanges.xml'); - * ``` - */ - public withDestructiveManifest(path: string | undefined): this { - this.options.destructiveManifestPath = path; - return this; - } - - /** - * @description Overrides the default .forceignore file with a specific replacement file. - * - * @param {string | undefined} path Relative or absolute path to the replacement .forceignore file. - * @returns {this} The PackageAssembler instance for chaining. - * - * @example - * ```typescript - * assembler.withReplacementForceIgnore('.forceignore.prod'); - * ``` - */ - public withReplacementForceIgnore(path: string | undefined): this { - this.options.replacementForceignorePath = path; - return this; - } - - /** - * @description Orchestrates the package assembly process. This method executes all necessary file I/O operations, - * including copying source code, handling scripts, generating manifests, and managing - * the staging area lifecycle. - * - * @returns {Promise} A promise that resolves to the absolute path of the created staging directory. - * @throws {Error} if any step of the assembly process fails. 
- * - * @example - * ```typescript - * const stagingPath = await assembler.assemble(); - * console.log(`Package assembled at: ${stagingPath}`); - * ``` - */ - public async assemble(): Promise { - try { - await this.ensureStagingDirectoryExists(); - - const output: AssemblyOutput = { - stagingDirectory: this.stagingDirectory, - projectDefinitionPath: path.join(this.stagingDirectory, 'sfdx-project.json') - }; - - const steps: AssemblyStep[] = [ - new SourceCopyStep(this.packageName, this.projectConfig, this.logger), - new OrgDefinitionStep(this.packageName, this.projectConfig, this.logger), - new ScriptAssemblyStep(this.packageName, this.projectConfig, this.logger), - new UnpackagedMetadataStep(this.packageName, this.projectConfig, this.logger), - new ForceIgnoreStep(this.packageName, this.projectConfig, this.logger) - ]; - - if (this.options.destructiveManifestPath) { - steps.push(new DestructiveManifestStep(this.packageName, this.projectConfig, this.logger)); - } - - // always final - steps.push(new ProjectJsonAssemblyStep(this.packageName, this.projectConfig, this.logger)); - - for (const step of steps) { - this.logger?.debug(`Executing step: ${step.constructor.name}`); - await step.execute(this.options, output); - } - - return output; - - } catch (error) { - // Error Handling: attempt to delete the stagingDirectory before re-throwing - if (process.env.DEBUG !== 'true' && this.stagingDirectory) { - await fs.remove(this.stagingDirectory).catch(() => { }); - } - throw error; - } - } - - /** - * @description Resolves the path for the temporary assembly area. - * The path is constructed as: `.sfpm/tmp/builds/[timestamp]-[packageName]-[hash]` - * - * @returns {string} The absolute path to the staging directory. 
- */ - private initializeStagingArea(): string { - const buildName = this.createBuildName(); - return path.join(process.cwd(), DOT_FOLDER, 'tmp', 'builds', buildName); - } - - /** - * @description Ensures the parent directories for the staging area exist and that the - * specific staging folder is completely empty and ready for a fresh build. - * - * @returns {Promise} - */ - private async ensureStagingDirectoryExists(): Promise { - await fs.ensureDir(path.dirname(this.stagingDirectory)); - await fs.emptyDir(this.stagingDirectory); - } - - /** - * @description Generates a unique name for the build to avoid collisions. - * Format: YYYYMMDDHHMMSS-[packageName]-[randomHash] - * - * @returns {string} A unique build identifier. - */ - private createBuildName(): string { - const now = new Date(); - const timestamp = now.toISOString() - .replace(/T/, '-') - .replace(/\..+/, '') - .replace(/:/g, '') - .replace(/-/g, ''); - - const hash = crypto.randomBytes(2).toString('hex'); - return `${timestamp}-${this.packageName}-${hash}`; + } + + /** + * @description Specifies the path to a destructive changes manifest (e.g., destructiveChanges.xml) + * to be included in the package assembly. + * + * @param {string | undefined} path Relative or absolute path to the destructive changes manifest. + * @returns {this} The PackageAssembler instance for chaining. + * + * @example + * ```typescript + * assembler.withDestructiveManifest('manifest/destructiveChanges.xml'); + * ``` + */ + public withDestructiveManifest(path: string | undefined): this { + this.options.destructiveManifestPath = path; + return this; + } + + /** + * @description Specifies the path to an organization definition file (e.g., scratch org definition) + * to be included in the package assembly. + * + * @param {string | undefined} path Relative or absolute path to the org definition JSON. + * @returns {this} The PackageAssembler instance for chaining. 
+ * + * @example + * ```typescript + * assembler.withOrgDefinition('config/project-scratch-def.json'); + * ``` + */ + public withOrgDefinition(path: string | undefined): this { + this.options.orgDefinitionPath = path; + return this; + } + + /** + * @description Overrides the default .forceignore file with a specific replacement file. + * + * @param {string | undefined} path Relative or absolute path to the replacement .forceignore file. + * @returns {this} The PackageAssembler instance for chaining. + * + * @example + * ```typescript + * assembler.withReplacementForceIgnore('.forceignore.prod'); + * ``` + */ + public withReplacementForceIgnore(path: string | undefined): this { + this.options.replacementForceignorePath = path; + return this; + } + + /** + * @description Sets the package version number to be injected into the assembly's sfdx-project.json. + * + * @param {string | undefined} version The version string (e.g., "1.2.0.1" or "1.2.0.NEXT"). + * @returns {this} The PackageAssembler instance for chaining. + * + * @example + * ```typescript + * assembler.withVersion('1.0.0.NEXT'); + * ``` + */ + public withVersion(version: string | undefined): this { + this.options.versionNumber = version; + return this; + } + + /** + * Assembly pipeline for data packages. + * Content-agnostic: copies the entire package directory as-is. + * Skips metadata-specific steps (ForceIgnore, UnpackagedMetadata, DestructiveManifest, OrgDefinition). + */ + private buildDataAssemblySteps(): AssemblyStep[] { + return [ + new SourceCopyStep(this.packageName, this.projectConfig, this.logger), + new ScriptAssemblyStep(this.packageName, this.projectConfig, this.logger), + new ProjectJsonAssemblyStep(this.packageName, this.projectConfig, this.logger), + ]; + } + + /** + * Assembly pipeline for metadata packages (source, unlocked). + * Full pipeline with metadata-specific steps. 
+ */ + private buildMetadataAssemblySteps(): AssemblyStep[] { + const steps: AssemblyStep[] = [ + new SourceCopyStep(this.packageName, this.projectConfig, this.logger), + new OrgDefinitionStep(this.packageName, this.projectConfig, this.logger), + new ScriptAssemblyStep(this.packageName, this.projectConfig, this.logger), + new UnpackagedMetadataStep(this.packageName, this.projectConfig, this.logger), + new ForceIgnoreStep(this.packageName, this.projectConfig, this.logger), + ]; + + if (this.options.destructiveManifestPath) { + steps.push(new DestructiveManifestStep(this.packageName, this.projectConfig, this.logger)); } + // always final + steps.push(new ProjectJsonAssemblyStep(this.packageName, this.projectConfig, this.logger)); + + return steps; + } + + /** + * @description Generates a unique name for the build to avoid collisions. + * Format: YYYYMMDDHHMMSS-[packageName]-[randomHash] + * + * @returns {string} A unique build identifier. + */ + private createBuildName(): string { + const now = new Date(); + const timestamp = now.toISOString() + .replace(/T/, '-') + .replace(/\..+/, '') + .replaceAll(':', '') + .replaceAll('-', ''); + + const hash = crypto.randomBytes(2).toString('hex'); + return `${timestamp}-${this.packageName}-${hash}`; + } + + /** + * @description Ensures the parent directories for the staging area exist and that the + * specific staging folder is completely empty and ready for a fresh build. + * + * @returns {Promise} + */ + private async ensureStagingDirectoryExists(): Promise { + await fs.ensureDir(path.dirname(this.stagingDirectory)); + await fs.emptyDir(this.stagingDirectory); + } + + /** + * @description Resolves the path for the temporary assembly area. + * The path is constructed as: `.sfpm/tmp/builds/[timestamp]-[packageName]-[hash]` + * + * @returns {string} The absolute path to the staging directory. 
+ */ + private initializeStagingArea(): string { + const buildName = this.createBuildName(); + return path.join(process.cwd(), DOT_FOLDER, 'tmp', 'builds', buildName); + } } diff --git a/packages/core/src/package/installers/installer-registry.ts b/packages/core/src/package/installers/installer-registry.ts index 3c465fd..65616a6 100644 --- a/packages/core/src/package/installers/installer-registry.ts +++ b/packages/core/src/package/installers/installer-registry.ts @@ -32,19 +32,19 @@ export type InstallerConstructor = new ( * Registry to store and retrieve package installers by type */ export class InstallerRegistry { - private static installers = new Map, InstallerConstructor>(); + private static installers = new Map, InstallerConstructor>(); /** * Retrieves an installer for a specific package type */ - public static getInstaller(type: Omit): InstallerConstructor | undefined { + public static getInstaller(type: Omit): InstallerConstructor | undefined { return InstallerRegistry.installers.get(type); } /** * Registers an installer for a specific package type */ - public static register(type: Omit, installer: InstallerConstructor) { + public static register(type: Omit, installer: InstallerConstructor) { InstallerRegistry.installers.set(type, installer); } } @@ -56,7 +56,7 @@ export class InstallerRegistry { * it casts internally so concrete constructors (which narrow the installable * parameter) don't clash with the broader {@link InstallerConstructor} union. 
*/ -export function RegisterInstaller(type: Omit) { +export function RegisterInstaller(type: Omit) { return (constructor: new (...args: any[]) => Installer) => { InstallerRegistry.register(type, constructor as InstallerConstructor); }; diff --git a/packages/core/src/package/installers/types.ts b/packages/core/src/package/installers/types.ts index 7c485e7..5d17de7 100644 --- a/packages/core/src/package/installers/types.ts +++ b/packages/core/src/package/installers/types.ts @@ -35,6 +35,21 @@ export interface SourceDeployable { versionNumber?: string; } +/** + * A package that can be installed via data loading tools (e.g. SFDMU). + * + * Core is intentionally content-agnostic — it only exposes the directory + * containing the data files. Concrete adapters (like @b64/sfpm-sfdmu) + * interpret the directory contents (export.json, CSVs, etc.). + */ +export interface DataDeployable { + artifact?: ArtifactProvenance; + /** Absolute path to the directory containing the data files */ + dataDirectory: string; + packageName: string; + versionNumber?: string; +} + /** * Lightweight reference to an external managed/subscriber package. 
* diff --git a/packages/core/src/package/sfpm-package.ts b/packages/core/src/package/sfpm-package.ts index 7a038f7..bf8e3af 100644 --- a/packages/core/src/package/sfpm-package.ts +++ b/packages/core/src/package/sfpm-package.ts @@ -1,17 +1,21 @@ import {ComponentSet, SourceComponent} from '@salesforce/source-deploy-retrieve'; +import fg from 'fast-glob'; import { get, merge, omit, set, } from 'lodash-es'; import path from 'node:path'; import ProjectConfig from '../project/project-config.js'; +import {VersionManager} from '../project/version-manager.js'; import { - MetadataFile, PackageType, SfpmPackageContent, SfpmPackageMetadata, SfpmPackageOrchestration, SfpmUnlockedPackageBuildOptions, SfpmUnlockedPackageMetadata, + MetadataFile, PackageType, SfpmDataPackageMetadata, SfpmPackageContent, SfpmPackageMetadata, SfpmPackageMetadataBase, SfpmPackageOrchestration, SfpmUnlockedPackageBuildOptions, SfpmUnlockedPackageMetadata, } from '../types/package.js'; import {PackageDefinition, ProjectDefinition} from '../types/project.js'; +import {DirectoryHasher} from '../utils/directory-hasher.js'; import {SourceHasher} from '../utils/source-hasher.js'; -import {ManagedPackageRef, VersionInstallable, type SourceDeployable} from './installers/types.js'; -import { VersionManager } from '../project/version-manager.js'; +import { + type DataDeployable, ManagedPackageRef, type SourceDeployable, VersionInstallable, +} from './installers/types.js'; const TEST_COVERAGE_THRESHOLD = 75; const DEFAULT_API_VERSION = '65.0'; @@ -44,17 +48,16 @@ const PROFILE_SUPPORTED_METADATA_TYPES = new Set([ ]); export default abstract class SfpmPackage { - protected _metadata: SfpmPackageMetadata; + protected _metadata: SfpmPackageMetadataBase; protected _packageDefinition?: PackageDefinition; public orgDefinitionPath?: string = path.join('config', 'project-scratch-def.json'); public projectDefinition?: ProjectDefinition; public projectDirectory: string; public stagingDirectory: string | undefined; - 
constructor(packageName: string, projectDirectory: string, metadata?: Partial) { + constructor(packageName: string, projectDirectory: string, metadata?: Partial) { this.projectDirectory = projectDirectory; this._metadata = { - content: {...metadata?.content}, identity: { packageName, packageType: '', @@ -62,9 +65,8 @@ export default abstract class SfpmPackage { }, orchestration: {...metadata?.orchestration}, source: {...metadata?.source}, - validation: {...metadata?.validation}, - ...omit(metadata, ['identity', 'source', 'content', 'validation', 'orchestration']), - } as SfpmPackageMetadata; + ...omit(metadata, ['identity', 'source', 'orchestration']), + } as SfpmPackageMetadataBase; } get apiVersion(): string { @@ -83,7 +85,7 @@ export default abstract class SfpmPackage { return this.packageDefinition?.dependencies; } - get metadata(): SfpmPackageMetadata { + get metadata(): SfpmPackageMetadataBase { return this._metadata; } @@ -192,13 +194,21 @@ export default abstract class SfpmPackage { * @param sourceHash - Optional source hash to include * @returns SFPM metadata object for package.json */ - public async toJson(): Promise { + public async toJson(): Promise { return this.metadata } } export abstract class SfpmMetadataPackage extends SfpmPackage implements SourceDeployable { protected _componentSet?: ComponentSet; + protected declare _metadata: SfpmPackageMetadata; + + constructor(packageName: string, projectDirectory: string, metadata?: Partial) { + super(packageName, projectDirectory, metadata); + // Ensure content and validation sections exist + this._metadata.content = {...metadata?.content} as SfpmPackageContent; + this._metadata.validation = {...metadata?.validation}; + } get apexClasses(): SourceComponent[] { return this.getComponentSet() @@ -495,7 +505,83 @@ export abstract class SfpmMetadataPackage extends SfpmPackage implements SourceD } } -export class SfpmDataPackage extends SfpmPackage { +export class SfpmDataPackage extends SfpmPackage implements 
DataDeployable { + constructor(packageName: string, projectDirectory: string, metadata?: Partial) { + super(packageName, projectDirectory, metadata); + this._metadata.identity.packageType = PackageType.Data; + } + + /** + * Absolute path to the data directory. + * Before staging: resolves from project source. + * After staging: resolves from the staging area. + */ + get dataDirectory(): string { + const packagePath = this.packageDefinition?.path; + if (!packagePath) { + throw new Error('Data package must have a path defined in packageDefinition'); + } + + if (this.stagingDirectory) { + return path.join(this.stagingDirectory, packagePath); + } + + return path.join(this.projectDirectory, packagePath); + } + + override get metadata(): SfpmPackageMetadataBase { + return this._metadata; + } + + /** + * Alias for the version property, satisfying the DataDeployable interface. + */ + get versionNumber(): string | undefined { + return this.version; + } + + /** + * Calculate source hash by hashing all files in the data directory recursively. + * This is content-agnostic — it hashes every file regardless of type. + */ + public async calculateSourceHash(): Promise { + const hash = await DirectoryHasher.calculate(this.dataDirectory); + this.sourceHash = hash; + return hash; + } + + /** + * Counts the files in the data directory for metadata. 
+ */ + public async countFiles(): Promise { + const files = await fg(['**/*'], { + cwd: this.dataDirectory, + dot: false, + onlyFiles: true, + }); + return files.length; + } + + override async toJson(): Promise { + const fileCount = await this.countFiles(); + + return { + content: { + dataDirectory: this.packageDefinition?.path || '', + fileCount, + }, + identity: { + packageName: this.name, + packageType: PackageType.Data, + versionNumber: this.version, + }, + orchestration: { + buildOptions: this.packageDefinition?.packageOptions?.build as any, + deploymentOptions: this.packageDefinition?.packageOptions?.deploy, + }, + source: this._metadata.source, + }; + } } export class SfpmUnlockedPackage extends SfpmMetadataPackage { @@ -533,7 +619,7 @@ export class SfpmUnlockedPackage extends SfpmMetadataPackage { } override setBuildNumber(buildNumber: string): void { - return; + } override setOrchestrationOptions(options: Partial): void { @@ -600,7 +686,7 @@ export class PackageFactory { const managedRef = this.createManagedRef(packageName); if (managedRef) { throw new Error(`Package "${packageName}" is a managed dependency, not a local package. ` - + 'Use createManagedRef() instead.'); + + 'Use createManagedRef() instead.'); } throw new Error(`Package ${packageName} not found in project definition`); diff --git a/packages/core/src/types/npm.ts b/packages/core/src/types/npm.ts index 218e647..d52f5f8 100644 --- a/packages/core/src/types/npm.ts +++ b/packages/core/src/types/npm.ts @@ -1,141 +1,141 @@ /** * Types for npm package.json generation from SFPM packages. - * + * * When publishing to npm, SFPM packages are represented as standard npm packages * with SFPM-specific metadata stored under the "sfpm" property. */ -import { PackageType, SfpmPackageMetadata } from './package.js'; +import {PackageType, SfpmPackageMetadataBase} from './package.js'; /** * The "sfpm" property in package.json. * Contains all SFPM-specific metadata that doesn't map to standard npm fields. 
*/ export interface NpmPackageSfpmMetadata { - /** Package type (unlocked, source, data) */ - packageType: string; - /** Salesforce Package ID (0Ho...) for unlocked packages */ - packageId?: string; - /** Salesforce Package Version ID (04t...) for unlocked packages */ - packageVersionId?: string; - /** Namespace prefix if applicable */ - namespacePrefix?: string; - /** Whether this is an org-dependent unlocked package */ - isOrgDependent?: boolean; - /** SHA-256 hash of the source files */ - sourceHash?: string; - /** Git commit SHA */ - commitId?: string; - /** Build timestamp */ - buildDate?: string; - /** Salesforce API version */ - apiVersion?: string; - /** Full SFPM metadata (optional, for advanced use cases) */ - metadata?: SfpmPackageMetadata; + /** Salesforce API version */ + apiVersion?: string; + /** Build timestamp */ + buildDate?: string; + /** Git commit SHA */ + commitId?: string; + /** Whether this is an org-dependent unlocked package */ + isOrgDependent?: boolean; + /** Full SFPM metadata (optional, for advanced use cases) */ + metadata?: SfpmPackageMetadataBase; + /** Namespace prefix if applicable */ + namespacePrefix?: string; + /** Salesforce Package ID (0Ho...) for unlocked packages */ + packageId?: string; + /** Package type (unlocked, source, data) */ + packageType: string; + /** Salesforce Package Version ID (04t...) for unlocked packages */ + packageVersionId?: string; + /** SHA-256 hash of the source files */ + sourceHash?: string; } /** * Standard npm package.json structure as generated by SFPM. 
- * + * * @see https://docs.npmjs.com/cli/v10/configuring-npm/package-json */ export interface NpmPackageJson { - /** Scoped package name (e.g., "@myorg/my-package") */ - name: string; - /** Semver version (e.g., "1.0.0-1") */ - version: string; - /** Package description from versionDescription */ - description?: string; - /** Main entry point (stub for SFPM packages) */ - main?: string; - /** Package keywords for discovery */ - keywords?: string[]; - /** Author information */ - author?: string; - /** License identifier */ - license?: string; - /** Repository URL */ - repository?: { - type: string; - url: string; - }; - /** Homepage URL (e.g., AppExchange listing or project docs) */ - homepage?: string; - /** Issue/bug tracker URL */ - bugs?: { - url: string; - }; - /** - * Optional dependencies - used for versioned SFPM package dependencies. - * "Optional" because they may not yet be published to the npm registry. - */ - optionalDependencies?: Record; - /** - * Managed package dependencies pinned to a subscriber packageVersionId (04t...). - * These are Salesforce managed packages identified by their alias in - * sfdx-project.json (e.g., "Nebula Logger@4.16.0") and cannot be resolved via npm. - */ - managedDependencies?: Record; - /** - * Files to include in the package. - * SFPM includes source, scripts, manifests, and sfdx-project.json. - */ - files?: string[]; - /** - * SFPM-specific metadata. - * Contains package type, IDs, and other Salesforce-specific info. - */ - sfpm: SfpmPackageMetadata; + /** Author information */ + author?: string; + /** Issue/bug tracker URL */ + bugs?: { + url: string; + }; + /** Package description from versionDescription */ + description?: string; + /** + * Files to include in the package. + * SFPM includes source, scripts, manifests, and sfdx-project.json. 
+ */ + files?: string[]; + /** Homepage URL (e.g., AppExchange listing or project docs) */ + homepage?: string; + /** Package keywords for discovery */ + keywords?: string[]; + /** License identifier */ + license?: string; + /** Main entry point (stub for SFPM packages) */ + main?: string; + /** + * Managed package dependencies pinned to a subscriber packageVersionId (04t...). + * These are Salesforce managed packages identified by their alias in + * sfdx-project.json (e.g., "Nebula Logger@4.16.0") and cannot be resolved via npm. + */ + managedDependencies?: Record; + /** Scoped package name (e.g., "@myorg/my-package") */ + name: string; + /** + * Optional dependencies - used for versioned SFPM package dependencies. + * "Optional" because they may not yet be published to the npm registry. + */ + optionalDependencies?: Record; + /** Repository URL */ + repository?: { + type: string; + url: string; + }; + /** + * SFPM-specific metadata. + * Contains package type, IDs, and other Salesforce-specific info. + */ + sfpm: SfpmPackageMetadataBase; + /** Semver version (e.g., "1.0.0-1") */ + version: string; } /** * Options for generating package.json */ export interface PackageJsonGeneratorOptions { - /** npm scope (e.g., "@myorg") - required */ - npmScope: string; - /** Source hash to include in metadata */ - sourceHash?: string; - /** Additional keywords to include */ - additionalKeywords?: string[]; - /** Author string */ - author?: string; - /** License identifier */ - license?: string; + /** Additional keywords to include */ + additionalKeywords?: string[]; + /** Author string */ + author?: string; + /** License identifier */ + license?: string; + /** npm scope (e.g., "@myorg") - required */ + npmScope: string; + /** Source hash to include in metadata */ + sourceHash?: string; } /** * Converts an SFPM dependency to npm optionalDependency format. 
- * + * * sfdx-project.json format: * { "package": "my-dependency", "versionNumber": "1.2.0.LATEST" } - * + * * npm format: * { "@scope/my-dependency": "^1.2.0" } - * + * * @param dep - Dependency from sfdx-project.json * @param npmScope - npm scope to use * @returns Tuple of [packageName, versionRange] */ export function convertDependencyToNpm( - dep: { package: string; versionNumber?: string }, - npmScope: string + dep: {package: string; versionNumber?: string}, + npmScope: string, ): [string, string] { - const npmPackageName = `${npmScope}/${dep.package}`; - - // Extract base version (major.minor.patch) from sfdx version format - // "1.2.0.LATEST" -> "^1.2.0" - // "1.2.0.4" -> "^1.2.0" - // "1.2.0" -> "^1.2.0" - let versionRange = '*'; - - if (dep.versionNumber) { - const parts = dep.versionNumber.split('.'); - if (parts.length >= 3) { - const baseVersion = parts.slice(0, 3).join('.'); - versionRange = `^${baseVersion}`; - } + const npmPackageName = `${npmScope}/${dep.package}`; + + // Extract base version (major.minor.patch) from sfdx version format + // "1.2.0.LATEST" -> "^1.2.0" + // "1.2.0.4" -> "^1.2.0" + // "1.2.0" -> "^1.2.0" + let versionRange = '*'; + + if (dep.versionNumber) { + const parts = dep.versionNumber.split('.'); + if (parts.length >= 3) { + const baseVersion = parts.slice(0, 3).join('.'); + versionRange = `^${baseVersion}`; } - - return [npmPackageName, versionRange]; + } + + return [npmPackageName, versionRange]; } diff --git a/packages/core/src/types/package.ts b/packages/core/src/types/package.ts index 7ceb02c..b1427d4 100644 --- a/packages/core/src/types/package.ts +++ b/packages/core/src/types/package.ts @@ -118,22 +118,45 @@ export interface SfpmUnlockedPackageBuildOptions extends SfpmPackageBuildOptions postInstallScript?: string; } -export interface SfpmDataPackageMetadata { +/** + * Content descriptor for data packages. 
+ * Intentionally separate from SfpmPackageContent — data packages have no + * Salesforce metadata components, apex, or manifests. + */ +export interface SfpmDataPackageContent { + /** Relative path to the data directory within the package (from packageDefinition.path) */ + dataDirectory: string; + /** Total number of files in the data directory */ + fileCount: number; +} + +// --------------------------------------------------------------------------- +// Package Metadata Hierarchy +// +// SfpmPackageMetadataBase (universal: identity, orchestration, source) +// ├── SfpmPackageMetadata (source/unlocked: + content + validation) +// │ └── SfpmUnlockedPackageMetadata (+ unlocked identity) +// └── SfpmDataPackageMetadata (data: + data-specific content) +// --------------------------------------------------------------------------- + +/** + * Base metadata shared by **all** package types (source, unlocked, data, …). + * Contains only universally applicable sections. + */ +export interface SfpmPackageMetadataBase { [key: string]: any; identity: SfpmPackageIdentity; + orchestration: SfpmPackageOrchestration; source: SfpmPackageSource; } /** - * The "Source of Truth" for the JSON Artifact. - * This represents the metadata file stored in the artifact. + * Metadata for source and unlocked packages. + * Adds Salesforce-specific content (component manifest, apex, metadata counts) + * and validation (code-coverage, test results). */ -export interface SfpmPackageMetadata { - [key: string]: any; +export interface SfpmPackageMetadata extends SfpmPackageMetadataBase { content: SfpmPackageContent; - identity: SfpmPackageIdentity; - orchestration: SfpmPackageOrchestration; - source: SfpmPackageSource; validation: SfpmPackageValidation; } @@ -141,6 +164,14 @@ export interface SfpmUnlockedPackageMetadata extends SfpmPackageMetadata { identity: SfpmUnlockedPackageIdentity; } +/** + * Metadata for data packages. + * Contains data-specific content with no Salesforce metadata components. 
+ */ +export interface SfpmDataPackageMetadata extends SfpmPackageMetadataBase { + content: SfpmDataPackageContent; +} + /** * Represents merged view of sfpm artifacts + subscriber packages */ diff --git a/packages/core/src/utils/directory-hasher.ts b/packages/core/src/utils/directory-hasher.ts new file mode 100644 index 0000000..b7c6ffa --- /dev/null +++ b/packages/core/src/utils/directory-hasher.ts @@ -0,0 +1,46 @@ +import fg from 'fast-glob'; +import fs from 'fs-extra'; +import crypto from 'node:crypto'; +import path from 'node:path'; + +/** + * Content-agnostic directory hasher. + * + * Recursively hashes all files in a directory to produce a deterministic + * SHA-256 digest. Files are sorted by relative path before hashing to + * guarantee consistent ordering across runs and platforms. + * + * Used by data packages (and any other non-metadata package types) where + * a ComponentSet-based hash is not applicable. + */ +export class DirectoryHasher { + /** + * Calculate a deterministic SHA-256 hash of all files in a directory. 
+ * + * @param directory - Absolute path to the directory to hash + * @returns Hex-encoded SHA-256 digest + */ + public static async calculate(directory: string): Promise { + const hash = crypto.createHash('sha256'); + + // Enumerate all files, sorted for determinism + const files = await fg(['**/*'], { + cwd: directory, + dot: false, + onlyFiles: true, + }); + files.sort(); + + for (const relativePath of files) { + // Include the relative path in the hash for structural integrity + hash.update(relativePath); + + const absolutePath = path.join(directory, relativePath); + // eslint-disable-next-line no-await-in-loop -- sequential read required for deterministic hash ordering + const content = await fs.readFile(absolutePath); + hash.update(content); + } + + return hash.digest('hex'); + } +} diff --git a/packages/core/test/package/assemblers/data-package-assembler.test.ts b/packages/core/test/package/assemblers/data-package-assembler.test.ts new file mode 100644 index 0000000..9abffa1 --- /dev/null +++ b/packages/core/test/package/assemblers/data-package-assembler.test.ts @@ -0,0 +1,104 @@ +import {beforeEach, describe, expect, it, vi} from 'vitest'; + +vi.mock('fs-extra', () => { + const mockFs = { + pathExists: vi.fn(), + copy: vi.fn(), + ensureDir: vi.fn(), + emptyDir: vi.fn(), + writeJSON: vi.fn(), + appendFile: vi.fn(), + remove: vi.fn().mockResolvedValue(undefined), + pathExistsSync: vi.fn(), + }; + return { + ...mockFs, + default: mockFs, + }; +}); + +vi.mock('../../../../src/package/assemblers/steps/mdapi-conversion-step.js', () => { + return { + MDAPIConversionStep: class { + execute = vi.fn().mockResolvedValue(undefined); + }, + }; +}); + +import fs from 'fs-extra'; +import path from 'path'; +import PackageAssembler from '../../../../src/package/assemblers/package-assembler.js'; + +const mockedFs = fs as any; + +describe('PackageAssembler — Data packages', () => { + let mockProjectConfig: any; + let assembler: any; + + beforeEach(() => { + 
vi.clearAllMocks(); + + mockProjectConfig = { + projectDirectory: '/root', + getPackageDefinition: vi.fn().mockReturnValue({ + package: 'my-data', + path: 'data', + type: 'data', + versionNumber: '1.0.0.0', + }), + getProjectDefinition: vi.fn().mockReturnValue({ + packageDirectories: [{package: 'my-data', path: 'data', type: 'data', versionNumber: '1.0.0.0'}], + }), + getPrunedDefinition: vi.fn().mockReturnValue({ + packageDirectories: [{package: 'my-data', path: 'data', type: 'data', versionNumber: '1.0.0.0'}], + }), + }; + + assembler = new PackageAssembler('my-data', mockProjectConfig as any); + }); + + it('should use reduced assembly pipeline for data packages', async () => { + mockedFs.pathExists.mockResolvedValue(true); + mockedFs.copy.mockResolvedValue(undefined); + mockedFs.ensureDir.mockResolvedValue(undefined); + mockedFs.emptyDir.mockResolvedValue(undefined); + mockedFs.writeJSON.mockResolvedValue(undefined); + mockedFs.appendFile.mockResolvedValue(undefined); + + const result = await assembler.assemble(); + const stagingPath = result.stagingDirectory; + + expect(stagingPath).toContain('.sfpm/tmp/builds'); + + // Should copy source (data directory) + expect(mockedFs.copy).toHaveBeenCalledWith( + path.join('/root', 'data'), + path.join(stagingPath, 'data'), + ); + + // Should write sfdx-project.json + expect(mockedFs.writeJSON).toHaveBeenCalledWith( + path.join(stagingPath, 'sfdx-project.json'), + expect.any(Object), + {spaces: 4}, + ); + }); + + it('should NOT copy org definitions for data packages', async () => { + mockedFs.pathExists.mockResolvedValue(true); + mockedFs.copy.mockResolvedValue(undefined); + mockedFs.ensureDir.mockResolvedValue(undefined); + mockedFs.emptyDir.mockResolvedValue(undefined); + mockedFs.writeJSON.mockResolvedValue(undefined); + mockedFs.appendFile.mockResolvedValue(undefined); + + await assembler.withOrgDefinition('config/project-scratch-def.json').assemble(); + + // Verify org definition was NOT copied (data packages skip 
OrgDefinitionStep) + const copyCalls = mockedFs.copy.mock.calls; + const orgDefCopy = copyCalls.find((call: any[]) => + call[0].includes('project-scratch-def'), + ); + expect(orgDefCopy).toBeUndefined(); + }); +}); diff --git a/packages/core/test/package/installers/data-installer-registry.test.ts b/packages/core/test/package/installers/data-installer-registry.test.ts new file mode 100644 index 0000000..c0b547b --- /dev/null +++ b/packages/core/test/package/installers/data-installer-registry.test.ts @@ -0,0 +1,43 @@ +import {beforeEach, describe, expect, it, vi} from 'vitest'; +import {InstallerRegistry} from '../../../src/package/installers/installer-registry.js'; +import {PackageType} from '../../../src/types/package.js'; +import SfpmPackage from '../../../src/package/sfpm-package.js'; + +describe('InstallerRegistry — Data type support', () => { + beforeEach(() => { + (InstallerRegistry as any).installers = new Map(); + }); + + it('should accept PackageType.Data for registration', () => { + class DataInstaller { + constructor(targetOrg: string, sfpmPackage: SfpmPackage) {} + async connect(username: string): Promise {} + async exec(): Promise {} + } + + InstallerRegistry.register(PackageType.Data, DataInstaller as any); + + const installer = InstallerRegistry.getInstaller(PackageType.Data); + expect(installer).toBe(DataInstaller); + }); + + it('should allow data and source installers to coexist', () => { + class DataInstaller { + constructor(targetOrg: string, sfpmPackage: SfpmPackage) {} + async connect(username: string): Promise {} + async exec(): Promise {} + } + + class SourceInstaller { + constructor(targetOrg: string, sfpmPackage: SfpmPackage) {} + async connect(username: string): Promise {} + async exec(): Promise {} + } + + InstallerRegistry.register(PackageType.Data, DataInstaller as any); + InstallerRegistry.register(PackageType.Source, SourceInstaller as any); + + expect(InstallerRegistry.getInstaller(PackageType.Data)).toBe(DataInstaller); + 
expect(InstallerRegistry.getInstaller(PackageType.Source)).toBe(SourceInstaller); + }); +}); diff --git a/packages/core/test/package/sfpm-data-package.test.ts b/packages/core/test/package/sfpm-data-package.test.ts new file mode 100644 index 0000000..3ed2371 --- /dev/null +++ b/packages/core/test/package/sfpm-data-package.test.ts @@ -0,0 +1,84 @@ +import {afterEach, beforeEach, describe, expect, it, vi} from 'vitest'; +import path from 'path'; +import fs from 'fs-extra'; +import os from 'os'; + +import {SfpmDataPackage} from '../../src/package/sfpm-package.js'; +import {PackageType} from '../../src/types/package.js'; + +describe('SfpmDataPackage', () => { + let tmpDir: string; + let dataPackage: SfpmDataPackage; + + async function createTmpProject(): Promise { + const dir = path.join(os.tmpdir(), `sfpm-data-pkg-test-${Date.now()}-${Math.random().toString(36).slice(2)}`); + const dataDir = path.join(dir, 'data'); + await fs.ensureDir(dataDir); + await fs.writeFile(path.join(dataDir, 'export.json'), JSON.stringify({objects: [{objectName: 'Account', query: 'SELECT Id FROM Account', operation: 'Upsert'}]})); + await fs.writeFile(path.join(dataDir, 'Account.csv'), 'Id,Name\n001,Acme'); + return dir; + } + + beforeEach(async () => { + tmpDir = await createTmpProject(); + dataPackage = new SfpmDataPackage('my-data', tmpDir); + dataPackage.packageDefinition = { + package: 'my-data', + path: 'data', + versionNumber: '1.0.0.0', + type: PackageType.Data, + } as any; + }); + + it('should set package type to Data', () => { + expect(dataPackage.type).toBe(PackageType.Data); + }); + + it('should resolve dataDirectory from project source', () => { + expect(dataPackage.dataDirectory).toBe(path.join(tmpDir, 'data')); + }); + + it('should resolve dataDirectory from staging when staged', () => { + dataPackage.stagingDirectory = '/staging/area'; + expect(dataPackage.dataDirectory).toBe(path.join('/staging/area', 'data')); + }); + + it('should throw if no path defined', () => { + const 
pkg = new SfpmDataPackage('no-path', tmpDir); + expect(() => pkg.dataDirectory).toThrow('must have a path defined'); + }); + + it('should calculate deterministic source hash', async () => { + const hash1 = await dataPackage.calculateSourceHash(); + const hash2 = await dataPackage.calculateSourceHash(); + + expect(hash1).toBe(hash2); + expect(hash1).toHaveLength(64); + expect(dataPackage.sourceHash).toBe(hash1); + }); + + it('should count files in data directory', async () => { + const count = await dataPackage.countFiles(); + expect(count).toBe(2); // export.json + Account.csv + }); + + it('should produce correct toJson output', async () => { + const json = await dataPackage.toJson(); + + expect(json.identity.packageName).toBe('my-data'); + expect(json.identity.packageType).toBe(PackageType.Data); + expect(json.content.dataDirectory).toBe('data'); + expect(json.content.fileCount).toBe(2); + expect(json.source).toBeDefined(); + expect(json.orchestration).toBeDefined(); + }); + + it('should expose versionNumber for DataDeployable', () => { + dataPackage.version = '1.0.0.1'; + expect(dataPackage.versionNumber).toBe('1.0.0-1'); + }); + + afterEach(async () => { + if (tmpDir) await fs.remove(tmpDir).catch(() => {}); + }); +}); diff --git a/packages/core/test/utils/directory-hasher.test.ts b/packages/core/test/utils/directory-hasher.test.ts new file mode 100644 index 0000000..f58ef91 --- /dev/null +++ b/packages/core/test/utils/directory-hasher.test.ts @@ -0,0 +1,84 @@ +import {describe, expect, it} from 'vitest'; +import {DirectoryHasher} from '../../src/utils/directory-hasher.js'; +import fs from 'fs-extra'; +import path from 'path'; +import os from 'os'; + +describe('DirectoryHasher', () => { + let tmpDir: string; + + async function createTmpDir(): Promise { + const dir = path.join(os.tmpdir(), `dir-hasher-test-${Date.now()}-${Math.random().toString(36).slice(2)}`); + await fs.ensureDir(dir); + return dir; + } + + it('should produce a deterministic hash for the same 
contents', async () => { + tmpDir = await createTmpDir(); + await fs.writeFile(path.join(tmpDir, 'file1.csv'), 'Id,Name\n001,Account1'); + await fs.writeFile(path.join(tmpDir, 'export.json'), '{"objects":[]}'); + + const hash1 = await DirectoryHasher.calculate(tmpDir); + const hash2 = await DirectoryHasher.calculate(tmpDir); + + expect(hash1).toBe(hash2); + expect(hash1).toHaveLength(64); // SHA-256 hex + + await fs.remove(tmpDir); + }); + + it('should produce different hashes for different contents', async () => { + const dir1 = await createTmpDir(); + const dir2 = await createTmpDir(); + + await fs.writeFile(path.join(dir1, 'data.csv'), 'Id,Name\n001,Foo'); + await fs.writeFile(path.join(dir2, 'data.csv'), 'Id,Name\n001,Bar'); + + const hash1 = await DirectoryHasher.calculate(dir1); + const hash2 = await DirectoryHasher.calculate(dir2); + + expect(hash1).not.toBe(hash2); + + await fs.remove(dir1); + await fs.remove(dir2); + }); + + it('should include file paths in hash (structural integrity)', async () => { + const dir1 = await createTmpDir(); + const dir2 = await createTmpDir(); + + // Same content but different filenames + await fs.writeFile(path.join(dir1, 'a.csv'), 'data'); + await fs.writeFile(path.join(dir2, 'b.csv'), 'data'); + + const hash1 = await DirectoryHasher.calculate(dir1); + const hash2 = await DirectoryHasher.calculate(dir2); + + expect(hash1).not.toBe(hash2); + + await fs.remove(dir1); + await fs.remove(dir2); + }); + + it('should handle nested directories', async () => { + tmpDir = await createTmpDir(); + const subDir = path.join(tmpDir, 'subdir'); + await fs.ensureDir(subDir); + await fs.writeFile(path.join(subDir, 'nested.csv'), 'Id\n001'); + await fs.writeFile(path.join(tmpDir, 'top.csv'), 'Id\n002'); + + const hash = await DirectoryHasher.calculate(tmpDir); + expect(hash).toHaveLength(64); + + await fs.remove(tmpDir); + }); + + it('should produce an empty-ish hash for an empty directory', async () => { + tmpDir = await createTmpDir(); + + 
const hash = await DirectoryHasher.calculate(tmpDir); + expect(hash).toHaveLength(64); // SHA-256 of empty input + + await fs.remove(tmpDir); + }); +}); diff --git a/packages/sfdmu/package.json b/packages/sfdmu/package.json new file mode 100644 index 0000000..133e9b3 --- /dev/null +++ b/packages/sfdmu/package.json @@ -0,0 +1,40 @@ +{ + "name": "@b64/sfpm-sfdmu", + "version": "0.1.0", + "description": "SFDMU adapter for SFPM data packages — builds and installs data using Salesforce Data Move Utility", + "type": "module", + "scripts": { + "build": "tsc -b", + "test": "vitest run" + }, + "keywords": [ + "sfpm", + "salesforce", + "sfdmu", + "data", + "migration" + ], + "author": "b64", + "license": "MIT", + "dependencies": { + "@b64/sfpm-core": "workspace:^0.1.0", + "@salesforce/core": "^8.24.2", + "fs-extra": "^11.3.3" + }, + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "files": [ + "dist" + ], + "exports": { + ".": { + "types": "./dist/index.d.ts", + "import": "./dist/index.js", + "require": "./dist/index.js" + } + }, + "devDependencies": { + "@types/fs-extra": "^11.0.4", + "vitest": "^4.0.17" + } +} diff --git a/packages/sfdmu/src/index.ts b/packages/sfdmu/src/index.ts new file mode 100644 index 0000000..08377aa --- /dev/null +++ b/packages/sfdmu/src/index.ts @@ -0,0 +1,16 @@ +// Side-effect imports: trigger decorator registration for data builder and installer +import './sfdmu-data-builder.js'; +import './sfdmu-data-installer.js'; + +// Public API +export {default as SfdmuDataBuilder} from './sfdmu-data-builder.js'; +export {default as SfdmuDataInstaller} from './sfdmu-data-installer.js'; +export {default as SfdmuImportStrategy} from './strategies/sfdmu-import-strategy.js'; +export type { + SfdmuExportJson, + SfdmuObjectConfig, + SfdmuObjectResult, + SfdmuOperation, + SfdmuRunOptions, + SfdmuRunResult, +} from './types.js'; diff --git a/packages/sfdmu/src/sfdmu-data-builder.ts b/packages/sfdmu/src/sfdmu-data-builder.ts new file mode 100644 index 
0000000..98f88b3 --- /dev/null +++ b/packages/sfdmu/src/sfdmu-data-builder.ts @@ -0,0 +1,215 @@ +import EventEmitter from 'node:events'; +import fs from 'fs-extra'; +import path from 'path'; + +import { + type Builder, + type BuildTask, + type Logger, + PackageType, + RegisterBuilder, + SfpmDataPackage, + type SfpmPackage, +} from '@b64/sfpm-core'; + +import type {SfdmuExportJson} from './types.js'; + +/** + * Builder for SFDMU-based data packages. + * + * This is where SFDMU-specific knowledge lives. The builder knows what + * valid data package contents look like (export.json + CSV files) and + * validates their presence during the build. + * + * Core's PackageBuilder handles staging (via PackageAssembler) and artifact + * assembly. This builder runs between those phases to perform SFDMU-specific + * validation and preparation. + */ +// eslint-disable-next-line new-cap +@RegisterBuilder(PackageType.Data) +export default class SfdmuDataBuilder extends EventEmitter implements Builder { + public postBuildTasks: BuildTask[] = []; + public preBuildTasks: BuildTask[] = []; + private readonly logger?: Logger; + private readonly sfpmPackage: SfpmDataPackage; + private readonly workingDirectory: string; + + constructor( + workingDirectory: string, + sfpmPackage: SfpmPackage, + logger?: Logger, + ) { + super(); + if (!(sfpmPackage instanceof SfpmDataPackage)) { + throw new TypeError(`SfdmuDataBuilder received incompatible package type: ${sfpmPackage.constructor.name}`); + } + + this.workingDirectory = workingDirectory; + this.sfpmPackage = sfpmPackage; + this.logger = logger; + } + + /** + * Data packages do not require a DevHub connection. + */ + public async connect(_username: string): Promise { + // No-op: data packages don't need DevHub + } + + /** + * Execute the build pipeline: pre-build tasks -> validate -> post-build tasks. 
+ */ + public async exec(): Promise { + await this.runPreBuildTasks(); + await this.validate(); + await this.runPostBuildTasks(); + } + + /** + * SFDMU-specific validation: + * 1. Verify export.json exists in the data directory + * 2. Parse and validate its basic structure + * 3. Count data files for metadata + */ + private async validate(): Promise { + this.emit('task:start', { + packageName: this.sfpmPackage.packageName, + taskName: 'SfdmuValidation', + taskType: 'build', + timestamp: new Date(), + }); + + const exportJsonPath = path.join(this.workingDirectory, 'export.json'); + + // Validate export.json exists + // eslint-disable-next-line no-await-in-loop + if (!await fs.pathExists(exportJsonPath)) { + const error = new Error( + `export.json not found at ${exportJsonPath}. ` + + 'SFDMU data packages must contain an export.json file in the package directory.', + ); + + this.emit('task:complete', { + packageName: this.sfpmPackage.packageName, + success: false, + taskName: 'SfdmuValidation', + taskType: 'build', + timestamp: new Date(), + }); + + throw error; + } + + // Parse and validate basic structure + try { + const exportJson: SfdmuExportJson = await fs.readJson(exportJsonPath); + + if (!exportJson.objects || !Array.isArray(exportJson.objects)) { + throw new Error('export.json must contain an "objects" array'); + } + + if (exportJson.objects.length === 0) { + throw new Error('export.json "objects" array must not be empty'); + } + + const sObjectNames = exportJson.objects.map(o => o.objectName); + this.logger?.info(`SFDMU export.json validated: ${sObjectNames.length} sObject(s) configured: ${sObjectNames.join(', ')}`); + + // Log CSV files found + // eslint-disable-next-line no-await-in-loop + const csvFiles = await this.findCsvFiles(); + if (csvFiles.length > 0) { + this.logger?.info(`Found ${csvFiles.length} CSV file(s): ${csvFiles.join(', ')}`); + } + } catch (error) { + if (error instanceof SyntaxError) { + throw new Error(`export.json contains invalid 
JSON: ${error.message}`); + } + + throw error; + } + + this.emit('task:complete', { + packageName: this.sfpmPackage.packageName, + success: true, + taskName: 'SfdmuValidation', + taskType: 'build', + timestamp: new Date(), + }); + } + + private async findCsvFiles(): Promise { + const files = await fs.readdir(this.workingDirectory); + return files.filter(f => f.toLowerCase().endsWith('.csv')); + } + + private async runPostBuildTasks(): Promise { + for (const task of this.postBuildTasks) { + const taskName = task.constructor.name; + + this.emit('task:start', { + packageName: this.sfpmPackage.packageName, + taskName, + taskType: 'post-build', + timestamp: new Date(), + }); + + try { + await task.exec(); + + this.emit('task:complete', { + packageName: this.sfpmPackage.packageName, + success: true, + taskName, + taskType: 'post-build', + timestamp: new Date(), + }); + } catch (error) { + this.emit('task:complete', { + packageName: this.sfpmPackage.packageName, + success: false, + taskName, + taskType: 'post-build', + timestamp: new Date(), + }); + + throw error; + } + } + } + + private async runPreBuildTasks(): Promise { + for (const task of this.preBuildTasks) { + const taskName = task.constructor.name; + + this.emit('task:start', { + packageName: this.sfpmPackage.packageName, + taskName, + taskType: 'pre-build', + timestamp: new Date(), + }); + + try { + await task.exec(); + + this.emit('task:complete', { + packageName: this.sfpmPackage.packageName, + success: true, + taskName, + taskType: 'pre-build', + timestamp: new Date(), + }); + } catch (error) { + this.emit('task:complete', { + packageName: this.sfpmPackage.packageName, + success: false, + taskName, + taskType: 'pre-build', + timestamp: new Date(), + }); + + throw error; + } + } + } +} diff --git a/packages/sfdmu/src/sfdmu-data-installer.ts b/packages/sfdmu/src/sfdmu-data-installer.ts new file mode 100644 index 0000000..e5d0463 --- /dev/null +++ b/packages/sfdmu/src/sfdmu-data-installer.ts @@ -0,0 +1,74 @@ 
+import {Org} from '@salesforce/core'; +import EventEmitter from 'node:events'; + +import { + type Installer, + type Logger, + PackageType, + RegisterInstaller, + SfpmDataPackage, + type SfpmPackage, +} from '@b64/sfpm-core'; + +import SfdmuImportStrategy from './strategies/sfdmu-import-strategy.js'; + +/** + * Installer for SFDMU-based data packages. + * + * Bridges the core installer interface with the SFDMU import strategy. + * Accepts an {@link SfpmDataPackage} (which implements {@link DataDeployable}) + * and delegates the actual data import to {@link SfdmuImportStrategy}. + */ +// eslint-disable-next-line new-cap +@RegisterInstaller(PackageType.Data) +export default class SfdmuDataInstaller extends EventEmitter implements Installer { + private readonly logger?: Logger; + private org?: Org; + private readonly sfpmPackage: SfpmDataPackage; + private readonly strategy: SfdmuImportStrategy; + private readonly targetOrg: string; + + constructor(targetOrg: string, sfpmPackage: SfpmPackage, logger?: Logger) { + super(); + if (!(sfpmPackage instanceof SfpmDataPackage)) { + throw new TypeError(`SfdmuDataInstaller received incompatible package type: ${(sfpmPackage as any).constructor.name}`); + } + + this.targetOrg = targetOrg; + this.sfpmPackage = sfpmPackage; + this.logger = logger; + + // Create the SFDMU strategy, forwarding events through this installer + this.strategy = new SfdmuImportStrategy(logger, this); + } + + /** + * Validate target org connectivity. + */ + public async connect(username: string): Promise { + this.emit('connection:start', { + targetOrg: username, + timestamp: new Date(), + }); + + this.org = await Org.create({aliasOrUsername: username}); + + if (!this.org.getConnection()) { + throw new Error('Unable to connect to target org'); + } + + this.emit('connection:complete', { + targetOrg: username, + timestamp: new Date(), + }); + } + + /** + * Execute the data import via SFDMU. 
+ */ + public async exec(): Promise { + this.logger?.info(`Installing data package: ${this.sfpmPackage.packageName}`); + const result = await this.strategy.execute(this.sfpmPackage, this.targetOrg); + return result; + } +} diff --git a/packages/sfdmu/src/strategies/sfdmu-import-strategy.ts b/packages/sfdmu/src/strategies/sfdmu-import-strategy.ts new file mode 100644 index 0000000..de5c0d5 --- /dev/null +++ b/packages/sfdmu/src/strategies/sfdmu-import-strategy.ts @@ -0,0 +1,241 @@ +import {execSync, type ExecSyncOptions} from 'node:child_process'; +import EventEmitter from 'node:events'; +import fs from 'fs-extra'; +import path from 'path'; + +import type {DataDeployable, Logger} from '@b64/sfpm-core'; +import {InstallationError} from '@b64/sfpm-core'; + +import type {SfdmuExportJson, SfdmuRunOptions, SfdmuRunResult} from '../types.js'; + +/** + * Strategy for importing data into a Salesforce org using SFDMU. + * + * Consumes the content-agnostic {@link DataDeployable} interface from core, + * then applies SFDMU-specific knowledge to interpret the directory contents + * and invoke the SFDMU tool. + * + * Prefers CLI invocation (`sf sfdmu run`) as the most stable integration path. + * Can be extended in the future to support SFDMU's Node API if available. + */ +export default class SfdmuImportStrategy extends EventEmitter { + private readonly logger?: Logger; + + constructor(logger?: Logger, parentEmitter?: EventEmitter) { + super(); + this.logger = logger; + + // Forward events to the parent emitter (installer) if provided + if (parentEmitter) { + this.on('data-import:start', (data) => parentEmitter.emit('data-import:start', data)); + this.on('data-import:progress', (data) => parentEmitter.emit('data-import:progress', data)); + this.on('data-import:complete', (data) => parentEmitter.emit('data-import:complete', data)); + } + } + + /** + * Execute the SFDMU import. 
+ * + * @param dataDeployable - The data package to deploy (provides dataDirectory) + * @param targetOrg - Target org alias or username + * @returns Result of the SFDMU run + */ + public async execute(dataDeployable: DataDeployable, targetOrg: string): Promise { + const {dataDirectory, packageName} = dataDeployable; + const startTime = Date.now(); + + // Read export.json to understand what we're deploying + const exportJsonPath = path.join(dataDirectory, 'export.json'); + const exportJson: SfdmuExportJson = await fs.readJson(exportJsonPath); + const sObjectNames = exportJson.objects.map(o => o.objectName); + + this.emit('data-import:start', { + objectCount: sObjectNames.length, + objects: sObjectNames, + packageName, + timestamp: new Date(), + }); + + this.logger?.info(`Starting SFDMU import for ${packageName}: ${sObjectNames.length} sObject(s) to ${targetOrg}`); + + const runOptions: SfdmuRunOptions = { + noprompt: true, + path: dataDirectory, + sourceusername: 'csvfile', + targetusername: targetOrg, + ...(exportJson.apiVersion ? {apiVersion: exportJson.apiVersion} : {}), + }; + + try { + const result = await this.runSfdmu(runOptions, packageName); + + this.emit('data-import:complete', { + duration: result.duration, + objectsProcessed: result.objectsProcessed, + packageName, + success: result.success, + timestamp: new Date(), + }); + + if (!result.success) { + const failedObjects = result.objectResults + .filter(r => !r.success) + .map(r => `${r.objectName}: ${r.errorMessage}`) + .join('; '); + + throw new InstallationError(packageName, targetOrg, `SFDMU import failed for: ${failedObjects}`, { + cause: new Error(failedObjects), + }); + } + + this.logger?.info(`SFDMU import completed for ${packageName} in ${result.duration}ms`); + return result; + } catch (error) { + if (error instanceof InstallationError) { + throw error; + } + + throw new InstallationError( + packageName, + targetOrg, + `SFDMU import failed: ${error instanceof Error ? 
error.message : String(error)}`, + {cause: error instanceof Error ? error : new Error(String(error))}, + ); + } + } + + /** + * Invoke SFDMU via the Salesforce CLI. + * + * Uses `sf sfdmu run` as the primary invocation path. + * Falls back to `sfdx sfdmu:run` for legacy installations. + */ + private async runSfdmu(options: SfdmuRunOptions, packageName: string): Promise { + const startTime = Date.now(); + + // Build the CLI command + const args = [ + `--sourceusername "${options.sourceusername}"`, + `--targetusername "${options.targetusername}"`, + `--path "${options.path}"`, + ]; + + if (options.apiVersion) { + args.push(`--apiversion "${options.apiVersion}"`); + } + + if (options.noprompt) { + args.push('--noprompt'); + } + + if (options.concurrencyMode) { + args.push(`--concurrencymode "${options.concurrencyMode}"`); + } + + if (options.verbose) { + args.push('--verbose'); + } + + // Try `sf sfdmu run` first, fall back to `sfdx sfdmu:run` + const sfCommand = `sf sfdmu run ${args.join(' ')}`; + const sfdxCommand = `sfdx sfdmu:run ${args.join(' ')}`; + + const execOptions: ExecSyncOptions = { + cwd: options.path, + encoding: 'utf8', + timeout: 600_000, // 10 minute timeout + stdio: ['pipe', 'pipe', 'pipe'], + }; + + this.logger?.debug(`Executing: ${sfCommand}`); + + let rawOutput: string; + + try { + // eslint-disable-next-line unicorn/explicit-length-check + rawOutput = execSync(sfCommand, execOptions) as string; + } catch (sfError: any) { + // If `sf` command not found, try `sfdx` + if (sfError.status === 127 || sfError.message?.includes('command not found') || sfError.message?.includes('ENOENT')) { + this.logger?.debug(`sf command not available, falling back to: ${sfdxCommand}`); + try { + rawOutput = execSync(sfdxCommand, execOptions) as string; + } catch (sfdxError: any) { + if (sfdxError.status === 127 || sfdxError.message?.includes('command not found') || sfdxError.message?.includes('ENOENT')) { + throw new Error( + 'SFDMU is not installed. 
Install it with: sf plugins install sfdmu', + ); + } + + throw new Error( + `SFDMU execution failed:\n${sfdxError.stderr || sfdxError.stdout || sfdxError.message}`, + ); + } + } else { + // `sf` is available but the command failed + throw new Error( + `SFDMU execution failed:\n${sfError.stderr || sfError.stdout || sfError.message}`, + ); + } + } + + const duration = Date.now() - startTime; + + // Parse the output to extract results + return this.parseOutput(rawOutput, duration); + } + + /** + * Parse SFDMU CLI output to extract structured results. + * + * SFDMU output parsing is best-effort — the tool's output format + * is not strictly guaranteed. We extract what we can and fall back + * to a generic success/failure based on the process exit code. + */ + private parseOutput(rawOutput: string, duration: number): SfdmuRunResult { + // Basic parsing — SFDMU doesn't have a structured JSON output mode. + // We consider the run successful if the process exited cleanly (no exception above). + const objectResults: SfdmuRunResult['objectResults'] = []; + + // Try to extract per-object results from output lines + const lines = rawOutput.split('\n'); + let objectsProcessed = 0; + + for (const line of lines) { + // Look for lines like: "Account -- Upserted: 150 records" + const match = line.match(/(\w+)\s+--\s+(\w+):\s+(\d+)\s+records?/i); + if (match) { + objectsProcessed++; + objectResults.push({ + errorMessage: undefined, + objectName: match[1], + operation: match[2] as any, + recordsFailed: 0, + recordsProcessed: Number.parseInt(match[3], 10), + success: true, + }); + } + + // Look for error lines + const errorMatch = line.match(/(\w+)\s+--\s+ERROR:\s+(.+)/i); + if (errorMatch) { + objectResults.push({ + errorMessage: errorMatch[2], + objectName: errorMatch[1], + operation: 'Readonly', + recordsFailed: 0, + recordsProcessed: 0, + success: false, + }); + } + } + + return { + duration, + objectResults, + objectsProcessed: objectsProcessed || objectResults.length, + 
rawOutput, + success: !objectResults.some(r => !r.success), + }; + } +} diff --git a/packages/sfdmu/src/types.ts b/packages/sfdmu/src/types.ts new file mode 100644 index 0000000..1559806 --- /dev/null +++ b/packages/sfdmu/src/types.ts @@ -0,0 +1,106 @@ +/** + * SFDMU-specific type definitions. + * + * These types model the SFDMU tool's configuration and execution concerns. + * They are intentionally confined to this adapter package and never leak into core. + */ + +/** + * Represents a single sObject entry in SFDMU's export.json. + */ +export interface SfdmuObjectConfig { + /** The sObject API name (e.g., "Account", "Contact") */ + objectName: string; + /** SOQL query for filtering records */ + query: string; + /** Operation to perform: Insert, Update, Upsert, Delete, etc. */ + operation: SfdmuOperation; + /** External ID field for upsert operations */ + externalId?: string; + /** Whether to use CSV file as the data source for this object */ + useCSVValuesMapping?: boolean; + /** Field mapping overrides */ + fieldMapping?: Record; +} + +/** + * Root structure of SFDMU's export.json configuration file. + */ +export interface SfdmuExportJson { + /** Array of sObject configurations defining what data to move */ + objects: SfdmuObjectConfig[]; + /** Polling interval in ms for async operations */ + pollingIntervalMs?: number; + /** Bulk API threshold — switch to Bulk API when record count exceeds this */ + bulkThreshold?: number; + /** API version to use for Salesforce calls */ + apiVersion?: string; + /** Whether to create target sObjects if they don't exist */ + createTargetCSVFiles?: boolean; + /** Concurrency mode for Bulk API */ + bulkApiV1BatchSize?: number; + /** Allow field truncation on deploy */ + allOrNone?: boolean; +} + +/** + * SFDMU operation types. 
+ */ +export type SfdmuOperation = + | 'Insert' + | 'Update' + | 'Upsert' + | 'Merge' + | 'Delete' + | 'DeleteSource' + | 'DeleteHierarchy' + | 'HardDelete' + | 'Readonly'; + +/** + * Options for running the SFDMU import strategy. + */ +export interface SfdmuRunOptions { + /** Absolute path to the directory containing export.json and CSV files */ + path: string; + /** Target org alias or username */ + targetusername: string; + /** Source: "csvfile" for CSV-to-org, or an org alias for org-to-org */ + sourceusername: string; + /** API version override */ + apiVersion?: string; + /** Verbose output */ + verbose?: boolean; + /** Concurrency mode override */ + concurrencyMode?: 'Serial' | 'Parallel'; + /** Whether to suppress prompts */ + noprompt?: boolean; +} + +/** + * Result of an SFDMU run. + */ +export interface SfdmuRunResult { + /** Whether the run completed successfully */ + success: boolean; + /** Number of sObjects processed */ + objectsProcessed: number; + /** Per-object results */ + objectResults: SfdmuObjectResult[]; + /** Raw output from the SFDMU process */ + rawOutput?: string; + /** Duration in milliseconds */ + duration: number; +} + +/** + * Per-object result from an SFDMU run. 
+ */ +export interface SfdmuObjectResult { + objectName: string; + operation: SfdmuOperation; + recordsProcessed: number; + recordsFailed: number; + success: boolean; + errorMessage?: string; +} diff --git a/packages/sfdmu/test/sfdmu-data-builder.test.ts b/packages/sfdmu/test/sfdmu-data-builder.test.ts new file mode 100644 index 0000000..b409a41 --- /dev/null +++ b/packages/sfdmu/test/sfdmu-data-builder.test.ts @@ -0,0 +1,156 @@ +import {afterEach, beforeEach, describe, expect, it, vi} from 'vitest'; +import fs from 'fs-extra'; +import path from 'path'; +import os from 'os'; + +// Mock @b64/sfpm-core to avoid full dependency resolution in unit tests +vi.mock('@b64/sfpm-core', () => { + class MockSfpmPackage { + _metadata: any = {identity: {packageName: '', packageType: 'data'}, source: {}, content: {}, orchestration: {}, validation: {}}; + packageName: string; + projectDirectory: string; + stagingDirectory?: string; + _packageDefinition?: any; + + constructor(name: string, dir: string) { + this.packageName = name; + this.projectDirectory = dir; + this._metadata.identity.packageName = name; + } + + get packageDefinition() { return this._packageDefinition; } + set packageDefinition(val: any) { this._packageDefinition = val; } + } + + class MockSfpmDataPackage extends MockSfpmPackage { + get dataDirectory() { + const pkgPath = this._packageDefinition?.path; + if (!pkgPath) throw new Error('must have a path'); + return this.stagingDirectory + ? 
`${this.stagingDirectory}/${pkgPath}` + : `${this.projectDirectory}/${pkgPath}`; + } + + get versionNumber() { return '1.0.0.1'; } + } + + return { + SfpmDataPackage: MockSfpmDataPackage, + SfpmPackage: MockSfpmPackage, + PackageType: {Data: 'data', Source: 'source', Unlocked: 'unlocked', Managed: 'managed', Diff: 'diff'}, + RegisterBuilder: () => (constructor: any) => constructor, + RegisterInstaller: () => (constructor: any) => constructor, + BuilderRegistry: {register: vi.fn(), getBuilder: vi.fn()}, + InstallerRegistry: {register: vi.fn(), getInstaller: vi.fn()}, + InstallationError: class extends Error { + constructor(pkg: string, org: string, msg: string, opts?: any) { + super(msg); + if (opts?.cause) this.cause = opts.cause; + } + }, + Logger: undefined, + }; +}); + +import SfdmuDataBuilder from '../../src/sfdmu-data-builder.js'; +import {SfpmDataPackage} from '@b64/sfpm-core'; + +describe('SfdmuDataBuilder', () => { + let tmpDir: string; + let dataPackage: any; + + async function createTmpProject(): Promise { + const dir = path.join(os.tmpdir(), `sfdmu-builder-test-${Date.now()}-${Math.random().toString(36).slice(2)}`); + const dataDir = path.join(dir, 'data'); + await fs.ensureDir(dataDir); + return dir; + } + + beforeEach(async () => { + tmpDir = await createTmpProject(); + dataPackage = new SfpmDataPackage('my-data', tmpDir); + (dataPackage as any)._packageDefinition = { + package: 'my-data', + path: 'data', + type: 'data', + versionNumber: '1.0.0.0', + }; + }); + + afterEach(async () => { + if (tmpDir) await fs.remove(tmpDir).catch(() => {}); + }); + + it('should throw TypeError for non-data packages', () => { + const badPackage = {constructor: {name: 'SfpmSourcePackage'}} as any; + expect(() => new SfdmuDataBuilder('/tmp', badPackage)).toThrow('incompatible package type'); + }); + + it('should not throw on connect (no-op)', async () => { + const builder = new SfdmuDataBuilder(path.join(tmpDir, 'data'), dataPackage); + await 
expect(builder.connect('devhub')).resolves.not.toThrow(); + }); + + it('should throw when export.json is missing', async () => { + const builder = new SfdmuDataBuilder(path.join(tmpDir, 'data'), dataPackage); + await expect(builder.exec()).rejects.toThrow('export.json not found'); + }); + + it('should succeed when export.json is valid', async () => { + const dataDir = path.join(tmpDir, 'data'); + await fs.writeFile( + path.join(dataDir, 'export.json'), + JSON.stringify({ + objects: [{objectName: 'Account', query: 'SELECT Id FROM Account', operation: 'Upsert'}], + }), + ); + + const builder = new SfdmuDataBuilder(dataDir, dataPackage); + await expect(builder.exec()).resolves.not.toThrow(); + }); + + it('should throw when export.json has invalid JSON', async () => { + const dataDir = path.join(tmpDir, 'data'); + await fs.writeFile(path.join(dataDir, 'export.json'), 'not json'); + + const builder = new SfdmuDataBuilder(dataDir, dataPackage); + await expect(builder.exec()).rejects.toThrow('invalid JSON'); + }); + + it('should throw when export.json objects array is empty', async () => { + const dataDir = path.join(tmpDir, 'data'); + await fs.writeFile(path.join(dataDir, 'export.json'), JSON.stringify({objects: []})); + + const builder = new SfdmuDataBuilder(dataDir, dataPackage); + await expect(builder.exec()).rejects.toThrow('must not be empty'); + }); + + it('should throw when export.json has no objects key', async () => { + const dataDir = path.join(tmpDir, 'data'); + await fs.writeFile(path.join(dataDir, 'export.json'), JSON.stringify({foo: 'bar'})); + + const builder = new SfdmuDataBuilder(dataDir, dataPackage); + await expect(builder.exec()).rejects.toThrow('must contain an "objects" array'); + }); + + it('should emit task events during validation', async () => { + const dataDir = path.join(tmpDir, 'data'); + await fs.writeFile( + path.join(dataDir, 'export.json'), + JSON.stringify({ + objects: [{objectName: 'Account', query: 'SELECT Id FROM Account', operation: 
'Upsert'}], + }), + ); + + const builder = new SfdmuDataBuilder(dataDir, dataPackage); + const events: string[] = []; + + builder.on('task:start', () => events.push('start')); + builder.on('task:complete', () => events.push('complete')); + + await builder.exec(); + + expect(events).toContain('start'); + expect(events).toContain('complete'); + }); +}); diff --git a/packages/sfdmu/test/sfdmu-data-installer.test.ts b/packages/sfdmu/test/sfdmu-data-installer.test.ts new file mode 100644 index 0000000..9f32cf7 --- /dev/null +++ b/packages/sfdmu/test/sfdmu-data-installer.test.ts @@ -0,0 +1,99 @@ +import {beforeEach, describe, expect, it, vi} from 'vitest'; + +// Mock @salesforce/core +vi.mock('@salesforce/core', () => ({ + Org: { + create: vi.fn().mockResolvedValue({ + getConnection: vi.fn().mockReturnValue({}), + }), + }, +})); + +// Mock @b64/sfpm-core +vi.mock('@b64/sfpm-core', () => { + class MockSfpmPackage { + _metadata: any = {identity: {packageName: '', packageType: 'data'}, source: {}, content: {}, orchestration: {}, validation: {}}; + packageName: string; + projectDirectory: string; + stagingDirectory?: string; + _packageDefinition?: any; + + constructor(name: string, dir: string) { + this.packageName = name; + this.projectDirectory = dir; + this._metadata.identity.packageName = name; + } + + get packageDefinition() { return this._packageDefinition; } + set packageDefinition(val: any) { this._packageDefinition = val; } + } + + class MockSfpmDataPackage extends MockSfpmPackage { + get dataDirectory() { + const pkgPath = this._packageDefinition?.path; + if (!pkgPath) throw new Error('must have a path'); + return this.stagingDirectory + ? 
`${this.stagingDirectory}/${pkgPath}` + : `${this.projectDirectory}/${pkgPath}`; + } + + get versionNumber() { return '1.0.0.1'; } + } + + return { + SfpmDataPackage: MockSfpmDataPackage, + SfpmPackage: MockSfpmPackage, + PackageType: {Data: 'data', Source: 'source', Unlocked: 'unlocked', Managed: 'managed', Diff: 'diff'}, + RegisterBuilder: () => (constructor: any) => constructor, + RegisterInstaller: () => (constructor: any) => constructor, + BuilderRegistry: {register: vi.fn(), getBuilder: vi.fn()}, + InstallerRegistry: {register: vi.fn(), getInstaller: vi.fn()}, + InstallationError: class extends Error { + constructor(pkg: string, org: string, msg: string, opts?: any) { + super(msg); + if (opts?.cause) this.cause = opts.cause; + } + }, + Logger: undefined, + }; +}); + +import SfdmuDataInstaller from '../../src/sfdmu-data-installer.js'; +import {SfpmDataPackage} from '@b64/sfpm-core'; + +describe('SfdmuDataInstaller', () => { + let dataPackage: any; + + beforeEach(() => { + dataPackage = new SfpmDataPackage('my-data', '/project'); + (dataPackage as any)._packageDefinition = { + package: 'my-data', + path: 'data', + type: 'data', + versionNumber: '1.0.0.0', + }; + }); + + it('should throw TypeError for non-data packages', () => { + const badPackage = {constructor: {name: 'SfpmSourcePackage'}} as any; + expect(() => new SfdmuDataInstaller('my-org', badPackage)).toThrow('incompatible package type'); + }); + + it('should create installer for data packages', () => { + const installer = new SfdmuDataInstaller('my-org', dataPackage); + expect(installer).toBeDefined(); + }); + + it('should connect to org', async () => { + const installer = new SfdmuDataInstaller('my-org', dataPackage); + const events: string[] = []; + + installer.on('connection:start', () => events.push('connection:start')); + installer.on('connection:complete', () => events.push('connection:complete')); + + await installer.connect('my-org'); + + expect(events).toContain('connection:start'); + 
expect(events).toContain('connection:complete'); + }); +}); diff --git a/packages/sfdmu/test/strategies/sfdmu-import-strategy.test.ts b/packages/sfdmu/test/strategies/sfdmu-import-strategy.test.ts new file mode 100644 index 0000000..8a58162 --- /dev/null +++ b/packages/sfdmu/test/strategies/sfdmu-import-strategy.test.ts @@ -0,0 +1,143 @@ +import {afterEach, beforeEach, describe, expect, it, vi} from 'vitest'; +import {execSync} from 'node:child_process'; +import fs from 'fs-extra'; +import path from 'path'; +import os from 'os'; + +// Mock child_process +vi.mock('node:child_process', () => ({ + execSync: vi.fn(), +})); + +// Mock @b64/sfpm-core +vi.mock('@b64/sfpm-core', () => ({ + InstallationError: class extends Error { + constructor(pkg: string, org: string, msg: string, opts?: any) { + super(msg); + if (opts?.cause) this.cause = opts.cause; + } + }, + Logger: undefined, +})); + +import SfdmuImportStrategy from '../../src/strategies/sfdmu-import-strategy.js'; +import type {DataDeployable} from '@b64/sfpm-core'; + +const mockedExecSync = vi.mocked(execSync); + +describe('SfdmuImportStrategy', () => { + let tmpDir: string; + let dataDeployable: DataDeployable; + + async function createTmpData(): Promise { + const dir = path.join(os.tmpdir(), `sfdmu-strategy-test-${Date.now()}-${Math.random().toString(36).slice(2)}`); + await fs.ensureDir(dir); + await fs.writeFile( + path.join(dir, 'export.json'), + JSON.stringify({ + objects: [ + {objectName: 'Account', query: 'SELECT Id, Name FROM Account', operation: 'Upsert'}, + {objectName: 'Contact', query: 'SELECT Id FROM Contact', operation: 'Insert'}, + ], + }), + ); + return dir; + } + + beforeEach(async () => { + vi.clearAllMocks(); + tmpDir = await createTmpData(); + dataDeployable = { + dataDirectory: tmpDir, + packageName: 'my-data', + versionNumber: '1.0.0.1', + }; + }); + + afterEach(async () => { + if (tmpDir) await fs.remove(tmpDir).catch(() => {}); + }); + + it('should invoke sf sfdmu run with correct 
arguments', async () => {
    mockedExecSync.mockReturnValue('Account -- Upserted: 100 records\nContact -- Inserted: 50 records\n');

    const strategy = new SfdmuImportStrategy();
    const result = await strategy.execute(dataDeployable, 'my-sandbox');

    // Exactly one shell invocation; every user-supplied value appears double-quoted.
    expect(mockedExecSync).toHaveBeenCalledTimes(1);
    const command = mockedExecSync.mock.calls[0][0] as string;
    expect(command).toContain('sf sfdmu run');
    expect(command).toContain('--sourceusername "csvfile"');
    expect(command).toContain('--targetusername "my-sandbox"');
    expect(command).toContain(`--path "${tmpDir}"`);
    expect(command).toContain('--noprompt');

    expect(result.success).toBe(true);
    expect(result.objectsProcessed).toBe(2);
  });

  it('should fall back to sfdx when sf command not found', async () => {
    // First call (sf) fails with command not found
    mockedExecSync.mockImplementationOnce(() => {
      const error: any = new Error('command not found');
      error.status = 127;
      throw error;
    });
    // Second call (sfdx) succeeds
    mockedExecSync.mockReturnValueOnce('Account -- Upserted: 10 records\n');

    const strategy = new SfdmuImportStrategy();
    const result = await strategy.execute(dataDeployable, 'my-sandbox');

    expect(mockedExecSync).toHaveBeenCalledTimes(2);
    const sfdxCommand = mockedExecSync.mock.calls[1][0] as string;
    expect(sfdxCommand).toContain('sfdx sfdmu:run');
    expect(result.success).toBe(true);
  });

  it('should throw InstallationError when both sf and sfdx fail with ENOENT', async () => {
    // Both CLI entry points missing -> "not installed" guidance is surfaced.
    mockedExecSync.mockImplementation(() => {
      const error: any = new Error('command not found');
      error.status = 127;
      throw error;
    });

    const strategy = new SfdmuImportStrategy();
    await expect(strategy.execute(dataDeployable, 'my-sandbox')).rejects.toThrow('SFDMU is not installed');
  });

  it('should throw InstallationError when sfdmu execution fails', async () => {
    const error: any = new Error('Some SFDMU error');
    error.stderr = 'SFDMU error details';
    mockedExecSync.mockImplementation(() => {
      throw error;
    });

    const strategy = new SfdmuImportStrategy();
    // Message prefix comes from execute()'s InstallationError wrapper.
    await expect(strategy.execute(dataDeployable, 'my-sandbox')).rejects.toThrow('SFDMU import failed');
  });

  it('should emit data-import events', async () => {
    mockedExecSync.mockReturnValue('Account -- Upserted: 100 records\n');

    const strategy = new SfdmuImportStrategy();
    const events: string[] = [];

    strategy.on('data-import:start', () => events.push('start'));
    strategy.on('data-import:complete', () => events.push('complete'));

    await strategy.execute(dataDeployable, 'my-sandbox');

    expect(events).toEqual(['start', 'complete']);
  });

  it('should throw for per-object errors with detail in message', async () => {
    // The "Opportunity -- ERROR: ..." line becomes a failed object result,
    // which execute() folds into the thrown error's message.
    mockedExecSync.mockReturnValue(
      'Account -- Upserted: 100 records\nContact -- Inserted: 50 records\nOpportunity -- ERROR: Field mapping failed\n',
    );

    const strategy = new SfdmuImportStrategy();

    await expect(strategy.execute(dataDeployable, 'my-sandbox')).rejects.toThrow('Opportunity: Field mapping failed');
  });
});
diff --git a/packages/sfdmu/tsconfig.json b/packages/sfdmu/tsconfig.json
new file mode 100644
index 0000000..7f43f55
--- /dev/null
+++ b/packages/sfdmu/tsconfig.json
@@ -0,0 +1,16 @@
{
  "compilerOptions": {
    "declaration": true,
    "module": "Node16",
    "outDir": "dist",
    "rootDir": "src",
    "strict": true,
    "target": "es2022",
    "moduleResolution": "node16",
    "composite": true,
    "skipLibCheck": true,
    "sourceMap": true
  },
  "include": ["./src/**/*"],
  "exclude": ["node_modules", "dist"]
}
diff --git a/packages/sfdmu/vitest.config.ts b/packages/sfdmu/vitest.config.ts
new file mode 100644
index 0000000..c458baf
--- /dev/null
+++ b/packages/sfdmu/vitest.config.ts
@@ -0,0 +1,7 @@
// Minimal vitest configuration: plain node environment, library defaults otherwise.
import {defineConfig} from 'vitest/config';

export default defineConfig({
  test: {
    environment: 'node',
  },
});