diff --git a/change/change-3a1c5c00-cc62-4ec6-8fe5-cb3d6d2c81ff.json b/change/change-3a1c5c00-cc62-4ec6-8fe5-cb3d6d2c81ff.json new file mode 100644 index 000000000..67fbf4159 --- /dev/null +++ b/change/change-3a1c5c00-cc62-4ec6-8fe5-cb3d6d2c81ff.json @@ -0,0 +1,60 @@ +{ + "changes": [ + { + "type": "patch", + "comment": "Add/enforce explicit visibility modifiers", + "packageName": "@lage-run/cache", + "email": "elcraig@microsoft.com", + "dependentChangeType": "patch" + }, + { + "type": "patch", + "comment": "Add/enforce explicit visibility modifiers", + "packageName": "@lage-run/cli", + "email": "elcraig@microsoft.com", + "dependentChangeType": "patch" + }, + { + "type": "patch", + "comment": "Add/enforce explicit visibility modifiers", + "packageName": "@lage-run/logger", + "email": "elcraig@microsoft.com", + "dependentChangeType": "patch" + }, + { + "type": "patch", + "comment": "Add/enforce explicit visibility modifiers", + "packageName": "@lage-run/reporters", + "email": "elcraig@microsoft.com", + "dependentChangeType": "patch" + }, + { + "type": "patch", + "comment": "Add/enforce explicit visibility modifiers", + "packageName": "@lage-run/runners", + "email": "elcraig@microsoft.com", + "dependentChangeType": "patch" + }, + { + "type": "patch", + "comment": "Add/enforce explicit visibility modifiers", + "packageName": "@lage-run/scheduler", + "email": "elcraig@microsoft.com", + "dependentChangeType": "patch" + }, + { + "type": "patch", + "comment": "Add/enforce explicit visibility modifiers", + "packageName": "@lage-run/target-graph", + "email": "elcraig@microsoft.com", + "dependentChangeType": "patch" + }, + { + "type": "patch", + "comment": "Add/enforce explicit visibility modifiers", + "packageName": "@lage-run/worker-threads-pool", + "email": "elcraig@microsoft.com", + "dependentChangeType": "patch" + } + ] +} \ No newline at end of file diff --git a/packages/cache/src/providers/BackfillCacheProvider.ts b/packages/cache/src/providers/BackfillCacheProvider.ts index 3c122d020..92b6349cf 100644 --- a/packages/cache/src/providers/BackfillCacheProvider.ts +++ b/packages/cache/src/providers/BackfillCacheProvider.ts @@ -42,7 +42,7 @@ export class BackfillCacheProvider implements CacheProvider { this.backfillLogger = createBackfillLogger(); } - async fetch(hash: string, target: Target): Promise { + public async fetch(hash: string, target: Target): Promise { const { logger } = this.options; if (!hash) { @@ -67,7 +67,7 @@ export class BackfillCacheProvider implements CacheProvider { } } - async put(hash: string, target: Target): Promise { + public async put(hash: string, target: Target): Promise { const { logger } = this.options; if (!hash) { @@ -91,11 +91,11 @@ export class BackfillCacheProvider implements CacheProvider { } } - async clear(concurrency = 10): Promise { + public async clear(concurrency = 10): Promise { return this.purge(0, concurrency); } - async purge(prunePeriod = 30, concurrency = 10): Promise { + public async purge(prunePeriod = 30, concurrency = 10): Promise { const now = new Date(); const cacheTypes = ["cache", "logs"]; @@ -127,7 +127,7 @@ export class BackfillCacheProvider implements CacheProvider { ); } - getCachePath(packagePath: string, hash: string): string { + private getCachePath(packagePath: string, hash: string): string { return path.relative(packagePath, getCacheDirectory(this.options.root, hash)); } } diff --git a/packages/cache/src/providers/RemoteFallbackCacheProvider.ts b/packages/cache/src/providers/RemoteFallbackCacheProvider.ts index 
9428289cb..91279f0f6 100644 --- a/packages/cache/src/providers/RemoteFallbackCacheProvider.ts +++ b/packages/cache/src/providers/RemoteFallbackCacheProvider.ts @@ -24,7 +24,7 @@ export class RemoteFallbackCacheProvider implements CacheProvider { constructor(private options: RemoteFallbackCacheProviderOptions) {} - async fetch(hash: string, target: Target): Promise { + public async fetch(hash: string, target: Target): Promise { const { logger, remoteCacheProvider, localCacheProvider } = this.options; if (localCacheProvider) { @@ -48,7 +48,7 @@ export class RemoteFallbackCacheProvider implements CacheProvider { return RemoteFallbackCacheProvider.localHits[hash]; } - async put(hash: string, target: Target): Promise { + public async put(hash: string, target: Target): Promise { const { logger, remoteCacheProvider, localCacheProvider, writeRemoteCache } = this.options; const putPromises: Promise[] = []; @@ -80,14 +80,14 @@ export class RemoteFallbackCacheProvider implements CacheProvider { return hash in RemoteFallbackCacheProvider.localHits && RemoteFallbackCacheProvider.localHits[hash]; } - async clear(): Promise { + public async clear(): Promise { const { localCacheProvider } = this.options; if (localCacheProvider) { return localCacheProvider.clear(); } } - async purge(sinceDays: number): Promise { + public async purge(sinceDays: number): Promise { const { localCacheProvider } = this.options; if (localCacheProvider) { return localCacheProvider.purge(sinceDays); diff --git a/packages/cli/src/__tests__/simulateFileAccess.test.ts b/packages/cli/src/__tests__/simulateFileAccess.test.ts index 0f7fe6483..bc831381f 100644 --- a/packages/cli/src/__tests__/simulateFileAccess.test.ts +++ b/packages/cli/src/__tests__/simulateFileAccess.test.ts @@ -9,10 +9,10 @@ jest.mock("fs"); // Mock the logger const mockSilly = jest.fn(); class MockLogger extends Logger { - override silly = mockSilly; + public override silly = mockSilly; // do nothing - override log() {} - override stream() { + public override log() {} + public override stream() { return () => {}; } } diff --git a/packages/cli/src/commands/cache/runners/ClearCacheRunner.ts b/packages/cli/src/commands/cache/runners/ClearCacheRunner.ts index 453d0468d..10630967b 100644 --- a/packages/cli/src/commands/cache/runners/ClearCacheRunner.ts +++ b/packages/cli/src/commands/cache/runners/ClearCacheRunner.ts @@ -4,10 +4,10 @@ import path from "path"; import { rm, stat, unlink } from "fs/promises"; export class ClearCacheRunner implements TargetRunner { - async shouldRun(): Promise { + public async shouldRun(): Promise { return true; } - async run(runOptions: TargetRunnerOptions): Promise { + public async run(runOptions: TargetRunnerOptions): Promise { const { target } = runOptions; const { clearPaths } = target.options!; diff --git a/packages/cli/src/commands/cache/runners/PruneCacheRunner.ts b/packages/cli/src/commands/cache/runners/PruneCacheRunner.ts index 4d9a5c4d2..ffd9fc4dd 100644 --- a/packages/cli/src/commands/cache/runners/PruneCacheRunner.ts +++ b/packages/cli/src/commands/cache/runners/PruneCacheRunner.ts @@ -6,10 +6,10 @@ import { rm, stat, unlink } from "fs/promises"; const MS_IN_A_DAY = 1000 * 60 * 60 * 24; export class PruneCacheRunner implements TargetRunner { - async shouldRun(): Promise { + public async shouldRun(): Promise { return true; } - async run(runOptions: TargetRunnerOptions): Promise { + public async run(runOptions: TargetRunnerOptions): Promise { const { target } = runOptions; const { clearPaths, prunePeriod, now } = target.options!; 
diff --git a/packages/cli/src/commands/server/MemoryStream.ts b/packages/cli/src/commands/server/MemoryStream.ts index 33083c344..cb5eb5d46 100644 --- a/packages/cli/src/commands/server/MemoryStream.ts +++ b/packages/cli/src/commands/server/MemoryStream.ts @@ -8,15 +8,15 @@ export class MemoryStream extends Writable { this.chunks = []; } - _write(chunk: unknown, encoding: BufferEncoding): void { + public _write(chunk: unknown, encoding: BufferEncoding): void { this.chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk as any, encoding)); } - getData(): Buffer { + public getData(): Buffer { return Buffer.concat(this.chunks); } - toString(): string { + public toString(): string { return this.getData().toString(); } } diff --git a/packages/e2e-tests/jest.config.js b/packages/e2e-tests/jest.config.js index 2278faa83..98aef1f60 100644 --- a/packages/e2e-tests/jest.config.js +++ b/packages/e2e-tests/jest.config.js @@ -1 +1,6 @@ -module.exports = require("@lage-run/monorepo-scripts/config/jest.config.js"); +const baseConfig = require("@lage-run/monorepo-scripts/config/jest.config.js"); + +module.exports = { + ...baseConfig, + testTimeout: baseConfig.testTimeout * 2, +}; diff --git a/packages/e2e-tests/package.json b/packages/e2e-tests/package.json index 0753dd2c2..135eba45d 100644 --- a/packages/e2e-tests/package.json +++ b/packages/e2e-tests/package.json @@ -19,6 +19,7 @@ "dependencies": { "@lage-run/cli": "workspace:^", "@lage-run/globby": "workspace:^", + "@lage-run/monorepo-fixture": "workspace:^", "@lage-run/monorepo-scripts": "workspace:^", "@lage-run/scheduler-types": "workspace:^", "@lage-run/target-graph": "workspace:^", diff --git a/packages/e2e-tests/src/basic.test.ts b/packages/e2e-tests/src/basic.test.ts index ad6195ae1..e833fc02a 100644 --- a/packages/e2e-tests/src/basic.test.ts +++ b/packages/e2e-tests/src/basic.test.ts @@ -9,16 +9,16 @@ describe("basics", () => { repo = undefined; }); - it("basic test case", () => { + it("basic test case", async () => { repo = new Monorepo("basics"); - repo.init(); - repo.addPackage("a", ["b"]); - repo.addPackage("b"); + await repo.init(); + await repo.addPackage("a", ["b"]); + await repo.addPackage("b"); - repo.install(); + await repo.install(); - const results = repo.run("test"); + const results = await repo.run("test"); const output = results.stdout + results.stderr; const jsonOutput = parseNdJson(output); @@ -29,21 +29,21 @@ describe("basics", () => { expect(jsonOutput.find((entry) => filterEntry(entry.data, "a", "lint", "success"))).toBeFalsy(); }); - it("basic with missing script names - logging should not include those targets", () => { + it("basic with missing script names - logging should not include those targets", async () => { repo = new Monorepo("basics-missing-scripts"); - repo.init(); - repo.addPackage("a", ["b"]); - repo.addPackage("b", [], { + await repo.init(); + await repo.addPackage("a", ["b"]); + await repo.addPackage("b", [], { build: "node ./build.js", test: "node ./test.js", lint: "node ./lint.js", extra: "node ./extra.js", }); - repo.install(); + await repo.install(); - const results = repo.run("extra"); + const results = await repo.run("extra"); const output = results.stdout + results.stderr; const jsonOutput = parseNdJson(output); @@ -54,20 +54,20 @@ describe("basics", () => { expect(jsonOutput.find((entry) => filterEntry(entry.data, "a", "lint", "success"))).toBeFalsy(); }); - it("basic test case - with task args", () => { + it("basic test case - with task args", async () => { repo = new 
Monorepo("basics-with-task-args"); - repo.init(); - repo.addPackage("a", ["b"]); - repo.addPackage("b"); + await repo.init(); + await repo.addPackage("a", ["b"]); + await repo.addPackage("b"); - repo.install(); + await repo.install(); // run once without params - repo.run("test"); + await repo.run("test"); // run with some params, expected actual runs - const results = repo.run("test", ["--1", "--2"]); + const results = await repo.run("test", ["--1", "--2"]); const output = results.stdout + results.stderr; const jsonOutput = parseNdJson(output); @@ -78,7 +78,7 @@ describe("basics", () => { expect(jsonOutput.find((entry) => filterEntry(entry.data, "a", "lint", "success"))).toBeFalsy(); // run with some params, expected skips - const results2 = repo.run("test", ["--1", "--2"]); + const results2 = await repo.run("test", ["--1", "--2"]); const output2 = results2.stdout + results2.stderr; const jsonOutput2 = parseNdJson(output2); @@ -89,7 +89,7 @@ describe("basics", () => { expect(jsonOutput2.find((entry) => filterEntry(entry.data, "a", "lint", "skipped"))).toBeFalsy(); // run with some lage specific params, expected skips - const results3 = repo.run("test", ["--concurrency", "1"]); + const results3 = await repo.run("test", ["--concurrency", "1"]); const output3 = results3.stdout + results3.stderr; const jsonOutput3 = parseNdJson(output3); @@ -100,7 +100,7 @@ describe("basics", () => { expect(jsonOutput3.find((entry) => filterEntry(entry.data, "a", "lint", "skipped"))).toBeFalsy(); // run with some params AND lage specific params, expected skips - const results4 = repo.run("test", ["--1", "--2", "--concurrency", "1"]); + const results4 = await repo.run("test", ["--1", "--2", "--concurrency", "1"]); const output4 = results4.stdout + results4.stderr; const jsonOutput4 = parseNdJson(output4); @@ -111,16 +111,16 @@ describe("basics", () => { expect(jsonOutput4.find((entry) => filterEntry(entry.data, "a", "lint", "skipped"))).toBeFalsy(); }); - it("works in repo with spaces", () => { + it("works in repo with spaces", async () => { repo = new Monorepo("spaces why"); - repo.init(); - repo.addPackage("a", ["b"]); - repo.addPackage("b"); + await repo.init(); + await repo.addPackage("a", ["b"]); + await repo.addPackage("b"); - repo.install(); + await repo.install(); - const results = repo.run("test"); + const results = await repo.run("test"); const output = results.stdout + results.stderr; const jsonOutput = parseNdJson(output); diff --git a/packages/e2e-tests/src/basicFailure.test.ts b/packages/e2e-tests/src/basicFailure.test.ts index 4adee37ec..c87216dd9 100644 --- a/packages/e2e-tests/src/basicFailure.test.ts +++ b/packages/e2e-tests/src/basicFailure.test.ts @@ -5,20 +5,21 @@ describe("basic failure case where a dependent target has failed", () => { it("when a failure happens, halts all other targets", async () => { const repo = new Monorepo("basics-failure-halt-all"); - repo.init(); + await repo.init(); - repo.addPackage("a", ["b"]); - repo.addPackage("b", [], { + await repo.addPackage("a", ["b"]); + await repo.addPackage("b", [], { build: 'node -e "process.exit(1);"', }); - repo.addPackage("c"); - repo.install(); + await repo.addPackage("c"); + await repo.install(); let jsonOutput: any[] = []; let results: any; try { - repo.run("test"); + await repo.run("test"); + expect(true).toBe(false); // should not get here } catch (e) { results = e; } @@ -39,20 +40,21 @@ describe("basic failure case where a dependent target has failed", () => { it("when a failure happens in `--continue` mode, halts all other 
dependent targets but continue to build as much as possible", async () => { const repo = new Monorepo("basics-failure-continue"); - repo.init(); + await repo.init(); - repo.addPackage("a", ["b"]); - repo.addPackage("b", [], { + await repo.addPackage("a", ["b"]); + await repo.addPackage("b", [], { build: 'node -e "process.exit(1);"', }); - repo.addPackage("c"); - repo.install(); + await repo.addPackage("c"); + await repo.install(); let jsonOutput: any[] = []; let results: any; try { - repo.run("test", ["--continue"]); + await repo.run("test", ["--continue"]); + expect(true).toBe(false); // should not get here } catch (e) { results = e; } @@ -74,19 +76,20 @@ describe("basic failure case where a dependent target has failed", () => { expect.hasAssertions(); const repo = new Monorepo("basics-failure-exit-code"); - repo.init(); + await repo.init(); - repo.addPackage("a", ["b"]); - repo.addPackage("b", [], { + await repo.addPackage("a", ["b"]); + await repo.addPackage("b", [], { build: 'node -e "process.exit(1);"', }); - repo.addPackage("c"); - repo.addPackage("d"); - repo.addPackage("e"); - repo.install(); + await repo.addPackage("c"); + await repo.addPackage("d"); + await repo.addPackage("e"); + await repo.install(); try { - repo.run("test"); + await repo.run("test"); + expect(true).toBe(false); // should not get here } catch (e) { const results = e as any; expect(results.exitCode).not.toBe(0); diff --git a/packages/e2e-tests/src/bigapp.test.ts b/packages/e2e-tests/src/bigapp.test.ts index c6a9a51d8..b3596a6e2 100644 --- a/packages/e2e-tests/src/bigapp.test.ts +++ b/packages/e2e-tests/src/bigapp.test.ts @@ -8,18 +8,18 @@ describe("bigapp test", () => { it("with apps and libs and all, y'all", async () => { const repo = new Monorepo("bigapp"); - repo.init(); + await repo.init(); - repo.addPackage("FooApp1", ["FooCore"]); - repo.addPackage("FooApp2", ["FooCore"]); - repo.addPackage("FooCore", ["BuildTool"]); - repo.addPackage("BarPage", ["BarCore"]); - repo.addPackage("BarCore", ["BuildTool"]); - repo.addPackage("BuildTool"); + await repo.addPackage("FooApp1", ["FooCore"]); + await repo.addPackage("FooApp2", ["FooCore"]); + await repo.addPackage("FooCore", ["BuildTool"]); + await repo.addPackage("BarPage", ["BarCore"]); + await repo.addPackage("BarCore", ["BuildTool"]); + await repo.addPackage("BuildTool"); - repo.install(); + await repo.install(); - const results = repo.run("test"); + const results = await repo.run("test"); const output = results.stdout + results.stderr; const jsonOutput = parseNdJson(output); diff --git a/packages/e2e-tests/src/cacheClear.test.ts b/packages/e2e-tests/src/cacheClear.test.ts index 701c4a8bd..8e3cf18c3 100644 --- a/packages/e2e-tests/src/cacheClear.test.ts +++ b/packages/e2e-tests/src/cacheClear.test.ts @@ -9,8 +9,8 @@ describe("Cache clear", () => { it("should clear cache with the default cache location", async () => { const repo = new Monorepo("cache-default"); - repo.init(); - repo.setLageConfig( + await repo.init(); + await repo.setLageConfig( `const fs = require('fs'); const path = require('path'); module.exports = { @@ -21,17 +21,17 @@ describe("Cache clear", () => { };` ); - repo.addPackage("a", [], { + await repo.addPackage("a", [], { build: "echo a:build", test: "echo a:test", }); - repo.addPackage("b", [], { + await repo.addPackage("b", [], { build: "echo b:build", }); - repo.install(); + await repo.install(); // Run build so we get a cache folder - repo.run("build"); + await repo.run("build"); const cacheFolder = path.join(repo.root, 
defaultCacheLocation); @@ -43,7 +43,7 @@ describe("Cache clear", () => { // Clear the cache - repo.run("clear"); + await repo.run("clear"); // Cache folders should be empty expect(fs.readdirSync(cacheFolder)).toHaveLength(0); diff --git a/packages/e2e-tests/src/customReporter.test.ts b/packages/e2e-tests/src/customReporter.test.ts index c3187ff15..d3550360b 100644 --- a/packages/e2e-tests/src/customReporter.test.ts +++ b/packages/e2e-tests/src/customReporter.test.ts @@ -9,15 +9,15 @@ describe("custom reporters", () => { repo = undefined; }); - it("should use custom reporter defined in lage config", () => { + it("should use custom reporter defined in lage config", async () => { repo = new Monorepo("custom-reporter"); - repo.init(); - repo.addPackage("a", ["b"]); - repo.addPackage("b"); + await repo.init(); + await repo.addPackage("a", ["b"]); + await repo.addPackage("b"); // Create a custom reporter file - repo.commitFiles({ + await repo.commitFiles({ "custom-reporter.mjs": ` export class CustomTestReporter { constructor(options) { @@ -44,7 +44,7 @@ export default CustomTestReporter; }); // Update lage config to include custom reporter - repo.setLageConfig(`module.exports = { + await repo.setLageConfig(`module.exports = { pipeline: { build: ['^build'], test: ['build'], @@ -56,9 +56,9 @@ export default CustomTestReporter; } };`); - repo.install(); + await repo.install(); - const results = repo.run("build", ["--reporter", "customTest"]); + const results = await repo.run("build", ["--reporter", "customTest"]); const output = results.stdout + results.stderr; // Check that custom reporter was used @@ -66,14 +66,14 @@ export default CustomTestReporter; expect(output).toContain('"type":"summary"'); }); - it("should use multiple reporters including custom ones", () => { + it("should use multiple reporters including custom ones", async () => { repo = new Monorepo("multiple-reporters"); - repo.init(); - repo.addPackage("a"); + await repo.init(); + await repo.addPackage("a"); // Create a custom reporter file - repo.commitFiles({ + await repo.commitFiles({ "custom-reporter.mjs": ` export class CustomTestReporter { constructor(options) { @@ -97,7 +97,7 @@ export default CustomTestReporter; }); // Update lage config to include custom reporter - repo.setLageConfig(`module.exports = { + await repo.setLageConfig(`module.exports = { pipeline: { build: ['^build'], test: ['build'], @@ -108,10 +108,10 @@ export default CustomTestReporter; } };`); - repo.install(); + await repo.install(); // Use both json and custom reporter - const results = repo.run("build", ["--reporter", "json", "--reporter", "customTest", "--log-level", "silly"]); + const results = await repo.run("build", ["--reporter", "json", "--reporter", "customTest", "--log-level", "silly"]); const output = results.stdout + results.stderr; const jsonOutput = parseNdJson(output); @@ -121,14 +121,14 @@ export default CustomTestReporter; expect(output).toContain('"message":"Custom reporter summary"'); }); - it("should handle custom reporter with different export patterns", () => { + it("should handle custom reporter with different export patterns", async () => { repo = new Monorepo("export-patterns"); - repo.init(); - repo.addPackage("a"); + await repo.init(); + await repo.addPackage("a"); // Create a custom reporter with named export that is also the default - repo.commitFiles({ + await repo.commitFiles({ "named-export-reporter.mjs": ` export class NamedReporter { constructor(options) { @@ -152,7 +152,7 @@ export default NamedReporter; }); // Update lage 
config - repo.setLageConfig(`module.exports = { + await repo.setLageConfig(`module.exports = { pipeline: { build: ['^build'], }, @@ -162,9 +162,9 @@ export default NamedReporter; } };`); - repo.install(); + await repo.install(); - const results = repo.run("build", ["--reporter", "namedReporter"]); + const results = await repo.run("build", ["--reporter", "namedReporter"]); const output = results.stdout + results.stderr; expect(output).toContain('"namedExport":true'); @@ -173,23 +173,25 @@ export default NamedReporter; it("should error when custom reporter file is invalid", async () => { repo = new Monorepo("reporter-not-found"); - repo.init(); - repo.addPackage("a"); + await repo.init(); + await repo.addPackage("a"); - repo.install(); + await repo.install(); // Request a reporter that doesn't exist in config - expect(() => repo!.run("build", ["--reporter", "nonExistentReporter"])).toThrow('Invalid --reporter option: "nonExistentReporter"'); + await expect(repo!.run("build", ["--reporter", "nonExistentReporter"])).rejects.toThrow( + 'Invalid --reporter option: "nonExistentReporter"' + ); }); - it("should handle custom reporter with relative path", () => { + it("should handle custom reporter with relative path", async () => { repo = new Monorepo("relative-path"); - repo.init(); - repo.addPackage("a"); + await repo.init(); + await repo.addPackage("a"); // Create a custom reporter in a subdirectory - repo.commitFiles({ + await repo.commitFiles({ "reporters/my-custom-reporter.mjs": ` export default class MyCustomReporter { constructor(options) { @@ -211,7 +213,7 @@ export default class MyCustomReporter { }); // Update lage config with relative path - repo.setLageConfig(`module.exports = { + await repo.setLageConfig(`module.exports = { pipeline: { build: ['^build'], }, @@ -221,23 +223,23 @@ export default class MyCustomReporter { } };`); - repo.install(); + await repo.install(); - const results = repo.run("build", ["--reporter", "myReporter"]); + const results = await repo.run("build", ["--reporter", "myReporter"]); const output = results.stdout + results.stderr; expect(output).toContain('"customPath":true'); expect(output).toContain('"reporter":"my-custom-reporter"'); }); - it("should pass options to custom reporter", () => { + it("should pass options to custom reporter", async () => { repo = new Monorepo("reporter-options"); - repo.init(); - repo.addPackage("a"); + await repo.init(); + await repo.addPackage("a"); // Create a custom reporter that logs its options - repo.commitFiles({ + await repo.commitFiles({ "options-reporter.mjs": ` export default class OptionsReporter { constructor(options) { @@ -260,7 +262,7 @@ export default class OptionsReporter { `, }); - repo.setLageConfig(`module.exports = { + await repo.setLageConfig(`module.exports = { pipeline: { build: ['^build'], }, @@ -270,9 +272,9 @@ export default class OptionsReporter { } };`); - repo.install(); + await repo.install(); - const results = repo.run("build", ["--reporter", "optionsTest", "--concurrency", "2", "--grouped"]); + const results = await repo.run("build", ["--reporter", "optionsTest", "--concurrency", "2", "--grouped"]); const output = results.stdout + results.stderr; expect(output).toContain('"receivedOptions":true'); @@ -280,20 +282,20 @@ export default class OptionsReporter { expect(output).toContain('"grouped":true'); }); - it("should handle errors from custom reporter gracefully", () => { + it("should handle errors from custom reporter gracefully", async () => { repo = new Monorepo("reporter-error"); - repo.init(); 
- repo.addPackage("a"); + await repo.init(); + await repo.addPackage("a"); // Create a reporter with invalid JavaScript - repo.commitFiles({ + await repo.commitFiles({ "broken-reporter.mjs": ` This is not valid JavaScript {{{ ]]] ;;; `, }); - repo.setLageConfig(`module.exports = { + await repo.setLageConfig(`module.exports = { pipeline: { build: ['^build'], }, @@ -303,28 +305,26 @@ This is not valid JavaScript {{{ ]]] ;;; } };`); - repo.install(); + await repo.install(); // Should throw an error when trying to use the broken reporter - expect(() => { - repo!.run("build", ["--reporter", "brokenReporter"]); - }).toThrow(); + await expect(repo!.run("build", ["--reporter", "brokenReporter"])).rejects.toThrow(); }); - it("should error when custom reporter exports a non-function/non-class value", () => { + it("should error when custom reporter exports a non-function/non-class value", async () => { repo = new Monorepo("invalid-export"); - repo.init(); - repo.addPackage("a"); + await repo.init(); + await repo.addPackage("a"); // Create a reporter that exports a number - repo.commitFiles({ + await repo.commitFiles({ "number-reporter.mjs": ` export default 42; `, }); - repo.setLageConfig(`module.exports = { + await repo.setLageConfig(`module.exports = { pipeline: { build: ['^build'], }, @@ -334,28 +334,28 @@ export default 42; } };`); - repo.install(); + await repo.install(); // Should throw an error when trying to use a reporter that exports a primitive value - expect(() => { - repo!.run("build", ["--reporter", "numberReporter"]); - }).toThrow(/does not export a valid reporter class or instance/); + await expect(repo!.run("build", ["--reporter", "numberReporter"])).rejects.toThrow( + /does not export a valid reporter class or instance/ + ); }); - it("should error when custom reporter exports a string", () => { + it("should error when custom reporter exports a string", async () => { repo = new Monorepo("string-export"); - repo.init(); - repo.addPackage("a"); + await repo.init(); + await repo.addPackage("a"); // Create a reporter that exports a string - repo.commitFiles({ + await repo.commitFiles({ "string-reporter.mjs": ` export default "not a reporter"; `, }); - repo.setLageConfig(`module.exports = { + await repo.setLageConfig(`module.exports = { pipeline: { build: ['^build'], }, @@ -365,22 +365,22 @@ export default "not a reporter"; } };`); - repo.install(); + await repo.install(); // Should throw an error when trying to use a reporter that exports a string - expect(() => { - repo!.run("build", ["--reporter", "stringReporter"]); - }).toThrow(/does not export a valid reporter class or instance/); + await expect(repo!.run("build", ["--reporter", "stringReporter"])).rejects.toThrow( + /does not export a valid reporter class or instance/ + ); }); - it("should work with custom reporter that exports an object instance", () => { + it("should work with custom reporter that exports an object instance", async () => { repo = new Monorepo("object-instance"); - repo.init(); - repo.addPackage("a"); + await repo.init(); + await repo.addPackage("a"); // Create a reporter that exports an object instance (not a class) - repo.commitFiles({ + await repo.commitFiles({ "object-reporter.mjs": ` const objectReporter = { log(entry) { @@ -399,7 +399,7 @@ export default objectReporter; `, }); - repo.setLageConfig(`module.exports = { + await repo.setLageConfig(`module.exports = { pipeline: { build: ['^build'], }, @@ -409,22 +409,22 @@ export default objectReporter; } };`); - repo.install(); + await repo.install(); - const 
results = repo.run("build", ["--reporter", "objectReporter"]); + const results = await repo.run("build", ["--reporter", "objectReporter"]); const output = results.stdout + results.stderr; expect(output).toContain('"objectInstance":true'); }); - it("should work with CommonJS custom reporter", () => { + it("should work with CommonJS custom reporter", async () => { repo = new Monorepo("commonjs-reporter"); - repo.init(); - repo.addPackage("a"); + await repo.init(); + await repo.addPackage("a"); // Create a CommonJS reporter - repo.commitFiles({ + await repo.commitFiles({ "cjs-reporter.cjs": ` class CommonJSReporter { constructor(options) { @@ -447,7 +447,7 @@ module.exports = CommonJSReporter; `, }); - repo.setLageConfig(`module.exports = { + await repo.setLageConfig(`module.exports = { pipeline: { build: ['^build'], }, @@ -457,23 +457,23 @@ module.exports = CommonJSReporter; } };`); - repo.install(); + await repo.install(); - const results = repo.run("build", ["--reporter", "cjsReporter"]); + const results = await repo.run("build", ["--reporter", "cjsReporter"]); const output = results.stdout + results.stderr; expect(output).toContain('"commonJS":true'); }); - it("should allow custom reporter to track all build events", () => { + it("should allow custom reporter to track all build events", async () => { repo = new Monorepo("event-tracking"); - repo.init(); - repo.addPackage("a", ["b"]); - repo.addPackage("b"); + await repo.init(); + await repo.addPackage("a", ["b"]); + await repo.addPackage("b"); // Create a reporter that tracks events - repo.commitFiles({ + await repo.commitFiles({ "tracking-reporter.mjs": ` export default class TrackingReporter { constructor(options) { @@ -501,7 +501,7 @@ export default class TrackingReporter { `, }); - repo.setLageConfig(`module.exports = { + await repo.setLageConfig(`module.exports = { pipeline: { build: ['^build'], test: ['build'], @@ -512,9 +512,9 @@ export default class TrackingReporter { } };`); - repo.install(); + await repo.install(); - const results = repo.run("test", ["--reporter", "tracker"]); + const results = await repo.run("test", ["--reporter", "tracker"]); const output = results.stdout + results.stderr; expect(output).toContain('"trackingReporter":true'); diff --git a/packages/e2e-tests/src/info.test.ts b/packages/e2e-tests/src/info.test.ts index 3f3e1def8..e3d9762e8 100644 --- a/packages/e2e-tests/src/info.test.ts +++ b/packages/e2e-tests/src/info.test.ts @@ -6,13 +6,13 @@ describe("info command", () => { it("basic info test case", async () => { const repo = new Monorepo("basics-info"); - repo.init(); - repo.addPackage("a", ["b"]); - repo.addPackage("b"); + await repo.init(); + await repo.addPackage("a", ["b"]); + await repo.addPackage("b"); - repo.install(); + await repo.install(); - const results = repo.run("writeInfo", ["test"]); + const results = await repo.run("writeInfo", ["test"]); const output = results.stdout + results.stderr; const jsonOutput = parseNdJson(output); @@ -24,13 +24,13 @@ describe("info command", () => { it("scoped info test case", async () => { const repo = new Monorepo("scoped-info"); - repo.init(); - repo.addPackage("a", ["b"]); - repo.addPackage("b"); + await repo.init(); + await repo.addPackage("a", ["b"]); + await repo.addPackage("b"); - repo.install(); + await repo.install(); - const results = repo.run("writeInfo", ["test", "--to", "b"]); + const results = await repo.run("writeInfo", ["test", "--to", "b"]); const output = results.stdout + results.stderr; const jsonOutput = parseNdJson(output); 
expect(jsonOutput).toMatchSnapshot(); @@ -40,14 +40,14 @@ describe("info command", () => { it("dependencies are resolved via noop tasks", async () => { const repo = new Monorepo("noop-task-info"); - repo.init(); - repo.addPackage("a", ["b"], { build: "echo 'building a'" }); + await repo.init(); + await repo.addPackage("a", ["b"], { build: "echo 'building a'" }); // This task does not have a `build` script. - repo.addPackage("b", ["c"], {}); - repo.addPackage("c", [], { build: "echo 'building c'" }); - repo.install(); + await repo.addPackage("b", ["c"], {}); + await repo.addPackage("c", [], { build: "echo 'building c'" }); + await repo.install(); - const results = repo.run("writeInfo", ["build", "prepare"]); + const results = await repo.run("writeInfo", ["build", "prepare"]); const output = results.stdout + results.stderr; const infoJsonOutput: any = parseNdJson(output)[0]; @@ -70,14 +70,14 @@ describe("info command", () => { it("lage info drops direct dependencies when transtive and keeps __start", async () => { const repo = new Monorepo("transitive-info-dropped"); - repo.init(); - repo.addPackage("a", ["b", "c"]); - repo.addPackage("b", ["c", "d"]); - repo.addPackage("c", []); - repo.addPackage("d", [], { nobuild: "echo 'no build'" }); - repo.install(); + await repo.init(); + await repo.addPackage("a", ["b", "c"]); + await repo.addPackage("b", ["c", "d"]); + await repo.addPackage("c", []); + await repo.addPackage("d", [], { nobuild: "echo 'no build'" }); + await repo.install(); - const results = repo.run("writeInfo", ["build"]); + const results = await repo.run("writeInfo", ["build"]); const output = results.stdout + results.stderr; const jsonOutput = parseNdJson(output); @@ -96,15 +96,15 @@ describe("info command", () => { it("lage info in back compat mode keeps direct dependencies and drops __start", async () => { const repo = new Monorepo("transitive-info-dropped"); - repo.init(); - repo.addPackage("a", ["b", "c"]); - repo.addPackage("b", ["c", "d"]); - repo.addPackage("c", []); - repo.addPackage("d", [], { nobuild: "echo 'no build'" }); - repo.install(); + await repo.init(); + await repo.addPackage("a", ["b", "c"]); + await repo.addPackage("b", ["c", "d"]); + await repo.addPackage("c", []); + await repo.addPackage("d", [], { nobuild: "echo 'no build'" }); + await repo.install(); const backCompatEnvVars = { DOMINO: "1" }; - const results = repo.run("writeInfo", ["build"], false, { env: backCompatEnvVars }); + const results = await repo.run("writeInfo", ["build"], false, { env: backCompatEnvVars }); const output = results.stdout + results.stderr; const jsonOutput = parseNdJson(output); @@ -124,8 +124,8 @@ describe("info command", () => { it("custom inputs, outputs and weight value", async () => { const repo = new Monorepo("scoped-info"); - repo.init(); - repo.setLageConfig( + await repo.init(); + await repo.setLageConfig( `module.exports = { pipeline: { build: { @@ -144,10 +144,10 @@ describe("info command", () => { };` ); - repo.addPackage("a", ["b"]); - repo.addPackage("b"); - repo.install(); - const results = repo.run("writeInfo", ["test", "build"]); + await repo.addPackage("a", ["b"]); + await repo.addPackage("b"); + await repo.install(); + const results = await repo.run("writeInfo", ["test", "build"]); const output = results.stdout + results.stderr; const jsonOutput = parseNdJson(output); expect(jsonOutput).toMatchSnapshot(); @@ -158,8 +158,8 @@ describe("info command", () => { it("custom options", async () => { const repo = new Monorepo("scoped-info"); - repo.init(); - 
repo.setLageConfig( + await repo.init(); + await repo.setLageConfig( `module.exports = { pipeline: { build: ["^build"], @@ -179,12 +179,12 @@ describe("info command", () => { };` ); - repo.addPackage("a", ["b"]); - repo.addPackage("b"); + await repo.addPackage("a", ["b"]); + await repo.addPackage("b"); - repo.install(); + await repo.install(); - const results = repo.run("writeInfo", ["test", "build"]); + const results = await repo.run("writeInfo", ["test", "build"]); const output = results.stdout + results.stderr; const jsonOutput = parseNdJson(output); expect(jsonOutput).toMatchSnapshot(); diff --git a/packages/e2e-tests/src/lageserver.test.ts b/packages/e2e-tests/src/lageserver.test.ts index 2675c3b0a..2f5adc945 100644 --- a/packages/e2e-tests/src/lageserver.test.ts +++ b/packages/e2e-tests/src/lageserver.test.ts @@ -16,20 +16,20 @@ describe("lageserver", () => { it("connects to a running server", async () => { repo = new Monorepo("basics"); - repo.init(); - repo.addPackage("a", ["b"]); - repo.addPackage("b"); + await repo.init(); + await repo.addPackage("a", ["b"]); + await repo.addPackage("b"); - repo.install(); + await repo.install(); const serverProcess = repo.runServer(["build"]); await new Promise((resolve) => setTimeout(resolve, 2000)); - const results = repo.run("lage", ["exec", "--server", "--tasks", "build", "--", "b", "build"]); + const results = await repo.run("lage", ["exec", "--server", "--tasks", "build", "--", "b", "build"]); const output = results.stdout + results.stderr; const jsonOutput = parseNdJson(output); - repo.run("lage", ["exec", "--server", "--tasks", "build", "--", "a", "build"]); + await repo.run("lage", ["exec", "--server", "--tasks", "build", "--", "a", "build"]); serverProcess.kill(); @@ -40,13 +40,13 @@ describe("lageserver", () => { it("launches a background server", async () => { repo = new Monorepo("basics"); - repo.init(); - repo.addPackage("a", ["b"]); - repo.addPackage("b"); + await repo.init(); + await repo.addPackage("a", ["b"]); + await repo.addPackage("b"); - repo.install(); + await repo.install(); - const results = repo.run("lage", ["exec", "--server", "localhost:5112", "--tasks", "build", "--", "b", "build"]); + const results = await repo.run("lage", ["exec", "--server", "localhost:5112", "--tasks", "build", "--", "b", "build"]); const output = results.stdout + results.stderr; const jsonOutput = parseNdJson(output); const started = jsonOutput.find((entry) => entry.data?.pid && entry.msg === "Server started"); @@ -59,15 +59,15 @@ describe("lageserver", () => { it("reports inputs for targets and their transitive dependencies' files", async () => { repo = new Monorepo("basics"); - repo.init(); + await repo.init(); - repo.addPackage("a", ["b"]); - repo.addPackage("b", ["c"]); - repo.addPackage("c"); + await repo.addPackage("a", ["b"]); + await repo.addPackage("b", ["c"]); + await repo.addPackage("c"); - repo.install(); + await repo.install(); - repo.commitFiles({ + await repo.commitFiles({ "packages/a/src/index.ts": "console.log('a');", "packages/a/alt/extra.ts": "console.log('a');", "packages/b/alt/index.ts": "console.log('b');", @@ -76,7 +76,7 @@ describe("lageserver", () => { "packages/c/alt/extra.ts": "console.log('c');", }); - repo.setLageConfig( + await repo.setLageConfig( `module.exports = { pipeline: { "a#build": { @@ -95,7 +95,7 @@ describe("lageserver", () => { };` ); - const results = repo.run("lage", [ + const results = await repo.run("lage", [ "exec", "c", "build", @@ -115,8 +115,32 @@ describe("lageserver", () => { const started = 
jsonOutput.find((entry) => entry.data?.pid && entry.msg === "Server started"); expect(started?.data.pid).not.toBeUndefined(); - repo.run("lage", ["exec", "b", "build", "--tasks", "build", "--server", "localhost:5111", "--timeout", "60", "--reporter", "json"]); - repo.run("lage", ["exec", "a", "build", "--tasks", "build", "--server", "localhost:5111", "--timeout", "60", "--reporter", "json"]); + await repo.run("lage", [ + "exec", + "b", + "build", + "--tasks", + "build", + "--server", + "localhost:5111", + "--timeout", + "60", + "--reporter", + "json", + ]); + await repo.run("lage", [ + "exec", + "a", + "build", + "--tasks", + "build", + "--server", + "localhost:5111", + "--timeout", + "60", + "--reporter", + "json", + ]); killDetachedProcess(parseInt(started?.data.pid)); diff --git a/packages/e2e-tests/src/mock/monorepo.ts b/packages/e2e-tests/src/mock/monorepo.ts index ca2f48aa1..1fe758cc1 100644 --- a/packages/e2e-tests/src/mock/monorepo.ts +++ b/packages/e2e-tests/src/mock/monorepo.ts @@ -1,41 +1,27 @@ -import * as os from "os"; import * as fs from "fs"; import * as path from "path"; import * as execa from "execa"; import { glob } from "@lage-run/globby"; +import { Monorepo as BaseMonorepo } from "@lage-run/monorepo-fixture"; -export class Monorepo { - static tmpdir: string = os.tmpdir(); +const externalPackageJsonGlobs = ["node_modules/glob-hasher/package.json", "node_modules/glob-hasher-*/package.json"]; +const externalPackageJsons = glob(externalPackageJsonGlobs, { + cwd: path.join(__dirname, "..", "..", "..", ".."), + gitignore: false, +})!.map((f) => path.resolve(path.join(__dirname, "..", "..", "..", ".."), f)); - root: string; - nodeModulesPath: string; - yarnPath: string; +export class Monorepo extends BaseMonorepo { + private readonly yarnPath: string; - static externalPackageJsonGlobs: string[] = ["node_modules/glob-hasher/package.json", "node_modules/glob-hasher-*/package.json"]; - - static externalPackageJsons: string[] = glob(Monorepo.externalPackageJsonGlobs, { - cwd: path.join(__dirname, "..", "..", "..", ".."), - gitignore: false, - })!.map((f) => path.resolve(path.join(__dirname, "..", "..", "..", ".."), f)); - - constructor(private name: string) { - this.root = fs.mkdtempSync(path.join(Monorepo.tmpdir, `lage-monorepo-${name}-`)); - this.nodeModulesPath = path.join(this.root, "node_modules"); + constructor(name: string) { + super(name, "lage-monorepo"); this.yarnPath = path.join(this.root, ".yarn", "yarn.js"); } - init(): void { - const options = { cwd: this.root }; - execa.sync("git", ["init"], options); - execa.sync("git", ["config", "user.email", "you@example.com"], options); - execa.sync("git", ["config", "user.name", "test user"], options); - execa.sync("git", ["config", "commit.gpgsign", "false"], options); - this.generateRepoFiles(); - } - - install(): void { - for (const packagePath of Monorepo.externalPackageJsons.map((p) => path.dirname(p))) { + public override async install(): Promise { + for (const externalPackageJson of externalPackageJsons) { + const packagePath = path.dirname(externalPackageJson); const name = JSON.parse(fs.readFileSync(path.join(packagePath, "package.json"), "utf-8")).name; fs.cpSync(packagePath, path.join(this.root, "node_modules", name), { recursive: true }); } @@ -44,8 +30,8 @@ export class Monorepo { execa.sync(`"${process.execPath}"`, [`"${this.yarnPath}"`, "install", "--no-immutable"], { cwd: this.root, shell: true }); } - generateRepoFiles(): void { - this.commitFiles({ + protected override async generateRepoFiles(): Promise { + 
await this.commitFiles({ ".yarnrc.yml": `yarnPath: "${this.yarnPath.replace(/\\/g, "/")}"\ncacheFolder: "${this.root.replace( /\\/g, "/" @@ -84,81 +70,13 @@ export class Monorepo { }); } - setLageConfig(contents: string): void { - this.commitFiles({ - "lage.config.js": contents, - }); - } - - addPackage(name: string, internalDeps: string[] = [], scripts?: { [script: string]: string }): void { - return this.commitFiles({ - [`packages/${name}/build.js`]: `console.log('building ${name}');`, - [`packages/${name}/test.js`]: `console.log('testing ${name}');`, - [`packages/${name}/lint.js`]: `console.log('linting ${name}');`, + public override async addPackage(name: string, internalDeps: string[] = [], scripts?: { [script: string]: string }): Promise { + return super.addPackage(name, internalDeps, scripts, { [`packages/${name}/extra.js`]: `console.log('extra ${name}');`, - [`packages/${name}/package.json`]: { - name, - version: "0.1.0", - scripts: scripts || { - build: "node ./build.js", - test: "node ./test.js", - lint: "node ./lint.js", - }, - dependencies: { - ...(internalDeps && - internalDeps.reduce((deps, dep) => { - return { ...deps, [dep]: "*" }; - }, {})), - }, - }, - }); - } - - clone(origin: string): execa.ExecaSyncReturnValue { - return execa.sync("git", ["clone", origin], { cwd: this.root }); - } - - push(origin: string, branch: string): execa.ExecaSyncReturnValue { - return execa.sync("git", ["push", origin, branch], { cwd: this.root }); - } - - commitFiles(files: { [name: string]: string | Record }, options: { executable?: boolean } = {}): void { - for (const [file, contents] of Object.entries(files)) { - let out = ""; - if (typeof contents !== "string") { - out = JSON.stringify(contents, null, 2); - } else { - out = contents; - } - - const fullPath = path.join(this.root, file); - - if (!fs.existsSync(path.dirname(fullPath))) { - fs.mkdirSync(path.dirname(fullPath), { recursive: true }); - } - - fs.writeFileSync(fullPath, out); - - if (options.executable) { - fs.chmodSync(path.join(this.root, file), fs.constants.S_IXUSR | fs.constants.S_IRUSR | fs.constants.S_IROTH); - } - } - - execa.sync("git", ["add", ...Object.keys(files)], { - cwd: this.root, }); - - execa.sync("git", ["commit", "-m", "commit files"], { cwd: this.root }); } - run(command: string, args?: string[], silent?: boolean, options?: Partial): execa.ExecaSyncReturnValue { - return execa.sync(process.execPath, [this.yarnPath, ...(silent === true ? 
["--silent"] : []), command, ...(args || [])], { - cwd: this.root, - ...options, - }); - } - - runServer(tasks: string[]): execa.ExecaChildProcess { + public runServer(tasks: string[]): execa.ExecaChildProcess { const cp = execa.default(process.execPath, [path.join(this.root, "node_modules/lage/dist/lage-server.js"), "--tasks", ...tasks], { cwd: this.root, detached: true, @@ -171,22 +89,4 @@ export class Monorepo { return cp; } - - async cleanup(): Promise { - const maxRetries = 5; - let attempts = 0; - - while (attempts < maxRetries) { - try { - fs.rmSync(this.root, { recursive: true }); - break; - } catch (error) { - attempts++; - if (attempts >= maxRetries) { - throw error; - } - await new Promise((resolve) => setTimeout(resolve, 1000)); - } - } - } } diff --git a/packages/e2e-tests/src/remoteFallback.test.ts b/packages/e2e-tests/src/remoteFallback.test.ts index b4b3ec436..4e7bd154b 100644 --- a/packages/e2e-tests/src/remoteFallback.test.ts +++ b/packages/e2e-tests/src/remoteFallback.test.ts @@ -5,8 +5,8 @@ describe("RemoteFallbackCacheProvider", () => { it("should skip local cache population if --skip-local-cache is enabled", async () => { const repo = new Monorepo("fallback"); - repo.init(); - repo.setLageConfig( + await repo.init(); + await repo.setLageConfig( `const fs = require('fs'); const path = require('path'); module.exports = { @@ -21,20 +21,20 @@ describe("RemoteFallbackCacheProvider", () => { provider: 'local' }, internalCacheFolder: '.lage-cache-test' - } + } };` ); - repo.addPackage("a", [], { + await repo.addPackage("a", [], { build: "echo a:build", test: "echo a:test", }); - repo.addPackage("b", [], { + await repo.addPackage("b", [], { build: "echo b:build", }); - repo.install(); + await repo.install(); - const results = repo.run("test", ["--skip-local-cache"]); + const results = await repo.run("test", ["--skip-local-cache"]); const output = results.stdout + results.stderr; const jsonOutput = parseNdJson(output); @@ -53,8 +53,8 @@ describe("RemoteFallbackCacheProvider", () => { it("should operate with local provider ONLY by default", async () => { const repo = new Monorepo("fallback-local-only"); - repo.init(); - repo.setLageConfig( + await repo.init(); + await repo.setLageConfig( `const fs = require('fs'); const path = require('path'); module.exports = { @@ -64,21 +64,21 @@ describe("RemoteFallbackCacheProvider", () => { }, cache: true, cacheOptions: { - - } + + } };` ); - repo.addPackage("a", [], { + await repo.addPackage("a", [], { build: "echo a:build", test: "echo a:test", }); - repo.addPackage("b", [], { + await repo.addPackage("b", [], { build: "echo b:build", }); - repo.install(); + await repo.install(); - const results = repo.run("test"); + const results = await repo.run("test"); const output = results.stdout + results.stderr; const jsonOutput = parseNdJson(output); @@ -97,8 +97,8 @@ describe("RemoteFallbackCacheProvider", () => { it("should allow read-only mode when given a remote (or custom) cache config", async () => { const repo = new Monorepo("fallback-read-only"); - repo.init(); - repo.setLageConfig( + await repo.init(); + await repo.setLageConfig( `const fs = require('fs'); const path = require('path'); module.exports = { @@ -112,20 +112,20 @@ describe("RemoteFallbackCacheProvider", () => { provider: 'local' }, internalCacheFolder: '.lage-cache-test' - } + } };` ); - repo.addPackage("a", [], { + await repo.addPackage("a", [], { build: "echo a:build", test: "echo a:test", }); - repo.addPackage("b", [], { + await repo.addPackage("b", [], { build: "echo 
b:build", }); - repo.install(); + await repo.install(); - const results = repo.run("test", ["--log-level", "silly"]); + const results = await repo.run("test", ["--log-level", "silly"]); const output = results.stdout + results.stderr; const jsonOutput = parseNdJson(output); @@ -144,8 +144,8 @@ describe("RemoteFallbackCacheProvider", () => { it("should allow read-write mode when given a special environment variable", async () => { const repo = new Monorepo("fallback-read-write-env-var"); - repo.init(); - repo.setLageConfig( + await repo.init(); + await repo.setLageConfig( `const fs = require('fs'); const path = require('path'); module.exports = { @@ -164,16 +164,16 @@ describe("RemoteFallbackCacheProvider", () => { };` ); - repo.addPackage("a", [], { + await repo.addPackage("a", [], { build: "echo a:build", test: "echo a:test", }); - repo.addPackage("b", [], { + await repo.addPackage("b", [], { build: "echo b:build", }); - repo.install(); + await repo.install(); - const results = repo.run("test", ["--log-level", "silly"]); + const results = await repo.run("test", ["--log-level", "silly"]); const output = results.stdout + results.stderr; const jsonOutput = parseNdJson(output); diff --git a/packages/e2e-tests/src/transitiveTaskDeps.test.ts b/packages/e2e-tests/src/transitiveTaskDeps.test.ts index 168e6126f..a8859aeb2 100644 --- a/packages/e2e-tests/src/transitiveTaskDeps.test.ts +++ b/packages/e2e-tests/src/transitiveTaskDeps.test.ts @@ -8,8 +8,8 @@ describe("transitive task deps test", () => { it("produces a build graph even when some scripts are missing in package.json", async () => { const repo = new Monorepo("transitiveDeps"); - repo.init(); - repo.setLageConfig(`module.exports = { + await repo.init(); + await repo.setLageConfig(`module.exports = { "pipeline": { "build": [ ], "bundle":["build"], @@ -17,16 +17,16 @@ describe("transitive task deps test", () => { } }`); - repo.addPackage("a", [], { + await repo.addPackage("a", [], { build: "echo a:build", test: "echo a:test", }); - repo.addPackage("b", [], { + await repo.addPackage("b", [], { build: "echo b:build", }); - repo.install(); + await repo.install(); - const results = repo.run("test"); + const results = await repo.run("test"); const output = results.stdout + results.stderr; const jsonOutput = parseNdJson(output); @@ -52,27 +52,27 @@ describe("transitive task deps test", () => { it("only runs package local dependencies for no-prefix dependencies", async () => { const repo = new Monorepo("transitiveDeps-no-prefix"); - repo.init(); - repo.setLageConfig(`module.exports = { + await repo.init(); + await repo.setLageConfig(`module.exports = { pipeline: { bundle: ["transpile"], transpile: [] }, }`); - repo.addPackage("a", ["b"], { + await repo.addPackage("a", ["b"], { bundle: "echo a:bundle", transpile: "echo a:transpile", }); - repo.addPackage("b", ["c"], { + await repo.addPackage("b", ["c"], { transpile: "echo b:transpile", }); - repo.addPackage("c", [], { + await repo.addPackage("c", [], { transpile: "echo c:transpile", }); - repo.install(); + await repo.install(); - const results = repo.run("bundle", ["--scope", "a"]); + const results = await repo.run("bundle", ["--scope", "a"]); const output = results.stdout + results.stderr; const jsonOutput = parseNdJson(output); @@ -100,27 +100,27 @@ describe("transitive task deps test", () => { it("only runs direct dependencies for ^ prefix dependencies -- ", async () => { const repo = new Monorepo("transitiveDeps-carat-prefix"); - repo.init(); - repo.setLageConfig(`module.exports = { + await 
repo.init(); + await repo.setLageConfig(`module.exports = { pipeline: { bundle: ["^transpile"], transpile: [] }, }`); - repo.addPackage("a", ["b"], { + await repo.addPackage("a", ["b"], { bundle: "echo a:bundle", transpile: "echo a:transpile", }); - repo.addPackage("b", ["c"], { + await repo.addPackage("b", ["c"], { transpile: "echo b:transpile", }); - repo.addPackage("c", [], { + await repo.addPackage("c", [], { transpile: "echo c:transpile", }); - repo.install(); + await repo.install(); - const results = repo.run("bundle", ["--scope", "a"]); + const results = await repo.run("bundle", ["--scope", "a"]); const output = results.stdout + results.stderr; const jsonOutput = parseNdJson(output); @@ -150,8 +150,8 @@ describe("transitive task deps test", () => { it("Runs transitive dependencies for ^^ prefix dependencies", async () => { const repo = new Monorepo("transitiveDeps-indirect"); - repo.init(); - repo.setLageConfig(`module.exports = { + await repo.init(); + await repo.setLageConfig(`module.exports = { pipeline: { bundle: ["^^transpile"], transpile: [] @@ -170,19 +170,19 @@ describe("transitive task deps test", () => { ], }`); - repo.addPackage("a", ["b"], { + await repo.addPackage("a", ["b"], { bundle: "echo a:bundle", transpile: "echo a:transpile", }); - repo.addPackage("b", ["c"], { + await repo.addPackage("b", ["c"], { transpile: "echo b:transpile", }); - repo.addPackage("c", [], { + await repo.addPackage("c", [], { transpile: "echo c:transpile", }); - repo.install(); + await repo.install(); - const results = repo.run("bundle", ["--scope", "a"]); + const results = await repo.run("bundle", ["--scope", "a"]); const output = results.stdout + results.stderr; const jsonOutput = parseNdJson(output); diff --git a/packages/logger/src/Logger.ts b/packages/logger/src/Logger.ts index 1ad0f10e3..8cb6515f0 100644 --- a/packages/logger/src/Logger.ts +++ b/packages/logger/src/Logger.ts @@ -4,9 +4,9 @@ import { LogLevel } from "./interfaces/LogLevel.js"; import { createInterface } from "readline"; export class Logger { - reporters: Reporter[] = []; + public readonly reporters: Reporter[] = []; - log(level: LogLevel, msg: string, data?: TLogStructuredData): void { + public log(level: LogLevel, msg: string, data?: TLogStructuredData): void { const entry = { timestamp: Date.now(), level, @@ -19,27 +19,27 @@ export class Logger void { + public stream(level: LogLevel, input: NodeJS.ReadableStream, data?: TLogStructuredData): () => void { const readline = createInterface({ input, crlfDelay: Infinity, @@ -58,7 +58,7 @@ export class Logger { class TestReporter implements Reporter { - logLevel = LogLevel.warn; - entries: LogEntry[] = []; + public logLevel = LogLevel.warn; + public entries: LogEntry[] = []; - log(entry: LogEntry) { + public log(entry: LogEntry) { this.entries.push(entry); } - summarize(): void {} + public summarize(): void {} } it("should create a logger that reports to a single reporter", () => { diff --git a/packages/monorepo-fixture/src/monorepo.ts b/packages/monorepo-fixture/src/monorepo.ts index 373593126..7c05ead82 100644 --- a/packages/monorepo-fixture/src/monorepo.ts +++ b/packages/monorepo-fixture/src/monorepo.ts @@ -3,23 +3,26 @@ import * as fs from "fs"; import * as path from "path"; import execa from "execa"; -export class Monorepo { - static tmpdir: string = os.tmpdir(); +const tmpdir = os.tmpdir(); - root: string; +export class Monorepo { + public readonly root: string; - lagePath: string; + private readonly lagePath: string; - get nodeModulesPath(): string { + private get 
nodeModulesPath(): string { return path.join(this.root, "node_modules"); } - constructor(private name: string) { - this.root = fs.mkdtempSync(path.join(Monorepo.tmpdir, `monorepo-fixture-${name}-`)); + constructor( + protected name: string, + prefix = "monorepo-fixture" + ) { + this.root = fs.mkdtempSync(path.join(tmpdir, `${prefix}-${name}-`)); this.lagePath = path.join(this.nodeModulesPath, "@lage-run"); } - async init(fixturePath?: string): Promise { + public async init(fixturePath?: string): Promise { const options = { cwd: this.root }; const cwd = this.root; await execa("git", ["init"], options); @@ -38,7 +41,7 @@ export class Monorepo { } } - async install(): Promise { + public async install(): Promise { if (!fs.existsSync(this.nodeModulesPath)) { fs.mkdirSync(this.nodeModulesPath, { recursive: true }); } @@ -66,7 +69,7 @@ export class Monorepo { /** * Simulates a "yarn" call by linking internal packages and generates a yarn.lock file */ - async linkPackages(): Promise { + public async linkPackages(): Promise { const pkgs = fs.readdirSync(path.join(this.root, "packages")); if (!fs.existsSync(this.nodeModulesPath)) { @@ -94,7 +97,7 @@ export class Monorepo { await this.commitFiles({ "yarn.lock": yarnYaml }); } - async generateRepoFiles(): Promise { + protected async generateRepoFiles(): Promise { const lagePath = path.join(this.nodeModulesPath, "lage/lib/cli"); await this.commitFiles({ @@ -127,13 +130,18 @@ export class Monorepo { }); } - async setLageConfig(contents: string): Promise { + public async setLageConfig(contents: string): Promise { await this.commitFiles({ "lage.config.js": contents, }); } - async addPackage(name: string, internalDeps: string[] = [], scripts?: { [script: string]: string }): Promise { + public async addPackage( + name: string, + internalDeps: string[] = [], + scripts?: { [script: string]: string }, + extraFiles?: Record + ): Promise { return await this.commitFiles({ [`packages/${name}/build.js`]: `console.log('building ${name}');`, [`packages/${name}/test.js`]: `console.log('building ${name}');`, @@ -146,25 +154,21 @@ export class Monorepo { test: "node ./test.js", lint: "node ./lint.js", }, - dependencies: { - ...(internalDeps && - internalDeps.reduce((deps, dep) => { - return { ...deps, [dep]: "*" }; - }, {})), - }, + dependencies: internalDeps ? Object.fromEntries(internalDeps.map((dep) => [dep, "*"])) : {}, }, + ...extraFiles, }); } - clone(origin: string): execa.ExecaChildProcess { + public clone(origin: string): execa.ExecaChildProcess { return execa("git", ["clone", origin], { cwd: this.root }); } - push(origin: string, branch: string): execa.ExecaChildProcess { + public push(origin: string, branch: string): execa.ExecaChildProcess { return execa("git", ["push", origin, branch], { cwd: this.root }); } - writeFiles(files: { [file: string]: string | object }, options: { executable?: boolean } = {}): void { + public writeFiles(files: { [file: string]: string | object }, options: { executable?: boolean } = {}): void { for (const [file, contents] of Object.entries(files)) { let out = ""; if (typeof contents !== "string") { @@ -187,7 +191,7 @@ export class Monorepo { } } - async readFiles(files: string[]): Promise> { + public async readFiles(files: string[]): Promise> { const contents: Record = {}; for (const file of files) { const fullPath = path.isAbsolute(file) ? 
file : path.join(this.root, file); @@ -200,22 +204,23 @@ export class Monorepo { return contents; } - async commitFiles(files: { [name: string]: string | object }, options: { executable?: boolean } = {}): Promise { - await this.writeFiles(files, options); + public async commitFiles(files: { [name: string]: string | object }, options: { executable?: boolean } = {}): Promise { + this.writeFiles(files, options); await execa("git", ["add", "--", ...Object.keys(files)], { cwd: this.root, }); await execa("git", ["commit", "-m", "commit files"], { cwd: this.root }); } - run(command: string, args?: string[], silent?: boolean): execa.ExecaChildProcess { + public run(command: string, args?: string[], silent?: boolean, options?: Partial): execa.ExecaChildProcess { return execa("yarn", [...(silent === true ? ["--silent"] : []), command, ...(args || [])], { cwd: this.root, shell: true, + ...options, }); } - async cleanup(): Promise { + public async cleanup(): Promise { const maxRetries = 5; let attempts = 0; diff --git a/packages/reporters/src/AdoReporter.ts b/packages/reporters/src/AdoReporter.ts index 14a5c84d7..e6298cdec 100644 --- a/packages/reporters/src/AdoReporter.ts +++ b/packages/reporters/src/AdoReporter.ts @@ -49,16 +49,24 @@ function format(level: LogLevel, prefix: string, message: string) { } export class AdoReporter implements Reporter { - logStream: Writable = process.stdout; + private logStream: Writable; private logEntries = new Map(); - readonly groupedEntries: Map[]> = new Map(); - - constructor(private options: { logLevel?: LogLevel; grouped?: boolean }) { + private readonly groupedEntries: Map[]> = new Map(); + + constructor( + private options: { + logLevel?: LogLevel; + grouped?: boolean; + /** stream for testing */ + logStream?: Writable; + } + ) { options.logLevel = options.logLevel || LogLevel.info; + this.logStream = options.logStream || process.stdout; } - log(entry: LogEntry): boolean | void { + public log(entry: LogEntry): boolean | void { if (entry.data && entry.data.target && entry.data.target.hidden) { return; } @@ -159,7 +167,7 @@ export class AdoReporter implements Reporter { } } - summarize(schedulerRunSummary: SchedulerRunSummary): void { + public summarize(schedulerRunSummary: SchedulerRunSummary): void { const { targetRuns, targetRunByStatus, duration } = schedulerRunSummary; const { failed, aborted, skipped, success, pending } = targetRunByStatus; diff --git a/packages/reporters/src/BasicReporter.ts b/packages/reporters/src/BasicReporter.ts index 98c5108cd..417284bff 100644 --- a/packages/reporters/src/BasicReporter.ts +++ b/packages/reporters/src/BasicReporter.ts @@ -71,7 +71,7 @@ export class BasicReporter implements Reporter { process.on("exit", () => process.stdout.write(terminal.showCursor)); } - log(entry: LogEntry): void { + public log(entry: LogEntry): void { const data = entry.data; if (!data?.target || data.target.hidden) return; @@ -92,7 +92,7 @@ export class BasicReporter implements Reporter { } } - summarize(schedulerRunSummary: SchedulerRunSummary): void { + public summarize(schedulerRunSummary: SchedulerRunSummary): void { clearInterval(this.updateTimer); process.stdout.write(terminal.clearLine); diff --git a/packages/reporters/src/ChromeTraceEventsReporter.ts b/packages/reporters/src/ChromeTraceEventsReporter.ts index fcbf89b42..99d8421d2 100644 --- a/packages/reporters/src/ChromeTraceEventsReporter.ts +++ b/packages/reporters/src/ChromeTraceEventsReporter.ts @@ -39,7 +39,7 @@ function getTimeBasedFilename(prefix: string) { } export class 
ChromeTraceEventsReporter implements Reporter { - consoleLogStream: Writable = process.stdout; + private consoleLogStream: Writable; private events: TraceEventsObject = { traceEvents: [], @@ -47,15 +47,21 @@ export class ChromeTraceEventsReporter implements Reporter { }; private outputFile: string; - constructor(private options: ChromeTraceEventsReporterOptions) { + constructor( + private options: ChromeTraceEventsReporterOptions & { + /** stream for testing */ + consoleLogStream?: Writable; + } + ) { this.outputFile = options.outputFile ?? getTimeBasedFilename("profile"); + this.consoleLogStream = options.consoleLogStream ?? process.stdout; } - log(): void { + public log(): void { // pass } - summarize(schedulerRunSummary: SchedulerRunSummary): void { + public summarize(schedulerRunSummary: SchedulerRunSummary): void { const { targetRuns, startTime } = schedulerRunSummary; // categorize events diff --git a/packages/reporters/src/JsonReporter.ts b/packages/reporters/src/JsonReporter.ts index 4b5c4da06..84718b639 100644 --- a/packages/reporters/src/JsonReporter.ts +++ b/packages/reporters/src/JsonReporter.ts @@ -10,7 +10,7 @@ import type { TargetMessageEntry, TargetStatusEntry } from "./types/TargetLogEnt export class JsonReporter implements Reporter { constructor(private options: { logLevel: LogLevel; indented: boolean }) {} - log(entry: LogEntry): void { + public log(entry: LogEntry): void { if (entry.data && entry.data.target && entry.data.target.hidden) { return; } @@ -20,7 +20,7 @@ export class JsonReporter implements Reporter { } } - summarize(schedulerRunSummary: SchedulerRunSummary): void { + public summarize(schedulerRunSummary: SchedulerRunSummary): void { const { duration, targetRuns, targetRunByStatus } = schedulerRunSummary; const summary: Record = { diff --git a/packages/reporters/src/LogReporter.ts b/packages/reporters/src/LogReporter.ts index 3eb751e98..07b40ef39 100644 --- a/packages/reporters/src/LogReporter.ts +++ b/packages/reporters/src/LogReporter.ts @@ -1,6 +1,6 @@ import { formatDuration, hrtimeDiff, hrToSeconds } from "@lage-run/format-hrtime"; import { isTargetStatusLogEntry } from "./isTargetStatusLogEntry.js"; -import { LogLevel, type LogStructuredData } from "@lage-run/logger"; +import { LogLevel } from "@lage-run/logger"; import ansiRegex from "ansi-regex"; import chalk from "chalk"; import type { Chalk } from "chalk"; @@ -77,15 +77,22 @@ function normalize(prefixOrMessage: string, message?: string) { } export class LogReporter implements Reporter { - logStream: Writable = process.stdout; + private logStream: Writable; private logEntries = new Map(); - readonly groupedEntries: Map[]> = new Map(); - constructor(private options: { logLevel?: LogLevel; grouped?: boolean }) { + constructor( + private options: { + logLevel?: LogLevel; + grouped?: boolean; + /** stream for testing */ + logStream?: Writable; + } + ) { options.logLevel = options.logLevel || LogLevel.info; + this.logStream = options.logStream || process.stdout; } - log(entry: LogEntry): void { + public log(entry: LogEntry): void { // if "hidden", do not even attempt to record or report the entry if (entry?.data?.target?.hidden) { return; @@ -190,11 +197,11 @@ export class LogReporter implements Reporter { } } - hr(): void { + private hr(): void { this.print("┈".repeat(80)); } - summarize(schedulerRunSummary: SchedulerRunSummary): void { + public summarize(schedulerRunSummary: SchedulerRunSummary): void { const { targetRuns, targetRunByStatus, duration } = schedulerRunSummary; const { failed, aborted, 
skipped, success, pending } = targetRunByStatus; @@ -281,7 +288,7 @@ export class LogReporter implements Reporter { this.print(`Took a total of ${formatDuration(hrToSeconds(duration))} to complete. ${allCacheHitText}`); } - resetLogEntries(): void { + public resetLogEntries(): void { this.logEntries.clear(); } } diff --git a/packages/reporters/src/ProgressReporter.ts b/packages/reporters/src/ProgressReporter.ts index 128b8d630..15cfbae91 100644 --- a/packages/reporters/src/ProgressReporter.ts +++ b/packages/reporters/src/ProgressReporter.ts @@ -1,4 +1,3 @@ -import EventEmitter from "events"; import { type LogEntry, LogLevel, type Reporter, type LogStructuredData } from "@lage-run/logger"; import type { SchedulerRunSummary, TargetStatus } from "@lage-run/scheduler-types"; @@ -29,14 +28,12 @@ function fancy(str: string) { } export class ProgressReporter implements Reporter { - logStream: Writable = process.stdout; - startTime: [number, number] = [0, 0]; + public logStream: Writable = process.stdout; - logEvent: EventEmitter = new EventEmitter(); - logEntries: Map[]> = new Map(); + private logEntries: Map[]> = new Map(); - taskReporter: TaskReporter; - tasks: Map = new Map(); + private taskReporter: TaskReporter; + private tasks: Map = new Map(); constructor(private options: { concurrency: number; version: string } = { concurrency: 0, version: "0.0.0" }) { this.taskReporter = this.createTaskReporter(); @@ -44,7 +41,7 @@ export class ProgressReporter implements Reporter { this.print(`${fancy("lage")} - Version ${options.version} - ${options.concurrency} Workers`); } - createTaskReporter(): TaskReporter { + private createTaskReporter(): TaskReporter { return new TaskReporter({ productName: "lage", version: this.options.version, @@ -64,7 +61,7 @@ export class ProgressReporter implements Reporter { }); } - log(entry: LogEntry): void { + public log(entry: LogEntry): void { // save the logs for errors if (entry.data?.target?.id) { if (!this.logEntries.has(entry.data.target.id)) { @@ -78,10 +75,6 @@ export class ProgressReporter implements Reporter { return; } - if (entry.data && entry.data.schedulerRun) { - this.startTime = entry.data.schedulerRun.startTime; - } - if (entry.data && entry.data.status && entry.data.target) { const target: Target = entry.data.target; const status: TargetStatus = entry.data.status; @@ -119,11 +112,11 @@ export class ProgressReporter implements Reporter { this.logStream.write(message + "\n"); } - hr(): void { + private hr(): void { this.print("┈".repeat(80)); } - summarize(schedulerRunSummary: SchedulerRunSummary): void { + public summarize(schedulerRunSummary: SchedulerRunSummary): void { const { targetRuns, targetRunByStatus, duration } = schedulerRunSummary; const { failed, aborted, skipped, success, pending, running, queued } = targetRunByStatus; diff --git a/packages/reporters/src/VerboseFileLogReporter.ts b/packages/reporters/src/VerboseFileLogReporter.ts index 2ce5cd482..355bb5f9a 100644 --- a/packages/reporters/src/VerboseFileLogReporter.ts +++ b/packages/reporters/src/VerboseFileLogReporter.ts @@ -16,24 +16,29 @@ function stripAnsi(message: string) { } export class VerboseFileLogReporter implements Reporter { - fileStream: Writable; - constructor(logFile?: string) { + private fileStream: Writable; + + /** + * @param logFile Log file path from CLI args + * @param fileStream Stream for testing + */ + constructor(logFile?: string, fileStream?: Writable) { // if logFile is falsy (not specified on cli args), this.fileStream just become a "nowhere" stream and this 
reporter effectively does nothing if (logFile) { - const logFilePath = path.dirname(path.resolve(logFile)); - if (!fs.existsSync(logFilePath)) { - fs.mkdirSync(logFilePath, { recursive: true }); + const logFileDir = path.dirname(path.resolve(logFile)); + if (!fs.existsSync(logFileDir)) { + fs.mkdirSync(logFileDir, { recursive: true }); } } - this.fileStream = logFile ? fs.createWriteStream(logFile) : new Writable({ write() {} }); + this.fileStream = fileStream ?? (logFile ? fs.createWriteStream(logFile) : new Writable({ write() {} })); } - cleanup(): void { - this.fileStream?.end(); + public cleanup(): void { + this.fileStream.end(); } - log(entry: LogEntry): void { + public log(entry: LogEntry): void { // if "hidden", do not even attempt to record or report the entry if (entry?.data?.target?.hidden) { return; @@ -77,7 +82,7 @@ export class VerboseFileLogReporter implements Reporter { } private print(message: string) { - this.fileStream?.write(message + "\n"); + this.fileStream.write(message + "\n"); } private logTargetEntry(entry: LogEntry) { @@ -102,7 +107,7 @@ export class VerboseFileLogReporter implements Reporter { } } - summarize(): void { + public summarize(): void { // No summary needed for VerboseFileLogReporter } } diff --git a/packages/reporters/src/__tests__/AdoReporter.test.ts b/packages/reporters/src/__tests__/AdoReporter.test.ts index 73e2eca9c..e34a6332c 100644 --- a/packages/reporters/src/__tests__/AdoReporter.test.ts +++ b/packages/reporters/src/__tests__/AdoReporter.test.ts @@ -22,8 +22,7 @@ describe("AdoReporter", () => { it("records a target status entry", () => { const writer = new streams.WritableStream(); - const reporter = new AdoReporter({ grouped: false, logLevel: LogLevel.verbose }); - reporter.logStream = writer; + const reporter = new AdoReporter({ grouped: false, logLevel: LogLevel.verbose, logStream: writer }); reporter.log({ data: { @@ -48,8 +47,7 @@ describe("AdoReporter", () => { it("records a target message entry", () => { const writer = new streams.WritableStream(); - const reporter = new AdoReporter({ grouped: false, logLevel: LogLevel.verbose }); - reporter.logStream = writer; + const reporter = new AdoReporter({ grouped: false, logLevel: LogLevel.verbose, logStream: writer }); reporter.log({ data: { @@ -72,8 +70,7 @@ describe("AdoReporter", () => { it("groups messages together", () => { const writer = new streams.WritableStream(); - const reporter = new AdoReporter({ grouped: true, logLevel: LogLevel.verbose }); - reporter.logStream = writer; + const reporter = new AdoReporter({ grouped: true, logLevel: LogLevel.verbose, logStream: writer }); const aBuildTarget = createTarget("a", "build"); const aTestTarget = createTarget("a", "test"); @@ -131,8 +128,7 @@ describe("AdoReporter", () => { it("interweave messages when ungrouped", () => { const writer = new streams.WritableStream(); - const reporter = new AdoReporter({ grouped: false, logLevel: LogLevel.verbose }); - reporter.logStream = writer; + const reporter = new AdoReporter({ grouped: false, logLevel: LogLevel.verbose, logStream: writer }); const aBuildTarget = createTarget("a", "build"); const aTestTarget = createTarget("a", "test"); @@ -184,8 +180,7 @@ describe("AdoReporter", () => { it("can filter out verbose messages", () => { const writer = new streams.WritableStream(); - const reporter = new AdoReporter({ grouped: false, logLevel: LogLevel.info }); - reporter.logStream = writer; + const reporter = new AdoReporter({ grouped: false, logLevel: LogLevel.info, logStream: writer }); const 
aBuildTarget = createTarget("a", "build"); const aTestTarget = createTarget("a", "test"); @@ -231,8 +226,7 @@ describe("AdoReporter", () => { it("can group verbose messages, displaying summary", () => { const writer = new streams.WritableStream(); - const reporter = new AdoReporter({ grouped: true, logLevel: LogLevel.verbose }); - reporter.logStream = writer; + const reporter = new AdoReporter({ grouped: true, logLevel: LogLevel.verbose, logStream: writer }); const aBuildTarget = createTarget("a", "build"); const aTestTarget = createTarget("a", "test"); diff --git a/packages/reporters/src/__tests__/ChromeTraceEventsReporter.test.ts b/packages/reporters/src/__tests__/ChromeTraceEventsReporter.test.ts index 477721f4a..4470e07f1 100644 --- a/packages/reporters/src/__tests__/ChromeTraceEventsReporter.test.ts +++ b/packages/reporters/src/__tests__/ChromeTraceEventsReporter.test.ts @@ -34,8 +34,7 @@ describe("ChromeTraceEventsReporter", () => { it("can group verbose messages, displaying summary", () => { const consoleWriter = new streams.WritableStream(); - const reporter = new ChromeTraceEventsReporter({ concurrency: 4, outputFile }); - reporter.consoleLogStream = consoleWriter; + const reporter = new ChromeTraceEventsReporter({ concurrency: 4, outputFile, consoleLogStream: consoleWriter }); const aBuildTarget = createTarget("a", "build"); const aTestTarget = createTarget("a", "test"); diff --git a/packages/reporters/src/__tests__/LogReporter.test.ts b/packages/reporters/src/__tests__/LogReporter.test.ts index 177a21e92..e1ae30bc7 100644 --- a/packages/reporters/src/__tests__/LogReporter.test.ts +++ b/packages/reporters/src/__tests__/LogReporter.test.ts @@ -22,8 +22,7 @@ describe("LogReporter", () => { it("records a target status entry", () => { const writer = new streams.WritableStream(); - const reporter = new LogReporter({ grouped: false, logLevel: LogLevel.verbose }); - reporter.logStream = writer; + const reporter = new LogReporter({ grouped: false, logLevel: LogLevel.verbose, logStream: writer }); reporter.log({ data: { @@ -48,8 +47,7 @@ describe("LogReporter", () => { it("records a target message entry", () => { const writer = new streams.WritableStream(); - const reporter = new LogReporter({ grouped: false, logLevel: LogLevel.verbose }); - reporter.logStream = writer; + const reporter = new LogReporter({ grouped: false, logLevel: LogLevel.verbose, logStream: writer }); reporter.log({ data: { @@ -72,8 +70,7 @@ describe("LogReporter", () => { it("groups messages together", () => { const writer = new streams.WritableStream(); - const reporter = new LogReporter({ grouped: true, logLevel: LogLevel.verbose }); - reporter.logStream = writer; + const reporter = new LogReporter({ grouped: true, logLevel: LogLevel.verbose, logStream: writer }); const aBuildTarget = createTarget("a", "build"); const aTestTarget = createTarget("a", "test"); @@ -128,8 +125,7 @@ describe("LogReporter", () => { it("interweave messages when ungrouped", () => { const writer = new streams.WritableStream(); - const reporter = new LogReporter({ grouped: false, logLevel: LogLevel.verbose }); - reporter.logStream = writer; + const reporter = new LogReporter({ grouped: false, logLevel: LogLevel.verbose, logStream: writer }); const aBuildTarget = createTarget("a", "build"); const aTestTarget = createTarget("a", "test"); @@ -181,8 +177,7 @@ describe("LogReporter", () => { it("can filter out verbose messages", () => { const writer = new streams.WritableStream(); - const reporter = new LogReporter({ grouped: false, logLevel: 
LogLevel.info }); - reporter.logStream = writer; + const reporter = new LogReporter({ grouped: false, logLevel: LogLevel.info, logStream: writer }); const aBuildTarget = createTarget("a", "build"); const aTestTarget = createTarget("a", "test"); @@ -228,8 +223,7 @@ describe("LogReporter", () => { it("can display a summary of a failure", () => { const writer = new streams.WritableStream(); - const reporter = new LogReporter({ grouped: true, logLevel: LogLevel.info }); - reporter.logStream = writer; + const reporter = new LogReporter({ grouped: true, logLevel: LogLevel.info, logStream: writer }); const aBuildTarget = createTarget("a", "build"); const aTestTarget = createTarget("a", "test"); diff --git a/packages/reporters/src/__tests__/VerboseFileLogReporter.test.ts b/packages/reporters/src/__tests__/VerboseFileLogReporter.test.ts index 161532b91..5f681c6ed 100644 --- a/packages/reporters/src/__tests__/VerboseFileLogReporter.test.ts +++ b/packages/reporters/src/__tests__/VerboseFileLogReporter.test.ts @@ -20,8 +20,7 @@ function createTarget(packageName: string, task: string) { describe("VerboseFileLogReporter", () => { it("records a target status entry", () => { const writer = new streams.WritableStream(); - const reporter = new VerboseFileLogReporter(); - reporter.fileStream = writer; + const reporter = new VerboseFileLogReporter(undefined, writer); const entry: LogEntry = { data: { @@ -45,8 +44,7 @@ describe("VerboseFileLogReporter", () => { it("records a target message entry", () => { const writer = new streams.WritableStream(); - const reporter = new VerboseFileLogReporter(); - reporter.fileStream = writer; + const reporter = new VerboseFileLogReporter(undefined, writer); const entry: LogEntry = { data: { @@ -68,8 +66,7 @@ describe("VerboseFileLogReporter", () => { it("prefixes target entries with the target id", () => { const writer = new streams.WritableStream(); - const reporter = new VerboseFileLogReporter(); - reporter.fileStream = writer; + const reporter = new VerboseFileLogReporter(undefined, writer); const entry: LogEntry = { data: { @@ -92,8 +89,7 @@ describe("VerboseFileLogReporter", () => { it("does not prefix non-target entries with target id", () => { const writer = new streams.WritableStream(); - const reporter = new VerboseFileLogReporter(); - reporter.fileStream = writer; + const reporter = new VerboseFileLogReporter(undefined, writer); const entry: LogEntry = { level: LogLevel.verbose, @@ -111,8 +107,7 @@ describe("VerboseFileLogReporter", () => { it("never groups messages together", () => { const writer = new streams.WritableStream(); - const reporter = new VerboseFileLogReporter(); - reporter.fileStream = writer; + const reporter = new VerboseFileLogReporter(undefined, writer); const aBuildTarget = createTarget("a", "build"); const aTestTarget = createTarget("a", "test"); @@ -163,8 +158,7 @@ describe("VerboseFileLogReporter", () => { it("always records messages with logLevel verbose or lower", () => { const writer = new streams.WritableStream(); - const reporter = new VerboseFileLogReporter(); - reporter.fileStream = writer; + const reporter = new VerboseFileLogReporter(undefined, writer); const entry1: LogEntry = { level: LogLevel.info, diff --git a/packages/runners/src/NoOpRunner.ts b/packages/runners/src/NoOpRunner.ts index 59b023b02..f20740963 100644 --- a/packages/runners/src/NoOpRunner.ts +++ b/packages/runners/src/NoOpRunner.ts @@ -1,11 +1,11 @@ import type { TargetRunner } from "./types/TargetRunner.js"; export class NoOpRunner implements TargetRunner { - async 
shouldRun(): Promise { + public async shouldRun(): Promise { return true; } - async run(): Promise { + public async run(): Promise { // pass } } diff --git a/packages/runners/src/NpmScriptRunner.ts b/packages/runners/src/NpmScriptRunner.ts index f20868c97..887847c0c 100644 --- a/packages/runners/src/NpmScriptRunner.ts +++ b/packages/runners/src/NpmScriptRunner.ts @@ -11,6 +11,8 @@ export interface NpmScriptRunnerOptions { npmCmd: string; } +const gracefulKillTimeout = 2500; + /** * Runs a npm script on a target. * @@ -30,8 +32,6 @@ export interface NpmScriptRunnerOptions { * - FORCE_COLOR - set to "1" detect that this is a TTY */ export class NpmScriptRunner implements TargetRunner { - static gracefulKillTimeout = 2500; - constructor(private options: NpmScriptRunnerOptions) {} private getNpmArgs(task: string, taskArgs: string[]) { @@ -46,14 +46,14 @@ export class NpmScriptRunner implements TargetRunner { return !!packageJson.scripts?.[task]; } - async shouldRun(target: Target): Promise { + public async shouldRun(target: Target): Promise { // By convention, do not run anything if there is no script for this task defined in package.json (counts as "success") const hasNpmScript = await this.hasNpmScript(target); return hasNpmScript && (target.shouldRun ?? true); } - async run(runOptions: TargetRunnerOptions): Promise { + public async run(runOptions: TargetRunnerOptions): Promise { const { target, weight, abortSignal } = runOptions; const { nodeOptions, npmCmd, taskArgs } = this.options; const task = target.options?.script ?? target.task; @@ -83,7 +83,7 @@ export class NpmScriptRunner implements TargetRunner { if (childProcess && !childProcess.killed) { childProcess.kill("SIGKILL"); } - }, NpmScriptRunner.gracefulKillTimeout); + }, gracefulKillTimeout); // Remember that even this timeout needs to be unref'ed, otherwise the process will hang due to this timeout if (t.unref) { diff --git a/packages/runners/src/TargetRunnerPicker.ts b/packages/runners/src/TargetRunnerPicker.ts index 531ad9428..dabd3f7f6 100644 --- a/packages/runners/src/TargetRunnerPicker.ts +++ b/packages/runners/src/TargetRunnerPicker.ts @@ -8,7 +8,7 @@ import { pathToFileURL } from "url"; export class TargetRunnerPicker { constructor(private options: TargetRunnerPickerOptions) {} - async pick(target: Target): Promise { + public async pick(target: Target): Promise { if (target.id === getStartTargetId()) { return new (await import("./NoOpRunner.js")).NoOpRunner(); } diff --git a/packages/runners/src/WorkerRunner.ts b/packages/runners/src/WorkerRunner.ts index 9eb6af691..11c18acab 100644 --- a/packages/runners/src/WorkerRunner.ts +++ b/packages/runners/src/WorkerRunner.ts @@ -40,11 +40,9 @@ export interface WorkerRunnerOptions { * ``` */ export class WorkerRunner implements TargetRunner { - static gracefulKillTimeout = 2500; - constructor(private options: WorkerRunnerOptions) {} - async shouldRun(target: Target): Promise { + public async shouldRun(target: Target): Promise { const scriptModule = await this.getScriptModule(target); if (typeof scriptModule.shouldRun === "function") { @@ -54,7 +52,7 @@ export class WorkerRunner implements TargetRunner { return target.shouldRun ?? 
true; } - async run(runOptions: TargetRunnerOptions): Promise { + public async run(runOptions: TargetRunnerOptions): Promise { const { target, weight, abortSignal } = runOptions; const { taskArgs } = this.options; @@ -73,7 +71,7 @@ export class WorkerRunner implements TargetRunner { return await runFn({ target, weight, taskArgs, abortSignal }); } - async getScriptModule(target: Target): Promise { + private async getScriptModule(target: Target): Promise { const scriptFile = target.options?.worker ?? target.options?.script; if (!scriptFile) { diff --git a/packages/scheduler/src/SimpleScheduler.ts b/packages/scheduler/src/SimpleScheduler.ts index 956ee2aef..1bd71b7ab 100644 --- a/packages/scheduler/src/SimpleScheduler.ts +++ b/packages/scheduler/src/SimpleScheduler.ts @@ -1,19 +1,18 @@ -import { AggregatedPool } from "@lage-run/worker-threads-pool"; -import { formatBytes } from "./formatBytes.js"; -import { categorizeTargetRuns } from "./categorizeTargetRuns.js"; -import { type Target, getStartTargetId, sortTargetsByPriority } from "@lage-run/target-graph"; -import { WrappedTarget } from "./WrappedTarget.js"; -import { TargetRunnerPicker } from "@lage-run/runners"; - -import type { WorkerResult } from "./WrappedTarget.js"; +import type { CacheOptions } from "@lage-run/cache"; +import type { TargetHasher } from "@lage-run/hasher"; import type { Logger } from "@lage-run/logger"; +import type { TargetRunnerPickerOptions } from "@lage-run/runners"; +import { TargetRunnerPicker } from "@lage-run/runners"; +import type { SchedulerRunResults, SchedulerRunSummary, TargetRunSummary, TargetScheduler } from "@lage-run/scheduler-types"; import type { TargetGraph } from "@lage-run/target-graph"; -import type { TargetScheduler, SchedulerRunResults, SchedulerRunSummary, TargetRunSummary } from "@lage-run/scheduler-types"; +import { type Target, getStartTargetId, sortTargetsByPriority } from "@lage-run/target-graph"; import type { Pool } from "@lage-run/worker-threads-pool"; -import type { TargetRunnerPickerOptions } from "@lage-run/runners"; -import type { TargetHasher } from "@lage-run/hasher"; -import type { CacheOptions } from "@lage-run/cache"; +import { AggregatedPool } from "@lage-run/worker-threads-pool"; import type { MessagePort } from "worker_threads"; +import { categorizeTargetRuns } from "./categorizeTargetRuns.js"; +import { formatBytes } from "./formatBytes.js"; +import type { WorkerResult } from "./WrappedTarget.js"; +import { WrappedTarget } from "./WrappedTarget.js"; export interface SimpleSchedulerOptions { logger: Logger; @@ -48,14 +47,12 @@ export interface SimpleSchedulerOptions { * */ export class SimpleScheduler implements TargetScheduler { - targetRuns: Map = new Map(); - rerunTargets: Set = new Set(); - abortController: AbortController = new AbortController(); - abortSignal: AbortSignal = this.abortController.signal; - pool: Pool; - runnerPicker: TargetRunnerPicker; - - runPromise = Promise.resolve() as Promise; + public readonly targetRuns: Map = new Map(); + private rerunTargets: Set = new Set(); + private abortController: AbortController = new AbortController(); + private abortSignal: AbortSignal = this.abortController.signal; + private pool: Pool; + public readonly runnerPicker: TargetRunnerPicker; constructor(private options: SimpleSchedulerOptions) { this.pool = @@ -77,7 +74,7 @@ export class SimpleScheduler implements TargetScheduler { this.runnerPicker = new TargetRunnerPicker(options.workerData.runners); } - getTargetsByPriority(): Target[] { + private 
getTargetsByPriority(): Target[] { return sortTargetsByPriority([...this.targetRuns.values()].map((run) => run.target)); } @@ -86,12 +83,8 @@ export class SimpleScheduler implements TargetScheduler { * 1. Convert the target graph into a promise graph. * 2. Create a promise graph of all targets * 3. Pass the continueOnError option to the promise graph runner. - * - * @param root - * @param targetGraph - * @returns */ - async run(root: string, targetGraph: TargetGraph, shouldRerun = false): Promise> { + public async run(root: string, targetGraph: TargetGraph, shouldRerun = false): Promise> { const startTime: [number, number] = process.hrtime(); const { continueOnError, logger, shouldCache } = this.options; @@ -181,9 +174,8 @@ export class SimpleScheduler implements TargetScheduler { /** * Used by consumers of the scheduler to notify that the inputs to the target has changed - * @param targetId */ - markTargetAndDependentsPending(targetId: string): void { + private markTargetAndDependentsPending(targetId: string): void { const queue = [targetId]; while (queue.length > 0) { const current = queue.shift()!; @@ -203,7 +195,7 @@ export class SimpleScheduler implements TargetScheduler { } } - getReadyTargets(): WrappedTarget[] { + private getReadyTargets(): WrappedTarget[] { const readyTargets: Set = new Set(); for (const target of this.getTargetsByPriority()) { @@ -233,7 +225,7 @@ export class SimpleScheduler implements TargetScheduler { return [...readyTargets]; } - isAllDone(): boolean { + private isAllDone(): boolean { for (const t of this.targetRuns.values()) { if (t.status !== "skipped" && t.status !== "success" && t.target.id !== getStartTargetId()) { return false; @@ -243,7 +235,7 @@ export class SimpleScheduler implements TargetScheduler { return true; } - async scheduleReadyTargets(): Promise { + private async scheduleReadyTargets(): Promise { if (this.isAllDone() || this.abortSignal.aborted) { return Promise.resolve(); } @@ -260,7 +252,7 @@ export class SimpleScheduler implements TargetScheduler { await Promise.all(promises); } - logProgress(): void { + private logProgress(): void { const targetRunByStatus = categorizeTargetRuns(this.targetRuns.values()); const total = [...this.targetRuns.values()].filter((t) => !t.target.hidden).length; @@ -323,7 +315,7 @@ export class SimpleScheduler implements TargetScheduler { await this.scheduleReadyTargets(); } - async cleanup(): Promise { + public async cleanup(): Promise { this.options.logger.silly(`Max Worker Memory Usage: ${formatBytes(this.pool.stats().maxWorkerMemoryUsage)}`); await this.pool.close(); } @@ -331,7 +323,7 @@ export class SimpleScheduler implements TargetScheduler { /** * Abort the scheduler using the abort controller. 
*/ - abort(): void { + public abort(): void { this.abortController.abort(); } } diff --git a/packages/scheduler/src/WrappedTarget.ts b/packages/scheduler/src/WrappedTarget.ts index c66337e98..fef6e20be 100644 --- a/packages/scheduler/src/WrappedTarget.ts +++ b/packages/scheduler/src/WrappedTarget.ts @@ -1,17 +1,15 @@ -import { bufferTransform } from "./bufferTransform.js"; -import { getLageOutputCacheLocation } from "./getLageOutputCacheLocation.js"; +import type { TargetHasher } from "@lage-run/hasher"; +import type { Logger } from "@lage-run/logger"; import { type LogEntry, LogLevel } from "@lage-run/logger"; - -import fs from "fs"; -import path from "path"; -import { mkdir, writeFile } from "fs/promises"; - -import type { Pool } from "@lage-run/worker-threads-pool"; import type { TargetRun, TargetStatus } from "@lage-run/scheduler-types"; import { getStartTargetId, type Target } from "@lage-run/target-graph"; -import type { Logger } from "@lage-run/logger"; -import type { TargetHasher } from "@lage-run/hasher"; +import type { Pool } from "@lage-run/worker-threads-pool"; +import fs from "fs"; +import { mkdir, writeFile } from "fs/promises"; +import path from "path"; import type { MessagePort } from "worker_threads"; +import { bufferTransform } from "./bufferTransform.js"; +import { getLageOutputCacheLocation } from "./getLageOutputCacheLocation.js"; export interface WrappedTargetOptions { root: string; @@ -44,33 +42,25 @@ export interface WorkerResult { export class WrappedTarget implements TargetRun { #status: TargetStatus = "pending"; #result: WorkerResult | undefined; - queueTime: [number, number] = [0, 0]; - startTime: [number, number] = [0, 0]; - duration: [number, number] = [0, 0]; - target: Target; - threadId = 0; + public queueTime: [number, number] = [0, 0]; + public startTime: [number, number] = [0, 0]; + public duration: [number, number] = [0, 0]; + public target: Target; + public threadId = 0; - get result(): WorkerResult | undefined { + public get result(): WorkerResult | undefined { return this.#result; } - get status(): TargetStatus { + public get status(): TargetStatus { return this.#status; } - get abortController(): AbortController { - return this.options.abortController; - } - - set abortController(abortController: AbortController) { - this.options.abortController = abortController; - } - - get successful(): boolean { + public get successful(): boolean { return this.#status === "skipped" || this.#status === "success"; } - get waiting(): boolean { + public get waiting(): boolean { return this.#status === "pending" || this.#status === "queued"; } @@ -84,18 +74,18 @@ export class WrappedTarget implements TargetRun { this.options.logger.info("", { target: this.target, status: this.status }); } - onQueued(): void { + public onQueued(): void { this.#status = "queued"; this.queueTime = process.hrtime(); this.options.logger.info("", { target: this.target, status: "queued" }); } - onAbort(): void { + private onAbort(): void { this.#status = "aborted"; this.options.logger.info("", { target: this.target, status: "aborted", threadId: this.threadId }); } - onStart(threadId: number): void { + private onStart(threadId: number): void { if (this.status !== "running") { this.threadId = threadId; this.#status = "running"; @@ -104,7 +94,7 @@ export class WrappedTarget implements TargetRun { } } - onComplete(): void { + private onComplete(): void { this.#status = "success"; this.options.logger.info("", { target: this.target, @@ -114,7 +104,7 @@ export class WrappedTarget implements 
TargetRun { }); } - onFail(): void { + private onFail(): void { this.#status = "failed"; this.options.logger.info("", { target: this.target, @@ -128,7 +118,7 @@ export class WrappedTarget implements TargetRun { } } - onSkipped(hash?: string | undefined): void { + public onSkipped(hash?: string | undefined): void { this.#status = "skipped"; if (hash) { @@ -142,7 +132,7 @@ export class WrappedTarget implements TargetRun { } } - async run(): Promise { + public async run(): Promise { const { target, logger, shouldCache, abortController, root } = this.options; const abortSignal = abortController.signal; @@ -277,13 +267,11 @@ export class WrappedTarget implements TargetRun { } /** - * A JSON representation of this wrapped target, suitable for serialization in tests. + * A JSON representation of this wrapped target, used in Jest snapshots. * * Skips the unpredictable properties of the wrapped target like the startTime and duration. - * - * @returns */ - toJSON(): { + public toJSON(): { target: string; status: TargetStatus; } { @@ -296,7 +284,7 @@ export class WrappedTarget implements TargetRun { /** * Reset the state of this wrapped target. */ - reset(): void { + public reset(): void { this.#result = undefined; this.#status = "pending"; } diff --git a/packages/scheduler/src/__tests__/SimpleScheduler.test.ts b/packages/scheduler/src/__tests__/SimpleScheduler.test.ts index 65ab76b8d..b697a5b88 100644 --- a/packages/scheduler/src/__tests__/SimpleScheduler.test.ts +++ b/packages/scheduler/src/__tests__/SimpleScheduler.test.ts @@ -15,7 +15,7 @@ import { InProcPool, SingleSchedulePool } from "./fixtures/pools.js"; * 2. It will auto create a startTargetId -> each target's id. */ class TestTargetGraph implements TargetGraph { - targets: Map = new Map([ + public targets: Map = new Map([ [ getStartTargetId(), { @@ -26,9 +26,9 @@ class TestTargetGraph implements TargetGraph { ], ]); - dependencies: [string, string][] = []; + private dependencies: [string, string][] = []; - addTarget(packageName: string, task: string) { + public addTarget(packageName: string, task: string) { const id = `${packageName}#${task}`; this.targets.set(id, { @@ -50,7 +50,7 @@ class TestTargetGraph implements TargetGraph { return this as TestTargetGraph; } - addDependency(from: string, to: string) { + public addDependency(from: string, to: string) { this.dependencies.push([from, to]); this.targets.get(from)!.dependencies.push(to); this.targets.get(to)!.dependencies.push(from); diff --git a/packages/scheduler/src/__tests__/WrappedTarget.test.ts b/packages/scheduler/src/__tests__/WrappedTarget.test.ts index 3c2ced877..873be8845 100644 --- a/packages/scheduler/src/__tests__/WrappedTarget.test.ts +++ b/packages/scheduler/src/__tests__/WrappedTarget.test.ts @@ -23,23 +23,23 @@ function createTarget(packageName: string): Target { class InProcPool implements Pool { constructor(private runner: TargetRunner) {} - exec(data: { target: Target; weight: number }, _weight: number, _setup: any, _teardown: any, abortSignal?: AbortSignal) { + public exec(data: { target: Target; weight: number }, _weight: number, _setup: any, _teardown: any, abortSignal?: AbortSignal) { return this.runner.run({ target: data.target, weight: data.weight, abortSignal }); } - stats() { + public stats() { return { workerRestarts: 0, maxWorkerMemoryUsage: 0, }; } - close() { + public close() { return Promise.resolve(); } } class SkippyInProcPool implements Pool { constructor(private runner: TargetRunner) {} - async exec( + public async exec( data: { target: Target; weight: 
number }, _weight: number, _setup: any, @@ -52,13 +52,13 @@ class SkippyInProcPool implements Pool { hash: "1234", }); } - stats() { + public stats() { return { workerRestarts: 0, maxWorkerMemoryUsage: 0, }; } - close() { + public close() { return Promise.resolve(); } } diff --git a/packages/target-graph/src/TargetFactory.ts b/packages/target-graph/src/TargetFactory.ts index 817000064..98d34e9fc 100644 --- a/packages/target-graph/src/TargetFactory.ts +++ b/packages/target-graph/src/TargetFactory.ts @@ -12,7 +12,7 @@ export interface TargetFactoryOptions { } export class TargetFactory { - packageScripts: Set = new Set(); + private packageScripts: Set = new Set(); constructor(private options: TargetFactoryOptions) { const { packageInfos } = options; @@ -23,7 +23,7 @@ export class TargetFactory { } } - getTargetType(task: string, config: TargetConfig): string { + private getTargetType(task: string, config: TargetConfig): string { if (!config.type) { if (this.packageScripts.has(task)) { return "npmScript"; @@ -37,12 +37,8 @@ export class TargetFactory { /** * Creates a package task `Target` - * @param packageName - * @param task - * @param config - * @returns a package task `Target` */ - createPackageTarget(packageName: string, task: string, config: TargetConfig): Target { + public createPackageTarget(packageName: string, task: string, config: TargetConfig): Target { const { resolve } = this.options; const { options, deps, dependsOn, cache, inputs, priority, maxWorkers, environmentGlob, weight } = config; const cwd = resolve(packageName); @@ -75,7 +71,7 @@ export class TargetFactory { return target; } - createGlobalTarget(id: string, config: TargetConfig): Target { + public createGlobalTarget(id: string, config: TargetConfig): Target { const { root } = this.options; const { options, deps, dependsOn, cache, inputs, outputs, priority, maxWorkers, environmentGlob, weight } = config; const { task } = getPackageAndTask(id); @@ -107,7 +103,7 @@ export class TargetFactory { /** * Creates a target that operates on files that are "staged" (git index) */ - createStagedTarget(task: string, config: StagedTargetConfig, changedFiles: string[]): Target { + public createStagedTarget(task: string, config: StagedTargetConfig, changedFiles: string[]): Target { const { root } = this.options; const { dependsOn, priority } = config; diff --git a/packages/target-graph/src/TargetGraphBuilder.ts b/packages/target-graph/src/TargetGraphBuilder.ts index 0df2c8b0f..2934a2a3e 100644 --- a/packages/target-graph/src/TargetGraphBuilder.ts +++ b/packages/target-graph/src/TargetGraphBuilder.ts @@ -42,7 +42,7 @@ import type { TargetGraph } from "./types/TargetGraph.js"; */ export class TargetGraphBuilder { /** A map of targets - used internally for looking up generated targets from the target configurations */ - targets: Map = new Map(); + public readonly targets: Map = new Map(); /** * Initializes the builder with package infos @@ -63,13 +63,13 @@ export class TargetGraphBuilder { } as Target); } - addTarget(target: Target): Target { + public addTarget(target: Target): Target { this.targets.set(target.id, target); this.addDependency(getStartTargetId(), target.id); return target; } - addDependency(dependency: string, dependent: string): void { + public addDependency(dependency: string, dependent: string): void { if (this.targets.has(dependent)) { const target = this.targets.get(dependent)!; @@ -90,7 +90,7 @@ export class TargetGraphBuilder { /** * Builds a target graph for given tasks and packages */ - build(): TargetGraph { + 
public build(): TargetGraph { // Ensure we do not have cycles in the subgraph const cycleInfo = detectCycles(this.targets); if (cycleInfo.hasCycle) { @@ -105,7 +105,7 @@ export class TargetGraphBuilder { }; } - subgraph(entriesTargetIds: string[]): TargetGraph { + public subgraph(entriesTargetIds: string[]): TargetGraph { const subgraphBuilder = new TargetGraphBuilder(); const visited: Set = new Set(); const queue: string[] = []; diff --git a/packages/worker-threads-pool/src/AggregatedPool.ts b/packages/worker-threads-pool/src/AggregatedPool.ts index 93b096721..78af7515a 100644 --- a/packages/worker-threads-pool/src/AggregatedPool.ts +++ b/packages/worker-threads-pool/src/AggregatedPool.ts @@ -18,8 +18,10 @@ interface AggregatedPoolOptions { } export class AggregatedPool extends EventEmitter implements Pool { - readonly groupedPools: Map = new Map(); - readonly defaultPool: WorkerPool | undefined; + /** @internal visible for testing */ + public readonly groupedPools: Map = new Map(); + /** @internal visible for testing */ + public readonly defaultPool: WorkerPool | undefined; constructor(private options: AggregatedPoolOptions) { super(); @@ -72,7 +74,7 @@ export class AggregatedPool extends EventEmitter implements Pool { }); } - stats(): { + public stats(): { maxWorkerMemoryUsage: number; workerRestarts: number; } { @@ -91,7 +93,7 @@ export class AggregatedPool extends EventEmitter implements Pool { return stats; } - async exec( + public async exec( data: Record, weight: number, setup?: (worker: IWorker, stdout: Readable, stderr: Readable) => void, @@ -109,7 +111,7 @@ export class AggregatedPool extends EventEmitter implements Pool { return pool.exec(data, weight, setup, cleanup, abortSignal, priority); } - async close(): Promise { + public async close(): Promise { const promises = [...this.groupedPools.values(), this.defaultPool].map((pool) => pool?.close()); return Promise.all(promises); } diff --git a/packages/worker-threads-pool/src/TaskInfo.ts b/packages/worker-threads-pool/src/TaskInfo.ts index 0bc73e907..8ee11f2cb 100644 --- a/packages/worker-threads-pool/src/TaskInfo.ts +++ b/packages/worker-threads-pool/src/TaskInfo.ts @@ -23,19 +23,19 @@ export class TaskInfo extends AsyncResource { } } - get id(): string { + public get id(): string { return this.options.id; } - get weight(): number { + public get weight(): number { return this.options.weight; } - get abortSignal(): AbortSignal | undefined { + public get abortSignal(): AbortSignal | undefined { return this.options.abortSignal; } - done(err: Error, results: unknown): void { + public done(err: Error, results: unknown): void { const { cleanup, worker, resolve, reject } = this.options; if (cleanup) { diff --git a/packages/worker-threads-pool/src/ThreadWorker.ts b/packages/worker-threads-pool/src/ThreadWorker.ts index 6adc6401f..8e6bc468a 100644 --- a/packages/worker-threads-pool/src/ThreadWorker.ts +++ b/packages/worker-threads-pool/src/ThreadWorker.ts @@ -47,10 +47,10 @@ export class ThreadWorker extends EventEmitter implements IWorker { // @ts-ignore TS2564 #worker: Worker; - status: "free" | "busy" = "busy"; - restarts = 0; + public status: "free" | "busy" = "busy"; + public restarts = 0; - maxWorkerMemoryUsage = 0; + public maxWorkerMemoryUsage = 0; constructor( private script: string, @@ -225,7 +225,7 @@ export class ThreadWorker extends EventEmitter implements IWorker { } } - start(work: QueueItem, abortSignal?: AbortSignal): void { + public start(work: QueueItem, abortSignal?: AbortSignal): void { this.status = "busy"; const { 
task, resolve, reject, cleanup, setup } = work; @@ -249,44 +249,44 @@ export class ThreadWorker extends EventEmitter implements IWorker { this.#worker.postMessage({ type: "start", task: { ...task, weight: work.weight }, id }); } - get weight(): number { + public get weight(): number { return this.#taskInfo?.weight ?? 1; } - get stdout(): Readable { + public get stdout(): Readable { return this.#stdoutInfo.stream; } - get stderr(): Readable { + public get stderr(): Readable { return this.#stderrInfo.stream; } - get resourceLimits(): ResourceLimits | undefined { + public get resourceLimits(): ResourceLimits | undefined { return this.#worker.resourceLimits; } - get threadId(): number { + public get threadId(): number { return this.#worker.threadId; } - terminate(): void { + public terminate(): void { this.#worker.removeAllListeners(); void this.#worker.terminate(); this.#worker.unref(); } - restart(): void { + public restart(): void { this.restarts++; this.status = "busy"; void this.#worker.terminate(); this.#createNewWorker(); } - checkMemoryUsage(): void { + private checkMemoryUsage(): void { this.#worker.postMessage({ type: "check-memory-usage" }); } - postMessage(value: unknown, transferList?: readonly TransferListItem[] | undefined): void { + public postMessage(value: unknown, transferList?: readonly TransferListItem[] | undefined): void { this.#worker.postMessage(value, transferList); } } diff --git a/packages/worker-threads-pool/src/WorkerPool.ts b/packages/worker-threads-pool/src/WorkerPool.ts index 58055eb49..f804be4a5 100644 --- a/packages/worker-threads-pool/src/WorkerPool.ts +++ b/packages/worker-threads-pool/src/WorkerPool.ts @@ -18,11 +18,14 @@ export const WorkerPoolEvents = { } as const; export class WorkerPool extends EventEmitter implements Pool { - workers: IWorker[] = []; - queue: QueueItem[] = []; - minWorkers = 0; - maxWorkers = 0; - availability = 0; + /** @internal visible for testing */ + public workers: IWorker[] = []; + private queue: QueueItem[] = []; + private minWorkers = 0; + /** @internal visible for testing */ + public maxWorkers = 0; + /** @internal visible for testing */ + public availability = 0; constructor(private options: WorkerPoolOptions) { super(); @@ -48,29 +51,21 @@ export class WorkerPool extends EventEmitter implements Pool { }); } - isIdle(): boolean { + public isIdle(): boolean { return this.workers.every((w) => w.status === "free"); } - get workerRestarts(): number { - return this.workers.reduce((acc, worker) => acc + worker.restarts, 0); - } - - get maxWorkerMemoryUsage(): number { - return this.workers.reduce((acc, worker) => Math.max(acc, worker.maxWorkerMemoryUsage), 0); - } - - stats(): { + public stats(): { maxWorkerMemoryUsage: number; workerRestarts: number; } { return { - maxWorkerMemoryUsage: this.maxWorkerMemoryUsage, - workerRestarts: this.workerRestarts, + maxWorkerMemoryUsage: this.workers.reduce((acc, worker) => Math.max(acc, worker.maxWorkerMemoryUsage), 0), + workerRestarts: this.workers.reduce((acc, worker) => acc + worker.restarts, 0), }; } - createInitialWorkers(): void { + private createInitialWorkers(): void { if (this.workers.length === 0) { for (let i = 0; i < this.minWorkers; i++) { this.addNewWorker(); @@ -78,7 +73,7 @@ export class WorkerPool extends EventEmitter implements Pool { } } - addNewWorker(): ThreadWorker | undefined { + private addNewWorker(): ThreadWorker | undefined { if (this.workers.length < this.maxWorkers) { const { script, workerOptions } = this.options; const worker = new ThreadWorker(script, { 
workerOptions, workerIdleMemoryLimit: this.options.workerIdleMemoryLimit }); @@ -92,7 +87,7 @@ export class WorkerPool extends EventEmitter implements Pool { } } - exec( + public exec( task: Record, weight: number, setup?: (worker: IWorker, stdout: Readable, stderr: Readable) => void, @@ -113,7 +108,7 @@ export class WorkerPool extends EventEmitter implements Pool { }); } - _exec(abortSignal?: AbortSignal): void { + private _exec(abortSignal?: AbortSignal): void { // find work that will fit the availability of workers const workIndex = pickTaskFromQueue(this.queue, this.availability); @@ -138,7 +133,7 @@ export class WorkerPool extends EventEmitter implements Pool { } } - async close(): Promise { + public async close(): Promise { await Promise.all(this.workers.map((worker) => worker.terminate())); } } diff --git a/scripts/config/eslintrc.js b/scripts/config/eslintrc.js index 7d502ca42..87f5318f7 100644 --- a/scripts/config/eslintrc.js +++ b/scripts/config/eslintrc.js @@ -52,6 +52,15 @@ const config = { destructuredArrayIgnorePattern: "^_", }, ], + "@typescript-eslint/explicit-member-accessibility": [ + "error", + { + accessibility: "explicit", + overrides: { + constructors: "off", + }, + }, + ], }, overrides: [ { @@ -59,6 +68,7 @@ const config = { rules: { "@typescript-eslint/no-var-requires": "off", "@typescript-eslint/no-require-imports": "off", + "@typescript-eslint/explicit-member-accessibility": "off", "no-console": "off", }, }, diff --git a/yarn.lock b/yarn.lock index 862ed061d..9e46bf01d 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1633,6 +1633,7 @@ __metadata: dependencies: "@lage-run/cli": "workspace:^" "@lage-run/globby": "workspace:^" + "@lage-run/monorepo-fixture": "workspace:^" "@lage-run/monorepo-scripts": "workspace:^" "@lage-run/scheduler-types": "workspace:^" "@lage-run/target-graph": "workspace:^"
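The scripts/config/eslintrc.js hunk above is what drives every `public`/`private`/`protected` edit in this diff: `@typescript-eslint/explicit-member-accessibility` now requires an explicit accessibility modifier on class members, with constructors exempted via the `constructors: "off"` override (which is why no constructor in the diff gains a `public` keyword), and the rule is disabled again inside the existing overrides block. As a minimal sketch of what the new config expects (the `ExampleReporter` class and its members below are illustrative only, not taken from the repo), a class written against the rule looks like this:

// Illustrative sketch only, not part of the diff. Mirrors the pattern used
// throughout this change: explicit accessibility on every member, a private
// stream that can be injected for testing, and an unannotated constructor
// (allowed by the `constructors: "off"` override).
import type { Writable } from "stream";

export class ExampleReporter {
  public readonly entries: string[] = [];
  private logStream: Writable;

  constructor(private options: { logStream?: Writable }) {
    this.logStream = options.logStream ?? process.stdout;
  }

  // Without the `public` keyword this method would now fail lint.
  public log(message: string): void {
    this.entries.push(message);
    this.print(message);
  }

  private print(message: string): void {
    this.logStream.write(message + "\n");
  }
}

Members that exist only for tests (for example `groupedPools` in AggregatedPool or `workers` in WorkerPool) stay `public` but are tagged `/** @internal visible for testing */` instead of being narrowed, which keeps the existing tests compiling while still satisfying the rule.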