From 687603cc4f33bb51e92bcb7e6ea7aac8e0a771a5 Mon Sep 17 00:00:00 2001 From: Craigory Coppola Date: Tue, 5 May 2026 17:39:36 -0400 Subject: [PATCH 1/3] feat(core): expose verifySandboxViolations API for sandbox-report checking Adds a programmatic API that lets external consumers verify whether sandbox-violation file paths from a prior task run would be considered legitimate inputs/outputs in the current workspace configuration. The API mirrors the logic that powers nx show target --check: inputs are reconciled via HashPlanInspector.inspectTaskInputs, outputs via getOutputsForTargetAndConfiguration with glob matching. HashPlanInspector and the new verifier are now exported from devkit-exports for use by the nx-cloud light client. --- packages/nx/src/devkit-exports.ts | 18 + .../hasher/verify-sandbox-violations.spec.ts | 379 ++++++++++++++++++ .../src/hasher/verify-sandbox-violations.ts | 227 +++++++++++ 3 files changed, 624 insertions(+) create mode 100644 packages/nx/src/hasher/verify-sandbox-violations.spec.ts create mode 100644 packages/nx/src/hasher/verify-sandbox-violations.ts diff --git a/packages/nx/src/devkit-exports.ts b/packages/nx/src/devkit-exports.ts index c939e18790161..6a2038292ef1a 100644 --- a/packages/nx/src/devkit-exports.ts +++ b/packages/nx/src/devkit-exports.ts @@ -259,3 +259,21 @@ export { cacheDir } from './utils/cache-directory'; export { createProjectFileMapUsingProjectGraph } from './project-graph/file-map-utils'; export { isDaemonEnabled } from './daemon/client/client'; + +/** + * @category Sandbox + */ +export { HashPlanInspector } from './hasher/hash-plan-inspector'; + +/** + * @category Sandbox + */ +export type { + SandboxViolationInput, + SandboxViolationResult, +} from './hasher/verify-sandbox-violations'; + +/** + * @category Sandbox + */ +export { verifySandboxViolations } from './hasher/verify-sandbox-violations'; diff --git a/packages/nx/src/hasher/verify-sandbox-violations.spec.ts 
b/packages/nx/src/hasher/verify-sandbox-violations.spec.ts new file mode 100644 index 0000000000000..e2131a2fc85a7 --- /dev/null +++ b/packages/nx/src/hasher/verify-sandbox-violations.spec.ts @@ -0,0 +1,379 @@ +import type { ProjectGraph } from '../config/project-graph'; +import type { HashInputs } from '../native'; + +// ── Mocks must be declared BEFORE imports that use them ───────────────────── +// +// jest.mock() calls are hoisted to the top of the file by Babel/SWC, which +// means any factory closure runs before module-level `const` declarations are +// initialised. The pattern below avoids referencing outer-scope variables +// inside the factory; instead we configure the mock in beforeEach so every +// test starts with a fresh set of spies. + +jest.mock('./hash-plan-inspector', () => ({ + HashPlanInspector: jest.fn(), +})); + +jest.mock('../tasks-runner/utils', () => ({ + getOutputsForTargetAndConfiguration: jest.fn(), +})); + +// ── Imports (after mocks) ──────────────────────────────────────────────────── + +// eslint-disable-next-line import/order +import { HashPlanInspector } from './hash-plan-inspector'; +// eslint-disable-next-line import/order +import { getOutputsForTargetAndConfiguration } from '../tasks-runner/utils'; +// eslint-disable-next-line import/order +import { verifySandboxViolations } from './verify-sandbox-violations'; + +const MockHashPlanInspector = jest.mocked(HashPlanInspector); +const mockGetOutputs = jest.mocked(getOutputsForTargetAndConfiguration); + +// ── Per-test mock spies ────────────────────────────────────────────────────── + +let mockInit: jest.Mock; +let mockInspectTaskInputs: jest.Mock; + +// ── Helpers ────────────────────────────────────────────────────────────────── + +function buildGraph(): ProjectGraph { + return { + nodes: { + myproj: { + name: 'myproj', + type: 'lib', + data: { + root: 'libs/myproj', + targets: { + build: { + executor: '@nx/js:tsc', + outputs: ['dist/libs/myproj'], + }, + }, + }, + }, + }, + 
dependencies: { myproj: [] }, + } as unknown as ProjectGraph; +} + +function makeHashInputs(files: string[]): HashInputs { + return { + files, + runtime: [], + environment: [], + depOutputs: [], + external: [], + }; +} + +// ── Tests ──────────────────────────────────────────────────────────────────── + +describe('verifySandboxViolations', () => { + beforeEach(() => { + jest.clearAllMocks(); + + // Rebuild per-test spies so callers can customise return values. + mockInit = jest.fn().mockResolvedValue(undefined); + mockInspectTaskInputs = jest.fn(); + + MockHashPlanInspector.mockImplementation( + () => + ({ + init: mockInit, + inspectTaskInputs: mockInspectTaskInputs, + }) as unknown as HashPlanInspector + ); + + // Default: no outputs + mockGetOutputs.mockReturnValue([]); + }); + + it('returns empty array for empty violations', async () => { + const result = await verifySandboxViolations([], { + projectGraph: buildGraph(), + }); + expect(result).toEqual([]); + expect(MockHashPlanInspector).not.toHaveBeenCalled(); + }); + + describe('all-reconciled case', () => { + it('returns ok:true when every read and write is accounted for', async () => { + const graph = buildGraph(); + + mockInspectTaskInputs.mockReturnValue({ + 'myproj:build': makeHashInputs([ + 'libs/myproj/src/index.ts', + 'libs/myproj/package.json', + ]), + }); + mockGetOutputs.mockReturnValue(['dist/libs/myproj']); + + const [result] = await verifySandboxViolations( + [ + { + taskId: 'myproj:build', + reads: ['libs/myproj/src/index.ts', 'libs/myproj/package.json'], + writes: ['dist/libs/myproj/index.js'], + }, + ], + { projectGraph: graph } + ); + + expect(result.ok).toBe(true); + expect(result.reads.reconciled).toEqual([ + 'libs/myproj/src/index.ts', + 'libs/myproj/package.json', + ]); + expect(result.reads.stillUnexpected).toEqual([]); + expect(result.writes.reconciled).toEqual(['dist/libs/myproj/index.js']); + expect(result.writes.stillUnexpected).toEqual([]); + }); + }); + + describe('mixed case', () 
=> { + it('splits paths into reconciled and stillUnexpected correctly', async () => { + const graph = buildGraph(); + + mockInspectTaskInputs.mockReturnValue({ + 'myproj:build': makeHashInputs(['libs/myproj/src/index.ts']), + }); + mockGetOutputs.mockReturnValue(['dist/libs/myproj']); + + const [result] = await verifySandboxViolations( + [ + { + taskId: 'myproj:build', + reads: ['libs/myproj/src/index.ts', 'some/secret/file.ts'], + writes: ['dist/libs/myproj/bundle.js', '/tmp/shadow-file'], + }, + ], + { projectGraph: graph } + ); + + expect(result.ok).toBe(false); + expect(result.reads.reconciled).toEqual(['libs/myproj/src/index.ts']); + expect(result.reads.stillUnexpected).toEqual(['some/secret/file.ts']); + expect(result.writes.reconciled).toEqual(['dist/libs/myproj/bundle.js']); + expect(result.writes.stillUnexpected).toEqual(['/tmp/shadow-file']); + }); + }); + + describe('all-still-unexpected case', () => { + it('returns ok:false when nothing is reconciled', async () => { + const graph = buildGraph(); + + mockInspectTaskInputs.mockReturnValue({ + 'myproj:build': makeHashInputs([]), + }); + mockGetOutputs.mockReturnValue([]); + + const [result] = await verifySandboxViolations( + [ + { + taskId: 'myproj:build', + reads: ['sneaky/read.ts'], + writes: ['sneaky/write.js'], + }, + ], + { projectGraph: graph } + ); + + expect(result.ok).toBe(false); + expect(result.reads.stillUnexpected).toEqual(['sneaky/read.ts']); + expect(result.writes.stillUnexpected).toEqual(['sneaky/write.js']); + expect(result.reads.reconciled).toEqual([]); + expect(result.writes.reconciled).toEqual([]); + }); + }); + + describe('glob output matching', () => { + it('reconciles a write that matches a glob output pattern', async () => { + const graph = buildGraph(); + + mockInspectTaskInputs.mockReturnValue({ + 'myproj:build': makeHashInputs([]), + }); + // Output is a glob pattern + mockGetOutputs.mockReturnValue(['dist/**']); + + const [result] = await verifySandboxViolations( + [ + { + 
taskId: 'myproj:build', + writes: ['dist/main.js', 'dist/nested/chunk.js'], + }, + ], + { projectGraph: graph } + ); + + expect(result.writes.reconciled).toEqual([ + 'dist/main.js', + 'dist/nested/chunk.js', + ]); + expect(result.writes.stillUnexpected).toEqual([]); + expect(result.ok).toBe(true); + }); + + it('does NOT reconcile a write that is outside the glob pattern', async () => { + const graph = buildGraph(); + + mockInspectTaskInputs.mockReturnValue({ + 'myproj:build': makeHashInputs([]), + }); + mockGetOutputs.mockReturnValue(['dist/**']); + + const [result] = await verifySandboxViolations( + [ + { + taskId: 'myproj:build', + writes: ['other-dir/file.js'], + }, + ], + { projectGraph: graph } + ); + + expect(result.writes.stillUnexpected).toEqual(['other-dir/file.js']); + expect(result.ok).toBe(false); + }); + }); + + describe('empty reads and writes', () => { + it('handles undefined reads and writes as empty arrays', async () => { + const graph = buildGraph(); + + mockInspectTaskInputs.mockReturnValue({ + 'myproj:build': makeHashInputs([]), + }); + + const [result] = await verifySandboxViolations( + [{ taskId: 'myproj:build' }], + { projectGraph: graph } + ); + + expect(result.reads.reconciled).toEqual([]); + expect(result.reads.stillUnexpected).toEqual([]); + expect(result.writes.reconciled).toEqual([]); + expect(result.writes.stillUnexpected).toEqual([]); + expect(result.ok).toBe(true); + }); + + it('handles explicitly empty reads and writes arrays', async () => { + const graph = buildGraph(); + + mockInspectTaskInputs.mockReturnValue({ + 'myproj:build': makeHashInputs([]), + }); + + const [result] = await verifySandboxViolations( + [{ taskId: 'myproj:build', reads: [], writes: [] }], + { projectGraph: graph } + ); + + expect(result.reads.reconciled).toEqual([]); + expect(result.reads.stillUnexpected).toEqual([]); + expect(result.writes.reconciled).toEqual([]); + expect(result.writes.stillUnexpected).toEqual([]); + expect(result.ok).toBe(true); + }); + 
}); + + describe('deduplication of inspector calls', () => { + it('calls inspectTaskInputs only once for duplicate taskIds', async () => { + const graph = buildGraph(); + + mockInspectTaskInputs.mockReturnValue({ + 'myproj:build': makeHashInputs(['libs/myproj/src/index.ts']), + }); + mockGetOutputs.mockReturnValue(['dist/libs/myproj']); + + await verifySandboxViolations( + [ + { + taskId: 'myproj:build', + reads: ['libs/myproj/src/index.ts'], + }, + { + taskId: 'myproj:build', + reads: ['libs/myproj/src/index.ts'], + writes: ['dist/libs/myproj/bundle.js'], + }, + { + taskId: 'myproj:build', + writes: ['dist/libs/myproj/other.js'], + }, + ], + { projectGraph: graph } + ); + + // Three violations with the same taskId → inspector called only once + expect(mockInspectTaskInputs).toHaveBeenCalledTimes(1); + }); + + it('builds the HashPlanInspector exactly once for the whole batch', async () => { + const graph = buildGraph(); + + mockInspectTaskInputs.mockReturnValue({ + 'myproj:build': makeHashInputs([]), + }); + + await verifySandboxViolations( + [ + { taskId: 'myproj:build' }, + { taskId: 'myproj:build' }, + { taskId: 'myproj:build' }, + ], + { projectGraph: graph } + ); + + expect(MockHashPlanInspector).toHaveBeenCalledTimes(1); + expect(mockInit).toHaveBeenCalledTimes(1); + }); + + it('calls inspectTaskInputs once per unique taskId across multiple distinct tasks', async () => { + // Two-project graph + const graph = { + nodes: { + myproj: { + name: 'myproj', + type: 'lib', + data: { + root: 'libs/myproj', + targets: { build: { executor: '@nx/js:tsc' } }, + }, + }, + otherproj: { + name: 'otherproj', + type: 'lib', + data: { + root: 'libs/otherproj', + targets: { build: { executor: '@nx/js:tsc' } }, + }, + }, + }, + dependencies: { myproj: [], otherproj: [] }, + } as unknown as ProjectGraph; + + mockInspectTaskInputs + .mockReturnValueOnce({ + 'myproj:build': makeHashInputs(['libs/myproj/src/index.ts']), + }) + .mockReturnValueOnce({ + 'otherproj:build': 
makeHashInputs(['libs/otherproj/src/index.ts']), + }); + + await verifySandboxViolations( + [ + { taskId: 'myproj:build', reads: ['libs/myproj/src/index.ts'] }, + { taskId: 'otherproj:build', reads: ['libs/otherproj/src/index.ts'] }, + // Duplicate — should NOT cause a third call + { taskId: 'myproj:build', reads: ['libs/myproj/src/index.ts'] }, + ], + { projectGraph: graph } + ); + + // Two unique taskIds → exactly two inspectTaskInputs calls + expect(mockInspectTaskInputs).toHaveBeenCalledTimes(2); + }); + }); +}); diff --git a/packages/nx/src/hasher/verify-sandbox-violations.ts b/packages/nx/src/hasher/verify-sandbox-violations.ts new file mode 100644 index 0000000000000..b2613f4647a07 --- /dev/null +++ b/packages/nx/src/hasher/verify-sandbox-violations.ts @@ -0,0 +1,227 @@ +import { minimatch } from 'minimatch'; +import { NxJsonConfiguration } from '../config/nx-json'; +import { ProjectGraph } from '../config/project-graph'; +import { HashInputs } from '../native'; +import { getOutputsForTargetAndConfiguration } from '../tasks-runner/utils'; +import { splitTargetFromNodes } from '../utils/split-target'; +import { workspaceRoot as defaultWorkspaceRoot } from '../utils/workspace-root'; +import { HashPlanInspector } from './hash-plan-inspector'; + +/** + * A single sandbox violation report produced by a prior task run. + * All paths must be workspace-relative POSIX paths (forward slashes, no + * leading slash). The verifier does not normalise paths — callers must ensure + * consistency with how Nx records file paths in its own data structures. + */ +export interface SandboxViolationInput { + /** Full Nx task ID, e.g. `"myproj:build"` or `"myproj:build:production"`. */ + taskId: string; + /** Workspace-relative POSIX paths that were read outside the sandbox. */ + reads?: string[]; + /** Workspace-relative POSIX paths that were written outside the sandbox. */ + writes?: string[]; +} + +/** + * The reconciliation result for a single sandbox violation. 
+ */ +export interface SandboxViolationResult { + /** The task ID from the original {@link SandboxViolationInput}. */ + taskId: string; + reads: { + /** Paths that are legitimate declared inputs for this task. */ + reconciled: string[]; + /** Paths that are still not covered by any declared input. */ + stillUnexpected: string[]; + }; + writes: { + /** Paths that are legitimate declared outputs for this task. */ + reconciled: string[]; + /** Paths that are still not covered by any declared output. */ + stillUnexpected: string[]; + }; + /** + * `true` when every reported read and write is accounted for by the + * current workspace configuration. + */ + ok: boolean; +} + +/** + * Verifies whether file paths reported as sandbox violations would be + * considered legitimate inputs or outputs in the **current** workspace + * configuration. + * + * ### Path format requirement + * All paths in {@link SandboxViolationInput.reads} and + * {@link SandboxViolationInput.writes} **must** be workspace-relative POSIX + * paths (forward slashes, no leading slash, e.g. `"libs/myproj/src/main.ts"`). + * The verifier performs no normalisation — callers are responsible for + * ensuring paths match the format Nx uses internally. + * + * ### Inputs reconciliation + * A read is reconciled when it appears in the fully-expanded `files` array + * returned by {@link HashPlanInspector.inspectTaskInputs} for the task. + * Matching is exact set-membership (no glob expansion needed because the + * inspector already expands all file patterns). + * + * ### Outputs reconciliation + * A write is reconciled when it matches at least one entry from the list + * returned by {@link getOutputsForTargetAndConfiguration}. Matching uses: + * 1. Exact path equality. + * 2. Prefix match — the write is inside an output directory. + * 3. Glob match via `minimatch` — handles patterns such as `dist/**`. + * + * @param violations - One entry per task whose sandbox was violated. 
+ * @param options - Workspace context. The `projectGraph` must already be + * fully resolved. If `nxJson` is omitted it is read from disk using the + * provided (or default) `workspaceRoot`. + */ +export async function verifySandboxViolations( + violations: SandboxViolationInput[], + options: { + projectGraph: ProjectGraph; + nxJson?: NxJsonConfiguration; + workspaceRoot?: string; + } +): Promise<SandboxViolationResult[]> { + if (violations.length === 0) return []; + + const { + projectGraph, + nxJson, + workspaceRoot = defaultWorkspaceRoot, + } = options; + + // Build the inspector once for the whole batch and initialise it once. + const inspector = new HashPlanInspector(projectGraph, workspaceRoot, nxJson); + await inspector.init(); + + // Cache inspector results keyed by violation taskId to deduplicate calls + // when multiple violations share the same task. + const inputsCache = new Map<string, HashInputs | undefined>(); + + function getCachedInputs(taskId: string): HashInputs | undefined { + if (inputsCache.has(taskId)) { + return inputsCache.get(taskId); + } + + const [project, target, configuration] = splitTargetFromNodes( + taskId, + projectGraph.nodes, + { silent: true } + ); + + if (!project || !target) { + inputsCache.set(taskId, undefined); + return undefined; + } + + let result: Record<string, HashInputs> = {}; + try { + result = inspector.inspectTaskInputs({ project, target, configuration }); + } catch { + // Project / target not found in graph — treat as no inputs. + } + + // The result key is usually the same as taskId but may include a + // defaultConfiguration suffix when none was explicitly given. 
+ let hashInputs = result[taskId]; + if (!hashInputs) { + // Fallback: find any key that starts with project:target + const prefix = `${project}:${target}`; + for (const [key, inputs] of Object.entries(result)) { + if (key === prefix || key.startsWith(prefix + ':')) { + hashInputs = inputs; + break; + } + } + } + + inputsCache.set(taskId, hashInputs); + return hashInputs; + } + + return violations.map((violation) => { + const { taskId, reads = [], writes = [] } = violation; + + // ── Inputs reconciliation ──────────────────────────────────────────────── + const hashInputs = getCachedInputs(taskId); + const inputFileSet = new Set(hashInputs?.files ?? []); + + const reconciledReads: string[] = []; + const unexpectedReads: string[] = []; + for (const read of reads) { + if (inputFileSet.has(read)) { + reconciledReads.push(read); + } else { + unexpectedReads.push(read); + } + } + + // ── Outputs reconciliation ─────────────────────────────────────────────── + const [project, target, configuration] = splitTargetFromNodes( + taskId, + projectGraph.nodes, + { silent: true } + ); + + let outputPatterns: string[] = []; + if (project && target) { + const node = projectGraph.nodes[project]; + if (node) { + outputPatterns = getOutputsForTargetAndConfiguration( + { project, target, configuration }, + {}, + node + ); + } + } + + const reconciledWrites: string[] = []; + const unexpectedWrites: string[] = []; + for (const write of writes) { + if (matchesAnyOutput(write, outputPatterns)) { + reconciledWrites.push(write); + } else { + unexpectedWrites.push(write); + } + } + + const ok = unexpectedReads.length === 0 && unexpectedWrites.length === 0; + + return { + taskId, + reads: { reconciled: reconciledReads, stillUnexpected: unexpectedReads }, + writes: { + reconciled: reconciledWrites, + stillUnexpected: unexpectedWrites, + }, + ok, + }; + }); +} + +/** + * Returns `true` when `filePath` is covered by at least one entry in + * `outputPatterns`. Handles three cases: + * + * 1. 
**Exact match** — the path equals the pattern literally. + * 2. **Prefix match** — the path is nested inside a non-glob output directory. + * 3. **Glob match** — the pattern contains glob characters and `minimatch` + * confirms a match (e.g. `dist/**` covers `dist/main.js`). + */ +function matchesAnyOutput(filePath: string, outputPatterns: string[]): boolean { + const normalized = filePath.replace(/\\/g, '/'); + for (const pattern of outputPatterns) { + const normalizedPattern = pattern.replace(/\\/g, '/'); + if ( + normalized === normalizedPattern || + normalized.startsWith(normalizedPattern + '/') || + minimatch(normalized, normalizedPattern) + ) { + return true; + } + } + return false; +} From 8378d0eec36dfeb140466280ba2947f611f9fed3 Mon Sep 17 00:00:00 2001 From: Craigory Coppola Date: Wed, 6 May 2026 11:02:04 -0400 Subject: [PATCH 2/3] cleanup(core): extract createTaskFileResolver primitive; expose via devkit-internals MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Replaces the sandbox-report-aware verifySandboxViolations export with a generic primitive. createTaskFileResolver returns a handle exposing getInputs / getOutputs / isInput / isOutput per task — the cloud light client owns the SandboxReport shape and the iteration loop. Exposed via devkit-internals (not the public devkit-exports surface). A follow-up commit will refactor nx show target --check to consume this same resolver. 
--- .../command-line/show/show-target/inputs.ts | 57 +-- .../command-line/show/show-target/outputs.ts | 55 +-- packages/nx/src/devkit-exports.ts | 18 - packages/nx/src/devkit-internals.ts | 3 + .../nx/src/hasher/task-file-resolver.spec.ts | 298 ++++++++++++++ packages/nx/src/hasher/task-file-resolver.ts | 131 ++++++ .../hasher/verify-sandbox-violations.spec.ts | 379 ------------------ .../src/hasher/verify-sandbox-violations.ts | 227 ----------- 8 files changed, 482 insertions(+), 686 deletions(-) create mode 100644 packages/nx/src/hasher/task-file-resolver.spec.ts create mode 100644 packages/nx/src/hasher/task-file-resolver.ts delete mode 100644 packages/nx/src/hasher/verify-sandbox-violations.spec.ts delete mode 100644 packages/nx/src/hasher/verify-sandbox-violations.ts diff --git a/packages/nx/src/command-line/show/show-target/inputs.ts b/packages/nx/src/command-line/show/show-target/inputs.ts index 744d308232e5b..9ba6af3f66ac8 100644 --- a/packages/nx/src/command-line/show/show-target/inputs.ts +++ b/packages/nx/src/command-line/show/show-target/inputs.ts @@ -1,7 +1,6 @@ import type { TargetConfiguration } from '../../../config/workspace-json-project-json'; import type { HashInputs } from '../../../native'; -import { workspaceRoot } from '../../../utils/workspace-root'; -import { handleImport } from '../../../utils/handle-import'; +import { createTaskFileResolver } from '../../../hasher/task-file-resolver'; import type { ShowTargetInputsOptions } from '../command-object'; import { resolveTarget, @@ -10,7 +9,6 @@ import { hasCustomHasher, pc, printList, - type ResolvedTarget, } from './utils'; // ── Handler ───────────────────────────────────────────────────────── @@ -32,16 +30,29 @@ export async function showTargetInputsHandler( return; } - const hashInputs = await resolveInputFiles(t); + const { projectName, targetName, configuration } = t; + const taskId = configuration + ? 
`${projectName}:${targetName}:${configuration}` + : `${projectName}:${targetName}`; + + const resolver = await createTaskFileResolver({ + projectGraph: t.graph, + nxJson: t.nxJson, + }); + + const hashInputs = resolver.getRawInputs(taskId); + if (!hashInputs) { + throw new Error(`Could not find hash plan for task "${taskId}".`); + } if (args.check !== undefined) { const checkItems = deduplicateFolderEntries(args.check); const results = checkItems.map((input) => - resolveCheckFromInputs(input, t.projectName, t.targetName, hashInputs) + resolveCheckFromInputs(input, projectName, targetName, hashInputs) ); if (results.length >= 2) { - renderBatchCheckInputs(results, t.projectName, t.targetName); + renderBatchCheckInputs(results, projectName, targetName); } else { for (const data of results) renderCheckInput(data); } @@ -54,8 +65,8 @@ } renderInputs( - { project: t.projectName, target: t.targetName, ...hashInputs }, - t.node.data.targets[t.targetName].inputs, + { project: projectName, target: targetName, ...hashInputs }, + t.node.data.targets[targetName].inputs, args ); } @@ -64,36 +75,6 @@ type CheckInputResult = ReturnType<typeof resolveCheckFromInputs>; -async function resolveInputFiles(t: ResolvedTarget): Promise<HashInputs> { - const { projectName, targetName, configuration, graph, nxJson } = t; - const { HashPlanInspector } = (await handleImport( - '../../../hasher/hash-plan-inspector.js', - __dirname - )) as typeof import('../../../hasher/hash-plan-inspector'); - - const inspector = new HashPlanInspector(graph, workspaceRoot, nxJson); - await inspector.init(); - - const plan = inspector.inspectTaskInputs({ - project: projectName, - target: targetName, - configuration, - }); - - const targetConfig = graph.nodes[projectName]?.data?.targets?.[targetName]; - const effectiveConfig = configuration ?? targetConfig?.defaultConfiguration; - const taskId = effectiveConfig + ? 
`${projectName}:${targetName}:${effectiveConfig}` - : `${projectName}:${targetName}`; - const result = plan[taskId]; - if (!result) { - throw new Error( - `Could not find hash plan for task "${taskId}". Available tasks: ${Object.keys(plan).join(', ')}` - ); - } - return result; -} - function resolveCheckFromInputs( rawValue: string, projectName: string, diff --git a/packages/nx/src/command-line/show/show-target/outputs.ts b/packages/nx/src/command-line/show/show-target/outputs.ts index 2415b0d045e3d..0b3f34d47a551 100644 --- a/packages/nx/src/command-line/show/show-target/outputs.ts +++ b/packages/nx/src/command-line/show/show-target/outputs.ts @@ -1,4 +1,7 @@ -import { getOutputsForTargetAndConfiguration } from '../../../tasks-runner/utils'; +import { + createTaskFileResolver, + type TaskFileResolver, +} from '../../../hasher/task-file-resolver'; import { workspaceRoot } from '../../../utils/workspace-root'; import type { ShowTargetOutputsOptions } from '../command-object'; import { @@ -16,12 +19,22 @@ export async function showTargetOutputsHandler( args: ShowTargetOutputsOptions ): Promise<void> { const t = await resolveTarget(args); - const outputsData = resolveOutputsData(t); + + const taskId = t.configuration + ? 
`${t.projectName}:${t.targetName}:${t.configuration}` + : `${t.projectName}:${t.targetName}`; + + const resolver = await createTaskFileResolver({ + projectGraph: t.graph, + nxJson: t.nxJson, + }); + + const outputsData = resolveOutputsData(t, resolver, taskId); if (args.check !== undefined) { const checkItems = deduplicateFolderEntries(args.check); const results = checkItems.map((o) => - resolveCheckOutputData(o, outputsData) + resolveCheckOutputData(o, outputsData, resolver, taskId) ); if (results.length >= 2) { @@ -49,13 +62,15 @@ type OutputsData = ReturnType<typeof resolveOutputsData>; type CheckOutputResult = ReturnType<typeof resolveCheckOutputData>; -function resolveOutputsData(t: ResolvedTarget) { +function resolveOutputsData( + t: ResolvedTarget, + resolver: TaskFileResolver, + taskId: string +) { const { projectName, targetName, configuration, node } = t; - const resolvedOutputs = getOutputsForTargetAndConfiguration( - { project: projectName, target: targetName, configuration }, - {}, - node - ); + // Use the resolver to obtain resolved output paths — avoids duplicating + // the getOutputsForTargetAndConfiguration call that the resolver already makes. + const resolvedOutputs = resolver.getOutputs(taskId); const targetConfig = node.data.targets?.[targetName]; const configuredOutputs: string[] = targetConfig?.outputs ?? 
[]; @@ -93,25 +108,17 @@ function resolveOutputsData(t: ResolvedTarget) { function resolveCheckOutputData( rawFileToCheck: string, - outputsData: OutputsData + outputsData: OutputsData, + resolver: TaskFileResolver, + taskId: string ) { const fileToCheck = normalizePath(rawFileToCheck); const { outputPaths, expandedOutputs } = outputsData; - let matchedOutput: string | null = null; - for (const outputPath of outputPaths) { - const normalizedOutput = outputPath.replace(/\\/g, '/'); - if ( - fileToCheck === normalizedOutput || - fileToCheck.startsWith(normalizedOutput + '/') - ) { - matchedOutput = outputPath; - break; - } - } - if (!matchedOutput && expandedOutputs.includes(fileToCheck)) { - matchedOutput = fileToCheck; - } + // Delegate the direct-match decision to the resolver (handles exact, prefix, + // and glob matching via minimatch — supersedes the previous manual prefix + // comparison + expandedOutputs exact-match approach). + const matchedOutput = resolver.isOutput(taskId, fileToCheck); let containedOutputPaths: string[] = []; let containedExpandedOutputs: string[] = []; diff --git a/packages/nx/src/devkit-exports.ts b/packages/nx/src/devkit-exports.ts index 6a2038292ef1a..c939e18790161 100644 --- a/packages/nx/src/devkit-exports.ts +++ b/packages/nx/src/devkit-exports.ts @@ -259,21 +259,3 @@ export { cacheDir } from './utils/cache-directory'; export { createProjectFileMapUsingProjectGraph } from './project-graph/file-map-utils'; export { isDaemonEnabled } from './daemon/client/client'; - -/** - * @category Sandbox - */ -export { HashPlanInspector } from './hasher/hash-plan-inspector'; - -/** - * @category Sandbox - */ -export type { - SandboxViolationInput, - SandboxViolationResult, -} from './hasher/verify-sandbox-violations'; - -/** - * @category Sandbox - */ -export { verifySandboxViolations } from './hasher/verify-sandbox-violations'; diff --git a/packages/nx/src/devkit-internals.ts b/packages/nx/src/devkit-internals.ts index 
402b7c1963a10..1967bcc4ff1c8 100644 --- a/packages/nx/src/devkit-internals.ts +++ b/packages/nx/src/devkit-internals.ts @@ -47,3 +47,6 @@ export { globalSpinner } from './utils/spinner'; export { signalToCode } from './utils/exit-codes'; export { handleImport } from './utils/handle-import'; export { PluginCache, safeWriteFileCache } from './utils/plugin-cache-utils'; +export { HashPlanInspector } from './hasher/hash-plan-inspector'; +export type { TaskFileResolver } from './hasher/task-file-resolver'; +export { createTaskFileResolver } from './hasher/task-file-resolver'; diff --git a/packages/nx/src/hasher/task-file-resolver.spec.ts b/packages/nx/src/hasher/task-file-resolver.spec.ts new file mode 100644 index 0000000000000..6273207b338de --- /dev/null +++ b/packages/nx/src/hasher/task-file-resolver.spec.ts @@ -0,0 +1,298 @@ +import type { ProjectGraph } from '../config/project-graph'; +import type { HashInputs } from '../native'; + +// ── Mocks must be declared BEFORE imports that use them ───────────────────── +// +// jest.mock() calls are hoisted to the top of the file by Babel/SWC, which +// means any factory closure runs before module-level `const` declarations are +// initialised. The pattern below avoids referencing outer-scope variables +// inside the factory; instead we configure the mock in beforeEach so every +// test starts with a fresh set of spies. 
+ +jest.mock('./hash-plan-inspector', () => ({ + HashPlanInspector: jest.fn(), +})); + +jest.mock('../tasks-runner/utils', () => ({ + getOutputsForTargetAndConfiguration: jest.fn(), +})); + +// ── Imports (after mocks) ──────────────────────────────────────────────────── + +// eslint-disable-next-line import/order +import { HashPlanInspector } from './hash-plan-inspector'; +// eslint-disable-next-line import/order +import { getOutputsForTargetAndConfiguration } from '../tasks-runner/utils'; +// eslint-disable-next-line import/order +import { createTaskFileResolver } from './task-file-resolver'; + +const MockHashPlanInspector = jest.mocked(HashPlanInspector); +const mockGetOutputs = jest.mocked(getOutputsForTargetAndConfiguration); + +// ── Per-test mock spies ────────────────────────────────────────────────────── + +let mockInit: jest.Mock; +let mockInspectTaskInputs: jest.Mock; + +// ── Helpers ────────────────────────────────────────────────────────────────── + +function buildGraph(): ProjectGraph { + return { + nodes: { + myproj: { + name: 'myproj', + type: 'lib', + data: { + root: 'libs/myproj', + targets: { + build: { + executor: '@nx/js:tsc', + outputs: ['dist/libs/myproj'], + }, + }, + }, + }, + }, + dependencies: { myproj: [] }, + } as unknown as ProjectGraph; +} + +function makeHashInputs(files: string[]): HashInputs { + return { + files, + runtime: [], + environment: [], + depOutputs: [], + external: [], + }; +} + +// ── Tests ──────────────────────────────────────────────────────────────────── + +describe('createTaskFileResolver', () => { + beforeEach(() => { + jest.clearAllMocks(); + + // Rebuild per-test spies so callers can customise return values. 
+ mockInit = jest.fn().mockResolvedValue(undefined); + mockInspectTaskInputs = jest.fn(); + + MockHashPlanInspector.mockImplementation( + () => + ({ + init: mockInit, + inspectTaskInputs: mockInspectTaskInputs, + }) as unknown as HashPlanInspector + ); + + // Default: no outputs + mockGetOutputs.mockReturnValue([]); + }); + + it('initialises the HashPlanInspector exactly once', async () => { + const graph = buildGraph(); + mockInspectTaskInputs.mockReturnValue({ + 'myproj:build': makeHashInputs([]), + }); + + const resolver = await createTaskFileResolver({ projectGraph: graph }); + + // Trigger multiple calls to exercise init-once guarantee + resolver.getInputs('myproj:build'); + resolver.getInputs('myproj:build'); + resolver.getOutputs('myproj:build'); + + expect(MockHashPlanInspector).toHaveBeenCalledTimes(1); + expect(mockInit).toHaveBeenCalledTimes(1); + }); + + describe('getInputs', () => { + it('returns the expanded file list from the inspector', async () => { + const graph = buildGraph(); + mockInspectTaskInputs.mockReturnValue({ + 'myproj:build': makeHashInputs([ + 'libs/myproj/src/index.ts', + 'libs/myproj/package.json', + ]), + }); + + const resolver = await createTaskFileResolver({ projectGraph: graph }); + const inputs = resolver.getInputs('myproj:build'); + + expect(inputs).toEqual([ + 'libs/myproj/src/index.ts', + 'libs/myproj/package.json', + ]); + }); + + it('returns empty array when inspector finds no files', async () => { + const graph = buildGraph(); + mockInspectTaskInputs.mockReturnValue({ + 'myproj:build': makeHashInputs([]), + }); + + const resolver = await createTaskFileResolver({ projectGraph: graph }); + expect(resolver.getInputs('myproj:build')).toEqual([]); + }); + + it('caches result — inspectTaskInputs called only once per taskId', async () => { + const graph = buildGraph(); + mockInspectTaskInputs.mockReturnValue({ + 'myproj:build': makeHashInputs(['libs/myproj/src/index.ts']), + }); + + const resolver = await createTaskFileResolver({ 
projectGraph: graph }); + resolver.getInputs('myproj:build'); + resolver.getInputs('myproj:build'); + resolver.getInputs('myproj:build'); + + expect(mockInspectTaskInputs).toHaveBeenCalledTimes(1); + }); + }); + + describe('getOutputs', () => { + it('returns the output patterns for the task', async () => { + const graph = buildGraph(); + mockGetOutputs.mockReturnValue([ + 'dist/libs/myproj', + 'dist/libs/myproj/**', + ]); + + const resolver = await createTaskFileResolver({ projectGraph: graph }); + const outputs = resolver.getOutputs('myproj:build'); + + expect(outputs).toEqual(['dist/libs/myproj', 'dist/libs/myproj/**']); + }); + + it('returns empty array when node has no matching target', async () => { + const graph = buildGraph(); + + const resolver = await createTaskFileResolver({ projectGraph: graph }); + // 'myproj:test' target does not exist in graph + const outputs = resolver.getOutputs('myproj:test'); + + expect(outputs).toEqual([]); + expect(mockGetOutputs).not.toHaveBeenCalled(); + }); + + it('caches result — getOutputsForTargetAndConfiguration called only once per taskId', async () => { + const graph = buildGraph(); + mockGetOutputs.mockReturnValue(['dist/libs/myproj']); + + const resolver = await createTaskFileResolver({ projectGraph: graph }); + resolver.getOutputs('myproj:build'); + resolver.getOutputs('myproj:build'); + resolver.getOutputs('myproj:build'); + + expect(mockGetOutputs).toHaveBeenCalledTimes(1); + }); + }); + + describe('isInput', () => { + it('returns true for a path in the input file list (exact match)', async () => { + const graph = buildGraph(); + mockInspectTaskInputs.mockReturnValue({ + 'myproj:build': makeHashInputs(['libs/myproj/src/index.ts']), + }); + + const resolver = await createTaskFileResolver({ projectGraph: graph }); + expect(resolver.isInput('myproj:build', 'libs/myproj/src/index.ts')).toBe( + true + ); + }); + + it('returns false for a path NOT in the input file list', async () => { + const graph = buildGraph(); + 
mockInspectTaskInputs.mockReturnValue({ + 'myproj:build': makeHashInputs(['libs/myproj/src/index.ts']), + }); + + const resolver = await createTaskFileResolver({ projectGraph: graph }); + expect(resolver.isInput('myproj:build', 'libs/other/src/index.ts')).toBe( + false + ); + }); + }); + + describe('isOutput', () => { + it('returns true for an exact match against an output pattern', async () => { + const graph = buildGraph(); + mockGetOutputs.mockReturnValue(['dist/libs/myproj/index.js']); + + const resolver = await createTaskFileResolver({ projectGraph: graph }); + expect( + resolver.isOutput('myproj:build', 'dist/libs/myproj/index.js') + ).toBe(true); + }); + + it('returns true when path is nested inside an output directory', async () => { + const graph = buildGraph(); + mockGetOutputs.mockReturnValue(['dist/libs/myproj']); + + const resolver = await createTaskFileResolver({ projectGraph: graph }); + expect( + resolver.isOutput('myproj:build', 'dist/libs/myproj/deep/file.js') + ).toBe(true); + }); + + it('returns true for a glob pattern match (dist/**)', async () => { + const graph = buildGraph(); + mockGetOutputs.mockReturnValue(['dist/**']); + + const resolver = await createTaskFileResolver({ projectGraph: graph }); + expect(resolver.isOutput('myproj:build', 'dist/main.js')).toBe(true); + expect(resolver.isOutput('myproj:build', 'dist/nested/chunk.js')).toBe( + true + ); + }); + + it('returns false for a path that does NOT match any output pattern', async () => { + const graph = buildGraph(); + mockGetOutputs.mockReturnValue(['dist/**']); + + const resolver = await createTaskFileResolver({ projectGraph: graph }); + expect(resolver.isOutput('myproj:build', 'other-dir/file.js')).toBe( + false + ); + }); + }); + + describe('caching across both getInputs and getOutputs', () => { + it('does not call underlying APIs more than once per taskId even with mixed calls', async () => { + const graph = buildGraph(); + mockInspectTaskInputs.mockReturnValue({ + 'myproj:build': 
makeHashInputs(['libs/myproj/src/index.ts']), + }); + mockGetOutputs.mockReturnValue(['dist/libs/myproj']); + + const resolver = await createTaskFileResolver({ projectGraph: graph }); + + // Interleaved calls for the same taskId + resolver.getInputs('myproj:build'); + resolver.getOutputs('myproj:build'); + resolver.getInputs('myproj:build'); + resolver.getOutputs('myproj:build'); + + expect(mockInspectTaskInputs).toHaveBeenCalledTimes(1); + expect(mockGetOutputs).toHaveBeenCalledTimes(1); + }); + }); + + describe('parseTaskId validation', () => { + it('throws on a taskId that has no colon (no target)', async () => { + const graph = buildGraph(); + const resolver = await createTaskFileResolver({ projectGraph: graph }); + + expect(() => resolver.getInputs('justproject')).toThrow( + /Invalid taskId "justproject"/ + ); + }); + + it('throws on an empty taskId string', async () => { + const graph = buildGraph(); + const resolver = await createTaskFileResolver({ projectGraph: graph }); + + expect(() => resolver.getInputs('')).toThrow(/Invalid taskId ""/); + }); + }); +}); diff --git a/packages/nx/src/hasher/task-file-resolver.ts b/packages/nx/src/hasher/task-file-resolver.ts new file mode 100644 index 0000000000000..f6e3b9ec4bc4f --- /dev/null +++ b/packages/nx/src/hasher/task-file-resolver.ts @@ -0,0 +1,131 @@ +import { minimatch } from 'minimatch'; +import type { NxJsonConfiguration } from '../config/nx-json'; +import type { ProjectGraph } from '../config/project-graph'; +import type { HashInputs } from '../native'; +import { getOutputsForTargetAndConfiguration } from '../tasks-runner/utils'; +import { splitByColons } from '../utils/split-target'; +import { workspaceRoot as defaultWorkspaceRoot } from '../utils/workspace-root'; +import { HashPlanInspector } from './hash-plan-inspector'; + +export interface TaskFileResolver { + /** Full hash plan entry (files + runtime + environment + depOutputs + external). 
*/ + getRawInputs(taskId: string): HashInputs | null; + getInputs(taskId: string): string[]; + getOutputs(taskId: string): string[]; + isInput(taskId: string, path: string): boolean; + isOutput(taskId: string, path: string): boolean; +} + +export async function createTaskFileResolver(options: { + projectGraph: ProjectGraph; + nxJson?: NxJsonConfiguration; + workspaceRoot?: string; +}): Promise { + const workspaceRoot = options.workspaceRoot ?? defaultWorkspaceRoot; + const inspector = new HashPlanInspector( + options.projectGraph, + workspaceRoot, + options.nxJson + ); + await inspector.init(); + + // Cache the full HashInputs (null = task not found). A single cache entry + // serves both getRawInputs() and getInputs() so the inspector is never + // called more than once per taskId. + const hashInputsCache = new Map(); + const outputsCache = new Map(); + + function parseTaskId(taskId: string): { + project: string; + target: string; + configuration?: string; + } { + const [project, target, configuration] = splitByColons(taskId); + if (!project || !target) { + throw new Error( + `Invalid taskId "${taskId}" — expected "project:target[:configuration]"` + ); + } + return { project, target, configuration }; + } + + function getRawInputs(taskId: string): HashInputs | null { + if (hashInputsCache.has(taskId)) { + return hashInputsCache.get(taskId) ?? null; + } + + const { project, target, configuration } = parseTaskId(taskId); + + let planResult: Record = {}; + try { + planResult = inspector.inspectTaskInputs({ + project, + target, + configuration, + }); + } catch { + // Project / target not found in graph — treat as no inputs. + hashInputsCache.set(taskId, null); + return null; + } + + // The result key is usually the same as taskId but may include a + // defaultConfiguration suffix when none was explicitly given. 
+ let inputs: HashInputs | undefined = planResult[taskId]; + if (!inputs) { + const prefix = `${project}:${target}`; + for (const [key, val] of Object.entries(planResult)) { + if (key === prefix || key.startsWith(prefix + ':')) { + inputs = val; + break; + } + } + } + + const result = inputs ?? null; + hashInputsCache.set(taskId, result); + return result; + } + + function getInputs(taskId: string): string[] { + return getRawInputs(taskId)?.files ?? []; + } + + function getOutputs(taskId: string): string[] { + const cached = outputsCache.get(taskId); + if (cached !== undefined) return cached; + + const { project, target, configuration } = parseTaskId(taskId); + const node = options.projectGraph.nodes[project]; + const outputs = node?.data?.targets?.[target] + ? getOutputsForTargetAndConfiguration( + { project, target, configuration }, + {}, + node + ) + : []; + + outputsCache.set(taskId, outputs); + return outputs; + } + + return { + getRawInputs, + getInputs, + getOutputs, + isInput(taskId: string, path: string): boolean { + return getInputs(taskId).includes(path); + }, + isOutput(taskId: string, path: string): boolean { + const normalized = path.replace(/\\/g, '/'); + return getOutputs(taskId).some((pattern) => { + const normalizedPattern = pattern.replace(/\\/g, '/'); + return ( + normalized === normalizedPattern || + normalized.startsWith(normalizedPattern + '/') || + minimatch(normalized, normalizedPattern, { dot: true }) + ); + }); + }, + }; +} diff --git a/packages/nx/src/hasher/verify-sandbox-violations.spec.ts b/packages/nx/src/hasher/verify-sandbox-violations.spec.ts deleted file mode 100644 index e2131a2fc85a7..0000000000000 --- a/packages/nx/src/hasher/verify-sandbox-violations.spec.ts +++ /dev/null @@ -1,379 +0,0 @@ -import type { ProjectGraph } from '../config/project-graph'; -import type { HashInputs } from '../native'; - -// ── Mocks must be declared BEFORE imports that use them ───────────────────── -// -// jest.mock() calls are hoisted to the top 
of the file by Babel/SWC, which -// means any factory closure runs before module-level `const` declarations are -// initialised. The pattern below avoids referencing outer-scope variables -// inside the factory; instead we configure the mock in beforeEach so every -// test starts with a fresh set of spies. - -jest.mock('./hash-plan-inspector', () => ({ - HashPlanInspector: jest.fn(), -})); - -jest.mock('../tasks-runner/utils', () => ({ - getOutputsForTargetAndConfiguration: jest.fn(), -})); - -// ── Imports (after mocks) ──────────────────────────────────────────────────── - -// eslint-disable-next-line import/order -import { HashPlanInspector } from './hash-plan-inspector'; -// eslint-disable-next-line import/order -import { getOutputsForTargetAndConfiguration } from '../tasks-runner/utils'; -// eslint-disable-next-line import/order -import { verifySandboxViolations } from './verify-sandbox-violations'; - -const MockHashPlanInspector = jest.mocked(HashPlanInspector); -const mockGetOutputs = jest.mocked(getOutputsForTargetAndConfiguration); - -// ── Per-test mock spies ────────────────────────────────────────────────────── - -let mockInit: jest.Mock; -let mockInspectTaskInputs: jest.Mock; - -// ── Helpers ────────────────────────────────────────────────────────────────── - -function buildGraph(): ProjectGraph { - return { - nodes: { - myproj: { - name: 'myproj', - type: 'lib', - data: { - root: 'libs/myproj', - targets: { - build: { - executor: '@nx/js:tsc', - outputs: ['dist/libs/myproj'], - }, - }, - }, - }, - }, - dependencies: { myproj: [] }, - } as unknown as ProjectGraph; -} - -function makeHashInputs(files: string[]): HashInputs { - return { - files, - runtime: [], - environment: [], - depOutputs: [], - external: [], - }; -} - -// ── Tests ──────────────────────────────────────────────────────────────────── - -describe('verifySandboxViolations', () => { - beforeEach(() => { - jest.clearAllMocks(); - - // Rebuild per-test spies so callers can customise return 
values. - mockInit = jest.fn().mockResolvedValue(undefined); - mockInspectTaskInputs = jest.fn(); - - MockHashPlanInspector.mockImplementation( - () => - ({ - init: mockInit, - inspectTaskInputs: mockInspectTaskInputs, - }) as unknown as HashPlanInspector - ); - - // Default: no outputs - mockGetOutputs.mockReturnValue([]); - }); - - it('returns empty array for empty violations', async () => { - const result = await verifySandboxViolations([], { - projectGraph: buildGraph(), - }); - expect(result).toEqual([]); - expect(MockHashPlanInspector).not.toHaveBeenCalled(); - }); - - describe('all-reconciled case', () => { - it('returns ok:true when every read and write is accounted for', async () => { - const graph = buildGraph(); - - mockInspectTaskInputs.mockReturnValue({ - 'myproj:build': makeHashInputs([ - 'libs/myproj/src/index.ts', - 'libs/myproj/package.json', - ]), - }); - mockGetOutputs.mockReturnValue(['dist/libs/myproj']); - - const [result] = await verifySandboxViolations( - [ - { - taskId: 'myproj:build', - reads: ['libs/myproj/src/index.ts', 'libs/myproj/package.json'], - writes: ['dist/libs/myproj/index.js'], - }, - ], - { projectGraph: graph } - ); - - expect(result.ok).toBe(true); - expect(result.reads.reconciled).toEqual([ - 'libs/myproj/src/index.ts', - 'libs/myproj/package.json', - ]); - expect(result.reads.stillUnexpected).toEqual([]); - expect(result.writes.reconciled).toEqual(['dist/libs/myproj/index.js']); - expect(result.writes.stillUnexpected).toEqual([]); - }); - }); - - describe('mixed case', () => { - it('splits paths into reconciled and stillUnexpected correctly', async () => { - const graph = buildGraph(); - - mockInspectTaskInputs.mockReturnValue({ - 'myproj:build': makeHashInputs(['libs/myproj/src/index.ts']), - }); - mockGetOutputs.mockReturnValue(['dist/libs/myproj']); - - const [result] = await verifySandboxViolations( - [ - { - taskId: 'myproj:build', - reads: ['libs/myproj/src/index.ts', 'some/secret/file.ts'], - writes: 
['dist/libs/myproj/bundle.js', '/tmp/shadow-file'], - }, - ], - { projectGraph: graph } - ); - - expect(result.ok).toBe(false); - expect(result.reads.reconciled).toEqual(['libs/myproj/src/index.ts']); - expect(result.reads.stillUnexpected).toEqual(['some/secret/file.ts']); - expect(result.writes.reconciled).toEqual(['dist/libs/myproj/bundle.js']); - expect(result.writes.stillUnexpected).toEqual(['/tmp/shadow-file']); - }); - }); - - describe('all-still-unexpected case', () => { - it('returns ok:false when nothing is reconciled', async () => { - const graph = buildGraph(); - - mockInspectTaskInputs.mockReturnValue({ - 'myproj:build': makeHashInputs([]), - }); - mockGetOutputs.mockReturnValue([]); - - const [result] = await verifySandboxViolations( - [ - { - taskId: 'myproj:build', - reads: ['sneaky/read.ts'], - writes: ['sneaky/write.js'], - }, - ], - { projectGraph: graph } - ); - - expect(result.ok).toBe(false); - expect(result.reads.stillUnexpected).toEqual(['sneaky/read.ts']); - expect(result.writes.stillUnexpected).toEqual(['sneaky/write.js']); - expect(result.reads.reconciled).toEqual([]); - expect(result.writes.reconciled).toEqual([]); - }); - }); - - describe('glob output matching', () => { - it('reconciles a write that matches a glob output pattern', async () => { - const graph = buildGraph(); - - mockInspectTaskInputs.mockReturnValue({ - 'myproj:build': makeHashInputs([]), - }); - // Output is a glob pattern - mockGetOutputs.mockReturnValue(['dist/**']); - - const [result] = await verifySandboxViolations( - [ - { - taskId: 'myproj:build', - writes: ['dist/main.js', 'dist/nested/chunk.js'], - }, - ], - { projectGraph: graph } - ); - - expect(result.writes.reconciled).toEqual([ - 'dist/main.js', - 'dist/nested/chunk.js', - ]); - expect(result.writes.stillUnexpected).toEqual([]); - expect(result.ok).toBe(true); - }); - - it('does NOT reconcile a write that is outside the glob pattern', async () => { - const graph = buildGraph(); - - 
mockInspectTaskInputs.mockReturnValue({ - 'myproj:build': makeHashInputs([]), - }); - mockGetOutputs.mockReturnValue(['dist/**']); - - const [result] = await verifySandboxViolations( - [ - { - taskId: 'myproj:build', - writes: ['other-dir/file.js'], - }, - ], - { projectGraph: graph } - ); - - expect(result.writes.stillUnexpected).toEqual(['other-dir/file.js']); - expect(result.ok).toBe(false); - }); - }); - - describe('empty reads and writes', () => { - it('handles undefined reads and writes as empty arrays', async () => { - const graph = buildGraph(); - - mockInspectTaskInputs.mockReturnValue({ - 'myproj:build': makeHashInputs([]), - }); - - const [result] = await verifySandboxViolations( - [{ taskId: 'myproj:build' }], - { projectGraph: graph } - ); - - expect(result.reads.reconciled).toEqual([]); - expect(result.reads.stillUnexpected).toEqual([]); - expect(result.writes.reconciled).toEqual([]); - expect(result.writes.stillUnexpected).toEqual([]); - expect(result.ok).toBe(true); - }); - - it('handles explicitly empty reads and writes arrays', async () => { - const graph = buildGraph(); - - mockInspectTaskInputs.mockReturnValue({ - 'myproj:build': makeHashInputs([]), - }); - - const [result] = await verifySandboxViolations( - [{ taskId: 'myproj:build', reads: [], writes: [] }], - { projectGraph: graph } - ); - - expect(result.reads.reconciled).toEqual([]); - expect(result.reads.stillUnexpected).toEqual([]); - expect(result.writes.reconciled).toEqual([]); - expect(result.writes.stillUnexpected).toEqual([]); - expect(result.ok).toBe(true); - }); - }); - - describe('deduplication of inspector calls', () => { - it('calls inspectTaskInputs only once for duplicate taskIds', async () => { - const graph = buildGraph(); - - mockInspectTaskInputs.mockReturnValue({ - 'myproj:build': makeHashInputs(['libs/myproj/src/index.ts']), - }); - mockGetOutputs.mockReturnValue(['dist/libs/myproj']); - - await verifySandboxViolations( - [ - { - taskId: 'myproj:build', - reads: 
['libs/myproj/src/index.ts'], - }, - { - taskId: 'myproj:build', - reads: ['libs/myproj/src/index.ts'], - writes: ['dist/libs/myproj/bundle.js'], - }, - { - taskId: 'myproj:build', - writes: ['dist/libs/myproj/other.js'], - }, - ], - { projectGraph: graph } - ); - - // Three violations with the same taskId → inspector called only once - expect(mockInspectTaskInputs).toHaveBeenCalledTimes(1); - }); - - it('builds the HashPlanInspector exactly once for the whole batch', async () => { - const graph = buildGraph(); - - mockInspectTaskInputs.mockReturnValue({ - 'myproj:build': makeHashInputs([]), - }); - - await verifySandboxViolations( - [ - { taskId: 'myproj:build' }, - { taskId: 'myproj:build' }, - { taskId: 'myproj:build' }, - ], - { projectGraph: graph } - ); - - expect(MockHashPlanInspector).toHaveBeenCalledTimes(1); - expect(mockInit).toHaveBeenCalledTimes(1); - }); - - it('calls inspectTaskInputs once per unique taskId across multiple distinct tasks', async () => { - // Two-project graph - const graph = { - nodes: { - myproj: { - name: 'myproj', - type: 'lib', - data: { - root: 'libs/myproj', - targets: { build: { executor: '@nx/js:tsc' } }, - }, - }, - otherproj: { - name: 'otherproj', - type: 'lib', - data: { - root: 'libs/otherproj', - targets: { build: { executor: '@nx/js:tsc' } }, - }, - }, - }, - dependencies: { myproj: [], otherproj: [] }, - } as unknown as ProjectGraph; - - mockInspectTaskInputs - .mockReturnValueOnce({ - 'myproj:build': makeHashInputs(['libs/myproj/src/index.ts']), - }) - .mockReturnValueOnce({ - 'otherproj:build': makeHashInputs(['libs/otherproj/src/index.ts']), - }); - - await verifySandboxViolations( - [ - { taskId: 'myproj:build', reads: ['libs/myproj/src/index.ts'] }, - { taskId: 'otherproj:build', reads: ['libs/otherproj/src/index.ts'] }, - // Duplicate — should NOT cause a third call - { taskId: 'myproj:build', reads: ['libs/myproj/src/index.ts'] }, - ], - { projectGraph: graph } - ); - - // Two unique taskIds → exactly two 
inspectTaskInputs calls - expect(mockInspectTaskInputs).toHaveBeenCalledTimes(2); - }); - }); -}); diff --git a/packages/nx/src/hasher/verify-sandbox-violations.ts b/packages/nx/src/hasher/verify-sandbox-violations.ts deleted file mode 100644 index b2613f4647a07..0000000000000 --- a/packages/nx/src/hasher/verify-sandbox-violations.ts +++ /dev/null @@ -1,227 +0,0 @@ -import { minimatch } from 'minimatch'; -import { NxJsonConfiguration } from '../config/nx-json'; -import { ProjectGraph } from '../config/project-graph'; -import { HashInputs } from '../native'; -import { getOutputsForTargetAndConfiguration } from '../tasks-runner/utils'; -import { splitTargetFromNodes } from '../utils/split-target'; -import { workspaceRoot as defaultWorkspaceRoot } from '../utils/workspace-root'; -import { HashPlanInspector } from './hash-plan-inspector'; - -/** - * A single sandbox violation report produced by a prior task run. - * All paths must be workspace-relative POSIX paths (forward slashes, no - * leading slash). The verifier does not normalise paths — callers must ensure - * consistency with how Nx records file paths in its own data structures. - */ -export interface SandboxViolationInput { - /** Full Nx task ID, e.g. `"myproj:build"` or `"myproj:build:production"`. */ - taskId: string; - /** Workspace-relative POSIX paths that were read outside the sandbox. */ - reads?: string[]; - /** Workspace-relative POSIX paths that were written outside the sandbox. */ - writes?: string[]; -} - -/** - * The reconciliation result for a single sandbox violation. - */ -export interface SandboxViolationResult { - /** The task ID from the original {@link SandboxViolationInput}. */ - taskId: string; - reads: { - /** Paths that are legitimate declared inputs for this task. */ - reconciled: string[]; - /** Paths that are still not covered by any declared input. */ - stillUnexpected: string[]; - }; - writes: { - /** Paths that are legitimate declared outputs for this task. 
*/ - reconciled: string[]; - /** Paths that are still not covered by any declared output. */ - stillUnexpected: string[]; - }; - /** - * `true` when every reported read and write is accounted for by the - * current workspace configuration. - */ - ok: boolean; -} - -/** - * Verifies whether file paths reported as sandbox violations would be - * considered legitimate inputs or outputs in the **current** workspace - * configuration. - * - * ### Path format requirement - * All paths in {@link SandboxViolationInput.reads} and - * {@link SandboxViolationInput.writes} **must** be workspace-relative POSIX - * paths (forward slashes, no leading slash, e.g. `"libs/myproj/src/main.ts"`). - * The verifier performs no normalisation — callers are responsible for - * ensuring paths match the format Nx uses internally. - * - * ### Inputs reconciliation - * A read is reconciled when it appears in the fully-expanded `files` array - * returned by {@link HashPlanInspector.inspectTaskInputs} for the task. - * Matching is exact set-membership (no glob expansion needed because the - * inspector already expands all file patterns). - * - * ### Outputs reconciliation - * A write is reconciled when it matches at least one entry from the list - * returned by {@link getOutputsForTargetAndConfiguration}. Matching uses: - * 1. Exact path equality. - * 2. Prefix match — the write is inside an output directory. - * 3. Glob match via `minimatch` — handles patterns such as `dist/**`. - * - * @param violations - One entry per task whose sandbox was violated. - * @param options - Workspace context. The `projectGraph` must already be - * fully resolved. If `nxJson` is omitted it is read from disk using the - * provided (or default) `workspaceRoot`. 
- */ -export async function verifySandboxViolations( - violations: SandboxViolationInput[], - options: { - projectGraph: ProjectGraph; - nxJson?: NxJsonConfiguration; - workspaceRoot?: string; - } -): Promise { - if (violations.length === 0) return []; - - const { - projectGraph, - nxJson, - workspaceRoot = defaultWorkspaceRoot, - } = options; - - // Build the inspector once for the whole batch and initialise it once. - const inspector = new HashPlanInspector(projectGraph, workspaceRoot, nxJson); - await inspector.init(); - - // Cache inspector results keyed by violation taskId to deduplicate calls - // when multiple violations share the same task. - const inputsCache = new Map(); - - function getCachedInputs(taskId: string): HashInputs | undefined { - if (inputsCache.has(taskId)) { - return inputsCache.get(taskId); - } - - const [project, target, configuration] = splitTargetFromNodes( - taskId, - projectGraph.nodes, - { silent: true } - ); - - if (!project || !target) { - inputsCache.set(taskId, undefined); - return undefined; - } - - let result: Record = {}; - try { - result = inspector.inspectTaskInputs({ project, target, configuration }); - } catch { - // Project / target not found in graph — treat as no inputs. - } - - // The result key is usually the same as taskId but may include a - // defaultConfiguration suffix when none was explicitly given. 
- let hashInputs = result[taskId]; - if (!hashInputs) { - // Fallback: find any key that starts with project:target - const prefix = `${project}:${target}`; - for (const [key, inputs] of Object.entries(result)) { - if (key === prefix || key.startsWith(prefix + ':')) { - hashInputs = inputs; - break; - } - } - } - - inputsCache.set(taskId, hashInputs); - return hashInputs; - } - - return violations.map((violation) => { - const { taskId, reads = [], writes = [] } = violation; - - // ── Inputs reconciliation ──────────────────────────────────────────────── - const hashInputs = getCachedInputs(taskId); - const inputFileSet = new Set(hashInputs?.files ?? []); - - const reconciledReads: string[] = []; - const unexpectedReads: string[] = []; - for (const read of reads) { - if (inputFileSet.has(read)) { - reconciledReads.push(read); - } else { - unexpectedReads.push(read); - } - } - - // ── Outputs reconciliation ─────────────────────────────────────────────── - const [project, target, configuration] = splitTargetFromNodes( - taskId, - projectGraph.nodes, - { silent: true } - ); - - let outputPatterns: string[] = []; - if (project && target) { - const node = projectGraph.nodes[project]; - if (node) { - outputPatterns = getOutputsForTargetAndConfiguration( - { project, target, configuration }, - {}, - node - ); - } - } - - const reconciledWrites: string[] = []; - const unexpectedWrites: string[] = []; - for (const write of writes) { - if (matchesAnyOutput(write, outputPatterns)) { - reconciledWrites.push(write); - } else { - unexpectedWrites.push(write); - } - } - - const ok = unexpectedReads.length === 0 && unexpectedWrites.length === 0; - - return { - taskId, - reads: { reconciled: reconciledReads, stillUnexpected: unexpectedReads }, - writes: { - reconciled: reconciledWrites, - stillUnexpected: unexpectedWrites, - }, - ok, - }; - }); -} - -/** - * Returns `true` when `filePath` is covered by at least one entry in - * `outputPatterns`. Handles three cases: - * - * 1. 
**Exact match** — the path equals the pattern literally. - * 2. **Prefix match** — the path is nested inside a non-glob output directory. - * 3. **Glob match** — the pattern contains glob characters and `minimatch` - * confirms a match (e.g. `dist/**` covers `dist/main.js`). - */ -function matchesAnyOutput(filePath: string, outputPatterns: string[]): boolean { - const normalized = filePath.replace(/\\/g, '/'); - for (const pattern of outputPatterns) { - const normalizedPattern = pattern.replace(/\\/g, '/'); - if ( - normalized === normalizedPattern || - normalized.startsWith(normalizedPattern + '/') || - minimatch(normalized, normalizedPattern) - ) { - return true; - } - } - return false; -} From b90bdf7b47e6339fd71d16f4c29def77a9836083 Mon Sep 17 00:00:00 2001 From: Craigory Coppola Date: Wed, 6 May 2026 17:51:35 -0400 Subject: [PATCH 3/3] fix(core): taskFileResolver respects dependentTasksOutputFiles statically MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Previously isInput() only matched against the materialized HashInputs.files list — when an upstream task hadn't run yet, depOutputs was empty and any file declared via { dependentTasksOutputFiles: '...', transitive?: bool } was reported as not-an-input even when the path obviously matched both the glob and a declared upstream output. The new logic walks the task graph from the inspected task, pulls each upstream's declared output globs, and reports the path as an input when it matches the dependentTasksOutputFiles glob AND lies inside one of those upstream outputs. Honors transitive: true/false. The check is exposed separately as resolver.matchesDependentTaskOutputs so consumers (e.g. the nx-cloud check-sandbox-report command) can reason about why a path was considered an input. 
Adds 6 unit tests covering: materialized depOutputs, static glob+output match, glob-without-output mismatch, output-without-glob mismatch, transitive=true walk, transitive=false short-walk, and the standalone matchesDependentTaskOutputs accessor. --- .../nx/src/hasher/task-file-resolver.spec.ts | 419 ++++++++++++++++++ packages/nx/src/hasher/task-file-resolver.ts | 161 ++++++- 2 files changed, 566 insertions(+), 14 deletions(-) diff --git a/packages/nx/src/hasher/task-file-resolver.spec.ts b/packages/nx/src/hasher/task-file-resolver.spec.ts index 6273207b338de..3b1a6f188f70f 100644 --- a/packages/nx/src/hasher/task-file-resolver.spec.ts +++ b/packages/nx/src/hasher/task-file-resolver.spec.ts @@ -17,6 +17,14 @@ jest.mock('../tasks-runner/utils', () => ({ getOutputsForTargetAndConfiguration: jest.fn(), })); +jest.mock('../tasks-runner/create-task-graph', () => ({ + createTaskGraph: jest.fn(), +})); + +jest.mock('./task-hasher', () => ({ + getInputs: jest.fn(), +})); + // ── Imports (after mocks) ──────────────────────────────────────────────────── // eslint-disable-next-line import/order @@ -24,10 +32,16 @@ import { HashPlanInspector } from './hash-plan-inspector'; // eslint-disable-next-line import/order import { getOutputsForTargetAndConfiguration } from '../tasks-runner/utils'; // eslint-disable-next-line import/order +import { createTaskGraph } from '../tasks-runner/create-task-graph'; +// eslint-disable-next-line import/order +import { getInputs as mockedGetInputs } from './task-hasher'; +// eslint-disable-next-line import/order import { createTaskFileResolver } from './task-file-resolver'; const MockHashPlanInspector = jest.mocked(HashPlanInspector); const mockGetOutputs = jest.mocked(getOutputsForTargetAndConfiguration); +const mockCreateTaskGraph = jest.mocked(createTaskGraph); +const mockGetStructuredInputs = jest.mocked(mockedGetInputs); // ── Per-test mock spies ────────────────────────────────────────────────────── @@ -67,6 +81,43 @@ function 
makeHashInputs(files: string[]): HashInputs { }; } +/** + * Project graph used by the dependentTasksOutputFiles tests: includes a + * direct dep `dep` and a transitive chain `mid → deep`, all with a `build` + * target so the resolver's getOutputs() doesn't short-circuit. + */ +function buildGraphWithDeps(): ProjectGraph { + const buildTarget = { + executor: '@nx/js:tsc', + outputs: [] as string[], + }; + return { + nodes: { + myproj: { + name: 'myproj', + type: 'lib', + data: { root: 'libs/myproj', targets: { build: buildTarget } }, + }, + dep: { + name: 'dep', + type: 'lib', + data: { root: 'libs/dep', targets: { build: buildTarget } }, + }, + mid: { + name: 'mid', + type: 'lib', + data: { root: 'libs/mid', targets: { build: buildTarget } }, + }, + deep: { + name: 'deep', + type: 'lib', + data: { root: 'libs/deep', targets: { build: buildTarget } }, + }, + }, + dependencies: { myproj: [], dep: [], mid: [], deep: [] }, + } as unknown as ProjectGraph; +} + // ── Tests ──────────────────────────────────────────────────────────────────── describe('createTaskFileResolver', () => { @@ -87,6 +138,22 @@ describe('createTaskFileResolver', () => { // Default: no outputs mockGetOutputs.mockReturnValue([]); + + // Defaults for the new dependentTasksOutputFiles code path: empty task + // graph and empty depsOutputs unless a test overrides them. 
+ mockCreateTaskGraph.mockReturnValue({ + roots: [], + tasks: {}, + dependencies: {}, + continuousDependencies: {}, + }); + mockGetStructuredInputs.mockReturnValue({ + selfInputs: [], + depsInputs: [], + depsOutputs: [], + projectInputs: [], + depsFilesets: [], + } as ReturnType); }); it('initialises the HashPlanInspector exactly once', async () => { @@ -212,6 +279,358 @@ describe('createTaskFileResolver', () => { false ); }); + + it('returns true when path matches a materialized depOutputs entry (upstream has run)', async () => { + const graph = buildGraph(); + mockInspectTaskInputs.mockReturnValue({ + 'myproj:build': { + files: [], + runtime: [], + environment: [], + depOutputs: ['libs/dep/dist/index.d.ts'], + external: [], + }, + }); + + const resolver = await createTaskFileResolver({ projectGraph: graph }); + expect(resolver.isInput('myproj:build', 'libs/dep/dist/index.d.ts')).toBe( + true + ); + }); + + it('returns true when path matches a dependentTasksOutputFiles glob AND an upstream task output (upstream has NOT run)', async () => { + const graph = buildGraphWithDeps(); + // No materialized files / depOutputs — simulating "upstream not yet run". + mockInspectTaskInputs.mockReturnValue({ + 'myproj:build': makeHashInputs([]), + }); + + // Task graph: myproj:build depends directly on dep:build. + mockCreateTaskGraph.mockReturnValue({ + roots: ['dep:build'], + tasks: { + 'myproj:build': { + id: 'myproj:build', + target: { project: 'myproj', target: 'build' }, + overrides: {}, + outputs: [], + }, + 'dep:build': { + id: 'dep:build', + target: { project: 'dep', target: 'build' }, + overrides: {}, + outputs: [], + }, + }, + dependencies: { + 'myproj:build': ['dep:build'], + 'dep:build': [], + }, + continuousDependencies: {}, + }); + + // myproj:build declares one dependentTasksOutputFiles input. 
+ mockGetStructuredInputs.mockReturnValue({ + selfInputs: [], + depsInputs: [], + depsOutputs: [ + { dependentTasksOutputFiles: '**/*.d.ts', transitive: false }, + ], + projectInputs: [], + depsFilesets: [], + } as ReturnType); + + // dep:build declares libs/dep/dist as an output dir. + mockGetOutputs.mockImplementation(((t: { + project?: string; + target?: { project?: string }; + }) => + (t.project ?? t.target?.project) === 'dep' + ? ['libs/dep/dist'] + : []) as typeof getOutputsForTargetAndConfiguration); + + const resolver = await createTaskFileResolver({ projectGraph: graph }); + + // Path lies inside dep:build's outputs AND matches the **/*.d.ts glob. + expect(resolver.isInput('myproj:build', 'libs/dep/dist/index.d.ts')).toBe( + true + ); + }); + + it('returns false when path matches the dependentTasksOutputFiles glob but no upstream output covers it', async () => { + const graph = buildGraph(); + mockInspectTaskInputs.mockReturnValue({ + 'myproj:build': makeHashInputs([]), + }); + mockCreateTaskGraph.mockReturnValue({ + roots: ['dep:build'], + tasks: { + 'myproj:build': { + id: 'myproj:build', + target: { project: 'myproj', target: 'build' }, + overrides: {}, + outputs: [], + }, + 'dep:build': { + id: 'dep:build', + target: { project: 'dep', target: 'build' }, + overrides: {}, + outputs: [], + }, + }, + dependencies: { + 'myproj:build': ['dep:build'], + 'dep:build': [], + }, + continuousDependencies: {}, + }); + mockGetStructuredInputs.mockReturnValue({ + selfInputs: [], + depsInputs: [], + depsOutputs: [ + { dependentTasksOutputFiles: '**/*.d.ts', transitive: false }, + ], + projectInputs: [], + depsFilesets: [], + } as ReturnType); + // dep:build's outputs do NOT include this path. + mockGetOutputs.mockReturnValue(['libs/dep/dist']); + + const resolver = await createTaskFileResolver({ projectGraph: graph }); + + // Matches **/*.d.ts but NOT inside libs/dep/dist. 
+ expect( + resolver.isInput('myproj:build', 'libs/somewhere-else/index.d.ts') + ).toBe(false); + }); + + it('returns false when path lies inside an upstream output but does not match the dependentTasksOutputFiles glob', async () => { + const graph = buildGraph(); + mockInspectTaskInputs.mockReturnValue({ + 'myproj:build': makeHashInputs([]), + }); + mockCreateTaskGraph.mockReturnValue({ + roots: ['dep:build'], + tasks: { + 'myproj:build': { + id: 'myproj:build', + target: { project: 'myproj', target: 'build' }, + overrides: {}, + outputs: [], + }, + 'dep:build': { + id: 'dep:build', + target: { project: 'dep', target: 'build' }, + overrides: {}, + outputs: [], + }, + }, + dependencies: { + 'myproj:build': ['dep:build'], + 'dep:build': [], + }, + continuousDependencies: {}, + }); + mockGetStructuredInputs.mockReturnValue({ + selfInputs: [], + depsInputs: [], + depsOutputs: [ + { dependentTasksOutputFiles: '**/*.d.ts', transitive: false }, + ], + projectInputs: [], + depsFilesets: [], + } as ReturnType); + mockGetOutputs.mockReturnValue(['libs/dep/dist']); + + const resolver = await createTaskFileResolver({ projectGraph: graph }); + + // .js does not match **/*.d.ts even though the path is inside the output dir. 
+ expect(resolver.isInput('myproj:build', 'libs/dep/dist/index.js')).toBe( + false + ); + }); + + it('walks transitive task graph dependencies when transitive=true', async () => { + const graph = buildGraphWithDeps(); + mockInspectTaskInputs.mockReturnValue({ + 'myproj:build': makeHashInputs([]), + }); + // myproj -> mid -> deep + mockCreateTaskGraph.mockReturnValue({ + roots: ['deep:build'], + tasks: { + 'myproj:build': { + id: 'myproj:build', + target: { project: 'myproj', target: 'build' }, + overrides: {}, + outputs: [], + }, + 'mid:build': { + id: 'mid:build', + target: { project: 'mid', target: 'build' }, + overrides: {}, + outputs: [], + }, + 'deep:build': { + id: 'deep:build', + target: { project: 'deep', target: 'build' }, + overrides: {}, + outputs: [], + }, + }, + dependencies: { + 'myproj:build': ['mid:build'], + 'mid:build': ['deep:build'], + 'deep:build': [], + }, + continuousDependencies: {}, + }); + mockGetStructuredInputs.mockReturnValue({ + selfInputs: [], + depsInputs: [], + depsOutputs: [ + { dependentTasksOutputFiles: '**/*.d.ts', transitive: true }, + ], + projectInputs: [], + depsFilesets: [], + } as ReturnType); + mockGetOutputs.mockImplementation(((t: { + project?: string; + target?: { project?: string }; + }) => + (t.project ?? t.target?.project) === 'deep' + ? ['libs/deep/dist'] + : []) as typeof getOutputsForTargetAndConfiguration); + + const resolver = await createTaskFileResolver({ projectGraph: graph }); + + // The matching output lives on the transitive dep `deep:build`. 
+ expect( + resolver.isInput('myproj:build', 'libs/deep/dist/index.d.ts') + ).toBe(true); + }); + + it('does NOT walk transitive deps when transitive flag is false (default)', async () => { + const graph = buildGraph(); + mockInspectTaskInputs.mockReturnValue({ + 'myproj:build': makeHashInputs([]), + }); + mockCreateTaskGraph.mockReturnValue({ + roots: ['deep:build'], + tasks: { + 'myproj:build': { + id: 'myproj:build', + target: { project: 'myproj', target: 'build' }, + overrides: {}, + outputs: [], + }, + 'mid:build': { + id: 'mid:build', + target: { project: 'mid', target: 'build' }, + overrides: {}, + outputs: [], + }, + 'deep:build': { + id: 'deep:build', + target: { project: 'deep', target: 'build' }, + overrides: {}, + outputs: [], + }, + }, + dependencies: { + 'myproj:build': ['mid:build'], + 'mid:build': ['deep:build'], + 'deep:build': [], + }, + continuousDependencies: {}, + }); + mockGetStructuredInputs.mockReturnValue({ + selfInputs: [], + depsInputs: [], + depsOutputs: [ + { dependentTasksOutputFiles: '**/*.d.ts', transitive: false }, + ], + projectInputs: [], + depsFilesets: [], + } as ReturnType); + // Only `deep` declares a matching output dir; `mid` declares nothing. + mockGetOutputs.mockImplementation(((t: { + project?: string; + target?: { project?: string }; + }) => + (t.project ?? t.target?.project) === 'deep' + ? ['libs/deep/dist'] + : []) as typeof getOutputsForTargetAndConfiguration); + + const resolver = await createTaskFileResolver({ projectGraph: graph }); + + // With transitive=false, only direct dep `mid:build` is consulted, and + // it has no matching outputs. 
+ expect( + resolver.isInput('myproj:build', 'libs/deep/dist/index.d.ts') + ).toBe(false); + }); + }); + + describe('matchesDependentTaskOutputs', () => { + it('exposes the same dep-outputs check as a standalone method', async () => { + const graph = buildGraphWithDeps(); + mockInspectTaskInputs.mockReturnValue({ + 'myproj:build': makeHashInputs([]), + }); + mockCreateTaskGraph.mockReturnValue({ + roots: ['dep:build'], + tasks: { + 'myproj:build': { + id: 'myproj:build', + target: { project: 'myproj', target: 'build' }, + overrides: {}, + outputs: [], + }, + 'dep:build': { + id: 'dep:build', + target: { project: 'dep', target: 'build' }, + overrides: {}, + outputs: [], + }, + }, + dependencies: { + 'myproj:build': ['dep:build'], + 'dep:build': [], + }, + continuousDependencies: {}, + }); + mockGetStructuredInputs.mockReturnValue({ + selfInputs: [], + depsInputs: [], + depsOutputs: [ + { dependentTasksOutputFiles: '**/*.d.ts', transitive: false }, + ], + projectInputs: [], + depsFilesets: [], + } as ReturnType); + mockGetOutputs.mockImplementation(((t: { + project?: string; + target?: { project?: string }; + }) => + (t.project ?? t.target?.project) === 'dep' + ? 
['libs/dep/dist'] + : []) as typeof getOutputsForTargetAndConfiguration); + + const resolver = await createTaskFileResolver({ projectGraph: graph }); + expect( + resolver.matchesDependentTaskOutputs( + 'myproj:build', + 'libs/dep/dist/index.d.ts' + ) + ).toBe(true); + expect( + resolver.matchesDependentTaskOutputs( + 'myproj:build', + 'libs/dep/dist/index.js' + ) + ).toBe(false); + }); }); describe('isOutput', () => { diff --git a/packages/nx/src/hasher/task-file-resolver.ts b/packages/nx/src/hasher/task-file-resolver.ts index f6e3b9ec4bc4f..dc27814890da0 100644 --- a/packages/nx/src/hasher/task-file-resolver.ts +++ b/packages/nx/src/hasher/task-file-resolver.ts @@ -1,11 +1,14 @@ import { minimatch } from 'minimatch'; -import type { NxJsonConfiguration } from '../config/nx-json'; +import { type NxJsonConfiguration, readNxJson } from '../config/nx-json'; import type { ProjectGraph } from '../config/project-graph'; +import type { Task, TaskGraph } from '../config/task-graph'; import type { HashInputs } from '../native'; +import { createTaskGraph } from '../tasks-runner/create-task-graph'; import { getOutputsForTargetAndConfiguration } from '../tasks-runner/utils'; import { splitByColons } from '../utils/split-target'; import { workspaceRoot as defaultWorkspaceRoot } from '../utils/workspace-root'; import { HashPlanInspector } from './hash-plan-inspector'; +import { type ExpandedDepsOutput, getInputs } from './task-hasher'; export interface TaskFileResolver { /** Full hash plan entry (files + runtime + environment + depOutputs + external). */ @@ -14,6 +17,13 @@ export interface TaskFileResolver { getOutputs(taskId: string): string[]; isInput(taskId: string, path: string): boolean; isOutput(taskId: string, path: string): boolean; + /** + * True iff `path` matches a `dependentTasksOutputFiles` glob declared on the + * task AND lies inside the declared outputs of an upstream task in the + * task graph. 
Works without the upstream tasks having actually run, so + * static path validation (e.g. sandbox-report verification) is supported. + */ + matchesDependentTaskOutputs(taskId: string, path: string): boolean; } export async function createTaskFileResolver(options: { @@ -22,6 +32,10 @@ export async function createTaskFileResolver(options: { workspaceRoot?: string; }): Promise { const workspaceRoot = options.workspaceRoot ?? defaultWorkspaceRoot; + let resolvedNxJson: NxJsonConfiguration | undefined = options.nxJson; + function getNxJson(): NxJsonConfiguration { + return (resolvedNxJson ??= readNxJson(workspaceRoot)); + } const inspector = new HashPlanInspector( options.projectGraph, workspaceRoot, @@ -34,6 +48,8 @@ export async function createTaskFileResolver(options: { // called more than once per taskId. const hashInputsCache = new Map(); const outputsCache = new Map(); + const taskGraphCache = new Map(); + const depsOutputsCache = new Map(); function parseTaskId(taskId: string): { project: string; @@ -87,7 +103,7 @@ export async function createTaskFileResolver(options: { return result; } - function getInputs(taskId: string): string[] { + function getInputsImpl(taskId: string): string[] { return getRawInputs(taskId)?.files ?? []; } @@ -109,23 +125,140 @@ export async function createTaskFileResolver(options: { return outputs; } + function getTaskGraphFor(taskId: string): TaskGraph | null { + if (taskGraphCache.has(taskId)) return taskGraphCache.get(taskId) ?? 
null; + const { project, target, configuration } = parseTaskId(taskId); + if (!options.projectGraph.nodes[project]) { + taskGraphCache.set(taskId, null); + return null; + } + let tg: TaskGraph | null = null; + try { + tg = createTaskGraph( + options.projectGraph, + {}, + [project], + [target], + configuration, + {}, + false + ); + } catch { + tg = null; + } + taskGraphCache.set(taskId, tg); + return tg; + } + + function findCanonicalTaskId(taskId: string, tg: TaskGraph): string | null { + if (tg.tasks[taskId]) return taskId; + const { project, target } = parseTaskId(taskId); + const prefix = `${project}:${target}`; + for (const id of Object.keys(tg.tasks)) { + if (id === prefix || id.startsWith(prefix + ':')) return id; + } + return null; + } + + function getDepsOutputs(taskId: string): ExpandedDepsOutput[] { + if (depsOutputsCache.has(taskId)) return depsOutputsCache.get(taskId)!; + + const tg = getTaskGraphFor(taskId); + if (!tg) { + depsOutputsCache.set(taskId, []); + return []; + } + const canonical = findCanonicalTaskId(taskId, tg); + if (!canonical) { + depsOutputsCache.set(taskId, []); + return []; + } + const task = tg.tasks[canonical] as Task; + let result: ExpandedDepsOutput[] = []; + try { + result = + getInputs(task, options.projectGraph, getNxJson()).depsOutputs ?? []; + } catch { + result = []; + } + depsOutputsCache.set(taskId, result); + return result; + } + + function getUpstreamTaskIds(taskId: string, transitive: boolean): string[] { + const tg = getTaskGraphFor(taskId); + if (!tg) return []; + const canonical = findCanonicalTaskId(taskId, tg); + if (!canonical) return []; + const direct = tg.dependencies[canonical] ?? []; + if (!transitive) return [...direct]; + const visited = new Set(); + const queue = [...direct]; + while (queue.length) { + const id = queue.shift()!; + if (visited.has(id)) continue; + visited.add(id); + queue.push(...(tg.dependencies[id] ?? 
[])); + } + return [...visited]; + } + + function pathMatchesOutputPattern( + normalizedPath: string, + pattern: string + ): boolean { + const np = pattern.replace(/\\/g, '/'); + return ( + normalizedPath === np || + normalizedPath.startsWith(np + '/') || + minimatch(normalizedPath, np, { dot: true }) + ); + } + + function isOutputImpl(taskId: string, path: string): boolean { + const normalized = path.replace(/\\/g, '/'); + return getOutputs(taskId).some((p) => + pathMatchesOutputPattern(normalized, p) + ); + } + + function matchesDependentTaskOutputs(taskId: string, path: string): boolean { + const normalized = path.replace(/\\/g, '/'); + const depsOutputs = getDepsOutputs(taskId); + if (depsOutputs.length === 0) return false; + for (const { dependentTasksOutputFiles, transitive } of depsOutputs) { + if (!minimatch(normalized, dependentTasksOutputFiles, { dot: true })) { + continue; + } + const upstreamIds = getUpstreamTaskIds(taskId, !!transitive); + for (const upstreamId of upstreamIds) { + if (isOutputImpl(upstreamId, normalized)) return true; + } + } + return false; + } + return { getRawInputs, - getInputs, + getInputs: getInputsImpl, getOutputs, + matchesDependentTaskOutputs, isInput(taskId: string, path: string): boolean { - return getInputs(taskId).includes(path); - }, - isOutput(taskId: string, path: string): boolean { const normalized = path.replace(/\\/g, '/'); - return getOutputs(taskId).some((pattern) => { - const normalizedPattern = pattern.replace(/\\/g, '/'); - return ( - normalized === normalizedPattern || - normalized.startsWith(normalizedPattern + '/') || - minimatch(normalized, normalizedPattern, { dot: true }) - ); - }); + const raw = getRawInputs(taskId); + if (raw) { + if (raw.files.includes(path) || raw.files.includes(normalized)) { + return true; + } + if ( + raw.depOutputs.includes(path) || + raw.depOutputs.includes(normalized) + ) { + return true; + } + } + return matchesDependentTaskOutputs(taskId, normalized); }, + isOutput: 
isOutputImpl, }; }