Spaces:
Running
Running
| import { describe, it, expect, beforeEach, afterEach } from 'vitest'; | |
| import * as fs from 'node:fs'; | |
| import * as path from 'node:path'; | |
| import * as os from 'node:os'; | |
| import { resolveInput } from './input-resolver.js'; | |
| /** | |
| * Helper to create a valid dashboard JSON object | |
| */ | |
/**
 * Builds a minimal dashboard JSON fixture (one file node plus a single
 * self-referencing "imports" edge) suitable for schema validation in tests.
 *
 * @param {string} [projectName='test-project'] - Value used for project.name.
 * @returns {object} A dashboard object with version, project, nodes, edges.
 */
function makeDashboard(projectName = 'test-project') {
  const project = {
    name: projectName,
    description: 'Test',
    languages: [],
    frameworks: [],
  };
  const nodes = [{ id: '1', type: 'file', name: 'index.ts' }];
  const edges = [{ source: '1', target: '1', type: 'imports' }];
  return { version: '1.0.0', project, nodes, edges };
}
| /** | |
| * Helper to create a meta.json object | |
| */ | |
/**
 * Builds a minimal meta.json fixture with fixed, easily recognizable values
 * (commit hash 'abc123', 10 analyzed files) so tests can assert on them.
 *
 * @returns {object} Metadata object: lastAnalyzedAt, gitCommitHash, version, analyzedFiles.
 */
function makeMeta() {
  const meta = {};
  meta.lastAnalyzedAt = '2024-01-01T00:00:00.000Z';
  meta.gitCommitHash = 'abc123';
  meta.version = '1.0.0';
  meta.analyzedFiles = 10;
  return meta;
}
// Integration-style suite for resolveInput's directory handling. Each test
// materializes a real directory tree under a fresh OS temp dir, invokes the
// resolver, and inspects the returned manifest / fileMapping.
describe('resolveInput - recursive directory scanning', () => {
  let tmpDir;
  beforeEach(() => {
    // Fresh, uniquely-suffixed temp dir per test so tests cannot interfere.
    tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'input-resolver-test-'));
  });
  afterEach(() => {
    // force: true tolerates the dir already having been removed.
    fs.rmSync(tmpDir, { recursive: true, force: true });
  });
  describe('handleFlatDirectory - recursive child directory scanning (Task 40.1)', () => {
    it('loads JSON files from nested child directories', () => {
      // Create structure:
      // tmpDir/
      //   top-level.json
      //   child-a/
      //     graph-a.json
      //   child-b/
      //     graph-b.json
      fs.writeFileSync(path.join(tmpDir, 'top-level.json'), JSON.stringify(makeDashboard('top')));
      fs.mkdirSync(path.join(tmpDir, 'child-a'));
      fs.writeFileSync(path.join(tmpDir, 'child-a', 'graph-a.json'), JSON.stringify(makeDashboard('project-a')));
      fs.mkdirSync(path.join(tmpDir, 'child-b'));
      fs.writeFileSync(path.join(tmpDir, 'child-b', 'graph-b.json'), JSON.stringify(makeDashboard('project-b')));
      const result = resolveInput(tmpDir);
      expect(result.success).toBe(true);
      // Guard narrows the discriminated result for the property accesses below.
      if (!result.success)
        return;
      // Should have entries for root, child-a, and child-b
      expect(result.manifest.length).toBe(3);
      const dirNames = result.manifest.map(e => e.dirName);
      const baseName = path.basename(tmpDir);
      // NOTE(review): these assertions assume dirName always uses '/' as the
      // separator, even on Windows — confirm resolveInput normalizes paths.
      expect(dirNames).toContain(baseName);
      expect(dirNames).toContain(`${baseName}/child-a`);
      expect(dirNames).toContain(`${baseName}/child-b`);
    });
    it('discovers deeply nested directories (3+ levels)', () => {
      // Create structure:
      // tmpDir/
      //   level1/
      //     level2/
      //       level3/
      //         deep.json
      const deepPath = path.join(tmpDir, 'level1', 'level2', 'level3');
      fs.mkdirSync(deepPath, { recursive: true });
      fs.writeFileSync(path.join(deepPath, 'deep.json'), JSON.stringify(makeDashboard('deep-project')));
      const result = resolveInput(tmpDir);
      expect(result.success).toBe(true);
      if (!result.success)
        return;
      const baseName = path.basename(tmpDir);
      const dirNames = result.manifest.map(e => e.dirName);
      expect(dirNames).toContain(`${baseName}/level1/level2/level3`);
      // Verify file mapping has correct URL path (slash-separated key) and
      // that its value is the absolute on-disk path.
      const expectedKey = `${baseName}/level1/level2/level3/deep.json`;
      expect(result.fileMapping[expectedKey]).toBe(path.join(deepPath, 'deep.json'));
    });
    it('skips common non-source directories', () => {
      // Each skippable dir gets a valid dashboard file inside it; none of
      // these should appear in the manifest despite being loadable.
      const skipDirs = ['node_modules', '.git', 'dist', 'build', '.next', '.cache', '__pycache__', '.turbo', 'target', 'obj', '.understand-anything'];
      for (const dir of skipDirs) {
        const dirPath = path.join(tmpDir, dir);
        fs.mkdirSync(dirPath);
        fs.writeFileSync(path.join(dirPath, 'graph.json'), JSON.stringify(makeDashboard(`skip-${dir}`)));
      }
      // Add one valid directory
      fs.mkdirSync(path.join(tmpDir, 'valid-dir'));
      fs.writeFileSync(path.join(tmpDir, 'valid-dir', 'graph.json'), JSON.stringify(makeDashboard('valid')));
      const result = resolveInput(tmpDir);
      expect(result.success).toBe(true);
      if (!result.success)
        return;
      // Should only find the valid-dir, not any of the skipped dirs
      const baseName = path.basename(tmpDir);
      const dirNames = result.manifest.map(e => e.dirName);
      for (const dir of skipDirs) {
        expect(dirNames).not.toContain(`${baseName}/${dir}`);
      }
      expect(dirNames).toContain(`${baseName}/valid-dir`);
    });
    it('preserves correct URL paths in file mapping for nested files', () => {
      // Create structure:
      // tmpDir/
      //   root.json
      //   sub/
      //     nested.json
      fs.writeFileSync(path.join(tmpDir, 'root.json'), JSON.stringify(makeDashboard('root-proj')));
      fs.mkdirSync(path.join(tmpDir, 'sub'));
      fs.writeFileSync(path.join(tmpDir, 'sub', 'nested.json'), JSON.stringify(makeDashboard('nested-proj')));
      const result = resolveInput(tmpDir);
      expect(result.success).toBe(true);
      if (!result.success)
        return;
      const baseName = path.basename(tmpDir);
      // Top-level file mapping: slash-separated key -> absolute source path.
      expect(result.fileMapping[`${baseName}/root.json`]).toBe(path.join(tmpDir, 'root.json'));
      // Nested file mapping
      expect(result.fileMapping[`${baseName}/sub/nested.json`]).toBe(path.join(tmpDir, 'sub', 'nested.json'));
    });
    it('creates separate manifest entries per child directory', () => {
      // alpha holds two graph files, beta one — counts asserted below prove
      // files are grouped per directory rather than flattened together.
      fs.mkdirSync(path.join(tmpDir, 'alpha'));
      fs.writeFileSync(path.join(tmpDir, 'alpha', 'a1.json'), JSON.stringify(makeDashboard('alpha-1')));
      fs.writeFileSync(path.join(tmpDir, 'alpha', 'a2.json'), JSON.stringify(makeDashboard('alpha-2')));
      fs.mkdirSync(path.join(tmpDir, 'beta'));
      fs.writeFileSync(path.join(tmpDir, 'beta', 'b1.json'), JSON.stringify(makeDashboard('beta-1')));
      const result = resolveInput(tmpDir);
      expect(result.success).toBe(true);
      if (!result.success)
        return;
      const baseName = path.basename(tmpDir);
      const alphaEntry = result.manifest.find(e => e.dirName === `${baseName}/alpha`);
      const betaEntry = result.manifest.find(e => e.dirName === `${baseName}/beta`);
      expect(alphaEntry).toBeDefined();
      expect(alphaEntry.graphFiles).toHaveLength(2);
      expect(betaEntry).toBeDefined();
      expect(betaEntry.graphFiles).toHaveLength(1);
    });
  });
  describe('resolveInput - recursive meta.json detection (Task 40.2)', () => {
    it('finds meta.json in deeply nested subdirectories', () => {
      // Create structure:
      // tmpDir/
      //   deep/
      //     nested/
      //       meta.json
      //       graph.json
      const nestedPath = path.join(tmpDir, 'deep', 'nested');
      fs.mkdirSync(nestedPath, { recursive: true });
      fs.writeFileSync(path.join(nestedPath, 'meta.json'), JSON.stringify(makeMeta()));
      fs.writeFileSync(path.join(nestedPath, 'graph.json'), JSON.stringify(makeDashboard('nested-project')));
      const result = resolveInput(tmpDir);
      expect(result.success).toBe(true);
      if (!result.success)
        return;
      // Should treat deep/nested as a project directory
      expect(result.manifest.length).toBe(1);
      // The dirName should be the relative path; normalize Windows
      // backslashes before asserting on the segment name.
      const dirName = result.manifest[0].dirName.replace(/\\/g, '/');
      expect(dirName).toContain('nested');
    });
    it('finds multiple meta.json directories at different nesting levels', () => {
      // Create structure:
      // tmpDir/
      //   project-a/
      //     meta.json
      //     graph-a.json
      //   deep/
      //     project-b/
      //       meta.json
      //       graph-b.json
      fs.mkdirSync(path.join(tmpDir, 'project-a'));
      fs.writeFileSync(path.join(tmpDir, 'project-a', 'meta.json'), JSON.stringify(makeMeta()));
      fs.writeFileSync(path.join(tmpDir, 'project-a', 'graph-a.json'), JSON.stringify(makeDashboard('proj-a')));
      const deepPath = path.join(tmpDir, 'deep', 'project-b');
      fs.mkdirSync(deepPath, { recursive: true });
      fs.writeFileSync(path.join(deepPath, 'meta.json'), JSON.stringify(makeMeta()));
      fs.writeFileSync(path.join(deepPath, 'graph-b.json'), JSON.stringify(makeDashboard('proj-b')));
      const result = resolveInput(tmpDir);
      expect(result.success).toBe(true);
      if (!result.success)
        return;
      // One manifest entry per meta.json-bearing directory.
      expect(result.manifest.length).toBe(2);
    });
    it('directories with meta.json are treated as project directories', () => {
      // Create a directory with meta.json - should use meta.json data, not synthesize
      fs.mkdirSync(path.join(tmpDir, 'my-project'));
      const meta = makeMeta();
      fs.writeFileSync(path.join(tmpDir, 'my-project', 'meta.json'), JSON.stringify(meta));
      fs.writeFileSync(path.join(tmpDir, 'my-project', 'graph.json'), JSON.stringify(makeDashboard('my-proj')));
      const result = resolveInput(tmpDir);
      expect(result.success).toBe(true);
      if (!result.success)
        return;
      // Values below come from the makeMeta() fixture, proving the on-disk
      // meta.json was read rather than a synthesized default.
      expect(result.manifest[0].meta.gitCommitHash).toBe('abc123');
      expect(result.manifest[0].meta.version).toBe('1.0.0');
      expect(result.manifest[0].meta.analyzedFiles).toBe(10);
    });
    it('falls back to recursive flat scanning when no meta.json found anywhere', () => {
      // Create structure with no meta.json at all
      fs.mkdirSync(path.join(tmpDir, 'flat-a'));
      fs.writeFileSync(path.join(tmpDir, 'flat-a', 'graph.json'), JSON.stringify(makeDashboard('flat-proj')));
      fs.mkdirSync(path.join(tmpDir, 'flat-b'));
      fs.writeFileSync(path.join(tmpDir, 'flat-b', 'data.json'), JSON.stringify(makeDashboard('flat-proj-2')));
      const result = resolveInput(tmpDir);
      expect(result.success).toBe(true);
      if (!result.success)
        return;
      // Should use synthesized meta (version 0.0.0, empty commit hash).
      for (const entry of result.manifest) {
        expect(entry.meta.version).toBe('0.0.0');
        expect(entry.meta.gitCommitHash).toBe('');
      }
    });
    it('skips non-source directories when searching for meta.json', () => {
      // Put meta.json inside node_modules - should be skipped
      fs.mkdirSync(path.join(tmpDir, 'node_modules', 'some-pkg'), { recursive: true });
      fs.writeFileSync(path.join(tmpDir, 'node_modules', 'some-pkg', 'meta.json'), JSON.stringify(makeMeta()));
      fs.writeFileSync(path.join(tmpDir, 'node_modules', 'some-pkg', 'graph.json'), JSON.stringify(makeDashboard('pkg')));
      // Add a valid flat directory
      fs.mkdirSync(path.join(tmpDir, 'valid'));
      fs.writeFileSync(path.join(tmpDir, 'valid', 'graph.json'), JSON.stringify(makeDashboard('valid-proj')));
      const result = resolveInput(tmpDir);
      expect(result.success).toBe(true);
      if (!result.success)
        return;
      // Should NOT find the node_modules project, should fall through to flat scanning
      const dirNames = result.manifest.map(e => e.dirName);
      for (const name of dirNames) {
        expect(name).not.toContain('node_modules');
      }
    });
  });
  describe('preserves existing behavior', () => {
    it('directory with meta.json at root is still treated as project directory', () => {
      // tmpDir itself has meta.json — the whole input dir is one project.
      fs.writeFileSync(path.join(tmpDir, 'meta.json'), JSON.stringify(makeMeta()));
      fs.writeFileSync(path.join(tmpDir, 'graph.json'), JSON.stringify(makeDashboard('root-proj')));
      const result = resolveInput(tmpDir);
      expect(result.success).toBe(true);
      if (!result.success)
        return;
      expect(result.manifest.length).toBe(1);
      expect(result.manifest[0].meta.gitCommitHash).toBe('abc123');
    });
    it('single file input still works', () => {
      // Passing a file path (not a directory) yields a one-entry manifest.
      const filePath = path.join(tmpDir, 'single.json');
      fs.writeFileSync(filePath, JSON.stringify(makeDashboard('single-proj')));
      const result = resolveInput(filePath);
      expect(result.success).toBe(true);
      if (!result.success)
        return;
      expect(result.manifest.length).toBe(1);
      expect(result.manifest[0].graphFiles[0].projectName).toBe('single-proj');
    });
  });
});