diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..fe589eb --- /dev/null +++ b/.env.example @@ -0,0 +1,12 @@ +# Google Sheets Report - OAuth2 Credentials +# ------------------------------------------ +# credentials.json: Google Cloud OAuth2 client credentials file. +# Download from Google Cloud Console > APIs & Services > Credentials. +# See: https://developers.google.com/sheets/api/quickstart/nodejs +# +# Default path: ./credentials.json (override with --credentials flag) +# IMPORTANT: Never commit credentials.json or token.json to version control. + +# For testing report-google-sheets package: +# TEST_CREDENTIALS_PATH=./credentials.json +# TEST_TOKEN_PATH=./token.json diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 945a567..6b24dfd 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -15,9 +15,12 @@ jobs: cache: yarn - run: yarn install --immutable - run: yarn build + - uses: actions/upload-artifact@v4 + with: + name: build-output + path: packages/**/lib/ + retention-days: 1 - # build ジョブはビルド成果物を共有しないが、 - # ビルドが通らない場合にテストを実行しても意味がないためガードとして依存させている。 test: needs: build runs-on: ubuntu-latest @@ -29,7 +32,10 @@ jobs: node-version: 24 cache: yarn - run: yarn install --immutable - - run: yarn build + - uses: actions/download-artifact@v4 + with: + name: build-output + path: packages/ - run: yarn test lint: diff --git a/ARCHITECTURE.md b/ARCHITECTURE.md index 09a28c7..4015d00 100644 --- a/ARCHITECTURE.md +++ b/ARCHITECTURE.md @@ -117,7 +117,7 @@ crawler/src/ │ ├── object/ # cleanObject │ └── error/ # DOMEvaluationError, ErrorEmitter ├── archive/ # SQLite アーカイブストレージ -│ ├── filesystem/ # 1関数1ファイル(14ファイル)+ tar, untar +│ ├── filesystem/ # 1関数1ファイル(16ファイル)+ tar, untar │ └── ... # archive, archive-accessor, database, page, resource ├── crawler/ # Crawler エンジン │ ├── crawler.ts # Crawler クラス @@ -608,3 +608,40 @@ interval: 0 # 待機なし parallels: 1 # 直列実行 image: false # 画像取得なし ``` + +--- + +## 13. 
外部依存パッケージ(`@d-zero/*`) + +Nitpicker は D-ZERO が公開する以下の外部パッケージに依存している。 +仕様変更やバグ調査時はこれらのパッケージを参照すること。バージョンは各パッケージの `package.json` を参照。 + +| パッケージ | 用途 | 検索キーワード | +| ----------------------- | ----------------------------------------------------------------------------- | ------------------------------------- | +| `@d-zero/beholder` | Puppeteer ベースのスクレイパーエンジン。`ScrapeResult` を返す | `"@d-zero/beholder" changelog` | +| `@d-zero/dealer` | 並列処理・スケジューリング。`deal()` 関数を提供 | `"@d-zero/dealer" deal concurrent` | +| `@d-zero/shared` | 共有ユーティリティ(サブパスエクスポート形式: `@d-zero/shared/parse-url` 等) | `"@d-zero/shared" subpath exports` | +| `@d-zero/roar` | CLI フレームワーク | `"@d-zero/roar" command` | +| `@d-zero/google-auth` | OAuth2 認証(`credentials.json` → `token.json`) | `"@d-zero/google-auth" oauth2` | +| `@d-zero/google-sheets` | Google Sheets API クライアント | `"@d-zero/google-sheets" spreadsheet` | +| `@d-zero/fs` | ファイルシステムユーティリティ | `"@d-zero/fs"` | +| `@d-zero/readtext` | テキスト読み取りユーティリティ | `"@d-zero/readtext"` | + +### 利用箇所マップ + +``` +@d-zero/beholder → crawler(Scraper, ScrapeResult) +@d-zero/dealer → crawler, core, cli, report-google-sheets(deal() 並列制御) +@d-zero/shared → 全パッケージ(parseUrl, delay, isError, detectCompress, detectCDN) +@d-zero/roar → cli(CLI コマンド定義) +@d-zero/google-auth → report-google-sheets(OAuth2 認証) +@d-zero/google-sheets → report-google-sheets(Sheets API) +@d-zero/fs → crawler(ファイルシステムユーティリティ) +@d-zero/readtext → cli(リストファイル読み込み) +``` + +### バージョン更新時の注意 + +- **`@d-zero/beholder`**: `ScrapeResult` の型が変わると crawler 全体に影響 +- **`@d-zero/dealer`**: `deal()` の API が変わると crawler と core の並列処理に影響 +- **`@d-zero/shared`**: サブパスエクスポートの追加・削除に注意。`@d-zero/shared/parse-url` 形式でインポートすること diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..5a6ff4d --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,118 @@ +# Contributing to Nitpicker + +Nitpicker へのコントリビューションを歓迎します。 + +## 開発環境セットアップ + +```sh +# リポジトリをクローン +git clone https://github.com/d-zero-dev/nitpicker.git +cd 
nitpicker + +# 依存関係をインストール(Yarn のみ使用、npm 厳禁) +corepack enable +yarn install --immutable + +# 全パッケージをビルド +yarn build + +# テスト実行 +yarn test +``` + +### 必要な環境 + +- **Node.js** 24 以上(Volta でバージョン管理推奨) +- **Yarn** 4.x(`corepack enable` で自動有効化) +- **npm は使用しない**: すべてのコマンドは `yarn` 経由で実行 + +## 開発ワークフロー + +### ブランチ戦略 + +- `main` ブランチから feature ブランチを作成 +- ブランチ名: `feat/機能名`, `fix/バグ名`, `docs/ドキュメント名` + +### コマンド制約 + +- **パッケージディレクトリに `cd` しない**: 常にリポジトリルートからコマンドを実行 +- **ビルドは `yarn build` のみ**: `npx tsc`, `yarn tsc` 等は禁止 +- **対象を限定した操作**: ビルド検証は `yarn build` で全パッケージ一括実行 + +```sh +# 全パッケージビルド +yarn build + +# ユニットテスト +yarn test + +# E2E テスト +yarn vitest run --config vitest.e2e.config.ts + +# lint +yarn lint:check +``` + +### コミットメッセージ + +[Conventional Commits](https://www.conventionalcommits.org/) に準拠: + +``` +feat(crawler): add retry logic for failed requests +fix(core): prevent analyze results from being silently empty +docs: update README with new CLI options +``` + +commitlint がプリコミットフックで検証します。 + +## コーディング規約 + +### ファイル構成 + +- **1ファイル1エクスポート**: エクスポートする関数/クラスは1つのみ +- **`index.ts` 禁止**: モジュールの公開は `package.json` の `exports` フィールドで行う +- **型は `types.ts` に集約**: ドメインごとに専用 `types.ts` を作成 + +### TypeScript + +- **JSDoc 必須**: すべての公開関数、クラス、interface、type に JSDoc を記述 +- **`interface` 優先**: `type` はユニオン型・交差型など `type` でしか定義できない場合のみ +- **パラメータ3つ以上**: 名前付きオブジェクトにまとめる +- **Vitest 4**: `import { describe, it, expect } from 'vitest'` を明示的に記述 + +### テスト + +- **1関数1ファイルにはユニットテスト必須** +- **テストファーストを推奨**: バグ修正や明確な仕様の機能追加では、実装より先にテスト + +### analyze プラグイン + +- `console.log` を使わない(`deal()` が進捗表示を担当) +- `definePlugin()` でプラグインを定義 +- `eachPage` / `eachUrl` コールバックでページ単位の分析を実装 + +## PR ガイドライン + +1. 変更の目的を明確に記述 +2. テストを追加・更新 +3. `yarn build && yarn test && yarn lint:check` がパスすることを確認 +4. 
破壊的変更にはマイグレーションガイドを同梱 + +## パッケージ構成 + +詳細は [ARCHITECTURE.md](./ARCHITECTURE.md) を参照。 + +## 外部依存パッケージ + +| パッケージ | 用途 | 参照 | +| ----------------------- | ---------------------------------------------- | ---------------------------- | +| `@d-zero/beholder` | Puppeteer ベースのスクレイパー | npm: `@d-zero/beholder` | +| `@d-zero/dealer` | 並列処理・スケジューリング | npm: `@d-zero/dealer` | +| `@d-zero/shared` | 共有ユーティリティ(サブパスエクスポート形式) | npm: `@d-zero/shared` | +| `@d-zero/roar` | CLI フレームワーク | npm: `@d-zero/roar` | +| `@d-zero/google-auth` | OAuth2 認証 | npm: `@d-zero/google-auth` | +| `@d-zero/google-sheets` | Google Sheets API クライアント | npm: `@d-zero/google-sheets` | + +## ライセンス + +Apache 2.0 — 詳細は [LICENSE](./LICENSE) を参照。 diff --git a/packages/@nitpicker/core/src/nitpicker.spec.ts b/packages/@nitpicker/core/src/nitpicker.spec.ts index 11d2786..5fd4a1a 100644 --- a/packages/@nitpicker/core/src/nitpicker.spec.ts +++ b/packages/@nitpicker/core/src/nitpicker.spec.ts @@ -1,4 +1,6 @@ -import type { Config } from './types.js'; +import type { AnalyzePlugin, Config, ReportPage } from './types.js'; +import type { ExURL } from '@d-zero/shared/parse-url'; +import type { Report } from '@nitpicker/types'; import { afterEach, describe, it, expect, vi } from 'vitest'; @@ -16,8 +18,57 @@ vi.mock('@nitpicker/crawler', () => ({ }, })); +vi.mock('./worker/run-in-worker.js', () => ({ + runInWorker: vi.fn(), +})); + +vi.mock('@d-zero/shared/cache', () => { + /** Mock Cache class for testing. */ + class MockCache { + /** Clears the cache. */ + clear() { + return Promise.resolve(); + } + /** Loads a cached value. */ + load() { + return Promise.resolve(null); + } + /** Stores a value. 
*/ + store() { + return Promise.resolve(); + } + } + return { Cache: MockCache }; +}); + +import { importModules } from './import-modules.js'; import { loadPluginSettings } from './load-plugin-settings.js'; import { Nitpicker } from './nitpicker.js'; +import { runInWorker } from './worker/run-in-worker.js'; + +const mockedImportModules = vi.mocked(importModules); +const mockedRunInWorker = vi.mocked(runInWorker); + +/** + * Creates a mock URL object compatible with ExURL. + * @param href - The URL string. + */ +function mockUrl(href: string) { + return { href } as ExURL; +} + +/** + * Creates a mock page object for getPagesWithRefs callback. + * @param url - The URL string. + * @param html - The HTML content (null means no snapshot). + */ +function createMockPage(url: string, html: string | null = '') { + return { + url: mockUrl(url), + isExternal: false, + getHtml: vi.fn().mockResolvedValue(html), + }; +} /** * Creates a minimal mock Archive for testing Nitpicker methods @@ -36,6 +87,8 @@ function createMockArchive() { afterEach(() => { vi.mocked(loadPluginSettings).mockReset(); + mockedImportModules.mockReset(); + mockedRunInWorker.mockReset(); }); describe('setPluginOverrides', () => { @@ -104,3 +157,326 @@ describe('setPluginOverrides', () => { expect(loadPluginSettings).toHaveBeenCalledWith({}, {}); }); }); + +describe('analyze', () => { + /** + * Helper to set up a Nitpicker instance with mocked archive and plugins. + * @param pages - Mock pages to return from getPagesWithRefs. + * @param plugins - Plugin configurations. + * @param mods - Analyze plugin modules. 
+ */ + function setupAnalyze( + pages: ReturnType[], + plugins: Config['analyze'], + mods: AnalyzePlugin[], + ) { + const archive = { + filePath: '/tmp/test.nitpicker', + write: vi.fn().mockResolvedValue(), + close: vi.fn().mockResolvedValue(), + getUrl: vi.fn().mockResolvedValue('https://example.com'), + getPagesWithRefs: vi + .fn() + .mockImplementation( + async ( + _limit: number, + callback: (pages: ReturnType[]) => Promise, + ) => { + await callback(pages); + }, + ), + setData: vi.fn().mockResolvedValue(), + }; + const config: Config = { analyze: plugins }; + vi.mocked(loadPluginSettings).mockResolvedValue(config); + mockedImportModules.mockResolvedValue(mods); + + const nitpicker = new Nitpicker(archive as never); + return { nitpicker, archive }; + } + + it('saves report with data when worker returns valid results', async () => { + const pages = [createMockPage('https://example.com/')]; + const plugin = { + name: '@nitpicker/analyze-axe', + module: '@nitpicker/analyze-axe', + configFilePath: '', + }; + const mod: AnalyzePlugin = { + headers: { score: 'Score' }, + eachPage: vi.fn(), + }; + + const workerResult: ReportPage = { + page: { score: { value: 100 } }, + violations: [{ message: 'test', severity: 'error', url: 'https://example.com/' }], + }; + mockedRunInWorker.mockResolvedValue(workerResult); + + const { nitpicker, archive } = setupAnalyze(pages, [plugin], [mod]); + await nitpicker.analyze(); + + const reportCall = archive.setData.mock.calls.find( + (call: unknown[]) => call[0] === 'analysis/report', + ); + expect(reportCall).toBeDefined(); + const report = reportCall![1] as Report; + expect(Object.keys(report.pageData.data).length).toBeGreaterThan(0); + expect(report.violations.length).toBe(1); + }); + + it('continues processing when a single worker task throws', async () => { + const pages = [ + createMockPage('https://example.com/page1'), + createMockPage('https://example.com/page2'), + ]; + const plugin = { + name: '@nitpicker/analyze-axe', + 
module: '@nitpicker/analyze-axe', + configFilePath: '', + }; + const mod: AnalyzePlugin = { + headers: { score: 'Score' }, + eachPage: vi.fn(), + }; + + // First page throws, second page returns valid result + mockedRunInWorker + .mockRejectedValueOnce(new Error('Worker crashed')) + .mockResolvedValueOnce({ + page: { score: { value: 50 } }, + violations: [], + }); + + const { nitpicker, archive } = setupAnalyze(pages, [plugin], [mod]); + // Should not throw + await nitpicker.analyze(); + + // Report should still be saved with data from the second page + const reportCall = archive.setData.mock.calls.find( + (call: unknown[]) => call[0] === 'analysis/report', + ); + expect(reportCall).toBeDefined(); + const report = reportCall![1] as Report; + expect(report.pageData.data['https://example.com/page2']).toBeDefined(); + }); + + it('emits error event when a worker task fails', async () => { + const pages = [createMockPage('https://example.com/')]; + const plugin = { + name: '@nitpicker/analyze-axe', + module: '@nitpicker/analyze-axe', + configFilePath: '', + }; + const mod: AnalyzePlugin = { + headers: { score: 'Score' }, + eachPage: vi.fn(), + }; + + mockedRunInWorker.mockRejectedValue(new Error('Worker OOM')); + + const { nitpicker } = setupAnalyze(pages, [plugin], [mod]); + const errorHandler = vi.fn(); + nitpicker.on('error', errorHandler); + + await nitpicker.analyze(); + + // Two error events: one for the failed task, one for empty results warning + expect(errorHandler).toHaveBeenCalledTimes(2); + expect(errorHandler).toHaveBeenCalledWith( + expect.objectContaining({ message: expect.stringContaining('Worker OOM') }), + ); + expect(errorHandler).toHaveBeenCalledWith( + expect.objectContaining({ + message: expect.stringContaining('Produced no data'), + }), + ); + }); + + it('emits warning when all page results are empty', async () => { + const pages = [ + createMockPage('https://example.com/page1'), + createMockPage('https://example.com/page2'), + ]; + const plugin = 
{ + name: '@nitpicker/analyze-axe', + module: '@nitpicker/analyze-axe', + configFilePath: '', + }; + const mod: AnalyzePlugin = { + headers: { score: 'Score' }, + eachPage: vi.fn(), + }; + + // All pages return null (no data) + mockedRunInWorker.mockResolvedValue(null); + + const { nitpicker, archive } = setupAnalyze(pages, [plugin], [mod]); + const errorHandler = vi.fn(); + nitpicker.on('error', errorHandler); + + await nitpicker.analyze(); + + // Report should be saved but with empty data + const reportCall = archive.setData.mock.calls.find( + (call: unknown[]) => call[0] === 'analysis/report', + ); + expect(reportCall).toBeDefined(); + const report = reportCall![1] as Report; + expect(Object.keys(report.pageData.data).length).toBe(0); + + // Warning should be emitted about empty results + expect(errorHandler).toHaveBeenCalledWith( + expect.objectContaining({ + message: expect.stringContaining('@nitpicker/analyze-axe'), + }), + ); + }); + + it('continues when eachUrl plugin throws', async () => { + const pages = [ + createMockPage('https://example.com/page1'), + createMockPage('https://example.com/page2'), + ]; + const plugin = { + name: '@nitpicker/analyze-search', + module: '@nitpicker/analyze-search', + configFilePath: '', + }; + const mod: AnalyzePlugin = { + eachUrl: vi + .fn() + .mockRejectedValueOnce(new Error('eachUrl crash')) + .mockResolvedValueOnce({ + page: { found: { value: 3 } }, + violations: [], + }), + }; + + const { nitpicker, archive } = setupAnalyze(pages, [plugin], [mod]); + const errorHandler = vi.fn(); + nitpicker.on('error', errorHandler); + + await nitpicker.analyze(); + + expect(errorHandler).toHaveBeenCalledWith( + expect.objectContaining({ + message: expect.stringContaining('eachUrl crash'), + }), + ); + const reportCall = archive.setData.mock.calls.find( + (call: unknown[]) => call[0] === 'analysis/report', + ); + expect(reportCall).toBeDefined(); + }); + + it('preserves results from other plugins when one plugin fails all pages', 
async () => { + const pages = [createMockPage('https://example.com/')]; + const pluginA = { + name: '@nitpicker/analyze-axe', + module: '@nitpicker/analyze-axe', + configFilePath: '', + }; + const pluginB = { + name: '@nitpicker/analyze-markuplint', + module: '@nitpicker/analyze-markuplint', + configFilePath: '', + }; + const modA: AnalyzePlugin = { + headers: { score: 'Score' }, + eachPage: vi.fn(), + }; + const modB: AnalyzePlugin = { + headers: { lint: 'Lint' }, + eachPage: vi.fn(), + }; + + mockedRunInWorker + .mockRejectedValueOnce(new Error('axe crashed')) + .mockResolvedValueOnce({ + page: { lint: { value: 'ok' } }, + violations: [], + }); + + const { nitpicker, archive } = setupAnalyze(pages, [pluginA, pluginB], [modA, modB]); + await nitpicker.analyze(); + + const reportCall = archive.setData.mock.calls.find( + (call: unknown[]) => call[0] === 'analysis/report', + ); + const report = reportCall![1] as Report; + expect(report.pageData.data['https://example.com/']).toBeDefined(); + }); + + it('handles getHtml throwing without crashing', async () => { + const brokenPage = createMockPage('https://example.com/broken'); + brokenPage.getHtml = vi.fn().mockRejectedValue(new Error('snapshot read failed')); + const goodPage = createMockPage('https://example.com/good'); + + const plugin = { + name: '@nitpicker/analyze-axe', + module: '@nitpicker/analyze-axe', + configFilePath: '', + }; + const mod: AnalyzePlugin = { + headers: { score: 'Score' }, + eachPage: vi.fn(), + }; + + mockedRunInWorker.mockResolvedValue({ + page: { score: { value: 90 } }, + violations: [], + }); + + const { nitpicker, archive } = setupAnalyze([brokenPage, goodPage], [plugin], [mod]); + const errorHandler = vi.fn(); + nitpicker.on('error', errorHandler); + + await nitpicker.analyze(); + + expect(errorHandler).toHaveBeenCalledWith( + expect.objectContaining({ + message: expect.stringContaining('snapshot read failed'), + }), + ); + const reportCall = archive.setData.mock.calls.find( + (call: 
unknown[]) => call[0] === 'analysis/report', + ); + const report = reportCall![1] as Report; + expect(report.pageData.data['https://example.com/good']).toBeDefined(); + }); + + it('skips pages with no HTML snapshot without error', async () => { + const pages = [ + createMockPage('https://example.com/no-html', null), + createMockPage('https://example.com/has-html'), + ]; + const plugin = { + name: '@nitpicker/analyze-axe', + module: '@nitpicker/analyze-axe', + configFilePath: '', + }; + const mod: AnalyzePlugin = { + headers: { score: 'Score' }, + eachPage: vi.fn(), + }; + + mockedRunInWorker.mockResolvedValue({ + page: { score: { value: 80 } }, + violations: [], + }); + + const { nitpicker, archive } = setupAnalyze(pages, [plugin], [mod]); + await nitpicker.analyze(); + + // runInWorker should only be called once (for the page with HTML) + expect(mockedRunInWorker).toHaveBeenCalledTimes(1); + + const reportCall = archive.setData.mock.calls.find( + (call: unknown[]) => call[0] === 'analysis/report', + ); + const report = reportCall![1] as Report; + expect(report.pageData.data['https://example.com/has-html']).toBeDefined(); + expect(report.pageData.data['https://example.com/no-html']).toBeUndefined(); + }); +}); diff --git a/packages/@nitpicker/core/src/nitpicker.ts b/packages/@nitpicker/core/src/nitpicker.ts index b190bb2..d4d8085 100644 --- a/packages/@nitpicker/core/src/nitpicker.ts +++ b/packages/@nitpicker/core/src/nitpicker.ts @@ -235,68 +235,87 @@ export class Nitpicker extends EventEmitter { // Bounded Promise pool (replaces deal()) const executing = new Set>(); + let pluginPageDataCount = 0; + let pluginErrorCount = 0; for (const [pageIndex, page] of pages.entries()) { const task = (async () => { - const cacheKey = `${plugin.name}:${page.url.href}`; - const cached = await cache.load(cacheKey); - if (cached) { - const { pages: cachedPages, violations } = cached; - if (cachedPages) { - table.addData(cachedPages); + try { + const cacheKey = 
`${plugin.name}:${page.url.href}`; + const cached = await cache.load(cacheKey); + if (cached) { + const { pages: cachedPages, violations } = cached; + if (cachedPages) { + table.addData(cachedPages); + pluginPageDataCount += Object.keys(cachedPages).length; + } + if (violations) { + for (const v of violations) { + allViolations.push(v); + } + pluginViolationCount += violations.length; + } + done++; + updateProgress(); + return; } - if (violations) { - allViolations.push(...violations); - pluginViolationCount += violations.length; - } - done++; - updateProgress(); - return; - } - const html = await page.getHtml(); - if (!html) { - done++; - updateProgress(); - return; - } + const html = await page.getHtml(); + if (!html) { + done++; + updateProgress(); + return; + } - const report = await runInWorker< - PageAnalysisWorkerData, - ReportPage | null - >({ - filePath: pageAnalysisWorkerPath, - num: pageIndex, - total: pages.length, - emitter: urlEmitter, - initialData: { - plugin, - pages: { - html, - url: page.url, + const report = await runInWorker< + PageAnalysisWorkerData, + ReportPage | null + >({ + filePath: pageAnalysisWorkerPath, + num: pageIndex, + total: pages.length, + emitter: urlEmitter, + initialData: { + plugin, + pages: { + html, + url: page.url, + }, }, - }, - }); + }); - const tablePages: Record> = {}; + const tablePages: Record> = {}; - if (report?.page) { - tablePages[page.url.href] = report.page; - table.addDataToUrl(page.url, report.page); - } + if (report?.page) { + tablePages[page.url.href] = report.page; + table.addDataToUrl(page.url, report.page); + pluginPageDataCount++; + } - await cache.store(cacheKey, { - pages: Object.keys(tablePages).length > 0 ? tablePages : undefined, - violations: report?.violations, - }); + await cache.store(cacheKey, { + pages: Object.keys(tablePages).length > 0 ? 
tablePages : undefined, + violations: report?.violations, + }); - if (report?.violations) { - allViolations.push(...report.violations); - pluginViolationCount += report.violations.length; - } + if (report?.violations) { + for (const v of report.violations) { + allViolations.push(v); + } + pluginViolationCount += report.violations.length; + } - done++; - updateProgress(); + done++; + updateProgress(); + } catch (error) { + pluginErrorCount++; + done++; + updateProgress(); + const message = error instanceof Error ? error.message : String(error); + await this.emit('error', { + message: `[${plugin.name}] Failed to analyze ${page.url.href}: ${message}`, + error: error instanceof Error ? error : null, + }); + } })(); executing.add(task); task.then( @@ -310,6 +329,16 @@ export class Nitpicker extends EventEmitter { await Promise.all(executing); + // Warn when plugin produced no page data at all + if (pluginPageDataCount === 0 && pages.length > 0) { + const errorDetail = + pluginErrorCount > 0 ? ` (${pluginErrorCount} errors occurred)` : ''; + await this.emit('error', { + message: `[${plugin.name}] Produced no data for ${pages.length} pages${errorDetail}. 
Check plugin configuration and HTML snapshots.`, + error: null, + }); + } + // Mark this plugin as Done const detail = pluginViolationCount > 0 @@ -343,19 +372,29 @@ export class Nitpicker extends EventEmitter { continue; } - const report = await mod.eachUrl({ url, isExternal }); - if (!report) { - continue; - } + try { + const report = await mod.eachUrl({ url, isExternal }); + if (!report) { + continue; + } - const { page: reportPage, violations } = report; + const { page: reportPage, violations } = report; - if (reportPage) { - table.addDataToUrl(url, reportPage); - } + if (reportPage) { + table.addDataToUrl(url, reportPage); + } - if (violations) { - allViolations.push(...violations); + if (violations) { + for (const v of violations) { + allViolations.push(v); + } + } + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + await this.emit('error', { + message: `[eachUrl] Failed to analyze ${url.href}: ${message}`, + error: error instanceof Error ? 
error : null, + }); } } } diff --git a/packages/@nitpicker/core/src/page-analysis-worker.spec.ts b/packages/@nitpicker/core/src/page-analysis-worker.spec.ts index 0220085..f054bf0 100644 --- a/packages/@nitpicker/core/src/page-analysis-worker.spec.ts +++ b/packages/@nitpicker/core/src/page-analysis-worker.spec.ts @@ -150,34 +150,31 @@ describe('page-analysis-worker', () => { expect(result).toBeNull(); }); - it('returns null and logs error when eachPage throws', async () => { - const consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {}); + it('throws error with plugin name when eachPage throws', async () => { mockedImportModules.mockResolvedValue([ { eachPage: vi.fn().mockRejectedValue(new Error('plugin crash')), }, ]); - const result = await workerFn( - { - plugin: { - name: 'broken-plugin', - module: 'broken-plugin', - configFilePath: '', + await expect( + workerFn( + { + plugin: { + name: 'broken-plugin', + module: 'broken-plugin', + configFilePath: '', + }, + pages: { + html: '', + url: parseUrl('https://example.com/'), + }, }, - pages: { - html: '', - url: parseUrl('https://example.com/'), - }, - }, - new UrlEventBus(), - 0, - 1, - ); - - expect(result).toBeNull(); - expect(consoleErrorSpy).toHaveBeenCalledWith('[broken-plugin] plugin crash'); - consoleErrorSpy.mockRestore(); + new UrlEventBus(), + 0, + 1, + ), + ).rejects.toThrow('[broken-plugin] plugin crash'); }); it('cleans up JSDOM globals after eachPage completes', async () => { @@ -221,7 +218,6 @@ describe('page-analysis-worker', () => { }); it('cleans up JSDOM globals even when eachPage throws', async () => { - vi.spyOn(console, 'error').mockImplementation(() => {}); mockedImportModules.mockResolvedValue([ { eachPage: vi.fn().mockRejectedValue(new Error('crash')), @@ -243,11 +239,12 @@ describe('page-analysis-worker', () => { new UrlEventBus(), 0, 1, - ); + ).catch(() => { + // Expected to throw + }); const g = globalThis as Record; expect(g['HTMLElement']).toBeUndefined(); - 
vi.restoreAllMocks(); }); it('provides a JSDOM window with the correct URL', async () => { diff --git a/packages/@nitpicker/core/src/page-analysis-worker.ts b/packages/@nitpicker/core/src/page-analysis-worker.ts index 148fae0..877de6f 100644 --- a/packages/@nitpicker/core/src/page-analysis-worker.ts +++ b/packages/@nitpicker/core/src/page-analysis-worker.ts @@ -119,9 +119,8 @@ export default async function ( return report ?? null; } catch (error) { - // eslint-disable-next-line no-console - console.error(`[${plugin.name}] ${error instanceof Error ? error.message : error}`); - return null; + const message = error instanceof Error ? error.message : String(error); + throw new Error(`[${plugin.name}] ${message}`); } finally { for (const key of domGlobalKeys) { delete g[key]; diff --git a/packages/@nitpicker/core/src/worker/run-in-worker.ts b/packages/@nitpicker/core/src/worker/run-in-worker.ts index 3efce83..aed41af 100644 --- a/packages/@nitpicker/core/src/worker/run-in-worker.ts +++ b/packages/@nitpicker/core/src/worker/run-in-worker.ts @@ -137,7 +137,11 @@ export function runInWorker, R>( process.removeListener('uncaughtException', killWorker); process.removeListener('uncaughtExceptionMonitor', killWorker); process.removeListener('unhandledRejection', killWorker); - resolve(message.result); + if (message.error) { + reject(new Error(message.error)); + } else { + resolve(message.result); + } } }); }); diff --git a/packages/@nitpicker/core/src/worker/worker.ts b/packages/@nitpicker/core/src/worker/worker.ts index 4deb255..c7aaea2 100644 --- a/packages/@nitpicker/core/src/worker/worker.ts +++ b/packages/@nitpicker/core/src/worker/worker.ts @@ -52,13 +52,29 @@ emitter.on('url', (url) => { }); }); -const result = await runner(data, emitter); +try { + const result = await runner(data, emitter); -if (!parentPort) { - throw new Error('Use in worker thread'); -} + if (!parentPort) { + throw new Error('Use in worker thread'); + } -parentPort.postMessage({ - type: 'finish', - 
result, -}); + parentPort.postMessage({ + type: 'finish', + result, + }); +} catch (error) { + if (!parentPort) { + throw new Error('Use in worker thread'); + } + + const message = error instanceof Error ? error.message : String(error); + // eslint-disable-next-line no-console + console.error(`[worker] Fatal error: ${message}`); + + parentPort.postMessage({ + type: 'finish', + result: null, + error: message, + }); +}