diff --git a/.changeset/ninety-toys-complain.md b/.changeset/ninety-toys-complain.md new file mode 100644 index 00000000000..750b87def52 --- /dev/null +++ b/.changeset/ninety-toys-complain.md @@ -0,0 +1,16 @@ +--- +"@khanacademy/kas": patch +"@khanacademy/keypad-context": patch +"@khanacademy/kmath": patch +"@khanacademy/math-input": patch +"@khanacademy/perseus": patch +"@khanacademy/perseus-core": patch +"@khanacademy/perseus-editor": patch +"@khanacademy/perseus-linter": patch +"@khanacademy/perseus-score": patch +"@khanacademy/perseus-utils": patch +"@khanacademy/pure-markdown": patch +"@khanacademy/simple-markdown": patch +--- + +Add catalog hash system for dependency tracking diff --git a/README.md b/README.md index 41ec9744ec8..1aaf7b06af9 100644 --- a/README.md +++ b/README.md @@ -40,12 +40,13 @@ To make changes to Perseus, create a new branch based on `main`, commit your cha ### Everyday commands ```bash -pnpm tsc -w # run the typechecker in watch mode -pnpm test # run all tests -pnpm lint # find problems -pnpm lint --fix # fix problems -pnpm storybook # open component gallery -pnpm changeset # create a changeset file (see below) +pnpm tsc -w # run the typechecker in watch mode +pnpm test # run all tests +pnpm lint # find problems +pnpm lint --fix # fix problems +pnpm storybook # open component gallery +pnpm changeset # create a changeset file (see below) +pnpm update-catalog-hashes # update catalog dependency hashes (see below) ``` Additionally, we use Khan Academy's Git extensions (OLC) to manage pull requests. @@ -63,6 +64,10 @@ The components and widgets of Perseus are developed using [Storybook](https://gi We use [changesets](https://github.com/changesets/changesets) to help manage our versioning/releases. Each pull request must include a changeset file stating which packages changed and how their versions should be incremented. Run `pnpm changeset` to generate and commit a changeset file. +### Catalog Hashes + +Catalog hashes ensure packages are republished when their catalog dependencies (Wonder Blocks, React, etc.) are updated. When a catalog dependency version changes, the hash changes, signaling that affected packages should be version-bumped and republished even if their source code hasn't changed. These hashes are automatically updated when running `utils/sync-dependencies.ts`. If you manually add catalog dependencies to a package.json, run `pnpm update-catalog-hashes` to update the hashes. The pre-publish check will verify all hashes are current before releasing. + ### Releasing Perseus to npm 1. Landing changes to `main` creates/updates a “Version Packages” PR diff --git a/package.json b/package.json index 81d78516325..fd14db9cb63 100644 --- a/package.json +++ b/package.json @@ -144,6 +144,7 @@ "lint": "eslint . 
--ext .js --ext .jsx --ext .ts --ext .tsx", "lint:timing": "cross-env TIMING=1 pnpm lint", "publish:ci": "utils/pre-publish-check-ci.ts && git diff --stat --exit-code HEAD && pnpm build && pnpm build:types && changeset publish", + "update-catalog-hashes": "utils/update-catalog-hashes-cli.ts", "sloc": "sloc packages --exclude node_modules", "test": "jest", "storybook": "storybook dev -p 6006", diff --git a/packages/kas/package.json b/packages/kas/package.json index acf9bd58bef..0871314e77c 100644 --- a/packages/kas/package.json +++ b/packages/kas/package.json @@ -42,5 +42,8 @@ "expression", "algebra", "symbolic" - ] + ], + "khan": { + "catalogHash": "a53f505e530bd791" + } } diff --git a/packages/keypad-context/package.json b/packages/keypad-context/package.json index f787dd95dfe..0163381e9d7 100644 --- a/packages/keypad-context/package.json +++ b/packages/keypad-context/package.json @@ -34,5 +34,8 @@ "peerDependencies": { "react": "catalog:peerDeps" }, - "keywords": [] + "keywords": [], + "khan": { + "catalogHash": "27e03f4ce1585aa4" + } } diff --git a/packages/kmath/package.json b/packages/kmath/package.json index 48f1a59cfea..02018e4345d 100644 --- a/packages/kmath/package.json +++ b/packages/kmath/package.json @@ -37,5 +37,8 @@ "jquery": "catalog:peerDeps", "underscore": "catalog:peerDeps" }, - "keywords": [] + "keywords": [], + "khan": { + "catalogHash": "d7663955f3310f2b" + } } diff --git a/packages/math-input/package.json b/packages/math-input/package.json index 047cd303382..2f11434b16b 100644 --- a/packages/math-input/package.json +++ b/packages/math-input/package.json @@ -78,5 +78,8 @@ "react-dom": "catalog:peerDeps", "react-transition-group": "^4.4.1" }, - "keywords": [] + "keywords": [], + "khan": { + "catalogHash": "84d2ec4b49a6d559" + } } diff --git a/packages/perseus-core/package.json b/packages/perseus-core/package.json index 13a99b9d0a1..31dd90c50fa 100644 --- a/packages/perseus-core/package.json +++ b/packages/perseus-core/package.json @@ -53,5 +53,8 @@ "@khanacademy/wonder-stuff-core": "catalog:peerDeps", "underscore": "catalog:peerDeps" }, - "keywords": [] + "keywords": [], + "khan": { + "catalogHash": "14dd1a093a029c15" + } } diff --git a/packages/perseus-editor/package.json b/packages/perseus-editor/package.json index 216788baa59..6840a492478 100644 --- a/packages/perseus-editor/package.json +++ b/packages/perseus-editor/package.json @@ -109,5 +109,8 @@ "react-dom": "catalog:peerDeps", "underscore": "catalog:peerDeps" }, - "keywords": [] + "keywords": [], + "khan": { + "catalogHash": "8504a385bdf3fd1e" + } } diff --git a/packages/perseus-linter/package.json b/packages/perseus-linter/package.json index d939d8d4acf..0745addbcf5 100644 --- a/packages/perseus-linter/package.json +++ b/packages/perseus-linter/package.json @@ -32,5 +32,8 @@ "@khanacademy/pure-markdown": "workspace:*", "perseus-build-settings": "workspace:*" }, - "keywords": [] + "keywords": [], + "khan": { + "catalogHash": "e3b0c44298fc1c14" + } } diff --git a/packages/perseus-score/package.json b/packages/perseus-score/package.json index c300bc56196..ba445241913 100644 --- a/packages/perseus-score/package.json +++ b/packages/perseus-score/package.json @@ -37,5 +37,8 @@ "peerDependencies": { "underscore": "catalog:peerDeps" }, - "keywords": [] + "keywords": [], + "khan": { + "catalogHash": "a53f505e530bd791" + } } diff --git a/packages/perseus-utils/package.json b/packages/perseus-utils/package.json index 5e835c0e505..dc253c2cc7d 100644 --- a/packages/perseus-utils/package.json +++ 
b/packages/perseus-utils/package.json
@@ -28,5 +28,8 @@
     "dependencies": {},
     "devDependencies": {
         "perseus-build-settings": "workspace:*"
-    }
+    },
+    "khan": {
+        "catalogHash": "e3b0c44298fc1c14"
+    }
 }
diff --git a/packages/perseus/package.json b/packages/perseus/package.json
index 551d6cc3729..48e76b756c7 100644
--- a/packages/perseus/package.json
+++ b/packages/perseus/package.json
@@ -131,5 +131,8 @@
         "react-popper": "^2.2.5",
         "underscore": "catalog:peerDeps"
     },
-    "keywords": []
+    "keywords": [],
+    "khan": {
+        "catalogHash": "88859ca9115ee150"
+    }
 }
diff --git a/packages/pure-markdown/package.json b/packages/pure-markdown/package.json
index 26a999317d9..cb287f0ab63 100644
--- a/packages/pure-markdown/package.json
+++ b/packages/pure-markdown/package.json
@@ -32,5 +32,8 @@
         "perseus-build-settings": "workspace:*"
     },
     "peerDependencies": {},
-    "keywords": []
+    "keywords": [],
+    "khan": {
+        "catalogHash": "e3b0c44298fc1c14"
+    }
 }
diff --git a/packages/simple-markdown/package.json b/packages/simple-markdown/package.json
index 19ede63ccc2..e3bb019d5b2 100644
--- a/packages/simple-markdown/package.json
+++ b/packages/simple-markdown/package.json
@@ -36,5 +36,8 @@
     },
     "keywords": [
         "markdown"
-    ]
+    ],
+    "khan": {
+        "catalogHash": "5d5d8fa43d22b8e8"
+    }
 }
diff --git a/utils/__tests__/update-catalog-hashes-cli.test.ts b/utils/__tests__/update-catalog-hashes-cli.test.ts
new file mode 100644
index 00000000000..5a8c3af4bc4
--- /dev/null
+++ b/utils/__tests__/update-catalog-hashes-cli.test.ts
@@ -0,0 +1,168 @@
+/**
+ * @jest-environment node
+ */
+import {describe, expect, it, jest, beforeEach, afterEach} from "@jest/globals";
+
+// We need to mock the module before importing it
+const mockUpdateCatalogHashes = jest.fn();
+jest.mock("../internal/update-catalog-hashes", () => ({
+    updateCatalogHashes: mockUpdateCatalogHashes,
+}));
+
+describe("update-catalog-hashes-cli", () => {
+    let mockExit: jest.SpiedFunction<typeof process.exit>;
+    let mockConsoleLog: jest.SpiedFunction<typeof console.log>;
+    let mockConsoleError: jest.SpiedFunction<typeof console.error>;
+    let originalArgv: string[];
+
+    beforeEach(() => {
+        // Save original argv
+        originalArgv = process.argv;
+
+        jest.clearAllMocks();
+        mockExit = jest
+            .spyOn(process, "exit")
+            .mockImplementation((() => {}) as any);
+        mockConsoleLog = jest
+            .spyOn(console, "log")
+            .mockImplementation(() => {});
+        mockConsoleError = jest
+            .spyOn(console, "error")
+            .mockImplementation(() => {});
+    });
+
+    afterEach(() => {
+        // Restore original argv
+        process.argv = originalArgv;
+        // Clear the module cache to allow re-importing with new argv
+        jest.resetModules();
+    });
+
+    // Helper to load and execute the CLI with given args
+    const runCli = (args: string[]) => {
+        // Set process.argv before importing
+        process.argv = ["node", "update-catalog-hashes-cli.ts", ...args];
+        // Import the CLI script (which will execute main immediately)
+        // Using require instead of import so it runs synchronously
+        // eslint-disable-next-line import/no-unassigned-import
+        require("../update-catalog-hashes-cli");
+    };
+
+    describe("help flag", () => {
+        it("should show help with --help flag", () => {
+            // Act
+            runCli(["--help"]);
+
+            // Assert
+            expect(mockConsoleLog).toHaveBeenCalledWith(
+                "Usage: update-catalog-hashes-cli.ts [options]",
+            );
+            expect(mockExit).toHaveBeenCalledWith(0);
+        });
+
+        it("should show help with -h flag", () => {
+            // Act
+            runCli(["-h"]);
+
+            // Assert
+            expect(mockConsoleLog).toHaveBeenCalledWith(
+                "Usage: update-catalog-hashes-cli.ts [options]",
+            );
+            expect(mockExit).toHaveBeenCalledWith(0);
+        });
+    });
+
+    describe("normal mode", () => {
+        it("should call updateCatalogHashes with isDryRun=false by default", () => {
+            // Arrange
+            mockUpdateCatalogHashes.mockImplementation(() => {});
+
+            // Act
+            runCli([]);
+
+            // Assert
+            expect(mockUpdateCatalogHashes).toHaveBeenCalledWith(false, false);
+        });
+
+        it("should not call process.exit on success", () => {
+            // Arrange
+            mockUpdateCatalogHashes.mockImplementation(() => {});
+
+            // Act
+            runCli([]);
+
+            // Assert
+            expect(mockExit).not.toHaveBeenCalled();
+        });
+    });
+
+    describe("dry-run mode", () => {
+        it("should call updateCatalogHashes with isDryRun=true when --dry-run flag is passed", () => {
+            // Arrange
+            mockUpdateCatalogHashes.mockImplementation(() => {});
+
+            // Act
+            runCli(["--dry-run"]);
+
+            // Assert
+            expect(mockUpdateCatalogHashes).toHaveBeenCalledWith(true, false);
+        });
+    });
+
+    describe("verbose mode", () => {
+        it("should call updateCatalogHashes with verbose=true when --verbose flag is passed", () => {
+            // Arrange
+            mockUpdateCatalogHashes.mockImplementation(() => {});
+
+            // Act
+            runCli(["--verbose"]);
+
+            // Assert
+            expect(mockUpdateCatalogHashes).toHaveBeenCalledWith(false, true);
+        });
+
+        it("should support both --dry-run and --verbose flags together", () => {
+            // Arrange
+            mockUpdateCatalogHashes.mockImplementation(() => {});
+
+            // Act
+            runCli(["--dry-run", "--verbose"]);
+
+            // Assert
+            expect(mockUpdateCatalogHashes).toHaveBeenCalledWith(true, true);
+        });
+    });
+
+    describe("error handling", () => {
+        it("should exit with code 1 and show error when updateCatalogHashes throws", () => {
+            // Arrange
+            const error = new Error("Test error");
+            mockUpdateCatalogHashes.mockImplementation(() => {
+                throw error;
+            });
+
+            // Act
+            runCli([]);
+
+            // Assert
+            expect(mockConsoleError).toHaveBeenCalled();
+            expect(mockExit).toHaveBeenCalledWith(1);
+        });
+
+        it("should show error stack when available", () => {
+            // Arrange
+            const error = new Error("Test error with stack");
+            error.stack = "Error: Test error with stack\n at test:1:1";
+            mockUpdateCatalogHashes.mockImplementation(() => {
+                throw error;
+            });
+
+            // Act
+            runCli([]);
+
+            // Assert
+            expect(mockConsoleError).toHaveBeenCalled();
+            expect(mockExit).toHaveBeenCalledWith(1);
+        });
+    });
+});
diff --git a/utils/internal/__tests__/get-catalog-deps-hash.test.ts b/utils/internal/__tests__/get-catalog-deps-hash.test.ts
new file mode 100644
index 00000000000..e9581f39607
--- /dev/null
+++ b/utils/internal/__tests__/get-catalog-deps-hash.test.ts
@@ -0,0 +1,389 @@
+/**
+ * @jest-environment node
+ */
+import {createHash} from "node:crypto";
+
+import {describe, expect, it, jest} from "@jest/globals";
+
+import {getCatalogDepsHash} from "../get-catalog-deps-hash";
+
+import type {PackageJson, PnpmWorkspace} from "../catalog-hash-utils";
+
+// SHA-256 hash of an empty string (truncated to 16 chars) - used when a package has no catalog dependencies
+const EMPTY_CATALOG_HASH = createHash("sha256")
+    .update("")
+    .digest("hex")
+    .substring(0, 16);
+
+const getMessagesFromSpy = (
+    spy: jest.SpiedFunction<typeof console.log>,
+): Array<unknown> =>
+    // For each call to the spy, get the first argument and return as array
+    spy.mock.calls.map((args) => {
+        const [message] = args;
+        return typeof message === "function" ?
message() : message; + }); + +describe("getCatalogDepsHash", () => { + it("should return consistent hash for package with catalog dependencies", () => { + // Arrange + const pnpmWorkspace: PnpmWorkspace = { + catalogs: { + prodDeps: { + "tiny-invariant": "1.3.1", + }, + peerDeps: { + react: "^18.2.0", + "react-dom": "^18.2.0", + }, + }, + }; + + const packageJson: PackageJson = { + name: "@khanacademy/test-package", + version: "1.0.0", + dependencies: { + "@khanacademy/perseus-core": "workspace:*", + "tiny-invariant": "catalog:prodDeps", + }, + peerDependencies: { + react: "catalog:peerDeps", + "react-dom": "catalog:peerDeps", + }, + }; + + // Act + const result1 = getCatalogDepsHash(pnpmWorkspace, packageJson); + const result2 = getCatalogDepsHash(pnpmWorkspace, packageJson); + + // Assert - Should be deterministic and produce valid hash + expect(result1).toBe(result2); + expect(result1).toMatch(/^[0-9a-f]{16}$/); // 16-char hex string + + // Verify it's the correct hash for these dependencies + const expectedHash = createHash("sha256") + .update("react@^18.2.0,react-dom@^18.2.0,tiny-invariant@1.3.1") + .digest("hex") + .substring(0, 16); + expect(result1).toBe(expectedHash); + }); + + it("should return hash of empty string for packages with no catalog dependencies", () => { + // Arrange + const pnpmWorkspace: PnpmWorkspace = { + catalogs: { + peerDeps: { + react: "^18.2.0", + }, + }, + }; + + const packageJsonNoCatalog: PackageJson = { + name: "@khanacademy/test-package", + version: "1.0.0", + dependencies: { + "@khanacademy/perseus-core": "workspace:*", + "some-other-dep": "1.0.0", + }, + }; + + const packageJsonNoDeps: PackageJson = { + name: "@khanacademy/test-package", + version: "1.0.0", + }; + + // Act + const result1 = getCatalogDepsHash(pnpmWorkspace, packageJsonNoCatalog); + const result2 = getCatalogDepsHash(pnpmWorkspace, packageJsonNoDeps); + + // Assert - Both should produce the empty string hash + expect(result1).toBe(EMPTY_CATALOG_HASH); + expect(result2).toBe(EMPTY_CATALOG_HASH); + }); + + it("should sort dependencies alphabetically for deterministic hash", () => { + // Arrange + const pnpmWorkspace: PnpmWorkspace = { + catalogs: { + peerDeps: { + "z-package": "^1.0.0", + "a-package": "^2.0.0", + "m-package": "^3.0.0", + }, + }, + }; + + const packageJson1: PackageJson = { + name: "@khanacademy/test-package", + version: "1.0.0", + peerDependencies: { + "z-package": "catalog:peerDeps", + "a-package": "catalog:peerDeps", + "m-package": "catalog:peerDeps", + }, + }; + + const packageJson2: PackageJson = { + name: "@khanacademy/test-package", + version: "1.0.0", + peerDependencies: { + "m-package": "catalog:peerDeps", + "z-package": "catalog:peerDeps", + "a-package": "catalog:peerDeps", + }, + }; + + // Act + const result1 = getCatalogDepsHash(pnpmWorkspace, packageJson1); + const result2 = getCatalogDepsHash(pnpmWorkspace, packageJson2); + + // Assert - Should produce same hash regardless of order in package.json + expect(result1).toBe(result2); + }); + + it("should log verbose output", () => { + // Arrange + const pnpmWorkspace: PnpmWorkspace = { + catalogs: { + prodDeps: { + "tiny-invariant": "1.3.1", + }, + peerDeps: { + react: "^18.2.0", + }, + }, + }; + + const packageJson: PackageJson = { + name: "@khanacademy/test-package", + version: "1.0.0", + dependencies: { + "tiny-invariant": "catalog:prodDeps", + }, + peerDependencies: { + react: "catalog:peerDeps", + }, + }; + const consoleLogSpy = jest + .spyOn(console, "log") + .mockImplementation(() => {}); + + // Act + 
getCatalogDepsHash(pnpmWorkspace, packageJson, true); + + // Assert + expect(getMessagesFromSpy(consoleLogSpy)).toMatchInlineSnapshot(` +[ + " 📦 Processing @khanacademy/test-package:", + " Catalog dependencies: tiny-invariant@1.3.1, react@^18.2.0", +] +`); + }); + + it("should not log when verbose is false", () => { + // Arrange + const pnpmWorkspace: PnpmWorkspace = { + catalogs: { + peerDeps: { + react: "^18.2.0", + }, + }, + }; + + const packageJson: PackageJson = { + name: "@khanacademy/test-package", + version: "1.0.0", + peerDependencies: { + react: "catalog:peerDeps", + }, + }; + const consoleLogSpy = jest + .spyOn(console, "log") + .mockImplementation(() => {}); + + // Act + getCatalogDepsHash(pnpmWorkspace, packageJson, false); + + // Assert + expect(consoleLogSpy).not.toHaveBeenCalled(); + }); + + it("should handle mixed catalog and non-catalog dependencies", () => { + // Arrange + const pnpmWorkspace: PnpmWorkspace = { + catalogs: { + prodDeps: { + lodash: "4.17.21", + }, + peerDeps: { + react: "^18.2.0", + }, + }, + }; + + const packageJson: PackageJson = { + name: "@khanacademy/test-package", + version: "1.0.0", + dependencies: { + lodash: "catalog:prodDeps", + "some-other-dep": "1.0.0", + "@khanacademy/perseus-core": "workspace:*", + }, + peerDependencies: { + react: "catalog:peerDeps", + "another-dep": "^2.0.0", + }, + }; + + // Act + const result = getCatalogDepsHash(pnpmWorkspace, packageJson); + + // Assert - Should only include catalog dependencies (sorted: lodash, react) + const expectedHash = createHash("sha256") + .update("lodash@4.17.21,react@^18.2.0") + .digest("hex") + .substring(0, 16); + expect(result).toBe(expectedHash); + }); + + it("should handle single catalog dependency", () => { + // Arrange + const pnpmWorkspace: PnpmWorkspace = { + catalogs: { + peerDeps: { + react: "^18.2.0", + }, + }, + }; + + const packageJson: PackageJson = { + name: "@khanacademy/test-package", + version: "1.0.0", + peerDependencies: { + react: "catalog:peerDeps", + }, + }; + + // Act + const result = getCatalogDepsHash(pnpmWorkspace, packageJson); + + // Assert - Should be consistent and deterministic + const expectedHash = createHash("sha256") + .update("react@^18.2.0") + .digest("hex") + .substring(0, 16); + expect(result).toBe(expectedHash); + }); + + it("should handle only prodDeps catalog", () => { + // Arrange + const pnpmWorkspace: PnpmWorkspace = { + catalogs: { + prodDeps: { + "tiny-invariant": "1.3.1", + }, + }, + }; + + const packageJson: PackageJson = { + name: "@khanacademy/test-package", + version: "1.0.0", + dependencies: { + "tiny-invariant": "catalog:prodDeps", + }, + }; + + // Act + const result = getCatalogDepsHash(pnpmWorkspace, packageJson); + + // Assert - Should produce correct hash for prodDeps + const expectedHash = createHash("sha256") + .update("tiny-invariant@1.3.1") + .digest("hex") + .substring(0, 16); + expect(result).toBe(expectedHash); + }); + + it("should handle only peerDeps catalog", () => { + // Arrange + const pnpmWorkspace: PnpmWorkspace = { + catalogs: { + peerDeps: { + react: "^18.2.0", + "react-dom": "^18.2.0", + }, + }, + }; + + const packageJson: PackageJson = { + name: "@khanacademy/test-package", + version: "1.0.0", + peerDependencies: { + react: "catalog:peerDeps", + "react-dom": "catalog:peerDeps", + }, + }; + + // Act + const result = getCatalogDepsHash(pnpmWorkspace, packageJson); + + // Assert - Should produce correct hash for peerDeps (sorted alphabetically) + const expectedHash = createHash("sha256") + 
.update("react@^18.2.0,react-dom@^18.2.0") + .digest("hex") + .substring(0, 16); + expect(result).toBe(expectedHash); + }); + + it("should exclude devDeps catalog dependencies", () => { + // Arrange + const pnpmWorkspace: PnpmWorkspace = { + catalogs: { + devDeps: { + react: "18.2.0", + typescript: "5.0.0", + }, + peerDeps: { + react: "^18.2.0", + }, + }, + }; + + const packageJsonWithDevDeps: PackageJson = { + name: "@khanacademy/test-package", + version: "1.0.0", + dependencies: { + typescript: "catalog:devDeps", // Should be excluded + }, + peerDependencies: { + react: "catalog:peerDeps", // Should be included + }, + }; + + const packageJsonWithoutDevDeps: PackageJson = { + name: "@khanacademy/test-package", + version: "1.0.0", + peerDependencies: { + react: "catalog:peerDeps", // Should be included + }, + }; + + // Act + const resultWithDevDeps = getCatalogDepsHash( + pnpmWorkspace, + packageJsonWithDevDeps, + ); + const resultWithoutDevDeps = getCatalogDepsHash( + pnpmWorkspace, + packageJsonWithoutDevDeps, + ); + + // Assert - Should produce same hash since devDeps are excluded + expect(resultWithDevDeps).toBe(resultWithoutDevDeps); + const expectedHash = createHash("sha256") + .update("react@^18.2.0") + .digest("hex") + .substring(0, 16); + expect(resultWithDevDeps).toBe(expectedHash); + }); +}); diff --git a/utils/internal/__tests__/maybe-update-catalog-hash.test.ts b/utils/internal/__tests__/maybe-update-catalog-hash.test.ts new file mode 100644 index 00000000000..71828283c05 --- /dev/null +++ b/utils/internal/__tests__/maybe-update-catalog-hash.test.ts @@ -0,0 +1,502 @@ +/** + * @jest-environment node + */ +import fs from "node:fs"; + +import {describe, expect, it, jest, beforeEach} from "@jest/globals"; + +import * as GetCatalogDepsHash from "../get-catalog-deps-hash"; +import {maybeUpdateCatalogHash} from "../maybe-update-catalog-hash"; + +import type {PnpmWorkspace} from "../catalog-hash-utils"; + +// Mock fs to prevent actual file writes +jest.mock("node:fs"); + +const getMockPnpmWorkspace = (): PnpmWorkspace => ({ + catalogs: { + prodDeps: { + "tiny-invariant": "1.3.1", + }, + peerDeps: { + react: "^18.2.0", + "react-dom": "^18.2.0", + }, + devDeps: { + react: "18.2.0", + "react-dom": "18.2.0", + }, + }, +}); + +const getMockPackageJson = () => ({ + name: "@khanacademy/test-package", + version: "1.0.0", + dependencies: { + "@khanacademy/perseus-core": "workspace:*", + "tiny-invariant": "catalog:prodDeps", + }, + peerDependencies: { + react: "catalog:peerDeps", + "react-dom": "catalog:peerDeps", + }, + khan: { + catalogHash: "old-hash-123", + }, +}); + +describe("maybeUpdateCatalogHash", () => { + beforeEach(() => { + jest.clearAllMocks(); + // Mock process.cwd() to return a consistent value + jest.spyOn(process, "cwd").mockReturnValue("/mock/perseus/root"); + }); + + describe("when package is in vendor directory", () => { + it("should return false", () => { + // Arrange + const vendorPackageJson = getMockPackageJson(); + jest.spyOn(fs, "readFileSync").mockReturnValue( + JSON.stringify(vendorPackageJson), + ); + jest.spyOn(fs, "writeFileSync").mockImplementation(() => {}); + + // Act + const result = maybeUpdateCatalogHash( + "/mock/perseus/root/vendor/raphael/package.json", + getMockPnpmWorkspace(), + false, + ); + + // Assert + expect(result).toBe(false); + }); + + it("should not update the package", () => { + // Arrange + const vendorPackageJson = getMockPackageJson(); + jest.spyOn(fs, "readFileSync").mockReturnValue( + JSON.stringify(vendorPackageJson), + ); + const 
mockWriteFileSync = jest + .spyOn(fs, "writeFileSync") + .mockImplementation(() => {}); + + // Act + maybeUpdateCatalogHash( + "/mock/perseus/root/vendor/jsdiff/package.json", + getMockPnpmWorkspace(), + false, + ); + + // Assert + expect(mockWriteFileSync).not.toHaveBeenCalled(); + }); + + it("should skip even if hash calculation would differ", () => { + // Arrange + const vendorPackageJson = getMockPackageJson(); + jest.spyOn(fs, "readFileSync").mockReturnValue( + JSON.stringify(vendorPackageJson), + ); + jest.spyOn(fs, "writeFileSync").mockImplementation(() => {}); + const getCatalogDepsHashSpy = jest.spyOn( + GetCatalogDepsHash, + "getCatalogDepsHash", + ); + + // Act + maybeUpdateCatalogHash( + "/mock/perseus/root/vendor/some-lib/package.json", + getMockPnpmWorkspace(), + false, + ); + + // Assert + expect(getCatalogDepsHashSpy).not.toHaveBeenCalled(); + }); + }); + + describe("when package is marked as private", () => { + it("should return false", () => { + // Arrange + const privatePackageJson = { + ...getMockPackageJson(), + private: true, + }; + jest.spyOn(fs, "readFileSync").mockReturnValue( + JSON.stringify(privatePackageJson), + ); + jest.spyOn(fs, "writeFileSync").mockImplementation(() => {}); + + // Act + const result = maybeUpdateCatalogHash( + "/mock/perseus/root/packages/test-package/package.json", + getMockPnpmWorkspace(), + false, + ); + + // Assert + expect(result).toBe(false); + }); + + it("should not update the package", () => { + // Arrange + const privatePackageJson = { + ...getMockPackageJson(), + private: true, + }; + jest.spyOn(fs, "readFileSync").mockReturnValue( + JSON.stringify(privatePackageJson), + ); + const mockWriteFileSync = jest + .spyOn(fs, "writeFileSync") + .mockImplementation(() => {}); + + // Act + maybeUpdateCatalogHash( + "/mock/perseus/root/packages/test-package/package.json", + getMockPnpmWorkspace(), + false, + ); + + // Assert + expect(mockWriteFileSync).not.toHaveBeenCalled(); + }); + }); + + describe("when catalog hash has not changed", () => { + it("should return false", () => { + // Arrange + const newHash = "old-hash-123"; // Same as existing hash + jest.spyOn(fs, "readFileSync").mockReturnValue( + JSON.stringify(getMockPackageJson()), + ); + jest.spyOn(fs, "writeFileSync").mockImplementation(() => {}); + jest.spyOn( + GetCatalogDepsHash, + "getCatalogDepsHash", + ).mockReturnValue(newHash); + + // Act + const result = maybeUpdateCatalogHash( + "/mock/perseus/root/packages/test-package/package.json", + getMockPnpmWorkspace(), + false, + ); + + // Assert + expect(result).toBe(false); + }); + + it("should not update the package", () => { + // Arrange + const newHash = "old-hash-123"; // Same as existing hash + jest.spyOn(fs, "readFileSync").mockReturnValue( + JSON.stringify(getMockPackageJson()), + ); + const mockWriteFileSync = jest + .spyOn(fs, "writeFileSync") + .mockImplementation(() => {}); + jest.spyOn( + GetCatalogDepsHash, + "getCatalogDepsHash", + ).mockReturnValue(newHash); + + // Act + maybeUpdateCatalogHash( + "/mock/perseus/root/packages/test-package/package.json", + getMockPnpmWorkspace(), + false, + ); + + // Assert + expect(mockWriteFileSync).not.toHaveBeenCalled(); + }); + }); + + describe("when hash changed", () => { + it("should log verbose message when verbose is true", () => { + // Arrange + jest.spyOn(fs, "readFileSync").mockReturnValue( + JSON.stringify(getMockPackageJson()), + ); + jest.spyOn(fs, "writeFileSync").mockImplementation(() => {}); + jest.spyOn( + GetCatalogDepsHash, + "getCatalogDepsHash", + 
).mockReturnValue("new-hash-456"); + const consoleLogSpy = jest + .spyOn(console, "log") + .mockImplementation(() => {}); + + // Act + maybeUpdateCatalogHash( + "/mock/perseus/root/packages/test-package/package.json", + getMockPnpmWorkspace(), + true, + true, // verbose + ); + + // Assert + expect(consoleLogSpy).toHaveBeenCalledWith( + " ✨ Hash changed from old-hash-123 to new-hash-456", + ); + }); + + describe("dry run", () => { + it("should return true", () => { + // Arrange + jest.spyOn(fs, "readFileSync").mockReturnValue( + JSON.stringify(getMockPackageJson()), + ); + jest.spyOn(fs, "writeFileSync").mockImplementation(() => {}); + jest.spyOn( + GetCatalogDepsHash, + "getCatalogDepsHash", + ).mockReturnValue("new-hash-456"); + + // Act + const result = maybeUpdateCatalogHash( + "/mock/perseus/root/packages/test-package/package.json", + getMockPnpmWorkspace(), + true, + ); + + // Assert + expect(result).toBe(true); + }); + + it("should not update package.json", () => { + // Arrange + jest.spyOn(fs, "readFileSync").mockReturnValue( + JSON.stringify(getMockPackageJson()), + ); + const mockWriteFileSync = jest + .spyOn(fs, "writeFileSync") + .mockImplementation(() => {}); + jest.spyOn( + GetCatalogDepsHash, + "getCatalogDepsHash", + ).mockReturnValue("new-hash-456"); + + // Act + maybeUpdateCatalogHash( + "/mock/perseus/root/packages/test-package/package.json", + getMockPnpmWorkspace(), + true, + ); + + // Assert + expect(mockWriteFileSync).not.toHaveBeenCalled(); + }); + }); + + describe("actual run", () => { + it("should return true", () => { + // Arrange + jest.spyOn(fs, "readFileSync").mockReturnValue( + JSON.stringify(getMockPackageJson()), + ); + jest.spyOn(fs, "writeFileSync").mockImplementation(() => {}); + jest.spyOn( + GetCatalogDepsHash, + "getCatalogDepsHash", + ).mockReturnValue("new-hash-456"); + + // Act + const result = maybeUpdateCatalogHash( + "/mock/perseus/root/packages/test-package/package.json", + getMockPnpmWorkspace(), + false, + ); + + // Assert + expect(result).toBe(true); + }); + + it("should update package.json with new hash", () => { + // Arrange + const mockPackageJson = getMockPackageJson(); + jest.spyOn(fs, "readFileSync").mockReturnValue( + JSON.stringify(mockPackageJson), + ); + const mockWriteFileSync = jest + .spyOn(fs, "writeFileSync") + .mockImplementation(() => {}); + jest.spyOn( + GetCatalogDepsHash, + "getCatalogDepsHash", + ).mockReturnValue("new-hash-456"); + + // Act + maybeUpdateCatalogHash( + "/mock/perseus/root/packages/test-package/package.json", + getMockPnpmWorkspace(), + false, + ); + + // Assert + expect(mockWriteFileSync).toHaveBeenCalledWith( + "/mock/perseus/root/packages/test-package/package.json", + JSON.stringify( + { + ...mockPackageJson, + khan: { + catalogHash: "new-hash-456", + }, + }, + null, + 4, + ) + "\n", + "utf-8", + ); + }); + }); + }); + + describe("when package.json has no existing catalogHash", () => { + it("should return true", () => { + // Arrange + const getPackageJsonWithoutHash = () => ({ + ...getMockPackageJson(), + khan: undefined, + }); + jest.spyOn(fs, "readFileSync").mockReturnValue( + JSON.stringify(getPackageJsonWithoutHash()), + ); + jest.spyOn(fs, "writeFileSync").mockImplementation(() => {}); + jest.spyOn( + GetCatalogDepsHash, + "getCatalogDepsHash", + ).mockReturnValue("new-hash-456"); + + // Act + const result = maybeUpdateCatalogHash( + "/mock/perseus/root/packages/test-package/package.json", + getMockPnpmWorkspace(), + false, + ); + + // Assert + expect(result).toBe(true); + }); + + it("should create 
khan object and add catalogHash", () => { + // Arrange + const getPackageJsonWithoutHash = () => ({ + ...getMockPackageJson(), + khan: undefined, + }); + jest.spyOn(fs, "readFileSync").mockReturnValue( + JSON.stringify(getPackageJsonWithoutHash()), + ); + const mockWriteFileSync = jest + .spyOn(fs, "writeFileSync") + .mockImplementation(() => {}); + jest.spyOn( + GetCatalogDepsHash, + "getCatalogDepsHash", + ).mockReturnValue("new-hash-456"); + + // Act + maybeUpdateCatalogHash( + "/mock/perseus/root/packages/test-package/package.json", + getMockPnpmWorkspace(), + false, + ); + + // Assert + const expectedPackageJson = { + ...getPackageJsonWithoutHash(), + khan: { + catalogHash: "new-hash-456", + }, + }; + expect(mockWriteFileSync).toHaveBeenCalledWith( + "/mock/perseus/root/packages/test-package/package.json", + JSON.stringify(expectedPackageJson, null, 4) + "\n", + "utf-8", + ); + }); + }); + + describe("edge cases", () => { + describe("package.json with no catalog dependencies", () => { + it("should return true when hash changes", () => { + // Arrange + const getPackageJsonNoCatalogDeps = () => ({ + name: "@khanacademy/test-package", + version: "1.0.0", + dependencies: { + "@khanacademy/perseus-core": "workspace:*", + }, + khan: { + catalogHash: "old-hash-123", + }, + }); + jest.spyOn(fs, "readFileSync").mockReturnValue( + JSON.stringify(getPackageJsonNoCatalogDeps()), + ); + jest.spyOn(fs, "writeFileSync").mockImplementation(() => {}); + jest.spyOn( + GetCatalogDepsHash, + "getCatalogDepsHash", + ).mockReturnValue("new-hash-456"); + + // Act + const result = maybeUpdateCatalogHash( + "/mock/perseus/root/packages/test-package/package.json", + getMockPnpmWorkspace(), + false, + ); + + // Assert + expect(result).toBe(true); + }); + + it("should call getCatalogDepsHash with correct arguments", () => { + // Arrange + const getPackageJsonNoCatalogDeps = () => ({ + name: "@khanacademy/test-package", + version: "1.0.0", + dependencies: { + "@khanacademy/perseus-core": "workspace:*", + }, + khan: { + catalogHash: "old-hash-123", + }, + }); + const packageJson = getPackageJsonNoCatalogDeps(); + jest.spyOn(fs, "readFileSync").mockReturnValue( + JSON.stringify(packageJson), + ); + jest.spyOn(fs, "writeFileSync").mockImplementation(() => {}); + const getCatalogDepsHashSpy = jest + .spyOn(GetCatalogDepsHash, "getCatalogDepsHash") + .mockReturnValue("new-hash-456"); + const mockWorkspace = getMockPnpmWorkspace(); + + // Act + maybeUpdateCatalogHash( + "/mock/perseus/root/packages/test-package/package.json", + mockWorkspace, + false, + ); + + // Assert + expect(getCatalogDepsHashSpy).toHaveBeenCalledWith( + mockWorkspace, + expect.objectContaining({ + name: "@khanacademy/test-package", + version: "1.0.0", + dependencies: { + "@khanacademy/perseus-core": "workspace:*", + }, + }), + false, + ); + }); + }); + }); +}); diff --git a/utils/internal/__tests__/update-catalog-hashes.test.ts b/utils/internal/__tests__/update-catalog-hashes.test.ts new file mode 100644 index 00000000000..40570d084a9 --- /dev/null +++ b/utils/internal/__tests__/update-catalog-hashes.test.ts @@ -0,0 +1,305 @@ +/** + * @jest-environment node + */ +import {describe, expect, it, jest, beforeEach} from "@jest/globals"; + +import * as CatalogHashUtils from "../catalog-hash-utils"; +import * as MaybeUpdateCatalogHash from "../maybe-update-catalog-hash"; +import {updateCatalogHashes} from "../update-catalog-hashes"; + +import type {PnpmWorkspace} from "../catalog-hash-utils"; + +describe("updateCatalogHashes", () => { + beforeEach(() => 
{ + jest.clearAllMocks(); + }); + + const getMockPnpmWorkspace = (): PnpmWorkspace => ({ + catalogs: { + prodDeps: { + "tiny-invariant": "1.3.1", + }, + peerDeps: { + react: "^18.2.0", + "react-dom": "^18.2.0", + }, + devDeps: { + react: "18.2.0", + "react-dom": "18.2.0", + }, + }, + }); + + describe("package processing", () => { + it("should process all package.json files", () => { + // Arrange + jest.spyOn(CatalogHashUtils, "loadPnpmWorkspace").mockReturnValue( + getMockPnpmWorkspace(), + ); + jest.spyOn(CatalogHashUtils, "findAllPackageJsons").mockReturnValue( + [ + "/mock/perseus/root/packages/package1/package.json", + "/mock/perseus/root/packages/package2/package.json", + "/mock/perseus/root/packages/package3/package.json", + ], + ); + const maybeUpdateSpy = jest + .spyOn(MaybeUpdateCatalogHash, "maybeUpdateCatalogHash") + .mockReturnValue(false); + + // Act + updateCatalogHashes(false); + + // Assert - Should process each package.json file found + expect(maybeUpdateSpy).toHaveBeenCalledTimes(3); + }); + + it("should call maybeUpdateCatalogHash with correct parameters", () => { + // Arrange + const packagePath = + "/mock/perseus/root/packages/package1/package.json"; + const mockWorkspace = getMockPnpmWorkspace(); + jest.spyOn(CatalogHashUtils, "loadPnpmWorkspace").mockReturnValue( + mockWorkspace, + ); + jest.spyOn(CatalogHashUtils, "findAllPackageJsons").mockReturnValue( + [packagePath], + ); + const maybeUpdateSpy = jest + .spyOn(MaybeUpdateCatalogHash, "maybeUpdateCatalogHash") + .mockReturnValue(false); + + // Act + updateCatalogHashes(false); + + // Assert - Should call with correct parameters + expect(maybeUpdateSpy).toHaveBeenCalledWith( + packagePath, + expect.objectContaining({ + catalogs: mockWorkspace.catalogs, + }), + false, + false, + ); + }); + }); + + describe("dry-run mode", () => { + it("should show dry-run message when no updates are needed", () => { + // Arrange + const consoleLogSpy = jest + .spyOn(console, "log") + .mockImplementation(() => {}); + jest.spyOn(CatalogHashUtils, "loadPnpmWorkspace").mockReturnValue( + getMockPnpmWorkspace(), + ); + jest.spyOn(CatalogHashUtils, "findAllPackageJsons").mockReturnValue( + [ + "/mock/perseus/root/packages/package1/package.json", + "/mock/perseus/root/packages/package2/package.json", + ], + ); + jest.spyOn( + MaybeUpdateCatalogHash, + "maybeUpdateCatalogHash", + ).mockReturnValue(false); + + // Act + updateCatalogHashes(true); + + // Assert + expect(consoleLogSpy).toHaveBeenCalledWith( + "🔮 Would update 0 package.json files", + ); + }); + + it("should show dry-run message when updates are needed", () => { + // Arrange + const consoleLogSpy = jest + .spyOn(console, "log") + .mockImplementation(() => {}); + jest.spyOn(CatalogHashUtils, "loadPnpmWorkspace").mockReturnValue( + getMockPnpmWorkspace(), + ); + jest.spyOn(CatalogHashUtils, "findAllPackageJsons").mockReturnValue( + [ + "/mock/perseus/root/packages/package1/package.json", + "/mock/perseus/root/packages/package2/package.json", + "/mock/perseus/root/packages/package3/package.json", + ], + ); + jest.spyOn(MaybeUpdateCatalogHash, "maybeUpdateCatalogHash") + .mockReturnValueOnce(true) + .mockReturnValueOnce(false) + .mockReturnValueOnce(true); + + // Act + updateCatalogHashes(true); + + // Assert + expect(consoleLogSpy).toHaveBeenCalledWith( + "🔮 Would update 2 package.json files", + ); + }); + + it("should call maybeUpdateCatalogHash with isDryRun=true", () => { + // Arrange + jest.spyOn(CatalogHashUtils, "loadPnpmWorkspace").mockReturnValue( + getMockPnpmWorkspace(), + 
); + jest.spyOn(CatalogHashUtils, "findAllPackageJsons").mockReturnValue( + ["/mock/perseus/root/packages/package1/package.json"], + ); + const maybeUpdateSpy = jest + .spyOn(MaybeUpdateCatalogHash, "maybeUpdateCatalogHash") + .mockReturnValue(false); + + // Act + updateCatalogHashes(true); + + // Assert + expect(maybeUpdateSpy).toHaveBeenCalledWith( + expect.any(String), + expect.any(Object), + true, // isDryRun + false, + ); + }); + }); + + describe("normal mode", () => { + it("should show success message when no updates are needed", () => { + // Arrange + const consoleLogSpy = jest + .spyOn(console, "log") + .mockImplementation(() => {}); + jest.spyOn(CatalogHashUtils, "loadPnpmWorkspace").mockReturnValue( + getMockPnpmWorkspace(), + ); + jest.spyOn(CatalogHashUtils, "findAllPackageJsons").mockReturnValue( + [ + "/mock/perseus/root/packages/package1/package.json", + "/mock/perseus/root/packages/package2/package.json", + ], + ); + jest.spyOn( + MaybeUpdateCatalogHash, + "maybeUpdateCatalogHash", + ).mockReturnValue(false); + + // Act + updateCatalogHashes(false); + + // Assert + expect(consoleLogSpy).toHaveBeenCalledWith( + "✅ Updated 0 package.json files", + ); + }); + + it("should show success message when updates are made", () => { + // Arrange + const consoleLogSpy = jest + .spyOn(console, "log") + .mockImplementation(() => {}); + jest.spyOn(CatalogHashUtils, "loadPnpmWorkspace").mockReturnValue( + getMockPnpmWorkspace(), + ); + jest.spyOn(CatalogHashUtils, "findAllPackageJsons").mockReturnValue( + [ + "/mock/perseus/root/packages/package1/package.json", + "/mock/perseus/root/packages/package2/package.json", + "/mock/perseus/root/packages/package3/package.json", + ], + ); + jest.spyOn(MaybeUpdateCatalogHash, "maybeUpdateCatalogHash") + .mockReturnValueOnce(true) + .mockReturnValueOnce(false) + .mockReturnValueOnce(true); + + // Act + updateCatalogHashes(false); + + // Assert + expect(consoleLogSpy).toHaveBeenCalledWith( + "✅ Updated 2 package.json files", + ); + }); + }); + + describe("verbose mode", () => { + it("should pass verbose flag to maybeUpdateCatalogHash", () => { + // Arrange + jest.spyOn(CatalogHashUtils, "loadPnpmWorkspace").mockReturnValue( + getMockPnpmWorkspace(), + ); + jest.spyOn(CatalogHashUtils, "findAllPackageJsons").mockReturnValue( + ["/mock/perseus/root/packages/package1/package.json"], + ); + const maybeUpdateSpy = jest + .spyOn(MaybeUpdateCatalogHash, "maybeUpdateCatalogHash") + .mockReturnValue(false); + + // Act + updateCatalogHashes(false, true); + + // Assert + expect(maybeUpdateSpy).toHaveBeenCalledWith( + expect.any(String), + expect.any(Object), + false, + true, // verbose + ); + }); + }); + + describe("integration scenarios", () => { + it("should handle empty package list", () => { + // Arrange + const consoleLogSpy = jest + .spyOn(console, "log") + .mockImplementation(() => {}); + jest.spyOn(CatalogHashUtils, "loadPnpmWorkspace").mockReturnValue( + getMockPnpmWorkspace(), + ); + jest.spyOn(CatalogHashUtils, "findAllPackageJsons").mockReturnValue( + [], + ); + const maybeUpdateSpy = jest + .spyOn(MaybeUpdateCatalogHash, "maybeUpdateCatalogHash") + .mockReturnValue(false); + + // Act + updateCatalogHashes(false); + + // Assert + expect(maybeUpdateSpy).not.toHaveBeenCalled(); + expect(consoleLogSpy).toHaveBeenCalledWith( + "✅ Updated 0 package.json files", + ); + }); + + it("should process all packages found by git ls-files", () => { + // Arrange + jest.spyOn(CatalogHashUtils, "loadPnpmWorkspace").mockReturnValue( + getMockPnpmWorkspace(), + ); + 
            jest.spyOn(CatalogHashUtils, "findAllPackageJsons").mockReturnValue(
+                ["/mock/perseus/root/packages/package1/package.json"],
+            );
+            const maybeUpdateSpy = jest
+                .spyOn(MaybeUpdateCatalogHash, "maybeUpdateCatalogHash")
+                .mockReturnValue(false);
+
+            // Act
+            updateCatalogHashes(false);
+
+            // Assert - Should process all packages found
+            expect(maybeUpdateSpy).toHaveBeenCalledWith(
+                expect.any(String),
+                expect.any(Object),
+                false,
+                false,
+            );
+        });
+    });
+});
diff --git a/utils/internal/catalog-hash-utils.ts b/utils/internal/catalog-hash-utils.ts
new file mode 100644
index 00000000000..3bcf9d44a22
--- /dev/null
+++ b/utils/internal/catalog-hash-utils.ts
@@ -0,0 +1,58 @@
+import {execSync} from "node:child_process";
+import fs from "node:fs";
+import path from "node:path";
+
+import yaml from "yaml";
+
+export type PackageJson = {
+    name: string;
+    version?: string;
+    private?: boolean;
+    dependencies?: Record<string, string>;
+    peerDependencies?: Record<string, string>;
+    khan?: {
+        catalogHash?: string;
+    };
+};
+
+export type PnpmWorkspace = {
+    catalogs: {
+        prodDeps?: Record<string, string>;
+        peerDeps?: Record<string, string>;
+        devDeps?: Record<string, string>;
+    };
+};
+
+/**
+ * Load the pnpm workspace configuration containing catalog dependencies.
+ */
+export function loadPnpmWorkspace(): PnpmWorkspace {
+    const workspaceContent = fs.readFileSync("pnpm-workspace.yaml", "utf-8");
+    return yaml.parse(workspaceContent);
+}
+
+/**
+ * Find all package.json files in the workspace that are tracked by git.
+ * This automatically excludes node_modules, dist, and other untracked files.
+ */
+export function findAllPackageJsons(): string[] {
+    try {
+        const output = execSync(
+            'git ls-files "package.json" "**/package.json"',
+            {
+                encoding: "utf-8",
+            },
+        );
+        return output
+            .trim()
+            .split("\n")
+            .filter(Boolean)
+            .map((p) => path.resolve(process.cwd(), p));
+    } catch (error) {
+        const errorMessage =
+            error instanceof Error ? error.message : String(error);
+        throw new Error(
+            `Failed to find package.json files using git. Ensure you're in a git repository. ${errorMessage}`,
+        );
+    }
+}
diff --git a/utils/internal/get-catalog-deps-hash.ts b/utils/internal/get-catalog-deps-hash.ts
new file mode 100644
index 00000000000..dfcbd2ddc6e
--- /dev/null
+++ b/utils/internal/get-catalog-deps-hash.ts
@@ -0,0 +1,83 @@
+import {createHash} from "node:crypto";
+
+import type {PackageJson, PnpmWorkspace} from "./catalog-hash-utils";
+
+/**
+ * Get the hash of the catalog dependencies for a package.
+ *
+ * This function calculates a hash based on:
+ * - Dependencies and peerDependencies with versions like "catalog:prodDeps" or "catalog:peerDeps"
+ * - Their resolved versions from the pnpm-workspace.yaml catalogs
+ * - Does NOT include devDependencies with "catalog:devDeps"
+ *
+ * @param pnpmWorkspace The PNPM workspace configuration containing catalog dependencies.
+ * @param packageJson The package.json file to get the catalog dependencies hash for.
+ * @param verbose Whether to log verbose output.
+ * @returns The hash of the catalog dependencies used in the package.json file.
+ */
+export function getCatalogDepsHash(
+    pnpmWorkspace: PnpmWorkspace,
+    packageJson: PackageJson,
+    verbose = false,
+): string {
+    // Generate a list of all dependency names and their resolved versions
+    const catalogDepVersions: Array<[string, string]> = [];
+
+    // Process regular dependencies (prodDeps and peerDeps, but NOT devDeps)
+    for (const [dep, version] of Object.entries(
+        packageJson.dependencies ??
{}, + )) { + if (version.startsWith("catalog:")) { + const catalogName = version.replace( + "catalog:", + "", + ) as keyof typeof pnpmWorkspace.catalogs; + // Skip devDeps catalog + if (catalogName === "devDeps") { + continue; + } + const resolvedVersion = pnpmWorkspace.catalogs[catalogName]?.[dep]; + if (resolvedVersion) { + catalogDepVersions.push([dep, resolvedVersion]); + } + } + } + + // Process peer dependencies + for (const [dep, version] of Object.entries( + packageJson.peerDependencies ?? {}, + )) { + if (version.startsWith("catalog:")) { + const catalogName = version.replace( + "catalog:", + "", + ) as keyof typeof pnpmWorkspace.catalogs; + const resolvedVersion = pnpmWorkspace.catalogs[catalogName]?.[dep]; + if (resolvedVersion) { + catalogDepVersions.push([dep, resolvedVersion]); + } + } + } + + if (verbose) { + console.log(` 📦 Processing ${packageJson.name}:`); + console.log( + ` Catalog dependencies: ${catalogDepVersions.map(([n, v]) => `${n}@${v}`).join(", ")}`, + ); + } + + // Create a hash of the catalogDeps using Node's built-in crypto + const inputString = catalogDepVersions + // Sort by dependency name so that the hash is deterministic + .sort((a, b) => a[0].localeCompare(b[0])) + // Include both the name and version so that the hash is deterministic + .map(([dep, version]) => `${dep}@${version}`) + .join(","); + + // Use first 16 characters of SHA-256 (64 bits) for a shorter, more readable hash + // while maintaining extremely low collision probability + return createHash("sha256") + .update(inputString) + .digest("hex") + .substring(0, 16); +} diff --git a/utils/internal/maybe-update-catalog-hash.ts b/utils/internal/maybe-update-catalog-hash.ts new file mode 100644 index 00000000000..9807e189eed --- /dev/null +++ b/utils/internal/maybe-update-catalog-hash.ts @@ -0,0 +1,76 @@ +import fs from "node:fs"; + +import {getCatalogDepsHash} from "./get-catalog-deps-hash"; + +import type {PackageJson, PnpmWorkspace} from "./catalog-hash-utils"; + +/** + * Update the catalog hash in a package.json file if it has changed. 
+ * + * This function checks if a package should have its catalog hash updated based on: + * - Whether the package is in the vendor directory (skips if so) + * - Whether the package is marked as private (skips if so) + * - Whether the current catalog hash differs from the newly calculated hash + * + * @param packageJsonPath - The absolute path to the package.json file to potentially update + * @param pnpmWorkspace - The pnpm workspace configuration containing catalog dependencies + * @param isDryRun - If true, logs what would be updated but doesn't actually modify the file + * @param verbose - If true, logs verbose information about catalog dependencies + * @returns `true` if the package was updated (or would be updated in dry run), `false` otherwise + */ +export function maybeUpdateCatalogHash( + packageJsonPath: string, + pnpmWorkspace: PnpmWorkspace, + isDryRun: boolean, + verbose = false, +): boolean { + const packageJsonContent = fs.readFileSync(packageJsonPath, "utf-8"); + const packageJson: PackageJson = JSON.parse(packageJsonContent); + const name = packageJson.name; + + // Skip vendor packages (third-party code we don't control) + if (packageJsonPath.includes("/vendor/")) { + return false; + } + + // Skip private packages (not published to npm) + if (packageJson.private === true) { + return false; + } + + const newCatalogDepsHash = getCatalogDepsHash( + pnpmWorkspace, + packageJson, + verbose, + ); + const oldCatalogDepsHash = packageJson.khan?.catalogHash; + + if (oldCatalogDepsHash === newCatalogDepsHash) { + return false; + } + + const message = isDryRun + ? `🔮 Would update package.json for ${name}` + : `🔄 Updating package.json for ${name}`; + console.log(message); + + if (verbose) { + console.log( + ` ✨ Hash changed from ${oldCatalogDepsHash} to ${newCatalogDepsHash}`, + ); + } + + if (!isDryRun) { + if (!packageJson.khan) { + packageJson.khan = {catalogHash: newCatalogDepsHash}; + } else { + packageJson.khan.catalogHash = newCatalogDepsHash; + } + fs.writeFileSync( + packageJsonPath, + JSON.stringify(packageJson, null, 4) + "\n", + "utf-8", + ); + } + return true; +} diff --git a/utils/internal/update-catalog-hashes.ts b/utils/internal/update-catalog-hashes.ts new file mode 100644 index 00000000000..995f921d23b --- /dev/null +++ b/utils/internal/update-catalog-hashes.ts @@ -0,0 +1,40 @@ +import {findAllPackageJsons, loadPnpmWorkspace} from "./catalog-hash-utils"; +import {maybeUpdateCatalogHash} from "./maybe-update-catalog-hash"; + +/** + * Update the catalog hashes in all package.json files in the project. + * + * This function will update the catalog hash in all package.json files + * in the project if the catalog dependencies have changed. + * + * @param isDryRun If true, will not update the catalog hashes, but will log + * what would be updated. + * @param verbose If true, will log detailed information about catalog dependencies. 
+ */ +export function updateCatalogHashes(isDryRun: boolean, verbose = false): void { + const allPackagePaths = findAllPackageJsons(); + const pnpmWorkspace = loadPnpmWorkspace(); + + let updatedCount = 0; + + for (const packageJsonPath of allPackagePaths) { + if ( + maybeUpdateCatalogHash( + packageJsonPath, + pnpmWorkspace, + isDryRun, + verbose, + ) + ) { + updatedCount++; + } + } + + console.log(""); + + if (isDryRun) { + console.log(`🔮 Would update ${updatedCount} package.json files`); + } else { + console.log(`✅ Updated ${updatedCount} package.json files`); + } +} diff --git a/utils/internal/verify-catalog-hashes.ts b/utils/internal/verify-catalog-hashes.ts new file mode 100644 index 00000000000..60e85a797a3 --- /dev/null +++ b/utils/internal/verify-catalog-hashes.ts @@ -0,0 +1,53 @@ +import fs from "node:fs"; + +import {findAllPackageJsons, loadPnpmWorkspace} from "./catalog-hash-utils"; +import {getCatalogDepsHash} from "./get-catalog-deps-hash"; + +import type {PackageJson} from "./catalog-hash-utils"; + +/** + * Verify that catalog hashes are up-to-date for all published packages. + */ +export function verifyCatalogHashes(): { + /** `true` if all catalog hashes are current. */ + success: boolean; + /** The set of error messages if `success` is false. */ + errors: string[]; +} { + const allPackagePaths = findAllPackageJsons(); + const pnpmWorkspace = loadPnpmWorkspace(); + const errors: string[] = []; + + for (const packageJsonPath of allPackagePaths) { + const packageJsonContent = fs.readFileSync(packageJsonPath, "utf-8"); + const packageJson: PackageJson = JSON.parse(packageJsonContent); + const name = packageJson.name; + + // Skip vendor packages (third-party code we don't control) + if (packageJsonPath.includes("/vendor/")) { + continue; + } + + // Skip private packages (not published to npm) + if (packageJson.private === true) { + continue; + } + + // Calculate what the hash should be + const expectedHash = getCatalogDepsHash(pnpmWorkspace, packageJson); + const actualHash = packageJson.khan?.catalogHash; + + // Check if hash is missing or incorrect + if (actualHash !== expectedHash) { + errors.push( + `${name}: catalog hash is ${actualHash === undefined ? "missing" : "out of date"}. ` + + `Expected "${expectedHash}", got "${actualHash}".`, + ); + } + } + + return { + success: errors.length === 0, + errors, + }; +} diff --git a/utils/pre-publish-check-ci.ts b/utils/pre-publish-check-ci.ts index 5fd48cace19..ae6cad16d45 100755 --- a/utils/pre-publish-check-ci.ts +++ b/utils/pre-publish-check-ci.ts @@ -12,6 +12,7 @@ import { checkSource, checkPublishConfig, } from "./internal/pre-publish-utils"; +import {verifyCatalogHashes} from "./internal/verify-catalog-hashes"; // eslint-disable-next-line promise/catch-or-return fg(path.join(__dirname, "..", "packages", "*", "package.json")).then( @@ -32,6 +33,20 @@ fg(path.join(__dirname, "..", "packages", "*", "package.json")).then( } } + // Verify catalog hashes are up-to-date + console.log("\n🔍 Verifying catalog hashes..."); + const catalogHashResult = verifyCatalogHashes(); + if (!catalogHashResult.success) { + console.error("\n❌ Catalog hash verification failed:\n"); + for (const error of catalogHashResult.errors) { + console.error(` - ${error}`); + } + console.error("\nTo fix, run: pnpm update-catalog-hashes\n"); + allPassed = false; + } else { + console.log("✅ All catalog hashes are up-to-date"); + } + // Exit only after we've processed all the packages. 
if (!allPassed) { process.exit(1); diff --git a/utils/sync-dependencies.ts b/utils/sync-dependencies.ts index f322e39456d..bbdd5ff4abe 100755 --- a/utils/sync-dependencies.ts +++ b/utils/sync-dependencies.ts @@ -12,6 +12,8 @@ import fs from "node:fs"; import semver from "semver"; import yaml from "yaml"; +import {updateCatalogHashes} from "./internal/update-catalog-hashes"; + function printHelp() { console.log("--- Package Dependency Sync ---"); @@ -152,6 +154,10 @@ function main(argv: string[]) { process.stderr.write("> pnpm install\n"); spawnSync("pnpm", ["install"], {stdio: "inherit"}); + + // Update catalog hashes after syncing dependencies + console.log("\n> Updating catalog hashes..."); + updateCatalogHashes(false, false); } main(process.argv); diff --git a/utils/update-catalog-hashes-cli.ts b/utils/update-catalog-hashes-cli.ts new file mode 100755 index 00000000000..7a9644081f6 --- /dev/null +++ b/utils/update-catalog-hashes-cli.ts @@ -0,0 +1,51 @@ +#!/usr/bin/env -S node -r @swc-node/register +/** + * CLI script to update catalog hashes in package.json files. + * + * This script updates the 'catalogHash' field in package.json files for published + * packages when their catalog dependencies have changed. This helps ensure that + * packages are rebuilt when their catalog dependencies are updated, even if the + * package itself hasn't changed. + */ +import process from "node:process"; + +import {updateCatalogHashes} from "./internal/update-catalog-hashes"; + +function printHelp() { + console.log("Usage: update-catalog-hashes-cli.ts [options]"); + console.log(""); + console.log("Updates catalog hashes in package.json files"); + console.log(""); + console.log("Options:"); + console.log( + " --dry-run Show what would be updated without making changes", + ); + console.log( + " --verbose Show detailed information about catalog dependencies", + ); + console.log(" --help Show this help message"); + console.log(""); +} + +function main(argv: string[]) { + const args = argv.slice(2); + + if (args.includes("--help") || args.includes("-h")) { + printHelp(); + process.exit(0); + } + + const isDryRun = args.includes("--dry-run"); + const verbose = args.includes("--verbose"); + + try { + updateCatalogHashes(isDryRun, verbose); + } catch (error) { + console.error( + `‼️ Unexpected error: ${error instanceof Error ? error.stack ?? error : error}`, + ); + process.exit(1); + } +} + +main(process.argv);
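For reference, typical invocations of the new script (a sketch of expected usage; the flags are those defined in `printHelp` above, wired up via the `update-catalog-hashes` entry added to the root package.json):

```bash
# Preview which package.json files would get a new catalogHash, with per-package detail
pnpm update-catalog-hashes --dry-run --verbose

# Write the updated hashes
pnpm update-catalog-hashes
```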