diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
new file mode 100644
index 0000000..449f407
--- /dev/null
+++ b/.github/CODEOWNERS
@@ -0,0 +1 @@
+* @git-stunts
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 98cec6d..8697ffe 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,7 +5,24 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
-## [5.3.2] — Unreleased
+## [5.3.3] — Unreleased
+
+### Added
+- **Review automation baseline** — added `.github/CODEOWNERS` with repo-wide ownership for `@git-stunts`.
+- **Release runbook** — added `docs/RELEASE.md` and linked it from `CONTRIBUTING.md` as the canonical patch-release workflow.
+- **`pnpm release:verify`** — new maintainer-facing release helper runs the full release checklist, captures observed test counts, and prints a Markdown summary that can be pasted into release notes or changelog prep.
+- **Deterministic property-based envelope coverage** — added a `fast-check`-backed property suite for envelope-encrypted store/restore round-trips and tamper rejection across empty, boundary-adjacent, and multi-chunk payload sizes.
+
+### Changed
+- **GitHub Actions runtime maintenance** — CI and release workflows now run on `actions/checkout@v6` and `actions/setup-node@v6`, clearing the Node 20 deprecation warnings from GitHub-hosted runners.
+- **Ubuntu-based Docker test stages** — the local/CI Node, Bun, and Deno test images now build on `ubuntu:24.04`, copying runtime binaries from the official upstream images instead of inheriting Debian-based runtime images directly, and the final test commands now run as an unprivileged `gitstunts` user.
+- **Test conventions expanded** — `test/CONVENTIONS.md` now documents Git tree filename ordering, Docker-only integration policy, pinned integration `fileParallelism: false`, and direct-argv subprocess helpers.
+
+### Fixed
+- **Bun blob writes in Git persistence** — `GitPersistenceAdapter.writeBlob()` now hashes temp files instead of piping large buffers through `git hash-object --stdin` under Bun, avoiding unhandled `EPIPE` failures during real Git-backed stores.
+- **Release verification runner failures** — `runReleaseVerify()` now converts thrown step-runner errors into structured step failures with a `ReleaseVerifyError` summary instead of letting raw exceptions escape.
+
+## [5.3.2] — 2026-03-15
### Changed
- **Vitest workspace split** — unit, integration, and benchmark suites now live in explicit workspace projects so the integration suite always runs with `fileParallelism: false`, regardless of the exact CLI invocation shape.
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index b393f22..e45ed12 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -10,12 +10,18 @@
2. **Install Git Hooks**: Run `bash scripts/install-hooks.sh` to set up local quality gates. This will ensure that linting and unit tests pass before every push.
3. **Run Tests Locally**:
- `pnpm test` for unit tests.
- - `pnpm run test:integration` for integration tests (requires Docker).
+ - `pnpm run test:integration:node` for Node integration tests (requires Docker).
+ - `pnpm run test:integration:bun` for Bun integration tests.
+ - `pnpm run test:integration:deno` for Deno integration tests.
+4. **Prepare Releases**:
+ - `pnpm release:verify` for the full release checklist and release-note summary output.
+ - Follow [docs/RELEASE.md](./docs/RELEASE.md) for the canonical patch-release flow.
## Quality Gates
We enforce high standards for code quality:
- **Linting**: Must pass `pnpm run lint`.
- **Unit Tests**: All unit tests must pass.
- **Integration Tests**: Must pass across Node, Bun, and Deno runtimes.
+- **Release Prep**: `pnpm release:verify` must pass before a tag is created.
-These gates are enforced both locally via git hooks and in CI/CD.
\ No newline at end of file
+These gates are enforced both locally via git hooks and in CI/CD.
diff --git a/Dockerfile b/Dockerfile
index f678987..ef88655 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,32 +1,54 @@
+FROM ubuntu:24.04 AS ubuntu-base
+ENV DEBIAN_FRONTEND=noninteractive
+RUN apt-get update \
+ && apt-get install -y --no-install-recommends ca-certificates git \
+ && groupadd --system gitstunts \
+ && useradd --system --gid gitstunts --create-home --shell /usr/sbin/nologin gitstunts \
+ && rm -rf /var/lib/apt/lists/*
+
+FROM node:22 AS node-runtime
+FROM oven/bun:1 AS bun-runtime
+FROM denoland/deno:ubuntu-2.7.1 AS deno-runtime
+
# --- Node ---
-FROM node:22-slim AS node
-RUN apt-get update && apt-get install -y git && rm -rf /var/lib/apt/lists/*
-RUN corepack enable && corepack prepare pnpm@10 --activate
+FROM ubuntu-base AS node
+COPY --from=node-runtime /usr/local/ /usr/local/
+RUN npm install -g pnpm@10
WORKDIR /app
COPY package.json pnpm-lock.yaml ./
RUN pnpm install --frozen-lockfile
COPY . .
+RUN chown -R gitstunts:gitstunts /app
ENV GIT_STUNTS_DOCKER=1
+ENV HOME=/home/gitstunts
+USER gitstunts
CMD ["pnpm", "vitest", "run", "test/unit"]
# --- Bun ---
-FROM oven/bun:1-slim AS bun
-RUN apt-get update && apt-get install -y git && rm -rf /var/lib/apt/lists/*
+FROM ubuntu-base AS bun
+COPY --from=bun-runtime /usr/local/bin/bun /usr/local/bin/bun
+COPY --from=bun-runtime /usr/local/bin/bunx /usr/local/bin/bunx
WORKDIR /app
-COPY package.json bun.lock* ./
+COPY package.json ./
RUN bun install
COPY . .
+RUN chown -R gitstunts:gitstunts /app
ENV GIT_STUNTS_DOCKER=1
+ENV HOME=/home/gitstunts
+USER gitstunts
CMD ["bunx", "vitest", "run", "test/unit"]
# --- Deno ---
-FROM denoland/deno:2.7.1 AS deno
-USER root
-RUN apt-get update && apt-get install -y git && rm -rf /var/lib/apt/lists/*
+FROM ubuntu-base AS deno
+COPY --from=deno-runtime /usr/bin/deno /usr/local/bin/deno
+COPY --from=node-runtime /usr/local/bin/node /usr/local/bin/node
WORKDIR /app
-COPY package.json deno.lock* ./
+COPY package.json ./
RUN deno install --allow-scripts || true
COPY . .
RUN deno install --allow-scripts
+RUN chown -R gitstunts:gitstunts /app
ENV GIT_STUNTS_DOCKER=1
-CMD ["deno", "run", "-A", "npm:vitest", "run", "test/unit"]
\ No newline at end of file
+ENV HOME=/home/gitstunts
+USER gitstunts
+CMD ["deno", "run", "-A", "npm:vitest", "run", "test/unit"]
diff --git a/README.md b/README.md
index 2263a15..20b5aea 100644
--- a/README.md
+++ b/README.md
@@ -38,6 +38,16 @@ We use the object database.
+## What's new in v5.3.2
+
+**Patch release — runtime/test stabilization.**
+
+- **Explicit Vitest workspace projects** — unit, integration, and benchmark suites now run as named workspace projects, with the integration suite always pinned to `fileParallelism: false`.
+- **Deterministic cross-runtime integration behavior** — Bun and Deno no longer depend on Vitest CLI argv shape to avoid subprocess `EPIPE` races.
+- **CLI version sync** — `git-cas --version` now reads package metadata instead of a stale literal, so the binary reports the correct in-repo release line.
+
+See [CHANGELOG.md](./CHANGELOG.md) for the full list of changes.
+
## What's new in v5.3.1
**Patch release — repeated chunk tree fix.**
diff --git a/ROADMAP.md b/ROADMAP.md
index aca7122..8c8a5d5 100644
--- a/ROADMAP.md
+++ b/ROADMAP.md
@@ -6,8 +6,8 @@ lives in [GRAVEYARD.md](./GRAVEYARD.md).
## Current Reality
-- **Current release:** `v5.3.1` (2026-03-15)
-- **Current line:** M16 Capstone shipped in `v5.3.0`; `v5.3.1` is the maintenance follow-up that fixed repeated-chunk tree emission for repetitive content.
+- **Current release:** `v5.3.2` (2026-03-15)
+- **Current line:** M16 Capstone shipped in `v5.3.0`; `v5.3.1` fixed repeated-chunk tree emission for repetitive content; `v5.3.2` stabilized test/runtime tooling; `v5.3.3` is the remaining M17 Ledger closeout in flight.
- **Supported runtimes:** Node.js 22.x (primary), Bun, Deno
- **Current operator experience:** the human-facing CLI/TUI is shipped now; the machine-facing agent CLI is planned next.
@@ -55,6 +55,7 @@ The agent CLI is a first-class workflow, not an extension of the human `--json`
| v5.2.0 | M12 | Carousel | Key rotation without re-encrypting data | ✅ Shipped |
| v5.3.0 | M16 | Capstone | Audit remediation and security hardening | ✅ Shipped |
| v5.3.1 | — | Maintenance | Repeated-chunk tree integrity fix | ✅ Shipped |
+| v5.3.2 | — | Maintenance | Vitest workspace split, CLI version sync, and runtime/tooling stabilization | ✅ Shipped |
Older history remains in [CHANGELOG.md](./CHANGELOG.md).
@@ -62,7 +63,7 @@ Older history remains in [CHANGELOG.md](./CHANGELOG.md).
| Version | Milestone | Codename | Theme | Status |
|---------|-----------|----------|-------|--------|
-| v5.3.2 | M17 | Ledger | Planning and ops reset | 📝 Planned |
+| v5.3.3 | M17 | Ledger | Planning and ops reset | 📝 Planned |
| v5.4.0 | M18 | Relay | LLM-native CLI foundation | 📝 Planned |
| v5.5.0 | M19 | Nouveau | Bijou v3 human UX refresh | 📝 Planned |
| v5.6.0 | M20 | Sentinel | Vault health and safety | 📝 Planned |
@@ -75,7 +76,7 @@ Older history remains in [CHANGELOG.md](./CHANGELOG.md).
## Dependency Sequence
```text
-M16 Capstone + v5.3.1 maintenance ✅
+M16 Capstone + v5.3.1/v5.3.2 maintenance ✅
|
M17 Ledger
|
@@ -101,7 +102,7 @@ interface split, then the human TUI refresh, and only then the broader feature e
## Open Milestones
-### M17 — Ledger (`v5.3.2`)
+### M17 — Ledger (`v5.3.3`)
**Theme:** planning and operational reset after Capstone.
diff --git a/STATUS.md b/STATUS.md
index dbc8829..a545122 100644
--- a/STATUS.md
+++ b/STATUS.md
@@ -1,9 +1,9 @@
# @git-stunts/cas — Project Status
-**Current release:** `v5.3.1`
-**Current branch version:** `v5.3.2`
+**Current release:** `v5.3.2`
+**Current branch version:** `v5.3.3`
**Last release:** `2026-03-15`
-**Current line:** M16 Capstone shipped in `v5.3.0`; `v5.3.1` fixed repeated-chunk tree emission; `v5.3.2` is the next maintenance/doc/test follow-up in flight.
+**Current line:** M16 Capstone shipped in `v5.3.0`; `v5.3.1` fixed repeated-chunk tree emission; `v5.3.2` stabilized test/runtime tooling; `v5.3.3` is the remaining M17 closeout in flight.
**Runtimes:** Node.js 22.x, Bun, Deno
---
@@ -19,6 +19,7 @@
| Version | Milestone | Highlights |
|---------|-----------|------------|
+| `v5.3.2` | Maintenance | Vitest workspace split for deterministic integration runs; CLI version sync; test/runtime tooling stabilization |
| `v5.3.1` | Maintenance | Repeated-chunk tree integrity fix; unique chunk tree entries; `git fsck` regression coverage |
| `v5.3.0` | M16 Capstone | Audit remediation, `.casrc`, passphrase-file support, restore guards, `encryptionCount`, lifecycle rename |
| `v5.2.0` | M12 Carousel | Key rotation without re-encrypting data |
@@ -34,7 +35,7 @@ Milestone labels are thematic and non-sequential; the versions above are listed
## Next Up
-### M17 — Ledger (`v5.3.2`)
+### M17 — Ledger (`v5.3.3`)
Planning and ops reset:
@@ -69,7 +70,7 @@ Human UX refresh:
| Version | Milestone | Theme |
|---------|-----------|-------|
-| `v5.3.2` | M17 Ledger | Planning and ops reset |
+| `v5.3.3` | M17 Ledger | Planning and ops reset |
| `v5.4.0` | M18 Relay | LLM-native CLI foundation |
| `v5.5.0` | M19 Nouveau | Bijou v3 human UX refresh |
| `v5.6.0` | M20 Sentinel | Vault health and safety |
diff --git a/bin/git-cas.js b/bin/git-cas.js
index 9f6f4d7..28882ba 100755
--- a/bin/git-cas.js
+++ b/bin/git-cas.js
@@ -4,9 +4,9 @@ import { readFileSync } from 'node:fs';
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import { program, Option } from 'commander';
-import GitPlumbing, { ShellRunnerFactory } from '@git-stunts/plumbing';
import ContentAddressableStore, { EventEmitterObserver, CborCodec } from '../index.js';
import Manifest from '../src/domain/value-objects/Manifest.js';
+import { createGitPlumbing } from '../src/infrastructure/createGitPlumbing.js';
import { createStoreProgress, createRestoreProgress } from './ui/progress.js';
import { renderEncryptionCard } from './ui/encryption-card.js';
import { renderHistoryTimeline } from './ui/history-timeline.js';
@@ -55,8 +55,7 @@ function readKeyFile(keyFilePath) {
* @returns {ContentAddressableStore}
*/
function createCas(cwd, opts = {}) {
- const runner = ShellRunnerFactory.create();
- const plumbing = new GitPlumbing({ runner, cwd });
+ const plumbing = createGitPlumbing({ cwd });
/** @type {Record} */
const casOpts = { plumbing, ...opts };
if (casOpts.codec === 'cbor') {
@@ -530,8 +529,7 @@ vault
.option('-n, --max-count ', 'Limit number of commits')
.option('--pretty', 'Render as color-coded timeline')
.action(runAction(async (/** @type {Record} */ opts) => {
- const runner = ShellRunnerFactory.create();
- const plumbing = new GitPlumbing({ runner, cwd: opts.cwd || '.' });
+ const plumbing = createGitPlumbing({ cwd: opts.cwd || '.' });
const args = ['log', '--oneline', ContentAddressableStore.VAULT_REF];
if (opts.maxCount) {
const n = parseInt(opts.maxCount, 10);
diff --git a/docs/RELEASE.md b/docs/RELEASE.md
new file mode 100644
index 0000000..e76c152
--- /dev/null
+++ b/docs/RELEASE.md
@@ -0,0 +1,46 @@
+# Release Workflow
+
+This document defines the canonical patch-release flow for `git-cas`.
+
+## Patch Release Flow
+
+1. Branch from `main`.
+2. Bump the in-flight version in `package.json` and `jsr.json`.
+3. Add a new unreleased section to `CHANGELOG.md`.
+4. Run `pnpm release:verify`.
+5. Open a pull request and wait for review.
+6. Merge to `main`.
+7. Sync local `main` to `origin/main`.
+8. Run `pnpm release:verify` again on `main`.
+9. Finalize release-facing docs:
+ - mark the changelog entry released
+ - update the lead README “What’s new” section
+ - update `STATUS.md` and `ROADMAP.md`
+10. Create and push the tag (`vX.Y.Z`).
+
+## Release Verification
+
+`pnpm release:verify` is the maintainer-facing verification entrypoint for
+release prep. It runs the repository release gates in order and prints a
+Markdown summary that can be pasted into release notes or changelog prep.
+
+Current release verification includes:
+
+- `pnpm run lint`
+- `pnpm test`
+- `docker compose run --build --rm test-bun bunx vitest run test/unit`
+- `docker compose run --build --rm test-deno deno run -A npm:vitest run test/unit`
+- `pnpm run test:integration:node`
+- `pnpm run test:integration:bun`
+- `pnpm run test:integration:deno`
+- `npm pack --dry-run`
+- `npx jsr publish --dry-run --allow-dirty`
+
+The helper is intentionally read-only with respect to release notes. It does
+not edit `CHANGELOG.md`; it only prints a summary block for maintainers.
+
+## Release Notes Discipline
+
+- Treat release tags as immutable.
+- Do not tag until the merged `main` branch passes release verification.
+- If any runtime fails, fix the underlying problem before tagging.
diff --git a/jsr.json b/jsr.json
index 739c4d8..0c701e5 100644
--- a/jsr.json
+++ b/jsr.json
@@ -1,6 +1,6 @@
{
"name": "@git-stunts/git-cas",
- "version": "5.3.2",
+ "version": "5.3.3",
"exports": {
".": "./index.js",
"./service": "./src/domain/services/CasService.js",
diff --git a/package.json b/package.json
index c2cad14..b6b17bc 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "@git-stunts/git-cas",
- "version": "5.3.2",
+ "version": "5.3.3",
"description": "Content-addressed storage backed by Git's object database, with optional encryption and pluggable codecs",
"type": "module",
"main": "index.js",
@@ -62,6 +62,7 @@
"test:platforms": "bats --jobs 3 test/platform/runtimes.bats",
"benchmark": "vitest bench test/benchmark",
"benchmark:local": "vitest bench test/benchmark",
+ "release:verify": "node scripts/release/verify.js",
"lint": "eslint .",
"format": "prettier --write ."
},
@@ -85,6 +86,7 @@
"@eslint/js": "^9.17.0",
"@types/node": "^25.3.2",
"eslint": "^9.17.0",
+ "fast-check": "^4.6.0",
"jsr": "^0.14.2",
"prettier": "^3.4.2",
"vitest": "^2.1.8"
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 5e4d3e8..7a55c39 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -42,6 +42,9 @@ importers:
eslint:
specifier: ^9.17.0
version: 9.39.2
+ fast-check:
+ specifier: ^4.6.0
+ version: 4.6.0
jsr:
specifier: ^0.14.2
version: 0.14.2
@@ -635,6 +638,10 @@ packages:
resolution: {integrity: sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA==}
engines: {node: '>=12.0.0'}
+ fast-check@4.6.0:
+ resolution: {integrity: sha512-h7H6Dm0Fy+H4ciQYFxFjXnXkzR2kr9Fb22c0UBpHnm59K2zpr2t13aPTHlltFiNT6zuxp6HMPAVVvgur4BLdpA==}
+ engines: {node: '>=12.17.0'}
+
fast-deep-equal@3.1.3:
resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==}
@@ -809,6 +816,9 @@ packages:
resolution: {integrity: sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==}
engines: {node: '>=6'}
+ pure-rand@8.1.0:
+ resolution: {integrity: sha512-53B3MB8wetRdD6JZ4W/0gDKaOvKwuXrEmV1auQc0hASWge8rieKV4PCCVNVbJ+i24miiubb4c/B+dg8Ho0ikYw==}
+
resolve-from@4.0.0:
resolution: {integrity: sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==}
engines: {node: '>=4'}
@@ -1443,6 +1453,10 @@ snapshots:
expect-type@1.3.0: {}
+ fast-check@4.6.0:
+ dependencies:
+ pure-rand: 8.1.0
+
fast-deep-equal@3.1.3: {}
fast-json-stable-stringify@2.1.0: {}
@@ -1589,6 +1603,8 @@ snapshots:
punycode@2.3.1: {}
+ pure-rand@8.1.0: {}
+
resolve-from@4.0.0: {}
rollup@4.57.1:
diff --git a/scripts/release/verify.js b/scripts/release/verify.js
new file mode 100644
index 0000000..3440ffd
--- /dev/null
+++ b/scripts/release/verify.js
@@ -0,0 +1,299 @@
+#!/usr/bin/env node
+
+/**
+ * @fileoverview Release verification runner for maintainers. Executes the full
+ * multi-runtime release checklist and emits a Markdown summary suitable for
+ * changelog or release-note preparation.
+ */
+
+import { spawn } from 'node:child_process';
+import { readFileSync } from 'node:fs';
+import path from 'node:path';
+import { fileURLToPath } from 'node:url';
+
+const __dirname = path.dirname(fileURLToPath(import.meta.url));
+const DEFAULT_CWD = path.resolve(__dirname, '../..');
+const ESC = String.fromCharCode(27);
+const ANSI_RE = new RegExp(`${ESC}(?:[@-Z\\\\-_]|\\[[0-?]*[ -/]*[@-~])`, 'gu');
+const DEFAULT_LOGGER = {
+ /**
+ * Print a single log line.
+ *
+ * @param {string} [text]
+ * @returns {void}
+ */
+ line(text = '') {
+ process.stdout.write(`${text}\n`);
+ },
+};
+
+export const RELEASE_STEPS = [
+ {
+ id: 'lint',
+ label: 'Lint',
+ command: 'pnpm',
+ args: ['run', 'lint'],
+ },
+ {
+ id: 'unit-node',
+ label: 'Unit Tests (Node)',
+ command: 'pnpm',
+ args: ['test'],
+ testCount: true,
+ },
+ {
+ id: 'unit-bun',
+ label: 'Unit Tests (Bun)',
+ command: 'docker',
+ args: ['compose', 'run', '--build', '--rm', 'test-bun', 'bunx', 'vitest', 'run', 'test/unit'],
+ testCount: true,
+ },
+ {
+ id: 'unit-deno',
+ label: 'Unit Tests (Deno)',
+ command: 'docker',
+ args: ['compose', 'run', '--build', '--rm', 'test-deno', 'deno', 'run', '-A', 'npm:vitest', 'run', 'test/unit'],
+ testCount: true,
+ },
+ {
+ id: 'integration-node',
+ label: 'Integration Tests (Node)',
+ command: 'pnpm',
+ args: ['run', 'test:integration:node'],
+ testCount: true,
+ },
+ {
+ id: 'integration-bun',
+ label: 'Integration Tests (Bun)',
+ command: 'pnpm',
+ args: ['run', 'test:integration:bun'],
+ testCount: true,
+ },
+ {
+ id: 'integration-deno',
+ label: 'Integration Tests (Deno)',
+ command: 'pnpm',
+ args: ['run', 'test:integration:deno'],
+ testCount: true,
+ },
+ {
+ id: 'npm-pack',
+ label: 'npm pack dry-run',
+ command: 'npm',
+ args: ['pack', '--dry-run'],
+ },
+ {
+ id: 'jsr-publish',
+ label: 'JSR publish dry-run',
+ command: 'npx',
+ args: ['jsr', 'publish', '--dry-run', '--allow-dirty'],
+ },
+];
+
+export class ReleaseVerifyError extends Error {
+ constructor(message, { step, results, summary } = {}) {
+ super(message);
+ this.name = 'ReleaseVerifyError';
+ this.step = step;
+ this.results = results ?? [];
+ this.summary = summary ?? '';
+ }
+}
+
+export function stripAnsi(text = '') {
+ return `${text}`.replace(ANSI_RE, '');
+}
+
+export function extractVitestTestCount(output = '') {
+ const normalized = stripAnsi(output);
+ const match = normalized.match(/Tests\s+(\d+)\s+passed/iu);
+ return match ? Number.parseInt(match[1], 10) : null;
+}
+
+export function renderMarkdownSummary({ version, results, totalTests }) {
+ const lines = [
+ '## Release Verification Summary',
+ '',
+ `- Version: \`${version}\``,
+ `- Steps passed: ${results.filter((result) => result.passed).length}/${results.length}`,
+ `- Total tests observed: ${totalTests}`,
+ '',
+ '| Step | Status | Tests |',
+ '| --- | --- | ---: |',
+ ];
+
+ for (const result of results) {
+ lines.push(`| ${result.label} | ${result.passed ? 'PASS' : 'FAIL'} | ${result.tests ?? '—'} |`);
+ }
+
+ return `${lines.join('\n')}\n`;
+}
+
+/**
+ * Sum every observed test count across all executed steps.
+ *
+ * @param {Array<{ tests: number | null | undefined }>} results
+ * @returns {number}
+ */
+function totalObservedTests(results) {
+ return results.reduce((sum, item) => sum + (item.tests ?? 0), 0);
+}
+
+/**
+ * Normalize a runner outcome into the release-step shape used by summaries.
+ *
+ * @param {typeof RELEASE_STEPS[number]} step
+ * @param {{ code?: number | null, signal?: NodeJS.Signals | null, stdout?: string, stderr?: string, errorMessage?: string | null }} outcome
+ * @returns {{ id: string, label: string, command: string, args: string[], testCount?: boolean, code: number | null | undefined, signal: NodeJS.Signals | null, passed: boolean, tests: number | null, errorMessage: string | null }}
+ */
+function buildStepResult(step, outcome) {
+ const combinedOutput = `${outcome.stdout ?? ''}${outcome.stderr ?? ''}`;
+ const signal = outcome.signal ?? null;
+ return {
+ ...step,
+ code: outcome.code,
+ signal,
+ passed: outcome.code === 0 && signal === null,
+ tests: step.testCount ? extractVitestTestCount(combinedOutput) : null,
+ errorMessage: outcome.errorMessage ?? null,
+ };
+}
+
+/**
+ * Execute a single step with live stdout/stderr passthrough.
+ *
+ * @param {typeof RELEASE_STEPS[number]} step
+ * @param {{ cwd?: string }} [options]
+ * @returns {Promise<{ code: number, signal: NodeJS.Signals | null, stdout: string, stderr: string }>}
+ */
+export async function defaultRunner(step, { cwd = DEFAULT_CWD } = {}) {
+ return new Promise((resolve, reject) => {
+ const child = spawn(step.command, step.args, {
+ cwd,
+ env: process.env,
+ stdio: ['inherit', 'pipe', 'pipe'],
+ });
+
+ let stdout = '';
+ let stderr = '';
+
+ child.stdout?.on('data', (chunk) => {
+ stdout += chunk.toString();
+ process.stdout.write(chunk);
+ });
+
+ child.stderr?.on('data', (chunk) => {
+ stderr += chunk.toString();
+ process.stderr.write(chunk);
+ });
+
+ child.on('error', reject);
+ child.on('close', (code, signal) => {
+ resolve({
+ code: code ?? 1,
+ signal: signal ?? null,
+ stdout,
+ stderr,
+ });
+ });
+ });
+}
+
+/**
+ * Read the package version from the repository root.
+ *
+ * @param {string} cwd
+ * @returns {string}
+ */
+function readVersion(cwd) {
+ const packageJson = JSON.parse(readFileSync(path.join(cwd, 'package.json'), 'utf8'));
+ return packageJson.version;
+}
+
+/**
+ * Print the heading for a release step.
+ *
+ * @param {typeof RELEASE_STEPS[number]} step
+ * @param {{ line: (text?: string) => void }} logger
+ * @returns {void}
+ */
+function printStepBanner(step, logger) {
+ logger.line(`\n==> ${step.label}`);
+ logger.line(`$ ${step.command} ${step.args.join(' ')}`);
+}
+
+/**
+ * Execute the full release checklist and return a Markdown summary.
+ *
+ * @param {{ cwd?: string, runner?: typeof defaultRunner, logger?: { line: (text?: string) => void } }} [options]
+ * @returns {Promise<{ version: string, results: Array<object>, totalTests: number, summary: string }>}
+ */
+export async function runReleaseVerify({
+ cwd = DEFAULT_CWD,
+ runner = defaultRunner,
+ logger = DEFAULT_LOGGER,
+} = {}) {
+ const version = readVersion(cwd);
+ const results = [];
+
+ for (const step of RELEASE_STEPS) {
+ printStepBanner(step, logger);
+ let outcome;
+
+ try {
+ outcome = await runner(step, { cwd });
+ } catch (error) {
+ const message = error instanceof Error ? error.message : String(error);
+ outcome = {
+ code: 1,
+ signal: null,
+ stdout: '',
+ stderr: `${message}\n`,
+ errorMessage: message,
+ };
+ }
+
+ const result = buildStepResult(step, outcome);
+ results.push(result);
+
+ if (!result.passed) {
+ const totalTests = totalObservedTests(results);
+ const summary = renderMarkdownSummary({ version, results, totalTests });
+ throw new ReleaseVerifyError(`Release verification failed at ${step.label}`, {
+ step: result,
+ results,
+ summary,
+ });
+ }
+ }
+
+ const totalTests = totalObservedTests(results);
+ return {
+ version,
+ results,
+ totalTests,
+ summary: renderMarkdownSummary({ version, results, totalTests }),
+ };
+}
+
+/**
+ * CLI entry point for `pnpm release:verify`.
+ *
+ * @returns {Promise<void>}
+ */
+async function main() {
+ try {
+ const report = await runReleaseVerify();
+ process.stdout.write(`\n${report.summary}`);
+ } catch (error) {
+ if (error instanceof ReleaseVerifyError) {
+ process.stderr.write(`\n${error.summary}`);
+ }
+ process.stderr.write(`${error instanceof Error ? error.message : String(error)}\n`);
+ process.exitCode = 1;
+ }
+}
+
+if (process.argv[1] && path.resolve(process.argv[1]) === fileURLToPath(import.meta.url)) {
+ await main();
+}
diff --git a/src/infrastructure/adapters/GitPersistenceAdapter.js b/src/infrastructure/adapters/GitPersistenceAdapter.js
index 797be53..a2c348e 100644
--- a/src/infrastructure/adapters/GitPersistenceAdapter.js
+++ b/src/infrastructure/adapters/GitPersistenceAdapter.js
@@ -1,4 +1,7 @@
import { Policy } from '@git-stunts/alfred';
+import { mkdtemp, rm, writeFile } from 'node:fs/promises';
+import os from 'node:os';
+import path from 'node:path';
import GitPersistencePort from '../../ports/GitPersistencePort.js';
import CasError from '../../domain/errors/CasError.js';
@@ -36,12 +39,14 @@ export default class GitPersistenceAdapter extends GitPersistencePort {
* @returns {Promise} The Git OID of the stored blob.
*/
async writeBlob(content) {
- return this.policy.execute(() =>
- this.plumbing.execute({
- args: ['hash-object', '-w', '--stdin'],
- input: content,
- }),
- );
+ return this.policy.execute(() => (
+ typeof globalThis.Bun !== 'undefined'
+ ? this.#writeBlobFromTempFile(content)
+ : this.plumbing.execute({
+ args: ['hash-object', '-w', '--stdin'],
+ input: content,
+ })
+ ));
}
/**
@@ -116,4 +121,26 @@ export default class GitPersistenceAdapter extends GitPersistencePort {
});
});
}
+
+ /**
+ * Bun can surface unhandled EPIPE writes when large buffers are fed through
+ * `git hash-object --stdin`. Write the blob to a temp file and hash the file
+ * directly instead. `--no-filters` preserves raw bytes.
+ *
+ * @param {Buffer|string} content
+ * @returns {Promise}
+ */
+ async #writeBlobFromTempFile(content) {
+ const tempDir = await mkdtemp(path.join(os.tmpdir(), 'git-cas-blob-'));
+ const tempPath = path.join(tempDir, 'blob.bin');
+
+ try {
+ await writeFile(tempPath, content);
+ return await this.plumbing.execute({
+ args: ['hash-object', '-w', '--no-filters', tempPath],
+ });
+ } finally {
+ await rm(tempDir, { recursive: true, force: true });
+ }
+ }
}
diff --git a/src/infrastructure/createGitPlumbing.js b/src/infrastructure/createGitPlumbing.js
new file mode 100644
index 0000000..9029b30
--- /dev/null
+++ b/src/infrastructure/createGitPlumbing.js
@@ -0,0 +1,44 @@
+/**
+ * @fileoverview Shared GitPlumbing factory helpers for runtime-aware CLI and
+ * test wiring.
+ */
+
+import GitPlumbing, { ShellRunnerFactory } from '@git-stunts/plumbing';
+
+/**
+ * Resolve the shell-runner environment override for the current runtime.
+ *
+ * Bun uses the Node-backed runner path because the native Bun subprocess path
+ * is more prone to `git` I/O edge cases in this repository.
+ *
+ * @returns {string | undefined}
+ */
+export function resolveGitRunnerEnv() {
+ return typeof globalThis.Bun !== 'undefined' ? ShellRunnerFactory.ENV_NODE : undefined;
+}
+
+/**
+ * Create a shell runner with the runtime-appropriate environment override.
+ *
+ * @param {{ env?: string }} [options]
+ * @returns {ReturnType}
+ */
+export function createGitRunner({ env } = {}) {
+ const runnerEnv = env ?? resolveGitRunnerEnv();
+ return runnerEnv
+ ? ShellRunnerFactory.create({ env: runnerEnv })
+ : ShellRunnerFactory.create();
+}
+
+/**
+ * Construct a GitPlumbing instance for the requested working tree.
+ *
+ * @param {{ cwd?: string, env?: string }} [options]
+ * @returns {GitPlumbing}
+ */
+export function createGitPlumbing({ cwd = '.', env } = {}) {
+ return new GitPlumbing({
+ runner: createGitRunner({ env }),
+ cwd,
+ });
+}
diff --git a/test/CONVENTIONS.md b/test/CONVENTIONS.md
index f723294..e9efb59 100644
--- a/test/CONVENTIONS.md
+++ b/test/CONVENTIONS.md
@@ -55,3 +55,40 @@ bits are a Unix concept — `chmod` is a no-op on Windows.
- **No global state patching when injection is available.** If you control
the code under test, add a parameter. Only patch globals for third-party
code you cannot modify.
+
+## Git Tree Assertions
+
+**Git tree reads are filename-sorted.** Git returns tree entries in name
+order, not in the original write order. Integration tests that round-trip
+through `readTree()` must therefore assert membership, uniqueness, and Git's
+sorted output semantics instead of assuming first-seen insertion order.
+
+If insertion order matters, assert it at the lower-level boundary that builds
+the tree entries before they are handed to Git.
+
+## Integration Runtime Policy
+
+**Integration suites are Docker-only.** The integration tests intentionally
+refuse to run on the host and require `GIT_STUNTS_DOCKER=1` so Git, Bun, and
+Deno run in a consistent environment.
+
+**Integration files run with `fileParallelism: false`.** These tests spawn real
+Git and CLI subprocesses, so the integration workspace is intentionally kept to
+single-file execution. Do not re-enable file-level parallelism unless the
+subprocess model changes and Bun/Deno are re-validated.
+
+## Subprocess Helpers
+
+**Use direct argv execution, never shell-wrapped commands.** CLI and Git
+integration helpers must call `spawnSync()` / `spawn()` with an explicit binary
+and argv array. Avoid `/bin/sh -c`, command-string helpers, or concatenated
+shell fragments because they introduce quoting drift and runtime-specific I/O
+differences.
+
+```js
+// preferred
+spawnSync('git', ['init', '--bare'], { cwd, encoding: 'utf8' });
+
+// avoid
+spawnSync('/bin/sh', ['-c', 'git init --bare'], { cwd, encoding: 'utf8' });
+```
diff --git a/test/helpers/property.js b/test/helpers/property.js
new file mode 100644
index 0000000..5e6d7a4
--- /dev/null
+++ b/test/helpers/property.js
@@ -0,0 +1,40 @@
+import fc from 'fast-check';
+
+export const PROPERTY_SEED = 5333;
+export const PROPERTY_RUNS = 25;
+export const PROPERTY_TIMEOUT_MS = 30_000;
+export const PROPERTY_CONFIG = {
+ seed: PROPERTY_SEED,
+ numRuns: PROPERTY_RUNS,
+};
+
+const ENVELOPE_SIZES = [
+ 0,
+ 1,
+ 1023,
+ 1024,
+ 1025,
+ 2047,
+ 2048,
+ 2049,
+ 3072,
+ 4096,
+];
+
+const recipientKeyHexArbitrary = fc
+ .uint8Array({ minLength: 32, maxLength: 32 })
+ .map((bytes) => Buffer.from(bytes).toString('hex'));
+
+export const envelopePayloadArbitrary = fc
+ .constantFrom(...ENVELOPE_SIZES)
+ .chain((size) => fc.uint8Array({ minLength: size, maxLength: size }))
+ .map((bytes) => Buffer.from(bytes));
+
+export const envelopeRecipientsArbitrary = fc
+ .uniqueArray(recipientKeyHexArbitrary, { minLength: 1, maxLength: 4 })
+ .map((hexKeys) => hexKeys.map((hex, index) => ({
+ label: `recipient-${index}`,
+ key: Buffer.from(hex, 'hex'),
+ })));
+
+export const envelopeTamperFieldArbitrary = fc.constantFrom('wrappedDek', 'nonce', 'tag');
diff --git a/test/integration/round-trip.test.js b/test/integration/round-trip.test.js
index e2eeee0..5836a71 100644
--- a/test/integration/round-trip.test.js
+++ b/test/integration/round-trip.test.js
@@ -13,11 +13,11 @@ import { randomBytes } from 'node:crypto';
import { spawnSync } from 'node:child_process';
import path from 'node:path';
import os from 'node:os';
-import GitPlumbing from '@git-stunts/plumbing';
import ContentAddressableStore from '../../index.js';
import CborCodec from '../../src/infrastructure/codecs/CborCodec.js';
import Manifest from '../../src/domain/value-objects/Manifest.js';
import CasError from '../../src/domain/errors/CasError.js';
+import { createGitPlumbing } from '../../src/infrastructure/createGitPlumbing.js';
// Hard gate: refuse to run outside Docker
if (process.env.GIT_STUNTS_DOCKER !== '1') {
@@ -45,7 +45,7 @@ beforeAll(() => {
repoDir = mkdtempSync(path.join(os.tmpdir(), 'cas-integ-'));
initBareRepo(repoDir);
- const plumbing = GitPlumbing.createDefault({ cwd: repoDir });
+ const plumbing = createGitPlumbing({ cwd: repoDir });
cas = new ContentAddressableStore({ plumbing });
casCbor = new ContentAddressableStore({ plumbing, codec: new CborCodec() });
});
@@ -320,7 +320,7 @@ describe('repeated chunks — v1 tree emission dedupe + fsck regression', () =>
const original = Buffer.concat([repeatedChunk, uniqueChunk, repeatedChunk, repeatedChunk]);
const { filePath, dir } = tempFile(original);
const repeatedCas = new ContentAddressableStore({
- plumbing: GitPlumbing.createDefault({ cwd: repoDir }),
+ plumbing: createGitPlumbing({ cwd: repoDir }),
chunkSize: 1024,
merkleThreshold: 10,
});
@@ -365,7 +365,7 @@ describe('repeated chunks — Merkle tree emission dedupe + fsck regression', ()
const original = Buffer.concat([chunkA, chunkB, chunkA, chunkC, chunkA]);
const { filePath, dir } = tempFile(original);
const repeatedCas = new ContentAddressableStore({
- plumbing: GitPlumbing.createDefault({ cwd: repoDir }),
+ plumbing: createGitPlumbing({ cwd: repoDir }),
chunkSize: 1024,
merkleThreshold: 2,
});
diff --git a/test/integration/vault-cli.test.js b/test/integration/vault-cli.test.js
index b002ea0..7c58ab5 100644
--- a/test/integration/vault-cli.test.js
+++ b/test/integration/vault-cli.test.js
@@ -15,8 +15,8 @@ import { spawnSync } from 'node:child_process';
import path from 'node:path';
import os from 'node:os';
import { fileURLToPath } from 'node:url';
-import GitPlumbing from '@git-stunts/plumbing';
import ContentAddressableStore from '../../index.js';
+import { createGitPlumbing } from '../../src/infrastructure/createGitPlumbing.js';
// Hard gate: refuse to run outside Docker
if (process.env.GIT_STUNTS_DOCKER !== '1') {
@@ -320,7 +320,7 @@ describe('vault CLI — restore --oid with Merkle manifest', () => {
initBareRepo(merkleRepoDir);
({ filePath: merkleInputFile, dir: merkleInputDir } = tempFile(merkleOriginal));
- const plumbing = GitPlumbing.createDefault({ cwd: merkleRepoDir });
+ const plumbing = createGitPlumbing({ cwd: merkleRepoDir });
const cas = new ContentAddressableStore({
plumbing,
chunkSize: 1024,
diff --git a/test/integration/vault.test.js b/test/integration/vault.test.js
index a582d04..9be4232 100644
--- a/test/integration/vault.test.js
+++ b/test/integration/vault.test.js
@@ -13,10 +13,10 @@ import { randomBytes } from 'node:crypto';
import { spawnSync } from 'node:child_process';
import path from 'node:path';
import os from 'node:os';
-import GitPlumbing from '@git-stunts/plumbing';
import ContentAddressableStore from '../../index.js';
import VaultService from '../../src/domain/services/VaultService.js';
import CasError from '../../src/domain/errors/CasError.js';
+import { createGitPlumbing } from '../../src/infrastructure/createGitPlumbing.js';
// Hard gate: refuse to run outside Docker
if (process.env.GIT_STUNTS_DOCKER !== '1') {
@@ -43,7 +43,7 @@ beforeAll(() => {
repoDir = mkdtempSync(path.join(os.tmpdir(), 'cas-vault-integ-'));
initBareRepo(repoDir);
- const plumbing = GitPlumbing.createDefault({ cwd: repoDir });
+ const plumbing = createGitPlumbing({ cwd: repoDir });
cas = new ContentAddressableStore({ plumbing });
});
@@ -198,7 +198,7 @@ describe('encrypted vault', () => {
beforeAll(() => {
encRepoDir = mkdtempSync(path.join(os.tmpdir(), 'cas-vault-enc-integ-'));
initBareRepo(encRepoDir);
- const plumbing = GitPlumbing.createDefault({ cwd: encRepoDir });
+ const plumbing = createGitPlumbing({ cwd: encRepoDir });
encCas = new ContentAddressableStore({ plumbing });
});
diff --git a/test/unit/domain/services/CasService.envelope.property.test.js b/test/unit/domain/services/CasService.envelope.property.test.js
new file mode 100644
index 0000000..88926e5
--- /dev/null
+++ b/test/unit/domain/services/CasService.envelope.property.test.js
@@ -0,0 +1,132 @@
+import { describe, it, expect } from 'vitest';
+import fc from 'fast-check';
+import CasService from '../../../../src/domain/services/CasService.js';
+import Manifest from '../../../../src/domain/value-objects/Manifest.js';
+import CasError from '../../../../src/domain/errors/CasError.js';
+import JsonCodec from '../../../../src/infrastructure/codecs/JsonCodec.js';
+import SilentObserver from '../../../../src/infrastructure/adapters/SilentObserver.js';
+import { getTestCryptoAdapter } from '../../../helpers/crypto-adapter.js';
+import {
+ PROPERTY_CONFIG,
+ PROPERTY_TIMEOUT_MS,
+ envelopePayloadArbitrary,
+ envelopeRecipientsArbitrary,
+ envelopeTamperFieldArbitrary,
+} from '../../../helpers/property.js';
+
+const testCrypto = await getTestCryptoAdapter();
+
+function createService() {
+ const blobStore = new Map();
+ const crypto = testCrypto;
+
+ const persistence = {
+ writeBlob: async (content) => {
+ const buffer = Buffer.isBuffer(content) ? content : Buffer.from(content);
+ const oid = await crypto.sha256(buffer);
+ blobStore.set(oid, buffer);
+ return oid;
+ },
+ writeTree: async () => 'mock-tree-oid',
+ readBlob: async (oid) => {
+ const buffer = blobStore.get(oid);
+ if (!buffer) { throw new Error(`Blob not found: ${oid}`); }
+ return buffer;
+ },
+ };
+
+ return new CasService({
+ persistence,
+ crypto,
+ codec: new JsonCodec(),
+ chunkSize: 1024,
+ observability: new SilentObserver(),
+ });
+}
+
+async function* bufferSource(buffer) {
+ yield buffer;
+}
+
+function tamperRecipientField(manifest, field) {
+ const json = JSON.parse(JSON.stringify(manifest.toJSON()));
+ const encoded = Buffer.from(json.encryption.recipients[0][field], 'base64');
+ encoded[0] ^= 0xff;
+ json.encryption.recipients[0][field] = encoded.toString('base64');
+ return new Manifest(json);
+}
+
+function recipientLabels(recipients) {
+ return recipients.map((recipient) => recipient.label);
+}
+
+function isNoMatchingRecipient(error) {
+ return error instanceof CasError && error.code === 'NO_MATCHING_RECIPIENT';
+}
+
+async function assertEnvelopeRoundTrip(original, recipients) {
+ const service = createService();
+ const manifest = await service.store({
+ source: bufferSource(original),
+ slug: `property-${original.length}-${recipients.length}`,
+ filename: 'property.bin',
+ recipients,
+ });
+
+ expect(manifest.encryption.recipients).toHaveLength(recipients.length);
+ expect(recipientLabels(manifest.encryption.recipients)).toEqual(recipientLabels(recipients));
+
+ for (const recipient of recipients) {
+ const { buffer } = await service.restore({
+ manifest,
+ encryptionKey: recipient.key,
+ });
+ expect(buffer.equals(original)).toBe(true);
+ }
+}
+
+async function assertEnvelopeTamperFailure(original, recipients, field) {
+ const service = createService();
+ const manifest = await service.store({
+ source: bufferSource(original),
+ slug: `tamper-${field}-${original.length}`,
+ filename: 'tamper.bin',
+ recipients,
+ });
+
+ const tamperedManifest = tamperRecipientField(manifest, field);
+
+ await expect(
+ service.restore({
+ manifest: tamperedManifest,
+ encryptionKey: recipients[0].key,
+ }),
+ ).rejects.toSatisfy(isNoMatchingRecipient);
+}
+
+function createEnvelopeRoundTripProperty() {
+ return fc.asyncProperty(
+ envelopePayloadArbitrary,
+ envelopeRecipientsArbitrary,
+ assertEnvelopeRoundTrip,
+ );
+}
+
+function createEnvelopeTamperProperty() {
+ return fc.asyncProperty(
+ envelopePayloadArbitrary,
+ envelopeRecipientsArbitrary,
+ envelopeTamperFieldArbitrary,
+ assertEnvelopeTamperFailure,
+ );
+}
+
+describe('CasService – envelope encryption (property)', () => {
+ it('round-trips generated payloads for every valid recipient', async () => {
+ await fc.assert(createEnvelopeRoundTripProperty(), PROPERTY_CONFIG);
+ }, PROPERTY_TIMEOUT_MS);
+
+ it('fails to restore after recipient metadata tampering', async () => {
+ await fc.assert(createEnvelopeTamperProperty(), PROPERTY_CONFIG);
+ }, PROPERTY_TIMEOUT_MS);
+});
diff --git a/test/unit/domain/services/rotateVaultPassphrase.test.js b/test/unit/domain/services/rotateVaultPassphrase.test.js
index f5c1fc2..41c4073 100644
--- a/test/unit/domain/services/rotateVaultPassphrase.test.js
+++ b/test/unit/domain/services/rotateVaultPassphrase.test.js
@@ -4,13 +4,13 @@ import { randomBytes } from 'node:crypto';
import path from 'node:path';
import os from 'node:os';
import { execSync } from 'node:child_process';
-import GitPlumbing from '@git-stunts/plumbing';
import CasService from '../../../../src/domain/services/CasService.js';
import VaultService from '../../../../src/domain/services/VaultService.js';
import GitPersistenceAdapter from '../../../../src/infrastructure/adapters/GitPersistenceAdapter.js';
import GitRefAdapter from '../../../../src/infrastructure/adapters/GitRefAdapter.js';
import JsonCodec from '../../../../src/infrastructure/codecs/JsonCodec.js';
import SilentObserver from '../../../../src/infrastructure/adapters/SilentObserver.js';
+import { createGitPlumbing } from '../../../../src/infrastructure/createGitPlumbing.js';
import { getTestCryptoAdapter } from '../../../helpers/crypto-adapter.js';
import rotateVaultPassphrase from '../../../../src/domain/services/rotateVaultPassphrase.js';
import CasError from '../../../../src/domain/errors/CasError.js';
@@ -29,7 +29,7 @@ function createRepo() {
}
async function createDeps(repoDir) {
- const plumbing = GitPlumbing.createDefault({ cwd: repoDir });
+ const plumbing = createGitPlumbing({ cwd: repoDir });
const crypto = await getTestCryptoAdapter();
const persistence = new GitPersistenceAdapter({ plumbing });
const ref = new GitRefAdapter({ plumbing });
diff --git a/test/unit/facade/ContentAddressableStore.rotation.test.js b/test/unit/facade/ContentAddressableStore.rotation.test.js
index ca1beb1..80af59f 100644
--- a/test/unit/facade/ContentAddressableStore.rotation.test.js
+++ b/test/unit/facade/ContentAddressableStore.rotation.test.js
@@ -4,8 +4,8 @@ import { randomBytes } from 'node:crypto';
import path from 'node:path';
import os from 'node:os';
import { execSync } from 'node:child_process';
-import GitPlumbing from '@git-stunts/plumbing';
import ContentAddressableStore from '../../../index.js';
+import { createGitPlumbing } from '../../../src/infrastructure/createGitPlumbing.js';
const LONG_TEST_TIMEOUT_MS = 15000;
@@ -21,7 +21,7 @@ function createRepo() {
}
function createCas(repoDir) {
- const plumbing = GitPlumbing.createDefault({ cwd: repoDir });
+ const plumbing = createGitPlumbing({ cwd: repoDir });
return new ContentAddressableStore({ plumbing, chunkSize: 1024 });
}
diff --git a/test/unit/infrastructure/adapters/GitPersistenceAdapter.writeBlob.test.js b/test/unit/infrastructure/adapters/GitPersistenceAdapter.writeBlob.test.js
new file mode 100644
index 0000000..90d5988
--- /dev/null
+++ b/test/unit/infrastructure/adapters/GitPersistenceAdapter.writeBlob.test.js
@@ -0,0 +1,42 @@
+import { existsSync } from 'node:fs';
+import { describe, it, expect, vi } from 'vitest';
+import GitPersistenceAdapter from '../../../../src/infrastructure/adapters/GitPersistenceAdapter.js';
+
+const noPolicy = { execute: (fn) => fn() };
+const itOnBun = typeof globalThis.Bun !== 'undefined' ? it : it.skip;
+const itOffBun = typeof globalThis.Bun === 'undefined' ? it : it.skip;
+
+function createAdapter(plumbing) {
+ return new GitPersistenceAdapter({ plumbing, policy: noPolicy });
+}
+
+describe('GitPersistenceAdapter.writeBlob()', () => {
+ itOffBun('streams blob content over stdin outside Bun', async () => {
+ const content = Buffer.from('blob-data');
+ const plumbing = { execute: vi.fn().mockResolvedValue('blob-oid') };
+ const adapter = createAdapter(plumbing);
+
+ await expect(adapter.writeBlob(content)).resolves.toBe('blob-oid');
+ expect(plumbing.execute).toHaveBeenCalledWith({
+ args: ['hash-object', '-w', '--stdin'],
+ input: content,
+ });
+ });
+
+ itOnBun('writes blob content to a temp file before hashing', async () => {
+ let tempPath;
+ const plumbing = {
+ execute: vi.fn(async ({ args }) => {
+ tempPath = args.at(-1);
+ expect(args.slice(0, 3)).toEqual(['hash-object', '-w', '--no-filters']);
+ expect(existsSync(tempPath)).toBe(true);
+ return 'blob-oid';
+ }),
+ };
+ const adapter = createAdapter(plumbing);
+
+ await expect(adapter.writeBlob(Buffer.from('blob-data'))).resolves.toBe('blob-oid');
+ expect(plumbing.execute).toHaveBeenCalledTimes(1);
+ expect(existsSync(tempPath)).toBe(false);
+ });
+});
diff --git a/test/unit/infrastructure/createGitPlumbing.test.js b/test/unit/infrastructure/createGitPlumbing.test.js
new file mode 100644
index 0000000..85f1df7
--- /dev/null
+++ b/test/unit/infrastructure/createGitPlumbing.test.js
@@ -0,0 +1,15 @@
+import { describe, it, expect } from 'vitest';
+import { createGitPlumbing, resolveGitRunnerEnv } from '../../../src/infrastructure/createGitPlumbing.js';
+
+describe('createGitPlumbing helpers', () => {
+ it('uses the node runner path under Bun and native defaults elsewhere', () => {
+ const expected = typeof globalThis.Bun !== 'undefined' ? 'node' : undefined;
+ expect(resolveGitRunnerEnv()).toBe(expected);
+ });
+
+ it('creates a plumbing instance for the requested cwd', () => {
+ const plumbing = createGitPlumbing({ cwd: process.cwd() });
+ expect(plumbing).toBeDefined();
+ expect(typeof plumbing.execute).toBe('function');
+ });
+});
diff --git a/test/unit/scripts/release-verify.test.js b/test/unit/scripts/release-verify.test.js
new file mode 100644
index 0000000..bd6749f
--- /dev/null
+++ b/test/unit/scripts/release-verify.test.js
@@ -0,0 +1,156 @@
+import { describe, it, expect, vi } from 'vitest';
+import {
+ RELEASE_STEPS,
+ ReleaseVerifyError,
+ extractVitestTestCount,
+ renderMarkdownSummary,
+ runReleaseVerify,
+} from '../../../scripts/release/verify.js';
+
+const QUIET_LOGGER = { line() {} };
+
+/**
+ * Create a release-step runner whose every step succeeds.
+ *
+ * @param {number} [testCount] - Test count reported in each counted step's stdout.
+ * @returns {ReturnType<typeof vi.fn>} Mock runner resolving successful outcomes.
+ */
+function makeSuccessRunner(testCount = 5) {
+ return vi.fn(async (step) => ({
+ code: 0,
+ signal: null,
+ stdout: step.testCount ? `Tests ${testCount} passed (${testCount})` : '',
+ stderr: '',
+ }));
+}
+
+/**
+ * Create a runner that returns a non-zero outcome for a specific step.
+ *
+ * @param {string} failId - Id of the step that should fail.
+ * @param {number} [testCount] - Test count reported in each counted step's stdout.
+ * @returns {ReturnType<typeof vi.fn>} Mock runner failing only the given step.
+ */
+function makeFailingRunner(failId, testCount = 5) {
+ return vi.fn(async (step) => {
+ if (step.id === failId) {
+ return {
+ code: 1,
+ signal: null,
+ stdout: `Tests ${testCount} passed (${testCount})`,
+ stderr: 'boom',
+ };
+ }
+
+ return {
+ code: 0,
+ signal: null,
+ stdout: step.testCount ? `Tests ${testCount} passed (${testCount})` : '',
+ stderr: '',
+ };
+ });
+}
+
+/**
+ * Create a runner that throws for a specific step.
+ *
+ * @param {string} failId - Id of the step whose execution should throw.
+ * @returns {ReturnType<typeof vi.fn>} Mock runner throwing only for the given step.
+ */
+function makeThrowingRunner(failId) {
+ return vi.fn(async (step) => {
+ if (step.id === failId) {
+ throw new Error('runner exploded');
+ }
+
+ return {
+ code: 0,
+ signal: null,
+ stdout: step.testCount ? 'Tests 5 passed (5)' : '',
+ stderr: '',
+ };
+ });
+}
+
+/**
+ * Create a runner that omits signal values from successful outcomes.
+ *
+ * @returns {ReturnType<typeof vi.fn>} Mock runner whose outcomes lack a signal field.
+ */
+function makeUndefinedSignalRunner() {
+ return vi.fn(async (step) => ({
+ code: 0,
+ stdout: step.testCount ? 'Tests 5 passed (5)' : '',
+ stderr: '',
+ }));
+}
+
+describe('release verify helpers', () => {
+ it('parses Vitest test counts from ANSI-colored output', () => {
+ const output = '\u001b[32mTests\u001b[39m 147 passed (147)';
+ expect(extractVitestTestCount(output)).toBe(147);
+ });
+
+ it('renders a markdown summary with total test counts', () => {
+ const summary = renderMarkdownSummary({
+ version: '5.3.3',
+ totalTests: 12,
+ results: [
+ { label: 'Lint', passed: true, tests: null },
+ { label: 'Unit Tests (Node)', passed: true, tests: 12 },
+ ],
+ });
+
+ expect(summary).toContain('## Release Verification Summary');
+ expect(summary).toContain('- Version: `5.3.3`');
+ expect(summary).toContain('- Total tests observed: 12');
+ expect(summary).toContain('| Unit Tests (Node) | PASS | 12 |');
+ });
+});
+
+describe('release verify execution', () => {
+ it('runs the release steps in order and aggregates test counts', async () => {
+ const runner = makeSuccessRunner();
+
+ const report = await runReleaseVerify({ runner, logger: QUIET_LOGGER });
+
+ expect(runner).toHaveBeenCalledTimes(RELEASE_STEPS.length);
+ expect(report.totalTests).toBe(30);
+ expect(report.results.every((result) => result.passed)).toBe(true);
+ });
+
+ it('treats missing signal values as a successful exit', async () => {
+ const runner = makeUndefinedSignalRunner();
+
+ const report = await runReleaseVerify({ runner, logger: QUIET_LOGGER });
+
+ expect(report.results.every((result) => result.passed)).toBe(true);
+ expect(report.results.every((result) => result.signal === null)).toBe(true);
+ });
+
+ it('stops on the first failure and exposes a partial summary', async () => {
+ const runner = makeFailingRunner('unit-bun');
+ const failure = await runReleaseVerify({ runner, logger: QUIET_LOGGER }).catch((error) => error);
+
+ expect(failure).toBeInstanceOf(ReleaseVerifyError);
+ expect(failure.name).toBe('ReleaseVerifyError');
+ expect(failure.step).toMatchObject({ id: 'unit-bun', passed: false });
+ expect(failure.summary).toContain('| Unit Tests (Bun) | FAIL | 5 |');
+ expect(runner).toHaveBeenCalledTimes(3);
+ });
+
+ it('converts thrown runner errors into structured release failures', async () => {
+ const runner = makeThrowingRunner('unit-bun');
+ const failure = await runReleaseVerify({ runner, logger: QUIET_LOGGER }).catch((error) => error);
+
+ expect(failure).toBeInstanceOf(ReleaseVerifyError);
+ expect(failure.step).toMatchObject({
+ id: 'unit-bun',
+ passed: false,
+ code: 1,
+ errorMessage: 'runner exploded',
+ });
+ expect(failure.summary).toContain('| Unit Tests (Bun) | FAIL |');
+ expect(runner).toHaveBeenCalledTimes(3);
+ });
+});