Add 5 pi extensions: pi-subagents, pi-crew, rpiv-pi, pi-interactive-shell, pi-intercom
This commit is contained in:
194
extensions/rpiv-pi/extensions/rpiv-core/agents.test.ts
Normal file
194
extensions/rpiv-pi/extensions/rpiv-core/agents.test.ts
Normal file
@@ -0,0 +1,194 @@
|
||||
import {
|
||||
chmodSync,
|
||||
existsSync,
|
||||
mkdirSync,
|
||||
mkdtempSync,
|
||||
readdirSync,
|
||||
readFileSync,
|
||||
rmSync,
|
||||
writeFileSync,
|
||||
} from "node:fs";
|
||||
import { tmpdir } from "node:os";
|
||||
import { join } from "node:path";
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import { BUNDLED_AGENTS_DIR, syncBundledAgents } from "./agents.js";
|
||||
|
||||
let cwd: string;
|
||||
let targetDir: string;
|
||||
let manifestPath: string;
|
||||
|
||||
beforeEach(() => {
|
||||
cwd = mkdtempSync(join(tmpdir(), "rpiv-agents-"));
|
||||
targetDir = join(cwd, ".pi", "agents");
|
||||
manifestPath = join(targetDir, ".rpiv-managed.json");
|
||||
});
|
||||
afterEach(() => {
|
||||
rmSync(cwd, { recursive: true, force: true });
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
|
||||
describe("syncBundledAgents — first run (empty target)", () => {
|
||||
it("copies every source .md and writes manifest", () => {
|
||||
const r = syncBundledAgents(cwd, false);
|
||||
const bundled = readdirSync(BUNDLED_AGENTS_DIR).filter((f) => f.endsWith(".md"));
|
||||
expect(r.added.sort()).toEqual(bundled.sort());
|
||||
expect(r.updated).toEqual([]);
|
||||
expect(r.errors).toEqual([]);
|
||||
expect(existsSync(manifestPath)).toBe(true);
|
||||
const manifest = JSON.parse(readFileSync(manifestPath, "utf-8"));
|
||||
expect(manifest.sort()).toEqual(bundled.sort());
|
||||
});
|
||||
});
|
||||
|
||||
describe("syncBundledAgents — bootstrap-claim from manifest-less drift", () => {
|
||||
it("claims pre-existing files matching bundled names as managed", () => {
|
||||
const bundled = readdirSync(BUNDLED_AGENTS_DIR).filter((f) => f.endsWith(".md"));
|
||||
if (bundled.length === 0) return;
|
||||
mkdirSync(targetDir, { recursive: true });
|
||||
writeFileSync(join(targetDir, bundled[0]), "drift content", "utf-8");
|
||||
const r = syncBundledAgents(cwd, false);
|
||||
expect(r.pendingUpdate).toContain(bundled[0]);
|
||||
expect(readFileSync(join(targetDir, bundled[0]), "utf-8")).toBe("drift content");
|
||||
});
|
||||
});
|
||||
|
||||
describe("syncBundledAgents — apply=false (detect only)", () => {
|
||||
it("reports pendingUpdate for changed managed files without touching them", () => {
|
||||
const bundled = readdirSync(BUNDLED_AGENTS_DIR).filter((f) => f.endsWith(".md"));
|
||||
if (bundled.length === 0) return;
|
||||
syncBundledAgents(cwd, true);
|
||||
writeFileSync(join(targetDir, bundled[0]), "user-modified", "utf-8");
|
||||
const r = syncBundledAgents(cwd, false);
|
||||
expect(r.pendingUpdate).toContain(bundled[0]);
|
||||
expect(readFileSync(join(targetDir, bundled[0]), "utf-8")).toBe("user-modified");
|
||||
});
|
||||
});
|
||||
|
||||
describe("syncBundledAgents — apply=true (mutating sync)", () => {
|
||||
it("overwrites changed managed files", () => {
|
||||
const bundled = readdirSync(BUNDLED_AGENTS_DIR).filter((f) => f.endsWith(".md"));
|
||||
if (bundled.length === 0) return;
|
||||
syncBundledAgents(cwd, true);
|
||||
writeFileSync(join(targetDir, bundled[0]), "user-modified", "utf-8");
|
||||
const r = syncBundledAgents(cwd, true);
|
||||
expect(r.updated).toContain(bundled[0]);
|
||||
const srcContent = readFileSync(join(BUNDLED_AGENTS_DIR, bundled[0]), "utf-8");
|
||||
expect(readFileSync(join(targetDir, bundled[0]), "utf-8")).toBe(srcContent);
|
||||
});
|
||||
|
||||
it("removes stale managed files absent from source", () => {
|
||||
mkdirSync(targetDir, { recursive: true });
|
||||
writeFileSync(join(targetDir, "stale.md"), "x", "utf-8");
|
||||
writeFileSync(manifestPath, JSON.stringify(["stale.md"]), "utf-8");
|
||||
const r = syncBundledAgents(cwd, true);
|
||||
expect(r.removed).toContain("stale.md");
|
||||
expect(existsSync(join(targetDir, "stale.md"))).toBe(false);
|
||||
});
|
||||
|
||||
it("leaves unchanged managed files alone", () => {
|
||||
syncBundledAgents(cwd, true);
|
||||
const r = syncBundledAgents(cwd, true);
|
||||
expect(r.updated).toEqual([]);
|
||||
expect(r.unchanged.length).toBeGreaterThan(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe("syncBundledAgents — error paths", () => {
|
||||
it.skipIf(process.platform === "win32")("collects copy error when dest is read-only", () => {
|
||||
// Create a read-only target dir so copyFileSync fails with EACCES/EPERM
|
||||
const bundled = readdirSync(BUNDLED_AGENTS_DIR).filter((f) => f.endsWith(".md"));
|
||||
if (bundled.length === 0) return;
|
||||
mkdirSync(targetDir, { recursive: true });
|
||||
chmodSync(targetDir, 0o500);
|
||||
try {
|
||||
const r = syncBundledAgents(cwd, false);
|
||||
// At least one copy op should have failed; otherwise nothing proves the error path
|
||||
const errorTripped = r.errors.some((e) => e.op === "copy") || r.added.length < bundled.length;
|
||||
expect(errorTripped).toBe(true);
|
||||
} finally {
|
||||
chmodSync(targetDir, 0o700);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe("syncBundledAgents — stale-file detection (apply=false)", () => {
|
||||
it("reports pendingRemove when a managed file has no matching source", () => {
|
||||
mkdirSync(targetDir, { recursive: true });
|
||||
writeFileSync(join(targetDir, "stale.md"), "x", "utf-8");
|
||||
writeFileSync(manifestPath, JSON.stringify(["stale.md"]), "utf-8");
|
||||
const r = syncBundledAgents(cwd, false);
|
||||
expect(r.pendingRemove).toContain("stale.md");
|
||||
expect(r.removed).toEqual([]);
|
||||
expect(existsSync(join(targetDir, "stale.md"))).toBe(true);
|
||||
});
|
||||
|
||||
it("keeps pendingRemove entries in the manifest so the next apply can finish removal", () => {
|
||||
mkdirSync(targetDir, { recursive: true });
|
||||
writeFileSync(join(targetDir, "stale.md"), "x", "utf-8");
|
||||
writeFileSync(manifestPath, JSON.stringify(["stale.md"]), "utf-8");
|
||||
syncBundledAgents(cwd, false);
|
||||
const manifest = JSON.parse(readFileSync(manifestPath, "utf-8")) as string[];
|
||||
expect(manifest).toContain("stale.md");
|
||||
});
|
||||
|
||||
it("skips pendingRemove when the stale file no longer exists on disk", () => {
|
||||
mkdirSync(targetDir, { recursive: true });
|
||||
// Manifest claims stale.md but disk does not have it
|
||||
writeFileSync(manifestPath, JSON.stringify(["stale.md"]), "utf-8");
|
||||
const r = syncBundledAgents(cwd, false);
|
||||
expect(r.pendingRemove).not.toContain("stale.md");
|
||||
expect(r.removed).not.toContain("stale.md");
|
||||
});
|
||||
});
|
||||
|
||||
describe("syncBundledAgents — manifest robustness", () => {
|
||||
it("treats a corrupt manifest (invalid JSON) as empty and re-bootstraps", () => {
|
||||
mkdirSync(targetDir, { recursive: true });
|
||||
writeFileSync(manifestPath, "{ not json ::", "utf-8");
|
||||
const r = syncBundledAgents(cwd, false);
|
||||
expect(r.errors).toEqual([]);
|
||||
// After sync, the manifest should be valid JSON again.
|
||||
const manifest = JSON.parse(readFileSync(manifestPath, "utf-8")) as string[];
|
||||
expect(Array.isArray(manifest)).toBe(true);
|
||||
});
|
||||
|
||||
it("treats a non-array manifest as empty and re-bootstraps", () => {
|
||||
mkdirSync(targetDir, { recursive: true });
|
||||
writeFileSync(manifestPath, JSON.stringify({ oops: true }), "utf-8");
|
||||
const r = syncBundledAgents(cwd, false);
|
||||
expect(r.errors).toEqual([]);
|
||||
const manifest = JSON.parse(readFileSync(manifestPath, "utf-8")) as string[];
|
||||
expect(Array.isArray(manifest)).toBe(true);
|
||||
});
|
||||
|
||||
it("filters non-string manifest entries during parse", () => {
|
||||
mkdirSync(targetDir, { recursive: true });
|
||||
writeFileSync(join(targetDir, "unrelated.md"), "keep me", "utf-8");
|
||||
// Write manifest containing mixed types (must be ignored per-entry rather than whole-file)
|
||||
writeFileSync(manifestPath, JSON.stringify([42, null, "unrelated.md"]), "utf-8");
|
||||
const r = syncBundledAgents(cwd, false);
|
||||
expect(r.errors).toEqual([]);
|
||||
// unrelated.md is not in source, so it will be tracked for pendingRemove
|
||||
expect(r.pendingRemove).toContain("unrelated.md");
|
||||
});
|
||||
});
|
||||
|
||||
describe("syncBundledAgents — subsequent-run bookkeeping", () => {
|
||||
it("reports unchanged (not added) on a second run with no changes", () => {
|
||||
syncBundledAgents(cwd, true);
|
||||
const r = syncBundledAgents(cwd, false);
|
||||
expect(r.added).toEqual([]);
|
||||
expect(r.updated).toEqual([]);
|
||||
expect(r.pendingUpdate).toEqual([]);
|
||||
expect(r.unchanged.length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
it("treats a destination file that was manually removed as a new add on next sync", () => {
|
||||
syncBundledAgents(cwd, true);
|
||||
const bundled = readdirSync(BUNDLED_AGENTS_DIR).filter((f) => f.endsWith(".md"));
|
||||
if (bundled.length === 0) return;
|
||||
rmSync(join(targetDir, bundled[0]));
|
||||
const r = syncBundledAgents(cwd, false);
|
||||
expect(r.added).toContain(bundled[0]);
|
||||
});
|
||||
});
|
||||
268
extensions/rpiv-pi/extensions/rpiv-core/agents.ts
Normal file
268
extensions/rpiv-pi/extensions/rpiv-core/agents.ts
Normal file
@@ -0,0 +1,268 @@
|
||||
/**
|
||||
* Agent auto-copy — copies bundled agents into <cwd>/.pi/agents/.
|
||||
*
|
||||
* Pure utility. No ExtensionAPI interactions.
|
||||
*/
|
||||
|
||||
import { copyFileSync, existsSync, mkdirSync, readdirSync, readFileSync, unlinkSync, writeFileSync } from "node:fs";
|
||||
import { dirname, join } from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Package-root resolution
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Resolves the rpiv-pi package root from this module's file URL.
|
||||
* Walks up from `extensions/rpiv-core/agents.ts` to the repo root.
|
||||
*/
|
||||
export const PACKAGE_ROOT = (() => {
|
||||
const thisFile = fileURLToPath(import.meta.url);
|
||||
// extensions/rpiv-core/agents.ts -> rpiv-pi/
|
||||
return dirname(dirname(dirname(thisFile)));
|
||||
})();
|
||||
|
||||
export const BUNDLED_AGENTS_DIR = join(PACKAGE_ROOT, "agents");
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Types
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export interface SyncError {
|
||||
file?: string;
|
||||
op: "read-src" | "read-dest" | "copy" | "remove" | "manifest-read" | "manifest-write";
|
||||
message: string;
|
||||
}
|
||||
|
||||
export interface SyncResult {
|
||||
/** New files copied (present in source, absent from destination). */
|
||||
added: string[];
|
||||
/** Existing managed files overwritten with updated source content. */
|
||||
updated: string[];
|
||||
/** Managed files whose destination content matches source exactly. */
|
||||
unchanged: string[];
|
||||
/** Stale managed files removed (present in manifest but absent from source). */
|
||||
removed: string[];
|
||||
/** Managed files with different destination content (detected but not applied). */
|
||||
pendingUpdate: string[];
|
||||
/** Managed files no longer in source (detected but not removed). */
|
||||
pendingRemove: string[];
|
||||
/** Per-file errors collected during sync. */
|
||||
errors: SyncError[];
|
||||
}
|
||||
|
||||
/** Create an empty SyncResult with all arrays initialized. */
|
||||
function emptySyncResult(): SyncResult {
|
||||
return {
|
||||
added: [],
|
||||
updated: [],
|
||||
unchanged: [],
|
||||
removed: [],
|
||||
pendingUpdate: [],
|
||||
pendingRemove: [],
|
||||
errors: [],
|
||||
};
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Manifest
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
const MANIFEST_FILE = ".rpiv-managed.json";
|
||||
|
||||
/**
|
||||
* Read the managed-file manifest from the target directory.
|
||||
* Returns an empty array on missing/invalid/unreadable manifest.
|
||||
* Fail-soft: never throws.
|
||||
*/
|
||||
function readManifest(targetDir: string): string[] {
|
||||
const manifestPath = join(targetDir, MANIFEST_FILE);
|
||||
if (!existsSync(manifestPath)) return [];
|
||||
try {
|
||||
const raw = readFileSync(manifestPath, "utf-8");
|
||||
const parsed = JSON.parse(raw);
|
||||
if (!Array.isArray(parsed)) return [];
|
||||
return parsed.filter((e): e is string => typeof e === "string");
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Write the managed-file manifest to the target directory.
|
||||
* Fail-soft: swallows write errors (permissions, disk full, etc.).
|
||||
*/
|
||||
function writeManifest(targetDir: string, filenames: string[]): void {
|
||||
const manifestPath = join(targetDir, MANIFEST_FILE);
|
||||
try {
|
||||
writeFileSync(manifestPath, `${JSON.stringify(filenames, null, 2)}\n`, "utf-8");
|
||||
} catch {
|
||||
// non-fatal — sync results will still be correct for this run;
|
||||
// next run will re-bootstrap if manifest is missing
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Bootstrap the managed-file manifest on first run after upgrade.
|
||||
*
|
||||
* When no manifest exists, claims all existing destination files whose
|
||||
* names match the current bundled source list as rpiv-managed.
|
||||
* Writes the manifest and returns the managed set.
|
||||
*
|
||||
* If a manifest already exists, returns it as-is.
|
||||
*/
|
||||
function bootstrapManifest(targetDir: string, sourceNames: Set<string>): string[] {
|
||||
const existing = readManifest(targetDir);
|
||||
if (existing.length > 0) return existing;
|
||||
|
||||
const managed: string[] = [];
|
||||
try {
|
||||
const destEntries = readdirSync(targetDir).filter((f) => f.endsWith(".md"));
|
||||
for (const name of destEntries) {
|
||||
if (sourceNames.has(name)) {
|
||||
managed.push(name);
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// dest dir may not exist yet — that's fine, empty manifest
|
||||
}
|
||||
|
||||
writeManifest(targetDir, managed);
|
||||
return managed;
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Agent Sync Engine
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Synchronize bundled agents from <PACKAGE_ROOT>/agents/ into <cwd>/.pi/agents/.
|
||||
*
|
||||
* When `apply` is false (session_start): adds new files only.
|
||||
* Detects pending updates and removals without applying them.
|
||||
* When `apply` is true (/rpiv-update-agents): adds new, overwrites changed
|
||||
* managed files, removes stale managed files.
|
||||
*
|
||||
* Never throws — errors are collected in `result.errors`.
|
||||
*/
|
||||
export function syncBundledAgents(cwd: string, apply: boolean): SyncResult {
|
||||
const result = emptySyncResult();
|
||||
|
||||
if (!existsSync(BUNDLED_AGENTS_DIR)) {
|
||||
return result;
|
||||
}
|
||||
|
||||
const targetDir = join(cwd, ".pi", "agents");
|
||||
try {
|
||||
mkdirSync(targetDir, { recursive: true });
|
||||
} catch {
|
||||
result.errors.push({ op: "manifest-write", message: "Failed to create target directory" });
|
||||
return result;
|
||||
}
|
||||
|
||||
// 1. Enumerate source files
|
||||
let sourceEntries: string[];
|
||||
try {
|
||||
sourceEntries = readdirSync(BUNDLED_AGENTS_DIR).filter((f) => f.endsWith(".md"));
|
||||
} catch {
|
||||
result.errors.push({ op: "read-src", message: "Failed to read bundled agents directory" });
|
||||
return result;
|
||||
}
|
||||
|
||||
const sourceNames = new Set(sourceEntries);
|
||||
|
||||
// 2. Bootstrap manifest and get managed set
|
||||
const managedNames = new Set(bootstrapManifest(targetDir, sourceNames));
|
||||
|
||||
// 3. Process each source file
|
||||
for (const entry of sourceEntries) {
|
||||
const src = join(BUNDLED_AGENTS_DIR, entry);
|
||||
const dest = join(targetDir, entry);
|
||||
|
||||
if (!existsSync(dest)) {
|
||||
try {
|
||||
copyFileSync(src, dest);
|
||||
result.added.push(entry);
|
||||
} catch (e) {
|
||||
result.errors.push({
|
||||
file: entry,
|
||||
op: "copy",
|
||||
message: e instanceof Error ? e.message : String(e),
|
||||
});
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
let srcContent: Buffer;
|
||||
let destContent: Buffer;
|
||||
try {
|
||||
srcContent = readFileSync(src);
|
||||
} catch (e) {
|
||||
result.errors.push({
|
||||
file: entry,
|
||||
op: "read-src",
|
||||
message: e instanceof Error ? e.message : String(e),
|
||||
});
|
||||
continue;
|
||||
}
|
||||
try {
|
||||
destContent = readFileSync(dest);
|
||||
} catch (e) {
|
||||
result.errors.push({
|
||||
file: entry,
|
||||
op: "read-dest",
|
||||
message: e instanceof Error ? e.message : String(e),
|
||||
});
|
||||
continue;
|
||||
}
|
||||
|
||||
if (Buffer.compare(srcContent, destContent) === 0) {
|
||||
result.unchanged.push(entry);
|
||||
} else if (apply) {
|
||||
try {
|
||||
copyFileSync(src, dest);
|
||||
result.updated.push(entry);
|
||||
} catch (e) {
|
||||
result.errors.push({
|
||||
file: entry,
|
||||
op: "copy",
|
||||
message: e instanceof Error ? e.message : String(e),
|
||||
});
|
||||
}
|
||||
} else {
|
||||
result.pendingUpdate.push(entry);
|
||||
}
|
||||
}
|
||||
|
||||
// 4. Process stale managed files (in manifest but not in source)
|
||||
for (const name of managedNames) {
|
||||
if (sourceNames.has(name)) continue;
|
||||
|
||||
const destPath = join(targetDir, name);
|
||||
if (!existsSync(destPath)) continue;
|
||||
|
||||
if (apply) {
|
||||
try {
|
||||
unlinkSync(destPath);
|
||||
result.removed.push(name);
|
||||
} catch (e) {
|
||||
result.errors.push({
|
||||
file: name,
|
||||
op: "remove",
|
||||
message: e instanceof Error ? e.message : String(e),
|
||||
});
|
||||
}
|
||||
} else {
|
||||
result.pendingRemove.push(name);
|
||||
}
|
||||
}
|
||||
|
||||
// 5. Update manifest to reflect what's currently managed on disk.
|
||||
// apply=true: stale files were removed, so manifest = sourceEntries.
|
||||
// apply=false: stale files still exist on disk and must stay tracked
|
||||
// so the next apply can remove them.
|
||||
const manifestEntries = apply ? sourceEntries : [...sourceEntries, ...result.pendingRemove];
|
||||
writeManifest(targetDir, manifestEntries);
|
||||
|
||||
return result;
|
||||
}
|
||||
14
extensions/rpiv-pi/extensions/rpiv-core/constants.test.ts
Normal file
14
extensions/rpiv-pi/extensions/rpiv-core/constants.test.ts
Normal file
@@ -0,0 +1,14 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { FLAG_DEBUG, MSG_TYPE_GIT_CONTEXT, MSG_TYPE_GUIDANCE } from "./constants.js";
|
||||
|
||||
describe("rpiv-core constants", () => {
|
||||
it("FLAG_DEBUG is the canonical debug-flag name", () => {
|
||||
expect(FLAG_DEBUG).toBe("rpiv-debug");
|
||||
});
|
||||
it("MSG_TYPE_GIT_CONTEXT is the canonical git-context message type", () => {
|
||||
expect(MSG_TYPE_GIT_CONTEXT).toBe("rpiv-git-context");
|
||||
});
|
||||
it("MSG_TYPE_GUIDANCE is the canonical guidance message type", () => {
|
||||
expect(MSG_TYPE_GUIDANCE).toBe("rpiv-guidance");
|
||||
});
|
||||
});
|
||||
3
extensions/rpiv-pi/extensions/rpiv-core/constants.ts
Normal file
3
extensions/rpiv-pi/extensions/rpiv-core/constants.ts
Normal file
@@ -0,0 +1,3 @@
|
||||
export const FLAG_DEBUG = "rpiv-debug";
|
||||
export const MSG_TYPE_GIT_CONTEXT = "rpiv-git-context";
|
||||
export const MSG_TYPE_GUIDANCE = "rpiv-guidance";
|
||||
@@ -0,0 +1,47 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { isGitMutatingCommand } from "./git-context.js";
|
||||
|
||||
describe("isGitMutatingCommand — positives", () => {
|
||||
const mutating = [
|
||||
"git checkout main",
|
||||
"git switch feature",
|
||||
"git commit -m 'x'",
|
||||
"git merge main",
|
||||
"git rebase main",
|
||||
"git pull",
|
||||
"git reset --hard HEAD",
|
||||
"git revert abc",
|
||||
"git cherry-pick abc",
|
||||
"git worktree add ../wt",
|
||||
"git am < patch",
|
||||
"git stash",
|
||||
];
|
||||
for (const cmd of mutating) {
|
||||
it(`matches: ${cmd}`, () => {
|
||||
expect(isGitMutatingCommand(cmd)).toBe(true);
|
||||
});
|
||||
}
|
||||
it("matches when chained with preceding command", () => {
|
||||
expect(isGitMutatingCommand("cd x && git commit")).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe("isGitMutatingCommand — negatives", () => {
|
||||
const nonMutating = [
|
||||
"git status",
|
||||
"git log",
|
||||
"git diff",
|
||||
"git rev-parse HEAD",
|
||||
"git config user.name",
|
||||
"gitmoji commit",
|
||||
"git --version",
|
||||
];
|
||||
for (const cmd of nonMutating) {
|
||||
it(`does NOT match: ${cmd}`, () => {
|
||||
expect(isGitMutatingCommand(cmd)).toBe(false);
|
||||
});
|
||||
}
|
||||
it("rejects empty string", () => {
|
||||
expect(isGitMutatingCommand("")).toBe(false);
|
||||
});
|
||||
});
|
||||
109
extensions/rpiv-pi/extensions/rpiv-core/git-context.test.ts
Normal file
109
extensions/rpiv-pi/extensions/rpiv-core/git-context.test.ts
Normal file
@@ -0,0 +1,109 @@
|
||||
import { createMockPi, stubGitExec } from "@juicesharp/rpiv-test-utils";
|
||||
import { beforeEach, describe, expect, it } from "vitest";
|
||||
import { clearGitContextCache, getGitContext, resetInjectedMarker, takeGitContextIfChanged } from "./git-context.js";
|
||||
|
||||
beforeEach(() => {
|
||||
clearGitContextCache();
|
||||
resetInjectedMarker();
|
||||
});
|
||||
|
||||
describe("getGitContext", () => {
|
||||
it("parses branch + commit + user from three exec calls", async () => {
|
||||
const { pi } = createMockPi({
|
||||
exec: stubGitExec({ branch: "main", commit: "abc1234", user: "alice" }) as never,
|
||||
});
|
||||
const ctx = await getGitContext(pi);
|
||||
expect(ctx).toEqual({ branch: "main", commit: "abc1234", user: "alice" });
|
||||
});
|
||||
|
||||
it("remaps literal HEAD to 'detached'", async () => {
|
||||
const { pi } = createMockPi({
|
||||
exec: stubGitExec({ branch: "HEAD", commit: "abc", user: "alice" }) as never,
|
||||
});
|
||||
const ctx = await getGitContext(pi);
|
||||
expect(ctx?.branch).toBe("detached");
|
||||
});
|
||||
|
||||
it("returns null when both branch and commit are empty (not a repo)", async () => {
|
||||
const { pi } = createMockPi({ exec: stubGitExec({}) as never });
|
||||
expect(await getGitContext(pi)).toBeNull();
|
||||
});
|
||||
|
||||
it("falls back to process.env.USER when git config user.name errors", async () => {
|
||||
const { pi } = createMockPi({
|
||||
exec: stubGitExec({ branch: "main", commit: "abc", userError: new Error("no config") }) as never,
|
||||
});
|
||||
process.env.USER = "env-alice";
|
||||
const ctx = await getGitContext(pi);
|
||||
expect(ctx?.user).toBe("env-alice");
|
||||
});
|
||||
|
||||
it("falls back to 'unknown' when neither git nor env has user", async () => {
|
||||
const origUser = process.env.USER;
|
||||
delete process.env.USER;
|
||||
try {
|
||||
const { pi } = createMockPi({
|
||||
exec: stubGitExec({ branch: "main", commit: "abc", userError: new Error("x") }) as never,
|
||||
});
|
||||
const ctx = await getGitContext(pi);
|
||||
expect(ctx?.user).toBe("unknown");
|
||||
} finally {
|
||||
if (origUser) process.env.USER = origUser;
|
||||
}
|
||||
});
|
||||
|
||||
it("memoises: subsequent calls do not re-exec", async () => {
|
||||
const exec = stubGitExec({ branch: "main", commit: "abc", user: "alice" });
|
||||
const { pi } = createMockPi({ exec: exec as never });
|
||||
await getGitContext(pi);
|
||||
await getGitContext(pi);
|
||||
expect(exec).toHaveBeenCalledTimes(3); // 3 initial exec calls, no second-round
|
||||
});
|
||||
|
||||
it("clearGitContextCache forces re-read", async () => {
|
||||
const exec = stubGitExec({ branch: "main", commit: "abc", user: "alice" });
|
||||
const { pi } = createMockPi({ exec: exec as never });
|
||||
await getGitContext(pi);
|
||||
clearGitContextCache();
|
||||
await getGitContext(pi);
|
||||
expect(exec).toHaveBeenCalledTimes(6);
|
||||
});
|
||||
});
|
||||
|
||||
describe("takeGitContextIfChanged", () => {
|
||||
it("returns the context-line on first call", async () => {
|
||||
const { pi } = createMockPi({
|
||||
exec: stubGitExec({ branch: "main", commit: "abc", user: "alice" }) as never,
|
||||
});
|
||||
const r = await takeGitContextIfChanged(pi);
|
||||
expect(r).toContain("- Branch: main");
|
||||
expect(r).toContain("- Commit: abc");
|
||||
expect(r).toContain("- User: alice");
|
||||
});
|
||||
|
||||
it("returns null on second call when signature unchanged", async () => {
|
||||
const { pi } = createMockPi({
|
||||
exec: stubGitExec({ branch: "main", commit: "abc", user: "alice" }) as never,
|
||||
});
|
||||
await takeGitContextIfChanged(pi);
|
||||
expect(await takeGitContextIfChanged(pi)).toBeNull();
|
||||
});
|
||||
|
||||
it("re-emits after clearGitContextCache + resetInjectedMarker + signature change", async () => {
|
||||
const { pi } = createMockPi({
|
||||
exec: stubGitExec({ branch: "main", commit: "abc", user: "alice" }) as never,
|
||||
});
|
||||
await takeGitContextIfChanged(pi);
|
||||
clearGitContextCache();
|
||||
resetInjectedMarker();
|
||||
const { pi: pi2 } = createMockPi({
|
||||
exec: stubGitExec({ branch: "feature", commit: "def", user: "alice" }) as never,
|
||||
});
|
||||
expect(await takeGitContextIfChanged(pi2)).not.toBeNull();
|
||||
});
|
||||
|
||||
it("returns null when not in a git repo", async () => {
|
||||
const { pi } = createMockPi({ exec: stubGitExec({}) as never });
|
||||
expect(await takeGitContextIfChanged(pi)).toBeNull();
|
||||
});
|
||||
});
|
||||
79
extensions/rpiv-pi/extensions/rpiv-core/git-context.ts
Normal file
79
extensions/rpiv-pi/extensions/rpiv-core/git-context.ts
Normal file
@@ -0,0 +1,79 @@
|
||||
/**
|
||||
* Cached branch + short commit. Injected into the transcript once at
|
||||
* session_start, re-injected on session_compact (transcript cleared) and
|
||||
* only when the cached value changes (e.g. after a mutating git command).
|
||||
* Two parallel `git rev-parse` calls — one call can't combine
|
||||
* `--abbrev-ref` and `--short` cleanly because the `--abbrev-ref` mode
|
||||
* persists to subsequent revs. git itself resolves worktree gitdir
|
||||
* redirection, so either form is worktree-safe.
|
||||
*/
|
||||
|
||||
import type { ExtensionAPI } from "@mariozechner/pi-coding-agent";
|
||||
|
||||
type GitContext = { branch: string; commit: string; user: string };
|
||||
|
||||
// Signature (branch+commit) of the last message pushed into the transcript.
|
||||
// null = transcript has nothing current and needs re-injection.
|
||||
let lastInjectedSig: string | null = null;
|
||||
|
||||
// undefined = not loaded yet, null = not a git repo / failed, object = valid
|
||||
let cache: GitContext | null | undefined;
|
||||
|
||||
export async function getGitContext(pi: ExtensionAPI): Promise<GitContext | null> {
|
||||
if (cache !== undefined) return cache;
|
||||
cache = await loadGitContext(pi);
|
||||
return cache;
|
||||
}
|
||||
|
||||
export function clearGitContextCache(): void {
|
||||
cache = undefined;
|
||||
}
|
||||
|
||||
// Detached HEAD emits literal "HEAD" for --abbrev-ref; remap so frontmatter is meaningful.
|
||||
async function loadGitContext(pi: ExtensionAPI): Promise<GitContext | null> {
|
||||
try {
|
||||
const [branchRes, commitRes] = await Promise.all([
|
||||
pi.exec("git", ["rev-parse", "--abbrev-ref", "HEAD"], { timeout: 5000 }),
|
||||
pi.exec("git", ["rev-parse", "--short", "HEAD"], { timeout: 5000 }),
|
||||
]);
|
||||
const rawBranch = branchRes.stdout.trim();
|
||||
const commit = commitRes.stdout.trim();
|
||||
if (!rawBranch && !commit) return null;
|
||||
const branch = rawBranch === "HEAD" ? "detached" : rawBranch;
|
||||
let user = "";
|
||||
try {
|
||||
const r2 = await pi.exec("git", ["config", "user.name"], { timeout: 5000 });
|
||||
user = r2.stdout.trim();
|
||||
} catch {
|
||||
// fall through to env fallback
|
||||
}
|
||||
if (!user) user = process.env.USER || "unknown";
|
||||
return {
|
||||
branch: branch || "no-branch",
|
||||
commit: commit || "no-commit",
|
||||
user,
|
||||
};
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
export function resetInjectedMarker(): void {
|
||||
lastInjectedSig = null;
|
||||
}
|
||||
|
||||
// Returns the message content to inject, or null if the transcript is
|
||||
// already up-to-date or we're not in a git repo. Updates the marker
|
||||
// whenever it returns non-null.
|
||||
export async function takeGitContextIfChanged(pi: ExtensionAPI): Promise<string | null> {
|
||||
const g = await getGitContext(pi);
|
||||
if (!g) return null;
|
||||
const sig = `${g.branch}\n${g.commit}\n${g.user}`;
|
||||
if (sig === lastInjectedSig) return null;
|
||||
lastInjectedSig = sig;
|
||||
return `## Git Context\n- Branch: ${g.branch}\n- Commit: ${g.commit}\n- User: ${g.user}`;
|
||||
}
|
||||
|
||||
export function isGitMutatingCommand(cmd: string): boolean {
|
||||
return /\bgit\s+(checkout|switch|commit|merge|rebase|pull|reset|revert|cherry-pick|worktree|am|stash)\b/.test(cmd);
|
||||
}
|
||||
140
extensions/rpiv-pi/extensions/rpiv-core/guidance.test.ts
Normal file
140
extensions/rpiv-pi/extensions/rpiv-core/guidance.test.ts
Normal file
@@ -0,0 +1,140 @@
|
||||
import { mkdtempSync, rmSync } from "node:fs";
|
||||
import { tmpdir } from "node:os";
|
||||
import { join } from "node:path";
|
||||
import { createMockPi, writeGuidanceTree } from "@juicesharp/rpiv-test-utils";
|
||||
import { afterEach, beforeEach, describe, expect, it, type vi } from "vitest";
|
||||
import { clearInjectionState, handleToolCallGuidance, injectRootGuidance, resolveGuidance } from "./guidance.js";
|
||||
|
||||
let projectDir: string;
|
||||
|
||||
beforeEach(() => {
|
||||
projectDir = mkdtempSync(join(tmpdir(), "rpiv-guidance-"));
|
||||
clearInjectionState();
|
||||
});
|
||||
afterEach(() => {
|
||||
rmSync(projectDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
// Ladder semantics: at each directory depth the first existing of
// AGENTS.md > CLAUDE.md > .rpiv/guidance/<sub>/architecture.md wins;
// depth 0 only considers the architecture.md mirror.
describe("resolveGuidance — ladder", () => {
  it("AGENTS.md > CLAUDE.md > architecture.md at depth > 0", () => {
    writeGuidanceTree(projectDir, {
      "src/AGENTS.md": "agents-body",
      "src/CLAUDE.md": "claude-body",
      ".rpiv/guidance/src/architecture.md": "arch-body",
    });
    const resolved = resolveGuidance(join(projectDir, "src", "foo.ts"), projectDir);
    const srcEntry = resolved.find((r) => r.relativePath.startsWith("src/"));
    // All three exist for src/ — AGENTS.md must shadow the other two.
    expect(srcEntry?.kind).toBe("agents");
  });

  it("depth 0 skips AGENTS/CLAUDE but keeps root architecture.md", () => {
    writeGuidanceTree(projectDir, {
      "AGENTS.md": "root-agents",
      ".rpiv/guidance/architecture.md": "root-arch",
    });
    const resolved = resolveGuidance(join(projectDir, "any", "file.ts"), projectDir);
    const rootEntry = resolved.find((r) => r.relativePath === ".rpiv/guidance/architecture.md");
    expect(rootEntry?.kind).toBe("architecture");
    // Root AGENTS.md is Pi's own loader's job — must not be re-injected here.
    expect(resolved.some((r) => r.relativePath === "AGENTS.md")).toBe(false);
  });

  it("returns root-first, specific-last order", () => {
    writeGuidanceTree(projectDir, {
      ".rpiv/guidance/architecture.md": "root",
      "a/AGENTS.md": "a",
      "a/b/AGENTS.md": "ab",
    });
    const resolved = resolveGuidance(join(projectDir, "a", "b", "c.ts"), projectDir);
    // General → specific ordering is part of the contract.
    expect(resolved.map((r) => r.content)).toEqual(["root", "a", "ab"]);
  });

  it("returns empty when file is outside projectDir", () => {
    expect(resolveGuidance("/totally/elsewhere/foo.ts", projectDir)).toEqual([]);
  });

  it("returns empty when nothing exists along the ladder", () => {
    expect(resolveGuidance(join(projectDir, "x.ts"), projectDir)).toEqual([]);
  });
});
|
||||
|
||||
// session_start injection of the root architecture.md: sent once per
// session, deduped via the same Set as the tool_call path.
describe("injectRootGuidance", () => {
  it("sends root architecture.md when present", () => {
    writeGuidanceTree(projectDir, { ".rpiv/guidance/architecture.md": "body" });
    const { pi } = createMockPi();
    injectRootGuidance(projectDir, pi);
    expect(pi.sendMessage).toHaveBeenCalledTimes(1);
    const content = (pi.sendMessage as ReturnType<typeof vi.fn>).mock.calls[0][0].content;
    // Both the guidance body and the non-task envelope must be present.
    expect(content).toContain("body");
    expect(content).toContain("reference material, NOT a task");
    expect(content).toContain("auto-loaded at session start");
  });

  it("is idempotent across calls within a session", () => {
    writeGuidanceTree(projectDir, { ".rpiv/guidance/architecture.md": "body" });
    const { pi } = createMockPi();
    injectRootGuidance(projectDir, pi);
    injectRootGuidance(projectDir, pi);
    expect(pi.sendMessage).toHaveBeenCalledTimes(1);
  });

  it("re-injects after clearInjectionState", () => {
    writeGuidanceTree(projectDir, { ".rpiv/guidance/architecture.md": "body" });
    const { pi } = createMockPi();
    injectRootGuidance(projectDir, pi);
    clearInjectionState();
    injectRootGuidance(projectDir, pi);
    expect(pi.sendMessage).toHaveBeenCalledTimes(2);
  });

  it("no-ops when root architecture.md is missing", () => {
    const { pi } = createMockPi();
    injectRootGuidance(projectDir, pi);
    expect(pi.sendMessage).not.toHaveBeenCalled();
  });
});
|
||||
|
||||
// tool_call-driven injection: only read/edit/write trigger it, per-file
// dedup applies, and multiple newly-resolved files batch into one message.
describe("handleToolCallGuidance", () => {
  it("skips non-read/edit/write tools", () => {
    const { pi } = createMockPi();
    handleToolCallGuidance({ toolName: "bash", input: {} }, { cwd: projectDir }, pi);
    expect(pi.sendMessage).not.toHaveBeenCalled();
  });

  it("dedupes per-file across multiple tool_calls", () => {
    writeGuidanceTree(projectDir, { "src/AGENTS.md": "a" });
    const { pi } = createMockPi();
    const ev = { toolName: "read", input: { file_path: join(projectDir, "src", "x.ts") } };
    handleToolCallGuidance(ev, { cwd: projectDir }, pi);
    handleToolCallGuidance(ev, { cwd: projectDir }, pi);
    expect(pi.sendMessage).toHaveBeenCalledTimes(1);
  });

  it("supports both 'path' and 'file_path' input keys", () => {
    writeGuidanceTree(projectDir, { "src/AGENTS.md": "a" });
    const { pi } = createMockPi();
    handleToolCallGuidance(
      { toolName: "edit", input: { path: join(projectDir, "src", "x.ts") } },
      { cwd: projectDir },
      pi,
    );
    expect(pi.sendMessage).toHaveBeenCalledTimes(1);
  });

  it("emits one sendMessage combining multiple newly-resolved files", () => {
    writeGuidanceTree(projectDir, {
      ".rpiv/guidance/architecture.md": "root",
      "src/AGENTS.md": "src",
    });
    const { pi } = createMockPi();
    handleToolCallGuidance(
      { toolName: "write", input: { file_path: join(projectDir, "src", "x.ts") } },
      { cwd: projectDir },
      pi,
    );
    // Two resolved files, exactly one message — batching is the contract.
    expect(pi.sendMessage).toHaveBeenCalledTimes(1);
    const content = (pi.sendMessage as ReturnType<typeof vi.fn>).mock.calls[0][0].content;
    expect(content).toContain("root");
    expect(content).toContain("src");
    expect(content).toContain("auto-loaded because write touched src/x.ts");
  });
});
|
||||
235
extensions/rpiv-pi/extensions/rpiv-core/guidance.ts
Normal file
235
extensions/rpiv-pi/extensions/rpiv-core/guidance.ts
Normal file
@@ -0,0 +1,235 @@
|
||||
/**
|
||||
* Guidance injection — resolves and injects subfolder guidance files.
|
||||
*
|
||||
* At each directory depth from project root down to the touched file's
|
||||
* directory, picks the first existing of:
|
||||
* AGENTS.md > CLAUDE.md > .rpiv/guidance/<sub>/architecture.md
|
||||
*
|
||||
* Depth 0 (project root) skips AGENTS.md/CLAUDE.md because Pi's own
|
||||
* resource-loader (loadContextFileFromDir at resource-loader.js:30-46)
|
||||
* already loads <cwd>/AGENTS.md or <cwd>/CLAUDE.md into the system
|
||||
* prompt's # Project Context block. Depth 0 still checks
|
||||
* <cwd>/.rpiv/guidance/architecture.md — Pi's loader does not see that
|
||||
* path.
|
||||
*
|
||||
* `resolveGuidance` is pure logic with no ExtensionAPI references
|
||||
* (utility-module rule from extensions/rpiv-core/CLAUDE.md). Side
|
||||
* effects (sendMessage, in-memory dedup Set) live in
|
||||
* `handleToolCallGuidance`, `injectRootGuidance`, and
|
||||
* `clearInjectionState`.
|
||||
*/
|
||||
|
||||
import { existsSync, readFileSync } from "node:fs";
|
||||
import { dirname, isAbsolute, join, relative, sep } from "node:path";
|
||||
import type { ExtensionAPI } from "@mariozechner/pi-coding-agent";
|
||||
import { FLAG_DEBUG, MSG_TYPE_GUIDANCE } from "./constants.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Guidance Resolution
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
type GuidanceKind = "agents" | "claude" | "architecture";
|
||||
|
||||
interface GuidanceFile {
|
||||
/** Forward-slash-normalized path from project root — stable dedup key. */
|
||||
relativePath: string;
|
||||
absolutePath: string;
|
||||
content: string;
|
||||
kind: GuidanceKind;
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve guidance files for a given file path.
|
||||
*
|
||||
* Walks from project root to the file's directory. At each depth, picks
|
||||
* the first existing of AGENTS.md > CLAUDE.md > architecture.md (Pi's
|
||||
* own per-dir precedence at resource-loader.js:30-46, extended with
|
||||
* architecture.md as a third candidate). Depth 0 only checks
|
||||
* architecture.md — Pi's loader already handles <cwd>/AGENTS.md and
|
||||
* <cwd>/CLAUDE.md.
|
||||
*
|
||||
* Returns files root-first (general → specific), at most one per depth.
|
||||
*/
|
||||
export function resolveGuidance(filePath: string, projectDir: string): GuidanceFile[] {
|
||||
const fileDir = dirname(filePath);
|
||||
const relativeDir = relative(projectDir, fileDir);
|
||||
|
||||
// Guard: file is outside project root
|
||||
if (relativeDir.startsWith("..") || isAbsolute(relativeDir)) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const parts = relativeDir ? relativeDir.split(sep) : [];
|
||||
const results: GuidanceFile[] = [];
|
||||
|
||||
for (let depth = 0; depth <= parts.length; depth++) {
|
||||
const subPath = parts.slice(0, depth).join(sep);
|
||||
|
||||
// Per-depth candidate ladder. First-match wins.
|
||||
const candidates: Array<{ relative: string; kind: GuidanceKind }> = [];
|
||||
|
||||
// Depth 0: skip AGENTS/CLAUDE — Pi's loader handles <cwd> already.
|
||||
if (depth > 0) {
|
||||
candidates.push({ relative: join(subPath, "AGENTS.md"), kind: "agents" });
|
||||
candidates.push({ relative: join(subPath, "CLAUDE.md"), kind: "claude" });
|
||||
}
|
||||
candidates.push({
|
||||
relative: subPath
|
||||
? join(".rpiv", "guidance", subPath, "architecture.md")
|
||||
: join(".rpiv", "guidance", "architecture.md"),
|
||||
kind: "architecture",
|
||||
});
|
||||
|
||||
for (const candidate of candidates) {
|
||||
const absolute = join(projectDir, candidate.relative);
|
||||
if (existsSync(absolute)) {
|
||||
results.push({
|
||||
relativePath: candidate.relative.split(sep).join("/"),
|
||||
absolutePath: absolute,
|
||||
content: readFileSync(absolute, "utf-8"),
|
||||
kind: candidate.kind,
|
||||
});
|
||||
break; // first-match wins at this depth
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Session State
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
 * In-memory set of injected guidance paths per session.
 * Keys are the forward-slash-normalized `relativePath` values; shared by
 * `injectRootGuidance` and `handleToolCallGuidance` so the same file is
 * never injected twice within one session.
 */
const injectedGuidance = new Set<string>();

/** Reset the dedup set — called on session start/reset (and by tests). */
export function clearInjectionState() {
  injectedGuidance.clear();
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Root Guidance Injection (session_start)
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Inject the root `.rpiv/guidance/architecture.md` at session start.
|
||||
*
|
||||
* Called from `session_start` so the root guidance is available before the
|
||||
* first agent turn — without waiting for a read/edit/write tool_call.
|
||||
* Uses the same `injectedGuidance` Set for dedup, so `handleToolCallGuidance`
|
||||
* won't re-inject it later.
|
||||
*/
|
||||
export function injectRootGuidance(cwd: string, pi: ExtensionAPI): void {
|
||||
const relativePath = ".rpiv/guidance/architecture.md";
|
||||
|
||||
if (injectedGuidance.has(relativePath)) return;
|
||||
|
||||
const absolutePath = join(cwd, relativePath);
|
||||
if (!existsSync(absolutePath)) return;
|
||||
|
||||
let content: string;
|
||||
try {
|
||||
content = readFileSync(absolutePath, "utf-8");
|
||||
} catch {
|
||||
// Silent failure mirrors handleToolCallGuidance's posture — session_start
|
||||
// runs before any UI is bound, so a permissions/race error here must not
|
||||
// crash the hook. Don't mark as injected so a later tool_call can retry.
|
||||
return;
|
||||
}
|
||||
injectedGuidance.add(relativePath);
|
||||
|
||||
const file: GuidanceFile = { relativePath, absolutePath, content, kind: "architecture" };
|
||||
pi.sendMessage({
|
||||
customType: MSG_TYPE_GUIDANCE,
|
||||
content: wrapGuidance(formatLabel(file), content, "auto-loaded at session start"),
|
||||
display: !!pi.getFlag(FLAG_DEBUG),
|
||||
});
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Tool-call Handler
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Handle guidance injection on tool_call events for read/edit/write.
|
||||
* Sends hidden messages via pi.sendMessage as a side effect.
|
||||
*/
|
||||
export function handleToolCallGuidance(
|
||||
event: { toolName: string; input: Record<string, unknown> },
|
||||
ctx: { cwd: string },
|
||||
pi: ExtensionAPI,
|
||||
): void {
|
||||
if (!["read", "edit", "write"].includes(event.toolName)) return;
|
||||
|
||||
const filePath = (event.input as any).file_path ?? (event.input as any).path;
|
||||
if (!filePath) return;
|
||||
|
||||
const resolved = resolveGuidance(filePath, ctx.cwd);
|
||||
if (resolved.length === 0) return;
|
||||
|
||||
const newFiles = resolved.filter((g) => !injectedGuidance.has(g.relativePath));
|
||||
if (newFiles.length === 0) return;
|
||||
|
||||
// Mark before sendMessage — idempotence > reliability.
|
||||
for (const g of newFiles) {
|
||||
injectedGuidance.add(g.relativePath);
|
||||
}
|
||||
|
||||
const trigger = `auto-loaded because ${event.toolName} touched ${shortenPath(filePath, ctx.cwd)}`;
|
||||
const contextParts = newFiles.map((g) => wrapGuidance(formatLabel(g), g.content, trigger));
|
||||
|
||||
pi.sendMessage({
|
||||
customType: MSG_TYPE_GUIDANCE,
|
||||
content: contextParts.join("\n\n---\n\n"),
|
||||
display: !!pi.getFlag(FLAG_DEBUG),
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Wrap guidance content in a non-task envelope. The opening disclaimer tells
|
||||
* the agent this block is reference material — not an instruction — and states
|
||||
* the trigger so the agent can judge whether the block is relevant to the
|
||||
* current user request. Heading is `## Architecture Guidance:` to match the
|
||||
* `PreToolUse:Read` hook output and the actual content (architecture.md).
|
||||
*/
|
||||
function wrapGuidance(label: string, content: string, trigger: string): string {
|
||||
return [
|
||||
`[rpiv-guidance — reference material, NOT a task. ${trigger}.`,
|
||||
`Consult only if directly relevant to the user's current request; otherwise ignore.]`,
|
||||
"",
|
||||
`## Architecture Guidance: ${label}`,
|
||||
"",
|
||||
content,
|
||||
].join("\n");
|
||||
}
|
||||
|
||||
/**
|
||||
* Render a project-relative, forward-slash-normalized path for the trigger
|
||||
* disclaimer. Falls back to the absolute path if the file lives outside the
|
||||
* project root (defensive — `handleToolCallGuidance` already short-circuits
|
||||
* via `resolveGuidance` in that case, so this branch is unreachable today).
|
||||
*/
|
||||
function shortenPath(filePath: string, cwd: string): string {
|
||||
const r = relative(cwd, filePath);
|
||||
return r && !r.startsWith("..") ? r.split(sep).join("/") : filePath;
|
||||
}
|
||||
|
||||
/**
|
||||
* Format a guidance file's heading label.
|
||||
* extensions/rpiv-core/AGENTS.md → "extensions/rpiv-core (AGENTS.md)"
|
||||
* scripts/CLAUDE.md → "scripts (CLAUDE.md)"
|
||||
* .rpiv/guidance/scripts/architecture.md → "scripts (architecture.md)"
|
||||
* .rpiv/guidance/architecture.md → "root (architecture.md)"
|
||||
*/
|
||||
function formatLabel(g: GuidanceFile): string {
|
||||
if (g.kind === "architecture") {
|
||||
const stripped = g.relativePath.replace(/^\.rpiv\/guidance\//, "");
|
||||
const sub = stripped === "architecture.md" ? "" : stripped.replace(/\/architecture\.md$/, "");
|
||||
return `${sub || "root"} (architecture.md)`;
|
||||
}
|
||||
const fileName = g.kind === "agents" ? "AGENTS.md" : "CLAUDE.md";
|
||||
const idx = g.relativePath.lastIndexOf("/");
|
||||
const sub = idx > 0 ? g.relativePath.slice(0, idx) : "";
|
||||
return `${sub || "root"} (${fileName})`;
|
||||
}
|
||||
25
extensions/rpiv-pi/extensions/rpiv-core/index.ts
Normal file
25
extensions/rpiv-pi/extensions/rpiv-core/index.ts
Normal file
@@ -0,0 +1,25 @@
|
||||
/**
|
||||
* rpiv-core — Pure-orchestrator extension for rpiv-pi.
|
||||
*
|
||||
* Composes session hooks and the two slash commands. All logic lives in the
|
||||
* registrar modules; this file is the table of contents.
|
||||
*
|
||||
* Tool-owning plugins are siblings (see siblings.ts); install via /rpiv-setup.
|
||||
*/
|
||||
|
||||
import type { ExtensionAPI } from "@mariozechner/pi-coding-agent";
|
||||
import { FLAG_DEBUG } from "./constants.js";
|
||||
import { registerSessionHooks } from "./session-hooks.js";
|
||||
import { registerSetupCommand } from "./setup-command.js";
|
||||
import { registerUpdateAgentsCommand } from "./update-agents-command.js";
|
||||
|
||||
/**
 * Extension entry point: registers the debug flag, session hooks, and the
 * two slash commands. Pure composition — all behavior lives in the
 * imported registrar modules.
 */
export default function (pi: ExtensionAPI) {
  pi.registerFlag(FLAG_DEBUG, {
    description: "Show injected guidance and git-context messages",
    type: "boolean",
    default: false,
  });
  registerSessionHooks(pi);
  registerUpdateAgentsCommand(pi);
  registerSetupCommand(pi);
}
|
||||
@@ -0,0 +1,59 @@
|
||||
import { mkdirSync, writeFileSync } from "node:fs";
|
||||
import { dirname, join } from "node:path";
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { findMissingSiblings } from "./package-checks.js";
|
||||
import { SIBLINGS } from "./siblings.js";
|
||||
|
||||
// Path mirrors PI_AGENT_SETTINGS in package-checks.ts.
// NOTE(review): these tests write to $HOME/.pi/agent/settings.json —
// presumably the vitest setup points HOME at a per-test sandbox; confirm,
// otherwise a developer's real settings file gets clobbered.
const SETTINGS_PATH = join(process.env.HOME!, ".pi", "agent", "settings.json");

// Write an arbitrary JSON value as the settings file, creating parents.
function writeSettings(contents: unknown) {
  mkdirSync(dirname(SETTINGS_PATH), { recursive: true });
  writeFileSync(SETTINGS_PATH, JSON.stringify(contents), "utf-8");
}

describe("findMissingSiblings", () => {
  // Every malformed-settings shape degrades to "nothing installed".
  it("returns all 7 siblings when settings.json is missing", () => {
    expect(findMissingSiblings()).toHaveLength(SIBLINGS.length);
  });

  it("returns all 7 siblings when JSON is invalid", () => {
    mkdirSync(dirname(SETTINGS_PATH), { recursive: true });
    writeFileSync(SETTINGS_PATH, "{not json", "utf-8");
    expect(findMissingSiblings()).toHaveLength(SIBLINGS.length);
  });

  it("returns all 7 siblings when packages field is absent", () => {
    writeSettings({ other: "data" });
    expect(findMissingSiblings()).toHaveLength(SIBLINGS.length);
  });

  it("returns all 7 siblings when packages is not an array", () => {
    writeSettings({ packages: "not-array" });
    expect(findMissingSiblings()).toHaveLength(SIBLINGS.length);
  });

  it("filters out non-string entries defensively", () => {
    writeSettings({ packages: [null, 42, "@juicesharp/rpiv-todo"] });
    const missing = findMissingSiblings();
    expect(missing.find((s) => s.matches.test("@juicesharp/rpiv-todo"))).toBeUndefined();
  });

  it("matches case-insensitively", () => {
    writeSettings({ packages: ["@JUICESHARP/RPIV-TODO"] });
    const missing = findMissingSiblings();
    expect(missing.find((s) => s.matches.test("@juicesharp/rpiv-todo"))).toBeUndefined();
  });

  it("rpiv-args word-boundary: treats rpiv-args-extended as non-install", () => {
    writeSettings({ packages: ["@juicesharp/rpiv-args-extended"] });
    const missing = findMissingSiblings();
    expect(missing.find((s) => s.pkg.endsWith("/rpiv-args"))).toBeDefined();
  });

  it("returns [] when all 7 siblings are installed", () => {
    writeSettings({
      packages: SIBLINGS.map((s) => s.pkg.replace(/^npm:/, "")),
    });
    expect(findMissingSiblings()).toEqual([]);
  });
});
|
||||
33
extensions/rpiv-pi/extensions/rpiv-core/package-checks.ts
Normal file
33
extensions/rpiv-pi/extensions/rpiv-core/package-checks.ts
Normal file
@@ -0,0 +1,33 @@
|
||||
/**
|
||||
* Detect which SIBLINGS are installed by reading ~/.pi/agent/settings.json.
|
||||
* Pure utility — no ExtensionAPI.
|
||||
*/
|
||||
|
||||
import { existsSync, readFileSync } from "node:fs";
|
||||
import { homedir } from "node:os";
|
||||
import { join } from "node:path";
|
||||
import { SIBLINGS, type SiblingPlugin } from "./siblings.js";
|
||||
|
||||
const PI_AGENT_SETTINGS = join(homedir(), ".pi", "agent", "settings.json");
|
||||
|
||||
function readInstalledPackages(): string[] {
|
||||
if (!existsSync(PI_AGENT_SETTINGS)) return [];
|
||||
try {
|
||||
const raw = readFileSync(PI_AGENT_SETTINGS, "utf-8");
|
||||
const settings = JSON.parse(raw) as { packages?: unknown };
|
||||
if (!Array.isArray(settings.packages)) return [];
|
||||
return settings.packages.filter((e): e is string => typeof e === "string");
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the SIBLINGS not currently installed.
|
||||
* Reads ~/.pi/agent/settings.json once per call — callers that need both the
|
||||
* full snapshot and the missing subset should call this once and filter.
|
||||
*/
|
||||
export function findMissingSiblings(): SiblingPlugin[] {
|
||||
const installed = readInstalledPackages();
|
||||
return SIBLINGS.filter((s) => !installed.some((entry) => s.matches.test(entry)));
|
||||
}
|
||||
100
extensions/rpiv-pi/extensions/rpiv-core/pi-installer.test.ts
Normal file
100
extensions/rpiv-pi/extensions/rpiv-core/pi-installer.test.ts
Normal file
@@ -0,0 +1,100 @@
|
||||
import { makeSpawnStub } from "@juicesharp/rpiv-test-utils";
|
||||
import { beforeEach, describe, expect, it, vi } from "vitest";
|
||||
|
||||
vi.mock("node:child_process", () => ({ spawn: vi.fn() }));
|
||||
|
||||
import { spawn } from "node:child_process";
|
||||
import { spawnPiInstall } from "./pi-installer.js";
|
||||
|
||||
// spawn is module-mocked (vi.mock above); reset between tests so each case
// installs exactly one mockImplementationOnce.
beforeEach(() => {
  vi.mocked(spawn).mockReset();
});

describe("spawnPiInstall — success path", () => {
  it("resolves with exit 0 + buffered stdout/stderr", async () => {
    vi.mocked(spawn).mockImplementationOnce(
      () => makeSpawnStub({ stdout: "installed\n", stderr: "", exitCode: 0 }) as unknown as ReturnType<typeof spawn>,
    );
    const r = await spawnPiInstall("@x/y", 30_000);
    expect(r).toEqual({ code: 0, stdout: "installed\n", stderr: "" });
  });
});

describe("spawnPiInstall — non-zero exit", () => {
  it("returns exit code and accumulated stderr", async () => {
    vi.mocked(spawn).mockImplementationOnce(
      () => makeSpawnStub({ stdout: "", stderr: "fail\n", exitCode: 2 }) as unknown as ReturnType<typeof spawn>,
    );
    const r = await spawnPiInstall("@x/y", 30_000);
    expect(r.code).toBe(2);
    expect(r.stderr).toBe("fail\n");
  });

  // Node's close event reports null when the child was killed by a signal.
  it("fallback code=1 when close emits null", async () => {
    const stub = makeSpawnStub({ neverSettles: true });
    vi.mocked(spawn).mockImplementationOnce(() => stub as unknown as ReturnType<typeof spawn>);
    const promise = spawnPiInstall("@x/y", 30_000);
    stub.emit("close", null);
    const r = await promise;
    expect(r.code).toBe(1);
  });
});

describe("spawnPiInstall — error event before close", () => {
  it("settles with code=1 + error.message in stderr", async () => {
    vi.mocked(spawn).mockImplementationOnce(
      () => makeSpawnStub({ error: new Error("ENOENT pi") }) as unknown as ReturnType<typeof spawn>,
    );
    const r = await spawnPiInstall("@x/y", 30_000);
    expect(r.code).toBe(1);
    expect(r.stderr).toContain("ENOENT pi");
  });
});

describe("spawnPiInstall — timeout", () => {
  // Fake timers drive the timeout deterministically; real timers must be
  // restored before awaiting the settled promise.
  it("kills with SIGTERM at timeout and resolves with code 124", async () => {
    vi.useFakeTimers();
    const stub = makeSpawnStub({ neverSettles: true });
    const killSpy = vi.spyOn(stub, "kill");
    vi.mocked(spawn).mockImplementationOnce(() => stub as unknown as ReturnType<typeof spawn>);
    const promise = spawnPiInstall("@x/y", 30_000);
    await vi.advanceTimersByTimeAsync(30_000);
    vi.useRealTimers();
    const r = await promise;
    expect(killSpy).toHaveBeenCalledWith("SIGTERM");
    expect(r.code).toBe(124);
    expect(r.stderr).toContain("timed out");
  });
});

describe("spawnPiInstall — settle idempotence", () => {
  it("only resolves once even if close fires after timeout", async () => {
    vi.useFakeTimers();
    const stub = makeSpawnStub({ neverSettles: true });
    vi.mocked(spawn).mockImplementationOnce(() => stub as unknown as ReturnType<typeof spawn>);
    const promise = spawnPiInstall("@x/y", 30_000);
    await vi.advanceTimersByTimeAsync(30_000);
    stub.emit("close", 0); // late close — must not replace the timeout result
    vi.useRealTimers();
    const r = await promise;
    expect(r.code).toBe(124);
  });
});

describe("spawnPiInstall — Windows branch", () => {
  // process.platform is read-only; redefine it for the test and restore in
  // finally so a failure cannot leak the fake platform into other tests.
  it("invokes via cmd.exe /c pi install on win32", async () => {
    const origPlatform = process.platform;
    Object.defineProperty(process, "platform", { value: "win32", configurable: true });
    try {
      vi.mocked(spawn).mockImplementationOnce(
        () => makeSpawnStub({ exitCode: 0 }) as unknown as ReturnType<typeof spawn>,
      );
      await spawnPiInstall("@x/y", 30_000);
      const firstCall = vi.mocked(spawn).mock.calls[0];
      expect(firstCall[0]).toBe("cmd.exe");
      expect(firstCall[1]).toEqual(["/c", "pi", "install", "@x/y"]);
    } finally {
      Object.defineProperty(process, "platform", { value: origPlatform, configurable: true });
    }
  });
});
|
||||
59
extensions/rpiv-pi/extensions/rpiv-core/pi-installer.ts
Normal file
59
extensions/rpiv-pi/extensions/rpiv-core/pi-installer.ts
Normal file
@@ -0,0 +1,59 @@
|
||||
/**
|
||||
* Windows-safe wrapper around `pi install <pkg>`.
|
||||
*
|
||||
* Pi's own `pi.exec` calls `child_process.spawn(cmd, args, { shell: false })`,
|
||||
* which cannot launch `.cmd`/`.bat` shims on Windows — npm installs `pi` as
|
||||
* `pi.cmd`, so on Windows the spawn ENOENTs silently and the caller sees only
|
||||
* `exit 1`. We side-step it here by invoking via `cmd.exe /c` on Windows.
|
||||
*/
|
||||
|
||||
import { spawn } from "node:child_process";
|
||||
|
||||
/** Outcome of one `pi install <pkg>` run. */
export interface PiInstallResult {
  /** Child exit code; 1 on spawn error or null close, 124 on timeout. */
  code: number;
  /** Accumulated stdout up to settle time. */
  stdout: string;
  /** Accumulated stderr; the timeout path appends a "[timed out …]" note. */
  stderr: string;
}
||||
|
||||
export function spawnPiInstall(pkg: string, timeoutMs: number): Promise<PiInstallResult> {
|
||||
return new Promise((resolve) => {
|
||||
const isWindows = process.platform === "win32";
|
||||
const [cmd, args, spawnOpts] = isWindows
|
||||
? (["cmd.exe", ["/c", "pi", "install", pkg], { windowsHide: true }] as const)
|
||||
: (["pi", ["install", pkg], {}] as const);
|
||||
|
||||
let settled = false;
|
||||
let stdout = "";
|
||||
let stderr = "";
|
||||
|
||||
const proc = spawn(cmd, args, { ...spawnOpts, stdio: ["ignore", "pipe", "pipe"] });
|
||||
proc.stdout?.on("data", (d) => {
|
||||
stdout += d.toString();
|
||||
});
|
||||
proc.stderr?.on("data", (d) => {
|
||||
stderr += d.toString();
|
||||
});
|
||||
|
||||
const settle = (result: PiInstallResult) => {
|
||||
if (settled) return;
|
||||
settled = true;
|
||||
clearTimeout(timer);
|
||||
resolve(result);
|
||||
};
|
||||
|
||||
const timer = setTimeout(() => {
|
||||
proc.kill("SIGTERM");
|
||||
setTimeout(() => {
|
||||
if (!proc.killed) proc.kill("SIGKILL");
|
||||
}, 5000);
|
||||
settle({ code: 124, stdout, stderr: `${stderr}\n[timed out after ${timeoutMs}ms]` });
|
||||
}, timeoutMs);
|
||||
|
||||
proc.on("error", (err) => {
|
||||
settle({ code: 1, stdout, stderr: stderr + (stderr ? "\n" : "") + err.message });
|
||||
});
|
||||
proc.on("close", (code) => {
|
||||
settle({ code: code ?? 1, stdout, stderr });
|
||||
});
|
||||
});
|
||||
}
|
||||
@@ -0,0 +1,162 @@
|
||||
import { mkdirSync, readFileSync, writeFileSync } from "node:fs";
|
||||
import { dirname, join } from "node:path";
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { findLegacySiblings, pruneLegacySiblings } from "./prune-legacy-siblings.js";
|
||||
|
||||
// Path mirrors PI_AGENT_SETTINGS in prune-legacy-siblings.ts.
// NOTE(review): these tests write to $HOME/.pi/agent/settings.json —
// presumably the vitest setup points HOME at a per-test sandbox; confirm.
const SETTINGS_PATH = join(process.env.HOME!, ".pi", "agent", "settings.json");

// Write an arbitrary JSON value as the settings file, creating parents.
function writeSettings(contents: unknown): void {
  mkdirSync(dirname(SETTINGS_PATH), { recursive: true });
  writeFileSync(SETTINGS_PATH, JSON.stringify(contents), "utf-8");
}

// Parse the settings file back for structural assertions.
function readSettings(): unknown {
  return JSON.parse(readFileSync(SETTINGS_PATH, "utf-8"));
}

describe("pruneLegacySiblings", () => {
  // Fail-soft contract: any malformed settings shape is a no-op and must
  // never rewrite the file.
  it("no settings file → pruned: []", () => {
    expect(pruneLegacySiblings()).toEqual({ pruned: [] });
  });

  it("invalid JSON → pruned: [], file byte-exact unchanged", () => {
    mkdirSync(dirname(SETTINGS_PATH), { recursive: true });
    writeFileSync(SETTINGS_PATH, "{not json", "utf-8");
    expect(pruneLegacySiblings()).toEqual({ pruned: [] });
    expect(readFileSync(SETTINGS_PATH, "utf-8")).toBe("{not json");
  });

  it("non-object top-level (array) → pruned: [], file unchanged", () => {
    writeSettings([1, 2, 3]);
    expect(pruneLegacySiblings()).toEqual({ pruned: [] });
    expect(readSettings()).toEqual([1, 2, 3]);
  });

  it("no packages field → pruned: []", () => {
    writeSettings({ other: "data" });
    expect(pruneLegacySiblings()).toEqual({ pruned: [] });
    expect(readSettings()).toEqual({ other: "data" });
  });

  it("non-array packages field → pruned: []", () => {
    writeSettings({ packages: "not-array" });
    expect(pruneLegacySiblings()).toEqual({ pruned: [] });
  });

  it("only non-legacy entries → pruned: [], file unchanged", () => {
    writeSettings({
      packages: ["npm:pi-perplexity", "npm:@juicesharp/rpiv-todo", "npm:@tintinweb/pi-subagents"],
    });
    const before = readFileSync(SETTINGS_PATH, "utf-8");
    expect(pruneLegacySiblings()).toEqual({ pruned: [] });
    expect(readFileSync(SETTINGS_PATH, "utf-8")).toBe(before);
  });

  it("legacy-only: removes pi-subagents (nicobailon fork), preserves other top-level keys", () => {
    writeSettings({
      defaultProvider: "zai",
      theme: "dark",
      packages: ["npm:pi-subagents"],
    });
    const result = pruneLegacySiblings();
    expect(result.pruned).toEqual(["npm:pi-subagents"]);
    expect(readSettings()).toEqual({
      defaultProvider: "zai",
      theme: "dark",
      packages: [],
    });
  });

  // The scoped @tintinweb/pi-subagents is the CURRENT sibling and must
  // survive; only the bare legacy name is pruned. Non-string junk entries
  // are preserved untouched.
  it("mixed list: prunes nicobailon's pi-subagents only, preserves @tintinweb/pi-subagents and other entries", () => {
    writeSettings({
      packages: [
        "npm:pi-perplexity",
        "npm:@tintinweb/pi-subagents",
        "npm:@juicesharp/rpiv-todo",
        "/Users/x/rpiv-mono/packages/rpiv-pi",
        null,
        42,
        "npm:pi-subagents",
      ],
    });
    const result = pruneLegacySiblings();
    expect(result.pruned).toEqual(["npm:pi-subagents"]);
    expect(readSettings()).toEqual({
      packages: [
        "npm:pi-perplexity",
        "npm:@tintinweb/pi-subagents",
        "npm:@juicesharp/rpiv-todo",
        "/Users/x/rpiv-mono/packages/rpiv-pi",
        null,
        42,
      ],
    });
  });

  it("idempotent: second call after prune is a no-op", () => {
    writeSettings({
      packages: ["npm:pi-subagents"],
    });
    expect(pruneLegacySiblings().pruned).toEqual(["npm:pi-subagents"]);
    expect(pruneLegacySiblings()).toEqual({ pruned: [] });
  });

  it("case-insensitive match", () => {
    writeSettings({
      packages: ["NPM:Pi-Subagents"],
    });
    expect(pruneLegacySiblings().pruned).toEqual(["NPM:Pi-Subagents"]);
  });
});

describe("findLegacySiblings (read-only scan)", () => {
  // Same fail-soft matrix as the mutating variant, but also asserts the
  // file is never touched.
  it("no settings file → []", () => {
    expect(findLegacySiblings()).toEqual([]);
  });

  it("invalid JSON → []", () => {
    mkdirSync(dirname(SETTINGS_PATH), { recursive: true });
    writeFileSync(SETTINGS_PATH, "{not json", "utf-8");
    expect(findLegacySiblings()).toEqual([]);
  });

  it("non-object top-level → []", () => {
    writeSettings([1, 2, 3]);
    expect(findLegacySiblings()).toEqual([]);
  });

  it("no packages field → []", () => {
    writeSettings({ other: "data" });
    expect(findLegacySiblings()).toEqual([]);
  });

  it("non-array packages field → []", () => {
    writeSettings({ packages: "not-array" });
    expect(findLegacySiblings()).toEqual([]);
  });

  it("only non-legacy entries → []", () => {
    writeSettings({
      packages: ["npm:pi-perplexity", "npm:@juicesharp/rpiv-todo", "npm:@tintinweb/pi-subagents"],
    });
    expect(findLegacySiblings()).toEqual([]);
  });

  it("returns legacy entries without mutating settings.json", () => {
    writeSettings({
      defaultProvider: "zai",
      packages: ["npm:pi-subagents", "npm:@juicesharp/rpiv-todo"],
    });
    const before = readFileSync(SETTINGS_PATH, "utf-8");
    expect(findLegacySiblings()).toEqual(["npm:pi-subagents"]);
    expect(readFileSync(SETTINGS_PATH, "utf-8")).toBe(before);
  });

  it("idempotent: repeat call returns the same list and does not mutate", () => {
    writeSettings({ packages: ["npm:pi-subagents"] });
    const before = readFileSync(SETTINGS_PATH, "utf-8");
    expect(findLegacySiblings()).toEqual(["npm:pi-subagents"]);
    expect(findLegacySiblings()).toEqual(["npm:pi-subagents"]);
    expect(readFileSync(SETTINGS_PATH, "utf-8")).toBe(before);
  });
});
|
||||
@@ -0,0 +1,95 @@
|
||||
/**
|
||||
* Detect + remove deprecated sibling package entries from
|
||||
* ~/.pi/agent/settings.json.
|
||||
*
|
||||
* Split into two phases so /rpiv-setup can preview pending changes in the
|
||||
* confirmation dialog and apply the mutation only after the user agrees:
|
||||
*
|
||||
* findLegacySiblings() — read-only scan; returns the entries that WOULD
|
||||
* be pruned. Safe to call before confirmation.
|
||||
* pruneLegacySiblings() — mutating apply step; rewrites settings.json.
|
||||
* Call only after the user has confirmed.
|
||||
*
|
||||
* Both helpers are fail-soft (missing file / invalid JSON / non-object /
|
||||
* unwritable → empty result), idempotent, and have no plugin API
|
||||
* dependency.
|
||||
*
|
||||
* Background: 0.13.x → 1.0.0 upgraders may have both nicobailon's
|
||||
* pi-subagents and @tintinweb/pi-subagents in settings.json simultaneously,
|
||||
* which makes Pi reject boot with duplicate-tool registration when both
|
||||
* load. The prune is the upgrade's must-do mutation, but it must not run
|
||||
* before the user has consented to /rpiv-setup mutating settings.json.
|
||||
*/
|
||||
|
||||
import { existsSync, readFileSync, writeFileSync } from "node:fs";
|
||||
import { homedir } from "node:os";
|
||||
import { join } from "node:path";
|
||||
import { LEGACY_SIBLINGS } from "./siblings.js";
|
||||
|
||||
// Absolute path to Pi's agent settings file — the only file these helpers touch.
const PI_AGENT_SETTINGS = join(homedir(), ".pi", "agent", "settings.json");

export interface PruneLegacySiblingsResult {
  /** settings.json `packages[]` entries that were removed (empty = no-op). */
  pruned: string[];
}

// Internal parse result shared by the scan and prune phases.
interface ParsedSettings {
  // Full top-level settings object, kept whole so a rewrite preserves unrelated keys.
  settings: Record<string, unknown>;
  // The raw `packages` array; entries may be non-strings and are preserved as-is.
  packages: unknown[];
}
|
||||
|
||||
function readSettings(): ParsedSettings | undefined {
|
||||
if (!existsSync(PI_AGENT_SETTINGS)) return undefined;
|
||||
let parsed: unknown;
|
||||
try {
|
||||
parsed = JSON.parse(readFileSync(PI_AGENT_SETTINGS, "utf-8"));
|
||||
} catch {
|
||||
return undefined;
|
||||
}
|
||||
if (!parsed || typeof parsed !== "object" || Array.isArray(parsed)) return undefined;
|
||||
const settings = parsed as Record<string, unknown>;
|
||||
if (!Array.isArray(settings.packages)) return undefined;
|
||||
return { settings, packages: settings.packages as unknown[] };
|
||||
}
|
||||
|
||||
function partitionPackages(packages: unknown[]): { legacy: string[]; kept: unknown[] } {
|
||||
const legacy: string[] = [];
|
||||
const kept = packages.filter((entry) => {
|
||||
if (typeof entry !== "string") return true;
|
||||
const isLegacy = LEGACY_SIBLINGS.some((l) => l.matches.test(entry));
|
||||
if (isLegacy) legacy.push(entry);
|
||||
return !isLegacy;
|
||||
});
|
||||
return { legacy, kept };
|
||||
}
|
||||
|
||||
/**
|
||||
* Read-only scan: returns the legacy entries that pruneLegacySiblings()
|
||||
* would remove. Does not touch the filesystem beyond reading settings.json.
|
||||
* Safe to call before any user confirmation.
|
||||
*/
|
||||
export function findLegacySiblings(): string[] {
|
||||
const parsed = readSettings();
|
||||
if (!parsed) return [];
|
||||
return partitionPackages(parsed.packages).legacy;
|
||||
}
|
||||
|
||||
/**
|
||||
* Mutating apply step: rewrites settings.json with legacy entries removed.
|
||||
* Returns a structured report so callers can emit a conditional notify.
|
||||
* Never throws. Call AFTER the user has confirmed the cleanup.
|
||||
*/
|
||||
export function pruneLegacySiblings(): PruneLegacySiblingsResult {
|
||||
const parsed = readSettings();
|
||||
if (!parsed) return { pruned: [] };
|
||||
const { legacy, kept } = partitionPackages(parsed.packages);
|
||||
if (legacy.length === 0) return { pruned: [] };
|
||||
|
||||
parsed.settings.packages = kept;
|
||||
try {
|
||||
writeFileSync(PI_AGENT_SETTINGS, `${JSON.stringify(parsed.settings, null, 2)}\n`, "utf-8");
|
||||
} catch {
|
||||
return { pruned: [] };
|
||||
}
|
||||
return { pruned: legacy };
|
||||
}
|
||||
216
extensions/rpiv-pi/extensions/rpiv-core/session-hooks.test.ts
Normal file
216
extensions/rpiv-pi/extensions/rpiv-core/session-hooks.test.ts
Normal file
@@ -0,0 +1,216 @@
|
||||
import { existsSync, mkdtempSync, rmSync } from "node:fs";
import { tmpdir } from "node:os";
import { join } from "node:path";
import { createMockCtx, createMockPi, stubGitExec } from "@juicesharp/rpiv-test-utils";
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";

// NOTE: vitest hoists vi.mock() calls above all imports, so these mocks are in
// place before session-hooks.js (transitively) loads the mocked modules.
vi.mock("./package-checks.js", () => ({ findMissingSiblings: vi.fn(() => []) }));
vi.mock("./agents.js", async (importOriginal) => {
  const actual = await importOriginal<typeof import("./agents.js")>();
  return {
    ...actual,
    // Default sync result is a no-op; individual tests override per-call below.
    syncBundledAgents: vi.fn(() => ({
      added: [],
      updated: [],
      unchanged: [],
      removed: [],
      pendingUpdate: [],
      pendingRemove: [],
      errors: [],
    })),
  };
});

import type { SyncResult } from "./agents.js";
import { syncBundledAgents } from "./agents.js";
import { clearGitContextCache, getGitContext, resetInjectedMarker, takeGitContextIfChanged } from "./git-context.js";
import { clearInjectionState } from "./guidance.js";
import { findMissingSiblings } from "./package-checks.js";
import { registerSessionHooks } from "./session-hooks.js";

// Baseline no-op sync result; tests spread-override individual fields.
const emptySync: SyncResult = {
  added: [],
  updated: [],
  unchanged: [],
  removed: [],
  pendingUpdate: [],
  pendingRemove: [],
  errors: [],
};

let projectDir: string;

beforeEach(() => {
  projectDir = mkdtempSync(join(tmpdir(), "rpiv-session-"));
  // Reset module-level caches so each test starts from a cold session.
  clearInjectionState();
  clearGitContextCache();
  resetInjectedMarker();
});
afterEach(() => {
  rmSync(projectDir, { recursive: true, force: true });
});

describe("registerSessionHooks — event wiring", () => {
  it("registers 5 events", () => {
    const { pi, captured } = createMockPi();
    registerSessionHooks(pi);
    for (const ev of ["session_start", "session_compact", "session_shutdown", "tool_call", "before_agent_start"]) {
      expect(captured.events.has(ev)).toBe(true);
    }
  });
});

describe("session_start hook", () => {
  it("scaffolds thoughts dirs under ctx.cwd", async () => {
    const { pi, captured } = createMockPi({ exec: stubGitExec({}) as never });
    registerSessionHooks(pi);
    const handler = captured.events.get("session_start")?.[0];
    const ctx = createMockCtx({ cwd: projectDir, hasUI: true });
    await handler?.({ reason: "startup" } as never, ctx as never);
    // Must match THOUGHTS_DIRS in session-hooks.ts exactly.
    for (const d of [
      "thoughts/shared/discover",
      "thoughts/shared/research",
      "thoughts/shared/designs",
      "thoughts/shared/plans",
      "thoughts/shared/handoffs",
      "thoughts/shared/reviews",
    ]) {
      expect(existsSync(join(projectDir, d))).toBe(true);
    }
  });
});

describe("session_start hook — notifications", () => {
  it("emits 'Copied N agents' info when added > 0", async () => {
    vi.mocked(syncBundledAgents).mockReturnValueOnce({ ...emptySync, added: ["a.md", "b.md"] });
    vi.mocked(findMissingSiblings).mockReturnValueOnce([]);
    const { pi, captured } = createMockPi({ exec: stubGitExec({}) as never });
    registerSessionHooks(pi);
    const ctx = createMockCtx({ cwd: projectDir, hasUI: true });
    await captured.events.get("session_start")?.[0]({ reason: "startup" } as never, ctx as never);
    expect(ctx.ui.notify).toHaveBeenCalledWith(expect.stringMatching(/Copied 2 rpiv-pi agent/), "info");
  });

  it("emits a single drift line combining pendingUpdate + pendingRemove", async () => {
    vi.mocked(syncBundledAgents).mockReturnValueOnce({
      ...emptySync,
      pendingUpdate: ["a.md"],
      pendingRemove: ["b.md", "c.md"],
    });
    vi.mocked(findMissingSiblings).mockReturnValueOnce([]);
    const { pi, captured } = createMockPi({ exec: stubGitExec({}) as never });
    registerSessionHooks(pi);
    const ctx = createMockCtx({ cwd: projectDir, hasUI: true });
    await captured.events.get("session_start")?.[0]({ reason: "startup" } as never, ctx as never);
    // Both drift categories must land in ONE notify call, not two.
    const driftCall = (ctx.ui.notify as ReturnType<typeof vi.fn>).mock.calls.find(
      (c) => typeof c[0] === "string" && c[0].includes("outdated"),
    );
    expect(driftCall).toBeDefined();
    expect(driftCall?.[0]).toContain("1 outdated");
    expect(driftCall?.[0]).toContain("2 removed from bundle");
    expect(driftCall?.[1]).toBe("info");
  });

  it("warns about missing siblings with npm: prefix stripped", async () => {
    vi.mocked(syncBundledAgents).mockReturnValueOnce(emptySync);
    vi.mocked(findMissingSiblings).mockReturnValueOnce([
      { pkg: "npm:@juicesharp/rpiv-advisor", matches: /./, provides: "x" },
      { pkg: "npm:@juicesharp/rpiv-args", matches: /./, provides: "y" },
    ] as never);
    const { pi, captured } = createMockPi({ exec: stubGitExec({}) as never });
    registerSessionHooks(pi);
    const ctx = createMockCtx({ cwd: projectDir, hasUI: true });
    await captured.events.get("session_start")?.[0]({ reason: "startup" } as never, ctx as never);
    const warnCall = (ctx.ui.notify as ReturnType<typeof vi.fn>).mock.calls.find((c) => c[1] === "warning");
    expect(warnCall).toBeDefined();
    expect(warnCall?.[0]).toContain("rpiv-pi requires 2 sibling");
    expect(warnCall?.[0]).toContain("@juicesharp/rpiv-advisor");
    expect(warnCall?.[0]).toContain("@juicesharp/rpiv-args");
    expect(warnCall?.[0]).not.toContain("npm:");
  });

  it("skips notifications when !hasUI", async () => {
    vi.mocked(syncBundledAgents).mockReturnValueOnce({ ...emptySync, added: ["a.md"] });
    vi.mocked(findMissingSiblings).mockReturnValueOnce([
      { pkg: "npm:@juicesharp/rpiv-todo", matches: /./, provides: "t" },
    ] as never);
    const { pi, captured } = createMockPi({ exec: stubGitExec({}) as never });
    registerSessionHooks(pi);
    const ctx = createMockCtx({ cwd: projectDir, hasUI: false });
    await captured.events.get("session_start")?.[0]({ reason: "startup" } as never, ctx as never);
    expect(ctx.ui.notify).not.toHaveBeenCalled();
  });
});

describe("session_compact hook", () => {
  it("re-injects guidance + git-context after compaction (clears caches first)", async () => {
    const exec = stubGitExec({ branch: "main", commit: "abc", user: "alice" });
    const { pi, captured } = createMockPi({ exec: exec as never });
    registerSessionHooks(pi);
    // Prime the git-context cache first via session_start so compact's clear has work to do.
    await captured.events.get("session_start")?.[0](
      { reason: "startup" } as never,
      createMockCtx({ cwd: projectDir, hasUI: false }) as never,
    );
    const sendBefore = (pi.sendMessage as ReturnType<typeof vi.fn>).mock.calls.length;
    await captured.events.get("session_compact")?.[0]({} as never, createMockCtx({ cwd: projectDir }) as never);
    // After compact, the next pi.sendMessage call (from injectGitContext) should fire because
    // resetInjectedMarker + clearGitContextCache make takeGitContextIfChanged re-emit.
    const sendAfter = (pi.sendMessage as ReturnType<typeof vi.fn>).mock.calls.length;
    expect(sendAfter).toBeGreaterThan(sendBefore);
  });
});

describe("session_shutdown hook", () => {
  it("clears git-context cache and allows takeGitContextIfChanged to re-emit", async () => {
    const exec = stubGitExec({ branch: "main", commit: "abc", user: "alice" });
    const { pi, captured } = createMockPi({ exec: exec as never });
    registerSessionHooks(pi);
    await takeGitContextIfChanged(pi);
    const callsBefore = exec.mock.calls.length;
    await captured.events.get("session_shutdown")?.[0]({} as never, createMockCtx() as never);
    // A fresh take after shutdown must re-run git (cache was cleared).
    const reemit = await takeGitContextIfChanged(pi);
    expect(reemit).not.toBeNull();
    expect(exec.mock.calls.length).toBeGreaterThan(callsBefore);
  });
});

describe("tool_call hook", () => {
  it("clears git-context cache on mutating bash command", async () => {
    const exec = stubGitExec({ branch: "main", commit: "a", user: "u" });
    const { pi, captured } = createMockPi({ exec: exec as never });
    registerSessionHooks(pi);
    const handler = captured.events.get("tool_call")?.[0];
    const ctx = createMockCtx({ cwd: projectDir });
    await getGitContext(pi);
    const before = exec.mock.calls.length;
    // `git commit` is a mutating command → must invalidate the cache.
    await handler?.({ toolName: "bash", input: { command: "git commit -m x" } } as never, ctx as never);
    await getGitContext(pi);
    expect(exec.mock.calls.length).toBeGreaterThan(before);
  });
});

describe("before_agent_start hook", () => {
  it("returns {message} on changed git sig", async () => {
    const { pi, captured } = createMockPi({
      exec: stubGitExec({ branch: "main", commit: "abc", user: "alice" }) as never,
    });
    registerSessionHooks(pi);
    const handler = captured.events.get("before_agent_start")?.[0];
    const ctx = createMockCtx({ cwd: projectDir });
    const r = await handler?.({} as never, ctx as never);
    expect(r).toHaveProperty("message");
  });

  it("returns undefined on dedup (signature unchanged)", async () => {
    const { pi, captured } = createMockPi({
      exec: stubGitExec({ branch: "main", commit: "abc", user: "alice" }) as never,
    });
    registerSessionHooks(pi);
    const handler = captured.events.get("before_agent_start")?.[0];
    const ctx = createMockCtx({ cwd: projectDir });
    await handler?.({} as never, ctx as never);
    // Second call with an unchanged git signature must not re-emit.
    const second = await handler?.({} as never, ctx as never);
    expect(second).toBeUndefined();
  });
});
|
||||
114
extensions/rpiv-pi/extensions/rpiv-core/session-hooks.ts
Normal file
114
extensions/rpiv-pi/extensions/rpiv-core/session-hooks.ts
Normal file
@@ -0,0 +1,114 @@
|
||||
/**
|
||||
* Session lifecycle wiring for rpiv-core.
|
||||
*
|
||||
* Each handler body is a named helper; pi.on(...) lines are pure wiring.
|
||||
* Ordering and invariants preserved verbatim from the pre-refactor index.ts.
|
||||
*/
|
||||
|
||||
import { mkdirSync } from "node:fs";
|
||||
import { join } from "node:path";
|
||||
import { type ExtensionAPI, isToolCallEventType } from "@mariozechner/pi-coding-agent";
|
||||
import { type SyncResult, syncBundledAgents } from "./agents.js";
|
||||
import { FLAG_DEBUG, MSG_TYPE_GIT_CONTEXT } from "./constants.js";
|
||||
import {
|
||||
clearGitContextCache,
|
||||
isGitMutatingCommand,
|
||||
resetInjectedMarker,
|
||||
takeGitContextIfChanged,
|
||||
} from "./git-context.js";
|
||||
import { clearInjectionState, handleToolCallGuidance, injectRootGuidance } from "./guidance.js";
|
||||
import { findMissingSiblings } from "./package-checks.js";
|
||||
|
||||
// Project-relative directories scaffolded on session start for rpiv artifacts.
// The test suite asserts this exact list — keep in sync.
const THOUGHTS_DIRS = [
  "thoughts/shared/discover",
  "thoughts/shared/research",
  "thoughts/shared/designs",
  "thoughts/shared/plans",
  "thoughts/shared/handoffs",
  "thoughts/shared/reviews",
] as const;

// Notification message builders (tests match on these wordings — do not reword casually).
const msgAgentsAdded = (n: number) => `Copied ${n} rpiv-pi agent(s) to .pi/agents/`;
const msgAgentsDrift = (parts: string[]) => `${parts.join(", ")} agent(s). Run /rpiv-update-agents to sync.`;
const msgMissingSiblings = (n: number, list: string) =>
  `rpiv-pi requires ${n} sibling extension(s): ${list}. Run /rpiv-setup to install them.`;

// Minimal notify-only UI surface required by the helpers below (matches ctx.ui structurally).
type UI = { notify: (msg: string, sev: "info" | "warning" | "error") => void };
|
||||
|
||||
export function registerSessionHooks(pi: ExtensionAPI): void {
|
||||
pi.on("session_start", async (_event, ctx) => {
|
||||
resetInjectionState();
|
||||
injectRootGuidance(ctx.cwd, pi);
|
||||
scaffoldThoughtsDirs(ctx.cwd);
|
||||
await injectGitContext(pi, (msg) =>
|
||||
pi.sendMessage({ customType: MSG_TYPE_GIT_CONTEXT, content: msg, display: !!pi.getFlag(FLAG_DEBUG) }),
|
||||
);
|
||||
const agents = syncBundledAgents(ctx.cwd, false);
|
||||
if (ctx.hasUI) {
|
||||
notifyAgentSyncDrift(ctx.ui, agents);
|
||||
warnMissingSiblings(ctx.ui);
|
||||
}
|
||||
});
|
||||
|
||||
pi.on("session_compact", async (_event, ctx) => {
|
||||
resetInjectionState();
|
||||
clearGitContextCache();
|
||||
resetInjectedMarker();
|
||||
injectRootGuidance(ctx.cwd, pi);
|
||||
await injectGitContext(pi, (msg) =>
|
||||
pi.sendMessage({ customType: MSG_TYPE_GIT_CONTEXT, content: msg, display: !!pi.getFlag(FLAG_DEBUG) }),
|
||||
);
|
||||
});
|
||||
|
||||
pi.on("session_shutdown", async () => {
|
||||
resetInjectionState();
|
||||
clearGitContextCache();
|
||||
resetInjectedMarker();
|
||||
});
|
||||
|
||||
pi.on("tool_call", async (event, ctx) => {
|
||||
handleToolCallGuidance(event, ctx, pi);
|
||||
if (isToolCallEventType("bash", event) && isGitMutatingCommand(event.input.command)) {
|
||||
clearGitContextCache();
|
||||
}
|
||||
});
|
||||
|
||||
pi.on("before_agent_start", async () => {
|
||||
const content = await takeGitContextIfChanged(pi);
|
||||
if (!content) return;
|
||||
return { message: { customType: MSG_TYPE_GIT_CONTEXT, content, display: !!pi.getFlag(FLAG_DEBUG) } };
|
||||
});
|
||||
}
|
||||
|
||||
// Thin seam over clearInjectionState so every lifecycle hook resets guidance
// injection the same way, and future reset steps have one place to be added.
function resetInjectionState(): void {
  clearInjectionState();
}
|
||||
|
||||
function scaffoldThoughtsDirs(cwd: string): void {
|
||||
for (const dir of THOUGHTS_DIRS) {
|
||||
mkdirSync(join(cwd, dir), { recursive: true });
|
||||
}
|
||||
}
|
||||
|
||||
async function injectGitContext(pi: ExtensionAPI, send: (msg: string) => void): Promise<void> {
|
||||
const msg = await takeGitContextIfChanged(pi);
|
||||
if (msg) send(msg);
|
||||
}
|
||||
|
||||
function notifyAgentSyncDrift(ui: UI, result: SyncResult): void {
|
||||
if (result.added.length > 0) {
|
||||
ui.notify(msgAgentsAdded(result.added.length), "info");
|
||||
}
|
||||
const parts: string[] = [];
|
||||
if (result.pendingUpdate.length > 0) parts.push(`${result.pendingUpdate.length} outdated`);
|
||||
if (result.pendingRemove.length > 0) parts.push(`${result.pendingRemove.length} removed from bundle`);
|
||||
if (parts.length > 0) {
|
||||
ui.notify(msgAgentsDrift(parts), "info");
|
||||
}
|
||||
}
|
||||
|
||||
function warnMissingSiblings(ui: UI): void {
|
||||
const missing = findMissingSiblings();
|
||||
if (missing.length === 0) return;
|
||||
ui.notify(msgMissingSiblings(missing.length, missing.map((m) => m.pkg.replace(/^npm:/, "")).join(", ")), "warning");
|
||||
}
|
||||
200
extensions/rpiv-pi/extensions/rpiv-core/setup-command.test.ts
Normal file
200
extensions/rpiv-pi/extensions/rpiv-core/setup-command.test.ts
Normal file
@@ -0,0 +1,200 @@
|
||||
import { createMockCtx, createMockPi } from "@juicesharp/rpiv-test-utils";
import { beforeEach, describe, expect, it, vi } from "vitest";

// vi.mock() calls are hoisted above the imports below, so setup-command.js
// receives the mocked modules when it loads.
vi.mock("./pi-installer.js", () => ({ spawnPiInstall: vi.fn() }));
vi.mock("./package-checks.js", () => ({ findMissingSiblings: vi.fn() }));
vi.mock("./prune-legacy-siblings.js", () => ({
  findLegacySiblings: vi.fn(),
  pruneLegacySiblings: vi.fn(),
}));

import { findMissingSiblings } from "./package-checks.js";
import { spawnPiInstall } from "./pi-installer.js";
import { findLegacySiblings, pruneLegacySiblings } from "./prune-legacy-siblings.js";
import { registerSetupCommand } from "./setup-command.js";

beforeEach(() => {
  // Reset all mocks and install benign defaults; tests override per-case.
  vi.mocked(spawnPiInstall).mockReset();
  vi.mocked(findMissingSiblings).mockReset();
  vi.mocked(findLegacySiblings).mockReset();
  vi.mocked(findLegacySiblings).mockReturnValue([]);
  vi.mocked(pruneLegacySiblings).mockReset();
  vi.mocked(pruneLegacySiblings).mockReturnValue({ pruned: [] });
});

describe("/rpiv-setup — command shape", () => {
  it("registers under 'rpiv-setup'", () => {
    const { pi, captured } = createMockPi();
    registerSetupCommand(pi);
    expect(captured.commands.has("rpiv-setup")).toBe(true);
  });
});

describe("/rpiv-setup — !hasUI", () => {
  it("notifies error and exits without inspecting siblings or settings", async () => {
    const { pi, captured } = createMockPi();
    registerSetupCommand(pi);
    const ctx = createMockCtx({ hasUI: false });
    await captured.commands.get("rpiv-setup")?.handler("", ctx as never);
    expect(ctx.ui.notify).toHaveBeenCalledWith(expect.stringContaining("interactive"), "error");
    // Early-exit contract: nothing else may have been touched.
    expect(findMissingSiblings).not.toHaveBeenCalled();
    expect(findLegacySiblings).not.toHaveBeenCalled();
    expect(pruneLegacySiblings).not.toHaveBeenCalled();
    expect(spawnPiInstall).not.toHaveBeenCalled();
  });
});

describe("/rpiv-setup — nothing to do", () => {
  it("notifies all-installed and skips confirmation when no missing siblings AND no legacy entries", async () => {
    vi.mocked(findMissingSiblings).mockReturnValue([]);
    vi.mocked(findLegacySiblings).mockReturnValue([]);
    const { pi, captured } = createMockPi();
    registerSetupCommand(pi);
    const ctx = createMockCtx({ hasUI: true });
    await captured.commands.get("rpiv-setup")?.handler("", ctx as never);
    expect(ctx.ui.notify).toHaveBeenCalledWith(expect.stringContaining("already installed"), "info");
    expect(ctx.ui.confirm).not.toHaveBeenCalled();
    expect(pruneLegacySiblings).not.toHaveBeenCalled();
  });
});

describe("/rpiv-setup — pre-confirm read-only contract", () => {
  it("does NOT call pruneLegacySiblings before user confirmation", async () => {
    vi.mocked(findMissingSiblings).mockReturnValue([]);
    vi.mocked(findLegacySiblings).mockReturnValue(["npm:pi-subagents"]);
    const { pi, captured } = createMockPi();
    registerSetupCommand(pi);
    const ctx = createMockCtx({ hasUI: true });
    // Assert from INSIDE confirm() that the prune has not yet run.
    (ctx.ui.confirm as ReturnType<typeof vi.fn>).mockImplementation(async () => {
      expect(pruneLegacySiblings).not.toHaveBeenCalled();
      return false;
    });
    await captured.commands.get("rpiv-setup")?.handler("", ctx as never);
    expect(ctx.ui.confirm).toHaveBeenCalledTimes(1);
  });

  it("includes legacy entries in the confirmation body so the user sees what will be removed", async () => {
    vi.mocked(findMissingSiblings).mockReturnValue([]);
    vi.mocked(findLegacySiblings).mockReturnValue(["npm:pi-subagents"]);
    const { pi, captured } = createMockPi();
    registerSetupCommand(pi);
    const ctx = createMockCtx({ hasUI: true });
    await captured.commands.get("rpiv-setup")?.handler("", ctx as never);
    // confirm(title, body) — body is the second argument.
    const confirmCall = (ctx.ui.confirm as ReturnType<typeof vi.fn>).mock.calls[0]!;
    expect(confirmCall[1]).toContain("Remove from");
    expect(confirmCall[1]).toContain("npm:pi-subagents");
  });

  it("includes pending installs in the confirmation body", async () => {
    vi.mocked(findMissingSiblings).mockReturnValue([{ pkg: "npm:@x/a", matches: /./, provides: "A" }]);
    vi.mocked(findLegacySiblings).mockReturnValue([]);
    const { pi, captured } = createMockPi();
    registerSetupCommand(pi);
    const ctx = createMockCtx({ hasUI: true });
    await captured.commands.get("rpiv-setup")?.handler("", ctx as never);
    const confirmCall = (ctx.ui.confirm as ReturnType<typeof vi.fn>).mock.calls[0]!;
    expect(confirmCall[1]).toContain("Install via `pi install`:");
    expect(confirmCall[1]).toContain("npm:@x/a");
  });
});

describe("/rpiv-setup — user cancels", () => {
  it("notifies cancelled and skips both prune and install", async () => {
    vi.mocked(findMissingSiblings).mockReturnValue([{ pkg: "npm:@x/y", matches: /./, provides: "p" }]);
    vi.mocked(findLegacySiblings).mockReturnValue(["npm:pi-subagents"]);
    const { pi, captured } = createMockPi();
    registerSetupCommand(pi);
    const ctx = createMockCtx({ hasUI: true });
    (ctx.ui.confirm as ReturnType<typeof vi.fn>).mockResolvedValueOnce(false);
    await captured.commands.get("rpiv-setup")?.handler("", ctx as never);
    expect(ctx.ui.notify).toHaveBeenCalledWith(expect.stringContaining("cancelled"), "info");
    expect(pruneLegacySiblings).not.toHaveBeenCalled();
    expect(spawnPiInstall).not.toHaveBeenCalled();
  });
});

describe("/rpiv-setup — post-confirm prune execution", () => {
  it("runs pruneLegacySiblings after confirm and emits notify when entries removed", async () => {
    vi.mocked(findMissingSiblings).mockReturnValue([]);
    vi.mocked(findLegacySiblings).mockReturnValue(["npm:pi-subagents"]);
    vi.mocked(pruneLegacySiblings).mockReturnValue({ pruned: ["npm:pi-subagents"] });
    const { pi, captured } = createMockPi();
    registerSetupCommand(pi);
    const ctx = createMockCtx({ hasUI: true });
    await captured.commands.get("rpiv-setup")?.handler("", ctx as never);
    expect(pruneLegacySiblings).toHaveBeenCalledTimes(1);
    const pruneNotify = (ctx.ui.notify as ReturnType<typeof vi.fn>).mock.calls.find(
      (c) => typeof c[0] === "string" && c[0].startsWith("Removed legacy subagent library"),
    );
    expect(pruneNotify).toBeDefined();
    expect(pruneNotify?.[0]).toContain("npm:pi-subagents");
  });

  it("skips pruneLegacySiblings when no legacy entries were detected pre-confirm", async () => {
    vi.mocked(findMissingSiblings).mockReturnValue([{ pkg: "npm:@x/y", matches: /./, provides: "p" }]);
    vi.mocked(findLegacySiblings).mockReturnValue([]);
    const { pi, captured } = createMockPi();
    registerSetupCommand(pi);
    const ctx = createMockCtx({ hasUI: true });
    await captured.commands.get("rpiv-setup")?.handler("", ctx as never);
    expect(pruneLegacySiblings).not.toHaveBeenCalled();
  });
});

describe("/rpiv-setup — mixed success/failure report", () => {
  it("reports succeeded + failed with 300-char stderr snippets", async () => {
    vi.mocked(findMissingSiblings).mockReturnValue([
      { pkg: "npm:@x/a", matches: /./, provides: "A" },
      { pkg: "npm:@x/b", matches: /./, provides: "B" },
    ]);
    vi.mocked(spawnPiInstall)
      .mockResolvedValueOnce({ code: 0, stdout: "ok", stderr: "" })
      .mockResolvedValueOnce({ code: 1, stdout: "", stderr: "x".repeat(500) });
    const { pi, captured } = createMockPi();
    registerSetupCommand(pi);
    const ctx = createMockCtx({ hasUI: true });
    await captured.commands.get("rpiv-setup")?.handler("", ctx as never);
    const reportCall = (ctx.ui.notify as ReturnType<typeof vi.fn>).mock.calls.at(-1);
    const report: string = reportCall![0];
    expect(report).toContain("npm:@x/a");
    expect(report).toContain("npm:@x/b");
    // 500-char stderr must have been truncated to ≤ STDERR_SNIPPET_CHARS (300).
    expect((report.match(/x+/g) ?? []).every((m) => m.length <= 300)).toBe(true);
    expect(reportCall![1]).toBe("warning");
  });

  it("uses stdout fallback when stderr empty", async () => {
    vi.mocked(findMissingSiblings).mockReturnValue([{ pkg: "npm:@x/a", matches: /./, provides: "A" }]);
    vi.mocked(spawnPiInstall).mockResolvedValueOnce({ code: 1, stdout: "stdout-error", stderr: "" });
    const { pi, captured } = createMockPi();
    registerSetupCommand(pi);
    const ctx = createMockCtx({ hasUI: true });
    await captured.commands.get("rpiv-setup")?.handler("", ctx as never);
    const report = (ctx.ui.notify as ReturnType<typeof vi.fn>).mock.calls.at(-1)![0];
    expect(report).toContain("stdout-error");
  });

  it("all-failed report omits Restart line", async () => {
    vi.mocked(findMissingSiblings).mockReturnValue([{ pkg: "npm:@x/a", matches: /./, provides: "A" }]);
    vi.mocked(spawnPiInstall).mockResolvedValueOnce({ code: 1, stdout: "", stderr: "err" });
    const { pi, captured } = createMockPi();
    registerSetupCommand(pi);
    const ctx = createMockCtx({ hasUI: true });
    await captured.commands.get("rpiv-setup")?.handler("", ctx as never);
    const report = (ctx.ui.notify as ReturnType<typeof vi.fn>).mock.calls.at(-1)![0];
    expect(report).not.toContain("Restart");
  });
});

describe("/rpiv-setup — prune-only flow (no missing siblings)", () => {
  it("skips installMissing when only legacy entries exist", async () => {
    vi.mocked(findMissingSiblings).mockReturnValue([]);
    vi.mocked(findLegacySiblings).mockReturnValue(["npm:pi-subagents"]);
    vi.mocked(pruneLegacySiblings).mockReturnValue({ pruned: ["npm:pi-subagents"] });
    const { pi, captured } = createMockPi();
    registerSetupCommand(pi);
    const ctx = createMockCtx({ hasUI: true });
    await captured.commands.get("rpiv-setup")?.handler("", ctx as never);
    expect(pruneLegacySiblings).toHaveBeenCalledTimes(1);
    expect(spawnPiInstall).not.toHaveBeenCalled();
  });
});
|
||||
128
extensions/rpiv-pi/extensions/rpiv-core/setup-command.ts
Normal file
128
extensions/rpiv-pi/extensions/rpiv-core/setup-command.ts
Normal file
@@ -0,0 +1,128 @@
|
||||
/**
|
||||
* /rpiv-setup — installs any SIBLINGS not present in ~/.pi/agent/settings.json
|
||||
* and prunes deprecated entries (e.g. the unscoped `npm:pi-subagents` from
|
||||
* the rpiv-pi 0.12.x → 0.14.0 line). Both mutations are previewed in the
|
||||
* confirmation dialog and only executed after the user agrees.
|
||||
*
|
||||
* Serial `pi install <pkg>` loop via spawnPiInstall (Windows-safe).
|
||||
* Reports succeeded/failed split; prompts the user to restart Pi on success.
|
||||
*/
|
||||
|
||||
import type { ExtensionAPI } from "@mariozechner/pi-coding-agent";
|
||||
import { findMissingSiblings } from "./package-checks.js";
|
||||
import { spawnPiInstall } from "./pi-installer.js";
|
||||
import { findLegacySiblings, pruneLegacySiblings } from "./prune-legacy-siblings.js";
|
||||
import type { SiblingPlugin } from "./siblings.js";
|
||||
|
||||
const INSTALL_TIMEOUT_MS = 120_000;
|
||||
const STDERR_SNIPPET_CHARS = 300;
|
||||
|
||||
const MSG_INTERACTIVE_ONLY = "/rpiv-setup requires interactive mode";
|
||||
const MSG_NOTHING_TO_DO = "All rpiv-pi sibling dependencies already installed.";
|
||||
const MSG_CANCELLED = "/rpiv-setup cancelled";
|
||||
const MSG_CONFIRM_TITLE = "Apply rpiv-pi setup changes?";
|
||||
const MSG_RESTART = "Restart your Pi session to load the newly-installed extensions.";
|
||||
|
||||
const msgInstalling = (pkg: string) => `Installing ${pkg}…`;
|
||||
const msgInstalledLine = (pkgs: string[]) => `✓ Installed: ${pkgs.join(", ")}`;
|
||||
const msgFailedHeader = () => `✗ Failed:`;
|
||||
const msgFailedLine = (pkg: string, err: string) => ` ${pkg}: ${err}`;
|
||||
const msgLegacyPruned = (entries: string[]) =>
|
||||
`Removed legacy subagent library from settings.json: ${entries.join(", ")}. Run \`pi uninstall\` to free disk space, then restart Pi.`;
|
||||
|
||||
/**
 * Minimal slice of the command context's UI surface needed by this module.
 * Keeps installMissing() decoupled from the full ExtensionAPI context type.
 */
type UI = {
  // Fire-and-forget notification with a severity channel.
  notify: (msg: string, sev: "info" | "warning" | "error") => void;
  // Modal yes/no dialog; resolves true when the user accepts.
  confirm: (title: string, body: string) => Promise<boolean>;
};
|
||||
|
||||
function buildConfirmBody(missing: SiblingPlugin[], legacyEntries: string[]): string {
|
||||
const lines: string[] = ["rpiv-pi will apply the following changes:", ""];
|
||||
if (missing.length > 0) {
|
||||
lines.push("Install via `pi install`:");
|
||||
for (const m of missing) lines.push(` • ${m.pkg} (required — provides ${m.provides})`);
|
||||
lines.push("");
|
||||
}
|
||||
if (legacyEntries.length > 0) {
|
||||
lines.push("Remove from `~/.pi/agent/settings.json` (deprecated):");
|
||||
for (const entry of legacyEntries) lines.push(` • ${entry}`);
|
||||
lines.push("");
|
||||
}
|
||||
lines.push("Your `~/.pi/agent/settings.json` will be updated. Proceed?");
|
||||
return lines.join("\n");
|
||||
}
|
||||
|
||||
export function registerSetupCommand(pi: ExtensionAPI): void {
|
||||
pi.registerCommand("rpiv-setup", {
|
||||
description: "Install rpiv-pi's sibling extension plugins",
|
||||
handler: async (_args, ctx) => {
|
||||
if (!ctx.hasUI) {
|
||||
ctx.ui.notify(MSG_INTERACTIVE_ONLY, "error");
|
||||
return;
|
||||
}
|
||||
|
||||
const missing = findMissingSiblings();
|
||||
const legacyEntries = findLegacySiblings();
|
||||
if (missing.length === 0 && legacyEntries.length === 0) {
|
||||
ctx.ui.notify(MSG_NOTHING_TO_DO, "info");
|
||||
return;
|
||||
}
|
||||
|
||||
const confirmed = await ctx.ui.confirm(MSG_CONFIRM_TITLE, buildConfirmBody(missing, legacyEntries));
|
||||
if (!confirmed) {
|
||||
ctx.ui.notify(MSG_CANCELLED, "info");
|
||||
return;
|
||||
}
|
||||
|
||||
if (legacyEntries.length > 0) {
|
||||
const prune = pruneLegacySiblings();
|
||||
if (prune.pruned.length > 0) {
|
||||
ctx.ui.notify(msgLegacyPruned(prune.pruned), "info");
|
||||
}
|
||||
}
|
||||
|
||||
if (missing.length === 0) return;
|
||||
|
||||
const { succeeded, failed } = await installMissing(ctx.ui, missing);
|
||||
ctx.ui.notify(buildReport(succeeded, failed), failed.length > 0 ? "warning" : "info");
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
async function installMissing(
|
||||
ui: UI,
|
||||
missing: SiblingPlugin[],
|
||||
): Promise<{ succeeded: string[]; failed: Array<{ pkg: string; error: string }> }> {
|
||||
const succeeded: string[] = [];
|
||||
const failed: Array<{ pkg: string; error: string }> = [];
|
||||
for (const { pkg } of missing) {
|
||||
ui.notify(msgInstalling(pkg), "info");
|
||||
try {
|
||||
const result = await spawnPiInstall(pkg, INSTALL_TIMEOUT_MS);
|
||||
if (result.code === 0) {
|
||||
succeeded.push(pkg);
|
||||
} else {
|
||||
failed.push({
|
||||
pkg,
|
||||
error: (result.stderr || result.stdout || `exit ${result.code}`).trim().slice(0, STDERR_SNIPPET_CHARS),
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
failed.push({ pkg, error: err instanceof Error ? err.message : String(err) });
|
||||
}
|
||||
}
|
||||
return { succeeded, failed };
|
||||
}
|
||||
|
||||
function buildReport(succeeded: string[], failed: Array<{ pkg: string; error: string }>): string {
|
||||
const lines: string[] = [];
|
||||
if (succeeded.length > 0) lines.push(msgInstalledLine(succeeded));
|
||||
if (failed.length > 0) {
|
||||
lines.push(msgFailedHeader());
|
||||
for (const { pkg, error } of failed) lines.push(msgFailedLine(pkg, error));
|
||||
}
|
||||
if (succeeded.length > 0) {
|
||||
lines.push("");
|
||||
lines.push(MSG_RESTART);
|
||||
}
|
||||
return lines.join("\n");
|
||||
}
|
||||
64
extensions/rpiv-pi/extensions/rpiv-core/siblings.test.ts
Normal file
64
extensions/rpiv-pi/extensions/rpiv-core/siblings.test.ts
Normal file
@@ -0,0 +1,64 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { LEGACY_SIBLINGS, SIBLINGS } from "./siblings.js";
|
||||
|
||||
// Contract tests for the declarative SIBLINGS registry: shape, ordering,
// and that every presence-detection regex behaves against realistic
// settings.json content.
describe("SIBLINGS registry", () => {
  it("contains 8 entries (pi-subagents at SIBLINGS[0] — tintinweb fork is the dispatch runtime)", () => {
    expect(SIBLINGS).toHaveLength(8);
  });

  it("lists @tintinweb/pi-subagents at SIBLINGS[0]", () => {
    expect(SIBLINGS[0]?.pkg).toBe("npm:@tintinweb/pi-subagents");
  });

  // NOTE(review): this title dates the supersession to 0.14.0, while
  // siblings.ts's LEGACY_SIBLINGS comment/reason string says rpiv-pi 1.0.0 —
  // confirm which release is correct and align the two.
  it("does NOT list nicobailon's unscoped pi-subagents (superseded in 0.14.0)", () => {
    expect(SIBLINGS.find((s) => s.pkg === "npm:pi-subagents")).toBeUndefined();
  });

  // Generated per-entry checks: each regex must recognise its own package
  // spec (with the npm: prefix stripped) in both original and upper case,
  // since `matches` is documented as case-insensitive.
  for (const s of SIBLINGS) {
    it(`${s.pkg} — self-match against settings.json line shape`, () => {
      expect(s.matches.test(s.pkg.replace(/^npm:/, ""))).toBe(true);
    });
    it(`${s.pkg} — case-insensitive match`, () => {
      expect(s.matches.test(s.pkg.toUpperCase().replace(/^NPM:/, ""))).toBe(true);
    });
  }

  // Word-boundary guard: a registry entry must not claim a longer,
  // differently-named package is installed.
  it("rpiv-args does NOT match rpiv-args-extended (word boundary)", () => {
    const argsEntry = SIBLINGS.find((s) => s.pkg.endsWith("/rpiv-args"));
    expect(argsEntry).toBeDefined();
    expect(argsEntry?.matches.test("@juicesharp/rpiv-args-extended")).toBe(false);
  });

  it("rpiv-i18n does NOT match rpiv-i18n-utils (word boundary)", () => {
    const i18nEntry = SIBLINGS.find((s) => s.pkg.endsWith("/rpiv-i18n"));
    expect(i18nEntry).toBeDefined();
    expect(i18nEntry?.matches.test("@juicesharp/rpiv-i18n-utils")).toBe(false);
    expect(i18nEntry?.matches.test("@juicesharp/rpiv-i18n")).toBe(true);
  });

  it("every entry has non-empty pkg + provides", () => {
    for (const s of SIBLINGS) {
      expect(s.pkg.length).toBeGreaterThan(0);
      expect(s.provides.length).toBeGreaterThan(0);
    }
  });
});
|
||||
|
||||
// Guards on the legacy-prune regex: it must catch the deprecated unscoped
// package in every spelling settings.json may use, while never touching the
// active scoped replacement or similarly-named packages.
describe("LEGACY_SIBLINGS registry", () => {
  // NOTE(review): this title dates the supersession to 0.14.0 while
  // siblings.ts's LEGACY_SIBLINGS says rpiv-pi 1.0.0 — confirm and align.
  it("lists nicobailon's pi-subagents for pruning (superseded by @tintinweb/pi-subagents in 0.14.0)", () => {
    const entry = LEGACY_SIBLINGS.find((l) => l.label === "pi-subagents");
    expect(entry).toBeDefined();
    // Both the npm:-prefixed install spec and the bare name must match.
    expect(entry?.matches.test("npm:pi-subagents")).toBe(true);
    expect(entry?.matches.test("pi-subagents")).toBe(true);
  });

  it("pi-subagents legacy match does NOT catch @tintinweb/pi-subagents (active sibling)", () => {
    const piSubagents = LEGACY_SIBLINGS.find((l) => l.label === "pi-subagents");
    // The leading `(^|[^\w/-])` guard rejects the scoped spelling.
    expect(piSubagents?.matches.test("@tintinweb/pi-subagents")).toBe(false);
  });

  it("pi-subagents legacy match does NOT catch pi-subagents-legacy (word boundary)", () => {
    const piSubagents = LEGACY_SIBLINGS.find((l) => l.label === "pi-subagents");
    // The trailing `(?![-\w])` guard rejects longer package names.
    expect(piSubagents?.matches.test("pi-subagents-legacy")).toBe(false);
  });
});
|
||||
90
extensions/rpiv-pi/extensions/rpiv-core/siblings.ts
Normal file
90
extensions/rpiv-pi/extensions/rpiv-core/siblings.ts
Normal file
@@ -0,0 +1,90 @@
|
||||
/**
|
||||
* Declarative registry of rpiv-pi's sibling Pi plugins.
|
||||
*
|
||||
* Single source of truth for: presence detection (package-checks.ts),
|
||||
* session_start "missing plugins" warning (session-hooks.ts), and
|
||||
* /rpiv-setup installer (setup-command.ts). Add a sibling here and every
|
||||
* consumer picks it up automatically.
|
||||
*
|
||||
* Detection is filesystem-based via a regex over ~/.pi/agent/settings.json
|
||||
* — no runtime import of sibling packages (keeps rpiv-core pure-orchestrator).
|
||||
*/
|
||||
|
||||
export interface SiblingPlugin {
|
||||
/** Install spec passed to `pi install`. Prefixed with `npm:` for Pi's installer. */
|
||||
readonly pkg: string;
|
||||
/** Case-insensitive regex that matches the package in ~/.pi/agent/settings.json. */
|
||||
readonly matches: RegExp;
|
||||
/** What the sibling provides — shown in /rpiv-setup confirmation and reports. */
|
||||
readonly provides: string;
|
||||
}
|
||||
|
||||
export const SIBLINGS: readonly SiblingPlugin[] = [
|
||||
{
|
||||
pkg: "npm:@tintinweb/pi-subagents",
|
||||
matches: /@tintinweb\/pi-subagents/i,
|
||||
provides: "Agent / get_subagent_result / steer_subagent tools",
|
||||
},
|
||||
{
|
||||
pkg: "npm:@juicesharp/rpiv-ask-user-question",
|
||||
matches: /rpiv-ask-user-question/i,
|
||||
provides: "ask_user_question tool",
|
||||
},
|
||||
{
|
||||
pkg: "npm:@juicesharp/rpiv-todo",
|
||||
matches: /rpiv-todo/i,
|
||||
provides: "todo tool + /todos command + overlay widget",
|
||||
},
|
||||
{
|
||||
pkg: "npm:@juicesharp/rpiv-advisor",
|
||||
matches: /rpiv-advisor/i,
|
||||
provides: "advisor tool + /advisor command",
|
||||
},
|
||||
{
|
||||
pkg: "npm:@juicesharp/rpiv-btw",
|
||||
matches: /rpiv-btw/i,
|
||||
provides: "/btw side-question command",
|
||||
},
|
||||
{
|
||||
pkg: "npm:@juicesharp/rpiv-i18n",
|
||||
matches: /rpiv-i18n(?![-\w])/i,
|
||||
provides: "i18n SDK for Pi extensions — /languages command + --locale flag + registerStrings/scope/tr API",
|
||||
},
|
||||
{
|
||||
pkg: "npm:@juicesharp/rpiv-web-tools",
|
||||
matches: /rpiv-web-tools/i,
|
||||
provides: "web_search + web_fetch tools + /web-search-config",
|
||||
},
|
||||
{
|
||||
pkg: "npm:@juicesharp/rpiv-args",
|
||||
matches: /rpiv-args(?![-\w])/i,
|
||||
provides: "skill-argument resolver — substitutes $N/$ARGUMENTS in skill bodies",
|
||||
},
|
||||
];
|
||||
|
||||
/**
 * Deprecated sibling packages that `/rpiv-setup` actively prunes from
 * ~/.pi/agent/settings.json (so upgraders don't end up with superseded
 * libraries loaded alongside their replacements). Single source of truth
 * for `prune-legacy-siblings.ts`.
 */
export interface LegacyPackage {
  /** Human-readable label used in the prune notify message. */
  readonly label: string;
  /** Case-insensitive regex matched against settings.json `packages[]` entries. */
  readonly matches: RegExp;
  /** Short reason — useful when debugging; not user-facing. */
  readonly reason: string;
}
|
||||
|
||||
export const LEGACY_SIBLINGS: readonly LegacyPackage[] = [
  {
    // nicobailon's pi-subagents fork was the SIBLINGS[0] package between
    // rpiv-pi 0.12.0 and 0.13.x. Reverted to @tintinweb/pi-subagents in
    // rpiv-pi 1.0.0 once tintinweb resumed active maintenance and shipped
    // 0.6.x against pi-coding-agent ^0.70.5.
    // NOTE(review): setup-command.ts's header and siblings.test.ts both date
    // this supersession to 0.14.0, not 1.0.0 — confirm which release is
    // correct and align the comments/strings across the three files.
    //
    // Regex shape: the leading `(^|[^\w/-])` guard prevents matching the
    // scoped (active) @tintinweb/pi-subagents entry; the trailing `(?![-\w])`
    // guard prevents matching longer names such as pi-subagents-legacy.
    label: "pi-subagents",
    matches: /(^|[^\w/-])pi-subagents(?![-\w])/i,
    reason: "superseded by @tintinweb/pi-subagents (resumed maintenance) in rpiv-pi 1.0.0",
  },
];
|
||||
@@ -0,0 +1,74 @@
|
||||
import { createMockCtx, createMockPi } from "@juicesharp/rpiv-test-utils";
|
||||
import { beforeEach, describe, expect, it, vi } from "vitest";
|
||||
|
||||
// vi.mock calls are hoisted above the imports below, so "./agents.js" is
// already mocked by the time update-agents-command.js (which imports it)
// is loaded.
vi.mock("./agents.js", () => ({
  syncBundledAgents: vi.fn(),
}));

import { syncBundledAgents } from "./agents.js";
import { registerUpdateAgentsCommand } from "./update-agents-command.js";

beforeEach(() => {
  // Fresh mock state per test: clears queued return values and call history.
  vi.mocked(syncBundledAgents).mockReset();
});

// Builds a sync result with every bucket empty; individual tests override
// only the buckets they care about.
const empty = (overrides: Partial<ReturnType<typeof syncBundledAgents>> = {}) => ({
  added: [],
  updated: [],
  unchanged: [],
  removed: [],
  pendingUpdate: [],
  pendingRemove: [],
  errors: [],
  ...overrides,
});
|
||||
|
||||
// Behavior tests for /rpiv-update-agents: registration, report wording,
// severity selection, and silence in non-interactive mode.
describe("/rpiv-update-agents", () => {
  it("registers the command", () => {
    const { pi, captured } = createMockPi();
    registerUpdateAgentsCommand(pi);
    expect(captured.commands.has("rpiv-update-agents")).toBe(true);
  });

  it("UP_TO_DATE when no changes, no errors", async () => {
    vi.mocked(syncBundledAgents).mockReturnValue(empty());
    const { pi, captured } = createMockPi();
    registerUpdateAgentsCommand(pi);
    const ctx = createMockCtx({ hasUI: true });
    await captured.commands.get("rpiv-update-agents")?.handler("", ctx as never);
    expect(ctx.ui.notify).toHaveBeenCalledWith(expect.stringContaining("up-to-date"), "info");
  });

  it("synced report when added+updated+removed > 0", async () => {
    vi.mocked(syncBundledAgents).mockReturnValue(empty({ added: ["a.md"], updated: ["b.md"], removed: ["c.md"] }));
    const { pi, captured } = createMockPi();
    registerUpdateAgentsCommand(pi);
    const ctx = createMockCtx({ hasUI: true });
    await captured.commands.get("rpiv-update-agents")?.handler("", ctx as never);
    // First notify call's message is the sync report.
    const report = (ctx.ui.notify as ReturnType<typeof vi.fn>).mock.calls[0][0];
    expect(report).toContain("1 added");
    expect(report).toContain("1 updated");
    expect(report).toContain("1 removed");
  });

  it("errors-only report uses 'warning' severity", async () => {
    vi.mocked(syncBundledAgents).mockReturnValue(
      empty({ errors: [{ op: "copy", message: "EACCES", file: "a.md" }] }),
    );
    const { pi, captured } = createMockPi();
    registerUpdateAgentsCommand(pi);
    const ctx = createMockCtx({ hasUI: true });
    await captured.commands.get("rpiv-update-agents")?.handler("", ctx as never);
    // Second positional argument of notify is the severity channel.
    const [, severity] = (ctx.ui.notify as ReturnType<typeof vi.fn>).mock.calls[0];
    expect(severity).toBe("warning");
  });

  it("stays silent when !hasUI", async () => {
    vi.mocked(syncBundledAgents).mockReturnValue(empty({ added: ["x.md"] }));
    const { pi, captured } = createMockPi();
    registerUpdateAgentsCommand(pi);
    const ctx = createMockCtx({ hasUI: false });
    await captured.commands.get("rpiv-update-agents")?.handler("", ctx as never);
    // The sync still ran (apply mode), but no notification is emitted.
    expect(ctx.ui.notify).not.toHaveBeenCalled();
  });
});
|
||||
@@ -0,0 +1,44 @@
|
||||
/**
|
||||
* /rpiv-update-agents — apply-mode sync of bundled agents into <cwd>/.pi/agents/.
|
||||
* Adds new, overwrites changed managed files, removes stale managed files.
|
||||
*/
|
||||
|
||||
import type { ExtensionAPI } from "@mariozechner/pi-coding-agent";
|
||||
import { type SyncResult, syncBundledAgents } from "./agents.js";
|
||||
|
||||
const MSG_UP_TO_DATE = "All agents already up-to-date.";
|
||||
const MSG_NO_CHANGES = "No changes needed.";
|
||||
|
||||
const msgSynced = (parts: string[]) => `Synced agents: ${parts.join(", ")}.`;
|
||||
const msgSyncedWithErrors = (summary: string, errors: string[]) =>
|
||||
`${summary} ${errors.length} error(s): ${errors.join("; ")}`;
|
||||
|
||||
export function registerUpdateAgentsCommand(pi: ExtensionAPI): void {
|
||||
pi.registerCommand("rpiv-update-agents", {
|
||||
description: "Sync rpiv-pi bundled agents into .pi/agents/: add new, update changed, remove stale",
|
||||
handler: async (_args, ctx) => {
|
||||
const result = syncBundledAgents(ctx.cwd, true);
|
||||
if (!ctx.hasUI) return;
|
||||
ctx.ui.notify(formatSyncReport(result), result.errors.length > 0 ? "warning" : "info");
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
function formatSyncReport(result: SyncResult): string {
|
||||
const totalSynced = result.added.length + result.updated.length + result.removed.length;
|
||||
if (totalSynced === 0 && result.errors.length === 0) return MSG_UP_TO_DATE;
|
||||
|
||||
const parts: string[] = [];
|
||||
if (result.added.length > 0) parts.push(`${result.added.length} added`);
|
||||
if (result.updated.length > 0) parts.push(`${result.updated.length} updated`);
|
||||
if (result.removed.length > 0) parts.push(`${result.removed.length} removed`);
|
||||
|
||||
const summary = parts.length > 0 ? msgSynced(parts) : MSG_NO_CHANGES;
|
||||
if (result.errors.length > 0) {
|
||||
return msgSyncedWithErrors(
|
||||
summary,
|
||||
result.errors.map((e) => e.message),
|
||||
);
|
||||
}
|
||||
return summary;
|
||||
}
|
||||
Reference in New Issue
Block a user