Add plannotator extension v0.19.10

This commit is contained in:
2026-05-07 11:38:14 +10:00
parent e914bc59c9
commit f37e4565ff
91 changed files with 35103 additions and 0 deletions

View File

@@ -0,0 +1,515 @@
/**
* Agent Jobs — Pi (node:http) server handler.
*
* Manages background agent processes (spawn, monitor, kill) and exposes
* HTTP routes + SSE broadcasting for job status updates.
*
* Mirrors packages/server/agent-jobs.ts but uses node:http primitives.
*/
import type { IncomingMessage, ServerResponse } from "node:http";
import { spawn, execFileSync, type ChildProcess } from "node:child_process";
import {
type AgentJobInfo,
type AgentJobEvent,
type AgentCapability,
type AgentCapabilities,
isTerminalStatus,
jobSource,
serializeAgentSSEEvent,
AGENT_HEARTBEAT_COMMENT,
AGENT_HEARTBEAT_INTERVAL_MS,
} from "../generated/agent-jobs.js";
import { formatClaudeLogEvent } from "../generated/claude-review.js";
import { json, parseBody } from "./helpers.js";
// ---------------------------------------------------------------------------
// Route prefixes
// ---------------------------------------------------------------------------
const BASE = "/api/agents";
const JOBS = `${BASE}/jobs`; // collection: GET list, POST launch, DELETE kill-all
const JOBS_STREAM = `${JOBS}/stream`; // SSE endpoint for live job events
const CAPABILITIES = `${BASE}/capabilities`; // provider availability probe
// ---------------------------------------------------------------------------
// which() helper for Node.js
// ---------------------------------------------------------------------------
/**
 * Report whether `cmd` resolves on the PATH, delegating to the platform's
 * lookup tool (`where` on Windows, `which` elsewhere). Any failure — tool
 * missing, command not found — is reported as "not available".
 */
function whichCmd(cmd: string): boolean {
  const locator = process.platform === "win32" ? "where" : "which";
  try {
    execFileSync(locator, [cmd], { encoding: "utf-8", stdio: ["pipe", "pipe", "pipe"] });
    return true;
  } catch {
    return false;
  }
}
// ---------------------------------------------------------------------------
// Factory
// ---------------------------------------------------------------------------
/** Options for createAgentJobHandler. */
export interface AgentJobHandlerOptions {
  /** Which product surface this handler serves. */
  mode: "plan" | "review" | "annotate";
  /** Base URL of this server, passed to child processes via PLANNOTATOR_API_URL. */
  getServerUrl: () => string;
  /** Default working directory for spawned agent processes. */
  getCwd: () => string;
  /** Server-side command builder for known providers (codex, claude, tour). */
  buildCommand?: (provider: string, config?: Record<string, unknown>) => Promise<{
    /** argv (command + args) to spawn. */
    command: string[];
    /** File the agent writes results to; handed to onJobComplete. */
    outputPath?: string;
    /** Pipe and capture the child's stdout (JSONL log streaming). */
    captureStdout?: boolean;
    /** Prompt written to the child's stdin, which is then closed. */
    stdinPrompt?: string;
    /** Working-directory override for this job. */
    cwd?: string;
    /** Prompt text, stored on AgentJobInfo for display. */
    prompt?: string;
    /** Human-readable job label override. */
    label?: string;
    /** Underlying engine used (e.g., "claude" or "codex"). Stored on AgentJobInfo for UI display. */
    engine?: string;
    /** Model used (e.g., "sonnet", "opus"). Stored on AgentJobInfo for UI display. */
    model?: string;
    /** Claude --effort level. */
    effort?: string;
    /** Codex reasoning effort level. */
    reasoningEffort?: string;
    /** Whether Codex fast mode was enabled. */
    fastMode?: boolean;
    /** PR URL at launch time. */
    prUrl?: string;
    /** PR diff scope at launch time. */
    diffScope?: string;
    /** Diff context snapshot at launch (stored on AgentJobInfo for per-job "Copy All"). */
    diffContext?: AgentJobInfo["diffContext"];
  } | null>;
  /** Called when a job completes successfully — parse results and push annotations. */
  onJobComplete?: (job: AgentJobInfo, meta: { outputPath?: string; stdout?: string; cwd?: string }) => void | Promise<void>;
}
export function createAgentJobHandler(options: AgentJobHandlerOptions) {
  const { mode, getServerUrl, getCwd } = options;
  // --- State ---
  // Job registry: id -> live metadata + child process handle (null if spawn failed).
  const jobs = new Map<string, { info: AgentJobInfo; proc: ChildProcess | null }>();
  // Side-channel per-job data: `<id>` -> output file path, `<id>:cwd` -> spawn cwd.
  const jobOutputPaths = new Map<string, string>();
  // Open SSE responses receiving job-event broadcasts.
  const subscribers = new Set<ServerResponse>();
  // Monotonic state counter bumped on every broadcast; lets GET /jobs?since=N answer 304.
  let version = 0;
  // --- Capability detection (run once) ---
  const capabilities: AgentCapability[] = [
    { id: "claude", name: "Claude Code", available: whichCmd("claude") },
    { id: "codex", name: "Codex CLI", available: whichCmd("codex") },
    { id: "tour", name: "Code Tour", available: whichCmd("claude") || whichCmd("codex") },
  ];
  const capabilitiesResponse: AgentCapabilities = {
    mode,
    providers: capabilities,
    available: capabilities.some((c) => c.available),
  };
  // --- SSE broadcasting ---
  // Serialize one event and fan it out to every subscriber; subscribers whose
  // write throws (connection gone) are dropped.
  function broadcast(event: AgentJobEvent): void {
    version++;
    const data = serializeAgentSSEEvent(event);
    for (const res of subscribers) {
      try {
        res.write(data);
      } catch {
        subscribers.delete(res);
      }
    }
  }
  // --- Process lifecycle ---
  // Spawn the agent process, register the job, and wire stdout/stderr/close
  // handlers that stream logs and broadcast lifecycle events.
  function spawnJob(
    provider: string,
    command: string[],
    label: string,
    outputPath?: string,
    spawnOptions?: { captureStdout?: boolean; stdinPrompt?: string; cwd?: string; prompt?: string; engine?: string; model?: string; effort?: string; reasoningEffort?: string; fastMode?: boolean; prUrl?: string; diffScope?: string; diffContext?: AgentJobInfo["diffContext"] },
  ): AgentJobInfo {
    const id = crypto.randomUUID();
    const source = jobSource(id);
    const info: AgentJobInfo = {
      id,
      source,
      provider,
      label,
      status: "starting",
      startedAt: Date.now(),
      command,
      cwd: getCwd(),
      // Optional display metadata is only attached when present.
      ...(spawnOptions?.engine && { engine: spawnOptions.engine }),
      ...(spawnOptions?.model && { model: spawnOptions.model }),
      ...(spawnOptions?.effort && { effort: spawnOptions.effort }),
      ...(spawnOptions?.reasoningEffort && { reasoningEffort: spawnOptions.reasoningEffort }),
      ...(spawnOptions?.fastMode && { fastMode: spawnOptions.fastMode }),
      ...(spawnOptions?.prUrl && { prUrl: spawnOptions.prUrl }),
      ...(spawnOptions?.diffScope && { diffScope: spawnOptions.diffScope }),
      ...(spawnOptions?.diffContext && { diffContext: spawnOptions.diffContext }),
    };
    let proc: ChildProcess | null = null;
    try {
      const spawnCwd = spawnOptions?.cwd ?? getCwd();
      const captureStdout = spawnOptions?.captureStdout ?? false;
      const hasStdinPrompt = !!spawnOptions?.stdinPrompt;
      proc = spawn(command[0], command.slice(1), {
        cwd: spawnCwd,
        stdio: [
          hasStdinPrompt ? "pipe" : "ignore",
          captureStdout ? "pipe" : "ignore",
          "pipe",
        ],
        env: {
          ...process.env,
          PLANNOTATOR_AGENT_SOURCE: source,
          PLANNOTATOR_API_URL: getServerUrl(),
        },
      });
      // Write prompt to stdin and close (for providers that read prompt from stdin)
      if (hasStdinPrompt && proc.stdin) {
        proc.stdin.write(spawnOptions!.stdinPrompt!);
        proc.stdin.end();
      }
      info.status = "running";
      info.cwd = spawnCwd;
      if (spawnOptions?.prompt) info.prompt = spawnOptions.prompt;
      jobs.set(id, { info, proc });
      if (outputPath) jobOutputPaths.set(id, outputPath);
      if (spawnOptions?.cwd) jobOutputPaths.set(`${id}:cwd`, spawnOptions.cwd);
      broadcast({ type: "job:started", job: { ...info } });
      // --- Stdout capture (Claude JSONL streaming) ---
      let stdoutBuf = "";
      if (captureStdout && proc.stdout) {
        proc.stdout.on("data", (chunk: Buffer) => {
          const text = chunk.toString();
          stdoutBuf += text;
          // Forward JSONL lines as log events
          const lines = text.split('\n');
          for (const line of lines) {
            if (!line.trim()) continue;
            // Tour jobs with the Claude engine also stream Claude JSONL.
            if (provider === "claude" || spawnOptions?.engine === "claude") {
              const formatted = formatClaudeLogEvent(line);
              if (formatted !== null) {
                broadcast({ type: "job:log", jobId: id, delta: formatted + '\n' });
              }
              continue;
            }
            try {
              const event = JSON.parse(line);
              // Suppress final "result" records from the log stream; results
              // are ingested via onJobComplete instead.
              if (event.type === 'result') continue;
            } catch { /* not JSON — forward as raw log */ }
            broadcast({ type: "job:log", jobId: id, delta: line + '\n' });
          }
        });
      }
      // --- Stderr: buffer tail for errors + live log streaming ---
      let stderrBuf = "";
      let logPending = "";
      let logFlushTimer: ReturnType<typeof setTimeout> | null = null;
      if (proc.stderr) {
        proc.stderr.on("data", (chunk: Buffer) => {
          const text = chunk.toString();
          // Keep only the last 500 chars as the failure summary.
          stderrBuf = (stderrBuf + text).slice(-500);
          logPending += text;
          // Debounce log broadcasts to at most one every 200ms.
          if (!logFlushTimer) {
            logFlushTimer = setTimeout(() => {
              if (logPending) {
                broadcast({ type: "job:log", jobId: id, delta: logPending });
                logPending = "";
              }
              logFlushTimer = null;
            }, 200);
          }
        });
      }
      // Monitor process close (fires after stdio streams are fully drained,
      // unlike 'exit' which fires before — critical for stdout capture)
      proc.on("close", async (exitCode) => {
        // Flush remaining stderr
        if (logFlushTimer) { clearTimeout(logFlushTimer); logFlushTimer = null; }
        if (logPending) {
          broadcast({ type: "job:log", jobId: id, delta: logPending });
          logPending = "";
        }
        const entry = jobs.get(id);
        // Skip jobs already in a terminal state (e.g. killed via the API).
        if (!entry || isTerminalStatus(entry.info.status)) return;
        entry.info.endedAt = Date.now();
        entry.info.exitCode = exitCode ?? undefined;
        entry.info.status = exitCode === 0 ? "done" : "failed";
        if (exitCode !== 0 && stderrBuf) {
          entry.info.error = stderrBuf;
        }
        // Ingest results before broadcasting completion
        const jobOutputPath = jobOutputPaths.get(id);
        const jobCwd = jobOutputPaths.get(`${id}:cwd`);
        if (exitCode === 0 && options.onJobComplete) {
          try {
            await options.onJobComplete(entry.info, {
              outputPath: jobOutputPath,
              stdout: captureStdout ? stdoutBuf : undefined,
              cwd: jobCwd,
            });
          } catch {
            // Result ingestion failure shouldn't prevent job completion broadcast
          }
        }
        jobOutputPaths.delete(id);
        jobOutputPaths.delete(`${id}:cwd`);
        broadcast({ type: "job:completed", job: { ...entry.info } });
      });
      // Handle spawn errors after process starts
      proc.on("error", (err) => {
        const entry = jobs.get(id);
        if (!entry || isTerminalStatus(entry.info.status)) return;
        entry.info.status = "failed";
        entry.info.endedAt = Date.now();
        entry.info.error = err.message;
        broadcast({ type: "job:completed", job: { ...entry.info } });
      });
    } catch (err) {
      // Synchronous spawn failure: register the job, then immediately mark it
      // failed so clients still observe a started -> completed pair.
      jobs.set(id, { info, proc: null });
      broadcast({ type: "job:started", job: { ...info } });
      info.status = "failed";
      info.endedAt = Date.now();
      info.error = err instanceof Error ? err.message : String(err);
      broadcast({ type: "job:completed", job: { ...info } });
    }
    return { ...info };
  }
  // Kill one job; false when the id is unknown or the job already ended.
  function killJob(id: string): boolean {
    const entry = jobs.get(id);
    if (!entry || isTerminalStatus(entry.info.status)) return false;
    if (entry.proc) {
      try {
        entry.proc.kill();
      } catch {
        // Process may have already exited
      }
    }
    entry.info.status = "killed";
    entry.info.endedAt = Date.now();
    jobOutputPaths.delete(id);
    jobOutputPaths.delete(`${id}:cwd`);
    broadcast({ type: "job:completed", job: { ...entry.info } });
    return true;
  }
  // Kill every non-terminal job; returns how many were killed.
  function killAll(): number {
    let count = 0;
    for (const [id, entry] of jobs) {
      if (!isTerminalStatus(entry.info.status)) {
        killJob(id);
        count++;
      }
    }
    return count;
  }
  // Shallow-cloned snapshots so callers cannot mutate registry entries.
  function getAllJobs(): AgentJobInfo[] {
    return Array.from(jobs.values()).map((e) => ({ ...e.info }));
  }
  // --- HTTP handler ---
  return {
    killAll,
    /** Route an incoming request; resolves false when the path is not ours. */
    async handle(
      req: IncomingMessage,
      res: ServerResponse,
      url: URL,
    ): Promise<boolean> {
      // --- GET /api/agents/capabilities ---
      if (url.pathname === CAPABILITIES && req.method === "GET") {
        json(res, capabilitiesResponse);
        return true;
      }
      // --- SSE stream ---
      if (url.pathname === JOBS_STREAM && req.method === "GET") {
        res.writeHead(200, {
          "Content-Type": "text/event-stream",
          "Cache-Control": "no-cache",
          Connection: "keep-alive",
        });
        // Disable idle timeout — SSE connections stay open indefinitely.
        res.setTimeout(0);
        // Send current state as snapshot
        const snapshot: AgentJobEvent = {
          type: "snapshot",
          jobs: getAllJobs(),
        };
        res.write(serializeAgentSSEEvent(snapshot));
        subscribers.add(res);
        // Heartbeat to keep connection alive
        const heartbeatTimer = setInterval(() => {
          try {
            res.write(AGENT_HEARTBEAT_COMMENT);
          } catch {
            clearInterval(heartbeatTimer);
            subscribers.delete(res);
          }
        }, AGENT_HEARTBEAT_INTERVAL_MS);
        // Clean up on disconnect
        res.on("close", () => {
          clearInterval(heartbeatTimer);
          subscribers.delete(res);
        });
        return true;
      }
      // --- GET /api/agents/jobs (snapshot / polling fallback) ---
      if (url.pathname === JOBS && req.method === "GET") {
        const since = url.searchParams.get("since");
        if (since !== null) {
          const sinceVersion = parseInt(since, 10);
          // Nothing changed since the client's version — 304 saves a payload.
          if (!isNaN(sinceVersion) && sinceVersion === version) {
            res.writeHead(304);
            res.end();
            return true;
          }
        }
        json(res, { jobs: getAllJobs(), version });
        return true;
      }
      // --- POST /api/agents/jobs (launch) ---
      if (url.pathname === JOBS && req.method === "POST") {
        try {
          const body = await parseBody(req);
          const provider = typeof body.provider === "string" ? body.provider : "";
          let rawCommand = Array.isArray(body.command) ? body.command : [];
          let command = rawCommand.filter((c: unknown): c is string => typeof c === "string");
          let label = typeof body.label === "string" ? body.label : `${provider} agent`;
          let outputPath: string | undefined;
          // Validate provider is a known, available capability
          const cap = capabilities.find((c) => c.id === provider);
          if (!cap || !cap.available) {
            json(res, { error: `Unknown or unavailable provider: ${provider}` }, 400);
            return true;
          }
          // Try server-side command building for known providers
          let captureStdout = false;
          let stdinPrompt: string | undefined;
          let spawnCwd: string | undefined;
          let promptText: string | undefined;
          let jobEngine: string | undefined;
          let jobModel: string | undefined;
          let jobEffort: string | undefined;
          let jobReasoningEffort: string | undefined;
          let jobFastMode: boolean | undefined;
          let jobPrUrl: string | undefined;
          let jobDiffScope: string | undefined;
          let jobDiffContext: AgentJobInfo["diffContext"] | undefined;
          if (options.buildCommand) {
            // Thread config from POST body to buildCommand
            const config: Record<string, unknown> = {};
            if (typeof body.engine === "string") config.engine = body.engine;
            if (typeof body.model === "string") config.model = body.model;
            if (typeof body.reasoningEffort === "string") config.reasoningEffort = body.reasoningEffort;
            if (typeof body.effort === "string") config.effort = body.effort;
            if (body.fastMode === true) config.fastMode = true;
            const built = await options.buildCommand(provider, Object.keys(config).length > 0 ? config : undefined);
            if (built) {
              // A server-built command overrides whatever the client sent.
              command = built.command;
              outputPath = built.outputPath;
              captureStdout = built.captureStdout ?? false;
              stdinPrompt = built.stdinPrompt;
              spawnCwd = built.cwd;
              promptText = built.prompt;
              if (built.label) label = built.label;
              jobEngine = built.engine;
              jobModel = built.model;
              jobEffort = built.effort;
              jobReasoningEffort = built.reasoningEffort;
              jobFastMode = built.fastMode;
              jobPrUrl = built.prUrl;
              jobDiffScope = built.diffScope;
              jobDiffContext = built.diffContext;
            }
          }
          if (command.length === 0) {
            json(res, { error: 'Missing "command" array' }, 400);
            return true;
          }
          const job = spawnJob(provider, command, label, outputPath, {
            captureStdout,
            stdinPrompt,
            cwd: spawnCwd,
            prompt: promptText,
            engine: jobEngine,
            model: jobModel,
            effort: jobEffort,
            reasoningEffort: jobReasoningEffort,
            fastMode: jobFastMode,
            prUrl: jobPrUrl,
            diffScope: jobDiffScope,
            diffContext: jobDiffContext,
          });
          json(res, { job }, 201);
        } catch {
          json(res, { error: "Invalid JSON" }, 400);
        }
        return true;
      }
      // --- DELETE /api/agents/jobs/:id (kill one) ---
      if (url.pathname.startsWith(JOBS + "/") && url.pathname !== JOBS_STREAM && req.method === "DELETE") {
        const id = url.pathname.slice(JOBS.length + 1);
        if (!id) {
          json(res, { error: "Missing job ID" }, 400);
          return true;
        }
        const found = killJob(id);
        if (!found) {
          json(res, { error: "Job not found or already terminal" }, 404);
          return true;
        }
        json(res, { ok: true });
        return true;
      }
      // --- DELETE /api/agents/jobs (kill all) ---
      if (url.pathname === JOBS && req.method === "DELETE") {
        const count = killAll();
        json(res, { ok: true, killed: count });
        return true;
      }
      // Not handled
      return false;
    },
  };
}

View File

@@ -0,0 +1,85 @@
/**
* Editor annotation handler (in-memory store for VS Code integration).
* EditorAnnotation type, createEditorAnnotationHandler
*/
import { randomUUID } from "node:crypto";
import type { IncomingMessage } from "node:http";
import { json, parseBody } from "./helpers";
/** A text-selection annotation captured from the editor. */
interface EditorAnnotation {
  id: string; // server-generated UUID
  filePath: string;
  selectedText: string;
  // Selection line range — assumed to be 1-based editor lines; confirm
  // against the VS Code extension that posts these.
  lineStart: number;
  lineEnd: number;
  comment?: string;
  createdAt: number; // epoch milliseconds (Date.now())
}
/**
 * In-memory store + HTTP routes for editor annotations (VS Code integration).
 *
 * Routes:
 * - GET    /api/editor-annotations      — list all annotations
 * - POST   /api/editor-annotation       — create one, responds { id }
 * - DELETE /api/editor-annotation?id=…  — remove one (idempotent)
 */
export function createEditorAnnotationHandler() {
  const annotations: EditorAnnotation[] = [];
  return {
    async handle(
      req: IncomingMessage,
      res: import("node:http").ServerResponse,
      url: URL,
    ): Promise<boolean> {
      if (url.pathname === "/api/editor-annotations" && req.method === "GET") {
        json(res, { annotations });
        return true;
      }
      if (url.pathname === "/api/editor-annotation" && req.method === "POST") {
        try {
          const body = await parseBody(req);
          const lineStart = Number(body.lineStart);
          const lineEnd = Number(body.lineEnd);
          // Explicit validation: the previous truthiness checks let
          // non-numeric line values through as NaN; reject those instead.
          if (
            typeof body.filePath !== "string" ||
            body.filePath.length === 0 ||
            typeof body.selectedText !== "string" ||
            body.selectedText.length === 0 ||
            !Number.isFinite(lineStart) ||
            !Number.isFinite(lineEnd)
          ) {
            json(res, { error: "Missing required fields" }, 400);
            return true;
          }
          const annotation: EditorAnnotation = {
            id: randomUUID(),
            filePath: body.filePath,
            selectedText: body.selectedText,
            lineStart,
            lineEnd,
            comment: typeof body.comment === "string" ? body.comment : undefined,
            createdAt: Date.now(),
          };
          annotations.push(annotation);
          json(res, { id: annotation.id });
        } catch {
          json(res, { error: "Invalid JSON" }, 400);
        }
        return true;
      }
      if (
        url.pathname === "/api/editor-annotation" &&
        req.method === "DELETE"
      ) {
        const id = url.searchParams.get("id");
        if (!id) {
          json(res, { error: "Missing id parameter" }, 400);
          return true;
        }
        // Deletion is idempotent: unknown ids still respond { ok: true }.
        const idx = annotations.findIndex((annotation) => annotation.id === id);
        if (idx !== -1) {
          annotations.splice(idx, 1);
        }
        json(res, { ok: true });
        return true;
      }
      return false;
    },
  };
}

View File

@@ -0,0 +1,189 @@
/**
* External Annotations — Pi (node:http) server handler.
*
* Thin HTTP adapter over the shared annotation store. Mirrors the Bun
* handler at packages/server/external-annotations.ts but uses node:http
* IncomingMessage/ServerResponse + res.write() for SSE.
*/
import type { IncomingMessage, ServerResponse } from "node:http";
import {
createAnnotationStore,
transformPlanInput,
transformReviewInput,
serializeSSEEvent,
HEARTBEAT_COMMENT,
HEARTBEAT_INTERVAL_MS,
type StorableAnnotation,
type ExternalAnnotationEvent,
} from "../generated/external-annotation.js";
import { json, parseBody } from "./helpers.js";
// ---------------------------------------------------------------------------
// Route prefix
// ---------------------------------------------------------------------------
const BASE = "/api/external-annotations"; // CRUD collection endpoint
const STREAM = `${BASE}/stream`; // SSE endpoint for live mutation events
// ---------------------------------------------------------------------------
// Factory
// ---------------------------------------------------------------------------
export function createExternalAnnotationHandler(mode: "plan" | "review") {
  const store = createAnnotationStore<StorableAnnotation>();
  // Open SSE responses receiving mutation-event broadcasts.
  const subscribers = new Set<ServerResponse>();
  // Plan and review payloads have different shapes; pick the right validator.
  const transform = mode === "plan" ? transformPlanInput : transformReviewInput;
  // Wire store mutations → SSE broadcast
  store.onMutation((event: ExternalAnnotationEvent<StorableAnnotation>) => {
    const data = serializeSSEEvent(event);
    for (const res of subscribers) {
      try {
        res.write(data);
      } catch {
        // Response closed — clean up
        subscribers.delete(res);
      }
    }
  });
  return {
    /** Push annotations directly into the store (bypasses HTTP, reuses same validation). */
    addAnnotations(body: unknown): { ids: string[] } | { error: string } {
      const parsed = transform(body);
      if ("error" in parsed) return { error: parsed.error };
      const created = store.add(parsed.annotations);
      return { ids: created.map((a: { id: string }) => a.id) };
    },
    /** Route an incoming request; resolves false when the path is not ours. */
    async handle(
      req: IncomingMessage,
      res: ServerResponse,
      url: URL,
    ): Promise<boolean> {
      // --- SSE stream ---
      if (url.pathname === STREAM && req.method === "GET") {
        res.writeHead(200, {
          "Content-Type": "text/event-stream",
          "Cache-Control": "no-cache",
          Connection: "keep-alive",
        });
        // Disable idle timeout for SSE connections
        res.setTimeout(0);
        // Send current state as snapshot
        const snapshot: ExternalAnnotationEvent<StorableAnnotation> = {
          type: "snapshot",
          annotations: store.getAll(),
        };
        res.write(serializeSSEEvent(snapshot));
        subscribers.add(res);
        // Heartbeat to keep connection alive
        const heartbeatTimer = setInterval(() => {
          try {
            res.write(HEARTBEAT_COMMENT);
          } catch {
            clearInterval(heartbeatTimer);
            subscribers.delete(res);
          }
        }, HEARTBEAT_INTERVAL_MS);
        // Clean up on disconnect
        res.on("close", () => {
          clearInterval(heartbeatTimer);
          subscribers.delete(res);
        });
        // Don't end the response — SSE stays open
        return true;
      }
      // --- GET snapshot (polling fallback) ---
      if (url.pathname === BASE && req.method === "GET") {
        const since = url.searchParams.get("since");
        if (since !== null) {
          const sinceVersion = parseInt(since, 10);
          // Unchanged since the client's version — 304 saves a payload.
          if (!isNaN(sinceVersion) && sinceVersion === store.version) {
            res.writeHead(304);
            res.end();
            return true;
          }
        }
        json(res, {
          annotations: store.getAll(),
          version: store.version,
        });
        return true;
      }
      // --- POST (add single or batch) ---
      if (url.pathname === BASE && req.method === "POST") {
        try {
          const body = await parseBody(req);
          const parsed = transform(body);
          if ("error" in parsed) {
            json(res, { error: parsed.error }, 400);
            return true;
          }
          const created = store.add(parsed.annotations);
          json(res, { ids: created.map((a: StorableAnnotation) => a.id) }, 201);
        } catch {
          json(res, { error: "Invalid JSON" }, 400);
        }
        return true;
      }
      // --- PATCH (update fields on a single annotation) ---
      if (url.pathname === BASE && req.method === "PATCH") {
        const id = url.searchParams.get("id");
        if (!id) {
          json(res, { error: "Missing ?id parameter" }, 400);
          return true;
        }
        try {
          const body = await parseBody(req);
          const updated = store.update(id, body as Partial<StorableAnnotation>);
          if (!updated) {
            json(res, { error: "Not found" }, 404);
            return true;
          }
          json(res, { annotation: updated });
        } catch {
          json(res, { error: "Invalid JSON" }, 400);
        }
        return true;
      }
      // --- DELETE (by id, by source, or clear all) ---
      if (url.pathname === BASE && req.method === "DELETE") {
        const id = url.searchParams.get("id");
        const source = url.searchParams.get("source");
        if (id) {
          store.remove(id);
          json(res, { ok: true });
          return true;
        }
        if (source) {
          const count = store.clearBySource(source);
          json(res, { ok: true, removed: count });
          return true;
        }
        const count = store.clearAll();
        json(res, { ok: true, removed: count });
        return true;
      }
      // Not handled — pass through
      return false;
    },
  };
}

View File

@@ -0,0 +1,210 @@
/**
* Shared request handlers reused across plan, review, and annotate servers.
* handleImageRequest, handleUploadRequest, handleDraftRequest, handleFavicon
*/
import { randomUUID } from "node:crypto";
import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs";
import type { IncomingMessage } from "node:http";
import { tmpdir } from "node:os";
import { join, resolve as resolvePath } from "node:path";
import { saveDraft, loadDraft, deleteDraft } from "../generated/draft.js";
import { FAVICON_SVG } from "../generated/favicon.js";
import { json, parseBody, send, toWebRequest } from "./helpers";
type Res = import("node:http").ServerResponse;
// Image extensions accepted both for disk reads and for uploads.
const ALLOWED_IMAGE_EXTENSIONS = new Set([
  "png",
  "jpg",
  "jpeg",
  "gif",
  "webp",
  "svg",
  "bmp",
  "ico",
  "tiff",
  "tif",
  "avif",
]);
// Lowercased extension -> Content-Type header for image responses.
const IMAGE_CONTENT_TYPES: Record<string, string> = {
  png: "image/png",
  jpg: "image/jpeg",
  jpeg: "image/jpeg",
  gif: "image/gif",
  webp: "image/webp",
  svg: "image/svg+xml",
  bmp: "image/bmp",
  ico: "image/x-icon",
  tiff: "image/tiff",
  tif: "image/tiff",
  avif: "image/avif",
};
// Uploads are written to a dedicated subdirectory of the OS temp dir.
const UPLOAD_DIR = join(tmpdir(), "plannotator");
/**
 * Lowercased substring after the final "." in `filePath`, or "" when the
 * string contains no dot at all.
 */
function getExtension(filePath: string): string {
  const dot = filePath.lastIndexOf(".");
  return dot < 0 ? "" : filePath.slice(dot + 1).toLowerCase();
}
/**
 * Resolve `rawPath` to an absolute path and check its extension against the
 * image allow-list. The resolved path is returned either way so callers can
 * report it.
 */
function validateImagePath(rawPath: string): {
  valid: boolean;
  resolved: string;
  error?: string;
} {
  const resolved = resolvePath(rawPath);
  if (ALLOWED_IMAGE_EXTENSIONS.has(getExtension(resolved))) {
    return { valid: true, resolved };
  }
  return {
    valid: false,
    resolved,
    error: "Path does not point to a supported image file",
  };
}
/**
 * Check an upload's filename extension against the image allow-list.
 * Files without any extension default to "png".
 */
function validateUploadExtension(fileName: string): {
  valid: boolean;
  ext: string;
  error?: string;
} {
  const ext = getExtension(fileName) || "png";
  if (ALLOWED_IMAGE_EXTENSIONS.has(ext)) {
    return { valid: true, ext };
  }
  return {
    valid: false,
    ext,
    error: `File extension ".${ext}" is not a supported image type`,
  };
}
/** Content-Type for an image path; octet-stream when the extension is unknown. */
function getImageContentType(filePath: string): string {
  const ext = getExtension(filePath);
  return IMAGE_CONTENT_TYPES[ext] ?? "application/octet-stream";
}
/**
 * GET ?path=…&base=… — serve an image file from the local filesystem.
 *
 * Tries the raw path first; for relative paths, falls back to resolving
 * against the optional `base` directory. Responds 400 on a missing param,
 * 403 when the extension is not an allowed image type, 404 otherwise.
 *
 * NOTE(review): only the file extension is validated — any readable image on
 * disk can be fetched by absolute path. Presumably acceptable for a local
 * dev-tool server; confirm this endpoint is never exposed beyond localhost.
 */
export function handleImageRequest(res: Res, url: URL): void {
  const imagePath = url.searchParams.get("path");
  if (!imagePath) {
    send(res, "Missing path parameter", 400, { "Content-Type": "text/plain" });
    return;
  }
  // Attempt to serve one candidate; false = not servable (bad extension,
  // missing file, or read error) so the caller can try another candidate.
  const tryServePath = (candidate: string): boolean => {
    const validation = validateImagePath(candidate);
    if (!validation.valid) return false;
    try {
      if (!existsSync(validation.resolved)) return false;
      const data = readFileSync(validation.resolved);
      send(res, data, 200, {
        "Content-Type": getImageContentType(validation.resolved),
      });
      return true;
    } catch {
      return false;
    }
  };
  if (tryServePath(imagePath)) return;
  // Relative paths may be anchored to a caller-supplied base directory.
  const base = url.searchParams.get("base");
  if (
    base &&
    !imagePath.startsWith("/") &&
    tryServePath(resolvePath(base, imagePath))
  ) {
    return;
  }
  // Failure reporting: distinguish "bad extension" (403) from "missing" (404).
  const validation = validateImagePath(imagePath);
  if (!validation.valid) {
    send(res, validation.error || "Invalid image path", 403, {
      "Content-Type": "text/plain",
    });
    return;
  }
  send(res, "File not found", 404, { "Content-Type": "text/plain" });
}
/**
 * POST multipart upload — persist an image file into UPLOAD_DIR under a
 * random name and respond with { path, originalName }.
 */
export async function handleUploadRequest(
  req: IncomingMessage,
  res: Res,
): Promise<void> {
  try {
    const formData = await toWebRequest(req).formData();
    const file = formData.get("file");
    // formData.get yields string | File | null; accept only File-like values.
    const isFileLike =
      !!file &&
      typeof file === "object" &&
      "arrayBuffer" in file &&
      "name" in file;
    if (!isFileLike) {
      json(res, { error: "No file provided" }, 400);
      return;
    }
    const upload = file as File;
    const extResult = validateUploadExtension(upload.name);
    if (!extResult.valid) {
      json(res, { error: extResult.error }, 400);
      return;
    }
    mkdirSync(UPLOAD_DIR, { recursive: true });
    const tempPath = join(UPLOAD_DIR, `${randomUUID()}.${extResult.ext}`);
    writeFileSync(tempPath, Buffer.from(await upload.arrayBuffer()));
    json(res, { path: tempPath, originalName: upload.name });
  } catch (err) {
    const message = err instanceof Error ? err.message : "Upload failed";
    json(res, { error: message }, 500);
  }
}
/**
 * Draft persistence endpoint: POST saves, DELETE removes, any other method
 * (typically GET) loads. Only the POST branch is asynchronous (body parse),
 * so only that branch returns a promise.
 */
export function handleDraftRequest(
  req: IncomingMessage,
  res: Res,
  draftKey: string,
): Promise<void> | void {
  switch (req.method) {
    case "POST":
      return parseBody(req)
        .then((body) => {
          saveDraft(draftKey, body);
          json(res, { ok: true });
        })
        .catch((err: unknown) => {
          const message =
            err instanceof Error ? err.message : "Failed to save draft";
          console.error(`[draft] save failed: ${message}`);
          json(res, { error: message }, 500);
        });
    case "DELETE":
      deleteDraft(draftKey);
      json(res, { ok: true });
      return;
    default: {
      const draft = loadDraft(draftKey);
      if (draft) {
        json(res, draft);
      } else {
        json(res, { found: false }, 404);
      }
      return;
    }
  }
}
/** Serve the inline SVG favicon with a one-day public cache. */
export function handleFavicon(res: Res): void {
  const headers = {
    "Content-Type": "image/svg+xml",
    "Cache-Control": "public, max-age=86400",
  };
  send(res, FAVICON_SVG, 200, headers);
}

View File

@@ -0,0 +1,78 @@
/**
* Core HTTP helpers for Pi extension servers.
* parseBody, json, html, send, toWebRequest
*/
import type { IncomingMessage } from "node:http";
import { Readable } from "node:stream";
/**
 * Read the full request body and parse it as JSON.
 *
 * Never rejects: malformed JSON and stream errors both resolve to `{}` so
 * route handlers can destructure without try/catch.
 */
export function parseBody(
  req: IncomingMessage,
): Promise<Record<string, unknown>> {
  return new Promise((resolve) => {
    // Collect raw Buffers and decode once at the end. Concatenating
    // chunk-by-chunk as strings corrupts multi-byte UTF-8 characters that
    // happen to be split across chunk boundaries.
    const chunks: Buffer[] = [];
    req.on("data", (chunk: Buffer | string) => {
      chunks.push(typeof chunk === "string" ? Buffer.from(chunk) : chunk);
    });
    // Without this, a socket error would leave the promise pending forever.
    req.on("error", () => resolve({}));
    req.on("end", () => {
      try {
        resolve(
          JSON.parse(Buffer.concat(chunks).toString("utf-8")) as Record<
            string,
            unknown
          >,
        );
      } catch {
        resolve({});
      }
    });
  });
}
/**
 * Finish the response with a JSON body.
 *
 * @param res    Response to write to (headers not yet sent).
 * @param data   Any JSON-serializable value.
 * @param status HTTP status code; defaults to 200.
 */
export function json(
  res: import("node:http").ServerResponse,
  data: unknown,
  status = 200,
): void {
  res.writeHead(status, { "Content-Type": "application/json" });
  res.end(JSON.stringify(data));
}
/** Finish the response with an HTML document and a 200 status. */
export function html(
  res: import("node:http").ServerResponse,
  content: string,
): void {
  res.writeHead(200, { "Content-Type": "text/html" });
  res.end(content);
}
/**
 * Finish the response with an arbitrary body.
 *
 * @param res     Response to write to (headers not yet sent).
 * @param body    Payload — string or binary.
 * @param status  HTTP status code; defaults to 200.
 * @param headers Extra response headers; defaults to none.
 */
export function send(
  res: import("node:http").ServerResponse,
  body: string | Buffer,
  status = 200,
  headers: Record<string, string> = {},
): void {
  res.writeHead(status, headers);
  res.end(body);
}
/**
 * Parse the request path into a URL object. The host is a dummy —
 * only pathname and query are meaningful.
 */
export function requestUrl(req: IncomingMessage): URL {
  const path = req.url ?? "/";
  return new URL(path, "http://localhost");
}
/**
 * Adapt a node:http IncomingMessage into a WHATWG fetch Request so web APIs
 * (e.g. request.formData()) can consume it.
 */
export function toWebRequest(req: IncomingMessage): Request {
  const headers = new Headers();
  for (const [name, raw] of Object.entries(req.headers)) {
    if (raw === undefined) continue;
    // Node presents repeated headers (e.g. set-cookie) as string arrays;
    // append each value so none are lost.
    const values = Array.isArray(raw) ? raw : [raw];
    for (const value of values) headers.append(name, value);
  }
  const init: RequestInit & { duplex?: "half" } = {
    method: req.method,
    headers,
  };
  const mayHaveBody = req.method !== "GET" && req.method !== "HEAD";
  if (mayHaveBody) {
    // A Node stream used as a fetch body must be marked half-duplex.
    init.body = Readable.toWeb(req) as unknown as BodyInit;
    init.duplex = "half";
  }
  return new Request(`http://localhost${req.url ?? "/"}`, init);
}

View File

@@ -0,0 +1,46 @@
/**
* IDE integration — open plan diffs in VS Code.
* Node.js equivalent of packages/server/ide.ts.
*/
import { spawn } from "node:child_process";
/**
 * Open two files in VS Code's diff viewer. Node.js equivalent of
 * packages/server/ide.ts.
 *
 * Resolves (never rejects) with `{ ok: true }` on success or `{ error }` when
 * the `code` CLI is missing or exits non-zero.
 */
export function openEditorDiff(
  oldPath: string,
  newPath: string,
): Promise<{ ok: true } | { error: string }> {
  // Single source for the guidance shown whenever the CLI seems to be missing
  // (previously duplicated in the 'error' and 'close' branches).
  const cliMissing =
    "VS Code CLI not found. Run 'Shell Command: Install code command in PATH' from the VS Code command palette.";
  return new Promise((resolve) => {
    const proc = spawn("code", ["--diff", oldPath, newPath], {
      stdio: ["ignore", "ignore", "pipe"],
    });
    let stderr = "";
    proc.stderr?.on("data", (chunk: Buffer) => {
      stderr += chunk.toString();
    });
    // Both 'error' and 'close' can fire for a failed spawn; settle exactly
    // once so the first (most specific) result wins.
    let settled = false;
    const finish = (result: { ok: true } | { error: string }): void => {
      if (!settled) {
        settled = true;
        resolve(result);
      }
    };
    proc.on("error", (err) => {
      finish(
        err.message.includes("ENOENT")
          ? { error: cliMissing }
          : { error: err.message },
      );
    });
    proc.on("close", (code) => {
      if (code === 0) {
        finish({ ok: true });
      } else if (stderr.includes("not found") || stderr.includes("ENOENT")) {
        finish({ error: cliMissing });
      } else {
        finish({ error: `code --diff exited with ${code}: ${stderr}` });
      }
    });
  });
}

View File

@@ -0,0 +1,195 @@
/**
* Note-taking app integrations (Obsidian, Bear, Octarine).
* Node.js equivalents of packages/server/integrations.ts.
* Config types, save functions, tag extraction, filename generation
*/
import { execSync, spawn } from "node:child_process";
import { existsSync, mkdirSync, statSync, writeFileSync } from "node:fs";
import { basename, join } from "node:path";
import {
type ObsidianConfig,
type BearConfig,
type OctarineConfig,
type IntegrationResult,
extractTitle,
generateFrontmatter,
generateFilename,
generateOctarineFrontmatter,
stripH1,
buildHashtags,
buildBearContent,
detectObsidianVaults,
} from "../generated/integrations-common.js";
import { sanitizeTag } from "../generated/project.js";
import { resolveUserPath } from "../generated/resolve-file.js";
// Re-export the shared integration types and helpers so consumers can import
// everything they need from this module alone.
export type { ObsidianConfig, BearConfig, OctarineConfig, IntegrationResult };
export {
  extractTitle,
  generateFrontmatter,
  generateFilename,
  generateOctarineFrontmatter,
  stripH1,
  buildHashtags,
  buildBearContent,
  detectObsidianVaults,
};
/**
 * Best-effort project tag for note integrations: the git toplevel directory
 * name when inside a repo, otherwise the cwd's basename; null when neither
 * yields a usable tag.
 */
function detectProjectNameSync(): string | null {
  try {
    const toplevel = execSync("git rev-parse --show-toplevel", {
      encoding: "utf-8",
      stdio: ["pipe", "pipe", "pipe"],
    }).trim();
    const fromGit = toplevel ? sanitizeTag(basename(toplevel)) : null;
    if (fromGit) return fromGit;
  } catch {
    /* not in a git repo */
  }
  try {
    return sanitizeTag(basename(process.cwd())) ?? null;
  } catch {
    return null;
  }
}
/**
 * Derive up to 7 note tags from a plan's markdown: always "plannotator",
 * plus the detected project name, up to 3 significant words from the H1
 * title, and the languages of fenced code blocks.
 */
export async function extractTags(markdown: string): Promise<string[]> {
  const tags = new Set<string>(["plannotator"]);
  const projectName = detectProjectNameSync();
  if (projectName) tags.add(projectName);
  // Words too generic to be useful as tags.
  const stopWords = new Set([
    "the", "and", "for", "with", "this", "that", "from", "into",
    "plan", "implementation", "overview", "phase", "step", "steps",
  ]);
  // Up to 3 significant words from the document title (sans "Plan:" prefix).
  const h1Match = markdown.match(
    /^#\s+(?:Implementation\s+Plan:|Plan:)?\s*(.+)$/im,
  );
  if (h1Match) {
    const keywords = h1Match[1]
      .toLowerCase()
      .replace(/[^\w\s-]/g, " ")
      .split(/\s+/)
      .filter((w) => w.length > 2 && !stopWords.has(w));
    for (const word of keywords.slice(0, 3)) tags.add(word);
  }
  // Languages of fenced code blocks, skipping data/prose formats.
  const ignoredLangs = ["json", "yaml", "yml", "text", "txt", "markdown", "md"];
  const seenLangs = new Set<string>();
  const langRegex = /```(\w+)/g;
  let langMatch: RegExpExecArray | null;
  while ((langMatch = langRegex.exec(markdown)) !== null) {
    const normalized = langMatch[1].toLowerCase();
    if (!seenLangs.has(normalized) && !ignoredLangs.includes(normalized)) {
      seenLangs.add(normalized);
      tags.add(normalized);
    }
  }
  return Array.from(tags).slice(0, 7);
}
/**
 * Write the plan as a Markdown note inside an Obsidian vault.
 *
 * Validates the vault path, ensures the target folder exists, prefixes the
 * content with YAML frontmatter and a [[Plannotator Plans]] backlink, and
 * returns the written file path. Never throws — failures come back in the
 * IntegrationResult.
 */
export async function saveToObsidian(
  config: ObsidianConfig,
): Promise<IntegrationResult> {
  try {
    const { vaultPath, folder, plan } = config;
    if (!vaultPath?.trim()) {
      return { success: false, error: "Vault path is required" };
    }
    // Resolve the user-supplied path (presumably expands ~ — see
    // resolve-file.js) before touching the filesystem.
    const normalizedVault = resolveUserPath(vaultPath);
    if (!existsSync(normalizedVault))
      return {
        success: false,
        error: `Vault path does not exist: ${normalizedVault}`,
      };
    if (!statSync(normalizedVault).isDirectory())
      return {
        success: false,
        error: `Vault path is not a directory: ${normalizedVault}`,
      };
    // Empty/whitespace folder names fall back to the default.
    const folderName = folder.trim() || "plannotator";
    const targetFolder = join(normalizedVault, folderName);
    if (!existsSync(targetFolder)) mkdirSync(targetFolder, { recursive: true });
    const filename = generateFilename(
      plan,
      config.filenameFormat,
      config.filenameSeparator,
    );
    const filePath = join(targetFolder, filename);
    const tags = await extractTags(plan);
    const frontmatter = generateFrontmatter(tags);
    const content = `${frontmatter}\n\n[[Plannotator Plans]]\n\n${plan}`;
    writeFileSync(filePath, content);
    return { success: true, path: filePath };
  } catch (err) {
    return {
      success: false,
      error: err instanceof Error ? err.message : "Unknown error",
    };
  }
}
/**
 * Create a Bear note via its x-callback-url scheme (launched with `open`).
 * When the user supplied custom tags, auto-extraction is skipped entirely.
 * The note opens in the background (open_note=no).
 */
export async function saveToBear(
  config: BearConfig,
): Promise<IntegrationResult> {
  try {
    const { plan, customTags, tagPosition = "append" } = config;
    const noteTitle = extractTitle(plan);
    const noteBody = stripH1(plan);
    // Only auto-extract tags when no custom tags were provided.
    const autoTags = customTags?.trim() ? undefined : await extractTags(plan);
    const text = buildBearContent(
      noteBody,
      buildHashtags(customTags, autoTags ?? []),
      tagPosition,
    );
    const callbackUrl = `bear://x-callback-url/create?title=${encodeURIComponent(noteTitle)}&text=${encodeURIComponent(text)}&open_note=no`;
    spawn("open", [callbackUrl], { stdio: "ignore" });
    return { success: true };
  } catch (err) {
    return {
      success: false,
      error: err instanceof Error ? err.message : "Unknown error",
    };
  }
}
/**
 * Create a note in Octarine via its URL scheme (launched with `open`).
 * The note is placed under the configured workspace/folder (folder
 * defaults to "plannotator") with generated frontmatter prepended.
 */
export async function saveToOctarine(
  config: OctarineConfig,
): Promise<IntegrationResult> {
  try {
    const { plan } = config;
    const workspace = config.workspace.trim();
    if (!workspace) return { success: false, error: "Workspace is required" };
    const folder = config.folder.trim() || "plannotator";
    // Octarine paths are extensionless — strip the generated .md suffix.
    const base = generateFilename(plan).replace(/\.md$/, "");
    const path = folder ? `${folder}/${base}` : base;
    const frontmatter = generateOctarineFrontmatter(await extractTags(plan));
    const content = `${frontmatter}\n\n${plan}`;
    const url = `octarine://create?path=${encodeURIComponent(path)}&content=${encodeURIComponent(content)}&workspace=${encodeURIComponent(workspace)}&fresh=true&openAfter=false`;
    spawn("open", [url], { stdio: "ignore" });
    return { success: true, path };
  } catch (err) {
    return {
      success: false,
      error: err instanceof Error ? err.message : "Unknown error",
    };
  }
}

View File

@@ -0,0 +1,109 @@
import { afterEach, describe, expect, test } from "bun:test";
import { getServerHostname, getServerPort, isRemoteSession } from "./network";
// Snapshot of the env vars this suite mutates; restored after every test.
const savedEnv: Record<string, string | undefined> = {};
const envKeys = ["PLANNOTATOR_REMOTE", "PLANNOTATOR_PORT", "SSH_TTY", "SSH_CONNECTION"];
// Save the current values into savedEnv, then delete them so each test
// starts from a clean environment.
function clearEnv() {
  for (const key of envKeys) {
    savedEnv[key] = process.env[key];
    delete process.env[key];
  }
}
// Put the saved values back (or remove keys that were unset) so tests
// don't leak environment state into each other or the host shell.
afterEach(() => {
  for (const key of envKeys) {
    if (savedEnv[key] !== undefined) {
      process.env[key] = savedEnv[key];
    } else {
      delete process.env[key];
    }
  }
});
// isRemoteSession(): PLANNOTATOR_REMOTE tri-state override, SSH fallback.
describe("pi remote detection", () => {
  test("false by default", () => {
    clearEnv();
    expect(isRemoteSession()).toBe(false);
  });
  test("true when PLANNOTATOR_REMOTE=1", () => {
    clearEnv();
    process.env.PLANNOTATOR_REMOTE = "1";
    expect(isRemoteSession()).toBe(true);
  });
  test("true when PLANNOTATOR_REMOTE=true", () => {
    clearEnv();
    process.env.PLANNOTATOR_REMOTE = "true";
    expect(isRemoteSession()).toBe(true);
  });
  test("false when PLANNOTATOR_REMOTE=0", () => {
    clearEnv();
    process.env.PLANNOTATOR_REMOTE = "0";
    expect(isRemoteSession()).toBe(false);
  });
  test("false when PLANNOTATOR_REMOTE=false", () => {
    clearEnv();
    process.env.PLANNOTATOR_REMOTE = "false";
    expect(isRemoteSession()).toBe(false);
  });
  // Explicit opt-out must win even when SSH markers are present.
  test("PLANNOTATOR_REMOTE=false overrides SSH_TTY", () => {
    clearEnv();
    process.env.PLANNOTATOR_REMOTE = "false";
    process.env.SSH_TTY = "/dev/pts/0";
    expect(isRemoteSession()).toBe(false);
  });
  test("PLANNOTATOR_REMOTE=0 overrides SSH_CONNECTION", () => {
    clearEnv();
    process.env.PLANNOTATOR_REMOTE = "0";
    process.env.SSH_CONNECTION = "192.168.1.1 12345 192.168.1.2 22";
    expect(isRemoteSession()).toBe(false);
  });
  test("true when SSH_TTY is set and env var is unset", () => {
    clearEnv();
    process.env.SSH_TTY = "/dev/pts/0";
    expect(isRemoteSession()).toBe(true);
  });
});
// getServerPort(): PLANNOTATOR_PORT > remote default (19432) > random (0).
describe("pi port selection", () => {
  test("uses random local port when false overrides SSH", () => {
    clearEnv();
    process.env.PLANNOTATOR_REMOTE = "false";
    process.env.SSH_TTY = "/dev/pts/0";
    expect(getServerPort()).toEqual({ port: 0, portSource: "random" });
  });
  test("uses default remote port when SSH is detected", () => {
    clearEnv();
    process.env.SSH_CONNECTION = "192.168.1.1 12345 192.168.1.2 22";
    expect(getServerPort()).toEqual({ port: 19432, portSource: "remote-default" });
  });
  test("PLANNOTATOR_PORT still takes precedence", () => {
    clearEnv();
    process.env.PLANNOTATOR_REMOTE = "false";
    process.env.SSH_TTY = "/dev/pts/0";
    process.env.PLANNOTATOR_PORT = "9999";
    expect(getServerPort()).toEqual({ port: 9999, portSource: "env" });
  });
});
// getServerHostname(): loopback locally, all interfaces when remote.
describe("pi server hostname", () => {
  test("binds local sessions to loopback", () => {
    clearEnv();
    expect(getServerHostname()).toBe("127.0.0.1");
  });
  test("binds remote sessions to all interfaces", () => {
    clearEnv();
    process.env.PLANNOTATOR_REMOTE = "1";
    expect(getServerHostname()).toBe("0.0.0.0");
  });
});

View File

@@ -0,0 +1,173 @@
/**
* Network utilities — remote detection, port binding, browser opening.
* isRemoteSession, getServerPort, listenOnPort, openBrowser
*/
import { spawn } from "node:child_process";
import type { Server } from "node:http";
import { release } from "node:os";
// Fixed port for remote sessions so users can set up SSH port forwarding.
const DEFAULT_REMOTE_PORT = 19432;
// Bind address used for local (non-remote) sessions.
const LOOPBACK_HOST = "127.0.0.1";
/**
* Check if running in a remote session (SSH, devcontainer, etc.)
* Honors PLANNOTATOR_REMOTE as a tri-state override, or detects SSH_TTY/SSH_CONNECTION.
*/
/**
 * Read PLANNOTATOR_REMOTE as a tri-state flag:
 * true for "1"/"true", false for "0"/"false" (case-insensitive on the
 * words), null when unset or unrecognized.
 */
function getRemoteOverride(): boolean | null {
  const raw = process.env.PLANNOTATOR_REMOTE;
  if (raw === undefined) return null;
  const lowered = raw.toLowerCase();
  if (raw === "1" || lowered === "true") return true;
  if (raw === "0" || lowered === "false") return false;
  return null;
}
export function isRemoteSession(): boolean {
const remoteOverride = getRemoteOverride();
if (remoteOverride !== null) {
return remoteOverride;
}
// Legacy SSH detection
if (process.env.SSH_TTY || process.env.SSH_CONNECTION) {
return true;
}
return false;
}
/**
 * Decide which port the server should bind.
 * Precedence: PLANNOTATOR_PORT (when a valid 1-65535 integer) → the fixed
 * remote default (predictable for port forwarding) → 0, letting the OS
 * pick a free port for local sessions. portSource reports which rule won.
 */
export function getServerPort(): {
  port: number;
  portSource: "env" | "remote-default" | "random";
} {
  const requested = process.env.PLANNOTATOR_PORT;
  if (requested) {
    const candidate = parseInt(requested, 10);
    if (!Number.isNaN(candidate) && candidate > 0 && candidate < 65536) {
      return { port: candidate, portSource: "env" };
    }
    // Invalid port value — ignore silently; callers can inspect the env var.
  }
  return isRemoteSession()
    ? { port: DEFAULT_REMOTE_PORT, portSource: "remote-default" }
    : { port: 0, portSource: "random" };
}
/** Bind address: all interfaces for remote sessions, loopback otherwise. */
export function getServerHostname(): string {
  if (isRemoteSession()) {
    return "0.0.0.0";
  }
  return LOOPBACK_HOST;
}
const MAX_RETRIES = 5;
const RETRY_DELAY_MS = 500;
/**
 * Bind the server to the configured port/host, retrying on EADDRINUSE.
 *
 * Makes up to MAX_RETRIES attempts, sleeping RETRY_DELAY_MS between them.
 * Resolves with the actual bound port (meaningful when the configured
 * port is 0/random) and the source of the port choice.
 *
 * @throws Error when the port is still busy after all retries (with a
 *   PLANNOTATOR_PORT hint for remote sessions), or immediately on any
 *   other listen error.
 */
export async function listenOnPort(
  server: Server,
): Promise<{ port: number; portSource: "env" | "remote-default" | "random" }> {
  const result = getServerPort();
  for (let attempt = 1; attempt <= MAX_RETRIES; attempt++) {
    try {
      await new Promise<void>((resolve, reject) => {
        server.once("error", reject);
        server.listen(
          result.port,
          getServerHostname(),
          () => {
            server.removeListener("error", reject);
            resolve();
          },
        );
      });
      const addr = server.address() as { port: number };
      return { port: addr.port, portSource: result.portSource };
    } catch (err: unknown) {
      // Prefer the structured Node error code; keep the message match as a
      // fallback for errors re-wrapped without a code. Matching only on
      // message text is brittle across Node versions/locales.
      const isAddressInUse =
        (err as NodeJS.ErrnoException | null)?.code === "EADDRINUSE" ||
        (err instanceof Error && err.message.includes("EADDRINUSE"));
      if (isAddressInUse && attempt < MAX_RETRIES) {
        await new Promise((r) => setTimeout(r, RETRY_DELAY_MS));
        continue;
      }
      if (isAddressInUse) {
        const hint = isRemoteSession()
          ? " (set PLANNOTATOR_PORT to use a different port)"
          : "";
        throw new Error(
          `Port ${result.port} in use after ${MAX_RETRIES} retries${hint}`,
        );
      }
      throw err;
    }
  }
  // Unreachable, but satisfies TypeScript
  throw new Error("Failed to bind port");
}
/**
 * Open URL in system browser (Node-compatible, no Bun $ dependency).
 * Honors PLANNOTATOR_BROWSER and BROWSER env vars.
 * Returns { opened: true } if browser was opened, { opened: false, isRemote: true, url } if remote session.
 * Launching is best-effort: spawn errors are swallowed and surface only
 * as { opened: false } when spawn itself throws synchronously.
 */
export function openBrowser(url: string): {
  opened: boolean;
  isRemote?: boolean;
  url?: string;
} {
  // PLANNOTATOR_BROWSER takes precedence over the conventional BROWSER var.
  const browser = process.env.PLANNOTATOR_BROWSER || process.env.BROWSER;
  // Remote sessions are assumed to have no local display — bail out with
  // the URL so the caller can print it, unless a browser command is set.
  if (isRemoteSession() && !browser) {
    return { opened: false, isRemote: true, url };
  }
  try {
    const platform = process.platform;
    // WSL reports platform "linux" but the kernel release string contains
    // "microsoft"; launching must then go through cmd.exe like Windows.
    const wsl =
      platform === "linux" && release().toLowerCase().includes("microsoft");
    let cmd: string;
    let args: string[];
    if (browser) {
      if (process.env.PLANNOTATOR_BROWSER && platform === "darwin") {
        // macOS: treat the configured value as an app name for `open -a`.
        cmd = "open";
        args = ["-a", browser, url];
      } else if (platform === "win32" || wsl) {
        // The empty "" argument is the window title `start` would
        // otherwise consume as its first quoted argument.
        cmd = "cmd.exe";
        args = ["/c", "start", "", browser, url];
      } else {
        cmd = browser;
        args = [url];
      }
    } else if (platform === "win32" || wsl) {
      cmd = "cmd.exe";
      args = ["/c", "start", "", url];
    } else if (platform === "darwin") {
      cmd = "open";
      args = [url];
    } else {
      cmd = "xdg-open";
      args = [url];
    }
    // Detach with ignored stdio so the browser outlives this process;
    // the no-op error handler prevents a missing binary from crashing us.
    const child = spawn(cmd, args, { detached: true, stdio: "ignore" });
    child.once("error", () => {});
    child.unref();
    return { opened: true };
  } catch {
    return { opened: false };
  }
}

View File

@@ -0,0 +1,124 @@
/**
* PR/MR provider for Node.js runtime.
* Node.js PRRuntime + bound dispatch functions from shared pr-provider.
*/
import { spawn } from "node:child_process";
import {
checkAuth as checkAuthCore,
fetchPRContext as fetchPRContextCore,
fetchPR as fetchPRCore,
fetchPRFileContent as fetchPRFileContentCore,
fetchPRViewedFiles as fetchPRViewedFilesCore,
fetchPRStack as fetchPRStackCore,
fetchPRList as fetchPRListCore,
getUser as getUserCore,
markPRFilesViewed as markPRFilesViewedCore,
type PRMetadata,
type PRRef,
type PRReviewFileComment,
type PRRuntime,
type PRStackTree,
type PRListItem,
parsePRUrl as parsePRUrlCore,
submitPRReview as submitPRReviewCore,
} from "../generated/pr-provider.js";
/**
 * Collect a child process's stdout/stderr and resolve with its exit code.
 * Rejects on spawn failure ('error' event); a null exit code (killed by
 * signal) is reported as exit code 1.
 */
function collectProcess(
  proc: ReturnType<typeof spawn>,
): Promise<{ stdout: string; stderr: string; exitCode: number }> {
  return new Promise((resolve, reject) => {
    let stdout = "";
    let stderr = "";
    proc.stdout?.on("data", (chunk: Buffer) => {
      stdout += chunk.toString();
    });
    proc.stderr?.on("data", (chunk: Buffer) => {
      stderr += chunk.toString();
    });
    proc.on("error", reject);
    proc.on("close", (exitCode) => {
      resolve({ stdout, stderr, exitCode: exitCode ?? 1 });
    });
  });
}
/** Node.js runtime adapter for the shared PR provider dispatch functions. */
const prRuntime: PRRuntime = {
  async runCommand(cmd, args) {
    const proc = spawn(cmd, args, { stdio: ["ignore", "pipe", "pipe"] });
    return collectProcess(proc);
  },
  async runCommandWithInput(cmd, args, input) {
    const proc = spawn(cmd, args, { stdio: ["pipe", "pipe", "pipe"] });
    const done = collectProcess(proc);
    // Guard stdin against EPIPE: if the child exits before consuming its
    // input, the write would otherwise emit an unhandled 'error' event on
    // the stream and crash the process.
    proc.stdin?.on("error", () => {});
    proc.stdin?.write(input);
    proc.stdin?.end();
    return done;
  },
};
// Each export below binds the shared pr-provider core function to the
// Node.js prRuntime so callers don't have to thread the runtime through.

// URL parsing is pure — re-export the core directly.
export const parsePRUrl = parsePRUrlCore;
export function checkPRAuth(ref: PRRef) {
  return checkAuthCore(prRuntime, ref);
}
export function getPRUser(ref: PRRef) {
  return getUserCore(prRuntime, ref);
}
export function fetchPR(ref: PRRef) {
  return fetchPRCore(prRuntime, ref);
}
export function fetchPRContext(ref: PRRef) {
  return fetchPRContextCore(prRuntime, ref);
}
// Fetch one file's content at a specific commit of the PR.
export function fetchPRFileContent(ref: PRRef, sha: string, filePath: string) {
  return fetchPRFileContentCore(prRuntime, ref, sha, filePath);
}
// Submit a review (approve or comment) with optional per-file comments,
// anchored to the given head SHA.
export function submitPRReview(
  ref: PRRef,
  headSha: string,
  action: "approve" | "comment",
  body: string,
  fileComments: PRReviewFileComment[],
) {
  return submitPRReviewCore(
    prRuntime,
    ref,
    headSha,
    action,
    body,
    fileComments,
  );
}
// Map of file path -> viewed flag for the PR.
export function fetchPRViewedFiles(ref: PRRef): Promise<Record<string, boolean>> {
  return fetchPRViewedFilesCore(prRuntime, ref);
}
// Mark (or unmark) a set of files as viewed on the PR identified by prNodeId.
export function markPRFilesViewed(
  ref: PRRef,
  prNodeId: string,
  filePaths: string[],
  viewed: boolean,
): Promise<void> {
  return markPRFilesViewedCore(prRuntime, ref, prNodeId, filePaths, viewed);
}
// Stack tree for the PR, or null — semantics defined by the shared core.
export function fetchPRStack(
  ref: PRRef,
  metadata: PRMetadata,
): Promise<PRStackTree | null> {
  return fetchPRStackCore(prRuntime, ref, metadata);
}
export function fetchPRList(
  ref: PRRef,
): Promise<PRListItem[]> {
  return fetchPRListCore(prRuntime, ref);
}

View File

@@ -0,0 +1,64 @@
/**
* Project detection — repo info, project name, remote URL parsing.
* detectProjectName, getRepoInfo, parseRemoteUrl
*/
import { execSync } from "node:child_process";
import { basename } from "node:path";
import { sanitizeTag } from "../generated/project.js";
import { parseRemoteUrl, getDirName } from "../generated/repo.js";
/** Run a git command and return stdout (empty string on error). */
function git(cmd: string): string {
  try {
    const output = execSync(`git ${cmd}`, {
      encoding: "utf-8",
      stdio: ["pipe", "pipe", "pipe"],
    });
    return output.trim();
  } catch {
    // Missing git binary, non-zero exit, not a repo — all collapse to "".
    return "";
  }
}
/**
 * Resolve a tag-safe project name: the git repo root's basename when
 * inside a repository, otherwise the cwd's basename, otherwise "_unknown".
 */
export function detectProjectName(): string {
  try {
    const repoRoot = execSync("git rev-parse --show-toplevel", {
      encoding: "utf-8",
      stdio: ["pipe", "pipe", "pipe"],
    }).trim();
    return sanitizeTag(basename(repoRoot)) ?? "_unknown";
  } catch {
    // Not a git repo — fall back to cwd
  }
  try {
    return sanitizeTag(basename(process.cwd())) ?? "_unknown";
  } catch {
    return "_unknown";
  }
}
/**
 * Describe the current repository for display. Prefers "org/repo" parsed
 * from the origin remote, then the repo directory's name, then the cwd's
 * name; null when none of those work. Branch is included only when HEAD
 * is on a named branch (not detached).
 */
export function getRepoInfo(): { display: string; branch?: string } | null {
  const rawBranch = git("rev-parse --abbrev-ref HEAD");
  // "HEAD" means detached — omit the branch in that case.
  const branch = rawBranch && rawBranch !== "HEAD" ? rawBranch : undefined;
  const fromRemote = parseRemoteUrl(git("remote get-url origin"));
  if (fromRemote) {
    return { display: fromRemote, branch };
  }
  const fromTopLevel = getDirName(git("rev-parse --show-toplevel"));
  if (fromTopLevel) {
    return { display: fromTopLevel, branch };
  }
  const fromCwd = getDirName(process.cwd());
  return fromCwd ? { display: fromCwd } : null;
}

View File

@@ -0,0 +1,358 @@
/**
* Document and reference handlers (Node.js equivalents of packages/server/reference-handlers.ts).
* VaultNode, buildFileTree, walkMarkdownFiles, handleDocRequest,
* detectObsidianVaults, handleObsidian*, handleFileBrowserRequest
*/
import {
existsSync,
readdirSync,
readFileSync,
statSync,
type Dirent,
} from "node:fs";
import type { ServerResponse } from "node:http";
import { isAbsolute, join, relative, resolve as resolvePath, sep } from "node:path";
import { json, parseBody } from "./helpers";
import type { IncomingMessage } from "node:http";
import {
type VaultNode,
buildFileTree,
FILE_BROWSER_EXCLUDED,
} from "../generated/reference-common.js";
import { detectObsidianVaults } from "../generated/integrations-common.js";
import {
isAbsoluteUserPath,
isCodeFilePath,
resolveCodeFile,
resolveMarkdownFile,
resolveUserPath,
isWithinProjectRoot,
warmFileListCache,
} from "../generated/resolve-file.js";
import { htmlToMarkdown } from "../generated/html-to-markdown.js";
import { preloadFile } from "@pierre/diffs/ssr";
// Shorthand for the response type used by every handler below.
type Res = ServerResponse;
/**
 * Recursively walk a directory collecting files by extension, skipping
 * ignored dirs. Matches are pushed into `results` as root-relative,
 * forward-slashed paths. Unreadable directories are skipped silently.
 */
function walkMarkdownFiles(dir: string, root: string, results: string[], extensions: RegExp = /\.(mdx?|html?)$/i): void {
  let children: Dirent[];
  try {
    children = readdirSync(dir, { withFileTypes: true }) as Dirent[];
  } catch {
    return;
  }
  for (const child of children) {
    const fullPath = join(dir, child.name);
    if (child.isDirectory()) {
      // The exclusion list stores directory names with a trailing slash.
      if (!FILE_BROWSER_EXCLUDED.includes(`${child.name}/`)) {
        walkMarkdownFiles(fullPath, root, results, extensions);
      }
    } else if (child.isFile() && extensions.test(child.name)) {
      results.push(fullPath.slice(root.length + 1).replace(/\\/g, "/"));
    }
  }
}
/**
 * Serve a linked markdown document. Uses shared resolveMarkdownFile for parity with Bun server.
 *
 * GET /api/doc?path=...&base=... — resolution order:
 *   1. relative to `base` (annotate mode), markdown/HTML only;
 *   2. HTML files relative to cwd, with project-root containment;
 *   3. code files: literal path first, then the smart resolver (2MB cap,
 *      best-effort SSR prerender);
 *   4. markdown via the shared smart resolver.
 * Responds 400 on ambiguity, 403 on root escapes, 404 when nothing matches.
 */
export async function handleDocRequest(res: Res, url: URL): Promise<void> {
  const requestedPath = url.searchParams.get("path");
  if (!requestedPath) {
    json(res, { error: "Missing path parameter" }, 400);
    return;
  }
  // Side-channel: warm the code-file walk so /api/doc/exists POSTs land warm.
  void warmFileListCache(process.cwd(), "code");
  // Try resolving relative to base directory first (used by annotate mode).
  // No isWithinProjectRoot check here — intentional, matches pre-existing
  // markdown behavior. The base param is set server-side by the annotate
  // server (see serverAnnotate.ts /api/doc route). The standalone HTML
  // block below (no base) retains its cwd-based containment check.
  const base = url.searchParams.get("base");
  const resolvedBase = base ? resolveUserPath(base) : null;
  if (
    resolvedBase &&
    !isAbsoluteUserPath(requestedPath) &&
    /\.(mdx?|html?)$/i.test(requestedPath)
  ) {
    const fromBase = resolveUserPath(requestedPath, resolvedBase);
    try {
      if (existsSync(fromBase)) {
        const raw = readFileSync(fromBase, "utf-8");
        // HTML sources are converted to markdown before being served.
        const isHtml = /\.html?$/i.test(requestedPath);
        const markdown = isHtml ? htmlToMarkdown(raw) : raw;
        json(res, { markdown, filepath: fromBase, isConverted: isHtml });
        return;
      }
    } catch {
      /* fall through to standard resolution */
    }
  }
  // HTML files: resolve directly (not via resolveMarkdownFile which only handles .md/.mdx)
  const projectRoot = process.cwd();
  if (/\.html?$/i.test(requestedPath)) {
    const resolvedHtml = resolveUserPath(requestedPath, resolvedBase || projectRoot);
    if (!isWithinProjectRoot(resolvedHtml, projectRoot)) {
      json(res, { error: "Access denied: path is outside project root" }, 403);
      return;
    }
    try {
      if (existsSync(resolvedHtml)) {
        const html = readFileSync(resolvedHtml, "utf-8");
        json(res, { markdown: htmlToMarkdown(html), filepath: resolvedHtml, isConverted: true });
        return;
      }
    } catch { /* fall through to 404 */ }
    json(res, { error: `File not found: ${requestedPath}` }, 404);
    return;
  }
  // Code files: try literal resolve first; on miss, fall back to smart resolver.
  if (isCodeFilePath(requestedPath)) {
    const literalPath = resolveUserPath(requestedPath, resolvedBase || projectRoot);
    // With an explicit base the literal path skips the root check (same
    // rationale as the markdown base handling above).
    const literalAllowed = resolvedBase || isWithinProjectRoot(literalPath, projectRoot);
    let resolvedCode: string | null = null;
    if (literalAllowed && existsSync(literalPath)) {
      resolvedCode = literalPath;
    }
    if (!resolvedCode) {
      const result = await resolveCodeFile(requestedPath, projectRoot);
      if (result.kind === "found") {
        resolvedCode = result.path;
      } else if (result.kind === "ambiguous") {
        // Report matches relative to the project root for readability.
        const prefix = `${projectRoot}/`;
        const relative = result.matches.map((m: string) =>
          m.startsWith(prefix) ? m.slice(prefix.length) : m,
        );
        json(res, { error: `Ambiguous path '${requestedPath}'`, matches: relative }, 400);
        return;
      } else {
        json(res, { error: `File not found: ${requestedPath}` }, 404);
        return;
      }
      if (!isWithinProjectRoot(resolvedCode, projectRoot)) {
        json(res, { error: "Access denied: path is outside project root" }, 403);
        return;
      }
    }
    try {
      // Refuse files over 2MB to keep the JSON payload bounded.
      const stat = statSync(resolvedCode);
      if (stat.size > 2 * 1024 * 1024) {
        json(res, { error: "File too large (max 2MB)" }, 413);
        return;
      }
      const contents = readFileSync(resolvedCode, "utf-8");
      const displayName = resolvedCode.split("/").pop() || resolvedCode;
      // Best-effort server-side prerender of the diff/file view.
      let prerenderedHTML: string | undefined;
      try {
        const result = await preloadFile({
          file: { name: displayName, contents },
          options: { disableFileHeader: true },
        });
        prerenderedHTML = result.prerenderedHTML;
      } catch {
        // Fall back to client-side rendering
      }
      json(res, { codeFile: true, contents, filepath: resolvedCode, prerenderedHTML });
      return;
    } catch {
      json(res, { error: `File not found: ${requestedPath}` }, 404);
      return;
    }
  }
  // Markdown: shared smart resolver (parity with the Bun server).
  const result = resolveMarkdownFile(requestedPath, projectRoot);
  if (result.kind === "ambiguous") {
    json(
      res,
      {
        error: `Ambiguous filename '${result.input}': found ${result.matches.length} matches`,
        matches: result.matches,
      },
      400,
    );
    return;
  }
  if (result.kind === "not_found" || result.kind === "unavailable") {
    json(res, { error: `File not found: ${result.input}` }, 404);
    return;
  }
  try {
    const markdown = readFileSync(result.path, "utf-8");
    json(res, { markdown, filepath: result.path });
  } catch {
    json(res, { error: "Failed to read file" }, 500);
  }
}
/**
 * Batch existence check for code-file paths the renderer wants to linkify.
 * POST /api/doc/exists with { paths: string[] } (max 500 entries) and an
 * optional { base } directory hint; responds with a per-path status map.
 *
 * TODO(security): see packages/server/reference-handlers.ts handleDocExists —
 * both absolute paths in `paths[]` AND the `base` field are honored verbatim
 * with no project-root containment check, leaking file existence back to the
 * caller. Fix in lockstep with the Bun handler.
 */
export async function handleDocExistsRequest(res: Res, req: IncomingMessage): Promise<void> {
  const body = await parseBody(req);
  const candidate = (body as { paths?: unknown }).paths;
  if (!Array.isArray(candidate) || candidate.some((p) => typeof p !== "string")) {
    json(res, { error: "Expected { paths: string[] }" }, 400);
    return;
  }
  const paths = candidate as string[];
  if (paths.length > 500) {
    json(res, { error: "Too many paths (max 500)" }, 400);
    return;
  }
  const rawBase = (body as { base?: unknown }).base;
  const baseDir =
    typeof rawBase === "string" && rawBase.length > 0
      ? resolveUserPath(rawBase)
      : undefined;
  const projectRoot = process.cwd();
  const rootPrefix = `${projectRoot}/`;
  const results: Record<
    string,
    | { status: "found"; resolved: string }
    | { status: "ambiguous"; matches: string[] }
    | { status: "missing" }
    | { status: "unavailable" }
  > = {};
  // Resolve every path concurrently; each entry fills its own result slot.
  await Promise.all(
    paths.map(async (p) => {
      const r = await resolveCodeFile(p, projectRoot, baseDir);
      switch (r.kind) {
        case "found":
          results[p] = { status: "found", resolved: r.path };
          break;
        case "ambiguous":
          // Report matches relative to the project root when possible.
          results[p] = {
            status: "ambiguous",
            matches: r.matches.map((m: string) =>
              m.startsWith(rootPrefix) ? m.slice(rootPrefix.length) : m,
            ),
          };
          break;
        case "unavailable":
          results[p] = { status: "unavailable" };
          break;
        default:
          results[p] = { status: "missing" };
      }
    }),
  );
  json(res, { results });
}
/** GET /api/obsidian/vaults — list auto-detected Obsidian vaults. */
export function handleObsidianVaultsRequest(res: Res): void {
  const vaults = detectObsidianVaults();
  json(res, { vaults });
}
export function handleObsidianFilesRequest(res: Res, url: URL): void {
const vaultPath = url.searchParams.get("vaultPath");
if (!vaultPath) {
json(res, { error: "Missing vaultPath parameter" }, 400);
return;
}
const resolvedVault = resolveUserPath(vaultPath);
if (!existsSync(resolvedVault) || !statSync(resolvedVault).isDirectory()) {
json(res, { error: "Invalid vault path" }, 400);
return;
}
try {
const files: string[] = [];
walkMarkdownFiles(resolvedVault, resolvedVault, files, /\.mdx?$/i);
files.sort();
json(res, { tree: buildFileTree(files) });
} catch {
json(res, { error: "Failed to list vault files" }, 500);
}
}
/**
 * GET /api/reference/obsidian/doc — read one markdown note from a vault.
 * Supports bare-filename lookup (case-insensitive, vault-wide) when the
 * requested path has no directory component; responds 400 when ambiguous.
 * Rejects paths that resolve outside the vault.
 */
export function handleObsidianDocRequest(res: Res, url: URL): void {
  const vaultPath = url.searchParams.get("vaultPath");
  const filePath = url.searchParams.get("path");
  if (!vaultPath || !filePath) {
    json(res, { error: "Missing vaultPath or path parameter" }, 400);
    return;
  }
  if (!/\.mdx?$/i.test(filePath)) {
    json(res, { error: "Only markdown files are supported" }, 400);
    return;
  }
  const resolvedVault = resolveUserPath(vaultPath);
  let resolvedFile = resolvePath(resolvedVault, filePath);
  // Bare filename search within vault
  if (!existsSync(resolvedFile) && !filePath.includes("/")) {
    const files: string[] = [];
    walkMarkdownFiles(resolvedVault, resolvedVault, files, /\.mdx?$/i);
    const matches = files.filter(
      (f) => f.split("/").pop()!.toLowerCase() === filePath.toLowerCase(),
    );
    if (matches.length === 1) {
      resolvedFile = resolvePath(resolvedVault, matches[0]);
    } else if (matches.length > 1) {
      json(
        res,
        {
          error: `Ambiguous filename '${filePath}': found ${matches.length} matches`,
          matches,
        },
        400,
      );
      return;
    }
  }
  // Security: the resolved file must stay inside the vault. path.relative
  // is separator-agnostic, so this also holds on Windows where resolve()
  // produces backslashes (a startsWith(vault + "/") test never matches
  // there and would deny every request).
  const rel = relative(resolvedVault, resolvedFile);
  const outsideVault =
    rel === ".." || rel.startsWith(`..${sep}`) || isAbsolute(rel);
  if (outsideVault) {
    json(res, { error: "Access denied: path is outside vault" }, 403);
    return;
  }
  if (!existsSync(resolvedFile)) {
    json(res, { error: `File not found: ${filePath}` }, 404);
    return;
  }
  try {
    const markdown = readFileSync(resolvedFile, "utf-8");
    json(res, { markdown, filepath: resolvedFile });
  } catch {
    json(res, { error: "Failed to read file" }, 500);
  }
}
/** GET /api/reference/files — doc-file tree rooted at an arbitrary directory. */
export function handleFileBrowserRequest(res: Res, url: URL): void {
  const dirParam = url.searchParams.get("dirPath");
  if (!dirParam) {
    json(res, { error: "Missing dirPath parameter" }, 400);
    return;
  }
  const root = resolveUserPath(dirParam);
  if (!existsSync(root) || !statSync(root).isDirectory()) {
    json(res, { error: "Invalid directory path" }, 400);
    return;
  }
  try {
    const collected: string[] = [];
    // Default extension filter: markdown plus HTML.
    walkMarkdownFiles(root, root, collected);
    collected.sort();
    json(res, { tree: buildFileTree(collected) });
  } catch {
    json(res, { error: "Failed to list directory files" }, 500);
  }
}

View File

@@ -0,0 +1,181 @@
import { createServer } from "node:http";
import { dirname, resolve as resolvePath } from "node:path";
import { contentHash, deleteDraft } from "../generated/draft.js";
import { saveConfig, detectGitUser, getServerConfig } from "../generated/config.js";
import {
handleDraftRequest,
handleFavicon,
handleImageRequest,
handleUploadRequest,
} from "./handlers.js";
import { html, json, parseBody, requestUrl } from "./helpers.js";
import { listenOnPort } from "./network.js";
import { getRepoInfo } from "./project.js";
import {
handleDocRequest,
handleDocExistsRequest,
handleFileBrowserRequest,
handleObsidianVaultsRequest,
handleObsidianFilesRequest,
handleObsidianDocRequest,
} from "./reference.js";
import { warmFileListCache } from "../generated/resolve-file.js";
import { createExternalAnnotationHandler } from "./external-annotations.js";
/**
 * Handle returned by startAnnotateServer: where the UI is reachable plus
 * hooks for awaiting the user's decision and shutting the server down.
 */
export interface AnnotateServerResult {
  // Actual bound port (may differ from the requested one when random).
  port: number;
  // How the port was chosen — env override, remote default, or OS-assigned.
  portSource: "env" | "remote-default" | "random";
  // Convenience localhost URL for the bound port.
  url: string;
  // Resolves once the user exits, approves, or submits feedback.
  waitForDecision: () => Promise<{ feedback: string; annotations: unknown[]; exit?: boolean; approved?: boolean }>;
  // Close the underlying HTTP server.
  stop: () => void;
}
/**
 * Start the annotate-mode HTTP server for one document session.
 *
 * Serves the bundled UI (catch-all route) plus a JSON API: the plan
 * payload, config saves, image/upload handling, drafts keyed by content
 * hash, linked-document resolution, Obsidian and file-browser reference
 * endpoints, and the exit/approve/feedback decision routes. The returned
 * waitForDecision() resolves exactly once, with the first decision made.
 */
export async function startAnnotateServer(options: {
  markdown: string;
  filePath: string;
  htmlContent: string;
  origin?: string;
  mode?: string;
  folderPath?: string;
  sharingEnabled?: boolean;
  shareBaseUrl?: string;
  pasteApiUrl?: string;
  sourceInfo?: string;
  sourceConverted?: boolean;
  gate?: boolean;
}): Promise<AnnotateServerResult> {
  // Side-channel pre-warm so /api/doc/exists POSTs land on warm cache.
  void warmFileListCache(process.cwd(), "code");
  const gitUser = detectGitUser();
  // Options take precedence over environment for sharing configuration.
  const sharingEnabled =
    options.sharingEnabled ?? process.env.PLANNOTATOR_SHARE !== "disabled";
  const shareBaseUrl =
    (options.shareBaseUrl ?? process.env.PLANNOTATOR_SHARE_URL) || undefined;
  const pasteApiUrl =
    (options.pasteApiUrl ?? process.env.PLANNOTATOR_PASTE_URL) || undefined;
  // Single-shot decision: the resolver is captured so any decision route
  // below can settle the promise returned by waitForDecision().
  let resolveDecision!: (result: {
    feedback: string;
    annotations: unknown[];
    exit?: boolean;
    approved?: boolean;
  }) => void;
  const decisionPromise = new Promise<{
    feedback: string;
    annotations: unknown[];
    exit?: boolean;
    approved?: boolean;
  }>((r) => {
    resolveDecision = r;
  });
  // Folder annotation has no stable markdown body, so key drafts by folder path instead.
  const draftSource =
    options.mode === "annotate-folder" && options.folderPath
      ? `folder:${resolvePath(options.folderPath)}`
      : options.markdown;
  const draftKey = contentHash(draftSource);
  // Detect repo info (cached for this session)
  const repoInfo = getRepoInfo();
  const externalAnnotations = createExternalAnnotationHandler("plan");
  const server = createServer(async (req, res) => {
    const url = requestUrl(req);
    // External annotation routes get first crack at every request.
    if (await externalAnnotations.handle(req, res, url)) return;
    if (url.pathname === "/api/plan" && req.method === "GET") {
      // Initial payload consumed by the UI on load.
      json(res, {
        plan: options.markdown,
        origin: options.origin ?? "pi",
        mode: options.mode || "annotate",
        filePath: options.filePath,
        sourceInfo: options.sourceInfo,
        sourceConverted: options.sourceConverted ?? false,
        gate: options.gate ?? false,
        sharingEnabled,
        shareBaseUrl,
        pasteApiUrl,
        repoInfo,
        projectRoot: options.folderPath || process.cwd(),
        serverConfig: getServerConfig(gitUser),
      });
    } else if (url.pathname === "/api/config" && req.method === "POST") {
      // Persist only the fields the client actually sent.
      try {
        const body = (await parseBody(req)) as { displayName?: string; diffOptions?: Record<string, unknown>; conventionalComments?: boolean };
        const toSave: Record<string, unknown> = {};
        if (body.displayName !== undefined) toSave.displayName = body.displayName;
        if (body.diffOptions !== undefined) toSave.diffOptions = body.diffOptions;
        if (body.conventionalComments !== undefined) toSave.conventionalComments = body.conventionalComments;
        if (Object.keys(toSave).length > 0) saveConfig(toSave as Parameters<typeof saveConfig>[0]);
        json(res, { ok: true });
      } catch {
        json(res, { error: "Invalid request" }, 400);
      }
    } else if (url.pathname === "/api/image") {
      handleImageRequest(res, url);
    } else if (url.pathname === "/api/upload" && req.method === "POST") {
      await handleUploadRequest(req, res);
    } else if (url.pathname === "/api/draft") {
      await handleDraftRequest(req, res, draftKey);
    } else if (url.pathname === "/api/doc" && req.method === "GET") {
      // Inject source file's directory as base for relative path resolution.
      // Skip for URL annotations — there's no local directory to resolve against.
      if (!url.searchParams.has("base") && options.filePath && !/^https?:\/\//i.test(options.filePath)) {
        url.searchParams.set("base", dirname(resolvePath(options.filePath)));
      }
      await handleDocRequest(res, url);
    } else if (url.pathname === "/api/doc/exists" && req.method === "POST") {
      await handleDocExistsRequest(res, req);
    } else if (url.pathname === "/api/obsidian/vaults") {
      handleObsidianVaultsRequest(res);
    } else if (url.pathname === "/api/reference/obsidian/files" && req.method === "GET") {
      handleObsidianFilesRequest(res, url);
    } else if (url.pathname === "/api/reference/obsidian/doc" && req.method === "GET") {
      handleObsidianDocRequest(res, url);
    } else if (url.pathname === "/api/reference/files" && req.method === "GET") {
      handleFileBrowserRequest(res, url);
    } else if (url.pathname === "/favicon.svg") {
      handleFavicon(res);
    } else if (url.pathname === "/api/exit" && req.method === "POST") {
      // All three decision routes clear the draft before settling.
      deleteDraft(draftKey);
      resolveDecision({ feedback: "", annotations: [], exit: true });
      json(res, { ok: true });
    } else if (url.pathname === "/api/approve" && req.method === "POST") {
      deleteDraft(draftKey);
      resolveDecision({ feedback: "", annotations: [], approved: true });
      json(res, { ok: true });
    } else if (url.pathname === "/api/feedback" && req.method === "POST") {
      try {
        const body = await parseBody(req);
        deleteDraft(draftKey);
        resolveDecision({
          feedback: (body.feedback as string) || "",
          annotations: (body.annotations as unknown[]) || [],
        });
        json(res, { ok: true });
      } catch (err) {
        const message = err instanceof Error ? err.message : "Failed to process feedback";
        json(res, { error: message }, 500);
      }
    } else {
      // Catch-all: serve the bundled single-page UI.
      html(res, options.htmlContent);
    }
  });
  const { port, portSource } = await listenOnPort(server);
  return {
    port,
    portSource,
    url: `http://localhost:${port}`,
    waitForDecision: () => decisionPromise,
    stop: () => server.close(),
  };
}

View File

@@ -0,0 +1,480 @@
import { randomUUID } from "node:crypto";
import { createServer } from "node:http";
import { contentHash, deleteDraft } from "../generated/draft.js";
import {
type ArchivedPlan,
generateSlug,
getPlanVersion,
getPlanVersionPath,
getVersionCount,
listArchivedPlans,
listVersions,
readArchivedPlan,
saveAnnotations,
saveFinalSnapshot,
saveToHistory,
} from "../generated/storage.js";
import { createEditorAnnotationHandler } from "./annotations.js";
import { createExternalAnnotationHandler } from "./external-annotations.js";
import {
handleDraftRequest,
handleFavicon,
handleImageRequest,
handleUploadRequest,
} from "./handlers.js";
import { html, json, parseBody, requestUrl } from "./helpers.js";
import { openEditorDiff } from "./ide.js";
import {
type BearConfig,
type IntegrationResult,
type ObsidianConfig,
type OctarineConfig,
saveToBear,
saveToObsidian,
saveToOctarine,
} from "./integrations.js";
import { listenOnPort } from "./network.js";
import { saveConfig, detectGitUser, getServerConfig } from "../generated/config.js";
import { detectProjectName, getRepoInfo } from "./project.js";
import {
handleDocRequest,
handleDocExistsRequest,
handleFileBrowserRequest,
handleObsidianDocRequest,
handleObsidianFilesRequest,
handleObsidianVaultsRequest,
} from "./reference.js";
import { warmFileListCache } from "../generated/resolve-file.js";
/** Outcome of a plan-review session, published once the user decides. */
export interface PlanReviewDecision {
  /** True when the user approved the plan; false on deny. */
  approved: boolean;
  /** Reviewer feedback text, when provided (always set on deny). */
  feedback?: string;
  /** Path of the saved final snapshot, when plan saving was enabled. */
  savedPath?: string;
  /** Agent the user asked to switch to, if any. */
  agentSwitch?: string;
  /** Permission mode chosen in the UI (falls back to the server's initial mode). */
  permissionMode?: string;
}
/** Handle returned by startPlanReviewServer for a running review server. */
export interface PlanServerResult {
  /** Unique identifier for this review session. */
  reviewId: string;
  /** TCP port the server is listening on. */
  port: number;
  /** How the port was chosen. */
  portSource: "env" | "remote-default" | "random";
  /** Base URL of the server, e.g. http://localhost:<port>. */
  url: string;
  /** Resolves when the user approves or denies the plan. */
  waitForDecision: () => Promise<PlanReviewDecision>;
  /** Subscribes to the decision; returns an unsubscribe function. */
  onDecision: (listener: (result: PlanReviewDecision) => void | Promise<void>) => () => void;
  /** Archive mode only: resolves when the user signals they are done. */
  waitForDone?: () => Promise<void>;
  /** Shuts down the HTTP server. */
  stop: () => void;
}
/**
 * Start the plan-review HTTP server.
 *
 * Serves the review UI (HTML fallback route) plus a JSON API for plan
 * content, version history, drafts, annotations, note integrations
 * (Obsidian / Bear / Octarine), and the approve/deny decision endpoints.
 * In `mode: "archive"` the server instead browses previously archived
 * plans and exposes `waitForDone`.
 *
 * @param options Plan text, pre-rendered HTML shell, and mode/sharing flags.
 * @returns Handle with the listening URL, decision promise/subscription,
 *          and a `stop()` that tears the server down.
 */
export async function startPlanReviewServer(options: {
  plan: string;
  htmlContent: string;
  origin?: string;
  permissionMode?: string;
  sharingEnabled?: boolean;
  shareBaseUrl?: string;
  pasteApiUrl?: string;
  mode?: "archive";
  customPlanPath?: string | null;
}): Promise<PlanServerResult> {
  // Side-channel pre-warm so /api/doc/exists POSTs land on warm cache.
  void warmFileListCache(process.cwd(), "code");
  const gitUser = detectGitUser();
  // Options win over env; PLANNOTATOR_SHARE=disabled opts out of sharing.
  const sharingEnabled =
    options.sharingEnabled ?? process.env.PLANNOTATOR_SHARE !== "disabled";
  const shareBaseUrl =
    (options.shareBaseUrl ?? process.env.PLANNOTATOR_SHARE_URL) || undefined;
  const pasteApiUrl =
    (options.pasteApiUrl ?? process.env.PLANNOTATOR_PASTE_URL) || undefined;
  // --- Archive mode setup ---
  let archivePlans: ArchivedPlan[] = [];
  let initialArchivePlan = "";
  let resolveDone: (() => void) | undefined;
  let donePromise: Promise<void> | undefined;
  if (options.mode === "archive") {
    archivePlans = listArchivedPlans(options.customPlanPath ?? undefined);
    initialArchivePlan =
      archivePlans.length > 0
        ? (readArchivedPlan(
            archivePlans[0].filename,
            options.customPlanPath ?? undefined,
          ) ?? "")
        : "";
    donePromise = new Promise<void>((resolve) => {
      resolveDone = resolve;
    });
  }
  // --- Plan review mode setup (skip in archive mode) ---
  const repoInfo = options.mode !== "archive" ? getRepoInfo() : null;
  const slug = options.mode !== "archive" ? generateSlug(options.plan) : "";
  const project = options.mode !== "archive" ? detectProjectName() : "";
  const historyResult =
    options.mode !== "archive"
      ? saveToHistory(project, slug, options.plan)
      : { version: 0, path: "", isNew: false };
  // Previous version enables the in-UI diff; null when this is v1.
  const previousPlan =
    options.mode !== "archive" && historyResult.version > 1
      ? getPlanVersion(project, slug, historyResult.version - 1)
      : null;
  const versionInfo =
    options.mode !== "archive"
      ? {
          version: historyResult.version,
          totalVersions: getVersionCount(project, slug),
          project,
        }
      : null;
  const reviewId = randomUUID();
  // Decision plumbing: first publishDecision() wins; later calls are no-ops
  // so duplicate approve/deny POSTs cannot double-resolve.
  let resolveDecision!: (result: PlanReviewDecision) => void;
  const decisionListeners = new Set<(result: PlanReviewDecision) => void | Promise<void>>();
  let decisionSettled = false;
  const decisionPromise = new Promise<PlanReviewDecision>((r) => {
    resolveDecision = r;
  });
  const publishDecision = (result: PlanReviewDecision): boolean => {
    if (decisionSettled) return false;
    decisionSettled = true;
    resolveDecision(result);
    for (const listener of decisionListeners) {
      // Listener failures are logged, never allowed to break other listeners.
      Promise.resolve(listener(result)).catch((error) => {
        console.error("[Plan Review] Decision listener failed:", error);
      });
    }
    return true;
  };
  // Draft key for annotation persistence
  const draftKey = options.mode !== "archive" ? contentHash(options.plan) : "";
  // Editor annotations (in-memory, VS Code integration — skip in archive mode)
  const editorAnnotations = options.mode !== "archive" ? createEditorAnnotationHandler() : null;
  const externalAnnotations = options.mode !== "archive" ? createExternalAnnotationHandler("plan") : null;
  // Lazy cache for the in-session archive tab, keyed by customPath.
  // FIX: a single cached array was previously reused for every customPath,
  // so whichever path was requested first "won" for the whole session.
  const archivePlansCache = new Map<string, ArchivedPlan[]>();
  const server = createServer(async (req, res) => {
    const url = requestUrl(req);
    if (url.pathname === "/api/done" && req.method === "POST") {
      // Archive mode: user is finished browsing.
      resolveDone?.();
      json(res, { ok: true });
    } else if (url.pathname === "/api/archive/plans" && req.method === "GET") {
      const customPath = url.searchParams.get("customPath") || undefined;
      const cacheKey = customPath ?? "";
      let plans = archivePlansCache.get(cacheKey);
      if (!plans) {
        plans = listArchivedPlans(customPath);
        archivePlansCache.set(cacheKey, plans);
      }
      json(res, { plans });
    } else if (url.pathname === "/api/archive/plan" && req.method === "GET") {
      const filename = url.searchParams.get("filename");
      const customPath = url.searchParams.get("customPath") || undefined;
      if (!filename) {
        json(res, { error: "Missing filename" }, 400);
        return;
      }
      const markdown = readArchivedPlan(filename, customPath);
      if (!markdown) {
        json(res, { error: "Not found" }, 404);
        return;
      }
      json(res, { markdown, filepath: filename });
    } else if (url.pathname === "/api/plan/version") {
      const vParam = url.searchParams.get("v");
      if (!vParam) {
        json(res, { error: "Missing v parameter" }, 400);
        return;
      }
      const v = parseInt(vParam, 10);
      if (Number.isNaN(v) || v < 1) {
        json(res, { error: "Invalid version number" }, 400);
        return;
      }
      const content = getPlanVersion(project, slug, v);
      if (content === null) {
        json(res, { error: "Version not found" }, 404);
        return;
      }
      json(res, { plan: content, version: v });
    } else if (url.pathname === "/api/plan/versions") {
      json(res, { project, slug, versions: listVersions(project, slug) });
    } else if (url.pathname === "/api/plan") {
      // Initial payload for the UI; shape differs between the two modes.
      if (options.mode === "archive") {
        json(res, {
          plan: initialArchivePlan,
          origin: options.origin ?? "pi",
          mode: "archive",
          archivePlans,
          sharingEnabled,
          shareBaseUrl,
          serverConfig: getServerConfig(gitUser),
        });
      } else {
        json(res, {
          plan: options.plan,
          origin: options.origin ?? "pi",
          permissionMode: options.permissionMode,
          previousPlan,
          versionInfo,
          sharingEnabled,
          shareBaseUrl,
          pasteApiUrl,
          repoInfo,
          projectRoot: process.cwd(),
          serverConfig: getServerConfig(gitUser),
        });
      }
    } else if (url.pathname === "/api/config" && req.method === "POST") {
      try {
        const body = (await parseBody(req)) as { displayName?: string; diffOptions?: Record<string, unknown>; conventionalComments?: boolean };
        // Persist only the fields the client actually sent.
        const toSave: Record<string, unknown> = {};
        if (body.displayName !== undefined) toSave.displayName = body.displayName;
        if (body.diffOptions !== undefined) toSave.diffOptions = body.diffOptions;
        if (body.conventionalComments !== undefined) toSave.conventionalComments = body.conventionalComments;
        if (Object.keys(toSave).length > 0) saveConfig(toSave as Parameters<typeof saveConfig>[0]);
        json(res, { ok: true });
      } catch {
        json(res, { error: "Invalid request" }, 400);
      }
    } else if (url.pathname === "/api/image") {
      handleImageRequest(res, url);
    } else if (url.pathname === "/api/upload" && req.method === "POST") {
      await handleUploadRequest(req, res);
    } else if (url.pathname === "/api/draft") {
      await handleDraftRequest(req, res, draftKey);
    } else if (editorAnnotations && (await editorAnnotations.handle(req, res, url))) {
      // Annotation handlers claim a request by returning true.
      return;
    } else if (externalAnnotations && (await externalAnnotations.handle(req, res, url))) {
      return;
    } else if (url.pathname === "/api/doc" && req.method === "GET") {
      await handleDocRequest(res, url);
    } else if (url.pathname === "/api/doc/exists" && req.method === "POST") {
      await handleDocExistsRequest(res, req);
    } else if (url.pathname === "/api/obsidian/vaults") {
      handleObsidianVaultsRequest(res);
    } else if (url.pathname === "/api/reference/obsidian/files" && req.method === "GET") {
      handleObsidianFilesRequest(res, url);
    } else if (url.pathname === "/api/reference/obsidian/doc" && req.method === "GET") {
      handleObsidianDocRequest(res, url);
    } else if (url.pathname === "/api/reference/files" && req.method === "GET") {
      handleFileBrowserRequest(res, url);
    } else if (
      url.pathname === "/api/plan/vscode-diff" &&
      req.method === "POST"
    ) {
      // Open a diff in the user's editor between a historical version and
      // the current plan snapshot on disk.
      try {
        const body = await parseBody(req);
        const baseVersion = body.baseVersion as number;
        if (!baseVersion) {
          json(res, { error: "Missing baseVersion" }, 400);
          return;
        }
        const basePath = getPlanVersionPath(project, slug, baseVersion);
        if (!basePath) {
          json(res, { error: `Version ${baseVersion} not found` }, 404);
          return;
        }
        const result = await openEditorDiff(basePath, historyResult.path);
        if ("error" in result) {
          json(res, { error: result.error }, 500);
          return;
        }
        json(res, { ok: true });
      } catch (err) {
        json(
          res,
          {
            error:
              err instanceof Error
                ? err.message
                : "Failed to open VS Code diff",
          },
          500,
        );
      }
    } else if (url.pathname === "/api/agents" && req.method === "GET") {
      // Agent jobs are not wired into this server; report none.
      json(res, { agents: [] });
    } else if (url.pathname === "/favicon.svg") {
      handleFavicon(res);
    } else if (url.pathname === "/api/save-notes" && req.method === "POST") {
      // Fan the plan out to any configured note apps; failures are logged
      // per-integration but the endpoint still returns the partial results.
      const results: {
        obsidian?: IntegrationResult;
        bear?: IntegrationResult;
        octarine?: IntegrationResult;
      } = {};
      try {
        const body = await parseBody(req);
        const promises: Promise<void>[] = [];
        const obsConfig = body.obsidian as ObsidianConfig | undefined;
        const bearConfig = body.bear as BearConfig | undefined;
        const octConfig = body.octarine as OctarineConfig | undefined;
        if (obsConfig?.vaultPath && obsConfig?.plan) {
          promises.push(
            saveToObsidian(obsConfig).then((r) => {
              results.obsidian = r;
            }),
          );
        }
        if (bearConfig?.plan) {
          promises.push(
            saveToBear(bearConfig).then((r) => {
              results.bear = r;
            }),
          );
        }
        if (octConfig?.plan && octConfig?.workspace) {
          promises.push(
            saveToOctarine(octConfig).then((r) => {
              results.octarine = r;
            }),
          );
        }
        await Promise.allSettled(promises);
        for (const [name, result] of Object.entries(results)) {
          if (!result?.success && result)
            console.error(`[${name}] Save failed: ${result.error}`);
        }
      } catch (err) {
        console.error(`[Save Notes] Error:`, err);
        json(res, { error: "Save failed" }, 500);
        return;
      }
      json(res, { ok: true, results });
    } else if (url.pathname === "/api/approve" && req.method === "POST") {
      // Duplicate approvals (double-click, retry) are acknowledged but
      // never publish a second decision.
      if (decisionSettled) {
        json(res, { ok: true, duplicate: true });
        return;
      }
      let feedback: string | undefined;
      let agentSwitch: string | undefined;
      let requestedPermissionMode: string | undefined;
      let planSaveEnabled = true;
      let planSaveCustomPath: string | undefined;
      try {
        const body = await parseBody(req);
        if (body.feedback) feedback = body.feedback as string;
        if (body.agentSwitch) agentSwitch = body.agentSwitch as string;
        if (body.permissionMode)
          requestedPermissionMode = body.permissionMode as string;
        if (body.planSave !== undefined) {
          const ps = body.planSave as { enabled: boolean; customPath?: string };
          planSaveEnabled = ps.enabled;
          planSaveCustomPath = ps.customPath;
        }
        // Run note integrations in parallel
        const integrationResults: Record<string, IntegrationResult> = {};
        const integrationPromises: Promise<void>[] = [];
        const obsConfig = body.obsidian as ObsidianConfig | undefined;
        const bearConfig = body.bear as BearConfig | undefined;
        const octConfig = body.octarine as OctarineConfig | undefined;
        if (obsConfig?.vaultPath && obsConfig?.plan) {
          integrationPromises.push(
            saveToObsidian(obsConfig).then((r) => {
              integrationResults.obsidian = r;
            }),
          );
        }
        if (bearConfig?.plan) {
          integrationPromises.push(
            saveToBear(bearConfig).then((r) => {
              integrationResults.bear = r;
            }),
          );
        }
        if (octConfig?.plan && octConfig?.workspace) {
          integrationPromises.push(
            saveToOctarine(octConfig).then((r) => {
              integrationResults.octarine = r;
            }),
          );
        }
        await Promise.allSettled(integrationPromises);
        for (const [name, result] of Object.entries(integrationResults)) {
          if (!result?.success && result)
            console.error(`[${name}] Save failed: ${result.error}`);
        }
      } catch (err) {
        // Integration failures never block approval.
        console.error(`[Integration] Error:`, err);
      }
      // Save annotations and final snapshot
      let savedPath: string | undefined;
      if (planSaveEnabled) {
        const annotations = feedback || "";
        if (annotations) saveAnnotations(slug, annotations, planSaveCustomPath);
        savedPath = saveFinalSnapshot(
          slug,
          "approved",
          options.plan,
          annotations,
          planSaveCustomPath,
        );
      }
      deleteDraft(draftKey);
      const effectivePermissionMode = requestedPermissionMode || options.permissionMode;
      publishDecision({
        approved: true,
        feedback,
        savedPath,
        agentSwitch,
        permissionMode: effectivePermissionMode,
      });
      json(res, { ok: true, savedPath });
    } else if (url.pathname === "/api/deny" && req.method === "POST") {
      if (decisionSettled) {
        json(res, { ok: true, duplicate: true });
        return;
      }
      let feedback = "Plan rejected by user";
      let planSaveEnabled = true;
      let planSaveCustomPath: string | undefined;
      try {
        const body = await parseBody(req);
        feedback = (body.feedback as string) || feedback;
        if (body.planSave !== undefined) {
          const ps = body.planSave as { enabled: boolean; customPath?: string };
          planSaveEnabled = ps.enabled;
          planSaveCustomPath = ps.customPath;
        }
      } catch {
        /* use default feedback */
      }
      let savedPath: string | undefined;
      if (planSaveEnabled) {
        saveAnnotations(slug, feedback, planSaveCustomPath);
        savedPath = saveFinalSnapshot(
          slug,
          "denied",
          options.plan,
          feedback,
          planSaveCustomPath,
        );
      }
      deleteDraft(draftKey);
      publishDecision({ approved: false, feedback, savedPath });
      json(res, { ok: true, savedPath });
    } else {
      // SPA fallback: every unmatched route serves the review UI shell.
      html(res, options.htmlContent);
    }
  });
  const { port, portSource } = await listenOnPort(server);
  return {
    reviewId,
    port,
    portSource,
    url: `http://localhost:${port}`,
    waitForDecision: () => decisionPromise,
    onDecision: (listener) => {
      decisionListeners.add(listener);
      return () => {
        decisionListeners.delete(listener);
      };
    },
    ...(donePromise && { waitForDone: () => donePromise }),
    stop: () => {
      // close() alone leaves keep-alive sockets open and can hang shutdown;
      // also sever live connections where supported (Node >= 18.2).
      server.closeAllConnections?.();
      server.close();
    },
  };
}

File diff suppressed because it is too large Load Diff