diff --git a/.pi/extensions/calvana-shiplog.ts b/.pi/extensions/calvana-shiplog.ts
index 8541ccf..2605772 100644
--- a/.pi/extensions/calvana-shiplog.ts
+++ b/.pi/extensions/calvana-shiplog.ts
@@ -1,7 +1,9 @@
/**
- * Calvana Ship Log Extension
+ * Calvana Ship Log Extension — DB-ONLY Architecture
*
- * Automatically tracks what you're shipping and updates the live Calvana site.
+ * ZERO in-memory state. ZERO session reconstruction.
+ * Every read hits PostgreSQL. Every write hits PostgreSQL.
+ * If DB is unreachable, operations fail loudly — never silently use stale data.
*
* Tools (LLM-callable):
* - calvana_ship: Add/update/complete shipping log entries
@@ -11,14 +13,6 @@
* Commands (user):
* /ships — View current shipping log
* /ship-deploy — Force deploy to calvana.quikcue.com
- *
- * How it works:
- * 1. When you work on tasks, the LLM uses calvana_ship to track progress
- * 2. If something breaks, calvana_oops logs it
- * 3. calvana_deploy rebuilds the /live page HTML and pushes it to the server
- * 4. The extension auto-injects context so the LLM knows to track ships
- *
- * Edit the SSH/deploy config in the DEPLOY_CONFIG section below.
*/
import { StringEnum } from "@mariozechner/pi-ai";
@@ -27,7 +21,7 @@ import { Text, truncateToWidth, matchesKey } from "@mariozechner/pi-tui";
import { Type } from "@sinclair/typebox";
// ════════════════════════════════════════════════════════════════════
-// CONFIGURATION — Edit these to change deploy target, copy, links
+// CONFIGURATION
// ════════════════════════════════════════════════════════════════════
const DEPLOY_CONFIG = {
@@ -46,36 +40,91 @@ const SITE_CONFIG = {
};
// ════════════════════════════════════════════════════════════════════
-// TYPES
+// DB ACCESS — Single source of truth. No caching. No fallbacks.
// ════════════════════════════════════════════════════════════════════
-type ShipStatus = "planned" | "shipping" | "shipped";
+const SSH_BASE = `ssh -o ConnectTimeout=10 -o StrictHostKeyChecking=no -p ${DEPLOY_CONFIG.sshPort} ${DEPLOY_CONFIG.sshHost}`;
+const PG_CONTAINER_CMD = `$(docker ps --format '{{.Names}}' | grep dokploy-postgres)`;
-interface ShipEntry {
- id: number;
- title: string;
- status: ShipStatus;
- timestamp: string;
- metric: string;
- prLink: string;
- deployLink: string;
- loomLink: string;
+/**
+ * Run a SQL query against the calvana DB.
+ * Uses base64 encoding to bypass the 7-layer quoting hell
+ * (local bash → SSH → remote bash → incus → container bash → docker → psql).
+ * Returns raw stdout. Throws on failure — callers MUST handle errors.
+ * No silent fallbacks. No swallowed exceptions.
+ */
+async function dbQuery(pi: ExtensionAPI, sql: string, timeout = 15000): Promise<string> {
+ // Base64-encode the SQL to avoid ALL quoting issues through the SSH/incus/docker chain
+ const b64Sql = Buffer.from(sql).toString("base64");
+ const result = await pi.exec("bash", ["-c",
+ `${SSH_BASE} "incus exec ${DEPLOY_CONFIG.container} -- bash -c 'echo ${b64Sql} | base64 -d | docker exec -i ${PG_CONTAINER_CMD} psql -U dokploy -d calvana -t -A -F \\\"|||\\\"'"`
+ ], { timeout });
+
+ if (result.code !== 0) {
+ throw new Error(`DB query failed (exit ${result.code}): ${result.stderr?.slice(0, 200)}`);
+ }
+ return result.stdout?.trim() || "";
}
-interface OopsEntry {
+// ════════════════════════════════════════════════════════════════════
+// DB READ HELPERS — Always fresh from DB. Never cached.
+// ════════════════════════════════════════════════════════════════════
+
+interface DbShip {
+ id: number;
+ title: string;
+ status: string;
+ metric: string;
+ created: string;
+}
+
+interface DbOops {
id: number;
description: string;
fixTime: string;
commitLink: string;
- timestamp: string;
+ created: string;
}
-interface ShipLogState {
- ships: ShipEntry[];
- oops: OopsEntry[];
- nextShipId: number;
- nextOopsId: number;
- lastDeployed: string | null;
+async function fetchShips(pi: ExtensionAPI): Promise<DbShip[]> {
+ const raw = await dbQuery(pi, "SELECT id, title, status, COALESCE(metric, '-'), created_at::text FROM ships ORDER BY id");
+ if (!raw) return [];
+ const ships: DbShip[] = [];
+ for (const line of raw.split("\n")) {
+ if (!line.trim()) continue;
+ const p = line.split("|||");
+ if (p.length >= 5) {
+ ships.push({ id: parseInt(p[0]), title: p[1], status: p[2], metric: p[3], created: p[4] });
+ }
+ }
+ return ships;
+}
+
+async function fetchOops(pi: ExtensionAPI): Promise<DbOops[]> {
+ const raw = await dbQuery(pi, "SELECT id, description, COALESCE(fix_time, '-'), COALESCE(commit_link, '#commit'), created_at::text FROM oops ORDER BY id");
+ if (!raw) return [];
+ const oops: DbOops[] = [];
+ for (const line of raw.split("\n")) {
+ if (!line.trim()) continue;
+ const p = line.split("|||");
+ if (p.length >= 4) {
+ oops.push({ id: parseInt(p[0]), description: p[1], fixTime: p[2], commitLink: p[3], created: p[4] || "" });
+ }
+ }
+ return oops;
+}
+
+/** Get summary counts for status bar / system prompt. */
+async function fetchSummary(pi: ExtensionAPI): Promise<{ total: number; shipped: number; oops: number }> {
+ try {
+ const raw = await dbQuery(pi,
+ "SELECT (SELECT count(*) FROM ships), (SELECT count(*) FROM ships WHERE status='shipped'), (SELECT count(*) FROM oops)"
+ );
+ const p = raw.split("|||");
+ return { total: parseInt(p[0]) || 0, shipped: parseInt(p[1]) || 0, oops: parseInt(p[2]) || 0 };
+ } catch {
+ return { total: -1, shipped: -1, oops: -1 }; // -1 signals DB unreachable
+ }
}
// ════════════════════════════════════════════════════════════════════
@@ -109,116 +158,34 @@ const DeployParams = Type.Object({
// ════════════════════════════════════════════════════════════════════
export default function (pi: ExtensionAPI) {
- // ── State ──
- let state: ShipLogState = {
- ships: [],
- oops: [],
- nextShipId: 1,
- nextOopsId: 1,
- lastDeployed: null,
- };
+ // ── NO in-memory state. Only a cache for the status bar display. ──
+ let lastDeployed: string | null = null;
+ let statusCache = { total: 0, shipped: 0, oops: 0 };
- // ── State reconstruction: DB first, session fallback ──
- const reconstructFromDb = async () => {
- try {
- const sshBase = `ssh -o ConnectTimeout=5 -p ${DEPLOY_CONFIG.sshPort} ${DEPLOY_CONFIG.sshHost}`;
- const pgContainer = "$(docker ps --format '{{.Names}}' | grep dokploy-postgres)";
-
- const shipsResult = await pi.exec("bash", ["-c",
- `${sshBase} "incus exec ${DEPLOY_CONFIG.container} -- bash -c 'docker exec ${pgContainer} psql -U dokploy -d calvana -t -A -F \\\"|||\\\" -c \\\"SELECT id, title, status, COALESCE(metric, chr(45)), created_at::text FROM ships ORDER BY id\\\"'" 2>/dev/null`
- ], { timeout: 15000 });
-
- if (shipsResult.code === 0 && shipsResult.stdout.trim()) {
- state.ships = [];
- let maxId = 0;
- for (const line of shipsResult.stdout.trim().split("\n")) {
- if (!line.trim()) continue;
- const parts = line.split("|||");
- if (parts.length >= 5) {
- const id = parseInt(parts[0]);
- if (id > maxId) maxId = id;
- state.ships.push({
- id,
- title: parts[1],
- status: parts[2] as ShipStatus,
- timestamp: parts[4],
- metric: parts[3],
- prLink: "#pr",
- deployLink: "#deploy",
- loomLink: "#loomclip",
- });
- }
- }
- state.nextShipId = maxId + 1;
- }
-
- const oopsResult = await pi.exec("bash", ["-c",
- `${sshBase} "incus exec ${DEPLOY_CONFIG.container} -- bash -c 'docker exec ${pgContainer} psql -U dokploy -d calvana -t -A -F \\\"|||\\\" -c \\\"SELECT id, description, COALESCE(fix_time, chr(45)), COALESCE(commit_link, chr(35)), created_at::text FROM oops ORDER BY id\\\"'" 2>/dev/null`
- ], { timeout: 15000 });
-
- if (oopsResult.code === 0 && oopsResult.stdout.trim()) {
- state.oops = [];
- let maxOopsId = 0;
- for (const line of oopsResult.stdout.trim().split("\n")) {
- if (!line.trim()) continue;
- const parts = line.split("|||");
- if (parts.length >= 4) {
- const id = parseInt(parts[0]);
- if (id > maxOopsId) maxOopsId = id;
- state.oops.push({
- id,
- description: parts[1],
- fixTime: parts[2],
- commitLink: parts[3],
- timestamp: parts[4] || "",
- });
- }
- }
- state.nextOopsId = maxOopsId + 1;
- }
- } catch {
- // DB unavailable — fall through to session reconstruction
+ const refreshStatusBar = async (ctx: ExtensionContext) => {
+ if (!ctx.hasUI) return;
+ const summary = await fetchSummary(pi);
+ if (summary.total === -1) {
+ ctx.ui.setStatus("calvana", ctx.ui.theme.fg("error", "🚀 DB unreachable"));
+ } else {
+ statusCache = summary;
+ ctx.ui.setStatus("calvana", ctx.ui.theme.fg("dim",
+ `🚀 ${summary.shipped}/${summary.total} shipped · ${summary.oops} oops (DB)`
+ ));
}
};
- const reconstructState = async (ctx: ExtensionContext) => {
- state = { ships: [], oops: [], nextShipId: 1, nextOopsId: 1, lastDeployed: null };
-
- // Always try DB first — this is the source of truth
- await reconstructFromDb();
-
- // If DB returned data, we're done
- if (state.ships.length > 0) return;
-
- // Fallback: reconstruct from session history (when DB is unreachable)
- for (const entry of ctx.sessionManager.getBranch()) {
- if (entry.type !== "message") continue;
- const msg = entry.message;
- if (msg.role !== "toolResult") continue;
- if (msg.toolName === "calvana_ship" || msg.toolName === "calvana_oops" || msg.toolName === "calvana_deploy") {
- const details = msg.details as { state?: ShipLogState } | undefined;
- if (details?.state) {
- state = details.state;
- }
- }
- }
- };
-
- pi.on("session_start", async (_event, ctx) => {
- await reconstructState(ctx);
- if (ctx.hasUI) {
- const theme = ctx.ui.theme;
- const shipCount = state.ships.length;
- const shipped = state.ships.filter(s => s.status === "shipped").length;
- ctx.ui.setStatus("calvana", theme.fg("dim", `🚀 ${shipped}/${shipCount} shipped (DB)`));
- }
- });
- pi.on("session_switch", async (_event, ctx) => await reconstructState(ctx));
- pi.on("session_fork", async (_event, ctx) => await reconstructState(ctx));
- pi.on("session_tree", async (_event, ctx) => await reconstructState(ctx));
+ // ── Session events: just refresh the status bar. NO state reconstruction. ──
+ pi.on("session_start", async (_event, ctx) => await refreshStatusBar(ctx));
+ pi.on("session_switch", async (_event, ctx) => await refreshStatusBar(ctx));
+ pi.on("session_fork", async (_event, ctx) => await refreshStatusBar(ctx));
+ pi.on("session_tree", async (_event, ctx) => await refreshStatusBar(ctx));
+ pi.on("turn_end", async (_event, ctx) => await refreshStatusBar(ctx));
// ── Inject context so LLM knows about ship tracking ──
pi.on("before_agent_start", async (event, _ctx) => {
+ const s = await fetchSummary(pi);
+ const dbStatus = s.total === -1 ? "⚠️ DB UNREACHABLE" : `${s.total} ships (${s.shipped} shipped), ${s.oops} oops`;
const shipContext = `
[Calvana Ship Log Extension Active — DB-backed]
Ship log is persisted in PostgreSQL (calvana DB on dokploy-postgres).
@@ -236,29 +203,13 @@ Rules:
- After significant changes, use calvana_deploy to push updates live.
- calvana_deploy reads from the DATABASE — it shows ALL ships ever, not just this session.
-Current state from DB: ${state.ships.length} ships (${state.ships.filter(s => s.status === "shipped").length} shipped), ${state.oops.length} oops
+Current state from DB: ${dbStatus}
`;
- return {
- systemPrompt: event.systemPrompt + shipContext,
- };
- });
-
- // ── Update status bar on turn end ──
- pi.on("turn_end", async (_event, ctx) => {
- if (ctx.hasUI) {
- const theme = ctx.ui.theme;
- const shipped = state.ships.filter(s => s.status === "shipped").length;
- const shipping = state.ships.filter(s => s.status === "shipping").length;
- const total = state.ships.length;
- let statusText = `🚀 ${shipped}/${total} shipped`;
- if (shipping > 0) statusText += ` · ${shipping} in flight`;
- if (state.lastDeployed) statusText += ` · last deploy ${state.lastDeployed}`;
- ctx.ui.setStatus("calvana", theme.fg("dim", statusText));
- }
+ return { systemPrompt: event.systemPrompt + shipContext };
});
// ════════════════════════════════════════════════════════════════
- // TOOL: calvana_ship
+ // TOOL: calvana_ship — ALL operations go directly to DB
// ════════════════════════════════════════════════════════════════
pi.registerTool({
@@ -268,105 +219,67 @@ Current state from DB: ${state.ships.length} ships (${state.ships.filter(s => s.
parameters: ShipParams,
async execute(_toolCallId, params, _signal, _onUpdate, _ctx) {
- const now = new Date().toISOString().replace("T", " ").slice(0, 19) + " GMT+8";
-
switch (params.action) {
case "add": {
if (!params.title) {
- return {
- content: [{ type: "text", text: "Error: title required" }],
- details: { state: { ...state }, error: "title required" },
- };
+ return { content: [{ type: "text", text: "Error: title required" }], isError: true };
}
- const entry: ShipEntry = {
- id: state.nextShipId++,
- title: params.title,
- status: (params.status as ShipStatus) || "planned",
- timestamp: now,
- metric: params.metric || "—",
- prLink: params.prLink || "#pr",
- deployLink: params.deployLink || "#deploy",
- loomLink: params.loomLink || "#loomclip",
- };
- state.ships.push(entry);
+ const title = params.title.replace(/'/g, "''");
+ const status = params.status || "planned";
+ const metric = (params.metric || "—").replace(/'/g, "''");
- // Persist to PostgreSQL
- const addTitle = entry.title.replace(/'/g, "''");
- const addMetric = (entry.metric || "—").replace(/'/g, "''");
- const addStatus = entry.status;
try {
- const dbResult = await pi.exec("bash", ["-c",
- `ssh -o ConnectTimeout=10 -p ${DEPLOY_CONFIG.sshPort} ${DEPLOY_CONFIG.sshHost} "incus exec ${DEPLOY_CONFIG.container} -- bash -c 'docker exec \$(docker ps --format {{.Names}} | grep dokploy-postgres) psql -U dokploy -d calvana -t -c \\\"INSERT INTO ships (title, status, metric) VALUES (\\x27${addTitle}\\x27, \\x27${addStatus}\\x27, \\x27${addMetric}\\x27) RETURNING id\\\"'"`
- ], { timeout: 15000 });
- const dbId = parseInt((dbResult.stdout || "").trim());
- if (dbId > 0) entry.id = dbId;
- } catch { /* DB write failed, local state still updated */ }
-
- return {
- content: [{ type: "text", text: `Ship #${entry.id} added: "${entry.title}" [${entry.status}]` }],
- details: { state: { ...state, ships: [...state.ships] } },
- };
+            const raw = await dbQuery(pi,
+ `INSERT INTO ships (title, status, metric) VALUES ('${title}', '${status}', '${metric}') RETURNING id`
+ );
+ const id = parseInt(raw.trim()) || 0;
+ return { content: [{ type: "text", text: `Ship #${id} added: "${params.title}" [${status}]` }] };
+ } catch (err: any) {
+ return { content: [{ type: "text", text: `DB ERROR adding ship: ${err.message}` }], isError: true };
+ }
}
case "update": {
if (params.id === undefined) {
- return {
- content: [{ type: "text", text: "Error: id required for update" }],
- details: { state: { ...state }, error: "id required" },
- };
+ return { content: [{ type: "text", text: "Error: id required for update" }], isError: true };
}
- const ship = state.ships.find(s => s.id === params.id);
- if (!ship) {
- return {
- content: [{ type: "text", text: `Ship #${params.id} not found` }],
- details: { state: { ...state }, error: `#${params.id} not found` },
- };
- }
- if (params.status) ship.status = params.status as ShipStatus;
- if (params.metric) ship.metric = params.metric;
- if (params.prLink) ship.prLink = params.prLink;
- if (params.deployLink) ship.deployLink = params.deployLink;
- if (params.loomLink) ship.loomLink = params.loomLink;
- ship.timestamp = now;
- // Persist update to PostgreSQL
const setClauses: string[] = [];
if (params.status) setClauses.push(`status='${params.status}'`);
- if (params.metric) setClauses.push(`metric='${(params.metric || "").replace(/'/g, "''")}'`);
+ if (params.metric) setClauses.push(`metric='${params.metric.replace(/'/g, "''")}'`);
setClauses.push("updated_at=now()");
- try {
- await pi.exec("bash", ["-c",
- `ssh -o ConnectTimeout=10 -p ${DEPLOY_CONFIG.sshPort} ${DEPLOY_CONFIG.sshHost} "incus exec ${DEPLOY_CONFIG.container} -- bash -c 'docker exec \$(docker ps --format {{.Names}} | grep dokploy-postgres) psql -U dokploy -d calvana -c \\\"UPDATE ships SET ${setClauses.join(", ")} WHERE id=${params.id}\\\"'"`
- ], { timeout: 15000 });
- } catch { /* DB write failed, local state still updated */ }
- return {
- content: [{ type: "text", text: `Ship #${ship.id} updated: "${ship.title}" [${ship.status}]` }],
- details: { state: { ...state, ships: [...state.ships] } },
- };
+ try {
+            const raw = await dbQuery(pi,
+ `UPDATE ships SET ${setClauses.join(", ")} WHERE id=${params.id} RETURNING id, title, status`
+ );
+ if (!raw.trim()) {
+ return { content: [{ type: "text", text: `Ship #${params.id} not found in DB` }], isError: true };
+ }
+ const p = raw.trim().split("|||");
+ return { content: [{ type: "text", text: `Ship #${p[0]} updated: "${p[1]}" [${p[2]}]` }] };
+ } catch (err: any) {
+ return { content: [{ type: "text", text: `DB ERROR updating ship: ${err.message}` }], isError: true };
+ }
}
case "list": {
- if (state.ships.length === 0) {
- return {
- content: [{ type: "text", text: "No ships logged yet." }],
- details: { state: { ...state } },
- };
+ try {
+ const ships = await fetchShips(pi);
+ if (ships.length === 0) {
+ return { content: [{ type: "text", text: "No ships in DB." }] };
+ }
+ const lines = ships.map(s =>
+ `#${s.id} [${s.status.toUpperCase()}] ${s.title} — ${s.metric}`
+ );
+ return { content: [{ type: "text", text: lines.join("\n") }] };
+ } catch (err: any) {
+ return { content: [{ type: "text", text: `DB ERROR listing ships: ${err.message}` }], isError: true };
}
- const lines = state.ships.map(s =>
- `#${s.id} [${s.status.toUpperCase()}] ${s.title} (${s.timestamp}) — ${s.metric}`
- );
- return {
- content: [{ type: "text", text: lines.join("\n") }],
- details: { state: { ...state } },
- };
}
default:
- return {
- content: [{ type: "text", text: `Unknown action: ${params.action}` }],
- details: { state: { ...state } },
- };
+ return { content: [{ type: "text", text: `Unknown action: ${params.action}` }], isError: true };
}
},
@@ -380,30 +293,15 @@ Current state from DB: ${state.ships.length} ships (${state.ships.filter(s => s.
},
renderResult(result, { expanded }, theme) {
- const details = result.details as { state?: ShipLogState; error?: string } | undefined;
- if (details?.error) return new Text(theme.fg("error", `Error: ${details.error}`), 0, 0);
-
- const st = details?.state;
- if (!st || st.ships.length === 0) return new Text(theme.fg("dim", "No ships"), 0, 0);
-
- const shipped = st.ships.filter(s => s.status === "shipped").length;
- const total = st.ships.length;
- let text = theme.fg("success", `${shipped}/${total} shipped`);
-
- if (expanded) {
- for (const s of st.ships) {
- const badge = s.status === "shipped" ? theme.fg("success", "✓")
- : s.status === "shipping" ? theme.fg("warning", "●")
- : theme.fg("dim", "○");
- text += `\n ${badge} ${theme.fg("accent", `#${s.id}`)} ${theme.fg("muted", s.title)}`;
- }
- }
- return new Text(text, 0, 0);
+ const text = result.content[0];
+ const msg = text?.type === "text" ? text.text : "";
+ if (result.isError) return new Text(theme.fg("error", msg), 0, 0);
+ return new Text(theme.fg("success", msg), 0, 0);
},
});
// ════════════════════════════════════════════════════════════════
- // TOOL: calvana_oops
+ // TOOL: calvana_oops — ALL operations go directly to DB
// ════════════════════════════════════════════════════════════════
pi.registerTool({
@@ -413,64 +311,41 @@ Current state from DB: ${state.ships.length} ships (${state.ships.filter(s => s.
parameters: OopsParams,
async execute(_toolCallId, params, _signal, _onUpdate, _ctx) {
- const now = new Date().toISOString().replace("T", " ").slice(0, 19) + " GMT+8";
-
switch (params.action) {
case "add": {
if (!params.description) {
- return {
- content: [{ type: "text", text: "Error: description required" }],
- details: { state: { ...state }, error: "description required" },
- };
+ return { content: [{ type: "text", text: "Error: description required" }], isError: true };
}
- const entry: OopsEntry = {
- id: state.nextOopsId++,
- description: params.description,
- fixTime: params.fixTime || "—",
- commitLink: params.commitLink || "#commit",
- timestamp: now,
- };
- state.oops.push(entry);
+ const desc = params.description.replace(/'/g, "''");
+ const fixTime = (params.fixTime || "—").replace(/'/g, "''");
+ const commitLink = (params.commitLink || "#commit").replace(/'/g, "''");
- // Persist to PostgreSQL
- const oopsDesc = entry.description.replace(/'/g, "''");
- const oopsTime = (entry.fixTime || "-").replace(/'/g, "''");
- const oopsCommit = (entry.commitLink || "#commit").replace(/'/g, "''");
try {
- const dbResult = await pi.exec("bash", ["-c",
- `ssh -o ConnectTimeout=10 -p ${DEPLOY_CONFIG.sshPort} ${DEPLOY_CONFIG.sshHost} "incus exec ${DEPLOY_CONFIG.container} -- bash -c 'docker exec \$(docker ps --format {{.Names}} | grep dokploy-postgres) psql -U dokploy -d calvana -t -c \\\"INSERT INTO oops (description, fix_time, commit_link) VALUES (\\x27${oopsDesc}\\x27, \\x27${oopsTime}\\x27, \\x27${oopsCommit}\\x27) RETURNING id\\\"'"`
- ], { timeout: 15000 });
- const dbId = parseInt((dbResult.stdout || "").trim());
- if (dbId > 0) entry.id = dbId;
- } catch { /* DB write failed, local state still updated */ }
-
- return {
- content: [{ type: "text", text: `Oops #${entry.id}: "${entry.description}" (fixed in ${entry.fixTime})` }],
- details: { state: { ...state, oops: [...state.oops] } },
- };
+            const raw = await dbQuery(pi,
+ `INSERT INTO oops (description, fix_time, commit_link) VALUES ('${desc}', '${fixTime}', '${commitLink}') RETURNING id`
+ );
+ const id = parseInt(raw.trim()) || 0;
+ return { content: [{ type: "text", text: `Oops #${id}: "${params.description}" (fixed in ${params.fixTime || "—"})` }] };
+ } catch (err: any) {
+ return { content: [{ type: "text", text: `DB ERROR adding oops: ${err.message}` }], isError: true };
+ }
}
case "list": {
- if (state.oops.length === 0) {
- return {
- content: [{ type: "text", text: "No oops entries. Clean run so far." }],
- details: { state: { ...state } },
- };
+ try {
+ const oops = await fetchOops(pi);
+ if (oops.length === 0) {
+ return { content: [{ type: "text", text: "No oops entries. Clean run so far." }] };
+ }
+ const lines = oops.map(o => `#${o.id} ${o.description} — fixed in ${o.fixTime}`);
+ return { content: [{ type: "text", text: lines.join("\n") }] };
+ } catch (err: any) {
+ return { content: [{ type: "text", text: `DB ERROR listing oops: ${err.message}` }], isError: true };
}
- const lines = state.oops.map(o =>
- `#${o.id} ${o.description} — fixed in ${o.fixTime}`
- );
- return {
- content: [{ type: "text", text: lines.join("\n") }],
- details: { state: { ...state } },
- };
}
default:
- return {
- content: [{ type: "text", text: `Unknown action: ${params.action}` }],
- details: { state: { ...state } },
- };
+ return { content: [{ type: "text", text: `Unknown action: ${params.action}` }], isError: true };
}
},
@@ -482,15 +357,15 @@ Current state from DB: ${state.ships.length} ships (${state.ships.filter(s => s.
},
renderResult(result, _options, theme) {
- const details = result.details as { state?: ShipLogState; error?: string } | undefined;
- if (details?.error) return new Text(theme.fg("error", `Error: ${details.error}`), 0, 0);
const text = result.content[0];
- return new Text(theme.fg("warning", text?.type === "text" ? text.text : ""), 0, 0);
+ const msg = text?.type === "text" ? text.text : "";
+ if (result.isError) return new Text(theme.fg("error", msg), 0, 0);
+ return new Text(theme.fg("warning", msg), 0, 0);
},
});
// ════════════════════════════════════════════════════════════════
- // TOOL: calvana_deploy
+ // TOOL: calvana_deploy — Reads ONLY from DB, never from memory
// ════════════════════════════════════════════════════════════════
pi.registerTool({
@@ -502,14 +377,13 @@ Current state from DB: ${state.ships.length} ships (${state.ships.filter(s => s.
async execute(_toolCallId, params, signal, onUpdate, _ctx) {
onUpdate?.({ content: [{ type: "text", text: "Querying database for full ship log..." }] });
- // ── Build a helper script on the remote server to avoid quoting hell ──
- // This is the ONLY way to reliably run psql inside docker inside incus inside ssh.
- // Previous approach with nested escaping silently returned empty results.
- const sshBase = `ssh -o ConnectTimeout=10 -p ${DEPLOY_CONFIG.sshPort} ${DEPLOY_CONFIG.sshHost}`;
+ // NOTE: We intentionally EXCLUDE the 'details' column from ships.
+ // It contains multiline HTML that breaks the ||| line-based parser.
+ // The metric column is sufficient for the live page cards.
const HELPER_SCRIPT = `
PG_CONTAINER=$(incus exec ${DEPLOY_CONFIG.container} -- bash -c "docker ps --format '{{.Names}}' | grep dokploy-postgres")
if [ -z "$PG_CONTAINER" ]; then echo "ERR:NO_PG_CONTAINER"; exit 1; fi
-SHIPS=$(incus exec ${DEPLOY_CONFIG.container} -- bash -c "docker exec $PG_CONTAINER psql -U dokploy -d calvana -t -A -F '|||' -c \\"SELECT id, title, status, COALESCE(metric,'-'), COALESCE(details,' '), created_at::text, COALESCE(updated_at::text, created_at::text) FROM ships ORDER BY id\\"")
+SHIPS=$(incus exec ${DEPLOY_CONFIG.container} -- bash -c "docker exec $PG_CONTAINER psql -U dokploy -d calvana -t -A -F '|||' -c \\"SELECT id, title, status, COALESCE(metric,'-'), created_at::text, COALESCE(updated_at::text, created_at::text) FROM ships ORDER BY id\\"")
OOPS=$(incus exec ${DEPLOY_CONFIG.container} -- bash -c "docker exec $PG_CONTAINER psql -U dokploy -d calvana -t -A -F '|||' -c \\"SELECT id, description, COALESCE(fix_time,'-'), COALESCE(commit_link,'#commit'), created_at::text FROM oops ORDER BY id\\"")
echo "===SHIPS==="
echo "$SHIPS"
@@ -519,15 +393,13 @@ echo "===END==="
`.trim();
try {
- // 1. Run the helper script via SSH to get ALL ships + oops from DB
const dbResult = await pi.exec("bash", ["-c",
- `${sshBase} 'bash -s' << 'DBEOF'\n${HELPER_SCRIPT}\nDBEOF`
+ `${SSH_BASE} 'bash -s' << 'DBEOF'\n${HELPER_SCRIPT}\nDBEOF`
], { signal, timeout: 30000 });
if (dbResult.code !== 0) {
return {
content: [{ type: "text", text: `DB query failed (code ${dbResult.code}): ${dbResult.stderr}\nstdout: ${dbResult.stdout?.slice(0, 200)}` }],
- details: { state: { ...state }, error: dbResult.stderr },
isError: true,
};
}
@@ -535,29 +407,22 @@ echo "===END==="
const output = dbResult.stdout || "";
if (output.includes("ERR:NO_PG_CONTAINER")) {
return {
- content: [{ type: "text", text: `ABORT: PostgreSQL container not found. Refusing to deploy.` }],
- details: { state: { ...state }, error: "PG container not found" },
+ content: [{ type: "text", text: `ABORT: PostgreSQL container not found.` }],
isError: true,
};
}
- // 2. Parse the structured output
const shipsSection = output.split("===SHIPS===")[1]?.split("===OOPS===")[0]?.trim() || "";
const oopsSection = output.split("===OOPS===")[1]?.split("===END===")[0]?.trim() || "";
- const dbShips: Array<{ id: number; title: string; status: string; metric: string; details: string; created: string; updated: string }> = [];
+ const dbShips: Array<{ id: number; title: string; status: string; metric: string; created: string; updated: string }> = [];
for (const line of shipsSection.split("\n")) {
if (!line.trim()) continue;
const parts = line.split("|||");
- if (parts.length >= 6) {
+ if (parts.length >= 5) {
dbShips.push({
- id: parseInt(parts[0]),
- title: parts[1],
- status: parts[2],
- metric: parts[3],
- details: parts[4],
- created: parts[5],
- updated: parts[6] || parts[5],
+ id: parseInt(parts[0]), title: parts[1], status: parts[2],
+ metric: parts[3], created: parts[4], updated: parts[5] || parts[4],
});
}
}
@@ -568,69 +433,56 @@ echo "===END==="
const parts = line.split("|||");
if (parts.length >= 4) {
dbOops.push({
- id: parseInt(parts[0]),
- description: parts[1],
- fixTime: parts[2],
- commitLink: parts[3],
- created: parts[4] || "",
+ id: parseInt(parts[0]), description: parts[1],
+ fixTime: parts[2], commitLink: parts[3], created: parts[4] || "",
});
}
}
- // ── SAFETY GATE: refuse to deploy if DB returned 0 ships ──
- // The DB has 48+ entries. If we get 0, the query broke silently.
if (dbShips.length === 0) {
return {
- content: [{ type: "text", text: `ABORT: DB query returned 0 ships. This would wipe the live site.\nRaw output (first 500 chars): ${output.slice(0, 500)}\n\nRefusing to deploy. Fix the DB query first.` }],
- details: { state: { ...state }, error: "0 ships from DB — refusing to deploy" },
+ content: [{ type: "text", text: `ABORT: DB returned 0 ships. Refusing to deploy.\nRaw: ${output.slice(0, 500)}` }],
isError: true,
};
}
- onUpdate?.({ content: [{ type: "text", text: `Found ${dbShips.length} ships + ${dbOops.length} oops from DB. Generating HTML...` }] });
+ onUpdate?.({ content: [{ type: "text", text: `Found ${dbShips.length} ships + ${dbOops.length} oops. Generating HTML...` }] });
- // 3. Generate HTML from DB data
const liveHtml = generateLivePageFromDb(dbShips, dbOops);
if (params.dryRun) {
return {
content: [{ type: "text", text: `Dry run — ${dbShips.length} ships, ${dbOops.length} oops, ${liveHtml.length} bytes HTML.\n\n${liveHtml.slice(0, 500)}...` }],
- details: { state: { ...state }, dryRun: true },
};
}
- onUpdate?.({ content: [{ type: "text", text: `Deploying ${dbShips.length} ships to server...` }] });
+ onUpdate?.({ content: [{ type: "text", text: `Deploying ${dbShips.length} ships...` }] });
- // 4. Deploy via base64
const b64Html = Buffer.from(liveHtml).toString("base64");
const deployResult = await pi.exec("bash", ["-c",
- `${sshBase} "incus exec ${DEPLOY_CONFIG.container} -- bash -c 'echo ${b64Html} | base64 -d > ${DEPLOY_CONFIG.sitePath}/live/index.html'"`
+ `${SSH_BASE} "incus exec ${DEPLOY_CONFIG.container} -- bash -c 'echo ${b64Html} | base64 -d > ${DEPLOY_CONFIG.sitePath}/live/index.html'"`
], { signal, timeout: 30000 });
if (deployResult.code !== 0) {
return {
- content: [{ type: "text", text: `Deploy failed: ${deployResult.stderr}` }],
- details: { state: { ...state }, error: deployResult.stderr },
+ content: [{ type: "text", text: `Deploy write failed: ${deployResult.stderr}` }],
isError: true,
};
}
- // 5. Rebuild and update docker service
const rebuildResult = await pi.exec("bash", ["-c",
- `${sshBase} "incus exec ${DEPLOY_CONFIG.container} -- bash -c 'cd /opt/calvana && docker build -t calvana:latest . 2>&1 | tail -2 && docker service update --force calvana 2>&1 | tail -2'"`
+ `${SSH_BASE} "incus exec ${DEPLOY_CONFIG.container} -- bash -c 'cd /opt/calvana && docker build -t calvana:latest . 2>&1 | tail -2 && docker service update --force calvana 2>&1 | tail -2'"`
], { signal, timeout: 60000 });
const now = new Date().toISOString().replace("T", " ").slice(0, 19);
- state.lastDeployed = now;
+ lastDeployed = now;
return {
content: [{ type: "text", text: `✓ Deployed to https://${DEPLOY_CONFIG.domain}/live\n${dbShips.length} ships + ${dbOops.length} oops from database\n${rebuildResult.stdout}` }],
- details: { state: { ...state, lastDeployed: now } },
};
} catch (err: any) {
return {
content: [{ type: "text", text: `Deploy error: ${err.message}` }],
- details: { state: { ...state }, error: err.message },
isError: true,
};
}
@@ -641,26 +493,35 @@ echo "===END==="
},
renderResult(result, _options, theme) {
- const details = result.details as { error?: string } | undefined;
- if (details?.error) return new Text(theme.fg("error", `✗ ${details.error}`), 0, 0);
+ if (result.isError) {
+ const text = result.content[0];
+ return new Text(theme.fg("error", `✗ ${text?.type === "text" ? text.text : "failed"}`), 0, 0);
+ }
return new Text(theme.fg("success", `✓ Live at https://${DEPLOY_CONFIG.domain}/live`), 0, 0);
},
});
// ════════════════════════════════════════════════════════════════
- // COMMAND: /ships
+ // COMMAND: /ships — reads directly from DB
// ════════════════════════════════════════════════════════════════
pi.registerCommand("ships", {
- description: "View current Calvana shipping log",
+ description: "View current Calvana shipping log (from DB)",
handler: async (_args, ctx) => {
- if (!ctx.hasUI) {
- ctx.ui.notify("Requires interactive mode", "error");
- return;
+ if (!ctx.hasUI) return;
+
+ let ships: DbShip[] = [];
+ let oops: DbOops[] = [];
+ let dbError: string | null = null;
+
+ try {
+ [ships, oops] = await Promise.all([fetchShips(pi), fetchOops(pi)]);
+ } catch (err: any) {
+ dbError = err.message;
}
await ctx.ui.custom((_tui, theme, _kb, done) => {
- return new ShipLogComponent(state, theme, () => done());
+ return new ShipLogComponent(ships, oops, lastDeployed, dbError, theme, () => done());
});
},
});
@@ -674,8 +535,6 @@ echo "===END==="
handler: async (_args, ctx) => {
const ok = await ctx.ui.confirm("Deploy?", `Push ship log to https://${DEPLOY_CONFIG.domain}/live?`);
if (!ok) return;
-
- // Queue a deploy via the LLM
pi.sendUserMessage("Use calvana_deploy to push the current ship log to the live site.", { deliverAs: "followUp" });
},
});
@@ -686,14 +545,20 @@ echo "===END==="
// ════════════════════════════════════════════════════════════════════
class ShipLogComponent {
- private state: ShipLogState;
+ private ships: DbShip[];
+ private oops: DbOops[];
+ private lastDeployed: string | null;
+ private dbError: string | null;
private theme: Theme;
private onClose: () => void;
private cachedWidth?: number;
private cachedLines?: string[];
- constructor(state: ShipLogState, theme: Theme, onClose: () => void) {
- this.state = state;
+ constructor(ships: DbShip[], oops: DbOops[], lastDeployed: string | null, dbError: string | null, theme: Theme, onClose: () => void) {
+ this.ships = ships;
+ this.oops = oops;
+ this.lastDeployed = lastDeployed;
+ this.dbError = dbError;
this.theme = theme;
this.onClose = onClose;
}
@@ -713,53 +578,48 @@ class ShipLogComponent {
lines.push("");
lines.push(truncateToWidth(
th.fg("borderMuted", "─".repeat(3)) +
- th.fg("accent", " 🚀 Calvana Ship Log ") +
- th.fg("borderMuted", "─".repeat(Math.max(0, width - 26))),
+ th.fg("accent", " 🚀 Calvana Ship Log (DB) ") +
+ th.fg("borderMuted", "─".repeat(Math.max(0, width - 30))),
width
));
lines.push("");
- // Ships
- if (this.state.ships.length === 0) {
+ if (this.dbError) {
+ lines.push(truncateToWidth(` ${th.fg("error", `DB ERROR: ${this.dbError}`)}`, width));
+ lines.push("");
+ lines.push(truncateToWidth(` ${th.fg("dim", "Press Escape to close")}`, width));
+ lines.push("");
+ this.cachedWidth = width;
+ this.cachedLines = lines;
+ return lines;
+ }
+
+ if (this.ships.length === 0) {
lines.push(truncateToWidth(` ${th.fg("dim", "No ships yet.")}`, width));
} else {
- const shipped = this.state.ships.filter(s => s.status === "shipped").length;
- lines.push(truncateToWidth(
- ` ${th.fg("muted", `${shipped}/${this.state.ships.length} shipped`)}`,
- width
- ));
+ const shipped = this.ships.filter(s => s.status === "shipped").length;
+ lines.push(truncateToWidth(` ${th.fg("muted", `${shipped}/${this.ships.length} shipped`)}`, width));
lines.push("");
-
- for (const s of this.state.ships) {
+ for (const s of this.ships) {
const badge = s.status === "shipped" ? th.fg("success", "✓ SHIPPED ")
: s.status === "shipping" ? th.fg("warning", "● SHIPPING")
: th.fg("dim", "○ PLANNED ");
- lines.push(truncateToWidth(
- ` ${badge} ${th.fg("accent", `#${s.id}`)} ${th.fg("text", s.title)}`,
- width
- ));
- lines.push(truncateToWidth(
- ` ${th.fg("dim", s.timestamp)} · ${th.fg("dim", s.metric)}`,
- width
- ));
+ lines.push(truncateToWidth(` ${badge} ${th.fg("accent", `#${s.id}`)} ${th.fg("text", s.title)}`, width));
+ lines.push(truncateToWidth(` ${th.fg("dim", s.created)} · ${th.fg("dim", s.metric)}`, width));
}
}
- // Oops
- if (this.state.oops.length > 0) {
+ if (this.oops.length > 0) {
lines.push("");
lines.push(truncateToWidth(` ${th.fg("warning", "💥 Oops Log")}`, width));
- for (const o of this.state.oops) {
- lines.push(truncateToWidth(
- ` ${th.fg("error", "─")} ${th.fg("muted", o.description)} ${th.fg("dim", `(${o.fixTime})`)}`,
- width
- ));
+ for (const o of this.oops) {
+ lines.push(truncateToWidth(` ${th.fg("error", "─")} ${th.fg("muted", o.description)} ${th.fg("dim", `(${o.fixTime})`)}`, width));
}
}
lines.push("");
- if (this.state.lastDeployed) {
- lines.push(truncateToWidth(` ${th.fg("dim", `Last deployed: ${this.state.lastDeployed}`)}`, width));
+ if (this.lastDeployed) {
+ lines.push(truncateToWidth(` ${th.fg("dim", `Last deployed: ${this.lastDeployed}`)}`, width));
}
lines.push(truncateToWidth(` ${th.fg("dim", "Press Escape to close")}`, width));
lines.push("");
@@ -775,27 +635,22 @@ class ShipLogComponent {
}
}
-// ════════════════════════════════════════════════════════════════════
-// HTML GENERATOR — Builds the /live page from current state
-// ════════════════════════════════════════════════════════════════════
+// Interface re-exports for the component
+interface DbShip { id: number; title: string; status: string; metric: string; created: string; }
+interface DbOops { id: number; description: string; fixTime: string; commitLink: string; created: string; }
-// Keep the old function signature for backward compat but it's no longer called by deploy
-function generateLivePageHtml(state: ShipLogState): string {
- return generateLivePageFromDb(
- state.ships.map(s => ({ id: s.id, title: s.title, status: s.status, metric: s.metric, details: "", created: s.timestamp, updated: s.timestamp })),
- state.oops.map(o => ({ id: o.id, description: o.description, fixTime: o.fixTime, commitLink: o.commitLink, created: o.timestamp }))
- );
-}
+// ════════════════════════════════════════════════════════════════════
+// HTML GENERATOR
+// ════════════════════════════════════════════════════════════════════
function generateLivePageFromDb(
- ships: Array<{ id: number; title: string; status: string; metric: string; details: string; created: string; updated: string }>,
+ ships: Array<{ id: number; title: string; status: string; metric: string; created: string; updated: string }>,
oops: Array<{ id: number; description: string; fixTime: string; commitLink: string; created: string }>
): string {
const now = new Date().toISOString();
const shipped = ships.filter(s => s.status === "shipped").length;
const shipping = ships.filter(s => s.status === "shipping").length;
- // Group ships by date (newest first)
const shipsByDate = new Map();
for (const s of [...ships].reverse()) {
const date = s.created.split(" ")[0] || s.created.split("T")[0] || "Unknown";
@@ -813,33 +668,23 @@ function generateLivePageFromDb(
let shipSections = "";
for (const [date, dateShips] of shipsByDate) {
const cards = dateShips.map(s => {
- const badgeClass = s.status === "shipped" ? "badge-shipped"
- : s.status === "shipping" ? "badge-shipping"
- : "badge-planned";
+ const badgeClass = s.status === "shipped" ? "badge-shipped" : s.status === "shipping" ? "badge-shipping" : "badge-planned";
const badgeLabel = s.status.charAt(0).toUpperCase() + s.status.slice(1);
- // If details has HTML (from DB), use it; otherwise use metric
- const hasDetails = s.details && s.details.trim().length > 10 && s.details.includes("<");
- const detailsBlock = hasDetails
- ? `\n ${s.details}
`
- : "";
-
return `
-
${escapeHtml(s.metric)}
${detailsBlock}
+
${escapeHtml(s.metric)}
`;
}).join("\n");
shipSections += `
-
-${cards}
-
+ \n${cards}\n
`;
}
@@ -935,9 +780,7 @@ ${shipSections}
Oops Log
If it's not here, I haven't broken it yet.
-
-${oopsEntries}
-
+ \n${oopsEntries}\n
- {/* Stacking cards — sticky scroll effect */}
+ {/* Stacking cards — all sticky siblings in ONE parent */}
- {PERSONAS.map((p, i) => {
+ {PERSONAS.flatMap((p, i) => {
const imgFirst = i % 2 === 0
- return (
-
-
+
-
-
- {/* Image — ~58%, fills full card height */}
-
-
-
-
- {/* Text — ~42% */}
-
-
- {p.scenario}
-
-
- {/* Pain stats */}
-
-
- {p.stat1}
-
-
- {p.stat2}
-
-
-
-
- {p.copy}
-
-
-
- {p.cta} →
-
-
+
-
- )
+
+
+ {p.scenario}
+
+
+
+ {p.stat1}
+
+
+ {p.stat2}
+
+
+
+ {p.copy}
+
+
+ {p.cta} →
+
+
+
+
+
,
+ ]
+ if (i < PERSONAS.length - 1) {
+ els.push(
)
+ }
+ return els
})}
diff --git a/screenshots/fidya-current/fidya-feeding.jpg b/screenshots/fidya-current/fidya-feeding.jpg
new file mode 100644
index 0000000..a492951
Binary files /dev/null and b/screenshots/fidya-current/fidya-feeding.jpg differ
diff --git a/screenshots/fidya-current/fidya-quran-elder.jpg b/screenshots/fidya-current/fidya-quran-elder.jpg
new file mode 100644
index 0000000..f348200
Binary files /dev/null and b/screenshots/fidya-current/fidya-quran-elder.jpg differ
diff --git a/screenshots/fidya-current/kaffarah-community.jpg b/screenshots/fidya-current/kaffarah-community.jpg
new file mode 100644
index 0000000..c56c989
Binary files /dev/null and b/screenshots/fidya-current/kaffarah-community.jpg differ
diff --git a/screenshots/fidya-kaffarah-full.png b/screenshots/fidya-kaffarah-full.png
new file mode 100644
index 0000000..bed9a05
Binary files /dev/null and b/screenshots/fidya-kaffarah-full.png differ
diff --git a/screenshots/homepage-full.png b/screenshots/homepage-full.png
new file mode 100644
index 0000000..21efbb6
Binary files /dev/null and b/screenshots/homepage-full.png differ
diff --git a/screenshots/make-it-right-full.png b/screenshots/make-it-right-full.png
new file mode 100644
index 0000000..7ee1110
Binary files /dev/null and b/screenshots/make-it-right-full.png differ
diff --git a/screenshots/one-eats-full.png b/screenshots/one-eats-full.png
new file mode 100644
index 0000000..48d1a27
Binary files /dev/null and b/screenshots/one-eats-full.png differ
diff --git a/screenshots/ramadan-full.png b/screenshots/ramadan-full.png
new file mode 100644
index 0000000..bb8441c
Binary files /dev/null and b/screenshots/ramadan-full.png differ
diff --git a/screenshots/reference/oneeats-slide-1.png b/screenshots/reference/oneeats-slide-1.png
new file mode 100644
index 0000000..5f1f459
Binary files /dev/null and b/screenshots/reference/oneeats-slide-1.png differ
diff --git a/screenshots/reference/oneeats-slide-2.png b/screenshots/reference/oneeats-slide-2.png
new file mode 100644
index 0000000..70085d0
Binary files /dev/null and b/screenshots/reference/oneeats-slide-2.png differ
diff --git a/screenshots/reference/ramadan-cr-fidya-new.jpg b/screenshots/reference/ramadan-cr-fidya-new.jpg
new file mode 100644
index 0000000..e2c882c
Binary files /dev/null and b/screenshots/reference/ramadan-cr-fidya-new.jpg differ
diff --git a/screenshots/reference/ramadan-cr-hunger6-zakat.jpg b/screenshots/reference/ramadan-cr-hunger6-zakat.jpg
new file mode 100644
index 0000000..003f4e0
Binary files /dev/null and b/screenshots/reference/ramadan-cr-hunger6-zakat.jpg differ
diff --git a/screenshots/reference/ramadan-cr-wrong-1.jpg b/screenshots/reference/ramadan-cr-wrong-1.jpg
new file mode 100644
index 0000000..504700e
Binary files /dev/null and b/screenshots/reference/ramadan-cr-wrong-1.jpg differ
diff --git a/screenshots/reference/ramadan-cr-wrong-4.jpg b/screenshots/reference/ramadan-cr-wrong-4.jpg
new file mode 100644
index 0000000..76def8c
Binary files /dev/null and b/screenshots/reference/ramadan-cr-wrong-4.jpg differ
diff --git a/screenshots/reference/spoonfeed-hero-6.png b/screenshots/reference/spoonfeed-hero-6.png
new file mode 100644
index 0000000..b239b1b
Binary files /dev/null and b/screenshots/reference/spoonfeed-hero-6.png differ
diff --git a/screenshots/reference/spoonfeed-hero-7.png b/screenshots/reference/spoonfeed-hero-7.png
new file mode 100644
index 0000000..37b2c34
Binary files /dev/null and b/screenshots/reference/spoonfeed-hero-7.png differ