Skip to content

Instantly share code, notes, and snippets.

@galligan
Created January 31, 2026 04:35
Show Gist options
  • Select an option

  • Save galligan/fc0579f6888bd1a0ee6ce748d7fb96c5 to your computer and use it in GitHub Desktop.

Select an option

Save galligan/fc0579f6888bd1a0ee6ce748d7fb96c5 to your computer and use it in GitHub Desktop.
Feed-o-matic (patch-feed) — append-only micro-log for cross-session continuity

Feed-o-matic (aka patch-feed)

A tiny, append-only micro-log intended to act as a “continuity spine” across fragmented contexts (Slack threads/DMs/channels, multiple sessions, background runs).

Core idea:

  • Write path: fast, messy, append-only capture
  • Read path: semantic/keyword retrieval + later curation

Format (JSONL)

One JSON object per line.

Fields (v1; schema may evolve):

  • ts (unix seconds)
  • text (canonical, human-legible)
  • session (short hash derived from session key when available)
  • tags (string[])
  • refs (optional) e.g. { kind: "file", path: "..." }, { kind: "url", url: "..." }, { kind: "slack", channel, thread_ts, ts }

CLI

./bin/patch-feed add "fixed Slack thread reads" --tag slack --ref-url https://...
./bin/patch-feed tail --n 10
./bin/patch-feed prune --days 7

Notes

  • The canonical record is the text field; structured fields are additive metadata.
  • Prune = archive (never delete).
{"ts":1769831262,"text":"Idea: package Feed-o-matic as a versioned skill so it can be shared/updated cleanly.","session":"50d8b4a9","tags":["feed-o-matic","skills","shipping"],"refs":[{"kind":"file","path":"memory/notes/2026-01-30-patch-feed.md"}],"meta":{"actor":"patch"}}
#!/usr/bin/env bun
import { createHash, randomUUID } from "crypto";
import { mkdir, readFile, rename, stat, writeFile, appendFile } from "fs/promises";
import * as path from "path";
/** A reference attached to a feed entry; discriminated on `kind`. */
type Ref =
  | { kind: "file"; path: string }
  | { kind: "url"; url: string };

/**
 * One line of the JSONL feed. `text` is the canonical, human-legible record;
 * every other field is additive structure. `tags` is part of the documented
 * v1 schema (and appears in existing on-disk records), so it is modeled here
 * as optional — entries written without it remain valid.
 */
type FeedEntry = {
  ts: number; // unix seconds
  text: string; // canonical text (soft 280-char cap, tags folded in as hashtags)
  session: string; // 8-hex-char hash derived from the session key
  tags?: string[]; // optional explicit tag list (v1 schema field)
  refs?: Ref[]; // optional file/url references
  meta?: Record<string, unknown>; // free-form metadata, e.g. { actor: "patch" }
};
// Repo root, assuming this script lives in <root>/bin/.
// NOTE: import.meta.dir is Bun-specific (see the #!/usr/bin/env bun shebang).
const ROOT = path.resolve(import.meta.dir, "..");
// Active append-only feed: one JSON object per line (JSONL).
const FEED_PATH = path.join(ROOT, "memory", "patch-feed.jsonl");
// Pruned entries are appended here, bucketed into monthly YYYY-MM.jsonl files.
const ARCHIVE_DIR = path.join(ROOT, "memory", "patch-feed-archive");
/** Hex-encoded SHA-1 digest of `input`. */
function sha1Hex(input: string): string {
  const hasher = createHash("sha1");
  hasher.update(input);
  return hasher.digest("hex");
}

/**
 * Derive a short (8 hex char) session identifier from the first *defined*
 * session env var, in priority order (OPENCLAW_SESSION_KEY wins). An unset
 * chain — or a defined-but-empty winner — hashes the literal "unknown".
 */
function sessionHash(): string {
  const candidates = [
    process.env.OPENCLAW_SESSION_KEY,
    process.env.OC_SESSION_KEY,
    process.env.SESSION_KEY,
    process.env.SESSION,
  ];
  // Matches `??` semantics: take the first value that is defined at all,
  // even if it is the empty string.
  const firstDefined = candidates.find((v) => v !== undefined);
  const key = firstDefined && firstDefined.length > 0 ? firstDefined : "unknown";
  return sha1Hex(key).slice(0, 8);
}
/** Current wall-clock time as whole unix seconds. */
function nowUnixSeconds(): number {
  // trunc === floor here because Date.now() is always non-negative.
  return Math.trunc(Date.now() / 1000);
}
/**
 * Append `#tag` to `text` unless the tag already appears as a standalone
 * hashtag (bounded by whitespace or string edges). A leading "#" on the
 * incoming tag is tolerated; an empty tag is a no-op.
 */
function ensureHashTag(text: string, tagRaw: string): string {
  const tag = tagRaw.startsWith("#") ? tagRaw.slice(1) : tagRaw;
  if (!tag) return text;
  const re = new RegExp(`(^|\\s)#${escapeRegExp(tag)}(\\s|$)`);
  if (re.test(text)) return text;
  const sep = text.length === 0 ? "" : " ";
  return `${text}${sep}#${tag}`;
}

/**
 * Escape regex metacharacters so `s` can be embedded literally in a RegExp.
 * BUG FIX: the original character class was terminated early by an escaped
 * backslash (`[...[\\]` closed the class), so metacharacters were left
 * unescaped and tags like "c++" produced invalid RegExp source and threw.
 */
function escapeRegExp(s: string): string {
  return s.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
}
/**
 * Enforce a soft 280-character cap on entry text.
 * Returns the (possibly truncated) text plus a flag so callers can warn.
 * Truncation keeps 279 chars and appends a single unicode ellipsis, so the
 * result is always exactly 280 chars. NOTE: lengths are UTF-16 code units,
 * so astral characters (e.g. emoji) count as 2.
 *
 * Fix: removed the tautological `if (280 >= 1)` guard — its else branch
 * (plain slice to 280 without ellipsis) was unreachable dead code.
 */
function softLimit280(text: string): { text: string; truncated: boolean } {
  if (text.length <= 280) return { text, truncated: false };
  return { text: text.slice(0, 279) + "…", truncated: true };
}
/** Create the directory containing `p` (and any missing ancestors). */
async function ensureParentDir(p: string): Promise<void> {
  const parent = path.dirname(p);
  await mkdir(parent, { recursive: true });
}
/** True when `p` exists (any file type); resolves false instead of throwing. */
function fileExists(p: string): Promise<boolean> {
  return stat(p).then(
    () => true,
    () => false,
  );
}
/**
 * Print usage to stdout and terminate the process (hence the `never` return).
 * Callers pass 0 for explicit help, nonzero for usage errors.
 */
function printHelp(exitCode = 0): never {
  const lines = [
    "patch-feed — tiny append-only micro-log",
    "",
    "Usage:",
    " patch-feed add <text> [--ref-file <path> ...] [--ref-url <url> ...] [--tag <tag> ...]",
    " patch-feed tail [--n 10]",
    " patch-feed prune [--days 7]",
    "",
    "Env:",
    " OPENCLAW_SESSION_KEY / OC_SESSION_KEY / SESSION_KEY used to derive session hash",
    "",
  ];
  console.log(lines.join("\n"));
  process.exit(exitCode);
}
/** Split process argv into the command word and everything after it. */
function parseArgs(argv: string[]) {
  // Skip the runtime and script path; destructure the command off the front.
  const [cmd, ...rest] = argv.slice(2);
  return { cmd, rest };
}
/**
 * Parse the tokens after the command word.
 * Known flags (--ref-file/--ref-url/--tag/--n/--days) consume one value;
 * a bare "--" switches to literal mode where every remaining token is text;
 * anything else accumulates into `textParts`.
 */
function parseFlags(rest: string[]) {
  const flags: {
    refFiles: string[];
    refUrls: string[];
    tags: string[];
    n?: number;
    days?: number;
    textParts: string[];
  } = { refFiles: [], refUrls: [], tags: [], textParts: [] };

  // Fetch the mandatory value following the flag at `idx`, or fail loudly.
  const valueAfter = (idx: number, flag: string): string => {
    const v = rest[idx + 1];
    if (!v) throw new Error(`${flag} requires a value`);
    return v;
  };
  // Numeric flags must parse to a finite, non-negative number.
  const nonNegative = (raw: string, flag: string): number => {
    const num = Number(raw);
    if (!Number.isFinite(num) || num < 0) throw new Error(`${flag} must be a non-negative number`);
    return num;
  };

  let pos = 0;
  let literalText = false;
  while (pos < rest.length) {
    const tok = rest[pos];
    if (literalText) {
      flags.textParts.push(tok);
      pos++;
      continue;
    }
    switch (tok) {
      case "--":
        literalText = true;
        pos++;
        break;
      case "--ref-file":
        flags.refFiles.push(valueAfter(pos, "--ref-file"));
        pos += 2;
        break;
      case "--ref-url":
        flags.refUrls.push(valueAfter(pos, "--ref-url"));
        pos += 2;
        break;
      case "--tag":
        flags.tags.push(valueAfter(pos, "--tag"));
        pos += 2;
        break;
      case "--n":
        flags.n = nonNegative(valueAfter(pos, "--n"), "--n");
        pos += 2;
        break;
      case "--days":
        flags.days = nonNegative(valueAfter(pos, "--days"), "--days");
        pos += 2;
        break;
      default:
        flags.textParts.push(tok);
        pos++;
    }
  }
  return flags;
}
/**
 * `add` command: append one entry to the active feed.
 * Tags are folded into the canonical text as inline hashtags (not stored as
 * a separate field); text is soft-capped at 280 chars with a stderr warning
 * when truncation happens.
 */
async function cmdAdd(rest: string[]) {
  const flags = parseFlags(rest);
  const rawText = flags.textParts.join(" ").trim();
  if (!rawText) throw new Error("add requires <text>");

  // Fold each requested tag into the text unless it is already present.
  const tagged = flags.tags.reduce((acc, t) => ensureHashTag(acc, t), rawText);
  const { text, truncated } = softLimit280(tagged);

  const refs: Ref[] = [
    ...flags.refFiles.map((p): Ref => ({ kind: "file", path: p })),
    ...flags.refUrls.map((u): Ref => ({ kind: "url", url: u })),
  ];

  const entry: FeedEntry = {
    ts: nowUnixSeconds(),
    text,
    session: sessionHash(),
    ...(refs.length > 0 ? { refs } : {}),
    meta: { actor: "patch" },
  };

  await ensureParentDir(FEED_PATH);
  await appendFile(FEED_PATH, JSON.stringify(entry) + "\n", "utf8");
  if (truncated) {
    console.error("patch-feed: warning: text exceeded 280 chars; truncated");
  }
}
/**
 * Render one entry for `tail` output: local-formatted timestamp, session
 * hash in parentheses, the text, then any refs as trailing [kind:value]
 * brackets. Timestamp formatting uses the host locale/timezone.
 */
function formatEntryLine(e: FeedEntry): string {
  const stamp = new Date(e.ts * 1000).toLocaleString(undefined, {
    year: "numeric",
    month: "2-digit",
    day: "2-digit",
    hour: "2-digit",
    minute: "2-digit",
    second: "2-digit",
  });

  const refParts: string[] = [];
  for (const r of e.refs ?? []) {
    if (r.kind === "file") refParts.push(`[file:${r.path}]`);
    else if (r.kind === "url") refParts.push(`[url:${r.url}]`);
    else refParts.push(`[ref]`); // defensive: unknown kinds from old records
  }
  const refs = refParts.length ? " " + refParts.join(" ") : "";

  return `${stamp} (${e.session}) ${e.text}${refs}`;
}
/**
 * Load every parseable entry from the active feed, in file order.
 * Missing feed → empty list; malformed JSONL lines are skipped so one bad
 * record cannot brick the tool.
 */
async function readEntriesFromFeed(): Promise<FeedEntry[]> {
  if (!(await fileExists(FEED_PATH))) return [];
  const raw = await readFile(FEED_PATH, "utf8");
  const entries: FeedEntry[] = [];
  for (const line of raw.split(/\r?\n/)) {
    if (line.trim().length === 0) continue; // blank lines are not records
    try {
      entries.push(JSON.parse(line));
    } catch {
      // Skip malformed lines rather than breaking the tool.
    }
  }
  return entries;
}
/**
 * `tail` command: print the last `--n` entries (default 10), newest first.
 */
async function cmdTail(rest: string[]) {
  const limit = parseFlags(rest).n ?? 10;
  const entries = await readEntriesFromFeed();
  const newestFirst = entries.slice(Math.max(0, entries.length - limit)).reverse();
  for (const entry of newestFirst) {
    console.log(formatEntryLine(entry));
  }
}
/** Archive file name for an entry timestamp: "YYYY-MM.jsonl" in local time. */
function localArchiveNameForTs(ts: number): string {
  const when = new Date(ts * 1000);
  const month = `${when.getMonth() + 1}`.padStart(2, "0");
  return `${when.getFullYear()}-${month}.jsonl`;
}
/**
 * `prune` command: move entries older than `--days` (default 7) into monthly
 * archive files, then atomically rewrite the active feed with what remains.
 * Prune means archive — nothing is ever deleted. Entries without a numeric
 * `ts` are always kept.
 */
async function cmdPrune(rest: string[]) {
  const { days } = parseFlags(rest);
  const cutoff = nowUnixSeconds() - (days ?? 7) * 86400;
  const entries = await readEntriesFromFeed();
  if (entries.length === 0) return;

  const keep = entries.filter((e) => typeof e.ts !== "number" || e.ts > cutoff);
  const move = entries.filter((e) => typeof e.ts === "number" && e.ts <= cutoff);

  if (move.length > 0) {
    await mkdir(ARCHIVE_DIR, { recursive: true });
    // Bucket archived entries by their YYYY-MM archive path.
    const byArchive = new Map<string, FeedEntry[]>();
    for (const entry of move) {
      const target = path.join(ARCHIVE_DIR, localArchiveNameForTs(entry.ts));
      const bucket = byArchive.get(target);
      if (bucket) bucket.push(entry);
      else byArchive.set(target, [entry]);
    }
    // Archives stay append-only too: never rewrite an archive file.
    for (const [archivePath, bucket] of byArchive) {
      const payload = bucket.map((e) => JSON.stringify(e)).join("\n") + "\n";
      await appendFile(archivePath, payload, "utf8");
    }
  }

  // Atomic rewrite of the active feed: write a temp file, then rename over.
  await ensureParentDir(FEED_PATH);
  const tmp = `${FEED_PATH}.tmp-${process.pid}-${randomUUID()}`;
  const body = keep.map((e) => JSON.stringify(e)).join("\n");
  await writeFile(tmp, body.length ? body + "\n" : "", "utf8");
  await rename(tmp, FEED_PATH);
}
/**
 * CLI entry point: dispatch on the command word; unknown commands print
 * usage and exit 1; any command error is reported on stderr and exits 1.
 */
async function main() {
  const { cmd, rest } = parseArgs(process.argv);
  if (!cmd || ["help", "-h", "--help"].includes(cmd)) printHelp(0);
  try {
    switch (cmd) {
      case "add":
        return await cmdAdd(rest);
      case "tail":
        return await cmdTail(rest);
      case "prune":
        return await cmdPrune(rest);
      default:
        console.error(`patch-feed: unknown command: ${cmd}`);
        printHelp(1);
    }
  } catch (err: any) {
    console.error(`patch-feed: ${err?.message ?? String(err)}`);
    process.exit(1);
  }
}
await main();
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment