Created
December 20, 2025 00:33
-
-
Save Dhravya/23fb13628b24161c2c0eb2b559aeebd9 to your computer and use it in GitHub Desktop.
supermemory opencode plugin WIP
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| /** | |
| * memory plugin - adds long-term memory to opencode using supermemory | |
| * | |
| * features: | |
| * - silently injects memories into system prompt (no visible messages) | |
| * - provides search_memories tool for agent-driven retrieval | |
| * - incremental conversation ingestion with consistent customId | |
| * - file-specific contextual memory | |
| * - persistent across session compactions | |
| * | |
| * requires SUPERMEMORY_API_KEY environment variable | |
| */ | |
| import type { Plugin } from "@opencode-ai/plugin"; | |
| import { tool } from "@opencode-ai/plugin"; | |
| import Supermemory from "supermemory"; | |
| import * as fs from "fs"; | |
| import * as path from "path"; | |
| import * as os from "os"; | |
// Give up on a memory search after this long; callers fall back to empty results.
const SEARCH_TIMEOUT_MS = 2000;
// Don't bother ingesting conversations shorter than this many messages.
const MIN_MESSAGES_TO_INGEST = 2;
const INCREMENTAL_INGEST_INTERVAL = 5; // Ingest every N messages
const FILE_CACHE_TTL_MS = 5 * 60 * 1000; // 5 minutes
// Log file setup — relative path, so the log is written to the process's
// current working directory (typically the project root opencode runs in).
const LOG_FILE = ".opencode-supermemory.log";
| const logToFile = (message: string) => { | |
| const timestamp = new Date().toISOString(); | |
| const logLine = `[${timestamp}] ${message}\n`; | |
| try { | |
| fs.appendFileSync(LOG_FILE, logLine); | |
| } catch (e) { | |
| // Silently fail if we can't write to log | |
| } | |
| }; | |
| const log = (...args: any[]) => { | |
| const message = args.map(arg => | |
| typeof arg === 'object' ? JSON.stringify(arg, null, 2) : String(arg) | |
| ).join(' '); | |
| logToFile(message); | |
| }; | |
// One memory hit as returned by the Supermemory search API.
interface MemoryResult {
  id: string;
  // Condensed memory text; consumers in this file fall back to
  // chunks[0].content when this is empty.
  memory: string;
  metadata: Record<string, unknown> | null;
  // Relevance score for the query that produced this hit.
  similarity: number;
  updatedAt: string;
  // Optional raw document chunks backing this memory.
  chunks?: Array<{
    content: string;
    documentId: string;
    position?: number;
    score?: number;
  }>;
  version?: number | null;
}
// Envelope returned by Supermemory search (and used as the timeout fallback
// shape throughout this file).
interface SearchResponse {
  results: MemoryResult[];
  // Timing value reported by the API — presumably milliseconds; confirm
  // against the Supermemory SDK docs.
  timing: number;
  total: number;
}
| function formatProfile(profile: any): string | null { | |
| const parts: string[] = []; | |
| // Static profile - persistent facts about coding preferences | |
| if (profile.profile?.static?.length > 0) { | |
| parts.push("## Your Coding Profile"); | |
| for (const item of profile.profile.static) { | |
| parts.push(`- ${item}`); | |
| } | |
| } | |
| // Dynamic profile - recent patterns and behaviors | |
| if (profile.profile?.dynamic?.length > 0) { | |
| parts.push("\n## Recent Patterns"); | |
| for (const item of profile.profile.dynamic) { | |
| parts.push(`- ${item}`); | |
| } | |
| } | |
| // Relevant memories from search results | |
| if (profile.search_results?.results?.length > 0) { | |
| parts.push("\n## Relevant Context"); | |
| for (const mem of profile.search_results.results) { | |
| const content = mem.memory || mem.chunks?.[0]?.content; | |
| if (content) { | |
| parts.push(`- ${content}`); | |
| } | |
| } | |
| } | |
| if (parts.length === 0) return null; | |
| return parts.join("\n"); | |
| } | |
| function formatFileMemories( | |
| filePath: string, | |
| memories: MemoryResult[], | |
| ): string | null { | |
| if (!memories.length) return null; | |
| const parts = [`## Context for ${filePath}`]; | |
| for (const mem of memories) { | |
| const content = mem.memory || mem.chunks?.[0]?.content; | |
| if (content) { | |
| parts.push(`- ${content}`); | |
| } | |
| } | |
| if (parts.length === 1) return null; | |
| return `<file_memories>\n${parts.join("\n")}\n</file_memories>`; | |
| } | |
| function formatConversation( | |
| messages: Array<{ | |
| info: { role: string }; | |
| parts: Array<{ type: string; text?: string }>; | |
| }>, | |
| ): string { | |
| return messages | |
| .map((m) => { | |
| const role = m.info.role === "user" ? "User" : "Assistant"; | |
| const text = m.parts | |
| .filter((p) => p.type === "text" && p.text) | |
| .map((p) => p.text) | |
| .join("\n"); | |
| return text ? `${role}: ${text}` : null; | |
| }) | |
| .filter(Boolean) | |
| .join("\n\n"); | |
| } | |
| function withTimeout<T>( | |
| promise: Promise<T>, | |
| ms: number, | |
| fallback: T, | |
| ): Promise<T> { | |
| return Promise.race([ | |
| promise, | |
| new Promise<T>((resolve) => setTimeout(() => resolve(fallback), ms)), | |
| ]); | |
| } | |
/**
 * Plugin entry point. Wires Supermemory-backed long-term memory into
 * opencode's plugin hooks: silent system-prompt injection, incremental
 * conversation ingestion, file-read context, and a search_memories tool.
 * Returns an empty hook object (plugin disabled) when SUPERMEMORY_API_KEY
 * is not set.
 */
export const MemoryPlugin: Plugin = async ({ client, project, directory }) => {
  const apiKey = process.env.SUPERMEMORY_API_KEY;
  if (!apiKey) {
    log("[memory] SUPERMEMORY_API_KEY not set, plugin disabled");
    return {};
  }
  log(`[memory] 🚀 Plugin initializing... Log file: ${LOG_FILE}`);
  log(`[memory] 📂 Tail logs with: tail -f ${LOG_FILE}`);
  const supermemory = new Supermemory({ apiKey });
  // Prefer the opencode project id; fall back to the directory basename.
  const projectName = project?.id || directory.split("/").pop() || "unknown";
  log(`[memory] 📦 Project: ${projectName}`);
  // Container tags scope memories: one shared across all projects, one per project.
  const containers = {
    global: "opencode-global",
    project: `opencode-${projectName}`,
  };
  // Mutable plugin-wide state shared by all hooks below.
  let currentSessionId: string | null = null;
  // NOTE(review): initialMemoriesContext is assigned in loadMemories() but
  // never read anywhere in this file — candidate for removal.
  let initialMemoriesContext: string | null = null;
  // NOTE(review): messageCount is plugin-wide rather than per-session, so
  // interleaved sessions share (and skew) the incremental-ingest counter —
  // confirm this is intended.
  let messageCount = 0;
  // Per-file search results with a TTL (see FILE_CACHE_TTL_MS).
  const fileMemoryCache = new Map<
    string,
    { timestamp: number; memories: MemoryResult[] }
  >();
  // Session-specific memory state: the formatted context to inject, and
  // whether it has already been pushed into the system prompt.
  const sessionMemoryState = new Map<string, {
    context: string | null;
    injected: boolean;
  }>();
  // Track loading promises so the system-prompt transform can await an
  // in-flight load instead of racing it.
  const sessionLoadingPromises = new Map<string, Promise<void>>();
  /**
   * Search one container for memories matching `query`. Never throws:
   * both timeouts and API errors degrade to an empty SearchResponse.
   */
  async function searchMemories(
    query: string,
    containerTag: string,
    limit: number = 5,
    threshold: number = 0.5,
  ): Promise<SearchResponse> {
    try {
      const result = await withTimeout(
        supermemory.search.memories({
          q: query,
          containerTag,
          limit,
          threshold,
        }),
        SEARCH_TIMEOUT_MS,
        { results: [], timing: 0, total: 0 },
      );
      return result;
    } catch (error) {
      log(`[memory] search failed for ${containerTag}:`, error);
      return { results: [], timing: 0, total: 0 };
    }
  }
  /**
   * Fetch the Supermemory profile for this project and, when it yields any
   * renderable context, store it in sessionMemoryState for later injection.
   * Shows a toast in TUI mode; all failures are logged, never thrown.
   */
  async function loadMemories(sessionId: string): Promise<void> {
    log(`[memory] 🔄 Starting loadMemories for session: ${sessionId}`);
    try {
      // Use profile API for richer context
      log(`[memory] 🔍 Fetching profile from Supermemory...`);
      const profile = await withTimeout(
        supermemory.profile({
          containerTag: containers.project,
          q: "recent coding context and preferences",
        }),
        SEARCH_TIMEOUT_MS,
        { profile: { static: [], dynamic: [] }, searchResults: { results: [], timing: 0, total: 0 } },
      );
      const staticCount = profile.profile?.static?.length || 0;
      const dynamicCount = profile.profile?.dynamic?.length || 0;
      const relevantCount = profile.searchResults?.results?.length || 0;
      const totalMemories = staticCount + dynamicCount + relevantCount;
      log(`[memory] 📊 Profile results: ${staticCount} static, ${dynamicCount} dynamic, ${relevantCount} relevant`);
      const context = formatProfile(profile);
      log(`[memory] 📝 Formatted context length: ${context?.length || 0} chars`);
      if (context) {
        // Store context for this session
        sessionMemoryState.set(sessionId, {
          context,
          injected: false,
        });
        initialMemoriesContext = context;
        log(
          `[memory] ✅ Stored profile in sessionMemoryState for ${sessionId}`,
        );
        // Show toast notification about memories being loaded (not creating a visible message)
        await client.tui.showToast({
          body: {
            message: `Loaded ${totalMemories} items from Supermemory profile`,
            variant: "info",
          },
        }).catch((err) => {
          log("[memory] Toast failed (probably not in TUI mode):", err?.message || "unknown error");
        });
      } else {
        log(`[memory] ⚠️ No context generated (no memories found or formatting returned null)`);
      }
    } catch (error) {
      log("[memory] ❌ Failed to load memories:", error);
    }
  }
  /**
   * Push the current session transcript into Supermemory (both the global
   * and project containers). Uses a stable customId per session so repeated
   * (incremental) ingests update the same document rather than duplicating
   * it. No-ops for very short or text-free conversations; errors are logged.
   */
  async function ingestConversation(sessionId: string, isIncremental: boolean = false): Promise<void> {
    try {
      const response = await client.session.messages({
        path: { id: sessionId },
      });
      const messages = response.data || [];
      if (messages.length < MIN_MESSAGES_TO_INGEST) {
        return;
      }
      const conversationText = formatConversation(messages);
      if (!conversationText.trim()) {
        return;
      }
      // Use consistent customId for the same session across both containers
      // This allows updates to the same conversation as it progresses
      const customId = `session-${sessionId}`;
      await Promise.all([
        supermemory.memories.add({
          content: conversationText,
          containerTag: containers.global,
          customId,
          metadata: {
            type: "conversation",
            project: projectName,
            sessionId,
            messageCount: messages.length,
            lastUpdated: Date.now(),
          },
        }),
        supermemory.memories.add({
          content: conversationText,
          containerTag: containers.project,
          customId,
          metadata: {
            type: "conversation",
            sessionId,
            messageCount: messages.length,
            lastUpdated: Date.now(),
          },
        }),
      ]);
      const ingestType = isIncremental ? "incremental" : "final";
      log(
        `[memory] ${ingestType} ingest: ${messages.length} messages with customId: ${customId}`,
      );
    } catch (error) {
      log("[memory] failed to ingest conversation:", error);
    }
  }
  /**
   * Return project-container memories relevant to `filePath`, served from
   * fileMemoryCache when the entry is younger than FILE_CACHE_TTL_MS.
   * Empty results are cached too, which avoids hammering the API for files
   * with no associated memories.
   */
  async function getFileMemories(filePath: string): Promise<MemoryResult[]> {
    const cached = fileMemoryCache.get(filePath);
    if (cached && Date.now() - cached.timestamp < FILE_CACHE_TTL_MS) {
      return cached.memories;
    }
    const result = await searchMemories(
      `file ${filePath} patterns context knowledge`,
      containers.project,
      3,
      0.7, // higher threshold for contextual search
    );
    const memories = result.results;
    fileMemoryCache.set(filePath, { timestamp: Date.now(), memories });
    return memories;
  }
  return {
    // Lifecycle events: load memories on session creation, ingest the
    // transcript once the session goes idle.
    event: async ({ event }) => {
      if (event.type === "session.created") {
        const sessionId = event.properties.info.id;
        currentSessionId = sessionId;
        messageCount = 0;
        // Start loading and track the promise
        const loadPromise = loadMemories(sessionId);
        sessionLoadingPromises.set(sessionId, loadPromise);
        // Wait for it to complete
        await loadPromise;
        log(`[memory] ✅ Memories fully loaded for session ${sessionId}`);
      }
      if (event.type === "session.idle") {
        const sessionId = event.properties.sessionID;
        if (sessionId) {
          await ingestConversation(sessionId, false);
          // Clean up the loading promise
          sessionLoadingPromises.delete(sessionId);
        }
      }
    },
    // Re-ingest the conversation every INCREMENTAL_INGEST_INTERVAL messages
    // so memory stays fresh even before the session goes idle.
    "chat.message": async (input, output) => {
      messageCount++;
      // Incremental ingestion every N messages
      if (messageCount % INCREMENTAL_INGEST_INTERVAL === 0) {
        await ingestConversation(input.sessionID, true);
      }
    },
    // Silently append stored memory context to the system prompt, once per
    // injected=false state (file reads reset the flag to force re-injection).
    "experimental.chat.system.transform": async (input, output) => {
      log(`[memory] system.transform called, currentSessionId: ${currentSessionId}`);
      if (!currentSessionId) {
        log("[memory] No currentSessionId, skipping injection");
        return;
      }
      // CRITICAL: Wait for loading to complete if still in progress
      const loadPromise = sessionLoadingPromises.get(currentSessionId);
      if (loadPromise) {
        log(`[memory] ⏳ Waiting for memories to finish loading...`);
        await loadPromise;
        log(`[memory] ✅ Memories ready, proceeding with injection`);
      }
      const state = sessionMemoryState.get(currentSessionId);
      log(`[memory] state for ${currentSessionId}:`, state ? `has context: ${!!state.context}, injected: ${state.injected}` : "NO STATE");
      if (state?.context && !state.injected) {
        output.system.push("## Long-term Memory\n" + state.context);
        state.injected = true;
        log("[memory] ✅ Successfully injected memories into system prompt");
      } else if (!state) {
        log("[memory] ❌ No state found in sessionMemoryState map");
      } else if (!state.context) {
        log("[memory] ❌ State exists but no context");
      } else if (state.injected) {
        log("[memory] ⏭️ Already injected, skipping");
      }
    },
    // Carry the memory context through session compaction so it survives
    // the summarization step.
    "experimental.session.compacting": async (input, output) => {
      const state = sessionMemoryState.get(input.sessionID);
      if (state?.context) {
        output.context.push(
          "## Persistent Memory Context\n" +
          "The following memories should be retained across compaction:\n" +
          state.context
        );
        log("[memory] added memories to compaction context");
      }
    },
    // Before a file read, fetch file-specific memories and append them to
    // the session context, clearing `injected` so the next system transform
    // picks them up. Failures never block the read.
    // NOTE(review): each qualifying read appends to state.context, so the
    // same file's context can accumulate after the cache TTL expires —
    // confirm duplicates are acceptable.
    "tool.execute.before": async (input, output) => {
      if (input.tool !== "read" || !currentSessionId) return;
      const filePath = output.args?.filePath;
      if (!filePath || typeof filePath !== "string") return;
      try {
        const memories = await getFileMemories(filePath);
        if (memories.length > 0) {
          const context = formatFileMemories(filePath, memories);
          if (context) {
            // Store file-specific context for system transform
            const state = sessionMemoryState.get(currentSessionId);
            if (state) {
              state.context = (state.context || "") + "\n\n" + context;
              state.injected = false; // Allow re-injection with file context
            }
            log(
              `[memory] prepared ${memories.length} file memories for ${filePath}`,
            );
          }
        }
      } catch (error) {
        // silently fail - don't block file reads
      }
    },
    // Agent-facing tool: explicit memory search across global and/or
    // project containers, results rendered as a numbered list.
    tool: {
      search_memories: tool({
        description: "Search long-term memory for coding preferences, project context, past decisions, or learned patterns. Use this when you need specific information about the user's preferences, project conventions, or previous discussions.",
        args: {
          query: tool.schema.string().describe("What to search for in memories (e.g., 'testing framework preference', 'database choice', 'API design patterns')"),
          scope: tool.schema.enum(["global", "project", "both"]).default("both")
            .describe("Search scope: 'global' for cross-project preferences, 'project' for project-specific knowledge, 'both' for comprehensive search")
        },
        async execute(args) {
          try {
            const searches: Promise<SearchResponse>[] = [];
            if (args.scope === "global" || args.scope === "both") {
              searches.push(searchMemories(args.query, containers.global, 5, 0.6));
            }
            if (args.scope === "project" || args.scope === "both") {
              searches.push(searchMemories(args.query, containers.project, 5, 0.6));
            }
            const results = await Promise.all(searches);
            const allMemories = results.flatMap(r => r.results);
            if (allMemories.length === 0) {
              return "No relevant memories found for this query.";
            }
            const formatted = allMemories.map((mem, idx) => {
              // Memories ingested into the global container carry a
              // `project` metadata field (see ingestConversation).
              const content = mem.memory || mem.chunks?.[0]?.content || "";
              const source = mem.metadata?.project ? `[Project: ${mem.metadata.project}]` : "[Global]";
              return `${idx + 1}. ${source} ${content}`;
            }).join("\n\n");
            return `Found ${allMemories.length} relevant memories:\n\n${formatted}`;
          } catch (error) {
            log("[memory] search_memories tool error:", error);
            return "Failed to search memories. Please try again.";
          }
        }
      })
    },
  };
};
| export default MemoryPlugin; |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment