diff --git a/feature.json b/feature.json
index 3b8defd..c8f86b5 100644
--- a/feature.json
+++ b/feature.json
@@ -1,7 +1,7 @@
 {
   "name": "cognitive-memory",
-  "version": "3.1.0",
-  "description": "Markdown-based memory system with decay scoring, episodic logging, semantic search, reflection cycles, auto-curated CORE.md, native MCP server integration, rich edge files, and hybrid Ollama/OpenAI embeddings",
+  "version": "3.2.0",
+  "description": "Markdown-based memory system with decay scoring, episodic logging, semantic search, reflection cycles, auto-curated CORE.md, native MCP server integration, rich edge files, hybrid Ollama/OpenAI embeddings, multi-graph support, and auto-edge creation",
   "created": "2026-02-13",
   "migrated_from": "memorygraph",
   "status": "active",
@@ -15,7 +15,8 @@
     "mcp_server.py": "MCP server for Claude Code tool integration",
     "scripts/session_memory.py": "SessionEnd hook — auto-stores session learnings",
     "scripts/edge-proposer.py": "Edge proposer for relationship discovery",
-    "scripts/memory-git-sync.sh": "Git sync for data directory",
+    "scripts/memory-git-sync.sh": "Git sync for default + named graph repos",
+    "scripts/maintain-all-graphs.sh": "Loop maintenance commands over all graphs",
     "systemd/": "Reference copies of systemd user timers (see systemd/README.md)"
   },
   "skill_layer": "~/.claude/skills/cognitive-memory/ (SKILL.md + SCHEMA.md)",
@@ -51,6 +52,10 @@
     "config: Manage embedding provider (ollama/openai) with fallback",
     "MCP server: Native Claude Code tool integration via JSON-RPC stdio",
     "Hybrid embeddings: Ollama (local) + OpenAI (optional) with automatic fallback",
-    "Rich edges: First-class edge files in graph/edges/ with descriptions"
+    "Rich edges: First-class edge files in graph/edges/ with descriptions",
+    "graphs: List available memory graphs",
+    "graph-create: Create a new named memory graph",
+    "Auto-edges: Automatically create edges on memory_store via type-pair heuristics",
+    "Multi-graph: Named isolated graphs with per-project routing via COGNITIVE_MEMORY_GRAPH env var"
   ]
 }