#!/usr/bin/env python3
"""
Cognitive Memory - CLI Interface

Command-line interface for the cognitive memory system.
"""

import argparse
import json
import sys
from pathlib import Path

from client import CognitiveMemoryClient
from common import (
    MEMORY_DIR,
    VALID_RELATION_TYPES,
    VALID_TYPES,
    _load_memory_config,
    create_graph,
    resolve_graph_path,
    list_graphs,
)


def _csv(value):
    """Split a comma-separated CLI option into stripped parts.

    Returns None when *value* is empty/None so downstream client calls can
    distinguish "not provided" from "provided but empty".
    """
    return [part.strip() for part in value.split(",")] if value else None


def _truncate_summary(text, limit=100):
    """Truncate *text* to at most *limit* chars, backing up to a word boundary.

    Used to derive an episode summary from memory content.
    """
    stripped = text.strip()
    summary = stripped[:limit]
    if len(stripped) > limit:
        last_space = summary.rfind(" ")
        if last_space > 0:
            summary = summary[:last_space]
    return summary


def _build_parser():
    """Construct the top-level argparse parser with every subcommand."""
    parser = argparse.ArgumentParser(
        description="Cognitive Memory - Markdown-based memory system with decay scoring",
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    parser.add_argument(
        "--graph",
        default=None,
        help="Named memory graph to use (default: 'default')",
    )
    subparsers = parser.add_subparsers(dest="command", help="Commands")

    # store
    sp = subparsers.add_parser("store", help="Store a new memory")
    sp.add_argument(
        "--type", "-t", required=True, choices=VALID_TYPES, help="Memory type"
    )
    sp.add_argument("--title", required=True, help="Memory title")
    sp.add_argument("--content", "-c", required=True, help="Memory content")
    sp.add_argument("--tags", help="Comma-separated tags")
    sp.add_argument(
        "--importance", "-i", type=float, default=0.5, help="Importance 0.0-1.0"
    )
    sp.add_argument("--confidence", type=float, default=0.8, help="Confidence 0.0-1.0")
    sp.add_argument(
        "--episode",
        action="store_true",
        default=False,
        help="Also log an episode entry",
    )

    # recall
    sp = subparsers.add_parser("recall", help="Search memories by query")
    sp.add_argument("query", help="Search query")
    sp.add_argument("--types", help="Comma-separated memory types")
    sp.add_argument("--limit", "-n", type=int, default=10, help="Max results")
    sp.add_argument(
        "--no-semantic",
        action="store_true",
        default=False,
        help="Disable semantic search (keyword-only, faster)",
    )

    # get
    sp = subparsers.add_parser("get", help="Get memory by ID")
    sp.add_argument("memory_id", help="Memory UUID")

    # relate
    sp = subparsers.add_parser("relate", help="Create relationship")
    sp.add_argument("from_id", help="Source memory UUID")
    sp.add_argument("to_id", help="Target memory UUID")
    sp.add_argument("rel_type", choices=VALID_RELATION_TYPES, help="Relationship type")
    sp.add_argument("--strength", type=float, default=0.8, help="Strength 0.0-1.0")
    sp.add_argument("--context", help="Context description")
    sp.add_argument("--description", help="Rich edge description body")

    # edge-get
    sp = subparsers.add_parser("edge-get", help="Get edge by ID")
    sp.add_argument("edge_id", help="Edge UUID")

    # edge-search
    sp = subparsers.add_parser("edge-search", help="Search edges")
    sp.add_argument("--query", "-q", help="Text query")
    sp.add_argument("--types", help="Comma-separated relation types")
    sp.add_argument("--from-id", help="Filter by source memory ID")
    sp.add_argument("--to-id", help="Filter by target memory ID")
    sp.add_argument("--limit", "-n", type=int, default=20, help="Max results")

    # edge-update
    sp = subparsers.add_parser("edge-update", help="Update an edge")
    sp.add_argument("edge_id", help="Edge UUID")
    sp.add_argument("--description", help="New description body")
    sp.add_argument("--strength", type=float, help="New strength 0.0-1.0")

    # edge-delete
    sp = subparsers.add_parser("edge-delete", help="Delete an edge")
    sp.add_argument("edge_id", help="Edge UUID")

    # search
    sp = subparsers.add_parser("search", help="Filter memories")
    sp.add_argument("--query", "-q", help="Text query")
    sp.add_argument("--types", help="Comma-separated memory types")
    sp.add_argument("--tags", help="Comma-separated tags")
    sp.add_argument("--min-importance", type=float, help="Minimum importance")
    sp.add_argument("--limit", "-n", type=int, default=20, help="Max results")

    # update
    sp = subparsers.add_parser("update", help="Update a memory")
    sp.add_argument("memory_id", help="Memory UUID")
    sp.add_argument("--title", help="New title")
    sp.add_argument("--content", help="New content")
    sp.add_argument("--tags", help="New tags (comma-separated)")
    sp.add_argument("--importance", type=float, help="New importance")

    # delete
    sp = subparsers.add_parser("delete", help="Delete a memory")
    sp.add_argument("memory_id", help="Memory UUID")
    sp.add_argument("--force", "-f", action="store_true", help="Skip confirmation")

    # related
    sp = subparsers.add_parser("related", help="Get related memories")
    sp.add_argument("memory_id", help="Memory UUID")
    sp.add_argument("--types", help="Comma-separated relationship types")
    sp.add_argument("--depth", type=int, default=1, help="Traversal depth 1-5")

    # stats
    subparsers.add_parser("stats", help="Show statistics")

    # recent
    sp = subparsers.add_parser("recent", help="Recently created memories")
    sp.add_argument("--limit", "-n", type=int, default=20, help="Max results")

    # decay
    subparsers.add_parser("decay", help="Recalculate all decay scores")

    # core
    subparsers.add_parser("core", help="Generate CORE.md")

    # episode
    sp = subparsers.add_parser("episode", help="Log episode entry")
    sp.add_argument("--type", "-t", required=True, help="Entry type")
    sp.add_argument("--title", required=True, help="Entry title")
    sp.add_argument("--tags", help="Comma-separated tags")
    sp.add_argument("--summary", "-s", help="Summary text")
    sp.add_argument("--memory-link", help="Path to related memory file")

    # reindex
    subparsers.add_parser("reindex", help="Rebuild index from files")

    # embed
    embed_parser = subparsers.add_parser(
        "embed", help="Generate embeddings for all memories via Ollama"
    )
    embed_parser.add_argument(
        "--if-changed",
        action="store_true",
        help="Skip if no memories were added or deleted since last embed",
    )

    # pin
    sp = subparsers.add_parser("pin", help="Move memory to vault (never decays)")
    sp.add_argument("memory_id", help="Memory UUID")

    # reflect
    sp = subparsers.add_parser(
        "reflect", help="Review recent memories and identify clusters"
    )
    sp.add_argument("--since", help="ISO date (YYYY-MM-DD) to review from")
    sp.add_argument(
        "--dry-run", action="store_true", help="Preview without updating state"
    )

    # merge
    sp = subparsers.add_parser(
        "merge", help="Merge two memories (absorb one into another)"
    )
    sp.add_argument("keep_id", help="Memory UUID to keep")
    sp.add_argument("absorb_id", help="Memory UUID to absorb and delete")
    sp.add_argument(
        "--dry-run", action="store_true", help="Preview merge without writing"
    )

    # reflection
    subparsers.add_parser("reflection", help="Generate REFLECTION.md summary")

    # tags (with its own nested subcommands)
    sp = subparsers.add_parser("tags", help="Tag analysis commands")
    tags_sub = sp.add_subparsers(dest="tags_command")
    sp2 = tags_sub.add_parser("list", help="List all tags with counts")
    sp2.add_argument("--limit", "-n", type=int, default=0, help="Max results (0=all)")
    sp3 = tags_sub.add_parser("related", help="Find co-occurring tags")
    sp3.add_argument("tag", help="Tag to analyze")
    sp3.add_argument("--limit", "-n", type=int, default=0, help="Max results (0=all)")
    sp4 = tags_sub.add_parser("suggest", help="Suggest tags for a memory")
    sp4.add_argument("memory_id", help="Memory UUID")

    # procedure
    sp = subparsers.add_parser(
        "procedure", help="Store a procedure memory (convenience wrapper)"
    )
    sp.add_argument("--title", required=True, help="Procedure title")
    sp.add_argument("--content", "-c", required=True, help="Procedure description")
    sp.add_argument("--steps", help="Comma-separated ordered steps")
    sp.add_argument("--preconditions", help="Comma-separated preconditions")
    sp.add_argument("--postconditions", help="Comma-separated postconditions")
    sp.add_argument("--tags", help="Comma-separated tags")
    sp.add_argument(
        "--importance", "-i", type=float, default=0.5, help="Importance 0.0-1.0"
    )

    # config
    sp = subparsers.add_parser("config", help="Manage embedding config")
    sp.add_argument("--show", action="store_true", help="Display current config")
    sp.add_argument(
        "--provider", choices=["ollama", "openai"], help="Set embedding provider"
    )
    sp.add_argument("--openai-key", help="Set OpenAI API key")
    sp.add_argument(
        "--ollama-model", help="Set Ollama model name (e.g. qwen3-embedding:8b)"
    )

    # graphs
    subparsers.add_parser("graphs", help="List available memory graphs")

    # graph-create
    sp = subparsers.add_parser("graph-create", help="Create a new named memory graph")
    sp.add_argument("name", help="Graph name (alphanumeric, hyphens OK)")
    sp.add_argument(
        "--path",
        default=None,
        help=(
            "Custom directory path for the graph. "
            "If omitted, uses the convention path (~/.local/share/cognitive-memory-graphs/). "
            "Custom paths are registered in the default graph's _config.json."
        ),
    )

    return parser


def main():
    """Parse CLI arguments, dispatch to the memory client, print JSON result."""
    parser = _build_parser()
    args = parser.parse_args()

    if not args.command:
        parser.print_help()
        sys.exit(1)

    # Resolve the (possibly named) graph to a directory and build the client.
    graph_path = resolve_graph_path(args.graph)
    client = CognitiveMemoryClient(memory_dir=graph_path)

    result = None

    if args.command == "store":
        tags = _csv(args.tags)
        memory_id = client.store(
            type=args.type,
            title=args.title,
            content=args.content,
            tags=tags,
            importance=args.importance,
            confidence=args.confidence,
        )
        result = {"success": True, "memory_id": memory_id}
        if args.episode:
            # Pull the memory's relative file path from the index so the
            # episode entry can link back to it.
            index = client._load_index()
            entry = index.get("entries", {}).get(memory_id, {})
            rel_path = entry.get("path", "")
            client.episode(
                type=args.type,
                title=args.title,
                tags=tags or [],
                summary=_truncate_summary(args.content),
                memory_link=rel_path,
            )
            result["episode_logged"] = True

    elif args.command == "recall":
        result = client.recall(
            args.query,
            memory_types=_csv(args.types),
            limit=args.limit,
            semantic=not args.no_semantic,
        )

    elif args.command == "get":
        result = client.get(args.memory_id)
        if not result:
            result = {"error": "Memory not found"}

    elif args.command == "relate":
        edge_id = client.relate(
            args.from_id,
            args.to_id,
            args.rel_type,
            strength=args.strength,
            context=args.context,
            description=args.description,
        )
        result = {"success": bool(edge_id), "edge_id": edge_id}

    elif args.command == "edge-get":
        result = client.edge_get(args.edge_id)
        if not result:
            result = {"error": "Edge not found"}

    elif args.command == "edge-search":
        result = client.edge_search(
            query=args.query,
            types=_csv(args.types),
            from_id=args.from_id,
            to_id=args.to_id,
            limit=args.limit,
        )

    elif args.command == "edge-update":
        success = client.edge_update(
            args.edge_id,
            description=args.description,
            strength=args.strength,
        )
        result = {"success": success}

    elif args.command == "edge-delete":
        success = client.edge_delete(args.edge_id)
        result = {"success": success}

    elif args.command == "search":
        result = client.search(
            query=args.query,
            memory_types=_csv(args.types),
            tags=_csv(args.tags),
            min_importance=args.min_importance,
            limit=args.limit,
        )

    elif args.command == "update":
        success = client.update(
            args.memory_id,
            title=args.title,
            content=args.content,
            tags=_csv(args.tags),
            importance=args.importance,
        )
        result = {"success": success}

    elif args.command == "delete":
        aborted = False
        if not args.force:
            mem = client.get(args.memory_id)
            if mem:
                print(f"Deleting: {mem.get('title')}", file=sys.stderr)
                # Bug fix: --force promises to "skip confirmation" but no
                # confirmation ever happened. Prompt only on an interactive
                # terminal so scripted/piped usage keeps the old behavior.
                if sys.stdin.isatty():
                    reply = input("Delete this memory? [y/N] ").strip().lower()
                    aborted = reply not in ("y", "yes")
        if aborted:
            result = {"success": False, "aborted": True}
        else:
            success = client.delete(args.memory_id)
            result = {"success": success}

    elif args.command == "related":
        result = client.related(
            args.memory_id, rel_types=_csv(args.types), max_depth=args.depth
        )

    elif args.command == "stats":
        result = client.stats()

    elif args.command == "recent":
        result = client.recent(limit=args.limit)

    elif args.command == "decay":
        result = client.decay()

    elif args.command == "core":
        content = client.core()
        # Report the output path, not the content (content is written to file).
        result = {
            "success": True,
            "path": str(client.memory_dir / "CORE.md"),
            "chars": len(content),
        }

    elif args.command == "episode":
        client.episode(
            type=args.type,
            title=args.title,
            tags=_csv(args.tags),
            summary=args.summary,
            memory_link=args.memory_link,
        )
        result = {"success": True}

    elif args.command == "reindex":
        count = client.reindex()
        result = {"success": True, "indexed": count}

    elif args.command == "embed":
        if not args.if_changed:
            print(
                "Generating embeddings (this may take a while if model needs to be pulled)...",
                file=sys.stderr,
            )
        result = client.embed(if_changed=args.if_changed)

    elif args.command == "pin":
        success = client.pin(args.memory_id)
        result = {"success": success}

    elif args.command == "reflect":
        result = client.reflect(
            since=args.since,
            dry_run=args.dry_run,
        )

    elif args.command == "merge":
        result = client.merge(
            keep_id=args.keep_id,
            absorb_id=args.absorb_id,
            dry_run=args.dry_run,
        )

    elif args.command == "reflection":
        content = client.reflection_summary()
        result = {
            "success": True,
            "path": str(client.memory_dir / "REFLECTION.md"),
            "chars": len(content),
        }

    elif args.command == "tags":
        if args.tags_command == "list":
            result = client.tags_list(limit=args.limit)
        elif args.tags_command == "related":
            result = client.tags_related(args.tag, limit=args.limit)
        elif args.tags_command == "suggest":
            result = client.tags_suggest(args.memory_id)
        else:
            # No subcommand given: dig the "tags" subparser back out of the
            # top-level parser (private argparse API) to print its help.
            for action in parser._subparsers._actions:
                if isinstance(action, argparse._SubParsersAction):
                    tags_parser = action.choices.get("tags")
                    if tags_parser:
                        tags_parser.print_help()
                    break
            sys.exit(1)

    elif args.command == "procedure":
        memory_id = client.store(
            type="procedure",
            title=args.title,
            content=args.content,
            tags=_csv(args.tags),
            importance=args.importance,
            steps=_csv(args.steps),
            preconditions=_csv(args.preconditions),
            postconditions=_csv(args.postconditions),
        )
        result = {"success": True, "memory_id": memory_id}

    elif args.command == "graphs":
        result = list_graphs()

    elif args.command == "graph-create":
        custom_path = Path(args.path) if args.path else None
        result = create_graph(args.name, path=custom_path)

    elif args.command == "config":
        config_path = client.memory_dir / "_config.json"
        config = _load_memory_config(config_path)
        changed = False
        if args.provider:
            config["embedding_provider"] = args.provider
            changed = True
        if args.openai_key:
            config["openai_api_key"] = args.openai_key
            changed = True
        if args.ollama_model:
            config["ollama_model"] = args.ollama_model
            changed = True
        if changed:
            config_path.write_text(json.dumps(config, indent=2))
            result = {"success": True, "updated": True}
        elif args.show or not changed:
            # NOTE(review): when a setting is changed, --show is ignored and
            # only the update acknowledgement is printed (pre-existing
            # behavior, kept as-is).
            # Mask the API key for display.
            display = dict(config)
            key = display.get("openai_api_key")
            if key and isinstance(key, str) and len(key) > 8:
                display["openai_api_key"] = key[:4] + "..." + key[-4:]
            result = display

    print(json.dumps(result, indent=2, default=str))


if __name__ == "__main__":
    main()