From 022e8e384427541e7eb90a2c03544498a563bc0f Mon Sep 17 00:00:00 2001 From: Patrick Brosnan Date: Wed, 1 Apr 2026 13:04:13 +1100 Subject: [PATCH 01/10] fix(alive): allow Write to non-existent log.md for new walnut creation The log guardian hook blocked ALL Write operations to log.md, which prevented creating log.md for brand-new walnuts. Now checks if the file exists first -- only blocks Write to existing log.md files. Edit (prepend) to existing log.md is still allowed. Co-Authored-By: Claude Opus 4.6 (1M context) --- plugins/alive/hooks/scripts/alive-log-guardian.sh | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/plugins/alive/hooks/scripts/alive-log-guardian.sh b/plugins/alive/hooks/scripts/alive-log-guardian.sh index 1d384ca..f142179 100755 --- a/plugins/alive/hooks/scripts/alive-log-guardian.sh +++ b/plugins/alive/hooks/scripts/alive-log-guardian.sh @@ -22,10 +22,13 @@ if [ -n "${WORLD_ROOT:-}" ] && ! echo "$FILE_PATH" | grep -q "^$WORLD_ROOT"; the exit 0 fi -# Block ALL Write operations to log.md (must use Edit to prepend) +# Block Write operations to existing log.md (must use Edit to prepend) +# Allow Write to non-existent log.md (new walnut creation) if [ "$TOOL_NAME" = "Write" ]; then - echo '{"hookSpecificOutput":{"hookEventName":"PreToolUse","permissionDecision":"deny","permissionDecisionReason":"log.md cannot be overwritten. Use Edit to prepend new entries after the YAML frontmatter."}}' - exit 0 + if [ -f "$FILE_PATH" ]; then + echo '{"hookSpecificOutput":{"hookEventName":"PreToolUse","permissionDecision":"deny","permissionDecisionReason":"log.md cannot be overwritten. 
Use Edit to prepend new entries after the YAML frontmatter."}}' + exit 0 + fi fi # For Edit: check if the old_string contains a signed entry From 2f91e96d18a66729d4ed9f354918d1fc36851deb Mon Sep 17 00:00:00 2001 From: Patrick Brosnan Date: Wed, 1 Apr 2026 13:21:43 +1100 Subject: [PATCH 02/10] feat(alive): add cross-platform P2P utilities module - sha256_file: hashlib-based hashing (no subprocess) - safe_tar_create/extract: macOS-safe archiving with Zip Slip protection - atomic_json_write/read: crash-safe state files via temp+fsync+rename - detect_openssl: LibreSSL vs OpenSSL version and capability detection - b64_encode_file: single-line base64 via openssl CLI - parse_yaml_frontmatter: hand-rolled YAML parser matching generate-index.py Stdlib only, no pip dependencies. CLI smoke-test interface included. Task: fn-5-dof.2 Co-Authored-By: Claude Opus 4.6 (1M context) --- plugins/alive/scripts/alive-p2p.py | 528 +++++++++++++++++++++++++++++ 1 file changed, 528 insertions(+) create mode 100644 plugins/alive/scripts/alive-p2p.py diff --git a/plugins/alive/scripts/alive-p2p.py b/plugins/alive/scripts/alive-p2p.py new file mode 100644 index 0000000..4edf2b5 --- /dev/null +++ b/plugins/alive/scripts/alive-p2p.py @@ -0,0 +1,528 @@ +#!/usr/bin/env python3 +"""Cross-platform P2P utilities for the Alive sharing layer. + +Standalone stdlib-only module providing hashing, tar operations, atomic JSON +state files, OpenSSL detection, base64, and YAML frontmatter parsing. + +Designed for macOS (BSD tar, LibreSSL) and Linux (GNU tar, OpenSSL). +No pip dependencies -- python3 stdlib + openssl CLI only. + +Usage as library: + from alive_p2p import sha256_file, safe_tar_create, detect_openssl, ... 
def sha256_file(path):
    """Return the hex SHA-256 digest of *path*.

    Streams the file in 64 KiB chunks so arbitrarily large files are
    hashed in constant memory; pure hashlib, no subprocess.
    """
    digest = hashlib.sha256()
    with open(path, 'rb') as fh:
        for block in iter(lambda: fh.read(65536), b''):
            digest.update(block)
    return digest.hexdigest()
+ + - Sets COPYFILE_DISABLE=1 to suppress macOS resource forks. + - Excludes .DS_Store, Thumbs.db, ._* files. + - Rejects symlinks that resolve outside *source_dir*. + - Optional *strip_prefix* removes a leading path component from entries. + """ + source_dir = os.path.abspath(source_dir) + if not os.path.isdir(source_dir): + raise FileNotFoundError(f"Source directory not found: {source_dir}") + + # Suppress macOS resource forks (affects C-level tar inside python too) + os.environ['COPYFILE_DISABLE'] = '1' + + output_path = os.path.abspath(output_path) + os.makedirs(os.path.dirname(output_path), exist_ok=True) + + with tarfile.open(output_path, 'w:gz') as tar: + for root, dirs, files in os.walk(source_dir): + # Skip excluded directories in-place + dirs[:] = [d for d in dirs + if d not in _TAR_EXCLUDES and not d.startswith('._')] + + for name in sorted(files): + if _is_excluded(name): + continue + + full_path = os.path.join(root, name) + + # Reject symlinks that escape source_dir + if os.path.islink(full_path): + real = os.path.realpath(full_path) + if not (real == source_dir + or real.startswith(source_dir + os.sep)): + raise ValueError( + f"Symlink escapes source: {full_path} -> {real}") + + arcname = os.path.relpath(full_path, source_dir) + if strip_prefix: + if arcname.startswith(strip_prefix): + arcname = arcname[len(strip_prefix):] + arcname = arcname.lstrip(os.sep) + + tar.add(full_path, arcname=arcname) + + # Also add directories that are symlinks (check safety) + for d in dirs: + dir_path = os.path.join(root, d) + if os.path.islink(dir_path): + real = os.path.realpath(dir_path) + if not (real == source_dir + or real.startswith(source_dir + os.sep)): + raise ValueError( + f"Symlink escapes source: {dir_path} -> {real}") + + +def safe_tar_extract(archive_path, output_dir): + """Extract a tar.gz archive with path-traversal and symlink protection. + + - Rejects entries with ``../`` or absolute paths (Zip Slip). 
+ - Rejects symlinks pointing outside *output_dir*. + - Extracts to a staging directory first, then moves into *output_dir*. + """ + archive_path = os.path.abspath(archive_path) + output_dir = os.path.abspath(output_dir) + + if not os.path.isfile(archive_path): + raise FileNotFoundError(f"Archive not found: {archive_path}") + + os.makedirs(output_dir, exist_ok=True) + + # Use a staging directory in the same parent (same filesystem for rename) + parent = os.path.dirname(output_dir) + staging = tempfile.mkdtemp(dir=parent, prefix='.p2p-extract-') + + try: + with tarfile.open(archive_path, 'r:*') as tar: + # First pass: validate every entry + for member in tar.getmembers(): + # Reject absolute paths + if os.path.isabs(member.name): + raise ValueError( + f"Absolute path in archive: {member.name}") + + # Reject path traversal + resolved = _resolve_path(staging, member.name) + if resolved is None: + raise ValueError( + f"Path traversal in archive: {member.name}") + + # Reject symlinks that escape output + if member.issym() or member.islnk(): + link_target = member.linkname + # For symlinks, resolve relative to the member's parent + member_parent = os.path.join( + staging, os.path.dirname(member.name)) + if os.path.isabs(link_target): + link_resolved = link_target + else: + link_resolved = os.path.normpath( + os.path.join(member_parent, link_target)) + if not (link_resolved == staging + or link_resolved.startswith(staging + os.sep)): + raise ValueError( + f"Symlink escapes output: {member.name} " + f"-> {member.linkname}") + + # Second pass: extract (rewind) + tar.extractall(path=staging) + + # Move contents from staging into output_dir + for item in os.listdir(staging): + src = os.path.join(staging, item) + dst = os.path.join(output_dir, item) + if os.path.exists(dst): + # Remove existing to allow overwrite + if os.path.isdir(dst): + import shutil + shutil.rmtree(dst) + else: + os.remove(dst) + os.rename(src, dst) + + finally: + # Clean up staging directory + if 
def tar_list_entries(archive_path):
    """Return the entry names contained in a tar archive."""
    archive_path = os.path.abspath(archive_path)
    if not os.path.isfile(archive_path):
        raise FileNotFoundError(f"Archive not found: {archive_path}")
    with tarfile.open(archive_path, 'r:*') as archive:
        return [member.name for member in archive.getmembers()]


def atomic_json_write(path, data):
    """Write *data* as JSON to *path* atomically (temp + fsync + rename).

    The temp file lives in the target's own directory so os.replace()
    is a same-filesystem atomic rename on POSIX; on any failure the
    temp file is removed and the exception re-raised.
    """
    path = os.path.abspath(path)
    directory = os.path.dirname(path)
    os.makedirs(directory, exist_ok=True)

    fd, tmp_path = tempfile.mkstemp(dir=directory, suffix='.tmp')
    try:
        with os.fdopen(fd, 'w', encoding='utf-8') as handle:
            json.dump(data, handle, indent=2, default=str)
            handle.write('\n')
            handle.flush()
            os.fsync(handle.fileno())
        os.replace(tmp_path, path)
    except BaseException:
        # Never leave a stray temp file behind.
        try:
            os.unlink(tmp_path)
        except OSError:
            pass
        raise


def atomic_json_read(path):
    """Read JSON from *path*; {} when the file is missing or corrupt."""
    try:
        with open(os.path.abspath(path), 'r', encoding='utf-8') as handle:
            return json.load(handle)
    except (FileNotFoundError, json.JSONDecodeError, IOError):
        return {}
def detect_openssl():
    """Detect the system openssl binary and its capabilities.

    Returns a dict with keys ``binary``, ``version``, ``is_libressl``,
    ``supports_pbkdf2`` and ``supports_pkeyutl``; every value is None
    when no openssl binary can be found.
    """
    info = {
        'binary': None,
        'version': None,
        'is_libressl': None,
        'supports_pbkdf2': None,
        'supports_pkeyutl': None,
    }

    # Probe the usual locations; first responding binary wins.
    for candidate in ['openssl', '/usr/bin/openssl', '/usr/local/bin/openssl']:
        try:
            proc = subprocess.run(
                [candidate, 'version'],
                capture_output=True, text=True, timeout=5)
        except (FileNotFoundError, subprocess.TimeoutExpired):
            continue
        if proc.returncode == 0:
            info['binary'] = candidate
            info['version'] = proc.stdout.strip()
            break

    if info['binary'] is None:
        return info

    version_str = info['version'] or ''
    info['is_libressl'] = 'LibreSSL' in version_str

    # -pbkdf2 arrived in OpenSSL 1.1.1 and LibreSSL 3.1; older builds lack it.
    if info['is_libressl']:
        pattern, floor = r'LibreSSL\s+(\d+)\.(\d+)\.(\d+)', (3, 1, 0)
    else:
        pattern, floor = r'OpenSSL\s+(\d+)\.(\d+)\.(\d+)', (1, 1, 1)
    m = re.search(pattern, version_str)
    info['supports_pbkdf2'] = bool(m) and tuple(map(int, m.groups())) >= floor

    # pkeyutl (needed for RSA-OAEP): -help may exit nonzero on some builds,
    # so only "unknown command" in stderr means the subcommand is absent.
    try:
        proc = subprocess.run(
            [info['binary'], 'pkeyutl', '-help'],
            capture_output=True, text=True, timeout=5)
        info['supports_pkeyutl'] = 'unknown command' not in proc.stderr.lower()
    except (FileNotFoundError, subprocess.TimeoutExpired):
        info['supports_pkeyutl'] = False

    return info
def b64_encode_file(path):
    """Return strict base64 encoding of a file (no line breaks).

    Uses the stdlib ``base64`` module, which emits the same single-line
    strict encoding as ``openssl base64 -A`` on every platform -- and,
    unlike shelling out, keeps working when no openssl binary is
    installed at all.

    Raises FileNotFoundError when *path* does not exist.
    """
    import base64  # local import: no change to the module's import surface
    path = os.path.abspath(path)
    if not os.path.isfile(path):
        raise FileNotFoundError(f"File not found: {path}")
    with open(path, 'rb') as fh:
        return base64.b64encode(fh.read()).decode('ascii')
+ """ + match = re.match(r'^---\s*\n(.*?)\n---', content, re.DOTALL) + if not match: + return {} + + fm = {} + lines = match.group(1).split('\n') + i = 0 + while i < len(lines): + line = lines[i] + kv = re.match(r'^(\w[\w-]*)\s*:\s*(.*)', line) + if kv: + key = kv.group(1) + val = kv.group(2).strip() + + # Check for multi-line list (next lines start with " - ") + if val == '' or val == '[]': + items = [] + j = i + 1 + while j < len(lines) and re.match(r'^\s+-\s', lines[j]): + item_match = re.match(r'^\s+-\s+(.*)', lines[j]) + if item_match: + items.append(item_match.group(1).strip()) + j += 1 + if items: + fm[key] = items + i = j + continue + else: + fm[key] = val + elif val.startswith('[') and val.endswith(']'): + # Inline list: [a, b, c] + inner = val[1:-1] + fm[key] = [x.strip().strip('"').strip("'") + for x in inner.split(',') if x.strip()] + else: + # Remove surrounding quotes + if ((val.startswith('"') and val.endswith('"')) + or (val.startswith("'") and val.endswith("'"))): + val = val[1:-1] + + # Coerce booleans and numbers + lower = val.lower() + if lower == 'true': + fm[key] = True + elif lower == 'false': + fm[key] = False + elif lower == 'null' or lower == '~': + fm[key] = None + else: + # Try integer + try: + fm[key] = int(val) + except ValueError: + # Try float + try: + fm[key] = float(val) + except ValueError: + fm[key] = val + i += 1 + return fm + + +# --------------------------------------------------------------------------- +# CLI (smoke tests) +# --------------------------------------------------------------------------- + +def _cli(): + """Minimal CLI for smoke-testing functions.""" + if len(sys.argv) < 2: + print(__doc__) + sys.exit(1) + + cmd = sys.argv[1] + + if cmd == 'hash': + if len(sys.argv) < 3: + print("Usage: alive-p2p.py hash ", file=sys.stderr) + sys.exit(1) + print(sha256_file(sys.argv[2])) + + elif cmd == 'openssl': + info = detect_openssl() + for k, v in info.items(): + print(f" {k}: {v}") + + elif cmd == 'tar-create': + if 
def _cli():
    """Minimal CLI for smoke-testing functions."""
    if len(sys.argv) < 2:
        print(__doc__)
        sys.exit(1)

    cmd = sys.argv[1]

    def _require(argc, usage):
        # Bail out with a usage line when too few positional args were given.
        if len(sys.argv) < argc:
            print(usage, file=sys.stderr)
            sys.exit(1)

    if cmd == 'hash':
        _require(3, "Usage: alive-p2p.py hash <file>")
        print(sha256_file(sys.argv[2]))
    elif cmd == 'openssl':
        for key, value in detect_openssl().items():
            print(f"  {key}: {value}")
    elif cmd == 'tar-create':
        _require(4, "Usage: alive-p2p.py tar-create <source_dir> <output>")
        safe_tar_create(sys.argv[2], sys.argv[3])
        entries = tar_list_entries(sys.argv[3])
        print(f"Created {sys.argv[3]} ({len(entries)} entries)")
        for entry in entries:
            print(f"  {entry}")
    elif cmd == 'tar-extract':
        _require(4, "Usage: alive-p2p.py tar-extract <archive> <output_dir>")
        safe_tar_extract(sys.argv[2], sys.argv[3])
        print(f"Extracted to {sys.argv[3]}")
    elif cmd == 'tar-list':
        _require(3, "Usage: alive-p2p.py tar-list <archive>")
        for entry in tar_list_entries(sys.argv[2]):
            print(entry)
    elif cmd == 'b64':
        _require(3, "Usage: alive-p2p.py b64 <file>")
        print(b64_encode_file(sys.argv[2]))
    elif cmd == 'yaml':
        _require(3, "Usage: alive-p2p.py yaml <file>")
        with open(sys.argv[2], 'r', encoding='utf-8') as fh:
            print(json.dumps(parse_yaml_frontmatter(fh.read()),
                             indent=2, default=str))
    else:
        print(f"Unknown command: {cmd}", file=sys.stderr)
        print(__doc__)
        sys.exit(1)


if __name__ == '__main__':
    _cli()
manifest signing - Strips _kernel/_generated/, .alive/_squirrels/, active_sessions: from packages - Pre-flight size warning at 35 MB (GitHub Contents API limit) - Passphrase via WALNUT_PASSPHRASE env var, never CLI arg Task: fn-5-dof.3 Co-Authored-By: Claude Opus 4.6 (1M context) --- plugins/alive/scripts/alive-p2p.py | 1333 +++++++++++++++++++++++++++- 1 file changed, 1331 insertions(+), 2 deletions(-) diff --git a/plugins/alive/scripts/alive-p2p.py b/plugins/alive/scripts/alive-p2p.py index 4edf2b5..2e6fac7 100644 --- a/plugins/alive/scripts/alive-p2p.py +++ b/plugins/alive/scripts/alive-p2p.py @@ -2,13 +2,15 @@ """Cross-platform P2P utilities for the Alive sharing layer. Standalone stdlib-only module providing hashing, tar operations, atomic JSON -state files, OpenSSL detection, base64, and YAML frontmatter parsing. +state files, OpenSSL detection, base64, YAML frontmatter parsing, and +package creation/extraction/encryption for .walnut archives. Designed for macOS (BSD tar, LibreSSL) and Linux (GNU tar, OpenSSL). No pip dependencies -- python3 stdlib + openssl CLI only. Usage as library: from alive_p2p import sha256_file, safe_tar_create, detect_openssl, ... + from alive_p2p import create_package, extract_package, encrypt_package, ... Usage as CLI (smoke tests): python3 alive-p2p.py hash @@ -18,14 +20,20 @@ python3 alive-p2p.py tar-list python3 alive-p2p.py b64 python3 alive-p2p.py yaml + python3 alive-p2p.py create --scope --walnut [--bundle ...] 
[--output ] + python3 alive-p2p.py extract --input --output -Task: fn-5-dof.2 +Tasks: fn-5-dof.2, fn-5-dof.3 """ +import base64 +import copy +import datetime import hashlib import json import os import re +import shutil import subprocess import sys import tarfile @@ -452,6 +460,1245 @@ def parse_yaml_frontmatter(content): return fm +# --------------------------------------------------------------------------- +# Package format constants +# --------------------------------------------------------------------------- + +FORMAT_VERSION = '2.0.0' + +# Size threshold for pre-flight warning (35 MB -- GitHub Contents API limit +# with base64 overhead is ~50 MB, but 35 MB leaves margin) +SIZE_WARN_BYTES = 35 * 1024 * 1024 + +# Paths that are always excluded from packages +_PACKAGE_EXCLUDES = { + '_kernel/_generated', + '.alive/_squirrels', + '_kernel/history', + '_kernel/links.yaml', + '_kernel/people.yaml', + 'desktop.ini', +} + +# Filename patterns excluded from packages +_PACKAGE_EXCLUDE_NAMES = {'.DS_Store', 'Thumbs.db', 'desktop.ini'} + + +def _should_exclude_package(rel_path): + """Check whether a relative path should be excluded from a package.""" + base = os.path.basename(rel_path) + if base in _PACKAGE_EXCLUDE_NAMES: + return True + if base.startswith('._'): + return True + # Check path prefix exclusions + norm = rel_path.replace(os.sep, '/') + for excl in _PACKAGE_EXCLUDES: + if norm == excl or norm.startswith(excl + '/'): + return True + return False + + +def _strip_active_sessions(content): + """Remove active_sessions: blocks from manifest YAML content.""" + # Remove active_sessions: line and any subsequent indented lines + lines = content.split('\n') + result = [] + in_active_sessions = False + for line in lines: + if re.match(r'^active_sessions\s*:', line): + in_active_sessions = True + continue + if in_active_sessions: + # Keep going while indented (continuation of active_sessions block) + if line and (line[0] == ' ' or line[0] == '\t'): + continue + 
in_active_sessions = False + result.append(line) + return '\n'.join(result) + + +# --------------------------------------------------------------------------- +# Manifest generation and validation +# --------------------------------------------------------------------------- + +def generate_manifest(staging_dir, scope, walnut_name, bundles=None, + description='', encrypted=False, note='', + session_id='', engine='', plugin_version='2.0.0', + relay_info=None): + """Generate a manifest.yaml for a .walnut package. + + Hand-rolled YAML generation (no PyYAML dependency). Returns the YAML + string and also writes it to staging_dir/manifest.yaml. + + Parameters: + staging_dir: directory containing the staged files + scope: 'full', 'bundle', or 'snapshot' + walnut_name: name of the source walnut + bundles: list of bundle names (for bundle scope) + description: human-readable description + encrypted: whether the payload will be encrypted + note: optional personal note + session_id: current session identifier + engine: AI engine identifier + plugin_version: alive plugin version + relay_info: dict with 'repo' and 'sender' keys (optional) + """ + now = datetime.datetime.now(datetime.timezone.utc).strftime( + '%Y-%m-%dT%H:%M:%SZ') + + # Build file inventory with checksums + files = [] + for root, dirs, filenames in os.walk(staging_dir): + dirs.sort() + for fname in sorted(filenames): + if fname == 'manifest.yaml': + continue # Don't include manifest in its own inventory + full = os.path.join(root, fname) + rel = os.path.relpath(full, staging_dir).replace(os.sep, '/') + size = os.path.getsize(full) + checksum = sha256_file(full) + files.append({ + 'path': rel, + 'sha256': checksum, + 'size': size, + }) + + # Hand-roll YAML + lines = [] + lines.append(f'format_version: "{FORMAT_VERSION}"') + lines.append('') + lines.append('source:') + lines.append(f' walnut: {walnut_name}') + if session_id: + lines.append(f' session_id: {session_id}') + if engine: + lines.append(f' engine: 
{engine}') + lines.append(f' plugin_version: "{plugin_version}"') + lines.append('') + lines.append(f'scope: {scope}') + lines.append(f'created: "{now}"') + lines.append(f'encrypted: {"true" if encrypted else "false"}') + if description: + lines.append(f'description: "{_yaml_escape(description)}"') + lines.append('') + lines.append('files:') + for f in files: + lines.append(f' - path: {f["path"]}') + lines.append(f' sha256: {f["sha256"]}') + lines.append(f' size: {f["size"]}') + + if scope == 'bundle' and bundles: + lines.append('') + lines.append('bundles:') + for b in bundles: + lines.append(f' - {b}') + + if note: + lines.append('') + lines.append(f'note: "{_yaml_escape(note)}"') + + if relay_info: + lines.append('') + lines.append('relay:') + if relay_info.get('repo'): + lines.append(f' repo: {relay_info["repo"]}') + if relay_info.get('sender'): + lines.append(f' sender: {relay_info["sender"]}') + + yaml_content = '\n'.join(lines) + '\n' + + manifest_path = os.path.join(staging_dir, 'manifest.yaml') + with open(manifest_path, 'w', encoding='utf-8') as mf: + mf.write(yaml_content) + + return yaml_content + + +def _yaml_escape(s): + """Escape a string for embedding in double-quoted YAML values.""" + return s.replace('\\', '\\\\').replace('"', '\\"').replace('\n', '\\n') + + +def parse_manifest(manifest_content): + """Parse a manifest.yaml string into a dict. + + Uses a combination of the existing parse_yaml_frontmatter logic + and direct parsing for the files array. Since the manifest is + structured YAML (not frontmatter), we parse it line by line. + + Returns a dict with keys: format_version, source, scope, created, + encrypted, description, files, bundles, note, relay, signature. 
+ """ + manifest = {} + lines = manifest_content.strip().split('\n') + i = 0 + current_section = None # Track nested sections: 'source', 'relay', 'signature' + current_file = None + files_list = [] + bundles_list = [] + + while i < len(lines): + line = lines[i] + stripped = line.strip() + + # Skip blank lines and comments + if not stripped or stripped.startswith('#'): + i += 1 + continue + + # Detect indentation level + indent = len(line) - len(line.lstrip()) + + # Top-level key: value pairs (indent 0) + if indent == 0: + kv = re.match(r'^(\w[\w_-]*)\s*:\s*(.*)', line) + if kv: + key = kv.group(1) + val = kv.group(2).strip() + + if key == 'files' and (val == '' or val == '[]'): + current_section = 'files' + current_file = None + elif key == 'bundles' and (val == '' or val == '[]'): + current_section = 'bundles' + elif key == 'source' and val == '': + current_section = 'source' + manifest['source'] = {} + elif key == 'relay' and val == '': + current_section = 'relay' + manifest['relay'] = {} + elif key == 'signature' and val == '': + current_section = 'signature' + manifest['signature'] = {} + else: + current_section = None + manifest[key] = _yaml_unquote(val) + i += 1 + continue + + # Indented content belongs to current_section + if current_section == 'source' and indent >= 2: + kv = re.match(r'^\s+(\w[\w_-]*)\s*:\s*(.*)', line) + if kv: + manifest.setdefault('source', {})[kv.group(1)] = _yaml_unquote( + kv.group(2).strip()) + elif current_section == 'relay' and indent >= 2: + kv = re.match(r'^\s+(\w[\w_-]*)\s*:\s*(.*)', line) + if kv: + manifest.setdefault('relay', {})[kv.group(1)] = _yaml_unquote( + kv.group(2).strip()) + elif current_section == 'signature' and indent >= 2: + kv = re.match(r'^\s+(\w[\w_-]*)\s*:\s*(.*)', line) + if kv: + manifest.setdefault('signature', {})[kv.group(1)] = _yaml_unquote( + kv.group(2).strip()) + elif current_section == 'files': + if stripped.startswith('- path:'): + # Start of a new file entry + if current_file: + 
files_list.append(current_file) + path_val = stripped[len('- path:'):].strip() + current_file = {'path': _yaml_unquote(path_val)} + elif current_file and indent >= 4: + kv = re.match(r'^\s+(\w[\w_-]*)\s*:\s*(.*)', line) + if kv: + val = _yaml_unquote(kv.group(2).strip()) + # Coerce size to int + if kv.group(1) == 'size': + try: + val = int(val) + except (ValueError, TypeError): + pass + current_file[kv.group(1)] = val + elif current_section == 'bundles': + if stripped.startswith('- '): + bundles_list.append(stripped[2:].strip()) + + i += 1 + + # Flush last file entry + if current_file: + files_list.append(current_file) + + if files_list: + manifest['files'] = files_list + if bundles_list: + manifest['bundles'] = bundles_list + + # Coerce booleans + if 'encrypted' in manifest: + if isinstance(manifest['encrypted'], str): + manifest['encrypted'] = manifest['encrypted'].lower() == 'true' + + return manifest + + +def _yaml_unquote(val): + """Remove surrounding quotes from a YAML value string.""" + if not val: + return val + if (val.startswith('"') and val.endswith('"')) or \ + (val.startswith("'") and val.endswith("'")): + return val[1:-1] + # Coerce booleans/numbers + lower = val.lower() + if lower == 'true': + return True + if lower == 'false': + return False + if lower in ('null', '~'): + return None + try: + return int(val) + except ValueError: + pass + try: + return float(val) + except ValueError: + pass + return val + + +def validate_manifest(manifest): + """Validate a parsed manifest dict. Returns (ok, errors) tuple. + + Checks required fields, format_version compatibility, and file entries. 
def validate_manifest(manifest):
    """Validate a parsed manifest dict.

    Checks required fields, format_version compatibility (2.x only),
    scope, source section, file entries, and the bundle-scope
    requirement for a bundles list.  Returns (ok, errors).
    """
    errors = []

    for field in ('format_version', 'scope', 'created', 'files'):
        if field not in manifest:
            errors.append(f"Missing required field: {field}")

    fv = str(manifest.get('format_version', ''))
    if not fv.startswith('2.'):
        errors.append(f"Unsupported format_version: {fv} (expected 2.x)")

    scope = manifest.get('scope', '')
    if scope not in ('full', 'bundle', 'snapshot'):
        errors.append(f"Invalid scope: {scope}")

    source = manifest.get('source')
    if not isinstance(source, dict):
        errors.append("Missing or invalid 'source' section")
    elif 'walnut' not in source:
        errors.append("Missing 'source.walnut' field")

    files = manifest.get('files', [])
    if not isinstance(files, list):
        errors.append("'files' must be a list")
    else:
        for idx, item in enumerate(files):
            if not isinstance(item, dict):
                errors.append(f"File entry {idx} is not a dict")
                continue
            if 'path' not in item:
                errors.append(f"File entry {idx} missing 'path'")
            if 'sha256' not in item:
                errors.append(f"File entry {idx} missing 'sha256'")

    if scope == 'bundle' and not manifest.get('bundles'):
        errors.append("Bundle scope requires 'bundles' list")

    return (len(errors) == 0, errors)
def verify_checksums(manifest, base_dir):
    """Verify the SHA-256 of every file listed in the manifest.

    Returns (ok, failures); each failure dict names the path and whether
    the file was missing or its digest mismatched.
    """
    failures = []
    for record in manifest.get('files', []):
        rel = record['path']
        expected = record['sha256']
        target = os.path.join(base_dir, rel.replace('/', os.sep))

        if not os.path.isfile(target):
            failures.append({
                'path': rel,
                'error': 'file_missing',
                'expected': expected,
            })
            continue

        actual = sha256_file(target)
        if actual != expected:
            failures.append({
                'path': rel,
                'error': 'checksum_mismatch',
                'expected': expected,
                'actual': actual,
            })

    return (len(failures) == 0, failures)


def check_unlisted_files(manifest, base_dir):
    """Return relative paths present under *base_dir* but absent from the
    manifest (manifest.yaml itself is always tolerated)."""
    known = {record['path'] for record in manifest.get('files', [])}
    known.add('manifest.yaml')

    extras = []
    for root, _dirs, filenames in os.walk(base_dir):
        for fname in filenames:
            rel = os.path.relpath(os.path.join(root, fname),
                                  base_dir).replace(os.sep, '/')
            if rel not in known:
                extras.append(rel)
    return extras
+ + Scope rules (from arch doc section 2): + full: _kernel/ (key, log, insights) + all bundles/ + live context + bundle: _kernel/key.md + selected bundles/ + snapshot: _kernel/key.md + _kernel/insights.md + """ + walnut_path = os.path.abspath(walnut_path) + if not os.path.isdir(walnut_path): + raise FileNotFoundError(f"Walnut not found: {walnut_path}") + + staging = tempfile.mkdtemp(prefix='.walnut-stage-') + + try: + if scope == 'full': + _stage_full(walnut_path, staging) + elif scope == 'bundle': + if not bundle_names: + raise ValueError("Bundle scope requires at least one bundle name") + _stage_bundle(walnut_path, staging, bundle_names) + elif scope == 'snapshot': + _stage_snapshot(walnut_path, staging) + else: + raise ValueError(f"Unknown scope: {scope}") + except Exception: + shutil.rmtree(staging, ignore_errors=True) + raise + + return staging + + +def _copy_file(src, dst): + """Copy a file, creating parent dirs as needed. Strip active_sessions + from YAML/manifest files.""" + os.makedirs(os.path.dirname(dst), exist_ok=True) + base = os.path.basename(src) + # Strip active_sessions from manifest files + if base.endswith('.yaml') or base.endswith('.yml'): + with open(src, 'r', encoding='utf-8') as f: + content = f.read() + content = _strip_active_sessions(content) + with open(dst, 'w', encoding='utf-8') as f: + f.write(content) + else: + shutil.copy2(src, dst) + + +def _stage_full(walnut_path, staging): + """Stage files for full scope: _kernel/ (3 source files) + bundles/ + live context.""" + # _kernel/ source files: key.md, log.md, insights.md + kernel_src = os.path.join(walnut_path, '_kernel') + kernel_dst = os.path.join(staging, '_kernel') + kernel_files = ['key.md', 'log.md', 'insights.md'] + for kf in kernel_files: + src = os.path.join(kernel_src, kf) + if os.path.isfile(src): + _copy_file(src, os.path.join(kernel_dst, kf)) + + # All bundles/ + bundles_src = os.path.join(walnut_path, 'bundles') + if os.path.isdir(bundles_src): + _stage_tree(bundles_src, 
os.path.join(staging, 'bundles')) + + # Live context: everything at walnut root except _kernel/, bundles/, + # .alive/, and other system dirs + _stage_live_context(walnut_path, staging) + + +def _stage_bundle(walnut_path, staging, bundle_names): + """Stage files for bundle scope: _kernel/key.md + selected bundles.""" + # Always include key.md + key_src = os.path.join(walnut_path, '_kernel', 'key.md') + if os.path.isfile(key_src): + _copy_file(key_src, os.path.join(staging, '_kernel', 'key.md')) + + # Selected bundles + for bname in bundle_names: + bundle_src = os.path.join(walnut_path, 'bundles', bname) + if not os.path.isdir(bundle_src): + raise FileNotFoundError( + f"Bundle not found: {bname} (expected at {bundle_src})") + _stage_tree(bundle_src, os.path.join(staging, 'bundles', bname)) + + +def _stage_snapshot(walnut_path, staging): + """Stage files for snapshot scope: _kernel/key.md + _kernel/insights.md.""" + kernel_src = os.path.join(walnut_path, '_kernel') + kernel_dst = os.path.join(staging, '_kernel') + for fname in ('key.md', 'insights.md'): + src = os.path.join(kernel_src, fname) + if os.path.isfile(src): + _copy_file(src, os.path.join(kernel_dst, fname)) + + +def _stage_tree(src_dir, dst_dir): + """Recursively copy a directory tree, applying package exclusions.""" + src_dir = os.path.abspath(src_dir) + for root, dirs, files in os.walk(src_dir): + # Filter excluded directories in-place + dirs[:] = [d for d in dirs + if not d.startswith('._') and d not in _PACKAGE_EXCLUDE_NAMES] + + for fname in files: + full = os.path.join(root, fname) + rel = os.path.relpath(full, os.path.dirname(src_dir)) + if _should_exclude_package(rel): + continue + dst = os.path.join(dst_dir, os.path.relpath(full, src_dir)) + _copy_file(full, dst) + + +def _stage_live_context(walnut_path, staging): + """Stage live context files (everything outside _kernel/, bundles/, .alive/).""" + skip_dirs = {'_kernel', 'bundles', '.alive', '.git', '__pycache__', + 'node_modules', '.DS_Store'} + 
def _stage_live_context(walnut_path, staging):
    """Stage live context files (everything outside _kernel/, bundles/, .alive/)."""
    # Directory names to skip at the walnut root.  NOTE(review): '.DS_Store'
    # is a file, not a directory, but the startswith('.') test below already
    # covers it -- the set entry is redundant, kept for clarity.
    skip_dirs = {'_kernel', 'bundles', '.alive', '.git', '__pycache__',
                 'node_modules', '.DS_Store'}

    for item in os.listdir(walnut_path):
        # Skip system dirs and all dot-prefixed entries.
        if item in skip_dirs or item.startswith('.'):
            continue
        src = os.path.join(walnut_path, item)
        if os.path.isdir(src):
            _stage_tree(src, os.path.join(staging, item))
        elif os.path.isfile(src):
            # Root-level files go through the package exclusion filter too.
            rel = item
            if not _should_exclude_package(rel):
                _copy_file(src, os.path.join(staging, item))


def create_package(walnut_path, scope, output_path=None, bundle_names=None,
                   description='', note='', session_id='', engine='',
                   plugin_version='2.0.0', relay_info=None):
    """Create a .walnut package from a walnut directory.

    Parameters:
        walnut_path: path to the walnut directory
        scope: 'full', 'bundle', or 'snapshot'
        output_path: path for the output .walnut file (auto-generated if None)
        bundle_names: list of bundle names (required for bundle scope)
        description: human-readable description
        note: optional personal note
        session_id: session identifier for manifest
        engine: AI engine identifier for manifest
        plugin_version: alive plugin version
        relay_info: dict with 'repo' and 'sender' (optional)

    Returns a dict with:
        path: output file path
        size: file size in bytes
        manifest: parsed manifest dict
        warnings: list of warning strings

    Raises FileNotFoundError/ValueError from _stage_files for a missing
    walnut, missing bundle, or unknown scope.
    """
    walnut_path = os.path.abspath(walnut_path)
    walnut_name = os.path.basename(walnut_path)
    warnings = []

    # Stage files by scope (temp dir; cleaned up in the finally below).
    staging = _stage_files(walnut_path, scope, bundle_names)

    try:
        # Generate manifest.yaml inside the staging dir.
        generate_manifest(
            staging_dir=staging,
            scope=scope,
            walnut_name=walnut_name,
            bundles=bundle_names,
            description=description,
            encrypted=False,  # Encryption happens after packaging
            note=note,
            session_id=session_id,
            engine=engine,
            plugin_version=plugin_version,
            relay_info=relay_info,
        )

        # Generate output filename if not specified.
        # Default drop location is the user's Desktop.
        if output_path is None:
            today = datetime.datetime.now().strftime('%Y-%m-%d')
            if scope == 'bundle' and bundle_names:
                # At most the first three bundle names go into the filename.
                bundle_slug = '-'.join(bundle_names[:3])
                filename = f"{walnut_name}-bundle-{bundle_slug}-{today}.walnut"
            else:
                filename = f"{walnut_name}-{scope}-{today}.walnut"
            output_path = os.path.join(os.path.expanduser('~/Desktop'), filename)

        output_path = os.path.abspath(output_path)

        # Create tar.gz from staging directory.
        safe_tar_create(staging, output_path)

        # Size check: warn (do not fail) past the relay-friendly limit.
        size = os.path.getsize(output_path)
        if size > SIZE_WARN_BYTES:
            warnings.append(
                f"Package is {size / (1024*1024):.1f} MB -- exceeds 35 MB "
                f"recommended limit for GitHub relay (Contents API limit ~50 MB "
                f"with base64 overhead)")

        # Read back manifest for return value.
        manifest_path = os.path.join(staging, 'manifest.yaml')
        with open(manifest_path, 'r', encoding='utf-8') as f:
            manifest = parse_manifest(f.read())

        return {
            'path': output_path,
            'size': size,
            'manifest': manifest,
            'warnings': warnings,
        }

    finally:
        shutil.rmtree(staging, ignore_errors=True)
def extract_package(input_path, output_dir=None):
    """Extract and validate a .walnut package.

    Extracts to a staging directory, validates the manifest, verifies
    checksums, and checks for unlisted files.

    Parameters:
        input_path: path to the .walnut file
        output_dir: extraction target (temp dir if None)

    Returns a dict with:
        manifest: parsed and validated manifest dict
        staging_path: path to the extracted files
        warnings: list of warning strings

    Raises:
        FileNotFoundError: package file does not exist
        ValueError: missing/invalid manifest or checksum failure
    """
    input_path = os.path.abspath(input_path)
    warnings = []

    if not os.path.isfile(input_path):
        raise FileNotFoundError(f"Package not found: {input_path}")

    # Create output directory (caller-supplied or a fresh temp dir).
    if output_dir is None:
        output_dir = tempfile.mkdtemp(prefix='.walnut-extract-')
    else:
        output_dir = os.path.abspath(output_dir)
        os.makedirs(output_dir, exist_ok=True)

    # Extract archive (safe_tar_extract guards against Zip Slip paths).
    safe_tar_extract(input_path, output_dir)

    # Find and parse manifest.
    manifest_path = os.path.join(output_dir, 'manifest.yaml')
    if not os.path.isfile(manifest_path):
        raise ValueError("Package missing manifest.yaml")

    with open(manifest_path, 'r', encoding='utf-8') as f:
        manifest = parse_manifest(f.read())

    # Validate manifest structure (required fields, scope, files list).
    ok, errors = validate_manifest(manifest)
    if not ok:
        raise ValueError(
            f"Invalid manifest: {'; '.join(errors)}")

    # Verify checksums; a single mismatch or missing file is fatal.
    ok, failures = verify_checksums(manifest, output_dir)
    if not ok:
        details = []
        for fail in failures:
            if fail['error'] == 'file_missing':
                details.append(f"  missing: {fail['path']}")
            else:
                # Truncate digests to keep the error message readable.
                details.append(
                    f"  mismatch: {fail['path']} "
                    f"(expected {fail['expected'][:12]}..., "
                    f"got {fail['actual'][:12]}...)")
        raise ValueError(
            f"Checksum verification failed:\n" + '\n'.join(details))

    # Check for unlisted files -- a warning, not an error.
    unlisted = check_unlisted_files(manifest, output_dir)
    if unlisted:
        warnings.append(
            f"Package contains {len(unlisted)} unlisted file(s): " +
            ', '.join(unlisted[:5]))

    return {
        'manifest': manifest,
        'staging_path': output_dir,
        'warnings': warnings,
    }
def _get_openssl():
    """Return the detect_openssl() info dict, raising RuntimeError if the
    openssl binary is not found."""
    ssl = detect_openssl()
    if ssl['binary'] is None:
        raise RuntimeError("openssl not found on this system")
    return ssl


def encrypt_package(package_path, output_path=None, mode='passphrase',
                    recipient_pubkey=None):
    """Encrypt a .walnut package.

    Two modes:
        passphrase: AES-256-CBC with PBKDF2 (600k iterations).
                    Passphrase read from WALNUT_PASSPHRASE env var.
        rsa:        Random 256-bit AES key, encrypt payload with AES,
                    wrap key with RSA-OAEP-SHA256 (pkeyutl).
                    The AES step uses a raw -K/-iv key (no PBKDF2 --
                    the key is random, not password-derived).

    The output is a new .walnut file containing:
        manifest.yaml (cleartext, updated with encrypted: true)
        payload.enc   (encrypted inner tar.gz)
        payload.key   (RSA mode only -- wrapped AES key)

    Parameters:
        package_path: path to the unencrypted .walnut file
        output_path: path for the encrypted .walnut file
        mode: 'passphrase' or 'rsa'
        recipient_pubkey: path to recipient's RSA public key (rsa mode)

    Returns the path to the encrypted .walnut file.

    Raises:
        ValueError: unknown mode, missing passphrase/pubkey, bad package
        FileNotFoundError: recipient public key file does not exist
        RuntimeError: openssl missing/too old or an openssl step failed
    """
    package_path = os.path.abspath(package_path)
    ssl = _get_openssl()

    # Validate mode-specific prerequisites up front, before any temp work.
    if mode == 'passphrase':
        passphrase = os.environ.get('WALNUT_PASSPHRASE', '')
        if not passphrase:
            raise ValueError(
                "WALNUT_PASSPHRASE environment variable not set. "
                "Set it before encrypting: export WALNUT_PASSPHRASE='your passphrase'")
        if not ssl['supports_pbkdf2']:
            raise RuntimeError(
                f"OpenSSL {ssl['version']} does not support -pbkdf2. "
                f"Upgrade to LibreSSL >= 3.1 or OpenSSL >= 1.1.1")
    elif mode == 'rsa':
        if not recipient_pubkey:
            raise ValueError("RSA mode requires recipient_pubkey path")
        recipient_pubkey = os.path.abspath(recipient_pubkey)
        if not os.path.isfile(recipient_pubkey):
            raise FileNotFoundError(
                f"Recipient public key not found: {recipient_pubkey}")
        if not ssl['supports_pkeyutl']:
            raise RuntimeError(
                f"OpenSSL {ssl['version']} does not support pkeyutl")
    else:
        raise ValueError(f"Unknown encryption mode: {mode}")

    # Extract the package to get manifest and payload.
    work_dir = tempfile.mkdtemp(prefix='.walnut-encrypt-')

    try:
        # Extract original package.
        safe_tar_extract(package_path, work_dir)

        # Read manifest (stays cleartext in the encrypted output).
        manifest_path = os.path.join(work_dir, 'manifest.yaml')
        if not os.path.isfile(manifest_path):
            raise ValueError("Package missing manifest.yaml")

        with open(manifest_path, 'r', encoding='utf-8') as f:
            manifest_content = f.read()

        manifest = parse_manifest(manifest_content)

        # Create inner tar.gz of all files except the manifest.
        inner_dir = tempfile.mkdtemp(prefix='.walnut-inner-', dir=work_dir)
        for entry in manifest.get('files', []):
            src = os.path.join(work_dir, entry['path'].replace('/', os.sep))
            dst = os.path.join(inner_dir, entry['path'].replace('/', os.sep))
            if os.path.isfile(src):
                os.makedirs(os.path.dirname(dst), exist_ok=True)
                shutil.copy2(src, dst)

        inner_tar = os.path.join(work_dir, 'inner.tar.gz')
        safe_tar_create(inner_dir, inner_tar)

        # Build encrypted output staging directory.
        enc_staging = tempfile.mkdtemp(prefix='.walnut-enc-stage-', dir=work_dir)
        payload_enc = os.path.join(enc_staging, 'payload.enc')

        if mode == 'passphrase':
            # AES-256-CBC with PBKDF2, 600k iterations.  The passphrase is
            # handed to openssl via env: so it never appears in argv.
            # (fix: 'env:WALNUT_PASSPHRASE' was a placeholder-less f-string)
            proc = subprocess.run(
                [ssl['binary'], 'enc', '-aes-256-cbc', '-salt',
                 '-pbkdf2', '-iter', '600000',
                 '-in', inner_tar, '-out', payload_enc,
                 '-pass', 'env:WALNUT_PASSPHRASE'],
                capture_output=True, text=True, timeout=120,
                env={**os.environ, 'WALNUT_PASSPHRASE': passphrase})

            if proc.returncode != 0:
                raise RuntimeError(
                    f"Passphrase encryption failed: {proc.stderr}")

        elif mode == 'rsa':
            # Generate random 256-bit AES key.
            aes_key_path = os.path.join(work_dir, 'aes.key')
            proc = subprocess.run(
                [ssl['binary'], 'rand', '-out', aes_key_path, '32'],
                capture_output=True, text=True, timeout=10)
            if proc.returncode != 0:
                raise RuntimeError(
                    f"Failed to generate random key: {proc.stderr}")

            # Read the raw key bytes for use as hex key material.
            with open(aes_key_path, 'rb') as f:
                aes_key_bytes = f.read()
            aes_key_hex = aes_key_bytes.hex()

            # Encrypt inner tar with AES using the random key.
            # Use -K (hex key) and -iv instead of -pass to avoid PBKDF2
            # overhead on a random key.  Generate random IV.
            iv_proc = subprocess.run(
                [ssl['binary'], 'rand', '-hex', '16'],
                capture_output=True, text=True, timeout=10)
            if iv_proc.returncode != 0:
                raise RuntimeError(
                    f"Failed to generate IV: {iv_proc.stderr}")
            iv_hex = iv_proc.stdout.strip()

            proc = subprocess.run(
                [ssl['binary'], 'enc', '-aes-256-cbc',
                 '-K', aes_key_hex, '-iv', iv_hex,
                 '-in', inner_tar, '-out', payload_enc],
                capture_output=True, text=True, timeout=120)
            if proc.returncode != 0:
                raise RuntimeError(
                    f"AES encryption failed: {proc.stderr}")

            # Wrap AES key + IV with RSA-OAEP-SHA256.
            # Pack key (32 bytes) + iv (16 bytes hex -> 16 bytes raw).
            iv_bytes = bytes.fromhex(iv_hex)
            key_material = aes_key_bytes + iv_bytes
            key_material_path = os.path.join(work_dir, 'key_material.bin')
            with open(key_material_path, 'wb') as f:
                f.write(key_material)

            payload_key_path = os.path.join(enc_staging, 'payload.key')
            proc = subprocess.run(
                [ssl['binary'], 'pkeyutl', '-encrypt',
                 '-pubin', '-inkey', recipient_pubkey,
                 '-pkeyopt', 'rsa_padding_mode:oaep',
                 '-pkeyopt', 'rsa_oaep_md:sha256',
                 '-in', key_material_path, '-out', payload_key_path],
                capture_output=True, text=True, timeout=30)
            if proc.returncode != 0:
                raise RuntimeError(
                    f"RSA key wrapping failed: {proc.stderr}")

            # Securely clean up key material (zero-fill then unlink).
            _secure_delete(aes_key_path)
            _secure_delete(key_material_path)

        # Update manifest to indicate encryption.
        updated_manifest = _update_manifest_encrypted(manifest_content, True)
        with open(os.path.join(enc_staging, 'manifest.yaml'), 'w',
                  encoding='utf-8') as f:
            f.write(updated_manifest)

        # Create output .walnut (default: sibling '-encrypted' file).
        if output_path is None:
            base, ext = os.path.splitext(package_path)
            output_path = base + '-encrypted' + ext
        output_path = os.path.abspath(output_path)

        safe_tar_create(enc_staging, output_path)
        return output_path

    finally:
        shutil.rmtree(work_dir, ignore_errors=True)
text=True, timeout=30) + if proc.returncode != 0: + raise RuntimeError( + f"RSA key wrapping failed: {proc.stderr}") + + # Securely clean up key material + _secure_delete(aes_key_path) + _secure_delete(key_material_path) + + # Update manifest to indicate encryption + updated_manifest = _update_manifest_encrypted(manifest_content, True) + with open(os.path.join(enc_staging, 'manifest.yaml'), 'w', + encoding='utf-8') as f: + f.write(updated_manifest) + + # Create output .walnut + if output_path is None: + base, ext = os.path.splitext(package_path) + output_path = base + '-encrypted' + ext + output_path = os.path.abspath(output_path) + + safe_tar_create(enc_staging, output_path) + return output_path + + finally: + shutil.rmtree(work_dir, ignore_errors=True) + + +def decrypt_package(encrypted_path, output_path=None, private_key=None): + """Decrypt a .walnut package. + + Auto-detects mode: + - payload.key present -> RSA mode (requires private_key path) + - payload.enc only -> passphrase mode (reads WALNUT_PASSPHRASE env var) + + Parameters: + encrypted_path: path to the encrypted .walnut file + output_path: path for the decrypted .walnut file + private_key: path to RSA private key (for RSA mode) + + Returns the path to the decrypted .walnut file. 
+ """ + encrypted_path = os.path.abspath(encrypted_path) + ssl = _get_openssl() + + work_dir = tempfile.mkdtemp(prefix='.walnut-decrypt-') + + try: + # Extract encrypted package + safe_tar_extract(encrypted_path, work_dir) + + payload_enc = os.path.join(work_dir, 'payload.enc') + payload_key = os.path.join(work_dir, 'payload.key') + manifest_path = os.path.join(work_dir, 'manifest.yaml') + + if not os.path.isfile(payload_enc): + raise ValueError("Package is not encrypted (no payload.enc)") + if not os.path.isfile(manifest_path): + raise ValueError("Package missing manifest.yaml") + + inner_tar = os.path.join(work_dir, 'inner.tar.gz') + + if os.path.isfile(payload_key): + # RSA mode + if not private_key: + raise ValueError( + "RSA-encrypted package requires --private-key path") + private_key = os.path.abspath(private_key) + if not os.path.isfile(private_key): + raise FileNotFoundError( + f"Private key not found: {private_key}") + + # Unwrap AES key + IV with RSA + key_material_path = os.path.join(work_dir, 'key_material.bin') + proc = subprocess.run( + [ssl['binary'], 'pkeyutl', '-decrypt', + '-inkey', private_key, + '-pkeyopt', 'rsa_padding_mode:oaep', + '-pkeyopt', 'rsa_oaep_md:sha256', + '-in', payload_key, '-out', key_material_path], + capture_output=True, text=True, timeout=30) + if proc.returncode != 0: + raise RuntimeError( + f"RSA key unwrapping failed: {proc.stderr}") + + # Extract key (32 bytes) + IV (16 bytes) + with open(key_material_path, 'rb') as f: + key_material = f.read() + if len(key_material) < 48: + raise ValueError( + f"Invalid key material length: {len(key_material)} " + f"(expected 48 bytes)") + + aes_key_hex = key_material[:32].hex() + iv_hex = key_material[32:48].hex() + + # Decrypt with AES + proc = subprocess.run( + [ssl['binary'], 'enc', '-d', '-aes-256-cbc', + '-K', aes_key_hex, '-iv', iv_hex, + '-in', payload_enc, '-out', inner_tar], + capture_output=True, text=True, timeout=120) + if proc.returncode != 0: + raise RuntimeError( + f"AES 
decryption failed: {proc.stderr}") + + _secure_delete(key_material_path) + + else: + # Passphrase mode + passphrase = os.environ.get('WALNUT_PASSPHRASE', '') + if not passphrase: + raise ValueError( + "WALNUT_PASSPHRASE environment variable not set") + + proc = subprocess.run( + [ssl['binary'], 'enc', '-d', '-aes-256-cbc', + '-pbkdf2', '-iter', '600000', + '-in', payload_enc, '-out', inner_tar, + '-pass', f'env:WALNUT_PASSPHRASE'], + capture_output=True, text=True, timeout=120, + env={**os.environ, 'WALNUT_PASSPHRASE': passphrase}) + + if proc.returncode != 0: + raise RuntimeError( + f"Passphrase decryption failed (wrong passphrase?): " + f"{proc.stderr}") + + # Build decrypted output staging directory + dec_staging = tempfile.mkdtemp(prefix='.walnut-dec-stage-', dir=work_dir) + + # Extract inner tar to staging + safe_tar_extract(inner_tar, dec_staging) + + # Copy manifest (update encrypted flag) + with open(manifest_path, 'r', encoding='utf-8') as f: + manifest_content = f.read() + updated = _update_manifest_encrypted(manifest_content, False) + with open(os.path.join(dec_staging, 'manifest.yaml'), 'w', + encoding='utf-8') as f: + f.write(updated) + + # Create output .walnut + if output_path is None: + base, ext = os.path.splitext(encrypted_path) + # Strip -encrypted suffix if present + if base.endswith('-encrypted'): + base = base[:-len('-encrypted')] + output_path = base + '-decrypted' + ext + output_path = os.path.abspath(output_path) + + safe_tar_create(dec_staging, output_path) + return output_path + + finally: + shutil.rmtree(work_dir, ignore_errors=True) + + +def _update_manifest_encrypted(manifest_content, encrypted): + """Update the encrypted: field in manifest YAML content.""" + val = 'true' if encrypted else 'false' + # Try to replace existing field + updated = re.sub( + r'^(encrypted:\s*).*$', + f'encrypted: {val}', + manifest_content, + count=1, + flags=re.MULTILINE) + return updated + + +def _secure_delete(path): + """Overwrite file with zeros before 
deleting (best-effort).""" + try: + size = os.path.getsize(path) + with open(path, 'wb') as f: + f.write(b'\x00' * size) + f.flush() + os.fsync(f.fileno()) + os.unlink(path) + except OSError: + try: + os.unlink(path) + except OSError: + pass + + +# --------------------------------------------------------------------------- +# Manifest signing and verification +# --------------------------------------------------------------------------- + +def sign_manifest(manifest_path, private_key_path): + """Sign a manifest.yaml with RSA-SHA256 using pkeyutl. + + Reads the manifest, removes any existing signature block, signs the + remaining content, and appends the signature block. + + Parameters: + manifest_path: path to manifest.yaml to sign + private_key_path: path to sender's RSA private key + + Returns the updated manifest content with signature block. + """ + manifest_path = os.path.abspath(manifest_path) + private_key_path = os.path.abspath(private_key_path) + ssl = _get_openssl() + + if not ssl['supports_pkeyutl']: + raise RuntimeError( + f"OpenSSL {ssl['version']} does not support pkeyutl") + + with open(manifest_path, 'r', encoding='utf-8') as f: + content = f.read() + + # Remove any existing signature block + content_to_sign = _strip_signature_block(content) + + # Write content to temp file for signing + work_dir = tempfile.mkdtemp(prefix='.walnut-sign-') + try: + # First create a SHA-256 digest of the content + data_path = os.path.join(work_dir, 'manifest.data') + with open(data_path, 'w', encoding='utf-8') as f: + f.write(content_to_sign) + + digest_path = os.path.join(work_dir, 'manifest.dgst') + sig_path = os.path.join(work_dir, 'manifest.sig') + + # Hash the data + proc = subprocess.run( + [ssl['binary'], 'dgst', '-sha256', '-binary', + '-out', digest_path, data_path], + capture_output=True, text=True, timeout=10) + if proc.returncode != 0: + raise RuntimeError(f"Digest failed: {proc.stderr}") + + # Sign the digest with RSA + proc = subprocess.run( + 
[ssl['binary'], 'pkeyutl', '-sign', + '-inkey', private_key_path, + '-pkeyopt', 'digest:sha256', + '-in', digest_path, '-out', sig_path], + capture_output=True, text=True, timeout=10) + if proc.returncode != 0: + raise RuntimeError(f"Signing failed: {proc.stderr}") + + # Read signature and base64 encode + with open(sig_path, 'rb') as f: + sig_bytes = f.read() + sig_b64 = base64.b64encode(sig_bytes).decode('ascii') + + # Derive signer name from the key path (best effort) + # The caller should set this properly via the manifest + signer = os.path.basename(os.path.dirname( + os.path.dirname(private_key_path))) + if not signer or signer == '.': + signer = 'unknown' + + finally: + shutil.rmtree(work_dir, ignore_errors=True) + + # Append signature block + signed_content = content_to_sign.rstrip('\n') + '\n' + signed_content += '\nsignature:\n' + signed_content += ' algorithm: "RSA-SHA256"\n' + signed_content += f' signer: "{signer}"\n' + signed_content += f' value: "{sig_b64}"\n' + + # Write signed manifest + with open(manifest_path, 'w', encoding='utf-8') as f: + f.write(signed_content) + + return signed_content + + +def verify_manifest(manifest_path, public_key_path): + """Verify the RSA-SHA256 signature on a manifest.yaml. + + Parameters: + manifest_path: path to the signed manifest.yaml + public_key_path: path to the signer's RSA public key + + Returns (verified, signer) tuple. verified is True if signature is valid. 
+ """ + manifest_path = os.path.abspath(manifest_path) + public_key_path = os.path.abspath(public_key_path) + ssl = _get_openssl() + + if not ssl['supports_pkeyutl']: + raise RuntimeError( + f"OpenSSL {ssl['version']} does not support pkeyutl") + + with open(manifest_path, 'r', encoding='utf-8') as f: + content = f.read() + + # Parse manifest to get signature + manifest = parse_manifest(content) + sig_info = manifest.get('signature') + if not sig_info: + return (False, None) + + sig_b64 = sig_info.get('value', '') + signer = sig_info.get('signer', '') + + if not sig_b64: + return (False, signer) + + # Strip signature block to get the signed content + content_to_verify = _strip_signature_block(content) + + # Decode signature + try: + sig_bytes = base64.b64decode(sig_b64) + except Exception: + return (False, signer) + + work_dir = tempfile.mkdtemp(prefix='.walnut-verify-') + try: + data_path = os.path.join(work_dir, 'manifest.data') + with open(data_path, 'w', encoding='utf-8') as f: + f.write(content_to_verify) + + digest_path = os.path.join(work_dir, 'manifest.dgst') + sig_path = os.path.join(work_dir, 'manifest.sig') + + # Hash the data + proc = subprocess.run( + [ssl['binary'], 'dgst', '-sha256', '-binary', + '-out', digest_path, data_path], + capture_output=True, text=True, timeout=10) + if proc.returncode != 0: + return (False, signer) + + # Write signature to file + with open(sig_path, 'wb') as f: + f.write(sig_bytes) + + # Verify with public key + proc = subprocess.run( + [ssl['binary'], 'pkeyutl', '-verify', + '-pubin', '-inkey', public_key_path, + '-pkeyopt', 'digest:sha256', + '-in', digest_path, '-sigfile', sig_path], + capture_output=True, text=True, timeout=10) + + verified = proc.returncode == 0 + return (verified, signer) + + finally: + shutil.rmtree(work_dir, ignore_errors=True) + + +def _strip_signature_block(content): + """Remove the signature: block from manifest content. 
+ + Returns the content without the signature section (for signing/verification). + """ + lines = content.split('\n') + result = [] + in_sig = False + for line in lines: + if re.match(r'^signature\s*:', line): + in_sig = True + continue + if in_sig: + if line and (line[0] == ' ' or line[0] == '\t'): + continue + in_sig = False + result.append(line) + + # Remove trailing blank lines that were before the signature block + while result and result[-1].strip() == '': + result.pop() + + return '\n'.join(result) + '\n' + + # --------------------------------------------------------------------------- # CLI (smoke tests) # --------------------------------------------------------------------------- @@ -518,6 +1765,88 @@ def _cli(): fm = parse_yaml_frontmatter(content) print(json.dumps(fm, indent=2, default=str)) + elif cmd == 'create': + # Parse arguments + scope = None + walnut = None + bundles = [] + output = None + description = '' + i = 2 + while i < len(sys.argv): + arg = sys.argv[i] + if arg == '--scope' and i + 1 < len(sys.argv): + scope = sys.argv[i + 1] + i += 2 + elif arg == '--walnut' and i + 1 < len(sys.argv): + walnut = sys.argv[i + 1] + i += 2 + elif arg == '--bundle' and i + 1 < len(sys.argv): + bundles.append(sys.argv[i + 1]) + i += 2 + elif arg == '--output' and i + 1 < len(sys.argv): + output = sys.argv[i + 1] + i += 2 + elif arg == '--description' and i + 1 < len(sys.argv): + description = sys.argv[i + 1] + i += 2 + else: + print(f"Unknown argument: {arg}", file=sys.stderr) + sys.exit(1) + + if not scope or not walnut: + print("Usage: alive-p2p.py create --scope " + "--walnut [--bundle ...] 
[--output ] " + "[--description ]", file=sys.stderr) + sys.exit(1) + + result = create_package( + walnut_path=walnut, + scope=scope, + output_path=output, + bundle_names=bundles if bundles else None, + description=description) + + print(f"Created: {result['path']}") + print(f"Size: {result['size']} bytes ({result['size'] / 1024:.1f} KB)") + print(f"Scope: {result['manifest'].get('scope')}") + file_count = len(result['manifest'].get('files', [])) + print(f"Files: {file_count}") + for w in result.get('warnings', []): + print(f"WARNING: {w}", file=sys.stderr) + + elif cmd == 'extract': + input_path = None + output_dir = None + i = 2 + while i < len(sys.argv): + arg = sys.argv[i] + if arg == '--input' and i + 1 < len(sys.argv): + input_path = sys.argv[i + 1] + i += 2 + elif arg == '--output' and i + 1 < len(sys.argv): + output_dir = sys.argv[i + 1] + i += 2 + else: + print(f"Unknown argument: {arg}", file=sys.stderr) + sys.exit(1) + + if not input_path: + print("Usage: alive-p2p.py extract --input " + "[--output ]", file=sys.stderr) + sys.exit(1) + + result = extract_package(input_path, output_dir) + print(f"Extracted to: {result['staging_path']}") + m = result['manifest'] + print(f"Format: {m.get('format_version')}") + print(f"Scope: {m.get('scope')}") + print(f"Source: {m.get('source', {}).get('walnut', 'unknown')}") + print(f"Files: {len(m.get('files', []))}") + print(f"Encrypted: {m.get('encrypted', False)}") + for w in result.get('warnings', []): + print(f"WARNING: {w}", file=sys.stderr) + else: print(f"Unknown command: {cmd}", file=sys.stderr) print(__doc__) From b6aa71a7faf3206ff86ef0af0ff7b8c9fc703ee4 Mon Sep 17 00:00:00 2001 From: Patrick Brosnan Date: Wed, 1 Apr 2026 14:14:46 +1100 Subject: [PATCH 04/10] feat(alive): add relay probe and v1 config migration scripts - relay-probe.py: checks GitHub relay via `gh api` for new commits, fetches into sparse clone, counts pending .walnut inbox packages, checks peer reachability, writes state.json atomically. 
Exits 0 always (safe for SessionStart hook with 10s timeout). - migrate-relay.py: converts v1 relay.yaml (flat or nested format) to v2 relay.json. Handles missing v1, already-migrated, and partial config. Records migration metadata. - Both import atomic_json_read/write from alive-p2p.py via importlib (hyphenated filename). Task: fn-5-dof.4 Co-Authored-By: Claude Opus 4.6 (1M context) --- plugins/alive/scripts/migrate-relay.py | 305 +++++++++++++++++++++++++ plugins/alive/scripts/relay-probe.py | 255 +++++++++++++++++++++ 2 files changed, 560 insertions(+) create mode 100644 plugins/alive/scripts/migrate-relay.py create mode 100644 plugins/alive/scripts/relay-probe.py diff --git a/plugins/alive/scripts/migrate-relay.py b/plugins/alive/scripts/migrate-relay.py new file mode 100644 index 0000000..f53358a --- /dev/null +++ b/plugins/alive/scripts/migrate-relay.py @@ -0,0 +1,305 @@ +#!/usr/bin/env python3 +"""Migrate v1 relay.yaml to v2 relay.json. + +v1 stored relay config at $HOME/.alive/relay.yaml (flat YAML file). +v2 uses $HOME/.alive/relay/relay.json (JSON in a directory). + +This script: +- Reads v1 relay.yaml (using alive-p2p.py's YAML frontmatter parser as + a basic YAML reader, since we're stdlib-only and can't import PyYAML) +- Converts to v2 relay.json schema +- Preserves: repo, github_username, peers (with status), key paths +- Handles: relay.yaml not found (skip), already migrated (skip), + partial config (warn on stderr) + +Usage: + python3 migrate-relay.py + python3 migrate-relay.py --v1-path ~/.alive/relay.yaml \ + --v2-path ~/.alive/relay/relay.json + +Task: fn-5-dof.4 +""" + +import argparse +import datetime +import os +import re +import sys + +# Import utilities from alive-p2p.py (same directory). +# The filename uses a hyphen, so we need importlib to load it. 
# Import utilities from alive-p2p.py (same directory).
# The filename uses a hyphen, so we need importlib to load it.
import importlib.util as _ilu

_script_dir = os.path.dirname(os.path.abspath(__file__))
_p2p_path = os.path.join(_script_dir, 'alive-p2p.py')
# Load the sibling module under a valid identifier ('alive_p2p').
_spec = _ilu.spec_from_file_location('alive_p2p', _p2p_path)
_mod = _ilu.module_from_spec(_spec)
_spec.loader.exec_module(_mod)

# Re-export the two atomic JSON helpers this script uses.
atomic_json_read = _mod.atomic_json_read
atomic_json_write = _mod.atomic_json_write


# ---------------------------------------------------------------------------
# Minimal YAML parser (stdlib only, handles relay.yaml structure)
# ---------------------------------------------------------------------------

def _parse_simple_yaml(text):
    """Parse simple YAML with nested dicts and one level of list-of-dicts.

    Handles the specific structure of v1 relay.yaml:

        relay:
          repo: owner/repo
          github_username: user
        peers:
          - github: user2
            name: Name
            relay: user2/repo
            person_walnut: People/name
            added: 2026-01-01
            status: accepted

    Also handles the flat variant (no nesting wrapper):

        repo: owner/repo
        github_username: user
        peers:
          - github: user2
            ...

    Returns a dict. Not a general YAML parser -- just enough for
    relay.yaml migration.

    NOTE(review): the indentation widths in the regexes below (2-space
    list items, 4-space continuations, 2-space nested keys) match the
    docstring example above -- confirm against real v1 relay.yaml files.
    """
    result = {}
    # current_block_key tracks a top-level key that has indented children
    # (either a dict or a list). current_block_type is 'list' or 'dict'.
    current_block_key = None
    current_block_type = None
    current_item = None  # current list item being built

    for line in text.splitlines():
        stripped = line.strip()

        # Skip empty lines and comments
        if not stripped or stripped.startswith('#'):
            continue

        # List item start ('  - key: value')
        list_match = re.match(r'^  - ([\w_]+):\s*(.*)', line)
        if list_match and current_block_key:
            # We're in a list block now
            if current_block_type == 'dict':
                # Convert from dict assumption to list
                current_block_type = 'list'
                # Keep any dict items already parsed -- they were wrong.
                # In practice this won't happen with relay.yaml's structure.
                result[current_block_key] = []
            # Save previous list item
            if current_item is not None:
                result[current_block_key].append(current_item)
            current_item = {}
            key, value = list_match.group(1), list_match.group(2).strip()
            current_item[key] = _coerce_yaml_value(value)
            continue

        # List item continuation ('    key: value', 4+ spaces)
        cont_match = re.match(r'^    ([\w_]+):\s*(.*)', line)
        if cont_match and current_item is not None and current_block_type == 'list':
            key, value = cont_match.group(1), cont_match.group(2).strip()
            current_item[key] = _coerce_yaml_value(value)
            continue

        # Nested dict value ('  key: value', 2 spaces, no dash)
        nested_match = re.match(r'^  ([\w_]+):\s+(.*)', line)
        if nested_match and current_block_key and current_block_type == 'dict':
            key, value = nested_match.group(1), nested_match.group(2).strip()
            result[current_block_key][key] = _coerce_yaml_value(value)
            continue

        # Top-level key with block value (key:\n -- no inline value)
        top_block = re.match(r'^([\w_]+):\s*$', line)
        if top_block:
            # Finalize previous block
            if current_block_key and current_block_type == 'list' and current_item is not None:
                result[current_block_key].append(current_item)
                current_item = None
            current_block_key = top_block.group(1)
            # Assume dict until we see a list item
            current_block_type = 'dict'
            result[current_block_key] = {}
            current_item = None
            continue

        # Top-level key: value (inline)
        top_match = re.match(r'^([\w_]+):\s+(.+)', line)
        if top_match:
            # Finalize previous block
            if current_block_key and current_block_type == 'list' and current_item is not None:
                result[current_block_key].append(current_item)
                current_item = None
            current_block_key = None
            current_block_type = None
            key, value = top_match.group(1), top_match.group(2).strip()
            result[key] = _coerce_yaml_value(value)
            continue

    # Finalize last block
    if current_block_key and current_block_type == 'list' and current_item is not None:
        result[current_block_key].append(current_item)

    return result


def _coerce_yaml_value(value):
    """Coerce a YAML scalar string to its Python type."""
    if not value:
        return ''
    # Remove matching surrounding quotes (single or double).
    if (value.startswith('"') and value.endswith('"')) or \
       (value.startswith("'") and value.endswith("'")):
        return value[1:-1]
    # Booleans (YAML 1.1 style true/yes/false/no).
    if value.lower() in ('true', 'yes'):
        return True
    if value.lower() in ('false', 'no'):
        return False
    # Integers; anything non-numeric falls through as a plain string.
    try:
        return int(value)
    except ValueError:
        pass
    return value
+ relay_block = v1_config.get('relay', {}) + if not isinstance(relay_block, dict): + relay_block = {} + + def _get(key): + """Get a value from top-level or relay: block.""" + return v1_config.get(key, '') or relay_block.get(key, '') + + # Build v2 JSON structure + v2_config = { + 'repo': _get('repo'), + 'github_username': _get('github_username'), + 'peers': [], + } + + # Warn on missing fields + if not v2_config['repo']: + print('warning: v1 relay.yaml missing "repo" field', file=sys.stderr) + if not v2_config['github_username']: + print('warning: v1 relay.yaml missing "github_username" field', + file=sys.stderr) + + # Migrate peers + v1_peers = v1_config.get('peers', []) + for peer in v1_peers: + if not isinstance(peer, dict): + continue + v2_peer = { + 'github': peer.get('github', ''), + 'name': peer.get('name', ''), + 'relay': peer.get('relay', ''), + 'person_walnut': peer.get('person_walnut', ''), + 'added': str(peer.get('added', '')), + 'status': peer.get('status', 'pending'), + } + v2_config['peers'].append(v2_peer) + + # Write v2 JSON atomically + atomic_json_write(v2_path, v2_config) + + # Record migration metadata + meta_path = os.path.join(os.path.dirname(v2_path), '.migration-meta.json') + meta = { + 'migrated_from': v1_path, + 'migrated_at': datetime.datetime.now( + datetime.timezone.utc).isoformat(timespec='seconds'), + 'v1_peer_count': len(v1_peers), + 'v2_peer_count': len(v2_config['peers']), + } + atomic_json_write(meta_path, meta) + + return 'migrated' + + +# --------------------------------------------------------------------------- +# CLI +# --------------------------------------------------------------------------- + +def main(): + default_v1 = os.path.join(os.path.expanduser('~'), '.alive', 'relay.yaml') + default_v2 = os.path.join( + os.path.expanduser('~'), '.alive', 'relay', 'relay.json') + + parser = argparse.ArgumentParser( + description='Migrate v1 relay.yaml to v2 relay.json') + parser.add_argument( + '--v1-path', default=default_v1, + 
help=f'Path to v1 relay.yaml (default: {default_v1})') + parser.add_argument( + '--v2-path', default=default_v2, + help=f'Path to v2 relay.json (default: {default_v2})') + + args = parser.parse_args() + result = migrate(args.v1_path, args.v2_path) + + if result == 'migrated': + print(f'Migrated {args.v1_path} -> {args.v2_path}') + elif result == 'skipped:no-v1': + print(f'No v1 config at {args.v1_path} -- nothing to migrate') + elif result == 'skipped:already-migrated': + print(f'v2 config already exists at {args.v2_path} -- skipping') + elif result.startswith('error:'): + print(f'Migration error: {result}', file=sys.stderr) + # Still exit 0 -- migration failure is non-fatal + else: + print(f'Unknown result: {result}', file=sys.stderr) + + sys.exit(0) + + +if __name__ == '__main__': + main() diff --git a/plugins/alive/scripts/relay-probe.py b/plugins/alive/scripts/relay-probe.py new file mode 100644 index 0000000..7d1c5d7 --- /dev/null +++ b/plugins/alive/scripts/relay-probe.py @@ -0,0 +1,255 @@ +#!/usr/bin/env python3 +"""Relay probe: check GitHub relay for new commits and pending packages. + +Runs from the alive-relay-check SessionStart hook with a 10s timeout. +Reads relay.json, probes the relay repo via `gh api`, counts pending +.walnut files in the user's inbox, checks peer reachability, and writes +results to state.json atomically. + +MUST exit 0 always -- network failures are expected and must not block +session start. + +Usage: + python3 relay-probe.py --config ~/.alive/relay/relay.json \ + --state ~/.alive/relay/state.json + +Task: fn-5-dof.4 +""" + +import argparse +import datetime +import os +import subprocess +import sys + +# Import atomic JSON utilities from alive-p2p.py (same directory). +# The filename uses a hyphen, so we need importlib to load it. 
+import importlib.util as _ilu + +_script_dir = os.path.dirname(os.path.abspath(__file__)) +_p2p_path = os.path.join(_script_dir, 'alive-p2p.py') +_spec = _ilu.spec_from_file_location('alive_p2p', _p2p_path) +_mod = _ilu.module_from_spec(_spec) +_spec.loader.exec_module(_mod) + +atomic_json_read = _mod.atomic_json_read +atomic_json_write = _mod.atomic_json_write + + +# --------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- + +def _now_iso(): + """Return current UTC time as ISO 8601 string.""" + return datetime.datetime.now(datetime.timezone.utc).isoformat( + timespec='seconds') + + +def _run_gh(args, timeout=5): + """Run a `gh` CLI command. Returns (stdout, success).""" + try: + proc = subprocess.run( + ['gh'] + args, + capture_output=True, text=True, timeout=timeout) + return proc.stdout.strip(), proc.returncode == 0 + except (subprocess.TimeoutExpired, FileNotFoundError, OSError): + return '', False + + +def _run_git(args, timeout=5): + """Run a `git` command. Returns (stdout, success).""" + try: + proc = subprocess.run( + ['git'] + args, + capture_output=True, text=True, timeout=timeout) + return proc.stdout.strip(), proc.returncode == 0 + except (subprocess.TimeoutExpired, FileNotFoundError, OSError): + return '', False + + +# --------------------------------------------------------------------------- +# Probe: check relay repo for new commits +# --------------------------------------------------------------------------- + +def probe_relay_commit(repo): + """Check the latest commit SHA on the relay repo's main branch. + + Uses `gh api` which is the fastest way to check -- single HTTPS + request, no git operations. + + Returns the commit SHA string, or None on failure. 
+ """ + stdout, ok = _run_gh([ + 'api', f'repos/{repo}/git/refs/heads/main', + '--jq', '.object.sha']) + if ok and stdout: + return stdout + return None + + +# --------------------------------------------------------------------------- +# Probe: count pending .walnut files in inbox +# --------------------------------------------------------------------------- + +def count_pending_packages(clone_dir, username): + """Count .walnut files in inbox/{username}/ of the local clone. + + The clone is a sparse checkout that only includes inbox/{username}/ + and keys/. Returns 0 if the directory doesn't exist. + """ + inbox_dir = os.path.join(clone_dir, 'inbox', username) + if not os.path.isdir(inbox_dir): + return 0 + + count = 0 + try: + for entry in os.listdir(inbox_dir): + if entry.endswith('.walnut'): + count += 1 + except OSError: + pass + return count + + +# --------------------------------------------------------------------------- +# Probe: fetch latest from relay +# --------------------------------------------------------------------------- + +def fetch_relay(clone_dir): + """Fetch latest from origin and reset to origin/main. + + Uses --depth=1 to keep it fast. Returns True on success. + """ + _, fetch_ok = _run_git( + ['-C', clone_dir, 'fetch', '--depth=1', 'origin', 'main'], + timeout=8) + if not fetch_ok: + return False + + _, reset_ok = _run_git( + ['-C', clone_dir, 'reset', '--hard', 'origin/main'], + timeout=5) + return reset_ok + + +# --------------------------------------------------------------------------- +# Probe: peer reachability +# --------------------------------------------------------------------------- + +def check_peer_reachability(peers): + """Check if each peer's relay repo is reachable via `gh api`. + + Returns a dict of {github_username: {reachable, checked, relay_repo}}. + Skips peers without a relay field. 
+ """ + reachability = {} + for peer in peers: + github = peer.get('github', '') + relay = peer.get('relay', '') + if not github or not relay: + continue + + # Quick check: does the repo exist and is it accessible? + stdout, ok = _run_gh([ + 'api', f'repos/{relay}', + '--jq', '.full_name'], + timeout=3) + + reachability[github] = { + 'reachable': ok and bool(stdout), + 'checked': _now_iso(), + 'relay_repo': relay, + } + + return reachability + + +# --------------------------------------------------------------------------- +# Main probe +# --------------------------------------------------------------------------- + +def run_probe(config_path, state_path): + """Run the full relay probe and write results to state.json. + + Steps: + 1. Read relay.json config + 2. Check for new commits on relay repo (gh api) + 3. If changed: fetch latest into local clone + 4. Count pending .walnut files in inbox + 5. Check peer reachability + 6. Write state.json atomically + """ + config = atomic_json_read(config_path) + if not config: + # No relay configured -- nothing to probe + return + + repo = config.get('repo', '') + username = config.get('github_username', '') + peers = config.get('peers', []) + + if not repo or not username: + return + + # Read existing state for comparison + state = atomic_json_read(state_path) + old_commit = state.get('last_commit', '') + + # Step 1: Check latest commit on relay repo + new_commit = probe_relay_commit(repo) + + # Determine the clone directory + relay_dir = os.path.dirname(os.path.abspath(config_path)) + clone_dir = os.path.join(relay_dir, 'clone') + + # Step 2: If commit changed (or first run), fetch + fetched = False + if new_commit and new_commit != old_commit and os.path.isdir(clone_dir): + fetched = fetch_relay(clone_dir) + + # Step 3: Count pending packages (from local clone) + pending = count_pending_packages(clone_dir, username) + + # Step 4: Check peer reachability + reachability = check_peer_reachability(peers) + + # Step 5: Build 
and write state + new_state = { + 'last_sync': _now_iso(), + 'last_commit': new_commit if new_commit else old_commit, + 'pending_packages': pending, + 'peer_reachability': reachability, + } + + atomic_json_write(state_path, new_state) + + +# --------------------------------------------------------------------------- +# CLI +# --------------------------------------------------------------------------- + +def main(): + parser = argparse.ArgumentParser( + description='Probe GitHub relay for new packages and peer reachability') + parser.add_argument( + '--config', required=True, + help='Path to relay.json') + parser.add_argument( + '--state', required=True, + help='Path to state.json (written atomically)') + + args = parser.parse_args() + + try: + run_probe(args.config, args.state) + except Exception: + # Must exit 0 always -- this runs in a SessionStart hook + # with a 10s timeout. Network failures are expected. + pass + + sys.exit(0) + + +if __name__ == '__main__': + main() From d200048d1540c8d0ce73469affc59caa7d4ab394 Mon Sep 17 00:00:00 2001 From: Patrick Brosnan Date: Wed, 1 Apr 2026 14:17:33 +1100 Subject: [PATCH 05/10] feat(alive): add alive-relay-check SessionStart hook - Probes GitHub relay for pending .walnut packages at session start - Fast path exits immediately when no relay.json configured - Uses inline python3 to read state.json (not json_field which reads HOOK_INPUT) - Injects notification with package count when pending > 0, silent otherwise - Exits 0 on any error (network, missing python3, corrupt config) - Registered in hooks.json startup matcher with 10s timeout Task: fn-5-dof.5 --- plugins/alive/hooks/hooks.json | 7 ++- .../alive/hooks/scripts/alive-relay-check.sh | 62 +++++++++++++++++++ 2 files changed, 68 insertions(+), 1 deletion(-) create mode 100755 plugins/alive/hooks/scripts/alive-relay-check.sh diff --git a/plugins/alive/hooks/hooks.json b/plugins/alive/hooks/hooks.json index 2c23af4..ecbe1a4 100644 --- a/plugins/alive/hooks/hooks.json +++ 
b/plugins/alive/hooks/hooks.json @@ -1,5 +1,5 @@ { - "description": "ALIVE Context System v2 — 14 hooks. Session hooks read/write .alive/_squirrels/. All read stdin JSON for session_id.", + "description": "ALIVE Context System v2 — 15 hooks. Session hooks read/write .alive/_squirrels/. All read stdin JSON for session_id.", "hooks": { "SessionStart": [ { @@ -14,6 +14,11 @@ "type": "command", "command": "bash ${CLAUDE_PLUGIN_ROOT}/hooks/scripts/alive-repo-detect.sh", "timeout": 10 + }, + { + "type": "command", + "command": "bash ${CLAUDE_PLUGIN_ROOT}/hooks/scripts/alive-relay-check.sh", + "timeout": 10 } ] }, diff --git a/plugins/alive/hooks/scripts/alive-relay-check.sh b/plugins/alive/hooks/scripts/alive-relay-check.sh new file mode 100755 index 0000000..1d07813 --- /dev/null +++ b/plugins/alive/hooks/scripts/alive-relay-check.sh @@ -0,0 +1,62 @@ +#!/bin/bash +# Hook: Relay Check -- SessionStart (startup) +# Probes the GitHub relay for pending .walnut packages and injects a +# notification into the session. Silent when no relay configured or no +# pending packages. Exits 0 on ANY error. + +set -euo pipefail + +SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)" +source "$SCRIPT_DIR/alive-common.sh" + +# Read stdin JSON -- extracts session_id, cwd, event name +read_hook_input + +# Find world root (needed for alive-common.sh contract, though relay +# config lives at $HOME/.alive/relay/ not under the world) +find_world || exit 0 + +# Fast path: no relay configured -- exit immediately +RELAY_DIR="$HOME/.alive/relay" +RELAY_CONFIG="$RELAY_DIR/relay.json" +[ -f "$RELAY_CONFIG" ] || exit 0 + +# Resolve relay-probe.py location +PLUGIN_ROOT="${CLAUDE_PLUGIN_ROOT:-$(cd "$SCRIPT_DIR/../.." && pwd)}" +PROBE_SCRIPT="$PLUGIN_ROOT/scripts/relay-probe.py" +[ -f "$PROBE_SCRIPT" ] || exit 0 + +# Run probe -- writes state.json atomically. Errors are swallowed by +# relay-probe.py internally (it always exits 0). 
+STATE_JSON="$RELAY_DIR/state.json" +python3 "$PROBE_SCRIPT" --config "$RELAY_CONFIG" --state "$STATE_JSON" 2>/dev/null || exit 0 + +# Read pending_packages from state.json via inline python3 +# (json_field reads HOOK_INPUT, not arbitrary files) +PENDING=$(python3 -c " +import json, sys +try: + with open('$STATE_JSON') as f: + print(json.load(f).get('pending_packages', 0)) +except Exception: + print(0) +" 2>/dev/null || echo 0) + +# Silent exit if nothing pending +[ "$PENDING" -gt 0 ] 2>/dev/null || exit 0 + +# Build notification +MSG="You have ${PENDING} walnut package(s) waiting. Run /alive:receive to import." +ESCAPED=$(escape_for_json "$MSG") + +cat < Date: Wed, 1 Apr 2026 14:34:03 +1100 Subject: [PATCH 06/10] feat(alive): add alive:relay skill for P2P relay management - Setup: RSA-4096 keypair generation, GitHub private repo creation, sparse checkout clone, relay.json + state.json config - Add peer: collaborator invite, inbox directory, relay.json update, person walnut resolution - Accept: invitation listing, public key fetch, bidirectional auto-invite with confirmation - Status: relay health, key permissions, peer reachability, clone state - All external actions gated by confirmation prompts (external guard hook only catches mcp__ tools, not Bash) - V1 migration support via migrate-relay.py - Bordered blocks with squirrel visual conventions throughout Task: fn-5-dof.6 --- plugins/alive/skills/relay/SKILL.md | 561 ++++++++++++++++++++++++++++ 1 file changed, 561 insertions(+) create mode 100644 plugins/alive/skills/relay/SKILL.md diff --git a/plugins/alive/skills/relay/SKILL.md b/plugins/alive/skills/relay/SKILL.md new file mode 100644 index 0000000..624a6c9 --- /dev/null +++ b/plugins/alive/skills/relay/SKILL.md @@ -0,0 +1,561 @@ +--- +name: alive:relay +description: "Set up and manage a private GitHub relay for automatic .walnut package delivery between peers. 
Handles relay creation (private repo + RSA keypair), peer invitations, invitation acceptance, and status. The transport layer for P2P sharing -- extends alive:share with relay push, alive:receive with relay pull." +user-invocable: true +--- + +# Relay + +Private GitHub-based mailbox for .walnut package delivery. Each peer has their own relay repo. Packages are RSA-encrypted before push -- the relay never sees plaintext. + +Setup creates identity (RSA-4096 keypair + GitHub private repo). Adding a peer invites them as collaborator. Accepting creates the bidirectional link. After setup, `alive:share` can push directly and `alive:receive` can pull automatically. + +--- + +## Subcommand Routing + +| Invocation | Route | +|---|---| +| `/alive:relay` (no args) | Quick Status | +| `/alive:relay setup` | Setup | +| `/alive:relay add ` | Add Peer | +| `/alive:relay accept` | Accept Invitation | +| `/alive:relay status` | Detailed Status | + +Before any subcommand, check for v1 migration: if `$HOME/.alive/relay.yaml` exists but `$HOME/.alive/relay/relay.json` does not, offer migration first (see V1 Migration at bottom). + +--- + +## Quick Status (no args) + +Check if `$HOME/.alive/relay/relay.json` exists. If not: + +``` +╭─ 🐿️ relay +│ +│ No relay configured. Run /alive:relay setup to create one. +│ +│ A relay is a private GitHub repo that acts as a mailbox for .walnut +│ packages. Peers you invite can push encrypted packages to your inbox. +│ You push to theirs. The relay never sees plaintext. +╰─ +``` + +If configured, read relay.json and state.json, present summary: + +``` +╭─ 🐿️ relay +│ +│ Repo: patrickbrosnan11-spec/walnut-relay +│ Peers: 1 (ben-flint -- accepted) +│ Pending packages: 0 +│ Last sync: 2 minutes ago +│ +│ /alive:relay status for detail, or /alive:relay add to invite. +╰─ +``` + +--- + +## Setup + +One-time flow. Creates keypair, GitHub repo, sparse clone, and local config. + +### Prerequisites + +1. 
`gh` CLI installed and authenticated: `gh auth status` +2. No existing relay.json (if exists, show config and confirm reconfigure -- destructive) + +If gh not authenticated, stop: explain `gh auth login`. + +### Step 1: Detect Username + Confirm + +```bash +gh api user --jq '.login' +``` + +``` +╭─ 🐿️ relay setup +│ +│ GitHub account: patrickbrosnan11-spec +│ +│ This will: +│ 1. Generate an RSA-4096 keypair for package encryption +│ 2. Create a private repo patrickbrosnan11-spec/walnut-relay +│ 3. Commit your public key and set up the inbox structure +│ 4. Configure local relay at $HOME/.alive/relay/ +│ +│ ▸ proceed? +│ 1. Yes, set up the relay +│ 2. Use a different GitHub account +│ 3. Cancel +╰─ +``` + +**Wait for confirmation.** This creates a GitHub repo. + +Option 2: ask for username, apply account routing per platforms.md. + +### Step 2: Generate RSA-4096 Keypair + +```bash +mkdir -p "$HOME/.alive/relay/keys/peers" +openssl genpkey -algorithm RSA -pkeyopt rsa_keygen_bits:4096 \ + -out "$HOME/.alive/relay/keys/private.pem" +chmod 600 "$HOME/.alive/relay/keys/private.pem" +openssl pkey -in "$HOME/.alive/relay/keys/private.pem" \ + -pubout -out "$HOME/.alive/relay/keys/public.pem" +``` + +Verify permissions: `stat -f "%Lp" "$HOME/.alive/relay/keys/private.pem"` (macOS) or `stat -c "%a"` (Linux). Must be 600. + +### Step 3: Create GitHub Repo + +```bash +gh repo create walnut-relay --private \ + --description "Walnut P2P relay -- encrypted package mailbox" --clone=false +``` + +If repo already exists, ask whether to use existing or fail. + +### Step 4: Initialize + Push + +```bash +WORK_DIR=$(mktemp -d) +GITHUB_USER="" + +cd "$WORK_DIR" +gh repo clone "${GITHUB_USER}/walnut-relay" . + +mkdir -p "keys" "inbox/${GITHUB_USER}" +cp "$HOME/.alive/relay/keys/public.pem" "keys/${GITHUB_USER}.pem" +touch "inbox/${GITHUB_USER}/.gitkeep" + +cat > README.md << 'EOF' +# Walnut Relay + +Private relay for encrypted .walnut package delivery. +Do not modify manually. 
Managed by the alive plugin. + +- `keys/` -- public keys (one per peer) +- `inbox//` -- encrypted packages waiting for pickup +EOF + +git add -A && git commit -m "Initialize walnut relay" && git push origin main + +cd / && rm -rf "$WORK_DIR" +``` + +### Step 5: Sparse Checkout Clone + +```bash +CLONE_DIR="$HOME/.alive/relay/clone" +git clone --filter=blob:none --no-checkout \ + "https://github.com/${GITHUB_USER}/walnut-relay.git" "$CLONE_DIR" +cd "$CLONE_DIR" +git sparse-checkout init --cone +git sparse-checkout set "inbox/${GITHUB_USER}" "keys" +git checkout main +``` + +### Step 6: Write Config Files + +```bash +# relay.json +cat > "$HOME/.alive/relay/relay.json" << JSONEOF +{ + "repo": "${GITHUB_USER}/walnut-relay", + "github_username": "${GITHUB_USER}", + "peers": [] +} +JSONEOF + +# state.json +cat > "$HOME/.alive/relay/state.json" << JSONEOF +{ + "last_sync": null, + "last_commit": null, + "pending_packages": 0, + "peer_reachability": {} +} +JSONEOF +``` + +### Done + +``` +╭─ 🐿️ relay ready +│ +│ Repo: /walnut-relay (private) +│ Keys: $HOME/.alive/relay/keys/ +│ Private: private.pem (chmod 600) +│ Public: committed to keys/.pem +│ Clone: $HOME/.alive/relay/clone/ (sparse) +│ Config: $HOME/.alive/relay/relay.json +│ +│ Next: /alive:relay add to invite a peer. +╰─ +``` + +--- + +## Add Peer + +Invite a GitHub user to the relay. Creates their inbox and updates relay.json. + +### Step 1: Validate + +Verify the GitHub user exists: + +```bash +gh api "users/" --jq '.login' 2>/dev/null +``` + +If not found, report and stop. If peer already in relay.json, report current status and stop (unless status `removed` -- offer re-add). + +### Step 2: Confirm + +``` +╭─ 🐿️ add peer +│ +│ This will: +│ 1. Add as collaborator on +│ (they get push access to deliver packages to your inbox) +│ 2. Create inbox// in the relay repo +│ +│ They'll receive a GitHub notification to accept. +│ +│ ▸ proceed? +│ 1. Yes, invite +│ 2. 
Cancel +╰─ +``` + +**Wait for confirmation.** External action. + +### Step 3: Execute + +```bash +# Add collaborator +gh api "repos//collaborators/" \ + --method PUT --field permission=push + +# Create inbox in relay repo +cd "$HOME/.alive/relay/clone" +git pull origin main +mkdir -p "inbox/" +touch "inbox//.gitkeep" +git add -A && git commit -m "Add inbox for " && git push origin main +``` + +### Step 4: Update relay.json + +Use inline python3 for safe JSON manipulation: + +```bash +python3 -c " +import json, datetime +with open('$HOME/.alive/relay/relay.json') as f: + config = json.load(f) +config['peers'].append({ + 'github': '', + 'name': None, + 'relay': None, + 'person_walnut': None, + 'added': datetime.date.today().isoformat(), + 'status': 'pending' +}) +with open('$HOME/.alive/relay/relay.json', 'w') as f: + json.dump(config, f, indent=2) + f.write('\n') +" +``` + +### Step 5: Resolve Person Walnut + +Check if a person walnut exists for this peer. Scan v2 path (`People/`) then v1 (`02_Life/people/`): + +```bash +ls -d People/*/ 2>/dev/null | while read d; do + grep -l "" "$d/_kernel/key.md" 2>/dev/null && echo "$d" +done +ls -d 02_Life/people/*/ 2>/dev/null | while read d; do + grep -l "" "$d/_core/key.md" 2>/dev/null && echo "$d" +done +``` + +If found, update `person_walnut` in relay.json. If not: + +``` +╭─ 🐿️ person walnut +│ +│ No person walnut found for . +│ +│ ▸ create one? +│ 1. Yes, create People// +│ 2. Skip -- link later +╰─ +``` + +If yes, invoke `alive:create-walnut` with type `person`. + +### Done + +``` +╭─ 🐿️ peer invited +│ +│ invited as collaborator on . +│ Status: pending (waiting for them to accept) +│ +│ Tell them to run /alive:relay accept on their end. +╰─ +``` + +--- + +## Accept Invitation + +List pending relay invitations, accept, fetch peer key, establish bidirectional link. 
+ +### Step 1: Fetch Invitations + +```bash +gh api user/repository_invitations \ + --jq '.[] | select(.repository.name == "walnut-relay") | "\(.id)\t\(.repository.full_name)\t\(.inviter.login)"' +``` + +If none found: + +``` +╭─ 🐿️ accept +│ +│ No pending relay invitations found. +│ Check GitHub notifications -- the invite may have expired. +╰─ +``` + +### Step 2: Present + Confirm + +``` +╭─ 🐿️ relay invitations +│ +│ 1. benflint/walnut-relay (from benflint) +│ 2. janedoe/walnut-relay (from janedoe) +│ +│ ▸ accept which? +│ 1. Accept all +│ 2. Pick individually +│ 3. Cancel +╰─ +``` + +For each selected invitation, confirm before accepting (external action): + +```bash +gh api "user/repository_invitations/" --method PATCH +``` + +### Step 3: Fetch Peer's Public Key + +```bash +PEER="" +gh api "repos/${PEER}/walnut-relay/contents/keys/${PEER}.pem" \ + --jq '.content' | base64 -d > "$HOME/.alive/relay/keys/peers/${PEER}.pem" + +# Verify +openssl pkey -pubin -in "$HOME/.alive/relay/keys/peers/${PEER}.pem" -noout +``` + +If verification fails, warn but continue. + +### Step 4: Update relay.json + +```bash +python3 -c " +import json, datetime +with open('$HOME/.alive/relay/relay.json') as f: + config = json.load(f) +peer = '' +existing = [p for p in config['peers'] if p['github'] == peer] +if existing: + existing[0]['status'] = 'accepted' + existing[0]['relay'] = f'{peer}/walnut-relay' +else: + config['peers'].append({ + 'github': peer, + 'name': None, + 'relay': f'{peer}/walnut-relay', + 'person_walnut': None, + 'added': datetime.date.today().isoformat(), + 'status': 'accepted' + }) +with open('$HOME/.alive/relay/relay.json', 'w') as f: + json.dump(config, f, indent=2) + f.write('\n') +" +``` + +### Step 5: Bidirectional Auto-Invite + +The peer invited you to their relay. For two-way sharing, they need access to yours too. + +``` +╭─ 🐿️ bidirectional setup +│ +│ invited you to their relay. For two-way sharing, +│ they also need access to yours. 
+│ +│ ▸ invite to your relay? +│ 1. Yes, send invite +│ 2. Skip -- I'll do it later with /alive:relay add +╰─ +``` + +**Wait for confirmation.** External action. + +If yes, execute Add Peer steps 3-4 for the inviter username (collaborator invite, inbox creation, relay.json update). Skip the Add Peer confirmation prompt since they just confirmed. + +### Step 6: Resolve Person Walnut + +Same as Add Peer step 5 -- scan People/ and 02_Life/people/ for the peer. + +### Done + +``` +╭─ 🐿️ relay linked +│ +│ Accepted: /walnut-relay +│ Public key cached: $HOME/.alive/relay/keys/peers/.pem +│ Bidirectional: +│ +│ You can now: +│ - /alive:share to push packages to via relay +│ - /alive:receive --relay to pull packages from your inbox +╰─ +``` + +--- + +## Detailed Status + +Full relay health. Read relay.json, state.json, check keys and clone. + +### Gather + +```bash +cat "$HOME/.alive/relay/relay.json" +cat "$HOME/.alive/relay/state.json" 2>/dev/null + +# Key health +[ -f "$HOME/.alive/relay/keys/private.pem" ] && echo "private:exists" || echo "private:missing" +stat -f "%Lp" "$HOME/.alive/relay/keys/private.pem" 2>/dev/null # macOS permissions + +# Public key in repo +GITHUB_USER=$(python3 -c "import json; print(json.load(open('$HOME/.alive/relay/relay.json'))['github_username'])") +gh api "repos/${GITHUB_USER}/walnut-relay/contents/keys/${GITHUB_USER}.pem" --jq '.sha' 2>/dev/null + +# Clone health +[ -d "$HOME/.alive/relay/clone/.git" ] && echo "clone:ok" || echo "clone:missing" +ls "$HOME/.alive/relay/clone/inbox/${GITHUB_USER}/"*.walnut 2>/dev/null | wc -l +``` + +### Present + +``` +╭─ 🐿️ relay status +│ +│ RELAY +│ Repo: patrickbrosnan11-spec/walnut-relay +│ Username: patrickbrosnan11-spec +│ Clone: $HOME/.alive/relay/clone/ (ok) +│ +│ KEYS +│ Private: private.pem (600, ok) +│ Public: committed to keys/patrickbrosnan11-spec.pem (ok) +│ +│ PEERS +│ github name relay status key +│ benflint Ben Flint benflint/walnut-relay accepted cached +│ janedoe -- 
janedoe/walnut-relay pending -- +│ +│ STATE +│ Last sync: 3 minutes ago +│ Pending packages: 0 +│ Peer reachability: +│ benflint: reachable (checked 3 min ago) +╰─ +``` + +Peer key column: `cached` (key at `keys/peers/.pem`), `missing` (no key), `--` (pending peer, not expected yet). + +### Troubleshooting + +Surface any failed health check with a fix suggestion: + +``` +╭─ 🐿️ relay issue +│ +│ Private key permissions are 644 (should be 600). +│ Fix: chmod 600 "$HOME/.alive/relay/keys/private.pem" +│ +│ ▸ fix now? +│ 1. Yes +│ 2. Skip +╰─ +``` + +Issues and fixes: +- Key permissions wrong: `chmod 600` +- Clone missing: re-clone with sparse checkout (Setup step 5) +- Public key not in repo: re-commit from local key +- state.json missing: `python3 "${CLAUDE_PLUGIN_ROOT}/scripts/relay-probe.py" --config "$HOME/.alive/relay/relay.json" --state "$HOME/.alive/relay/state.json"` + +--- + +## V1 Migration + +If `$HOME/.alive/relay.yaml` exists but relay.json does not, offer migration before any subcommand: + +``` +╭─ 🐿️ v1 relay detected +│ +│ Found v1 config at $HOME/.alive/relay.yaml +│ This preserves your keypairs, repo, and peer relationships. +│ +│ ▸ migrate to v2 format? +│ 1. Yes +│ 2. Cancel +╰─ +``` + +```bash +python3 "${CLAUDE_PLUGIN_ROOT}/scripts/migrate-relay.py" +``` + +After migration, continue with the original subcommand. + +--- + +## Error Handling + +| Error | Message | +|---|---| +| gh not installed | Install: `brew install gh` (macOS) or https://cli.github.com, then `gh auth login` | +| gh not authenticated | Run `gh auth login`, then try again | +| Network failure | Check connection, then `gh auth status` | + +### Account Routing + +Apply platform routing from platforms.md. The `github_username` in relay.json determines which account to use. For setup (before relay.json exists), detect current gh auth and confirm. + +--- + +## Confirmation Gate Rules + +Every external action MUST have a confirmation prompt. 
The external guard hook only catches `mcp__` tools, not Bash. This skill is the gate. + +**Requires confirmation:** repo creation, collaborator invite, invitation acceptance, bidirectional auto-invite, git push to relay repo. + +**No confirmation needed:** reading config/state files, `gh auth status`, read-only GitHub API, local keypair generation, writing local config. + +Pattern: present what will happen, numbered options, wait for choice. Never fire-and-forget on external actions. From 4cca8d9f28ac4659fc5d791f71610b3e7c0aa8ea Mon Sep 17 00:00:00 2001 From: Patrick Brosnan Date: Wed, 1 Apr 2026 14:49:55 +1100 Subject: [PATCH 07/10] feat(alive): add alive:receive skill for .walnut package import 14-step receive flow with three entry points (direct file, inbox scan, relay pull). Handles encryption detection (passphrase/RSA), SHA-256 checksum verification, manifest signature validation, path safety checks, scope-based routing (full/bundle/snapshot), conflict handling for bundle merges, v1 format 1.1.0 backward compatibility with path mapping, log entry via Edit (prepend), and now.json regeneration. Task: fn-5-dof.8 --- plugins/alive/skills/receive/SKILL.md | 944 ++++++++++++++++++++++++++ 1 file changed, 944 insertions(+) create mode 100644 plugins/alive/skills/receive/SKILL.md diff --git a/plugins/alive/skills/receive/SKILL.md b/plugins/alive/skills/receive/SKILL.md new file mode 100644 index 0000000..68d3996 --- /dev/null +++ b/plugins/alive/skills/receive/SKILL.md @@ -0,0 +1,944 @@ +--- +name: alive:receive +description: "Import a .walnut package into the world. Supports direct file import, inbox scan delegation, and relay pull (automatic fetch from git-based relay inbox). Detects encryption (passphrase or RSA), validates integrity (checksums + path safety), previews contents, routes by scope (full/bundle/snapshot), logs the import, and regenerates now.json." +user-invocable: true +--- + +# Receive + +Import a .walnut package. 
Validate, decrypt if needed, preview, route, log, regenerate state. + +Three entry points. One flow. Every package gets the same 14-step validation regardless of how it arrived. + +--- + +## Entry Points + +### 1. Direct File + +``` +/alive:receive path/to/file.walnut +``` + +The file path is the argument. Jump straight to Step 1 (Extract). + +### 2. Inbox Scan (delegated from capture-context) + +When `alive:capture-context` detects `.walnut` files in `03_Inputs/`, it delegates here instead of normal capture. The skill receives a list of `.walnut` paths from the inbox. + +Present the list and let the user pick: + +``` +╭─ 🐿️ walnut packages in inbox +│ +│ 1. nova-station-bundle-shielding-review-2026-04-01.walnut (2.3 MB) +│ 2. glass-cathedral-full-2026-03-28.walnut (14.1 MB) +│ +│ ▸ import which? +│ 1. Process all +│ 2. Pick individually (e.g. "1" or "1,2") +│ 3. Skip for now +╰─ +``` + +For each selected file, run the full 14-step flow. Between packages: "N remaining. Next, or done for now?" + +**Do NOT delete .walnut files from `03_Inputs/`.** Archive enforcer blocks deletion. After successful import, offer to move the file to the target walnut's `bundles/_received/` or leave it in place. Use `mv`, never `rm`. + +### 3. Relay Pull + +``` +/alive:receive --relay +``` + +Or triggered from session-start notification ("You have N walnut package(s) waiting on the relay"). + +**Flow:** + +1. Read `$HOME/.alive/relay/relay.json` -- get username and repo +2. Read `$HOME/.alive/relay/state.json` -- get pending count +3. If no relay configured, stop: "No relay configured. Run `/alive:relay setup` first." +4. If pending_packages == 0, check anyway (state may be stale): + +```bash +GITHUB_USER=$(python3 -c "import json; print(json.load(open('$HOME/.alive/relay/relay.json'))['github_username'])") +CLONE_DIR="$HOME/.alive/relay/clone" +cd "$CLONE_DIR" && git pull origin main --quiet +ls "$CLONE_DIR/inbox/${GITHUB_USER}/"*.walnut 2>/dev/null +``` + +5. 
If packages found, list them:

```
╭─ 🐿️ relay inbox
│
│ 1. a1b2c3d4-nova-station-bundle-shielding-review.walnut
│ 2. e5f6g7h8-glass-cathedral-snapshot.walnut
│
│ ▸ import which?
│ 1. Process all
│ 2. Pick individually
│ 3. Skip for now
╰─
```

6. For each selected: copy from clone to a temp staging path, then run the full 14-step flow
7. After successful import of each package, remove from relay clone and push:

```bash
cd "$CLONE_DIR"
rm "inbox/${GITHUB_USER}/<filename>.walnut"
git add -A && git commit -m "Received: <filename>" && git push origin main
```

This is an external action (git push) -- confirm before pushing:

```
╭─ 🐿️ cleanup relay inbox
│
│ Remove <filename> from relay inbox? (git push)
│
│ ▸ confirm?
│ 1. Yes, clean up
│ 2. Leave it (can re-import later)
╰─
```

8. Update `$HOME/.alive/relay/state.json` -- decrement pending_packages, update last_sync

---

## The 14-Step Import Flow

Every package goes through all 14 steps. Steps 4-5 are conditional on encryption. Step 6 is conditional on signature presence. Step 13 is conditional on relay metadata.

### Step 1: Extract to Staging

```bash
STAGING=$(mktemp -d -t walnut-receive-XXXXXX)
python3 -c "
import sys
sys.path.insert(0, '${CLAUDE_PLUGIN_ROOT}/scripts')
from alive_p2p import safe_tar_extract
safe_tar_extract('$PACKAGE_PATH', '$STAGING')
"
```

If extraction fails (corrupt archive, unsupported format), report and stop:

```
╭─ 🐿️ extraction failed
│
│ Could not extract: <path>
│ Error: <error message>
│
│ The file may be corrupt or not a valid .walnut package.
╰─
```

### Step 2: Validate Manifest

Read `$STAGING/manifest.yaml`. If missing, stop -- not a valid package.
+ +```bash +python3 -c " +import sys, json +sys.path.insert(0, '${CLAUDE_PLUGIN_ROOT}/scripts') +from alive_p2p import parse_manifest, validate_manifest + +with open('$STAGING/manifest.yaml') as f: + manifest = parse_manifest(f.read()) + +ok, errors = validate_manifest(manifest) +print(json.dumps({'manifest': manifest, 'valid': ok, 'errors': errors})) +" +``` + +**Format version handling:** + +- `2.x` -- proceed normally +- `1.x` (specifically `1.1.0`) -- accept with v1 backward compatibility mapping (see V1 Compatibility section below). Log a note that this is a v1 package. +- Anything else -- reject: + +``` +╭─ 🐿️ unsupported format +│ +│ Package format_version: +│ Expected: 2.x (or 1.x for backward compat) +│ +│ This package was created by a version of alive that isn't compatible. +╰─ +``` + +If validation fails (missing required fields, invalid scope): + +``` +╭─ 🐿️ invalid manifest +│ +│ +│ +│ +│ The manifest is malformed. Package cannot be imported. +╰─ +``` + +### Step 3: Verify Checksums + +```bash +python3 -c " +import sys, json +sys.path.insert(0, '${CLAUDE_PLUGIN_ROOT}/scripts') +from alive_p2p import parse_manifest, verify_checksums + +with open('$STAGING/manifest.yaml') as f: + manifest = parse_manifest(f.read()) + +ok, failures = verify_checksums(manifest, '$STAGING') +print(json.dumps({'ok': ok, 'failures': failures})) +" +``` + +If checksums pass, continue. If any fail: + +``` +╭─ 🐿️ checksum verification failed +│ +│ 2 file(s) failed integrity check: +│ - bundles/shielding-review/draft-02.md (mismatch) +│ - bundles/shielding-review/raw/proposal.pdf (missing) +│ +│ This package may have been tampered with or corrupted in transit. +│ +│ ▸ what to do? +│ 1. Abort import (recommended) +│ 2. Continue anyway (files may be corrupt) +╰─ +``` + +**Skip this step for encrypted packages** -- checksums will be verified after decryption in step 5b. 

### Step 4: Detect Encryption

Check the staging directory for encryption artifacts:

```bash
[ -f "$STAGING/payload.key" ] && echo "rsa" || ([ -f "$STAGING/payload.enc" ] && echo "passphrase" || echo "none")
```

- `payload.key` exists -> RSA mode (relay transport)
- `payload.enc` exists (no `payload.key`) -> passphrase mode (manual share)
- Neither -> unencrypted, skip to step 6

### Step 5: Decrypt

**Passphrase mode:**

```
╭─ 🐿️ encrypted package (passphrase)
│
│ This package is passphrase-encrypted.
│ The sender should have given you the passphrase separately.
│
│ ▸ enter passphrase:
╰─
```

Get passphrase via AskUserQuestion. Then (substitute the user's passphrase for `<passphrase>` -- it is passed to openssl via the environment, never as a CLI argument):

```bash
WALNUT_PASSPHRASE="<passphrase>" python3 -c "
import sys, os, subprocess, tempfile, shutil
sys.path.insert(0, '${CLAUDE_PLUGIN_ROOT}/scripts')
from alive_p2p import detect_openssl, safe_tar_extract

ssl = detect_openssl()
staging = '$STAGING'
payload_enc = os.path.join(staging, 'payload.enc')
decrypted_tar = os.path.join(staging, 'payload.tar.gz')

proc = subprocess.run(
    [ssl['binary'], 'enc', '-d', '-aes-256-cbc', '-pbkdf2', '-iter', '600000',
     '-in', payload_enc, '-out', decrypted_tar,
     '-pass', 'env:WALNUT_PASSPHRASE'],
    capture_output=True, text=True, timeout=120)

if proc.returncode != 0:
    print('DECRYPT_FAILED')
    sys.exit(1)

# Extract decrypted payload over the staging dir
safe_tar_extract(decrypted_tar, staging)

# Clean up encryption artifacts
os.remove(payload_enc)
os.remove(decrypted_tar)
if os.path.exists(os.path.join(staging, 'payload.key')):
    os.remove(os.path.join(staging, 'payload.key'))

print('DECRYPT_OK')
"
```

If decryption fails (wrong passphrase):

```
╭─ 🐿️ decryption failed
│
│ Wrong passphrase or corrupt payload.
│
│ ▸ try again?
│ 1. Yes, re-enter passphrase
│ 2. Abort import
╰─
```

Allow up to 3 retries before aborting.
+ +**RSA mode:** + +```bash +python3 -c " +import sys, os, subprocess, tempfile +sys.path.insert(0, '${CLAUDE_PLUGIN_ROOT}/scripts') +from alive_p2p import detect_openssl, safe_tar_extract + +ssl = detect_openssl() +staging = '$STAGING' +payload_enc = os.path.join(staging, 'payload.enc') +payload_key = os.path.join(staging, 'payload.key') +private_key = os.path.expanduser('~/.alive/relay/keys/private.pem') +aes_key = os.path.join(staging, 'aes.key') +decrypted_tar = os.path.join(staging, 'payload.tar.gz') + +if not os.path.isfile(private_key): + print('NO_PRIVATE_KEY') + sys.exit(1) + +# Unwrap AES key with RSA private key +proc = subprocess.run( + [ssl['binary'], 'pkeyutl', '-decrypt', + '-inkey', private_key, + '-in', payload_key, '-out', aes_key, + '-pkeyopt', 'rsa_padding_mode:oaep', + '-pkeyopt', 'rsa_oaep_md:sha256'], + capture_output=True, text=True, timeout=30) + +if proc.returncode != 0: + print('RSA_DECRYPT_FAILED') + sys.exit(1) + +# Read AES key as hex +with open(aes_key, 'rb') as f: + aes_key_hex = f.read().hex() + +# Read IV from payload.iv if present, else use zero IV +iv_path = os.path.join(staging, 'payload.iv') +if os.path.isfile(iv_path): + with open(iv_path, 'r') as f: + iv_hex = f.read().strip() +else: + iv_hex = '0' * 32 + +# Decrypt payload with AES key +proc = subprocess.run( + [ssl['binary'], 'enc', '-d', '-aes-256-cbc', + '-K', aes_key_hex, '-iv', iv_hex, + '-in', payload_enc, '-out', decrypted_tar], + capture_output=True, text=True, timeout=120) + +if proc.returncode != 0: + print('AES_DECRYPT_FAILED') + sys.exit(1) + +# Extract decrypted payload over staging +safe_tar_extract(decrypted_tar, staging) + +# Clean up +for f in [payload_enc, payload_key, aes_key, decrypted_tar, iv_path]: + if os.path.isfile(f): + os.remove(f) + +print('DECRYPT_OK') +" +``` + +If no private key found: + +``` +╭─ 🐿️ RSA decryption failed +│ +│ This package was RSA-encrypted for relay delivery, but no private +│ key found at $HOME/.alive/relay/keys/private.pem 
+│ +│ Run /alive:relay setup first, or ask the sender for a +│ passphrase-encrypted version. +╰─ +``` + +**Step 5b: Verify checksums after decryption.** Now that files are decrypted, run the same checksum verification as step 3. + +### Step 6: Verify Manifest Signature (if present) + +Check `manifest.signature` in the parsed manifest. If no signature block, skip. + +If signature present: + +```bash +python3 -c " +import sys, os, subprocess, json +sys.path.insert(0, '${CLAUDE_PLUGIN_ROOT}/scripts') +from alive_p2p import detect_openssl + +ssl = detect_openssl() +staging = '$STAGING' +manifest_path = os.path.join(staging, 'manifest.yaml') + +# Read manifest, strip signature block for verification +with open(manifest_path) as f: + content = f.read() + +# Remove signature: block (last section) +import re +stripped = re.sub(r'\nsignature:\n(?:\s+\w.*\n)*', '\n', content) + +# Write stripped content to temp file +stripped_path = manifest_path + '.stripped' +with open(stripped_path, 'w') as f: + f.write(stripped) + +# Get signer from manifest +manifest = json.loads('$MANIFEST_JSON') +signer = manifest.get('signature', {}).get('signer', '') +sig_value = manifest.get('signature', {}).get('value', '') + +# Look for signer's public key +key_paths = [ + os.path.expanduser(f'~/.alive/relay/keys/peers/{signer}.pem'), + os.path.join(staging, 'keys', f'{signer}.pem'), +] +pub_key = None +for kp in key_paths: + if os.path.isfile(kp): + pub_key = kp + break + +if not pub_key: + print('NO_SIGNER_KEY') + sys.exit(0) # Not fatal, just warn + +# Decode signature +import base64 +sig_bytes = base64.b64decode(sig_value) +sig_path = manifest_path + '.sig' +with open(sig_path, 'wb') as f: + f.write(sig_bytes) + +# Verify +proc = subprocess.run( + [ssl['binary'], 'dgst', '-sha256', '-verify', pub_key, + '-signature', sig_path, stripped_path], + capture_output=True, text=True, timeout=30) + +os.remove(stripped_path) +os.remove(sig_path) + +if proc.returncode == 0: + print('SIG_VALID') +else: + 
print('SIG_INVALID') +" +``` + +If signature valid: note in preview. If invalid: + +``` +╭─ 🐿️ signature verification failed +│ +│ The manifest claims to be from , but the signature +│ doesn't match their public key. +│ +│ This could mean the package was modified after signing. +│ +│ ▸ what to do? +│ 1. Abort import (recommended) +│ 2. Continue anyway (at your own risk) +╰─ +``` + +If signer's key not found: warn but continue (not fatal): + +``` +╭─ 🐿️ signature not verified +│ +│ Package is signed by , but their public key isn't cached. +│ Can't verify authenticity. Continuing with import. +╰─ +``` + +### Step 7: Path Safety Checks + +Verify every file path in the manifest is safe. This is defense-in-depth on top of `safe_tar_extract`'s built-in checks. + +```bash +python3 -c " +import sys, os, json +sys.path.insert(0, '${CLAUDE_PLUGIN_ROOT}/scripts') +from alive_p2p import parse_manifest + +with open('$STAGING/manifest.yaml') as f: + manifest = parse_manifest(f.read()) + +issues = [] +staging = os.path.abspath('$STAGING') + +for entry in manifest.get('files', []): + p = entry['path'] + # No parent directory traversal + if '..' 
in p: + issues.append(f'Path traversal: {p}') + # No absolute paths + if os.path.isabs(p): + issues.append(f'Absolute path: {p}') + # Resolved path must stay within staging + resolved = os.path.normpath(os.path.join(staging, p)) + if not resolved.startswith(staging + os.sep) and resolved != staging: + issues.append(f'Escapes staging: {p}') + # Check for symlinks in extracted files + full = os.path.join(staging, p.replace('/', os.sep)) + if os.path.islink(full): + target = os.path.realpath(full) + if not target.startswith(staging + os.sep): + issues.append(f'Symlink escapes staging: {p} -> {target}') + +print(json.dumps({'ok': len(issues) == 0, 'issues': issues})) +" +``` + +If any issues found, abort: + +``` +╭─ 🐿️ path safety check failed +│ +│ Dangerous paths detected in package: +│ - +│ - +│ +│ This package contains paths that could write outside the target +│ directory. Import aborted. +╰─ +``` + +No override option for path safety failures. This is non-negotiable. + +### Step 8: Present Preview + +Build the preview from the validated manifest: + +``` +╭─ 🐿️ package preview +│ +│ Source: nova-station (by patrickSupernormal) +│ Scope: bundle +│ Format: 2.0.0 +│ Created: 2026-04-01T10:00:00Z +│ Encrypted: passphrase (decrypted) +│ Signed: patrickSupernormal (verified) +│ Description: Radiation shielding vendor evaluation +│ Note: "Here's the shielding review -- let me know" +│ +│ Bundles (2): +│ shielding-review/ (5 files, 1.2 MB) +│ launch-checklist/ (3 files, 340 KB) +│ +│ Files: 12 total, 1.5 MB +│ +│ ▸ import? +│ 1. Yes, import +│ 2. Preview file list first +│ 3. Cancel +╰─ +``` + +If user picks "Preview file list", show the full manifest file inventory with paths and sizes. Then re-ask import question. + +For **snapshot scope**, adjust the prompt -- no import, just display: + +``` +╭─ 🐿️ snapshot preview +│ +│ Source: nova-station (by patrickSupernormal) +│ Scope: snapshot (read-only) +│ +│ This is a snapshot -- identity and insights only. 
+│ No files will be written. +│ +│ ▸ what to do? +│ 1. Display key.md contents +│ 2. Display insights.md contents +│ 3. Done +╰─ +``` + +### Step 9: Route by Scope + +**Full scope -> create new walnut:** + +The user must choose an ALIVE domain folder. Root guardian blocks world root writes. + +``` +╭─ 🐿️ full import -- choose destination +│ +│ This is a full walnut export. It needs its own folder. +│ +│ ▸ which ALIVE domain? +│ 1. 02_Life/ +│ 2. 04_Ventures/ +│ 3. 05_Experiments/ +│ 4. Custom path +╰─ +``` + +Resolve the target path: `//` where walnut-name comes from `manifest.source.walnut`. + +If the directory already exists: + +``` +╭─ 🐿️ walnut already exists +│ +│ // already exists. +│ +│ ▸ what to do? +│ 1. Import as -imported/ +│ 2. Merge into existing (careful -- may overwrite files) +│ 3. Cancel +╰─ +``` + +**Bundle scope -> merge into existing walnut:** + +``` +╭─ 🐿️ bundle import -- choose target walnut +│ +│ Bundles: +│ +│ ▸ which walnut receives these bundles? +│ 1. +│ 2. Browse walnuts +│ 3. Cancel +╰─ +``` + +If "Browse walnuts", scan `04_Ventures/*/_kernel/key.md`, `05_Experiments/*/_kernel/key.md`, `02_Life/*/_kernel/key.md` frontmatter (type, goal) and present as numbered list. Fall back to `_core/key.md` for v1 walnuts. + +After target selection, check for bundle name conflicts (see Conflict Handling section below). + +**Snapshot scope -> read-only display:** + +No file writes. Display key.md and/or insights.md from staging. Handled in step 8 already. Skip to step 14 (cleanup). 

### Step 10: Write Imported Files

**Full scope:**

```bash
TARGET="<domain>/<walnut-name>"
mkdir -p "$TARGET"

# Copy _kernel/ files
mkdir -p "$TARGET/_kernel"
for f in key.md log.md insights.md; do
  [ -f "$STAGING/_kernel/$f" ] && cp "$STAGING/_kernel/$f" "$TARGET/_kernel/$f"
done

# Copy bundles/
[ -d "$STAGING/bundles" ] && cp -R "$STAGING/bundles" "$TARGET/bundles"

# Copy live context (everything else at staging root except _kernel/, bundles/, manifest.yaml)
for item in "$STAGING"/*; do
  base=$(basename "$item")
  case "$base" in
    _kernel|bundles|manifest.yaml) continue ;;
    *) cp -R "$item" "$TARGET/$base" ;;
  esac
done

# Create _kernel/_generated/ directory for now.json
mkdir -p "$TARGET/_kernel/_generated"
```

**Bundle scope:**

```bash
TARGET_WALNUT="<path to target walnut>"

# Copy each bundle
for bundle_name in <selected bundle names>; do
  [ -d "$STAGING/bundles/$bundle_name" ] && \
    cp -R "$STAGING/bundles/$bundle_name" "$TARGET_WALNUT/bundles/$bundle_name"
done

# If key.md included and target has no key.md, copy it
[ -f "$STAGING/_kernel/key.md" ] && [ ! -f "$TARGET_WALNUT/_kernel/key.md" ] && \
  cp "$STAGING/_kernel/key.md" "$TARGET_WALNUT/_kernel/key.md"
```

### Step 11: Log Import Event

**Use Edit to prepend to `_kernel/log.md`.** The log guardian blocks Write to existing log files. Edit (prepend) is the only safe way.

If this is a full-scope import to a new walnut, the log.md was just copied from the package. Prepend the import event to the top of that log.

If this is a bundle-scope import, prepend to the existing walnut's log.

```
## <YYYY-MM-DD>

Received .walnut package: <filename>
- Scope: <full / bundle>
- Source: <source walnut> (by <sender>)
- Format: <format_version>
- Bundles imported: <bundle names, or none>
- Encrypted: <passphrase / RSA / none>
- Signature: <verified / not verified / none>
- Files: <count> (<total size>)

signed: squirrel: <squirrel name>
```

**For new walnuts (full scope):** If `_kernel/log.md` does not exist yet (e.g., the package didn't include one), use Write to create it. Write to non-existent log.md is allowed (task fn-5-dof.1 fix).
+ +**For existing walnuts (bundle scope):** Always use Edit to prepend. Read the first line of the existing log.md to get the old_string for the Edit, then prepend the new entry above it. + +### Step 12: Regenerate now.json + +After import, the walnut needs a valid `_kernel/_generated/now.json`. This is a minimal regeneration -- not the full save protocol. + +1. Read `_kernel/key.md` frontmatter for phase, rhythm, type, goal +2. Read `_kernel/log.md` first 3-5 entries for context synthesis +3. Generate now.json: + +```json +{ + "phase": "", + "health": "active", + "updated": "", + "bundle": null, + "next": "", + "squirrel": "", + "context": "<1-2 sentence synthesis from log entries>", + "projections": [] +} +``` + +Write to `_kernel/_generated/now.json`: + +```bash +mkdir -p "$TARGET/_kernel/_generated" +``` + +Then use Write to create the file (it's a generated projection, not a source file -- Write is appropriate here). + +For **bundle scope into existing walnut:** Read the existing now.json first. Update the context paragraph to mention the imported bundles. Don't flatten existing context -- merge the new information in. + +### Step 13: Relay Bootstrap (conditional) + +If the manifest has a `relay:` section with `repo` and `sender`: + +``` +╭─ 🐿️ relay bootstrap +│ +│ This package came from 's relay: +│ +│ ▸ join their relay for automatic package delivery? +│ 1. Yes -- run /alive:relay accept +│ 2. Not now +╰─ +``` + +If yes, invoke `alive:relay accept` flow. If the user doesn't have a relay set up yet, suggest `alive:relay setup` first. + +This is how relay connections spread organically -- receiving a package from someone's relay is the natural moment to establish the return channel. + +### Step 14: Cleanup + +```bash +rm -rf "$STAGING" +``` + +Always runs, even on errors. Wrap the entire flow in a try/finally equivalent -- if any step fails, clean up staging before reporting the error. 
+ +Present completion: + +``` +╭─ 🐿️ import complete +│ +│ Package: +│ Imported to: +│ Scope: +│ Bundles: +│ Log entry: written +│ now.json: regenerated +│ +│ Open with /alive:load to start working. +╰─ +``` + +--- + +## Conflict Handling (Bundle Scope) + +When importing a bundle into a walnut that already has a bundle with the same name: + +1. Read both context.manifest.yaml files (local and incoming) +2. Compare versions if available, count files in each +3. Present: + +``` +╭─ 🐿️ bundle conflict +│ +│ Bundle "shielding-review" already exists in . +│ +│ Local: 5 files, last modified 2026-03-25 +│ Incoming: 7 files, from 2026-04-01 +│ +│ ▸ how to handle? +│ 1. Replace -- incoming overwrites local +│ 2. Merge -- import new files only, keep existing +│ 3. Import as "shielding-review-imported" +│ 4. Skip this bundle +╰─ +``` + +**Replace:** Remove local bundle directory, copy incoming. + +**Merge:** Walk incoming files. For each file: +- If not in local: copy +- If in local and identical (same SHA-256): skip +- If in local and different: present individual conflict: + +``` +╭─ 🐿️ file conflict +│ +│ bundles/shielding-review/draft-02.md +│ +│ Local: 2,847 bytes (modified 2026-03-25) +│ Incoming: 3,201 bytes (from 2026-04-01) +│ +│ ▸ keep which? +│ 1. Incoming (overwrite local) +│ 2. Local (skip incoming) +│ 3. Keep both (rename incoming to draft-02-imported.md) +╰─ +``` + +**Import as renamed:** Copy entire bundle to `bundles/-imported/`. + +**Skip:** Don't import this bundle. Continue with next. 
+ +--- + +## V1 Backward Compatibility (format 1.1.0) + +When `format_version` starts with `1.`, apply these path mappings during import: + +| v1 path | v2 path | Action | +|---------|---------|--------| +| `_core/key.md` | `_kernel/key.md` | Copy with path change | +| `_core/log.md` | `_kernel/log.md` | Copy with path change | +| `_core/insights.md` | `_kernel/insights.md` | Copy with path change | +| `_core/now.md` | (discard) | now.json is generated, not imported | +| `_core/tasks.md` | (discard) | v2 tasks are per-bundle | +| `_core/_capsules//` | `bundles//` | Copy with path change | +| `_core/_capsules//companion.md` | `bundles//context.manifest.yaml` | Convert (see below) | + +**companion.md -> context.manifest.yaml conversion:** + +1. Read companion.md +2. Extract YAML frontmatter using `parse_yaml_frontmatter()` +3. Map frontmatter fields to context.manifest.yaml structure: + +```yaml +# From companion.md frontmatter: +# type: capsule +# description: "Shielding vendor evaluation" +# status: active +# tags: [shielding, vendors] +# +# Becomes context.manifest.yaml: +name: shielding-review +description: "Shielding vendor evaluation" +status: active +tags: [shielding, vendors] +sources: [] +decisions: [] +``` + +4. Write as pure YAML (not markdown with frontmatter) +5. If companion.md has a body (below frontmatter), preserve it as `bundles//README.md` + +**v1 scope mapping:** +- `scope: capsule` -> treat as `scope: bundle` + +**During step 2 (validate manifest):** Relax validation for v1 packages. Accept `_core/` paths in the file list. The mapping happens during step 10 (write). + +``` +╭─ 🐿️ v1 package detected +│ +│ Format: 1.1.0 (legacy) +│ Paths will be mapped to v2 structure: +│ _core/ -> _kernel/ +│ _core/_capsules/ -> bundles/ +│ now.md, tasks.md -> discarded (regenerated) +│ +│ Continuing with import. +╰─ +``` + +--- + +## Error Recovery + +Every step that can fail has a cleanup path. 
The staging directory is the containment boundary -- nothing touches the target walnut until step 10. + +| Failure point | Recovery | +|---|---| +| Extraction (step 1) | Clean staging, report | +| Manifest (step 2) | Clean staging, report | +| Checksums (step 3) | Clean staging, report (or user overrides) | +| Decryption (step 5) | Retry passphrase up to 3x, then clean staging | +| Signature (step 6) | Warn but continue (user choice) | +| Path safety (step 7) | Clean staging, abort (non-negotiable) | +| Write (step 10) | Partial writes may exist -- report which files were written | +| Log (step 11) | If Edit fails, report -- manual intervention needed | +| now.json (step 12) | Non-fatal -- walnut works without it, just run /alive:save later | + +If any fatal error occurs, always clean up staging: + +```bash +[ -d "$STAGING" ] && rm -rf "$STAGING" +``` + +--- + +## Files Read + +| File | Why | +|---|---| +| `$HOME/.alive/relay/relay.json` | Relay config (relay pull entry point) | +| `$HOME/.alive/relay/state.json` | Pending package count | +| `$STAGING/manifest.yaml` | Package metadata, file inventory, checksums | +| Target walnut `_kernel/key.md` | Phase, rhythm for now.json regen | +| Target walnut `_kernel/log.md` | Recent entries for now.json context synthesis | +| Target walnut `_kernel/_generated/now.json` | Existing state (bundle scope merge) | +| Target walnut `bundles/*/context.manifest.yaml` | Conflict detection | + +## Files Written + +| File | Method | Why | +|---|---|---| +| Target `_kernel/key.md` | Write (new walnut) or skip (exists) | Walnut identity | +| Target `_kernel/log.md` | Write (new) or Edit prepend (existing) | Import event | +| Target `_kernel/insights.md` | Write (new walnut) or skip | Domain knowledge | +| Target `_kernel/_generated/now.json` | Write | Generated projection | +| Target `bundles/*/` | Write (cp) | Imported bundles | +| `$HOME/.alive/relay/state.json` | Write (JSON update) | Relay state after pull | From 
b41efaad77b038b4b132ae3e3c02b1598e31e6a8 Mon Sep 17 00:00:00 2001 From: Patrick Brosnan Date: Wed, 1 Apr 2026 15:00:38 +1100 Subject: [PATCH 08/10] feat(alive): add alive:share skill for 9-step packaging + relay push - 9-step flow: scope, bundle picker, sensitivity gate, confirmation, encryption, package creation, output, metadata update, relay push - Sensitivity gate blocks restricted/PII content without confirmation - Passphrase via env var (never CLI arg, never disk) - Relay push reads peers from state.json, confirms before GitHub API - Account routing per platforms.md for GitHub API calls - Pre-flight size check warns > 35MB for relay Task: fn-5-dof.7 --- plugins/alive/skills/share/SKILL.md | 636 ++++++++++++++++++++++++++++ 1 file changed, 636 insertions(+) create mode 100644 plugins/alive/skills/share/SKILL.md diff --git a/plugins/alive/skills/share/SKILL.md b/plugins/alive/skills/share/SKILL.md new file mode 100644 index 0000000..f04acbb --- /dev/null +++ b/plugins/alive/skills/share/SKILL.md @@ -0,0 +1,636 @@ +--- +name: alive:share +description: "Package walnut context into a portable .walnut file for sharing via any channel -- email, AirDrop, Slack, USB. Supports three scopes (full, bundle, snapshot), sensitivity gating, optional passphrase encryption, and relay push for automatic delivery to peers." +user-invocable: true +--- + +# Share + +Package walnut context and send it. Nine steps: scope, pick, gate, confirm, encrypt, create, verify, record, push. + +Output is a `.walnut` file on the Desktop. Optionally encrypted (passphrase for manual transfer, RSA for relay). Relay push delivers directly to a peer's inbox via GitHub. + +--- + +## Prerequisites + +Before starting, determine the active walnut: + +```bash +WALNUT_ROOT="$(pwd)" +WALNUT_NAME="$(basename "$WALNUT_ROOT")" +``` + +Verify `_kernel/key.md` exists. If not, stop: "No walnut loaded. Open one first with /alive:load." 
+ +--- + +## Step 1: Scope Selection + +``` +╭─ 🐿️ share +│ +│ Walnut: +│ +│ ▸ what scope? +│ 1. Full -- everything (_kernel, bundles, live context) +│ 2. Bundle -- one or more specific bundles + key.md +│ 3. Snapshot -- key.md + insights.md only (lightweight intro) +╰─ +``` + +Wait for selection. Map: 1 = `full`, 2 = `bundle`, 3 = `snapshot`. + +If `full` or `snapshot`, skip to Step 3. + +--- + +## Step 2: Bundle Picker (bundle scope only) + +List bundles from `bundles/*/context.manifest.yaml`. For each, read frontmatter to extract `name`, `phase`, `sensitivity`, `pii`, and `description`. + +```bash +for manifest in "$WALNUT_ROOT"/bundles/*/context.manifest.yaml; do + echo "---" + echo "path: $manifest" + python3 -c " +import sys +with open('$manifest') as f: + content = f.read() +# Read YAML frontmatter (before first ---) +lines = content.strip().split('\n') +for line in lines: + if line.strip() == '---': + break + print(line) +" +done +``` + +Present as numbered list: + +``` +╭─ 🐿️ bundles +│ +│ 1. p2p-design phase: published sensitivity: open P2P sharing architecture +│ 2. funding-pitch phase: draft sensitivity: private Series A investor deck +│ 3. user-research phase: prototype sensitivity: restricted (pii) Interview transcripts +│ +│ ▸ which bundles? (comma-separated, e.g. 1,3) +╰─ +``` + +Collect selected bundle names. These are used in Step 6. + +--- + +## Step 3: Sensitivity Gate + +For each bundle in scope (all bundles for full scope, selected for bundle scope), read `sensitivity:` and `pii:` from its `context.manifest.yaml` frontmatter. + +For snapshot scope, skip this step (key.md and insights.md don't carry bundle sensitivity). + +**Gate logic:** + +| sensitivity | pii | Action | +|---|---|---| +| `open` | `false` / absent | Pass. No warning. | +| `open` | `true` | Warn: "This bundle is marked open but contains PII. Confirm before sharing." Block until confirmed. | +| `private` | any | Note: "This bundle is marked private. 
A note will be added to the package metadata." Continue. | +| `restricted` | `false` / absent | Warn + confirm: | +| `restricted` | `true` | Block until confirmed: | + +For `restricted` without PII: + +``` +╭─ 🐿️ sensitivity check +│ +│ Bundle "" is marked restricted. +│ +│ ▸ continue with share? +│ 1. Yes, I understand the sensitivity +│ 2. Cancel +╰─ +``` + +For `restricted` with PII, or `open` with PII: + +``` +╭─ 🐿️ sensitivity check +│ +│ Bundle "" contains personally identifiable information (pii: true). +│ Sharing PII outside your system requires explicit confirmation. +│ +│ ▸ confirm PII share? +│ 1. Yes, the recipient is authorized to receive this data +│ 2. Cancel -- do not share +╰─ +``` + +If any bundle fails the gate and the user cancels, abort the entire share flow. + +--- + +## Step 4: Scope Confirmation + +Count files and estimate package size before creating: + +```bash +# For bundle scope -- count files in selected bundles +TOTAL_FILES=0 +TOTAL_SIZE=0 +for BUNDLE in ; do + BUNDLE_DIR="$WALNUT_ROOT/bundles/$BUNDLE" + COUNT=$(find "$BUNDLE_DIR" -type f | wc -l) + SIZE=$(find "$BUNDLE_DIR" -type f -exec stat -f%z {} + 2>/dev/null | paste -sd+ | bc) + TOTAL_FILES=$((TOTAL_FILES + COUNT)) + TOTAL_SIZE=$((TOTAL_SIZE + SIZE)) +done + +# For full scope -- count everything staged +find "$WALNUT_ROOT" -type f \ + -not -path '*/.alive/*' \ + -not -path '*/.git/*' \ + -not -path '*/__pycache__/*' \ + -not -path '*/node_modules/*' \ + -not -name '.DS_Store' \ + | wc -l +``` + +Present summary: + +``` +╭─ 🐿️ pre-flight +│ +│ Scope: bundle (p2p-design, funding-pitch) +│ Files: 47 +│ Estimated size: 2.3 MB +│ +│ ▸ proceed? +│ 1. Yes, create package +│ 2. Change scope +│ 3. Cancel +╰─ +``` + +**Size warning** (> 35 MB): + +``` +╭─ 🐿️ size warning +│ +│ Estimated package size is ~52 MB. +│ GitHub relay limit is ~50 MB (Contents API with base64 overhead). +│ Manual transfer (AirDrop, email) will still work. +│ +│ ▸ proceed anyway? +│ 1. 
Yes, create package (relay push may fail) +│ 2. Reduce scope +│ 3. Cancel +╰─ +``` + +If "Change scope" or "Reduce scope", return to Step 1. + +--- + +## Step 5: Encryption Prompt + +Check whether a relay is configured: + +```bash +[ -f "$HOME/.alive/relay/relay.json" ] && echo "relay:configured" || echo "relay:none" +``` + +If relay is configured, also check for accepted peers: + +```bash +python3 -c " +import json +with open('$HOME/.alive/relay/relay.json') as f: + config = json.load(f) +accepted = [p for p in config.get('peers', []) if p.get('status') == 'accepted'] +print(f'accepted_peers:{len(accepted)}') +" +``` + +**Present options based on relay state:** + +No relay or no accepted peers: + +``` +╭─ 🐿️ encryption +│ +│ ▸ encrypt the package? +│ 1. Passphrase -- AES-256, you share the passphrase separately +│ 2. No encryption -- plaintext .walnut file +╰─ +``` + +Relay with accepted peers: + +``` +╭─ 🐿️ encryption +│ +│ ▸ encrypt the package? +│ 1. Passphrase -- AES-256, you share the passphrase separately +│ 2. No encryption -- plaintext .walnut file +│ 3. Relay -- RSA-encrypt per peer, push to relay inbox +╰─ +``` + +**If passphrase (option 1):** + +``` +╭─ 🐿️ passphrase +│ +│ Enter a passphrase for the package. Share it with the recipient +│ through a separate channel (not alongside the .walnut file). +│ +│ ▸ passphrase: +╰─ +``` + +Store the passphrase in an environment variable. **Never** pass it as a CLI argument. **Never** write it to disk. + +```bash +export WALNUT_PASSPHRASE="" +``` + +**If relay (option 3):** Encryption happens per-peer in Step 9. Create the unencrypted package first in Step 6. + +Record the encryption choice: `ENCRYPT_MODE` = `passphrase`, `none`, or `relay`. 
+ +--- + +## Step 6: Package Creation + +Call alive-p2p.py to create the .walnut package: + +```bash +python3 "${CLAUDE_PLUGIN_ROOT}/scripts/alive-p2p.py" create \ + --scope \ + --walnut "$WALNUT_ROOT" \ + --bundle \ + --output "$HOME/Desktop/--.walnut" \ + --description "" +``` + +For bundle scope, pass `--bundle ` once per selected bundle. +For full/snapshot scope, omit `--bundle`. +If output path is omitted, alive-p2p.py defaults to `~/Desktop/`. + +**If passphrase encryption was selected,** encrypt immediately after creation: + +```bash +export WALNUT_PASSPHRASE="" +python3 -c " +import sys +sys.path.insert(0, '${CLAUDE_PLUGIN_ROOT}/scripts') +from alive_p2p import encrypt_package +result = encrypt_package( + package_path='', + mode='passphrase' +) +print(result) +" +``` + +The encrypt_package function reads `WALNUT_PASSPHRASE` from the environment. It produces a new `.walnut` file with encrypted payload. The original unencrypted file is replaced. + +**Unset the passphrase immediately after encryption:** + +```bash +unset WALNUT_PASSPHRASE +``` + +Capture the output path and size from the result. + +--- + +## Step 7: Output Confirmation + +``` +╭─ 🐿️ package created +│ +│ Path: ~/Desktop/stackwalnuts-bundle-p2p-design-2026-04-01.walnut +│ Size: 2.3 MB +│ Scope: bundle (p2p-design) +│ Encryption: AES-256-CBC (passphrase) +│ Files: 47 +│ +│ Ready to send via email, AirDrop, Slack, USB -- whatever works. +│ Recipient imports with /alive:receive. +╰─ +``` + +If unencrypted: + +``` +╭─ 🐿️ package created +│ +│ Path: ~/Desktop/stackwalnuts-bundle-p2p-design-2026-04-01.walnut +│ Size: 2.3 MB +│ Scope: bundle (p2p-design) +│ Encryption: none +│ Files: 47 +│ +│ This package is not encrypted. Anyone with the file can read it. +│ Send via a trusted channel, or re-share with /alive:share and pick passphrase. 
+╰─ +``` + +If relay encryption was selected, note that the local copy is unencrypted but the relay push will RSA-encrypt per peer: + +``` +╭─ 🐿️ package created +│ +│ Path: ~/Desktop/stackwalnuts-bundle-p2p-design-2026-04-01.walnut +│ Size: 2.3 MB (local copy, unencrypted) +│ Scope: bundle (p2p-design) +│ +│ Relay push will RSA-encrypt per peer in the next step. +╰─ +``` + +--- + +## Step 8: Metadata Update + +For each bundle in scope, update its `context.manifest.yaml` `shared:` array. Use inline python3 for safe YAML manipulation: + +```bash +python3 -c " +import datetime, sys + +manifest_path = '$WALNUT_ROOT/bundles//context.manifest.yaml' +with open(manifest_path, 'r') as f: + content = f.read() + +# Find the shared: line or add it +# Simple append-to-YAML approach -- add shared entry +entry = ''' - method: alive:share + scope: + date: $(date -u +%Y-%m-%dT%H:%M:%SZ) + encrypted: + package: ''' + +# Read existing content, find or create shared: section +lines = content.rstrip().split('\n') +shared_idx = None +for i, line in enumerate(lines): + if line.startswith('shared:'): + shared_idx = i + break + +if shared_idx is not None: + if lines[shared_idx].strip() == 'shared: []': + lines[shared_idx] = 'shared:' + lines.insert(shared_idx + 1, entry) +else: + lines.append('shared:') + lines.append(entry) + +with open(manifest_path, 'w') as f: + f.write('\n'.join(lines) + '\n') +" +``` + +This records the share event in the bundle's manifest. When the relay push delivers to specific peers, update the entry with `to: `. + +--- + +## Step 9: Relay Push (conditional) + +Only runs when `ENCRYPT_MODE` is `relay`. If the user chose passphrase or no encryption, the flow ends at Step 8. 
+ +### Step 9a: Read Peer Reachability + +```bash +python3 -c " +import json +with open('$HOME/.alive/relay/state.json') as f: + state = json.load(f) +with open('$HOME/.alive/relay/relay.json') as f: + config = json.load(f) + +accepted = [p for p in config.get('peers', []) if p.get('status') == 'accepted'] +reachability = state.get('peer_reachability', {}) + +for p in accepted: + gh = p['github'] + name = p.get('name') or gh + reach = reachability.get(gh, {}) + status = reach.get('status', 'unknown') + print(f\"{gh}\t{name}\t{status}\") +" +``` + +### Step 9b: Present Peer Selection + +``` +╭─ 🐿️ relay push +│ +│ ▸ send to which peers? +│ 1. benflint (Ben Flint) -- reachable +│ 2. janedoe (janedoe) -- unreachable +│ 3. All reachable peers +│ +│ Unreachable peers are shown but may fail. The package stays +│ in their inbox for pickup when they come back online. +╰─ +``` + +### Step 9c: User Selects + +Wait for numbered selection. + +### Step 9d: Read Selected Peer Details + +For each selected peer, read their relay repo and key path from relay.json: + +```bash +python3 -c " +import json +with open('$HOME/.alive/relay/relay.json') as f: + config = json.load(f) +peer = [p for p in config['peers'] if p['github'] == ''][0] +print(f\"relay:{peer['relay']}\") +print(f\"github:{peer['github']}\") +" +``` + +### Step 9e: Confirmation Before Push + +**This is a confirmation gate for an external action.** The external guard hook does not catch Bash tool calls. This prompt is the gate. + +``` +╭─ 🐿️ relay push confirmation +│ +│ This will push an RSA-encrypted package to: +│ +│ Peer: benflint (Ben Flint) +│ Relay: benflint/walnut-relay +│ Inbox: inbox/benflint/.walnut +│ +│ The package will be encrypted with their RSA public key. +│ Only they can decrypt it with their private key. +│ +│ ▸ push to relay? +│ 1. Yes, push now +│ 2. Cancel +╰─ +``` + +**Wait for confirmation.** Do not proceed without explicit "yes" or "1". 
+ +If multiple peers are selected, confirm once listing all peers, not once per peer. + +### Step 9f: RSA-Encrypt Per Peer + +For each selected peer, encrypt the package with their public key: + +```bash +python3 -c " +import sys +sys.path.insert(0, '${CLAUDE_PLUGIN_ROOT}/scripts') +from alive_p2p import encrypt_package + +result = encrypt_package( + package_path='', + output_path='/tmp/-.walnut', + mode='rsa', + recipient_pubkey='$HOME/.alive/relay/keys/peers/.pem' +) +print(result) +" +``` + +### Step 9g: Push to Peer's Inbox + +Read the RSA-encrypted package, base64-encode, and push via GitHub Contents API: + +```bash +PEER="" +PEER_RELAY="" +PACKAGE_NAME=".walnut" +MY_USER=$(python3 -c "import json; print(json.load(open('$HOME/.alive/relay/relay.json'))['github_username'])") + +# Base64-encode the encrypted package (cross-platform, no line breaks) +CONTENT=$(openssl base64 -A -in "/tmp/${PEER}-${PACKAGE_NAME}") + +# Check if file already exists (need SHA for update) +EXISTING_SHA=$(gh api "repos/${PEER_RELAY}/contents/inbox/${MY_USER}/${PACKAGE_NAME}" \ + --jq '.sha' 2>/dev/null || echo "") + +if [ -n "$EXISTING_SHA" ]; then + # Update existing file + gh api "repos/${PEER_RELAY}/contents/inbox/${MY_USER}/${PACKAGE_NAME}" \ + --method PUT \ + --field message="Package from ${MY_USER}" \ + --field content="$CONTENT" \ + --field sha="$EXISTING_SHA" +else + # Create new file + gh api "repos/${PEER_RELAY}/contents/inbox/${MY_USER}/${PACKAGE_NAME}" \ + --method PUT \ + --field message="Package from ${MY_USER}" \ + --field content="$CONTENT" +fi +``` + +### Step 9h: Clean Up + Confirm Delivery + +```bash +# Clean up temporary encrypted copies +rm -f "/tmp/${PEER}-${PACKAGE_NAME}" +``` + +Update the shared entry in the bundle manifest with peer info (update Step 8's entry): + +```bash +python3 -c " +import datetime +manifest_path = '$WALNUT_ROOT/bundles//context.manifest.yaml' +with open(manifest_path, 'r') as f: + content = f.read() +# Append to: field to the most 
recent shared entry +content = content.rstrip() +# Find last ' package:' line and add 'to:' after it +lines = content.split('\n') +for i in range(len(lines) - 1, -1, -1): + if ' package:' in lines[i]: + lines.insert(i + 1, ' to: ') + break +with open(manifest_path, 'w') as f: + f.write('\n'.join(lines) + '\n') +" +``` + +Present delivery confirmation: + +``` +╭─ 🐿️ delivered +│ +│ Package pushed to relay: +│ - benflint/walnut-relay/inbox/benflint/.walnut (RSA-encrypted) +│ +│ They'll see a notification at next session start (alive-relay-check hook) +│ or can pull manually with /alive:receive --relay. +╰─ +``` + +If push failed for any peer: + +``` +╭─ 🐿️ relay push failed +│ +│ Could not push to benflint/walnut-relay: +│ +│ +│ The unencrypted package is still at: +│ ~/Desktop/.walnut +│ +│ You can send it manually or retry with /alive:share. +╰─ +``` + +--- + +## Account Routing + +Apply platform routing from platforms.md. For GitHub API calls in relay push: + +```bash +GH_TOKEN=$(gh auth token --user ) gh api ... +``` + +The `github_username` in relay.json determines which account to use. + +--- + +## Error Handling + +| Error | Message | +|---|---| +| No walnut loaded | "No walnut loaded. Open one with /alive:load first." | +| No bundles found | "No bundles in this walnut. Create one with /alive:bundle first." | +| Bundle not found | "Bundle '' not found. Available: ." | +| alive-p2p.py fails | Show stderr, suggest checking the walnut structure | +| Encryption fails (LibreSSL) | "OpenSSL doesn't support PBKDF2. Upgrade LibreSSL to >= 3.1 or install OpenSSL >= 1.1.1." | +| Relay push 403 | "Permission denied on . Check collaborator access with /alive:relay status." | +| Relay push 422 | "File too large for GitHub Contents API (~50 MB limit). Use manual transfer instead." | +| Package > 50 MB for relay | "Package exceeds GitHub API limit. Created locally at ~/Desktop/ -- send manually." | +| Peer key missing | "No public key cached for . 
They may need to accept the relay invitation first." | +| state.json missing | Run probe first: `python3 "${CLAUDE_PLUGIN_ROOT}/scripts/relay-probe.py" --config "$HOME/.alive/relay/relay.json" --state "$HOME/.alive/relay/state.json"` | + +--- + +## Confirmation Gate Rules + +Every external action MUST have a confirmation prompt. The external guard hook only catches `mcp__` tools, not Bash. This skill is the gate. + +**Requires confirmation:** relay push (Step 9e). This is the only external write action in the share flow. + +**No confirmation needed:** reading config/state files, local package creation, local encryption, writing to bundle manifest, file counting, size estimation. + +Pattern: present what will happen, numbered options, wait for choice. Never fire-and-forget on external actions. From dc267f369ac9b84920e7b7bb061df975322819aa Mon Sep 17 00:00:00 2001 From: Patrick Brosnan Date: Wed, 1 Apr 2026 15:14:22 +1100 Subject: [PATCH 09/10] feat(alive): wire P2P integration into bundle and capture skills - Verify hooks.json: alive-relay-check.sh correctly registered in SessionStart:startup - Update bundle/SKILL.md: add /alive:share reference for P2P packaging in Share section - Update capture-context/SKILL.md: detect .walnut files in 03_Inputs/ inbox scan, suggest /alive:receive Task: fn-5-dof.9 --- plugins/alive/skills/bundle/SKILL.md | 11 ++++++++ plugins/alive/skills/capture-context/SKILL.md | 25 ++++++++++++++++--- 2 files changed, 33 insertions(+), 3 deletions(-) diff --git a/plugins/alive/skills/bundle/SKILL.md b/plugins/alive/skills/bundle/SKILL.md index b5f5f73..2a2f16e 100644 --- a/plugins/alive/skills/bundle/SKILL.md +++ b/plugins/alive/skills/bundle/SKILL.md @@ -135,6 +135,17 @@ shared: ╰─ ``` +#### P2P Sharing via /alive:share + +For packaging a bundle as a portable `.walnut` file (P2P transfer via AirDrop, USB, email attachment, or relay), use `/alive:share` with `--scope bundle`. 
This handles: + +- Packaging the bundle contents into a `.walnut` archive +- Sensitivity gating and optional passphrase encryption +- Manifest SHA-256 checksums for integrity verification +- Relay push for automatic delivery (if relay configured) + +`/alive:share` automatically updates the `shared:` field in `context.manifest.yaml` with encryption status and relay provenance, so manual tracking is not needed for P2P shares. + #### Publishing to walnut.world When the human wants to publish a bundle to walnut.world: diff --git a/plugins/alive/skills/capture-context/SKILL.md b/plugins/alive/skills/capture-context/SKILL.md index a137ad3..84a72c7 100644 --- a/plugins/alive/skills/capture-context/SKILL.md +++ b/plugins/alive/skills/capture-context/SKILL.md @@ -135,9 +135,28 @@ If empty: "Nothing to capture, and your inbox is clear." If items exist, enter inbox scan: 1. **List** — scan `03_Inputs/` for non-system files (exclude `.DS_Store`, `.gitkeep`). Present numbered list with detected type and filename. -2. **Process** — for each selected item: read the file, suggest destination walnut + bundle, rename garbage filenames per conventions, present for confirmation or redirect. -3. **Capture** — route raw to the chosen bundle's `raw/`, update bundle `context.manifest.yaml` `sources:`, stash insights/tasks, remove original from `03_Inputs/`. If no bundle fits, create one or use `bundles/_inbox/`. -4. **Continue or stop** — after each item: "N remaining. Next, or done for now?" Partial clearing is fine. +2. **Detect .walnut packages** — if any files have a `.walnut` extension, these are P2P sharing packages (not normal captures). Surface them separately and suggest `/alive:receive` instead of normal capture: + +``` +╭─ 🐿️ inbox (4 items) +│ +│ P2P packages (use /alive:receive): +│ 1. shielding-review.walnut .walnut package +│ +│ Regular files: +│ 2. quarterly-report.pdf document +│ 3. IMG_4892.jpg screenshot +│ 4. 
meeting-notes-kai.txt transcript +│ +│ .walnut files need /alive:receive for proper import +│ (checksums, decryption, manifest validation). +│ Pick regular files to capture, or run /alive:receive first. +╰─ +``` + +3. **Process** — for each selected regular item: read the file, suggest destination walnut + bundle, rename garbage filenames per conventions, present for confirmation or redirect. +4. **Capture** — route raw to the chosen bundle's `raw/`, update bundle `context.manifest.yaml` `sources:`, stash insights/tasks, remove original from `03_Inputs/`. If no bundle fits, create one or use `bundles/_inbox/`. +5. **Continue or stop** — after each item: "N remaining. Next, or done for now?" Partial clearing is fine. ``` ╭─ 🐿️ inbox (3 items) From fb7718b694e4b2b0e956704bff6d99fdae863157 Mon Sep 17 00:00:00 2001 From: Patrick Brosnan Date: Wed, 1 Apr 2026 18:08:04 +1100 Subject: [PATCH 10/10] fix: relay inbox scanning looks at wrong directory Relay packages are stored in inbox// on the receiver's relay, but three components all scanned inbox// -- which is always empty. Discovered during live P2P testing between two accounts. 
- relay-probe.py: count_pending_packages() now scans all inbox// directories, skipping inbox// - receive/SKILL.md: relay pull scan loops over peer dirs, cleanup uses correct sender path - relay/SKILL.md: sparse checkout changed from inbox/ to inbox/ so all peer inbox directories are visible in the clone Co-Authored-By: Claude Opus 4.6 (1M context) --- plugins/alive/scripts/relay-probe.py | 23 +++++++++++++++-------- plugins/alive/skills/receive/SKILL.md | 13 +++++++++---- plugins/alive/skills/relay/SKILL.md | 2 +- 3 files changed, 25 insertions(+), 13 deletions(-) diff --git a/plugins/alive/scripts/relay-probe.py b/plugins/alive/scripts/relay-probe.py index 7d1c5d7..76fbdd5 100644 --- a/plugins/alive/scripts/relay-probe.py +++ b/plugins/alive/scripts/relay-probe.py @@ -93,20 +93,27 @@ def probe_relay_commit(repo): # --------------------------------------------------------------------------- def count_pending_packages(clone_dir, username): - """Count .walnut files in inbox/{username}/ of the local clone. + """Count .walnut files in peer inbox directories of the local clone. - The clone is a sparse checkout that only includes inbox/{username}/ - and keys/. Returns 0 if the directory doesn't exist. + Packages from peers are stored in inbox// on your relay. + We scan all inbox// directories, skipping inbox//. + Returns 0 if no packages found. 
""" - inbox_dir = os.path.join(clone_dir, 'inbox', username) - if not os.path.isdir(inbox_dir): + inbox_root = os.path.join(clone_dir, 'inbox') + if not os.path.isdir(inbox_root): return 0 count = 0 try: - for entry in os.listdir(inbox_dir): - if entry.endswith('.walnut'): - count += 1 + for peer_dir in os.listdir(inbox_root): + if peer_dir == username: + continue # Skip own directory + peer_path = os.path.join(inbox_root, peer_dir) + if not os.path.isdir(peer_path): + continue + for entry in os.listdir(peer_path): + if entry.endswith('.walnut'): + count += 1 except OSError: pass return count diff --git a/plugins/alive/skills/receive/SKILL.md b/plugins/alive/skills/receive/SKILL.md index 68d3996..8ede969 100644 --- a/plugins/alive/skills/receive/SKILL.md +++ b/plugins/alive/skills/receive/SKILL.md @@ -64,10 +64,15 @@ Or triggered from session-start notification ("You have N walnut package(s) wait GITHUB_USER=$(python3 -c "import json; print(json.load(open('$HOME/.alive/relay/relay.json'))['github_username'])") CLONE_DIR="$HOME/.alive/relay/clone" cd "$CLONE_DIR" && git pull origin main --quiet -ls "$CLONE_DIR/inbox/${GITHUB_USER}/"*.walnut 2>/dev/null +# Scan peer inbox directories -- packages are stored in inbox// +for PEER_DIR in "$CLONE_DIR"/inbox/*/; do + PEER_NAME=$(basename "$PEER_DIR") + [ "$PEER_NAME" = "$GITHUB_USER" ] && continue + ls "$PEER_DIR"*.walnut 2>/dev/null +done ``` -5. If packages found, list them: +5. 
If packages found, list them (include which peer sent each): ``` ╭─ 🐿️ relay inbox @@ -87,8 +92,8 @@ ls "$CLONE_DIR/inbox/${GITHUB_USER}/"*.walnut 2>/dev/null ```bash cd "$CLONE_DIR" -rm "inbox/${GITHUB_USER}/.walnut" -git add -A && git commit -m "Received: " && git push origin main +rm "inbox//.walnut" +git add -A && git commit -m "Received: from " && git push origin main ``` This is an external action (git push) -- confirm before pushing: diff --git a/plugins/alive/skills/relay/SKILL.md b/plugins/alive/skills/relay/SKILL.md index 624a6c9..80fcd77 100644 --- a/plugins/alive/skills/relay/SKILL.md +++ b/plugins/alive/skills/relay/SKILL.md @@ -154,7 +154,7 @@ git clone --filter=blob:none --no-checkout \ "https://github.com/${GITHUB_USER}/walnut-relay.git" "$CLONE_DIR" cd "$CLONE_DIR" git sparse-checkout init --cone -git sparse-checkout set "inbox/${GITHUB_USER}" "keys" +git sparse-checkout set "inbox" "keys" git checkout main ```