Skills engine v0.1 + multi-channel infrastructure (#307)

* refactor: multi-channel infrastructure with explicit channel/is_group tracking

- Add channels[] array and findChannel() routing in index.ts, replacing
  hardcoded whatsapp.* calls with channel-agnostic callbacks
- Add channel TEXT and is_group INTEGER columns to chats table with
  COALESCE upsert to protect existing values from null overwrites
- is_group defaults to 0 (safe: unknown chats excluded from groups)
- WhatsApp passes explicit channel='whatsapp' and isGroup to onChatMetadata
- getAvailableGroups filters on is_group instead of JID pattern matching
- findChannel logs warnings instead of silently dropping unroutable JIDs
- Migration backfills channel/is_group from JID patterns for existing DBs

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>

* feat: skills engine v0.1 — deterministic skill packages with rerere resolution

Three-way merge engine for applying skill packages on top of a core
codebase. Skills declare which files they add/modify, and the engine
uses git merge-file for conflict detection with git rerere for
automatic resolution of previously-seen conflicts.

Key components:
- apply: three-way merge with backup/rollback safety net
- replay: clean-slate replay for uninstall and rebase
- update: core version updates with deletion detection
- rebase: bake applied skills into base (one-way)
- manifest: validation with path traversal protection
- resolution-cache: pre-computed rerere resolutions
- structured: npm deps, env vars, docker-compose merging
- CI: per-skill test matrix with conflict detection

151 unit tests covering merge, rerere, backup, replay, uninstall,
update, rebase, structured ops, and edge cases.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>

* feat: add Discord and Telegram skill packages

Skill packages for adding Discord and Telegram channels to NanoClaw.
Each package includes:
- Channel implementation (add/src/channels/)
- Three-way merge targets for index.ts, config.ts, routing.test.ts
- Intent docs explaining merge invariants
- Standalone integration tests
- manifest.yaml with dependency/conflict declarations

Applied via: npx tsx scripts/apply-skill.ts .claude/skills/add-discord
These are inert until applied — no runtime impact.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>

* remove unused docs (skills-system-status, implementation-guide)

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>

---------

Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
gavrielc
2026-02-19 01:55:00 +02:00
committed by GitHub
parent a689f8b3fa
commit 51788de3b9
83 changed files with 13159 additions and 626 deletions

View File

@@ -0,0 +1,92 @@
import fs from 'fs';
import path from 'path';
import { afterEach, beforeEach, describe, expect, it } from 'vitest';
import { applySkill } from '../apply.js';
import {
cleanup,
createMinimalState,
createSkillPackage,
createTempDir,
initGitRepo,
setupNanoclawDir,
} from './test-helpers.js';
// Integration-style tests for applySkill(): engine-version gating,
// post_apply hook execution on success, and rollback when a post_apply
// command fails.
describe('apply', () => {
  let tmpDir: string;
  // Tests chdir into the temp workspace; capture the starting cwd so
  // afterEach can restore it before the workspace is deleted.
  const originalCwd = process.cwd();
  beforeEach(() => {
    tmpDir = createTempDir();
    setupNanoclawDir(tmpDir); // scaffold the .nanoclaw/ state directory
    createMinimalState(tmpDir); // seed a minimal skills state file
    initGitRepo(tmpDir); // applySkill uses git machinery for merging
    process.chdir(tmpDir);
  });
  afterEach(() => {
    process.chdir(originalCwd);
    cleanup(tmpDir);
  });
  it('rejects when min_skills_system_version is too high', async () => {
    const skillDir = createSkillPackage(tmpDir, {
      skill: 'future-skill',
      version: '1.0.0',
      core_version: '1.0.0',
      adds: [],
      modifies: [],
      // Far above any real engine version, so the pre-flight check must fail.
      min_skills_system_version: '99.0.0',
    });
    const result = await applySkill(skillDir);
    expect(result.success).toBe(false);
    // The error message should surface the offending required version.
    expect(result.error).toContain('99.0.0');
  });
  it('executes post_apply commands on success', async () => {
    const markerFile = path.join(tmpDir, 'post-apply-marker.txt');
    const skillDir = createSkillPackage(tmpDir, {
      skill: 'post-test',
      version: '1.0.0',
      core_version: '1.0.0',
      adds: ['src/newfile.ts'],
      modifies: [],
      addFiles: { 'src/newfile.ts': 'export const x = 1;' },
      // Shell command run after a successful apply; writes a marker file
      // we can check for afterwards.
      post_apply: [`echo "applied" > "${markerFile}"`],
    });
    const result = await applySkill(skillDir);
    expect(result.success).toBe(true);
    expect(fs.existsSync(markerFile)).toBe(true);
    expect(fs.readFileSync(markerFile, 'utf-8').trim()).toBe('applied');
  });
  it('rolls back on post_apply failure', async () => {
    fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
    const existingFile = path.join(tmpDir, 'src/existing.ts');
    fs.writeFileSync(existingFile, 'original content');
    // Set up base for the modified file
    const baseDir = path.join(tmpDir, '.nanoclaw', 'base', 'src');
    fs.mkdirSync(baseDir, { recursive: true });
    fs.writeFileSync(path.join(baseDir, 'existing.ts'), 'original content');
    const skillDir = createSkillPackage(tmpDir, {
      skill: 'bad-post',
      version: '1.0.0',
      core_version: '1.0.0',
      adds: ['src/added.ts'],
      modifies: [],
      addFiles: { 'src/added.ts': 'new file' },
      post_apply: ['false'], // always fails
    });
    const result = await applySkill(skillDir);
    expect(result.success).toBe(false);
    expect(result.error).toContain('post_apply');
    // Added file should be cleaned up
    expect(fs.existsSync(path.join(tmpDir, 'src/added.ts'))).toBe(false);
  });
});

View File

@@ -0,0 +1,77 @@
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import fs from 'fs';
import path from 'path';
import { createBackup, restoreBackup, clearBackup } from '../backup.js';
import { createTempDir, setupNanoclawDir, cleanup } from './test-helpers.js';
// Exercises the backup lifecycle: snapshot, restore, tombstone markers for
// files that did not exist at backup time, and cleanup of the backup dir.
describe('backup', () => {
  let sandbox: string;
  const startDir = process.cwd();
  beforeEach(() => {
    sandbox = createTempDir();
    setupNanoclawDir(sandbox);
    process.chdir(sandbox);
  });
  afterEach(() => {
    process.chdir(startDir);
    cleanup(sandbox);
  });
  it('createBackup copies files and restoreBackup puts them back', () => {
    const appFile = path.join(sandbox, 'src', 'app.ts');
    fs.mkdirSync(path.dirname(appFile), { recursive: true });
    fs.writeFileSync(appFile, 'original content');
    createBackup(['src/app.ts']);
    // Overwrite the tracked file, then verify restore undoes the change.
    fs.writeFileSync(appFile, 'modified content');
    expect(fs.readFileSync(appFile, 'utf-8')).toBe('modified content');
    restoreBackup();
    expect(fs.readFileSync(appFile, 'utf-8')).toBe('original content');
  });
  it('createBackup skips missing files without error', () => {
    expect(() => createBackup(['does-not-exist.ts'])).not.toThrow();
  });
  it('clearBackup removes backup directory', () => {
    const appFile = path.join(sandbox, 'src', 'app.ts');
    fs.mkdirSync(path.dirname(appFile), { recursive: true });
    fs.writeFileSync(appFile, 'content');
    createBackup(['src/app.ts']);
    const backupRoot = path.join(sandbox, '.nanoclaw', 'backup');
    expect(fs.existsSync(backupRoot)).toBe(true);
    clearBackup();
    expect(fs.existsSync(backupRoot)).toBe(false);
  });
  it('createBackup writes tombstone for non-existent files', () => {
    createBackup(['src/newfile.ts']);
    const marker = path.join(sandbox, '.nanoclaw', 'backup', 'src', 'newfile.ts.tombstone');
    expect(fs.existsSync(marker)).toBe(true);
  });
  it('restoreBackup deletes files with tombstone markers', () => {
    // Create backup first — file doesn't exist yet, so tombstone is written
    createBackup(['src/added.ts']);
    // Now the file gets created (simulating skill apply)
    const addedFile = path.join(sandbox, 'src', 'added.ts');
    fs.mkdirSync(path.dirname(addedFile), { recursive: true });
    fs.writeFileSync(addedFile, 'new content');
    expect(fs.existsSync(addedFile)).toBe(true);
    // Restore should delete the file (tombstone means it didn't exist before)
    restoreBackup();
    expect(fs.existsSync(addedFile)).toBe(false);
  });
  it('restoreBackup is no-op when backup dir is empty or missing', () => {
    clearBackup();
    expect(() => restoreBackup()).not.toThrow();
  });
});

View File

@@ -0,0 +1,270 @@
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import fs from 'fs';
import path from 'path';
import { stringify } from 'yaml';
import {
computeOverlapMatrix,
extractOverlapInfo,
generateMatrix,
type SkillOverlapInfo,
} from '../../scripts/generate-ci-matrix.js';
import { SkillManifest } from '../types.js';
import { createTempDir, cleanup } from './test-helpers.js';
/**
 * Build a complete SkillManifest for tests: sensible defaults for every
 * required field, overridden by whatever the caller supplies. `skill` is
 * mandatory because a manifest has no meaningful default name.
 */
function makeManifest(overrides: Partial<SkillManifest> & { skill: string }): SkillManifest {
  const defaults = {
    version: '1.0.0',
    description: 'Test skill',
    core_version: '1.0.0',
    adds: [],
    modifies: [],
    conflicts: [],
    depends: [],
  };
  return { ...defaults, ...overrides };
}
// Tests for the CI matrix generator: pairwise overlap detection between
// skills (shared modified files or shared npm packages), extraction of
// overlap-relevant info from a manifest, and end-to-end generation from
// manifest.yaml files on disk.
describe('ci-matrix', () => {
  describe('computeOverlapMatrix', () => {
    it('detects overlap from shared modifies entries', () => {
      const skills: SkillOverlapInfo[] = [
        { name: 'telegram', modifies: ['src/config.ts', 'src/index.ts'], npmDependencies: [] },
        { name: 'discord', modifies: ['src/config.ts', 'src/router.ts'], npmDependencies: [] },
      ];
      const matrix = computeOverlapMatrix(skills);
      expect(matrix).toHaveLength(1);
      expect(matrix[0].skills).toEqual(['telegram', 'discord']);
      // The reason string names both the cause and the specific shared file.
      expect(matrix[0].reason).toContain('shared modifies');
      expect(matrix[0].reason).toContain('src/config.ts');
    });
    it('returns no entry for non-overlapping skills', () => {
      const skills: SkillOverlapInfo[] = [
        { name: 'telegram', modifies: ['src/telegram.ts'], npmDependencies: ['grammy'] },
        { name: 'discord', modifies: ['src/discord.ts'], npmDependencies: ['discord.js'] },
      ];
      const matrix = computeOverlapMatrix(skills);
      expect(matrix).toHaveLength(0);
    });
    it('detects overlap from shared npm dependencies', () => {
      const skills: SkillOverlapInfo[] = [
        { name: 'skill-a', modifies: ['src/a.ts'], npmDependencies: ['lodash', 'zod'] },
        { name: 'skill-b', modifies: ['src/b.ts'], npmDependencies: ['zod', 'express'] },
      ];
      const matrix = computeOverlapMatrix(skills);
      expect(matrix).toHaveLength(1);
      expect(matrix[0].skills).toEqual(['skill-a', 'skill-b']);
      expect(matrix[0].reason).toContain('shared npm packages');
      expect(matrix[0].reason).toContain('zod');
    });
    it('reports both modifies and npm overlap in one entry', () => {
      const skills: SkillOverlapInfo[] = [
        { name: 'skill-a', modifies: ['src/config.ts'], npmDependencies: ['zod'] },
        { name: 'skill-b', modifies: ['src/config.ts'], npmDependencies: ['zod'] },
      ];
      const matrix = computeOverlapMatrix(skills);
      // One matrix entry per overlapping pair, even with multiple causes.
      expect(matrix).toHaveLength(1);
      expect(matrix[0].reason).toContain('shared modifies');
      expect(matrix[0].reason).toContain('shared npm packages');
    });
    it('handles three skills with pairwise overlaps', () => {
      const skills: SkillOverlapInfo[] = [
        { name: 'a', modifies: ['src/config.ts'], npmDependencies: [] },
        { name: 'b', modifies: ['src/config.ts', 'src/router.ts'], npmDependencies: [] },
        { name: 'c', modifies: ['src/router.ts'], npmDependencies: [] },
      ];
      const matrix = computeOverlapMatrix(skills);
      // a-b overlap on config.ts, b-c overlap on router.ts, a-c no overlap
      expect(matrix).toHaveLength(2);
      expect(matrix[0].skills).toEqual(['a', 'b']);
      expect(matrix[1].skills).toEqual(['b', 'c']);
    });
    it('returns empty array for single skill', () => {
      const skills: SkillOverlapInfo[] = [
        { name: 'only', modifies: ['src/config.ts'], npmDependencies: ['zod'] },
      ];
      const matrix = computeOverlapMatrix(skills);
      expect(matrix).toHaveLength(0);
    });
    it('returns empty array for no skills', () => {
      const matrix = computeOverlapMatrix([]);
      expect(matrix).toHaveLength(0);
    });
  });
  describe('extractOverlapInfo', () => {
    it('extracts modifies and npm dependencies using dirName', () => {
      const manifest = makeManifest({
        skill: 'telegram',
        modifies: ['src/config.ts'],
        structured: {
          npm_dependencies: { grammy: '^1.0.0', zod: '^3.0.0' },
        },
      });
      const info = extractOverlapInfo(manifest, 'add-telegram');
      // The directory name (second arg), not manifest.skill, identifies the skill.
      expect(info.name).toBe('add-telegram');
      expect(info.modifies).toEqual(['src/config.ts']);
      expect(info.npmDependencies).toEqual(['grammy', 'zod']);
    });
    it('handles manifest without structured field', () => {
      const manifest = makeManifest({
        skill: 'simple',
        modifies: ['src/index.ts'],
      });
      const info = extractOverlapInfo(manifest, 'add-simple');
      expect(info.npmDependencies).toEqual([]);
    });
    it('handles structured without npm_dependencies', () => {
      const manifest = makeManifest({
        skill: 'env-only',
        modifies: [],
        structured: {
          env_additions: ['MY_VAR'],
        },
      });
      const info = extractOverlapInfo(manifest, 'add-env-only');
      expect(info.npmDependencies).toEqual([]);
    });
  });
  describe('generateMatrix with real filesystem', () => {
    let tmpDir: string;
    beforeEach(() => {
      tmpDir = createTempDir();
    });
    afterEach(() => {
      cleanup(tmpDir);
    });
    // Helper: write a manifest.yaml for a named skill under skillsDir.
    function createManifestDir(skillsDir: string, name: string, manifest: Record<string, unknown>): void {
      const dir = path.join(skillsDir, name);
      fs.mkdirSync(dir, { recursive: true });
      fs.writeFileSync(path.join(dir, 'manifest.yaml'), stringify(manifest));
    }
    it('reads manifests from disk and finds overlaps', () => {
      const skillsDir = path.join(tmpDir, '.claude', 'skills');
      createManifestDir(skillsDir, 'telegram', {
        skill: 'telegram',
        version: '1.0.0',
        core_version: '1.0.0',
        adds: ['src/telegram.ts'],
        modifies: ['src/config.ts', 'src/index.ts'],
        conflicts: [],
        depends: [],
      });
      createManifestDir(skillsDir, 'discord', {
        skill: 'discord',
        version: '1.0.0',
        core_version: '1.0.0',
        adds: ['src/discord.ts'],
        modifies: ['src/config.ts', 'src/index.ts'],
        conflicts: [],
        depends: [],
      });
      const matrix = generateMatrix(skillsDir);
      expect(matrix).toHaveLength(1);
      expect(matrix[0].skills).toContain('telegram');
      expect(matrix[0].skills).toContain('discord');
    });
    it('returns empty matrix when skills dir does not exist', () => {
      const matrix = generateMatrix(path.join(tmpDir, 'nonexistent'));
      expect(matrix).toHaveLength(0);
    });
    it('returns empty matrix for non-overlapping skills on disk', () => {
      const skillsDir = path.join(tmpDir, '.claude', 'skills');
      createManifestDir(skillsDir, 'alpha', {
        skill: 'alpha',
        version: '1.0.0',
        core_version: '1.0.0',
        adds: ['src/alpha.ts'],
        modifies: ['src/alpha-config.ts'],
        conflicts: [],
        depends: [],
      });
      createManifestDir(skillsDir, 'beta', {
        skill: 'beta',
        version: '1.0.0',
        core_version: '1.0.0',
        adds: ['src/beta.ts'],
        modifies: ['src/beta-config.ts'],
        conflicts: [],
        depends: [],
      });
      const matrix = generateMatrix(skillsDir);
      expect(matrix).toHaveLength(0);
    });
    it('detects structured npm overlap from disk manifests', () => {
      const skillsDir = path.join(tmpDir, '.claude', 'skills');
      createManifestDir(skillsDir, 'skill-x', {
        skill: 'skill-x',
        version: '1.0.0',
        core_version: '1.0.0',
        adds: [],
        modifies: ['src/x.ts'],
        conflicts: [],
        depends: [],
        structured: {
          npm_dependencies: { lodash: '^4.0.0' },
        },
      });
      createManifestDir(skillsDir, 'skill-y', {
        skill: 'skill-y',
        version: '1.0.0',
        core_version: '1.0.0',
        adds: [],
        modifies: ['src/y.ts'],
        conflicts: [],
        depends: [],
        structured: {
          npm_dependencies: { lodash: '^4.1.0' },
        },
      });
      // Overlap is on the package name; differing version ranges still count.
      const matrix = generateMatrix(skillsDir);
      expect(matrix).toHaveLength(1);
      expect(matrix[0].reason).toContain('lodash');
    });
  });
});

View File

@@ -0,0 +1,43 @@
import { describe, it, expect } from 'vitest';
import {
NANOCLAW_DIR,
STATE_FILE,
BASE_DIR,
BACKUP_DIR,
LOCK_FILE,
CUSTOM_DIR,
RESOLUTIONS_DIR,
SKILLS_SCHEMA_VERSION,
} from '../constants.js';
// Smoke tests over the exported constants: every one must be a usable
// non-empty string, and the path-like ones must live under .nanoclaw/.
describe('constants', () => {
  const allConstants = {
    NANOCLAW_DIR,
    STATE_FILE,
    BASE_DIR,
    BACKUP_DIR,
    LOCK_FILE,
    CUSTOM_DIR,
    RESOLUTIONS_DIR,
    SKILLS_SCHEMA_VERSION,
  };
  it('all constants are non-empty strings', () => {
    Object.entries(allConstants).forEach(([key, val]) => {
      expect(val, `${key} should be a non-empty string`).toBeTruthy();
      expect(typeof val, `${key} should be a string`).toBe('string');
    });
  });
  it('path constants use forward slashes and .nanoclaw prefix', () => {
    const pathLike = [BASE_DIR, BACKUP_DIR, LOCK_FILE, CUSTOM_DIR, RESOLUTIONS_DIR];
    pathLike.forEach((entry) => {
      expect(entry).not.toContain('\\');
      expect(entry).toMatch(/^\.nanoclaw\//);
    });
  });
  it('NANOCLAW_DIR is .nanoclaw', () => {
    expect(NANOCLAW_DIR).toBe('.nanoclaw');
  });
});

View File

@@ -0,0 +1,136 @@
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import fs from 'fs';
import path from 'path';
import {
isCustomizeActive,
startCustomize,
commitCustomize,
abortCustomize,
} from '../customize.js';
import { CUSTOM_DIR } from '../constants.js';
import {
createTempDir,
setupNanoclawDir,
createMinimalState,
cleanup,
writeState,
} from './test-helpers.js';
import { readState, recordSkillApplication, computeFileHash } from '../state.js';
// Tests for the customize workflow: start → (modify tracked files) →
// commit/abort. Every test needs at least one applied skill with recorded
// file hashes, because startCustomize snapshots the tracked files.
describe('customize', () => {
  let tmpDir: string;
  // Restore the original cwd after each test before the workspace is deleted.
  const originalCwd = process.cwd();
  beforeEach(() => {
    tmpDir = createTempDir();
    setupNanoclawDir(tmpDir);
    createMinimalState(tmpDir);
    fs.mkdirSync(path.join(tmpDir, CUSTOM_DIR), { recursive: true });
    process.chdir(tmpDir);
  });
  afterEach(() => {
    process.chdir(originalCwd);
    cleanup(tmpDir);
  });
  // Shared fixture: write src/app.ts and record it (with its hash) as tracked
  // by an applied skill, giving customize a snapshot base. Returns the
  // absolute path of the tracked file. Extracted because every test below
  // repeated these four lines verbatim.
  function seedTrackedSkill(): string {
    const trackedFile = path.join(tmpDir, 'src', 'app.ts');
    fs.mkdirSync(path.dirname(trackedFile), { recursive: true });
    fs.writeFileSync(trackedFile, 'export const x = 1;');
    recordSkillApplication('test-skill', '1.0.0', {
      'src/app.ts': computeFileHash(trackedFile),
    });
    return trackedFile;
  }
  it('startCustomize creates pending.yaml and isCustomizeActive returns true', () => {
    // Need at least one applied skill with file_hashes for snapshot
    seedTrackedSkill();
    expect(isCustomizeActive()).toBe(false);
    startCustomize('test customization');
    expect(isCustomizeActive()).toBe(true);
    const pendingPath = path.join(tmpDir, CUSTOM_DIR, 'pending.yaml');
    expect(fs.existsSync(pendingPath)).toBe(true);
  });
  it('abortCustomize removes pending.yaml', () => {
    seedTrackedSkill();
    startCustomize('test');
    expect(isCustomizeActive()).toBe(true);
    abortCustomize();
    expect(isCustomizeActive()).toBe(false);
  });
  it('commitCustomize with no changes clears pending', () => {
    seedTrackedSkill();
    startCustomize('no-op');
    commitCustomize();
    expect(isCustomizeActive()).toBe(false);
  });
  it('commitCustomize with changes creates patch and records in state', () => {
    const trackedFile = seedTrackedSkill();
    startCustomize('add feature');
    // Modify the tracked file
    fs.writeFileSync(trackedFile, 'export const x = 2;\nexport const y = 3;');
    commitCustomize();
    expect(isCustomizeActive()).toBe(false);
    const state = readState();
    expect(state.custom_modifications).toBeDefined();
    expect(state.custom_modifications!.length).toBeGreaterThan(0);
    expect(state.custom_modifications![0].description).toBe('add feature');
  });
  it('commitCustomize throws descriptive error on diff failure', () => {
    const trackedFile = seedTrackedSkill();
    startCustomize('diff-error test');
    // Modify the tracked file
    fs.writeFileSync(trackedFile, 'export const x = 2;');
    // Make the base file a directory to cause diff to exit with code 2
    const baseFilePath = path.join(tmpDir, '.nanoclaw', 'base', 'src', 'app.ts');
    fs.mkdirSync(baseFilePath, { recursive: true });
    expect(() => commitCustomize()).toThrow(/diff error/i);
  });
  it('startCustomize while active throws', () => {
    seedTrackedSkill();
    startCustomize('first');
    expect(() => startCustomize('second')).toThrow();
  });
});

View File

@@ -0,0 +1,93 @@
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import fs from 'fs';
import path from 'path';
import { executeFileOps } from '../file-ops.js';
import { createTempDir, cleanup } from './test-helpers.js';
// Behavioral tests for executeFileOps(): rename/move/delete happy paths,
// error reporting (existing target, missing source, path escape), and the
// warning-not-error semantics of deleting a file that is already gone.
describe('file-ops', () => {
  let workDir: string;
  const startCwd = process.cwd();
  beforeEach(() => {
    workDir = createTempDir();
    process.chdir(workDir);
  });
  afterEach(() => {
    process.chdir(startCwd);
    cleanup(workDir);
  });
  it('rename success', () => {
    fs.writeFileSync(path.join(workDir, 'old.ts'), 'content');
    const outcome = executeFileOps([
      { type: 'rename', from: 'old.ts', to: 'new.ts' },
    ], workDir);
    expect(outcome.success).toBe(true);
    expect(fs.existsSync(path.join(workDir, 'new.ts'))).toBe(true);
    expect(fs.existsSync(path.join(workDir, 'old.ts'))).toBe(false);
  });
  it('move success', () => {
    fs.writeFileSync(path.join(workDir, 'file.ts'), 'content');
    const outcome = executeFileOps([
      { type: 'move', from: 'file.ts', to: 'sub/file.ts' },
    ], workDir);
    expect(outcome.success).toBe(true);
    expect(fs.existsSync(path.join(workDir, 'sub', 'file.ts'))).toBe(true);
    expect(fs.existsSync(path.join(workDir, 'file.ts'))).toBe(false);
  });
  it('delete success', () => {
    fs.writeFileSync(path.join(workDir, 'remove-me.ts'), 'content');
    const outcome = executeFileOps([
      { type: 'delete', path: 'remove-me.ts' },
    ], workDir);
    expect(outcome.success).toBe(true);
    expect(fs.existsSync(path.join(workDir, 'remove-me.ts'))).toBe(false);
  });
  it('rename target exists produces error', () => {
    fs.writeFileSync(path.join(workDir, 'a.ts'), 'a');
    fs.writeFileSync(path.join(workDir, 'b.ts'), 'b');
    const outcome = executeFileOps([
      { type: 'rename', from: 'a.ts', to: 'b.ts' },
    ], workDir);
    expect(outcome.success).toBe(false);
    expect(outcome.errors.length).toBeGreaterThan(0);
  });
  it('delete missing file produces warning not error', () => {
    const outcome = executeFileOps([
      { type: 'delete', path: 'nonexistent.ts' },
    ], workDir);
    expect(outcome.success).toBe(true);
    expect(outcome.warnings.length).toBeGreaterThan(0);
  });
  it('move creates destination directory', () => {
    fs.writeFileSync(path.join(workDir, 'src.ts'), 'content');
    const outcome = executeFileOps([
      { type: 'move', from: 'src.ts', to: 'deep/nested/dir/src.ts' },
    ], workDir);
    expect(outcome.success).toBe(true);
    expect(fs.existsSync(path.join(workDir, 'deep', 'nested', 'dir', 'src.ts'))).toBe(true);
  });
  it('path escape produces error', () => {
    fs.writeFileSync(path.join(workDir, 'file.ts'), 'content');
    const outcome = executeFileOps([
      { type: 'rename', from: 'file.ts', to: '../../escaped.ts' },
    ], workDir);
    expect(outcome.success).toBe(false);
    expect(outcome.errors.length).toBeGreaterThan(0);
  });
  it('source missing produces error for rename', () => {
    const outcome = executeFileOps([
      { type: 'rename', from: 'missing.ts', to: 'new.ts' },
    ], workDir);
    expect(outcome.success).toBe(false);
    expect(outcome.errors.length).toBeGreaterThan(0);
  });
});

View File

@@ -0,0 +1,60 @@
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import fs from 'fs';
import path from 'path';
import { acquireLock, releaseLock, isLocked } from '../lock.js';
import { LOCK_FILE } from '../constants.js';
import { createTempDir, cleanup } from './test-helpers.js';
// Lock lifecycle tests: acquire/release pairing, the returned release
// callback, and the isLocked() probe.
describe('lock', () => {
  let workDir: string;
  const startCwd = process.cwd();
  beforeEach(() => {
    workDir = createTempDir();
    fs.mkdirSync(path.join(workDir, '.nanoclaw'), { recursive: true });
    process.chdir(workDir);
  });
  afterEach(() => {
    process.chdir(startCwd);
    cleanup(workDir);
  });
  it('acquireLock returns a release function', () => {
    const unlock = acquireLock();
    expect(typeof unlock).toBe('function');
    expect(fs.existsSync(path.join(workDir, LOCK_FILE))).toBe(true);
    unlock();
  });
  it('releaseLock removes the lock file', () => {
    acquireLock();
    expect(fs.existsSync(path.join(workDir, LOCK_FILE))).toBe(true);
    releaseLock();
    expect(fs.existsSync(path.join(workDir, LOCK_FILE))).toBe(false);
  });
  it('acquire after release succeeds', () => {
    const firstUnlock = acquireLock();
    firstUnlock();
    const secondUnlock = acquireLock();
    expect(typeof secondUnlock).toBe('function');
    secondUnlock();
  });
  it('isLocked returns true when locked', () => {
    const unlock = acquireLock();
    expect(isLocked()).toBe(true);
    unlock();
  });
  it('isLocked returns false when released', () => {
    const unlock = acquireLock();
    unlock();
    expect(isLocked()).toBe(false);
  });
  it('isLocked returns false when no lock exists', () => {
    expect(isLocked()).toBe(false);
  });
});

View File

@@ -0,0 +1,298 @@
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import fs from 'fs';
import path from 'path';
import { stringify } from 'yaml';
import {
readManifest,
checkCoreVersion,
checkDependencies,
checkConflicts,
checkSystemVersion,
} from '../manifest.js';
import {
createTempDir,
setupNanoclawDir,
createMinimalState,
createSkillPackage,
cleanup,
writeState,
} from './test-helpers.js';
import { recordSkillApplication } from '../state.js';
describe('manifest', () => {
let tmpDir: string;
const originalCwd = process.cwd();
beforeEach(() => {
tmpDir = createTempDir();
setupNanoclawDir(tmpDir);
createMinimalState(tmpDir);
process.chdir(tmpDir);
});
afterEach(() => {
process.chdir(originalCwd);
cleanup(tmpDir);
});
it('parses a valid manifest', () => {
const skillDir = createSkillPackage(tmpDir, {
skill: 'telegram',
version: '2.0.0',
core_version: '1.0.0',
adds: ['src/telegram.ts'],
modifies: ['src/config.ts'],
});
const manifest = readManifest(skillDir);
expect(manifest.skill).toBe('telegram');
expect(manifest.version).toBe('2.0.0');
expect(manifest.adds).toEqual(['src/telegram.ts']);
expect(manifest.modifies).toEqual(['src/config.ts']);
});
it('throws on missing skill field', () => {
const dir = path.join(tmpDir, 'bad-pkg');
fs.mkdirSync(dir, { recursive: true });
fs.writeFileSync(path.join(dir, 'manifest.yaml'), stringify({
version: '1.0.0', core_version: '1.0.0', adds: [], modifies: [],
}));
expect(() => readManifest(dir)).toThrow();
});
it('throws on missing version field', () => {
const dir = path.join(tmpDir, 'bad-pkg');
fs.mkdirSync(dir, { recursive: true });
fs.writeFileSync(path.join(dir, 'manifest.yaml'), stringify({
skill: 'test', core_version: '1.0.0', adds: [], modifies: [],
}));
expect(() => readManifest(dir)).toThrow();
});
it('throws on missing core_version field', () => {
const dir = path.join(tmpDir, 'bad-pkg');
fs.mkdirSync(dir, { recursive: true });
fs.writeFileSync(path.join(dir, 'manifest.yaml'), stringify({
skill: 'test', version: '1.0.0', adds: [], modifies: [],
}));
expect(() => readManifest(dir)).toThrow();
});
it('throws on missing adds field', () => {
const dir = path.join(tmpDir, 'bad-pkg');
fs.mkdirSync(dir, { recursive: true });
fs.writeFileSync(path.join(dir, 'manifest.yaml'), stringify({
skill: 'test', version: '1.0.0', core_version: '1.0.0', modifies: [],
}));
expect(() => readManifest(dir)).toThrow();
});
it('throws on missing modifies field', () => {
const dir = path.join(tmpDir, 'bad-pkg');
fs.mkdirSync(dir, { recursive: true });
fs.writeFileSync(path.join(dir, 'manifest.yaml'), stringify({
skill: 'test', version: '1.0.0', core_version: '1.0.0', adds: [],
}));
expect(() => readManifest(dir)).toThrow();
});
it('throws on path traversal in adds', () => {
const dir = path.join(tmpDir, 'bad-pkg');
fs.mkdirSync(dir, { recursive: true });
fs.writeFileSync(path.join(dir, 'manifest.yaml'), stringify({
skill: 'test', version: '1.0.0', core_version: '1.0.0',
adds: ['../etc/passwd'], modifies: [],
}));
expect(() => readManifest(dir)).toThrow('Invalid path');
});
it('throws on path traversal in modifies', () => {
const dir = path.join(tmpDir, 'bad-pkg');
fs.mkdirSync(dir, { recursive: true });
fs.writeFileSync(path.join(dir, 'manifest.yaml'), stringify({
skill: 'test', version: '1.0.0', core_version: '1.0.0',
adds: [], modifies: ['../../secret.ts'],
}));
expect(() => readManifest(dir)).toThrow('Invalid path');
});
it('throws on absolute path in adds', () => {
const dir = path.join(tmpDir, 'bad-pkg');
fs.mkdirSync(dir, { recursive: true });
fs.writeFileSync(path.join(dir, 'manifest.yaml'), stringify({
skill: 'test', version: '1.0.0', core_version: '1.0.0',
adds: ['/etc/passwd'], modifies: [],
}));
expect(() => readManifest(dir)).toThrow('Invalid path');
});
it('defaults conflicts and depends to empty arrays', () => {
const skillDir = createSkillPackage(tmpDir, {
skill: 'test',
version: '1.0.0',
core_version: '1.0.0',
adds: [],
modifies: [],
});
const manifest = readManifest(skillDir);
expect(manifest.conflicts).toEqual([]);
expect(manifest.depends).toEqual([]);
});
it('checkCoreVersion returns warning when manifest targets newer core', () => {
const skillDir = createSkillPackage(tmpDir, {
skill: 'test',
version: '1.0.0',
core_version: '2.0.0',
adds: [],
modifies: [],
});
const manifest = readManifest(skillDir);
const result = checkCoreVersion(manifest);
expect(result.warning).toBeTruthy();
});
it('checkCoreVersion returns no warning when versions match', () => {
const skillDir = createSkillPackage(tmpDir, {
skill: 'test',
version: '1.0.0',
core_version: '1.0.0',
adds: [],
modifies: [],
});
const manifest = readManifest(skillDir);
const result = checkCoreVersion(manifest);
expect(result.ok).toBe(true);
expect(result.warning).toBeFalsy();
});
it('checkDependencies satisfied when deps present', () => {
recordSkillApplication('dep-skill', '1.0.0', {});
const skillDir = createSkillPackage(tmpDir, {
skill: 'test',
version: '1.0.0',
core_version: '1.0.0',
adds: [],
modifies: [],
depends: ['dep-skill'],
});
const manifest = readManifest(skillDir);
const result = checkDependencies(manifest);
expect(result.ok).toBe(true);
expect(result.missing).toEqual([]);
});
it('checkDependencies missing when deps not present', () => {
const skillDir = createSkillPackage(tmpDir, {
skill: 'test',
version: '1.0.0',
core_version: '1.0.0',
adds: [],
modifies: [],
depends: ['missing-skill'],
});
const manifest = readManifest(skillDir);
const result = checkDependencies(manifest);
expect(result.ok).toBe(false);
expect(result.missing).toContain('missing-skill');
});
it('checkConflicts ok when no conflicts', () => {
const skillDir = createSkillPackage(tmpDir, {
skill: 'test',
version: '1.0.0',
core_version: '1.0.0',
adds: [],
modifies: [],
conflicts: [],
});
const manifest = readManifest(skillDir);
const result = checkConflicts(manifest);
expect(result.ok).toBe(true);
expect(result.conflicting).toEqual([]);
});
it('checkConflicts detects conflicting skill', () => {
recordSkillApplication('bad-skill', '1.0.0', {});
const skillDir = createSkillPackage(tmpDir, {
skill: 'test',
version: '1.0.0',
core_version: '1.0.0',
adds: [],
modifies: [],
conflicts: ['bad-skill'],
});
const manifest = readManifest(skillDir);
const result = checkConflicts(manifest);
expect(result.ok).toBe(false);
expect(result.conflicting).toContain('bad-skill');
});
it('parses new optional fields (author, license, etc)', () => {
const dir = path.join(tmpDir, 'full-pkg');
fs.mkdirSync(dir, { recursive: true });
fs.writeFileSync(path.join(dir, 'manifest.yaml'), stringify({
skill: 'test',
version: '1.0.0',
core_version: '1.0.0',
adds: [],
modifies: [],
author: 'tester',
license: 'MIT',
min_skills_system_version: '0.1.0',
tested_with: ['telegram', 'discord'],
post_apply: ['echo done'],
}));
const manifest = readManifest(dir);
expect(manifest.author).toBe('tester');
expect(manifest.license).toBe('MIT');
expect(manifest.min_skills_system_version).toBe('0.1.0');
expect(manifest.tested_with).toEqual(['telegram', 'discord']);
expect(manifest.post_apply).toEqual(['echo done']);
});
it('checkSystemVersion passes when not set', () => {
const skillDir = createSkillPackage(tmpDir, {
skill: 'test',
version: '1.0.0',
core_version: '1.0.0',
adds: [],
modifies: [],
});
const manifest = readManifest(skillDir);
const result = checkSystemVersion(manifest);
expect(result.ok).toBe(true);
});
it('checkSystemVersion passes when engine is new enough', () => {
const dir = path.join(tmpDir, 'sys-ok');
fs.mkdirSync(dir, { recursive: true });
fs.writeFileSync(path.join(dir, 'manifest.yaml'), stringify({
skill: 'test',
version: '1.0.0',
core_version: '1.0.0',
adds: [],
modifies: [],
min_skills_system_version: '0.1.0',
}));
const manifest = readManifest(dir);
const result = checkSystemVersion(manifest);
expect(result.ok).toBe(true);
});
it('checkSystemVersion fails when engine is too old', () => {
const dir = path.join(tmpDir, 'sys-fail');
fs.mkdirSync(dir, { recursive: true });
fs.writeFileSync(path.join(dir, 'manifest.yaml'), stringify({
skill: 'test',
version: '1.0.0',
core_version: '1.0.0',
adds: [],
modifies: [],
min_skills_system_version: '99.0.0',
}));
const manifest = readManifest(dir);
const result = checkSystemVersion(manifest);
expect(result.ok).toBe(false);
expect(result.error).toContain('99.0.0');
});
});

View File

@@ -0,0 +1,97 @@
import { execSync } from 'child_process';
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import fs from 'fs';
import path from 'path';
import { isGitRepo, mergeFile, setupRerereAdapter } from '../merge.js';
import { createTempDir, initGitRepo, cleanup } from './test-helpers.js';
// Tests for the git-backed merge helpers: repo detection, three-way
// git merge-file behaviour, and the rerere adapter's crash recovery.
describe('merge', () => {
  let tmpDir: string;
  const originalCwd = process.cwd();

  beforeEach(() => {
    tmpDir = createTempDir();
    process.chdir(tmpDir);
  });

  afterEach(() => {
    process.chdir(originalCwd);
    cleanup(tmpDir);
  });

  it('isGitRepo returns true in a git repo', () => {
    initGitRepo(tmpDir);
    expect(isGitRepo()).toBe(true);
  });

  it('isGitRepo returns false outside a git repo', () => {
    expect(isGitRepo()).toBe(false);
  });

  describe('mergeFile', () => {
    beforeEach(() => {
      initGitRepo(tmpDir);
    });

    // Write the three merge inputs (base / current / skill) and return
    // their absolute paths.
    const writeTriplet = (
      baseText: string,
      currentText: string,
      skillText: string,
    ) => {
      const basePath = path.join(tmpDir, 'base.txt');
      const currentPath = path.join(tmpDir, 'current.txt');
      const skillPath = path.join(tmpDir, 'skill.txt');
      fs.writeFileSync(basePath, baseText);
      fs.writeFileSync(currentPath, currentText);
      fs.writeFileSync(skillPath, skillText);
      return { basePath, currentPath, skillPath };
    };

    it('clean merge with no overlapping changes', () => {
      // "Ours" touches line 1, "theirs" touches line 3 — no overlap, so the
      // merge must finish cleanly and keep both edits in the current file.
      const { basePath, currentPath, skillPath } = writeTriplet(
        'line1\nline2\nline3\n',
        'line1-modified\nline2\nline3\n',
        'line1\nline2\nline3-modified\n',
      );
      const outcome = mergeFile(currentPath, basePath, skillPath);
      expect(outcome.clean).toBe(true);
      expect(outcome.exitCode).toBe(0);
      const mergedText = fs.readFileSync(currentPath, 'utf-8');
      expect(mergedText).toContain('line1-modified');
      expect(mergedText).toContain('line3-modified');
    });

    it('setupRerereAdapter cleans stale MERGE_HEAD before proceeding', () => {
      // Simulate a stale MERGE_HEAD left behind by a previous crash.
      const gitDir = execSync('git rev-parse --git-dir', {
        cwd: tmpDir,
        encoding: 'utf-8',
      }).trim();
      const headHash = execSync('git rev-parse HEAD', {
        cwd: tmpDir,
        encoding: 'utf-8',
      }).trim();
      fs.writeFileSync(path.join(gitDir, 'MERGE_HEAD'), `${headHash}\n`);
      fs.writeFileSync(path.join(gitDir, 'MERGE_MSG'), 'stale merge\n');
      // Give the adapter a file to operate on.
      fs.writeFileSync(path.join(tmpDir, 'test.txt'), 'conflicted content');
      // The stale merge state must not make setup throw.
      expect(() =>
        setupRerereAdapter('test.txt', 'base', 'ours', 'theirs'),
      ).not.toThrow();
      // A fresh MERGE_HEAD should have been written by setupRerereAdapter.
      expect(fs.existsSync(path.join(gitDir, 'MERGE_HEAD'))).toBe(true);
    });

    it('conflict with overlapping changes', () => {
      // Both sides rewrite line 1 — git merge-file must report a conflict
      // and leave standard conflict markers in the merged file.
      const { basePath, currentPath, skillPath } = writeTriplet(
        'line1\nline2\nline3\n',
        'line1-ours\nline2\nline3\n',
        'line1-theirs\nline2\nline3\n',
      );
      const outcome = mergeFile(currentPath, basePath, skillPath);
      expect(outcome.clean).toBe(false);
      expect(outcome.exitCode).toBeGreaterThan(0);
      const mergedText = fs.readFileSync(currentPath, 'utf-8');
      expect(mergedText).toContain('<<<<<<<');
      expect(mergedText).toContain('>>>>>>>');
    });
  });
});

View File

@@ -0,0 +1,77 @@
import { afterEach, beforeEach, describe, expect, it } from 'vitest';
import { loadPathRemap, recordPathRemap, resolvePathRemap } from '../path-remap.js';
import {
cleanup,
createMinimalState,
createTempDir,
setupNanoclawDir,
} from './test-helpers.js';
// Tests for the path-remap store: pure lookup (resolvePathRemap) plus
// load/record round-trips against the on-disk state.
describe('path-remap', () => {
  let tmpDir: string;
  const originalCwd = process.cwd();

  beforeEach(() => {
    tmpDir = createTempDir();
    setupNanoclawDir(tmpDir);
    createMinimalState(tmpDir);
    process.chdir(tmpDir);
  });

  afterEach(() => {
    process.chdir(originalCwd);
    cleanup(tmpDir);
  });

  describe('resolvePathRemap', () => {
    it('returns remapped path when entry exists', () => {
      const mapping = { 'src/old.ts': 'src/new.ts' };
      expect(resolvePathRemap('src/old.ts', mapping)).toBe('src/new.ts');
    });

    it('returns original path when no remap entry', () => {
      const mapping = { 'src/old.ts': 'src/new.ts' };
      expect(resolvePathRemap('src/other.ts', mapping)).toBe('src/other.ts');
    });

    it('returns original path when remap is empty', () => {
      expect(resolvePathRemap('src/file.ts', {})).toBe('src/file.ts');
    });
  });

  describe('loadPathRemap', () => {
    it('returns empty object when no remap in state', () => {
      expect(loadPathRemap()).toEqual({});
    });

    it('returns remap from state', () => {
      recordPathRemap({ 'src/a.ts': 'src/b.ts' });
      expect(loadPathRemap()).toEqual({ 'src/a.ts': 'src/b.ts' });
    });
  });

  describe('recordPathRemap', () => {
    it('records new remap entries', () => {
      recordPathRemap({ 'src/old.ts': 'src/new.ts' });
      expect(loadPathRemap()).toEqual({ 'src/old.ts': 'src/new.ts' });
    });

    it('merges with existing remap', () => {
      // Two records with disjoint keys accumulate rather than replace.
      recordPathRemap({ 'src/a.ts': 'src/b.ts' });
      recordPathRemap({ 'src/c.ts': 'src/d.ts' });
      expect(loadPathRemap()).toEqual({
        'src/a.ts': 'src/b.ts',
        'src/c.ts': 'src/d.ts',
      });
    });

    it('overwrites existing key on conflict', () => {
      // Last write wins for a duplicated key.
      recordPathRemap({ 'src/a.ts': 'src/b.ts' });
      recordPathRemap({ 'src/a.ts': 'src/c.ts' });
      expect(loadPathRemap()).toEqual({ 'src/a.ts': 'src/c.ts' });
    });
  });
});

View File

@@ -0,0 +1,434 @@
import fs from 'fs';
import path from 'path';
import { afterEach, beforeEach, describe, expect, it } from 'vitest';
import { parse } from 'yaml';
import { rebase } from '../rebase.js';
import {
cleanup,
createMinimalState,
createTempDir,
initGitRepo,
setupNanoclawDir,
writeState,
} from './test-helpers.js';
// Tests for rebase(): baking applied skills (and custom modifications) into
// the stored base, emitting a single combined.patch, and — when a new base
// directory is supplied — merging skill changes onto the updated core.
// Each test builds .nanoclaw/base + a working tree + state.yaml by hand.
describe('rebase', () => {
  let tmpDir: string;
  const originalCwd = process.cwd();
  beforeEach(() => {
    tmpDir = createTempDir();
    setupNanoclawDir(tmpDir);
    createMinimalState(tmpDir);
    process.chdir(tmpDir);
  });
  afterEach(() => {
    process.chdir(originalCwd);
    cleanup(tmpDir);
  });
  it('rebase with one skill: patch created, state updated, rebased_at set', async () => {
    // Set up base file
    const baseDir = path.join(tmpDir, '.nanoclaw', 'base', 'src');
    fs.mkdirSync(baseDir, { recursive: true });
    fs.writeFileSync(path.join(baseDir, 'index.ts'), 'const x = 1;\n');
    // Set up working tree with skill modification
    fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
    fs.writeFileSync(
      path.join(tmpDir, 'src', 'index.ts'),
      'const x = 1;\nconst y = 2; // added by skill\n',
    );
    // Write state with applied skill (deliberately stale hash 'abc123' so we
    // can verify rebase recomputes it)
    writeState(tmpDir, {
      skills_system_version: '0.1.0',
      core_version: '1.0.0',
      applied_skills: [
        {
          name: 'test-skill',
          version: '1.0.0',
          applied_at: new Date().toISOString(),
          file_hashes: {
            'src/index.ts': 'abc123',
          },
        },
      ],
    });
    initGitRepo(tmpDir);
    const result = await rebase();
    expect(result.success).toBe(true);
    expect(result.filesInPatch).toBeGreaterThan(0);
    expect(result.rebased_at).toBeDefined();
    expect(result.patchFile).toBeDefined();
    // Verify patch file exists
    const patchPath = path.join(tmpDir, '.nanoclaw', 'combined.patch');
    expect(fs.existsSync(patchPath)).toBe(true);
    const patchContent = fs.readFileSync(patchPath, 'utf-8');
    expect(patchContent).toContain('added by skill');
    // Verify state was updated
    const stateContent = fs.readFileSync(
      path.join(tmpDir, '.nanoclaw', 'state.yaml'),
      'utf-8',
    );
    const state = parse(stateContent);
    expect(state.rebased_at).toBeDefined();
    expect(state.applied_skills).toHaveLength(1);
    expect(state.applied_skills[0].name).toBe('test-skill');
    // File hashes should be updated to actual current values
    const currentHash = state.applied_skills[0].file_hashes['src/index.ts'];
    expect(currentHash).toBeDefined();
    expect(currentHash).not.toBe('abc123'); // Should be recomputed
    // Working tree file should still have the skill's changes
    const workingContent = fs.readFileSync(
      path.join(tmpDir, 'src', 'index.ts'),
      'utf-8',
    );
    expect(workingContent).toContain('added by skill');
  });
  it('rebase flattens: base updated to match working tree', async () => {
    // Set up base file (clean core)
    const baseDir = path.join(tmpDir, '.nanoclaw', 'base', 'src');
    fs.mkdirSync(baseDir, { recursive: true });
    fs.writeFileSync(path.join(baseDir, 'index.ts'), 'const x = 1;\n');
    // Working tree has skill modification
    fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
    fs.writeFileSync(
      path.join(tmpDir, 'src', 'index.ts'),
      'const x = 1;\nconst y = 2; // skill\n',
    );
    writeState(tmpDir, {
      skills_system_version: '0.1.0',
      core_version: '1.0.0',
      applied_skills: [
        {
          name: 'my-skill',
          version: '1.0.0',
          applied_at: new Date().toISOString(),
          file_hashes: {
            'src/index.ts': 'oldhash',
          },
        },
      ],
    });
    initGitRepo(tmpDir);
    const result = await rebase();
    expect(result.success).toBe(true);
    // Base should now include the skill's changes (flattened) — i.e. base
    // becomes byte-identical to the working tree after rebase
    const baseContent = fs.readFileSync(
      path.join(tmpDir, '.nanoclaw', 'base', 'src', 'index.ts'),
      'utf-8',
    );
    expect(baseContent).toContain('skill');
    expect(baseContent).toBe('const x = 1;\nconst y = 2; // skill\n');
  });
  it('rebase with multiple skills + custom mods: all collapsed into single patch', async () => {
    // Set up base files
    const baseDir = path.join(tmpDir, '.nanoclaw', 'base');
    fs.mkdirSync(path.join(baseDir, 'src'), { recursive: true });
    fs.writeFileSync(path.join(baseDir, 'src', 'index.ts'), 'const x = 1;\n');
    fs.writeFileSync(
      path.join(baseDir, 'src', 'config.ts'),
      'export const port = 3000;\n',
    );
    // Set up working tree with modifications from multiple skills
    fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
    fs.writeFileSync(
      path.join(tmpDir, 'src', 'index.ts'),
      'const x = 1;\nconst y = 2; // skill-a\n',
    );
    fs.writeFileSync(
      path.join(tmpDir, 'src', 'config.ts'),
      'export const port = 3000;\nexport const host = "0.0.0.0"; // skill-b\n',
    );
    // File added by skill
    fs.writeFileSync(
      path.join(tmpDir, 'src', 'plugin.ts'),
      'export const plugin = true;\n',
    );
    // Write state with multiple skills and custom modifications
    writeState(tmpDir, {
      skills_system_version: '0.1.0',
      core_version: '1.0.0',
      applied_skills: [
        {
          name: 'skill-a',
          version: '1.0.0',
          applied_at: new Date().toISOString(),
          file_hashes: {
            'src/index.ts': 'hash-a1',
          },
        },
        {
          name: 'skill-b',
          version: '2.0.0',
          applied_at: new Date().toISOString(),
          file_hashes: {
            'src/config.ts': 'hash-b1',
            'src/plugin.ts': 'hash-b2',
          },
        },
      ],
      custom_modifications: [
        {
          description: 'tweaked config',
          applied_at: new Date().toISOString(),
          files_modified: ['src/config.ts'],
          patch_file: '.nanoclaw/custom/001-tweaked-config.patch',
        },
      ],
    });
    initGitRepo(tmpDir);
    const result = await rebase();
    expect(result.success).toBe(true);
    expect(result.filesInPatch).toBeGreaterThanOrEqual(2);
    // Verify combined patch includes changes from both skills
    const patchContent = fs.readFileSync(
      path.join(tmpDir, '.nanoclaw', 'combined.patch'),
      'utf-8',
    );
    expect(patchContent).toContain('skill-a');
    expect(patchContent).toContain('skill-b');
    // Verify state: custom_modifications should be cleared
    const stateContent = fs.readFileSync(
      path.join(tmpDir, '.nanoclaw', 'state.yaml'),
      'utf-8',
    );
    const state = parse(stateContent);
    expect(state.custom_modifications).toBeUndefined();
    expect(state.rebased_at).toBeDefined();
    // applied_skills should still be present (informational)
    expect(state.applied_skills).toHaveLength(2);
    // Base should be flattened — include all skill changes
    const baseIndex = fs.readFileSync(
      path.join(tmpDir, '.nanoclaw', 'base', 'src', 'index.ts'),
      'utf-8',
    );
    expect(baseIndex).toContain('skill-a');
    const baseConfig = fs.readFileSync(
      path.join(tmpDir, '.nanoclaw', 'base', 'src', 'config.ts'),
      'utf-8',
    );
    expect(baseConfig).toContain('skill-b');
  });
  it('rebase clears resolution cache', async () => {
    // Set up base + working tree
    const baseDir = path.join(tmpDir, '.nanoclaw', 'base', 'src');
    fs.mkdirSync(baseDir, { recursive: true });
    fs.writeFileSync(path.join(baseDir, 'index.ts'), 'const x = 1;\n');
    fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
    fs.writeFileSync(
      path.join(tmpDir, 'src', 'index.ts'),
      'const x = 1;\n// skill\n',
    );
    // Create a fake resolution cache entry (should not survive a rebase,
    // since resolutions are keyed to the pre-rebase base)
    const resDir = path.join(tmpDir, '.nanoclaw', 'resolutions', 'skill-a+skill-b');
    fs.mkdirSync(resDir, { recursive: true });
    fs.writeFileSync(path.join(resDir, 'meta.yaml'), 'skills: [skill-a, skill-b]\n');
    writeState(tmpDir, {
      skills_system_version: '0.1.0',
      core_version: '1.0.0',
      applied_skills: [
        {
          name: 'my-skill',
          version: '1.0.0',
          applied_at: new Date().toISOString(),
          file_hashes: { 'src/index.ts': 'hash' },
        },
      ],
    });
    initGitRepo(tmpDir);
    const result = await rebase();
    expect(result.success).toBe(true);
    // Resolution cache should be cleared
    const resolutions = fs.readdirSync(
      path.join(tmpDir, '.nanoclaw', 'resolutions'),
    );
    expect(resolutions).toHaveLength(0);
  });
  it('rebase with new base: base updated, changes merged', async () => {
    // Set up current base (multi-line so changes don't conflict)
    const baseDir = path.join(tmpDir, '.nanoclaw', 'base');
    fs.mkdirSync(path.join(baseDir, 'src'), { recursive: true });
    fs.writeFileSync(
      path.join(baseDir, 'src', 'index.ts'),
      'line1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\n',
    );
    // Working tree: skill adds at bottom
    fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
    fs.writeFileSync(
      path.join(tmpDir, 'src', 'index.ts'),
      'line1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nskill change\n',
    );
    writeState(tmpDir, {
      skills_system_version: '0.1.0',
      core_version: '1.0.0',
      applied_skills: [
        {
          name: 'my-skill',
          version: '1.0.0',
          applied_at: new Date().toISOString(),
          file_hashes: {
            'src/index.ts': 'oldhash',
          },
        },
      ],
    });
    initGitRepo(tmpDir);
    // New base: core update at top (far from the skill's bottom edit, so the
    // three-way merge stays clean)
    const newBase = path.join(tmpDir, 'new-core');
    fs.mkdirSync(path.join(newBase, 'src'), { recursive: true });
    fs.writeFileSync(
      path.join(newBase, 'src', 'index.ts'),
      'core v2 header\nline1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\n',
    );
    const result = await rebase(newBase);
    expect(result.success).toBe(true);
    expect(result.patchFile).toBeDefined();
    // Verify base was updated to new core
    const baseContent = fs.readFileSync(
      path.join(tmpDir, '.nanoclaw', 'base', 'src', 'index.ts'),
      'utf-8',
    );
    expect(baseContent).toContain('core v2 header');
    // Working tree should have both core v2 and skill changes merged
    const workingContent = fs.readFileSync(
      path.join(tmpDir, 'src', 'index.ts'),
      'utf-8',
    );
    expect(workingContent).toContain('core v2 header');
    expect(workingContent).toContain('skill change');
    // State should reflect rebase
    const stateContent = fs.readFileSync(
      path.join(tmpDir, '.nanoclaw', 'state.yaml'),
      'utf-8',
    );
    const state = parse(stateContent);
    expect(state.rebased_at).toBeDefined();
  });
  it('rebase with new base: conflict returns backupPending', async () => {
    // Set up current base — short file so changes overlap
    const baseDir = path.join(tmpDir, '.nanoclaw', 'base');
    fs.mkdirSync(path.join(baseDir, 'src'), { recursive: true });
    fs.writeFileSync(
      path.join(baseDir, 'src', 'index.ts'),
      'const x = 1;\n',
    );
    // Working tree: skill replaces the same line
    fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
    fs.writeFileSync(
      path.join(tmpDir, 'src', 'index.ts'),
      'const x = 42; // skill override\n',
    );
    writeState(tmpDir, {
      skills_system_version: '0.1.0',
      core_version: '1.0.0',
      applied_skills: [
        {
          name: 'my-skill',
          version: '1.0.0',
          applied_at: new Date().toISOString(),
          file_hashes: {
            'src/index.ts': 'oldhash',
          },
        },
      ],
    });
    initGitRepo(tmpDir);
    // New base: also changes the same line — guaranteed conflict
    const newBase = path.join(tmpDir, 'new-core');
    fs.mkdirSync(path.join(newBase, 'src'), { recursive: true });
    fs.writeFileSync(
      path.join(newBase, 'src', 'index.ts'),
      'const x = 999; // core v2\n',
    );
    const result = await rebase(newBase);
    expect(result.success).toBe(false);
    expect(result.mergeConflicts).toContain('src/index.ts');
    expect(result.backupPending).toBe(true);
    expect(result.error).toContain('Merge conflicts');
    // combined.patch should still exist
    expect(result.patchFile).toBeDefined();
    const patchPath = path.join(tmpDir, '.nanoclaw', 'combined.patch');
    expect(fs.existsSync(patchPath)).toBe(true);
    // Working tree should have conflict markers (not rolled back)
    const workingContent = fs.readFileSync(
      path.join(tmpDir, 'src', 'index.ts'),
      'utf-8',
    );
    expect(workingContent).toContain('<<<<<<<');
    expect(workingContent).toContain('>>>>>>>');
    // State should NOT be updated yet (conflicts pending)
    const stateContent = fs.readFileSync(
      path.join(tmpDir, '.nanoclaw', 'state.yaml'),
      'utf-8',
    );
    const state = parse(stateContent);
    expect(state.rebased_at).toBeUndefined();
  });
  it('error when no skills applied', async () => {
    // State has no applied skills (created by createMinimalState)
    initGitRepo(tmpDir);
    const result = await rebase();
    expect(result.success).toBe(false);
    expect(result.error).toContain('No skills applied');
    expect(result.filesInPatch).toBe(0);
  });
});

View File

@@ -0,0 +1,297 @@
import fs from 'fs';
import path from 'path';
import { afterEach, beforeEach, describe, expect, it } from 'vitest';
import { findSkillDir, replaySkills } from '../replay.js';
import {
cleanup,
createMinimalState,
createSkillPackage,
createTempDir,
initGitRepo,
setupNanoclawDir,
} from './test-helpers.js';
// Tests for replaySkills(): resetting tracked files to the stored base and
// re-applying skill packages in order (with break-on-conflict), plus
// findSkillDir() lookup of shipped packages under .claude/skills.
describe('replay', () => {
  let tmpDir: string;
  const originalCwd = process.cwd();
  beforeEach(() => {
    tmpDir = createTempDir();
    setupNanoclawDir(tmpDir);
    createMinimalState(tmpDir);
    initGitRepo(tmpDir);
    process.chdir(tmpDir);
  });
  afterEach(() => {
    process.chdir(originalCwd);
    cleanup(tmpDir);
  });
  describe('findSkillDir', () => {
    it('finds skill directory by name', () => {
      const skillsRoot = path.join(tmpDir, '.claude', 'skills', 'telegram');
      fs.mkdirSync(skillsRoot, { recursive: true });
      // Write the manifest as literal YAML. This file is an ES module, so
      // the previous CommonJS require('yaml') here was inconsistent with the
      // rest of the suite and is undefined under a pure-ESM runner.
      fs.writeFileSync(
        path.join(skillsRoot, 'manifest.yaml'),
        [
          'skill: telegram',
          'version: 1.0.0',
          'core_version: 1.0.0',
          'adds: []',
          'modifies: []',
          '',
        ].join('\n'),
      );
      const result = findSkillDir('telegram', tmpDir);
      expect(result).toBe(skillsRoot);
    });
    it('returns null for missing skill', () => {
      const result = findSkillDir('nonexistent', tmpDir);
      expect(result).toBeNull();
    });
    it('returns null when .claude/skills does not exist', () => {
      const result = findSkillDir('anything', tmpDir);
      expect(result).toBeNull();
    });
  });
  describe('replaySkills', () => {
    it('replays a single skill from base', async () => {
      // Set up base file
      const baseDir = path.join(tmpDir, '.nanoclaw', 'base', 'src');
      fs.mkdirSync(baseDir, { recursive: true });
      fs.writeFileSync(path.join(baseDir, 'config.ts'), 'base content\n');
      // Set up current file (will be overwritten by replay)
      fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
      fs.writeFileSync(
        path.join(tmpDir, 'src', 'config.ts'),
        'modified content\n',
      );
      // Create skill package that adds one file and modifies another
      const skillDir = createSkillPackage(tmpDir, {
        skill: 'telegram',
        version: '1.0.0',
        core_version: '1.0.0',
        adds: ['src/telegram.ts'],
        modifies: ['src/config.ts'],
        addFiles: { 'src/telegram.ts': 'telegram code\n' },
        modifyFiles: { 'src/config.ts': 'base content\ntelegram config\n' },
      });
      const result = await replaySkills({
        skills: ['telegram'],
        skillDirs: { telegram: skillDir },
        projectRoot: tmpDir,
      });
      expect(result.success).toBe(true);
      expect(result.perSkill.telegram.success).toBe(true);
      // Added file should exist
      expect(fs.existsSync(path.join(tmpDir, 'src', 'telegram.ts'))).toBe(
        true,
      );
      expect(
        fs.readFileSync(path.join(tmpDir, 'src', 'telegram.ts'), 'utf-8'),
      ).toBe('telegram code\n');
      // Modified file should be merged from base
      const config = fs.readFileSync(
        path.join(tmpDir, 'src', 'config.ts'),
        'utf-8',
      );
      expect(config).toContain('telegram config');
    });
    it('replays two skills in order', async () => {
      // Set up base
      const baseDir = path.join(tmpDir, '.nanoclaw', 'base', 'src');
      fs.mkdirSync(baseDir, { recursive: true });
      fs.writeFileSync(
        path.join(baseDir, 'config.ts'),
        'line1\nline2\nline3\nline4\nline5\n',
      );
      fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
      fs.writeFileSync(
        path.join(tmpDir, 'src', 'config.ts'),
        'line1\nline2\nline3\nline4\nline5\n',
      );
      // Skill 1 adds at top; skill 2 adds at bottom — edits are far apart so
      // both merges stay clean
      const skill1Dir = createSkillPackage(tmpDir, {
        skill: 'telegram',
        version: '1.0.0',
        core_version: '1.0.0',
        adds: ['src/telegram.ts'],
        modifies: ['src/config.ts'],
        addFiles: { 'src/telegram.ts': 'tg code' },
        modifyFiles: {
          'src/config.ts': 'telegram import\nline1\nline2\nline3\nline4\nline5\n',
        },
        dirName: 'skill-pkg-tg',
      });
      const skill2Dir = createSkillPackage(tmpDir, {
        skill: 'discord',
        version: '1.0.0',
        core_version: '1.0.0',
        adds: ['src/discord.ts'],
        modifies: ['src/config.ts'],
        addFiles: { 'src/discord.ts': 'dc code' },
        modifyFiles: {
          'src/config.ts': 'line1\nline2\nline3\nline4\nline5\ndiscord import\n',
        },
        dirName: 'skill-pkg-dc',
      });
      const result = await replaySkills({
        skills: ['telegram', 'discord'],
        skillDirs: { telegram: skill1Dir, discord: skill2Dir },
        projectRoot: tmpDir,
      });
      expect(result.success).toBe(true);
      expect(result.perSkill.telegram.success).toBe(true);
      expect(result.perSkill.discord.success).toBe(true);
      // Both added files should exist
      expect(fs.existsSync(path.join(tmpDir, 'src', 'telegram.ts'))).toBe(
        true,
      );
      expect(fs.existsSync(path.join(tmpDir, 'src', 'discord.ts'))).toBe(
        true,
      );
      // Config should have both changes
      const config = fs.readFileSync(
        path.join(tmpDir, 'src', 'config.ts'),
        'utf-8',
      );
      expect(config).toContain('telegram import');
      expect(config).toContain('discord import');
    });
    it('stops on first conflict and does not process later skills', async () => {
      // After reset, current=base. Skill 1 merges cleanly (changes line 1).
      // Skill 2 also changes line 1 differently → conflict with skill 1's result.
      // Skill 3 should NOT be processed due to break-on-conflict.
      const baseDir = path.join(tmpDir, '.nanoclaw', 'base', 'src');
      fs.mkdirSync(baseDir, { recursive: true });
      fs.writeFileSync(path.join(baseDir, 'config.ts'), 'line1\n');
      fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
      fs.writeFileSync(path.join(tmpDir, 'src', 'config.ts'), 'line1\n');
      // Skill 1: changes line 1 — merges cleanly since current=base after reset
      const skill1Dir = createSkillPackage(tmpDir, {
        skill: 'skill-a',
        version: '1.0.0',
        core_version: '1.0.0',
        adds: [],
        modifies: ['src/config.ts'],
        modifyFiles: { 'src/config.ts': 'line1-from-skill-a\n' },
        dirName: 'skill-pkg-a',
      });
      // Skill 2: also changes line 1 differently → conflict with skill-a's result
      const skill2Dir = createSkillPackage(tmpDir, {
        skill: 'skill-b',
        version: '1.0.0',
        core_version: '1.0.0',
        adds: [],
        modifies: ['src/config.ts'],
        modifyFiles: { 'src/config.ts': 'line1-from-skill-b\n' },
        dirName: 'skill-pkg-b',
      });
      // Skill 3: adds a new file — should be skipped
      const skill3Dir = createSkillPackage(tmpDir, {
        skill: 'skill-c',
        version: '1.0.0',
        core_version: '1.0.0',
        adds: ['src/newfile.ts'],
        modifies: [],
        addFiles: { 'src/newfile.ts': 'should not appear' },
        dirName: 'skill-pkg-c',
      });
      const result = await replaySkills({
        skills: ['skill-a', 'skill-b', 'skill-c'],
        skillDirs: { 'skill-a': skill1Dir, 'skill-b': skill2Dir, 'skill-c': skill3Dir },
        projectRoot: tmpDir,
      });
      expect(result.success).toBe(false);
      expect(result.mergeConflicts).toBeDefined();
      expect(result.mergeConflicts!.length).toBeGreaterThan(0);
      // Skill B caused the conflict
      expect(result.perSkill['skill-b']?.success).toBe(false);
      // Skill C should NOT have been processed
      expect(result.perSkill['skill-c']).toBeUndefined();
    });
    it('returns error for missing skill dir', async () => {
      const result = await replaySkills({
        skills: ['missing'],
        skillDirs: {},
        projectRoot: tmpDir,
      });
      expect(result.success).toBe(false);
      expect(result.error).toContain('missing');
      expect(result.perSkill.missing.success).toBe(false);
    });
    it('resets files to base before replay', async () => {
      // Set up base
      const baseDir = path.join(tmpDir, '.nanoclaw', 'base', 'src');
      fs.mkdirSync(baseDir, { recursive: true });
      fs.writeFileSync(path.join(baseDir, 'config.ts'), 'base content\n');
      // Current has drift
      fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
      fs.writeFileSync(
        path.join(tmpDir, 'src', 'config.ts'),
        'drifted content\n',
      );
      // Also a stale added file
      fs.writeFileSync(
        path.join(tmpDir, 'src', 'stale-add.ts'),
        'should be removed',
      );
      const skillDir = createSkillPackage(tmpDir, {
        skill: 'skill1',
        version: '1.0.0',
        core_version: '1.0.0',
        adds: ['src/stale-add.ts'],
        modifies: ['src/config.ts'],
        addFiles: { 'src/stale-add.ts': 'fresh add' },
        modifyFiles: { 'src/config.ts': 'base content\nskill addition\n' },
      });
      const result = await replaySkills({
        skills: ['skill1'],
        skillDirs: { skill1: skillDir },
        projectRoot: tmpDir,
      });
      expect(result.success).toBe(true);
      // The added file should have the fresh content (not stale)
      expect(
        fs.readFileSync(path.join(tmpDir, 'src', 'stale-add.ts'), 'utf-8'),
      ).toBe('fresh add');
    });
  });
});

View File

@@ -0,0 +1,283 @@
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import crypto from 'crypto';
import fs from 'fs';
import path from 'path';
import { parse, stringify } from 'yaml';
import {
findResolutionDir,
loadResolutions,
saveResolution,
} from '../resolution-cache.js';
import { createTempDir, setupNanoclawDir, initGitRepo, cleanup } from './test-helpers.js';
/** Hex-encoded SHA-256 digest of a UTF-8 string. */
const sha256 = (text: string): string =>
  crypto.createHash('sha256').update(text).digest('hex');

/** Placeholder input hashes for tests that don't exercise hash matching. */
const dummyHashes = { base: 'aaa', current: 'bbb', skill: 'ccc' };
// Tests for the rerere resolution cache: saving preimage/resolution pairs,
// locating them (shipped .claude/resolutions preferred over project-level
// .nanoclaw/resolutions), and the input-hash verification that guards
// loadResolutions against applying a stale resolution.
describe('resolution-cache', () => {
  let tmpDir: string;
  const originalCwd = process.cwd();
  beforeEach(() => {
    tmpDir = createTempDir();
    setupNanoclawDir(tmpDir);
    process.chdir(tmpDir);
  });
  afterEach(() => {
    process.chdir(originalCwd);
    cleanup(tmpDir);
  });
  it('findResolutionDir returns null when not found', () => {
    const result = findResolutionDir(['skill-a', 'skill-b'], tmpDir);
    expect(result).toBeNull();
  });
  it('saveResolution creates directory structure with files and meta', () => {
    saveResolution(
      ['skill-b', 'skill-a'],
      [{ relPath: 'src/config.ts', preimage: 'conflict content', resolution: 'resolved content', inputHashes: dummyHashes }],
      { core_version: '1.0.0' },
      tmpDir,
    );
    // Skills are sorted, so key is "skill-a+skill-b"
    const resDir = path.join(tmpDir, '.nanoclaw', 'resolutions', 'skill-a+skill-b');
    expect(fs.existsSync(resDir)).toBe(true);
    // Check preimage and resolution files exist
    expect(fs.existsSync(path.join(resDir, 'src/config.ts.preimage'))).toBe(true);
    expect(fs.existsSync(path.join(resDir, 'src/config.ts.resolution'))).toBe(true);
    // Check meta.yaml exists and has expected fields
    const metaPath = path.join(resDir, 'meta.yaml');
    expect(fs.existsSync(metaPath)).toBe(true);
    const meta = parse(fs.readFileSync(metaPath, 'utf-8'));
    expect(meta.core_version).toBe('1.0.0');
    expect(meta.skills).toEqual(['skill-a', 'skill-b']);
  });
  it('saveResolution writes file_hashes to meta.yaml', () => {
    const hashes = {
      base: sha256('base content'),
      current: sha256('current content'),
      skill: sha256('skill content'),
    };
    saveResolution(
      ['alpha', 'beta'],
      [{ relPath: 'src/config.ts', preimage: 'pre', resolution: 'post', inputHashes: hashes }],
      {},
      tmpDir,
    );
    const resDir = path.join(tmpDir, '.nanoclaw', 'resolutions', 'alpha+beta');
    const meta = parse(fs.readFileSync(path.join(resDir, 'meta.yaml'), 'utf-8'));
    expect(meta.file_hashes).toBeDefined();
    expect(meta.file_hashes['src/config.ts']).toEqual(hashes);
  });
  it('findResolutionDir returns path after save', () => {
    saveResolution(
      ['alpha', 'beta'],
      [{ relPath: 'file.ts', preimage: 'pre', resolution: 'post', inputHashes: dummyHashes }],
      {},
      tmpDir,
    );
    const result = findResolutionDir(['alpha', 'beta'], tmpDir);
    expect(result).not.toBeNull();
    expect(result).toContain('alpha+beta');
  });
  it('findResolutionDir finds shipped resolutions in .claude/resolutions', () => {
    const shippedDir = path.join(tmpDir, '.claude', 'resolutions', 'alpha+beta');
    fs.mkdirSync(shippedDir, { recursive: true });
    fs.writeFileSync(path.join(shippedDir, 'meta.yaml'), 'skills: [alpha, beta]\n');
    const result = findResolutionDir(['alpha', 'beta'], tmpDir);
    expect(result).not.toBeNull();
    expect(result).toContain('.claude/resolutions/alpha+beta');
  });
  it('findResolutionDir prefers shipped over project-level', () => {
    // Create both shipped and project-level
    const shippedDir = path.join(tmpDir, '.claude', 'resolutions', 'a+b');
    fs.mkdirSync(shippedDir, { recursive: true });
    fs.writeFileSync(path.join(shippedDir, 'meta.yaml'), 'skills: [a, b]\n');
    saveResolution(
      ['a', 'b'],
      [{ relPath: 'f.ts', preimage: 'x', resolution: 'project', inputHashes: dummyHashes }],
      {},
      tmpDir,
    );
    const result = findResolutionDir(['a', 'b'], tmpDir);
    expect(result).toContain('.claude/resolutions/a+b');
  });
  it('skills are sorted so order does not matter', () => {
    saveResolution(
      ['zeta', 'alpha'],
      [{ relPath: 'f.ts', preimage: 'a', resolution: 'b', inputHashes: dummyHashes }],
      {},
      tmpDir,
    );
    // Find with reversed order should still work
    const result = findResolutionDir(['alpha', 'zeta'], tmpDir);
    expect(result).not.toBeNull();
    // Also works with original order
    const result2 = findResolutionDir(['zeta', 'alpha'], tmpDir);
    expect(result2).not.toBeNull();
    expect(result).toBe(result2);
  });
  describe('loadResolutions hash verification', () => {
    const baseContent = 'base file content';
    const currentContent = 'current file content';
    const skillContent = 'skill file content';
    const preimageContent = 'preimage with conflict markers';
    const resolutionContent = 'resolved content';
    const rerereHash = 'abc123def456';
    // Hashes of the three merge inputs recorded for one resolved file.
    // (Replaces the previous `Record<string, any>` — `any` disabled checking
    // of the hash objects these tests build.)
    type InputHashes = { base: string; current: string; skill: string };
    // Create a shipped resolution directory for alpha+beta with the given
    // per-file input hashes recorded in meta.yaml.
    function setupResolutionDir(fileHashes: Record<string, InputHashes>) {
      const resDir = path.join(tmpDir, '.claude', 'resolutions', 'alpha+beta');
      fs.mkdirSync(path.join(resDir, 'src'), { recursive: true });
      // Write preimage, resolution, and hash sidecar
      fs.writeFileSync(path.join(resDir, 'src/config.ts.preimage'), preimageContent);
      fs.writeFileSync(path.join(resDir, 'src/config.ts.resolution'), resolutionContent);
      fs.writeFileSync(path.join(resDir, 'src/config.ts.preimage.hash'), rerereHash);
      // Write meta.yaml (typed by inference instead of the previous `any`)
      const meta = {
        skills: ['alpha', 'beta'],
        apply_order: ['alpha', 'beta'],
        core_version: '1.0.0',
        resolved_at: new Date().toISOString(),
        tested: true,
        test_passed: true,
        resolution_source: 'maintainer',
        input_hashes: {},
        output_hash: '',
        file_hashes: fileHashes,
      };
      fs.writeFileSync(path.join(resDir, 'meta.yaml'), stringify(meta));
      return resDir;
    }
    // Create the base and current copies of src/config.ts that
    // loadResolutions hashes and compares against meta.yaml.
    function setupInputFiles() {
      fs.mkdirSync(path.join(tmpDir, '.nanoclaw', 'base', 'src'), { recursive: true });
      fs.writeFileSync(path.join(tmpDir, '.nanoclaw', 'base', 'src', 'config.ts'), baseContent);
      fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
      fs.writeFileSync(path.join(tmpDir, 'src', 'config.ts'), currentContent);
    }
    // Create a skill package directory containing the skill-side copy of
    // src/config.ts under modify/.
    function createSkillDir() {
      const skillDir = path.join(tmpDir, 'skill-pkg');
      fs.mkdirSync(path.join(skillDir, 'modify', 'src'), { recursive: true });
      fs.writeFileSync(path.join(skillDir, 'modify', 'src', 'config.ts'), skillContent);
      return skillDir;
    }
    beforeEach(() => {
      initGitRepo(tmpDir);
    });
    it('loads with matching file_hashes', () => {
      setupInputFiles();
      const skillDir = createSkillDir();
      setupResolutionDir({
        'src/config.ts': {
          base: sha256(baseContent),
          current: sha256(currentContent),
          skill: sha256(skillContent),
        },
      });
      const result = loadResolutions(['alpha', 'beta'], tmpDir, skillDir);
      expect(result).toBe(true);
      // Verify rr-cache entry was created
      const gitDir = path.join(tmpDir, '.git');
      const cacheEntry = path.join(gitDir, 'rr-cache', rerereHash);
      expect(fs.existsSync(path.join(cacheEntry, 'preimage'))).toBe(true);
      expect(fs.existsSync(path.join(cacheEntry, 'postimage'))).toBe(true);
    });
    it('skips pair with mismatched base hash', () => {
      setupInputFiles();
      const skillDir = createSkillDir();
      setupResolutionDir({
        'src/config.ts': {
          base: 'wrong_hash',
          current: sha256(currentContent),
          skill: sha256(skillContent),
        },
      });
      const result = loadResolutions(['alpha', 'beta'], tmpDir, skillDir);
      expect(result).toBe(false);
      // rr-cache entry should NOT be created
      const gitDir = path.join(tmpDir, '.git');
      expect(fs.existsSync(path.join(gitDir, 'rr-cache', rerereHash))).toBe(false);
    });
    it('skips pair with mismatched current hash', () => {
      setupInputFiles();
      const skillDir = createSkillDir();
      setupResolutionDir({
        'src/config.ts': {
          base: sha256(baseContent),
          current: 'wrong_hash',
          skill: sha256(skillContent),
        },
      });
      const result = loadResolutions(['alpha', 'beta'], tmpDir, skillDir);
      expect(result).toBe(false);
    });
    it('skips pair with mismatched skill hash', () => {
      setupInputFiles();
      const skillDir = createSkillDir();
      setupResolutionDir({
        'src/config.ts': {
          base: sha256(baseContent),
          current: sha256(currentContent),
          skill: 'wrong_hash',
        },
      });
      const result = loadResolutions(['alpha', 'beta'], tmpDir, skillDir);
      expect(result).toBe(false);
    });
    it('skips pair with no file_hashes entry for that file', () => {
      setupInputFiles();
      const skillDir = createSkillDir();
      // file_hashes exists but doesn't include src/config.ts
      setupResolutionDir({});
      const result = loadResolutions(['alpha', 'beta'], tmpDir, skillDir);
      expect(result).toBe(false);
    });
  });
});

View File

@@ -0,0 +1,120 @@
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import fs from 'fs';
import path from 'path';
import {
readState,
writeState,
recordSkillApplication,
computeFileHash,
compareSemver,
recordCustomModification,
getCustomModifications,
} from '../state.js';
import {
createTempDir,
setupNanoclawDir,
createMinimalState,
writeState as writeStateHelper,
cleanup,
} from './test-helpers.js';
// Unit tests for state.ts: state-file round-tripping, skill-application
// records, file hashing, semver comparison, and custom-modification
// records. Note: writeState (under test) differs from writeStateHelper,
// the fixture that writes a raw state file without going through state.ts.
describe('state', () => {
let tmpDir: string;
const originalCwd = process.cwd();
// state.ts resolves paths from process.cwd(), so each test chdirs into a
// fresh temp project and restores the original cwd afterwards.
beforeEach(() => {
tmpDir = createTempDir();
setupNanoclawDir(tmpDir);
process.chdir(tmpDir);
});
afterEach(() => {
process.chdir(originalCwd);
cleanup(tmpDir);
});
it('readState/writeState roundtrip', () => {
const state = {
skills_system_version: '0.1.0',
core_version: '1.0.0',
applied_skills: [],
};
writeState(state);
const result = readState();
expect(result.skills_system_version).toBe('0.1.0');
expect(result.core_version).toBe('1.0.0');
expect(result.applied_skills).toEqual([]);
});
it('readState throws when no state file exists', () => {
expect(() => readState()).toThrow();
});
// A state file written by a newer skills system must be rejected rather
// than silently read with unknown semantics.
it('readState throws when version is newer than current', () => {
writeStateHelper(tmpDir, {
skills_system_version: '99.0.0',
core_version: '1.0.0',
applied_skills: [],
});
expect(() => readState()).toThrow();
});
it('recordSkillApplication adds a skill', () => {
createMinimalState(tmpDir);
recordSkillApplication('my-skill', '1.0.0', { 'src/foo.ts': 'abc123' });
const state = readState();
expect(state.applied_skills).toHaveLength(1);
expect(state.applied_skills[0].name).toBe('my-skill');
expect(state.applied_skills[0].version).toBe('1.0.0');
expect(state.applied_skills[0].file_hashes).toEqual({ 'src/foo.ts': 'abc123' });
});
// Re-application replaces the existing record instead of appending a
// duplicate entry for the same skill name.
it('re-applying same skill replaces it', () => {
createMinimalState(tmpDir);
recordSkillApplication('my-skill', '1.0.0', { 'a.ts': 'hash1' });
recordSkillApplication('my-skill', '2.0.0', { 'a.ts': 'hash2' });
const state = readState();
expect(state.applied_skills).toHaveLength(1);
expect(state.applied_skills[0].version).toBe('2.0.0');
expect(state.applied_skills[0].file_hashes).toEqual({ 'a.ts': 'hash2' });
});
it('computeFileHash produces consistent sha256', () => {
const filePath = path.join(tmpDir, 'hashtest.txt');
fs.writeFileSync(filePath, 'hello world');
const hash1 = computeFileHash(filePath);
const hash2 = computeFileHash(filePath);
expect(hash1).toBe(hash2);
// sha256 hex digest: exactly 64 lowercase hex characters.
expect(hash1).toMatch(/^[a-f0-9]{64}$/);
});
describe('compareSemver', () => {
it('1.0.0 < 1.1.0', () => {
expect(compareSemver('1.0.0', '1.1.0')).toBeLessThan(0);
});
// Guards against lexicographic comparison, where "10" < "9" as strings.
it('0.9.0 < 0.10.0', () => {
expect(compareSemver('0.9.0', '0.10.0')).toBeLessThan(0);
});
it('1.0.0 = 1.0.0', () => {
expect(compareSemver('1.0.0', '1.0.0')).toBe(0);
});
});
it('recordCustomModification adds to array', () => {
createMinimalState(tmpDir);
recordCustomModification('tweak', ['src/a.ts'], 'custom/001-tweak.patch');
const mods = getCustomModifications();
expect(mods).toHaveLength(1);
expect(mods[0].description).toBe('tweak');
expect(mods[0].files_modified).toEqual(['src/a.ts']);
expect(mods[0].patch_file).toBe('custom/001-tweak.patch');
});
it('getCustomModifications returns empty when none recorded', () => {
createMinimalState(tmpDir);
const mods = getCustomModifications();
expect(mods).toEqual([]);
});
});

View File

@@ -0,0 +1,204 @@
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import fs from 'fs';
import path from 'path';
import {
areRangesCompatible,
mergeNpmDependencies,
mergeEnvAdditions,
mergeDockerComposeServices,
} from '../structured.js';
import { createTempDir, cleanup } from './test-helpers.js';
// Unit tests for structured.ts: semver-range compatibility, package.json
// dependency merging, .env.example additions, and docker-compose service
// merging.
describe('structured', () => {
let tmpDir: string;
const originalCwd = process.cwd();
beforeEach(() => {
tmpDir = createTempDir();
process.chdir(tmpDir);
});
afterEach(() => {
process.chdir(originalCwd);
cleanup(tmpDir);
});
describe('areRangesCompatible', () => {
it('identical versions are compatible', () => {
const result = areRangesCompatible('^1.0.0', '^1.0.0');
expect(result.compatible).toBe(true);
});
// Compatible ranges resolve to the higher of the two.
it('compatible ^ ranges resolve to higher', () => {
const result = areRangesCompatible('^1.0.0', '^1.1.0');
expect(result.compatible).toBe(true);
expect(result.resolved).toBe('^1.1.0');
});
it('incompatible major ^ ranges', () => {
const result = areRangesCompatible('^1.0.0', '^2.0.0');
expect(result.compatible).toBe(false);
});
it('compatible ~ ranges', () => {
const result = areRangesCompatible('~1.0.0', '~1.0.3');
expect(result.compatible).toBe(true);
expect(result.resolved).toBe('~1.0.3');
});
// ^ and ~ express different widths, so they are never merged here.
it('mismatched prefixes are incompatible', () => {
const result = areRangesCompatible('^1.0.0', '~1.0.0');
expect(result.compatible).toBe(false);
});
it('handles double-digit version parts numerically', () => {
// ^1.9.0 vs ^1.10.0 — 10 > 9 numerically, but "9" > "10" as strings
const result = areRangesCompatible('^1.9.0', '^1.10.0');
expect(result.compatible).toBe(true);
expect(result.resolved).toBe('^1.10.0');
});
it('handles double-digit patch versions', () => {
const result = areRangesCompatible('~1.0.9', '~1.0.10');
expect(result.compatible).toBe(true);
expect(result.resolved).toBe('~1.0.10');
});
});
describe('mergeNpmDependencies', () => {
it('adds new dependencies', () => {
const pkgPath = path.join(tmpDir, 'package.json');
fs.writeFileSync(pkgPath, JSON.stringify({
name: 'test',
dependencies: { existing: '^1.0.0' },
}, null, 2));
mergeNpmDependencies(pkgPath, { newdep: '^2.0.0' });
const pkg = JSON.parse(fs.readFileSync(pkgPath, 'utf-8'));
expect(pkg.dependencies.newdep).toBe('^2.0.0');
expect(pkg.dependencies.existing).toBe('^1.0.0');
});
it('resolves compatible ^ ranges', () => {
const pkgPath = path.join(tmpDir, 'package.json');
fs.writeFileSync(pkgPath, JSON.stringify({
name: 'test',
dependencies: { dep: '^1.0.0' },
}, null, 2));
mergeNpmDependencies(pkgPath, { dep: '^1.1.0' });
const pkg = JSON.parse(fs.readFileSync(pkgPath, 'utf-8'));
expect(pkg.dependencies.dep).toBe('^1.1.0');
});
// NOTE(review): 'middle' is merged (presumably into dependencies) and the
// assertion is that devDependencies keys come back alphabetically sorted.
it('sorts devDependencies after merge', () => {
const pkgPath = path.join(tmpDir, 'package.json');
fs.writeFileSync(pkgPath, JSON.stringify({
name: 'test',
dependencies: {},
devDependencies: { zlib: '^1.0.0', acorn: '^2.0.0' },
}, null, 2));
mergeNpmDependencies(pkgPath, { middle: '^1.0.0' });
const pkg = JSON.parse(fs.readFileSync(pkgPath, 'utf-8'));
const devKeys = Object.keys(pkg.devDependencies);
expect(devKeys).toEqual(['acorn', 'zlib']);
});
it('throws on incompatible major versions', () => {
const pkgPath = path.join(tmpDir, 'package.json');
fs.writeFileSync(pkgPath, JSON.stringify({
name: 'test',
dependencies: { dep: '^1.0.0' },
}, null, 2));
expect(() => mergeNpmDependencies(pkgPath, { dep: '^2.0.0' })).toThrow();
});
});
describe('mergeEnvAdditions', () => {
it('adds new variables', () => {
const envPath = path.join(tmpDir, '.env.example');
fs.writeFileSync(envPath, 'EXISTING_VAR=value\n');
mergeEnvAdditions(envPath, ['NEW_VAR']);
const content = fs.readFileSync(envPath, 'utf-8');
expect(content).toContain('NEW_VAR=');
expect(content).toContain('EXISTING_VAR=value');
});
it('skips existing variables', () => {
const envPath = path.join(tmpDir, '.env.example');
fs.writeFileSync(envPath, 'MY_VAR=original\n');
mergeEnvAdditions(envPath, ['MY_VAR']);
const content = fs.readFileSync(envPath, 'utf-8');
// Should not add duplicate - only 1 occurrence of MY_VAR=
const matches = content.match(/MY_VAR=/g);
expect(matches).toHaveLength(1);
});
// Existing-variable detection must not assume SCREAMING_CASE names.
it('recognizes lowercase and mixed-case env vars as existing', () => {
const envPath = path.join(tmpDir, '.env.example');
fs.writeFileSync(envPath, 'my_lower_var=value\nMixed_Case=abc\n');
mergeEnvAdditions(envPath, ['my_lower_var', 'Mixed_Case']);
const content = fs.readFileSync(envPath, 'utf-8');
// Should not add duplicates
const lowerMatches = content.match(/my_lower_var=/g);
expect(lowerMatches).toHaveLength(1);
const mixedMatches = content.match(/Mixed_Case=/g);
expect(mixedMatches).toHaveLength(1);
});
it('creates file if it does not exist', () => {
const envPath = path.join(tmpDir, '.env.example');
mergeEnvAdditions(envPath, ['NEW_VAR']);
expect(fs.existsSync(envPath)).toBe(true);
const content = fs.readFileSync(envPath, 'utf-8');
expect(content).toContain('NEW_VAR=');
});
});
describe('mergeDockerComposeServices', () => {
it('adds new services', () => {
const composePath = path.join(tmpDir, 'docker-compose.yaml');
fs.writeFileSync(composePath, 'version: "3"\nservices:\n  web:\n    image: nginx\n');
mergeDockerComposeServices(composePath, {
redis: { image: 'redis:7' },
});
const content = fs.readFileSync(composePath, 'utf-8');
expect(content).toContain('redis');
});
// An existing service wins: the incoming definition must not clobber it.
it('skips existing services', () => {
const composePath = path.join(tmpDir, 'docker-compose.yaml');
fs.writeFileSync(composePath, 'version: "3"\nservices:\n  web:\n    image: nginx\n');
mergeDockerComposeServices(composePath, {
web: { image: 'apache' },
});
const content = fs.readFileSync(composePath, 'utf-8');
expect(content).toContain('nginx');
});
// Host port 8080 is taken by web; adding api on the same host port throws.
it('throws on port collision', () => {
const composePath = path.join(tmpDir, 'docker-compose.yaml');
fs.writeFileSync(composePath, 'version: "3"\nservices:\n  web:\n    image: nginx\n    ports:\n      - "8080:80"\n');
expect(() => mergeDockerComposeServices(composePath, {
api: { image: 'node', ports: ['8080:3000'] },
})).toThrow();
});
});
});

View File

@@ -0,0 +1,99 @@
import { execSync } from 'child_process';
import fs from 'fs';
import os from 'os';
import path from 'path';
import { stringify } from 'yaml';
/** Allocates a unique throwaway directory for a test and returns its path. */
export function createTempDir(): string {
  const prefix = path.join(os.tmpdir(), 'nanoclaw-test-');
  return fs.mkdtempSync(prefix);
}
/** Creates the .nanoclaw skeleton (base/src and backup) under tmpDir. */
export function setupNanoclawDir(tmpDir: string): void {
  const nanoclaw = path.join(tmpDir, '.nanoclaw');
  const dirs = [path.join(nanoclaw, 'base', 'src'), path.join(nanoclaw, 'backup')];
  for (const dir of dirs) {
    fs.mkdirSync(dir, { recursive: true });
  }
}
/**
 * Serializes `state` to YAML at <tmpDir>/.nanoclaw/state.yaml, bypassing
 * state.ts (used to craft arbitrary/invalid state files in tests).
 * `unknown` instead of `any` keeps call sites type-checked.
 */
export function writeState(tmpDir: string, state: unknown): void {
  const statePath = path.join(tmpDir, '.nanoclaw', 'state.yaml');
  fs.writeFileSync(statePath, stringify(state), 'utf-8');
}
/** Writes the smallest valid state file: system 0.1.0, core 1.0.0, no skills. */
export function createMinimalState(tmpDir: string): void {
  const minimal = {
    skills_system_version: '0.1.0',
    core_version: '1.0.0',
    applied_skills: [],
  };
  writeState(tmpDir, minimal);
}
/**
 * Writes a skill package (manifest.yaml plus optional add/ and modify/
 * payload trees) under tmpDir and returns the package directory.
 *
 * Defaults produce a minimal valid manifest. Optional manifest fields
 * (`test`, `structured`, `file_ops`, `post_apply`,
 * `min_skills_system_version`) are written only when supplied — this
 * makes explicit what the previous code relied on implicitly (yaml's
 * stringify dropping undefined map values), so the serialized output is
 * unchanged. `unknown` replaces `any` for the opaque fields.
 */
export function createSkillPackage(tmpDir: string, opts: {
  skill?: string;
  version?: string;
  core_version?: string;
  adds?: string[];
  modifies?: string[];
  addFiles?: Record<string, string>;
  modifyFiles?: Record<string, string>;
  conflicts?: string[];
  depends?: string[];
  test?: string;
  structured?: unknown;
  file_ops?: unknown[];
  post_apply?: string[];
  min_skills_system_version?: string;
  dirName?: string;
}): string {
  const skillDir = path.join(tmpDir, opts.dirName ?? 'skill-pkg');
  fs.mkdirSync(skillDir, { recursive: true });
  const manifest: Record<string, unknown> = {
    skill: opts.skill ?? 'test-skill',
    version: opts.version ?? '1.0.0',
    description: 'Test skill',
    core_version: opts.core_version ?? '1.0.0',
    adds: opts.adds ?? [],
    modifies: opts.modifies ?? [],
    conflicts: opts.conflicts ?? [],
    depends: opts.depends ?? [],
  };
  // Optional fields: only present in meta when the caller supplied them.
  if (opts.test !== undefined) manifest.test = opts.test;
  if (opts.structured !== undefined) manifest.structured = opts.structured;
  if (opts.file_ops !== undefined) manifest.file_ops = opts.file_ops;
  if (opts.post_apply) manifest.post_apply = opts.post_apply;
  if (opts.min_skills_system_version) manifest.min_skills_system_version = opts.min_skills_system_version;
  fs.writeFileSync(path.join(skillDir, 'manifest.yaml'), stringify(manifest));
  // Payload trees mirror the target layout under add/ and modify/.
  const writeTree = (subdir: string, files?: Record<string, string>): void => {
    if (!files) return;
    const rootDir = path.join(skillDir, subdir);
    for (const [relPath, content] of Object.entries(files)) {
      const fullPath = path.join(rootDir, relPath);
      fs.mkdirSync(path.dirname(fullPath), { recursive: true });
      fs.writeFileSync(fullPath, content);
    }
  };
  writeTree('add', opts.addFiles);
  writeTree('modify', opts.modifyFiles);
  return skillDir;
}
/**
 * Initializes a git repository at `dir` with a test identity and rerere
 * enabled, then makes an initial commit containing a .gitignore.
 */
export function initGitRepo(dir: string): void {
  const git = (cmd: string): void => {
    execSync(cmd, { cwd: dir, stdio: 'pipe' });
  };
  git('git init');
  git('git config user.email "test@test.com"');
  git('git config user.name "Test"');
  git('git config rerere.enabled true');
  fs.writeFileSync(path.join(dir, '.gitignore'), 'node_modules\n');
  git('git add -A && git commit -m "init"');
}
/** Recursively deletes `dir`; `force` tolerates an already-missing path. */
export function cleanup(dir: string): void {
  const removalOptions = { recursive: true, force: true };
  fs.rmSync(dir, removalOptions);
}

View File

@@ -0,0 +1,261 @@
import fs from 'fs';
import path from 'path';
import { afterEach, beforeEach, describe, expect, it } from 'vitest';
import { stringify } from 'yaml';
import { uninstallSkill } from '../uninstall.js';
import {
cleanup,
createTempDir,
initGitRepo,
setupNanoclawDir,
writeState,
} from './test-helpers.js';
// Unit tests for uninstallSkill: guard conditions (not applied, post-rebase,
// custom patches) and clean-slate replay semantics (removing one skill
// resets its files while preserving other applied skills).
describe('uninstall', () => {
let tmpDir: string;
const originalCwd = process.cwd();
beforeEach(() => {
tmpDir = createTempDir();
setupNanoclawDir(tmpDir);
initGitRepo(tmpDir);
process.chdir(tmpDir);
});
afterEach(() => {
process.chdir(originalCwd);
cleanup(tmpDir);
});
// Writes a skill package under .claude/skills/<name>: manifest.yaml with
// adds/modifies lists derived from the given file maps, plus the payload
// files under add/ and modify/.
// NOTE(review): `modifiesBase` is accepted in opts but never consumed
// below — dead option, or a hook for a future test? Confirm.
function setupSkillPackage(
name: string,
opts: {
adds?: Record<string, string>;
modifies?: Record<string, string>;
modifiesBase?: Record<string, string>;
} = {},
): void {
const skillDir = path.join(tmpDir, '.claude', 'skills', name);
fs.mkdirSync(skillDir, { recursive: true });
const addsList = Object.keys(opts.adds ?? {});
const modifiesList = Object.keys(opts.modifies ?? {});
fs.writeFileSync(
path.join(skillDir, 'manifest.yaml'),
stringify({
skill: name,
version: '1.0.0',
core_version: '1.0.0',
adds: addsList,
modifies: modifiesList,
}),
);
if (opts.adds) {
const addDir = path.join(skillDir, 'add');
for (const [relPath, content] of Object.entries(opts.adds)) {
const fullPath = path.join(addDir, relPath);
fs.mkdirSync(path.dirname(fullPath), { recursive: true });
fs.writeFileSync(fullPath, content);
}
}
if (opts.modifies) {
const modDir = path.join(skillDir, 'modify');
for (const [relPath, content] of Object.entries(opts.modifies)) {
const fullPath = path.join(modDir, relPath);
fs.mkdirSync(path.dirname(fullPath), { recursive: true });
fs.writeFileSync(fullPath, content);
}
}
}
it('returns error for non-applied skill', async () => {
writeState(tmpDir, {
skills_system_version: '0.1.0',
core_version: '1.0.0',
applied_skills: [],
});
const result = await uninstallSkill('nonexistent');
expect(result.success).toBe(false);
expect(result.error).toContain('not applied');
});
// Rebase bakes skills into the base, so uninstall is one-way blocked after.
it('blocks uninstall after rebase', async () => {
writeState(tmpDir, {
skills_system_version: '0.1.0',
core_version: '1.0.0',
rebased_at: new Date().toISOString(),
applied_skills: [
{
name: 'telegram',
version: '1.0.0',
applied_at: new Date().toISOString(),
file_hashes: { 'src/config.ts': 'abc' },
},
],
});
const result = await uninstallSkill('telegram');
expect(result.success).toBe(false);
expect(result.error).toContain('Cannot uninstall');
expect(result.error).toContain('after rebase');
});
// A skill carrying a custom patch must surface a warning (and fail)
// instead of silently dropping the user's customization.
it('returns custom patch warning', async () => {
writeState(tmpDir, {
skills_system_version: '0.1.0',
core_version: '1.0.0',
applied_skills: [
{
name: 'telegram',
version: '1.0.0',
applied_at: new Date().toISOString(),
file_hashes: {},
custom_patch: '.nanoclaw/custom/001.patch',
custom_patch_description: 'My tweak',
},
],
});
const result = await uninstallSkill('telegram');
expect(result.success).toBe(false);
expect(result.customPatchWarning).toContain('custom patch');
expect(result.customPatchWarning).toContain('My tweak');
});
it('uninstalls only skill → files reset to base', async () => {
// Set up base
const baseDir = path.join(tmpDir, '.nanoclaw', 'base', 'src');
fs.mkdirSync(baseDir, { recursive: true });
fs.writeFileSync(path.join(baseDir, 'config.ts'), 'base config\n');
// Set up current files (as if skill was applied)
fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
fs.writeFileSync(
path.join(tmpDir, 'src', 'config.ts'),
'base config\ntelegram config\n',
);
fs.writeFileSync(
path.join(tmpDir, 'src', 'telegram.ts'),
'telegram code\n',
);
// Set up skill package in .claude/skills/
setupSkillPackage('telegram', {
adds: { 'src/telegram.ts': 'telegram code\n' },
modifies: {
'src/config.ts': 'base config\ntelegram config\n',
},
});
writeState(tmpDir, {
skills_system_version: '0.1.0',
core_version: '1.0.0',
applied_skills: [
{
name: 'telegram',
version: '1.0.0',
applied_at: new Date().toISOString(),
file_hashes: {
'src/config.ts': 'abc',
'src/telegram.ts': 'def',
},
},
],
});
const result = await uninstallSkill('telegram');
expect(result.success).toBe(true);
expect(result.skill).toBe('telegram');
// config.ts should be reset to base
expect(
fs.readFileSync(path.join(tmpDir, 'src', 'config.ts'), 'utf-8'),
).toBe('base config\n');
// telegram.ts (add-only) should be removed
expect(fs.existsSync(path.join(tmpDir, 'src', 'telegram.ts'))).toBe(false);
});
// Replay semantics: removing one skill re-applies the survivor on top of
// base, so its modifications (non-overlapping hunks) are preserved.
it('uninstalls one of two → other preserved', async () => {
// Set up base
const baseDir = path.join(tmpDir, '.nanoclaw', 'base', 'src');
fs.mkdirSync(baseDir, { recursive: true });
fs.writeFileSync(
path.join(baseDir, 'config.ts'),
'line1\nline2\nline3\nline4\nline5\n',
);
// Current has both skills applied
fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
fs.writeFileSync(
path.join(tmpDir, 'src', 'config.ts'),
'telegram import\nline1\nline2\nline3\nline4\nline5\ndiscord import\n',
);
fs.writeFileSync(path.join(tmpDir, 'src', 'telegram.ts'), 'tg code\n');
fs.writeFileSync(path.join(tmpDir, 'src', 'discord.ts'), 'dc code\n');
// Set up both skill packages
setupSkillPackage('telegram', {
adds: { 'src/telegram.ts': 'tg code\n' },
modifies: {
'src/config.ts':
'telegram import\nline1\nline2\nline3\nline4\nline5\n',
},
});
setupSkillPackage('discord', {
adds: { 'src/discord.ts': 'dc code\n' },
modifies: {
'src/config.ts':
'line1\nline2\nline3\nline4\nline5\ndiscord import\n',
},
});
writeState(tmpDir, {
skills_system_version: '0.1.0',
core_version: '1.0.0',
applied_skills: [
{
name: 'telegram',
version: '1.0.0',
applied_at: new Date().toISOString(),
file_hashes: {
'src/config.ts': 'abc',
'src/telegram.ts': 'def',
},
},
{
name: 'discord',
version: '1.0.0',
applied_at: new Date().toISOString(),
file_hashes: {
'src/config.ts': 'ghi',
'src/discord.ts': 'jkl',
},
},
],
});
const result = await uninstallSkill('telegram');
expect(result.success).toBe(true);
// discord.ts should still exist
expect(fs.existsSync(path.join(tmpDir, 'src', 'discord.ts'))).toBe(true);
// telegram.ts should be gone
expect(fs.existsSync(path.join(tmpDir, 'src', 'telegram.ts'))).toBe(false);
// config should have discord import but not telegram
const config = fs.readFileSync(
path.join(tmpDir, 'src', 'config.ts'),
'utf-8',
);
expect(config).toContain('discord import');
expect(config).not.toContain('telegram import');
});
});

View File

@@ -0,0 +1,413 @@
import fs from 'fs';
import path from 'path';
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
import { stringify } from 'yaml';
import { cleanup, createTempDir, initGitRepo, setupNanoclawDir } from './test-helpers.js';
// We need to mock process.cwd() since update.ts uses it
let tmpDir: string;
// Unit tests for update.ts (previewUpdate / applyUpdate). update.js is
// imported dynamically inside each test, after the cwd spy is installed,
// so the module resolves paths against tmpDir.
describe('update', () => {
beforeEach(() => {
tmpDir = createTempDir();
setupNanoclawDir(tmpDir);
initGitRepo(tmpDir);
vi.spyOn(process, 'cwd').mockReturnValue(tmpDir);
});
afterEach(() => {
vi.restoreAllMocks();
cleanup(tmpDir);
});
// Writes a raw state.yaml directly (bypasses state.ts validation).
function writeStateFile(state: Record<string, unknown>): void {
const statePath = path.join(tmpDir, '.nanoclaw', 'state.yaml');
fs.writeFileSync(statePath, stringify(state), 'utf-8');
}
// Materializes an incoming "new core" tree from a relPath→content map
// and returns its directory.
function createNewCoreDir(files: Record<string, string>): string {
const newCoreDir = path.join(tmpDir, 'new-core');
fs.mkdirSync(newCoreDir, { recursive: true });
for (const [relPath, content] of Object.entries(files)) {
const fullPath = path.join(newCoreDir, relPath);
fs.mkdirSync(path.dirname(fullPath), { recursive: true });
fs.writeFileSync(fullPath, content);
}
return newCoreDir;
}
describe('previewUpdate', () => {
it('detects new files in update', async () => {
writeStateFile({
skills_system_version: '0.1.0',
core_version: '1.0.0',
applied_skills: [],
});
const newCoreDir = createNewCoreDir({
'src/new-file.ts': 'export const x = 1;',
});
const { previewUpdate } = await import('../update.js');
const preview = previewUpdate(newCoreDir);
expect(preview.filesChanged).toContain('src/new-file.ts');
expect(preview.currentVersion).toBe('1.0.0');
});
it('detects changed files vs base', async () => {
const baseDir = path.join(tmpDir, '.nanoclaw', 'base');
fs.mkdirSync(path.join(baseDir, 'src'), { recursive: true });
fs.writeFileSync(path.join(baseDir, 'src/index.ts'), 'original');
writeStateFile({
skills_system_version: '0.1.0',
core_version: '1.0.0',
applied_skills: [],
});
const newCoreDir = createNewCoreDir({
'src/index.ts': 'modified',
});
const { previewUpdate } = await import('../update.js');
const preview = previewUpdate(newCoreDir);
expect(preview.filesChanged).toContain('src/index.ts');
});
it('does not list unchanged files', async () => {
const baseDir = path.join(tmpDir, '.nanoclaw', 'base');
fs.mkdirSync(path.join(baseDir, 'src'), { recursive: true });
fs.writeFileSync(path.join(baseDir, 'src/index.ts'), 'same content');
writeStateFile({
skills_system_version: '0.1.0',
core_version: '1.0.0',
applied_skills: [],
});
const newCoreDir = createNewCoreDir({
'src/index.ts': 'same content',
});
const { previewUpdate } = await import('../update.js');
const preview = previewUpdate(newCoreDir);
expect(preview.filesChanged).not.toContain('src/index.ts');
});
// A core change to a file that an applied skill also touched (per its
// file_hashes) is flagged as a potential conflict.
it('identifies conflict risk with applied skills', async () => {
const baseDir = path.join(tmpDir, '.nanoclaw', 'base');
fs.mkdirSync(path.join(baseDir, 'src'), { recursive: true });
fs.writeFileSync(path.join(baseDir, 'src/index.ts'), 'original');
writeStateFile({
skills_system_version: '0.1.0',
core_version: '1.0.0',
applied_skills: [
{
name: 'telegram',
version: '1.0.0',
applied_at: new Date().toISOString(),
file_hashes: { 'src/index.ts': 'abc123' },
},
],
});
const newCoreDir = createNewCoreDir({
'src/index.ts': 'updated core',
});
const { previewUpdate } = await import('../update.js');
const preview = previewUpdate(newCoreDir);
expect(preview.conflictRisk).toContain('src/index.ts');
});
// Same idea for user custom modifications recorded in state.
it('identifies custom patches at risk', async () => {
const baseDir = path.join(tmpDir, '.nanoclaw', 'base');
fs.mkdirSync(path.join(baseDir, 'src'), { recursive: true });
fs.writeFileSync(path.join(baseDir, 'src/config.ts'), 'original');
writeStateFile({
skills_system_version: '0.1.0',
core_version: '1.0.0',
applied_skills: [],
custom_modifications: [
{
description: 'custom tweak',
applied_at: new Date().toISOString(),
files_modified: ['src/config.ts'],
patch_file: '.nanoclaw/custom/001-tweak.patch',
},
],
});
const newCoreDir = createNewCoreDir({
'src/config.ts': 'updated core config',
});
const { previewUpdate } = await import('../update.js');
const preview = previewUpdate(newCoreDir);
expect(preview.customPatchesAtRisk).toContain('src/config.ts');
});
it('reads version from package.json in new core', async () => {
writeStateFile({
skills_system_version: '0.1.0',
core_version: '1.0.0',
applied_skills: [],
});
const newCoreDir = createNewCoreDir({
'package.json': JSON.stringify({ version: '2.0.0' }),
});
const { previewUpdate } = await import('../update.js');
const preview = previewUpdate(newCoreDir);
expect(preview.newVersion).toBe('2.0.0');
});
// Deletion detection: a base file absent from the new core is reported
// in filesDeleted, not filesChanged.
it('detects files deleted in new core', async () => {
const baseDir = path.join(tmpDir, '.nanoclaw', 'base');
fs.mkdirSync(path.join(baseDir, 'src'), { recursive: true });
fs.writeFileSync(path.join(baseDir, 'src/index.ts'), 'keep this');
fs.writeFileSync(path.join(baseDir, 'src/removed.ts'), 'delete this');
writeStateFile({
skills_system_version: '0.1.0',
core_version: '1.0.0',
applied_skills: [],
});
// New core only has index.ts — removed.ts is gone
const newCoreDir = createNewCoreDir({
'src/index.ts': 'keep this',
});
const { previewUpdate } = await import('../update.js');
const preview = previewUpdate(newCoreDir);
expect(preview.filesDeleted).toContain('src/removed.ts');
expect(preview.filesChanged).not.toContain('src/removed.ts');
});
});
describe('applyUpdate', () => {
it('rejects when customize session is active', async () => {
writeStateFile({
skills_system_version: '0.1.0',
core_version: '1.0.0',
applied_skills: [],
});
// Create the pending.yaml that indicates active customize
const customDir = path.join(tmpDir, '.nanoclaw', 'custom');
fs.mkdirSync(customDir, { recursive: true });
fs.writeFileSync(path.join(customDir, 'pending.yaml'), 'active: true');
const newCoreDir = createNewCoreDir({
'src/index.ts': 'new content',
});
const { applyUpdate } = await import('../update.js');
const result = await applyUpdate(newCoreDir);
expect(result.success).toBe(false);
expect(result.error).toContain('customize session');
});
it('copies new files that do not exist yet', async () => {
writeStateFile({
skills_system_version: '0.1.0',
core_version: '1.0.0',
applied_skills: [],
});
const newCoreDir = createNewCoreDir({
'src/brand-new.ts': 'export const fresh = true;',
});
const { applyUpdate } = await import('../update.js');
const result = await applyUpdate(newCoreDir);
expect(result.error).toBeUndefined();
expect(result.success).toBe(true);
expect(
fs.readFileSync(path.join(tmpDir, 'src/brand-new.ts'), 'utf-8'),
).toBe('export const fresh = true;');
});
// Non-overlapping edits (core prepends, user appends) merge cleanly and
// both survive in the result.
it('performs clean three-way merge', async () => {
// Set up base
const baseDir = path.join(tmpDir, '.nanoclaw', 'base');
fs.mkdirSync(path.join(baseDir, 'src'), { recursive: true });
fs.writeFileSync(
path.join(baseDir, 'src/index.ts'),
'line 1\nline 2\nline 3\n',
);
// Current has user changes at the bottom
fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
fs.writeFileSync(
path.join(tmpDir, 'src/index.ts'),
'line 1\nline 2\nline 3\nuser addition\n',
);
writeStateFile({
skills_system_version: '0.1.0',
core_version: '1.0.0',
applied_skills: [],
});
// New core changes at the top
const newCoreDir = createNewCoreDir({
'src/index.ts': 'core update\nline 1\nline 2\nline 3\n',
'package.json': JSON.stringify({ version: '2.0.0' }),
});
const { applyUpdate } = await import('../update.js');
const result = await applyUpdate(newCoreDir);
expect(result.success).toBe(true);
expect(result.newVersion).toBe('2.0.0');
const merged = fs.readFileSync(
path.join(tmpDir, 'src/index.ts'),
'utf-8',
);
expect(merged).toContain('core update');
expect(merged).toContain('user addition');
});
it('updates base directory after successful merge', async () => {
const baseDir = path.join(tmpDir, '.nanoclaw', 'base');
fs.mkdirSync(path.join(baseDir, 'src'), { recursive: true });
fs.writeFileSync(path.join(baseDir, 'src/index.ts'), 'old base');
fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
fs.writeFileSync(path.join(tmpDir, 'src/index.ts'), 'old base');
writeStateFile({
skills_system_version: '0.1.0',
core_version: '1.0.0',
applied_skills: [],
});
const newCoreDir = createNewCoreDir({
'src/index.ts': 'new base content',
});
const { applyUpdate } = await import('../update.js');
await applyUpdate(newCoreDir);
const newBase = fs.readFileSync(
path.join(tmpDir, '.nanoclaw', 'base', 'src/index.ts'),
'utf-8',
);
expect(newBase).toBe('new base content');
});
it('updates core_version in state after success', async () => {
writeStateFile({
skills_system_version: '0.1.0',
core_version: '1.0.0',
applied_skills: [],
});
const newCoreDir = createNewCoreDir({
'package.json': JSON.stringify({ version: '2.0.0' }),
});
const { applyUpdate } = await import('../update.js');
const result = await applyUpdate(newCoreDir);
expect(result.success).toBe(true);
expect(result.previousVersion).toBe('1.0.0');
expect(result.newVersion).toBe('2.0.0');
// Verify state file was updated
const { readState } = await import('../state.js');
const state = readState();
expect(state.core_version).toBe('2.0.0');
});
// NOTE(review): the title says "restores backup", but the assertions
// verify the opposite strategy — the backup is kept pending and the
// working file is left WITH conflict markers. Consider renaming to
// e.g. 'keeps backup pending on merge conflict'.
it('restores backup on merge conflict', async () => {
const baseDir = path.join(tmpDir, '.nanoclaw', 'base');
fs.mkdirSync(path.join(baseDir, 'src'), { recursive: true });
fs.writeFileSync(
path.join(baseDir, 'src/index.ts'),
'line 1\nline 2\nline 3\n',
);
// Current has conflicting change on same line
fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
fs.writeFileSync(
path.join(tmpDir, 'src/index.ts'),
'line 1\nuser changed line 2\nline 3\n',
);
writeStateFile({
skills_system_version: '0.1.0',
core_version: '1.0.0',
applied_skills: [],
});
// New core also changes line 2 — guaranteed conflict
const newCoreDir = createNewCoreDir({
'src/index.ts': 'line 1\ncore changed line 2\nline 3\n',
});
const { applyUpdate } = await import('../update.js');
const result = await applyUpdate(newCoreDir);
expect(result.success).toBe(false);
expect(result.mergeConflicts).toContain('src/index.ts');
expect(result.backupPending).toBe(true);
// File should have conflict markers (backup preserved, not restored)
const content = fs.readFileSync(
path.join(tmpDir, 'src/index.ts'),
'utf-8',
);
expect(content).toContain('<<<<<<<');
expect(content).toContain('>>>>>>>');
});
it('removes files deleted in new core', async () => {
const baseDir = path.join(tmpDir, '.nanoclaw', 'base');
fs.mkdirSync(path.join(baseDir, 'src'), { recursive: true });
fs.writeFileSync(path.join(baseDir, 'src/index.ts'), 'keep');
fs.writeFileSync(path.join(baseDir, 'src/removed.ts'), 'old content');
// Working tree has both files
fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
fs.writeFileSync(path.join(tmpDir, 'src/index.ts'), 'keep');
fs.writeFileSync(path.join(tmpDir, 'src/removed.ts'), 'old content');
writeStateFile({
skills_system_version: '0.1.0',
core_version: '1.0.0',
applied_skills: [],
});
// New core only has index.ts
const newCoreDir = createNewCoreDir({
'src/index.ts': 'keep',
});
const { applyUpdate } = await import('../update.js');
const result = await applyUpdate(newCoreDir);
expect(result.success).toBe(true);
expect(fs.existsSync(path.join(tmpDir, 'src/index.ts'))).toBe(true);
expect(fs.existsSync(path.join(tmpDir, 'src/removed.ts'))).toBe(false);
});
});
});

397
skills-engine/apply.ts Normal file
View File

@@ -0,0 +1,397 @@
import { execFileSync, execSync } from 'child_process';
import crypto from 'crypto';
import fs from 'fs';
import os from 'os';
import path from 'path';
import { clearBackup, createBackup, restoreBackup } from './backup.js';
import { NANOCLAW_DIR } from './constants.js';
import { copyDir } from './fs-utils.js';
import { isCustomizeActive } from './customize.js';
import { executeFileOps } from './file-ops.js';
import { acquireLock } from './lock.js';
import {
checkConflicts,
checkCoreVersion,
checkDependencies,
checkSystemVersion,
readManifest,
} from './manifest.js';
import { loadPathRemap, resolvePathRemap } from './path-remap.js';
import {
cleanupMergeState,
isGitRepo,
mergeFile,
runRerere,
setupRerereAdapter,
} from './merge.js';
import { loadResolutions } from './resolution-cache.js';
import { computeFileHash, readState, recordSkillApplication, writeState } from './state.js';
import {
mergeDockerComposeServices,
mergeEnvAdditions,
mergeNpmDependencies,
runNpmInstall,
} from './structured.js';
import { ApplyResult } from './types.js';
/**
 * Apply a skill package from `skillDir` onto the current project.
 *
 * Pipeline: pre-flight checks (state/version/deps/conflicts) → backup →
 * declared file ops → copy added files → three-way merge of modified files
 * (with git rerere auto-resolution) → structured merges (npm/env/compose) →
 * post_apply commands → state recording → optional test command.
 *
 * On failure after the backup point, newly added files are removed and the
 * backup is restored — except for merge conflicts, where the backup is
 * deliberately preserved (`backupPending: true`) so the user can resolve
 * by hand and then call clearBackup() (or restoreBackup() to abort).
 *
 * @param skillDir Directory containing manifest.yaml plus add/ and modify/.
 * @returns ApplyResult describing success or the reason for failure.
 */
export async function applySkill(skillDir: string): Promise<ApplyResult> {
  const projectRoot = process.cwd();
  const manifest = readManifest(skillDir);
  // --- Pre-flight checks ---
  const currentState = readState(); // Validates state exists and version is compatible
  // Check skills system version compatibility
  const sysCheck = checkSystemVersion(manifest);
  if (!sysCheck.ok) {
    return {
      success: false,
      skill: manifest.skill,
      version: manifest.version,
      error: sysCheck.error,
    };
  }
  // Check core version compatibility (advisory only — never blocks)
  const coreCheck = checkCoreVersion(manifest);
  if (coreCheck.warning) {
    console.log(`Warning: ${coreCheck.warning}`);
  }
  // Block if customize session is active
  if (isCustomizeActive()) {
    return {
      success: false,
      skill: manifest.skill,
      version: manifest.version,
      error:
        'A customize session is active. Run commitCustomize() or abortCustomize() first.',
    };
  }
  const deps = checkDependencies(manifest);
  if (!deps.ok) {
    return {
      success: false,
      skill: manifest.skill,
      version: manifest.version,
      error: `Missing dependencies: ${deps.missing.join(', ')}`,
    };
  }
  const conflicts = checkConflicts(manifest);
  if (!conflicts.ok) {
    return {
      success: false,
      skill: manifest.skill,
      version: manifest.version,
      error: `Conflicting skills: ${conflicts.conflicting.join(', ')}`,
    };
  }
  // Load path remap for renamed core files
  const pathRemap = loadPathRemap();
  // Detect drift: modified files whose content no longer matches the base snapshot
  const driftFiles: string[] = [];
  for (const relPath of manifest.modifies) {
    const resolvedPath = resolvePathRemap(relPath, pathRemap);
    const currentPath = path.join(projectRoot, resolvedPath);
    const basePath = path.join(projectRoot, NANOCLAW_DIR, 'base', resolvedPath);
    if (fs.existsSync(currentPath) && fs.existsSync(basePath)) {
      const currentHash = computeFileHash(currentPath);
      const baseHash = computeFileHash(basePath);
      if (currentHash !== baseHash) {
        driftFiles.push(relPath);
      }
    }
  }
  if (driftFiles.length > 0) {
    console.log(`Drift detected in: ${driftFiles.join(', ')}`);
    console.log('Three-way merge will be used to reconcile changes.');
  }
  // --- Acquire lock ---
  const releaseLock = acquireLock();
  // Track added files so we can remove them on rollback
  const addedFiles: string[] = [];
  try {
    // --- Backup everything the apply may touch ---
    const filesToBackup = [
      ...manifest.modifies.map((f) => path.join(projectRoot, resolvePathRemap(f, pathRemap))),
      ...manifest.adds.map((f) => path.join(projectRoot, resolvePathRemap(f, pathRemap))),
      ...(manifest.file_ops || [])
        .filter((op) => op.from)
        .map((op) => path.join(projectRoot, resolvePathRemap(op.from!, pathRemap))),
      path.join(projectRoot, 'package.json'),
      path.join(projectRoot, 'package-lock.json'),
      path.join(projectRoot, '.env.example'),
      path.join(projectRoot, 'docker-compose.yml'),
    ];
    createBackup(filesToBackup);
    // --- File operations (before copy adds, per architecture doc) ---
    if (manifest.file_ops && manifest.file_ops.length > 0) {
      const fileOpsResult = executeFileOps(manifest.file_ops, projectRoot);
      if (!fileOpsResult.success) {
        restoreBackup();
        clearBackup();
        return {
          success: false,
          skill: manifest.skill,
          version: manifest.version,
          error: `File operations failed: ${fileOpsResult.errors.join('; ')}`,
        };
      }
    }
    // --- Copy new files from add/ ---
    const addDir = path.join(skillDir, 'add');
    if (fs.existsSync(addDir)) {
      for (const relPath of manifest.adds) {
        const resolvedDest = resolvePathRemap(relPath, pathRemap);
        const destPath = path.join(projectRoot, resolvedDest);
        if (!fs.existsSync(destPath)) {
          addedFiles.push(destPath);
        }
        // Copy individual file with remap (can't use copyDir when paths differ)
        const srcPath = path.join(addDir, relPath);
        if (fs.existsSync(srcPath)) {
          fs.mkdirSync(path.dirname(destPath), { recursive: true });
          fs.copyFileSync(srcPath, destPath);
        }
      }
    }
    // --- Merge modified files ---
    const mergeConflicts: string[] = [];
    // Load pre-computed resolutions into git's rr-cache before merging
    const appliedSkillNames = currentState.applied_skills.map((s) => s.name);
    loadResolutions([...appliedSkillNames, manifest.skill], projectRoot, skillDir);
    for (const relPath of manifest.modifies) {
      const resolvedPath = resolvePathRemap(relPath, pathRemap);
      const currentPath = path.join(projectRoot, resolvedPath);
      const basePath = path.join(projectRoot, NANOCLAW_DIR, 'base', resolvedPath);
      // skillPath uses original relPath — skill packages are never mutated
      const skillPath = path.join(skillDir, 'modify', relPath);
      if (!fs.existsSync(skillPath)) {
        throw new Error(`Skill modified file not found: ${skillPath}`);
      }
      if (!fs.existsSync(currentPath)) {
        // File doesn't exist yet — just copy from skill
        fs.mkdirSync(path.dirname(currentPath), { recursive: true });
        fs.copyFileSync(skillPath, currentPath);
        continue;
      }
      if (!fs.existsSync(basePath)) {
        // No base — use current as base (first-time apply)
        fs.mkdirSync(path.dirname(basePath), { recursive: true });
        fs.copyFileSync(currentPath, basePath);
      }
      // Three-way merge: current ← base → skill
      // Save current content before merge overwrites it (needed for rerere stage 2 = "ours")
      const oursContent = fs.readFileSync(currentPath, 'utf-8');
      // git merge-file modifies the first argument in-place, so use a temp copy
      const tmpCurrent = path.join(
        os.tmpdir(),
        `nanoclaw-merge-${crypto.randomUUID()}-${path.basename(relPath)}`,
      );
      fs.copyFileSync(currentPath, tmpCurrent);
      const result = mergeFile(tmpCurrent, basePath, skillPath);
      if (result.clean) {
        fs.copyFileSync(tmpCurrent, currentPath);
        fs.unlinkSync(tmpCurrent);
      } else {
        // Copy conflict markers to working tree path BEFORE rerere
        // rerere looks at the working tree file at relPath, not at tmpCurrent
        fs.copyFileSync(tmpCurrent, currentPath);
        fs.unlinkSync(tmpCurrent);
        if (isGitRepo()) {
          const baseContent = fs.readFileSync(basePath, 'utf-8');
          const theirsContent = fs.readFileSync(skillPath, 'utf-8');
          setupRerereAdapter(resolvedPath, baseContent, oursContent, theirsContent);
          const autoResolved = runRerere(currentPath);
          if (autoResolved) {
            // rerere resolved the conflict — currentPath now has resolved content
            // Record the resolution: git add + git rerere
            execFileSync('git', ['add', resolvedPath], { stdio: 'pipe' });
            execSync('git rerere', { stdio: 'pipe' });
            cleanupMergeState(resolvedPath);
            // Unstage the file — cleanupMergeState clears unmerged entries
            // but the git add above leaves the file staged at stage 0
            try {
              execFileSync('git', ['restore', '--staged', resolvedPath], { stdio: 'pipe' });
            } catch { /* may fail if file is new or not tracked */ }
            continue;
          }
          cleanupMergeState(resolvedPath);
        }
        // Unresolved conflict — currentPath already has conflict markers
        mergeConflicts.push(relPath);
      }
    }
    if (mergeConflicts.length > 0) {
      // Bug 4 fix: Preserve backup when returning with conflicts
      return {
        success: false,
        skill: manifest.skill,
        version: manifest.version,
        mergeConflicts,
        backupPending: true,
        untrackedChanges: driftFiles.length > 0 ? driftFiles : undefined,
        error: `Merge conflicts in: ${mergeConflicts.join(', ')}. Resolve manually then run recordSkillApplication(). Call clearBackup() after resolution or restoreBackup() + clearBackup() to abort.`,
      };
    }
    // --- Structured operations ---
    if (manifest.structured?.npm_dependencies) {
      const pkgPath = path.join(projectRoot, 'package.json');
      mergeNpmDependencies(pkgPath, manifest.structured.npm_dependencies);
    }
    if (manifest.structured?.env_additions) {
      const envPath = path.join(projectRoot, '.env.example');
      mergeEnvAdditions(envPath, manifest.structured.env_additions);
    }
    if (manifest.structured?.docker_compose_services) {
      const composePath = path.join(projectRoot, 'docker-compose.yml');
      mergeDockerComposeServices(
        composePath,
        manifest.structured.docker_compose_services,
      );
    }
    // Run npm install if dependencies were added
    if (
      manifest.structured?.npm_dependencies &&
      Object.keys(manifest.structured.npm_dependencies).length > 0
    ) {
      runNpmInstall();
    }
    // --- Post-apply commands ---
    if (manifest.post_apply && manifest.post_apply.length > 0) {
      for (const cmd of manifest.post_apply) {
        try {
          execSync(cmd, { stdio: 'pipe', cwd: projectRoot, timeout: 120_000 });
        } catch (postErr: unknown) {
          // Rollback on post_apply failure
          for (const f of addedFiles) {
            try {
              if (fs.existsSync(f)) fs.unlinkSync(f);
            } catch { /* best effort */ }
          }
          restoreBackup();
          clearBackup();
          // Fix: original interpolated `${cmd}${message}` with no separator
          const msg = postErr instanceof Error ? postErr.message : String(postErr);
          return {
            success: false,
            skill: manifest.skill,
            version: manifest.version,
            error: `post_apply command failed: ${cmd}: ${msg}`,
          };
        }
      }
    }
    // --- Update state ---
    const fileHashes: Record<string, string> = {};
    for (const relPath of [...manifest.adds, ...manifest.modifies]) {
      const resolvedPath = resolvePathRemap(relPath, pathRemap);
      const absPath = path.join(projectRoot, resolvedPath);
      if (fs.existsSync(absPath)) {
        fileHashes[resolvedPath] = computeFileHash(absPath);
      }
    }
    // Store structured outcomes including the test command so applyUpdate() can run them
    const outcomes: Record<string, unknown> = manifest.structured
      ? { ...manifest.structured }
      : {};
    if (manifest.test) {
      outcomes.test = manifest.test;
    }
    recordSkillApplication(
      manifest.skill,
      manifest.version,
      fileHashes,
      Object.keys(outcomes).length > 0 ? outcomes : undefined,
    );
    // --- Bug 3 fix: Execute test command if defined ---
    if (manifest.test) {
      try {
        execSync(manifest.test, {
          stdio: 'pipe',
          cwd: projectRoot,
          timeout: 120_000,
        });
      } catch (testErr: unknown) {
        // Tests failed — remove added files, restore backup and undo state
        for (const f of addedFiles) {
          try {
            if (fs.existsSync(f)) fs.unlinkSync(f);
          } catch { /* best effort */ }
        }
        restoreBackup();
        // Re-read state and remove the skill we just recorded
        const state = readState();
        state.applied_skills = state.applied_skills.filter(
          (s) => s.name !== manifest.skill,
        );
        writeState(state);
        clearBackup();
        const msg = testErr instanceof Error ? testErr.message : String(testErr);
        return {
          success: false,
          skill: manifest.skill,
          version: manifest.version,
          error: `Tests failed: ${msg}`,
        };
      }
    }
    // --- Cleanup ---
    clearBackup();
    return {
      success: true,
      skill: manifest.skill,
      version: manifest.version,
      untrackedChanges: driftFiles.length > 0 ? driftFiles : undefined,
    };
  } catch (err) {
    // Remove newly added files before restoring backup
    for (const f of addedFiles) {
      try {
        if (fs.existsSync(f)) fs.unlinkSync(f);
      } catch { /* best effort */ }
    }
    restoreBackup();
    clearBackup();
    throw err;
  } finally {
    releaseLock();
  }
}

65
skills-engine/backup.ts Normal file
View File

@@ -0,0 +1,65 @@
import fs from 'fs';
import path from 'path';
import { BACKUP_DIR } from './constants.js';
// Marker suffix for files that did not exist when the backup was taken.
const TOMBSTONE_SUFFIX = '.tombstone';

/** Absolute backup directory under the current working directory. */
function getBackupDir(): string {
  const projectRoot = process.cwd();
  return path.join(projectRoot, BACKUP_DIR);
}
/**
 * Snapshot the given files into the backup directory, mirroring their
 * project-relative layout. Files that do not exist yet are recorded as
 * tombstones so restoreBackup() knows to delete them.
 */
export function createBackup(filePaths: string[]): void {
  const backupDir = getBackupDir();
  fs.mkdirSync(backupDir, { recursive: true });
  const cwd = process.cwd();
  for (const filePath of filePaths) {
    const absPath = path.resolve(filePath);
    const backupPath = path.join(backupDir, path.relative(cwd, absPath));
    fs.mkdirSync(path.dirname(backupPath), { recursive: true });
    if (!fs.existsSync(absPath)) {
      // Missing file: leave a tombstone so restore can delete it later.
      fs.writeFileSync(backupPath + TOMBSTONE_SUFFIX, '', 'utf-8');
    } else {
      fs.copyFileSync(absPath, backupPath);
    }
  }
}
/**
 * Restore every backed-up file to its original location. Tombstone entries
 * cause the corresponding project file to be deleted instead.
 */
export function restoreBackup(): void {
  const backupDir = getBackupDir();
  if (!fs.existsSync(backupDir)) return;
  const cwd = process.cwd();
  const pending: string[] = [backupDir];
  while (pending.length > 0) {
    const dir = pending.pop()!;
    for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
      const fullPath = path.join(dir, entry.name);
      if (entry.isDirectory()) {
        pending.push(fullPath);
        continue;
      }
      const relFromBackup = path.relative(backupDir, fullPath);
      if (entry.name.endsWith(TOMBSTONE_SUFFIX)) {
        // Tombstone: the file did not exist at backup time — remove it.
        const target = path.join(
          cwd,
          relFromBackup.slice(0, -TOMBSTONE_SUFFIX.length),
        );
        if (fs.existsSync(target)) {
          fs.unlinkSync(target);
        }
      } else {
        const target = path.join(cwd, relFromBackup);
        fs.mkdirSync(path.dirname(target), { recursive: true });
        fs.copyFileSync(fullPath, target);
      }
    }
  }
}
/** Delete the backup directory and everything in it, if present. */
export function clearBackup(): void {
  const backupDir = getBackupDir();
  if (!fs.existsSync(backupDir)) return;
  fs.rmSync(backupDir, { recursive: true, force: true });
}

View File

@@ -0,0 +1,9 @@
// Root directory for all skills-engine bookkeeping inside a project.
export const NANOCLAW_DIR = '.nanoclaw';
// Name of the skill-state YAML file (presumably resolved by state.ts — verify location).
export const STATE_FILE = 'state.yaml';
// Pristine core snapshot used as the base side of three-way merges.
export const BASE_DIR = '.nanoclaw/base';
// Backup of files touched by an in-flight apply/update (see backup.ts).
export const BACKUP_DIR = '.nanoclaw/backup';
// Lock file guarding concurrent engine operations (see lock.ts).
export const LOCK_FILE = '.nanoclaw/lock';
// Customize-session data: pending.yaml marker and committed patch files.
export const CUSTOM_DIR = '.nanoclaw/custom';
// Locally recorded rerere resolutions — NOTE(review): used by resolution-cache.ts; confirm.
export const RESOLUTIONS_DIR = '.nanoclaw/resolutions';
// Resolutions shipped alongside skill packages — NOTE(review): confirm producer.
export const SHIPPED_RESOLUTIONS_DIR = '.claude/resolutions';
// Version of the skills state schema this engine writes and accepts.
export const SKILLS_SCHEMA_VERSION = '0.1.0';

144
skills-engine/customize.ts Normal file
View File

@@ -0,0 +1,144 @@
import { execFileSync, execSync } from 'child_process';
import fs from 'fs';
import path from 'path';
import { parse, stringify } from 'yaml';
import { BASE_DIR, CUSTOM_DIR } from './constants.js';
import { computeFileHash, readState, recordCustomModification } from './state.js';
// Snapshot of an in-flight customize session, persisted as pending.yaml.
interface PendingCustomize {
  description: string; // human description; later slugified into the patch filename
  started_at: string; // ISO-8601 timestamp of session start
  file_hashes: Record<string, string>; // relative path -> content hash at session start
}

// Location of the pending-session marker file under CUSTOM_DIR.
function getPendingPath(): string {
  return path.join(process.cwd(), CUSTOM_DIR, 'pending.yaml');
}
export function isCustomizeActive(): boolean {
return fs.existsSync(getPendingPath());
}
export function startCustomize(description: string): void {
if (isCustomizeActive()) {
throw new Error(
'A customize session is already active. Commit or abort it first.',
);
}
const state = readState();
// Collect all file hashes from applied skills
const fileHashes: Record<string, string> = {};
for (const skill of state.applied_skills) {
for (const [relativePath, hash] of Object.entries(skill.file_hashes)) {
fileHashes[relativePath] = hash;
}
}
const pending: PendingCustomize = {
description,
started_at: new Date().toISOString(),
file_hashes: fileHashes,
};
const customDir = path.join(process.cwd(), CUSTOM_DIR);
fs.mkdirSync(customDir, { recursive: true });
fs.writeFileSync(getPendingPath(), stringify(pending), 'utf-8');
}
/**
 * Close the active customize session: diff every tracked file against the
 * base snapshot, write the combined diff as a numbered patch under
 * CUSTOM_DIR, and record the modification in state.
 *
 * NOTE(review): the generated patch headers embed the absolute basePath /
 * currentPath — confirm downstream patch application tolerates that.
 */
export function commitCustomize(): void {
  const pendingPath = getPendingPath();
  if (!fs.existsSync(pendingPath)) {
    throw new Error('No active customize session. Run startCustomize() first.');
  }
  const pending = parse(
    fs.readFileSync(pendingPath, 'utf-8'),
  ) as PendingCustomize;
  const cwd = process.cwd();
  // Find files that changed (by hash) since startCustomize() snapshotted them
  const changedFiles: string[] = [];
  for (const relativePath of Object.keys(pending.file_hashes)) {
    const fullPath = path.join(cwd, relativePath);
    if (!fs.existsSync(fullPath)) {
      // File was deleted — counts as changed
      changedFiles.push(relativePath);
      continue;
    }
    const currentHash = computeFileHash(fullPath);
    if (currentHash !== pending.file_hashes[relativePath]) {
      changedFiles.push(relativePath);
    }
  }
  if (changedFiles.length === 0) {
    console.log('No files changed during customize session. Nothing to commit.');
    fs.unlinkSync(pendingPath);
    return;
  }
  // Generate unified diff for each changed file
  const baseDir = path.join(cwd, BASE_DIR);
  let combinedPatch = '';
  for (const relativePath of changedFiles) {
    const basePath = path.join(baseDir, relativePath);
    const currentPath = path.join(cwd, relativePath);
    // Use /dev/null if either side doesn't exist (added or deleted file)
    const oldPath = fs.existsSync(basePath) ? basePath : '/dev/null';
    const newPath = fs.existsSync(currentPath) ? currentPath : '/dev/null';
    try {
      const diff = execFileSync('diff', ['-ruN', oldPath, newPath], {
        encoding: 'utf-8',
      });
      combinedPatch += diff;
    } catch (err: unknown) {
      const execErr = err as { status?: number; stdout?: string };
      if (execErr.status === 1 && execErr.stdout) {
        // diff exits 1 when files differ — that's expected
        combinedPatch += execErr.stdout;
      } else if (execErr.status === 2) {
        // diff exits 2 on real trouble (unreadable input, bad arguments)
        throw new Error(`diff error for ${relativePath}: diff exited with status 2 (check file permissions or encoding)`);
      } else {
        throw err;
      }
    }
  }
  if (!combinedPatch.trim()) {
    console.log('Diff was empty despite hash changes. Nothing to commit.');
    fs.unlinkSync(pendingPath);
    return;
  }
  // Determine sequence number (001, 002, …) from prior custom modifications
  const state = readState();
  const existingCount = state.custom_modifications?.length ?? 0;
  const seqNum = String(existingCount + 1).padStart(3, '0');
  // Sanitize description for filename (slug of lowercase alphanumerics and dashes)
  const sanitized = pending.description
    .toLowerCase()
    .replace(/[^a-z0-9]+/g, '-')
    .replace(/^-|-$/g, '');
  const patchFilename = `${seqNum}-${sanitized}.patch`;
  const patchRelPath = path.join(CUSTOM_DIR, patchFilename);
  const patchFullPath = path.join(cwd, patchRelPath);
  fs.writeFileSync(patchFullPath, combinedPatch, 'utf-8');
  recordCustomModification(pending.description, changedFiles, patchRelPath);
  // The session is closed only after the patch and state are safely written
  fs.unlinkSync(pendingPath);
}
/** Discard the pending session marker; committed patches are untouched. */
export function abortCustomize(): void {
  const pendingPath = getPendingPath();
  if (!fs.existsSync(pendingPath)) return;
  fs.unlinkSync(pendingPath);
}

126
skills-engine/file-ops.ts Normal file
View File

@@ -0,0 +1,126 @@
import fs from 'fs';
import path from 'path';
import type { FileOperation, FileOpsResult } from './types.js';
/**
 * Resolve `relativePath` against `projectRoot` and reject any result that
 * escapes the root (path traversal protection).
 *
 * Generalized: the root is normalized internally, so callers may pass a
 * relative root as well (previously an unresolved root made every check fail).
 *
 * @returns The absolute resolved path, or null if it lands outside the root.
 */
function safePath(projectRoot: string, relativePath: string): string | null {
  const root = path.resolve(projectRoot);
  const resolved = path.resolve(root, relativePath);
  if (resolved !== root && !resolved.startsWith(root + path.sep)) {
    return null;
  }
  return resolved;
}
/**
 * Execute the manifest's declared file operations (rename / move / delete)
 * inside `projectRoot`, stopping at the first failure. Every path is
 * validated against the project root before use. A delete of a missing
 * file is tolerated and recorded as a warning.
 */
export function executeFileOps(ops: FileOperation[], projectRoot: string): FileOpsResult {
  const result: FileOpsResult = {
    success: true,
    executed: [],
    warnings: [],
    errors: [],
  };
  const root = path.resolve(projectRoot);

  // Traversal guard: resolve rel against root, null if it escapes.
  const resolveInsideRoot = (rel: string): string | null => {
    const resolved = path.resolve(root, rel);
    if (resolved !== root && !resolved.startsWith(root + path.sep)) {
      return null;
    }
    return resolved;
  };
  // Record a fatal error and hand back the (failed) result.
  const fail = (message: string): FileOpsResult => {
    result.errors.push(message);
    result.success = false;
    return result;
  };

  for (const op of ops) {
    switch (op.type) {
      // rename and move share validation; move additionally creates the
      // destination directory while rename requires it to already exist.
      case 'rename':
      case 'move': {
        const verb = op.type;
        if (!op.from || !op.to) {
          return fail(`${verb}: requires 'from' and 'to'`);
        }
        const fromPath = resolveInsideRoot(op.from);
        if (!fromPath) {
          return fail(`${verb}: path escapes project root: ${op.from}`);
        }
        const toPath = resolveInsideRoot(op.to);
        if (!toPath) {
          return fail(`${verb}: path escapes project root: ${op.to}`);
        }
        if (!fs.existsSync(fromPath)) {
          return fail(`${verb}: source does not exist: ${op.from}`);
        }
        if (fs.existsSync(toPath)) {
          return fail(`${verb}: target already exists: ${op.to}`);
        }
        if (verb === 'move') {
          fs.mkdirSync(path.dirname(toPath), { recursive: true });
        }
        fs.renameSync(fromPath, toPath);
        result.executed.push(op);
        break;
      }
      case 'delete': {
        if (!op.path) {
          return fail(`delete: requires 'path'`);
        }
        const delPath = resolveInsideRoot(op.path);
        if (!delPath) {
          return fail(`delete: path escapes project root: ${op.path}`);
        }
        if (fs.existsSync(delPath)) {
          fs.unlinkSync(delPath);
        } else {
          // Missing target is tolerated: warn and keep going.
          result.warnings.push(`delete: file does not exist (skipped): ${op.path}`);
        }
        result.executed.push(op);
        break;
      }
      default: {
        return fail(`unknown operation type: ${(op as FileOperation).type}`);
      }
    }
  }
  return result;
}

21
skills-engine/fs-utils.ts Normal file
View File

@@ -0,0 +1,21 @@
import fs from 'fs';
import path from 'path';
/**
 * Recursively copy a directory tree from src to dest,
 * creating destination directories as needed.
 */
export function copyDir(src: string, dest: string): void {
  const entries = fs.readdirSync(src, { withFileTypes: true });
  for (const entry of entries) {
    const from = path.join(src, entry.name);
    const to = path.join(dest, entry.name);
    if (!entry.isDirectory()) {
      fs.mkdirSync(path.dirname(to), { recursive: true });
      fs.copyFileSync(from, to);
    } else {
      fs.mkdirSync(to, { recursive: true });
      copyDir(from, to);
    }
  }
}

85
skills-engine/index.ts Normal file
View File

@@ -0,0 +1,85 @@
// Public barrel for the skills engine. Grouped by subsystem.

// Core apply pipeline
export { applySkill } from './apply.js';
// Backup / restore safety net used around apply and update
export { clearBackup, createBackup, restoreBackup } from './backup.js';
// Well-known paths and schema version
export {
  BACKUP_DIR,
  BASE_DIR,
  SKILLS_SCHEMA_VERSION,
  CUSTOM_DIR,
  LOCK_FILE,
  NANOCLAW_DIR,
  RESOLUTIONS_DIR,
  SHIPPED_RESOLUTIONS_DIR,
  STATE_FILE,
} from './constants.js';
// Customize sessions (manual edits tracked as patches)
export {
  abortCustomize,
  commitCustomize,
  isCustomizeActive,
  startCustomize,
} from './customize.js';
// Manifest-declared file operations (rename/move/delete)
export { executeFileOps } from './file-ops.js';
// One-time .nanoclaw initialization
export { initNanoclawDir } from './init.js';
// Cross-process locking
export { acquireLock, isLocked, releaseLock } from './lock.js';
// Manifest reading and pre-flight checks
export {
  checkConflicts,
  checkCoreVersion,
  checkDependencies,
  checkSystemVersion,
  readManifest,
} from './manifest.js';
// Three-way merge and git rerere plumbing
export {
  cleanupMergeState,
  isGitRepo,
  mergeFile,
  runRerere,
  setupRerereAdapter,
} from './merge.js';
// Remapping for core files renamed after a skill was authored
export {
  loadPathRemap,
  recordPathRemap,
  resolvePathRemap,
} from './path-remap.js';
// Rebase (bake applied skills into base) and replay (uninstall/rebuild)
export { rebase } from './rebase.js';
export { findSkillDir, replaySkills } from './replay.js';
export type { ReplayOptions, ReplayResult } from './replay.js';
export { uninstallSkill } from './uninstall.js';
// Migration of pre-skills installations
export { initSkillsSystem, migrateExisting } from './migrate.js';
// Pre-computed rerere resolution cache
export {
  clearAllResolutions,
  findResolutionDir,
  loadResolutions,
  saveResolution,
} from './resolution-cache.js';
// Core version updates
export { applyUpdate, previewUpdate } from './update.js';
// State file access and helpers
export {
  compareSemver,
  computeFileHash,
  getAppliedSkills,
  getCustomModifications,
  readState,
  recordCustomModification,
  recordSkillApplication,
  writeState,
} from './state.js';
// Structured merges: npm deps, .env.example, docker-compose services
export {
  areRangesCompatible,
  mergeDockerComposeServices,
  mergeEnvAdditions,
  mergeNpmDependencies,
  runNpmInstall,
} from './structured.js';
// Shared types
export type {
  AppliedSkill,
  ApplyResult,
  CustomModification,
  FileOpsResult,
  FileOperation,
  MergeResult,
  RebaseResult,
  ResolutionMeta,
  SkillManifest,
  SkillState,
  UninstallResult,
  UpdatePreview,
  UpdateResult,
} from './types.js';

103
skills-engine/init.ts Normal file
View File

@@ -0,0 +1,103 @@
import { execSync } from 'child_process';
import fs from 'fs';
import path from 'path';
import { BACKUP_DIR, BASE_DIR, NANOCLAW_DIR } from './constants.js';
import { isGitRepo } from './merge.js';
import { writeState } from './state.js';
import { SkillState } from './types.js';
// Top-level paths to include in the base snapshot (the merge base for skills).
const BASE_INCLUDES = ['src/', 'package.json', '.env.example', 'container/'];
// Directories/files to always exclude from the base snapshot
// (build output, runtime data, VCS and dependency directories).
const BASE_EXCLUDES = [
  'node_modules',
  '.nanoclaw',
  '.git',
  'dist',
  'data',
  'groups',
  'store',
  'logs',
];
export function initNanoclawDir(): void {
const projectRoot = process.cwd();
const nanoclawDir = path.join(projectRoot, NANOCLAW_DIR);
const baseDir = path.join(projectRoot, BASE_DIR);
// Create structure
fs.mkdirSync(path.join(projectRoot, BACKUP_DIR), { recursive: true });
// Clean existing base
if (fs.existsSync(baseDir)) {
fs.rmSync(baseDir, { recursive: true, force: true });
}
fs.mkdirSync(baseDir, { recursive: true });
// Snapshot all included paths
for (const include of BASE_INCLUDES) {
const srcPath = path.join(projectRoot, include);
if (!fs.existsSync(srcPath)) continue;
const destPath = path.join(baseDir, include);
const stat = fs.statSync(srcPath);
if (stat.isDirectory()) {
copyDirFiltered(srcPath, destPath, BASE_EXCLUDES);
} else {
fs.mkdirSync(path.dirname(destPath), { recursive: true });
fs.copyFileSync(srcPath, destPath);
}
}
// Create initial state
const coreVersion = getCoreVersion(projectRoot);
const initialState: SkillState = {
skills_system_version: '0.1.0',
core_version: coreVersion,
applied_skills: [],
};
writeState(initialState);
// Enable git rerere if in a git repo
if (isGitRepo()) {
try {
execSync('git config --local rerere.enabled true', { stdio: 'pipe' });
} catch {
// Non-fatal
}
}
}
// Copy a directory tree, skipping any entry whose name appears in `excludes`.
function copyDirFiltered(
  src: string,
  dest: string,
  excludes: string[],
): void {
  fs.mkdirSync(dest, { recursive: true });
  const entries = fs.readdirSync(src, { withFileTypes: true });
  for (const entry of entries) {
    if (excludes.includes(entry.name)) continue;
    const from = path.join(src, entry.name);
    const to = path.join(dest, entry.name);
    if (!entry.isDirectory()) {
      fs.copyFileSync(from, to);
    } else {
      copyDirFiltered(from, to, excludes);
    }
  }
}
// Read the project version from package.json, defaulting to '0.0.0'
// when the file is missing, unparseable, or has no version field.
function getCoreVersion(projectRoot: string): string {
  const pkgPath = path.join(projectRoot, 'package.json');
  try {
    const raw = fs.readFileSync(pkgPath, 'utf-8');
    const { version } = JSON.parse(raw);
    return version || '0.0.0';
  } catch {
    return '0.0.0';
  }
}

102
skills-engine/lock.ts Normal file
View File

@@ -0,0 +1,102 @@
import fs from 'fs';
import path from 'path';
import { LOCK_FILE } from './constants.js';
// Locks older than this are considered abandoned and may be taken over.
const STALE_TIMEOUT_MS = 5 * 60 * 1000; // 5 minutes

// Contents of the lock file: owner pid plus acquisition time.
interface LockInfo {
  pid: number;
  timestamp: number;
}

// Absolute path of the lock file under the current working directory.
function getLockPath(): string {
  const root = process.cwd();
  return path.join(root, LOCK_FILE);
}

// True when the lock was acquired longer ago than the stale timeout.
function isStale(lock: LockInfo): boolean {
  const age = Date.now() - lock.timestamp;
  return age > STALE_TIMEOUT_MS;
}

// Signal 0 probes for process existence without delivering a signal.
function isProcessAlive(pid: number): boolean {
  try {
    process.kill(pid, 0);
    return true;
  } catch {
    return false;
  }
}
/**
 * Acquire the engine lock, taking over stale or dead-owner locks.
 * Returns a release function. Throws when a live, fresh lock is held by
 * another process, or when a takeover loses the race to a competitor.
 */
export function acquireLock(): () => void {
  const lockPath = getLockPath();
  fs.mkdirSync(path.dirname(lockPath), { recursive: true });
  const payload = JSON.stringify({ pid: process.pid, timestamp: Date.now() });
  try {
    // Atomic creation — 'wx' fails if the file already exists.
    fs.writeFileSync(lockPath, payload, { flag: 'wx' });
    return () => releaseLock();
  } catch {
    // Fall through to the takeover path below.
  }
  // Inspect the current holder; a corrupt/unreadable file counts as no holder.
  let holder: LockInfo | undefined;
  try {
    holder = JSON.parse(fs.readFileSync(lockPath, 'utf-8'));
  } catch {
    holder = undefined;
  }
  if (holder && !isStale(holder) && isProcessAlive(holder.pid)) {
    throw new Error(
      `Operation in progress (pid ${holder.pid}, started ${new Date(holder.timestamp).toISOString()}). If this is stale, delete ${LOCK_FILE}`,
    );
  }
  // Stale, dead, or corrupt — remove and retry atomically.
  try { fs.unlinkSync(lockPath); } catch { /* already gone */ }
  try {
    fs.writeFileSync(lockPath, payload, { flag: 'wx' });
  } catch {
    throw new Error('Lock contention: another process acquired the lock. Retry.');
  }
  return () => releaseLock();
}
/**
 * Release the lock if this process owns it. A corrupt or unreadable lock
 * file is removed unconditionally.
 */
export function releaseLock(): void {
  const lockPath = getLockPath();
  if (!fs.existsSync(lockPath)) return;
  let owner: number | undefined;
  try {
    const lock: LockInfo = JSON.parse(fs.readFileSync(lockPath, 'utf-8'));
    owner = lock.pid;
  } catch {
    // Corrupt/unreadable — treat as ours so it gets cleaned up.
    owner = process.pid;
  }
  if (owner !== process.pid) return;
  try {
    fs.unlinkSync(lockPath);
  } catch {
    // Already gone
  }
}
/** True when a fresh lock exists and its owning process is still alive. */
export function isLocked(): boolean {
  const lockPath = getLockPath();
  let raw: string;
  try {
    raw = fs.readFileSync(lockPath, 'utf-8');
  } catch {
    return false;
  }
  try {
    const lock: LockInfo = JSON.parse(raw);
    return !isStale(lock) && isProcessAlive(lock.pid);
  } catch {
    return false;
  }
}

99
skills-engine/manifest.ts Normal file
View File

@@ -0,0 +1,99 @@
import fs from 'fs';
import path from 'path';
import { parse } from 'yaml';
import { SKILLS_SCHEMA_VERSION } from './constants.js';
import { getAppliedSkills, readState, compareSemver } from './state.js';
import { SkillManifest } from './types.js';
/**
 * Read and validate a skill's manifest.yaml.
 *
 * Validates required fields, that adds/modifies are lists, and that no
 * declared path is absolute or contains ".." (path traversal protection).
 * Fills in defaults for conflicts/depends/file_ops.
 *
 * @throws Error when the manifest is missing, lacks a required field,
 *   has non-array adds/modifies, or contains an unsafe path.
 */
export function readManifest(skillDir: string): SkillManifest {
  const manifestPath = path.join(skillDir, 'manifest.yaml');
  if (!fs.existsSync(manifestPath)) {
    throw new Error(`Manifest not found: ${manifestPath}`);
  }
  const content = fs.readFileSync(manifestPath, 'utf-8');
  const manifest = parse(content) as SkillManifest;
  // Validate required fields
  const required = [
    'skill',
    'version',
    'core_version',
    'adds',
    'modifies',
  ] as const;
  for (const field of required) {
    if (manifest[field] === undefined) {
      throw new Error(`Manifest missing required field: ${field}`);
    }
  }
  // adds/modifies must be lists; a scalar here would pass the presence
  // check above and then crash later during apply when spread/iterated.
  for (const field of ['adds', 'modifies'] as const) {
    if (!Array.isArray(manifest[field])) {
      throw new Error(`Manifest field must be a list: ${field}`);
    }
  }
  // Defaults
  manifest.conflicts = manifest.conflicts || [];
  manifest.depends = manifest.depends || [];
  manifest.file_ops = manifest.file_ops || [];
  // Validate paths don't escape project root
  const allPaths = [...manifest.adds, ...manifest.modifies];
  for (const p of allPaths) {
    if (p.includes('..') || path.isAbsolute(p)) {
      throw new Error(`Invalid path in manifest: ${p} (must be relative without "..")`);
    }
  }
  return manifest;
}
/**
 * Compare the skill's target core version with the installed core.
 * Never blocks — a skill targeting a newer core only produces a warning.
 */
export function checkCoreVersion(manifest: SkillManifest): {
  ok: boolean;
  warning?: string;
} {
  const state = readState();
  if (compareSemver(manifest.core_version, state.core_version) <= 0) {
    return { ok: true };
  }
  return {
    ok: true,
    warning: `Skill targets core ${manifest.core_version} but current core is ${state.core_version}. The merge might still work but there's a compatibility risk.`,
  };
}
/** A dependency is satisfied when a skill of that name is already applied. */
export function checkDependencies(manifest: SkillManifest): {
  ok: boolean;
  missing: string[];
} {
  const appliedNames = new Set(getAppliedSkills().map((s) => s.name));
  const missing: string[] = [];
  for (const dep of manifest.depends) {
    if (!appliedNames.has(dep)) {
      missing.push(dep);
    }
  }
  return { ok: missing.length === 0, missing };
}
/**
 * Fail only when the skill demands a newer skills engine than this one.
 * Skills without min_skills_system_version always pass.
 */
export function checkSystemVersion(manifest: SkillManifest): {
  ok: boolean;
  error?: string;
} {
  const min = manifest.min_skills_system_version;
  if (!min) {
    return { ok: true };
  }
  if (compareSemver(min, SKILLS_SCHEMA_VERSION) <= 0) {
    return { ok: true };
  }
  return {
    ok: false,
    error: `Skill requires skills system version ${min} but current is ${SKILLS_SCHEMA_VERSION}. Update your skills engine.`,
  };
}
/** Report any already-applied skill that this manifest declares a conflict with. */
export function checkConflicts(manifest: SkillManifest): {
  ok: boolean;
  conflicting: string[];
} {
  const appliedNames = new Set(getAppliedSkills().map((s) => s.name));
  const conflicting = manifest.conflicts.filter((name) => appliedNames.has(name));
  return { ok: conflicting.length === 0, conflicting };
}

150
skills-engine/merge.ts Normal file
View File

@@ -0,0 +1,150 @@
import { execFileSync, execSync } from 'child_process';
import fs from 'fs';
import path from 'path';
import { MergeResult } from './types.js';
/** True when the current working directory is inside a git repository. */
export function isGitRepo(): boolean {
  // rev-parse exits non-zero (throws here) outside a work tree / git dir.
  try {
    execFileSync('git', ['rev-parse', '--git-dir'], { stdio: 'pipe' });
    return true;
  } catch {
    return false;
  }
}
/**
 * Run git merge-file to three-way merge files.
 * Modifies currentPath in-place.
 *
 * @returns { clean: true, exitCode: 0 } on a clean merge, or
 *   { clean: false, exitCode: N } when N conflict hunks were produced.
 * @throws Error when git itself fails to run (e.g. binary missing, killed
 *   by a signal) — previously `err.status ?? 1` silently turned a spawn
 *   failure into a fake "1 conflict" result.
 */
export function mergeFile(
  currentPath: string,
  basePath: string,
  skillPath: string,
): MergeResult {
  try {
    execFileSync('git', ['merge-file', currentPath, basePath, skillPath], {
      stdio: 'pipe',
    });
    return { clean: true, exitCode: 0 };
  } catch (err: unknown) {
    const e = err as { status?: number | null; message?: string };
    // git merge-file exits with the number of conflicts when they occur.
    if (typeof e.status === 'number' && e.status > 0) {
      return { clean: false, exitCode: e.status };
    }
    // No positive exit status: spawn failure (ENOENT) or signal — real error.
    throw new Error(`git merge-file failed: ${e.message ?? String(err)}`);
  }
}
/**
 * Set up unmerged index entries for rerere adapter.
 * Creates stages 1/2/3 so git rerere can record/resolve conflicts.
 *
 * @param filePath - repo-relative path the conflict belongs to
 * @param baseContent - common-ancestor content (index stage 1)
 * @param oursContent - "ours" content (index stage 2)
 * @param theirsContent - "theirs" content (index stage 3)
 */
export function setupRerereAdapter(
  filePath: string,
  baseContent: string,
  oursContent: string,
  theirsContent: string,
): void {
  // No-op outside a git repository — rerere needs git state.
  if (!isGitRepo()) return;
  const gitDir = execSync('git rev-parse --git-dir', {
    encoding: 'utf-8',
  }).trim();
  // Clean up stale MERGE_HEAD from a previous crash
  if (fs.existsSync(path.join(gitDir, 'MERGE_HEAD'))) {
    cleanupMergeState();
  }
  // Hash objects into git object store (-w writes the blob, --stdin reads content)
  const baseHash = execSync('git hash-object -w --stdin', {
    input: baseContent,
    encoding: 'utf-8',
  }).trim();
  const oursHash = execSync('git hash-object -w --stdin', {
    input: oursContent,
    encoding: 'utf-8',
  }).trim();
  const theirsHash = execSync('git hash-object -w --stdin', {
    input: theirsContent,
    encoding: 'utf-8',
  }).trim();
  // Create unmerged index entries (stages 1/2/3); format is
  // "<mode> <blob-hash> <stage>\t<path>" per git update-index --index-info.
  const indexInfo = [
    `100644 ${baseHash} 1\t${filePath}`,
    `100644 ${oursHash} 2\t${filePath}`,
    `100644 ${theirsHash} 3\t${filePath}`,
  ].join('\n');
  execSync('git update-index --index-info', {
    input: indexInfo,
    stdio: ['pipe', 'pipe', 'pipe'],
  });
  // Set MERGE_HEAD and MERGE_MSG (required for rerere)
  const headHash = execSync('git rev-parse HEAD', {
    encoding: 'utf-8',
  }).trim();
  fs.writeFileSync(path.join(gitDir, 'MERGE_HEAD'), headHash + '\n');
  fs.writeFileSync(
    path.join(gitDir, 'MERGE_MSG'),
    `Skill merge: ${filePath}\n`,
  );
}
/**
 * Run git rerere to record or auto-resolve conflicts.
 * Checks filePath for remaining conflict markers afterwards.
 * Returns true if rerere auto-resolved the conflict.
 */
export function runRerere(filePath: string): boolean {
  if (!isGitRepo()) return false;
  try {
    execSync('git rerere', { stdio: 'pipe' });
    // rerere rewrites the working tree but leaves the index unmerged, so
    // inspect the file itself rather than the index for leftover markers.
    const resolved = fs.readFileSync(filePath, 'utf-8');
    return resolved.includes('<<<<<<<') === false;
  } catch {
    return false;
  }
}
/**
 * Clean up merge state after rerere operations.
 * Pass filePath to only reset that file's index entries (preserving the
 * user's other staged changes).
 */
export function cleanupMergeState(filePath?: string): void {
  if (!isGitRepo()) return;
  const gitDir = execSync('git rev-parse --git-dir', {
    encoding: 'utf-8',
  }).trim();
  // Drop the synthetic merge markers created for rerere.
  for (const marker of ['MERGE_HEAD', 'MERGE_MSG']) {
    const markerPath = path.join(gitDir, marker);
    if (fs.existsSync(markerPath)) fs.unlinkSync(markerPath);
  }
  // Reset index entries; scoped to filePath when given so pre-existing
  // staged changes elsewhere survive.
  try {
    if (filePath) {
      execFileSync('git', ['reset', '--', filePath], { stdio: 'pipe' });
    } else {
      execSync('git reset', { stdio: 'pipe' });
    }
  } catch {
    // git reset may fail when nothing is staged — ignore.
  }
}

74
skills-engine/migrate.ts Normal file
View File

@@ -0,0 +1,74 @@
import { execFileSync } from 'child_process';
import fs from 'fs';
import path from 'path';
import { BASE_DIR, CUSTOM_DIR, NANOCLAW_DIR } from './constants.js';
import { initNanoclawDir } from './init.js';
import { recordCustomModification } from './state.js';
/** Initialize the skills system: create the .nanoclaw/ directory tree. */
export function initSkillsSystem(): void {
  initNanoclawDir();
  console.log('Skills system initialized. .nanoclaw/ directory created.');
}
/**
 * Migrate an existing (pre-skills) project into the skills system.
 * Initializes .nanoclaw/, diffs src/ against the freshly captured base, and
 * records any differences as a "custom modification" patch so they are
 * visible to the tracking system. Diff failures are non-fatal.
 */
export function migrateExisting(): void {
  const projectRoot = process.cwd();
  // First, do a fresh init
  initNanoclawDir();
  // Then, diff current files against base to capture modifications
  const baseSrcDir = path.join(projectRoot, BASE_DIR, 'src');
  const srcDir = path.join(projectRoot, 'src');
  const customDir = path.join(projectRoot, CUSTOM_DIR);
  const patchRelPath = path.join(CUSTOM_DIR, 'migration.patch');
  try {
    let diff: string;
    try {
      diff = execFileSync('diff', ['-ruN', baseSrcDir, srcDir], {
        encoding: 'utf-8',
        maxBuffer: 10 * 1024 * 1024, // 10 MB — large trees can produce big diffs
      });
    } catch (err: unknown) {
      // diff exits 1 when files differ — that's expected
      const execErr = err as { status?: number; stdout?: string };
      if (execErr.status === 1 && execErr.stdout) {
        diff = execErr.stdout;
      } else {
        throw err;
      }
    }
    if (diff.trim()) {
      fs.mkdirSync(customDir, { recursive: true });
      fs.writeFileSync(
        path.join(projectRoot, patchRelPath),
        diff,
        'utf-8',
      );
      // Extract modified file paths from the diff
      // (NOTE(review): regex captures the second path of "diff -ruN a b";
      // paths containing spaces would mis-split — confirm acceptable)
      const filesModified = [...diff.matchAll(/^diff -ruN .+ (.+)$/gm)]
        .map((m) => path.relative(projectRoot, m[1]))
        .filter((f) => !f.startsWith('.nanoclaw'));
      // Record in state so the patch is visible to the tracking system
      recordCustomModification(
        'Pre-skills migration',
        filesModified,
        patchRelPath,
      );
      console.log(
        'Custom modifications captured in .nanoclaw/custom/migration.patch',
      );
    } else {
      console.log('No custom modifications detected.');
    }
  } catch {
    // Best-effort: if diff is unavailable or fails, proceed without a patch.
    console.log('Could not generate diff. Continuing with clean base.');
  }
  console.log('Migration complete. Skills system ready.');
}

View File

@@ -0,0 +1,19 @@
import { readState, writeState } from './state.js';
/**
 * Map a repo-relative path through the user's path remap table.
 * Returns the remapped path when an entry exists, otherwise the input.
 */
export function resolvePathRemap(
  relPath: string,
  remap: Record<string, string>,
): string {
  const mapped = remap[relPath];
  return mapped == null ? relPath : mapped;
}
/** Read the persisted path remap table, defaulting to an empty map. */
export function loadPathRemap(): Record<string, string> {
  const { path_remap } = readState();
  return path_remap ?? {};
}
/** Merge new entries into the persisted path remap table and save it. */
export function recordPathRemap(remap: Record<string, string>): void {
  const state = readState();
  const merged = { ...state.path_remap, ...remap };
  state.path_remap = merged;
  writeState(state);
}

293
skills-engine/rebase.ts Normal file
View File

@@ -0,0 +1,293 @@
import { execFileSync, execSync } from 'child_process';
import crypto from 'crypto';
import fs from 'fs';
import os from 'os';
import path from 'path';
import { clearBackup, createBackup, restoreBackup } from './backup.js';
import { BASE_DIR, NANOCLAW_DIR } from './constants.js';
import { copyDir } from './fs-utils.js';
import { acquireLock } from './lock.js';
import {
cleanupMergeState,
isGitRepo,
mergeFile,
runRerere,
setupRerereAdapter,
} from './merge.js';
import { clearAllResolutions } from './resolution-cache.js';
import { computeFileHash, readState, writeState } from './state.js';
import type { RebaseResult } from './types.js';
/**
 * Recursively list all files under dir as paths relative to root.
 * A missing directory yields an empty list.
 */
function walkDir(dir: string, root: string): string[] {
  if (!fs.existsSync(dir)) return [];
  const files: string[] = [];
  const entries = fs.readdirSync(dir, { withFileTypes: true });
  for (const entry of entries) {
    const entryPath = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      for (const nested of walkDir(entryPath, root)) files.push(nested);
    } else {
      files.push(path.relative(root, entryPath));
    }
  }
  return files;
}
/**
 * Gather every file path tracked by applied skills and by recorded
 * custom modifications into a single set.
 */
function collectTrackedFiles(
  state: ReturnType<typeof readState>,
): Set<string> {
  const tracked = new Set<string>();
  state.applied_skills.forEach((skill) => {
    Object.keys(skill.file_hashes).forEach((relPath) => tracked.add(relPath));
  });
  (state.custom_modifications ?? []).forEach((mod) => {
    mod.files_modified.forEach((relPath) => tracked.add(relPath));
  });
  return tracked;
}
/**
 * Bake the working tree into the base, or rebase onto a new base.
 *
 * Without newBasePath: the working tree (base + applied skills + custom
 * modifications) becomes the new base; skill file hashes are recomputed and
 * custom modification records are dropped. A combined unified diff is saved
 * to .nanoclaw/combined.patch as an archival record in both modes.
 *
 * With newBasePath: the new base replaces .nanoclaw/base and the working
 * tree, then each tracked file is three-way merged
 * (new base ← old base → saved working tree) with git rerere consulted for
 * known resolutions. On conflicts the backup is left pending for the caller.
 *
 * A backup of all touched files is taken first and restored on unexpected
 * errors; the resolution cache is cleared on success (base changed).
 */
export async function rebase(newBasePath?: string): Promise<RebaseResult> {
  const projectRoot = process.cwd();
  const state = readState();
  if (state.applied_skills.length === 0) {
    return {
      success: false,
      filesInPatch: 0,
      error: 'No skills applied. Nothing to rebase.',
    };
  }
  const releaseLock = acquireLock();
  try {
    const trackedFiles = collectTrackedFiles(state);
    const baseAbsDir = path.join(projectRoot, BASE_DIR);
    // Include base dir files
    const baseFiles = walkDir(baseAbsDir, baseAbsDir);
    for (const f of baseFiles) {
      trackedFiles.add(f);
    }
    // Backup both working-tree and base copies of every tracked file
    const filesToBackup: string[] = [];
    for (const relPath of trackedFiles) {
      const absPath = path.join(projectRoot, relPath);
      if (fs.existsSync(absPath)) filesToBackup.push(absPath);
      const baseFilePath = path.join(baseAbsDir, relPath);
      if (fs.existsSync(baseFilePath)) filesToBackup.push(baseFilePath);
    }
    const stateFilePath = path.join(projectRoot, NANOCLAW_DIR, 'state.yaml');
    filesToBackup.push(stateFilePath);
    createBackup(filesToBackup);
    try {
      // Generate unified diff: base vs working tree (archival record)
      let combinedPatch = '';
      let filesInPatch = 0;
      for (const relPath of trackedFiles) {
        const basePath = path.join(baseAbsDir, relPath);
        const workingPath = path.join(projectRoot, relPath);
        const oldPath = fs.existsSync(basePath) ? basePath : '/dev/null';
        const newPath = fs.existsSync(workingPath) ? workingPath : '/dev/null';
        if (oldPath === '/dev/null' && newPath === '/dev/null') continue;
        try {
          const diff = execFileSync('diff', ['-ruN', oldPath, newPath], {
            encoding: 'utf-8',
          });
          if (diff.trim()) {
            combinedPatch += diff;
            filesInPatch++;
          }
        } catch (err: unknown) {
          // diff exits 1 when files differ — that's the expected case
          const execErr = err as { status?: number; stdout?: string };
          if (execErr.status === 1 && execErr.stdout) {
            combinedPatch += execErr.stdout;
            filesInPatch++;
          } else {
            throw err;
          }
        }
      }
      // Save combined patch
      const patchPath = path.join(
        projectRoot,
        NANOCLAW_DIR,
        'combined.patch',
      );
      fs.writeFileSync(patchPath, combinedPatch, 'utf-8');
      if (newBasePath) {
        // --- Rebase with new base: three-way merge with resolution model ---
        // Save current working tree content before overwriting
        const savedContent: Record<string, string> = {};
        for (const relPath of trackedFiles) {
          const workingPath = path.join(projectRoot, relPath);
          if (fs.existsSync(workingPath)) {
            savedContent[relPath] = fs.readFileSync(workingPath, 'utf-8');
          }
        }
        const absNewBase = path.resolve(newBasePath);
        // Replace base
        if (fs.existsSync(baseAbsDir)) {
          fs.rmSync(baseAbsDir, { recursive: true, force: true });
        }
        fs.mkdirSync(baseAbsDir, { recursive: true });
        copyDir(absNewBase, baseAbsDir);
        // Copy new base to working tree
        copyDir(absNewBase, projectRoot);
        // Three-way merge per file: new-base ← old-base → saved-working-tree
        const mergeConflicts: string[] = [];
        for (const relPath of trackedFiles) {
          const newBaseSrc = path.join(absNewBase, relPath);
          const currentPath = path.join(projectRoot, relPath);
          const saved = savedContent[relPath];
          if (!saved) continue; // No working tree content to merge
          if (!fs.existsSync(newBaseSrc)) {
            // File only existed in working tree, not in new base — restore it
            fs.mkdirSync(path.dirname(currentPath), { recursive: true });
            fs.writeFileSync(currentPath, saved);
            continue;
          }
          const newBaseContent = fs.readFileSync(newBaseSrc, 'utf-8');
          if (newBaseContent === saved) continue; // No diff
          // Find old base content from backup
          // (fixed: use the NANOCLAW_DIR constant instead of a hardcoded
          // '.nanoclaw' string, consistent with the rest of this function)
          const oldBasePath = path.join(
            projectRoot,
            NANOCLAW_DIR,
            'backup',
            BASE_DIR,
            relPath,
          );
          if (!fs.existsSync(oldBasePath)) {
            // No old base — keep saved content
            fs.writeFileSync(currentPath, saved);
            continue;
          }
          // Save "ours" (new base content) before merge overwrites it
          const oursContent = newBaseContent;
          // Three-way merge: current(new base) ← old-base → saved(modifications)
          const tmpSaved = path.join(
            os.tmpdir(),
            `nanoclaw-rebase-${crypto.randomUUID()}-${path.basename(relPath)}`,
          );
          fs.writeFileSync(tmpSaved, saved);
          const result = mergeFile(currentPath, oldBasePath, tmpSaved);
          fs.unlinkSync(tmpSaved);
          if (!result.clean) {
            // Try rerere resolution (three-level model)
            if (isGitRepo()) {
              const baseContent = fs.readFileSync(oldBasePath, 'utf-8');
              setupRerereAdapter(relPath, baseContent, oursContent, saved);
              const autoResolved = runRerere(currentPath);
              if (autoResolved) {
                // Stage the resolution and re-run rerere so it records it
                execFileSync('git', ['add', relPath], { stdio: 'pipe' });
                execSync('git rerere', { stdio: 'pipe' });
                cleanupMergeState(relPath);
                continue;
              }
              cleanupMergeState(relPath);
            }
            // Unresolved — conflict markers remain in working tree
            mergeConflicts.push(relPath);
          }
        }
        if (mergeConflicts.length > 0) {
          // Return with backup pending for Claude Code / user resolution
          return {
            success: false,
            patchFile: patchPath,
            filesInPatch,
            mergeConflicts,
            backupPending: true,
            error: `Merge conflicts in: ${mergeConflicts.join(', ')}. Resolve manually then call clearBackup(), or restoreBackup() + clearBackup() to abort.`,
          };
        }
      } else {
        // --- Rebase without new base: flatten into base ---
        // Update base to current working tree state (all skills baked in)
        for (const relPath of trackedFiles) {
          const workingPath = path.join(projectRoot, relPath);
          const basePath = path.join(baseAbsDir, relPath);
          if (fs.existsSync(workingPath)) {
            fs.mkdirSync(path.dirname(basePath), { recursive: true });
            fs.copyFileSync(workingPath, basePath);
          } else if (fs.existsSync(basePath)) {
            // File was removed by skills — remove from base too
            fs.unlinkSync(basePath);
          }
        }
      }
      // Update state: recompute per-skill hashes against the new working tree
      const now = new Date().toISOString();
      for (const skill of state.applied_skills) {
        const updatedHashes: Record<string, string> = {};
        for (const relPath of Object.keys(skill.file_hashes)) {
          const absPath = path.join(projectRoot, relPath);
          if (fs.existsSync(absPath)) {
            updatedHashes[relPath] = computeFileHash(absPath);
          }
        }
        skill.file_hashes = updatedHashes;
      }
      delete state.custom_modifications;
      state.rebased_at = now;
      writeState(state);
      // Clear stale resolution cache (base has changed, old resolutions invalid)
      clearAllResolutions(projectRoot);
      clearBackup();
      return {
        success: true,
        patchFile: patchPath,
        filesInPatch,
        rebased_at: now,
      };
    } catch (err) {
      // Unexpected failure — roll back everything we touched
      restoreBackup();
      clearBackup();
      throw err;
    }
  } finally {
    releaseLock();
  }
}

309
skills-engine/replay.ts Normal file
View File

@@ -0,0 +1,309 @@
import { execFileSync, execSync } from 'child_process';
import crypto from 'crypto';
import fs from 'fs';
import os from 'os';
import path from 'path';
import { BASE_DIR, NANOCLAW_DIR } from './constants.js';
import { copyDir } from './fs-utils.js';
import { readManifest } from './manifest.js';
import {
cleanupMergeState,
isGitRepo,
mergeFile,
runRerere,
setupRerereAdapter,
} from './merge.js';
import { loadPathRemap, resolvePathRemap } from './path-remap.js';
import { loadResolutions } from './resolution-cache.js';
import {
mergeDockerComposeServices,
mergeEnvAdditions,
mergeNpmDependencies,
runNpmInstall,
} from './structured.js';
/** Inputs for replaySkills(). */
export interface ReplayOptions {
  // Skill names in the order they should be re-applied.
  skills: string[];
  // Map of skill name -> skill package directory.
  skillDirs: Record<string, string>;
  // Project root; defaults to process.cwd().
  projectRoot?: string;
}
/** Outcome of a replay run. */
export interface ReplayResult {
  success: boolean;
  // Per-skill success/error breakdown.
  perSkill: Record<string, { success: boolean; error?: string }>;
  // Files left with conflict markers, when any merge failed.
  mergeConflicts?: string[];
  error?: string;
}
/**
* Scan .claude/skills/ for a directory whose manifest.yaml has skill: <skillName>.
*/
export function findSkillDir(
skillName: string,
projectRoot?: string,
): string | null {
const root = projectRoot ?? process.cwd();
const skillsRoot = path.join(root, '.claude', 'skills');
if (!fs.existsSync(skillsRoot)) return null;
for (const entry of fs.readdirSync(skillsRoot, { withFileTypes: true })) {
if (!entry.isDirectory()) continue;
const dir = path.join(skillsRoot, entry.name);
const manifestPath = path.join(dir, 'manifest.yaml');
if (!fs.existsSync(manifestPath)) continue;
try {
const manifest = readManifest(dir);
if (manifest.skill === skillName) return dir;
} catch {
// Skip invalid manifests
}
}
return null;
}
/**
 * Replay a list of skills from clean base state.
 * Used by uninstall (replay-without) and rebase.
 *
 * Touched files are first reset to their .nanoclaw/base copies, then each
 * skill's file_ops, add/ copies, and three-way modify/ merges are applied in
 * order. Structured ops (npm deps, env additions, docker services) are
 * aggregated across skills and applied once at the end, only when no merge
 * conflict occurred. Stops at the first skill that conflicts.
 */
export async function replaySkills(
  options: ReplayOptions,
): Promise<ReplayResult> {
  const projectRoot = options.projectRoot ?? process.cwd();
  const baseDir = path.join(projectRoot, BASE_DIR);
  const pathRemap = loadPathRemap();
  const perSkill: Record<string, { success: boolean; error?: string }> = {};
  const allMergeConflicts: string[] = [];
  // 1. Collect all files touched by any skill in the list
  const allTouchedFiles = new Set<string>();
  for (const skillName of options.skills) {
    const skillDir = options.skillDirs[skillName];
    if (!skillDir) {
      perSkill[skillName] = {
        success: false,
        error: `Skill directory not found for: ${skillName}`,
      };
      return {
        success: false,
        perSkill,
        error: `Missing skill directory for: ${skillName}`,
      };
    }
    const manifest = readManifest(skillDir);
    for (const f of manifest.adds) allTouchedFiles.add(f);
    for (const f of manifest.modifies) allTouchedFiles.add(f);
  }
  // 2. Reset touched files to clean base
  for (const relPath of allTouchedFiles) {
    const resolvedPath = resolvePathRemap(relPath, pathRemap);
    const currentPath = path.join(projectRoot, resolvedPath);
    const basePath = path.join(baseDir, resolvedPath);
    if (fs.existsSync(basePath)) {
      // Restore from base
      fs.mkdirSync(path.dirname(currentPath), { recursive: true });
      fs.copyFileSync(basePath, currentPath);
    } else if (fs.existsSync(currentPath)) {
      // Add-only file not in base — remove it
      fs.unlinkSync(currentPath);
    }
  }
  // 3. Load pre-computed resolutions into git's rr-cache before replaying
  // Pass the last skill's dir — it's the one applied on top, producing conflicts
  // NOTE(review): lastSkillDir is undefined when options.skills is empty, but
  // loadResolutions declares skillDir as required — verify the types agree.
  const lastSkillDir = options.skills.length > 0
    ? options.skillDirs[options.skills[options.skills.length - 1]]
    : undefined;
  loadResolutions(options.skills, projectRoot, lastSkillDir);
  // Replay each skill in order
  // Collect structured ops for batch application
  const allNpmDeps: Record<string, string> = {};
  const allEnvAdditions: string[] = [];
  const allDockerServices: Record<string, unknown> = {};
  let hasNpmDeps = false;
  for (const skillName of options.skills) {
    const skillDir = options.skillDirs[skillName];
    try {
      const manifest = readManifest(skillDir);
      // Execute file_ops
      if (manifest.file_ops && manifest.file_ops.length > 0) {
        const { executeFileOps } = await import('./file-ops.js');
        const fileOpsResult = executeFileOps(manifest.file_ops, projectRoot);
        if (!fileOpsResult.success) {
          perSkill[skillName] = {
            success: false,
            error: `File operations failed: ${fileOpsResult.errors.join('; ')}`,
          };
          return {
            success: false,
            perSkill,
            error: `File ops failed for ${skillName}`,
          };
        }
      }
      // Copy add/ files
      const addDir = path.join(skillDir, 'add');
      if (fs.existsSync(addDir)) {
        for (const relPath of manifest.adds) {
          const resolvedDest = resolvePathRemap(relPath, pathRemap);
          const destPath = path.join(projectRoot, resolvedDest);
          const srcPath = path.join(addDir, relPath);
          if (fs.existsSync(srcPath)) {
            fs.mkdirSync(path.dirname(destPath), { recursive: true });
            fs.copyFileSync(srcPath, destPath);
          }
        }
      }
      // Three-way merge modify/ files
      const skillConflicts: string[] = [];
      for (const relPath of manifest.modifies) {
        const resolvedPath = resolvePathRemap(relPath, pathRemap);
        const currentPath = path.join(projectRoot, resolvedPath);
        const basePath = path.join(baseDir, resolvedPath);
        const skillPath = path.join(skillDir, 'modify', relPath);
        if (!fs.existsSync(skillPath)) {
          // Manifest lists a modify target the package doesn't ship — conflict
          skillConflicts.push(relPath);
          continue;
        }
        if (!fs.existsSync(currentPath)) {
          // Nothing to merge against — take the skill's version wholesale
          fs.mkdirSync(path.dirname(currentPath), { recursive: true });
          fs.copyFileSync(skillPath, currentPath);
          continue;
        }
        if (!fs.existsSync(basePath)) {
          // No base copy yet — snapshot current as the merge ancestor
          fs.mkdirSync(path.dirname(basePath), { recursive: true });
          fs.copyFileSync(currentPath, basePath);
        }
        const oursContent = fs.readFileSync(currentPath, 'utf-8');
        // Merge in a temp copy so currentPath is only replaced deliberately
        const tmpCurrent = path.join(
          os.tmpdir(),
          `nanoclaw-replay-${crypto.randomUUID()}-${path.basename(relPath)}`,
        );
        fs.copyFileSync(currentPath, tmpCurrent);
        const result = mergeFile(tmpCurrent, basePath, skillPath);
        if (result.clean) {
          fs.copyFileSync(tmpCurrent, currentPath);
          fs.unlinkSync(tmpCurrent);
        } else {
          // Copy the conflicted result back so rerere can work on it in place
          fs.copyFileSync(tmpCurrent, currentPath);
          fs.unlinkSync(tmpCurrent);
          if (isGitRepo()) {
            const baseContent = fs.readFileSync(basePath, 'utf-8');
            const theirsContent = fs.readFileSync(skillPath, 'utf-8');
            setupRerereAdapter(
              resolvedPath,
              baseContent,
              oursContent,
              theirsContent,
            );
            const autoResolved = runRerere(currentPath);
            if (autoResolved) {
              // Stage the resolution and re-run rerere so it is recorded
              execFileSync('git', ['add', resolvedPath], { stdio: 'pipe' });
              execSync('git rerere', { stdio: 'pipe' });
              cleanupMergeState(resolvedPath);
              continue;
            }
            cleanupMergeState(resolvedPath);
          }
          skillConflicts.push(resolvedPath);
        }
      }
      if (skillConflicts.length > 0) {
        allMergeConflicts.push(...skillConflicts);
        perSkill[skillName] = {
          success: false,
          error: `Merge conflicts: ${skillConflicts.join(', ')}`,
        };
        // Stop on first conflict — later skills would merge against conflict markers
        break;
      } else {
        perSkill[skillName] = { success: true };
      }
      // Collect structured ops
      if (manifest.structured?.npm_dependencies) {
        Object.assign(allNpmDeps, manifest.structured.npm_dependencies);
        hasNpmDeps = true;
      }
      if (manifest.structured?.env_additions) {
        allEnvAdditions.push(...manifest.structured.env_additions);
      }
      if (manifest.structured?.docker_compose_services) {
        Object.assign(
          allDockerServices,
          manifest.structured.docker_compose_services,
        );
      }
    } catch (err) {
      perSkill[skillName] = {
        success: false,
        error: err instanceof Error ? err.message : String(err),
      };
      return {
        success: false,
        perSkill,
        error: `Replay failed for ${skillName}: ${err instanceof Error ? err.message : String(err)}`,
      };
    }
  }
  if (allMergeConflicts.length > 0) {
    return {
      success: false,
      perSkill,
      mergeConflicts: allMergeConflicts,
      error: `Unresolved merge conflicts: ${allMergeConflicts.join(', ')}`,
    };
  }
  // 4. Apply aggregated structured operations (only if no conflicts)
  if (hasNpmDeps) {
    const pkgPath = path.join(projectRoot, 'package.json');
    mergeNpmDependencies(pkgPath, allNpmDeps);
  }
  if (allEnvAdditions.length > 0) {
    const envPath = path.join(projectRoot, '.env.example');
    mergeEnvAdditions(envPath, allEnvAdditions);
  }
  if (Object.keys(allDockerServices).length > 0) {
    const composePath = path.join(projectRoot, 'docker-compose.yml');
    mergeDockerComposeServices(composePath, allDockerServices);
  }
  // 5. Run npm install if any deps
  if (hasNpmDeps) {
    try {
      runNpmInstall();
    } catch {
      // npm install failure is non-fatal for replay
    }
  }
  return { success: true, perSkill };
}

View File

@@ -0,0 +1,269 @@
import { execSync } from 'child_process';
import fs from 'fs';
import path from 'path';
import { parse, stringify } from 'yaml';
import { NANOCLAW_DIR, RESOLUTIONS_DIR, SHIPPED_RESOLUTIONS_DIR } from './constants.js';
import { computeFileHash } from './state.js';
import { FileInputHashes, ResolutionMeta } from './types.js';
/**
 * Build the resolution directory key for a skill combination:
 * names sorted alphabetically, joined with "+". Does not mutate the input.
 */
function resolutionKey(skills: string[]): string {
  const sorted = Array.from(skills).sort();
  return sorted.join('+');
}
/**
 * Locate the resolution directory for a skill combination.
 * Shipped resolutions (.claude/resolutions/) take precedence over
 * project-level ones. Returns the path if it exists, null otherwise.
 */
export function findResolutionDir(
  skills: string[],
  projectRoot: string,
): string | null {
  const key = resolutionKey(skills);
  const candidates = [SHIPPED_RESOLUTIONS_DIR, RESOLUTIONS_DIR].map((base) =>
    path.join(projectRoot, base, key),
  );
  for (const dir of candidates) {
    if (fs.existsSync(dir)) return dir;
  }
  return null;
}
/**
 * Load cached resolutions into the local git rerere cache.
 * Verifies file_hashes from meta.yaml match before loading each pair.
 * Returns true if loaded successfully, false if not found or no pairs loaded.
 *
 * @param skills - skill combination the resolutions were recorded for
 * @param projectRoot - project root containing .nanoclaw/ and the git repo
 * @param skillDir - directory of the skill whose modify/ files are hashed
 *   as part of input verification
 */
export function loadResolutions(
  skills: string[],
  projectRoot: string,
  skillDir: string,
): boolean {
  const resDir = findResolutionDir(skills, projectRoot);
  if (!resDir) return false;
  const metaPath = path.join(resDir, 'meta.yaml');
  if (!fs.existsSync(metaPath)) return false;
  let meta: ResolutionMeta;
  try {
    meta = parse(fs.readFileSync(metaPath, 'utf-8')) as ResolutionMeta;
  } catch {
    // Unparseable meta.yaml — treat as no cache.
    return false;
  }
  if (!meta.input_hashes) return false;
  // Find all preimage/resolution pairs
  const pairs = findPreimagePairs(resDir, resDir);
  if (pairs.length === 0) return false;
  // Get the git directory
  let gitDir: string;
  try {
    gitDir = execSync('git rev-parse --git-dir', {
      encoding: 'utf-8',
      cwd: projectRoot,
    }).trim();
    // git may report a relative path; anchor it to the project root
    if (!path.isAbsolute(gitDir)) {
      gitDir = path.join(projectRoot, gitDir);
    }
  } catch {
    // Not a git repo — the rerere cache cannot be used.
    return false;
  }
  const rrCacheDir = path.join(gitDir, 'rr-cache');
  let loadedAny = false;
  for (const { relPath, preimage, resolution } of pairs) {
    // Verify file_hashes — skip pair if hashes don't match
    const expected = meta.file_hashes?.[relPath];
    if (!expected) {
      console.log(`resolution-cache: skipping ${relPath} — no file_hashes in meta`);
      continue;
    }
    const basePath = path.join(projectRoot, NANOCLAW_DIR, 'base', relPath);
    const currentPath = path.join(projectRoot, relPath);
    const skillModifyPath = path.join(skillDir, 'modify', relPath);
    if (!fs.existsSync(basePath) || !fs.existsSync(currentPath) || !fs.existsSync(skillModifyPath)) {
      console.log(`resolution-cache: skipping ${relPath} — input files not found`);
      continue;
    }
    // All three merge inputs must match what the resolution was recorded
    // against, otherwise the cached postimage could be wrong.
    const baseHash = computeFileHash(basePath);
    if (baseHash !== expected.base) {
      console.log(`resolution-cache: skipping ${relPath} — base hash mismatch`);
      continue;
    }
    const currentHash = computeFileHash(currentPath);
    if (currentHash !== expected.current) {
      console.log(`resolution-cache: skipping ${relPath} — current hash mismatch`);
      continue;
    }
    const skillHash = computeFileHash(skillModifyPath);
    if (skillHash !== expected.skill) {
      console.log(`resolution-cache: skipping ${relPath} — skill hash mismatch`);
      continue;
    }
    const preimageContent = fs.readFileSync(preimage, 'utf-8');
    const resolutionContent = fs.readFileSync(resolution, 'utf-8');
    // Git rerere uses its own internal hash format (not git hash-object).
    // We store the rerere hash in the preimage filename as a .hash sidecar,
    // captured when saveResolution() reads the actual rr-cache after rerere records it.
    const hashSidecar = preimage + '.hash';
    if (!fs.existsSync(hashSidecar)) {
      // No hash recorded — skip this pair (legacy format)
      continue;
    }
    const hash = fs.readFileSync(hashSidecar, 'utf-8').trim();
    if (!hash) continue;
    // Create rr-cache entry
    const cacheDir = path.join(rrCacheDir, hash);
    fs.mkdirSync(cacheDir, { recursive: true });
    fs.writeFileSync(path.join(cacheDir, 'preimage'), preimageContent);
    fs.writeFileSync(path.join(cacheDir, 'postimage'), resolutionContent);
    loadedAny = true;
  }
  return loadedAny;
}
/**
 * Save conflict resolutions to the resolution cache.
 *
 * Writes <relPath>.preimage / <relPath>.resolution pairs under the
 * resolutions directory for this skill combination, captures git's internal
 * rerere hash into a .preimage.hash sidecar when available, and writes
 * meta.yaml describing the inputs that produced each resolution.
 */
export function saveResolution(
  skills: string[],
  files: { relPath: string; preimage: string; resolution: string; inputHashes: FileInputHashes }[],
  meta: Partial<ResolutionMeta>,
  projectRoot: string,
): void {
  const key = resolutionKey(skills);
  const resDir = path.join(projectRoot, RESOLUTIONS_DIR, key);
  // Get the git rr-cache directory to find actual rerere hashes
  let rrCacheDir: string | null = null;
  try {
    let gitDir = execSync('git rev-parse --git-dir', {
      encoding: 'utf-8',
      cwd: projectRoot,
    }).trim();
    // git may report a relative path; anchor it to the project root
    if (!path.isAbsolute(gitDir)) {
      gitDir = path.join(projectRoot, gitDir);
    }
    rrCacheDir = path.join(gitDir, 'rr-cache');
  } catch {
    // Not a git repo — skip hash capture
  }
  // Write preimage/resolution pairs
  for (const file of files) {
    const preimagePath = path.join(resDir, file.relPath + '.preimage');
    const resolutionPath = path.join(resDir, file.relPath + '.resolution');
    fs.mkdirSync(path.dirname(preimagePath), { recursive: true });
    fs.writeFileSync(preimagePath, file.preimage);
    fs.writeFileSync(resolutionPath, file.resolution);
    // Capture the actual rerere hash by finding the rr-cache entry
    // whose preimage matches ours
    if (rrCacheDir && fs.existsSync(rrCacheDir)) {
      const rerereHash = findRerereHash(rrCacheDir, file.preimage);
      if (rerereHash) {
        fs.writeFileSync(preimagePath + '.hash', rerereHash);
      }
    }
  }
  // Collect file_hashes from individual files
  const fileHashes: Record<string, FileInputHashes> = {};
  for (const file of files) {
    fileHashes[file.relPath] = file.inputHashes;
  }
  // Build full meta with defaults; caller-supplied meta entries win
  const fullMeta: ResolutionMeta = {
    skills: [...skills].sort(),
    apply_order: meta.apply_order ?? skills,
    core_version: meta.core_version ?? '',
    resolved_at: meta.resolved_at ?? new Date().toISOString(),
    tested: meta.tested ?? false,
    test_passed: meta.test_passed ?? false,
    resolution_source: meta.resolution_source ?? 'user',
    input_hashes: meta.input_hashes ?? {},
    output_hash: meta.output_hash ?? '',
    file_hashes: { ...fileHashes, ...meta.file_hashes },
  };
  fs.writeFileSync(path.join(resDir, 'meta.yaml'), stringify(fullMeta));
}
/**
 * Remove all resolution cache entries, leaving an empty resolutions dir.
 * Called after rebase since the base has changed and old resolutions are
 * no longer valid.
 */
export function clearAllResolutions(projectRoot: string): void {
  const resDir = path.join(projectRoot, RESOLUTIONS_DIR);
  if (!fs.existsSync(resDir)) return;
  fs.rmSync(resDir, { recursive: true, force: true });
  fs.mkdirSync(resDir, { recursive: true });
}
/**
 * Recursively collect preimage/resolution file pairs under dir.
 * relPath is the pair's path relative to baseDir with the .preimage suffix
 * stripped; preimages without a .resolution counterpart are ignored.
 */
function findPreimagePairs(
  dir: string,
  baseDir: string,
): { relPath: string; preimage: string; resolution: string }[] {
  const found: { relPath: string; preimage: string; resolution: string }[] = [];
  for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
    const entryPath = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      found.push(...findPreimagePairs(entryPath, baseDir));
      continue;
    }
    // Defensive: .preimage.hash sidecars never match .preimage anyway
    const isPreimage =
      entry.name.endsWith('.preimage') && !entry.name.endsWith('.preimage.hash');
    if (!isPreimage) continue;
    const resolution = entryPath.replace(/\.preimage$/, '.resolution');
    if (!fs.existsSync(resolution)) continue;
    const relPath = path
      .relative(baseDir, entryPath)
      .replace(/\.preimage$/, '');
    found.push({ relPath, preimage: entryPath, resolution });
  }
  return found;
}
/**
 * Find the rerere hash for a given preimage by scanning rr-cache entries.
 * Returns the entry directory name whose preimage content matches the given
 * content, or null when nothing matches.
 */
function findRerereHash(rrCacheDir: string, preimageContent: string): string | null {
  if (!fs.existsSync(rrCacheDir)) return null;
  const entries = fs.readdirSync(rrCacheDir, { withFileTypes: true });
  for (const entry of entries) {
    if (!entry.isDirectory()) continue;
    const candidate = path.join(rrCacheDir, entry.name, 'preimage');
    if (!fs.existsSync(candidate)) continue;
    if (fs.readFileSync(candidate, 'utf-8') === preimageContent) {
      return entry.name;
    }
  }
  return null;
}

115
skills-engine/state.ts Normal file
View File

@@ -0,0 +1,115 @@
import crypto from 'crypto';
import fs from 'fs';
import path from 'path';
import { parse, stringify } from 'yaml';
import { SKILLS_SCHEMA_VERSION, NANOCLAW_DIR, STATE_FILE } from './constants.js';
import { AppliedSkill, CustomModification, SkillState } from './types.js';
/** Absolute path of the state file under the current working directory. */
function getStatePath(): string {
  return path.join(process.cwd(), NANOCLAW_DIR, STATE_FILE);
}
/**
 * Load and parse .nanoclaw/state.yaml.
 * Throws when the file is missing or was written by a newer engine version.
 */
export function readState(): SkillState {
  const statePath = getStatePath();
  if (!fs.existsSync(statePath)) {
    throw new Error(
      '.nanoclaw/state.yaml not found. Run initSkillsSystem() first.',
    );
  }
  const state = parse(fs.readFileSync(statePath, 'utf-8')) as SkillState;
  // Refuse to operate on state written by a newer tooling version.
  const tooNew =
    compareSemver(state.skills_system_version, SKILLS_SCHEMA_VERSION) > 0;
  if (tooNew) {
    throw new Error(
      `state.yaml version ${state.skills_system_version} is newer than tooling version ${SKILLS_SCHEMA_VERSION}. Update your skills engine.`,
    );
  }
  return state;
}
/**
 * Persist state to .nanoclaw/state.yaml atomically (temp file + rename)
 * so a crash mid-write cannot leave a corrupted state file.
 */
export function writeState(state: SkillState): void {
  const statePath = getStatePath();
  fs.mkdirSync(path.dirname(statePath), { recursive: true });
  const serialized = stringify(state, { sortMapEntries: true });
  const tmpPath = `${statePath}.tmp`;
  fs.writeFileSync(tmpPath, serialized, 'utf-8');
  fs.renameSync(tmpPath, statePath);
}
/**
 * Record (or re-record) a skill application in state.yaml.
 * Any earlier entry for the same skill is replaced.
 */
export function recordSkillApplication(
  skillName: string,
  version: string,
  fileHashes: Record<string, string>,
  structuredOutcomes?: Record<string, unknown>,
): void {
  const state = readState();
  // Drop any previous application of this skill, then append the new record.
  const remaining = state.applied_skills.filter((s) => s.name !== skillName);
  remaining.push({
    name: skillName,
    version,
    applied_at: new Date().toISOString(),
    file_hashes: fileHashes,
    structured_outcomes: structuredOutcomes,
  });
  state.applied_skills = remaining;
  writeState(state);
}
/** List all skills currently recorded as applied in state.yaml. */
export function getAppliedSkills(): AppliedSkill[] {
  return readState().applied_skills;
}
/** Append a custom (non-skill) modification record to state.yaml. */
export function recordCustomModification(
  description: string,
  filesModified: string[],
  patchFile: string,
): void {
  const state = readState();
  const record: CustomModification = {
    description,
    applied_at: new Date().toISOString(),
    files_modified: filesModified,
    patch_file: patchFile,
  };
  state.custom_modifications = [...(state.custom_modifications ?? []), record];
  writeState(state);
}
/** List recorded custom modifications; empty list when none exist. */
export function getCustomModifications(): CustomModification[] {
  return readState().custom_modifications ?? [];
}
/** SHA-256 hex digest of a file's raw bytes. */
export function computeFileHash(filePath: string): string {
  const hasher = crypto.createHash('sha256');
  hasher.update(fs.readFileSync(filePath));
  return hasher.digest('hex');
}
/**
 * Numeric comparison of dotted version strings.
 * Missing or non-numeric components count as 0, so "1.2" equals "1.2.0".
 *
 * @returns negative if a < b, 0 if equal, positive if a > b.
 */
export function compareSemver(a: string, b: string): number {
  const left = a.split('.');
  const right = b.split('.');
  const width = Math.max(left.length, right.length);
  for (let i = 0; i < width; i++) {
    const delta = (Number(left[i]) || 0) - (Number(right[i]) || 0);
    if (delta !== 0) return delta;
  }
  return 0;
}

196
skills-engine/structured.ts Normal file
View File

@@ -0,0 +1,196 @@
import { execSync } from 'child_process';
import fs from 'fs';
import { parse, stringify } from 'yaml';
/** Minimal shape of package.json needed for dependency merging. */
interface PackageJson {
  dependencies?: Record<string, string>;
  devDependencies?: Record<string, string>;
  [key: string]: unknown; // preserve all other fields untouched on rewrite
}
/** Minimal shape of docker-compose.yml needed for service merging. */
interface DockerComposeFile {
  version?: string;
  services?: Record<string, unknown>;
  [key: string]: unknown; // preserve all other fields untouched on rewrite
}
/**
 * Compare two versions already split into dotted components.
 * Missing components count as 0.
 *
 * @returns negative if a < b, 0 if equal, positive if a > b.
 */
function compareVersionParts(a: string[], b: string[]): number {
  const width = Math.max(a.length, b.length);
  for (let idx = 0; idx < width; idx++) {
    const left = parseInt(a[idx] ?? '0', 10);
    const right = parseInt(b[idx] ?? '0', 10);
    if (left !== right) return left - right;
  }
  return 0;
}
/**
 * Decide whether two npm version ranges can be satisfied together.
 *
 * Only matching ^-ranges with the same major, and matching ~-ranges with
 * the same major.minor, are considered compatible (resolved to the higher
 * version). Exact pins, >=, *, and mismatched prefixes are all reported
 * as incompatible, with the existing range kept as the resolution.
 */
export function areRangesCompatible(
  existing: string,
  requested: string,
): { compatible: boolean; resolved: string } {
  if (existing === requested) {
    return { compatible: true, resolved: existing };
  }
  // Numeric comparison of dotted parts; missing components count as 0.
  const cmp = (a: string[], b: string[]): number => {
    for (let i = 0; i < Math.max(a.length, b.length); i++) {
      const d = parseInt(a[i] ?? '0', 10) - parseInt(b[i] ?? '0', 10);
      if (d !== 0) return d;
    }
    return 0;
  };
  const prefix = existing[0];
  if ((prefix === '^' || prefix === '~') && requested.startsWith(prefix)) {
    const eParts = existing.slice(1).split('.');
    const rParts = requested.slice(1).split('.');
    // ^ requires a shared major; ~ requires shared major.minor.
    const sharedDepth = prefix === '^' ? 1 : 2;
    for (let i = 0; i < sharedDepth; i++) {
      if (eParts[i] !== rParts[i]) {
        return { compatible: false, resolved: existing };
      }
    }
    // Compatible — resolve to whichever version is higher.
    const resolved = cmp(eParts, rParts) >= 0 ? existing : requested;
    return { compatible: true, resolved };
  }
  // Mismatched prefixes or anything else (exact, >=, *, etc.)
  return { compatible: false, resolved: existing };
}
/**
 * Merge skill-requested npm dependencies into package.json.
 *
 * Each requested dependency is checked against both `dependencies` and
 * `devDependencies`. Conflicting ranges throw; compatible ranges resolve
 * to the higher version. The resolved version is written back to the
 * section where the package already lives — previously it was always
 * written to `dependencies`, which duplicated packages that existed only
 * in `devDependencies`. Both sections are sorted for deterministic
 * output.
 *
 * @throws if a requested range is incompatible with the existing one.
 */
export function mergeNpmDependencies(
  packageJsonPath: string,
  newDeps: Record<string, string>,
): void {
  const content = fs.readFileSync(packageJsonPath, 'utf-8');
  const pkg: PackageJson = JSON.parse(content);
  pkg.dependencies = pkg.dependencies || {};
  for (const [name, version] of Object.entries(newDeps)) {
    const alreadyInDeps = pkg.dependencies[name] !== undefined;
    const alreadyInDevDeps = pkg.devDependencies?.[name] !== undefined;
    // Check both dependencies and devDependencies to avoid duplicates
    const existing = pkg.dependencies[name] ?? pkg.devDependencies?.[name];
    let resolved = version;
    if (existing && existing !== version) {
      const result = areRangesCompatible(existing, version);
      if (!result.compatible) {
        throw new Error(
          `Dependency conflict: ${name} is already at ${existing}, skill wants ${version}`,
        );
      }
      resolved = result.resolved;
    }
    // Keep the package in the section it already occupies so it is never
    // listed in both dependencies and devDependencies.
    if (!alreadyInDeps && alreadyInDevDeps && pkg.devDependencies) {
      pkg.devDependencies[name] = resolved;
    } else {
      pkg.dependencies[name] = resolved;
    }
  }
  // Sort dependencies for deterministic output
  pkg.dependencies = Object.fromEntries(
    Object.entries(pkg.dependencies).sort(([a], [b]) => a.localeCompare(b)),
  );
  if (pkg.devDependencies) {
    pkg.devDependencies = Object.fromEntries(
      Object.entries(pkg.devDependencies).sort(([a], [b]) => a.localeCompare(b)),
    );
  }
  fs.writeFileSync(
    packageJsonPath,
    JSON.stringify(pkg, null, 2) + '\n',
    'utf-8',
  );
}
/**
 * Append env var placeholders to a .env.example-style file.
 *
 * Variables already defined in the file are skipped; new ones are
 * appended as `NAME=` lines under an "# Added by skill" marker. No write
 * happens when every requested variable already exists.
 */
export function mergeEnvAdditions(
  envExamplePath: string,
  additions: string[],
): void {
  const existingContent = fs.existsSync(envExamplePath)
    ? fs.readFileSync(envExamplePath, 'utf-8')
    : '';
  // Collect names already assigned anywhere in the file.
  const defined = new Set<string>();
  for (const line of existingContent.split('\n')) {
    const m = /^([A-Za-z_][A-Za-z0-9_]*)=/.exec(line);
    if (m) defined.add(m[1]);
  }
  const missing = additions.filter((name) => !defined.has(name));
  if (missing.length === 0) return;
  let out = existingContent;
  if (out && !out.endsWith('\n')) out += '\n';
  out += '\n# Added by skill\n';
  out += missing.map((name) => `${name}=\n`).join('');
  fs.writeFileSync(envExamplePath, out, 'utf-8');
}
/**
 * Extract the host port from a docker-compose short-syntax port mapping.
 *
 * Supported forms:
 *   "8080:80"           -> "8080"
 *   "127.0.0.1:8080:80" -> "8080"  (previously returned the IP, so
 *                                   collision checks compared addresses
 *                                   instead of ports)
 *   "80"                -> null    (container-only, no host binding)
 *
 * NOTE(review): bracketed IPv6 host addresses ("[::1]:8080:80") are not
 * handled — confirm whether any skill package uses them.
 */
function extractHostPort(portMapping: string): string | null {
  const parts = String(portMapping).split(':');
  if (parts.length >= 3) {
    // ip:hostPort:containerPort — the host port is the middle component.
    return parts[1];
  }
  if (parts.length === 2) {
    // hostPort:containerPort
    return parts[0];
  }
  return null;
}
/**
 * Add skill-provided services to docker-compose.yml.
 *
 * Existing services are never overwritten — a same-name addition is
 * skipped. A host-port collision between a new service and anything
 * already claimed throws before the file is written, leaving it intact.
 */
export function mergeDockerComposeServices(
  composePath: string,
  services: Record<string, unknown>,
): void {
  const compose: DockerComposeFile = fs.existsSync(composePath)
    ? ((parse(fs.readFileSync(composePath, 'utf-8')) as DockerComposeFile) ||
        {})
    : { version: '3' };
  const mergedServices = (compose.services = compose.services || {});
  // Helper: a service definition's ports list, or [] when absent.
  const portsOf = (definition: unknown): unknown[] => {
    const ports = (definition as Record<string, unknown>).ports;
    return Array.isArray(ports) ? ports : [];
  };
  // Gather every host port already claimed by an existing service.
  const claimedPorts = new Set<string>();
  for (const svc of Object.values(mergedServices)) {
    for (const mapping of portsOf(svc)) {
      const host = extractHostPort(String(mapping));
      if (host) claimedPorts.add(host);
    }
  }
  // Merge in new services, rejecting host-port collisions.
  for (const [name, definition] of Object.entries(services)) {
    if (mergedServices[name]) continue; // skip existing
    for (const mapping of portsOf(definition)) {
      const host = extractHostPort(String(mapping));
      if (host && claimedPorts.has(host)) {
        throw new Error(
          `Port collision: host port ${host} from service "${name}" is already in use`,
        );
      }
      if (host) claimedPorts.add(host);
    }
    mergedServices[name] = definition;
  }
  fs.writeFileSync(composePath, stringify(compose), 'utf-8');
}
/**
 * Run `npm install` in the project root so freshly merged dependencies
 * are actually installed. Inherits stdio so install output is visible.
 */
export function runNpmInstall(): void {
  execSync('npm install', { stdio: 'inherit', cwd: process.cwd() });
}

View File

@@ -0,0 +1,16 @@
{
"compilerOptions": {
"target": "ES2022",
"module": "NodeNext",
"moduleResolution": "NodeNext",
"lib": ["ES2022"],
"strict": true,
"esModuleInterop": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true,
"resolveJsonModule": true,
"noEmit": true
},
"include": ["**/*.ts"],
"exclude": ["__tests__"]
}

134
skills-engine/types.ts Normal file
View File

@@ -0,0 +1,134 @@
/** Parsed skill.yaml manifest describing one skill package. */
export interface SkillManifest {
  skill: string; // unique skill name
  version: string;
  description: string;
  core_version: string; // core version the skill was authored against
  adds: string[]; // new files the skill creates
  modifies: string[]; // core files the skill three-way-merges
  // Non-merge changes: npm deps, env vars, docker-compose services.
  structured?: {
    npm_dependencies?: Record<string, string>;
    env_additions?: string[];
    docker_compose_services?: Record<string, unknown>;
  };
  file_ops?: FileOperation[]; // declarative renames/deletes/moves
  conflicts: string[]; // skills that cannot coexist with this one
  depends: string[]; // skills that must be applied first
  test?: string; // shell command that validates the skill
  author?: string;
  license?: string;
  min_skills_system_version?: string; // minimum engine schema version required
  tested_with?: string[]; // core versions the skill was tested against
  post_apply?: string[]; // commands to run after a successful apply
}
/** Persisted engine state (.nanoclaw/state.yaml). */
export interface SkillState {
  skills_system_version: string; // schema version of this file
  core_version: string; // current core codebase version
  applied_skills: AppliedSkill[]; // in original application order
  custom_modifications?: CustomModification[]; // user patches outside any skill
  path_remap?: Record<string, string>; // old path -> new path after core moves
  rebased_at?: string; // set once skills are baked into base (one-way)
}
/** Record of one applied skill. */
export interface AppliedSkill {
  name: string;
  version: string;
  applied_at: string; // ISO timestamp
  file_hashes: Record<string, string>; // rel path -> SHA-256 after apply
  structured_outcomes?: Record<string, unknown>; // structured ops performed
  custom_patch?: string; // user patch layered on top of this skill
  custom_patch_description?: string;
}
/** Outcome of applying a single skill. */
export interface ApplyResult {
  success: boolean;
  skill: string;
  version: string;
  mergeConflicts?: string[]; // files left with conflict markers
  backupPending?: boolean; // backup kept for manual resolution/abort
  untrackedChanges?: string[];
  error?: string;
}
/** Result of one merge-file invocation. */
export interface MergeResult {
  clean: boolean; // true when the merge produced no conflicts
  exitCode: number;
}
/**
 * Declarative file operation from a skill manifest.
 * 'rename'/'move' use from/to; 'delete' uses path.
 */
export interface FileOperation {
  type: 'rename' | 'delete' | 'move';
  from?: string;
  to?: string;
  path?: string;
}
/** Aggregate result of executing a manifest's file_ops. */
export interface FileOpsResult {
  success: boolean;
  executed: FileOperation[]; // operations that actually ran
  warnings: string[];
  errors: string[];
}
/** A user modification recorded outside any skill, kept as a patch file. */
export interface CustomModification {
  description: string;
  applied_at: string; // ISO timestamp
  files_modified: string[];
  patch_file: string; // path to the saved git patch
}
/** SHA-256 hashes of the three inputs of one file's three-way merge. */
export interface FileInputHashes {
  base: string; // SHA-256 of .nanoclaw/base/<relPath>
  current: string; // SHA-256 of working tree <relPath> before this merge
  skill: string; // SHA-256 of skill's modify/<relPath>
}
/** Metadata attached to a pre-computed conflict resolution (rerere cache). */
export interface ResolutionMeta {
  skills: string[]; // skills involved in the conflict
  apply_order: string[];
  core_version: string;
  resolved_at: string; // ISO timestamp
  tested: boolean; // whether the resolution was exercised by tests
  test_passed: boolean;
  resolution_source: 'maintainer' | 'user' | 'claude';
  input_hashes: Record<string, string>;
  output_hash: string;
  file_hashes: Record<string, FileInputHashes>; // per-file merge input hashes
}
/** Dry-run summary of a pending core update. */
export interface UpdatePreview {
  currentVersion: string;
  newVersion: string;
  filesChanged: string[]; // added or content-changed vs base
  filesDeleted: string[]; // in base but absent from the new core
  conflictRisk: string[]; // changed files also touched by applied skills
  customPatchesAtRisk: string[]; // changed files also touched by custom patches
}
/** Outcome of applying a core update. */
export interface UpdateResult {
  success: boolean;
  previousVersion: string;
  newVersion: string;
  mergeConflicts?: string[]; // files needing manual resolution
  backupPending?: boolean; // backup retained for manual resolve/abort
  customPatchFailures?: string[]; // custom patches that failed to re-apply
  skillReapplyResults?: Record<string, boolean>; // skill name -> test passed
  error?: string;
}
/** Outcome of uninstalling a skill. */
export interface UninstallResult {
  success: boolean;
  skill: string;
  customPatchWarning?: string; // set when uninstall would drop a custom patch
  replayResults?: Record<string, boolean>; // remaining skill name -> test passed
  error?: string;
}
/** Outcome of rebasing (baking applied skills into base). */
export interface RebaseResult {
  success: boolean;
  patchFile?: string;
  filesInPatch: number;
  rebased_at?: string; // ISO timestamp once the rebase is recorded
  mergeConflicts?: string[];
  backupPending?: boolean;
  error?: string;
}

231
skills-engine/uninstall.ts Normal file
View File

@@ -0,0 +1,231 @@
import { execFileSync, execSync } from 'child_process';
import fs from 'fs';
import path from 'path';
import { clearBackup, createBackup, restoreBackup } from './backup.js';
import { BASE_DIR, NANOCLAW_DIR } from './constants.js';
import { acquireLock } from './lock.js';
import { loadPathRemap, resolvePathRemap } from './path-remap.js';
import { computeFileHash, readState, writeState } from './state.js';
import { findSkillDir, replaySkills } from './replay.js';
import type { UninstallResult } from './types.js';
/**
 * Remove an applied skill by resetting the files only it touched back to
 * the base snapshot, then replaying all remaining skills from a clean
 * slate.
 *
 * Safety: every file touched by any applied skill (plus custom-mod files)
 * is backed up first; failures restore the backup. Refuses to run after a
 * rebase (skills are baked into base), and refuses when the skill carries
 * a custom patch until the caller confirms losing it.
 */
export async function uninstallSkill(
  skillName: string,
): Promise<UninstallResult> {
  const projectRoot = process.cwd();
  const state = readState();
  // 1. Block after rebase — skills are baked into base
  if (state.rebased_at) {
    return {
      success: false,
      skill: skillName,
      error:
        'Cannot uninstall individual skills after rebase. The base includes all skill modifications. To remove a skill, start from a clean core and re-apply the skills you want.',
    };
  }
  // 2. Verify skill exists
  const skillEntry = state.applied_skills.find((s) => s.name === skillName);
  if (!skillEntry) {
    return {
      success: false,
      skill: skillName,
      error: `Skill "${skillName}" is not applied.`,
    };
  }
  // 3. Custom patch present — refuse and surface a warning so the caller
  // can explicitly confirm losing those customizations
  if (skillEntry.custom_patch) {
    return {
      success: false,
      skill: skillName,
      customPatchWarning: `Skill "${skillName}" has a custom patch (${skillEntry.custom_patch_description ?? 'no description'}). Uninstalling will lose these customizations. Re-run with confirmation to proceed.`,
    };
  }
  // 4. Acquire lock
  const releaseLock = acquireLock();
  try {
    // 5. Backup all files touched by ANY applied skill — the replay below
    // rewrites them all, not just the removed skill's files
    const allTouchedFiles = new Set<string>();
    for (const skill of state.applied_skills) {
      for (const filePath of Object.keys(skill.file_hashes)) {
        allTouchedFiles.add(filePath);
      }
    }
    if (state.custom_modifications) {
      for (const mod of state.custom_modifications) {
        for (const f of mod.files_modified) {
          allTouchedFiles.add(f);
        }
      }
    }
    const filesToBackup = [...allTouchedFiles].map((f) =>
      path.join(projectRoot, f),
    );
    createBackup(filesToBackup);
    // 6. Build remaining skill list (original order, minus removed)
    const remainingSkills = state.applied_skills
      .filter((s) => s.name !== skillName)
      .map((s) => s.name);
    // 7. Locate all skill dirs — every remaining skill package must still
    // be present for the replay to work
    const skillDirs: Record<string, string> = {};
    for (const name of remainingSkills) {
      const dir = findSkillDir(name, projectRoot);
      if (!dir) {
        restoreBackup();
        clearBackup();
        return {
          success: false,
          skill: skillName,
          error: `Cannot find skill package for "${name}" in .claude/skills/. All remaining skills must be available for replay.`,
        };
      }
      skillDirs[name] = dir;
    }
    // 8. Reset files exclusive to the removed skill; replaySkills handles the rest
    const baseDir = path.join(projectRoot, BASE_DIR);
    const pathRemap = loadPathRemap();
    const remainingSkillFiles = new Set<string>();
    for (const skill of state.applied_skills) {
      if (skill.name === skillName) continue;
      for (const filePath of Object.keys(skill.file_hashes)) {
        remainingSkillFiles.add(filePath);
      }
    }
    const removedSkillFiles = Object.keys(skillEntry.file_hashes);
    for (const filePath of removedSkillFiles) {
      if (remainingSkillFiles.has(filePath)) continue; // replaySkills handles it
      const resolvedPath = resolvePathRemap(filePath, pathRemap);
      const currentPath = path.join(projectRoot, resolvedPath);
      const basePath = path.join(baseDir, resolvedPath);
      if (fs.existsSync(basePath)) {
        // Core file the skill modified — restore the pristine base copy
        fs.mkdirSync(path.dirname(currentPath), { recursive: true });
        fs.copyFileSync(basePath, currentPath);
      } else if (fs.existsSync(currentPath)) {
        // Add-only file not in base — remove
        fs.unlinkSync(currentPath);
      }
    }
    // 9. Replay remaining skills on clean base
    const replayResult = await replaySkills({
      skills: remainingSkills,
      skillDirs,
      projectRoot,
    });
    // 10. Check replay result before proceeding
    if (!replayResult.success) {
      restoreBackup();
      clearBackup();
      return {
        success: false,
        skill: skillName,
        error: `Replay failed: ${replayResult.error}`,
      };
    }
    // 11. Re-apply standalone custom_modifications
    if (state.custom_modifications) {
      for (const mod of state.custom_modifications) {
        const patchPath = path.join(projectRoot, mod.patch_file);
        if (fs.existsSync(patchPath)) {
          try {
            execFileSync('git', ['apply', '--3way', patchPath], {
              stdio: 'pipe',
              cwd: projectRoot,
            });
          } catch {
            // Best-effort: a failed custom patch is deliberately swallowed
            // here rather than aborting the whole uninstall
          }
        }
      }
    }
    // 12. Run each remaining skill's recorded test command
    const replayResults: Record<string, boolean> = {};
    for (const skill of state.applied_skills) {
      if (skill.name === skillName) continue;
      const outcomes = skill.structured_outcomes as
        | Record<string, unknown>
        | undefined;
      if (!outcomes?.test) continue;
      try {
        execSync(outcomes.test as string, {
          stdio: 'pipe',
          cwd: projectRoot,
          timeout: 120_000,
        });
        replayResults[skill.name] = true;
      } catch {
        replayResults[skill.name] = false;
      }
    }
    // Any test failure aborts the uninstall and restores the backup
    const testFailures = Object.entries(replayResults).filter(
      ([, passed]) => !passed,
    );
    if (testFailures.length > 0) {
      restoreBackup();
      clearBackup();
      return {
        success: false,
        skill: skillName,
        replayResults,
        error: `Tests failed after uninstall: ${testFailures.map(([n]) => n).join(', ')}`,
      };
    }
    // 13. Update state: drop the removed skill and refresh the remaining
    // skills' hashes, since the replay may have produced different merges
    state.applied_skills = state.applied_skills.filter(
      (s) => s.name !== skillName,
    );
    for (const skill of state.applied_skills) {
      const newHashes: Record<string, string> = {};
      for (const filePath of Object.keys(skill.file_hashes)) {
        const absPath = path.join(projectRoot, filePath);
        if (fs.existsSync(absPath)) {
          newHashes[filePath] = computeFileHash(absPath);
        }
      }
      skill.file_hashes = newHashes;
    }
    writeState(state);
    // 14. Cleanup
    clearBackup();
    return {
      success: true,
      skill: skillName,
      replayResults:
        Object.keys(replayResults).length > 0 ? replayResults : undefined,
    };
  } catch (err) {
    restoreBackup();
    clearBackup();
    return {
      success: false,
      skill: skillName,
      error: err instanceof Error ? err.message : String(err),
    };
  } finally {
    releaseLock();
  }
}

368
skills-engine/update.ts Normal file
View File

@@ -0,0 +1,368 @@
import { execFileSync, execSync } from 'child_process';
import crypto from 'crypto';
import fs from 'fs';
import os from 'os';
import path from 'path';
import { parse as parseYaml } from 'yaml';
import { clearBackup, createBackup, restoreBackup } from './backup.js';
import { BASE_DIR, NANOCLAW_DIR } from './constants.js';
import { copyDir } from './fs-utils.js';
import { isCustomizeActive } from './customize.js';
import { acquireLock } from './lock.js';
import {
cleanupMergeState,
isGitRepo,
mergeFile,
runRerere,
setupRerereAdapter,
} from './merge.js';
import { recordPathRemap } from './path-remap.js';
import { computeFileHash, readState, writeState } from './state.js';
import {
mergeDockerComposeServices,
mergeEnvAdditions,
mergeNpmDependencies,
runNpmInstall,
} from './structured.js';
import type { UpdatePreview, UpdateResult } from './types.js';
/**
 * Recursively list all files under `dir`, returned as paths relative to
 * the original root (the `root` parameter carries the root through
 * recursion; callers pass only `dir`).
 */
function walkDir(dir: string, root?: string): string[] {
  const rootDir = root ?? dir;
  const files: string[] = [];
  const entries = fs.readdirSync(dir, { withFileTypes: true });
  for (const entry of entries) {
    const absolute = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      for (const nested of walkDir(absolute, rootDir)) {
        files.push(nested);
      }
    } else {
      files.push(path.relative(rootDir, absolute));
    }
  }
  return files;
}
/**
 * Compute a dry-run preview of updating the core to `newCorePath`:
 * which files would change or be deleted, and which of the changed files
 * overlap applied skills (conflict risk) or custom patches.
 */
export function previewUpdate(newCorePath: string): UpdatePreview {
  const projectRoot = process.cwd();
  const state = readState();
  const baseDir = path.join(projectRoot, BASE_DIR);
  // The new core's version comes from its package.json, when present.
  let newVersion = 'unknown';
  const newPkgPath = path.join(newCorePath, 'package.json');
  if (fs.existsSync(newPkgPath)) {
    const pkg = JSON.parse(fs.readFileSync(newPkgPath, 'utf-8'));
    newVersion = pkg.version ?? 'unknown';
  }
  // A file counts as changed when it is absent from base or its content
  // hash differs from the base copy.
  const newCoreFiles = walkDir(newCorePath);
  const filesChanged = newCoreFiles.filter((relPath) => {
    const basePath = path.join(baseDir, relPath);
    if (!fs.existsSync(basePath)) return true;
    return (
      computeFileHash(basePath) !==
      computeFileHash(path.join(newCorePath, relPath))
    );
  });
  // Deleted: present in base but missing from the new core.
  const filesDeleted: string[] = [];
  if (fs.existsSync(baseDir)) {
    const newCoreSet = new Set(newCoreFiles);
    for (const relPath of walkDir(baseDir)) {
      if (!newCoreSet.has(relPath)) {
        filesDeleted.push(relPath);
      }
    }
  }
  // Overlap analysis: changed files also touched by skills or patches.
  const conflictRisk: string[] = [];
  const customPatchesAtRisk: string[] = [];
  for (const relPath of filesChanged) {
    if (state.applied_skills.some((s) => s.file_hashes[relPath])) {
      conflictRisk.push(relPath);
    }
    if (
      state.custom_modifications?.some((m) =>
        m.files_modified.includes(relPath),
      )
    ) {
      customPatchesAtRisk.push(relPath);
    }
  }
  return {
    currentVersion: state.core_version,
    newVersion,
    filesChanged,
    filesDeleted,
    conflictRisk,
    customPatchesAtRisk,
  };
}
/**
 * Apply a core update from `newCorePath` via three-way merge
 * (working tree ← .nanoclaw/base → new core), with git-rerere-backed
 * auto-resolution of previously seen conflicts.
 *
 * On unresolved conflicts the backup is left in place
 * (`backupPending: true`): resolve manually then call clearBackup(), or
 * abort with restoreBackup() + clearBackup(). On success the base
 * snapshot is replaced by the new core, custom patches and skills'
 * structured ops are re-applied, and each skill's recorded test command
 * is re-run.
 */
export async function applyUpdate(newCorePath: string): Promise<UpdateResult> {
  const projectRoot = process.cwd();
  const state = readState();
  const baseDir = path.join(projectRoot, BASE_DIR);
  // --- Pre-flight ---
  if (isCustomizeActive()) {
    return {
      success: false,
      previousVersion: state.core_version,
      newVersion: 'unknown',
      error:
        'A customize session is active. Run commitCustomize() or abortCustomize() first.',
    };
  }
  const releaseLock = acquireLock();
  try {
    // --- Preview ---
    const preview = previewUpdate(newCorePath);
    // --- Backup everything the update may touch ---
    const filesToBackup = [
      ...preview.filesChanged.map((f) => path.join(projectRoot, f)),
      ...preview.filesDeleted.map((f) => path.join(projectRoot, f)),
    ];
    createBackup(filesToBackup);
    // --- Three-way merge ---
    const mergeConflicts: string[] = [];
    for (const relPath of preview.filesChanged) {
      const currentPath = path.join(projectRoot, relPath);
      const basePath = path.join(baseDir, relPath);
      const newCoreSrcPath = path.join(newCorePath, relPath);
      if (!fs.existsSync(currentPath)) {
        // File doesn't exist yet — just copy from new core
        fs.mkdirSync(path.dirname(currentPath), { recursive: true });
        fs.copyFileSync(newCoreSrcPath, currentPath);
        continue;
      }
      if (!fs.existsSync(basePath)) {
        // No base — use current as base
        fs.mkdirSync(path.dirname(basePath), { recursive: true });
        fs.copyFileSync(currentPath, basePath);
      }
      // Three-way merge: current ← base → newCore
      // Save current content before merge overwrites it (needed for rerere stage 2 = "ours")
      const oursContent = fs.readFileSync(currentPath, 'utf-8');
      // Merge into a temp copy so the working file is only replaced once
      // the outcome is known
      const tmpCurrent = path.join(
        os.tmpdir(),
        `nanoclaw-update-${crypto.randomUUID()}-${path.basename(relPath)}`,
      );
      fs.copyFileSync(currentPath, tmpCurrent);
      const result = mergeFile(tmpCurrent, basePath, newCoreSrcPath);
      if (result.clean) {
        fs.copyFileSync(tmpCurrent, currentPath);
        fs.unlinkSync(tmpCurrent);
      } else {
        // Copy conflict markers to working tree path before rerere
        fs.copyFileSync(tmpCurrent, currentPath);
        fs.unlinkSync(tmpCurrent);
        if (isGitRepo()) {
          const baseContent = fs.readFileSync(basePath, 'utf-8');
          const theirsContent = fs.readFileSync(newCoreSrcPath, 'utf-8');
          setupRerereAdapter(relPath, baseContent, oursContent, theirsContent);
          const autoResolved = runRerere(currentPath);
          if (autoResolved) {
            // Stage + re-run rerere so the resolution is recorded for reuse
            execFileSync('git', ['add', relPath], { stdio: 'pipe' });
            execSync('git rerere', { stdio: 'pipe' });
            cleanupMergeState(relPath);
            continue;
          }
          cleanupMergeState(relPath);
        }
        mergeConflicts.push(relPath);
      }
    }
    if (mergeConflicts.length > 0) {
      // Preserve backup so user can resolve conflicts manually, then continue
      // Call clearBackup() after resolution or restoreBackup() + clearBackup() to abort
      return {
        success: false,
        previousVersion: preview.currentVersion,
        newVersion: preview.newVersion,
        mergeConflicts,
        backupPending: true,
        error: `Unresolved merge conflicts in: ${mergeConflicts.join(', ')}. Resolve manually then call clearBackup(), or restoreBackup() + clearBackup() to abort.`,
      };
    }
    // --- Remove deleted files ---
    for (const relPath of preview.filesDeleted) {
      const currentPath = path.join(projectRoot, relPath);
      if (fs.existsSync(currentPath)) {
        fs.unlinkSync(currentPath);
      }
    }
    // --- Re-apply custom patches ---
    const customPatchFailures: string[] = [];
    if (state.custom_modifications) {
      for (const mod of state.custom_modifications) {
        const patchPath = path.join(projectRoot, mod.patch_file);
        if (!fs.existsSync(patchPath)) {
          customPatchFailures.push(
            `${mod.description}: patch file missing (${mod.patch_file})`,
          );
          continue;
        }
        try {
          execFileSync('git', ['apply', '--3way', patchPath], {
            stdio: 'pipe',
            cwd: projectRoot,
          });
        } catch {
          // Reported to the caller via customPatchFailures, not thrown
          customPatchFailures.push(mod.description);
        }
      }
    }
    // --- Record path remaps from update metadata ---
    const remapFile = path.join(newCorePath, '.nanoclaw-meta', 'path_remap.yaml');
    if (fs.existsSync(remapFile)) {
      const remap = parseYaml(fs.readFileSync(remapFile, 'utf-8')) as Record<string, string>;
      if (remap && typeof remap === 'object') {
        recordPathRemap(remap);
      }
    }
    // --- Update base: replace the snapshot wholesale with the new core ---
    if (fs.existsSync(baseDir)) {
      fs.rmSync(baseDir, { recursive: true, force: true });
    }
    fs.mkdirSync(baseDir, { recursive: true });
    copyDir(newCorePath, baseDir);
    // --- Structured ops: re-apply from all skills ---
    // (package.json/.env.example/docker-compose.yml from the new core may
    // lack skill-added entries, so merge them back in)
    const allNpmDeps: Record<string, string> = {};
    const allEnvAdditions: string[] = [];
    const allDockerServices: Record<string, unknown> = {};
    let hasNpmDeps = false;
    for (const skill of state.applied_skills) {
      const outcomes = skill.structured_outcomes as Record<string, unknown> | undefined;
      if (!outcomes) continue;
      if (outcomes.npm_dependencies) {
        Object.assign(allNpmDeps, outcomes.npm_dependencies as Record<string, string>);
        hasNpmDeps = true;
      }
      if (outcomes.env_additions) {
        allEnvAdditions.push(...(outcomes.env_additions as string[]));
      }
      if (outcomes.docker_compose_services) {
        Object.assign(
          allDockerServices,
          outcomes.docker_compose_services as Record<string, unknown>,
        );
      }
    }
    if (hasNpmDeps) {
      const pkgPath = path.join(projectRoot, 'package.json');
      mergeNpmDependencies(pkgPath, allNpmDeps);
    }
    if (allEnvAdditions.length > 0) {
      const envPath = path.join(projectRoot, '.env.example');
      mergeEnvAdditions(envPath, allEnvAdditions);
    }
    if (Object.keys(allDockerServices).length > 0) {
      const composePath = path.join(projectRoot, 'docker-compose.yml');
      mergeDockerComposeServices(composePath, allDockerServices);
    }
    if (hasNpmDeps) {
      runNpmInstall();
    }
    // --- Run tests for each applied skill ---
    const skillReapplyResults: Record<string, boolean> = {};
    for (const skill of state.applied_skills) {
      const outcomes = skill.structured_outcomes as Record<string, unknown> | undefined;
      if (!outcomes?.test) continue;
      const testCmd = outcomes.test as string;
      try {
        execSync(testCmd, {
          stdio: 'pipe',
          cwd: projectRoot,
          timeout: 120_000,
        });
        skillReapplyResults[skill.name] = true;
      } catch {
        skillReapplyResults[skill.name] = false;
      }
    }
    // --- Update state ---
    state.core_version = preview.newVersion;
    writeState(state);
    // --- Cleanup ---
    clearBackup();
    return {
      success: true,
      previousVersion: preview.currentVersion,
      newVersion: preview.newVersion,
      customPatchFailures:
        customPatchFailures.length > 0 ? customPatchFailures : undefined,
      skillReapplyResults:
        Object.keys(skillReapplyResults).length > 0
          ? skillReapplyResults
          : undefined,
    };
  } catch (err) {
    // Any unexpected failure: roll the working tree back to the backup
    restoreBackup();
    clearBackup();
    return {
      success: false,
      previousVersion: state.core_version,
      newVersion: 'unknown',
      error: err instanceof Error ? err.message : String(err),
    };
  } finally {
    releaseLock();
  }
}