refactor: remove deterministic caching system from skills engine (#453)

This commit is contained in:
Gabi Simons
2026-02-24 15:49:29 +02:00
committed by GitHub
parent 1448a14a94
commit 29a5dafe01
15 changed files with 16 additions and 1107 deletions

View File

@@ -6,7 +6,6 @@ import {
BACKUP_DIR,
LOCK_FILE,
CUSTOM_DIR,
RESOLUTIONS_DIR,
SKILLS_SCHEMA_VERSION,
} from '../constants.js';
@@ -18,7 +17,6 @@ describe('constants', () => {
BACKUP_DIR,
LOCK_FILE,
CUSTOM_DIR,
RESOLUTIONS_DIR,
SKILLS_SCHEMA_VERSION,
};
@@ -30,7 +28,7 @@ describe('constants', () => {
});
it('path constants use forward slashes and .nanoclaw prefix', () => {
const pathConstants = [BASE_DIR, BACKUP_DIR, LOCK_FILE, CUSTOM_DIR, RESOLUTIONS_DIR];
const pathConstants = [BASE_DIR, BACKUP_DIR, LOCK_FILE, CUSTOM_DIR];
for (const p of pathConstants) {
expect(p).not.toContain('\\');
expect(p).toMatch(/^\.nanoclaw\//);

View File

@@ -1,8 +1,7 @@
import { execSync } from 'child_process';
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import fs from 'fs';
import path from 'path';
import { isGitRepo, mergeFile, setupRerereAdapter } from '../merge.js';
import { isGitRepo, mergeFile } from '../merge.js';
import { createTempDir, initGitRepo, cleanup } from './test-helpers.js';
describe('merge', () => {
@@ -51,31 +50,6 @@ describe('merge', () => {
expect(merged).toContain('line3-modified');
});
it('setupRerereAdapter cleans stale MERGE_HEAD before proceeding', () => {
// Simulate a stale MERGE_HEAD from a previous crash
const gitDir = execSync('git rev-parse --git-dir', {
cwd: tmpDir,
encoding: 'utf-8',
}).trim();
const headHash = execSync('git rev-parse HEAD', {
cwd: tmpDir,
encoding: 'utf-8',
}).trim();
fs.writeFileSync(path.join(gitDir, 'MERGE_HEAD'), headHash + '\n');
fs.writeFileSync(path.join(gitDir, 'MERGE_MSG'), 'stale merge\n');
// Write a file for the adapter to work with
fs.writeFileSync(path.join(tmpDir, 'test.txt'), 'conflicted content');
// setupRerereAdapter should not throw despite stale MERGE_HEAD
expect(() =>
setupRerereAdapter('test.txt', 'base', 'ours', 'theirs'),
).not.toThrow();
// MERGE_HEAD should still exist (newly written by setupRerereAdapter)
expect(fs.existsSync(path.join(gitDir, 'MERGE_HEAD'))).toBe(true);
});
it('conflict with overlapping changes', () => {
const base = path.join(tmpDir, 'base.txt');
const current = path.join(tmpDir, 'current.txt');

View File

@@ -239,48 +239,6 @@ describe('rebase', () => {
expect(baseConfig).toContain('skill-b');
});
it('rebase clears resolution cache', async () => {
// Set up base + working tree
const baseDir = path.join(tmpDir, '.nanoclaw', 'base', 'src');
fs.mkdirSync(baseDir, { recursive: true });
fs.writeFileSync(path.join(baseDir, 'index.ts'), 'const x = 1;\n');
fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
fs.writeFileSync(
path.join(tmpDir, 'src', 'index.ts'),
'const x = 1;\n// skill\n',
);
// Create a fake resolution cache entry
const resDir = path.join(tmpDir, '.nanoclaw', 'resolutions', 'skill-a+skill-b');
fs.mkdirSync(resDir, { recursive: true });
fs.writeFileSync(path.join(resDir, 'meta.yaml'), 'skills: [skill-a, skill-b]\n');
writeState(tmpDir, {
skills_system_version: '0.1.0',
core_version: '1.0.0',
applied_skills: [
{
name: 'my-skill',
version: '1.0.0',
applied_at: new Date().toISOString(),
file_hashes: { 'src/index.ts': 'hash' },
},
],
});
initGitRepo(tmpDir);
const result = await rebase();
expect(result.success).toBe(true);
// Resolution cache should be cleared
const resolutions = fs.readdirSync(
path.join(tmpDir, '.nanoclaw', 'resolutions'),
);
expect(resolutions).toHaveLength(0);
});
it('rebase with new base: base updated, changes merged', async () => {
// Set up current base (multi-line so changes don't conflict)
const baseDir = path.join(tmpDir, '.nanoclaw', 'base');

View File

@@ -1,283 +0,0 @@
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import crypto from 'crypto';
import fs from 'fs';
import path from 'path';
import { parse, stringify } from 'yaml';
import {
findResolutionDir,
loadResolutions,
saveResolution,
} from '../resolution-cache.js';
import { createTempDir, setupNanoclawDir, initGitRepo, cleanup } from './test-helpers.js';
// Hex-encoded SHA-256 digest of a UTF-8 string (fixture helper for hash tests).
function sha256(content: string): string {
  const hasher = crypto.createHash('sha256');
  hasher.update(content);
  return hasher.digest('hex');
}
// Placeholder input hashes for tests that do not exercise hash verification.
const dummyHashes = { base: 'aaa', current: 'bbb', skill: 'ccc' };
describe('resolution-cache', () => {
  let tmpDir: string;
  const originalCwd = process.cwd();
  beforeEach(() => {
    // Each test runs chdir'd into a fresh temp project with a .nanoclaw dir.
    tmpDir = createTempDir();
    setupNanoclawDir(tmpDir);
    process.chdir(tmpDir);
  });
  afterEach(() => {
    process.chdir(originalCwd);
    cleanup(tmpDir);
  });
  it('findResolutionDir returns null when not found', () => {
    const result = findResolutionDir(['skill-a', 'skill-b'], tmpDir);
    expect(result).toBeNull();
  });
  it('saveResolution creates directory structure with files and meta', () => {
    saveResolution(
      ['skill-b', 'skill-a'],
      [{ relPath: 'src/config.ts', preimage: 'conflict content', resolution: 'resolved content', inputHashes: dummyHashes }],
      { core_version: '1.0.0' },
      tmpDir,
    );
    // Skills are sorted, so key is "skill-a+skill-b"
    const resDir = path.join(tmpDir, '.nanoclaw', 'resolutions', 'skill-a+skill-b');
    expect(fs.existsSync(resDir)).toBe(true);
    // Check preimage and resolution files exist
    expect(fs.existsSync(path.join(resDir, 'src/config.ts.preimage'))).toBe(true);
    expect(fs.existsSync(path.join(resDir, 'src/config.ts.resolution'))).toBe(true);
    // Check meta.yaml exists and has expected fields
    const metaPath = path.join(resDir, 'meta.yaml');
    expect(fs.existsSync(metaPath)).toBe(true);
    const meta = parse(fs.readFileSync(metaPath, 'utf-8'));
    expect(meta.core_version).toBe('1.0.0');
    expect(meta.skills).toEqual(['skill-a', 'skill-b']);
  });
  it('saveResolution writes file_hashes to meta.yaml', () => {
    const hashes = {
      base: sha256('base content'),
      current: sha256('current content'),
      skill: sha256('skill content'),
    };
    saveResolution(
      ['alpha', 'beta'],
      [{ relPath: 'src/config.ts', preimage: 'pre', resolution: 'post', inputHashes: hashes }],
      {},
      tmpDir,
    );
    const resDir = path.join(tmpDir, '.nanoclaw', 'resolutions', 'alpha+beta');
    const meta = parse(fs.readFileSync(path.join(resDir, 'meta.yaml'), 'utf-8'));
    expect(meta.file_hashes).toBeDefined();
    expect(meta.file_hashes['src/config.ts']).toEqual(hashes);
  });
  it('findResolutionDir returns path after save', () => {
    saveResolution(
      ['alpha', 'beta'],
      [{ relPath: 'file.ts', preimage: 'pre', resolution: 'post', inputHashes: dummyHashes }],
      {},
      tmpDir,
    );
    const result = findResolutionDir(['alpha', 'beta'], tmpDir);
    expect(result).not.toBeNull();
    expect(result).toContain('alpha+beta');
  });
  it('findResolutionDir finds shipped resolutions in .claude/resolutions', () => {
    const shippedDir = path.join(tmpDir, '.claude', 'resolutions', 'alpha+beta');
    fs.mkdirSync(shippedDir, { recursive: true });
    fs.writeFileSync(path.join(shippedDir, 'meta.yaml'), 'skills: [alpha, beta]\n');
    const result = findResolutionDir(['alpha', 'beta'], tmpDir);
    expect(result).not.toBeNull();
    expect(result).toContain('.claude/resolutions/alpha+beta');
  });
  it('findResolutionDir prefers shipped over project-level', () => {
    // Create both shipped and project-level
    const shippedDir = path.join(tmpDir, '.claude', 'resolutions', 'a+b');
    fs.mkdirSync(shippedDir, { recursive: true });
    fs.writeFileSync(path.join(shippedDir, 'meta.yaml'), 'skills: [a, b]\n');
    saveResolution(
      ['a', 'b'],
      [{ relPath: 'f.ts', preimage: 'x', resolution: 'project', inputHashes: dummyHashes }],
      {},
      tmpDir,
    );
    const result = findResolutionDir(['a', 'b'], tmpDir);
    expect(result).toContain('.claude/resolutions/a+b');
  });
  it('skills are sorted so order does not matter', () => {
    saveResolution(
      ['zeta', 'alpha'],
      [{ relPath: 'f.ts', preimage: 'a', resolution: 'b', inputHashes: dummyHashes }],
      {},
      tmpDir,
    );
    // Find with reversed order should still work
    const result = findResolutionDir(['alpha', 'zeta'], tmpDir);
    expect(result).not.toBeNull();
    // Also works with original order
    const result2 = findResolutionDir(['zeta', 'alpha'], tmpDir);
    expect(result2).not.toBeNull();
    expect(result).toBe(result2);
  });
  describe('loadResolutions hash verification', () => {
    const baseContent = 'base file content';
    const currentContent = 'current file content';
    const skillContent = 'skill file content';
    const preimageContent = 'preimage with conflict markers';
    const resolutionContent = 'resolved content';
    const rerereHash = 'abc123def456';
    // Builds a shipped resolution dir whose meta.yaml carries the given file_hashes.
    function setupResolutionDir(fileHashes: Record<string, any>) {
      // Create a shipped resolution directory
      const resDir = path.join(tmpDir, '.claude', 'resolutions', 'alpha+beta');
      fs.mkdirSync(path.join(resDir, 'src'), { recursive: true });
      // Write preimage, resolution, and hash sidecar
      fs.writeFileSync(path.join(resDir, 'src/config.ts.preimage'), preimageContent);
      fs.writeFileSync(path.join(resDir, 'src/config.ts.resolution'), resolutionContent);
      fs.writeFileSync(path.join(resDir, 'src/config.ts.preimage.hash'), rerereHash);
      // Write meta.yaml
      const meta: any = {
        skills: ['alpha', 'beta'],
        apply_order: ['alpha', 'beta'],
        core_version: '1.0.0',
        resolved_at: new Date().toISOString(),
        tested: true,
        test_passed: true,
        resolution_source: 'maintainer',
        input_hashes: {},
        output_hash: '',
        file_hashes: fileHashes,
      };
      fs.writeFileSync(path.join(resDir, 'meta.yaml'), stringify(meta));
      return resDir;
    }
    // Writes the base snapshot and the working-tree copy of src/config.ts.
    function setupInputFiles() {
      // Create base file
      fs.mkdirSync(path.join(tmpDir, '.nanoclaw', 'base', 'src'), { recursive: true });
      fs.writeFileSync(path.join(tmpDir, '.nanoclaw', 'base', 'src', 'config.ts'), baseContent);
      // Create current file
      fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
      fs.writeFileSync(path.join(tmpDir, 'src', 'config.ts'), currentContent);
    }
    // Creates a skill package dir with a modify/ copy of src/config.ts.
    function createSkillDir() {
      const skillDir = path.join(tmpDir, 'skill-pkg');
      fs.mkdirSync(path.join(skillDir, 'modify', 'src'), { recursive: true });
      fs.writeFileSync(path.join(skillDir, 'modify', 'src', 'config.ts'), skillContent);
      return skillDir;
    }
    beforeEach(() => {
      initGitRepo(tmpDir);
    });
    it('loads with matching file_hashes', () => {
      setupInputFiles();
      const skillDir = createSkillDir();
      setupResolutionDir({
        'src/config.ts': {
          base: sha256(baseContent),
          current: sha256(currentContent),
          skill: sha256(skillContent),
        },
      });
      const result = loadResolutions(['alpha', 'beta'], tmpDir, skillDir);
      expect(result).toBe(true);
      // Verify rr-cache entry was created
      const gitDir = path.join(tmpDir, '.git');
      const cacheEntry = path.join(gitDir, 'rr-cache', rerereHash);
      expect(fs.existsSync(path.join(cacheEntry, 'preimage'))).toBe(true);
      expect(fs.existsSync(path.join(cacheEntry, 'postimage'))).toBe(true);
    });
    it('skips pair with mismatched base hash', () => {
      setupInputFiles();
      const skillDir = createSkillDir();
      setupResolutionDir({
        'src/config.ts': {
          base: 'wrong_hash',
          current: sha256(currentContent),
          skill: sha256(skillContent),
        },
      });
      const result = loadResolutions(['alpha', 'beta'], tmpDir, skillDir);
      expect(result).toBe(false);
      // rr-cache entry should NOT be created
      const gitDir = path.join(tmpDir, '.git');
      expect(fs.existsSync(path.join(gitDir, 'rr-cache', rerereHash))).toBe(false);
    });
    it('skips pair with mismatched current hash', () => {
      setupInputFiles();
      const skillDir = createSkillDir();
      setupResolutionDir({
        'src/config.ts': {
          base: sha256(baseContent),
          current: 'wrong_hash',
          skill: sha256(skillContent),
        },
      });
      const result = loadResolutions(['alpha', 'beta'], tmpDir, skillDir);
      expect(result).toBe(false);
    });
    it('skips pair with mismatched skill hash', () => {
      setupInputFiles();
      const skillDir = createSkillDir();
      setupResolutionDir({
        'src/config.ts': {
          base: sha256(baseContent),
          current: sha256(currentContent),
          skill: 'wrong_hash',
        },
      });
      const result = loadResolutions(['alpha', 'beta'], tmpDir, skillDir);
      expect(result).toBe(false);
    });
    it('skips pair with no file_hashes entry for that file', () => {
      setupInputFiles();
      const skillDir = createSkillDir();
      // file_hashes exists but doesn't include src/config.ts
      setupResolutionDir({});
      const result = loadResolutions(['alpha', 'beta'], tmpDir, skillDir);
      expect(result).toBe(false);
    });
  });
});

View File

@@ -1,4 +1,4 @@
import { execFileSync, execSync } from 'child_process';
import { execSync } from 'child_process';
import crypto from 'crypto';
import fs from 'fs';
import os from 'os';
@@ -18,14 +18,7 @@ import {
readManifest,
} from './manifest.js';
import { loadPathRemap, resolvePathRemap } from './path-remap.js';
import {
cleanupMergeState,
isGitRepo,
mergeFile,
runRerere,
setupRerereAdapter,
} from './merge.js';
import { loadResolutions } from './resolution-cache.js';
import { mergeFile } from './merge.js';
import { computeFileHash, readState, recordSkillApplication, writeState } from './state.js';
import {
mergeDockerComposeServices,
@@ -171,10 +164,6 @@ export async function applySkill(skillDir: string): Promise<ApplyResult> {
// --- Merge modified files ---
const mergeConflicts: string[] = [];
// Load pre-computed resolutions into git's rr-cache before merging
const appliedSkillNames = currentState.applied_skills.map((s) => s.name);
loadResolutions([...appliedSkillNames, manifest.skill], projectRoot, skillDir);
for (const relPath of manifest.modifies) {
const resolvedPath = resolvePathRemap(relPath, pathRemap);
const currentPath = path.join(projectRoot, resolvedPath);
@@ -200,8 +189,6 @@ export async function applySkill(skillDir: string): Promise<ApplyResult> {
}
// Three-way merge: current ← base → skill
// Save current content before merge overwrites it (needed for rerere stage 2 = "ours")
const oursContent = fs.readFileSync(currentPath, 'utf-8');
// git merge-file modifies the first argument in-place, so use a temp copy
const tmpCurrent = path.join(
os.tmpdir(),
@@ -215,36 +202,9 @@ export async function applySkill(skillDir: string): Promise<ApplyResult> {
fs.copyFileSync(tmpCurrent, currentPath);
fs.unlinkSync(tmpCurrent);
} else {
// Copy conflict markers to working tree path BEFORE rerere
// rerere looks at the working tree file at relPath, not at tmpCurrent
// Conflict — copy markers to working tree
fs.copyFileSync(tmpCurrent, currentPath);
fs.unlinkSync(tmpCurrent);
if (isGitRepo()) {
const baseContent = fs.readFileSync(basePath, 'utf-8');
const theirsContent = fs.readFileSync(skillPath, 'utf-8');
setupRerereAdapter(resolvedPath, baseContent, oursContent, theirsContent);
const autoResolved = runRerere(currentPath);
if (autoResolved) {
// rerere resolved the conflict — currentPath now has resolved content
// Record the resolution: git add + git rerere
execFileSync('git', ['add', resolvedPath], { stdio: 'pipe' });
execSync('git rerere', { stdio: 'pipe' });
cleanupMergeState(resolvedPath);
// Unstage the file — cleanupMergeState clears unmerged entries
// but the git add above leaves the file staged at stage 0
try {
execFileSync('git', ['restore', '--staged', resolvedPath], { stdio: 'pipe' });
} catch { /* may fail if file is new or not tracked */ }
continue;
}
cleanupMergeState(resolvedPath);
}
// Unresolved conflict — currentPath already has conflict markers
mergeConflicts.push(relPath);
}
}

View File

@@ -4,8 +4,6 @@ export const BASE_DIR = '.nanoclaw/base';
export const BACKUP_DIR = '.nanoclaw/backup';
export const LOCK_FILE = '.nanoclaw/lock';
export const CUSTOM_DIR = '.nanoclaw/custom';
export const RESOLUTIONS_DIR = '.nanoclaw/resolutions';
export const SHIPPED_RESOLUTIONS_DIR = '.claude/resolutions';
export const SKILLS_SCHEMA_VERSION = '0.1.0';
// Top-level paths to include in base snapshot and upstream extraction.

View File

@@ -7,8 +7,6 @@ export {
CUSTOM_DIR,
LOCK_FILE,
NANOCLAW_DIR,
RESOLUTIONS_DIR,
SHIPPED_RESOLUTIONS_DIR,
STATE_FILE,
} from './constants.js';
export {
@@ -28,11 +26,8 @@ export {
readManifest,
} from './manifest.js';
export {
cleanupMergeState,
isGitRepo,
mergeFile,
runRerere,
setupRerereAdapter,
} from './merge.js';
export {
loadPathRemap,
@@ -44,12 +39,6 @@ export { findSkillDir, replaySkills } from './replay.js';
export type { ReplayOptions, ReplayResult } from './replay.js';
export { uninstallSkill } from './uninstall.js';
export { initSkillsSystem, migrateExisting } from './migrate.js';
export {
clearAllResolutions,
findResolutionDir,
loadResolutions,
saveResolution,
} from './resolution-cache.js';
export { applyUpdate, previewUpdate } from './update.js';
export {
compareSemver,
@@ -76,7 +65,6 @@ export type {
FileOperation,
MergeResult,
RebaseResult,
ResolutionMeta,
SkillManifest,
SkillState,
UninstallResult,

View File

@@ -1,6 +1,4 @@
import { execFileSync, execSync } from 'child_process';
import fs from 'fs';
import path from 'path';
import { MergeResult } from './types.js';
@@ -39,112 +37,3 @@ export function mergeFile(
throw new Error(`git merge-file failed: ${err.message}`);
}
}
/**
 * Set up unmerged index entries so git rerere can record/resolve conflicts.
 *
 * Writes the three merge inputs into the object store, installs them as
 * index stages 1 (base) / 2 (ours) / 3 (theirs) for `filePath`, and writes
 * MERGE_HEAD + MERGE_MSG — the state rerere requires to see a merge in
 * progress.
 *
 * @param filePath     repo-relative path of the conflicted file
 * @param baseContent  common-ancestor content (stage 1)
 * @param oursContent  current working-tree content before merge (stage 2)
 * @param theirsContent incoming content (stage 3)
 */
export function setupRerereAdapter(
  filePath: string,
  baseContent: string,
  oursContent: string,
  theirsContent: string,
): void {
  // No-op outside a git repository.
  if (!isGitRepo()) return;
  const gitDir = execSync('git rev-parse --git-dir', {
    encoding: 'utf-8',
  }).trim();
  // Clean up stale MERGE_HEAD left behind by a previous crash, otherwise
  // the state written below would sit on top of a half-finished merge.
  if (fs.existsSync(path.join(gitDir, 'MERGE_HEAD'))) {
    cleanupMergeState(filePath);
  }
  // Hash all three versions into the git object store (-w writes the blob).
  const baseHash = execSync('git hash-object -w --stdin', {
    input: baseContent,
    encoding: 'utf-8',
  }).trim();
  const oursHash = execSync('git hash-object -w --stdin', {
    input: oursContent,
    encoding: 'utf-8',
  }).trim();
  const theirsHash = execSync('git hash-object -w --stdin', {
    input: theirsContent,
    encoding: 'utf-8',
  }).trim();
  // Create unmerged index entries (stages 1/2/3) via update-index.
  const indexInfo = [
    `100644 ${baseHash} 1\t${filePath}`,
    `100644 ${oursHash} 2\t${filePath}`,
    `100644 ${theirsHash} 3\t${filePath}`,
  ].join('\n');
  execSync('git update-index --index-info', {
    input: indexInfo,
    stdio: ['pipe', 'pipe', 'pipe'],
  });
  // Set MERGE_HEAD and MERGE_MSG (required for rerere to engage).
  const headHash = execSync('git rev-parse HEAD', {
    encoding: 'utf-8',
  }).trim();
  fs.writeFileSync(path.join(gitDir, 'MERGE_HEAD'), headHash + '\n');
  fs.writeFileSync(
    path.join(gitDir, 'MERGE_MSG'),
    `Skill merge: ${filePath}\n`,
  );
}
/**
 * Run `git rerere` and report whether it auto-resolved the conflict in
 * `filePath`. The check reads the working-tree file itself: rerere rewrites
 * the working tree but leaves the index unmerged, so inspecting unmerged
 * index entries would yield a false negative.
 *
 * @returns true when the file no longer contains conflict markers.
 */
export function runRerere(filePath: string): boolean {
  if (!isGitRepo()) return false;
  try {
    execSync('git rerere', { stdio: 'pipe' });
    // Resolved iff no conflict-marker prefix remains in the file.
    const fileText = fs.readFileSync(filePath, 'utf-8');
    return fileText.indexOf('<<<<<<<') === -1;
  } catch {
    // rerere failed or the file is unreadable — treat as unresolved.
    return false;
  }
}
/**
 * Clean up merge state after rerere operations: removes MERGE_HEAD and
 * MERGE_MSG, then resets unmerged index entries.
 *
 * @param filePath when given, only that file's index entries are reset so
 *                 the user's pre-existing staged changes elsewhere survive.
 */
export function cleanupMergeState(filePath?: string): void {
  if (!isGitRepo()) return;
  const gitDir = execSync('git rev-parse --git-dir', {
    encoding: 'utf-8',
  }).trim();
  // Drop the merge marker files if present.
  for (const marker of ['MERGE_HEAD', 'MERGE_MSG']) {
    const markerPath = path.join(gitDir, marker);
    if (fs.existsSync(markerPath)) fs.unlinkSync(markerPath);
  }
  // Reset index entries — scoped to filePath when provided to avoid
  // dropping the user's staged changes in other files.
  try {
    const args = filePath ? ['reset', '--', filePath] : ['reset'];
    execFileSync('git', args, { stdio: 'pipe' });
  } catch {
    // May fail if nothing is staged — safe to ignore.
  }
}

View File

@@ -1,4 +1,4 @@
import { execFileSync, execSync } from 'child_process';
import { execFileSync } from 'child_process';
import crypto from 'crypto';
import fs from 'fs';
import os from 'os';
@@ -8,14 +8,7 @@ import { clearBackup, createBackup, restoreBackup } from './backup.js';
import { BASE_DIR, NANOCLAW_DIR } from './constants.js';
import { copyDir } from './fs-utils.js';
import { acquireLock } from './lock.js';
import {
cleanupMergeState,
isGitRepo,
mergeFile,
runRerere,
setupRerereAdapter,
} from './merge.js';
import { clearAllResolutions } from './resolution-cache.js';
import { mergeFile } from './merge.js';
import { computeFileHash, readState, writeState } from './state.js';
import type { RebaseResult } from './types.js';
@@ -190,9 +183,6 @@ export async function rebase(newBasePath?: string): Promise<RebaseResult> {
continue;
}
// Save "ours" (new base content) before merge overwrites it
const oursContent = newBaseContent;
// Three-way merge: current(new base) ← old-base → saved(modifications)
const tmpSaved = path.join(
os.tmpdir(),
@@ -204,23 +194,6 @@ export async function rebase(newBasePath?: string): Promise<RebaseResult> {
fs.unlinkSync(tmpSaved);
if (!result.clean) {
// Try rerere resolution (three-level model)
if (isGitRepo()) {
const baseContent = fs.readFileSync(oldBasePath, 'utf-8');
setupRerereAdapter(relPath, baseContent, oursContent, saved);
const autoResolved = runRerere(currentPath);
if (autoResolved) {
execFileSync('git', ['add', relPath], { stdio: 'pipe' });
execSync('git rerere', { stdio: 'pipe' });
cleanupMergeState(relPath);
continue;
}
cleanupMergeState(relPath);
}
// Unresolved — conflict markers remain in working tree
mergeConflicts.push(relPath);
}
}
@@ -271,9 +244,6 @@ export async function rebase(newBasePath?: string): Promise<RebaseResult> {
state.rebased_at = now;
writeState(state);
// Clear stale resolution cache (base has changed, old resolutions invalid)
clearAllResolutions(projectRoot);
clearBackup();
return {

View File

@@ -1,4 +1,3 @@
import { execFileSync, execSync } from 'child_process';
import crypto from 'crypto';
import fs from 'fs';
import os from 'os';
@@ -7,15 +6,8 @@ import path from 'path';
import { BASE_DIR, NANOCLAW_DIR } from './constants.js';
import { copyDir } from './fs-utils.js';
import { readManifest } from './manifest.js';
import {
cleanupMergeState,
isGitRepo,
mergeFile,
runRerere,
setupRerereAdapter,
} from './merge.js';
import { mergeFile } from './merge.js';
import { loadPathRemap, resolvePathRemap } from './path-remap.js';
import { loadResolutions } from './resolution-cache.js';
import {
mergeDockerComposeServices,
mergeEnvAdditions,
@@ -115,13 +107,6 @@ export async function replaySkills(
}
}
// 3. Load pre-computed resolutions into git's rr-cache before replaying
// Pass the last skill's dir — it's the one applied on top, producing conflicts
const lastSkillDir = options.skills.length > 0
? options.skillDirs[options.skills[options.skills.length - 1]]
: undefined;
loadResolutions(options.skills, projectRoot, lastSkillDir);
// Replay each skill in order
// Collect structured ops for batch application
const allNpmDeps: Record<string, string> = {};
@@ -190,7 +175,6 @@ export async function replaySkills(
fs.copyFileSync(currentPath, basePath);
}
const oursContent = fs.readFileSync(currentPath, 'utf-8');
const tmpCurrent = path.join(
os.tmpdir(),
`nanoclaw-replay-${crypto.randomUUID()}-${path.basename(relPath)}`,
@@ -205,29 +189,6 @@ export async function replaySkills(
} else {
fs.copyFileSync(tmpCurrent, currentPath);
fs.unlinkSync(tmpCurrent);
if (isGitRepo()) {
const baseContent = fs.readFileSync(basePath, 'utf-8');
const theirsContent = fs.readFileSync(skillPath, 'utf-8');
setupRerereAdapter(
resolvedPath,
baseContent,
oursContent,
theirsContent,
);
const autoResolved = runRerere(currentPath);
if (autoResolved) {
execFileSync('git', ['add', resolvedPath], { stdio: 'pipe' });
execSync('git rerere', { stdio: 'pipe' });
cleanupMergeState(resolvedPath);
continue;
}
cleanupMergeState(resolvedPath);
}
skillConflicts.push(resolvedPath);
}
}

View File

@@ -1,269 +0,0 @@
import { execSync } from 'child_process';
import fs from 'fs';
import path from 'path';
import { parse, stringify } from 'yaml';
import { NANOCLAW_DIR, RESOLUTIONS_DIR, SHIPPED_RESOLUTIONS_DIR } from './constants.js';
import { computeFileHash } from './state.js';
import { FileInputHashes, ResolutionMeta } from './types.js';
/**
 * Build the cache-directory key for a set of skill identifiers.
 * Skills are sorted alphabetically (without mutating the input) and
 * joined with "+", so the key is order-independent.
 */
function resolutionKey(skills: string[]): string {
  const sorted = Array.from(skills).sort();
  return sorted.join('+');
}
/**
 * Find the resolution directory for a skill combination.
 * Shipped resolutions (.claude/resolutions/) take precedence over
 * project-level ones (.nanoclaw/resolutions/).
 *
 * @returns absolute path of the first existing directory, or null.
 */
export function findResolutionDir(
  skills: string[],
  projectRoot: string,
): string | null {
  const key = resolutionKey(skills);
  const candidates = [SHIPPED_RESOLUTIONS_DIR, RESOLUTIONS_DIR].map((root) =>
    path.join(projectRoot, root, key),
  );
  return candidates.find((dir) => fs.existsSync(dir)) ?? null;
}
/**
 * Load cached resolutions into the local git rerere cache (rr-cache).
 *
 * For each preimage/resolution pair found under the resolution directory,
 * verifies the file_hashes recorded in meta.yaml against the live base,
 * working-tree, and skill inputs; pairs whose inputs have drifted are
 * skipped so stale resolutions are never replayed.
 *
 * @param skills      skill combination identifying the cache entry
 * @param projectRoot absolute project root
 * @param skillDir    skill package dir whose modify/<relPath> is hashed
 * @returns true if at least one pair was loaded into rr-cache.
 */
export function loadResolutions(
  skills: string[],
  projectRoot: string,
  skillDir: string,
): boolean {
  const resDir = findResolutionDir(skills, projectRoot);
  if (!resDir) return false;
  const metaPath = path.join(resDir, 'meta.yaml');
  if (!fs.existsSync(metaPath)) return false;
  let meta: ResolutionMeta;
  try {
    meta = parse(fs.readFileSync(metaPath, 'utf-8')) as ResolutionMeta;
  } catch {
    // Corrupt/unparseable meta.yaml — treat the cache entry as absent.
    return false;
  }
  // input_hashes must be present; its absence marks a pre-hash legacy entry.
  if (!meta.input_hashes) return false;
  // Find all preimage/resolution pairs under the entry.
  const pairs = findPreimagePairs(resDir, resDir);
  if (pairs.length === 0) return false;
  // Get the git directory (made absolute so rr-cache paths resolve).
  let gitDir: string;
  try {
    gitDir = execSync('git rev-parse --git-dir', {
      encoding: 'utf-8',
      cwd: projectRoot,
    }).trim();
    if (!path.isAbsolute(gitDir)) {
      gitDir = path.join(projectRoot, gitDir);
    }
  } catch {
    // Not a git repo — nowhere to load resolutions into.
    return false;
  }
  const rrCacheDir = path.join(gitDir, 'rr-cache');
  let loadedAny = false;
  for (const { relPath, preimage, resolution } of pairs) {
    // Verify file_hashes — skip pair if hashes don't match.
    const expected = meta.file_hashes?.[relPath];
    if (!expected) {
      console.log(`resolution-cache: skipping ${relPath} — no file_hashes in meta`);
      continue;
    }
    const basePath = path.join(projectRoot, NANOCLAW_DIR, 'base', relPath);
    const currentPath = path.join(projectRoot, relPath);
    const skillModifyPath = path.join(skillDir, 'modify', relPath);
    if (!fs.existsSync(basePath) || !fs.existsSync(currentPath) || !fs.existsSync(skillModifyPath)) {
      console.log(`resolution-cache: skipping ${relPath} — input files not found`);
      continue;
    }
    // All three inputs (base, current, skill) must match the recorded hashes.
    const baseHash = computeFileHash(basePath);
    if (baseHash !== expected.base) {
      console.log(`resolution-cache: skipping ${relPath} — base hash mismatch`);
      continue;
    }
    const currentHash = computeFileHash(currentPath);
    if (currentHash !== expected.current) {
      console.log(`resolution-cache: skipping ${relPath} — current hash mismatch`);
      continue;
    }
    const skillHash = computeFileHash(skillModifyPath);
    if (skillHash !== expected.skill) {
      console.log(`resolution-cache: skipping ${relPath} — skill hash mismatch`);
      continue;
    }
    const preimageContent = fs.readFileSync(preimage, 'utf-8');
    const resolutionContent = fs.readFileSync(resolution, 'utf-8');
    // Git rerere uses its own internal hash format (not git hash-object).
    // The rerere hash is stored in a .hash sidecar next to the preimage,
    // captured when saveResolution() read the rr-cache after rerere recorded it.
    const hashSidecar = preimage + '.hash';
    if (!fs.existsSync(hashSidecar)) {
      // No hash recorded — skip this pair (legacy format).
      continue;
    }
    const hash = fs.readFileSync(hashSidecar, 'utf-8').trim();
    if (!hash) continue;
    // Create the rr-cache entry: preimage + postimage under the rerere hash.
    const cacheDir = path.join(rrCacheDir, hash);
    fs.mkdirSync(cacheDir, { recursive: true });
    fs.writeFileSync(path.join(cacheDir, 'preimage'), preimageContent);
    fs.writeFileSync(path.join(cacheDir, 'postimage'), resolutionContent);
    loadedAny = true;
  }
  return loadedAny;
}
/**
 * Save conflict resolutions to the project-level resolution cache
 * (.nanoclaw/resolutions/<sorted-skill-key>/).
 *
 * For each file, writes a `.preimage`/`.resolution` pair plus a
 * `.preimage.hash` sidecar holding git's internal rerere hash (captured by
 * matching our preimage content against the repo's rr-cache entries).
 * Finally writes meta.yaml with the skill set and per-file input hashes.
 *
 * @param skills      skill combination; sorted to form the cache key
 * @param files       per-file preimage/resolution contents and input hashes
 * @param meta        caller-supplied meta fields; missing fields get defaults
 * @param projectRoot absolute project root
 */
export function saveResolution(
  skills: string[],
  files: { relPath: string; preimage: string; resolution: string; inputHashes: FileInputHashes }[],
  meta: Partial<ResolutionMeta>,
  projectRoot: string,
): void {
  const key = resolutionKey(skills);
  const resDir = path.join(projectRoot, RESOLUTIONS_DIR, key);
  // Fix: create the resolution dir up front. Previously it was only created
  // per-file inside the loop, so an empty `files` array made the meta.yaml
  // write at the end throw ENOENT.
  fs.mkdirSync(resDir, { recursive: true });
  // Locate the git rr-cache directory so actual rerere hashes can be captured.
  let rrCacheDir: string | null = null;
  try {
    let gitDir = execSync('git rev-parse --git-dir', {
      encoding: 'utf-8',
      cwd: projectRoot,
    }).trim();
    if (!path.isAbsolute(gitDir)) {
      gitDir = path.join(projectRoot, gitDir);
    }
    rrCacheDir = path.join(gitDir, 'rr-cache');
  } catch {
    // Not a git repo — skip hash capture; pairs are still written.
  }
  // Write preimage/resolution pairs (plus hash sidecar when available).
  for (const file of files) {
    const preimagePath = path.join(resDir, file.relPath + '.preimage');
    const resolutionPath = path.join(resDir, file.relPath + '.resolution');
    fs.mkdirSync(path.dirname(preimagePath), { recursive: true });
    fs.writeFileSync(preimagePath, file.preimage);
    fs.writeFileSync(resolutionPath, file.resolution);
    // Capture the actual rerere hash by finding the rr-cache entry whose
    // preimage matches ours.
    if (rrCacheDir && fs.existsSync(rrCacheDir)) {
      const rerereHash = findRerereHash(rrCacheDir, file.preimage);
      if (rerereHash) {
        fs.writeFileSync(preimagePath + '.hash', rerereHash);
      }
    }
  }
  // Collect per-file input hashes for meta.yaml.
  const fileHashes: Record<string, FileInputHashes> = {};
  for (const file of files) {
    fileHashes[file.relPath] = file.inputHashes;
  }
  // Build full meta with defaults; caller-supplied fields take precedence.
  const fullMeta: ResolutionMeta = {
    skills: [...skills].sort(),
    apply_order: meta.apply_order ?? skills,
    core_version: meta.core_version ?? '',
    resolved_at: meta.resolved_at ?? new Date().toISOString(),
    tested: meta.tested ?? false,
    test_passed: meta.test_passed ?? false,
    resolution_source: meta.resolution_source ?? 'user',
    input_hashes: meta.input_hashes ?? {},
    output_hash: meta.output_hash ?? '',
    file_hashes: { ...fileHashes, ...meta.file_hashes },
  };
  fs.writeFileSync(path.join(resDir, 'meta.yaml'), stringify(fullMeta));
}
/**
 * Remove every project-level resolution cache entry, leaving an empty
 * resolutions directory. Called after rebase: the base has changed, so all
 * previously cached resolutions are invalid.
 */
export function clearAllResolutions(projectRoot: string): void {
  const cacheRoot = path.join(projectRoot, RESOLUTIONS_DIR);
  // Nothing to clear when the directory was never created.
  if (!fs.existsSync(cacheRoot)) return;
  fs.rmSync(cacheRoot, { recursive: true, force: true });
  fs.mkdirSync(cacheRoot, { recursive: true });
}
/**
 * Recursively collect preimage/resolution file pairs under `dir`.
 * A pair exists when `<name>.preimage` has a sibling `<name>.resolution`;
 * relPath is the path relative to `baseDir` with the suffix stripped.
 */
function findPreimagePairs(
  dir: string,
  baseDir: string,
): { relPath: string; preimage: string; resolution: string }[] {
  const found: { relPath: string; preimage: string; resolution: string }[] = [];
  const entries = fs.readdirSync(dir, { withFileTypes: true });
  for (const entry of entries) {
    const entryPath = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      // Descend into subdirectories, keeping relPath anchored at baseDir.
      for (const pair of findPreimagePairs(entryPath, baseDir)) {
        found.push(pair);
      }
      continue;
    }
    const isPreimage =
      entry.name.endsWith('.preimage') && !entry.name.endsWith('.preimage.hash');
    if (!isPreimage) continue;
    const resolutionPath = entryPath.replace(/\.preimage$/, '.resolution');
    // Only emit complete pairs — an orphan preimage is ignored.
    if (!fs.existsSync(resolutionPath)) continue;
    found.push({
      relPath: path.relative(baseDir, entryPath).replace(/\.preimage$/, ''),
      preimage: entryPath,
      resolution: resolutionPath,
    });
  }
  return found;
}
/**
 * Find the rerere hash for a preimage by scanning rr-cache entries.
 * Each rr-cache subdirectory is named by git's internal rerere hash;
 * returns the name of the entry whose `preimage` file matches the given
 * content byte-for-byte, or null when no entry matches.
 */
function findRerereHash(rrCacheDir: string, preimageContent: string): string | null {
  if (!fs.existsSync(rrCacheDir)) return null;
  const entries = fs.readdirSync(rrCacheDir, { withFileTypes: true });
  for (const entry of entries) {
    if (!entry.isDirectory()) continue;
    const candidate = path.join(rrCacheDir, entry.name, 'preimage');
    if (!fs.existsSync(candidate)) continue;
    if (fs.readFileSync(candidate, 'utf-8') === preimageContent) {
      return entry.name;
    }
  }
  return null;
}

View File

@@ -76,25 +76,6 @@ export interface CustomModification {
patch_file: string;
}
/** SHA-256 digests of the three inputs to one file's three-way merge. */
export interface FileInputHashes {
  base: string; // SHA-256 of .nanoclaw/base/<relPath>
  current: string; // SHA-256 of working tree <relPath> before this merge
  skill: string; // SHA-256 of skill's modify/<relPath>
}
/** Metadata stored in meta.yaml alongside a cached conflict resolution. */
export interface ResolutionMeta {
  skills: string[]; // sorted skill names forming the cache key
  apply_order: string[]; // order skills were applied when resolving
  core_version: string;
  resolved_at: string; // ISO-8601 timestamp
  tested: boolean;
  test_passed: boolean;
  resolution_source: 'maintainer' | 'user' | 'claude';
  input_hashes: Record<string, string>;
  output_hash: string;
  file_hashes: Record<string, FileInputHashes>; // keyed by repo-relative path
}
export interface UpdatePreview {
currentVersion: string;
newVersion: string;

View File

@@ -11,13 +11,7 @@ import { BASE_DIR, NANOCLAW_DIR } from './constants.js';
import { copyDir } from './fs-utils.js';
import { isCustomizeActive } from './customize.js';
import { acquireLock } from './lock.js';
import {
cleanupMergeState,
isGitRepo,
mergeFile,
runRerere,
setupRerereAdapter,
} from './merge.js';
import { mergeFile } from './merge.js';
import { recordPathRemap } from './path-remap.js';
import { computeFileHash, readState, writeState } from './state.js';
import {
@@ -172,8 +166,6 @@ export async function applyUpdate(newCorePath: string): Promise<UpdateResult> {
}
// Three-way merge: current ← base → newCore
// Save current content before merge overwrites it (needed for rerere stage 2 = "ours")
const oursContent = fs.readFileSync(currentPath, 'utf-8');
const tmpCurrent = path.join(
os.tmpdir(),
`nanoclaw-update-${crypto.randomUUID()}-${path.basename(relPath)}`,
@@ -186,27 +178,9 @@ export async function applyUpdate(newCorePath: string): Promise<UpdateResult> {
fs.copyFileSync(tmpCurrent, currentPath);
fs.unlinkSync(tmpCurrent);
} else {
// Copy conflict markers to working tree path before rerere
// Conflict — copy markers to working tree
fs.copyFileSync(tmpCurrent, currentPath);
fs.unlinkSync(tmpCurrent);
if (isGitRepo()) {
const baseContent = fs.readFileSync(basePath, 'utf-8');
const theirsContent = fs.readFileSync(newCoreSrcPath, 'utf-8');
setupRerereAdapter(relPath, baseContent, oursContent, theirsContent);
const autoResolved = runRerere(currentPath);
if (autoResolved) {
execFileSync('git', ['add', relPath], { stdio: 'pipe' });
execSync('git rerere', { stdio: 'pipe' });
cleanupMergeState(relPath);
continue;
}
cleanupMergeState(relPath);
}
mergeConflicts.push(relPath);
}
}