refactor: remove deterministic caching system from skills engine (#453)

This commit is contained in:
Gabi Simons
2026-02-24 15:49:29 +02:00
committed by GitHub
parent 1448a14a94
commit 29a5dafe01
15 changed files with 16 additions and 1107 deletions

32
package-lock.json generated
View File

@@ -144,7 +144,6 @@
"integrity": "sha512-mehfKSMWjjNol8659Z8KxEMrdSJDDot5SXMq00dM8BN4o+CLNXQ0xH2V7EchNHV4RmbZLmmPdEaXZc5H2FXmDg==",
"license": "MIT",
"optional": true,
"peer": true,
"dependencies": {
"tslib": "^2.4.0"
}
@@ -611,7 +610,6 @@
"resolved": "https://registry.npmjs.org/@img/colour/-/colour-1.0.0.tgz",
"integrity": "sha512-A5P/LfWGFSl6nsckYtjw9da+19jB8hkJ6ACTGcDfEJ0aE+l2n2El7dsVM7UVHZQ9s2lmYMWlrS21YLy2IR1LUw==",
"license": "MIT",
"peer": true,
"engines": {
"node": ">=18"
}
@@ -628,7 +626,6 @@
"os": [
"darwin"
],
"peer": true,
"engines": {
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
},
@@ -651,7 +648,6 @@
"os": [
"darwin"
],
"peer": true,
"engines": {
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
},
@@ -674,7 +670,6 @@
"os": [
"darwin"
],
"peer": true,
"funding": {
"url": "https://opencollective.com/libvips"
}
@@ -691,7 +686,6 @@
"os": [
"darwin"
],
"peer": true,
"funding": {
"url": "https://opencollective.com/libvips"
}
@@ -708,7 +702,6 @@
"os": [
"linux"
],
"peer": true,
"funding": {
"url": "https://opencollective.com/libvips"
}
@@ -725,7 +718,6 @@
"os": [
"linux"
],
"peer": true,
"funding": {
"url": "https://opencollective.com/libvips"
}
@@ -742,7 +734,6 @@
"os": [
"linux"
],
"peer": true,
"funding": {
"url": "https://opencollective.com/libvips"
}
@@ -759,7 +750,6 @@
"os": [
"linux"
],
"peer": true,
"funding": {
"url": "https://opencollective.com/libvips"
}
@@ -776,7 +766,6 @@
"os": [
"linux"
],
"peer": true,
"funding": {
"url": "https://opencollective.com/libvips"
}
@@ -793,7 +782,6 @@
"os": [
"linux"
],
"peer": true,
"funding": {
"url": "https://opencollective.com/libvips"
}
@@ -810,7 +798,6 @@
"os": [
"linux"
],
"peer": true,
"funding": {
"url": "https://opencollective.com/libvips"
}
@@ -827,7 +814,6 @@
"os": [
"linux"
],
"peer": true,
"funding": {
"url": "https://opencollective.com/libvips"
}
@@ -844,7 +830,6 @@
"os": [
"linux"
],
"peer": true,
"engines": {
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
},
@@ -867,7 +852,6 @@
"os": [
"linux"
],
"peer": true,
"engines": {
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
},
@@ -890,7 +874,6 @@
"os": [
"linux"
],
"peer": true,
"engines": {
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
},
@@ -913,7 +896,6 @@
"os": [
"linux"
],
"peer": true,
"engines": {
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
},
@@ -936,7 +918,6 @@
"os": [
"linux"
],
"peer": true,
"engines": {
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
},
@@ -959,7 +940,6 @@
"os": [
"linux"
],
"peer": true,
"engines": {
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
},
@@ -982,7 +962,6 @@
"os": [
"linux"
],
"peer": true,
"engines": {
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
},
@@ -1005,7 +984,6 @@
"os": [
"linux"
],
"peer": true,
"engines": {
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
},
@@ -1025,7 +1003,6 @@
],
"license": "Apache-2.0 AND LGPL-3.0-or-later AND MIT",
"optional": true,
"peer": true,
"dependencies": {
"@emnapi/runtime": "^1.7.0"
},
@@ -1048,7 +1025,6 @@
"os": [
"win32"
],
"peer": true,
"engines": {
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
},
@@ -1068,7 +1044,6 @@
"os": [
"win32"
],
"peer": true,
"engines": {
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
},
@@ -1088,7 +1063,6 @@
"os": [
"win32"
],
"peer": true,
"engines": {
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
},
@@ -2504,6 +2478,7 @@
"resolved": "https://registry.npmjs.org/keyv/-/keyv-5.6.0.tgz",
"integrity": "sha512-CYDD3SOtsHtyXeEORYRx2qBtpDJFjRTGXUtmNEMGyzYOKj1TE3tycdlho7kA1Ufx9OYWZzg52QFBGALTirzDSw==",
"license": "MIT",
"peer": true,
"dependencies": {
"@keyv/serialize": "^1.1.1"
}
@@ -2862,6 +2837,7 @@
"integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==",
"dev": true,
"license": "MIT",
"peer": true,
"engines": {
"node": ">=12"
},
@@ -3618,6 +3594,7 @@
"integrity": "sha512-5C1sg4USs1lfG0GFb2RLXsdpXqBSEhAaA/0kPL01wxzpMqLILNxIxIOKiILz+cdg/pLnOUxFYOR5yhHU666wbw==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"esbuild": "~0.27.0",
"get-tsconfig": "^4.7.5"
@@ -3688,6 +3665,7 @@
"integrity": "sha512-w+N7Hifpc3gRjZ63vYBXA56dvvRlNWRczTdmCBBa+CotUzAPf5b7YMdMR/8CQoeYE5LX3W4wj6RYTgonm1b9DA==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"esbuild": "^0.27.0",
"fdir": "^6.5.0",
@@ -3763,6 +3741,7 @@
"integrity": "sha512-hOQuK7h0FGKgBAas7v0mSAsnvrIgAvWmRFjmzpJ7SwFHH3g1k2u37JtYwOwmEKhK6ZO3v9ggDBBm0La1LCK4uQ==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"@vitest/expect": "4.0.18",
"@vitest/mocker": "4.0.18",
@@ -3916,6 +3895,7 @@
"resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.2.tgz",
"integrity": "sha512-mplynKqc1C2hTVYxd0PU2xQAc22TI1vShAYGksCCfxbn/dFwnHTNi1bvYsBTkhdUNtGIf5xNOg938rrSSYvS9A==",
"license": "ISC",
"peer": true,
"bin": {
"yaml": "bin.mjs"
},

View File

@@ -1,170 +0,0 @@
/**
* Generate rerere-compatible resolution files for known skill combinations.
*
* For each conflicting file when applying discord after telegram:
* 1. Run merge-file to produce conflict markers
* 2. Set up rerere adapter — git records preimage and assigns a hash
* 3. Capture the hash by diffing rr-cache before/after
* 4. Write the correct resolution, git add + git rerere to record postimage
* 5. Save preimage, resolution, hash sidecar, and meta to .claude/resolutions/
*/
import crypto from 'crypto';
import { execSync } from 'child_process';
import fs from 'fs';
import os from 'os';
import path from 'path';
import { stringify } from 'yaml';
import {
cleanupMergeState,
mergeFile,
setupRerereAdapter,
} from '../skills-engine/merge.js';
import type { FileInputHashes } from '../skills-engine/types.js';
function sha256(filePath: string): string {
const content = fs.readFileSync(filePath);
return crypto.createHash('sha256').update(content).digest('hex');
}
const projectRoot = process.cwd();
const baseDir = '.nanoclaw/base';
// The files that conflict when applying discord after telegram
const conflictFiles = ['src/index.ts', 'src/config.ts', 'src/routing.test.ts'];
const telegramModify = '.claude/skills/add-telegram/modify';
const discordModify = '.claude/skills/add-discord/modify';
const shippedResDir = path.join(projectRoot, '.claude', 'resolutions', 'discord+telegram');
// Get git rr-cache directory
const gitDir = execSync('git rev-parse --git-dir', { encoding: 'utf-8', cwd: projectRoot }).trim();
const rrCacheDir = path.join(
path.isAbsolute(gitDir) ? gitDir : path.join(projectRoot, gitDir),
'rr-cache',
);
function getRrCacheEntries(): Set<string> {
if (!fs.existsSync(rrCacheDir)) return new Set();
return new Set(fs.readdirSync(rrCacheDir));
}
// Clear rr-cache to start fresh
if (fs.existsSync(rrCacheDir)) {
fs.rmSync(rrCacheDir, { recursive: true });
}
fs.mkdirSync(rrCacheDir, { recursive: true });
// Prepare output directory
if (fs.existsSync(shippedResDir)) {
fs.rmSync(shippedResDir, { recursive: true });
}
const results: { relPath: string; hash: string }[] = [];
const fileHashes: Record<string, FileInputHashes> = {};
for (const relPath of conflictFiles) {
const basePath = path.join(projectRoot, baseDir, relPath);
const oursPath = path.join(projectRoot, telegramModify, relPath);
const theirsPath = path.join(projectRoot, discordModify, relPath);
// Resolution = the correct combined file. Read from existing .resolution files.
const existingResFile = path.join(shippedResDir, relPath + '.resolution');
// The .resolution files were deleted above, so read from the backup copy
const resolutionContent = (() => {
// Check if we have a backup from a previous run
const backupPath = path.join(projectRoot, '.claude', 'resolutions', '_backup', relPath + '.resolution');
if (fs.existsSync(backupPath)) return fs.readFileSync(backupPath, 'utf-8');
// Fall back to working tree (only works if both skills are applied)
const wtPath = path.join(projectRoot, relPath);
return fs.readFileSync(wtPath, 'utf-8');
})();
// Do the merge to produce conflict markers
const tmpFile = path.join(os.tmpdir(), `nanoclaw-gen-${Date.now()}-${path.basename(relPath)}`);
fs.copyFileSync(oursPath, tmpFile);
const result = mergeFile(tmpFile, basePath, theirsPath);
if (result.clean) {
console.log(`${relPath}: clean merge, no resolution needed`);
fs.unlinkSync(tmpFile);
continue;
}
// Compute input file hashes for this conflicted file
fileHashes[relPath] = {
base: sha256(basePath),
current: sha256(oursPath), // "ours" = telegram's modify (current state after first skill)
skill: sha256(theirsPath), // "theirs" = discord's modify (the skill being applied)
};
const preimageContent = fs.readFileSync(tmpFile, 'utf-8');
fs.unlinkSync(tmpFile);
// Save original working tree file to restore later
const origContent = fs.readFileSync(path.join(projectRoot, relPath), 'utf-8');
// Write conflict markers to working tree for rerere
fs.writeFileSync(path.join(projectRoot, relPath), preimageContent);
// Track rr-cache entries before
const entriesBefore = getRrCacheEntries();
// Set up rerere adapter and let git record the preimage
const baseContent = fs.readFileSync(basePath, 'utf-8');
const oursContent = fs.readFileSync(oursPath, 'utf-8');
const theirsContent = fs.readFileSync(theirsPath, 'utf-8');
setupRerereAdapter(relPath, baseContent, oursContent, theirsContent);
execSync('git rerere', { stdio: 'pipe', cwd: projectRoot });
// Find the new rr-cache entry (the hash)
const entriesAfter = getRrCacheEntries();
const newEntries = [...entriesAfter].filter((e) => !entriesBefore.has(e));
if (newEntries.length !== 1) {
console.error(`${relPath}: expected 1 new rr-cache entry, got ${newEntries.length}`);
cleanupMergeState(relPath);
fs.writeFileSync(path.join(projectRoot, relPath), origContent);
continue;
}
const hash = newEntries[0];
// Write the resolution and record it
fs.writeFileSync(path.join(projectRoot, relPath), resolutionContent);
execSync(`git add "${relPath}"`, { stdio: 'pipe', cwd: projectRoot });
execSync('git rerere', { stdio: 'pipe', cwd: projectRoot });
// Clean up
cleanupMergeState(relPath);
fs.writeFileSync(path.join(projectRoot, relPath), origContent);
// Save to .claude/resolutions/
const outDir = path.join(shippedResDir, path.dirname(relPath));
fs.mkdirSync(outDir, { recursive: true });
const baseName = path.join(shippedResDir, relPath);
// Copy preimage and postimage directly from rr-cache (normalized by git)
fs.copyFileSync(path.join(rrCacheDir, hash, 'preimage'), baseName + '.preimage');
fs.writeFileSync(baseName + '.resolution', resolutionContent);
fs.writeFileSync(baseName + '.preimage.hash', hash);
results.push({ relPath, hash });
console.log(`${relPath}: hash=${hash}`);
}
// Write meta.yaml
const meta = {
skills: ['discord', 'telegram'],
apply_order: ['telegram', 'discord'],
resolved_at: new Date().toISOString(),
tested: true,
test_passed: true,
resolution_source: 'generated',
input_hashes: {},
output_hash: '',
file_hashes: fileHashes,
};
fs.writeFileSync(path.join(shippedResDir, 'meta.yaml'), stringify(meta));
console.log(`\nGenerated ${results.length} resolution(s) in .claude/resolutions/discord+telegram/`);

View File

@@ -6,7 +6,6 @@ import {
BACKUP_DIR,
LOCK_FILE,
CUSTOM_DIR,
RESOLUTIONS_DIR,
SKILLS_SCHEMA_VERSION,
} from '../constants.js';
@@ -18,7 +17,6 @@ describe('constants', () => {
BACKUP_DIR,
LOCK_FILE,
CUSTOM_DIR,
RESOLUTIONS_DIR,
SKILLS_SCHEMA_VERSION,
};
@@ -30,7 +28,7 @@ describe('constants', () => {
});
it('path constants use forward slashes and .nanoclaw prefix', () => {
const pathConstants = [BASE_DIR, BACKUP_DIR, LOCK_FILE, CUSTOM_DIR, RESOLUTIONS_DIR];
const pathConstants = [BASE_DIR, BACKUP_DIR, LOCK_FILE, CUSTOM_DIR];
for (const p of pathConstants) {
expect(p).not.toContain('\\');
expect(p).toMatch(/^\.nanoclaw\//);

View File

@@ -1,8 +1,7 @@
import { execSync } from 'child_process';
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import fs from 'fs';
import path from 'path';
import { isGitRepo, mergeFile, setupRerereAdapter } from '../merge.js';
import { isGitRepo, mergeFile } from '../merge.js';
import { createTempDir, initGitRepo, cleanup } from './test-helpers.js';
describe('merge', () => {
@@ -51,31 +50,6 @@ describe('merge', () => {
expect(merged).toContain('line3-modified');
});
it('setupRerereAdapter cleans stale MERGE_HEAD before proceeding', () => {
// Simulate a stale MERGE_HEAD from a previous crash
const gitDir = execSync('git rev-parse --git-dir', {
cwd: tmpDir,
encoding: 'utf-8',
}).trim();
const headHash = execSync('git rev-parse HEAD', {
cwd: tmpDir,
encoding: 'utf-8',
}).trim();
fs.writeFileSync(path.join(gitDir, 'MERGE_HEAD'), headHash + '\n');
fs.writeFileSync(path.join(gitDir, 'MERGE_MSG'), 'stale merge\n');
// Write a file for the adapter to work with
fs.writeFileSync(path.join(tmpDir, 'test.txt'), 'conflicted content');
// setupRerereAdapter should not throw despite stale MERGE_HEAD
expect(() =>
setupRerereAdapter('test.txt', 'base', 'ours', 'theirs'),
).not.toThrow();
// MERGE_HEAD should still exist (newly written by setupRerereAdapter)
expect(fs.existsSync(path.join(gitDir, 'MERGE_HEAD'))).toBe(true);
});
it('conflict with overlapping changes', () => {
const base = path.join(tmpDir, 'base.txt');
const current = path.join(tmpDir, 'current.txt');

View File

@@ -239,48 +239,6 @@ describe('rebase', () => {
expect(baseConfig).toContain('skill-b');
});
it('rebase clears resolution cache', async () => {
// Set up base + working tree
const baseDir = path.join(tmpDir, '.nanoclaw', 'base', 'src');
fs.mkdirSync(baseDir, { recursive: true });
fs.writeFileSync(path.join(baseDir, 'index.ts'), 'const x = 1;\n');
fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
fs.writeFileSync(
path.join(tmpDir, 'src', 'index.ts'),
'const x = 1;\n// skill\n',
);
// Create a fake resolution cache entry
const resDir = path.join(tmpDir, '.nanoclaw', 'resolutions', 'skill-a+skill-b');
fs.mkdirSync(resDir, { recursive: true });
fs.writeFileSync(path.join(resDir, 'meta.yaml'), 'skills: [skill-a, skill-b]\n');
writeState(tmpDir, {
skills_system_version: '0.1.0',
core_version: '1.0.0',
applied_skills: [
{
name: 'my-skill',
version: '1.0.0',
applied_at: new Date().toISOString(),
file_hashes: { 'src/index.ts': 'hash' },
},
],
});
initGitRepo(tmpDir);
const result = await rebase();
expect(result.success).toBe(true);
// Resolution cache should be cleared
const resolutions = fs.readdirSync(
path.join(tmpDir, '.nanoclaw', 'resolutions'),
);
expect(resolutions).toHaveLength(0);
});
it('rebase with new base: base updated, changes merged', async () => {
// Set up current base (multi-line so changes don't conflict)
const baseDir = path.join(tmpDir, '.nanoclaw', 'base');

View File

@@ -1,283 +0,0 @@
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import crypto from 'crypto';
import fs from 'fs';
import path from 'path';
import { parse, stringify } from 'yaml';
import {
findResolutionDir,
loadResolutions,
saveResolution,
} from '../resolution-cache.js';
import { createTempDir, setupNanoclawDir, initGitRepo, cleanup } from './test-helpers.js';
function sha256(content: string): string {
return crypto.createHash('sha256').update(content).digest('hex');
}
const dummyHashes = { base: 'aaa', current: 'bbb', skill: 'ccc' };
describe('resolution-cache', () => {
let tmpDir: string;
const originalCwd = process.cwd();
beforeEach(() => {
tmpDir = createTempDir();
setupNanoclawDir(tmpDir);
process.chdir(tmpDir);
});
afterEach(() => {
process.chdir(originalCwd);
cleanup(tmpDir);
});
it('findResolutionDir returns null when not found', () => {
const result = findResolutionDir(['skill-a', 'skill-b'], tmpDir);
expect(result).toBeNull();
});
it('saveResolution creates directory structure with files and meta', () => {
saveResolution(
['skill-b', 'skill-a'],
[{ relPath: 'src/config.ts', preimage: 'conflict content', resolution: 'resolved content', inputHashes: dummyHashes }],
{ core_version: '1.0.0' },
tmpDir,
);
// Skills are sorted, so key is "skill-a+skill-b"
const resDir = path.join(tmpDir, '.nanoclaw', 'resolutions', 'skill-a+skill-b');
expect(fs.existsSync(resDir)).toBe(true);
// Check preimage and resolution files exist
expect(fs.existsSync(path.join(resDir, 'src/config.ts.preimage'))).toBe(true);
expect(fs.existsSync(path.join(resDir, 'src/config.ts.resolution'))).toBe(true);
// Check meta.yaml exists and has expected fields
const metaPath = path.join(resDir, 'meta.yaml');
expect(fs.existsSync(metaPath)).toBe(true);
const meta = parse(fs.readFileSync(metaPath, 'utf-8'));
expect(meta.core_version).toBe('1.0.0');
expect(meta.skills).toEqual(['skill-a', 'skill-b']);
});
it('saveResolution writes file_hashes to meta.yaml', () => {
const hashes = {
base: sha256('base content'),
current: sha256('current content'),
skill: sha256('skill content'),
};
saveResolution(
['alpha', 'beta'],
[{ relPath: 'src/config.ts', preimage: 'pre', resolution: 'post', inputHashes: hashes }],
{},
tmpDir,
);
const resDir = path.join(tmpDir, '.nanoclaw', 'resolutions', 'alpha+beta');
const meta = parse(fs.readFileSync(path.join(resDir, 'meta.yaml'), 'utf-8'));
expect(meta.file_hashes).toBeDefined();
expect(meta.file_hashes['src/config.ts']).toEqual(hashes);
});
it('findResolutionDir returns path after save', () => {
saveResolution(
['alpha', 'beta'],
[{ relPath: 'file.ts', preimage: 'pre', resolution: 'post', inputHashes: dummyHashes }],
{},
tmpDir,
);
const result = findResolutionDir(['alpha', 'beta'], tmpDir);
expect(result).not.toBeNull();
expect(result).toContain('alpha+beta');
});
it('findResolutionDir finds shipped resolutions in .claude/resolutions', () => {
const shippedDir = path.join(tmpDir, '.claude', 'resolutions', 'alpha+beta');
fs.mkdirSync(shippedDir, { recursive: true });
fs.writeFileSync(path.join(shippedDir, 'meta.yaml'), 'skills: [alpha, beta]\n');
const result = findResolutionDir(['alpha', 'beta'], tmpDir);
expect(result).not.toBeNull();
expect(result).toContain('.claude/resolutions/alpha+beta');
});
it('findResolutionDir prefers shipped over project-level', () => {
// Create both shipped and project-level
const shippedDir = path.join(tmpDir, '.claude', 'resolutions', 'a+b');
fs.mkdirSync(shippedDir, { recursive: true });
fs.writeFileSync(path.join(shippedDir, 'meta.yaml'), 'skills: [a, b]\n');
saveResolution(
['a', 'b'],
[{ relPath: 'f.ts', preimage: 'x', resolution: 'project', inputHashes: dummyHashes }],
{},
tmpDir,
);
const result = findResolutionDir(['a', 'b'], tmpDir);
expect(result).toContain('.claude/resolutions/a+b');
});
it('skills are sorted so order does not matter', () => {
saveResolution(
['zeta', 'alpha'],
[{ relPath: 'f.ts', preimage: 'a', resolution: 'b', inputHashes: dummyHashes }],
{},
tmpDir,
);
// Find with reversed order should still work
const result = findResolutionDir(['alpha', 'zeta'], tmpDir);
expect(result).not.toBeNull();
// Also works with original order
const result2 = findResolutionDir(['zeta', 'alpha'], tmpDir);
expect(result2).not.toBeNull();
expect(result).toBe(result2);
});
describe('loadResolutions hash verification', () => {
const baseContent = 'base file content';
const currentContent = 'current file content';
const skillContent = 'skill file content';
const preimageContent = 'preimage with conflict markers';
const resolutionContent = 'resolved content';
const rerereHash = 'abc123def456';
function setupResolutionDir(fileHashes: Record<string, any>) {
// Create a shipped resolution directory
const resDir = path.join(tmpDir, '.claude', 'resolutions', 'alpha+beta');
fs.mkdirSync(path.join(resDir, 'src'), { recursive: true });
// Write preimage, resolution, and hash sidecar
fs.writeFileSync(path.join(resDir, 'src/config.ts.preimage'), preimageContent);
fs.writeFileSync(path.join(resDir, 'src/config.ts.resolution'), resolutionContent);
fs.writeFileSync(path.join(resDir, 'src/config.ts.preimage.hash'), rerereHash);
// Write meta.yaml
const meta: any = {
skills: ['alpha', 'beta'],
apply_order: ['alpha', 'beta'],
core_version: '1.0.0',
resolved_at: new Date().toISOString(),
tested: true,
test_passed: true,
resolution_source: 'maintainer',
input_hashes: {},
output_hash: '',
file_hashes: fileHashes,
};
fs.writeFileSync(path.join(resDir, 'meta.yaml'), stringify(meta));
return resDir;
}
function setupInputFiles() {
// Create base file
fs.mkdirSync(path.join(tmpDir, '.nanoclaw', 'base', 'src'), { recursive: true });
fs.writeFileSync(path.join(tmpDir, '.nanoclaw', 'base', 'src', 'config.ts'), baseContent);
// Create current file
fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
fs.writeFileSync(path.join(tmpDir, 'src', 'config.ts'), currentContent);
}
function createSkillDir() {
const skillDir = path.join(tmpDir, 'skill-pkg');
fs.mkdirSync(path.join(skillDir, 'modify', 'src'), { recursive: true });
fs.writeFileSync(path.join(skillDir, 'modify', 'src', 'config.ts'), skillContent);
return skillDir;
}
beforeEach(() => {
initGitRepo(tmpDir);
});
it('loads with matching file_hashes', () => {
setupInputFiles();
const skillDir = createSkillDir();
setupResolutionDir({
'src/config.ts': {
base: sha256(baseContent),
current: sha256(currentContent),
skill: sha256(skillContent),
},
});
const result = loadResolutions(['alpha', 'beta'], tmpDir, skillDir);
expect(result).toBe(true);
// Verify rr-cache entry was created
const gitDir = path.join(tmpDir, '.git');
const cacheEntry = path.join(gitDir, 'rr-cache', rerereHash);
expect(fs.existsSync(path.join(cacheEntry, 'preimage'))).toBe(true);
expect(fs.existsSync(path.join(cacheEntry, 'postimage'))).toBe(true);
});
it('skips pair with mismatched base hash', () => {
setupInputFiles();
const skillDir = createSkillDir();
setupResolutionDir({
'src/config.ts': {
base: 'wrong_hash',
current: sha256(currentContent),
skill: sha256(skillContent),
},
});
const result = loadResolutions(['alpha', 'beta'], tmpDir, skillDir);
expect(result).toBe(false);
// rr-cache entry should NOT be created
const gitDir = path.join(tmpDir, '.git');
expect(fs.existsSync(path.join(gitDir, 'rr-cache', rerereHash))).toBe(false);
});
it('skips pair with mismatched current hash', () => {
setupInputFiles();
const skillDir = createSkillDir();
setupResolutionDir({
'src/config.ts': {
base: sha256(baseContent),
current: 'wrong_hash',
skill: sha256(skillContent),
},
});
const result = loadResolutions(['alpha', 'beta'], tmpDir, skillDir);
expect(result).toBe(false);
});
it('skips pair with mismatched skill hash', () => {
setupInputFiles();
const skillDir = createSkillDir();
setupResolutionDir({
'src/config.ts': {
base: sha256(baseContent),
current: sha256(currentContent),
skill: 'wrong_hash',
},
});
const result = loadResolutions(['alpha', 'beta'], tmpDir, skillDir);
expect(result).toBe(false);
});
it('skips pair with no file_hashes entry for that file', () => {
setupInputFiles();
const skillDir = createSkillDir();
// file_hashes exists but doesn't include src/config.ts
setupResolutionDir({});
const result = loadResolutions(['alpha', 'beta'], tmpDir, skillDir);
expect(result).toBe(false);
});
});
});

View File

@@ -1,4 +1,4 @@
import { execFileSync, execSync } from 'child_process';
import { execSync } from 'child_process';
import crypto from 'crypto';
import fs from 'fs';
import os from 'os';
@@ -18,14 +18,7 @@ import {
readManifest,
} from './manifest.js';
import { loadPathRemap, resolvePathRemap } from './path-remap.js';
import {
cleanupMergeState,
isGitRepo,
mergeFile,
runRerere,
setupRerereAdapter,
} from './merge.js';
import { loadResolutions } from './resolution-cache.js';
import { mergeFile } from './merge.js';
import { computeFileHash, readState, recordSkillApplication, writeState } from './state.js';
import {
mergeDockerComposeServices,
@@ -171,10 +164,6 @@ export async function applySkill(skillDir: string): Promise<ApplyResult> {
// --- Merge modified files ---
const mergeConflicts: string[] = [];
// Load pre-computed resolutions into git's rr-cache before merging
const appliedSkillNames = currentState.applied_skills.map((s) => s.name);
loadResolutions([...appliedSkillNames, manifest.skill], projectRoot, skillDir);
for (const relPath of manifest.modifies) {
const resolvedPath = resolvePathRemap(relPath, pathRemap);
const currentPath = path.join(projectRoot, resolvedPath);
@@ -200,8 +189,6 @@ export async function applySkill(skillDir: string): Promise<ApplyResult> {
}
// Three-way merge: current ← base → skill
// Save current content before merge overwrites it (needed for rerere stage 2 = "ours")
const oursContent = fs.readFileSync(currentPath, 'utf-8');
// git merge-file modifies the first argument in-place, so use a temp copy
const tmpCurrent = path.join(
os.tmpdir(),
@@ -215,36 +202,9 @@ export async function applySkill(skillDir: string): Promise<ApplyResult> {
fs.copyFileSync(tmpCurrent, currentPath);
fs.unlinkSync(tmpCurrent);
} else {
// Copy conflict markers to working tree path BEFORE rerere
// rerere looks at the working tree file at relPath, not at tmpCurrent
// Conflict — copy markers to working tree
fs.copyFileSync(tmpCurrent, currentPath);
fs.unlinkSync(tmpCurrent);
if (isGitRepo()) {
const baseContent = fs.readFileSync(basePath, 'utf-8');
const theirsContent = fs.readFileSync(skillPath, 'utf-8');
setupRerereAdapter(resolvedPath, baseContent, oursContent, theirsContent);
const autoResolved = runRerere(currentPath);
if (autoResolved) {
// rerere resolved the conflict — currentPath now has resolved content
// Record the resolution: git add + git rerere
execFileSync('git', ['add', resolvedPath], { stdio: 'pipe' });
execSync('git rerere', { stdio: 'pipe' });
cleanupMergeState(resolvedPath);
// Unstage the file — cleanupMergeState clears unmerged entries
// but the git add above leaves the file staged at stage 0
try {
execFileSync('git', ['restore', '--staged', resolvedPath], { stdio: 'pipe' });
} catch { /* may fail if file is new or not tracked */ }
continue;
}
cleanupMergeState(resolvedPath);
}
// Unresolved conflict — currentPath already has conflict markers
mergeConflicts.push(relPath);
}
}

View File

@@ -4,8 +4,6 @@ export const BASE_DIR = '.nanoclaw/base';
export const BACKUP_DIR = '.nanoclaw/backup';
export const LOCK_FILE = '.nanoclaw/lock';
export const CUSTOM_DIR = '.nanoclaw/custom';
export const RESOLUTIONS_DIR = '.nanoclaw/resolutions';
export const SHIPPED_RESOLUTIONS_DIR = '.claude/resolutions';
export const SKILLS_SCHEMA_VERSION = '0.1.0';
// Top-level paths to include in base snapshot and upstream extraction.

View File

@@ -7,8 +7,6 @@ export {
CUSTOM_DIR,
LOCK_FILE,
NANOCLAW_DIR,
RESOLUTIONS_DIR,
SHIPPED_RESOLUTIONS_DIR,
STATE_FILE,
} from './constants.js';
export {
@@ -28,11 +26,8 @@ export {
readManifest,
} from './manifest.js';
export {
cleanupMergeState,
isGitRepo,
mergeFile,
runRerere,
setupRerereAdapter,
} from './merge.js';
export {
loadPathRemap,
@@ -44,12 +39,6 @@ export { findSkillDir, replaySkills } from './replay.js';
export type { ReplayOptions, ReplayResult } from './replay.js';
export { uninstallSkill } from './uninstall.js';
export { initSkillsSystem, migrateExisting } from './migrate.js';
export {
clearAllResolutions,
findResolutionDir,
loadResolutions,
saveResolution,
} from './resolution-cache.js';
export { applyUpdate, previewUpdate } from './update.js';
export {
compareSemver,
@@ -76,7 +65,6 @@ export type {
FileOperation,
MergeResult,
RebaseResult,
ResolutionMeta,
SkillManifest,
SkillState,
UninstallResult,

View File

@@ -1,6 +1,4 @@
import { execFileSync, execSync } from 'child_process';
import fs from 'fs';
import path from 'path';
import { MergeResult } from './types.js';
@@ -39,112 +37,3 @@ export function mergeFile(
throw new Error(`git merge-file failed: ${err.message}`);
}
}
/**
* Set up unmerged index entries for rerere adapter.
* Creates stages 1/2/3 so git rerere can record/resolve conflicts.
*/
export function setupRerereAdapter(
filePath: string,
baseContent: string,
oursContent: string,
theirsContent: string,
): void {
if (!isGitRepo()) return;
const gitDir = execSync('git rev-parse --git-dir', {
encoding: 'utf-8',
}).trim();
// Clean up stale MERGE_HEAD from a previous crash
if (fs.existsSync(path.join(gitDir, 'MERGE_HEAD'))) {
cleanupMergeState(filePath);
}
// Hash objects into git object store
const baseHash = execSync('git hash-object -w --stdin', {
input: baseContent,
encoding: 'utf-8',
}).trim();
const oursHash = execSync('git hash-object -w --stdin', {
input: oursContent,
encoding: 'utf-8',
}).trim();
const theirsHash = execSync('git hash-object -w --stdin', {
input: theirsContent,
encoding: 'utf-8',
}).trim();
// Create unmerged index entries (stages 1/2/3)
const indexInfo = [
`100644 ${baseHash} 1\t${filePath}`,
`100644 ${oursHash} 2\t${filePath}`,
`100644 ${theirsHash} 3\t${filePath}`,
].join('\n');
execSync('git update-index --index-info', {
input: indexInfo,
stdio: ['pipe', 'pipe', 'pipe'],
});
// Set MERGE_HEAD and MERGE_MSG (required for rerere)
const headHash = execSync('git rev-parse HEAD', {
encoding: 'utf-8',
}).trim();
fs.writeFileSync(path.join(gitDir, 'MERGE_HEAD'), headHash + '\n');
fs.writeFileSync(
path.join(gitDir, 'MERGE_MSG'),
`Skill merge: ${filePath}\n`,
);
}
/**
* Run git rerere to record or auto-resolve conflicts.
* When filePath is given, checks that specific file for remaining conflict markers.
* Returns true if rerere auto-resolved the conflict.
*/
export function runRerere(filePath: string): boolean {
if (!isGitRepo()) return false;
try {
execSync('git rerere', { stdio: 'pipe' });
// Check if the specific working tree file still has conflict markers.
// rerere resolves the working tree but does NOT update the index,
// so checking unmerged index entries would give a false negative.
const content = fs.readFileSync(filePath, 'utf-8');
return !content.includes('<<<<<<<');
} catch {
return false;
}
}
/**
* Clean up merge state after rerere operations.
* Pass filePath to only reset that file's index entries (preserving user's staged changes).
*/
export function cleanupMergeState(filePath?: string): void {
if (!isGitRepo()) return;
const gitDir = execSync('git rev-parse --git-dir', {
encoding: 'utf-8',
}).trim();
// Remove merge markers
const mergeHead = path.join(gitDir, 'MERGE_HEAD');
const mergeMsg = path.join(gitDir, 'MERGE_MSG');
if (fs.existsSync(mergeHead)) fs.unlinkSync(mergeHead);
if (fs.existsSync(mergeMsg)) fs.unlinkSync(mergeMsg);
// Reset only the specific file's unmerged index entries to avoid
// dropping the user's pre-existing staged changes
try {
if (filePath) {
execFileSync('git', ['reset', '--', filePath], { stdio: 'pipe' });
} else {
execSync('git reset', { stdio: 'pipe' });
}
} catch {
// May fail if nothing staged
}
}

View File

@@ -1,4 +1,4 @@
import { execFileSync, execSync } from 'child_process';
import { execFileSync } from 'child_process';
import crypto from 'crypto';
import fs from 'fs';
import os from 'os';
@@ -8,14 +8,7 @@ import { clearBackup, createBackup, restoreBackup } from './backup.js';
import { BASE_DIR, NANOCLAW_DIR } from './constants.js';
import { copyDir } from './fs-utils.js';
import { acquireLock } from './lock.js';
import {
cleanupMergeState,
isGitRepo,
mergeFile,
runRerere,
setupRerereAdapter,
} from './merge.js';
import { clearAllResolutions } from './resolution-cache.js';
import { mergeFile } from './merge.js';
import { computeFileHash, readState, writeState } from './state.js';
import type { RebaseResult } from './types.js';
@@ -190,9 +183,6 @@ export async function rebase(newBasePath?: string): Promise<RebaseResult> {
continue;
}
// Save "ours" (new base content) before merge overwrites it
const oursContent = newBaseContent;
// Three-way merge: current(new base) ← old-base → saved(modifications)
const tmpSaved = path.join(
os.tmpdir(),
@@ -204,23 +194,6 @@ export async function rebase(newBasePath?: string): Promise<RebaseResult> {
fs.unlinkSync(tmpSaved);
if (!result.clean) {
// Try rerere resolution (three-level model)
if (isGitRepo()) {
const baseContent = fs.readFileSync(oldBasePath, 'utf-8');
setupRerereAdapter(relPath, baseContent, oursContent, saved);
const autoResolved = runRerere(currentPath);
if (autoResolved) {
execFileSync('git', ['add', relPath], { stdio: 'pipe' });
execSync('git rerere', { stdio: 'pipe' });
cleanupMergeState(relPath);
continue;
}
cleanupMergeState(relPath);
}
// Unresolved — conflict markers remain in working tree
mergeConflicts.push(relPath);
}
}
@@ -271,9 +244,6 @@ export async function rebase(newBasePath?: string): Promise<RebaseResult> {
state.rebased_at = now;
writeState(state);
// Clear stale resolution cache (base has changed, old resolutions invalid)
clearAllResolutions(projectRoot);
clearBackup();
return {

View File

@@ -1,4 +1,3 @@
import { execFileSync, execSync } from 'child_process';
import crypto from 'crypto';
import fs from 'fs';
import os from 'os';
@@ -7,15 +6,8 @@ import path from 'path';
import { BASE_DIR, NANOCLAW_DIR } from './constants.js';
import { copyDir } from './fs-utils.js';
import { readManifest } from './manifest.js';
import {
cleanupMergeState,
isGitRepo,
mergeFile,
runRerere,
setupRerereAdapter,
} from './merge.js';
import { mergeFile } from './merge.js';
import { loadPathRemap, resolvePathRemap } from './path-remap.js';
import { loadResolutions } from './resolution-cache.js';
import {
mergeDockerComposeServices,
mergeEnvAdditions,
@@ -115,13 +107,6 @@ export async function replaySkills(
}
}
// 3. Load pre-computed resolutions into git's rr-cache before replaying
// Pass the last skill's dir — it's the one applied on top, producing conflicts
const lastSkillDir = options.skills.length > 0
? options.skillDirs[options.skills[options.skills.length - 1]]
: undefined;
loadResolutions(options.skills, projectRoot, lastSkillDir);
// Replay each skill in order
// Collect structured ops for batch application
const allNpmDeps: Record<string, string> = {};
@@ -190,7 +175,6 @@ export async function replaySkills(
fs.copyFileSync(currentPath, basePath);
}
const oursContent = fs.readFileSync(currentPath, 'utf-8');
const tmpCurrent = path.join(
os.tmpdir(),
`nanoclaw-replay-${crypto.randomUUID()}-${path.basename(relPath)}`,
@@ -205,29 +189,6 @@ export async function replaySkills(
} else {
fs.copyFileSync(tmpCurrent, currentPath);
fs.unlinkSync(tmpCurrent);
if (isGitRepo()) {
const baseContent = fs.readFileSync(basePath, 'utf-8');
const theirsContent = fs.readFileSync(skillPath, 'utf-8');
setupRerereAdapter(
resolvedPath,
baseContent,
oursContent,
theirsContent,
);
const autoResolved = runRerere(currentPath);
if (autoResolved) {
execFileSync('git', ['add', resolvedPath], { stdio: 'pipe' });
execSync('git rerere', { stdio: 'pipe' });
cleanupMergeState(resolvedPath);
continue;
}
cleanupMergeState(resolvedPath);
}
skillConflicts.push(resolvedPath);
}
}

View File

@@ -1,269 +0,0 @@
import { execSync } from 'child_process';
import fs from 'fs';
import path from 'path';
import { parse, stringify } from 'yaml';
import { NANOCLAW_DIR, RESOLUTIONS_DIR, SHIPPED_RESOLUTIONS_DIR } from './constants.js';
import { computeFileHash } from './state.js';
import { FileInputHashes, ResolutionMeta } from './types.js';
/**
 * Derive the canonical directory key for a skill combination: the skill
 * identifiers in alphabetical order, joined with "+". Sorting makes the
 * key independent of the order in which skills were supplied.
 */
function resolutionKey(skills: string[]): string {
  const sorted = skills.slice();
  sorted.sort();
  return sorted.join('+');
}
/**
 * Locate the resolution directory for a given skill combination.
 * Shipped resolutions (.claude/resolutions/) take precedence over
 * project-level ones.
 *
 * @param skills - Skill identifiers; order does not matter.
 * @param projectRoot - Absolute path to the project root.
 * @returns Absolute path to the directory, or null when none exists.
 */
export function findResolutionDir(
  skills: string[],
  projectRoot: string,
): string | null {
  const key = resolutionKey(skills);
  const candidates = [SHIPPED_RESOLUTIONS_DIR, RESOLUTIONS_DIR].map((base) =>
    path.join(projectRoot, base, key),
  );
  for (const candidate of candidates) {
    if (fs.existsSync(candidate)) {
      return candidate;
    }
  }
  return null;
}
/**
 * Load cached resolutions into the local git rerere cache.
 *
 * Each preimage/resolution pair found under the resolution directory is
 * copied into git's rr-cache only when the recorded input hashes
 * (base / current / skill) still match the files on disk; stale pairs are
 * skipped with a log line rather than loaded incorrectly.
 *
 * @param skills - Skill identifiers selecting the resolution directory.
 * @param projectRoot - Project root; also used as cwd when resolving the
 *   git directory.
 * @param skillDir - Directory of the skill whose modify/ files are the
 *   "theirs" side of the merge being resolved.
 * @returns true if at least one pair was loaded; false when the resolution
 *   dir, meta.yaml, pairs, or git repo are missing or nothing matched.
 */
export function loadResolutions(
  skills: string[],
  projectRoot: string,
  skillDir: string,
): boolean {
  const resDir = findResolutionDir(skills, projectRoot);
  if (!resDir) return false;
  const metaPath = path.join(resDir, 'meta.yaml');
  if (!fs.existsSync(metaPath)) return false;
  let meta: ResolutionMeta;
  try {
    meta = parse(fs.readFileSync(metaPath, 'utf-8')) as ResolutionMeta;
  } catch {
    // Unparseable meta.yaml — treat the whole cache entry as unusable.
    return false;
  }
  if (!meta.input_hashes) return false;
  // Find all preimage/resolution pairs
  const pairs = findPreimagePairs(resDir, resDir);
  if (pairs.length === 0) return false;
  // Get the git directory (rev-parse may return a relative path, e.g.
  // ".git" — normalize to absolute against projectRoot)
  let gitDir: string;
  try {
    gitDir = execSync('git rev-parse --git-dir', {
      encoding: 'utf-8',
      cwd: projectRoot,
    }).trim();
    if (!path.isAbsolute(gitDir)) {
      gitDir = path.join(projectRoot, gitDir);
    }
  } catch {
    // Not a git repo — rerere cannot be used.
    return false;
  }
  const rrCacheDir = path.join(gitDir, 'rr-cache');
  let loadedAny = false;
  for (const { relPath, preimage, resolution } of pairs) {
    // Verify file_hashes — skip pair if hashes don't match
    const expected = meta.file_hashes?.[relPath];
    if (!expected) {
      console.log(`resolution-cache: skipping ${relPath} — no file_hashes in meta`);
      continue;
    }
    // The three merge inputs this resolution was recorded against.
    const basePath = path.join(projectRoot, NANOCLAW_DIR, 'base', relPath);
    const currentPath = path.join(projectRoot, relPath);
    const skillModifyPath = path.join(skillDir, 'modify', relPath);
    if (!fs.existsSync(basePath) || !fs.existsSync(currentPath) || !fs.existsSync(skillModifyPath)) {
      console.log(`resolution-cache: skipping ${relPath} — input files not found`);
      continue;
    }
    const baseHash = computeFileHash(basePath);
    if (baseHash !== expected.base) {
      console.log(`resolution-cache: skipping ${relPath} — base hash mismatch`);
      continue;
    }
    const currentHash = computeFileHash(currentPath);
    if (currentHash !== expected.current) {
      console.log(`resolution-cache: skipping ${relPath} — current hash mismatch`);
      continue;
    }
    const skillHash = computeFileHash(skillModifyPath);
    if (skillHash !== expected.skill) {
      console.log(`resolution-cache: skipping ${relPath} — skill hash mismatch`);
      continue;
    }
    const preimageContent = fs.readFileSync(preimage, 'utf-8');
    const resolutionContent = fs.readFileSync(resolution, 'utf-8');
    // Git rerere uses its own internal hash format (not git hash-object).
    // We store the rerere hash in the preimage filename as a .hash sidecar,
    // captured when saveResolution() reads the actual rr-cache after rerere records it.
    const hashSidecar = preimage + '.hash';
    if (!fs.existsSync(hashSidecar)) {
      // No hash recorded — skip this pair (legacy format)
      continue;
    }
    const hash = fs.readFileSync(hashSidecar, 'utf-8').trim();
    if (!hash) continue;
    // Create rr-cache entry: rerere matches on <hash>/preimage and applies
    // <hash>/postimage when the conflict recurs.
    const cacheDir = path.join(rrCacheDir, hash);
    fs.mkdirSync(cacheDir, { recursive: true });
    fs.writeFileSync(path.join(cacheDir, 'preimage'), preimageContent);
    fs.writeFileSync(path.join(cacheDir, 'postimage'), resolutionContent);
    loadedAny = true;
  }
  return loadedAny;
}
/**
 * Persist conflict resolutions to the resolution cache.
 *
 * Writes a .preimage/.resolution pair per file (plus a .hash sidecar with
 * the rerere hash when available) and a meta.yaml describing the skill
 * combination and input hashes.
 *
 * @param skills - Skill identifiers this resolution applies to.
 * @param files - Per-file preimage/resolution contents and input hashes.
 * @param meta - Partial metadata; missing fields get defaults.
 * @param projectRoot - Absolute path to the project root.
 */
export function saveResolution(
  skills: string[],
  files: { relPath: string; preimage: string; resolution: string; inputHashes: FileInputHashes }[],
  meta: Partial<ResolutionMeta>,
  projectRoot: string,
): void {
  const resDir = path.join(projectRoot, RESOLUTIONS_DIR, resolutionKey(skills));
  // Resolve git's rr-cache location so we can capture the rerere hash for
  // each preimage; stays null outside a git repo.
  let rrCacheDir: string | null = null;
  try {
    let gitDir = execSync('git rev-parse --git-dir', {
      encoding: 'utf-8',
      cwd: projectRoot,
    }).trim();
    if (!path.isAbsolute(gitDir)) {
      gitDir = path.join(projectRoot, gitDir);
    }
    rrCacheDir = path.join(gitDir, 'rr-cache');
  } catch {
    // Not a git repo — skip hash capture
  }
  // Write each preimage/resolution pair, collecting input hashes as we go.
  const fileHashes: Record<string, FileInputHashes> = {};
  for (const entry of files) {
    const preimagePath = path.join(resDir, entry.relPath + '.preimage');
    fs.mkdirSync(path.dirname(preimagePath), { recursive: true });
    fs.writeFileSync(preimagePath, entry.preimage);
    fs.writeFileSync(path.join(resDir, entry.relPath + '.resolution'), entry.resolution);
    // Capture the actual rerere hash by finding the rr-cache entry whose
    // preimage content matches ours.
    if (rrCacheDir && fs.existsSync(rrCacheDir)) {
      const rerereHash = findRerereHash(rrCacheDir, entry.preimage);
      if (rerereHash) {
        fs.writeFileSync(preimagePath + '.hash', rerereHash);
      }
    }
    fileHashes[entry.relPath] = entry.inputHashes;
  }
  // Fill in defaults for any metadata the caller omitted; caller-supplied
  // file_hashes win over the collected ones.
  const fullMeta: ResolutionMeta = {
    skills: [...skills].sort(),
    apply_order: meta.apply_order ?? skills,
    core_version: meta.core_version ?? '',
    resolved_at: meta.resolved_at ?? new Date().toISOString(),
    tested: meta.tested ?? false,
    test_passed: meta.test_passed ?? false,
    resolution_source: meta.resolution_source ?? 'user',
    input_hashes: meta.input_hashes ?? {},
    output_hash: meta.output_hash ?? '',
    file_hashes: { ...fileHashes, ...meta.file_hashes },
  };
  fs.writeFileSync(path.join(resDir, 'meta.yaml'), stringify(fullMeta));
}
/**
 * Wipe every resolution cache entry for the project.
 * Called after rebase since the base has changed and old resolutions are
 * no longer valid.
 */
export function clearAllResolutions(projectRoot: string): void {
  const resolutionsRoot = path.join(projectRoot, RESOLUTIONS_DIR);
  if (!fs.existsSync(resolutionsRoot)) return;
  // Delete the tree, then recreate it empty rather than leaving it absent.
  fs.rmSync(resolutionsRoot, { recursive: true, force: true });
  fs.mkdirSync(resolutionsRoot, { recursive: true });
}
/**
 * Walk a directory tree and collect every preimage/resolution file pair.
 * A pair is a `<name>.preimage` file with a sibling `<name>.resolution`;
 * relPath is the path relative to baseDir with the extension stripped.
 */
function findPreimagePairs(
  dir: string,
  baseDir: string,
): { relPath: string; preimage: string; resolution: string }[] {
  const found: { relPath: string; preimage: string; resolution: string }[] = [];
  for (const item of fs.readdirSync(dir, { withFileTypes: true })) {
    const itemPath = path.join(dir, item.name);
    if (item.isDirectory()) {
      // Recurse into subdirectories, keeping paths relative to baseDir.
      for (const pair of findPreimagePairs(itemPath, baseDir)) {
        found.push(pair);
      }
      continue;
    }
    const isPreimage =
      item.name.endsWith('.preimage') && !item.name.endsWith('.preimage.hash');
    if (!isPreimage) continue;
    const resolutionPath = itemPath.replace(/\.preimage$/, '.resolution');
    // Orphan preimages (no matching .resolution) are ignored.
    if (!fs.existsSync(resolutionPath)) continue;
    found.push({
      relPath: path.relative(baseDir, itemPath).replace(/\.preimage$/, ''),
      preimage: itemPath,
      resolution: resolutionPath,
    });
  }
  return found;
}
/**
 * Scan git's rr-cache for the entry whose recorded preimage matches the
 * given content, and return that entry's directory name (the rerere hash).
 * Returns null when the cache is missing or nothing matches.
 */
function findRerereHash(rrCacheDir: string, preimageContent: string): string | null {
  if (!fs.existsSync(rrCacheDir)) return null;
  const entries = fs.readdirSync(rrCacheDir, { withFileTypes: true });
  for (const entry of entries) {
    if (!entry.isDirectory()) continue;
    const candidate = path.join(rrCacheDir, entry.name, 'preimage');
    if (!fs.existsSync(candidate)) continue;
    if (fs.readFileSync(candidate, 'utf-8') === preimageContent) {
      return entry.name;
    }
  }
  return null;
}

View File

@@ -76,25 +76,6 @@ export interface CustomModification {
patch_file: string;
}
/**
 * SHA-256 digests of the three inputs to one file's three-way merge.
 * Recorded when a resolution is saved and re-checked on load so that a
 * cached resolution is only applied when all three inputs are unchanged.
 */
export interface FileInputHashes {
  base: string; // SHA-256 of .nanoclaw/base/<relPath>
  current: string; // SHA-256 of working tree <relPath> before this merge
  skill: string; // SHA-256 of skill's modify/<relPath>
}
/**
 * Metadata stored as meta.yaml alongside a cached resolution, describing
 * the skill combination it covers and the hashes used to validate it.
 */
export interface ResolutionMeta {
  skills: string[]; // skill identifiers, stored sorted
  apply_order: string[]; // order the skills were applied in (defaults to input order)
  core_version: string; // core version at resolve time — TODO confirm semantics with writer
  resolved_at: string; // ISO-8601 timestamp of when the resolution was recorded
  tested: boolean; // whether the resolution was tested (defaults to false)
  test_passed: boolean; // whether that test passed (defaults to false)
  resolution_source: 'maintainer' | 'user' | 'claude'; // who produced the resolution
  input_hashes: Record<string, string>; // presumably keyed by input identifier — verify against writers
  output_hash: string; // hash of the merged output — TODO confirm which file it covers
  file_hashes: Record<string, FileInputHashes>; // per-relPath merge-input hashes, checked on load
}
export interface UpdatePreview {
currentVersion: string;
newVersion: string;

View File

@@ -11,13 +11,7 @@ import { BASE_DIR, NANOCLAW_DIR } from './constants.js';
import { copyDir } from './fs-utils.js';
import { isCustomizeActive } from './customize.js';
import { acquireLock } from './lock.js';
import {
cleanupMergeState,
isGitRepo,
mergeFile,
runRerere,
setupRerereAdapter,
} from './merge.js';
import { mergeFile } from './merge.js';
import { recordPathRemap } from './path-remap.js';
import { computeFileHash, readState, writeState } from './state.js';
import {
@@ -172,8 +166,6 @@ export async function applyUpdate(newCorePath: string): Promise<UpdateResult> {
}
// Three-way merge: current ← base → newCore
// Save current content before merge overwrites it (needed for rerere stage 2 = "ours")
const oursContent = fs.readFileSync(currentPath, 'utf-8');
const tmpCurrent = path.join(
os.tmpdir(),
`nanoclaw-update-${crypto.randomUUID()}-${path.basename(relPath)}`,
@@ -186,27 +178,9 @@ export async function applyUpdate(newCorePath: string): Promise<UpdateResult> {
fs.copyFileSync(tmpCurrent, currentPath);
fs.unlinkSync(tmpCurrent);
} else {
// Copy conflict markers to working tree path before rerere
// Conflict — copy markers to working tree
fs.copyFileSync(tmpCurrent, currentPath);
fs.unlinkSync(tmpCurrent);
if (isGitRepo()) {
const baseContent = fs.readFileSync(basePath, 'utf-8');
const theirsContent = fs.readFileSync(newCoreSrcPath, 'utf-8');
setupRerereAdapter(relPath, baseContent, oursContent, theirsContent);
const autoResolved = runRerere(currentPath);
if (autoResolved) {
execFileSync('git', ['add', relPath], { stdio: 'pipe' });
execSync('git rerere', { stdio: 'pipe' });
cleanupMergeState(relPath);
continue;
}
cleanupMergeState(relPath);
}
mergeConflicts.push(relPath);
}
}