chore: remove old /update skill, replaced by /update-nanoclaw

The new /update-nanoclaw skill (PR #217) replaces the old update
mechanism. Delete the old skill, update module, CLI scripts, and tests.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
gavrielc
2026-03-01 23:23:31 +02:00
parent 3475e89406
commit 80cdd23c84
12 changed files with 2 additions and 1496 deletions

View File

@@ -1,240 +0,0 @@
import { execFileSync, execSync } from 'child_process';
import fs from 'fs';
import os from 'os';
import path from 'path';
import { afterEach, beforeEach, describe, expect, it } from 'vitest';
// Integration tests for the /update skill's fetch-upstream.sh helper.
// Strategy: build a throwaway bare repo under os.tmpdir() to stand in for
// the GitHub upstream, seed it with a v2.0.0 commit, then run the script
// from a separate v1.0.0 "project" repo and parse the <<< STATUS block it
// prints to stdout.
describe('fetch-upstream.sh', () => {
  let projectDir: string;
  let upstreamBareDir: string;
  // Absolute path to the script under test, resolved from the repo root.
  const scriptPath = path.resolve(
    '.claude/skills/update/scripts/fetch-upstream.sh',
  );
  beforeEach(() => {
    // Create a bare repo to act as "upstream"
    upstreamBareDir = fs.mkdtempSync(
      path.join(os.tmpdir(), 'nanoclaw-upstream-'),
    );
    execSync('git init --bare -b main', {
      cwd: upstreamBareDir,
      stdio: 'pipe',
    });
    // Create a working repo, add files, push to the bare repo
    const seedDir = fs.mkdtempSync(path.join(os.tmpdir(), 'nanoclaw-seed-'));
    execSync('git init -b main', { cwd: seedDir, stdio: 'pipe' });
    execSync('git config user.email "test@test.com"', {
      cwd: seedDir,
      stdio: 'pipe',
    });
    execSync('git config user.name "Test"', { cwd: seedDir, stdio: 'pipe' });
    fs.writeFileSync(
      path.join(seedDir, 'package.json'),
      JSON.stringify({ name: 'nanoclaw', version: '2.0.0' }),
    );
    fs.mkdirSync(path.join(seedDir, 'src'), { recursive: true });
    fs.writeFileSync(path.join(seedDir, 'src/index.ts'), 'export const v = 2;');
    execSync('git add -A && git commit -m "upstream v2.0.0"', {
      cwd: seedDir,
      stdio: 'pipe',
    });
    execSync(`git remote add origin ${upstreamBareDir}`, {
      cwd: seedDir,
      stdio: 'pipe',
    });
    execSync('git push origin main', {
      cwd: seedDir,
      stdio: 'pipe',
    });
    // The seed repo was only needed to populate the bare repo.
    fs.rmSync(seedDir, { recursive: true, force: true });
    // Create the "project" repo that will run the script
    projectDir = fs.mkdtempSync(path.join(os.tmpdir(), 'nanoclaw-project-'));
    execSync('git init -b main', { cwd: projectDir, stdio: 'pipe' });
    execSync('git config user.email "test@test.com"', {
      cwd: projectDir,
      stdio: 'pipe',
    });
    execSync('git config user.name "Test"', {
      cwd: projectDir,
      stdio: 'pipe',
    });
    fs.writeFileSync(
      path.join(projectDir, 'package.json'),
      JSON.stringify({ name: 'nanoclaw', version: '1.0.0' }),
    );
    execSync('git add -A && git commit -m "init"', {
      cwd: projectDir,
      stdio: 'pipe',
    });
    // Copy skills-engine/constants.ts so fetch-upstream.sh can read BASE_INCLUDES
    const constantsSrc = path.resolve('skills-engine/constants.ts');
    const constantsDest = path.join(projectDir, 'skills-engine/constants.ts');
    fs.mkdirSync(path.dirname(constantsDest), { recursive: true });
    fs.copyFileSync(constantsSrc, constantsDest);
    // Copy the script into the project so it can find PROJECT_ROOT
    const skillScriptsDir = path.join(
      projectDir,
      '.claude/skills/update/scripts',
    );
    fs.mkdirSync(skillScriptsDir, { recursive: true });
    fs.copyFileSync(
      scriptPath,
      path.join(skillScriptsDir, 'fetch-upstream.sh'),
    );
    fs.chmodSync(path.join(skillScriptsDir, 'fetch-upstream.sh'), 0o755);
  });
  afterEach(() => {
    // Clean up temp dirs (also any TEMP_DIR created by the script)
    for (const dir of [projectDir, upstreamBareDir]) {
      if (dir && fs.existsSync(dir)) {
        fs.rmSync(dir, { recursive: true, force: true });
      }
    }
  });
  // Runs the script with bash from inside the project repo, returning its
  // stdout and exit code. execFileSync throws on non-zero exit; the thrown
  // error carries stdout/stderr/status, which we surface uniformly so tests
  // can assert on failures the same way as on successes.
  function runFetchUpstream(): { stdout: string; exitCode: number } {
    try {
      const stdout = execFileSync(
        'bash',
        ['.claude/skills/update/scripts/fetch-upstream.sh'],
        {
          cwd: projectDir,
          encoding: 'utf-8',
          stdio: 'pipe',
          timeout: 30_000,
        },
      );
      return { stdout, exitCode: 0 };
    } catch (err: unknown) {
      // Narrow instead of `any`: child_process failures expose these fields.
      const e = err as { stdout?: string; stderr?: string; status?: number };
      return {
        stdout: (e.stdout ?? '') + (e.stderr ?? ''),
        exitCode: e.status ?? 1,
      };
    }
  }
  // Extracts KEY=VALUE pairs from the script's "<<< STATUS ... STATUS >>>"
  // block. Returns an empty object when the block is absent.
  function parseStatus(stdout: string): Record<string, string> {
    const match = stdout.match(/<<< STATUS\n([\s\S]*?)\nSTATUS >>>/);
    if (!match) return {};
    const lines = match[1].trim().split('\n');
    const result: Record<string, string> = {};
    for (const line of lines) {
      // Split on the first '=' only; values may themselves contain '='.
      const eq = line.indexOf('=');
      if (eq > 0) {
        result[line.slice(0, eq)] = line.slice(eq + 1);
      }
    }
    return result;
  }
  it('uses existing upstream remote', () => {
    execSync(`git remote add upstream ${upstreamBareDir}`, {
      cwd: projectDir,
      stdio: 'pipe',
    });
    const { stdout, exitCode } = runFetchUpstream();
    const status = parseStatus(stdout);
    expect(exitCode).toBe(0);
    expect(status.STATUS).toBe('success');
    expect(status.REMOTE).toBe('upstream');
    expect(status.CURRENT_VERSION).toBe('1.0.0');
    expect(status.NEW_VERSION).toBe('2.0.0');
    expect(status.TEMP_DIR).toMatch(/^\/tmp\/nanoclaw-update-/);
    // Verify extracted files exist
    expect(fs.existsSync(path.join(status.TEMP_DIR, 'package.json'))).toBe(
      true,
    );
    expect(fs.existsSync(path.join(status.TEMP_DIR, 'src/index.ts'))).toBe(
      true,
    );
    // Cleanup temp dir
    fs.rmSync(status.TEMP_DIR, { recursive: true, force: true });
  });
  it('uses origin when it points to qwibitai/nanoclaw', () => {
    // Set origin to a URL containing qwibitai/nanoclaw
    execSync(`git remote add origin https://github.com/qwibitai/nanoclaw.git`, {
      cwd: projectDir,
      stdio: 'pipe',
    });
    // We can't actually fetch from GitHub in tests, but we can verify
    // it picks the right remote. We'll add a second remote it CAN fetch from.
    execSync(`git remote add upstream ${upstreamBareDir}`, {
      cwd: projectDir,
      stdio: 'pipe',
    });
    const { stdout, exitCode } = runFetchUpstream();
    const status = parseStatus(stdout);
    // It should find 'upstream' first (checked before origin)
    expect(exitCode).toBe(0);
    expect(status.REMOTE).toBe('upstream');
    if (status.TEMP_DIR) {
      fs.rmSync(status.TEMP_DIR, { recursive: true, force: true });
    }
  });
  it('adds upstream remote when none exists', { timeout: 15_000 }, () => {
    // Remove origin if any
    try {
      execSync('git remote remove origin', {
        cwd: projectDir,
        stdio: 'pipe',
      });
    } catch {
      // No origin
    }
    const { stdout } = runFetchUpstream();
    // It will try to add upstream pointing to github (which will fail to fetch),
    // but we can verify it attempted to add the remote
    expect(stdout).toContain('Adding upstream');
    // Verify the remote was added
    const remotes = execSync('git remote -v', {
      cwd: projectDir,
      encoding: 'utf-8',
    });
    expect(remotes).toContain('upstream');
    expect(remotes).toContain('qwibitai/nanoclaw');
  });
  it('extracts files to temp dir correctly', () => {
    execSync(`git remote add upstream ${upstreamBareDir}`, {
      cwd: projectDir,
      stdio: 'pipe',
    });
    const { stdout, exitCode } = runFetchUpstream();
    const status = parseStatus(stdout);
    expect(exitCode).toBe(0);
    // Check file content matches what was pushed
    const pkg = JSON.parse(
      fs.readFileSync(path.join(status.TEMP_DIR, 'package.json'), 'utf-8'),
    );
    expect(pkg.version).toBe('2.0.0');
    const indexContent = fs.readFileSync(
      path.join(status.TEMP_DIR, 'src/index.ts'),
      'utf-8',
    );
    expect(indexContent).toBe('export const v = 2;');
    fs.rmSync(status.TEMP_DIR, { recursive: true, force: true });
  });
});

View File

@@ -1,137 +0,0 @@
import { execFileSync } from 'child_process';
import fs from 'fs';
import path from 'path';
import { afterEach, beforeEach, describe, expect, it } from 'vitest';
import { stringify } from 'yaml';
import {
cleanup,
createTempDir,
initGitRepo,
setupNanoclawDir,
} from './test-helpers.js';
// CLI-level tests for scripts/update-core.ts: each test spawns the script
// with tsx inside a fresh temp project and asserts on its stdout/exit code
// for the --json and --preview-only flag combinations.
describe('update-core.ts CLI flags', () => {
  let tmpDir: string;
  const scriptPath = path.resolve('scripts/update-core.ts');
  const tsxBin = path.resolve('node_modules/.bin/tsx');
  beforeEach(() => {
    tmpDir = createTempDir();
    setupNanoclawDir(tmpDir);
    initGitRepo(tmpDir);
    // Write state file — the script reads core_version from here.
    const statePath = path.join(tmpDir, '.nanoclaw', 'state.yaml');
    fs.writeFileSync(
      statePath,
      stringify({
        skills_system_version: '0.1.0',
        core_version: '1.0.0',
        applied_skills: [],
      }),
    );
  });
  afterEach(() => {
    cleanup(tmpDir);
  });
  // Materializes an incoming "new core" tree from relPath -> content pairs.
  function createNewCore(files: Record<string, string>): string {
    const dir = path.join(tmpDir, 'new-core');
    fs.mkdirSync(dir, { recursive: true });
    for (const [relPath, content] of Object.entries(files)) {
      const fullPath = path.join(dir, relPath);
      fs.mkdirSync(path.dirname(fullPath), { recursive: true });
      fs.writeFileSync(fullPath, content);
    }
    return dir;
  }
  it('--json --preview-only outputs JSON preview without applying', () => {
    const baseDir = path.join(tmpDir, '.nanoclaw', 'base');
    fs.mkdirSync(path.join(baseDir, 'src'), { recursive: true });
    fs.writeFileSync(path.join(baseDir, 'src/index.ts'), 'original');
    fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
    fs.writeFileSync(path.join(tmpDir, 'src/index.ts'), 'original');
    const newCoreDir = createNewCore({
      'src/index.ts': 'updated',
      'package.json': JSON.stringify({ version: '2.0.0' }),
    });
    const stdout = execFileSync(
      tsxBin,
      [scriptPath, '--json', '--preview-only', newCoreDir],
      { cwd: tmpDir, encoding: 'utf-8', stdio: 'pipe', timeout: 30_000 },
    );
    const preview = JSON.parse(stdout);
    expect(preview.currentVersion).toBe('1.0.0');
    expect(preview.newVersion).toBe('2.0.0');
    expect(preview.filesChanged).toContain('src/index.ts');
    // File should NOT have been modified (preview only)
    expect(fs.readFileSync(path.join(tmpDir, 'src/index.ts'), 'utf-8')).toBe(
      'original',
    );
  });
  it('--preview-only without --json outputs human-readable text', () => {
    const newCoreDir = createNewCore({
      'src/new-file.ts': 'export const x = 1;',
      'package.json': JSON.stringify({ version: '2.0.0' }),
    });
    const stdout = execFileSync(
      tsxBin,
      [scriptPath, '--preview-only', newCoreDir],
      { cwd: tmpDir, encoding: 'utf-8', stdio: 'pipe', timeout: 30_000 },
    );
    expect(stdout).toContain('Update Preview');
    expect(stdout).toContain('2.0.0');
    // Should NOT contain JSON (it's human-readable mode)
    expect(stdout).not.toContain('"currentVersion"');
  });
  it('--json applies and outputs JSON result', () => {
    fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
    fs.writeFileSync(path.join(tmpDir, 'src/index.ts'), 'original');
    const newCoreDir = createNewCore({
      'src/index.ts': 'original',
      'package.json': JSON.stringify({ version: '2.0.0' }),
    });
    const stdout = execFileSync(tsxBin, [scriptPath, '--json', newCoreDir], {
      cwd: tmpDir,
      encoding: 'utf-8',
      stdio: 'pipe',
      timeout: 30_000,
    });
    const result = JSON.parse(stdout);
    expect(result.success).toBe(true);
    expect(result.previousVersion).toBe('1.0.0');
    expect(result.newVersion).toBe('2.0.0');
  });
  it('exits with error when no path provided', () => {
    try {
      execFileSync(tsxBin, [scriptPath], {
        cwd: tmpDir,
        encoding: 'utf-8',
        stdio: 'pipe',
        timeout: 30_000,
      });
      expect.unreachable('Should have exited with error');
    } catch (err: unknown) {
      // Narrow instead of `any`: child_process failures carry status/stderr.
      const e = err as { status?: number; stderr?: string };
      expect(e.status).toBe(1);
      expect(e.stderr).toContain('Usage');
    }
  });
});

View File

@@ -1,418 +0,0 @@
import fs from 'fs';
import path from 'path';
import { afterEach, beforeEach, describe, expect, it } from 'vitest';
import { stringify } from 'yaml';
import {
cleanup,
createTempDir,
initGitRepo,
setupNanoclawDir,
} from './test-helpers.js';
// Unit tests for previewUpdate()/applyUpdate() from skills-engine/update.ts.
// Each test runs inside a fresh temp project: beforeEach switches cwd there,
// because the module under test resolves all paths from process.cwd().
let tmpDir: string;
const originalCwd = process.cwd();
describe('update', () => {
  beforeEach(() => {
    tmpDir = createTempDir();
    setupNanoclawDir(tmpDir);
    initGitRepo(tmpDir);
    process.chdir(tmpDir);
  });
  afterEach(() => {
    // Restore cwd before deleting tmpDir so cleanup can't remove the cwd.
    process.chdir(originalCwd);
    cleanup(tmpDir);
  });
  // Serializes the given state object to .nanoclaw/state.yaml.
  function writeStateFile(state: Record<string, unknown>): void {
    const statePath = path.join(tmpDir, '.nanoclaw', 'state.yaml');
    fs.writeFileSync(statePath, stringify(state), 'utf-8');
  }
  // Materializes an incoming "new core" tree from relPath -> content pairs.
  function createNewCoreDir(files: Record<string, string>): string {
    const newCoreDir = path.join(tmpDir, 'new-core');
    fs.mkdirSync(newCoreDir, { recursive: true });
    for (const [relPath, content] of Object.entries(files)) {
      const fullPath = path.join(newCoreDir, relPath);
      fs.mkdirSync(path.dirname(fullPath), { recursive: true });
      fs.writeFileSync(fullPath, content);
    }
    return newCoreDir;
  }
  describe('previewUpdate', () => {
    it('detects new files in update', async () => {
      writeStateFile({
        skills_system_version: '0.1.0',
        core_version: '1.0.0',
        applied_skills: [],
      });
      const newCoreDir = createNewCoreDir({
        'src/new-file.ts': 'export const x = 1;',
      });
      // Dynamic import so the module binds to the temp cwd set in beforeEach.
      const { previewUpdate } = await import('../update.js');
      const preview = previewUpdate(newCoreDir);
      expect(preview.filesChanged).toContain('src/new-file.ts');
      expect(preview.currentVersion).toBe('1.0.0');
    });
    it('detects changed files vs base', async () => {
      const baseDir = path.join(tmpDir, '.nanoclaw', 'base');
      fs.mkdirSync(path.join(baseDir, 'src'), { recursive: true });
      fs.writeFileSync(path.join(baseDir, 'src/index.ts'), 'original');
      writeStateFile({
        skills_system_version: '0.1.0',
        core_version: '1.0.0',
        applied_skills: [],
      });
      const newCoreDir = createNewCoreDir({
        'src/index.ts': 'modified',
      });
      const { previewUpdate } = await import('../update.js');
      const preview = previewUpdate(newCoreDir);
      expect(preview.filesChanged).toContain('src/index.ts');
    });
    it('does not list unchanged files', async () => {
      const baseDir = path.join(tmpDir, '.nanoclaw', 'base');
      fs.mkdirSync(path.join(baseDir, 'src'), { recursive: true });
      fs.writeFileSync(path.join(baseDir, 'src/index.ts'), 'same content');
      writeStateFile({
        skills_system_version: '0.1.0',
        core_version: '1.0.0',
        applied_skills: [],
      });
      const newCoreDir = createNewCoreDir({
        'src/index.ts': 'same content',
      });
      const { previewUpdate } = await import('../update.js');
      const preview = previewUpdate(newCoreDir);
      expect(preview.filesChanged).not.toContain('src/index.ts');
    });
    it('identifies conflict risk with applied skills', async () => {
      const baseDir = path.join(tmpDir, '.nanoclaw', 'base');
      fs.mkdirSync(path.join(baseDir, 'src'), { recursive: true });
      fs.writeFileSync(path.join(baseDir, 'src/index.ts'), 'original');
      // A skill that recorded a hash for src/index.ts overlaps the update.
      writeStateFile({
        skills_system_version: '0.1.0',
        core_version: '1.0.0',
        applied_skills: [
          {
            name: 'telegram',
            version: '1.0.0',
            applied_at: new Date().toISOString(),
            file_hashes: { 'src/index.ts': 'abc123' },
          },
        ],
      });
      const newCoreDir = createNewCoreDir({
        'src/index.ts': 'updated core',
      });
      const { previewUpdate } = await import('../update.js');
      const preview = previewUpdate(newCoreDir);
      expect(preview.conflictRisk).toContain('src/index.ts');
    });
    it('identifies custom patches at risk', async () => {
      const baseDir = path.join(tmpDir, '.nanoclaw', 'base');
      fs.mkdirSync(path.join(baseDir, 'src'), { recursive: true });
      fs.writeFileSync(path.join(baseDir, 'src/config.ts'), 'original');
      // A recorded custom modification touching src/config.ts should be
      // flagged when the incoming core also changes that file.
      writeStateFile({
        skills_system_version: '0.1.0',
        core_version: '1.0.0',
        applied_skills: [],
        custom_modifications: [
          {
            description: 'custom tweak',
            applied_at: new Date().toISOString(),
            files_modified: ['src/config.ts'],
            patch_file: '.nanoclaw/custom/001-tweak.patch',
          },
        ],
      });
      const newCoreDir = createNewCoreDir({
        'src/config.ts': 'updated core config',
      });
      const { previewUpdate } = await import('../update.js');
      const preview = previewUpdate(newCoreDir);
      expect(preview.customPatchesAtRisk).toContain('src/config.ts');
    });
    it('reads version from package.json in new core', async () => {
      writeStateFile({
        skills_system_version: '0.1.0',
        core_version: '1.0.0',
        applied_skills: [],
      });
      const newCoreDir = createNewCoreDir({
        'package.json': JSON.stringify({ version: '2.0.0' }),
      });
      const { previewUpdate } = await import('../update.js');
      const preview = previewUpdate(newCoreDir);
      expect(preview.newVersion).toBe('2.0.0');
    });
    it('detects files deleted in new core', async () => {
      const baseDir = path.join(tmpDir, '.nanoclaw', 'base');
      fs.mkdirSync(path.join(baseDir, 'src'), { recursive: true });
      fs.writeFileSync(path.join(baseDir, 'src/index.ts'), 'keep this');
      fs.writeFileSync(path.join(baseDir, 'src/removed.ts'), 'delete this');
      writeStateFile({
        skills_system_version: '0.1.0',
        core_version: '1.0.0',
        applied_skills: [],
      });
      // New core only has index.ts — removed.ts is gone
      const newCoreDir = createNewCoreDir({
        'src/index.ts': 'keep this',
      });
      const { previewUpdate } = await import('../update.js');
      const preview = previewUpdate(newCoreDir);
      expect(preview.filesDeleted).toContain('src/removed.ts');
      expect(preview.filesChanged).not.toContain('src/removed.ts');
    });
  });
  describe('applyUpdate', () => {
    it('rejects when customize session is active', async () => {
      writeStateFile({
        skills_system_version: '0.1.0',
        core_version: '1.0.0',
        applied_skills: [],
      });
      // Create the pending.yaml that indicates active customize
      const customDir = path.join(tmpDir, '.nanoclaw', 'custom');
      fs.mkdirSync(customDir, { recursive: true });
      fs.writeFileSync(path.join(customDir, 'pending.yaml'), 'active: true');
      const newCoreDir = createNewCoreDir({
        'src/index.ts': 'new content',
      });
      const { applyUpdate } = await import('../update.js');
      const result = await applyUpdate(newCoreDir);
      expect(result.success).toBe(false);
      expect(result.error).toContain('customize session');
    });
    it('copies new files that do not exist yet', async () => {
      writeStateFile({
        skills_system_version: '0.1.0',
        core_version: '1.0.0',
        applied_skills: [],
      });
      const newCoreDir = createNewCoreDir({
        'src/brand-new.ts': 'export const fresh = true;',
      });
      const { applyUpdate } = await import('../update.js');
      const result = await applyUpdate(newCoreDir);
      expect(result.error).toBeUndefined();
      expect(result.success).toBe(true);
      expect(
        fs.readFileSync(path.join(tmpDir, 'src/brand-new.ts'), 'utf-8'),
      ).toBe('export const fresh = true;');
    });
    it('performs clean three-way merge', async () => {
      // Set up base
      const baseDir = path.join(tmpDir, '.nanoclaw', 'base');
      fs.mkdirSync(path.join(baseDir, 'src'), { recursive: true });
      fs.writeFileSync(
        path.join(baseDir, 'src/index.ts'),
        'line 1\nline 2\nline 3\n',
      );
      // Current has user changes at the bottom
      fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
      fs.writeFileSync(
        path.join(tmpDir, 'src/index.ts'),
        'line 1\nline 2\nline 3\nuser addition\n',
      );
      writeStateFile({
        skills_system_version: '0.1.0',
        core_version: '1.0.0',
        applied_skills: [],
      });
      // New core changes at the top
      const newCoreDir = createNewCoreDir({
        'src/index.ts': 'core update\nline 1\nline 2\nline 3\n',
        'package.json': JSON.stringify({ version: '2.0.0' }),
      });
      const { applyUpdate } = await import('../update.js');
      const result = await applyUpdate(newCoreDir);
      expect(result.success).toBe(true);
      expect(result.newVersion).toBe('2.0.0');
      // Non-overlapping edits: both changes must survive the merge.
      const merged = fs.readFileSync(
        path.join(tmpDir, 'src/index.ts'),
        'utf-8',
      );
      expect(merged).toContain('core update');
      expect(merged).toContain('user addition');
    });
    it('updates base directory after successful merge', async () => {
      const baseDir = path.join(tmpDir, '.nanoclaw', 'base');
      fs.mkdirSync(path.join(baseDir, 'src'), { recursive: true });
      fs.writeFileSync(path.join(baseDir, 'src/index.ts'), 'old base');
      fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
      fs.writeFileSync(path.join(tmpDir, 'src/index.ts'), 'old base');
      writeStateFile({
        skills_system_version: '0.1.0',
        core_version: '1.0.0',
        applied_skills: [],
      });
      const newCoreDir = createNewCoreDir({
        'src/index.ts': 'new base content',
      });
      const { applyUpdate } = await import('../update.js');
      await applyUpdate(newCoreDir);
      const newBase = fs.readFileSync(
        path.join(tmpDir, '.nanoclaw', 'base', 'src/index.ts'),
        'utf-8',
      );
      expect(newBase).toBe('new base content');
    });
    it('updates core_version in state after success', async () => {
      writeStateFile({
        skills_system_version: '0.1.0',
        core_version: '1.0.0',
        applied_skills: [],
      });
      const newCoreDir = createNewCoreDir({
        'package.json': JSON.stringify({ version: '2.0.0' }),
      });
      const { applyUpdate } = await import('../update.js');
      const result = await applyUpdate(newCoreDir);
      expect(result.success).toBe(true);
      expect(result.previousVersion).toBe('1.0.0');
      expect(result.newVersion).toBe('2.0.0');
      // Verify state file was updated
      const { readState } = await import('../state.js');
      const state = readState();
      expect(state.core_version).toBe('2.0.0');
    });
    it('restores backup on merge conflict', async () => {
      const baseDir = path.join(tmpDir, '.nanoclaw', 'base');
      fs.mkdirSync(path.join(baseDir, 'src'), { recursive: true });
      fs.writeFileSync(
        path.join(baseDir, 'src/index.ts'),
        'line 1\nline 2\nline 3\n',
      );
      // Current has conflicting change on same line
      fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
      fs.writeFileSync(
        path.join(tmpDir, 'src/index.ts'),
        'line 1\nuser changed line 2\nline 3\n',
      );
      writeStateFile({
        skills_system_version: '0.1.0',
        core_version: '1.0.0',
        applied_skills: [],
      });
      // New core also changes line 2 — guaranteed conflict
      const newCoreDir = createNewCoreDir({
        'src/index.ts': 'line 1\ncore changed line 2\nline 3\n',
      });
      const { applyUpdate } = await import('../update.js');
      const result = await applyUpdate(newCoreDir);
      expect(result.success).toBe(false);
      expect(result.mergeConflicts).toContain('src/index.ts');
      expect(result.backupPending).toBe(true);
      // File should have conflict markers (backup preserved, not restored)
      const content = fs.readFileSync(
        path.join(tmpDir, 'src/index.ts'),
        'utf-8',
      );
      expect(content).toContain('<<<<<<<');
      expect(content).toContain('>>>>>>>');
    });
    it('removes files deleted in new core', async () => {
      const baseDir = path.join(tmpDir, '.nanoclaw', 'base');
      fs.mkdirSync(path.join(baseDir, 'src'), { recursive: true });
      fs.writeFileSync(path.join(baseDir, 'src/index.ts'), 'keep');
      fs.writeFileSync(path.join(baseDir, 'src/removed.ts'), 'old content');
      // Working tree has both files
      fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
      fs.writeFileSync(path.join(tmpDir, 'src/index.ts'), 'keep');
      fs.writeFileSync(path.join(tmpDir, 'src/removed.ts'), 'old content');
      writeStateFile({
        skills_system_version: '0.1.0',
        core_version: '1.0.0',
        applied_skills: [],
      });
      // New core only has index.ts
      const newCoreDir = createNewCoreDir({
        'src/index.ts': 'keep',
      });
      const { applyUpdate } = await import('../update.js');
      const result = await applyUpdate(newCoreDir);
      expect(result.success).toBe(true);
      expect(fs.existsSync(path.join(tmpDir, 'src/index.ts'))).toBe(true);
      expect(fs.existsSync(path.join(tmpDir, 'src/removed.ts'))).toBe(false);
    });
  });
});

View File

@@ -301,7 +301,7 @@ export async function applySkill(skillDir: string): Promise<ApplyResult> {
}
}
// Store structured outcomes including the test command so applyUpdate() can run them
// Store structured outcomes including the test command
const outcomes: Record<string, unknown> = manifest.structured
? { ...manifest.structured }
: {};

View File

@@ -36,7 +36,6 @@ export { findSkillDir, replaySkills } from './replay.js';
export type { ReplayOptions, ReplayResult } from './replay.js';
export { uninstallSkill } from './uninstall.js';
export { initSkillsSystem, migrateExisting } from './migrate.js';
export { applyUpdate, previewUpdate } from './update.js';
export {
compareSemver,
computeFileHash,
@@ -65,6 +64,4 @@ export type {
SkillManifest,
SkillState,
UninstallResult,
UpdatePreview,
UpdateResult,
} from './types.js';

View File

@@ -76,26 +76,6 @@ export interface CustomModification {
patch_file: string;
}
/** Dry-run summary of what applying a new core tree would change. */
export interface UpdatePreview {
  /** Core version currently recorded in project state. */
  currentVersion: string;
  /** Version read from the incoming core's package.json ('unknown' if absent). */
  newVersion: string;
  /** Files added or modified relative to the tracked base. */
  filesChanged: string[];
  /** Files present in the base but absent from the incoming core. */
  filesDeleted: string[];
  /** Changed files that overlap files recorded by applied skills. */
  conflictRisk: string[];
  /** Changed files that overlap recorded custom modifications. */
  customPatchesAtRisk: string[];
}
/** Outcome of applyUpdate(); failure modes are modeled here, not thrown. */
export interface UpdateResult {
  /** True when the update fully applied and state was advanced. */
  success: boolean;
  /** Core version before the attempt. */
  previousVersion: string;
  /** Target core version ('unknown' when it could not be determined). */
  newVersion: string;
  /** Relative paths left with conflict markers, when the merge failed. */
  mergeConflicts?: string[];
  /** Set when the backup was intentionally kept for manual conflict resolution. */
  backupPending?: boolean;
  /** Descriptions of custom patches that could not be re-applied. */
  customPatchFailures?: string[];
  /** Per-skill result of re-running each skill's recorded test command. */
  skillReapplyResults?: Record<string, boolean>;
  /** Human-readable reason when success is false. */
  error?: string;
}
export interface UninstallResult {
success: boolean;
skill: string;

View File

@@ -1,355 +0,0 @@
import { execFileSync, execSync } from 'child_process';
import crypto from 'crypto';
import fs from 'fs';
import os from 'os';
import path from 'path';
import { parse as parseYaml } from 'yaml';
import { clearBackup, createBackup, restoreBackup } from './backup.js';
import { BASE_DIR, NANOCLAW_DIR } from './constants.js';
import { copyDir } from './fs-utils.js';
import { isCustomizeActive } from './customize.js';
import { acquireLock } from './lock.js';
import { mergeFile } from './merge.js';
import { recordPathRemap } from './path-remap.js';
import { computeFileHash, readState, writeState } from './state.js';
import {
mergeDockerComposeServices,
mergeEnvAdditions,
mergeNpmDependencies,
runNpmInstall,
} from './structured.js';
import type { UpdatePreview, UpdateResult } from './types.js';
/**
 * Recursively lists every file under `dir`, returned as paths relative to
 * `root` (defaults to `dir` itself on the outermost call). Directories are
 * descended into; only file entries appear in the result.
 */
function walkDir(dir: string, root?: string): string[] {
  const base = root ?? dir;
  return fs
    .readdirSync(dir, { withFileTypes: true })
    .flatMap((entry) => {
      const entryPath = path.join(dir, entry.name);
      return entry.isDirectory()
        ? walkDir(entryPath, base)
        : [path.relative(base, entryPath)];
    });
}
/**
 * Computes a dry-run diff between the incoming core tree at `newCorePath`
 * and the tracked base, without touching the working tree.
 *
 * A file counts as "changed" when it is missing from the base or its hash
 * differs; "deleted" when the base has it but the incoming core does not.
 * Changed files are additionally cross-referenced against applied-skill
 * hashes (conflictRisk) and recorded custom modifications
 * (customPatchesAtRisk).
 */
export function previewUpdate(newCorePath: string): UpdatePreview {
  const projectRoot = process.cwd();
  const state = readState();
  const baseDir = path.join(projectRoot, BASE_DIR);

  // Version advertised by the incoming core's package.json, if readable.
  const newPkgPath = path.join(newCorePath, 'package.json');
  let newVersion = 'unknown';
  if (fs.existsSync(newPkgPath)) {
    const pkg = JSON.parse(fs.readFileSync(newPkgPath, 'utf-8'));
    newVersion = pkg.version ?? 'unknown';
  }

  // Changed = absent from base, or content hash differs from base.
  const newCoreFiles = walkDir(newCorePath);
  const filesChanged = newCoreFiles.filter((relPath) => {
    const basePath = path.join(baseDir, relPath);
    if (!fs.existsSync(basePath)) return true;
    const newPath = path.join(newCorePath, relPath);
    return computeFileHash(basePath) !== computeFileHash(newPath);
  });

  // Deleted = present in base but missing from the incoming core.
  let filesDeleted: string[] = [];
  if (fs.existsSync(baseDir)) {
    const incoming = new Set(newCoreFiles);
    filesDeleted = walkDir(baseDir).filter(
      (relPath) => !incoming.has(relPath),
    );
  }

  // Overlap checks: skills record per-file hashes; custom modifications
  // record the files each patch touched.
  const conflictRisk = filesChanged.filter((relPath) =>
    state.applied_skills.some((skill) => skill.file_hashes[relPath]),
  );
  const customPatchesAtRisk = filesChanged.filter((relPath) =>
    (state.custom_modifications ?? []).some((mod) =>
      mod.files_modified.includes(relPath),
    ),
  );

  return {
    currentVersion: state.core_version,
    newVersion,
    filesChanged,
    filesDeleted,
    conflictRisk,
    customPatchesAtRisk,
  };
}
/**
 * Applies a new core tree over the current project via three-way merge.
 *
 * Flow: pre-flight check → lock → preview → backup → per-file merge →
 * deletions → custom-patch replay → path remaps → base refresh →
 * structured-ops replay → per-skill test runs → state bump → cleanup.
 *
 * Expected failure modes (active customize session, merge conflicts) are
 * returned as non-success results rather than thrown; unexpected errors
 * restore the backup and are reported via `error`.
 */
export async function applyUpdate(newCorePath: string): Promise<UpdateResult> {
  const projectRoot = process.cwd();
  const state = readState();
  const baseDir = path.join(projectRoot, BASE_DIR);
  // --- Pre-flight ---
  // Refuse to update mid-customize: merging on top of an uncommitted
  // customize session would tangle the two change sets.
  if (isCustomizeActive()) {
    return {
      success: false,
      previousVersion: state.core_version,
      newVersion: 'unknown',
      error:
        'A customize session is active. Run commitCustomize() or abortCustomize() first.',
    };
  }
  const releaseLock = acquireLock();
  try {
    // --- Preview ---
    const preview = previewUpdate(newCorePath);
    // --- Backup ---
    // Back up every file the merge or the deletion pass may touch.
    const filesToBackup = [
      ...preview.filesChanged.map((f) => path.join(projectRoot, f)),
      ...preview.filesDeleted.map((f) => path.join(projectRoot, f)),
    ];
    createBackup(filesToBackup);
    // --- Three-way merge ---
    const mergeConflicts: string[] = [];
    for (const relPath of preview.filesChanged) {
      const currentPath = path.join(projectRoot, relPath);
      const basePath = path.join(baseDir, relPath);
      const newCoreSrcPath = path.join(newCorePath, relPath);
      if (!fs.existsSync(currentPath)) {
        // File doesn't exist yet — just copy from new core
        fs.mkdirSync(path.dirname(currentPath), { recursive: true });
        fs.copyFileSync(newCoreSrcPath, currentPath);
        continue;
      }
      if (!fs.existsSync(basePath)) {
        // No base — use current as base
        fs.mkdirSync(path.dirname(basePath), { recursive: true });
        fs.copyFileSync(currentPath, basePath);
      }
      // Three-way merge: current ← base → newCore
      // Merge into a temp copy so mergeFile never mutates the working-tree
      // file directly; the result (clean or conflicted) is copied back.
      const tmpCurrent = path.join(
        os.tmpdir(),
        `nanoclaw-update-${crypto.randomUUID()}-${path.basename(relPath)}`,
      );
      fs.copyFileSync(currentPath, tmpCurrent);
      const result = mergeFile(tmpCurrent, basePath, newCoreSrcPath);
      if (result.clean) {
        fs.copyFileSync(tmpCurrent, currentPath);
        fs.unlinkSync(tmpCurrent);
      } else {
        // Conflict — copy markers to working tree
        fs.copyFileSync(tmpCurrent, currentPath);
        fs.unlinkSync(tmpCurrent);
        mergeConflicts.push(relPath);
      }
    }
    if (mergeConflicts.length > 0) {
      // Preserve backup so user can resolve conflicts manually, then continue
      // Call clearBackup() after resolution or restoreBackup() + clearBackup() to abort
      return {
        success: false,
        previousVersion: preview.currentVersion,
        newVersion: preview.newVersion,
        mergeConflicts,
        backupPending: true,
        error: `Unresolved merge conflicts in: ${mergeConflicts.join(', ')}. Resolve manually then call clearBackup(), or restoreBackup() + clearBackup() to abort.`,
      };
    }
    // --- Remove deleted files ---
    for (const relPath of preview.filesDeleted) {
      const currentPath = path.join(projectRoot, relPath);
      if (fs.existsSync(currentPath)) {
        fs.unlinkSync(currentPath);
      }
    }
    // --- Re-apply custom patches ---
    // Patch failures are collected and reported, not fatal: the update
    // proceeds and the caller decides what to do about them.
    const customPatchFailures: string[] = [];
    if (state.custom_modifications) {
      for (const mod of state.custom_modifications) {
        const patchPath = path.join(projectRoot, mod.patch_file);
        if (!fs.existsSync(patchPath)) {
          customPatchFailures.push(
            `${mod.description}: patch file missing (${mod.patch_file})`,
          );
          continue;
        }
        try {
          execFileSync('git', ['apply', '--3way', patchPath], {
            stdio: 'pipe',
            cwd: projectRoot,
          });
        } catch {
          customPatchFailures.push(mod.description);
        }
      }
    }
    // --- Record path remaps from update metadata ---
    const remapFile = path.join(
      newCorePath,
      '.nanoclaw-meta',
      'path_remap.yaml',
    );
    if (fs.existsSync(remapFile)) {
      const remap = parseYaml(fs.readFileSync(remapFile, 'utf-8')) as Record<
        string,
        string
      >;
      if (remap && typeof remap === 'object') {
        recordPathRemap(remap);
      }
    }
    // --- Update base ---
    // Replace the tracked base wholesale with the new core snapshot.
    if (fs.existsSync(baseDir)) {
      fs.rmSync(baseDir, { recursive: true, force: true });
    }
    fs.mkdirSync(baseDir, { recursive: true });
    copyDir(newCorePath, baseDir);
    // --- Structured ops: re-apply from all skills ---
    // Aggregate structured outcomes across skills first so each merge
    // (package.json / .env.example / docker-compose) happens at most once.
    const allNpmDeps: Record<string, string> = {};
    const allEnvAdditions: string[] = [];
    const allDockerServices: Record<string, unknown> = {};
    let hasNpmDeps = false;
    for (const skill of state.applied_skills) {
      const outcomes = skill.structured_outcomes as
        | Record<string, unknown>
        | undefined;
      if (!outcomes) continue;
      if (outcomes.npm_dependencies) {
        Object.assign(
          allNpmDeps,
          outcomes.npm_dependencies as Record<string, string>,
        );
        hasNpmDeps = true;
      }
      if (outcomes.env_additions) {
        allEnvAdditions.push(...(outcomes.env_additions as string[]));
      }
      if (outcomes.docker_compose_services) {
        Object.assign(
          allDockerServices,
          outcomes.docker_compose_services as Record<string, unknown>,
        );
      }
    }
    if (hasNpmDeps) {
      const pkgPath = path.join(projectRoot, 'package.json');
      mergeNpmDependencies(pkgPath, allNpmDeps);
    }
    if (allEnvAdditions.length > 0) {
      const envPath = path.join(projectRoot, '.env.example');
      mergeEnvAdditions(envPath, allEnvAdditions);
    }
    if (Object.keys(allDockerServices).length > 0) {
      const composePath = path.join(projectRoot, 'docker-compose.yml');
      mergeDockerComposeServices(composePath, allDockerServices);
    }
    if (hasNpmDeps) {
      runNpmInstall();
    }
    // --- Run tests for each applied skill ---
    // Best-effort: a failing skill test is recorded per skill rather than
    // aborting the update.
    const skillReapplyResults: Record<string, boolean> = {};
    for (const skill of state.applied_skills) {
      const outcomes = skill.structured_outcomes as
        | Record<string, unknown>
        | undefined;
      if (!outcomes?.test) continue;
      const testCmd = outcomes.test as string;
      try {
        execSync(testCmd, {
          stdio: 'pipe',
          cwd: projectRoot,
          timeout: 120_000,
        });
        skillReapplyResults[skill.name] = true;
      } catch {
        skillReapplyResults[skill.name] = false;
      }
    }
    // --- Update state ---
    state.core_version = preview.newVersion;
    writeState(state);
    // --- Cleanup ---
    clearBackup();
    return {
      success: true,
      previousVersion: preview.currentVersion,
      newVersion: preview.newVersion,
      customPatchFailures:
        customPatchFailures.length > 0 ? customPatchFailures : undefined,
      skillReapplyResults:
        Object.keys(skillReapplyResults).length > 0
          ? skillReapplyResults
          : undefined,
    };
  } catch (err) {
    // Unexpected failure: roll the working tree back and surface the error.
    restoreBackup();
    clearBackup();
    return {
      success: false,
      previousVersion: state.core_version,
      newVersion: 'unknown',
      error: err instanceof Error ? err.message : String(err),
    };
  } finally {
    releaseLock();
  }
}