Compare commits
176 Commits
cd8d043eaf
...
576d5ce366
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
576d5ce366 | ||
|
|
c3e9a892c2 | ||
|
|
acb0abaf8b | ||
|
|
4f1b09fcb6 | ||
|
|
fa4ace423c | ||
|
|
e6e0c6fa9e | ||
|
|
c5e0001637 | ||
|
|
e73bf2f324 | ||
|
|
c98205ca0d | ||
|
|
2faf1c6e19 | ||
|
|
842ec5fd30 | ||
|
|
017a72d57d | ||
|
|
bd94c8144a | ||
|
|
6e602a1f5b | ||
|
|
415a1cfd44 | ||
|
|
fee05f7ee8 | ||
|
|
877650541a | ||
|
|
c923f07829 | ||
|
|
f138f25c79 | ||
|
|
e9e9e05290 | ||
|
|
5b7b0867da | ||
|
|
e606eac91d | ||
|
|
8935e4f636 | ||
|
|
f900670aaf | ||
|
|
62fc8c7708 | ||
|
|
7e7492ebba | ||
|
|
7b22e23761 | ||
|
|
2f472a8600 | ||
|
|
8f01a9a05e | ||
|
|
a4591ab5e0 | ||
|
|
3332da03af | ||
|
|
f5375972c4 | ||
|
|
0f01fe2c07 | ||
|
|
a4fd4f2a2f | ||
|
|
4383e3e61a | ||
|
|
1f5cc760a7 | ||
|
|
722c8ee595 | ||
|
|
730ea0d713 | ||
|
|
637545dfca | ||
|
|
4588579622 | ||
|
|
eda14f472b | ||
|
|
a29ca0835c | ||
|
|
813e1c6fa4 | ||
|
|
d25b79a5a9 | ||
|
|
a41746530f | ||
|
|
d398ba5ac6 | ||
|
|
8b53a95a5f | ||
|
|
4c6d9241d4 | ||
|
|
87c3640cfc | ||
|
|
e4f15b659e | ||
|
|
349b54ae9e | ||
|
|
9413ace113 | ||
|
|
2c447085b5 | ||
|
|
2cddefbef4 | ||
|
|
125757bc7d | ||
|
|
2483cb3e2a | ||
|
|
c16d70cdf7 | ||
|
|
f7979bfa11 | ||
|
|
271acf9101 | ||
|
|
ab9613a2b0 | ||
|
|
68c59a1abf | ||
|
|
b16fe4d9fc | ||
|
|
5f385974e7 | ||
|
|
deb5389077 | ||
|
|
7bfd060536 | ||
|
|
255e139433 | ||
|
|
3699363eb7 | ||
|
|
3a26f69c7f | ||
|
|
aae173d86f | ||
|
|
23e9e1c150 | ||
|
|
8b6e9d6cf6 | ||
|
|
77b7c658d6 | ||
|
|
5954dfb3e7 | ||
|
|
1f36232ef0 | ||
|
|
e9e6d987ac | ||
|
|
608f935ad7 | ||
|
|
b2fa85b04a | ||
|
|
7bba21af1e | ||
|
|
22c1186f16 | ||
|
|
b71414957d | ||
|
|
6d4f972ad0 | ||
|
|
36a8ec643f | ||
|
|
28937938b2 | ||
|
|
b8f6a9b794 | ||
|
|
89681a6d0d | ||
|
|
fd444681ef | ||
|
|
72404968e1 | ||
|
|
115b0a3167 | ||
|
|
17c63b94a2 | ||
|
|
ff4075d9cb | ||
|
|
8824a84afe | ||
|
|
627f13a83c | ||
|
|
df76dc6797 | ||
|
|
bb736f37f2 | ||
|
|
f3644f123e | ||
|
|
deece6bf35 | ||
|
|
9391304e70 | ||
|
|
31c03cf924 | ||
|
|
33ff3b8c03 | ||
|
|
b112fafff4 | ||
|
|
300dcda9c9 | ||
|
|
0240f48751 | ||
|
|
b7434b8a76 | ||
|
|
15b9aa99ff | ||
|
|
80f6fb2b9a | ||
|
|
5395b732a5 | ||
|
|
cf5fa1daf0 | ||
|
|
d4073a01c5 | ||
|
|
d622a79fe2 | ||
|
|
6e5834ee3c | ||
|
|
093530a418 | ||
|
|
675a6d87a3 | ||
|
|
e60eb6dea2 | ||
|
|
63f680d0be | ||
|
|
1b18d50ae4 | ||
|
|
4e3189da8f | ||
|
|
2c46d74066 | ||
|
|
aeabfcc65a | ||
|
|
5d5b90448c | ||
|
|
341b8df0a2 | ||
|
|
f375dd5011 | ||
|
|
6d4e251534 | ||
|
|
11847a1af0 | ||
|
|
616c1ae10a | ||
|
|
2142f03eaf | ||
|
|
4d853c5d38 | ||
|
|
e26e1b3e68 | ||
|
|
bf9b7d0311 | ||
|
|
57e520c7e1 | ||
|
|
2f1d7fe98b | ||
|
|
b7f59da70a | ||
|
|
8d0baac892 | ||
|
|
e6df18ca8b | ||
|
|
1fff99ffb8 | ||
|
|
58faf624a3 | ||
|
|
7d640cb9f6 | ||
|
|
8fc42e4f82 | ||
|
|
7366b0d7db | ||
|
|
0015931e37 | ||
|
|
d05a8dec49 | ||
|
|
35722801e3 | ||
|
|
07cf1fb8a5 | ||
|
|
14247d0b57 | ||
|
|
b0671ef9e6 | ||
|
|
81f6703102 | ||
|
|
d8cc230227 | ||
|
|
57085cc02e | ||
|
|
3207c35e50 | ||
|
|
07dc8c977c | ||
|
|
b6e18688c2 | ||
|
|
5a12ddd4cb | ||
|
|
8dcc70cf5c | ||
|
|
01b6258f59 | ||
|
|
724fe7250d | ||
|
|
5f42646598 | ||
|
|
ff16e93713 | ||
|
|
4f7efd3c67 | ||
|
|
def3748d02 | ||
|
|
d40affbdef | ||
|
|
2583af7ead | ||
|
|
7f6298a1bb | ||
|
|
b7f8c20a25 | ||
|
|
e9369617fb | ||
|
|
00ff0e00eb | ||
|
|
b7f1d48423 | ||
|
|
a4dc3a7446 | ||
|
|
9f5aff99b6 | ||
|
|
42d098c3c1 | ||
|
|
eb65121938 | ||
|
|
0f283cbdd3 | ||
|
|
a516cc5cfe | ||
|
|
675acffeb1 | ||
|
|
f629f9361a | ||
|
|
9e5dde6ebb | ||
|
|
82206570d1 | ||
|
|
32dda34af4 |
289
.claude/skills/add-emacs/SKILL.md
Normal file
289
.claude/skills/add-emacs/SKILL.md
Normal file
@@ -0,0 +1,289 @@
|
|||||||
|
---
|
||||||
|
name: add-emacs
|
||||||
|
description: Add Emacs as a channel. Opens an interactive chat buffer and org-mode integration so you can talk to NanoClaw from within Emacs (Doom, Spacemacs, or vanilla). Uses a local HTTP bridge — no bot token or external service needed.
|
||||||
|
---
|
||||||
|
|
||||||
|
# Add Emacs Channel
|
||||||
|
|
||||||
|
This skill adds Emacs support to NanoClaw, then walks through interactive setup.
|
||||||
|
Works with Doom Emacs, Spacemacs, and vanilla Emacs 27.1+.
|
||||||
|
|
||||||
|
## What you can do with this
|
||||||
|
|
||||||
|
- **Ask while coding** — open the chat buffer (`C-c n c` / `SPC N c`), ask about a function or error without leaving Emacs
|
||||||
|
- **Code review** — select a region and send it with `nanoclaw-org-send`; the response appears as a child heading inline in your org file
|
||||||
|
- **Meeting notes** — send an org agenda entry; get a summary or action item list back as a child node
|
||||||
|
- **Draft writing** — send org prose; receive revisions or continuations in place
|
||||||
|
- **Research capture** — ask a question directly in your org notes; the answer lands exactly where you need it
|
||||||
|
- **Schedule tasks** — ask Andy to set a reminder or create a scheduled NanoClaw task (e.g. "remind me tomorrow to review the PR")
|
||||||
|
|
||||||
|
## Phase 1: Pre-flight
|
||||||
|
|
||||||
|
### Check if already applied
|
||||||
|
|
||||||
|
Check if `src/channels/emacs.ts` exists:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
test -f src/channels/emacs.ts && echo "already applied" || echo "not applied"
|
||||||
|
```
|
||||||
|
|
||||||
|
If it exists, skip to Phase 3 (Setup). The code changes are already in place.
|
||||||
|
|
||||||
|
## Phase 2: Apply Code Changes
|
||||||
|
|
||||||
|
### Ensure the upstream remote
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git remote -v
|
||||||
|
```
|
||||||
|
|
||||||
|
If an `upstream` remote pointing to `https://github.com/qwibitai/nanoclaw.git` is missing,
|
||||||
|
add it:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git remote add upstream https://github.com/qwibitai/nanoclaw.git
|
||||||
|
```
|
||||||
|
|
||||||
|
### Merge the skill branch
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git fetch upstream skill/emacs
|
||||||
|
git merge upstream/skill/emacs
|
||||||
|
```
|
||||||
|
|
||||||
|
If there are merge conflicts on `package-lock.json`, resolve them by accepting the incoming
|
||||||
|
version and continuing:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git checkout --theirs package-lock.json
|
||||||
|
git add package-lock.json
|
||||||
|
git merge --continue
|
||||||
|
```
|
||||||
|
|
||||||
|
For any other conflict, read the conflicted file and reconcile both sides manually.
|
||||||
|
|
||||||
|
This adds:
|
||||||
|
- `src/channels/emacs.ts` — `EmacsBridgeChannel` HTTP server (port 8766)
|
||||||
|
- `src/channels/emacs.test.ts` — unit tests
|
||||||
|
- `emacs/nanoclaw.el` — Emacs Lisp package (`nanoclaw-chat`, `nanoclaw-org-send`)
|
||||||
|
- `import './emacs.js'` appended to `src/channels/index.ts`
|
||||||
|
|
||||||
|
If the merge reports conflicts, resolve them by reading the conflicted files and understanding the intent of both sides.
|
||||||
|
|
||||||
|
### Validate code changes
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm run build
|
||||||
|
npx vitest run src/channels/emacs.test.ts
|
||||||
|
```
|
||||||
|
|
||||||
|
Build must be clean and tests must pass before proceeding.
|
||||||
|
|
||||||
|
## Phase 3: Setup
|
||||||
|
|
||||||
|
### Configure environment (optional)
|
||||||
|
|
||||||
|
The channel works out of the box with defaults. Add to `.env` only if you need non-defaults:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
EMACS_CHANNEL_PORT=8766 # default — change if 8766 is already in use
|
||||||
|
EMACS_AUTH_TOKEN=<random> # optional — locks the endpoint to Emacs only
|
||||||
|
```
|
||||||
|
|
||||||
|
If you change or add values, sync to the container environment:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
mkdir -p data/env && cp .env data/env/env
|
||||||
|
```
|
||||||
|
|
||||||
|
### Configure Emacs
|
||||||
|
|
||||||
|
The `nanoclaw.el` package requires only Emacs 27.1+ built-in libraries (`url`, `json`, `org`) — no package manager setup needed.
|
||||||
|
|
||||||
|
AskUserQuestion: Which Emacs distribution are you using?
|
||||||
|
- **Doom Emacs** - config.el with map! keybindings
|
||||||
|
- **Spacemacs** - dotspacemacs/user-config in ~/.spacemacs
|
||||||
|
- **Vanilla Emacs / other** - init.el with global-set-key
|
||||||
|
|
||||||
|
**Doom Emacs** — add to `~/.config/doom/config.el` (or `~/.doom.d/config.el`):
|
||||||
|
|
||||||
|
```elisp
|
||||||
|
;; NanoClaw — personal AI assistant channel
|
||||||
|
(load (expand-file-name "~/src/nanoclaw/emacs/nanoclaw.el"))
|
||||||
|
|
||||||
|
(map! :leader
|
||||||
|
:prefix ("N" . "NanoClaw")
|
||||||
|
:desc "Chat buffer" "c" #'nanoclaw-chat
|
||||||
|
:desc "Send org" "o" #'nanoclaw-org-send)
|
||||||
|
```
|
||||||
|
|
||||||
|
Then reload: `M-x doom/reload`
|
||||||
|
|
||||||
|
**Spacemacs** — add to `dotspacemacs/user-config` in `~/.spacemacs`:
|
||||||
|
|
||||||
|
```elisp
|
||||||
|
;; NanoClaw — personal AI assistant channel
|
||||||
|
(load-file "~/src/nanoclaw/emacs/nanoclaw.el")
|
||||||
|
|
||||||
|
(spacemacs/set-leader-keys "aNc" #'nanoclaw-chat)
|
||||||
|
(spacemacs/set-leader-keys "aNo" #'nanoclaw-org-send)
|
||||||
|
```
|
||||||
|
|
||||||
|
Then reload: `M-x dotspacemacs/sync-configuration-layers` or restart Emacs.
|
||||||
|
|
||||||
|
**Vanilla Emacs** — add to `~/.emacs.d/init.el` (or `~/.emacs`):
|
||||||
|
|
||||||
|
```elisp
|
||||||
|
;; NanoClaw — personal AI assistant channel
|
||||||
|
(load-file "~/src/nanoclaw/emacs/nanoclaw.el")
|
||||||
|
|
||||||
|
(global-set-key (kbd "C-c n c") #'nanoclaw-chat)
|
||||||
|
(global-set-key (kbd "C-c n o") #'nanoclaw-org-send)
|
||||||
|
```
|
||||||
|
|
||||||
|
Then reload: `M-x eval-buffer` or restart Emacs.
|
||||||
|
|
||||||
|
If `EMACS_AUTH_TOKEN` was set, also add (any distribution):
|
||||||
|
|
||||||
|
```elisp
|
||||||
|
(setq nanoclaw-auth-token "<your-token>")
|
||||||
|
```
|
||||||
|
|
||||||
|
If `EMACS_CHANNEL_PORT` was changed from the default, also add:
|
||||||
|
|
||||||
|
```elisp
|
||||||
|
(setq nanoclaw-port <your-port>)
|
||||||
|
```
|
||||||
|
|
||||||
|
### Restart NanoClaw
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm run build
|
||||||
|
launchctl kickstart -k gui/$(id -u)/com.nanoclaw # macOS
|
||||||
|
# Linux: systemctl --user restart nanoclaw
|
||||||
|
```
|
||||||
|
|
||||||
|
## Phase 4: Verify
|
||||||
|
|
||||||
|
### Test the HTTP endpoint
|
||||||
|
|
||||||
|
```bash
|
||||||
|
curl -s "http://localhost:8766/api/messages?since=0"
|
||||||
|
```
|
||||||
|
|
||||||
|
Expected: `{"messages":[]}`
|
||||||
|
|
||||||
|
If you set `EMACS_AUTH_TOKEN`:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
curl -s -H "Authorization: Bearer <token>" "http://localhost:8766/api/messages?since=0"
|
||||||
|
```
|
||||||
|
|
||||||
|
### Test from Emacs
|
||||||
|
|
||||||
|
Tell the user:
|
||||||
|
|
||||||
|
> 1. Open the chat buffer with your keybinding (`SPC N c`, `SPC a N c`, or `C-c n c`)
|
||||||
|
> 2. Type a message and press `RET`
|
||||||
|
> 3. A response from Andy should appear within a few seconds
|
||||||
|
>
|
||||||
|
> For org-mode: open any `.org` file, position the cursor on a heading, and use `SPC N o` / `SPC a N o` / `C-c n o`
|
||||||
|
|
||||||
|
### Check logs if needed
|
||||||
|
|
||||||
|
```bash
|
||||||
|
tail -f logs/nanoclaw.log
|
||||||
|
```
|
||||||
|
|
||||||
|
Look for `Emacs channel listening` at startup and `Emacs message received` when a message is sent.
|
||||||
|
|
||||||
|
## Troubleshooting
|
||||||
|
|
||||||
|
### Port already in use
|
||||||
|
|
||||||
|
```
|
||||||
|
Error: listen EADDRINUSE: address already in use :::8766
|
||||||
|
```
|
||||||
|
|
||||||
|
Either a stale NanoClaw process is running, or 8766 is taken by another app.
|
||||||
|
|
||||||
|
Find and kill the stale process:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
lsof -ti :8766 | xargs kill -9
|
||||||
|
```
|
||||||
|
|
||||||
|
Or change the port in `.env` (`EMACS_CHANNEL_PORT=8767`) and update `nanoclaw-port` in Emacs config.
|
||||||
|
|
||||||
|
### No response from agent
|
||||||
|
|
||||||
|
Check:
|
||||||
|
1. NanoClaw is running: `launchctl list | grep nanoclaw` (macOS) or `systemctl --user status nanoclaw` (Linux)
|
||||||
|
2. Emacs group is registered: `sqlite3 store/messages.db "SELECT * FROM registered_groups WHERE jid = 'emacs:default'"`
|
||||||
|
3. Logs show activity: `tail -50 logs/nanoclaw.log`
|
||||||
|
|
||||||
|
If the group is not registered, it will be created automatically on the next NanoClaw restart.
|
||||||
|
|
||||||
|
### Auth token mismatch (401 Unauthorized)
|
||||||
|
|
||||||
|
Verify the token in Emacs matches `.env`:
|
||||||
|
|
||||||
|
```elisp
|
||||||
|
;; M-x describe-variable RET nanoclaw-auth-token RET
|
||||||
|
```
|
||||||
|
|
||||||
|
Must exactly match `EMACS_AUTH_TOKEN` in `.env`.
|
||||||
|
|
||||||
|
### nanoclaw.el not loading
|
||||||
|
|
||||||
|
Check the path is correct:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
ls ~/src/nanoclaw/emacs/nanoclaw.el
|
||||||
|
```
|
||||||
|
|
||||||
|
If NanoClaw is cloned elsewhere, update the `load`/`load-file` path in your Emacs config.
|
||||||
|
|
||||||
|
## After Setup
|
||||||
|
|
||||||
|
If running `npm run dev` while the service is active:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# macOS:
|
||||||
|
launchctl unload ~/Library/LaunchAgents/com.nanoclaw.plist
|
||||||
|
npm run dev
|
||||||
|
# When done testing:
|
||||||
|
launchctl load ~/Library/LaunchAgents/com.nanoclaw.plist
|
||||||
|
|
||||||
|
# Linux:
|
||||||
|
# systemctl --user stop nanoclaw
|
||||||
|
# npm run dev
|
||||||
|
# systemctl --user start nanoclaw
|
||||||
|
```
|
||||||
|
|
||||||
|
## Agent Formatting
|
||||||
|
|
||||||
|
The Emacs bridge converts markdown → org-mode automatically. Agents should
|
||||||
|
output standard markdown — **not** org-mode syntax. The conversion handles:
|
||||||
|
|
||||||
|
| Markdown | Org-mode |
|
||||||
|
|----------|----------|
|
||||||
|
| `**bold**` | `*bold*` |
|
||||||
|
| `*italic*` | `/italic/` |
|
||||||
|
| `~~text~~` | `+text+` |
|
||||||
|
| `` `code` `` | `~code~` |
|
||||||
|
| ` ```lang ` | `#+begin_src lang` |
|
||||||
|
|
||||||
|
If an agent outputs org-mode directly, bold/italic/etc. will be double-converted
|
||||||
|
and render incorrectly.
|
||||||
|
|
||||||
|
## Removal
|
||||||
|
|
||||||
|
To remove the Emacs channel:
|
||||||
|
|
||||||
|
1. Delete `src/channels/emacs.ts`, `src/channels/emacs.test.ts`, and `emacs/nanoclaw.el`
|
||||||
|
2. Remove `import './emacs.js'` from `src/channels/index.ts`
|
||||||
|
3. Remove the NanoClaw block from your Emacs config file
|
||||||
|
4. Remove Emacs registration from SQLite: `sqlite3 store/messages.db "DELETE FROM registered_groups WHERE jid = 'emacs:default'"`
|
||||||
|
5. Remove `EMACS_CHANNEL_PORT` and `EMACS_AUTH_TOKEN` from `.env` if set
|
||||||
|
6. Rebuild: `npm run build && launchctl kickstart -k gui/$(id -u)/com.nanoclaw` (macOS) or `npm run build && systemctl --user restart nanoclaw` (Linux)
|
||||||
133
.claude/skills/add-macos-statusbar/SKILL.md
Normal file
133
.claude/skills/add-macos-statusbar/SKILL.md
Normal file
@@ -0,0 +1,133 @@
|
|||||||
|
---
|
||||||
|
name: add-macos-statusbar
|
||||||
|
description: Add a macOS menu bar status indicator for NanoClaw. Shows a bolt icon with a green/red dot indicating whether NanoClaw is running, with Start, Stop, and Restart controls. macOS only.
|
||||||
|
---
|
||||||
|
|
||||||
|
# Add macOS Menu Bar Status Indicator
|
||||||
|
|
||||||
|
Adds a persistent menu bar icon that shows NanoClaw's running status and lets the user
|
||||||
|
start, stop, or restart the service — similar to how Docker Desktop appears in the menu bar.
|
||||||
|
|
||||||
|
**macOS only.** Requires Xcode Command Line Tools (`swiftc`).
|
||||||
|
|
||||||
|
## Phase 1: Pre-flight
|
||||||
|
|
||||||
|
### Check platform
|
||||||
|
|
||||||
|
If not on macOS, stop and tell the user:
|
||||||
|
|
||||||
|
> This skill is macOS only. The menu bar status indicator uses AppKit and requires `swiftc` (Xcode Command Line Tools).
|
||||||
|
|
||||||
|
### Check for swiftc
|
||||||
|
|
||||||
|
```bash
|
||||||
|
which swiftc
|
||||||
|
```
|
||||||
|
|
||||||
|
If not found, tell the user:
|
||||||
|
|
||||||
|
> Xcode Command Line Tools are required. Install them by running:
|
||||||
|
>
|
||||||
|
> ```bash
|
||||||
|
> xcode-select --install
|
||||||
|
> ```
|
||||||
|
>
|
||||||
|
> Then re-run `/add-macos-statusbar`.
|
||||||
|
|
||||||
|
### Check if already installed
|
||||||
|
|
||||||
|
```bash
|
||||||
|
launchctl list | grep com.nanoclaw.statusbar
|
||||||
|
```
|
||||||
|
|
||||||
|
If it returns a PID (not `-`), tell the user it's already installed and skip to Phase 3 (Verify).
|
||||||
|
|
||||||
|
## Phase 2: Compile and Install
|
||||||
|
|
||||||
|
### Compile the Swift binary
|
||||||
|
|
||||||
|
The source lives in the skill directory. Compile it into `dist/`:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
mkdir -p dist
|
||||||
|
swiftc -O -o dist/statusbar "${CLAUDE_SKILL_DIR}/add/src/statusbar.swift"
|
||||||
|
```
|
||||||
|
|
||||||
|
This produces a small native binary at `dist/statusbar`.
|
||||||
|
|
||||||
|
On macOS Sequoia or later, clear the quarantine attribute so the binary can run:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
xattr -cr dist/statusbar
|
||||||
|
```
|
||||||
|
|
||||||
|
### Create the launchd plist
|
||||||
|
|
||||||
|
Determine the absolute project root and home directory:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
pwd
|
||||||
|
echo $HOME
|
||||||
|
```
|
||||||
|
|
||||||
|
Create `~/Library/LaunchAgents/com.nanoclaw.statusbar.plist`, substituting the actual values
|
||||||
|
for `{PROJECT_ROOT}` and `{HOME}`:
|
||||||
|
|
||||||
|
```xml
|
||||||
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
|
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||||
|
<plist version="1.0">
|
||||||
|
<dict>
|
||||||
|
<key>Label</key>
|
||||||
|
<string>com.nanoclaw.statusbar</string>
|
||||||
|
<key>ProgramArguments</key>
|
||||||
|
<array>
|
||||||
|
<string>{PROJECT_ROOT}/dist/statusbar</string>
|
||||||
|
</array>
|
||||||
|
<key>RunAtLoad</key>
|
||||||
|
<true/>
|
||||||
|
<key>KeepAlive</key>
|
||||||
|
<true/>
|
||||||
|
<key>EnvironmentVariables</key>
|
||||||
|
<dict>
|
||||||
|
<key>HOME</key>
|
||||||
|
<string>{HOME}</string>
|
||||||
|
</dict>
|
||||||
|
<key>StandardOutPath</key>
|
||||||
|
<string>{PROJECT_ROOT}/logs/statusbar.log</string>
|
||||||
|
<key>StandardErrorPath</key>
|
||||||
|
<string>{PROJECT_ROOT}/logs/statusbar.error.log</string>
|
||||||
|
</dict>
|
||||||
|
</plist>
|
||||||
|
```
|
||||||
|
|
||||||
|
### Load the service
|
||||||
|
|
||||||
|
```bash
|
||||||
|
launchctl load ~/Library/LaunchAgents/com.nanoclaw.statusbar.plist
|
||||||
|
```
|
||||||
|
|
||||||
|
## Phase 3: Verify
|
||||||
|
|
||||||
|
```bash
|
||||||
|
launchctl list | grep com.nanoclaw.statusbar
|
||||||
|
```
|
||||||
|
|
||||||
|
The first column should show a PID (not `-`).
|
||||||
|
|
||||||
|
Tell the user:
|
||||||
|
|
||||||
|
> The bolt icon should now appear in your macOS menu bar. Click it to see NanoClaw's status and control the service.
|
||||||
|
>
|
||||||
|
> - **Green dot** — NanoClaw is running
|
||||||
|
> - **Red dot** — NanoClaw is stopped
|
||||||
|
>
|
||||||
|
> Use **Restart** after making code changes, and **View Logs** to open the log file directly.
|
||||||
|
|
||||||
|
## Removal
|
||||||
|
|
||||||
|
```bash
|
||||||
|
launchctl unload ~/Library/LaunchAgents/com.nanoclaw.statusbar.plist
|
||||||
|
rm ~/Library/LaunchAgents/com.nanoclaw.statusbar.plist
|
||||||
|
rm dist/statusbar
|
||||||
|
```
|
||||||
147
.claude/skills/add-macos-statusbar/add/src/statusbar.swift
Normal file
147
.claude/skills/add-macos-statusbar/add/src/statusbar.swift
Normal file
@@ -0,0 +1,147 @@
|
|||||||
|
import AppKit
|
||||||
|
|
||||||
|
class StatusBarController: NSObject {
|
||||||
|
private var statusItem: NSStatusItem!
|
||||||
|
private var isRunning = false
|
||||||
|
private var timer: Timer?
|
||||||
|
|
||||||
|
private let plistPath = "\(NSHomeDirectory())/Library/LaunchAgents/com.nanoclaw.plist"
|
||||||
|
|
||||||
|
/// Derive the NanoClaw project root from the binary location.
|
||||||
|
/// The binary is compiled to {project}/dist/statusbar, so the parent of
|
||||||
|
/// the parent directory is the project root.
|
||||||
|
private static let projectRoot: String = {
|
||||||
|
let binary = URL(fileURLWithPath: CommandLine.arguments[0]).resolvingSymlinksInPath()
|
||||||
|
return binary.deletingLastPathComponent().deletingLastPathComponent().path
|
||||||
|
}()
|
||||||
|
|
||||||
|
override init() {
|
||||||
|
super.init()
|
||||||
|
setupStatusItem()
|
||||||
|
isRunning = checkRunning()
|
||||||
|
updateMenu()
|
||||||
|
// Poll every 5 seconds to reflect external state changes
|
||||||
|
timer = Timer.scheduledTimer(withTimeInterval: 5.0, repeats: true) { [weak self] _ in
|
||||||
|
guard let self else { return }
|
||||||
|
let current = self.checkRunning()
|
||||||
|
if current != self.isRunning {
|
||||||
|
self.isRunning = current
|
||||||
|
self.updateMenu()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private func setupStatusItem() {
|
||||||
|
statusItem = NSStatusBar.system.statusItem(withLength: NSStatusItem.variableLength)
|
||||||
|
if let button = statusItem.button {
|
||||||
|
if let image = NSImage(systemSymbolName: "bolt.fill", accessibilityDescription: "NanoClaw") {
|
||||||
|
image.isTemplate = true
|
||||||
|
button.image = image
|
||||||
|
} else {
|
||||||
|
button.title = "⚡"
|
||||||
|
}
|
||||||
|
button.toolTip = "NanoClaw"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private func checkRunning() -> Bool {
|
||||||
|
let task = Process()
|
||||||
|
task.launchPath = "/bin/launchctl"
|
||||||
|
task.arguments = ["list", "com.nanoclaw"]
|
||||||
|
let pipe = Pipe()
|
||||||
|
task.standardOutput = pipe
|
||||||
|
task.standardError = Pipe()
|
||||||
|
guard (try? task.run()) != nil else { return false }
|
||||||
|
task.waitUntilExit()
|
||||||
|
if task.terminationStatus != 0 { return false }
|
||||||
|
let output = String(data: pipe.fileHandleForReading.readDataToEndOfFile(), encoding: .utf8) ?? ""
|
||||||
|
// launchctl list output: "PID\tExitCode\tLabel" — "-" means not running
|
||||||
|
let pid = output.trimmingCharacters(in: .whitespacesAndNewlines).components(separatedBy: "\t").first ?? "-"
|
||||||
|
return pid != "-"
|
||||||
|
}
|
||||||
|
|
||||||
|
private func updateMenu() {
|
||||||
|
let menu = NSMenu()
|
||||||
|
|
||||||
|
// Status row with colored dot
|
||||||
|
let statusItem = NSMenuItem()
|
||||||
|
let dot = "● "
|
||||||
|
let dotColor: NSColor = isRunning ? .systemGreen : .systemRed
|
||||||
|
let attr = NSMutableAttributedString(string: dot, attributes: [.foregroundColor: dotColor])
|
||||||
|
let label = isRunning ? "NanoClaw is running" : "NanoClaw is stopped"
|
||||||
|
attr.append(NSAttributedString(string: label, attributes: [.foregroundColor: NSColor.labelColor]))
|
||||||
|
statusItem.attributedTitle = attr
|
||||||
|
statusItem.isEnabled = false
|
||||||
|
menu.addItem(statusItem)
|
||||||
|
|
||||||
|
menu.addItem(NSMenuItem.separator())
|
||||||
|
|
||||||
|
if isRunning {
|
||||||
|
let stop = NSMenuItem(title: "Stop", action: #selector(stopService), keyEquivalent: "")
|
||||||
|
stop.target = self
|
||||||
|
menu.addItem(stop)
|
||||||
|
|
||||||
|
let restart = NSMenuItem(title: "Restart", action: #selector(restartService), keyEquivalent: "r")
|
||||||
|
restart.target = self
|
||||||
|
menu.addItem(restart)
|
||||||
|
} else {
|
||||||
|
let start = NSMenuItem(title: "Start", action: #selector(startService), keyEquivalent: "")
|
||||||
|
start.target = self
|
||||||
|
menu.addItem(start)
|
||||||
|
}
|
||||||
|
|
||||||
|
menu.addItem(NSMenuItem.separator())
|
||||||
|
|
||||||
|
let logs = NSMenuItem(title: "View Logs", action: #selector(viewLogs), keyEquivalent: "")
|
||||||
|
logs.target = self
|
||||||
|
menu.addItem(logs)
|
||||||
|
|
||||||
|
self.statusItem.menu = menu
|
||||||
|
}
|
||||||
|
|
||||||
|
@objc private func startService() {
|
||||||
|
run("/bin/launchctl", ["load", plistPath])
|
||||||
|
refresh(after: 2)
|
||||||
|
}
|
||||||
|
|
||||||
|
@objc private func stopService() {
|
||||||
|
run("/bin/launchctl", ["unload", plistPath])
|
||||||
|
refresh(after: 2)
|
||||||
|
}
|
||||||
|
|
||||||
|
@objc private func restartService() {
|
||||||
|
let uid = getuid()
|
||||||
|
run("/bin/launchctl", ["kickstart", "-k", "gui/\(uid)/com.nanoclaw"])
|
||||||
|
refresh(after: 3)
|
||||||
|
}
|
||||||
|
|
||||||
|
@objc private func viewLogs() {
|
||||||
|
let logPath = "\(StatusBarController.projectRoot)/logs/nanoclaw.log"
|
||||||
|
NSWorkspace.shared.open(URL(fileURLWithPath: logPath))
|
||||||
|
}
|
||||||
|
|
||||||
|
private func refresh(after seconds: Double) {
|
||||||
|
DispatchQueue.main.asyncAfter(deadline: .now() + seconds) { [weak self] in
|
||||||
|
guard let self else { return }
|
||||||
|
self.isRunning = self.checkRunning()
|
||||||
|
self.updateMenu()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@discardableResult
|
||||||
|
private func run(_ path: String, _ args: [String]) -> Int32 {
|
||||||
|
let task = Process()
|
||||||
|
task.launchPath = path
|
||||||
|
task.arguments = args
|
||||||
|
task.standardOutput = Pipe()
|
||||||
|
task.standardError = Pipe()
|
||||||
|
try? task.run()
|
||||||
|
task.waitUntilExit()
|
||||||
|
return task.terminationStatus
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let app = NSApplication.shared
|
||||||
|
app.setActivationPolicy(.accessory)
|
||||||
|
let controller = StatusBarController()
|
||||||
|
app.run()
|
||||||
@@ -1,15 +1,21 @@
|
|||||||
---
|
---
|
||||||
name: add-ollama-tool
|
name: add-ollama-tool
|
||||||
description: Add Ollama MCP server so the container agent can call local models for cheaper/faster tasks like summarization, translation, or general queries.
|
description: Add Ollama MCP server so the container agent can call local models and optionally manage the Ollama model library.
|
||||||
---
|
---
|
||||||
|
|
||||||
# Add Ollama Integration
|
# Add Ollama Integration
|
||||||
|
|
||||||
This skill adds a stdio-based MCP server that exposes local Ollama models as tools for the container agent. Claude remains the orchestrator but can offload work to local models.
|
This skill adds a stdio-based MCP server that exposes local Ollama models as tools for the container agent. Claude remains the orchestrator but can offload work to local models, and can optionally manage the model library directly.
|
||||||
|
|
||||||
Tools added:
|
Core tools (always available):
|
||||||
- `ollama_list_models` — lists installed Ollama models
|
- `ollama_list_models` — list installed Ollama models with name, size, and family
|
||||||
- `ollama_generate` — sends a prompt to a specified model and returns the response
|
- `ollama_generate` — send a prompt to a specified model and return the response
|
||||||
|
|
||||||
|
Management tools (opt-in via `OLLAMA_ADMIN_TOOLS=true`):
|
||||||
|
- `ollama_pull_model` — pull (download) a model from the Ollama registry
|
||||||
|
- `ollama_delete_model` — delete a locally installed model to free disk space
|
||||||
|
- `ollama_show_model` — show model details: modelfile, parameters, and architecture info
|
||||||
|
- `ollama_list_running` — list models currently loaded in memory with memory usage and processor type
|
||||||
|
|
||||||
## Phase 1: Pre-flight
|
## Phase 1: Pre-flight
|
||||||
|
|
||||||
@@ -89,6 +95,23 @@ Build must be clean before proceeding.
|
|||||||
|
|
||||||
## Phase 3: Configure
|
## Phase 3: Configure
|
||||||
|
|
||||||
|
### Enable model management tools (optional)
|
||||||
|
|
||||||
|
Ask the user:
|
||||||
|
|
||||||
|
> Would you like the agent to be able to **manage Ollama models** (pull, delete, inspect, list running)?
|
||||||
|
>
|
||||||
|
> - **Yes** — adds tools to pull new models, delete old ones, show model info, and check what's loaded in memory
|
||||||
|
> - **No** — the agent can only list installed models and generate responses (you manage models yourself on the host)
|
||||||
|
|
||||||
|
If the user wants management tools, add to `.env`:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
OLLAMA_ADMIN_TOOLS=true
|
||||||
|
```
|
||||||
|
|
||||||
|
If they decline (or don't answer), do not add the variable — management tools will be disabled by default.
|
||||||
|
|
||||||
### Set Ollama host (optional)
|
### Set Ollama host (optional)
|
||||||
|
|
||||||
By default, the MCP server connects to `http://host.docker.internal:11434` (Docker Desktop) with a fallback to `localhost`. To use a custom Ollama host, add to `.env`:
|
By default, the MCP server connects to `http://host.docker.internal:11434` (Docker Desktop) with a fallback to `localhost`. To use a custom Ollama host, add to `.env`:
|
||||||
@@ -106,7 +129,7 @@ launchctl kickstart -k gui/$(id -u)/com.nanoclaw # macOS
|
|||||||
|
|
||||||
## Phase 4: Verify
|
## Phase 4: Verify
|
||||||
|
|
||||||
### Test via WhatsApp
|
### Test inference
|
||||||
|
|
||||||
Tell the user:
|
Tell the user:
|
||||||
|
|
||||||
@@ -114,6 +137,14 @@ Tell the user:
|
|||||||
>
|
>
|
||||||
> The agent should use `ollama_list_models` to find available models, then `ollama_generate` to get a response.
|
> The agent should use `ollama_list_models` to find available models, then `ollama_generate` to get a response.
|
||||||
|
|
||||||
|
### Test model management (if enabled)
|
||||||
|
|
||||||
|
If `OLLAMA_ADMIN_TOOLS=true` was set, tell the user:
|
||||||
|
|
||||||
|
> Send a message like: "pull the gemma3:1b model" or "which ollama models are currently loaded in memory?"
|
||||||
|
>
|
||||||
|
> The agent should call `ollama_pull_model` or `ollama_list_running` respectively.
|
||||||
|
|
||||||
### Monitor activity (optional)
|
### Monitor activity (optional)
|
||||||
|
|
||||||
Run the watcher script for macOS notifications when Ollama is used:
|
Run the watcher script for macOS notifications when Ollama is used:
|
||||||
@@ -129,9 +160,10 @@ tail -f logs/nanoclaw.log | grep -i ollama
|
|||||||
```
|
```
|
||||||
|
|
||||||
Look for:
|
Look for:
|
||||||
- `Agent output: ... Ollama ...` — agent used Ollama successfully
|
- `[OLLAMA] >>> Generating` — generation started
|
||||||
- `[OLLAMA] >>> Generating` — generation started (if log surfacing works)
|
|
||||||
- `[OLLAMA] <<< Done` — generation completed
|
- `[OLLAMA] <<< Done` — generation completed
|
||||||
|
- `[OLLAMA] Pulling model:` — pull in progress (management tools)
|
||||||
|
- `[OLLAMA] Deleted:` — model removed (management tools)
|
||||||
|
|
||||||
## Troubleshooting
|
## Troubleshooting
|
||||||
|
|
||||||
@@ -151,3 +183,11 @@ The agent is trying to run `ollama` CLI inside the container instead of using th
|
|||||||
### Agent doesn't use Ollama tools
|
### Agent doesn't use Ollama tools
|
||||||
|
|
||||||
The agent may not know about the tools. Try being explicit: "use the ollama_generate tool with gemma3:1b to answer: ..."
|
The agent may not know about the tools. Try being explicit: "use the ollama_generate tool with gemma3:1b to answer: ..."
|
||||||
|
|
||||||
|
### `ollama_pull_model` times out on large models
|
||||||
|
|
||||||
|
Large models (7B+) can take several minutes. The tool uses `stream: false` so it blocks until complete — this is intentional. For very large pulls, use the host CLI directly: `ollama pull <model>`
|
||||||
|
|
||||||
|
### Management tools not showing up
|
||||||
|
|
||||||
|
Ensure `OLLAMA_ADMIN_TOOLS=true` is set in `.env` and the service was restarted after adding it.
|
||||||
|
|||||||
@@ -40,7 +40,7 @@ Otherwise (macOS, desktop Linux, or WSL) → AskUserQuestion: How do you want to
|
|||||||
|
|
||||||
If they chose pairing code:
|
If they chose pairing code:
|
||||||
|
|
||||||
AskUserQuestion: What is your phone number? (Include country code without +, e.g., 1234567890)
|
AskUserQuestion: What is your phone number? (Digits only — country code followed by your 10-digit number, no + prefix, spaces, or dashes. Example: 14155551234 where 1 is the US country code and 4155551234 is the phone number.)
|
||||||
|
|
||||||
## Phase 2: Apply Code Changes
|
## Phase 2: Apply Code Changes
|
||||||
|
|
||||||
@@ -308,7 +308,7 @@ rm -rf store/auth/ && npx tsx src/whatsapp-auth.ts --pairing-code --phone <phone
|
|||||||
```
|
```
|
||||||
|
|
||||||
Enter the code **immediately** when it appears. Also ensure:
|
Enter the code **immediately** when it appears. Also ensure:
|
||||||
1. Phone number includes country code without `+` (e.g., `1234567890`)
|
1. Phone number is digits only — country code + number, no `+` prefix (e.g., `14155551234` where `1` is country code, `4155551234` is the number)
|
||||||
2. Phone has internet access
|
2. Phone has internet access
|
||||||
3. WhatsApp is updated to the latest version
|
3. WhatsApp is updated to the latest version
|
||||||
|
|
||||||
|
|||||||
137
.claude/skills/channel-formatting/SKILL.md
Normal file
137
.claude/skills/channel-formatting/SKILL.md
Normal file
@@ -0,0 +1,137 @@
|
|||||||
|
---
|
||||||
|
name: channel-formatting
|
||||||
|
description: Convert Claude's Markdown output to each channel's native text syntax before delivery. Adds zero-dependency formatting for WhatsApp, Telegram, and Slack (marker substitution). Also ships a Signal rich-text helper (parseSignalStyles) used by the Signal skill.
|
||||||
|
---
|
||||||
|
|
||||||
|
# Channel Formatting
|
||||||
|
|
||||||
|
This skill wires channel-aware Markdown conversion into the outbound pipeline so Claude's
|
||||||
|
responses render natively on each platform — no more literal `**asterisks**` in WhatsApp or
|
||||||
|
Telegram.
|
||||||
|
|
||||||
|
| Channel | Transformation |
|
||||||
|
|---------|---------------|
|
||||||
|
| WhatsApp | `**bold**` → `*bold*`, `*italic*` → `_italic_`, headings → bold, links → `text (url)` |
|
||||||
|
| Telegram | same as WhatsApp, but `[text](url)` links are preserved (Markdown v1 renders them natively) |
|
||||||
|
| Slack | same as WhatsApp, but links become `<url\|text>` |
|
||||||
|
| Discord | passthrough (Discord already renders Markdown) |
|
||||||
|
| Signal | passthrough for `parseTextStyles`; `parseSignalStyles` in `src/text-styles.ts` produces plain text + native `textStyle` ranges for use by the Signal skill |
|
||||||
|
|
||||||
|
Code blocks (fenced and inline) are always protected — their content is never transformed.
|
||||||
|
|
||||||
|
## Phase 1: Pre-flight
|
||||||
|
|
||||||
|
### Check if already applied
|
||||||
|
|
||||||
|
```bash
|
||||||
|
test -f src/text-styles.ts && echo "already applied" || echo "not yet applied"
|
||||||
|
```
|
||||||
|
|
||||||
|
If `already applied`, skip to Phase 3 (Verify).
|
||||||
|
|
||||||
|
## Phase 2: Apply Code Changes
|
||||||
|
|
||||||
|
### Ensure the upstream remote
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git remote -v
|
||||||
|
```
|
||||||
|
|
||||||
|
If an `upstream` remote pointing to `https://github.com/qwibitai/nanoclaw.git` is missing,
|
||||||
|
add it:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git remote add upstream https://github.com/qwibitai/nanoclaw.git
|
||||||
|
```
|
||||||
|
|
||||||
|
### Merge the skill branch
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git fetch upstream skill/channel-formatting
|
||||||
|
git merge upstream/skill/channel-formatting
|
||||||
|
```
|
||||||
|
|
||||||
|
If there are merge conflicts on `package-lock.json`, resolve them by accepting the incoming
|
||||||
|
version and continuing:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git checkout --theirs package-lock.json
|
||||||
|
git add package-lock.json
|
||||||
|
git merge --continue
|
||||||
|
```
|
||||||
|
|
||||||
|
For any other conflict, read the conflicted file and reconcile both sides manually.
|
||||||
|
|
||||||
|
This merge adds:
|
||||||
|
|
||||||
|
- `src/text-styles.ts` — `parseTextStyles(text, channel)` for marker substitution and
|
||||||
|
`parseSignalStyles(text)` for Signal native rich text
|
||||||
|
- `src/router.ts` — `formatOutbound` gains an optional `channel` parameter; when provided
|
||||||
|
it calls `parseTextStyles` after stripping `<internal>` tags
|
||||||
|
- `src/index.ts` — both outbound `sendMessage` paths pass `channel.name` to `formatOutbound`
|
||||||
|
- `src/formatting.test.ts` — test coverage for both functions across all channels
|
||||||
|
|
||||||
|
### Validate
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm install
|
||||||
|
npm run build
|
||||||
|
npx vitest run src/formatting.test.ts
|
||||||
|
```
|
||||||
|
|
||||||
|
All 73 tests should pass and the build should be clean before continuing.
|
||||||
|
|
||||||
|
## Phase 3: Verify
|
||||||
|
|
||||||
|
### Rebuild and restart
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm run build
|
||||||
|
launchctl kickstart -k gui/$(id -u)/com.nanoclaw # macOS
|
||||||
|
# Linux: systemctl --user restart nanoclaw
|
||||||
|
```
|
||||||
|
|
||||||
|
### Spot-check formatting
|
||||||
|
|
||||||
|
Send a message through any registered WhatsApp or Telegram chat that will trigger a
|
||||||
|
response from Claude. Ask something that will produce formatted output, such as:
|
||||||
|
|
||||||
|
> Summarise the three main advantages of TypeScript using bullet points and **bold** headings.
|
||||||
|
|
||||||
|
Confirm that the response arrives with native bold (`*text*`) rather than raw double
|
||||||
|
asterisks.
|
||||||
|
|
||||||
|
### Check logs if needed
|
||||||
|
|
||||||
|
```bash
|
||||||
|
tail -f logs/nanoclaw.log
|
||||||
|
```
|
||||||
|
|
||||||
|
## Signal Skill Integration
|
||||||
|
|
||||||
|
If you have the Signal skill installed, `src/channels/signal.ts` can import
|
||||||
|
`parseSignalStyles` from the newly present `src/text-styles.ts`:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { parseSignalStyles, SignalTextStyle } from '../text-styles.js';
|
||||||
|
```
|
||||||
|
|
||||||
|
`parseSignalStyles` returns `{ text: string, textStyle: SignalTextStyle[] }` where
|
||||||
|
`textStyle` is an array of `{ style, start, length }` objects suitable for the
|
||||||
|
`signal-cli` JSON-RPC `textStyles` parameter (format: `"start:length:STYLE"`).
|
||||||
|
|
||||||
|
## Removal
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Remove the new file
|
||||||
|
rm src/text-styles.ts
|
||||||
|
|
||||||
|
# Revert router.ts to remove the channel param
|
||||||
|
git diff upstream/main src/router.ts # review changes
|
||||||
|
git checkout upstream/main -- src/router.ts
|
||||||
|
|
||||||
|
# Revert the index.ts sendMessage call sites to plain formatOutbound(rawText)
|
||||||
|
# (edit manually or: git checkout upstream/main -- src/index.ts)
|
||||||
|
|
||||||
|
npm run build
|
||||||
|
```
|
||||||
@@ -121,8 +121,48 @@ def find_group(groups: list[dict], query: str) -> dict | None:
|
|||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
def run_container(runtime: str, image: str, payload: dict, timeout: int = 300) -> None:
|
def build_mounts(folder: str, is_main: bool) -> list[tuple[str, str, bool]]:
|
||||||
cmd = [runtime, "run", "-i", "--rm", image]
|
"""Return list of (host_path, container_path, readonly) tuples."""
|
||||||
|
groups_dir = NANOCLAW_DIR / "groups"
|
||||||
|
data_dir = NANOCLAW_DIR / "data"
|
||||||
|
sessions_dir = data_dir / "sessions" / folder
|
||||||
|
ipc_dir = data_dir / "ipc" / folder
|
||||||
|
|
||||||
|
# Ensure required dirs exist
|
||||||
|
group_dir = groups_dir / folder
|
||||||
|
group_dir.mkdir(parents=True, exist_ok=True)
|
||||||
|
(sessions_dir / ".claude").mkdir(parents=True, exist_ok=True)
|
||||||
|
for sub in ("messages", "tasks", "input"):
|
||||||
|
(ipc_dir / sub).mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
|
agent_runner_src = sessions_dir / "agent-runner-src"
|
||||||
|
project_agent_runner = NANOCLAW_DIR / "container" / "agent-runner" / "src"
|
||||||
|
if not agent_runner_src.exists() and project_agent_runner.exists():
|
||||||
|
import shutil
|
||||||
|
shutil.copytree(project_agent_runner, agent_runner_src)
|
||||||
|
|
||||||
|
mounts: list[tuple[str, str, bool]] = []
|
||||||
|
if is_main:
|
||||||
|
mounts.append((str(NANOCLAW_DIR), "/workspace/project", True))
|
||||||
|
mounts.append((str(group_dir), "/workspace/group", False))
|
||||||
|
mounts.append((str(sessions_dir / ".claude"), "/home/node/.claude", False))
|
||||||
|
mounts.append((str(ipc_dir), "/workspace/ipc", False))
|
||||||
|
if agent_runner_src.exists():
|
||||||
|
mounts.append((str(agent_runner_src), "/app/src", False))
|
||||||
|
return mounts
|
||||||
|
|
||||||
|
|
||||||
|
def run_container(runtime: str, image: str, payload: dict,
|
||||||
|
folder: str | None = None, is_main: bool = False,
|
||||||
|
timeout: int = 300) -> None:
|
||||||
|
cmd = [runtime, "run", "-i", "--rm"]
|
||||||
|
if folder:
|
||||||
|
for host, container, readonly in build_mounts(folder, is_main):
|
||||||
|
if readonly:
|
||||||
|
cmd += ["--mount", f"type=bind,source={host},target={container},readonly"]
|
||||||
|
else:
|
||||||
|
cmd += ["-v", f"{host}:{container}"]
|
||||||
|
cmd.append(image)
|
||||||
dbg(f"cmd: {' '.join(cmd)}")
|
dbg(f"cmd: {' '.join(cmd)}")
|
||||||
|
|
||||||
# Show payload sans secrets
|
# Show payload sans secrets
|
||||||
@@ -167,6 +207,11 @@ def run_container(runtime: str, image: str, payload: dict, timeout: int = 300) -
|
|||||||
dbg("output sentinel found, terminating container")
|
dbg("output sentinel found, terminating container")
|
||||||
done.set()
|
done.set()
|
||||||
try:
|
try:
|
||||||
|
proc.terminate()
|
||||||
|
try:
|
||||||
|
proc.wait(timeout=5)
|
||||||
|
except subprocess.TimeoutExpired:
|
||||||
|
dbg("graceful stop timed out, force killing container")
|
||||||
proc.kill()
|
proc.kill()
|
||||||
except ProcessLookupError:
|
except ProcessLookupError:
|
||||||
pass
|
pass
|
||||||
@@ -197,6 +242,8 @@ def run_container(runtime: str, image: str, payload: dict, timeout: int = 300) -
|
|||||||
stdout,
|
stdout,
|
||||||
re.DOTALL,
|
re.DOTALL,
|
||||||
)
|
)
|
||||||
|
success = False
|
||||||
|
|
||||||
if match:
|
if match:
|
||||||
try:
|
try:
|
||||||
data = json.loads(match.group(1))
|
data = json.loads(match.group(1))
|
||||||
@@ -206,6 +253,7 @@ def run_container(runtime: str, image: str, payload: dict, timeout: int = 300) -
|
|||||||
session_id = data.get("newSessionId") or data.get("sessionId")
|
session_id = data.get("newSessionId") or data.get("sessionId")
|
||||||
if session_id:
|
if session_id:
|
||||||
print(f"\n[session: {session_id}]", file=sys.stderr)
|
print(f"\n[session: {session_id}]", file=sys.stderr)
|
||||||
|
success = True
|
||||||
else:
|
else:
|
||||||
print(f"[{status}] {data.get('result', '')}", file=sys.stderr)
|
print(f"[{status}] {data.get('result', '')}", file=sys.stderr)
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
@@ -215,6 +263,9 @@ def run_container(runtime: str, image: str, payload: dict, timeout: int = 300) -
|
|||||||
# No structured output — print raw stdout
|
# No structured output — print raw stdout
|
||||||
print(stdout)
|
print(stdout)
|
||||||
|
|
||||||
|
if success:
|
||||||
|
return
|
||||||
|
|
||||||
if proc.returncode not in (0, None):
|
if proc.returncode not in (0, None):
|
||||||
sys.exit(proc.returncode)
|
sys.exit(proc.returncode)
|
||||||
|
|
||||||
@@ -273,6 +324,7 @@ def main():
|
|||||||
# Resolve group → jid
|
# Resolve group → jid
|
||||||
jid = args.jid
|
jid = args.jid
|
||||||
group_name = None
|
group_name = None
|
||||||
|
group_folder = None
|
||||||
is_main = False
|
is_main = False
|
||||||
|
|
||||||
if args.group:
|
if args.group:
|
||||||
@@ -281,6 +333,7 @@ def main():
|
|||||||
sys.exit(f"error: group '{args.group}' not found. Run --list-groups to see options.")
|
sys.exit(f"error: group '{args.group}' not found. Run --list-groups to see options.")
|
||||||
jid = g["jid"]
|
jid = g["jid"]
|
||||||
group_name = g["name"]
|
group_name = g["name"]
|
||||||
|
group_folder = g["folder"]
|
||||||
is_main = g["is_main"]
|
is_main = g["is_main"]
|
||||||
elif not jid:
|
elif not jid:
|
||||||
# Default: main group
|
# Default: main group
|
||||||
@@ -288,6 +341,7 @@ def main():
|
|||||||
if mains:
|
if mains:
|
||||||
jid = mains[0]["jid"]
|
jid = mains[0]["jid"]
|
||||||
group_name = mains[0]["name"]
|
group_name = mains[0]["name"]
|
||||||
|
group_folder = mains[0]["folder"]
|
||||||
is_main = True
|
is_main = True
|
||||||
else:
|
else:
|
||||||
sys.exit("error: no group specified and no main group found. Use -g or -j.")
|
sys.exit("error: no group specified and no main group found. Use -g or -j.")
|
||||||
@@ -311,7 +365,9 @@ def main():
|
|||||||
payload["resumeAt"] = "latest"
|
payload["resumeAt"] = "latest"
|
||||||
|
|
||||||
print(f"[{group_name or jid}] running via {runtime}...", file=sys.stderr)
|
print(f"[{group_name or jid}] running via {runtime}...", file=sys.stderr)
|
||||||
run_container(runtime, args.image, payload, timeout=args.timeout)
|
run_container(runtime, args.image, payload,
|
||||||
|
folder=group_folder, is_main=is_main,
|
||||||
|
timeout=args.timeout)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
|
|||||||
276
.claude/skills/init-onecli/SKILL.md
Normal file
276
.claude/skills/init-onecli/SKILL.md
Normal file
@@ -0,0 +1,276 @@
|
|||||||
|
---
|
||||||
|
name: init-onecli
|
||||||
|
description: Install and initialize OneCLI Agent Vault. Migrates existing .env credentials to the vault. Use after /update-nanoclaw brings in OneCLI as a breaking change, or for first-time OneCLI setup.
|
||||||
|
---
|
||||||
|
|
||||||
|
# Initialize OneCLI Agent Vault
|
||||||
|
|
||||||
|
This skill installs OneCLI, configures the Agent Vault gateway, and migrates any existing `.env` credentials into it. Run this after `/update-nanoclaw` introduces OneCLI as a breaking change, or any time OneCLI needs to be set up from scratch.
|
||||||
|
|
||||||
|
**Principle:** When something is broken or missing, fix it. Don't tell the user to go fix it themselves unless it genuinely requires their manual action (e.g. pasting a token).
|
||||||
|
|
||||||
|
## Phase 1: Pre-flight
|
||||||
|
|
||||||
|
### Check if OneCLI is already working
|
||||||
|
|
||||||
|
```bash
|
||||||
|
onecli version 2>/dev/null
|
||||||
|
```
|
||||||
|
|
||||||
|
If the command succeeds, OneCLI is installed. Check if the gateway is reachable:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
curl -sf http://127.0.0.1:10254/health
|
||||||
|
```
|
||||||
|
|
||||||
|
If both succeed, check for an Anthropic secret:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
onecli secrets list
|
||||||
|
```
|
||||||
|
|
||||||
|
If an Anthropic secret exists, tell the user OneCLI is already configured and working. Use AskUserQuestion:
|
||||||
|
|
||||||
|
1. **Keep current setup** — description: "OneCLI is installed and has credentials configured. Nothing to do."
|
||||||
|
2. **Reconfigure** — description: "Start fresh — reinstall OneCLI and re-register credentials."
|
||||||
|
|
||||||
|
If they choose to keep, skip to Phase 5 (Verify). If they choose to reconfigure, continue.
|
||||||
|
|
||||||
|
### Check for native credential proxy
|
||||||
|
|
||||||
|
```bash
|
||||||
|
grep "credential-proxy" src/index.ts 2>/dev/null
|
||||||
|
```
|
||||||
|
|
||||||
|
If `startCredentialProxy` is imported, the native credential proxy skill is active. Tell the user: "You're currently using the native credential proxy (`.env`-based). This skill will switch you to OneCLI's Agent Vault, which adds per-agent policies and rate limits. Your `.env` credentials will be migrated to the vault."
|
||||||
|
|
||||||
|
Use AskUserQuestion:
|
||||||
|
1. **Continue** — description: "Switch to OneCLI Agent Vault."
|
||||||
|
2. **Cancel** — description: "Keep the native credential proxy."
|
||||||
|
|
||||||
|
If they cancel, stop.
|
||||||
|
|
||||||
|
### Check the codebase expects OneCLI
|
||||||
|
|
||||||
|
```bash
|
||||||
|
grep "@onecli-sh/sdk" package.json
|
||||||
|
```
|
||||||
|
|
||||||
|
If `@onecli-sh/sdk` is NOT in package.json, the codebase hasn't been updated to use OneCLI yet. Tell the user to run `/update-nanoclaw` first to get the OneCLI integration, then retry `/init-onecli`. Stop here.
|
||||||
|
|
||||||
|
## Phase 2: Install OneCLI
|
||||||
|
|
||||||
|
### Install the gateway and CLI
|
||||||
|
|
||||||
|
```bash
|
||||||
|
curl -fsSL onecli.sh/install | sh
|
||||||
|
curl -fsSL onecli.sh/cli/install | sh
|
||||||
|
```
|
||||||
|
|
||||||
|
Verify: `onecli version`
|
||||||
|
|
||||||
|
If the command is not found, the CLI was likely installed to `~/.local/bin/`. Add it to PATH:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
export PATH="$HOME/.local/bin:$PATH"
|
||||||
|
grep -q '.local/bin' ~/.bashrc 2>/dev/null || echo 'export PATH="$HOME/.local/bin:$PATH"' >> ~/.bashrc
|
||||||
|
grep -q '.local/bin' ~/.zshrc 2>/dev/null || echo 'export PATH="$HOME/.local/bin:$PATH"' >> ~/.zshrc
|
||||||
|
```
|
||||||
|
|
||||||
|
Re-verify with `onecli version`.
|
||||||
|
|
||||||
|
### Configure the CLI
|
||||||
|
|
||||||
|
Point the CLI at the local OneCLI instance:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
onecli config set api-host http://127.0.0.1:10254
|
||||||
|
```
|
||||||
|
|
||||||
|
### Set ONECLI_URL in .env
|
||||||
|
|
||||||
|
```bash
|
||||||
|
grep -q 'ONECLI_URL' .env 2>/dev/null || echo 'ONECLI_URL=http://127.0.0.1:10254' >> .env
|
||||||
|
```
|
||||||
|
|
||||||
|
### Wait for gateway readiness
|
||||||
|
|
||||||
|
The gateway may take a moment to start after installation. Poll for up to 15 seconds:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
for i in $(seq 1 15); do
|
||||||
|
curl -sf http://127.0.0.1:10254/health && break
|
||||||
|
sleep 1
|
||||||
|
done
|
||||||
|
```
|
||||||
|
|
||||||
|
If it never becomes healthy, check if the gateway process is running:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
ps aux | grep -i onecli | grep -v grep
|
||||||
|
```
|
||||||
|
|
||||||
|
If it's not running, try starting it manually: `onecli start`. If that fails, show the error and stop — the user needs to debug their OneCLI installation.
|
||||||
|
|
||||||
|
## Phase 3: Migrate existing credentials
|
||||||
|
|
||||||
|
### Scan .env for credentials to migrate
|
||||||
|
|
||||||
|
Read the `.env` file and look for these credential variables:
|
||||||
|
|
||||||
|
| .env variable | OneCLI secret type | Host pattern |
|
||||||
|
|---|---|---|
|
||||||
|
| `ANTHROPIC_API_KEY` | `anthropic` | `api.anthropic.com` |
|
||||||
|
| `CLAUDE_CODE_OAUTH_TOKEN` | `anthropic` | `api.anthropic.com` |
|
||||||
|
| `ANTHROPIC_AUTH_TOKEN` | `anthropic` | `api.anthropic.com` |
|
||||||
|
|
||||||
|
Read `.env`:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cat .env
|
||||||
|
```
|
||||||
|
|
||||||
|
Parse the file for any of the credential variables listed above.
|
||||||
|
|
||||||
|
### If credentials found in .env
|
||||||
|
|
||||||
|
For each credential found, migrate it to OneCLI:
|
||||||
|
|
||||||
|
**Anthropic API key** (`ANTHROPIC_API_KEY=sk-ant-...`):
|
||||||
|
```bash
|
||||||
|
onecli secrets create --name Anthropic --type anthropic --value <key> --host-pattern api.anthropic.com
|
||||||
|
```
|
||||||
|
|
||||||
|
**Claude OAuth token** (`CLAUDE_CODE_OAUTH_TOKEN=...` or `ANTHROPIC_AUTH_TOKEN=...`):
|
||||||
|
```bash
|
||||||
|
onecli secrets create --name Anthropic --type anthropic --value <token> --host-pattern api.anthropic.com
|
||||||
|
```
|
||||||
|
|
||||||
|
After successful migration, remove the credential lines from `.env`. Use the Edit tool to remove only the credential variable lines (`ANTHROPIC_API_KEY`, `CLAUDE_CODE_OAUTH_TOKEN`, `ANTHROPIC_AUTH_TOKEN`). Keep all other `.env` entries intact (e.g. `ONECLI_URL`, `TELEGRAM_BOT_TOKEN`, channel tokens).
|
||||||
|
|
||||||
|
Verify the secret was registered:
|
||||||
|
```bash
|
||||||
|
onecli secrets list
|
||||||
|
```
|
||||||
|
|
||||||
|
Tell the user: "Migrated your Anthropic credentials from `.env` to the OneCLI Agent Vault. The raw keys have been removed from `.env` — they're now managed by OneCLI and will be injected at request time without entering containers."
|
||||||
|
|
||||||
|
### Offer to migrate other container-facing credentials
|
||||||
|
|
||||||
|
After handling Anthropic credentials (whether migrated or freshly registered), scan `.env` again for remaining credential variables that containers use for outbound API calls.
|
||||||
|
|
||||||
|
**Important:** Only migrate credentials that containers use via outbound HTTPS. Channel tokens (`TELEGRAM_BOT_TOKEN`, `SLACK_BOT_TOKEN`, `SLACK_APP_TOKEN`, `DISCORD_BOT_TOKEN`) are used by the NanoClaw host process to connect to messaging platforms — they must stay in `.env`.
|
||||||
|
|
||||||
|
Known container-facing credentials:
|
||||||
|
|
||||||
|
| .env variable | Secret name | Host pattern |
|
||||||
|
|---|---|---|
|
||||||
|
| `OPENAI_API_KEY` | `OpenAI` | `api.openai.com` |
|
||||||
|
| `PARALLEL_API_KEY` | `Parallel` | `api.parallel.ai` |
|
||||||
|
|
||||||
|
If any of these are found with non-empty values, present them to the user:
|
||||||
|
|
||||||
|
AskUserQuestion (multiSelect): "These credentials are used by container agents for outbound API calls. Moving them to the vault means agents never see the raw keys, and you can apply rate limits and policies."
|
||||||
|
|
||||||
|
- One option per credential found (e.g., "OPENAI_API_KEY" — description: "Used by voice transcription and other OpenAI integrations inside containers")
|
||||||
|
- **Skip — keep them in .env** — description: "Leave these in .env for now. You can move them later."
|
||||||
|
|
||||||
|
For each credential the user selects:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
onecli secrets create --name <SecretName> --type api_key --value <value> --host-pattern <host>
|
||||||
|
```
|
||||||
|
|
||||||
|
If there are credential variables not in the table above that look container-facing (i.e. not a channel token), ask the user: "Is `<VARIABLE_NAME>` used by agents inside containers? If so, what API host does it authenticate against? (e.g., `api.example.com`)" — then migrate accordingly.
|
||||||
|
|
||||||
|
After migration, remove the migrated lines from `.env` using the Edit tool. Keep channel tokens and any credentials the user chose not to migrate.
|
||||||
|
|
||||||
|
Verify all secrets were registered:
|
||||||
|
```bash
|
||||||
|
onecli secrets list
|
||||||
|
```
|
||||||
|
|
||||||
|
### If no credentials found in .env
|
||||||
|
|
||||||
|
No migration needed. Proceed to register credentials fresh.
|
||||||
|
|
||||||
|
Check if OneCLI already has an Anthropic secret:
|
||||||
|
```bash
|
||||||
|
onecli secrets list
|
||||||
|
```
|
||||||
|
|
||||||
|
If an Anthropic secret already exists, skip to Phase 4.
|
||||||
|
|
||||||
|
Otherwise, register credentials using the same flow as `/setup`:
|
||||||
|
|
||||||
|
AskUserQuestion: Do you want to use your **Claude subscription** (Pro/Max) or an **Anthropic API key**?
|
||||||
|
|
||||||
|
1. **Claude subscription (Pro/Max)** — description: "Uses your existing Claude Pro or Max subscription. You'll run `claude setup-token` in another terminal to get your token."
|
||||||
|
2. **Anthropic API key** — description: "Pay-per-use API key from console.anthropic.com."
|
||||||
|
|
||||||
|
#### Subscription path
|
||||||
|
|
||||||
|
Tell the user to run `claude setup-token` in another terminal and copy the token it outputs. Do NOT collect the token in chat.
|
||||||
|
|
||||||
|
Once they have the token, AskUserQuestion with two options:
|
||||||
|
|
||||||
|
1. **Dashboard** — description: "Best if you have a browser on this machine. Open http://127.0.0.1:10254 and add the secret in the UI. Use type 'anthropic' and paste your token as the value."
|
||||||
|
2. **CLI** — description: "Best for remote/headless servers. Run: `onecli secrets create --name Anthropic --type anthropic --value YOUR_TOKEN --host-pattern api.anthropic.com`"
|
||||||
|
|
||||||
|
#### API key path
|
||||||
|
|
||||||
|
Tell the user to get an API key from https://console.anthropic.com/settings/keys if they don't have one.
|
||||||
|
|
||||||
|
AskUserQuestion with two options:
|
||||||
|
|
||||||
|
1. **Dashboard** — description: "Best if you have a browser on this machine. Open http://127.0.0.1:10254 and add the secret in the UI."
|
||||||
|
2. **CLI** — description: "Best for remote/headless servers. Run: `onecli secrets create --name Anthropic --type anthropic --value YOUR_KEY --host-pattern api.anthropic.com`"
|
||||||
|
|
||||||
|
#### After either path
|
||||||
|
|
||||||
|
Ask them to let you know when done.
|
||||||
|
|
||||||
|
**If the user's response happens to contain a token or key** (starts with `sk-ant-` or looks like a token): handle it gracefully — run the `onecli secrets create` command with that value on their behalf.
|
||||||
|
|
||||||
|
**After user confirms:** verify with `onecli secrets list` that an Anthropic secret exists. If not, ask again.
|
||||||
|
|
||||||
|
## Phase 4: Build and restart
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm run build
|
||||||
|
```
|
||||||
|
|
||||||
|
If build fails, diagnose and fix. Common issue: `@onecli-sh/sdk` not installed — run `npm install` first.
|
||||||
|
|
||||||
|
Restart the service:
|
||||||
|
- macOS (launchd): `launchctl kickstart -k gui/$(id -u)/com.nanoclaw`
|
||||||
|
- Linux (systemd): `systemctl --user restart nanoclaw`
|
||||||
|
- WSL/manual: stop and re-run `bash start-nanoclaw.sh`
|
||||||
|
|
||||||
|
## Phase 5: Verify
|
||||||
|
|
||||||
|
Check logs for successful OneCLI integration:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
tail -30 logs/nanoclaw.log | grep -i "onecli\|gateway"
|
||||||
|
```
|
||||||
|
|
||||||
|
Expected: `OneCLI gateway config applied` messages when containers start.
|
||||||
|
|
||||||
|
If the service is running and a channel is configured, tell the user to send a test message to verify the agent responds.
|
||||||
|
|
||||||
|
Tell the user:
|
||||||
|
- OneCLI Agent Vault is now managing credentials
|
||||||
|
- Agents never see raw API keys — credentials are injected at the gateway level
|
||||||
|
- To manage secrets: `onecli secrets list`, or open http://127.0.0.1:10254
|
||||||
|
- To add rate limits or policies: `onecli rules create --help`
|
||||||
|
|
||||||
|
## Troubleshooting
|
||||||
|
|
||||||
|
**"OneCLI gateway not reachable" in logs:** The gateway isn't running. Check with `curl -sf http://127.0.0.1:10254/health`. Start it with `onecli start` if needed.
|
||||||
|
|
||||||
|
**Container gets no credentials:** Verify `ONECLI_URL` is set in `.env` and the gateway has an Anthropic secret (`onecli secrets list`).
|
||||||
|
|
||||||
|
**Old .env credentials still present:** This skill should have removed them. Double-check `.env` for `ANTHROPIC_API_KEY`, `CLAUDE_CODE_OAUTH_TOKEN`, or `ANTHROPIC_AUTH_TOKEN` and remove them manually if still present.
|
||||||
|
|
||||||
|
**Port 10254 already in use:** Another OneCLI instance may be running. Check with `lsof -i :10254` and kill the old process, or configure a different port.
|
||||||
@@ -50,7 +50,7 @@ Already configured. Continue.
|
|||||||
|
|
||||||
**Verify:** `git remote -v` should show `origin` → user's repo, `upstream` → `qwibitai/nanoclaw.git`.
|
**Verify:** `git remote -v` should show `origin` → user's repo, `upstream` → `qwibitai/nanoclaw.git`.
|
||||||
|
|
||||||
## 1. Bootstrap (Node.js + Dependencies)
|
## 1. Bootstrap (Node.js + Dependencies + OneCLI)
|
||||||
|
|
||||||
Run `bash setup.sh` and parse the status block.
|
Run `bash setup.sh` and parse the status block.
|
||||||
|
|
||||||
@@ -62,6 +62,34 @@ Run `bash setup.sh` and parse the status block.
|
|||||||
- If NATIVE_OK=false → better-sqlite3 failed to load. Install build tools and re-run.
|
- If NATIVE_OK=false → better-sqlite3 failed to load. Install build tools and re-run.
|
||||||
- Record PLATFORM and IS_WSL for later steps.
|
- Record PLATFORM and IS_WSL for later steps.
|
||||||
|
|
||||||
|
After bootstrap succeeds, install OneCLI and its CLI tool:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
curl -fsSL onecli.sh/install | sh
|
||||||
|
curl -fsSL onecli.sh/cli/install | sh
|
||||||
|
```
|
||||||
|
|
||||||
|
Verify both installed: `onecli version`. If the command is not found, the CLI was likely installed to `~/.local/bin/`. Add it to PATH for the current session and persist it:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
export PATH="$HOME/.local/bin:$PATH"
|
||||||
|
# Persist for future sessions (append to shell profile if not already present)
|
||||||
|
grep -q '.local/bin' ~/.bashrc 2>/dev/null || echo 'export PATH="$HOME/.local/bin:$PATH"' >> ~/.bashrc
|
||||||
|
grep -q '.local/bin' ~/.zshrc 2>/dev/null || echo 'export PATH="$HOME/.local/bin:$PATH"' >> ~/.zshrc
|
||||||
|
```
|
||||||
|
|
||||||
|
Then re-verify with `onecli version`.
|
||||||
|
|
||||||
|
Point the CLI at the local OneCLI instance (it defaults to the cloud service otherwise):
|
||||||
|
```bash
|
||||||
|
onecli config set api-host http://127.0.0.1:10254
|
||||||
|
```
|
||||||
|
|
||||||
|
Ensure `.env` has the OneCLI URL (create the file if it doesn't exist):
|
||||||
|
```bash
|
||||||
|
grep -q 'ONECLI_URL' .env 2>/dev/null || echo 'ONECLI_URL=http://127.0.0.1:10254' >> .env
|
||||||
|
```
|
||||||
|
|
||||||
## 2. Check Environment
|
## 2. Check Environment
|
||||||
|
|
||||||
Run `npx tsx setup/index.ts --step environment` and parse the status block.
|
Run `npx tsx setup/index.ts --step environment` and parse the status block.
|
||||||
@@ -70,6 +98,13 @@ Run `npx tsx setup/index.ts --step environment` and parse the status block.
|
|||||||
- If HAS_REGISTERED_GROUPS=true → note existing config, offer to skip or reconfigure
|
- If HAS_REGISTERED_GROUPS=true → note existing config, offer to skip or reconfigure
|
||||||
- Record APPLE_CONTAINER and DOCKER values for step 3
|
- Record APPLE_CONTAINER and DOCKER values for step 3
|
||||||
|
|
||||||
|
## 2a. Timezone
|
||||||
|
|
||||||
|
Run `npx tsx setup/index.ts --step timezone` and parse the status block.
|
||||||
|
|
||||||
|
- If NEEDS_USER_INPUT=true → The system timezone could not be autodetected (e.g. POSIX-style TZ like `IST-2`). AskUserQuestion: "What is your timezone?" with common options (America/New_York, Europe/London, Asia/Jerusalem, Asia/Tokyo) and an "Other" escape. Then re-run: `npx tsx setup/index.ts --step timezone -- --tz <their-answer>`.
|
||||||
|
- If STATUS=success → Timezone is configured. Note RESOLVED_TZ for reference.
|
||||||
|
|
||||||
## 3. Container Runtime
|
## 3. Container Runtime
|
||||||
|
|
||||||
### 3a. Choose runtime
|
### 3a. Choose runtime
|
||||||
@@ -112,15 +147,47 @@ Run `npx tsx setup/index.ts --step container -- --runtime <chosen>` and parse th
|
|||||||
|
|
||||||
**If TEST_OK=false but BUILD_OK=true:** The image built but won't run. Check logs — common cause is runtime not fully started. Wait a moment and retry the test.
|
**If TEST_OK=false but BUILD_OK=true:** The image built but won't run. Check logs — common cause is runtime not fully started. Wait a moment and retry the test.
|
||||||
|
|
||||||
## 4. Claude Authentication (No Script)
|
## 4. Anthropic Credentials via OneCLI
|
||||||
|
|
||||||
If HAS_ENV=true from step 2, read `.env` and check for `CLAUDE_CODE_OAUTH_TOKEN` or `ANTHROPIC_API_KEY`. If present, confirm with user: keep or reconfigure?
|
NanoClaw uses OneCLI to manage credentials — API keys are never stored in `.env` or exposed to containers. The OneCLI gateway injects them at request time.
|
||||||
|
|
||||||
AskUserQuestion: Claude subscription (Pro/Max) vs Anthropic API key?
|
Check if a secret already exists:
|
||||||
|
```bash
|
||||||
|
onecli secrets list
|
||||||
|
```
|
||||||
|
|
||||||
**Subscription:** Tell user to run `claude setup-token` in another terminal, copy the token, add `CLAUDE_CODE_OAUTH_TOKEN=<token>` to `.env`. Do NOT collect the token in chat.
|
If an Anthropic secret is listed, confirm with user: keep or reconfigure? If keeping, skip to step 5.
|
||||||
|
|
||||||
**API key:** Tell user to add `ANTHROPIC_API_KEY=<key>` to `.env`.
|
AskUserQuestion: Do you want to use your **Claude subscription** (Pro/Max) or an **Anthropic API key**?
|
||||||
|
|
||||||
|
1. **Claude subscription (Pro/Max)** — description: "Uses your existing Claude Pro or Max subscription. You'll run `claude setup-token` in another terminal to get your token."
|
||||||
|
2. **Anthropic API key** — description: "Pay-per-use API key from console.anthropic.com."
|
||||||
|
|
||||||
|
### Subscription path
|
||||||
|
|
||||||
|
Tell the user to run `claude setup-token` in another terminal and copy the token it outputs. Do NOT collect the token in chat.
|
||||||
|
|
||||||
|
Once they have the token, they register it with OneCLI. AskUserQuestion with two options:
|
||||||
|
|
||||||
|
1. **Dashboard** — description: "Best if you have a browser on this machine. Open http://127.0.0.1:10254 and add the secret in the UI. Use type 'anthropic' and paste your token as the value."
|
||||||
|
2. **CLI** — description: "Best for remote/headless servers. Run: `onecli secrets create --name Anthropic --type anthropic --value YOUR_TOKEN --host-pattern api.anthropic.com`"
|
||||||
|
|
||||||
|
### API key path
|
||||||
|
|
||||||
|
Tell the user to get an API key from https://console.anthropic.com/settings/keys if they don't have one.
|
||||||
|
|
||||||
|
Then AskUserQuestion with two options:
|
||||||
|
|
||||||
|
1. **Dashboard** — description: "Best if you have a browser on this machine. Open http://127.0.0.1:10254 and add the secret in the UI."
|
||||||
|
2. **CLI** — description: "Best for remote/headless servers. Run: `onecli secrets create --name Anthropic --type anthropic --value YOUR_KEY --host-pattern api.anthropic.com`"
|
||||||
|
|
||||||
|
### After either path
|
||||||
|
|
||||||
|
Ask them to let you know when done.
|
||||||
|
|
||||||
|
**If the user's response happens to contain a token or key** (starts with `sk-ant-`): handle it gracefully — run the `onecli secrets create` command with that value on their behalf.
|
||||||
|
|
||||||
|
**After user confirms:** verify with `onecli secrets list` that an Anthropic secret exists. If not, ask again.
|
||||||
|
|
||||||
## 5. Set Up Channels
|
## 5. Set Up Channels
|
||||||
|
|
||||||
@@ -198,7 +265,7 @@ Run `npx tsx setup/index.ts --step verify` and parse the status block.
|
|||||||
**If STATUS=failed, fix each:**
|
**If STATUS=failed, fix each:**
|
||||||
- SERVICE=stopped → `npm run build`, then restart: `launchctl kickstart -k gui/$(id -u)/com.nanoclaw` (macOS) or `systemctl --user restart nanoclaw` (Linux) or `bash start-nanoclaw.sh` (WSL nohup)
|
- SERVICE=stopped → `npm run build`, then restart: `launchctl kickstart -k gui/$(id -u)/com.nanoclaw` (macOS) or `systemctl --user restart nanoclaw` (Linux) or `bash start-nanoclaw.sh` (WSL nohup)
|
||||||
- SERVICE=not_found → re-run step 7
|
- SERVICE=not_found → re-run step 7
|
||||||
- CREDENTIALS=missing → re-run step 4
|
- CREDENTIALS=missing → re-run step 4 (check `onecli secrets list` for Anthropic secret)
|
||||||
- CHANNEL_AUTH shows `not_found` for any channel → re-invoke that channel's skill (e.g. `/add-telegram`)
|
- CHANNEL_AUTH shows `not_found` for any channel → re-invoke that channel's skill (e.g. `/add-telegram`)
|
||||||
- REGISTERED_GROUPS=0 → re-invoke the channel skills from step 5
|
- REGISTERED_GROUPS=0 → re-invoke the channel skills from step 5
|
||||||
- MOUNT_ALLOWLIST=missing → `npx tsx setup/index.ts --step mounts -- --empty`
|
- MOUNT_ALLOWLIST=missing → `npx tsx setup/index.ts --step mounts -- --empty`
|
||||||
@@ -207,7 +274,7 @@ Tell user to test: send a message in their registered chat. Show: `tail -f logs/
|
|||||||
|
|
||||||
## Troubleshooting
|
## Troubleshooting
|
||||||
|
|
||||||
**Service not starting:** Check `logs/nanoclaw.error.log`. Common: wrong Node path (re-run step 7), missing `.env` (step 4), missing channel credentials (re-invoke channel skill).
|
**Service not starting:** Check `logs/nanoclaw.error.log`. Common: wrong Node path (re-run step 7), OneCLI not running (check `curl http://127.0.0.1:10254/api/health`), missing channel credentials (re-invoke channel skill).
|
||||||
|
|
||||||
**Container agent fails ("Claude Code process exited with code 1"):** Ensure the container runtime is running — `open -a Docker` (macOS Docker), `container system start` (Apple Container), or `sudo systemctl start docker` (Linux). Check container logs in `groups/main/logs/container-*.log`.
|
**Container agent fails ("Claude Code process exited with code 1"):** Ensure the container runtime is running — `open -a Docker` (macOS Docker), `container system start` (Apple Container), or `sudo systemctl start docker` (Linux). Check container logs in `groups/main/logs/container-*.log`.
|
||||||
|
|
||||||
@@ -218,6 +285,7 @@ Tell user to test: send a message in their registered chat. Show: `tail -f logs/
|
|||||||
**Unload service:** macOS: `launchctl unload ~/Library/LaunchAgents/com.nanoclaw.plist` | Linux: `systemctl --user stop nanoclaw`
|
**Unload service:** macOS: `launchctl unload ~/Library/LaunchAgents/com.nanoclaw.plist` | Linux: `systemctl --user stop nanoclaw`
|
||||||
|
|
||||||
|
|
||||||
## Diagnostics
|
## 9. Diagnostics
|
||||||
|
|
||||||
Read and follow [diagnostics.md](diagnostics.md).
|
1. Use the Read tool to read `.claude/skills/setup/diagnostics.md`.
|
||||||
|
2. Follow every step in that file before completing setup.
|
||||||
|
|||||||
@@ -42,5 +42,5 @@ rm /tmp/nanoclaw-diagnostics.json
|
|||||||
**Never ask again**:
|
**Never ask again**:
|
||||||
1. Replace contents of `.claude/skills/setup/diagnostics.md` with `# Diagnostics — opted out`
|
1. Replace contents of `.claude/skills/setup/diagnostics.md` with `# Diagnostics — opted out`
|
||||||
2. Replace contents of `.claude/skills/update-nanoclaw/diagnostics.md` with `# Diagnostics — opted out`
|
2. Replace contents of `.claude/skills/update-nanoclaw/diagnostics.md` with `# Diagnostics — opted out`
|
||||||
3. Remove the "Diagnostics" section from both `.claude/skills/setup/SKILL.md` and `.claude/skills/update-nanoclaw/SKILL.md`
|
3. Remove the `## 9. Diagnostics` section from `.claude/skills/setup/SKILL.md` and the `## Diagnostics` section from `.claude/skills/update-nanoclaw/SKILL.md`
|
||||||
4. `rm /tmp/nanoclaw-diagnostics.json`
|
4. `rm /tmp/nanoclaw-diagnostics.json`
|
||||||
|
|||||||
@@ -237,4 +237,5 @@ Tell the user:
|
|||||||
|
|
||||||
## Diagnostics
|
## Diagnostics
|
||||||
|
|
||||||
Read and follow [diagnostics.md](diagnostics.md).
|
1. Use the Read tool to read `.claude/skills/update-nanoclaw/diagnostics.md`.
|
||||||
|
2. Follow every step in that file before finishing.
|
||||||
|
|||||||
@@ -45,5 +45,5 @@ rm /tmp/nanoclaw-diagnostics.json
|
|||||||
**Never ask again**:
|
**Never ask again**:
|
||||||
1. Replace contents of `.claude/skills/setup/diagnostics.md` with `# Diagnostics — opted out`
|
1. Replace contents of `.claude/skills/setup/diagnostics.md` with `# Diagnostics — opted out`
|
||||||
2. Replace contents of `.claude/skills/update-nanoclaw/diagnostics.md` with `# Diagnostics — opted out`
|
2. Replace contents of `.claude/skills/update-nanoclaw/diagnostics.md` with `# Diagnostics — opted out`
|
||||||
3. Remove the "Diagnostics" section from both `.claude/skills/setup/SKILL.md` and `.claude/skills/update-nanoclaw/SKILL.md`
|
3. Remove the `## 9. Diagnostics` section from `.claude/skills/setup/SKILL.md` and the `## Diagnostics` section from `.claude/skills/update-nanoclaw/SKILL.md`
|
||||||
4. `rm /tmp/nanoclaw-diagnostics.json`
|
4. `rm /tmp/nanoclaw-diagnostics.json`
|
||||||
|
|||||||
167
.claude/skills/use-native-credential-proxy/SKILL.md
Normal file
167
.claude/skills/use-native-credential-proxy/SKILL.md
Normal file
@@ -0,0 +1,167 @@
|
|||||||
|
---
|
||||||
|
name: use-native-credential-proxy
|
||||||
|
description: Replace OneCLI gateway with the built-in credential proxy. For users who want simple .env-based credential management without installing OneCLI. Reads API key or OAuth token from .env and injects into container API requests.
|
||||||
|
---
|
||||||
|
|
||||||
|
# Use Native Credential Proxy
|
||||||
|
|
||||||
|
This skill replaces the OneCLI gateway with NanoClaw's built-in credential proxy. Containers get credentials injected via a local HTTP proxy that reads from `.env` — no external services needed.
|
||||||
|
|
||||||
|
## Phase 1: Pre-flight
|
||||||
|
|
||||||
|
### Check if already applied
|
||||||
|
|
||||||
|
Check if `src/credential-proxy.ts` is imported in `src/index.ts`:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
grep "credential-proxy" src/index.ts
|
||||||
|
```
|
||||||
|
|
||||||
|
If it shows an import for `startCredentialProxy`, the native proxy is already active. Skip to Phase 3 (Setup).
|
||||||
|
|
||||||
|
### Check if OneCLI is active
|
||||||
|
|
||||||
|
```bash
|
||||||
|
grep "@onecli-sh/sdk" package.json
|
||||||
|
```
|
||||||
|
|
||||||
|
If `@onecli-sh/sdk` appears, OneCLI is the active credential provider. Proceed with Phase 2 to replace it.
|
||||||
|
|
||||||
|
If neither check matches, you may be on an older version. Run `/update-nanoclaw` first, then retry.
|
||||||
|
|
||||||
|
## Phase 2: Apply Code Changes
|
||||||
|
|
||||||
|
### Ensure upstream remote
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git remote -v
|
||||||
|
```
|
||||||
|
|
||||||
|
If `upstream` is missing, add it:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git remote add upstream https://github.com/qwibitai/nanoclaw.git
|
||||||
|
```
|
||||||
|
|
||||||
|
### Merge the skill branch
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git fetch upstream skill/native-credential-proxy
|
||||||
|
git merge upstream/skill/native-credential-proxy || {
|
||||||
|
git checkout --theirs package-lock.json
|
||||||
|
git add package-lock.json
|
||||||
|
git merge --continue
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
This merges in:
|
||||||
|
- `src/credential-proxy.ts` and `src/credential-proxy.test.ts` (the proxy implementation)
|
||||||
|
- Restored credential proxy usage in `src/index.ts`, `src/container-runner.ts`, `src/container-runtime.ts`, `src/config.ts`
|
||||||
|
- Removed `@onecli-sh/sdk` dependency
|
||||||
|
- Restored `CREDENTIAL_PROXY_PORT` config (default 3001)
|
||||||
|
- Restored platform-aware proxy bind address detection
|
||||||
|
- Reverted setup skill to `.env`-based credential instructions
|
||||||
|
|
||||||
|
If the merge reports conflicts beyond `package-lock.json`, resolve them by reading the conflicted files and understanding the intent of both sides.
|
||||||
|
|
||||||
|
### Update main group CLAUDE.md
|
||||||
|
|
||||||
|
Replace the OneCLI auth reference with the native proxy:
|
||||||
|
|
||||||
|
In `groups/main/CLAUDE.md`, replace:
|
||||||
|
> OneCLI manages credentials (including Anthropic auth) — run `onecli --help`.
|
||||||
|
|
||||||
|
with:
|
||||||
|
> The native credential proxy manages credentials (including Anthropic auth) via `.env` — see `src/credential-proxy.ts`.
|
||||||
|
|
||||||
|
### Validate code changes
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm install
|
||||||
|
npm run build
|
||||||
|
npx vitest run src/credential-proxy.test.ts src/container-runner.test.ts
|
||||||
|
```
|
||||||
|
|
||||||
|
All tests must pass and build must be clean before proceeding.
|
||||||
|
|
||||||
|
## Phase 3: Setup Credentials
|
||||||
|
|
||||||
|
AskUserQuestion: Do you want to use your **Claude subscription** (Pro/Max) or an **Anthropic API key**?
|
||||||
|
|
||||||
|
1. **Claude subscription (Pro/Max)** — description: "Uses your existing Claude Pro or Max subscription. You'll run `claude setup-token` in another terminal to get your token."
|
||||||
|
2. **Anthropic API key** — description: "Pay-per-use API key from console.anthropic.com."
|
||||||
|
|
||||||
|
### Subscription path
|
||||||
|
|
||||||
|
Tell the user to run `claude setup-token` in another terminal and copy the token it outputs. Do NOT collect the token in chat.
|
||||||
|
|
||||||
|
Once they have the token, add it to `.env`:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Add to .env (create file if needed)
|
||||||
|
echo 'CLAUDE_CODE_OAUTH_TOKEN=<token>' >> .env
|
||||||
|
```
|
||||||
|
|
||||||
|
Note: `ANTHROPIC_AUTH_TOKEN` is also supported as a fallback.
|
||||||
|
|
||||||
|
### API key path
|
||||||
|
|
||||||
|
Tell the user to get an API key from https://console.anthropic.com/settings/keys if they don't have one.
|
||||||
|
|
||||||
|
Add it to `.env`:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
echo 'ANTHROPIC_API_KEY=<key>' >> .env
|
||||||
|
```
|
||||||
|
|
||||||
|
### After either path
|
||||||
|
|
||||||
|
**If the user's response happens to contain a token or key** (starts with `sk-ant-` or looks like a token): write it to `.env` on their behalf using the appropriate variable name.
|
||||||
|
|
||||||
|
**Optional:** If the user needs a custom API endpoint, they can add `ANTHROPIC_BASE_URL=<url>` to `.env` (defaults to `https://api.anthropic.com`).
|
||||||
|
|
||||||
|
## Phase 4: Verify
|
||||||
|
|
||||||
|
1. Rebuild and restart:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm run build
|
||||||
|
```
|
||||||
|
|
||||||
|
Then restart the service:
|
||||||
|
- macOS: `launchctl kickstart -k gui/$(id -u)/com.nanoclaw`
|
||||||
|
- Linux: `systemctl --user restart nanoclaw`
|
||||||
|
- WSL/manual: stop and re-run `bash start-nanoclaw.sh`
|
||||||
|
|
||||||
|
2. Check logs for successful proxy startup:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
tail -20 logs/nanoclaw.log | grep "Credential proxy"
|
||||||
|
```
|
||||||
|
|
||||||
|
Expected: `Credential proxy started` with port and auth mode.
|
||||||
|
|
||||||
|
3. Send a test message in the registered chat to verify the agent responds.
|
||||||
|
|
||||||
|
4. Note: after applying this skill, the OneCLI credential steps in `/setup` no longer apply. `.env` is now the credential source.
|
||||||
|
|
||||||
|
## Troubleshooting
|
||||||
|
|
||||||
|
**"Credential proxy upstream error" in logs:** Check that `.env` has a valid `ANTHROPIC_API_KEY` or `CLAUDE_CODE_OAUTH_TOKEN`. Verify the API is reachable: `curl -s https://api.anthropic.com/v1/messages -H "x-api-key: test" | head`.
|
||||||
|
|
||||||
|
**Port 3001 already in use:** Set `CREDENTIAL_PROXY_PORT=<other port>` in `.env` or as an environment variable.
|
||||||
|
|
||||||
|
**Container can't reach proxy (Linux):** The proxy binds to the `docker0` bridge IP by default. If that interface doesn't exist (e.g. rootless Docker), set `CREDENTIAL_PROXY_HOST=0.0.0.0` as an environment variable.
|
||||||
|
|
||||||
|
**OAuth token expired (401 errors):** Re-run `claude setup-token` in a terminal and update the token in `.env`.
|
||||||
|
|
||||||
|
## Removal
|
||||||
|
|
||||||
|
To revert to OneCLI gateway:
|
||||||
|
|
||||||
|
1. Find the merge commit: `git log --oneline --merges -5`
|
||||||
|
2. Revert it: `git revert <merge-commit> -m 1` (undoes the skill branch merge, keeps your other changes)
|
||||||
|
3. `npm install` (re-adds `@onecli-sh/sdk`)
|
||||||
|
4. `npm run build`
|
||||||
|
5. Follow `/setup` step 4 to configure OneCLI credentials
|
||||||
|
6. Remove `ANTHROPIC_API_KEY` / `CLAUDE_CODE_OAUTH_TOKEN` from `.env`
|
||||||
@@ -8,12 +8,8 @@
|
|||||||
import { spawn } from 'child_process';
|
import { spawn } from 'child_process';
|
||||||
import fs from 'fs';
|
import fs from 'fs';
|
||||||
import path from 'path';
|
import path from 'path';
|
||||||
import pino from 'pino';
|
|
||||||
|
|
||||||
const logger = pino({
|
import { logger } from '../../../src/logger.js';
|
||||||
level: process.env.LOG_LEVEL || 'info',
|
|
||||||
transport: { target: 'pino-pretty', options: { colorize: true } }
|
|
||||||
});
|
|
||||||
|
|
||||||
interface SkillResult {
|
interface SkillResult {
|
||||||
success: boolean;
|
success: boolean;
|
||||||
|
|||||||
@@ -1 +0,0 @@
|
|||||||
TELEGRAM_BOT_TOKEN=
|
|
||||||
|
|||||||
1
.github/workflows/bump-version.yml
vendored
1
.github/workflows/bump-version.yml
vendored
@@ -7,6 +7,7 @@ on:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
bump-version:
|
bump-version:
|
||||||
|
if: github.repository == 'qwibitai/nanoclaw'
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/create-github-app-token@v1
|
- uses: actions/create-github-app-token@v1
|
||||||
|
|||||||
214
.github/workflows/fork-sync-skills.yml
vendored
214
.github/workflows/fork-sync-skills.yml
vendored
@@ -1,214 +0,0 @@
|
|||||||
name: Sync upstream & merge-forward skill branches
|
|
||||||
|
|
||||||
on:
|
|
||||||
# Triggered by upstream repo via repository_dispatch
|
|
||||||
repository_dispatch:
|
|
||||||
types: [upstream-main-updated]
|
|
||||||
# Fallback: run on a schedule in case dispatch isn't configured
|
|
||||||
schedule:
|
|
||||||
- cron: '0 */6 * * *' # every 6 hours
|
|
||||||
# Also run when fork's main is pushed directly
|
|
||||||
push:
|
|
||||||
branches: [main]
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
contents: write
|
|
||||||
issues: write
|
|
||||||
|
|
||||||
concurrency:
|
|
||||||
group: fork-sync
|
|
||||||
cancel-in-progress: true
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
sync-and-merge:
|
|
||||||
if: github.repository != 'qwibitai/nanoclaw'
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/create-github-app-token@v1
|
|
||||||
id: app-token
|
|
||||||
with:
|
|
||||||
app-id: ${{ secrets.APP_ID }}
|
|
||||||
private-key: ${{ secrets.APP_PRIVATE_KEY }}
|
|
||||||
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
with:
|
|
||||||
fetch-depth: 0
|
|
||||||
token: ${{ steps.app-token.outputs.token }}
|
|
||||||
|
|
||||||
- uses: actions/setup-node@v4
|
|
||||||
with:
|
|
||||||
node-version: 20
|
|
||||||
cache: npm
|
|
||||||
|
|
||||||
- name: Configure git
|
|
||||||
run: |
|
|
||||||
git config user.name "github-actions[bot]"
|
|
||||||
git config user.email "github-actions[bot]@users.noreply.github.com"
|
|
||||||
|
|
||||||
- name: Sync with upstream main
|
|
||||||
id: sync
|
|
||||||
run: |
|
|
||||||
# Add upstream remote
|
|
||||||
git remote add upstream https://github.com/qwibitai/nanoclaw.git
|
|
||||||
git fetch upstream main
|
|
||||||
|
|
||||||
# Check if upstream has new commits
|
|
||||||
if git merge-base --is-ancestor upstream/main HEAD; then
|
|
||||||
echo "Already up to date with upstream main."
|
|
||||||
echo "synced=false" >> "$GITHUB_OUTPUT"
|
|
||||||
exit 0
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Merge upstream main into fork's main
|
|
||||||
if ! git merge upstream/main --no-edit; then
|
|
||||||
echo "::error::Failed to merge upstream/main into fork main — conflicts detected"
|
|
||||||
git merge --abort
|
|
||||||
echo "synced=false" >> "$GITHUB_OUTPUT"
|
|
||||||
echo "sync_failed=true" >> "$GITHUB_OUTPUT"
|
|
||||||
exit 0
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Validate build
|
|
||||||
npm ci
|
|
||||||
if ! npm run build; then
|
|
||||||
echo "::error::Build failed after merging upstream/main"
|
|
||||||
git reset --hard "origin/main"
|
|
||||||
echo "synced=false" >> "$GITHUB_OUTPUT"
|
|
||||||
echo "sync_failed=true" >> "$GITHUB_OUTPUT"
|
|
||||||
exit 0
|
|
||||||
fi
|
|
||||||
|
|
||||||
if ! npm test 2>/dev/null; then
|
|
||||||
echo "::error::Tests failed after merging upstream/main"
|
|
||||||
git reset --hard "origin/main"
|
|
||||||
echo "synced=false" >> "$GITHUB_OUTPUT"
|
|
||||||
echo "sync_failed=true" >> "$GITHUB_OUTPUT"
|
|
||||||
exit 0
|
|
||||||
fi
|
|
||||||
|
|
||||||
git push origin main
|
|
||||||
echo "synced=true" >> "$GITHUB_OUTPUT"
|
|
||||||
|
|
||||||
- name: Merge main into skill branches
|
|
||||||
id: merge
|
|
||||||
run: |
|
|
||||||
# Re-fetch to pick up any changes pushed since job start
|
|
||||||
git fetch origin
|
|
||||||
|
|
||||||
FAILED=""
|
|
||||||
SUCCEEDED=""
|
|
||||||
|
|
||||||
# List all remote skill branches
|
|
||||||
SKILL_BRANCHES=$(git branch -r --list 'origin/skill/*' | sed 's|origin/||' | xargs)
|
|
||||||
|
|
||||||
if [ -z "$SKILL_BRANCHES" ]; then
|
|
||||||
echo "No skill branches found."
|
|
||||||
exit 0
|
|
||||||
fi
|
|
||||||
|
|
||||||
for BRANCH in $SKILL_BRANCHES; do
|
|
||||||
SKILL_NAME=$(echo "$BRANCH" | sed 's|skill/||')
|
|
||||||
echo ""
|
|
||||||
echo "=== Processing $BRANCH ==="
|
|
||||||
|
|
||||||
git checkout -B "$BRANCH" "origin/$BRANCH"
|
|
||||||
|
|
||||||
if ! git merge main --no-edit; then
|
|
||||||
echo "::warning::Merge conflict in $BRANCH"
|
|
||||||
git merge --abort
|
|
||||||
FAILED="$FAILED $SKILL_NAME"
|
|
||||||
continue
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Check if there's anything new to push
|
|
||||||
if git diff --quiet "origin/$BRANCH"; then
|
|
||||||
echo "$BRANCH is already up to date with main."
|
|
||||||
SUCCEEDED="$SUCCEEDED $SKILL_NAME"
|
|
||||||
continue
|
|
||||||
fi
|
|
||||||
|
|
||||||
npm ci
|
|
||||||
|
|
||||||
if ! npm run build; then
|
|
||||||
echo "::warning::Build failed for $BRANCH"
|
|
||||||
git reset --hard "origin/$BRANCH"
|
|
||||||
FAILED="$FAILED $SKILL_NAME"
|
|
||||||
continue
|
|
||||||
fi
|
|
||||||
|
|
||||||
if ! npm test 2>/dev/null; then
|
|
||||||
echo "::warning::Tests failed for $BRANCH"
|
|
||||||
git reset --hard "origin/$BRANCH"
|
|
||||||
FAILED="$FAILED $SKILL_NAME"
|
|
||||||
continue
|
|
||||||
fi
|
|
||||||
|
|
||||||
git push origin "$BRANCH"
|
|
||||||
SUCCEEDED="$SUCCEEDED $SKILL_NAME"
|
|
||||||
echo "$BRANCH merged and pushed successfully."
|
|
||||||
done
|
|
||||||
|
|
||||||
echo ""
|
|
||||||
echo "=== Results ==="
|
|
||||||
echo "Succeeded: $SUCCEEDED"
|
|
||||||
echo "Failed: $FAILED"
|
|
||||||
|
|
||||||
echo "failed=$FAILED" >> "$GITHUB_OUTPUT"
|
|
||||||
echo "succeeded=$SUCCEEDED" >> "$GITHUB_OUTPUT"
|
|
||||||
|
|
||||||
- name: Open issue for upstream sync failure
|
|
||||||
if: steps.sync.outputs.sync_failed == 'true'
|
|
||||||
uses: actions/github-script@v7
|
|
||||||
with:
|
|
||||||
script: |
|
|
||||||
await github.rest.issues.create({
|
|
||||||
owner: context.repo.owner,
|
|
||||||
repo: context.repo.repo,
|
|
||||||
title: `Upstream sync failed — merge conflict or build failure`,
|
|
||||||
body: [
|
|
||||||
'The automated sync with `qwibitai/nanoclaw` main failed.',
|
|
||||||
'',
|
|
||||||
'This usually means upstream made changes that conflict with this fork\'s channel code.',
|
|
||||||
'',
|
|
||||||
'To resolve manually:',
|
|
||||||
'```bash',
|
|
||||||
'git fetch upstream main',
|
|
||||||
'git merge upstream/main',
|
|
||||||
'# resolve conflicts',
|
|
||||||
'npm run build && npm test',
|
|
||||||
'git push',
|
|
||||||
'```',
|
|
||||||
].join('\n'),
|
|
||||||
labels: ['upstream-sync']
|
|
||||||
});
|
|
||||||
|
|
||||||
- name: Open issue for failed skill merges
|
|
||||||
if: steps.merge.outputs.failed != ''
|
|
||||||
uses: actions/github-script@v7
|
|
||||||
with:
|
|
||||||
script: |
|
|
||||||
const failed = '${{ steps.merge.outputs.failed }}'.trim().split(/\s+/);
|
|
||||||
const body = [
|
|
||||||
`The merge-forward workflow failed to merge \`main\` into the following skill branches:`,
|
|
||||||
'',
|
|
||||||
...failed.map(s => `- \`skill/${s}\`: merge conflict, build failure, or test failure`),
|
|
||||||
'',
|
|
||||||
'Please resolve manually:',
|
|
||||||
'```bash',
|
|
||||||
...failed.map(s => [
|
|
||||||
`git checkout skill/${s}`,
|
|
||||||
`git merge main`,
|
|
||||||
`# resolve conflicts, then: git push`,
|
|
||||||
''
|
|
||||||
]).flat(),
|
|
||||||
'```',
|
|
||||||
].join('\n');
|
|
||||||
|
|
||||||
await github.rest.issues.create({
|
|
||||||
owner: context.repo.owner,
|
|
||||||
repo: context.repo.repo,
|
|
||||||
title: `Merge-forward failed for ${failed.length} skill branch(es)`,
|
|
||||||
body,
|
|
||||||
labels: ['skill-maintenance']
|
|
||||||
});
|
|
||||||
160
.github/workflows/merge-forward-skills.yml
vendored
160
.github/workflows/merge-forward-skills.yml
vendored
@@ -1,160 +0,0 @@
|
|||||||
name: Merge-forward skill branches
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [main]
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
contents: write
|
|
||||||
issues: write
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
merge-forward:
|
|
||||||
if: github.repository == 'qwibitai/nanoclaw'
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
with:
|
|
||||||
fetch-depth: 0
|
|
||||||
token: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
|
|
||||||
- uses: actions/setup-node@v4
|
|
||||||
with:
|
|
||||||
node-version: 20
|
|
||||||
cache: npm
|
|
||||||
|
|
||||||
- name: Configure git
|
|
||||||
run: |
|
|
||||||
git config user.name "github-actions[bot]"
|
|
||||||
git config user.email "github-actions[bot]@users.noreply.github.com"
|
|
||||||
|
|
||||||
- name: Merge main into each skill branch
|
|
||||||
id: merge
|
|
||||||
run: |
|
|
||||||
FAILED=""
|
|
||||||
SUCCEEDED=""
|
|
||||||
|
|
||||||
# List all remote skill branches
|
|
||||||
SKILL_BRANCHES=$(git branch -r --list 'origin/skill/*' | sed 's|origin/||' | xargs)
|
|
||||||
|
|
||||||
if [ -z "$SKILL_BRANCHES" ]; then
|
|
||||||
echo "No skill branches found."
|
|
||||||
exit 0
|
|
||||||
fi
|
|
||||||
|
|
||||||
for BRANCH in $SKILL_BRANCHES; do
|
|
||||||
SKILL_NAME=$(echo "$BRANCH" | sed 's|skill/||')
|
|
||||||
echo ""
|
|
||||||
echo "=== Processing $BRANCH ==="
|
|
||||||
|
|
||||||
# Checkout the skill branch
|
|
||||||
git checkout -B "$BRANCH" "origin/$BRANCH"
|
|
||||||
|
|
||||||
# Attempt merge
|
|
||||||
if ! git merge main --no-edit; then
|
|
||||||
echo "::warning::Merge conflict in $BRANCH"
|
|
||||||
git merge --abort
|
|
||||||
FAILED="$FAILED $SKILL_NAME"
|
|
||||||
continue
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Check if there's anything new to push
|
|
||||||
if git diff --quiet "origin/$BRANCH"; then
|
|
||||||
echo "$BRANCH is already up to date with main."
|
|
||||||
SUCCEEDED="$SUCCEEDED $SKILL_NAME"
|
|
||||||
continue
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Install deps and validate
|
|
||||||
npm ci
|
|
||||||
|
|
||||||
if ! npm run build; then
|
|
||||||
echo "::warning::Build failed for $BRANCH"
|
|
||||||
git reset --hard "origin/$BRANCH"
|
|
||||||
FAILED="$FAILED $SKILL_NAME"
|
|
||||||
continue
|
|
||||||
fi
|
|
||||||
|
|
||||||
if ! npm test 2>/dev/null; then
|
|
||||||
echo "::warning::Tests failed for $BRANCH"
|
|
||||||
git reset --hard "origin/$BRANCH"
|
|
||||||
FAILED="$FAILED $SKILL_NAME"
|
|
||||||
continue
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Push the updated branch
|
|
||||||
git push origin "$BRANCH"
|
|
||||||
SUCCEEDED="$SUCCEEDED $SKILL_NAME"
|
|
||||||
echo "$BRANCH merged and pushed successfully."
|
|
||||||
done
|
|
||||||
|
|
||||||
echo ""
|
|
||||||
echo "=== Results ==="
|
|
||||||
echo "Succeeded: $SUCCEEDED"
|
|
||||||
echo "Failed: $FAILED"
|
|
||||||
|
|
||||||
# Export for issue creation
|
|
||||||
echo "failed=$FAILED" >> "$GITHUB_OUTPUT"
|
|
||||||
echo "succeeded=$SUCCEEDED" >> "$GITHUB_OUTPUT"
|
|
||||||
|
|
||||||
- name: Open issue for failed merges
|
|
||||||
if: steps.merge.outputs.failed != ''
|
|
||||||
uses: actions/github-script@v7
|
|
||||||
with:
|
|
||||||
script: |
|
|
||||||
const failed = '${{ steps.merge.outputs.failed }}'.trim().split(/\s+/);
|
|
||||||
const sha = context.sha.substring(0, 7);
|
|
||||||
const body = [
|
|
||||||
`The merge-forward workflow failed to merge \`main\` (${sha}) into the following skill branches:`,
|
|
||||||
'',
|
|
||||||
...failed.map(s => `- \`skill/${s}\`: merge conflict, build failure, or test failure`),
|
|
||||||
'',
|
|
||||||
'Please resolve manually:',
|
|
||||||
'```bash',
|
|
||||||
...failed.map(s => [
|
|
||||||
`git checkout skill/${s}`,
|
|
||||||
`git merge main`,
|
|
||||||
`# resolve conflicts, then: git push`,
|
|
||||||
''
|
|
||||||
]).flat(),
|
|
||||||
'```',
|
|
||||||
'',
|
|
||||||
`Triggered by push to main: ${context.sha}`
|
|
||||||
].join('\n');
|
|
||||||
|
|
||||||
await github.rest.issues.create({
|
|
||||||
owner: context.repo.owner,
|
|
||||||
repo: context.repo.repo,
|
|
||||||
title: `Merge-forward failed for ${failed.length} skill branch(es) after ${sha}`,
|
|
||||||
body,
|
|
||||||
labels: ['skill-maintenance']
|
|
||||||
});
|
|
||||||
|
|
||||||
- name: Notify channel forks
|
|
||||||
if: always()
|
|
||||||
uses: actions/github-script@v7
|
|
||||||
with:
|
|
||||||
github-token: ${{ secrets.FORK_DISPATCH_TOKEN || secrets.GITHUB_TOKEN }}
|
|
||||||
script: |
|
|
||||||
const forks = [
|
|
||||||
'nanoclaw-whatsapp',
|
|
||||||
'nanoclaw-telegram',
|
|
||||||
'nanoclaw-discord',
|
|
||||||
'nanoclaw-slack',
|
|
||||||
'nanoclaw-gmail',
|
|
||||||
'nanoclaw-docker-sandboxes',
|
|
||||||
];
|
|
||||||
const sha = context.sha.substring(0, 7);
|
|
||||||
for (const repo of forks) {
|
|
||||||
try {
|
|
||||||
await github.rest.repos.createDispatchEvent({
|
|
||||||
owner: 'qwibitai',
|
|
||||||
repo,
|
|
||||||
event_type: 'upstream-main-updated',
|
|
||||||
client_payload: { sha: context.sha },
|
|
||||||
});
|
|
||||||
console.log(`Notified ${repo}`);
|
|
||||||
} catch (e) {
|
|
||||||
console.log(`Failed to notify ${repo}: ${e.message}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
1
.github/workflows/update-tokens.yml
vendored
1
.github/workflows/update-tokens.yml
vendored
@@ -8,6 +8,7 @@ on:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
update-tokens:
|
update-tokens:
|
||||||
|
if: github.repository == 'qwibitai/nanoclaw'
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/create-github-app-token@v1
|
- uses: actions/create-github-app-token@v1
|
||||||
|
|||||||
142
CHANGELOG.md
142
CHANGELOG.md
@@ -2,7 +2,143 @@
|
|||||||
|
|
||||||
All notable changes to NanoClaw will be documented in this file.
|
All notable changes to NanoClaw will be documented in this file.
|
||||||
|
|
||||||
## [1.2.0](https://github.com/qwibitai/nanoclaw/compare/v1.1.6...v1.2.0)
|
For detailed release notes, see the [full changelog on the documentation site](https://docs.nanoclaw.dev/changelog).
|
||||||
|
|
||||||
[BREAKING] WhatsApp removed from core, now a skill. Run `/add-whatsapp` to re-add (existing auth/groups preserved).
|
## [1.2.35] - 2026-03-26
|
||||||
- **fix:** Prevent scheduled tasks from executing twice when container runtime exceeds poll interval (#138, #669)
|
|
||||||
|
- [BREAKING] OneCLI Agent Vault replaces the built-in credential proxy. Existing `.env` credentials must be migrated to the vault. Run `/init-onecli` to install OneCLI and migrate credentials.
|
||||||
|
|
||||||
|
## [1.2.21] - 2026-03-22
|
||||||
|
|
||||||
|
- Added opt-in diagnostics via PostHog with explicit user consent (Yes / No / Never ask again)
|
||||||
|
|
||||||
|
## [1.2.20] - 2026-03-21
|
||||||
|
|
||||||
|
- Added ESLint configuration with error-handling rules
|
||||||
|
|
||||||
|
## [1.2.19] - 2026-03-19
|
||||||
|
|
||||||
|
- Reduced `docker stop` timeout for faster container restarts (`-t 1` flag)
|
||||||
|
|
||||||
|
## [1.2.18] - 2026-03-19
|
||||||
|
|
||||||
|
- User prompt content no longer logged on container errors — only input metadata
|
||||||
|
- Added Japanese README translation
|
||||||
|
|
||||||
|
## [1.2.17] - 2026-03-18
|
||||||
|
|
||||||
|
- Added `/capabilities` and `/status` container-agent skills
|
||||||
|
|
||||||
|
## [1.2.16] - 2026-03-18
|
||||||
|
|
||||||
|
- Tasks snapshot now refreshes immediately after IPC task mutations
|
||||||
|
|
||||||
|
## [1.2.15] - 2026-03-16
|
||||||
|
|
||||||
|
- Fixed remote-control prompt auto-accept to prevent immediate exit
|
||||||
|
- Added `KillMode=process` so remote-control survives service restarts
|
||||||
|
|
||||||
|
## [1.2.14] - 2026-03-14
|
||||||
|
|
||||||
|
- Added `/remote-control` command for host-level Claude Code access from within containers
|
||||||
|
|
||||||
|
## [1.2.13] - 2026-03-14
|
||||||
|
|
||||||
|
**Breaking:** Skills are now git branches, channels are separate fork repos.
|
||||||
|
|
||||||
|
- Skills live as `skill/*` git branches merged via `git merge`
|
||||||
|
- Added Docker Sandboxes support
|
||||||
|
- Fixed setup registration to use correct CLI commands
|
||||||
|
|
||||||
|
## [1.2.12] - 2026-03-08
|
||||||
|
|
||||||
|
- Added `/compact` skill for manual context compaction
|
||||||
|
- Enhanced container environment isolation via credential proxy
|
||||||
|
|
||||||
|
## [1.2.11] - 2026-03-08
|
||||||
|
|
||||||
|
- Added PDF reader, image vision, and WhatsApp reactions skills
|
||||||
|
- Fixed task container to close promptly when agent uses IPC-only messaging
|
||||||
|
|
||||||
|
## [1.2.10] - 2026-03-06
|
||||||
|
|
||||||
|
- Added `LIMIT` to unbounded message history queries for better performance
|
||||||
|
|
||||||
|
## [1.2.9] - 2026-03-06
|
||||||
|
|
||||||
|
- Agent prompts now include timezone context for accurate time references
|
||||||
|
|
||||||
|
## [1.2.8] - 2026-03-06
|
||||||
|
|
||||||
|
- Fixed misleading `send_message` tool description for scheduled tasks
|
||||||
|
|
||||||
|
## [1.2.7] - 2026-03-06
|
||||||
|
|
||||||
|
- Added `/add-ollama` skill for local model inference
|
||||||
|
- Added `update_task` tool and return task ID from `schedule_task`
|
||||||
|
|
||||||
|
## [1.2.6] - 2026-03-04
|
||||||
|
|
||||||
|
- Updated `claude-agent-sdk` to 0.2.68
|
||||||
|
|
||||||
|
## [1.2.5] - 2026-03-04
|
||||||
|
|
||||||
|
- CI formatting fix
|
||||||
|
|
||||||
|
## [1.2.4] - 2026-03-04
|
||||||
|
|
||||||
|
- Fixed `_chatJid` rename to `chatJid` in `onMessage` callback
|
||||||
|
|
||||||
|
## [1.2.3] - 2026-03-04
|
||||||
|
|
||||||
|
- Added sender allowlist for per-chat access control
|
||||||
|
|
||||||
|
## [1.2.2] - 2026-03-04
|
||||||
|
|
||||||
|
- Added `/use-local-whisper` skill for local voice transcription
|
||||||
|
- Atomic task claims prevent scheduled tasks from executing twice
|
||||||
|
|
||||||
|
## [1.2.1] - 2026-03-02
|
||||||
|
|
||||||
|
- Version bump (no functional changes)
|
||||||
|
|
||||||
|
## [1.2.0] - 2026-03-02
|
||||||
|
|
||||||
|
**Breaking:** WhatsApp removed from core, now a skill. Run `/add-whatsapp` to re-add.
|
||||||
|
|
||||||
|
- Channel registry: channels self-register at startup via `registerChannel()` factory pattern
|
||||||
|
- `isMain` flag replaces folder-name-based main group detection
|
||||||
|
- `ENABLED_CHANNELS` removed — channels detected by credential presence
|
||||||
|
- Prevent scheduled tasks from executing twice when container runtime exceeds poll interval
|
||||||
|
|
||||||
|
## [1.1.6] - 2026-03-01
|
||||||
|
|
||||||
|
- Added CJK font support for Chromium screenshots
|
||||||
|
|
||||||
|
## [1.1.5] - 2026-03-01
|
||||||
|
|
||||||
|
- Fixed wrapped WhatsApp message normalization
|
||||||
|
|
||||||
|
## [1.1.4] - 2026-03-01
|
||||||
|
|
||||||
|
- Added third-party model support
|
||||||
|
- Added `/update-nanoclaw` skill for syncing with upstream
|
||||||
|
|
||||||
|
## [1.1.3] - 2026-02-25
|
||||||
|
|
||||||
|
- Added `/add-slack` skill
|
||||||
|
- Restructured Gmail skill for new architecture
|
||||||
|
|
||||||
|
## [1.1.2] - 2026-02-24
|
||||||
|
|
||||||
|
- Improved error handling for WhatsApp Web version fetch
|
||||||
|
|
||||||
|
## [1.1.1] - 2026-02-24
|
||||||
|
|
||||||
|
- Added Qodo skills and codebase intelligence
|
||||||
|
- Fixed WhatsApp 405 connection failures
|
||||||
|
|
||||||
|
## [1.1.0] - 2026-02-23
|
||||||
|
|
||||||
|
- Added `/update` skill to pull upstream changes from within Claude Code
|
||||||
|
- Enhanced container environment isolation via credential proxy
|
||||||
|
|||||||
@@ -32,6 +32,10 @@ Single Node.js process with skill-based channel system. Channels (WhatsApp, Tele
|
|||||||
| `groups/{name}/CLAUDE.md` | Per-group memory (isolated) |
|
| `groups/{name}/CLAUDE.md` | Per-group memory (isolated) |
|
||||||
| `container/skills/` | Skills loaded inside agent containers (browser, status, formatting) |
|
| `container/skills/` | Skills loaded inside agent containers (browser, status, formatting) |
|
||||||
|
|
||||||
|
## Secrets / Credentials / Proxy (OneCLI)
|
||||||
|
|
||||||
|
API keys, secret keys, OAuth tokens, and auth credentials are managed by the OneCLI gateway — which handles secret injection into containers at request time, so no keys or tokens are ever passed to containers directly. Run `onecli --help`.
|
||||||
|
|
||||||
## Skills
|
## Skills
|
||||||
|
|
||||||
Four types of skills exist in NanoClaw. See [CONTRIBUTING.md](CONTRIBUTING.md) for the full taxonomy and guidelines.
|
Four types of skills exist in NanoClaw. See [CONTRIBUTING.md](CONTRIBUTING.md) for the full taxonomy and guidelines.
|
||||||
@@ -47,6 +51,7 @@ Four types of skills exist in NanoClaw. See [CONTRIBUTING.md](CONTRIBUTING.md) f
|
|||||||
| `/customize` | Adding channels, integrations, changing behavior |
|
| `/customize` | Adding channels, integrations, changing behavior |
|
||||||
| `/debug` | Container issues, logs, troubleshooting |
|
| `/debug` | Container issues, logs, troubleshooting |
|
||||||
| `/update-nanoclaw` | Bring upstream NanoClaw updates into a customized install |
|
| `/update-nanoclaw` | Bring upstream NanoClaw updates into a customized install |
|
||||||
|
| `/init-onecli` | Install OneCLI Agent Vault and migrate `.env` credentials to it |
|
||||||
| `/qodo-pr-resolver` | Fetch and fix Qodo PR review issues interactively or in batch |
|
| `/qodo-pr-resolver` | Fetch and fix Qodo PR review issues interactively or in batch |
|
||||||
| `/get-qodo-rules` | Load org- and repo-level coding rules from Qodo before code tasks |
|
| `/get-qodo-rules` | Load org- and repo-level coding rules from Qodo before code tasks |
|
||||||
|
|
||||||
|
|||||||
@@ -13,3 +13,6 @@ Thanks to everyone who has contributed to NanoClaw!
|
|||||||
- [baijunjie](https://github.com/baijunjie) — BaiJunjie
|
- [baijunjie](https://github.com/baijunjie) — BaiJunjie
|
||||||
- [Michaelliv](https://github.com/Michaelliv) — Michael
|
- [Michaelliv](https://github.com/Michaelliv) — Michael
|
||||||
- [kk17](https://github.com/kk17) — Kyle Zhike Chen
|
- [kk17](https://github.com/kk17) — Kyle Zhike Chen
|
||||||
|
- [flobo3](https://github.com/flobo3) — Flo
|
||||||
|
- [edwinwzhe](https://github.com/edwinwzhe) — Edwin He
|
||||||
|
- [scottgl9](https://github.com/scottgl9) — Scott Glover
|
||||||
|
|||||||
14
README.md
14
README.md
@@ -8,6 +8,7 @@
|
|||||||
|
|
||||||
<p align="center">
|
<p align="center">
|
||||||
<a href="https://nanoclaw.dev">nanoclaw.dev</a> •
|
<a href="https://nanoclaw.dev">nanoclaw.dev</a> •
|
||||||
|
<a href="https://docs.nanoclaw.dev">docs</a> •
|
||||||
<a href="README_zh.md">中文</a> •
|
<a href="README_zh.md">中文</a> •
|
||||||
<a href="README_ja.md">日本語</a> •
|
<a href="README_ja.md">日本語</a> •
|
||||||
<a href="https://discord.gg/VDdww8qS42"><img src="https://img.shields.io/discord/1470188214710046894?label=Discord&logo=discord&v=2" alt="Discord" valign="middle"></a> •
|
<a href="https://discord.gg/VDdww8qS42"><img src="https://img.shields.io/discord/1470188214710046894?label=Discord&logo=discord&v=2" alt="Discord" valign="middle"></a> •
|
||||||
@@ -71,6 +72,7 @@ Then run `/setup`. Claude Code handles everything: dependencies, authentication,
|
|||||||
- **Scheduled tasks** - Recurring jobs that run Claude and can message you back
|
- **Scheduled tasks** - Recurring jobs that run Claude and can message you back
|
||||||
- **Web access** - Search and fetch content from the Web
|
- **Web access** - Search and fetch content from the Web
|
||||||
- **Container isolation** - Agents are sandboxed in Docker (macOS/Linux), [Docker Sandboxes](docs/docker-sandboxes.md) (micro VM isolation), or Apple Container (macOS)
|
- **Container isolation** - Agents are sandboxed in Docker (macOS/Linux), [Docker Sandboxes](docs/docker-sandboxes.md) (micro VM isolation), or Apple Container (macOS)
|
||||||
|
- **Credential security** - Agents never hold raw API keys. Outbound requests route through [OneCLI's Agent Vault](https://github.com/onecli/onecli), which injects credentials at request time and enforces per-agent policies and rate limits.
|
||||||
- **Agent Swarms** - Spin up teams of specialized agents that collaborate on complex tasks
|
- **Agent Swarms** - Spin up teams of specialized agents that collaborate on complex tasks
|
||||||
- **Optional integrations** - Add Gmail (`/add-gmail`) and more via skills
|
- **Optional integrations** - Add Gmail (`/add-gmail`) and more via skills
|
||||||
|
|
||||||
@@ -121,7 +123,7 @@ Skills we'd like to see:
|
|||||||
|
|
||||||
## Requirements
|
## Requirements
|
||||||
|
|
||||||
- macOS or Linux
|
- macOS, Linux, or Windows (via WSL2)
|
||||||
- Node.js 20+
|
- Node.js 20+
|
||||||
- [Claude Code](https://claude.ai/download)
|
- [Claude Code](https://claude.ai/download)
|
||||||
- [Apple Container](https://github.com/apple/container) (macOS) or [Docker](https://docker.com/products/docker-desktop) (macOS/Linux)
|
- [Apple Container](https://github.com/apple/container) (macOS) or [Docker](https://docker.com/products/docker-desktop) (macOS/Linux)
|
||||||
@@ -134,7 +136,7 @@ Channels --> SQLite --> Polling loop --> Container (Claude Agent SDK) --> Respon
|
|||||||
|
|
||||||
Single Node.js process. Channels are added via skills and self-register at startup — the orchestrator connects whichever ones have credentials present. Agents execute in isolated Linux containers with filesystem isolation. Only mounted directories are accessible. Per-group message queue with concurrency control. IPC via filesystem.
|
Single Node.js process. Channels are added via skills and self-register at startup — the orchestrator connects whichever ones have credentials present. Agents execute in isolated Linux containers with filesystem isolation. Only mounted directories are accessible. Per-group message queue with concurrency control. IPC via filesystem.
|
||||||
|
|
||||||
For the full architecture details, see [docs/SPEC.md](docs/SPEC.md).
|
For the full architecture details, see the [documentation site](https://docs.nanoclaw.dev/concepts/architecture).
|
||||||
|
|
||||||
Key files:
|
Key files:
|
||||||
- `src/index.ts` - Orchestrator: state, message loop, agent invocation
|
- `src/index.ts` - Orchestrator: state, message loop, agent invocation
|
||||||
@@ -153,13 +155,13 @@ Key files:
|
|||||||
|
|
||||||
Docker provides cross-platform support (macOS, Linux and even Windows via WSL2) and a mature ecosystem. On macOS, you can optionally switch to Apple Container via `/convert-to-apple-container` for a lighter-weight native runtime. For additional isolation, [Docker Sandboxes](docs/docker-sandboxes.md) run each container inside a micro VM.
|
Docker provides cross-platform support (macOS, Linux and even Windows via WSL2) and a mature ecosystem. On macOS, you can optionally switch to Apple Container via `/convert-to-apple-container` for a lighter-weight native runtime. For additional isolation, [Docker Sandboxes](docs/docker-sandboxes.md) run each container inside a micro VM.
|
||||||
|
|
||||||
**Can I run this on Linux?**
|
**Can I run this on Linux or Windows?**
|
||||||
|
|
||||||
Yes. Docker is the default runtime and works on both macOS and Linux. Just run `/setup`.
|
Yes. Docker is the default runtime and works on macOS, Linux, and Windows (via WSL2). Just run `/setup`.
|
||||||
|
|
||||||
**Is this secure?**
|
**Is this secure?**
|
||||||
|
|
||||||
Agents run in containers, not behind application-level permission checks. They can only access explicitly mounted directories. You should still review what you're running, but the codebase is small enough that you actually can. See [docs/SECURITY.md](docs/SECURITY.md) for the full security model.
|
Agents run in containers, not behind application-level permission checks. They can only access explicitly mounted directories. Credentials never enter the container — outbound API requests route through [OneCLI's Agent Vault](https://github.com/onecli/onecli), which injects authentication at the proxy level and supports rate limits and access policies. You should still review what you're running, but the codebase is small enough that you actually can. See the [security documentation](https://docs.nanoclaw.dev/concepts/security) for the full security model.
|
||||||
|
|
||||||
**Why no configuration files?**
|
**Why no configuration files?**
|
||||||
|
|
||||||
@@ -203,7 +205,7 @@ Questions? Ideas? [Join the Discord](https://discord.gg/VDdww8qS42).
|
|||||||
|
|
||||||
## Changelog
|
## Changelog
|
||||||
|
|
||||||
See [CHANGELOG.md](CHANGELOG.md) for breaking changes and migration notes.
|
See [CHANGELOG.md](CHANGELOG.md) for breaking changes, or the [full release history](https://docs.nanoclaw.dev/changelog) on the documentation site.
|
||||||
|
|
||||||
## License
|
## License
|
||||||
|
|
||||||
|
|||||||
@@ -16,6 +16,7 @@
|
|||||||
|
|
||||||
import fs from 'fs';
|
import fs from 'fs';
|
||||||
import path from 'path';
|
import path from 'path';
|
||||||
|
import { execFile } from 'child_process';
|
||||||
import { query, HookCallback, PreCompactHookInput } from '@anthropic-ai/claude-agent-sdk';
|
import { query, HookCallback, PreCompactHookInput } from '@anthropic-ai/claude-agent-sdk';
|
||||||
import { fileURLToPath } from 'url';
|
import { fileURLToPath } from 'url';
|
||||||
|
|
||||||
@@ -27,6 +28,7 @@ interface ContainerInput {
|
|||||||
isMain: boolean;
|
isMain: boolean;
|
||||||
isScheduledTask?: boolean;
|
isScheduledTask?: boolean;
|
||||||
assistantName?: string;
|
assistantName?: string;
|
||||||
|
script?: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
interface ContainerOutput {
|
interface ContainerOutput {
|
||||||
@@ -464,6 +466,55 @@ async function runQuery(
|
|||||||
return { newSessionId, lastAssistantUuid, closedDuringQuery };
|
return { newSessionId, lastAssistantUuid, closedDuringQuery };
|
||||||
}
|
}
|
||||||
|
|
||||||
|
interface ScriptResult {
|
||||||
|
wakeAgent: boolean;
|
||||||
|
data?: unknown;
|
||||||
|
}
|
||||||
|
|
||||||
|
const SCRIPT_TIMEOUT_MS = 30_000;
|
||||||
|
|
||||||
|
async function runScript(script: string): Promise<ScriptResult | null> {
|
||||||
|
const scriptPath = '/tmp/task-script.sh';
|
||||||
|
fs.writeFileSync(scriptPath, script, { mode: 0o755 });
|
||||||
|
|
||||||
|
return new Promise((resolve) => {
|
||||||
|
execFile('bash', [scriptPath], {
|
||||||
|
timeout: SCRIPT_TIMEOUT_MS,
|
||||||
|
maxBuffer: 1024 * 1024,
|
||||||
|
env: process.env,
|
||||||
|
}, (error, stdout, stderr) => {
|
||||||
|
if (stderr) {
|
||||||
|
log(`Script stderr: ${stderr.slice(0, 500)}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (error) {
|
||||||
|
log(`Script error: ${error.message}`);
|
||||||
|
return resolve(null);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse last non-empty line of stdout as JSON
|
||||||
|
const lines = stdout.trim().split('\n');
|
||||||
|
const lastLine = lines[lines.length - 1];
|
||||||
|
if (!lastLine) {
|
||||||
|
log('Script produced no output');
|
||||||
|
return resolve(null);
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const result = JSON.parse(lastLine);
|
||||||
|
if (typeof result.wakeAgent !== 'boolean') {
|
||||||
|
log(`Script output missing wakeAgent boolean: ${lastLine.slice(0, 200)}`);
|
||||||
|
return resolve(null);
|
||||||
|
}
|
||||||
|
resolve(result as ScriptResult);
|
||||||
|
} catch {
|
||||||
|
log(`Script output is not valid JSON: ${lastLine.slice(0, 200)}`);
|
||||||
|
resolve(null);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
async function main(): Promise<void> {
|
async function main(): Promise<void> {
|
||||||
let containerInput: ContainerInput;
|
let containerInput: ContainerInput;
|
||||||
|
|
||||||
@@ -505,6 +556,26 @@ async function main(): Promise<void> {
|
|||||||
prompt += '\n' + pending.join('\n');
|
prompt += '\n' + pending.join('\n');
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Script phase: run script before waking agent
|
||||||
|
if (containerInput.script && containerInput.isScheduledTask) {
|
||||||
|
log('Running task script...');
|
||||||
|
const scriptResult = await runScript(containerInput.script);
|
||||||
|
|
||||||
|
if (!scriptResult || !scriptResult.wakeAgent) {
|
||||||
|
const reason = scriptResult ? 'wakeAgent=false' : 'script error/no output';
|
||||||
|
log(`Script decided not to wake agent: ${reason}`);
|
||||||
|
writeOutput({
|
||||||
|
status: 'success',
|
||||||
|
result: null,
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Script says wake agent — enrich prompt with script data
|
||||||
|
log(`Script wakeAgent=true, enriching prompt with data`);
|
||||||
|
prompt = `[SCHEDULED TASK]\n\nScript output:\n${JSON.stringify(scriptResult.data, null, 2)}\n\nInstructions:\n${containerInput.prompt}`;
|
||||||
|
}
|
||||||
|
|
||||||
// Query loop: run query → wait for IPC message → run new query → repeat
|
// Query loop: run query → wait for IPC message → run new query → repeat
|
||||||
let resumeAt: string | undefined;
|
let resumeAt: string | undefined;
|
||||||
try {
|
try {
|
||||||
|
|||||||
@@ -91,6 +91,7 @@ SCHEDULE VALUE FORMAT (all times are LOCAL timezone):
|
|||||||
schedule_value: z.string().describe('cron: "*/5 * * * *" | interval: milliseconds like "300000" | once: local timestamp like "2026-02-01T15:30:00" (no Z suffix!)'),
|
schedule_value: z.string().describe('cron: "*/5 * * * *" | interval: milliseconds like "300000" | once: local timestamp like "2026-02-01T15:30:00" (no Z suffix!)'),
|
||||||
context_mode: z.enum(['group', 'isolated']).default('group').describe('group=runs with chat history and memory, isolated=fresh session (include context in prompt)'),
|
context_mode: z.enum(['group', 'isolated']).default('group').describe('group=runs with chat history and memory, isolated=fresh session (include context in prompt)'),
|
||||||
target_group_jid: z.string().optional().describe('(Main group only) JID of the group to schedule the task for. Defaults to the current group.'),
|
target_group_jid: z.string().optional().describe('(Main group only) JID of the group to schedule the task for. Defaults to the current group.'),
|
||||||
|
script: z.string().optional().describe('Optional bash script to run before waking the agent. Script must output JSON on the last line of stdout: { "wakeAgent": boolean, "data"?: any }. If wakeAgent is false, the agent is not called. Test your script with bash -c "..." before scheduling.'),
|
||||||
},
|
},
|
||||||
async (args) => {
|
async (args) => {
|
||||||
// Validate schedule_value before writing IPC
|
// Validate schedule_value before writing IPC
|
||||||
@@ -136,6 +137,7 @@ SCHEDULE VALUE FORMAT (all times are LOCAL timezone):
|
|||||||
type: 'schedule_task',
|
type: 'schedule_task',
|
||||||
taskId,
|
taskId,
|
||||||
prompt: args.prompt,
|
prompt: args.prompt,
|
||||||
|
script: args.script || undefined,
|
||||||
schedule_type: args.schedule_type,
|
schedule_type: args.schedule_type,
|
||||||
schedule_value: args.schedule_value,
|
schedule_value: args.schedule_value,
|
||||||
context_mode: args.context_mode || 'group',
|
context_mode: args.context_mode || 'group',
|
||||||
@@ -255,6 +257,7 @@ server.tool(
|
|||||||
prompt: z.string().optional().describe('New prompt for the task'),
|
prompt: z.string().optional().describe('New prompt for the task'),
|
||||||
schedule_type: z.enum(['cron', 'interval', 'once']).optional().describe('New schedule type'),
|
schedule_type: z.enum(['cron', 'interval', 'once']).optional().describe('New schedule type'),
|
||||||
schedule_value: z.string().optional().describe('New schedule value (see schedule_task for format)'),
|
schedule_value: z.string().optional().describe('New schedule value (see schedule_task for format)'),
|
||||||
|
script: z.string().optional().describe('New script for the task. Set to empty string to remove the script.'),
|
||||||
},
|
},
|
||||||
async (args) => {
|
async (args) => {
|
||||||
// Validate schedule_value if provided
|
// Validate schedule_value if provided
|
||||||
@@ -288,6 +291,7 @@ server.tool(
|
|||||||
timestamp: new Date().toISOString(),
|
timestamp: new Date().toISOString(),
|
||||||
};
|
};
|
||||||
if (args.prompt !== undefined) data.prompt = args.prompt;
|
if (args.prompt !== undefined) data.prompt = args.prompt;
|
||||||
|
if (args.script !== undefined) data.script = args.script;
|
||||||
if (args.schedule_type !== undefined) data.schedule_type = args.schedule_type;
|
if (args.schedule_type !== undefined) data.schedule_type = args.schedule_type;
|
||||||
if (args.schedule_value !== undefined) data.schedule_value = args.schedule_value;
|
if (args.schedule_value !== undefined) data.schedule_value = args.schedule_value;
|
||||||
|
|
||||||
|
|||||||
@@ -11,6 +11,34 @@ Both timers fire at the same time, so containers always exit via hard SIGKILL (c
|
|||||||
### 3. Cursor advanced before agent succeeds
|
### 3. Cursor advanced before agent succeeds
|
||||||
`processGroupMessages` advances `lastAgentTimestamp` before the agent runs. If the container times out, retries find no messages (cursor already past them). Messages are permanently lost on timeout.
|
`processGroupMessages` advances `lastAgentTimestamp` before the agent runs. If the container times out, retries find no messages (cursor already past them). Messages are permanently lost on timeout.
|
||||||
|
|
||||||
|
### 4. Kubernetes image garbage collection deletes nanoclaw-agent image
|
||||||
|
|
||||||
|
**Symptoms**: `Container exited with code 125: pull access denied for nanoclaw-agent` — the container image disappears overnight or after a few hours, even though you just built it.
|
||||||
|
|
||||||
|
**Cause**: If your container runtime has Kubernetes enabled (Rancher Desktop enables it by default), the kubelet runs image garbage collection when disk usage exceeds 85%. NanoClaw containers are ephemeral (run and exit), so `nanoclaw-agent:latest` is never protected by a running container. The kubelet sees it as unused and deletes it — often overnight when no messages are being processed. Other images (docker-compose services) survive because they have long-running containers referencing them.
|
||||||
|
|
||||||
|
**Fix**: Disable Kubernetes if you don't need it:
|
||||||
|
```bash
|
||||||
|
# Rancher Desktop
|
||||||
|
rdctl set --kubernetes-enabled=false
|
||||||
|
|
||||||
|
# Then rebuild the container image
|
||||||
|
./container/build.sh
|
||||||
|
```
|
||||||
|
|
||||||
|
**Diagnosis**: Check the k3s log for image GC activity:
|
||||||
|
```bash
|
||||||
|
grep -i "nanoclaw" ~/Library/Logs/rancher-desktop/k3s.log
|
||||||
|
# Look for: "Removing image to free bytes" with the nanoclaw-agent image ID
|
||||||
|
```
|
||||||
|
|
||||||
|
Check NanoClaw logs for image status:
|
||||||
|
```bash
|
||||||
|
grep -E "image found|image NOT found|image missing" logs/nanoclaw.log
|
||||||
|
```
|
||||||
|
|
||||||
|
If you need Kubernetes enabled, set `CONTAINER_IMAGE` to an image stored in a registry that the kubelet won't GC, or raise the GC thresholds.
|
||||||
|
|
||||||
## Quick Status Check
|
## Quick Status Check
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
@@ -19,16 +47,16 @@ launchctl list | grep nanoclaw
|
|||||||
# Expected: PID 0 com.nanoclaw (PID = running, "-" = not running, non-zero exit = crashed)
|
# Expected: PID 0 com.nanoclaw (PID = running, "-" = not running, non-zero exit = crashed)
|
||||||
|
|
||||||
# 2. Any running containers?
|
# 2. Any running containers?
|
||||||
container ls --format '{{.Names}} {{.Status}}' 2>/dev/null | grep nanoclaw
|
docker ps --format '{{.Names}} {{.Status}}' 2>/dev/null | grep nanoclaw
|
||||||
|
|
||||||
# 3. Any stopped/orphaned containers?
|
# 3. Any stopped/orphaned containers?
|
||||||
container ls -a --format '{{.Names}} {{.Status}}' 2>/dev/null | grep nanoclaw
|
docker ps -a --format '{{.Names}} {{.Status}}' 2>/dev/null | grep nanoclaw
|
||||||
|
|
||||||
# 4. Recent errors in service log?
|
# 4. Recent errors in service log?
|
||||||
grep -E 'ERROR|WARN' logs/nanoclaw.log | tail -20
|
grep -E 'ERROR|WARN' logs/nanoclaw.log | tail -20
|
||||||
|
|
||||||
# 5. Is WhatsApp connected? (look for last connection event)
|
# 5. Are channels connected? (look for last connection event)
|
||||||
grep -E 'Connected to WhatsApp|Connection closed|connection.*close' logs/nanoclaw.log | tail -5
|
grep -E 'Connected|Connection closed|connection.*close|channel.*ready' logs/nanoclaw.log | tail -5
|
||||||
|
|
||||||
# 6. Are groups loaded?
|
# 6. Are groups loaded?
|
||||||
grep 'groupCount' logs/nanoclaw.log | tail -3
|
grep 'groupCount' logs/nanoclaw.log | tail -3
|
||||||
@@ -77,7 +105,7 @@ grep -E 'Scheduling retry|retry|Max retries' logs/nanoclaw.log | tail -10
|
|||||||
## Agent Not Responding
|
## Agent Not Responding
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Check if messages are being received from WhatsApp
|
# Check if messages are being received from channels
|
||||||
grep 'New messages' logs/nanoclaw.log | tail -10
|
grep 'New messages' logs/nanoclaw.log | tail -10
|
||||||
|
|
||||||
# Check if messages are being processed (container spawned)
|
# Check if messages are being processed (container spawned)
|
||||||
@@ -107,10 +135,10 @@ sqlite3 store/messages.db "SELECT name, container_config FROM registered_groups;
|
|||||||
|
|
||||||
# Test-run a container to check mounts (dry run)
|
# Test-run a container to check mounts (dry run)
|
||||||
# Replace <group-folder> with the group's folder name
|
# Replace <group-folder> with the group's folder name
|
||||||
container run -i --rm --entrypoint ls nanoclaw-agent:latest /workspace/extra/
|
docker run -i --rm --entrypoint ls nanoclaw-agent:latest /workspace/extra/
|
||||||
```
|
```
|
||||||
|
|
||||||
## WhatsApp Auth Issues
|
## Channel Auth Issues
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Check if QR code was requested (means auth expired)
|
# Check if QR code was requested (means auth expired)
|
||||||
|
|||||||
15
docs/README.md
Normal file
15
docs/README.md
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
# NanoClaw Documentation
|
||||||
|
|
||||||
|
The official documentation is at **[docs.nanoclaw.dev](https://docs.nanoclaw.dev)**.
|
||||||
|
|
||||||
|
The files in this directory are original design documents and developer references. For the most current and accurate information, use the documentation site.
|
||||||
|
|
||||||
|
| This directory | Documentation site |
|
||||||
|
|---|---|
|
||||||
|
| [SPEC.md](SPEC.md) | [Architecture](https://docs.nanoclaw.dev/concepts/architecture) |
|
||||||
|
| [SECURITY.md](SECURITY.md) | [Security model](https://docs.nanoclaw.dev/concepts/security) |
|
||||||
|
| [REQUIREMENTS.md](REQUIREMENTS.md) | [Introduction](https://docs.nanoclaw.dev/introduction) |
|
||||||
|
| [skills-as-branches.md](skills-as-branches.md) | [Skills system](https://docs.nanoclaw.dev/integrations/skills-system) |
|
||||||
|
| [DEBUG_CHECKLIST.md](DEBUG_CHECKLIST.md) | [Troubleshooting](https://docs.nanoclaw.dev/advanced/troubleshooting) |
|
||||||
|
| [docker-sandboxes.md](docker-sandboxes.md) | [Docker Sandboxes](https://docs.nanoclaw.dev/advanced/docker-sandboxes) |
|
||||||
|
| [APPLE-CONTAINER-NETWORKING.md](APPLE-CONTAINER-NETWORKING.md) | [Container runtime](https://docs.nanoclaw.dev/advanced/container-runtime) |
|
||||||
@@ -22,9 +22,9 @@ The entire codebase should be something you can read and understand. One Node.js
|
|||||||
|
|
||||||
Instead of application-level permission systems trying to prevent agents from accessing things, agents run in actual Linux containers. The isolation is at the OS level. Agents can only see what's explicitly mounted. Bash access is safe because commands run inside the container, not on your Mac.
|
Instead of application-level permission systems trying to prevent agents from accessing things, agents run in actual Linux containers. The isolation is at the OS level. Agents can only see what's explicitly mounted. Bash access is safe because commands run inside the container, not on your Mac.
|
||||||
|
|
||||||
### Built for One User
|
### Built for the Individual User
|
||||||
|
|
||||||
This isn't a framework or a platform. It's working software for my specific needs. I use WhatsApp and Email, so it supports WhatsApp and Email. I don't use Telegram, so it doesn't support Telegram. I add the integrations I actually want, not every possible integration.
|
This isn't a framework or a platform. It's software that fits each user's exact needs. You fork the repo, add the channels you want (WhatsApp, Telegram, Discord, Slack, Gmail), and end up with clean code that does exactly what you need.
|
||||||
|
|
||||||
### Customization = Code Changes
|
### Customization = Code Changes
|
||||||
|
|
||||||
@@ -44,41 +44,31 @@ When people contribute, they shouldn't add "Telegram support alongside WhatsApp.
|
|||||||
|
|
||||||
## RFS (Request for Skills)
|
## RFS (Request for Skills)
|
||||||
|
|
||||||
Skills we'd love contributors to build:
|
Skills we'd like to see contributed:
|
||||||
|
|
||||||
### Communication Channels
|
### Communication Channels
|
||||||
Skills to add or switch to different messaging platforms:
|
- `/add-signal` - Add Signal as a channel
|
||||||
- `/add-telegram` - Add Telegram as an input channel
|
- `/add-matrix` - Add Matrix integration
|
||||||
- `/add-slack` - Add Slack as an input channel
|
|
||||||
- `/add-discord` - Add Discord as an input channel
|
|
||||||
- `/add-sms` - Add SMS via Twilio or similar
|
|
||||||
- `/convert-to-telegram` - Replace WhatsApp with Telegram entirely
|
|
||||||
|
|
||||||
### Container Runtime
|
> **Note:** Telegram, Slack, Discord, Gmail, and Apple Container skills already exist. See the [skills documentation](https://docs.nanoclaw.dev/integrations/skills-system) for the full list.
|
||||||
The project uses Docker by default (cross-platform). For macOS users who prefer Apple Container:
|
|
||||||
- `/convert-to-apple-container` - Switch from Docker to Apple Container (macOS-only)
|
|
||||||
|
|
||||||
### Platform Support
|
|
||||||
- `/setup-linux` - Make the full setup work on Linux (depends on Docker conversion)
|
|
||||||
- `/setup-windows` - Windows support via WSL2 + Docker
|
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## Vision
|
## Vision
|
||||||
|
|
||||||
A personal Claude assistant accessible via WhatsApp, with minimal custom code.
|
A personal Claude assistant accessible via messaging, with minimal custom code.
|
||||||
|
|
||||||
**Core components:**
|
**Core components:**
|
||||||
- **Claude Agent SDK** as the core agent
|
- **Claude Agent SDK** as the core agent
|
||||||
- **Containers** for isolated agent execution (Linux VMs)
|
- **Containers** for isolated agent execution (Linux VMs)
|
||||||
- **WhatsApp** as the primary I/O channel
|
- **Multi-channel messaging** (WhatsApp, Telegram, Discord, Slack, Gmail) — add exactly the channels you need
|
||||||
- **Persistent memory** per conversation and globally
|
- **Persistent memory** per conversation and globally
|
||||||
- **Scheduled tasks** that run Claude and can message back
|
- **Scheduled tasks** that run Claude and can message back
|
||||||
- **Web access** for search and browsing
|
- **Web access** for search and browsing
|
||||||
- **Browser automation** via agent-browser
|
- **Browser automation** via agent-browser
|
||||||
|
|
||||||
**Implementation approach:**
|
**Implementation approach:**
|
||||||
- Use existing tools (WhatsApp connector, Claude Agent SDK, MCP servers)
|
- Use existing tools (channel libraries, Claude Agent SDK, MCP servers)
|
||||||
- Minimal glue code
|
- Minimal glue code
|
||||||
- File-based systems where possible (CLAUDE.md for memory, folders for groups)
|
- File-based systems where possible (CLAUDE.md for memory, folders for groups)
|
||||||
|
|
||||||
@@ -87,7 +77,7 @@ A personal Claude assistant accessible via WhatsApp, with minimal custom code.
|
|||||||
## Architecture Decisions
|
## Architecture Decisions
|
||||||
|
|
||||||
### Message Routing
|
### Message Routing
|
||||||
- A router listens to WhatsApp and routes messages based on configuration
|
- A router listens to connected channels and routes messages based on configuration
|
||||||
- Only messages from registered groups are processed
|
- Only messages from registered groups are processed
|
||||||
- Trigger: `@Andy` prefix (case insensitive), configurable via `ASSISTANT_NAME` env var
|
- Trigger: `@Andy` prefix (case insensitive), configurable via `ASSISTANT_NAME` env var
|
||||||
- Unregistered groups are ignored completely
|
- Unregistered groups are ignored completely
|
||||||
@@ -136,10 +126,11 @@ A personal Claude assistant accessible via WhatsApp, with minimal custom code.
|
|||||||
|
|
||||||
## Integration Points
|
## Integration Points
|
||||||
|
|
||||||
### WhatsApp
|
### Channels
|
||||||
- Using baileys library for WhatsApp Web connection
|
- WhatsApp (baileys), Telegram (grammy), Discord (discord.js), Slack (@slack/bolt), Gmail (googleapis)
|
||||||
|
- Each channel lives in a separate fork repo and is added via skills (e.g., `/add-whatsapp`, `/add-telegram`)
|
||||||
- Messages stored in SQLite, polled by router
|
- Messages stored in SQLite, polled by router
|
||||||
- QR code authentication during setup
|
- Channels self-register at startup — unconfigured channels are skipped with a warning
|
||||||
|
|
||||||
### Scheduler
|
### Scheduler
|
||||||
- Built-in scheduler runs on the host, spawns containers for task execution
|
- Built-in scheduler runs on the host, spawns containers for task execution
|
||||||
@@ -170,12 +161,12 @@ A personal Claude assistant accessible via WhatsApp, with minimal custom code.
|
|||||||
- Each user gets a custom setup matching their exact needs
|
- Each user gets a custom setup matching their exact needs
|
||||||
|
|
||||||
### Skills
|
### Skills
|
||||||
- `/setup` - Install dependencies, authenticate WhatsApp, configure scheduler, start services
|
- `/setup` - Install dependencies, configure channels, start services
|
||||||
- `/customize` - General-purpose skill for adding capabilities (new channels like Telegram, new integrations, behavior changes)
|
- `/customize` - General-purpose skill for adding capabilities
|
||||||
- `/update` - Pull upstream changes, merge with customizations, run migrations
|
- `/update-nanoclaw` - Pull upstream changes, merge with customizations
|
||||||
|
|
||||||
### Deployment
|
### Deployment
|
||||||
- Runs on local Mac via launchd
|
- Runs on macOS (launchd), Linux (systemd), or Windows (WSL2)
|
||||||
- Single Node.js process handles everything
|
- Single Node.js process handles everything
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|||||||
@@ -7,7 +7,7 @@
|
|||||||
| Main group | Trusted | Private self-chat, admin control |
|
| Main group | Trusted | Private self-chat, admin control |
|
||||||
| Non-main groups | Untrusted | Other users may be malicious |
|
| Non-main groups | Untrusted | Other users may be malicious |
|
||||||
| Container agents | Sandboxed | Isolated execution environment |
|
| Container agents | Sandboxed | Isolated execution environment |
|
||||||
| WhatsApp messages | User input | Potential prompt injection |
|
| Incoming messages | User input | Potential prompt injection |
|
||||||
|
|
||||||
## Security Boundaries
|
## Security Boundaries
|
||||||
|
|
||||||
@@ -64,20 +64,22 @@ Messages and task operations are verified against group identity:
|
|||||||
| View all tasks | ✓ | Own only |
|
| View all tasks | ✓ | Own only |
|
||||||
| Manage other groups | ✓ | ✗ |
|
| Manage other groups | ✓ | ✗ |
|
||||||
|
|
||||||
### 5. Credential Isolation (Credential Proxy)
|
### 5. Credential Isolation (OneCLI Agent Vault)
|
||||||
|
|
||||||
Real API credentials **never enter containers**. Instead, the host runs an HTTP credential proxy that injects authentication headers transparently.
|
Real API credentials **never enter containers**. NanoClaw uses [OneCLI's Agent Vault](https://github.com/onecli/onecli) to proxy outbound requests and inject credentials at the gateway level.
|
||||||
|
|
||||||
**How it works:**
|
**How it works:**
|
||||||
1. Host starts a credential proxy on `CREDENTIAL_PROXY_PORT` (default: 3001)
|
1. Credentials are registered once with `onecli secrets create`, stored and managed by OneCLI
|
||||||
2. Containers receive `ANTHROPIC_BASE_URL=http://host.docker.internal:<port>` and `ANTHROPIC_API_KEY=placeholder`
|
2. When NanoClaw spawns a container, it calls `applyContainerConfig()` to route outbound HTTPS through the OneCLI gateway
|
||||||
3. The SDK sends API requests to the proxy with the placeholder key
|
3. The gateway matches requests by host and path, injects the real credential, and forwards
|
||||||
4. The proxy strips placeholder auth, injects real credentials (`x-api-key` or `Authorization: Bearer`), and forwards to `api.anthropic.com`
|
4. Agents cannot discover real credentials — not in environment, stdin, files, or `/proc`
|
||||||
5. Agents cannot discover real credentials — not in environment, stdin, files, or `/proc`
|
|
||||||
|
**Per-agent policies:**
|
||||||
|
Each NanoClaw group gets its own OneCLI agent identity. This allows different credential policies per group (e.g. your sales agent vs. support agent). OneCLI supports rate limits, and time-bound access and approval flows are on the roadmap.
|
||||||
|
|
||||||
**NOT Mounted:**
|
**NOT Mounted:**
|
||||||
- WhatsApp session (`store/auth/`) - host only
|
- Channel auth sessions (`store/auth/`) — host only
|
||||||
- Mount allowlist - external, never mounted
|
- Mount allowlist — external, never mounted
|
||||||
- Any credentials matching blocked patterns
|
- Any credentials matching blocked patterns
|
||||||
- `.env` is shadowed with `/dev/null` in the project root mount
|
- `.env` is shadowed with `/dev/null` in the project root mount
|
||||||
|
|
||||||
@@ -97,7 +99,7 @@ Real API credentials **never enter containers**. Instead, the host runs an HTTP
|
|||||||
```
|
```
|
||||||
┌──────────────────────────────────────────────────────────────────┐
|
┌──────────────────────────────────────────────────────────────────┐
|
||||||
│ UNTRUSTED ZONE │
|
│ UNTRUSTED ZONE │
|
||||||
│ WhatsApp Messages (potentially malicious) │
|
│ Incoming Messages (potentially malicious) │
|
||||||
└────────────────────────────────┬─────────────────────────────────┘
|
└────────────────────────────────┬─────────────────────────────────┘
|
||||||
│
|
│
|
||||||
▼ Trigger check, input escaping
|
▼ Trigger check, input escaping
|
||||||
@@ -107,7 +109,7 @@ Real API credentials **never enter containers**. Instead, the host runs an HTTP
|
|||||||
│ • IPC authorization │
|
│ • IPC authorization │
|
||||||
│ • Mount validation (external allowlist) │
|
│ • Mount validation (external allowlist) │
|
||||||
│ • Container lifecycle │
|
│ • Container lifecycle │
|
||||||
│ • Credential proxy (injects auth headers) │
|
│ • OneCLI Agent Vault (injects credentials, enforces policies) │
|
||||||
└────────────────────────────────┬─────────────────────────────────┘
|
└────────────────────────────────┬─────────────────────────────────┘
|
||||||
│
|
│
|
||||||
▼ Explicit mounts only, no secrets
|
▼ Explicit mounts only, no secrets
|
||||||
@@ -116,7 +118,7 @@ Real API credentials **never enter containers**. Instead, the host runs an HTTP
|
|||||||
│ • Agent execution │
|
│ • Agent execution │
|
||||||
│ • Bash commands (sandboxed) │
|
│ • Bash commands (sandboxed) │
|
||||||
│ • File operations (limited to mounts) │
|
│ • File operations (limited to mounts) │
|
||||||
│ • API calls routed through credential proxy │
|
│ • API calls routed through OneCLI Agent Vault │
|
||||||
│ • No real credentials in environment or filesystem │
|
│ • No real credentials in environment or filesystem │
|
||||||
└──────────────────────────────────────────────────────────────────┘
|
└──────────────────────────────────────────────────────────────────┘
|
||||||
```
|
```
|
||||||
|
|||||||
@@ -110,3 +110,42 @@ No `##` headings. No `[links](url)`. No `**double stars**`.
|
|||||||
### Discord channels (folder starts with `discord_`)
|
### Discord channels (folder starts with `discord_`)
|
||||||
|
|
||||||
Standard Markdown works: `**bold**`, `*italic*`, `[links](url)`, `# headings`.
|
Standard Markdown works: `**bold**`, `*italic*`, `[links](url)`, `# headings`.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Task Scripts
|
||||||
|
|
||||||
|
For any recurring task, use `schedule_task`. Frequent agent invocations — especially multiple times a day — consume API credits and can risk account restrictions. If a simple check can determine whether action is needed, add a `script` — it runs first, and the agent is only called when the check passes. This keeps invocations to a minimum.
|
||||||
|
|
||||||
|
### How it works
|
||||||
|
|
||||||
|
1. You provide a bash `script` alongside the `prompt` when scheduling
|
||||||
|
2. When the task fires, the script runs first (30-second timeout)
|
||||||
|
3. Script prints JSON to stdout: `{ "wakeAgent": true/false, "data": {...} }`
|
||||||
|
4. If `wakeAgent: false` — nothing happens, task waits for next run
|
||||||
|
5. If `wakeAgent: true` — you wake up and receive the script's data + prompt
|
||||||
|
|
||||||
|
### Always test your script first
|
||||||
|
|
||||||
|
Before scheduling, run the script in your sandbox to verify it works:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
bash -c 'node --input-type=module -e "
|
||||||
|
const r = await fetch(\"https://api.github.com/repos/owner/repo/pulls?state=open\");
|
||||||
|
const prs = await r.json();
|
||||||
|
console.log(JSON.stringify({ wakeAgent: prs.length > 0, data: prs.slice(0, 5) }));
|
||||||
|
"'
|
||||||
|
```
|
||||||
|
|
||||||
|
### When NOT to use scripts
|
||||||
|
|
||||||
|
If a task requires your judgment every time (daily briefings, reminders, reports), skip the script — just use a regular prompt.
|
||||||
|
|
||||||
|
### Frequent task guidance
|
||||||
|
|
||||||
|
If a user wants tasks running more than ~2x daily and a script can't reduce agent wake-ups:
|
||||||
|
|
||||||
|
- Explain that each wake-up uses API credits and risks rate limits
|
||||||
|
- Suggest restructuring with a script that checks the condition first
|
||||||
|
- If the user needs an LLM to evaluate data, suggest using an API key with direct Anthropic API calls inside the script
|
||||||
|
- Help the user find the minimum viable frequency
|
||||||
|
|||||||
@@ -88,6 +88,10 @@ Standard Markdown: `**bold**`, `*italic*`, `[links](url)`, `# headings`.
|
|||||||
|
|
||||||
This is the **main channel**, which has elevated privileges.
|
This is the **main channel**, which has elevated privileges.
|
||||||
|
|
||||||
|
## Authentication
|
||||||
|
|
||||||
|
Anthropic credentials must be either an API key from console.anthropic.com (`ANTHROPIC_API_KEY`) or a long-lived OAuth token from `claude setup-token` (`CLAUDE_CODE_OAUTH_TOKEN`). Short-lived tokens from the system keychain or `~/.claude/.credentials.json` expire within hours and can cause recurring container 401s. The `/setup` skill walks through this. OneCLI manages credentials (including Anthropic auth) — run `onecli --help`.
|
||||||
|
|
||||||
## Container Mounts
|
## Container Mounts
|
||||||
|
|
||||||
Main has read-only access to the project and read-write access to its group folder:
|
Main has read-only access to the project and read-write access to its group folder:
|
||||||
@@ -273,3 +277,42 @@ When scheduling tasks for other groups, use the `target_group_jid` parameter wit
|
|||||||
- `schedule_task(prompt: "...", schedule_type: "cron", schedule_value: "0 9 * * 1", target_group_jid: "120363336345536173@g.us")`
|
- `schedule_task(prompt: "...", schedule_type: "cron", schedule_value: "0 9 * * 1", target_group_jid: "120363336345536173@g.us")`
|
||||||
|
|
||||||
The task will run in that group's context with access to their files and memory.
|
The task will run in that group's context with access to their files and memory.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Task Scripts
|
||||||
|
|
||||||
|
For any recurring task, use `schedule_task`. Frequent agent invocations — especially multiple times a day — consume API credits and can risk account restrictions. If a simple check can determine whether action is needed, add a `script` — it runs first, and the agent is only called when the check passes. This keeps invocations to a minimum.
|
||||||
|
|
||||||
|
### How it works
|
||||||
|
|
||||||
|
1. You provide a bash `script` alongside the `prompt` when scheduling
|
||||||
|
2. When the task fires, the script runs first (30-second timeout)
|
||||||
|
3. Script prints JSON to stdout: `{ "wakeAgent": true/false, "data": {...} }`
|
||||||
|
4. If `wakeAgent: false` — nothing happens, task waits for next run
|
||||||
|
5. If `wakeAgent: true` — you wake up and receive the script's data + prompt
|
||||||
|
|
||||||
|
### Always test your script first
|
||||||
|
|
||||||
|
Before scheduling, run the script in your sandbox to verify it works:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
bash -c 'node --input-type=module -e "
|
||||||
|
const r = await fetch(\"https://api.github.com/repos/owner/repo/pulls?state=open\");
|
||||||
|
const prs = await r.json();
|
||||||
|
console.log(JSON.stringify({ wakeAgent: prs.length > 0, data: prs.slice(0, 5) }));
|
||||||
|
"'
|
||||||
|
```
|
||||||
|
|
||||||
|
### When NOT to use scripts
|
||||||
|
|
||||||
|
If a task requires your judgment every time (daily briefings, reminders, reports), skip the script — just use a regular prompt.
|
||||||
|
|
||||||
|
### Frequent task guidance
|
||||||
|
|
||||||
|
If a user wants tasks running more than ~2x daily and a script can't reduce agent wake-ups:
|
||||||
|
|
||||||
|
- Explain that each wake-up uses API credits and risks rate limits
|
||||||
|
- Suggest restructuring with a script that checks the condition first
|
||||||
|
- If the user needs an LLM to evaluate data, suggest using an API key with direct Anthropic API calls inside the script
|
||||||
|
- Help the user find the minimum viable frequency
|
||||||
|
|||||||
824
package-lock.json
generated
824
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
13
package.json
13
package.json
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "nanoclaw",
|
"name": "nanoclaw",
|
||||||
"version": "1.2.21",
|
"version": "1.2.41",
|
||||||
"description": "Personal Claude assistant. Lightweight, secure, customizable.",
|
"description": "Personal Claude assistant. Lightweight, secure, customizable.",
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
@@ -21,19 +21,14 @@
|
|||||||
"test:watch": "vitest"
|
"test:watch": "vitest"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"better-sqlite3": "^11.8.1",
|
"@onecli-sh/sdk": "^0.2.0",
|
||||||
"grammy": "^1.39.3",
|
"better-sqlite3": "11.10.0",
|
||||||
"cron-parser": "^5.5.0",
|
"cron-parser": "5.5.0"
|
||||||
"pino": "^9.6.0",
|
|
||||||
"pino-pretty": "^13.0.0",
|
|
||||||
"yaml": "^2.8.2",
|
|
||||||
"zod": "^4.3.6"
|
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@eslint/js": "^9.35.0",
|
"@eslint/js": "^9.35.0",
|
||||||
"@types/better-sqlite3": "^7.6.12",
|
"@types/better-sqlite3": "^7.6.12",
|
||||||
"@types/node": "^22.10.0",
|
"@types/node": "^22.10.0",
|
||||||
"@vitest/coverage-v8": "^4.0.18",
|
|
||||||
"eslint": "^9.35.0",
|
"eslint": "^9.35.0",
|
||||||
"eslint-plugin-no-catch-all": "^1.1.0",
|
"eslint-plugin-no-catch-all": "^1.1.0",
|
||||||
"globals": "^15.12.0",
|
"globals": "^15.12.0",
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="97" height="20" role="img" aria-label="41.1k tokens, 21% of context window">
|
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="97" height="20" role="img" aria-label="42.4k tokens, 21% of context window">
|
||||||
<title>41.1k tokens, 21% of context window</title>
|
<title>42.4k tokens, 21% of context window</title>
|
||||||
<linearGradient id="s" x2="0" y2="100%">
|
<linearGradient id="s" x2="0" y2="100%">
|
||||||
<stop offset="0" stop-color="#bbb" stop-opacity=".1"/>
|
<stop offset="0" stop-color="#bbb" stop-opacity=".1"/>
|
||||||
<stop offset="1" stop-opacity=".1"/>
|
<stop offset="1" stop-opacity=".1"/>
|
||||||
@@ -15,8 +15,8 @@
|
|||||||
<g fill="#fff" text-anchor="middle" font-family="Verdana,Geneva,DejaVu Sans,sans-serif" font-size="11">
|
<g fill="#fff" text-anchor="middle" font-family="Verdana,Geneva,DejaVu Sans,sans-serif" font-size="11">
|
||||||
<text aria-hidden="true" x="26" y="15" fill="#010101" fill-opacity=".3">tokens</text>
|
<text aria-hidden="true" x="26" y="15" fill="#010101" fill-opacity=".3">tokens</text>
|
||||||
<text x="26" y="14">tokens</text>
|
<text x="26" y="14">tokens</text>
|
||||||
<text aria-hidden="true" x="74" y="15" fill="#010101" fill-opacity=".3">41.1k</text>
|
<text aria-hidden="true" x="74" y="15" fill="#010101" fill-opacity=".3">42.4k</text>
|
||||||
<text x="74" y="14">41.1k</text>
|
<text x="74" y="14">42.4k</text>
|
||||||
</g>
|
</g>
|
||||||
</g>
|
</g>
|
||||||
</a>
|
</a>
|
||||||
|
|||||||
|
Before Width: | Height: | Size: 1.1 KiB After Width: | Height: | Size: 1.1 KiB |
@@ -9,6 +9,7 @@ const STEPS: Record<
|
|||||||
string,
|
string,
|
||||||
() => Promise<{ run: (args: string[]) => Promise<void> }>
|
() => Promise<{ run: (args: string[]) => Promise<void> }>
|
||||||
> = {
|
> = {
|
||||||
|
timezone: () => import('./timezone.js'),
|
||||||
environment: () => import('./environment.js'),
|
environment: () => import('./environment.js'),
|
||||||
container: () => import('./container.js'),
|
container: () => import('./container.js'),
|
||||||
groups: () => import('./groups.js'),
|
groups: () => import('./groups.js'),
|
||||||
|
|||||||
@@ -10,21 +10,23 @@ import { logger } from '../src/logger.js';
|
|||||||
import { isRoot } from './platform.js';
|
import { isRoot } from './platform.js';
|
||||||
import { emitStatus } from './status.js';
|
import { emitStatus } from './status.js';
|
||||||
|
|
||||||
function parseArgs(args: string[]): { empty: boolean; json: string } {
|
function parseArgs(args: string[]): { empty: boolean; json: string; force: boolean } {
|
||||||
let empty = false;
|
let empty = false;
|
||||||
let json = '';
|
let json = '';
|
||||||
|
let force = false;
|
||||||
for (let i = 0; i < args.length; i++) {
|
for (let i = 0; i < args.length; i++) {
|
||||||
if (args[i] === '--empty') empty = true;
|
if (args[i] === '--empty') empty = true;
|
||||||
|
if (args[i] === '--force') force = true;
|
||||||
if (args[i] === '--json' && args[i + 1]) {
|
if (args[i] === '--json' && args[i + 1]) {
|
||||||
json = args[i + 1];
|
json = args[i + 1];
|
||||||
i++;
|
i++;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return { empty, json };
|
return { empty, json, force };
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function run(args: string[]): Promise<void> {
|
export async function run(args: string[]): Promise<void> {
|
||||||
const { empty, json } = parseArgs(args);
|
const { empty, json, force } = parseArgs(args);
|
||||||
const homeDir = os.homedir();
|
const homeDir = os.homedir();
|
||||||
const configDir = path.join(homeDir, '.config', 'nanoclaw');
|
const configDir = path.join(homeDir, '.config', 'nanoclaw');
|
||||||
const configFile = path.join(configDir, 'mount-allowlist.json');
|
const configFile = path.join(configDir, 'mount-allowlist.json');
|
||||||
@@ -37,6 +39,21 @@ export async function run(args: string[]): Promise<void> {
|
|||||||
|
|
||||||
fs.mkdirSync(configDir, { recursive: true });
|
fs.mkdirSync(configDir, { recursive: true });
|
||||||
|
|
||||||
|
if (fs.existsSync(configFile) && !force) {
|
||||||
|
logger.info(
|
||||||
|
{ configFile },
|
||||||
|
'Mount allowlist already exists — skipping (use --force to overwrite)',
|
||||||
|
);
|
||||||
|
emitStatus('CONFIGURE_MOUNTS', {
|
||||||
|
PATH: configFile,
|
||||||
|
ALLOWED_ROOTS: 0,
|
||||||
|
NON_MAIN_READ_ONLY: 'unknown',
|
||||||
|
STATUS: 'skipped',
|
||||||
|
LOG: 'logs/setup.log',
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
let allowedRoots = 0;
|
let allowedRoots = 0;
|
||||||
let nonMainReadOnly = 'true';
|
let nonMainReadOnly = 'true';
|
||||||
|
|
||||||
|
|||||||
@@ -1,4 +1,7 @@
|
|||||||
import { describe, it, expect, beforeEach } from 'vitest';
|
import fs from 'fs';
|
||||||
|
import os from 'os';
|
||||||
|
import path from 'path';
|
||||||
|
import { afterEach, describe, it, expect, beforeEach } from 'vitest';
|
||||||
|
|
||||||
import Database from 'better-sqlite3';
|
import Database from 'better-sqlite3';
|
||||||
|
|
||||||
@@ -6,7 +9,7 @@ import Database from 'better-sqlite3';
|
|||||||
* Tests for the register step.
|
* Tests for the register step.
|
||||||
*
|
*
|
||||||
* Verifies: parameterized SQL (no injection), file templating,
|
* Verifies: parameterized SQL (no injection), file templating,
|
||||||
* apostrophe in names, .env updates.
|
* apostrophe in names, .env updates, CLAUDE.md template copy.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
function createTestDb(): Database.Database {
|
function createTestDb(): Database.Database {
|
||||||
@@ -255,3 +258,207 @@ describe('file templating', () => {
|
|||||||
expect(envContent).toContain('ASSISTANT_NAME="Nova"');
|
expect(envContent).toContain('ASSISTANT_NAME="Nova"');
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
describe('CLAUDE.md template copy', () => {
|
||||||
|
let tmpDir: string;
|
||||||
|
let groupsDir: string;
|
||||||
|
|
||||||
|
// Replicates register.ts template copy + name update logic
|
||||||
|
function simulateRegister(
|
||||||
|
folder: string,
|
||||||
|
isMain: boolean,
|
||||||
|
assistantName = 'Andy',
|
||||||
|
): void {
|
||||||
|
const folderDir = path.join(groupsDir, folder);
|
||||||
|
fs.mkdirSync(path.join(folderDir, 'logs'), { recursive: true });
|
||||||
|
|
||||||
|
// Template copy — never overwrite existing (register.ts lines 119-135)
|
||||||
|
const dest = path.join(folderDir, 'CLAUDE.md');
|
||||||
|
if (!fs.existsSync(dest)) {
|
||||||
|
const templatePath = isMain
|
||||||
|
? path.join(groupsDir, 'main', 'CLAUDE.md')
|
||||||
|
: path.join(groupsDir, 'global', 'CLAUDE.md');
|
||||||
|
if (fs.existsSync(templatePath)) {
|
||||||
|
fs.copyFileSync(templatePath, dest);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Name update across all groups (register.ts lines 140-165)
|
||||||
|
if (assistantName !== 'Andy') {
|
||||||
|
const mdFiles = fs
|
||||||
|
.readdirSync(groupsDir)
|
||||||
|
.map((d) => path.join(groupsDir, d, 'CLAUDE.md'))
|
||||||
|
.filter((f) => fs.existsSync(f));
|
||||||
|
|
||||||
|
for (const mdFile of mdFiles) {
|
||||||
|
let content = fs.readFileSync(mdFile, 'utf-8');
|
||||||
|
content = content.replace(/^# Andy$/m, `# ${assistantName}`);
|
||||||
|
content = content.replace(
|
||||||
|
/You are Andy/g,
|
||||||
|
`You are ${assistantName}`,
|
||||||
|
);
|
||||||
|
fs.writeFileSync(mdFile, content);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function readGroupMd(folder: string): string {
|
||||||
|
return fs.readFileSync(
|
||||||
|
path.join(groupsDir, folder, 'CLAUDE.md'),
|
||||||
|
'utf-8',
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'nanoclaw-register-test-'));
|
||||||
|
groupsDir = path.join(tmpDir, 'groups');
|
||||||
|
fs.mkdirSync(path.join(groupsDir, 'main'), { recursive: true });
|
||||||
|
fs.mkdirSync(path.join(groupsDir, 'global'), { recursive: true });
|
||||||
|
fs.writeFileSync(
|
||||||
|
path.join(groupsDir, 'main', 'CLAUDE.md'),
|
||||||
|
'# Andy\n\nYou are Andy, a personal assistant.\n\n## Admin Context\n\nThis is the **main channel**.',
|
||||||
|
);
|
||||||
|
fs.writeFileSync(
|
||||||
|
path.join(groupsDir, 'global', 'CLAUDE.md'),
|
||||||
|
'# Andy\n\nYou are Andy, a personal assistant.',
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
fs.rmSync(tmpDir, { recursive: true, force: true });
|
||||||
|
});
|
||||||
|
|
||||||
|
it('copies global template for non-main group', () => {
|
||||||
|
simulateRegister('telegram_dev-team', false);
|
||||||
|
|
||||||
|
const content = readGroupMd('telegram_dev-team');
|
||||||
|
expect(content).toContain('You are Andy');
|
||||||
|
expect(content).not.toContain('Admin Context');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('copies main template for main group', () => {
|
||||||
|
simulateRegister('whatsapp_main', true);
|
||||||
|
|
||||||
|
expect(readGroupMd('whatsapp_main')).toContain('Admin Context');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('each channel can have its own main with admin context', () => {
|
||||||
|
simulateRegister('whatsapp_main', true);
|
||||||
|
simulateRegister('telegram_main', true);
|
||||||
|
simulateRegister('slack_main', true);
|
||||||
|
simulateRegister('discord_main', true);
|
||||||
|
|
||||||
|
for (const folder of [
|
||||||
|
'whatsapp_main',
|
||||||
|
'telegram_main',
|
||||||
|
'slack_main',
|
||||||
|
'discord_main',
|
||||||
|
]) {
|
||||||
|
const content = readGroupMd(folder);
|
||||||
|
expect(content).toContain('Admin Context');
|
||||||
|
expect(content).toContain('You are Andy');
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
it('non-main groups across channels get global template', () => {
|
||||||
|
simulateRegister('whatsapp_main', true);
|
||||||
|
simulateRegister('telegram_friends', false);
|
||||||
|
simulateRegister('slack_engineering', false);
|
||||||
|
simulateRegister('discord_general', false);
|
||||||
|
|
||||||
|
expect(readGroupMd('whatsapp_main')).toContain('Admin Context');
|
||||||
|
for (const folder of [
|
||||||
|
'telegram_friends',
|
||||||
|
'slack_engineering',
|
||||||
|
'discord_general',
|
||||||
|
]) {
|
||||||
|
const content = readGroupMd(folder);
|
||||||
|
expect(content).toContain('You are Andy');
|
||||||
|
expect(content).not.toContain('Admin Context');
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
it('custom name propagates to all channels and groups', () => {
|
||||||
|
// Register multiple channels, last one sets custom name
|
||||||
|
simulateRegister('whatsapp_main', true);
|
||||||
|
simulateRegister('telegram_main', true);
|
||||||
|
simulateRegister('slack_devs', false);
|
||||||
|
// Final registration triggers name update across all
|
||||||
|
simulateRegister('discord_main', true, 'Luna');
|
||||||
|
|
||||||
|
for (const folder of [
|
||||||
|
'main',
|
||||||
|
'global',
|
||||||
|
'whatsapp_main',
|
||||||
|
'telegram_main',
|
||||||
|
'slack_devs',
|
||||||
|
'discord_main',
|
||||||
|
]) {
|
||||||
|
const content = readGroupMd(folder);
|
||||||
|
expect(content).toContain('# Luna');
|
||||||
|
expect(content).toContain('You are Luna');
|
||||||
|
expect(content).not.toContain('Andy');
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
it('never overwrites existing CLAUDE.md on re-registration', () => {
|
||||||
|
simulateRegister('slack_main', true);
|
||||||
|
// User customizes the file extensively (persona, workspace, rules)
|
||||||
|
const mdPath = path.join(groupsDir, 'slack_main', 'CLAUDE.md');
|
||||||
|
fs.writeFileSync(
|
||||||
|
mdPath,
|
||||||
|
'# Gambi\n\nCustom persona with workspace rules and family context.',
|
||||||
|
);
|
||||||
|
// Re-registering same folder (e.g. re-running /add-slack)
|
||||||
|
simulateRegister('slack_main', true);
|
||||||
|
|
||||||
|
const content = readGroupMd('slack_main');
|
||||||
|
expect(content).toContain('Custom persona');
|
||||||
|
expect(content).not.toContain('Admin Context');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('never overwrites when non-main becomes main (isMain changes)', () => {
|
||||||
|
// User registers a family group as non-main
|
||||||
|
simulateRegister('whatsapp_casa', false);
|
||||||
|
// User extensively customizes it (PARA system, task management, etc.)
|
||||||
|
const mdPath = path.join(groupsDir, 'whatsapp_casa', 'CLAUDE.md');
|
||||||
|
fs.writeFileSync(
|
||||||
|
mdPath,
|
||||||
|
'# Casa\n\nFamily group with PARA system, task management, shopping lists.',
|
||||||
|
);
|
||||||
|
// Later, user promotes to main (no trigger required) — CLAUDE.md must be preserved
|
||||||
|
simulateRegister('whatsapp_casa', true);
|
||||||
|
|
||||||
|
const content = readGroupMd('whatsapp_casa');
|
||||||
|
expect(content).toContain('PARA system');
|
||||||
|
expect(content).not.toContain('Admin Context');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('preserves custom CLAUDE.md across channels when changing main', () => {
|
||||||
|
// Real-world scenario: WhatsApp main + customized Discord research channel
|
||||||
|
simulateRegister('whatsapp_main', true);
|
||||||
|
simulateRegister('discord_main', false);
|
||||||
|
const discordPath = path.join(groupsDir, 'discord_main', 'CLAUDE.md');
|
||||||
|
fs.writeFileSync(
|
||||||
|
discordPath,
|
||||||
|
'# Gambi HQ — Research Assistant\n\nResearch workflows for Laura and Ethan.',
|
||||||
|
);
|
||||||
|
|
||||||
|
// Discord becomes main too — custom content must survive
|
||||||
|
simulateRegister('discord_main', true);
|
||||||
|
expect(readGroupMd('discord_main')).toContain('Research Assistant');
|
||||||
|
// WhatsApp main also untouched
|
||||||
|
expect(readGroupMd('whatsapp_main')).toContain('Admin Context');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('handles missing templates gracefully', () => {
|
||||||
|
fs.unlinkSync(path.join(groupsDir, 'global', 'CLAUDE.md'));
|
||||||
|
fs.unlinkSync(path.join(groupsDir, 'main', 'CLAUDE.md'));
|
||||||
|
|
||||||
|
simulateRegister('discord_general', false);
|
||||||
|
|
||||||
|
expect(
|
||||||
|
fs.existsSync(path.join(groupsDir, 'discord_general', 'CLAUDE.md')),
|
||||||
|
).toBe(false);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|||||||
@@ -116,6 +116,30 @@ export async function run(args: string[]): Promise<void> {
|
|||||||
recursive: true,
|
recursive: true,
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// Create CLAUDE.md in the new group folder from template if it doesn't exist.
|
||||||
|
// The agent runs with CWD=/workspace/group and loads CLAUDE.md from there.
|
||||||
|
// Never overwrite an existing CLAUDE.md — users customize these extensively
|
||||||
|
// (persona, workspace structure, communication rules, family context, etc.)
|
||||||
|
// and a stock template replacement would destroy that work.
|
||||||
|
const groupClaudeMdPath = path.join(
|
||||||
|
projectRoot,
|
||||||
|
'groups',
|
||||||
|
parsed.folder,
|
||||||
|
'CLAUDE.md',
|
||||||
|
);
|
||||||
|
if (!fs.existsSync(groupClaudeMdPath)) {
|
||||||
|
const templatePath = parsed.isMain
|
||||||
|
? path.join(projectRoot, 'groups', 'main', 'CLAUDE.md')
|
||||||
|
: path.join(projectRoot, 'groups', 'global', 'CLAUDE.md');
|
||||||
|
if (fs.existsSync(templatePath)) {
|
||||||
|
fs.copyFileSync(templatePath, groupClaudeMdPath);
|
||||||
|
logger.info(
|
||||||
|
{ file: groupClaudeMdPath, template: templatePath },
|
||||||
|
'Created CLAUDE.md from template',
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Update assistant name in CLAUDE.md files if different from default
|
// Update assistant name in CLAUDE.md files if different from default
|
||||||
let nameUpdated = false;
|
let nameUpdated = false;
|
||||||
if (parsed.assistantName !== 'Andy') {
|
if (parsed.assistantName !== 'Andy') {
|
||||||
@@ -124,10 +148,11 @@ export async function run(args: string[]): Promise<void> {
|
|||||||
'Updating assistant name',
|
'Updating assistant name',
|
||||||
);
|
);
|
||||||
|
|
||||||
const mdFiles = [
|
const groupsDir = path.join(projectRoot, 'groups');
|
||||||
path.join(projectRoot, 'groups', 'global', 'CLAUDE.md'),
|
const mdFiles = fs
|
||||||
path.join(projectRoot, 'groups', parsed.folder, 'CLAUDE.md'),
|
.readdirSync(groupsDir)
|
||||||
];
|
.map((d) => path.join(groupsDir, d, 'CLAUDE.md'))
|
||||||
|
.filter((f) => fs.existsSync(f));
|
||||||
|
|
||||||
for (const mdFile of mdFiles) {
|
for (const mdFile of mdFiles) {
|
||||||
if (fs.existsSync(mdFile)) {
|
if (fs.existsSync(mdFile)) {
|
||||||
|
|||||||
@@ -266,6 +266,20 @@ WantedBy=${runningAsRoot ? 'multi-user.target' : 'default.target'}`;
|
|||||||
// Kill orphaned nanoclaw processes to avoid channel connection conflicts
|
// Kill orphaned nanoclaw processes to avoid channel connection conflicts
|
||||||
killOrphanedProcesses(projectRoot);
|
killOrphanedProcesses(projectRoot);
|
||||||
|
|
||||||
|
// Enable lingering so the user service survives SSH logout.
|
||||||
|
// Without linger, systemd terminates all user processes when the last session closes.
|
||||||
|
if (!runningAsRoot) {
|
||||||
|
try {
|
||||||
|
execSync('loginctl enable-linger', { stdio: 'ignore' });
|
||||||
|
logger.info('Enabled loginctl linger for current user');
|
||||||
|
} catch (err) {
|
||||||
|
logger.warn(
|
||||||
|
{ err },
|
||||||
|
'loginctl enable-linger failed — service may stop on SSH logout',
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Enable and start
|
// Enable and start
|
||||||
try {
|
try {
|
||||||
execSync(`${systemctlPrefix} daemon-reload`, { stdio: 'ignore' });
|
execSync(`${systemctlPrefix} daemon-reload`, { stdio: 'ignore' });
|
||||||
@@ -301,6 +315,7 @@ WantedBy=${runningAsRoot ? 'multi-user.target' : 'default.target'}`;
|
|||||||
UNIT_PATH: unitPath,
|
UNIT_PATH: unitPath,
|
||||||
SERVICE_LOADED: serviceLoaded,
|
SERVICE_LOADED: serviceLoaded,
|
||||||
...(dockerGroupStale ? { DOCKER_GROUP_STALE: true } : {}),
|
...(dockerGroupStale ? { DOCKER_GROUP_STALE: true } : {}),
|
||||||
|
LINGER_ENABLED: !runningAsRoot,
|
||||||
STATUS: 'success',
|
STATUS: 'success',
|
||||||
LOG: 'logs/setup.log',
|
LOG: 'logs/setup.log',
|
||||||
});
|
});
|
||||||
|
|||||||
67
setup/timezone.ts
Normal file
67
setup/timezone.ts
Normal file
@@ -0,0 +1,67 @@
|
|||||||
|
/**
|
||||||
|
* Step: timezone — Detect, validate, and persist the user's timezone.
|
||||||
|
* Writes TZ to .env if a valid IANA timezone is resolved.
|
||||||
|
* Emits NEEDS_USER_INPUT=true when autodetection fails.
|
||||||
|
*/
|
||||||
|
import fs from 'fs';
|
||||||
|
import path from 'path';
|
||||||
|
|
||||||
|
import { isValidTimezone } from '../src/timezone.js';
|
||||||
|
import { logger } from '../src/logger.js';
|
||||||
|
import { emitStatus } from './status.js';
|
||||||
|
|
||||||
|
export async function run(args: string[]): Promise<void> {
|
||||||
|
const projectRoot = process.cwd();
|
||||||
|
const envFile = path.join(projectRoot, '.env');
|
||||||
|
|
||||||
|
// Check what's already in .env
|
||||||
|
let envFileTz: string | undefined;
|
||||||
|
if (fs.existsSync(envFile)) {
|
||||||
|
const content = fs.readFileSync(envFile, 'utf-8');
|
||||||
|
const match = content.match(/^TZ=(.+)$/m);
|
||||||
|
if (match) envFileTz = match[1].trim().replace(/^["']|["']$/g, '');
|
||||||
|
}
|
||||||
|
|
||||||
|
const systemTz = Intl.DateTimeFormat().resolvedOptions().timeZone;
|
||||||
|
const envTz = process.env.TZ;
|
||||||
|
|
||||||
|
// Accept --tz flag from CLI (used when setup skill collects from user)
|
||||||
|
const tzFlagIdx = args.indexOf('--tz');
|
||||||
|
const userTz = tzFlagIdx !== -1 ? args[tzFlagIdx + 1] : undefined;
|
||||||
|
|
||||||
|
// Resolve: user-provided > .env > process.env > system autodetect
|
||||||
|
let resolvedTz: string | undefined;
|
||||||
|
for (const candidate of [userTz, envFileTz, envTz, systemTz]) {
|
||||||
|
if (candidate && isValidTimezone(candidate)) {
|
||||||
|
resolvedTz = candidate;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const needsUserInput = !resolvedTz;
|
||||||
|
|
||||||
|
if (resolvedTz && resolvedTz !== envFileTz) {
|
||||||
|
// Write/update TZ in .env
|
||||||
|
if (fs.existsSync(envFile)) {
|
||||||
|
let content = fs.readFileSync(envFile, 'utf-8');
|
||||||
|
if (/^TZ=/m.test(content)) {
|
||||||
|
content = content.replace(/^TZ=.*$/m, `TZ=${resolvedTz}`);
|
||||||
|
} else {
|
||||||
|
content = content.trimEnd() + `\nTZ=${resolvedTz}\n`;
|
||||||
|
}
|
||||||
|
fs.writeFileSync(envFile, content);
|
||||||
|
} else {
|
||||||
|
fs.writeFileSync(envFile, `TZ=${resolvedTz}\n`);
|
||||||
|
}
|
||||||
|
logger.info({ timezone: resolvedTz }, 'Set TZ in .env');
|
||||||
|
}
|
||||||
|
|
||||||
|
emitStatus('TIMEZONE', {
|
||||||
|
SYSTEM_TZ: systemTz || 'unknown',
|
||||||
|
ENV_TZ: envTz || 'unset',
|
||||||
|
ENV_FILE_TZ: envFileTz || 'unset',
|
||||||
|
RESOLVED_TZ: resolvedTz || 'none',
|
||||||
|
NEEDS_USER_INPUT: needsUserInput,
|
||||||
|
STATUS: needsUserInput ? 'needs_input' : 'success',
|
||||||
|
});
|
||||||
|
}
|
||||||
@@ -101,7 +101,7 @@ export async function run(_args: string[]): Promise<void> {
|
|||||||
const envFile = path.join(projectRoot, '.env');
|
const envFile = path.join(projectRoot, '.env');
|
||||||
if (fs.existsSync(envFile)) {
|
if (fs.existsSync(envFile)) {
|
||||||
const envContent = fs.readFileSync(envFile, 'utf-8');
|
const envContent = fs.readFileSync(envFile, 'utf-8');
|
||||||
if (/^(CLAUDE_CODE_OAUTH_TOKEN|ANTHROPIC_API_KEY)=/m.test(envContent)) {
|
if (/^(CLAUDE_CODE_OAUTH_TOKEN|ANTHROPIC_API_KEY|ONECLI_URL)=/m.test(envContent)) {
|
||||||
credentials = 'configured';
|
credentials = 'configured';
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -8,6 +8,5 @@
|
|||||||
// slack
|
// slack
|
||||||
|
|
||||||
// telegram
|
// telegram
|
||||||
import './telegram.js';
|
|
||||||
|
|
||||||
// whatsapp
|
// whatsapp
|
||||||
|
|||||||
@@ -1,949 +0,0 @@
|
|||||||
import { describe, it, expect, beforeEach, vi, afterEach } from 'vitest';
|
|
||||||
|
|
||||||
// --- Mocks ---
|
|
||||||
|
|
||||||
// Mock registry (registerChannel runs at import time)
|
|
||||||
vi.mock('./registry.js', () => ({ registerChannel: vi.fn() }));
|
|
||||||
|
|
||||||
// Mock env reader (used by the factory, not needed in unit tests)
|
|
||||||
vi.mock('../env.js', () => ({ readEnvFile: vi.fn(() => ({})) }));
|
|
||||||
|
|
||||||
// Mock config
|
|
||||||
vi.mock('../config.js', () => ({
|
|
||||||
ASSISTANT_NAME: 'Andy',
|
|
||||||
TRIGGER_PATTERN: /^@Andy\b/i,
|
|
||||||
}));
|
|
||||||
|
|
||||||
// Mock logger
|
|
||||||
vi.mock('../logger.js', () => ({
|
|
||||||
logger: {
|
|
||||||
debug: vi.fn(),
|
|
||||||
info: vi.fn(),
|
|
||||||
warn: vi.fn(),
|
|
||||||
error: vi.fn(),
|
|
||||||
},
|
|
||||||
}));
|
|
||||||
|
|
||||||
// --- Grammy mock ---
|
|
||||||
|
|
||||||
type Handler = (...args: any[]) => any;
|
|
||||||
|
|
||||||
const botRef = vi.hoisted(() => ({ current: null as any }));
|
|
||||||
|
|
||||||
vi.mock('grammy', () => ({
|
|
||||||
Bot: class MockBot {
|
|
||||||
token: string;
|
|
||||||
commandHandlers = new Map<string, Handler>();
|
|
||||||
filterHandlers = new Map<string, Handler[]>();
|
|
||||||
errorHandler: Handler | null = null;
|
|
||||||
|
|
||||||
api = {
|
|
||||||
sendMessage: vi.fn().mockResolvedValue(undefined),
|
|
||||||
sendChatAction: vi.fn().mockResolvedValue(undefined),
|
|
||||||
};
|
|
||||||
|
|
||||||
constructor(token: string) {
|
|
||||||
this.token = token;
|
|
||||||
botRef.current = this;
|
|
||||||
}
|
|
||||||
|
|
||||||
command(name: string, handler: Handler) {
|
|
||||||
this.commandHandlers.set(name, handler);
|
|
||||||
}
|
|
||||||
|
|
||||||
on(filter: string, handler: Handler) {
|
|
||||||
const existing = this.filterHandlers.get(filter) || [];
|
|
||||||
existing.push(handler);
|
|
||||||
this.filterHandlers.set(filter, existing);
|
|
||||||
}
|
|
||||||
|
|
||||||
catch(handler: Handler) {
|
|
||||||
this.errorHandler = handler;
|
|
||||||
}
|
|
||||||
|
|
||||||
start(opts: { onStart: (botInfo: any) => void }) {
|
|
||||||
opts.onStart({ username: 'andy_ai_bot', id: 12345 });
|
|
||||||
}
|
|
||||||
|
|
||||||
stop() {}
|
|
||||||
},
|
|
||||||
}));
|
|
||||||
|
|
||||||
import { TelegramChannel, TelegramChannelOpts } from './telegram.js';
|
|
||||||
|
|
||||||
// --- Test helpers ---
|
|
||||||
|
|
||||||
function createTestOpts(
|
|
||||||
overrides?: Partial<TelegramChannelOpts>,
|
|
||||||
): TelegramChannelOpts {
|
|
||||||
return {
|
|
||||||
onMessage: vi.fn(),
|
|
||||||
onChatMetadata: vi.fn(),
|
|
||||||
registeredGroups: vi.fn(() => ({
|
|
||||||
'tg:100200300': {
|
|
||||||
name: 'Test Group',
|
|
||||||
folder: 'test-group',
|
|
||||||
trigger: '@Andy',
|
|
||||||
added_at: '2024-01-01T00:00:00.000Z',
|
|
||||||
},
|
|
||||||
})),
|
|
||||||
...overrides,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
function createTextCtx(overrides: {
|
|
||||||
chatId?: number;
|
|
||||||
chatType?: string;
|
|
||||||
chatTitle?: string;
|
|
||||||
text: string;
|
|
||||||
fromId?: number;
|
|
||||||
firstName?: string;
|
|
||||||
username?: string;
|
|
||||||
messageId?: number;
|
|
||||||
date?: number;
|
|
||||||
entities?: any[];
|
|
||||||
}) {
|
|
||||||
const chatId = overrides.chatId ?? 100200300;
|
|
||||||
const chatType = overrides.chatType ?? 'group';
|
|
||||||
return {
|
|
||||||
chat: {
|
|
||||||
id: chatId,
|
|
||||||
type: chatType,
|
|
||||||
title: overrides.chatTitle ?? 'Test Group',
|
|
||||||
},
|
|
||||||
from: {
|
|
||||||
id: overrides.fromId ?? 99001,
|
|
||||||
first_name: overrides.firstName ?? 'Alice',
|
|
||||||
username: overrides.username ?? 'alice_user',
|
|
||||||
},
|
|
||||||
message: {
|
|
||||||
text: overrides.text,
|
|
||||||
date: overrides.date ?? Math.floor(Date.now() / 1000),
|
|
||||||
message_id: overrides.messageId ?? 1,
|
|
||||||
entities: overrides.entities ?? [],
|
|
||||||
},
|
|
||||||
me: { username: 'andy_ai_bot' },
|
|
||||||
reply: vi.fn(),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
function createMediaCtx(overrides: {
|
|
||||||
chatId?: number;
|
|
||||||
chatType?: string;
|
|
||||||
fromId?: number;
|
|
||||||
firstName?: string;
|
|
||||||
date?: number;
|
|
||||||
messageId?: number;
|
|
||||||
caption?: string;
|
|
||||||
extra?: Record<string, any>;
|
|
||||||
}) {
|
|
||||||
const chatId = overrides.chatId ?? 100200300;
|
|
||||||
return {
|
|
||||||
chat: {
|
|
||||||
id: chatId,
|
|
||||||
type: overrides.chatType ?? 'group',
|
|
||||||
title: 'Test Group',
|
|
||||||
},
|
|
||||||
from: {
|
|
||||||
id: overrides.fromId ?? 99001,
|
|
||||||
first_name: overrides.firstName ?? 'Alice',
|
|
||||||
username: 'alice_user',
|
|
||||||
},
|
|
||||||
message: {
|
|
||||||
date: overrides.date ?? Math.floor(Date.now() / 1000),
|
|
||||||
message_id: overrides.messageId ?? 1,
|
|
||||||
caption: overrides.caption,
|
|
||||||
...(overrides.extra || {}),
|
|
||||||
},
|
|
||||||
me: { username: 'andy_ai_bot' },
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
function currentBot() {
|
|
||||||
return botRef.current;
|
|
||||||
}
|
|
||||||
|
|
||||||
async function triggerTextMessage(ctx: ReturnType<typeof createTextCtx>) {
|
|
||||||
const handlers = currentBot().filterHandlers.get('message:text') || [];
|
|
||||||
for (const h of handlers) await h(ctx);
|
|
||||||
}
|
|
||||||
|
|
||||||
async function triggerMediaMessage(
|
|
||||||
filter: string,
|
|
||||||
ctx: ReturnType<typeof createMediaCtx>,
|
|
||||||
) {
|
|
||||||
const handlers = currentBot().filterHandlers.get(filter) || [];
|
|
||||||
for (const h of handlers) await h(ctx);
|
|
||||||
}
|
|
||||||
|
|
||||||
// --- Tests ---
|
|
||||||
|
|
||||||
describe('TelegramChannel', () => {
|
|
||||||
beforeEach(() => {
|
|
||||||
vi.clearAllMocks();
|
|
||||||
});
|
|
||||||
|
|
||||||
afterEach(() => {
|
|
||||||
vi.restoreAllMocks();
|
|
||||||
});
|
|
||||||
|
|
||||||
// --- Connection lifecycle ---
|
|
||||||
|
|
||||||
describe('connection lifecycle', () => {
|
|
||||||
it('resolves connect() when bot starts', async () => {
|
|
||||||
const opts = createTestOpts();
|
|
||||||
const channel = new TelegramChannel('test-token', opts);
|
|
||||||
|
|
||||||
await channel.connect();
|
|
||||||
|
|
||||||
expect(channel.isConnected()).toBe(true);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('registers command and message handlers on connect', async () => {
|
|
||||||
const opts = createTestOpts();
|
|
||||||
const channel = new TelegramChannel('test-token', opts);
|
|
||||||
|
|
||||||
await channel.connect();
|
|
||||||
|
|
||||||
expect(currentBot().commandHandlers.has('chatid')).toBe(true);
|
|
||||||
expect(currentBot().commandHandlers.has('ping')).toBe(true);
|
|
||||||
expect(currentBot().filterHandlers.has('message:text')).toBe(true);
|
|
||||||
expect(currentBot().filterHandlers.has('message:photo')).toBe(true);
|
|
||||||
expect(currentBot().filterHandlers.has('message:video')).toBe(true);
|
|
||||||
expect(currentBot().filterHandlers.has('message:voice')).toBe(true);
|
|
||||||
expect(currentBot().filterHandlers.has('message:audio')).toBe(true);
|
|
||||||
expect(currentBot().filterHandlers.has('message:document')).toBe(true);
|
|
||||||
expect(currentBot().filterHandlers.has('message:sticker')).toBe(true);
|
|
||||||
expect(currentBot().filterHandlers.has('message:location')).toBe(true);
|
|
||||||
expect(currentBot().filterHandlers.has('message:contact')).toBe(true);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('registers error handler on connect', async () => {
|
|
||||||
const opts = createTestOpts();
|
|
||||||
const channel = new TelegramChannel('test-token', opts);
|
|
||||||
|
|
||||||
await channel.connect();
|
|
||||||
|
|
||||||
expect(currentBot().errorHandler).not.toBeNull();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('disconnects cleanly', async () => {
|
|
||||||
const opts = createTestOpts();
|
|
||||||
const channel = new TelegramChannel('test-token', opts);
|
|
||||||
|
|
||||||
await channel.connect();
|
|
||||||
expect(channel.isConnected()).toBe(true);
|
|
||||||
|
|
||||||
await channel.disconnect();
|
|
||||||
expect(channel.isConnected()).toBe(false);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('isConnected() returns false before connect', () => {
|
|
||||||
const opts = createTestOpts();
|
|
||||||
const channel = new TelegramChannel('test-token', opts);
|
|
||||||
|
|
||||||
expect(channel.isConnected()).toBe(false);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
// --- Text message handling ---
|
|
||||||
|
|
||||||
describe('text message handling', () => {
|
|
||||||
it('delivers message for registered group', async () => {
|
|
||||||
const opts = createTestOpts();
|
|
||||||
const channel = new TelegramChannel('test-token', opts);
|
|
||||||
await channel.connect();
|
|
||||||
|
|
||||||
const ctx = createTextCtx({ text: 'Hello everyone' });
|
|
||||||
await triggerTextMessage(ctx);
|
|
||||||
|
|
||||||
expect(opts.onChatMetadata).toHaveBeenCalledWith(
|
|
||||||
'tg:100200300',
|
|
||||||
expect.any(String),
|
|
||||||
'Test Group',
|
|
||||||
'telegram',
|
|
||||||
true,
|
|
||||||
);
|
|
||||||
expect(opts.onMessage).toHaveBeenCalledWith(
|
|
||||||
'tg:100200300',
|
|
||||||
expect.objectContaining({
|
|
||||||
id: '1',
|
|
||||||
chat_jid: 'tg:100200300',
|
|
||||||
sender: '99001',
|
|
||||||
sender_name: 'Alice',
|
|
||||||
content: 'Hello everyone',
|
|
||||||
is_from_me: false,
|
|
||||||
}),
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('only emits metadata for unregistered chats', async () => {
|
|
||||||
const opts = createTestOpts();
|
|
||||||
const channel = new TelegramChannel('test-token', opts);
|
|
||||||
await channel.connect();
|
|
||||||
|
|
||||||
const ctx = createTextCtx({ chatId: 999999, text: 'Unknown chat' });
|
|
||||||
await triggerTextMessage(ctx);
|
|
||||||
|
|
||||||
expect(opts.onChatMetadata).toHaveBeenCalledWith(
|
|
||||||
'tg:999999',
|
|
||||||
expect.any(String),
|
|
||||||
'Test Group',
|
|
||||||
'telegram',
|
|
||||||
true,
|
|
||||||
);
|
|
||||||
expect(opts.onMessage).not.toHaveBeenCalled();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('skips bot commands (/chatid, /ping) but passes other / messages through', async () => {
|
|
||||||
const opts = createTestOpts();
|
|
||||||
const channel = new TelegramChannel('test-token', opts);
|
|
||||||
await channel.connect();
|
|
||||||
|
|
||||||
// Bot commands should be skipped
|
|
||||||
const ctx1 = createTextCtx({ text: '/chatid' });
|
|
||||||
await triggerTextMessage(ctx1);
|
|
||||||
expect(opts.onMessage).not.toHaveBeenCalled();
|
|
||||||
expect(opts.onChatMetadata).not.toHaveBeenCalled();
|
|
||||||
|
|
||||||
const ctx2 = createTextCtx({ text: '/ping' });
|
|
||||||
await triggerTextMessage(ctx2);
|
|
||||||
expect(opts.onMessage).not.toHaveBeenCalled();
|
|
||||||
|
|
||||||
// Non-bot /commands should flow through
|
|
||||||
const ctx3 = createTextCtx({ text: '/remote-control' });
|
|
||||||
await triggerTextMessage(ctx3);
|
|
||||||
expect(opts.onMessage).toHaveBeenCalledTimes(1);
|
|
||||||
expect(opts.onMessage).toHaveBeenCalledWith(
|
|
||||||
'tg:100200300',
|
|
||||||
expect.objectContaining({ content: '/remote-control' }),
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('extracts sender name from first_name', async () => {
|
|
||||||
const opts = createTestOpts();
|
|
||||||
const channel = new TelegramChannel('test-token', opts);
|
|
||||||
await channel.connect();
|
|
||||||
|
|
||||||
const ctx = createTextCtx({ text: 'Hi', firstName: 'Bob' });
|
|
||||||
await triggerTextMessage(ctx);
|
|
||||||
|
|
||||||
expect(opts.onMessage).toHaveBeenCalledWith(
|
|
||||||
'tg:100200300',
|
|
||||||
expect.objectContaining({ sender_name: 'Bob' }),
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('falls back to username when first_name missing', async () => {
|
|
||||||
const opts = createTestOpts();
|
|
||||||
const channel = new TelegramChannel('test-token', opts);
|
|
||||||
await channel.connect();
|
|
||||||
|
|
||||||
const ctx = createTextCtx({ text: 'Hi' });
|
|
||||||
ctx.from.first_name = undefined as any;
|
|
||||||
await triggerTextMessage(ctx);
|
|
||||||
|
|
||||||
expect(opts.onMessage).toHaveBeenCalledWith(
|
|
||||||
'tg:100200300',
|
|
||||||
expect.objectContaining({ sender_name: 'alice_user' }),
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('falls back to user ID when name and username missing', async () => {
|
|
||||||
const opts = createTestOpts();
|
|
||||||
const channel = new TelegramChannel('test-token', opts);
|
|
||||||
await channel.connect();
|
|
||||||
|
|
||||||
const ctx = createTextCtx({ text: 'Hi', fromId: 42 });
|
|
||||||
ctx.from.first_name = undefined as any;
|
|
||||||
ctx.from.username = undefined as any;
|
|
||||||
await triggerTextMessage(ctx);
|
|
||||||
|
|
||||||
expect(opts.onMessage).toHaveBeenCalledWith(
|
|
||||||
'tg:100200300',
|
|
||||||
expect.objectContaining({ sender_name: '42' }),
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('uses sender name as chat name for private chats', async () => {
|
|
||||||
const opts = createTestOpts({
|
|
||||||
registeredGroups: vi.fn(() => ({
|
|
||||||
'tg:100200300': {
|
|
||||||
name: 'Private',
|
|
||||||
folder: 'private',
|
|
||||||
trigger: '@Andy',
|
|
||||||
added_at: '2024-01-01T00:00:00.000Z',
|
|
||||||
},
|
|
||||||
})),
|
|
||||||
});
|
|
||||||
const channel = new TelegramChannel('test-token', opts);
|
|
||||||
await channel.connect();
|
|
||||||
|
|
||||||
const ctx = createTextCtx({
|
|
||||||
text: 'Hello',
|
|
||||||
chatType: 'private',
|
|
||||||
firstName: 'Alice',
|
|
||||||
});
|
|
||||||
await triggerTextMessage(ctx);
|
|
||||||
|
|
||||||
expect(opts.onChatMetadata).toHaveBeenCalledWith(
|
|
||||||
'tg:100200300',
|
|
||||||
expect.any(String),
|
|
||||||
'Alice', // Private chats use sender name
|
|
||||||
'telegram',
|
|
||||||
false,
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('uses chat title as name for group chats', async () => {
|
|
||||||
const opts = createTestOpts();
|
|
||||||
const channel = new TelegramChannel('test-token', opts);
|
|
||||||
await channel.connect();
|
|
||||||
|
|
||||||
const ctx = createTextCtx({
|
|
||||||
text: 'Hello',
|
|
||||||
chatType: 'supergroup',
|
|
||||||
chatTitle: 'Project Team',
|
|
||||||
});
|
|
||||||
await triggerTextMessage(ctx);
|
|
||||||
|
|
||||||
expect(opts.onChatMetadata).toHaveBeenCalledWith(
|
|
||||||
'tg:100200300',
|
|
||||||
expect.any(String),
|
|
||||||
'Project Team',
|
|
||||||
'telegram',
|
|
||||||
true,
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('converts message.date to ISO timestamp', async () => {
|
|
||||||
const opts = createTestOpts();
|
|
||||||
const channel = new TelegramChannel('test-token', opts);
|
|
||||||
await channel.connect();
|
|
||||||
|
|
||||||
const unixTime = 1704067200; // 2024-01-01T00:00:00.000Z
|
|
||||||
const ctx = createTextCtx({ text: 'Hello', date: unixTime });
|
|
||||||
await triggerTextMessage(ctx);
|
|
||||||
|
|
||||||
expect(opts.onMessage).toHaveBeenCalledWith(
|
|
||||||
'tg:100200300',
|
|
||||||
expect.objectContaining({
|
|
||||||
timestamp: '2024-01-01T00:00:00.000Z',
|
|
||||||
}),
|
|
||||||
);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
// --- @mention translation ---
|
|
||||||
|
|
||||||
describe('@mention translation', () => {
|
|
||||||
it('translates @bot_username mention to trigger format', async () => {
|
|
||||||
const opts = createTestOpts();
|
|
||||||
const channel = new TelegramChannel('test-token', opts);
|
|
||||||
await channel.connect();
|
|
||||||
|
|
||||||
const ctx = createTextCtx({
|
|
||||||
text: '@andy_ai_bot what time is it?',
|
|
||||||
entities: [{ type: 'mention', offset: 0, length: 12 }],
|
|
||||||
});
|
|
||||||
await triggerTextMessage(ctx);
|
|
||||||
|
|
||||||
expect(opts.onMessage).toHaveBeenCalledWith(
|
|
||||||
'tg:100200300',
|
|
||||||
expect.objectContaining({
|
|
||||||
content: '@Andy @andy_ai_bot what time is it?',
|
|
||||||
}),
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('does not translate if message already matches trigger', async () => {
|
|
||||||
const opts = createTestOpts();
|
|
||||||
const channel = new TelegramChannel('test-token', opts);
|
|
||||||
await channel.connect();
|
|
||||||
|
|
||||||
const ctx = createTextCtx({
|
|
||||||
text: '@Andy @andy_ai_bot hello',
|
|
||||||
entities: [{ type: 'mention', offset: 6, length: 12 }],
|
|
||||||
});
|
|
||||||
await triggerTextMessage(ctx);
|
|
||||||
|
|
||||||
// Should NOT double-prepend — already starts with @Andy
|
|
||||||
expect(opts.onMessage).toHaveBeenCalledWith(
|
|
||||||
'tg:100200300',
|
|
||||||
expect.objectContaining({
|
|
||||||
content: '@Andy @andy_ai_bot hello',
|
|
||||||
}),
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('does not translate mentions of other bots', async () => {
|
|
||||||
const opts = createTestOpts();
|
|
||||||
const channel = new TelegramChannel('test-token', opts);
|
|
||||||
await channel.connect();
|
|
||||||
|
|
||||||
const ctx = createTextCtx({
|
|
||||||
text: '@some_other_bot hi',
|
|
||||||
entities: [{ type: 'mention', offset: 0, length: 15 }],
|
|
||||||
});
|
|
||||||
await triggerTextMessage(ctx);
|
|
||||||
|
|
||||||
expect(opts.onMessage).toHaveBeenCalledWith(
|
|
||||||
'tg:100200300',
|
|
||||||
expect.objectContaining({
|
|
||||||
content: '@some_other_bot hi', // No translation
|
|
||||||
}),
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('handles mention in middle of message', async () => {
|
|
||||||
const opts = createTestOpts();
|
|
||||||
const channel = new TelegramChannel('test-token', opts);
|
|
||||||
await channel.connect();
|
|
||||||
|
|
||||||
const ctx = createTextCtx({
|
|
||||||
text: 'hey @andy_ai_bot check this',
|
|
||||||
entities: [{ type: 'mention', offset: 4, length: 12 }],
|
|
||||||
});
|
|
||||||
await triggerTextMessage(ctx);
|
|
||||||
|
|
||||||
// Bot is mentioned, message doesn't match trigger → prepend trigger
|
|
||||||
expect(opts.onMessage).toHaveBeenCalledWith(
|
|
||||||
'tg:100200300',
|
|
||||||
expect.objectContaining({
|
|
||||||
content: '@Andy hey @andy_ai_bot check this',
|
|
||||||
}),
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('handles message with no entities', async () => {
|
|
||||||
const opts = createTestOpts();
|
|
||||||
const channel = new TelegramChannel('test-token', opts);
|
|
||||||
await channel.connect();
|
|
||||||
|
|
||||||
const ctx = createTextCtx({ text: 'plain message' });
|
|
||||||
await triggerTextMessage(ctx);
|
|
||||||
|
|
||||||
expect(opts.onMessage).toHaveBeenCalledWith(
|
|
||||||
'tg:100200300',
|
|
||||||
expect.objectContaining({
|
|
||||||
content: 'plain message',
|
|
||||||
}),
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('ignores non-mention entities', async () => {
|
|
||||||
const opts = createTestOpts();
|
|
||||||
const channel = new TelegramChannel('test-token', opts);
|
|
||||||
await channel.connect();
|
|
||||||
|
|
||||||
const ctx = createTextCtx({
|
|
||||||
text: 'check https://example.com',
|
|
||||||
entities: [{ type: 'url', offset: 6, length: 19 }],
|
|
||||||
});
|
|
||||||
await triggerTextMessage(ctx);
|
|
||||||
|
|
||||||
expect(opts.onMessage).toHaveBeenCalledWith(
|
|
||||||
'tg:100200300',
|
|
||||||
expect.objectContaining({
|
|
||||||
content: 'check https://example.com',
|
|
||||||
}),
|
|
||||||
);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
// --- Non-text messages ---
|
|
||||||
|
|
||||||
describe('non-text messages', () => {
|
|
||||||
it('stores photo with placeholder', async () => {
|
|
||||||
const opts = createTestOpts();
|
|
||||||
const channel = new TelegramChannel('test-token', opts);
|
|
||||||
await channel.connect();
|
|
||||||
|
|
||||||
const ctx = createMediaCtx({});
|
|
||||||
await triggerMediaMessage('message:photo', ctx);
|
|
||||||
|
|
||||||
expect(opts.onMessage).toHaveBeenCalledWith(
|
|
||||||
'tg:100200300',
|
|
||||||
expect.objectContaining({ content: '[Photo]' }),
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('stores photo with caption', async () => {
|
|
||||||
const opts = createTestOpts();
|
|
||||||
const channel = new TelegramChannel('test-token', opts);
|
|
||||||
await channel.connect();
|
|
||||||
|
|
||||||
const ctx = createMediaCtx({ caption: 'Look at this' });
|
|
||||||
await triggerMediaMessage('message:photo', ctx);
|
|
||||||
|
|
||||||
expect(opts.onMessage).toHaveBeenCalledWith(
|
|
||||||
'tg:100200300',
|
|
||||||
expect.objectContaining({ content: '[Photo] Look at this' }),
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('stores video with placeholder', async () => {
|
|
||||||
const opts = createTestOpts();
|
|
||||||
const channel = new TelegramChannel('test-token', opts);
|
|
||||||
await channel.connect();
|
|
||||||
|
|
||||||
const ctx = createMediaCtx({});
|
|
||||||
await triggerMediaMessage('message:video', ctx);
|
|
||||||
|
|
||||||
expect(opts.onMessage).toHaveBeenCalledWith(
|
|
||||||
'tg:100200300',
|
|
||||||
expect.objectContaining({ content: '[Video]' }),
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('stores voice message with placeholder', async () => {
|
|
||||||
const opts = createTestOpts();
|
|
||||||
const channel = new TelegramChannel('test-token', opts);
|
|
||||||
await channel.connect();
|
|
||||||
|
|
||||||
const ctx = createMediaCtx({});
|
|
||||||
await triggerMediaMessage('message:voice', ctx);
|
|
||||||
|
|
||||||
expect(opts.onMessage).toHaveBeenCalledWith(
|
|
||||||
'tg:100200300',
|
|
||||||
expect.objectContaining({ content: '[Voice message]' }),
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('stores audio with placeholder', async () => {
|
|
||||||
const opts = createTestOpts();
|
|
||||||
const channel = new TelegramChannel('test-token', opts);
|
|
||||||
await channel.connect();
|
|
||||||
|
|
||||||
const ctx = createMediaCtx({});
|
|
||||||
await triggerMediaMessage('message:audio', ctx);
|
|
||||||
|
|
||||||
expect(opts.onMessage).toHaveBeenCalledWith(
|
|
||||||
'tg:100200300',
|
|
||||||
expect.objectContaining({ content: '[Audio]' }),
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('stores document with filename', async () => {
|
|
||||||
const opts = createTestOpts();
|
|
||||||
const channel = new TelegramChannel('test-token', opts);
|
|
||||||
await channel.connect();
|
|
||||||
|
|
||||||
const ctx = createMediaCtx({
|
|
||||||
extra: { document: { file_name: 'report.pdf' } },
|
|
||||||
});
|
|
||||||
await triggerMediaMessage('message:document', ctx);
|
|
||||||
|
|
||||||
expect(opts.onMessage).toHaveBeenCalledWith(
|
|
||||||
'tg:100200300',
|
|
||||||
expect.objectContaining({ content: '[Document: report.pdf]' }),
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('stores document with fallback name when filename missing', async () => {
|
|
||||||
const opts = createTestOpts();
|
|
||||||
const channel = new TelegramChannel('test-token', opts);
|
|
||||||
await channel.connect();
|
|
||||||
|
|
||||||
const ctx = createMediaCtx({ extra: { document: {} } });
|
|
||||||
await triggerMediaMessage('message:document', ctx);
|
|
||||||
|
|
||||||
expect(opts.onMessage).toHaveBeenCalledWith(
|
|
||||||
'tg:100200300',
|
|
||||||
expect.objectContaining({ content: '[Document: file]' }),
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('stores sticker with emoji', async () => {
|
|
||||||
const opts = createTestOpts();
|
|
||||||
const channel = new TelegramChannel('test-token', opts);
|
|
||||||
await channel.connect();
|
|
||||||
|
|
||||||
const ctx = createMediaCtx({
|
|
||||||
extra: { sticker: { emoji: '😂' } },
|
|
||||||
});
|
|
||||||
await triggerMediaMessage('message:sticker', ctx);
|
|
||||||
|
|
||||||
expect(opts.onMessage).toHaveBeenCalledWith(
|
|
||||||
'tg:100200300',
|
|
||||||
expect.objectContaining({ content: '[Sticker 😂]' }),
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('stores location with placeholder', async () => {
|
|
||||||
const opts = createTestOpts();
|
|
||||||
const channel = new TelegramChannel('test-token', opts);
|
|
||||||
await channel.connect();
|
|
||||||
|
|
||||||
const ctx = createMediaCtx({});
|
|
||||||
await triggerMediaMessage('message:location', ctx);
|
|
||||||
|
|
||||||
expect(opts.onMessage).toHaveBeenCalledWith(
|
|
||||||
'tg:100200300',
|
|
||||||
expect.objectContaining({ content: '[Location]' }),
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('stores contact with placeholder', async () => {
|
|
||||||
const opts = createTestOpts();
|
|
||||||
const channel = new TelegramChannel('test-token', opts);
|
|
||||||
await channel.connect();
|
|
||||||
|
|
||||||
const ctx = createMediaCtx({});
|
|
||||||
await triggerMediaMessage('message:contact', ctx);
|
|
||||||
|
|
||||||
expect(opts.onMessage).toHaveBeenCalledWith(
|
|
||||||
'tg:100200300',
|
|
||||||
expect.objectContaining({ content: '[Contact]' }),
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('ignores non-text messages from unregistered chats', async () => {
|
|
||||||
const opts = createTestOpts();
|
|
||||||
const channel = new TelegramChannel('test-token', opts);
|
|
||||||
await channel.connect();
|
|
||||||
|
|
||||||
const ctx = createMediaCtx({ chatId: 999999 });
|
|
||||||
await triggerMediaMessage('message:photo', ctx);
|
|
||||||
|
|
||||||
expect(opts.onMessage).not.toHaveBeenCalled();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
// --- sendMessage ---
|
|
||||||
|
|
||||||
// Outbound delivery: JID→chat-id translation, Markdown parse mode,
// 4096-character splitting, and graceful failure handling.
describe('sendMessage', () => {
  it('sends message via bot API', async () => {
    const opts = createTestOpts();
    const channel = new TelegramChannel('test-token', opts);
    await channel.connect();

    await channel.sendMessage('tg:100200300', 'Hello');

    // The tg: prefix is stripped and Markdown parse mode is always requested.
    expect(currentBot().api.sendMessage).toHaveBeenCalledWith(
      '100200300',
      'Hello',
      { parse_mode: 'Markdown' },
    );
  });

  it('strips tg: prefix from JID', async () => {
    const opts = createTestOpts();
    const channel = new TelegramChannel('test-token', opts);
    await channel.connect();

    // Negative IDs are Telegram group/supergroup chats.
    await channel.sendMessage('tg:-1001234567890', 'Group message');

    expect(currentBot().api.sendMessage).toHaveBeenCalledWith(
      '-1001234567890',
      'Group message',
      { parse_mode: 'Markdown' },
    );
  });

  it('splits messages exceeding 4096 characters', async () => {
    const opts = createTestOpts();
    const channel = new TelegramChannel('test-token', opts);
    await channel.connect();

    // 5000 chars → one full 4096-char chunk plus a 904-char remainder.
    const longText = 'x'.repeat(5000);
    await channel.sendMessage('tg:100200300', longText);

    expect(currentBot().api.sendMessage).toHaveBeenCalledTimes(2);
    expect(currentBot().api.sendMessage).toHaveBeenNthCalledWith(
      1,
      '100200300',
      'x'.repeat(4096),
      { parse_mode: 'Markdown' },
    );
    expect(currentBot().api.sendMessage).toHaveBeenNthCalledWith(
      2,
      '100200300',
      'x'.repeat(904),
      { parse_mode: 'Markdown' },
    );
  });

  it('sends exactly one message at 4096 characters', async () => {
    const opts = createTestOpts();
    const channel = new TelegramChannel('test-token', opts);
    await channel.connect();

    // Boundary case: exactly at the limit must NOT trigger a split.
    const exactText = 'y'.repeat(4096);
    await channel.sendMessage('tg:100200300', exactText);

    expect(currentBot().api.sendMessage).toHaveBeenCalledTimes(1);
  });

  it('handles send failure gracefully', async () => {
    const opts = createTestOpts();
    const channel = new TelegramChannel('test-token', opts);
    await channel.connect();

    currentBot().api.sendMessage.mockRejectedValueOnce(
      new Error('Network error'),
    );

    // Should not throw
    await expect(
      channel.sendMessage('tg:100200300', 'Will fail'),
    ).resolves.toBeUndefined();
  });

  it('does nothing when bot is not initialized', async () => {
    const opts = createTestOpts();
    const channel = new TelegramChannel('test-token', opts);

    // Don't connect — bot is null
    await channel.sendMessage('tg:100200300', 'No bot');

    // No error, no API call
  });
});
|
|
||||||
|
|
||||||
// --- ownsJid ---
|
|
||||||
|
|
||||||
describe('ownsJid', () => {
|
|
||||||
it('owns tg: JIDs', () => {
|
|
||||||
const channel = new TelegramChannel('test-token', createTestOpts());
|
|
||||||
expect(channel.ownsJid('tg:123456')).toBe(true);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('owns tg: JIDs with negative IDs (groups)', () => {
|
|
||||||
const channel = new TelegramChannel('test-token', createTestOpts());
|
|
||||||
expect(channel.ownsJid('tg:-1001234567890')).toBe(true);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('does not own WhatsApp group JIDs', () => {
|
|
||||||
const channel = new TelegramChannel('test-token', createTestOpts());
|
|
||||||
expect(channel.ownsJid('12345@g.us')).toBe(false);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('does not own WhatsApp DM JIDs', () => {
|
|
||||||
const channel = new TelegramChannel('test-token', createTestOpts());
|
|
||||||
expect(channel.ownsJid('12345@s.whatsapp.net')).toBe(false);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('does not own unknown JID formats', () => {
|
|
||||||
const channel = new TelegramChannel('test-token', createTestOpts());
|
|
||||||
expect(channel.ownsJid('random-string')).toBe(false);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
// --- setTyping ---
|
|
||||||
|
|
||||||
// Typing indicator: only the "start typing" direction maps to a Telegram
// chat action; stop-typing and unconnected states are no-ops, and API
// failures (e.g. rate limits) are swallowed.
describe('setTyping', () => {
  it('sends typing action when isTyping is true', async () => {
    const opts = createTestOpts();
    const channel = new TelegramChannel('test-token', opts);
    await channel.connect();

    await channel.setTyping('tg:100200300', true);

    expect(currentBot().api.sendChatAction).toHaveBeenCalledWith(
      '100200300',
      'typing',
    );
  });

  it('does nothing when isTyping is false', async () => {
    const opts = createTestOpts();
    const channel = new TelegramChannel('test-token', opts);
    await channel.connect();

    // Telegram has no explicit "stop typing" action, so false is a no-op.
    await channel.setTyping('tg:100200300', false);

    expect(currentBot().api.sendChatAction).not.toHaveBeenCalled();
  });

  it('does nothing when bot is not initialized', async () => {
    const opts = createTestOpts();
    const channel = new TelegramChannel('test-token', opts);

    // Don't connect
    await channel.setTyping('tg:100200300', true);

    // No error, no API call
  });

  it('handles typing indicator failure gracefully', async () => {
    const opts = createTestOpts();
    const channel = new TelegramChannel('test-token', opts);
    await channel.connect();

    currentBot().api.sendChatAction.mockRejectedValueOnce(
      new Error('Rate limited'),
    );

    // Failure must be swallowed — setTyping never rejects.
    await expect(
      channel.setTyping('tg:100200300', true),
    ).resolves.toBeUndefined();
  });
});
|
|
||||||
|
|
||||||
// --- Bot commands ---
|
|
||||||
|
|
||||||
// Slash commands registered on connect(): handlers are captured by the bot
// mock's commandHandlers map and invoked directly with a minimal ctx.
describe('bot commands', () => {
  it('/chatid replies with chat ID and metadata', async () => {
    const opts = createTestOpts();
    const channel = new TelegramChannel('test-token', opts);
    await channel.connect();

    const handler = currentBot().commandHandlers.get('chatid')!;
    // Minimal grammY-like context: chat, sender, and a reply spy.
    const ctx = {
      chat: { id: 100200300, type: 'group' as const },
      from: { first_name: 'Alice' },
      reply: vi.fn(),
    };

    await handler(ctx);

    // Reply must contain the registration JID and use Markdown formatting.
    expect(ctx.reply).toHaveBeenCalledWith(
      expect.stringContaining('tg:100200300'),
      expect.objectContaining({ parse_mode: 'Markdown' }),
    );
  });

  it('/chatid shows chat type', async () => {
    const opts = createTestOpts();
    const channel = new TelegramChannel('test-token', opts);
    await channel.connect();

    const handler = currentBot().commandHandlers.get('chatid')!;
    const ctx = {
      chat: { id: 555, type: 'private' as const },
      from: { first_name: 'Bob' },
      reply: vi.fn(),
    };

    await handler(ctx);

    expect(ctx.reply).toHaveBeenCalledWith(
      expect.stringContaining('private'),
      expect.any(Object),
    );
  });

  it('/ping replies with bot status', async () => {
    const opts = createTestOpts();
    const channel = new TelegramChannel('test-token', opts);
    await channel.connect();

    const handler = currentBot().commandHandlers.get('ping')!;
    const ctx = { reply: vi.fn() };

    await handler(ctx);

    // 'Andy' is the default ASSISTANT_NAME in this test environment.
    expect(ctx.reply).toHaveBeenCalledWith('Andy is online.');
  });
});
|
|
||||||
|
|
||||||
// --- Channel properties ---
|
|
||||||
|
|
||||||
describe('channel properties', () => {
|
|
||||||
it('has name "telegram"', () => {
|
|
||||||
const channel = new TelegramChannel('test-token', createTestOpts());
|
|
||||||
expect(channel.name).toBe('telegram');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
||||||
@@ -1,396 +0,0 @@
|
|||||||
import https from 'https';
|
|
||||||
import { Api, Bot } from 'grammy';
|
|
||||||
|
|
||||||
import { ASSISTANT_NAME, TRIGGER_PATTERN } from '../config.js';
|
|
||||||
import { readEnvFile } from '../env.js';
|
|
||||||
import { logger } from '../logger.js';
|
|
||||||
import { registerChannel, ChannelOpts } from './registry.js';
|
|
||||||
import {
|
|
||||||
Channel,
|
|
||||||
OnChatMetadata,
|
|
||||||
OnInboundMessage,
|
|
||||||
RegisteredGroup,
|
|
||||||
} from '../types.js';
|
|
||||||
|
|
||||||
// Bot pool for agent teams: send-only Api instances (no polling)
const poolApis: Api[] = [];
// Maps "{groupFolder}:{senderName}" → pool Api index for stable assignment
const senderBotMap = new Map<string, number>();
// Round-robin cursor: next pool index to hand out to an unseen sender.
let nextPoolIndex = 0;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Initialize send-only Api instances for the bot pool.
|
|
||||||
* Each pool bot can send messages but doesn't poll for updates.
|
|
||||||
*/
|
|
||||||
export async function initBotPool(tokens: string[]): Promise<void> {
|
|
||||||
for (const token of tokens) {
|
|
||||||
try {
|
|
||||||
const api = new Api(token);
|
|
||||||
const me = await api.getMe();
|
|
||||||
poolApis.push(api);
|
|
||||||
logger.info(
|
|
||||||
{ username: me.username, id: me.id, poolSize: poolApis.length },
|
|
||||||
'Pool bot initialized',
|
|
||||||
);
|
|
||||||
} catch (err) {
|
|
||||||
logger.error({ err }, 'Failed to initialize pool bot');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (poolApis.length > 0) {
|
|
||||||
logger.info({ count: poolApis.length }, 'Telegram bot pool ready');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Send a message via a pool bot assigned to the given sender name.
|
|
||||||
* Assigns bots round-robin on first use; subsequent messages from the
|
|
||||||
* same sender in the same group always use the same bot.
|
|
||||||
* On first assignment, renames the bot to match the sender's role.
|
|
||||||
*/
|
|
||||||
export async function sendPoolMessage(
|
|
||||||
chatId: string,
|
|
||||||
text: string,
|
|
||||||
sender: string,
|
|
||||||
groupFolder: string,
|
|
||||||
): Promise<void> {
|
|
||||||
if (poolApis.length === 0) {
|
|
||||||
// No pool bots — fall back to main bot sendMessage path
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const key = `${groupFolder}:${sender}`;
|
|
||||||
let idx = senderBotMap.get(key);
|
|
||||||
if (idx === undefined) {
|
|
||||||
idx = nextPoolIndex % poolApis.length;
|
|
||||||
nextPoolIndex++;
|
|
||||||
senderBotMap.set(key, idx);
|
|
||||||
// Rename the bot to match the sender's role, then wait for Telegram to propagate
|
|
||||||
try {
|
|
||||||
await poolApis[idx].setMyName(sender);
|
|
||||||
await new Promise((r) => setTimeout(r, 2000));
|
|
||||||
logger.info(
|
|
||||||
{ sender, groupFolder, poolIndex: idx },
|
|
||||||
'Assigned and renamed pool bot',
|
|
||||||
);
|
|
||||||
} catch (err) {
|
|
||||||
logger.warn(
|
|
||||||
{ sender, err },
|
|
||||||
'Failed to rename pool bot (sending anyway)',
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const api = poolApis[idx];
|
|
||||||
try {
|
|
||||||
const numericId = chatId.replace(/^tg:/, '');
|
|
||||||
const MAX_LENGTH = 4096;
|
|
||||||
if (text.length <= MAX_LENGTH) {
|
|
||||||
await sendTelegramMessage(api, numericId, text);
|
|
||||||
} else {
|
|
||||||
for (let i = 0; i < text.length; i += MAX_LENGTH) {
|
|
||||||
await sendTelegramMessage(
|
|
||||||
api,
|
|
||||||
numericId,
|
|
||||||
text.slice(i, i + MAX_LENGTH),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
logger.info(
|
|
||||||
{ chatId, sender, poolIndex: idx, length: text.length },
|
|
||||||
'Pool message sent',
|
|
||||||
);
|
|
||||||
} catch (err) {
|
|
||||||
logger.error({ chatId, sender, err }, 'Failed to send pool message');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/** Dependencies injected into TelegramChannel by the host application. */
export interface TelegramChannelOpts {
  // Invoked with each inbound message from a registered chat.
  onMessage: OnInboundMessage;
  // Invoked with chat metadata (JID, timestamp, name, channel, group flag)
  // for every inbound message, registered or not — used for discovery.
  onChatMetadata: OnChatMetadata;
  // Returns the live map of registered groups keyed by chat JID.
  registeredGroups: () => Record<string, RegisteredGroup>;
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Send a message with Telegram Markdown parse mode, falling back to plain text.
|
|
||||||
* Claude's output naturally matches Telegram's Markdown v1 format:
|
|
||||||
* *bold*, _italic_, `code`, ```code blocks```, [links](url)
|
|
||||||
*/
|
|
||||||
async function sendTelegramMessage(
|
|
||||||
api: { sendMessage: Api['sendMessage'] },
|
|
||||||
chatId: string | number,
|
|
||||||
text: string,
|
|
||||||
options: { message_thread_id?: number } = {},
|
|
||||||
): Promise<void> {
|
|
||||||
try {
|
|
||||||
await api.sendMessage(chatId, text, {
|
|
||||||
...options,
|
|
||||||
parse_mode: 'Markdown',
|
|
||||||
});
|
|
||||||
} catch (err) {
|
|
||||||
// Fallback: send as plain text if Markdown parsing fails
|
|
||||||
logger.debug({ err }, 'Markdown send failed, falling back to plain text');
|
|
||||||
await api.sendMessage(chatId, text, options);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
 * Telegram channel implementation backed by grammY long polling.
 *
 * Inbound messages from registered chats are forwarded via opts.onMessage;
 * chat metadata is reported via opts.onChatMetadata for every message so
 * unregistered chats remain discoverable. Outbound messages are sent with
 * Markdown parse mode and split to fit Telegram's 4096-character limit.
 */
export class TelegramChannel implements Channel {
  name = 'telegram';

  // grammY Bot instance; null until connect() succeeds and after disconnect().
  private bot: Bot | null = null;
  private opts: TelegramChannelOpts;
  private botToken: string;

  constructor(botToken: string, opts: TelegramChannelOpts) {
    this.botToken = botToken;
    this.opts = opts;
  }

  /**
   * Create the bot, register command/message handlers, and start long
   * polling. The returned promise resolves once polling has started.
   */
  async connect(): Promise<void> {
    this.bot = new Bot(this.botToken, {
      client: {
        baseFetchConfig: { agent: https.globalAgent, compress: true },
      },
    });

    // Command to get chat ID (useful for registration)
    this.bot.command('chatid', (ctx) => {
      const chatId = ctx.chat.id;
      const chatType = ctx.chat.type;
      // Private chats have no title; fall back to the sender's first name.
      const chatName =
        chatType === 'private'
          ? ctx.from?.first_name || 'Private'
          : (ctx.chat as any).title || 'Unknown';

      ctx.reply(
        `Chat ID: \`tg:${chatId}\`\nName: ${chatName}\nType: ${chatType}`,
        { parse_mode: 'Markdown' },
      );
    });

    // Command to check bot status
    this.bot.command('ping', (ctx) => {
      ctx.reply(`${ASSISTANT_NAME} is online.`);
    });

    // Telegram bot commands handled above — skip them in the general handler
    // so they don't also get stored as messages. All other /commands flow through.
    const TELEGRAM_BOT_COMMANDS = new Set(['chatid', 'ping']);

    this.bot.on('message:text', async (ctx) => {
      if (ctx.message.text.startsWith('/')) {
        // Command word ends at whitespace or '@' (e.g. "/ping@my_bot").
        const cmd = ctx.message.text.slice(1).split(/[\s@]/)[0].toLowerCase();
        if (TELEGRAM_BOT_COMMANDS.has(cmd)) return;
      }

      const chatJid = `tg:${ctx.chat.id}`;
      let content = ctx.message.text;
      // Telegram message.date is Unix seconds; convert to ISO-8601.
      const timestamp = new Date(ctx.message.date * 1000).toISOString();
      const senderName =
        ctx.from?.first_name ||
        ctx.from?.username ||
        ctx.from?.id.toString() ||
        'Unknown';
      const sender = ctx.from?.id.toString() || '';
      const msgId = ctx.message.message_id.toString();

      // Determine chat name
      const chatName =
        ctx.chat.type === 'private'
          ? senderName
          : (ctx.chat as any).title || chatJid;

      // Translate Telegram @bot_username mentions into TRIGGER_PATTERN format.
      // Telegram @mentions (e.g., @andy_ai_bot) won't match TRIGGER_PATTERN
      // (e.g., ^@Andy\b), so we prepend the trigger when the bot is @mentioned.
      const botUsername = ctx.me?.username?.toLowerCase();
      if (botUsername) {
        const entities = ctx.message.entities || [];
        const isBotMentioned = entities.some((entity) => {
          if (entity.type === 'mention') {
            const mentionText = content
              .substring(entity.offset, entity.offset + entity.length)
              .toLowerCase();
            return mentionText === `@${botUsername}`;
          }
          return false;
        });
        if (isBotMentioned && !TRIGGER_PATTERN.test(content)) {
          content = `@${ASSISTANT_NAME} ${content}`;
        }
      }

      // Store chat metadata for discovery
      const isGroup =
        ctx.chat.type === 'group' || ctx.chat.type === 'supergroup';
      this.opts.onChatMetadata(
        chatJid,
        timestamp,
        chatName,
        'telegram',
        isGroup,
      );

      // Only deliver full message for registered groups
      const group = this.opts.registeredGroups()[chatJid];
      if (!group) {
        logger.debug(
          { chatJid, chatName },
          'Message from unregistered Telegram chat',
        );
        return;
      }

      // Deliver message — startMessageLoop() will pick it up
      this.opts.onMessage(chatJid, {
        id: msgId,
        chat_jid: chatJid,
        sender,
        sender_name: senderName,
        content,
        timestamp,
        is_from_me: false,
      });

      logger.info(
        { chatJid, chatName, sender: senderName },
        'Telegram message stored',
      );
    });

    // Handle non-text messages with placeholders so the agent knows something was sent.
    // Unlike the text handler, unregistered chats are dropped silently and the
    // chat name is left undefined in the metadata callback.
    const storeNonText = (ctx: any, placeholder: string) => {
      const chatJid = `tg:${ctx.chat.id}`;
      const group = this.opts.registeredGroups()[chatJid];
      if (!group) return;

      const timestamp = new Date(ctx.message.date * 1000).toISOString();
      const senderName =
        ctx.from?.first_name ||
        ctx.from?.username ||
        ctx.from?.id?.toString() ||
        'Unknown';
      // Media captions are appended to the placeholder, space-separated.
      const caption = ctx.message.caption ? ` ${ctx.message.caption}` : '';

      const isGroup =
        ctx.chat.type === 'group' || ctx.chat.type === 'supergroup';
      this.opts.onChatMetadata(
        chatJid,
        timestamp,
        undefined,
        'telegram',
        isGroup,
      );
      this.opts.onMessage(chatJid, {
        id: ctx.message.message_id.toString(),
        chat_jid: chatJid,
        sender: ctx.from?.id?.toString() || '',
        sender_name: senderName,
        content: `${placeholder}${caption}`,
        timestamp,
        is_from_me: false,
      });
    };

    this.bot.on('message:photo', (ctx) => storeNonText(ctx, '[Photo]'));
    this.bot.on('message:video', (ctx) => storeNonText(ctx, '[Video]'));
    this.bot.on('message:voice', (ctx) => storeNonText(ctx, '[Voice message]'));
    this.bot.on('message:audio', (ctx) => storeNonText(ctx, '[Audio]'));
    this.bot.on('message:document', (ctx) => {
      const name = ctx.message.document?.file_name || 'file';
      storeNonText(ctx, `[Document: ${name}]`);
    });
    this.bot.on('message:sticker', (ctx) => {
      const emoji = ctx.message.sticker?.emoji || '';
      storeNonText(ctx, `[Sticker ${emoji}]`);
    });
    this.bot.on('message:location', (ctx) => storeNonText(ctx, '[Location]'));
    this.bot.on('message:contact', (ctx) => storeNonText(ctx, '[Contact]'));

    // Handle errors gracefully
    this.bot.catch((err) => {
      logger.error({ err: err.message }, 'Telegram bot error');
    });

    // Start polling — returns a Promise that resolves when started
    return new Promise<void>((resolve) => {
      this.bot!.start({
        onStart: (botInfo) => {
          logger.info(
            { username: botInfo.username, id: botInfo.id },
            'Telegram bot connected',
          );
          console.log(`\n Telegram bot: @${botInfo.username}`);
          console.log(
            ` Send /chatid to the bot to get a chat's registration ID\n`,
          );
          resolve();
        },
      });
    });
  }

  /**
   * Send text to a Telegram chat. The "tg:" prefix is stripped from the
   * JID; messages over 4096 characters are split into sequential chunks.
   * Never throws — failures are logged.
   */
  async sendMessage(jid: string, text: string): Promise<void> {
    if (!this.bot) {
      logger.warn('Telegram bot not initialized');
      return;
    }

    try {
      const numericId = jid.replace(/^tg:/, '');

      // Telegram has a 4096 character limit per message — split if needed
      const MAX_LENGTH = 4096;
      if (text.length <= MAX_LENGTH) {
        await sendTelegramMessage(this.bot.api, numericId, text);
      } else {
        for (let i = 0; i < text.length; i += MAX_LENGTH) {
          await sendTelegramMessage(
            this.bot.api,
            numericId,
            text.slice(i, i + MAX_LENGTH),
          );
        }
      }
      logger.info({ jid, length: text.length }, 'Telegram message sent');
    } catch (err) {
      logger.error({ jid, err }, 'Failed to send Telegram message');
    }
  }

  // True once connect() has created the bot (until disconnect()).
  isConnected(): boolean {
    return this.bot !== null;
  }

  // This channel owns every JID carrying the "tg:" prefix.
  ownsJid(jid: string): boolean {
    return jid.startsWith('tg:');
  }

  /** Stop polling and release the bot instance. Safe to call when not connected. */
  async disconnect(): Promise<void> {
    if (this.bot) {
      // NOTE(review): grammY's stop() appears to return a Promise that is not
      // awaited here, so shutdown may still be in flight when this resolves —
      // confirm against the grammY API before relying on ordering.
      this.bot.stop();
      this.bot = null;
      logger.info('Telegram bot stopped');
    }
  }

  /**
   * Show a "typing…" chat action. Telegram has no explicit stop action,
   * so isTyping=false is a no-op. Failures are logged at debug level.
   */
  async setTyping(jid: string, isTyping: boolean): Promise<void> {
    if (!this.bot || !isTyping) return;
    try {
      const numericId = jid.replace(/^tg:/, '');
      await this.bot.api.sendChatAction(numericId, 'typing');
    } catch (err) {
      logger.debug({ jid, err }, 'Failed to send Telegram typing indicator');
    }
  }
}
|
|
||||||
|
|
||||||
registerChannel('telegram', (opts: ChannelOpts) => {
|
|
||||||
const envVars = readEnvFile(['TELEGRAM_BOT_TOKEN']);
|
|
||||||
const token =
|
|
||||||
process.env.TELEGRAM_BOT_TOKEN || envVars.TELEGRAM_BOT_TOKEN || '';
|
|
||||||
if (!token) {
|
|
||||||
logger.warn('Telegram: TELEGRAM_BOT_TOKEN not set');
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
return new TelegramChannel(token, opts);
|
|
||||||
});
|
|
||||||
45
src/claw-skill.test.ts
Normal file
45
src/claw-skill.test.ts
Normal file
@@ -0,0 +1,45 @@
|
|||||||
|
import fs from 'fs';
|
||||||
|
import os from 'os';
|
||||||
|
import path from 'path';
|
||||||
|
import { spawnSync } from 'child_process';
|
||||||
|
|
||||||
|
import { describe, expect, it } from 'vitest';
|
||||||
|
|
||||||
|
// Regression test for the claw skill wrapper: once the container runtime has
// emitted a complete structured-output envelope, the wrapper must exit 0 even
// if the runtime process itself is later killed (simulated by `sleep 30`
// outliving the spawnSync timeout).
describe('claw skill script', () => {
  it('exits zero after successful structured output even if the runtime is terminated', { timeout: 20000 }, () => {
    // Isolated temp dir doubles as NANOCLAW_DIR and hosts a fake runtime binary.
    const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'claw-skill-test-'));
    const binDir = path.join(tempDir, 'bin');
    fs.mkdirSync(binDir, { recursive: true });

    // Fake `container` binary: drain stdin, print a valid output envelope,
    // then hang — forcing the wrapper to exit without waiting for it.
    const runtimePath = path.join(binDir, 'container');
    fs.writeFileSync(
      runtimePath,
      `#!/bin/sh
cat >/dev/null
printf '%s\n' '---NANOCLAW_OUTPUT_START---' '{"status":"success","result":"4","newSessionId":"sess-1"}' '---NANOCLAW_OUTPUT_END---'
sleep 30
`,
    );
    fs.chmodSync(runtimePath, 0o755);

    // Run the real skill script with the fake runtime first on PATH.
    const result = spawnSync(
      'python3',
      ['.claude/skills/claw/scripts/claw', '-j', 'tg:123', 'What is 2+2?'],
      {
        cwd: process.cwd(),
        encoding: 'utf8',
        env: {
          ...process.env,
          NANOCLAW_DIR: tempDir,
          PATH: `${binDir}:${process.env.PATH || ''}`,
        },
        timeout: 15000,
      },
    );

    // Clean exit (no signal) with the result on stdout and the session
    // identifier echoed to stderr.
    expect(result.status).toBe(0);
    expect(result.signal).toBeNull();
    expect(result.stdout).toContain('4');
    expect(result.stderr).toContain('[session: sess-1]');
  });
});
|
||||||
@@ -2,11 +2,15 @@ import os from 'os';
|
|||||||
import path from 'path';
|
import path from 'path';
|
||||||
|
|
||||||
import { readEnvFile } from './env.js';
|
import { readEnvFile } from './env.js';
|
||||||
|
import { isValidTimezone } from './timezone.js';
|
||||||
|
|
||||||
// Read config values from .env (falls back to process.env).
|
// Read config values from .env (falls back to process.env).
|
||||||
// Secrets (API keys, tokens) are NOT read here — they are loaded only
|
const envConfig = readEnvFile([
|
||||||
// by the credential proxy (credential-proxy.ts), never exposed to containers.
|
'ASSISTANT_NAME',
|
||||||
const envConfig = readEnvFile(['ASSISTANT_NAME', 'ASSISTANT_HAS_OWN_NUMBER']);
|
'ASSISTANT_HAS_OWN_NUMBER',
|
||||||
|
'ONECLI_URL',
|
||||||
|
'TZ',
|
||||||
|
]);
|
||||||
|
|
||||||
export const ASSISTANT_NAME =
|
export const ASSISTANT_NAME =
|
||||||
process.env.ASSISTANT_NAME || envConfig.ASSISTANT_NAME || 'Andy';
|
process.env.ASSISTANT_NAME || envConfig.ASSISTANT_NAME || 'Andy';
|
||||||
@@ -47,9 +51,11 @@ export const CONTAINER_MAX_OUTPUT_SIZE = parseInt(
|
|||||||
process.env.CONTAINER_MAX_OUTPUT_SIZE || '10485760',
|
process.env.CONTAINER_MAX_OUTPUT_SIZE || '10485760',
|
||||||
10,
|
10,
|
||||||
); // 10MB default
|
); // 10MB default
|
||||||
export const CREDENTIAL_PROXY_PORT = parseInt(
|
export const ONECLI_URL =
|
||||||
process.env.CREDENTIAL_PROXY_PORT || '4800',
|
process.env.ONECLI_URL || envConfig.ONECLI_URL || 'http://localhost:10254';
|
||||||
10,
|
export const MAX_MESSAGES_PER_PROMPT = Math.max(
|
||||||
|
1,
|
||||||
|
parseInt(process.env.MAX_MESSAGES_PER_PROMPT || '10', 10) || 10,
|
||||||
);
|
);
|
||||||
export const IPC_POLL_INTERVAL = 1000;
|
export const IPC_POLL_INTERVAL = 1000;
|
||||||
export const IDLE_TIMEOUT = parseInt(process.env.IDLE_TIMEOUT || '1800000', 10); // 30min default — how long to keep container alive after last result
|
export const IDLE_TIMEOUT = parseInt(process.env.IDLE_TIMEOUT || '1800000', 10); // 30min default — how long to keep container alive after last result
|
||||||
@@ -62,13 +68,33 @@ function escapeRegex(str: string): string {
|
|||||||
return str.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
|
return str.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
|
||||||
}
|
}
|
||||||
|
|
||||||
// Trigger pattern - currently disabled so all messages are processed
|
export function buildTriggerPattern(trigger: string): RegExp {
|
||||||
export const TRIGGER_PATTERN = /^/;
|
return new RegExp(`^${escapeRegex(trigger.trim())}\\b`, 'i');
|
||||||
|
}
|
||||||
|
|
||||||
// Timezone for scheduled tasks (cron expressions, etc.)
|
export const DEFAULT_TRIGGER = `@${ASSISTANT_NAME}`;
|
||||||
// Uses system timezone by default
|
|
||||||
export const TIMEZONE =
|
export function getTriggerPattern(trigger?: string): RegExp {
|
||||||
process.env.TZ || Intl.DateTimeFormat().resolvedOptions().timeZone;
|
const normalizedTrigger = trigger?.trim();
|
||||||
|
return buildTriggerPattern(normalizedTrigger || DEFAULT_TRIGGER);
|
||||||
|
}
|
||||||
|
|
||||||
|
export const TRIGGER_PATTERN = buildTriggerPattern(DEFAULT_TRIGGER);
|
||||||
|
|
||||||
|
// Timezone for scheduled tasks, message formatting, etc.
|
||||||
|
// Validates each candidate is a real IANA identifier before accepting.
|
||||||
|
function resolveConfigTimezone(): string {
|
||||||
|
const candidates = [
|
||||||
|
process.env.TZ,
|
||||||
|
envConfig.TZ,
|
||||||
|
Intl.DateTimeFormat().resolvedOptions().timeZone,
|
||||||
|
];
|
||||||
|
for (const tz of candidates) {
|
||||||
|
if (tz && isValidTimezone(tz)) return tz;
|
||||||
|
}
|
||||||
|
return 'UTC';
|
||||||
|
}
|
||||||
|
export const TIMEZONE = resolveConfigTimezone();
|
||||||
|
|
||||||
const telegramPoolEnv = readEnvFile(['TELEGRAM_BOT_POOL']);
|
const telegramPoolEnv = readEnvFile(['TELEGRAM_BOT_POOL']);
|
||||||
export const TELEGRAM_BOT_POOL = (
|
export const TELEGRAM_BOT_POOL = (
|
||||||
|
|||||||
@@ -11,10 +11,10 @@ vi.mock('./config.js', () => ({
|
|||||||
CONTAINER_IMAGE: 'nanoclaw-agent:latest',
|
CONTAINER_IMAGE: 'nanoclaw-agent:latest',
|
||||||
CONTAINER_MAX_OUTPUT_SIZE: 10485760,
|
CONTAINER_MAX_OUTPUT_SIZE: 10485760,
|
||||||
CONTAINER_TIMEOUT: 1800000, // 30min
|
CONTAINER_TIMEOUT: 1800000, // 30min
|
||||||
CREDENTIAL_PROXY_PORT: 3001,
|
|
||||||
DATA_DIR: '/tmp/nanoclaw-test-data',
|
DATA_DIR: '/tmp/nanoclaw-test-data',
|
||||||
GROUPS_DIR: '/tmp/nanoclaw-test-groups',
|
GROUPS_DIR: '/tmp/nanoclaw-test-groups',
|
||||||
IDLE_TIMEOUT: 1800000, // 30min
|
IDLE_TIMEOUT: 1800000, // 30min
|
||||||
|
ONECLI_URL: 'http://localhost:10254',
|
||||||
TIMEZONE: 'America/Los_Angeles',
|
TIMEZONE: 'America/Los_Angeles',
|
||||||
}));
|
}));
|
||||||
|
|
||||||
@@ -51,6 +51,25 @@ vi.mock('./mount-security.js', () => ({
|
|||||||
validateAdditionalMounts: vi.fn(() => []),
|
validateAdditionalMounts: vi.fn(() => []),
|
||||||
}));
|
}));
|
||||||
|
|
||||||
|
// Mock container-runtime
|
||||||
|
vi.mock('./container-runtime.js', () => ({
|
||||||
|
CONTAINER_RUNTIME_BIN: 'docker',
|
||||||
|
hostGatewayArgs: () => [],
|
||||||
|
readonlyMountArgs: (h: string, c: string) => ['-v', `${h}:${c}:ro`],
|
||||||
|
stopContainer: vi.fn(),
|
||||||
|
}));
|
||||||
|
|
||||||
|
// Mock OneCLI SDK
|
||||||
|
vi.mock('@onecli-sh/sdk', () => ({
|
||||||
|
OneCLI: class {
|
||||||
|
applyContainerConfig = vi.fn().mockResolvedValue(true);
|
||||||
|
createAgent = vi.fn().mockResolvedValue({ id: 'test' });
|
||||||
|
ensureAgent = vi
|
||||||
|
.fn()
|
||||||
|
.mockResolvedValue({ name: 'test', identifier: 'test', created: true });
|
||||||
|
},
|
||||||
|
}));
|
||||||
|
|
||||||
// Create a controllable fake ChildProcess
|
// Create a controllable fake ChildProcess
|
||||||
function createFakeProcess() {
|
function createFakeProcess() {
|
||||||
const proc = new EventEmitter() as EventEmitter & {
|
const proc = new EventEmitter() as EventEmitter & {
|
||||||
|
|||||||
@@ -2,8 +2,7 @@
|
|||||||
* Container Runner for NanoClaw
|
* Container Runner for NanoClaw
|
||||||
* Spawns agent execution in containers and handles IPC
|
* Spawns agent execution in containers and handles IPC
|
||||||
*/
|
*/
|
||||||
import { ChildProcess, exec, spawn } from 'child_process';
|
import { ChildProcess, spawn } from 'child_process';
|
||||||
import os from 'os';
|
|
||||||
|
|
||||||
/** Detect if running on Apple Container runtime (vs Docker) */
|
/** Detect if running on Apple Container runtime (vs Docker) */
|
||||||
const isAppleContainer = CONTAINER_RUNTIME_BIN === 'container';
|
const isAppleContainer = CONTAINER_RUNTIME_BIN === 'container';
|
||||||
@@ -14,26 +13,27 @@ import {
|
|||||||
CONTAINER_IMAGE,
|
CONTAINER_IMAGE,
|
||||||
CONTAINER_MAX_OUTPUT_SIZE,
|
CONTAINER_MAX_OUTPUT_SIZE,
|
||||||
CONTAINER_TIMEOUT,
|
CONTAINER_TIMEOUT,
|
||||||
CREDENTIAL_PROXY_PORT,
|
|
||||||
DATA_DIR,
|
DATA_DIR,
|
||||||
GROUPS_DIR,
|
GROUPS_DIR,
|
||||||
IDLE_TIMEOUT,
|
IDLE_TIMEOUT,
|
||||||
|
ONECLI_URL,
|
||||||
TIMEZONE,
|
TIMEZONE,
|
||||||
} from './config.js';
|
} from './config.js';
|
||||||
import { resolveGroupFolderPath, resolveGroupIpcPath } from './group-folder.js';
|
import { resolveGroupFolderPath, resolveGroupIpcPath } from './group-folder.js';
|
||||||
import { logger } from './logger.js';
|
import { logger } from './logger.js';
|
||||||
import {
|
import {
|
||||||
CONTAINER_HOST_GATEWAY,
|
|
||||||
CONTAINER_RUNTIME_BIN,
|
CONTAINER_RUNTIME_BIN,
|
||||||
hostGatewayArgs,
|
hostGatewayArgs,
|
||||||
readonlyMountArgs,
|
readonlyMountArgs,
|
||||||
stopContainer,
|
stopContainer,
|
||||||
} from './container-runtime.js';
|
} from './container-runtime.js';
|
||||||
import { detectAuthMode } from './credential-proxy.js';
|
import { OneCLI } from '@onecli-sh/sdk';
|
||||||
import { readEnvFile } from './env.js';
|
import { readEnvFile } from './env.js';
|
||||||
import { validateAdditionalMounts } from './mount-security.js';
|
import { validateAdditionalMounts } from './mount-security.js';
|
||||||
import { RegisteredGroup } from './types.js';
|
import { RegisteredGroup } from './types.js';
|
||||||
|
|
||||||
|
const onecli = new OneCLI({ url: ONECLI_URL });
|
||||||
|
|
||||||
// Sentinel markers for robust output parsing (must match agent-runner)
|
// Sentinel markers for robust output parsing (must match agent-runner)
|
||||||
const OUTPUT_START_MARKER = '---NANOCLAW_OUTPUT_START---';
|
const OUTPUT_START_MARKER = '---NANOCLAW_OUTPUT_START---';
|
||||||
const OUTPUT_END_MARKER = '---NANOCLAW_OUTPUT_END---';
|
const OUTPUT_END_MARKER = '---NANOCLAW_OUTPUT_END---';
|
||||||
@@ -46,6 +46,7 @@ export interface ContainerInput {
|
|||||||
isMain: boolean;
|
isMain: boolean;
|
||||||
isScheduledTask?: boolean;
|
isScheduledTask?: boolean;
|
||||||
assistantName?: string;
|
assistantName?: string;
|
||||||
|
script?: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface ContainerOutput {
|
export interface ContainerOutput {
|
||||||
@@ -82,7 +83,7 @@ function buildVolumeMounts(
|
|||||||
});
|
});
|
||||||
|
|
||||||
// Shadow .env so the agent cannot read secrets from the mounted project root.
|
// Shadow .env so the agent cannot read secrets from the mounted project root.
|
||||||
// Credentials are injected by the credential proxy, never exposed to containers.
|
// Credentials are injected by the OneCLI gateway, never exposed to containers.
|
||||||
// Skip this mount on Apple Container since it doesn't support bind-mounting /dev/null.
|
// Skip this mount on Apple Container since it doesn't support bind-mounting /dev/null.
|
||||||
const envFile = path.join(projectRoot, '.env');
|
const envFile = path.join(projectRoot, '.env');
|
||||||
if (fs.existsSync(envFile) && !isAppleContainer) {
|
if (fs.existsSync(envFile) && !isAppleContainer) {
|
||||||
@@ -205,9 +206,18 @@ function buildVolumeMounts(
|
|||||||
group.folder,
|
group.folder,
|
||||||
'agent-runner-src',
|
'agent-runner-src',
|
||||||
);
|
);
|
||||||
if (!fs.existsSync(groupAgentRunnerDir) && fs.existsSync(agentRunnerSrc)) {
|
if (fs.existsSync(agentRunnerSrc)) {
|
||||||
|
const srcIndex = path.join(agentRunnerSrc, 'index.ts');
|
||||||
|
const cachedIndex = path.join(groupAgentRunnerDir, 'index.ts');
|
||||||
|
const needsCopy =
|
||||||
|
!fs.existsSync(groupAgentRunnerDir) ||
|
||||||
|
!fs.existsSync(cachedIndex) ||
|
||||||
|
(fs.existsSync(srcIndex) &&
|
||||||
|
fs.statSync(srcIndex).mtimeMs > fs.statSync(cachedIndex).mtimeMs);
|
||||||
|
if (needsCopy) {
|
||||||
fs.cpSync(agentRunnerSrc, groupAgentRunnerDir, { recursive: true });
|
fs.cpSync(agentRunnerSrc, groupAgentRunnerDir, { recursive: true });
|
||||||
}
|
}
|
||||||
|
}
|
||||||
mounts.push({
|
mounts.push({
|
||||||
hostPath: groupAgentRunnerDir,
|
hostPath: groupAgentRunnerDir,
|
||||||
containerPath: '/app/src',
|
containerPath: '/app/src',
|
||||||
@@ -227,11 +237,12 @@ function buildVolumeMounts(
|
|||||||
return mounts;
|
return mounts;
|
||||||
}
|
}
|
||||||
|
|
||||||
function buildContainerArgs(
|
async function buildContainerArgs(
|
||||||
mounts: VolumeMount[],
|
mounts: VolumeMount[],
|
||||||
containerName: string,
|
containerName: string,
|
||||||
isMain: boolean,
|
isMain: boolean,
|
||||||
): string[] {
|
agentIdentifier?: string,
|
||||||
|
): Promise<string[]> {
|
||||||
const args: string[] = ['run', '-i', '--rm', '--name', containerName];
|
const args: string[] = ['run', '-i', '--rm', '--name', containerName];
|
||||||
|
|
||||||
// Pass host timezone so container's local time matches the user's
|
// Pass host timezone so container's local time matches the user's
|
||||||
@@ -240,21 +251,19 @@ function buildContainerArgs(
|
|||||||
// Prefer IPv4 for DNS resolution to avoid potential delays
|
// Prefer IPv4 for DNS resolution to avoid potential delays
|
||||||
args.push('-e', 'NODE_OPTIONS=--dns-result-order=ipv4first');
|
args.push('-e', 'NODE_OPTIONS=--dns-result-order=ipv4first');
|
||||||
|
|
||||||
// Route API traffic through the credential proxy (containers never see real secrets)
|
// OneCLI gateway handles credential injection — containers never see real secrets.
|
||||||
args.push(
|
// The gateway intercepts HTTPS traffic and injects API keys or OAuth tokens.
|
||||||
'-e',
|
const onecliApplied = await onecli.applyContainerConfig(args, {
|
||||||
`ANTHROPIC_BASE_URL=http://${CONTAINER_HOST_GATEWAY}:${CREDENTIAL_PROXY_PORT}`,
|
addHostMapping: false, // Nanoclaw already handles host gateway
|
||||||
);
|
agent: agentIdentifier,
|
||||||
|
});
|
||||||
// Mirror the host's auth method with a placeholder value.
|
if (onecliApplied) {
|
||||||
// API key mode: SDK sends x-api-key, proxy replaces with real key.
|
logger.info({ containerName }, 'OneCLI gateway config applied');
|
||||||
// OAuth mode: SDK exchanges placeholder token for temp API key,
|
|
||||||
// proxy injects real OAuth token on that exchange request.
|
|
||||||
const authMode = detectAuthMode();
|
|
||||||
if (authMode === 'api-key') {
|
|
||||||
args.push('-e', 'ANTHROPIC_API_KEY=placeholder');
|
|
||||||
} else {
|
} else {
|
||||||
args.push('-e', 'CLAUDE_CODE_OAUTH_TOKEN=placeholder');
|
logger.warn(
|
||||||
|
{ containerName },
|
||||||
|
'OneCLI gateway not reachable — container will have no credentials',
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Forward model and SDK configuration from .env to the container.
|
// Forward model and SDK configuration from .env to the container.
|
||||||
@@ -321,7 +330,16 @@ export async function runContainerAgent(
|
|||||||
const mounts = buildVolumeMounts(group, input.isMain);
|
const mounts = buildVolumeMounts(group, input.isMain);
|
||||||
const safeName = group.folder.replace(/[^a-zA-Z0-9-]/g, '-');
|
const safeName = group.folder.replace(/[^a-zA-Z0-9-]/g, '-');
|
||||||
const containerName = `nanoclaw-${safeName}-${Date.now()}`;
|
const containerName = `nanoclaw-${safeName}-${Date.now()}`;
|
||||||
const containerArgs = buildContainerArgs(mounts, containerName, input.isMain);
|
// Main group uses the default OneCLI agent; others use their own agent.
|
||||||
|
const agentIdentifier = input.isMain
|
||||||
|
? undefined
|
||||||
|
: group.folder.toLowerCase().replace(/_/g, '-');
|
||||||
|
const containerArgs = await buildContainerArgs(
|
||||||
|
mounts,
|
||||||
|
containerName,
|
||||||
|
input.isMain,
|
||||||
|
agentIdentifier,
|
||||||
|
);
|
||||||
|
|
||||||
logger.debug(
|
logger.debug(
|
||||||
{
|
{
|
||||||
@@ -456,15 +474,15 @@ export async function runContainerAgent(
|
|||||||
{ group: group.name, containerName },
|
{ group: group.name, containerName },
|
||||||
'Container timeout, stopping gracefully',
|
'Container timeout, stopping gracefully',
|
||||||
);
|
);
|
||||||
exec(stopContainer(containerName), { timeout: 15000 }, (err) => {
|
try {
|
||||||
if (err) {
|
stopContainer(containerName);
|
||||||
|
} catch (err) {
|
||||||
logger.warn(
|
logger.warn(
|
||||||
{ group: group.name, containerName, err },
|
{ group: group.name, containerName, err },
|
||||||
'Graceful stop failed, force killing',
|
'Graceful stop failed, force killing',
|
||||||
);
|
);
|
||||||
container.kill('SIGKILL');
|
container.kill('SIGKILL');
|
||||||
}
|
}
|
||||||
});
|
|
||||||
};
|
};
|
||||||
|
|
||||||
let timeout = setTimeout(killOnTimeout, timeoutMs);
|
let timeout = setTimeout(killOnTimeout, timeoutMs);
|
||||||
@@ -702,6 +720,7 @@ export function writeTasksSnapshot(
|
|||||||
id: string;
|
id: string;
|
||||||
groupFolder: string;
|
groupFolder: string;
|
||||||
prompt: string;
|
prompt: string;
|
||||||
|
script?: string | null;
|
||||||
schedule_type: string;
|
schedule_type: string;
|
||||||
schedule_value: string;
|
schedule_value: string;
|
||||||
status: string;
|
status: string;
|
||||||
|
|||||||
@@ -42,11 +42,20 @@ describe('readonlyMountArgs', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
describe('stopContainer', () => {
|
describe('stopContainer', () => {
|
||||||
it('returns stop command using CONTAINER_RUNTIME_BIN', () => {
|
it('calls docker stop for valid container names', () => {
|
||||||
expect(stopContainer('nanoclaw-test-123')).toBe(
|
stopContainer('nanoclaw-test-123');
|
||||||
`${CONTAINER_RUNTIME_BIN} stop nanoclaw-test-123`,
|
expect(mockExecSync).toHaveBeenCalledWith(
|
||||||
|
`${CONTAINER_RUNTIME_BIN} stop -t 1 nanoclaw-test-123`,
|
||||||
|
{ stdio: 'pipe' },
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('rejects names with shell metacharacters', () => {
|
||||||
|
expect(() => stopContainer('foo; rm -rf /')).toThrow('Invalid container name');
|
||||||
|
expect(() => stopContainer('foo$(whoami)')).toThrow('Invalid container name');
|
||||||
|
expect(() => stopContainer('foo`id`')).toThrow('Invalid container name');
|
||||||
|
expect(mockExecSync).not.toHaveBeenCalled();
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
// --- ensureContainerRuntimeRunning ---
|
// --- ensureContainerRuntimeRunning ---
|
||||||
@@ -59,82 +68,57 @@ describe('ensureContainerRuntimeRunning', () => {
|
|||||||
|
|
||||||
expect(mockExecSync).toHaveBeenCalledTimes(1);
|
expect(mockExecSync).toHaveBeenCalledTimes(1);
|
||||||
expect(mockExecSync).toHaveBeenCalledWith(
|
expect(mockExecSync).toHaveBeenCalledWith(
|
||||||
`${CONTAINER_RUNTIME_BIN} system status`,
|
`${CONTAINER_RUNTIME_BIN} info`,
|
||||||
{ stdio: 'pipe' },
|
{ stdio: 'pipe' },
|
||||||
);
|
);
|
||||||
expect(logger.debug).toHaveBeenCalledWith(
|
expect(logger.debug).toHaveBeenCalledWith(
|
||||||
'Container runtime already running',
|
'Docker runtime already running',
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('auto-starts when system status fails', () => {
|
it('throws when docker info fails', () => {
|
||||||
// First call (system status) fails
|
|
||||||
mockExecSync.mockImplementationOnce(() => {
|
mockExecSync.mockImplementationOnce(() => {
|
||||||
throw new Error('not running');
|
throw new Error('Cannot connect to the Docker daemon');
|
||||||
});
|
|
||||||
// Second call (system start) succeeds
|
|
||||||
mockExecSync.mockReturnValueOnce('');
|
|
||||||
|
|
||||||
ensureContainerRuntimeRunning();
|
|
||||||
|
|
||||||
expect(mockExecSync).toHaveBeenCalledTimes(2);
|
|
||||||
expect(mockExecSync).toHaveBeenNthCalledWith(
|
|
||||||
2,
|
|
||||||
`${CONTAINER_RUNTIME_BIN} system start`,
|
|
||||||
{ stdio: 'pipe', timeout: 30000 },
|
|
||||||
);
|
|
||||||
expect(logger.info).toHaveBeenCalledWith('Container runtime started');
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('throws when both status and start fail', () => {
|
expect(() => ensureContainerRuntimeRunning()).toThrow();
|
||||||
mockExecSync.mockImplementation(() => {
|
expect(logger.fatal).toHaveBeenCalled();
|
||||||
throw new Error('failed');
|
|
||||||
});
|
|
||||||
|
|
||||||
expect(() => ensureContainerRuntimeRunning()).toThrow(
|
|
||||||
'Container runtime is required but failed to start',
|
|
||||||
);
|
|
||||||
expect(logger.error).toHaveBeenCalled();
|
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
// --- cleanupOrphans ---
|
// --- cleanupOrphans ---
|
||||||
|
|
||||||
describe('cleanupOrphans', () => {
|
describe('cleanupOrphans', () => {
|
||||||
it('stops orphaned nanoclaw containers from JSON output', () => {
|
it('stops orphaned nanoclaw containers', () => {
|
||||||
// Apple Container ls returns JSON
|
// docker ps returns container names, one per line
|
||||||
const lsOutput = JSON.stringify([
|
mockExecSync.mockReturnValueOnce(
|
||||||
{ status: 'running', configuration: { id: 'nanoclaw-group1-111' } },
|
'nanoclaw-group1-111\nnanoclaw-group2-222\n',
|
||||||
{ status: 'stopped', configuration: { id: 'nanoclaw-group2-222' } },
|
);
|
||||||
{ status: 'running', configuration: { id: 'nanoclaw-group3-333' } },
|
|
||||||
{ status: 'running', configuration: { id: 'other-container' } },
|
|
||||||
]);
|
|
||||||
mockExecSync.mockReturnValueOnce(lsOutput);
|
|
||||||
// stop calls succeed
|
// stop calls succeed
|
||||||
mockExecSync.mockReturnValue('');
|
mockExecSync.mockReturnValue('');
|
||||||
|
|
||||||
cleanupOrphans();
|
cleanupOrphans();
|
||||||
|
|
||||||
// ls + 2 stop calls (only running nanoclaw- containers)
|
// ps + 2 stop calls
|
||||||
expect(mockExecSync).toHaveBeenCalledTimes(3);
|
expect(mockExecSync).toHaveBeenCalledTimes(3);
|
||||||
expect(mockExecSync).toHaveBeenNthCalledWith(
|
expect(mockExecSync).toHaveBeenNthCalledWith(
|
||||||
2,
|
2,
|
||||||
`${CONTAINER_RUNTIME_BIN} stop nanoclaw-group1-111`,
|
`${CONTAINER_RUNTIME_BIN} stop -t 1 nanoclaw-group1-111`,
|
||||||
{ stdio: 'pipe' },
|
{ stdio: 'pipe' },
|
||||||
);
|
);
|
||||||
expect(mockExecSync).toHaveBeenNthCalledWith(
|
expect(mockExecSync).toHaveBeenNthCalledWith(
|
||||||
3,
|
3,
|
||||||
`${CONTAINER_RUNTIME_BIN} stop nanoclaw-group3-333`,
|
`${CONTAINER_RUNTIME_BIN} stop -t 1 nanoclaw-group2-222`,
|
||||||
{ stdio: 'pipe' },
|
{ stdio: 'pipe' },
|
||||||
);
|
);
|
||||||
expect(logger.info).toHaveBeenCalledWith(
|
expect(logger.info).toHaveBeenCalledWith(
|
||||||
{ count: 2, names: ['nanoclaw-group1-111', 'nanoclaw-group3-333'] },
|
{ count: 2, names: ['nanoclaw-group1-111', 'nanoclaw-group2-222'] },
|
||||||
'Stopped orphaned containers',
|
'Stopped orphaned containers',
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('does nothing when no orphans exist', () => {
|
it('does nothing when no orphans exist', () => {
|
||||||
mockExecSync.mockReturnValueOnce('[]');
|
mockExecSync.mockReturnValueOnce('');
|
||||||
|
|
||||||
cleanupOrphans();
|
cleanupOrphans();
|
||||||
|
|
||||||
@@ -142,9 +126,9 @@ describe('cleanupOrphans', () => {
|
|||||||
expect(logger.info).not.toHaveBeenCalled();
|
expect(logger.info).not.toHaveBeenCalled();
|
||||||
});
|
});
|
||||||
|
|
||||||
it('warns and continues when ls fails', () => {
|
it('warns and continues when ps fails', () => {
|
||||||
mockExecSync.mockImplementationOnce(() => {
|
mockExecSync.mockImplementationOnce(() => {
|
||||||
throw new Error('container not available');
|
throw new Error('docker not available');
|
||||||
});
|
});
|
||||||
|
|
||||||
cleanupOrphans(); // should not throw
|
cleanupOrphans(); // should not throw
|
||||||
@@ -156,11 +140,7 @@ describe('cleanupOrphans', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it('continues stopping remaining containers when one stop fails', () => {
|
it('continues stopping remaining containers when one stop fails', () => {
|
||||||
const lsOutput = JSON.stringify([
|
mockExecSync.mockReturnValueOnce('nanoclaw-a-1\nnanoclaw-b-2\n');
|
||||||
{ status: 'running', configuration: { id: 'nanoclaw-a-1' } },
|
|
||||||
{ status: 'running', configuration: { id: 'nanoclaw-b-2' } },
|
|
||||||
]);
|
|
||||||
mockExecSync.mockReturnValueOnce(lsOutput);
|
|
||||||
// First stop fails
|
// First stop fails
|
||||||
mockExecSync.mockImplementationOnce(() => {
|
mockExecSync.mockImplementationOnce(() => {
|
||||||
throw new Error('already stopped');
|
throw new Error('already stopped');
|
||||||
|
|||||||
@@ -3,39 +3,12 @@
|
|||||||
* All runtime-specific logic lives here so swapping runtimes means changing one file.
|
* All runtime-specific logic lives here so swapping runtimes means changing one file.
|
||||||
*/
|
*/
|
||||||
import { execSync } from 'child_process';
|
import { execSync } from 'child_process';
|
||||||
import fs from 'fs';
|
|
||||||
import os from 'os';
|
import os from 'os';
|
||||||
|
|
||||||
import { logger } from './logger.js';
|
import { logger } from './logger.js';
|
||||||
|
|
||||||
/** The container runtime binary name. Switched to docker (OrbStack) for stability. */
|
/** The container runtime binary name. Switched to docker (OrbStack) for stability. */
|
||||||
export const CONTAINER_RUNTIME_BIN = 'docker';
|
export const CONTAINER_RUNTIME_BIN: string = 'docker';
|
||||||
|
|
||||||
/** Hostname containers use to reach the host machine. */
|
|
||||||
export const CONTAINER_HOST_GATEWAY = 'host.docker.internal';
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Address the credential proxy binds to.
|
|
||||||
* Docker Desktop/OrbStack (macOS): 127.0.0.1 — the VM routes host.docker.internal to loopback.
|
|
||||||
*/
|
|
||||||
export const PROXY_BIND_HOST =
|
|
||||||
process.env.CREDENTIAL_PROXY_HOST || detectProxyBindHost();
|
|
||||||
|
|
||||||
function detectProxyBindHost(): string {
|
|
||||||
if (os.platform() === 'darwin') return '127.0.0.1';
|
|
||||||
|
|
||||||
// WSL uses Docker Desktop (same VM routing as macOS) — loopback is correct.
|
|
||||||
if (fs.existsSync('/proc/sys/fs/binfmt_misc/WSLInterop')) return '127.0.0.1';
|
|
||||||
|
|
||||||
// Bare-metal Linux: bind to the docker0 bridge IP instead of 0.0.0.0
|
|
||||||
const ifaces = os.networkInterfaces();
|
|
||||||
const docker0 = ifaces['docker0'];
|
|
||||||
if (docker0) {
|
|
||||||
const ipv4 = docker0.find((a) => a.family === 'IPv4');
|
|
||||||
if (ipv4) return ipv4.address;
|
|
||||||
}
|
|
||||||
return '0.0.0.0';
|
|
||||||
}
|
|
||||||
|
|
||||||
/** CLI args needed for the container to resolve the host gateway. */
|
/** CLI args needed for the container to resolve the host gateway. */
|
||||||
export function hostGatewayArgs(): string[] {
|
export function hostGatewayArgs(): string[] {
|
||||||
@@ -58,9 +31,12 @@ export function readonlyMountArgs(
|
|||||||
];
|
];
|
||||||
}
|
}
|
||||||
|
|
||||||
/** Returns the shell command to stop a container by name. */
|
/** Stop a container by name. Uses execFileSync to avoid shell injection. */
|
||||||
export function stopContainer(name: string): string {
|
export function stopContainer(name: string): void {
|
||||||
return `${CONTAINER_RUNTIME_BIN} stop ${name}`;
|
if (!/^[a-zA-Z0-9][a-zA-Z0-9_.-]*$/.test(name)) {
|
||||||
|
throw new Error(`Invalid container name: ${name}`);
|
||||||
|
}
|
||||||
|
execSync(`${CONTAINER_RUNTIME_BIN} stop -t 1 ${name}`, { stdio: 'pipe' });
|
||||||
}
|
}
|
||||||
|
|
||||||
/** Ensure the container runtime is running, starting it if needed. */
|
/** Ensure the container runtime is running, starting it if needed. */
|
||||||
@@ -90,8 +66,7 @@ export function cleanupOrphans(): void {
|
|||||||
const orphans = output.split('\n').filter(Boolean);
|
const orphans = output.split('\n').filter(Boolean);
|
||||||
for (const name of orphans) {
|
for (const name of orphans) {
|
||||||
try {
|
try {
|
||||||
execSync(stopContainer(name), { stdio: 'pipe' });
|
stopContainer(name);
|
||||||
execSync(`${CONTAINER_RUNTIME_BIN} rm ${name}`, { stdio: 'pipe' });
|
|
||||||
} catch {
|
} catch {
|
||||||
/* already stopped */
|
/* already stopped */
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,192 +0,0 @@
|
|||||||
import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
|
|
||||||
import http from 'http';
|
|
||||||
import type { AddressInfo } from 'net';
|
|
||||||
|
|
||||||
const mockEnv: Record<string, string> = {};
|
|
||||||
vi.mock('./env.js', () => ({
|
|
||||||
readEnvFile: vi.fn(() => ({ ...mockEnv })),
|
|
||||||
}));
|
|
||||||
|
|
||||||
vi.mock('./logger.js', () => ({
|
|
||||||
logger: { info: vi.fn(), error: vi.fn(), debug: vi.fn(), warn: vi.fn() },
|
|
||||||
}));
|
|
||||||
|
|
||||||
import { startCredentialProxy } from './credential-proxy.js';
|
|
||||||
|
|
||||||
function makeRequest(
|
|
||||||
port: number,
|
|
||||||
options: http.RequestOptions,
|
|
||||||
body = '',
|
|
||||||
): Promise<{
|
|
||||||
statusCode: number;
|
|
||||||
body: string;
|
|
||||||
headers: http.IncomingHttpHeaders;
|
|
||||||
}> {
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
const req = http.request(
|
|
||||||
{ ...options, hostname: '127.0.0.1', port },
|
|
||||||
(res) => {
|
|
||||||
const chunks: Buffer[] = [];
|
|
||||||
res.on('data', (c) => chunks.push(c));
|
|
||||||
res.on('end', () => {
|
|
||||||
resolve({
|
|
||||||
statusCode: res.statusCode!,
|
|
||||||
body: Buffer.concat(chunks).toString(),
|
|
||||||
headers: res.headers,
|
|
||||||
});
|
|
||||||
});
|
|
||||||
},
|
|
||||||
);
|
|
||||||
req.on('error', reject);
|
|
||||||
req.write(body);
|
|
||||||
req.end();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
describe('credential-proxy', () => {
|
|
||||||
let proxyServer: http.Server;
|
|
||||||
let upstreamServer: http.Server;
|
|
||||||
let proxyPort: number;
|
|
||||||
let upstreamPort: number;
|
|
||||||
let lastUpstreamHeaders: http.IncomingHttpHeaders;
|
|
||||||
|
|
||||||
beforeEach(async () => {
|
|
||||||
lastUpstreamHeaders = {};
|
|
||||||
|
|
||||||
upstreamServer = http.createServer((req, res) => {
|
|
||||||
lastUpstreamHeaders = { ...req.headers };
|
|
||||||
res.writeHead(200, { 'content-type': 'application/json' });
|
|
||||||
res.end(JSON.stringify({ ok: true }));
|
|
||||||
});
|
|
||||||
await new Promise<void>((resolve) =>
|
|
||||||
upstreamServer.listen(0, '127.0.0.1', resolve),
|
|
||||||
);
|
|
||||||
upstreamPort = (upstreamServer.address() as AddressInfo).port;
|
|
||||||
});
|
|
||||||
|
|
||||||
afterEach(async () => {
|
|
||||||
await new Promise<void>((r) => proxyServer?.close(() => r()));
|
|
||||||
await new Promise<void>((r) => upstreamServer?.close(() => r()));
|
|
||||||
for (const key of Object.keys(mockEnv)) delete mockEnv[key];
|
|
||||||
});
|
|
||||||
|
|
||||||
async function startProxy(env: Record<string, string>): Promise<number> {
|
|
||||||
Object.assign(mockEnv, env, {
|
|
||||||
ANTHROPIC_BASE_URL: `http://127.0.0.1:${upstreamPort}`,
|
|
||||||
});
|
|
||||||
proxyServer = await startCredentialProxy(0);
|
|
||||||
return (proxyServer.address() as AddressInfo).port;
|
|
||||||
}
|
|
||||||
|
|
||||||
it('API-key mode injects x-api-key and strips placeholder', async () => {
|
|
||||||
proxyPort = await startProxy({ ANTHROPIC_API_KEY: 'sk-ant-real-key' });
|
|
||||||
|
|
||||||
await makeRequest(
|
|
||||||
proxyPort,
|
|
||||||
{
|
|
||||||
method: 'POST',
|
|
||||||
path: '/v1/messages',
|
|
||||||
headers: {
|
|
||||||
'content-type': 'application/json',
|
|
||||||
'x-api-key': 'placeholder',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
'{}',
|
|
||||||
);
|
|
||||||
|
|
||||||
expect(lastUpstreamHeaders['x-api-key']).toBe('sk-ant-real-key');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('OAuth mode replaces Authorization when container sends one', async () => {
|
|
||||||
proxyPort = await startProxy({
|
|
||||||
CLAUDE_CODE_OAUTH_TOKEN: 'real-oauth-token',
|
|
||||||
});
|
|
||||||
|
|
||||||
await makeRequest(
|
|
||||||
proxyPort,
|
|
||||||
{
|
|
||||||
method: 'POST',
|
|
||||||
path: '/api/oauth/claude_cli/create_api_key',
|
|
||||||
headers: {
|
|
||||||
'content-type': 'application/json',
|
|
||||||
authorization: 'Bearer placeholder',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
'{}',
|
|
||||||
);
|
|
||||||
|
|
||||||
expect(lastUpstreamHeaders['authorization']).toBe(
|
|
||||||
'Bearer real-oauth-token',
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('OAuth mode does not inject Authorization when container omits it', async () => {
|
|
||||||
proxyPort = await startProxy({
|
|
||||||
CLAUDE_CODE_OAUTH_TOKEN: 'real-oauth-token',
|
|
||||||
});
|
|
||||||
|
|
||||||
// Post-exchange: container uses x-api-key only, no Authorization header
|
|
||||||
await makeRequest(
|
|
||||||
proxyPort,
|
|
||||||
{
|
|
||||||
method: 'POST',
|
|
||||||
path: '/v1/messages',
|
|
||||||
headers: {
|
|
||||||
'content-type': 'application/json',
|
|
||||||
'x-api-key': 'temp-key-from-exchange',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
'{}',
|
|
||||||
);
|
|
||||||
|
|
||||||
expect(lastUpstreamHeaders['x-api-key']).toBe('temp-key-from-exchange');
|
|
||||||
expect(lastUpstreamHeaders['authorization']).toBeUndefined();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('strips hop-by-hop headers', async () => {
|
|
||||||
proxyPort = await startProxy({ ANTHROPIC_API_KEY: 'sk-ant-real-key' });
|
|
||||||
|
|
||||||
await makeRequest(
|
|
||||||
proxyPort,
|
|
||||||
{
|
|
||||||
method: 'POST',
|
|
||||||
path: '/v1/messages',
|
|
||||||
headers: {
|
|
||||||
'content-type': 'application/json',
|
|
||||||
connection: 'keep-alive',
|
|
||||||
'keep-alive': 'timeout=5',
|
|
||||||
'transfer-encoding': 'chunked',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
'{}',
|
|
||||||
);
|
|
||||||
|
|
||||||
// Proxy strips client hop-by-hop headers. Node's HTTP client may re-add
|
|
||||||
// its own Connection header (standard HTTP/1.1 behavior), but the client's
|
|
||||||
// custom keep-alive and transfer-encoding must not be forwarded.
|
|
||||||
expect(lastUpstreamHeaders['keep-alive']).toBeUndefined();
|
|
||||||
expect(lastUpstreamHeaders['transfer-encoding']).toBeUndefined();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('returns 502 when upstream is unreachable', async () => {
|
|
||||||
Object.assign(mockEnv, {
|
|
||||||
ANTHROPIC_API_KEY: 'sk-ant-real-key',
|
|
||||||
ANTHROPIC_BASE_URL: 'http://127.0.0.1:59999',
|
|
||||||
});
|
|
||||||
proxyServer = await startCredentialProxy(0);
|
|
||||||
proxyPort = (proxyServer.address() as AddressInfo).port;
|
|
||||||
|
|
||||||
const res = await makeRequest(
|
|
||||||
proxyPort,
|
|
||||||
{
|
|
||||||
method: 'POST',
|
|
||||||
path: '/v1/messages',
|
|
||||||
headers: { 'content-type': 'application/json' },
|
|
||||||
},
|
|
||||||
'{}',
|
|
||||||
);
|
|
||||||
|
|
||||||
expect(res.statusCode).toBe(502);
|
|
||||||
expect(res.body).toBe('Bad Gateway');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
@@ -1,128 +0,0 @@
|
|||||||
/**
|
|
||||||
* Credential proxy for container isolation.
|
|
||||||
* Containers connect here instead of directly to the Anthropic API.
|
|
||||||
* The proxy injects real credentials so containers never see them.
|
|
||||||
*
|
|
||||||
* Two auth modes:
|
|
||||||
* API key: Proxy injects x-api-key on every request.
|
|
||||||
* OAuth: Container CLI exchanges its placeholder token for a temp
|
|
||||||
* API key via /api/oauth/claude_cli/create_api_key.
|
|
||||||
* Proxy injects real OAuth token on that exchange request;
|
|
||||||
* subsequent requests carry the temp key which is valid as-is.
|
|
||||||
*/
|
|
||||||
import { createServer, Server } from 'http';
|
|
||||||
import { request as httpsRequest } from 'https';
|
|
||||||
import { request as httpRequest, RequestOptions } from 'http';
|
|
||||||
|
|
||||||
import { readEnvFile } from './env.js';
|
|
||||||
import { logger } from './logger.js';
|
|
||||||
|
|
||||||
export type AuthMode = 'api-key' | 'oauth';
|
|
||||||
|
|
||||||
export interface ProxyConfig {
|
|
||||||
authMode: AuthMode;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function startCredentialProxy(
|
|
||||||
port: number,
|
|
||||||
host = '0.0.0.0',
|
|
||||||
): Promise<Server> {
|
|
||||||
const secrets = readEnvFile([
|
|
||||||
'ANTHROPIC_API_KEY',
|
|
||||||
'CLAUDE_CODE_OAUTH_TOKEN',
|
|
||||||
'ANTHROPIC_AUTH_TOKEN',
|
|
||||||
'ANTHROPIC_BASE_URL',
|
|
||||||
]);
|
|
||||||
|
|
||||||
const authMode: AuthMode = secrets.ANTHROPIC_API_KEY ? 'api-key' : 'oauth';
|
|
||||||
const oauthToken =
|
|
||||||
secrets.CLAUDE_CODE_OAUTH_TOKEN || secrets.ANTHROPIC_AUTH_TOKEN;
|
|
||||||
|
|
||||||
const upstreamUrl = new URL(
|
|
||||||
secrets.ANTHROPIC_BASE_URL || 'https://api.anthropic.com',
|
|
||||||
);
|
|
||||||
// Preserve the base URL's pathname prefix (e.g. /anthropic for MiniMax)
|
|
||||||
// so requests to /v1/messages become /anthropic/v1/messages upstream.
|
|
||||||
const basePath = upstreamUrl.pathname.replace(/\/+$/, '');
|
|
||||||
const isHttps = upstreamUrl.protocol === 'https:';
|
|
||||||
const makeRequest = isHttps ? httpsRequest : httpRequest;
|
|
||||||
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
const server = createServer((req, res) => {
|
|
||||||
const chunks: Buffer[] = [];
|
|
||||||
req.on('data', (c) => chunks.push(c));
|
|
||||||
req.on('end', () => {
|
|
||||||
const body = Buffer.concat(chunks);
|
|
||||||
const headers: Record<string, string | number | string[] | undefined> =
|
|
||||||
{
|
|
||||||
...(req.headers as Record<string, string>),
|
|
||||||
host: upstreamUrl.host,
|
|
||||||
'content-length': body.length,
|
|
||||||
};
|
|
||||||
|
|
||||||
// Strip hop-by-hop headers that must not be forwarded by proxies
|
|
||||||
delete headers['connection'];
|
|
||||||
delete headers['keep-alive'];
|
|
||||||
delete headers['transfer-encoding'];
|
|
||||||
|
|
||||||
if (authMode === 'api-key') {
|
|
||||||
// API key mode: inject x-api-key on every request
|
|
||||||
delete headers['x-api-key'];
|
|
||||||
headers['x-api-key'] = secrets.ANTHROPIC_API_KEY;
|
|
||||||
} else {
|
|
||||||
// OAuth mode: replace placeholder Bearer token with the real one
|
|
||||||
// only when the container actually sends an Authorization header
|
|
||||||
// (exchange request + auth probes). Post-exchange requests use
|
|
||||||
// x-api-key only, so they pass through without token injection.
|
|
||||||
if (headers['authorization']) {
|
|
||||||
delete headers['authorization'];
|
|
||||||
if (oauthToken) {
|
|
||||||
headers['authorization'] = `Bearer ${oauthToken}`;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const upstream = makeRequest(
|
|
||||||
{
|
|
||||||
hostname: upstreamUrl.hostname,
|
|
||||||
port: upstreamUrl.port || (isHttps ? 443 : 80),
|
|
||||||
path: basePath + req.url,
|
|
||||||
method: req.method,
|
|
||||||
headers,
|
|
||||||
} as RequestOptions,
|
|
||||||
(upRes) => {
|
|
||||||
res.writeHead(upRes.statusCode!, upRes.headers);
|
|
||||||
upRes.pipe(res);
|
|
||||||
},
|
|
||||||
);
|
|
||||||
|
|
||||||
upstream.on('error', (err) => {
|
|
||||||
logger.error(
|
|
||||||
{ err, url: req.url },
|
|
||||||
'Credential proxy upstream error',
|
|
||||||
);
|
|
||||||
if (!res.headersSent) {
|
|
||||||
res.writeHead(502);
|
|
||||||
res.end('Bad Gateway');
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
upstream.write(body);
|
|
||||||
upstream.end();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
server.listen(port, host, () => {
|
|
||||||
logger.info({ port, host, authMode }, 'Credential proxy started');
|
|
||||||
resolve(server);
|
|
||||||
});
|
|
||||||
|
|
||||||
server.on('error', reject);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
/** Detect which auth mode the host is configured for. */
|
|
||||||
export function detectAuthMode(): AuthMode {
|
|
||||||
const secrets = readEnvFile(['ANTHROPIC_API_KEY']);
|
|
||||||
return secrets.ANTHROPIC_API_KEY ? 'api-key' : 'oauth';
|
|
||||||
}
|
|
||||||
67
src/db-migration.test.ts
Normal file
67
src/db-migration.test.ts
Normal file
@@ -0,0 +1,67 @@
|
|||||||
|
import Database from 'better-sqlite3';
|
||||||
|
import fs from 'fs';
|
||||||
|
import os from 'os';
|
||||||
|
import path from 'path';
|
||||||
|
import { describe, expect, it, vi } from 'vitest';
|
||||||
|
|
||||||
|
describe('database migrations', () => {
|
||||||
|
it('defaults Telegram backfill chats to direct messages', async () => {
|
||||||
|
const repoRoot = process.cwd();
|
||||||
|
const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'nanoclaw-db-test-'));
|
||||||
|
|
||||||
|
try {
|
||||||
|
process.chdir(tempDir);
|
||||||
|
fs.mkdirSync(path.join(tempDir, 'store'), { recursive: true });
|
||||||
|
|
||||||
|
const dbPath = path.join(tempDir, 'store', 'messages.db');
|
||||||
|
const legacyDb = new Database(dbPath);
|
||||||
|
legacyDb.exec(`
|
||||||
|
CREATE TABLE chats (
|
||||||
|
jid TEXT PRIMARY KEY,
|
||||||
|
name TEXT,
|
||||||
|
last_message_time TEXT
|
||||||
|
);
|
||||||
|
`);
|
||||||
|
legacyDb
|
||||||
|
.prepare(
|
||||||
|
`INSERT INTO chats (jid, name, last_message_time) VALUES (?, ?, ?)`,
|
||||||
|
)
|
||||||
|
.run('tg:12345', 'Telegram DM', '2024-01-01T00:00:00.000Z');
|
||||||
|
legacyDb
|
||||||
|
.prepare(
|
||||||
|
`INSERT INTO chats (jid, name, last_message_time) VALUES (?, ?, ?)`,
|
||||||
|
)
|
||||||
|
.run('tg:-10012345', 'Telegram Group', '2024-01-01T00:00:01.000Z');
|
||||||
|
legacyDb
|
||||||
|
.prepare(
|
||||||
|
`INSERT INTO chats (jid, name, last_message_time) VALUES (?, ?, ?)`,
|
||||||
|
)
|
||||||
|
.run('room@g.us', 'WhatsApp Group', '2024-01-01T00:00:02.000Z');
|
||||||
|
legacyDb.close();
|
||||||
|
|
||||||
|
vi.resetModules();
|
||||||
|
const { initDatabase, getAllChats, _closeDatabase } =
|
||||||
|
await import('./db.js');
|
||||||
|
|
||||||
|
initDatabase();
|
||||||
|
|
||||||
|
const chats = getAllChats();
|
||||||
|
expect(chats.find((chat) => chat.jid === 'tg:12345')).toMatchObject({
|
||||||
|
channel: 'telegram',
|
||||||
|
is_group: 0,
|
||||||
|
});
|
||||||
|
expect(chats.find((chat) => chat.jid === 'tg:-10012345')).toMatchObject({
|
||||||
|
channel: 'telegram',
|
||||||
|
is_group: 0,
|
||||||
|
});
|
||||||
|
expect(chats.find((chat) => chat.jid === 'room@g.us')).toMatchObject({
|
||||||
|
channel: 'whatsapp',
|
||||||
|
is_group: 1,
|
||||||
|
});
|
||||||
|
|
||||||
|
_closeDatabase();
|
||||||
|
} finally {
|
||||||
|
process.chdir(repoRoot);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
@@ -6,6 +6,7 @@ import {
|
|||||||
deleteTask,
|
deleteTask,
|
||||||
getAllChats,
|
getAllChats,
|
||||||
getAllRegisteredGroups,
|
getAllRegisteredGroups,
|
||||||
|
getLastBotMessageTimestamp,
|
||||||
getMessagesSince,
|
getMessagesSince,
|
||||||
getNewMessages,
|
getNewMessages,
|
||||||
getTaskById,
|
getTaskById,
|
||||||
@@ -14,6 +15,7 @@ import {
|
|||||||
storeMessage,
|
storeMessage,
|
||||||
updateTask,
|
updateTask,
|
||||||
} from './db.js';
|
} from './db.js';
|
||||||
|
import { formatMessages } from './router.js';
|
||||||
|
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
_initTestDatabase();
|
_initTestDatabase();
|
||||||
@@ -208,6 +210,92 @@ describe('getMessagesSince', () => {
|
|||||||
expect(msgs).toHaveLength(3);
|
expect(msgs).toHaveLength(3);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('recovers cursor from last bot reply when lastAgentTimestamp is missing', () => {
|
||||||
|
// beforeEach already inserts m3 (bot reply at 00:00:03) and m4 (user at 00:00:04)
|
||||||
|
// Add more old history before the bot reply
|
||||||
|
for (let i = 1; i <= 50; i++) {
|
||||||
|
store({
|
||||||
|
id: `history-${i}`,
|
||||||
|
chat_jid: 'group@g.us',
|
||||||
|
sender: 'user@s.whatsapp.net',
|
||||||
|
sender_name: 'User',
|
||||||
|
content: `old message ${i}`,
|
||||||
|
timestamp: `2023-06-${String(i).padStart(2, '0')}T12:00:00.000Z`,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// New message after the bot reply (m3 at 00:00:03)
|
||||||
|
store({
|
||||||
|
id: 'new-1',
|
||||||
|
chat_jid: 'group@g.us',
|
||||||
|
sender: 'user@s.whatsapp.net',
|
||||||
|
sender_name: 'User',
|
||||||
|
content: 'new message after bot reply',
|
||||||
|
timestamp: '2024-01-02T00:00:00.000Z',
|
||||||
|
});
|
||||||
|
|
||||||
|
// Recover cursor from the last bot message (m3 from beforeEach)
|
||||||
|
const recovered = getLastBotMessageTimestamp('group@g.us', 'Andy');
|
||||||
|
expect(recovered).toBe('2024-01-01T00:00:03.000Z');
|
||||||
|
|
||||||
|
// Using recovered cursor: only gets messages after the bot reply
|
||||||
|
const msgs = getMessagesSince('group@g.us', recovered!, 'Andy', 10);
|
||||||
|
// m4 (third, 00:00:04) + new-1 — skips all 50 old messages and m1/m2
|
||||||
|
expect(msgs).toHaveLength(2);
|
||||||
|
expect(msgs[0].content).toBe('third');
|
||||||
|
expect(msgs[1].content).toBe('new message after bot reply');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('caps messages to configured limit even with recovered cursor', () => {
|
||||||
|
// beforeEach inserts m3 (bot at 00:00:03). Add 30 messages after it.
|
||||||
|
for (let i = 1; i <= 30; i++) {
|
||||||
|
store({
|
||||||
|
id: `pending-${i}`,
|
||||||
|
chat_jid: 'group@g.us',
|
||||||
|
sender: 'user@s.whatsapp.net',
|
||||||
|
sender_name: 'User',
|
||||||
|
content: `pending message ${i}`,
|
||||||
|
timestamp: `2024-02-${String(i).padStart(2, '0')}T12:00:00.000Z`,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const recovered = getLastBotMessageTimestamp('group@g.us', 'Andy');
|
||||||
|
expect(recovered).toBe('2024-01-01T00:00:03.000Z');
|
||||||
|
|
||||||
|
// With limit=10, only the 10 most recent are returned
|
||||||
|
const msgs = getMessagesSince('group@g.us', recovered!, 'Andy', 10);
|
||||||
|
expect(msgs).toHaveLength(10);
|
||||||
|
// Most recent 10: pending-21 through pending-30
|
||||||
|
expect(msgs[0].content).toBe('pending message 21');
|
||||||
|
expect(msgs[9].content).toBe('pending message 30');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns last N messages when no bot reply and no cursor exist', () => {
|
||||||
|
// Use a fresh group with no bot messages
|
||||||
|
storeChatMetadata('fresh@g.us', '2024-01-01T00:00:00.000Z');
|
||||||
|
for (let i = 1; i <= 20; i++) {
|
||||||
|
store({
|
||||||
|
id: `fresh-${i}`,
|
||||||
|
chat_jid: 'fresh@g.us',
|
||||||
|
sender: 'user@s.whatsapp.net',
|
||||||
|
sender_name: 'User',
|
||||||
|
content: `message ${i}`,
|
||||||
|
timestamp: `2024-02-${String(i).padStart(2, '0')}T12:00:00.000Z`,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const recovered = getLastBotMessageTimestamp('fresh@g.us', 'Andy');
|
||||||
|
expect(recovered).toBeUndefined();
|
||||||
|
|
||||||
|
// No cursor → sinceTimestamp = '' but limit caps the result
|
||||||
|
const msgs = getMessagesSince('fresh@g.us', '', 'Andy', 10);
|
||||||
|
expect(msgs).toHaveLength(10);
|
||||||
|
|
||||||
|
const prompt = formatMessages(msgs, 'Asia/Jerusalem');
|
||||||
|
const messageTagCount = (prompt.match(/<message /g) || []).length;
|
||||||
|
expect(messageTagCount).toBe(10);
|
||||||
|
});
|
||||||
|
|
||||||
it('filters pre-migration bot messages via content prefix backstop', () => {
|
it('filters pre-migration bot messages via content prefix backstop', () => {
|
||||||
// Simulate a message written before migration: has prefix but is_bot_message = 0
|
// Simulate a message written before migration: has prefix but is_bot_message = 0
|
||||||
store({
|
store({
|
||||||
|
|||||||
43
src/db.ts
43
src/db.ts
@@ -103,6 +103,13 @@ function createSchema(database: Database.Database): void {
|
|||||||
/* column already exists */
|
/* column already exists */
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Add script column if it doesn't exist (migration for existing DBs)
|
||||||
|
try {
|
||||||
|
database.exec(`ALTER TABLE scheduled_tasks ADD COLUMN script TEXT`);
|
||||||
|
} catch {
|
||||||
|
/* column already exists */
|
||||||
|
}
|
||||||
|
|
||||||
// Add is_bot_message column if it doesn't exist (migration for existing DBs)
|
// Add is_bot_message column if it doesn't exist (migration for existing DBs)
|
||||||
try {
|
try {
|
||||||
database.exec(
|
database.exec(
|
||||||
@@ -144,7 +151,7 @@ function createSchema(database: Database.Database): void {
|
|||||||
`UPDATE chats SET channel = 'discord', is_group = 1 WHERE jid LIKE 'dc:%'`,
|
`UPDATE chats SET channel = 'discord', is_group = 1 WHERE jid LIKE 'dc:%'`,
|
||||||
);
|
);
|
||||||
database.exec(
|
database.exec(
|
||||||
`UPDATE chats SET channel = 'telegram', is_group = 1 WHERE jid LIKE 'tg:%'`,
|
`UPDATE chats SET channel = 'telegram', is_group = 0 WHERE jid LIKE 'tg:%'`,
|
||||||
);
|
);
|
||||||
} catch {
|
} catch {
|
||||||
/* columns already exist */
|
/* columns already exist */
|
||||||
@@ -168,6 +175,11 @@ export function _initTestDatabase(): void {
|
|||||||
createSchema(db);
|
createSchema(db);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/** @internal - for tests only. */
|
||||||
|
export function _closeDatabase(): void {
|
||||||
|
db.close();
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Store chat metadata only (no message content).
|
* Store chat metadata only (no message content).
|
||||||
* Used for all chats to enable group discovery without storing sensitive content.
|
* Used for all chats to enable group discovery without storing sensitive content.
|
||||||
@@ -373,19 +385,33 @@ export function getMessagesSince(
|
|||||||
.all(chatJid, sinceTimestamp, `${botPrefix}:%`, limit) as NewMessage[];
|
.all(chatJid, sinceTimestamp, `${botPrefix}:%`, limit) as NewMessage[];
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export function getLastBotMessageTimestamp(
|
||||||
|
chatJid: string,
|
||||||
|
botPrefix: string,
|
||||||
|
): string | undefined {
|
||||||
|
const row = db
|
||||||
|
.prepare(
|
||||||
|
`SELECT MAX(timestamp) as ts FROM messages
|
||||||
|
WHERE chat_jid = ? AND (is_bot_message = 1 OR content LIKE ?)`,
|
||||||
|
)
|
||||||
|
.get(chatJid, `${botPrefix}:%`) as { ts: string | null } | undefined;
|
||||||
|
return row?.ts ?? undefined;
|
||||||
|
}
|
||||||
|
|
||||||
export function createTask(
|
export function createTask(
|
||||||
task: Omit<ScheduledTask, 'last_run' | 'last_result'>,
|
task: Omit<ScheduledTask, 'last_run' | 'last_result'>,
|
||||||
): void {
|
): void {
|
||||||
db.prepare(
|
db.prepare(
|
||||||
`
|
`
|
||||||
INSERT INTO scheduled_tasks (id, group_folder, chat_jid, prompt, schedule_type, schedule_value, context_mode, next_run, status, created_at)
|
INSERT INTO scheduled_tasks (id, group_folder, chat_jid, prompt, script, schedule_type, schedule_value, context_mode, next_run, status, created_at)
|
||||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||||
`,
|
`,
|
||||||
).run(
|
).run(
|
||||||
task.id,
|
task.id,
|
||||||
task.group_folder,
|
task.group_folder,
|
||||||
task.chat_jid,
|
task.chat_jid,
|
||||||
task.prompt,
|
task.prompt,
|
||||||
|
task.script || null,
|
||||||
task.schedule_type,
|
task.schedule_type,
|
||||||
task.schedule_value,
|
task.schedule_value,
|
||||||
task.context_mode || 'isolated',
|
task.context_mode || 'isolated',
|
||||||
@@ -420,7 +446,12 @@ export function updateTask(
|
|||||||
updates: Partial<
|
updates: Partial<
|
||||||
Pick<
|
Pick<
|
||||||
ScheduledTask,
|
ScheduledTask,
|
||||||
'prompt' | 'schedule_type' | 'schedule_value' | 'next_run' | 'status'
|
| 'prompt'
|
||||||
|
| 'script'
|
||||||
|
| 'schedule_type'
|
||||||
|
| 'schedule_value'
|
||||||
|
| 'next_run'
|
||||||
|
| 'status'
|
||||||
>
|
>
|
||||||
>,
|
>,
|
||||||
): void {
|
): void {
|
||||||
@@ -431,6 +462,10 @@ export function updateTask(
|
|||||||
fields.push('prompt = ?');
|
fields.push('prompt = ?');
|
||||||
values.push(updates.prompt);
|
values.push(updates.prompt);
|
||||||
}
|
}
|
||||||
|
if (updates.script !== undefined) {
|
||||||
|
fields.push('script = ?');
|
||||||
|
values.push(updates.script || null);
|
||||||
|
}
|
||||||
if (updates.schedule_type !== undefined) {
|
if (updates.schedule_type !== undefined) {
|
||||||
fields.push('schedule_type = ?');
|
fields.push('schedule_type = ?');
|
||||||
values.push(updates.schedule_type);
|
values.push(updates.schedule_type);
|
||||||
|
|||||||
@@ -30,8 +30,9 @@ export function readEnvFile(keys: string[]): Record<string, string> {
|
|||||||
if (!wanted.has(key)) continue;
|
if (!wanted.has(key)) continue;
|
||||||
let value = trimmed.slice(eqIdx + 1).trim();
|
let value = trimmed.slice(eqIdx + 1).trim();
|
||||||
if (
|
if (
|
||||||
(value.startsWith('"') && value.endsWith('"')) ||
|
value.length >= 2 &&
|
||||||
(value.startsWith("'") && value.endsWith("'"))
|
((value.startsWith('"') && value.endsWith('"')) ||
|
||||||
|
(value.startsWith("'") && value.endsWith("'")))
|
||||||
) {
|
) {
|
||||||
value = value.slice(1, -1);
|
value = value.slice(1, -1);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,6 +1,10 @@
|
|||||||
import { describe, it, expect } from 'vitest';
|
import { describe, it, expect } from 'vitest';
|
||||||
|
|
||||||
import { ASSISTANT_NAME, TRIGGER_PATTERN } from './config.js';
|
import {
|
||||||
|
ASSISTANT_NAME,
|
||||||
|
getTriggerPattern,
|
||||||
|
TRIGGER_PATTERN,
|
||||||
|
} from './config.js';
|
||||||
import {
|
import {
|
||||||
escapeXml,
|
escapeXml,
|
||||||
formatMessages,
|
formatMessages,
|
||||||
@@ -126,11 +130,60 @@ describe('formatMessages', () => {
|
|||||||
// --- TRIGGER_PATTERN ---
|
// --- TRIGGER_PATTERN ---
|
||||||
|
|
||||||
describe('TRIGGER_PATTERN', () => {
|
describe('TRIGGER_PATTERN', () => {
|
||||||
// Trigger is currently disabled - all messages match
|
const name = ASSISTANT_NAME;
|
||||||
it('matches any message (trigger disabled)', () => {
|
const lower = name.toLowerCase();
|
||||||
expect(TRIGGER_PATTERN.test('hello')).toBe(true);
|
const upper = name.toUpperCase();
|
||||||
expect(TRIGGER_PATTERN.test('@anything')).toBe(true);
|
|
||||||
expect(TRIGGER_PATTERN.test('anything')).toBe(true);
|
it('matches @name at start of message', () => {
|
||||||
|
expect(TRIGGER_PATTERN.test(`@${name} hello`)).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('matches case-insensitively', () => {
|
||||||
|
expect(TRIGGER_PATTERN.test(`@${lower} hello`)).toBe(true);
|
||||||
|
expect(TRIGGER_PATTERN.test(`@${upper} hello`)).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('does not match when not at start of message', () => {
|
||||||
|
expect(TRIGGER_PATTERN.test(`hello @${name}`)).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('does not match partial name like @NameExtra (word boundary)', () => {
|
||||||
|
expect(TRIGGER_PATTERN.test(`@${name}extra hello`)).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('matches with word boundary before apostrophe', () => {
|
||||||
|
expect(TRIGGER_PATTERN.test(`@${name}'s thing`)).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('matches @name alone (end of string is a word boundary)', () => {
|
||||||
|
expect(TRIGGER_PATTERN.test(`@${name}`)).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('matches with leading whitespace after trim', () => {
|
||||||
|
// The actual usage trims before testing: TRIGGER_PATTERN.test(m.content.trim())
|
||||||
|
expect(TRIGGER_PATTERN.test(`@${name} hey`.trim())).toBe(true);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('getTriggerPattern', () => {
|
||||||
|
it('uses the configured per-group trigger when provided', () => {
|
||||||
|
const pattern = getTriggerPattern('@Claw');
|
||||||
|
|
||||||
|
expect(pattern.test('@Claw hello')).toBe(true);
|
||||||
|
expect(pattern.test(`@${ASSISTANT_NAME} hello`)).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('falls back to the default trigger when group trigger is missing', () => {
|
||||||
|
const pattern = getTriggerPattern(undefined);
|
||||||
|
|
||||||
|
expect(pattern.test(`@${ASSISTANT_NAME} hello`)).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('treats regex characters in custom triggers literally', () => {
|
||||||
|
const pattern = getTriggerPattern('@C.L.A.U.D.E');
|
||||||
|
|
||||||
|
expect(pattern.test('@C.L.A.U.D.E hello')).toBe(true);
|
||||||
|
expect(pattern.test('@CXLXAUXDXE hello')).toBe(false);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -179,18 +232,63 @@ describe('formatOutbound', () => {
|
|||||||
// --- Trigger gating with requiresTrigger flag ---
|
// --- Trigger gating with requiresTrigger flag ---
|
||||||
|
|
||||||
describe('trigger gating (requiresTrigger interaction)', () => {
|
describe('trigger gating (requiresTrigger interaction)', () => {
|
||||||
// Note: TRIGGER_PATTERN is currently disabled (matches everything)
|
// Replicates the exact logic from processGroupMessages and startMessageLoop:
|
||||||
// so all messages are processed regardless of trigger presence
|
// if (!isMainGroup && group.requiresTrigger !== false) { check group.trigger }
|
||||||
|
function shouldRequireTrigger(
|
||||||
|
isMainGroup: boolean,
|
||||||
|
requiresTrigger: boolean | undefined,
|
||||||
|
): boolean {
|
||||||
|
return !isMainGroup && requiresTrigger !== false;
|
||||||
|
}
|
||||||
|
|
||||||
it('all messages are processed (trigger disabled)', () => {
|
function shouldProcess(
|
||||||
|
isMainGroup: boolean,
|
||||||
|
requiresTrigger: boolean | undefined,
|
||||||
|
trigger: string | undefined,
|
||||||
|
messages: NewMessage[],
|
||||||
|
): boolean {
|
||||||
|
if (!shouldRequireTrigger(isMainGroup, requiresTrigger)) return true;
|
||||||
|
const triggerPattern = getTriggerPattern(trigger);
|
||||||
|
return messages.some((m) => triggerPattern.test(m.content.trim()));
|
||||||
|
}
|
||||||
|
|
||||||
|
it('main group always processes (no trigger needed)', () => {
|
||||||
const msgs = [makeMsg({ content: 'hello no trigger' })];
|
const msgs = [makeMsg({ content: 'hello no trigger' })];
|
||||||
expect(msgs.length).toBe(1); // Sanity check
|
expect(shouldProcess(true, undefined, undefined, msgs)).toBe(true);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('requiresTrigger=false still works', () => {
|
it('main group processes even with requiresTrigger=true', () => {
|
||||||
// This test verifies the requiresTrigger flag still works when explicitly set
|
const msgs = [makeMsg({ content: 'hello no trigger' })];
|
||||||
// But with trigger disabled, all messages are processed anyway
|
expect(shouldProcess(true, true, undefined, msgs)).toBe(true);
|
||||||
const msgs = [makeMsg({ content: 'hello' })];
|
});
|
||||||
expect(msgs.length).toBe(1);
|
|
||||||
|
it('non-main group with requiresTrigger=undefined requires trigger (defaults to true)', () => {
|
||||||
|
const msgs = [makeMsg({ content: 'hello no trigger' })];
|
||||||
|
expect(shouldProcess(false, undefined, undefined, msgs)).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('non-main group with requiresTrigger=true requires trigger', () => {
|
||||||
|
const msgs = [makeMsg({ content: 'hello no trigger' })];
|
||||||
|
expect(shouldProcess(false, true, undefined, msgs)).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('non-main group with requiresTrigger=true processes when trigger present', () => {
|
||||||
|
const msgs = [makeMsg({ content: `@${ASSISTANT_NAME} do something` })];
|
||||||
|
expect(shouldProcess(false, true, undefined, msgs)).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('non-main group uses its per-group trigger instead of the default trigger', () => {
|
||||||
|
const msgs = [makeMsg({ content: '@Claw do something' })];
|
||||||
|
expect(shouldProcess(false, true, '@Claw', msgs)).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('non-main group does not process when only the default trigger is present for a custom-trigger group', () => {
|
||||||
|
const msgs = [makeMsg({ content: `@${ASSISTANT_NAME} do something` })];
|
||||||
|
expect(shouldProcess(false, true, '@Claw', msgs)).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('non-main group with requiresTrigger=false always processes (no trigger needed)', () => {
|
||||||
|
const msgs = [makeMsg({ content: 'hello no trigger' })];
|
||||||
|
expect(shouldProcess(false, false, undefined, msgs)).toBe(true);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
129
src/index.ts
129
src/index.ts
@@ -1,16 +1,19 @@
|
|||||||
import fs from 'fs';
|
import fs from 'fs';
|
||||||
import path from 'path';
|
import path from 'path';
|
||||||
|
|
||||||
|
import { OneCLI } from '@onecli-sh/sdk';
|
||||||
|
|
||||||
import {
|
import {
|
||||||
ASSISTANT_NAME,
|
ASSISTANT_NAME,
|
||||||
CREDENTIAL_PROXY_PORT,
|
DEFAULT_TRIGGER,
|
||||||
|
getTriggerPattern,
|
||||||
|
GROUPS_DIR,
|
||||||
IDLE_TIMEOUT,
|
IDLE_TIMEOUT,
|
||||||
|
MAX_MESSAGES_PER_PROMPT,
|
||||||
|
ONECLI_URL,
|
||||||
POLL_INTERVAL,
|
POLL_INTERVAL,
|
||||||
TELEGRAM_BOT_POOL,
|
|
||||||
TIMEZONE,
|
TIMEZONE,
|
||||||
TRIGGER_PATTERN,
|
|
||||||
} from './config.js';
|
} from './config.js';
|
||||||
import { startCredentialProxy } from './credential-proxy.js';
|
|
||||||
import './channels/index.js';
|
import './channels/index.js';
|
||||||
import {
|
import {
|
||||||
getChannelFactory,
|
getChannelFactory,
|
||||||
@@ -25,13 +28,13 @@ import {
|
|||||||
import {
|
import {
|
||||||
cleanupOrphans,
|
cleanupOrphans,
|
||||||
ensureContainerRuntimeRunning,
|
ensureContainerRuntimeRunning,
|
||||||
PROXY_BIND_HOST,
|
|
||||||
} from './container-runtime.js';
|
} from './container-runtime.js';
|
||||||
import {
|
import {
|
||||||
getAllChats,
|
getAllChats,
|
||||||
getAllRegisteredGroups,
|
getAllRegisteredGroups,
|
||||||
getAllSessions,
|
getAllSessions,
|
||||||
getAllTasks,
|
getAllTasks,
|
||||||
|
getLastBotMessageTimestamp,
|
||||||
getMessagesSince,
|
getMessagesSince,
|
||||||
getNewMessages,
|
getNewMessages,
|
||||||
getRouterState,
|
getRouterState,
|
||||||
@@ -44,7 +47,6 @@ import {
|
|||||||
} from './db.js';
|
} from './db.js';
|
||||||
import { GroupQueue } from './group-queue.js';
|
import { GroupQueue } from './group-queue.js';
|
||||||
import { resolveGroupFolderPath } from './group-folder.js';
|
import { resolveGroupFolderPath } from './group-folder.js';
|
||||||
import { initBotPool } from './channels/telegram.js';
|
|
||||||
import { startIpcWatcher } from './ipc.js';
|
import { startIpcWatcher } from './ipc.js';
|
||||||
import { findChannel, formatMessages, formatOutbound } from './router.js';
|
import { findChannel, formatMessages, formatOutbound } from './router.js';
|
||||||
import {
|
import {
|
||||||
@@ -74,6 +76,27 @@ let messageLoopRunning = false;
|
|||||||
const channels: Channel[] = [];
|
const channels: Channel[] = [];
|
||||||
const queue = new GroupQueue();
|
const queue = new GroupQueue();
|
||||||
|
|
||||||
|
const onecli = new OneCLI({ url: ONECLI_URL });
|
||||||
|
|
||||||
|
function ensureOneCLIAgent(jid: string, group: RegisteredGroup): void {
|
||||||
|
if (group.isMain) return;
|
||||||
|
const identifier = group.folder.toLowerCase().replace(/_/g, '-');
|
||||||
|
onecli.ensureAgent({ name: group.name, identifier }).then(
|
||||||
|
(res) => {
|
||||||
|
logger.info(
|
||||||
|
{ jid, identifier, created: res.created },
|
||||||
|
'OneCLI agent ensured',
|
||||||
|
);
|
||||||
|
},
|
||||||
|
(err) => {
|
||||||
|
logger.debug(
|
||||||
|
{ jid, identifier, err: String(err) },
|
||||||
|
'OneCLI agent ensure skipped',
|
||||||
|
);
|
||||||
|
},
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
function loadState(): void {
|
function loadState(): void {
|
||||||
lastTimestamp = getRouterState('last_timestamp') || '';
|
lastTimestamp = getRouterState('last_timestamp') || '';
|
||||||
const agentTs = getRouterState('last_agent_timestamp');
|
const agentTs = getRouterState('last_agent_timestamp');
|
||||||
@@ -91,6 +114,27 @@ function loadState(): void {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return the message cursor for a group, recovering from the last bot reply
|
||||||
|
* if lastAgentTimestamp is missing (new group, corrupted state, restart).
|
||||||
|
*/
|
||||||
|
function getOrRecoverCursor(chatJid: string): string {
|
||||||
|
const existing = lastAgentTimestamp[chatJid];
|
||||||
|
if (existing) return existing;
|
||||||
|
|
||||||
|
const botTs = getLastBotMessageTimestamp(chatJid, ASSISTANT_NAME);
|
||||||
|
if (botTs) {
|
||||||
|
logger.info(
|
||||||
|
{ chatJid, recoveredFrom: botTs },
|
||||||
|
'Recovered message cursor from last bot reply',
|
||||||
|
);
|
||||||
|
lastAgentTimestamp[chatJid] = botTs;
|
||||||
|
saveState();
|
||||||
|
return botTs;
|
||||||
|
}
|
||||||
|
return '';
|
||||||
|
}
|
||||||
|
|
||||||
function saveState(): void {
|
function saveState(): void {
|
||||||
setRouterState('last_timestamp', lastTimestamp);
|
setRouterState('last_timestamp', lastTimestamp);
|
||||||
setRouterState('last_agent_timestamp', JSON.stringify(lastAgentTimestamp));
|
setRouterState('last_agent_timestamp', JSON.stringify(lastAgentTimestamp));
|
||||||
@@ -114,6 +158,29 @@ function registerGroup(jid: string, group: RegisteredGroup): void {
|
|||||||
// Create group folder
|
// Create group folder
|
||||||
fs.mkdirSync(path.join(groupDir, 'logs'), { recursive: true });
|
fs.mkdirSync(path.join(groupDir, 'logs'), { recursive: true });
|
||||||
|
|
||||||
|
// Copy CLAUDE.md template into the new group folder so agents have
|
||||||
|
// identity and instructions from the first run. (Fixes #1391)
|
||||||
|
const groupMdFile = path.join(groupDir, 'CLAUDE.md');
|
||||||
|
if (!fs.existsSync(groupMdFile)) {
|
||||||
|
const templateFile = path.join(
|
||||||
|
GROUPS_DIR,
|
||||||
|
group.isMain ? 'main' : 'global',
|
||||||
|
'CLAUDE.md',
|
||||||
|
);
|
||||||
|
if (fs.existsSync(templateFile)) {
|
||||||
|
let content = fs.readFileSync(templateFile, 'utf-8');
|
||||||
|
if (ASSISTANT_NAME !== 'Andy') {
|
||||||
|
content = content.replace(/^# Andy$/m, `# ${ASSISTANT_NAME}`);
|
||||||
|
content = content.replace(/You are Andy/g, `You are ${ASSISTANT_NAME}`);
|
||||||
|
}
|
||||||
|
fs.writeFileSync(groupMdFile, content);
|
||||||
|
logger.info({ folder: group.folder }, 'Created CLAUDE.md from template');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ensure a corresponding OneCLI agent exists (best-effort, non-blocking)
|
||||||
|
ensureOneCLIAgent(jid, group);
|
||||||
|
|
||||||
logger.info(
|
logger.info(
|
||||||
{ jid, name: group.name, folder: group.folder },
|
{ jid, name: group.name, folder: group.folder },
|
||||||
'Group registered',
|
'Group registered',
|
||||||
@@ -161,21 +228,22 @@ async function processGroupMessages(chatJid: string): Promise<boolean> {
|
|||||||
|
|
||||||
const isMainGroup = group.isMain === true;
|
const isMainGroup = group.isMain === true;
|
||||||
|
|
||||||
const sinceTimestamp = lastAgentTimestamp[chatJid] || '';
|
|
||||||
const missedMessages = getMessagesSince(
|
const missedMessages = getMessagesSince(
|
||||||
chatJid,
|
chatJid,
|
||||||
sinceTimestamp,
|
getOrRecoverCursor(chatJid),
|
||||||
ASSISTANT_NAME,
|
ASSISTANT_NAME,
|
||||||
|
MAX_MESSAGES_PER_PROMPT,
|
||||||
);
|
);
|
||||||
|
|
||||||
if (missedMessages.length === 0) return true;
|
if (missedMessages.length === 0) return true;
|
||||||
|
|
||||||
// Check if trigger is required and present
|
// For non-main groups, check if trigger is required and present
|
||||||
if (group.requiresTrigger !== false) {
|
if (!isMainGroup && group.requiresTrigger !== false) {
|
||||||
|
const triggerPattern = getTriggerPattern(group.trigger);
|
||||||
const allowlistCfg = loadSenderAllowlist();
|
const allowlistCfg = loadSenderAllowlist();
|
||||||
const hasTrigger = missedMessages.some(
|
const hasTrigger = missedMessages.some(
|
||||||
(m) =>
|
(m) =>
|
||||||
TRIGGER_PATTERN.test(m.content.trim()) &&
|
triggerPattern.test(m.content.trim()) &&
|
||||||
(m.is_from_me || isTriggerAllowed(chatJid, m.sender, allowlistCfg)),
|
(m.is_from_me || isTriggerAllowed(chatJid, m.sender, allowlistCfg)),
|
||||||
);
|
);
|
||||||
if (!hasTrigger) return true;
|
if (!hasTrigger) return true;
|
||||||
@@ -285,6 +353,7 @@ async function runAgent(
|
|||||||
id: t.id,
|
id: t.id,
|
||||||
groupFolder: t.group_folder,
|
groupFolder: t.group_folder,
|
||||||
prompt: t.prompt,
|
prompt: t.prompt,
|
||||||
|
script: t.script || undefined,
|
||||||
schedule_type: t.schedule_type,
|
schedule_type: t.schedule_type,
|
||||||
schedule_value: t.schedule_value,
|
schedule_value: t.schedule_value,
|
||||||
status: t.status,
|
status: t.status,
|
||||||
@@ -355,7 +424,7 @@ async function startMessageLoop(): Promise<void> {
|
|||||||
}
|
}
|
||||||
messageLoopRunning = true;
|
messageLoopRunning = true;
|
||||||
|
|
||||||
logger.info(`NanoClaw running (trigger: @${ASSISTANT_NAME})`);
|
logger.info(`NanoClaw running (default trigger: ${DEFAULT_TRIGGER})`);
|
||||||
|
|
||||||
while (true) {
|
while (true) {
|
||||||
try {
|
try {
|
||||||
@@ -395,16 +464,17 @@ async function startMessageLoop(): Promise<void> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
const isMainGroup = group.isMain === true;
|
const isMainGroup = group.isMain === true;
|
||||||
const needsTrigger = group.requiresTrigger !== false;
|
const needsTrigger = !isMainGroup && group.requiresTrigger !== false;
|
||||||
|
|
||||||
// For non-main groups, only act on trigger messages.
|
// For non-main groups, only act on trigger messages.
|
||||||
// Non-trigger messages accumulate in DB and get pulled as
|
// Non-trigger messages accumulate in DB and get pulled as
|
||||||
// context when a trigger eventually arrives.
|
// context when a trigger eventually arrives.
|
||||||
if (needsTrigger) {
|
if (needsTrigger) {
|
||||||
|
const triggerPattern = getTriggerPattern(group.trigger);
|
||||||
const allowlistCfg = loadSenderAllowlist();
|
const allowlistCfg = loadSenderAllowlist();
|
||||||
const hasTrigger = groupMessages.some(
|
const hasTrigger = groupMessages.some(
|
||||||
(m) =>
|
(m) =>
|
||||||
TRIGGER_PATTERN.test(m.content.trim()) &&
|
triggerPattern.test(m.content.trim()) &&
|
||||||
(m.is_from_me ||
|
(m.is_from_me ||
|
||||||
isTriggerAllowed(chatJid, m.sender, allowlistCfg)),
|
isTriggerAllowed(chatJid, m.sender, allowlistCfg)),
|
||||||
);
|
);
|
||||||
@@ -415,8 +485,9 @@ async function startMessageLoop(): Promise<void> {
|
|||||||
// context that accumulated between triggers is included.
|
// context that accumulated between triggers is included.
|
||||||
const allPending = getMessagesSince(
|
const allPending = getMessagesSince(
|
||||||
chatJid,
|
chatJid,
|
||||||
lastAgentTimestamp[chatJid] || '',
|
getOrRecoverCursor(chatJid),
|
||||||
ASSISTANT_NAME,
|
ASSISTANT_NAME,
|
||||||
|
MAX_MESSAGES_PER_PROMPT,
|
||||||
);
|
);
|
||||||
const messagesToSend =
|
const messagesToSend =
|
||||||
allPending.length > 0 ? allPending : groupMessages;
|
allPending.length > 0 ? allPending : groupMessages;
|
||||||
@@ -455,8 +526,12 @@ async function startMessageLoop(): Promise<void> {
|
|||||||
*/
|
*/
|
||||||
function recoverPendingMessages(): void {
|
function recoverPendingMessages(): void {
|
||||||
for (const [chatJid, group] of Object.entries(registeredGroups)) {
|
for (const [chatJid, group] of Object.entries(registeredGroups)) {
|
||||||
const sinceTimestamp = lastAgentTimestamp[chatJid] || '';
|
const pending = getMessagesSince(
|
||||||
const pending = getMessagesSince(chatJid, sinceTimestamp, ASSISTANT_NAME);
|
chatJid,
|
||||||
|
getOrRecoverCursor(chatJid),
|
||||||
|
ASSISTANT_NAME,
|
||||||
|
MAX_MESSAGES_PER_PROMPT,
|
||||||
|
);
|
||||||
if (pending.length > 0) {
|
if (pending.length > 0) {
|
||||||
logger.info(
|
logger.info(
|
||||||
{ group: group.name, pendingCount: pending.length },
|
{ group: group.name, pendingCount: pending.length },
|
||||||
@@ -477,18 +552,18 @@ async function main(): Promise<void> {
|
|||||||
initDatabase();
|
initDatabase();
|
||||||
logger.info('Database initialized');
|
logger.info('Database initialized');
|
||||||
loadState();
|
loadState();
|
||||||
restoreRemoteControl();
|
|
||||||
|
|
||||||
// Start credential proxy (containers route API calls through this)
|
// Ensure OneCLI agents exist for all registered groups.
|
||||||
const proxyServer = await startCredentialProxy(
|
// Recovers from missed creates (e.g. OneCLI was down at registration time).
|
||||||
CREDENTIAL_PROXY_PORT,
|
for (const [jid, group] of Object.entries(registeredGroups)) {
|
||||||
PROXY_BIND_HOST,
|
ensureOneCLIAgent(jid, group);
|
||||||
);
|
}
|
||||||
|
|
||||||
|
restoreRemoteControl();
|
||||||
|
|
||||||
// Graceful shutdown handlers
|
// Graceful shutdown handlers
|
||||||
const shutdown = async (signal: string) => {
|
const shutdown = async (signal: string) => {
|
||||||
logger.info({ signal }, 'Shutdown signal received');
|
logger.info({ signal }, 'Shutdown signal received');
|
||||||
proxyServer.close();
|
|
||||||
await queue.shutdown(10000);
|
await queue.shutdown(10000);
|
||||||
for (const ch of channels) await ch.disconnect();
|
for (const ch of channels) await ch.disconnect();
|
||||||
process.exit(0);
|
process.exit(0);
|
||||||
@@ -598,11 +673,6 @@ async function main(): Promise<void> {
|
|||||||
process.exit(1);
|
process.exit(1);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Initialize Telegram bot pool for agent teams (swarm)
|
|
||||||
if (TELEGRAM_BOT_POOL.length > 0) {
|
|
||||||
await initBotPool(TELEGRAM_BOT_POOL);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Start subsystems (independently of connection handler)
|
// Start subsystems (independently of connection handler)
|
||||||
startSchedulerLoop({
|
startSchedulerLoop({
|
||||||
registeredGroups: () => registeredGroups,
|
registeredGroups: () => registeredGroups,
|
||||||
@@ -646,6 +716,7 @@ async function main(): Promise<void> {
|
|||||||
id: t.id,
|
id: t.id,
|
||||||
groupFolder: t.group_folder,
|
groupFolder: t.group_folder,
|
||||||
prompt: t.prompt,
|
prompt: t.prompt,
|
||||||
|
script: t.script || undefined,
|
||||||
schedule_type: t.schedule_type,
|
schedule_type: t.schedule_type,
|
||||||
schedule_value: t.schedule_value,
|
schedule_value: t.schedule_value,
|
||||||
status: t.status,
|
status: t.status,
|
||||||
|
|||||||
20
src/ipc.ts
20
src/ipc.ts
@@ -4,7 +4,7 @@ import path from 'path';
|
|||||||
import { CronExpressionParser } from 'cron-parser';
|
import { CronExpressionParser } from 'cron-parser';
|
||||||
|
|
||||||
import { DATA_DIR, IPC_POLL_INTERVAL, TIMEZONE } from './config.js';
|
import { DATA_DIR, IPC_POLL_INTERVAL, TIMEZONE } from './config.js';
|
||||||
import { sendPoolMessage } from './channels/telegram.js';
|
|
||||||
import { AvailableGroup } from './container-runner.js';
|
import { AvailableGroup } from './container-runner.js';
|
||||||
import { createTask, deleteTask, getTaskById, updateTask } from './db.js';
|
import { createTask, deleteTask, getTaskById, updateTask } from './db.js';
|
||||||
import { isValidGroupFolder } from './group-folder.js';
|
import { isValidGroupFolder } from './group-folder.js';
|
||||||
@@ -82,16 +82,7 @@ export function startIpcWatcher(deps: IpcDeps): void {
|
|||||||
isMain ||
|
isMain ||
|
||||||
(targetGroup && targetGroup.folder === sourceGroup)
|
(targetGroup && targetGroup.folder === sourceGroup)
|
||||||
) {
|
) {
|
||||||
if (data.sender && data.chatJid.startsWith('tg:')) {
|
|
||||||
await sendPoolMessage(
|
|
||||||
data.chatJid,
|
|
||||||
data.text,
|
|
||||||
data.sender,
|
|
||||||
sourceGroup,
|
|
||||||
);
|
|
||||||
} else {
|
|
||||||
await deps.sendMessage(data.chatJid, data.text);
|
await deps.sendMessage(data.chatJid, data.text);
|
||||||
}
|
|
||||||
logger.info(
|
logger.info(
|
||||||
{ chatJid: data.chatJid, sourceGroup, sender: data.sender },
|
{ chatJid: data.chatJid, sourceGroup, sender: data.sender },
|
||||||
'IPC message sent',
|
'IPC message sent',
|
||||||
@@ -172,6 +163,7 @@ export async function processTaskIpc(
|
|||||||
schedule_type?: string;
|
schedule_type?: string;
|
||||||
schedule_value?: string;
|
schedule_value?: string;
|
||||||
context_mode?: string;
|
context_mode?: string;
|
||||||
|
script?: string;
|
||||||
groupFolder?: string;
|
groupFolder?: string;
|
||||||
chatJid?: string;
|
chatJid?: string;
|
||||||
targetJid?: string;
|
targetJid?: string;
|
||||||
@@ -270,6 +262,7 @@ export async function processTaskIpc(
|
|||||||
group_folder: targetFolder,
|
group_folder: targetFolder,
|
||||||
chat_jid: targetJid,
|
chat_jid: targetJid,
|
||||||
prompt: data.prompt,
|
prompt: data.prompt,
|
||||||
|
script: data.script || null,
|
||||||
schedule_type: scheduleType,
|
schedule_type: scheduleType,
|
||||||
schedule_value: data.schedule_value,
|
schedule_value: data.schedule_value,
|
||||||
context_mode: contextMode,
|
context_mode: contextMode,
|
||||||
@@ -362,6 +355,7 @@ export async function processTaskIpc(
|
|||||||
|
|
||||||
const updates: Parameters<typeof updateTask>[1] = {};
|
const updates: Parameters<typeof updateTask>[1] = {};
|
||||||
if (data.prompt !== undefined) updates.prompt = data.prompt;
|
if (data.prompt !== undefined) updates.prompt = data.prompt;
|
||||||
|
if (data.script !== undefined) updates.script = data.script || null;
|
||||||
if (data.schedule_type !== undefined)
|
if (data.schedule_type !== undefined)
|
||||||
updates.schedule_type = data.schedule_type as
|
updates.schedule_type = data.schedule_type as
|
||||||
| 'cron'
|
| 'cron'
|
||||||
@@ -448,7 +442,10 @@ export async function processTaskIpc(
|
|||||||
);
|
);
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
// Defense in depth: agent cannot set isMain via IPC
|
// Defense in depth: agent cannot set isMain via IPC.
|
||||||
|
// Preserve isMain from the existing registration so IPC config
|
||||||
|
// updates (e.g. adding additionalMounts) don't strip the flag.
|
||||||
|
const existingGroup = registeredGroups[data.jid];
|
||||||
deps.registerGroup(data.jid, {
|
deps.registerGroup(data.jid, {
|
||||||
name: data.name,
|
name: data.name,
|
||||||
folder: data.folder,
|
folder: data.folder,
|
||||||
@@ -456,6 +453,7 @@ export async function processTaskIpc(
|
|||||||
added_at: new Date().toISOString(),
|
added_at: new Date().toISOString(),
|
||||||
containerConfig: data.containerConfig,
|
containerConfig: data.containerConfig,
|
||||||
requiresTrigger: data.requiresTrigger,
|
requiresTrigger: data.requiresTrigger,
|
||||||
|
isMain: existingGroup?.isMain,
|
||||||
});
|
});
|
||||||
} else {
|
} else {
|
||||||
logger.warn(
|
logger.warn(
|
||||||
|
|||||||
@@ -1,11 +1,78 @@
|
|||||||
import pino from 'pino';
|
const LEVELS = { debug: 20, info: 30, warn: 40, error: 50, fatal: 60 } as const;
|
||||||
|
type Level = keyof typeof LEVELS;
|
||||||
|
|
||||||
export const logger = pino({
|
const COLORS: Record<Level, string> = {
|
||||||
level: process.env.LOG_LEVEL || 'info',
|
debug: '\x1b[34m',
|
||||||
transport: { target: 'pino-pretty', options: { colorize: true } },
|
info: '\x1b[32m',
|
||||||
});
|
warn: '\x1b[33m',
|
||||||
|
error: '\x1b[31m',
|
||||||
|
fatal: '\x1b[41m\x1b[37m',
|
||||||
|
};
|
||||||
|
const KEY_COLOR = '\x1b[35m';
|
||||||
|
const MSG_COLOR = '\x1b[36m';
|
||||||
|
const RESET = '\x1b[39m';
|
||||||
|
const FULL_RESET = '\x1b[0m';
|
||||||
|
|
||||||
// Route uncaught errors through pino so they get timestamps in stderr
|
const threshold =
|
||||||
|
LEVELS[(process.env.LOG_LEVEL as Level) || 'info'] ?? LEVELS.info;
|
||||||
|
|
||||||
|
function formatErr(err: unknown): string {
|
||||||
|
if (err instanceof Error) {
|
||||||
|
return `{\n "type": "${err.constructor.name}",\n "message": "${err.message}",\n "stack":\n ${err.stack}\n }`;
|
||||||
|
}
|
||||||
|
return JSON.stringify(err);
|
||||||
|
}
|
||||||
|
|
||||||
|
function formatData(data: Record<string, unknown>): string {
|
||||||
|
let out = '';
|
||||||
|
for (const [k, v] of Object.entries(data)) {
|
||||||
|
if (k === 'err') {
|
||||||
|
out += `\n ${KEY_COLOR}err${RESET}: ${formatErr(v)}`;
|
||||||
|
} else {
|
||||||
|
out += `\n ${KEY_COLOR}${k}${RESET}: ${JSON.stringify(v)}`;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return out;
|
||||||
|
}
|
||||||
|
|
||||||
|
function ts(): string {
|
||||||
|
const d = new Date();
|
||||||
|
return `${String(d.getHours()).padStart(2, '0')}:${String(d.getMinutes()).padStart(2, '0')}:${String(d.getSeconds()).padStart(2, '0')}.${String(d.getMilliseconds()).padStart(3, '0')}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
function log(
|
||||||
|
level: Level,
|
||||||
|
dataOrMsg: Record<string, unknown> | string,
|
||||||
|
msg?: string,
|
||||||
|
): void {
|
||||||
|
if (LEVELS[level] < threshold) return;
|
||||||
|
const tag = `${COLORS[level]}${level.toUpperCase()}${level === 'fatal' ? FULL_RESET : RESET}`;
|
||||||
|
const stream = LEVELS[level] >= LEVELS.warn ? process.stderr : process.stdout;
|
||||||
|
if (typeof dataOrMsg === 'string') {
|
||||||
|
stream.write(
|
||||||
|
`[${ts()}] ${tag} (${process.pid}): ${MSG_COLOR}${dataOrMsg}${RESET}\n`,
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
stream.write(
|
||||||
|
`[${ts()}] ${tag} (${process.pid}): ${MSG_COLOR}${msg}${RESET}${formatData(dataOrMsg)}\n`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const logger = {
|
||||||
|
debug: (dataOrMsg: Record<string, unknown> | string, msg?: string) =>
|
||||||
|
log('debug', dataOrMsg, msg),
|
||||||
|
info: (dataOrMsg: Record<string, unknown> | string, msg?: string) =>
|
||||||
|
log('info', dataOrMsg, msg),
|
||||||
|
warn: (dataOrMsg: Record<string, unknown> | string, msg?: string) =>
|
||||||
|
log('warn', dataOrMsg, msg),
|
||||||
|
error: (dataOrMsg: Record<string, unknown> | string, msg?: string) =>
|
||||||
|
log('error', dataOrMsg, msg),
|
||||||
|
fatal: (dataOrMsg: Record<string, unknown> | string, msg?: string) =>
|
||||||
|
log('fatal', dataOrMsg, msg),
|
||||||
|
};
|
||||||
|
|
||||||
|
// Route uncaught errors through logger so they get timestamps in stderr
|
||||||
process.on('uncaughtException', (err) => {
|
process.on('uncaughtException', (err) => {
|
||||||
logger.fatal({ err }, 'Uncaught exception');
|
logger.fatal({ err }, 'Uncaught exception');
|
||||||
process.exit(1);
|
process.exit(1);
|
||||||
|
|||||||
@@ -9,16 +9,10 @@
|
|||||||
import fs from 'fs';
|
import fs from 'fs';
|
||||||
import os from 'os';
|
import os from 'os';
|
||||||
import path from 'path';
|
import path from 'path';
|
||||||
import pino from 'pino';
|
|
||||||
|
|
||||||
import { MOUNT_ALLOWLIST_PATH } from './config.js';
|
import { MOUNT_ALLOWLIST_PATH } from './config.js';
|
||||||
|
import { logger } from './logger.js';
|
||||||
import { AdditionalMount, AllowedRoot, MountAllowlist } from './types.js';
|
import { AdditionalMount, AllowedRoot, MountAllowlist } from './types.js';
|
||||||
|
|
||||||
const logger = pino({
|
|
||||||
level: process.env.LOG_LEVEL || 'info',
|
|
||||||
transport: { target: 'pino-pretty', options: { colorize: true } },
|
|
||||||
});
|
|
||||||
|
|
||||||
// Cache the allowlist in memory - only reloads on process restart
|
// Cache the allowlist in memory - only reloads on process restart
|
||||||
let cachedAllowlist: MountAllowlist | null = null;
|
let cachedAllowlist: MountAllowlist | null = null;
|
||||||
let allowlistLoadError: string | null = null;
|
let allowlistLoadError: string | null = null;
|
||||||
@@ -63,7 +57,8 @@ export function loadMountAllowlist(): MountAllowlist | null {
|
|||||||
|
|
||||||
try {
|
try {
|
||||||
if (!fs.existsSync(MOUNT_ALLOWLIST_PATH)) {
|
if (!fs.existsSync(MOUNT_ALLOWLIST_PATH)) {
|
||||||
allowlistLoadError = `Mount allowlist not found at ${MOUNT_ALLOWLIST_PATH}`;
|
// Do NOT cache this as an error — file may be created later without restart.
|
||||||
|
// Only parse/structural errors are permanently cached.
|
||||||
logger.warn(
|
logger.warn(
|
||||||
{ path: MOUNT_ALLOWLIST_PATH },
|
{ path: MOUNT_ALLOWLIST_PATH },
|
||||||
'Mount allowlist not found - additional mounts will be BLOCKED. ' +
|
'Mount allowlist not found - additional mounts will be BLOCKED. ' +
|
||||||
@@ -215,6 +210,11 @@ function isValidContainerPath(containerPath: string): boolean {
|
|||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Must not contain colons — prevents Docker -v option injection (e.g., "repo:rw")
|
||||||
|
if (containerPath.includes(':')) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -139,6 +139,7 @@ async function runTask(
|
|||||||
id: t.id,
|
id: t.id,
|
||||||
groupFolder: t.group_folder,
|
groupFolder: t.group_folder,
|
||||||
prompt: t.prompt,
|
prompt: t.prompt,
|
||||||
|
script: t.script,
|
||||||
schedule_type: t.schedule_type,
|
schedule_type: t.schedule_type,
|
||||||
schedule_value: t.schedule_value,
|
schedule_value: t.schedule_value,
|
||||||
status: t.status,
|
status: t.status,
|
||||||
@@ -179,6 +180,7 @@ async function runTask(
|
|||||||
isMain,
|
isMain,
|
||||||
isScheduledTask: true,
|
isScheduledTask: true,
|
||||||
assistantName: ASSISTANT_NAME,
|
assistantName: ASSISTANT_NAME,
|
||||||
|
script: task.script || undefined,
|
||||||
},
|
},
|
||||||
(proc, containerName) =>
|
(proc, containerName) =>
|
||||||
deps.onProcess(task.chat_jid, proc, containerName, task.group_folder),
|
deps.onProcess(task.chat_jid, proc, containerName, task.group_folder),
|
||||||
|
|||||||
@@ -1,6 +1,10 @@
|
|||||||
import { describe, it, expect } from 'vitest';
|
import { describe, it, expect } from 'vitest';
|
||||||
|
|
||||||
import { formatLocalTime } from './timezone.js';
|
import {
|
||||||
|
formatLocalTime,
|
||||||
|
isValidTimezone,
|
||||||
|
resolveTimezone,
|
||||||
|
} from './timezone.js';
|
||||||
|
|
||||||
// --- formatLocalTime ---
|
// --- formatLocalTime ---
|
||||||
|
|
||||||
@@ -26,4 +30,44 @@ describe('formatLocalTime', () => {
|
|||||||
expect(ny).toContain('8:00');
|
expect(ny).toContain('8:00');
|
||||||
expect(tokyo).toContain('9:00');
|
expect(tokyo).toContain('9:00');
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('does not throw on invalid timezone, falls back to UTC', () => {
|
||||||
|
expect(() =>
|
||||||
|
formatLocalTime('2026-01-01T00:00:00.000Z', 'IST-2'),
|
||||||
|
).not.toThrow();
|
||||||
|
const result = formatLocalTime('2026-01-01T12:00:00.000Z', 'IST-2');
|
||||||
|
// Should format as UTC (noon UTC = 12:00 PM)
|
||||||
|
expect(result).toContain('12:00');
|
||||||
|
expect(result).toContain('PM');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('isValidTimezone', () => {
|
||||||
|
it('accepts valid IANA identifiers', () => {
|
||||||
|
expect(isValidTimezone('America/New_York')).toBe(true);
|
||||||
|
expect(isValidTimezone('UTC')).toBe(true);
|
||||||
|
expect(isValidTimezone('Asia/Tokyo')).toBe(true);
|
||||||
|
expect(isValidTimezone('Asia/Jerusalem')).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('rejects invalid timezone strings', () => {
|
||||||
|
expect(isValidTimezone('IST-2')).toBe(false);
|
||||||
|
expect(isValidTimezone('XYZ+3')).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('rejects empty and garbage strings', () => {
|
||||||
|
expect(isValidTimezone('')).toBe(false);
|
||||||
|
expect(isValidTimezone('NotATimezone')).toBe(false);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('resolveTimezone', () => {
|
||||||
|
it('returns the timezone if valid', () => {
|
||||||
|
expect(resolveTimezone('America/New_York')).toBe('America/New_York');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('falls back to UTC for invalid timezone', () => {
|
||||||
|
expect(resolveTimezone('IST-2')).toBe('UTC');
|
||||||
|
expect(resolveTimezone('')).toBe('UTC');
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -1,11 +1,32 @@
|
|||||||
|
/**
|
||||||
|
* Check whether a timezone string is a valid IANA identifier
|
||||||
|
* that Intl.DateTimeFormat can use.
|
||||||
|
*/
|
||||||
|
export function isValidTimezone(tz: string): boolean {
|
||||||
|
try {
|
||||||
|
Intl.DateTimeFormat(undefined, { timeZone: tz });
|
||||||
|
return true;
|
||||||
|
} catch {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return the given timezone if valid IANA, otherwise fall back to UTC.
|
||||||
|
*/
|
||||||
|
export function resolveTimezone(tz: string): string {
|
||||||
|
return isValidTimezone(tz) ? tz : 'UTC';
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Convert a UTC ISO timestamp to a localized display string.
|
* Convert a UTC ISO timestamp to a localized display string.
|
||||||
* Uses the Intl API (no external dependencies).
|
* Uses the Intl API (no external dependencies).
|
||||||
|
* Falls back to UTC if the timezone is invalid.
|
||||||
*/
|
*/
|
||||||
export function formatLocalTime(utcIso: string, timezone: string): string {
|
export function formatLocalTime(utcIso: string, timezone: string): string {
|
||||||
const date = new Date(utcIso);
|
const date = new Date(utcIso);
|
||||||
return date.toLocaleString('en-US', {
|
return date.toLocaleString('en-US', {
|
||||||
timeZone: timezone,
|
timeZone: resolveTimezone(timezone),
|
||||||
year: 'numeric',
|
year: 'numeric',
|
||||||
month: 'short',
|
month: 'short',
|
||||||
day: 'numeric',
|
day: 'numeric',
|
||||||
|
|||||||
@@ -58,6 +58,7 @@ export interface ScheduledTask {
|
|||||||
group_folder: string;
|
group_folder: string;
|
||||||
chat_jid: string;
|
chat_jid: string;
|
||||||
prompt: string;
|
prompt: string;
|
||||||
|
script?: string | null;
|
||||||
schedule_type: 'cron' | 'interval' | 'once';
|
schedule_type: 'cron' | 'interval' | 'once';
|
||||||
schedule_value: string;
|
schedule_value: string;
|
||||||
context_mode: 'group' | 'isolated';
|
context_mode: 'group' | 'isolated';
|
||||||
|
|||||||
Reference in New Issue
Block a user