Compare commits

...

13 Commits

Author SHA1 Message Date
Joe Fleming
c65a7160ba fix: use updated ollama ai package 2026-03-13 21:16:00 -06:00
Joe Fleming
74a5e70322 fix: require user to specify the provider 2026-03-13 20:49:21 -06:00
Joe Fleming
b6df31fcbf fix: require a model, default to empty value during onboarding
and add a WORKSPACE_PATH constant to make tracking that path easier
2026-03-13 20:42:05 -06:00
Joe Fleming
14aa1c1e7f fix: correctly load config path from args
log error and exit if config can not be loaded, don't use defaults
2026-03-13 20:29:02 -06:00
Joe Fleming
47c4db53af fix: fail to run agent or gateway without config 2026-03-13 20:11:21 -06:00
Joe Fleming
1dd953d17a fix: bug in utils path
add an optional arg to create config path
2026-03-13 20:06:59 -06:00
Joe Fleming
e915dd2922 chore: better welcome message, throw on config parse error
add instructions for gateway mode, change config order, and don't
hard-code the config path, pull it from the default config
2026-03-13 20:05:44 -06:00
Joe Fleming
9ac92ed536 chore: use more conventional XDG_CONFIG_HOME for config 2026-03-13 19:22:37 -06:00
Joe Fleming
398b98393a chore: remove nested ternaries 2026-03-13 19:13:50 -06:00
Joe Fleming
2d99d17d60 feat: create onboard script 2026-03-13 18:49:15 -06:00
Joe Fleming
3893d88365 chore: change run command, remove workspace cli argument 2026-03-13 15:42:16 -06:00
Joe Fleming
4f54c9837f chore: break up command handlers 2026-03-13 14:52:51 -06:00
Joe Fleming
7e28a09345 chore: format code 2026-03-13 14:46:15 -06:00
37 changed files with 796 additions and 398 deletions

4
.gitignore vendored
View File

@@ -1,6 +1,10 @@
# dependencies (bun install)
node_modules
# editors
.vscode
.openvscode-server
# output
out
dist

View File

@@ -1,7 +1,5 @@
{
"$schema": "./node_modules/oxfmt/configuration_schema.json",
"ignorePatterns": ["*.md"],
"options": {
"singleQuote": true
}
"singleQuote": true
}

View File

@@ -5,7 +5,8 @@
"correctness": "warn"
},
"rules": {
"eslint/no-unused-vars": "error"
"eslint/no-unused-vars": "error",
"unicorn/no-nested-ternary": "error"
},
"options": {
"typeAware": true,

View File

@@ -18,13 +18,17 @@ bun install # or use `mise install`
## Quick start
**1. Create a config file**
**1. Initialize workspace**
```bash
mkdir -p ~/.nanobot
bun run nanobot onboard
```
`~/.nanobot/config.json`:
This creates `~/.config/nanobot/` with a config file and templates.
**2. Edit config**
Add your API key and set provider/model:
```json
{
@@ -34,15 +38,16 @@ mkdir -p ~/.nanobot
}
},
"agent": {
"model": "openrouter/anthropic/claude-sonnet-4-5"
"provider": "openrouter",
"model": "anthropic/claude-sonnet-4-5"
}
}
```
**2. Chat**
**3. Chat**
```bash
bun run start agent
bun run nanobot agent
```
That's it.
@@ -54,22 +59,21 @@ That's it.
Chat with the agent from your terminal. Does not require a running gateway.
```
bun run start agent [options]
bun run nanobot agent [options]
```
| Option | Description |
|--------|-------------|
| `-c, --config <path>` | Path to `config.json` (default: `~/.nanobot/config.json`) |
| `-c, --config <path>` | Path to `config.json` (default: `~/.config/nanobot/config.json`) |
| `-m, --message <text>` | Send a single message and exit (non-interactive) |
| `-w, --workspace <path>` | Override the workspace directory |
| `-M, --model <model>` | Override the model for this session |
**Interactive mode** (default when no `-m` is given):
```bash
bun run start agent
bun run start agent -c ~/.nanobot-work/config.json
bun run start agent -w /tmp/scratch
bun run nanobot agent
bun run nanobot agent -c ~/.config/nanobot-work/config.json
bun run nanobot agent -w /tmp/scratch
```
Press `Ctrl+C` to exit.
@@ -77,8 +81,8 @@ Press `Ctrl+C` to exit.
**Single-shot mode:**
```bash
bun run start agent -m "What time is it in Tokyo?"
bun run start agent -m "Summarize the file ./notes.md"
bun run nanobot agent -m "What time is it in Tokyo?"
bun run nanobot agent -m "Summarize the file ./notes.md"
```
### `gateway` — Mattermost bot
@@ -86,23 +90,23 @@ bun run start agent -m "Summarize the file ./notes.md"
Runs the full stack: Mattermost WebSocket channel, agent loop, cron scheduler, and heartbeat.
```
bun run start gateway [options]
bun run nanobot gateway [options]
```
| Option | Description |
|--------|-------------|
| `-c, --config <path>` | Path to `config.json` (default: `~/.nanobot/config.json`) |
| `-c, --config <path>` | Path to `config.json` (default: `~/.config/nanobot/config.json`) |
```bash
bun run start gateway
bun run start gateway -c ~/.nanobot-work/config.json
bun run nanobot gateway
bun run nanobot gateway -c ~/.config/nanobot-work/config.json
```
Handles `SIGINT` / `SIGTERM` for graceful shutdown.
## Configuration
Config file: `~/.nanobot/config.json` (or pass `-c <path>` to any command).
Config file: `~/.config/nanobot/config.json` (or pass `-c <path>` to any command).
Environment variable overrides:
@@ -110,15 +114,15 @@ Environment variable overrides:
|----------|-------------------|
| `NANOBOT_CONFIG` | path to config file |
| `NANOBOT_MODEL` | `agent.model` |
| `NANOBOT_WORKSPACE` | `agent.workspacePath` |
### Full config reference
```json
{
"agent": {
"model": "openrouter/anthropic/claude-sonnet-4-5",
"workspacePath": "~/.nanobot",
"provider": "openrouter",
"model": "anthropic/claude-sonnet-4-5",
"workspacePath": "~/.config/nanobot",
"maxTokens": 4096,
"contextWindowTokens": 65536,
"temperature": 0.7,
@@ -129,7 +133,7 @@ Environment variable overrides:
"openai": { "apiKey": "..." },
"google": { "apiKey": "..." },
"openrouter": { "apiKey": "..." },
"ollama": { "apiBase": "http://localhost:11434/api" }
"ollama": { "apiBase": "http://localhost:11434" }
},
"channels": {
"sendProgress": true,
@@ -166,19 +170,29 @@ Environment variable overrides:
}
```
### Providers
### Provider
Model names use a `provider/model` prefix scheme:
The `agent.provider` field is **required** and must be one of:
| Prefix | Provider | Example |
|--------|----------|---------|
| `anthropic/` | Anthropic direct | `anthropic/claude-opus-4-5` |
| `openai/` | OpenAI direct | `openai/gpt-4o` |
| `google/` | Google direct | `google/gemini-2.5-pro` |
| `openrouter/` | OpenRouter (any model) | `openrouter/anthropic/claude-sonnet-4-5` |
| `ollama/` | Local Ollama | `ollama/llama3.2` |
| Provider | Description |
|----------|-------------|
| `anthropic` | Anthropic direct (Claude models) |
| `openai` | OpenAI direct (GPT models) |
| `google` | Google direct (Gemini models) |
| `openrouter` | OpenRouter (access to many models) |
| `ollama` | Local Ollama instance |
For Ollama, set `providers.ollama.apiBase` (default: `http://localhost:11434/api`).
The `agent.model` field is also **required** and should be the model ID without any provider prefix:
| Provider | Example Model |
|----------|---------------|
| `anthropic` | `claude-sonnet-4-5`, `claude-opus-4-5` |
| `openai` | `gpt-4o`, `gpt-4o-mini` |
| `google` | `gemini-2.5-pro`, `gemini-2.0-flash` |
| `openrouter` | `anthropic/claude-sonnet-4-5` (OpenRouter uses its own model IDs) |
| `ollama` | `llama3.2`, `qwen2.5` |
For Ollama, set `providers.ollama.apiBase` (default: `http://localhost:11434`).
### Mattermost setup
@@ -198,7 +212,7 @@ For Ollama, set `providers.ollama.apiBase` (default: `http://localhost:11434/api
}
```
4. Run `bun run start gateway`
4. Run `bun run nanobot gateway`
`allowFrom` controls which users the bot responds to. Use `["*"]` to allow all users.
@@ -233,10 +247,10 @@ Run separate instances with different configs — useful for isolated workspaces
```bash
# Instance A
bun run start gateway -c ~/.nanobot-a/config.json
bun run nanobot gateway -c ~/.config/nanobot-a/config.json
# Instance B
bun run start gateway -c ~/.nanobot-b/config.json
bun run nanobot gateway -c ~/.config/nanobot-b/config.json
```
Each instance needs its own config file. Set a different `agent.workspacePath` per instance to keep memory, sessions, and cron jobs isolated:
@@ -244,7 +258,7 @@ Each instance needs its own config file. Set a different `agent.workspacePath` p
```json
{
"agent": {
"workspacePath": "~/.nanobot-a"
"workspacePath": "~/.config/nanobot-a"
}
}
```
@@ -252,10 +266,10 @@ Each instance needs its own config file. Set a different `agent.workspacePath` p
To run a local CLI session against a specific instance:
```bash
bun run start agent -c ~/.nanobot-a/config.json -m "Hello"
bun run nanobot agent -c ~/.config/nanobot-a/config.json -m "Hello"
# Temporarily override the workspace for a one-off run
bun run start agent -c ~/.nanobot-a/config.json -w /tmp/scratch
bun run nanobot agent -c ~/.config/nanobot-a/config.json -w /tmp/scratch
```
## Linux service (systemd)

View File

@@ -11,12 +11,12 @@
"@mozilla/readability": "^0.6.0",
"@openrouter/ai-sdk-provider": "^2.3.0",
"ai": "^6.0.116",
"ai-sdk-ollama": "^3.8.0",
"commander": "^14.0.3",
"cron-parser": "^5.5.0",
"js-tiktoken": "^1.0.21",
"jsonrepair": "^3.13.3",
"node-html-parser": "^7.1.0",
"ollama-ai-provider": "^1.2.0",
"picocolors": "^1.1.1",
"zod": "^4.3.6",
},
@@ -151,6 +151,8 @@
"ai": ["ai@6.0.116", "", { "dependencies": { "@ai-sdk/gateway": "3.0.66", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.19", "@opentelemetry/api": "1.9.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-7yM+cTmyRLeNIXwt4Vj+mrrJgVQ9RMIW5WO0ydoLoYkewIvsMcvUmqS4j2RJTUXaF1HphwmSKUMQ/HypNRGOmA=="],
"ai-sdk-ollama": ["ai-sdk-ollama@3.8.0", "", { "dependencies": { "@ai-sdk/provider": "^3.0.8", "@ai-sdk/provider-utils": "^4.0.15", "jsonrepair": "^3.13.2", "ollama": "^0.6.3" }, "peerDependencies": { "ai": "^6.0.89" } }, "sha512-Nlla8FpK8QFMNh9m8sPCZoNqnr+n+Ud0QTqpXNds4j/b/lbVZGaji13ZcRuuFvBwPwd4xnFkNrijJzi70Ih1Tg=="],
"base64-js": ["base64-js@1.5.1", "", {}, "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA=="],
"boolbase": ["boolbase@1.0.0", "", {}, "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww=="],
@@ -187,13 +189,11 @@
"luxon": ["luxon@3.7.2", "", {}, "sha512-vtEhXh/gNjI9Yg1u4jX/0YVPMvxzHuGgCm6tC5kZyb08yjGWGnqAjGJvcXbqQR2P3MyMEFnRbpcdFS6PBcLqew=="],
"nanoid": ["nanoid@3.3.11", "", { "bin": { "nanoid": "bin/nanoid.cjs" } }, "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w=="],
"node-html-parser": ["node-html-parser@7.1.0", "", { "dependencies": { "css-select": "^5.1.0", "he": "1.2.0" } }, "sha512-iJo8b2uYGT40Y8BTyy5ufL6IVbN8rbm/1QK2xffXU/1a/v3AAa0d1YAoqBNYqaS4R/HajkWIpIfdE6KcyFh1AQ=="],
"nth-check": ["nth-check@2.1.1", "", { "dependencies": { "boolbase": "^1.0.0" } }, "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w=="],
"ollama-ai-provider": ["ollama-ai-provider@1.2.0", "", { "dependencies": { "@ai-sdk/provider": "^1.0.0", "@ai-sdk/provider-utils": "^2.0.0", "partial-json": "0.1.7" }, "peerDependencies": { "zod": "^3.0.0" }, "optionalPeers": ["zod"] }, "sha512-jTNFruwe3O/ruJeppI/quoOUxG7NA6blG3ZyQj3lei4+NnJo7bi3eIRWqlVpRlu/mbzbFXeJSBuYQWF6pzGKww=="],
"ollama": ["ollama@0.6.3", "", { "dependencies": { "whatwg-fetch": "^3.6.20" } }, "sha512-KEWEhIqE5wtfzEIZbDCLH51VFZ6Z3ZSa6sIOg/E/tBV8S51flyqBOXi+bRxlOYKDf8i327zG9eSTb8IJxvm3Zg=="],
"oxfmt": ["oxfmt@0.40.0", "", { "dependencies": { "tinypool": "2.1.0" }, "optionalDependencies": { "@oxfmt/binding-android-arm-eabi": "0.40.0", "@oxfmt/binding-android-arm64": "0.40.0", "@oxfmt/binding-darwin-arm64": "0.40.0", "@oxfmt/binding-darwin-x64": "0.40.0", "@oxfmt/binding-freebsd-x64": "0.40.0", "@oxfmt/binding-linux-arm-gnueabihf": "0.40.0", "@oxfmt/binding-linux-arm-musleabihf": "0.40.0", "@oxfmt/binding-linux-arm64-gnu": "0.40.0", "@oxfmt/binding-linux-arm64-musl": "0.40.0", "@oxfmt/binding-linux-ppc64-gnu": "0.40.0", "@oxfmt/binding-linux-riscv64-gnu": "0.40.0", "@oxfmt/binding-linux-riscv64-musl": "0.40.0", "@oxfmt/binding-linux-s390x-gnu": "0.40.0", "@oxfmt/binding-linux-x64-gnu": "0.40.0", "@oxfmt/binding-linux-x64-musl": "0.40.0", "@oxfmt/binding-openharmony-arm64": "0.40.0", "@oxfmt/binding-win32-arm64-msvc": "0.40.0", "@oxfmt/binding-win32-ia32-msvc": "0.40.0", "@oxfmt/binding-win32-x64-msvc": "0.40.0" }, "bin": { "oxfmt": "bin/oxfmt" } }, "sha512-g0C3I7xUj4b4DcagevM9kgH6+pUHytikxUcn3/VUkvzTNaaXBeyZqb7IBsHwojeXm4mTBEC/aBjBTMVUkZwWUQ=="],
@@ -201,22 +201,16 @@
"oxlint-tsgolint": ["oxlint-tsgolint@0.16.0", "", { "optionalDependencies": { "@oxlint-tsgolint/darwin-arm64": "0.16.0", "@oxlint-tsgolint/darwin-x64": "0.16.0", "@oxlint-tsgolint/linux-arm64": "0.16.0", "@oxlint-tsgolint/linux-x64": "0.16.0", "@oxlint-tsgolint/win32-arm64": "0.16.0", "@oxlint-tsgolint/win32-x64": "0.16.0" }, "bin": { "tsgolint": "bin/tsgolint.js" } }, "sha512-4RuJK2jP08XwqtUu+5yhCbxEauCm6tv2MFHKEMsjbosK2+vy5us82oI3VLuHwbNyZG7ekZA26U2LLHnGR4frIA=="],
"partial-json": ["partial-json@0.1.7", "", {}, "sha512-Njv/59hHaokb/hRUjce3Hdv12wd60MtM9Z5Olmn+nehe0QDAsRtRbJPvJ0Z91TusF0SuZRIvnM+S4l6EIP8leA=="],
"picocolors": ["picocolors@1.1.1", "", {}, "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA=="],
"secure-json-parse": ["secure-json-parse@2.7.0", "", {}, "sha512-6aU+Rwsezw7VR8/nyvKTx8QpWH9FrcYiXXlqC4z5d5XQBDRqtbfsRjnwGyqbi3gddNtWHuEk9OANUotL26qKUw=="],
"tinypool": ["tinypool@2.1.0", "", {}, "sha512-Pugqs6M0m7Lv1I7FtxN4aoyToKg1C4tu+/381vH35y8oENM/Ai7f7C4StcoK4/+BSw9ebcS8jRiVrORFKCALLw=="],
"typescript": ["typescript@5.9.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw=="],
"undici-types": ["undici-types@7.18.2", "", {}, "sha512-AsuCzffGHJybSaRrmr5eHr81mwJU3kjw6M+uprWvCXiNeN9SOGwQ3Jn8jb8m3Z6izVgknn1R0FTCEAP2QrLY/w=="],
"whatwg-fetch": ["whatwg-fetch@3.6.20", "", {}, "sha512-EqhiFU6daOA8kpjOWTL0olhVOF3i7OrFzSYiGsEMB8GcXS+RrzauAERX65xMeNWVqxA6HXH2m69Z9LaKKdisfg=="],
"zod": ["zod@4.3.6", "", {}, "sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg=="],
"ollama-ai-provider/@ai-sdk/provider": ["@ai-sdk/provider@1.1.3", "", { "dependencies": { "json-schema": "^0.4.0" } }, "sha512-qZMxYJ0qqX/RfnuIaab+zp8UAeJn/ygXXAffR5I4N0n1IrvA6qBsjc8hXLmBiMV2zoXlifkacF7sEFnYnjBcqg=="],
"ollama-ai-provider/@ai-sdk/provider-utils": ["@ai-sdk/provider-utils@2.2.8", "", { "dependencies": { "@ai-sdk/provider": "1.1.3", "nanoid": "^3.3.8", "secure-json-parse": "^2.7.0" }, "peerDependencies": { "zod": "^3.23.8" } }, "sha512-fqhG+4sCVv8x7nFzYnFo19ryhAa3w096Kmc3hWxMQfW/TubPOmt3A6tYZhl4mUfQWWQMsuSkLrtjlWuXBVSGQA=="],
}
}

View File

@@ -123,7 +123,7 @@ Wraps Vercel AI SDK `generateText()` with:
- Normalized `LLMResponse` type
### SessionManager
Persists conversation history to JSONL files in `~/.nanobot/sessions/`.
Persists conversation history to JSONL files in `~/.config/nanobot/sessions/`.
- Key format: `{channel}:{chatId}` (e.g., `mattermost:abc123`)
- Supports history truncation for context window limits
@@ -136,7 +136,7 @@ When session history exceeds token limits, summarizes old messages and archives
## Configuration
- File: `~/.nanobot/config.json`
- File: `~/.config/nanobot/config.json`
- Validation: Zod schemas in `src/config/types.ts`
- Env overrides: `NANOBOT_MODEL`, `NANOBOT_WORKSPACE`, `NANOBOT_CONFIG`

View File

@@ -129,7 +129,7 @@ const timeout = parseInt(strArg(args, 'timeout', '30'), 10);
## Session Persistence
- Format: JSONL (one JSON object per line)
- Location: `~/.nanobot/sessions/{sessionKey}.jsonl`
- Location: `~/.config/nanobot/sessions/{sessionKey}.jsonl`
- Tool results truncated at 16,000 characters
- Memory consolidation triggered when approaching context window limit
@@ -147,5 +147,5 @@ Max 3 attempts with exponential backoff.
1. CLI flags (`-c`, `-m`, `-w`, `-M`)
2. Environment variables (`NANOBOT_CONFIG`, `NANOBOT_MODEL`, `NANOBOT_WORKSPACE`)
3. Config file (`~/.nanobot/config.json`)
3. Config file (`~/.config/nanobot/config.json`)
4. Zod schema defaults

View File

@@ -19,14 +19,22 @@ Docs directory created with 4 files (PRD.md, Architecture.md, API.md, Discoverie
- **lint**: all `${err}` in template literals → `${String(err)}`; `String(args['key'] ?? '')` → `strArg(args, 'key')` helper; unused `onProgress` param → `_onProgress`; WebSocket `onerror` `err` type is `Event` → use `err.type`
## Work Queue (next steps)
1. [ ] Runtime smoke test: `bun run start --help`
2. [ ] Test with a real Mattermost config (optional — user can do this)
3. [ ] Write sample `~/.nanobot/config.json` in README or docs
1. [x] Create workspace helper module (src/cli/utils.ts) with ensureWorkspace() and syncTemplates()
2. [x] Create onboard command (src/cli/onboard.ts) with path argument and directory-not-empty guard
3. [x] Agent/gateway commands check workspace exists (throw if not found)
4. [x] Added required `provider` field to agent config (values: anthropic, openai, google, openrouter, ollama)
5. [x] Provider resolution uses explicit provider from config (no model prefix parsing)
6. [x] Typecheck and lint pass (0 errors)
7. [x] Test onboard and agent commands work correctly
8. [x] Updated Ollama provider from `ollama-ai-provider` to `ai-sdk-ollama`
9. [ ] Test with a real Mattermost config (optional — user can do this)
## Key Decisions Made
- Mattermost channel uses raw WebSocket + fetch (no mattermostdriver, no SSL hack)
- No MCP support (use shell tools / CLI instead)
- No reasoning/thinking token handling (can add later)
- Config is fresh Zod schema (no migration from Python config needed)
- `ollama-ai-provider` package (not `@ai-sdk/ollama` which 404s on npm)
- `ai-sdk-ollama` package for Ollama provider (replaced old `ollama-ai-provider`)
- `strArg(args, key, fallback?)` helper exported from `agent/tools/base.ts` for safe unknown→string extraction
- Agent config requires explicit `provider` field (no more model prefix like "anthropic/claude-...")
- Model names are now just the raw model ID (e.g., "claude-sonnet-4-5" not "anthropic/claude-sonnet-4-5")

View File

@@ -12,7 +12,7 @@ A personal AI assistant that connects to Mattermost (via WebSocket) and runs an
## Key design principles (from Python codebase)
- Ultra-lightweight: minimal dependencies, small codebase
- Provider-agnostic: works with Anthropic, OpenAI, Google, Ollama, OpenRouter
- Workspace-centric: everything lives in a configurable workspace directory (`~/.nanobot/`)
- Workspace-centric: everything lives in a configurable workspace directory (`~/.config/nanobot/`)
- SOUL/AGENTS/USER/TOOLS.md: workspace markdown files that define the bot's personality and rules
- Memory is just markdown files (`MEMORY.md`, `HISTORY.md`) — no database

View File

@@ -8,41 +8,21 @@
- All dependencies installed
- `src/` directory structure scaffolded
- Memory bank initialized
- All source files written (first pass):
- `src/config/types.ts` + `src/config/loader.ts`
- `src/bus/types.ts` + `src/bus/queue.ts`
- `src/provider/types.ts` + `src/provider/index.ts`
- `src/session/types.ts` + `src/session/manager.ts`
- `src/agent/tools/base.ts` (+ `strArg` helper)
- `src/agent/tools/filesystem.ts`
- `src/agent/tools/shell.ts`
- `src/agent/tools/web.ts`
- `src/agent/tools/message.ts`
- `src/agent/tools/spawn.ts` + `src/agent/subagent.ts`
- `src/agent/tools/cron.ts`
- `src/cron/types.ts` + `src/cron/service.ts`
- `src/heartbeat/service.ts`
- `src/agent/memory.ts`
- `src/agent/skills.ts`
- `src/agent/context.ts`
- `src/agent/loop.ts`
- `src/channels/base.ts` + `src/channels/mattermost.ts`
- `src/channels/manager.ts`
- `src/cli/commands.ts`
- `index.ts`
- All source files written (first pass)
- Templates and skills copied from Python repo
- **Full typecheck pass**: `tsc --noEmit` → 0 errors
- **Full lint pass**: `oxlint` → 0 errors, 0 warnings
- `package.json` scripts added: `start`, `dev`, `typecheck`
- **Docs created**: `/docs/PRD.md`, `Architecture.md`, `API.md`, `Discoveries.md`
- **Onboard command**: Created `src/cli/onboard.ts` with workspace initialization
- **Provider config**: Added required `provider` field to agent config
- **Workspace validation**: Agent/gateway commands throw if workspace doesn't exist
### 🔄 In Progress
- Nothing
### ⏳ Pending
- Runtime smoke test: `bun run start --help`
- Integration test with a real Mattermost server
- Sample `~/.nanobot/config.json` documentation
## Known Issues / Risks
- `ollama-ai-provider` v1.2.0 returns `LanguageModelV1` (not V2/V3 as expected by AI SDK v6) — cast used at call site. Works at runtime.

View File

@@ -34,7 +34,7 @@ Inbound and outbound messages are passed through a typed `AsyncQueue<T>`. The qu
- Returns a normalized `LLMResponse` type
## Config Pattern
- Config file: `~/.nanobot/config.json` (camelCase JSON)
- Config file: `~/.config/nanobot/config.json` (camelCase JSON)
- Loaded with `loadConfig()`, validated by Zod, returns inferred `Config` type
- `NANOBOT_` env vars can override fields (e.g. `NANOBOT_MODEL`)
@@ -49,6 +49,28 @@ Inbound and outbound messages are passed through a typed `AsyncQueue<T>`. The qu
## Logging Pattern
Use `console.error` / `console.warn` / `console.info` / `console.debug` — no external logger. Color via `picocolors` in CLI output only.
## CLI Command Pattern
Each command is in its own file with a registration function:
```ts
// src/cli/agent.ts
export function agentCommand(program: Command, config: Config, workspace: string): void {
program.command('agent')
.description('...')
.option('-m, --message <text>', 'Single message to process')
.action(async (opts) => { /* ... */ })
}
// src/cli/commands.ts (bootstrap)
export function createCli(): Command {
const program = new Command('nanobot')...
const config = loadConfig(opts.config);
const workspace = resolveWorkspacePath(config.agent.workspacePath);
gatewayCommand(program, config, workspace);
agentCommand(program, config, workspace);
return program;
}
```
## File Layout
```
src/
@@ -67,8 +89,12 @@ src/
tools/base.ts + filesystem.ts + shell.ts + web.ts + message.ts + spawn.ts + cron.ts
channels/
base.ts + mattermost.ts + manager.ts
cli/commands.ts
index.ts
cli/
types.ts # CommandHandler type
commands.ts # Bootstrap - loads config, registers commands
agent.ts # agentCommand() - interactive/single-shot mode
gateway.ts # gatewayCommand() - full runtime with Mattermost
index.ts
templates/ (SOUL.md, AGENTS.md, USER.md, TOOLS.md, HEARTBEAT.md, memory/MEMORY.md)
skills/ (copied from Python repo)
```

View File

@@ -4,13 +4,30 @@
"type": "module",
"module": "index.ts",
"scripts": {
"start": "bun run index.ts",
"nanobot": "bun run index.ts",
"dev": "bun --watch run index.ts",
"typecheck": "tsc --noEmit",
"fmt": "oxfmt --check",
"fmt:fix": "oxfmt",
"lint": "oxlint",
"lint:fix": "oxlint --fix"
"lint:fix": "oxlint --fix",
"checks": "bun run lint && bun run fmt"
},
"dependencies": {
"@ai-sdk/anthropic": "^3.0.58",
"@ai-sdk/google": "^3.0.43",
"@ai-sdk/openai": "^3.0.41",
"@mozilla/readability": "^0.6.0",
"@openrouter/ai-sdk-provider": "^2.3.0",
"ai": "^6.0.116",
"ai-sdk-ollama": "^3.8.0",
"commander": "^14.0.3",
"cron-parser": "^5.5.0",
"js-tiktoken": "^1.0.21",
"jsonrepair": "^3.13.3",
"node-html-parser": "^7.1.0",
"picocolors": "^1.1.1",
"zod": "^4.3.6"
},
"devDependencies": {
"@types/bun": "latest",
@@ -21,21 +38,5 @@
},
"peerDependencies": {
"typescript": "^5"
},
"dependencies": {
"@ai-sdk/anthropic": "^3.0.58",
"@ai-sdk/google": "^3.0.43",
"@ai-sdk/openai": "^3.0.41",
"@mozilla/readability": "^0.6.0",
"@openrouter/ai-sdk-provider": "^2.3.0",
"ai": "^6.0.116",
"commander": "^14.0.3",
"cron-parser": "^5.5.0",
"js-tiktoken": "^1.0.21",
"jsonrepair": "^3.13.3",
"node-html-parser": "^7.1.0",
"ollama-ai-provider": "^1.2.0",
"picocolors": "^1.1.1",
"zod": "^4.3.6"
}
}

View File

@@ -27,21 +27,21 @@ npx --yes clawhub@latest search "web scraping" --limit 5
## Install
```bash
npx --yes clawhub@latest install <slug> --workdir ~/.nanobot/workspace
npx --yes clawhub@latest install <slug> --workdir ~/.config/nanobot/workspace
```
Replace `<slug>` with the skill name from search results. This places the skill into `~/.nanobot/workspace/skills/`, where nanobot loads workspace skills from. Always include `--workdir`.
Replace `<slug>` with the skill name from search results. This places the skill into `~/.config/nanobot/workspace/skills/`, where nanobot loads workspace skills from. Always include `--workdir`.
## Update
```bash
npx --yes clawhub@latest update --all --workdir ~/.nanobot/workspace
npx --yes clawhub@latest update --all --workdir ~/.config/nanobot/workspace
```
## List installed
```bash
npx --yes clawhub@latest list --workdir ~/.nanobot/workspace
npx --yes clawhub@latest list --workdir ~/.config/nanobot/workspace
```
## Notes
@@ -49,5 +49,5 @@ npx --yes clawhub@latest list --workdir ~/.nanobot/workspace
- Requires Node.js (`npx` comes with it).
- No API key needed for search and install.
- Login (`npx --yes clawhub@latest login`) is only required for publishing.
- `--workdir ~/.nanobot/workspace` is critical — without it, skills install to the current directory instead of the nanobot workspace.
- `--workdir ~/.config/nanobot/workspace` is critical — without it, skills install to the current directory instead of the nanobot workspace.
- After install, remind the user to start a new session to load the skill.

View File

@@ -57,7 +57,11 @@ export class AgentLoop {
this._model = opts.model ?? opts.provider.defaultModel;
this._maxIterations = opts.maxIterations ?? 40;
const execConfig = opts.execConfig ?? { timeout: 120, denyPatterns: [], restrictToWorkspace: false };
const execConfig = opts.execConfig ?? {
timeout: 120,
denyPatterns: [],
restrictToWorkspace: false,
};
this._ctx = new ContextBuilder(opts.workspace);
this._sessions = opts.sessionManager ?? new SessionManager(opts.workspace);
@@ -94,7 +98,11 @@ export class AgentLoop {
restrictToWorkspace?: boolean;
}): void {
const allowed = opts.restrictToWorkspace ? this._workspace : undefined;
const execConfig = opts.execConfig ?? { timeout: 120, denyPatterns: [], restrictToWorkspace: false };
const execConfig = opts.execConfig ?? {
timeout: 120,
denyPatterns: [],
restrictToWorkspace: false,
};
this._tools.register(new ReadFileTool({ workspace: this._workspace, allowedDir: allowed }));
this._tools.register(new WriteFileTool({ workspace: this._workspace, allowedDir: allowed }));
@@ -110,9 +118,7 @@ export class AgentLoop {
);
this._tools.register(new WebSearchTool({ apiKey: opts.braveApiKey, proxy: opts.webProxy }));
this._tools.register(new WebFetchTool({ proxy: opts.webProxy }));
this._tools.register(
new MessageTool((msg) => this._bus.publishOutbound(msg)),
);
this._tools.register(new MessageTool((msg) => this._bus.publishOutbound(msg)));
this._tools.register(new SpawnTool(this._subagents));
if (opts.cronService) {
this._tools.register(new CronTool(opts.cronService));
@@ -191,7 +197,12 @@ export class AgentLoop {
if (response) {
this._bus.publishOutbound(response);
} else if (msg.channel === 'cli') {
this._bus.publishOutbound({ channel: msg.channel, chatId: msg.chatId, content: '', metadata: msg.metadata });
this._bus.publishOutbound({
channel: msg.channel,
chatId: msg.chatId,
content: '',
metadata: msg.metadata,
});
}
} catch (err) {
if ((err as Error).name === 'AbortError') {
@@ -215,17 +226,32 @@ export class AgentLoop {
): Promise<OutboundMessage | null> {
// System messages (subagent results) routed as "system" channel
if (msg.channel === 'system') {
const [channel, chatId] = msg.chatId.includes(':') ? msg.chatId.split(':', 2) as [string, string] : ['cli', msg.chatId];
const [channel, chatId] = msg.chatId.includes(':')
? (msg.chatId.split(':', 2) as [string, string])
: ['cli', msg.chatId];
const key = `${channel}:${chatId}`;
const session = this._sessions.getOrCreate(key);
await this._consolidator.maybeConsolidateByTokens(session);
this._setToolContext(channel, chatId);
const messages = this._ctx.buildMessages({ history: session.getHistory(0) as Array<Record<string, unknown>>, currentMessage: msg.content, channel, chatId });
const { finalContent, allMessages } = await this._runAgentLoop(messages as ModelMessage[], signal);
const messages = this._ctx.buildMessages({
history: session.getHistory(0) as Array<Record<string, unknown>>,
currentMessage: msg.content,
channel,
chatId,
});
const { finalContent, allMessages } = await this._runAgentLoop(
messages as ModelMessage[],
signal,
);
this._saveTurn(session, allMessages, 1 + session.getHistory(0).length);
this._sessions.save(session);
await this._consolidator.maybeConsolidateByTokens(session);
return { channel, chatId, content: finalContent ?? 'Background task completed.', metadata: {} };
return {
channel,
chatId,
content: finalContent ?? 'Background task completed.',
metadata: {},
};
}
const preview = msg.content.length > 80 ? `${msg.content.slice(0, 80)}...` : msg.content;
@@ -238,15 +264,31 @@ export class AgentLoop {
const cmd = msg.content.trim().toLowerCase();
if (cmd === '/new') {
if (!(await this._consolidator.archiveUnconsolidated(session))) {
return { channel: msg.channel, chatId: msg.chatId, content: 'Memory archival failed, session not cleared. Please try again.', metadata: {} };
return {
channel: msg.channel,
chatId: msg.chatId,
content: 'Memory archival failed, session not cleared. Please try again.',
metadata: {},
};
}
session.clear();
this._sessions.save(session);
this._sessions.invalidate(session.key);
return { channel: msg.channel, chatId: msg.chatId, content: 'New session started.', metadata: {} };
return {
channel: msg.channel,
chatId: msg.chatId,
content: 'New session started.',
metadata: {},
};
}
if (cmd === '/help') {
return { channel: msg.channel, chatId: msg.chatId, content: 'nanobot commands:\n/new — Start a new conversation\n/stop — Stop the current task\n/help — Show available commands', metadata: {} };
return {
channel: msg.channel,
chatId: msg.chatId,
content:
'nanobot commands:\n/new — Start a new conversation\n/stop — Stop the current task\n/help — Show available commands',
metadata: {},
};
}
await this._consolidator.maybeConsolidateByTokens(session);
@@ -256,7 +298,12 @@ export class AgentLoop {
if (msgTool instanceof MessageTool) msgTool.startTurn();
const history = session.getHistory(0) as Array<Record<string, unknown>>;
const initialMessages = this._ctx.buildMessages({ history, currentMessage: msg.content, channel: msg.channel, chatId: msg.chatId });
const initialMessages = this._ctx.buildMessages({
history,
currentMessage: msg.content,
channel: msg.channel,
chatId: msg.chatId,
});
const onProgress = async (content: string, opts?: { toolHint?: boolean }) => {
this._bus.publishOutbound({
@@ -267,7 +314,11 @@ export class AgentLoop {
});
};
const { finalContent, allMessages } = await this._runAgentLoop(initialMessages as ModelMessage[], signal, onProgress);
const { finalContent, allMessages } = await this._runAgentLoop(
initialMessages as ModelMessage[],
signal,
onProgress,
);
this._saveTurn(session, allMessages, 1 + history.length);
this._sessions.save(session);
@@ -311,13 +362,18 @@ export class AgentLoop {
if (response.toolCalls.length > 0) {
if (onProgress) {
if (response.content) await onProgress(response.content);
const hint = response.toolCalls.map((tc) => {
const firstVal = Object.values(tc.arguments)[0];
const display = typeof firstVal === 'string'
? (firstVal.length > 40 ? `"${firstVal.slice(0, 40)}…"` : `"${firstVal}"`)
: '';
return `${tc.name}(${display})`;
}).join(', ');
const hint = response.toolCalls
.map((tc) => {
let display = '';
const firstVal = Object.values(tc.arguments)[0];
if (typeof firstVal === 'string') {
display = `"${firstVal.slice(0, 40) + (firstVal.length > 40 ? '…' : '')}"`;
}
return `${tc.name}(${display})`;
})
.join(', ');
await onProgress(hint, { toolHint: true });
}
@@ -353,7 +409,11 @@ export class AgentLoop {
if (role === 'assistant' && !content && !(entry['tool_calls'] as unknown[])?.length) continue;
// Truncate large tool results
if (role === 'tool' && typeof content === 'string' && content.length > TOOL_RESULT_MAX_CHARS) {
if (
role === 'tool' &&
typeof content === 'string' &&
content.length > TOOL_RESULT_MAX_CHARS
) {
entry['content'] = `${content.slice(0, TOOL_RESULT_MAX_CHARS)}\n... (truncated)`;
}

View File

@@ -94,7 +94,11 @@ export class MemoryStore {
return mem ? `## Long-term Memory\n${mem}` : '';
}
async consolidate(messages: Array<Record<string, unknown>>, provider: LLMProvider, model: string): Promise<boolean> {
async consolidate(
messages: Array<Record<string, unknown>>,
provider: LLMProvider,
model: string,
): Promise<boolean> {
if (messages.length === 0) return true;
const currentMemory = this.readLongTerm();
@@ -104,7 +108,8 @@ export class MemoryStore {
.map((m) => {
const ts = typeof m['timestamp'] === 'string' ? m['timestamp'].slice(0, 16) : '?';
const role = (typeof m['role'] === 'string' ? m['role'] : 'unknown').toUpperCase();
const content = typeof m['content'] === 'string' ? m['content'] : JSON.stringify(m['content']);
const content =
typeof m['content'] === 'string' ? m['content'] : JSON.stringify(m['content']);
return `[${ts}] ${role}: ${content}`;
})
.join('\n');
@@ -140,8 +145,10 @@ ${formatted}`;
return false;
}
const entry = typeof tc.arguments['history_entry'] === 'string' ? tc.arguments['history_entry'] : null;
const update = typeof tc.arguments['memory_update'] === 'string' ? tc.arguments['memory_update'] : null;
const entry =
typeof tc.arguments['history_entry'] === 'string' ? tc.arguments['history_entry'] : null;
const update =
typeof tc.arguments['memory_update'] === 'string' ? tc.arguments['memory_update'] : null;
if (entry) this.appendHistory(entry);
if (update && update !== currentMemory) this.writeLongTerm(update);
@@ -165,7 +172,12 @@ export class MemoryConsolidator {
private _model: string;
private _sessions: SessionManager;
private _contextWindowTokens: number;
private _buildMessages: (opts: { history: Array<Record<string, unknown>>; currentMessage: string; channel?: string; chatId?: string }) => Array<Record<string, unknown>>;
private _buildMessages: (opts: {
history: Array<Record<string, unknown>>;
currentMessage: string;
channel?: string;
chatId?: string;
}) => Array<Record<string, unknown>>;
private _getToolDefs: () => Array<Record<string, unknown>>;
private _locks = new Map<string, Promise<void>>();
@@ -175,7 +187,12 @@ export class MemoryConsolidator {
model: string;
sessions: SessionManager;
contextWindowTokens: number;
buildMessages: (opts: { history: Array<Record<string, unknown>>; currentMessage: string; channel?: string; chatId?: string }) => Array<Record<string, unknown>>;
buildMessages: (opts: {
history: Array<Record<string, unknown>>;
currentMessage: string;
channel?: string;
chatId?: string;
}) => Array<Record<string, unknown>>;
getToolDefs: () => Array<Record<string, unknown>>;
}) {
this._store = new MemoryStore(opts.workspace);
@@ -195,15 +212,23 @@ export class MemoryConsolidator {
// Chain promises per session key to serialize consolidation
const prev = this._locks.get(key) ?? Promise.resolve();
const next = prev.then(fn);
this._locks.set(key, next.catch(() => {}));
this._locks.set(
key,
next.catch(() => {}),
);
await next;
}
async archiveUnconsolidated(session: Session): Promise<boolean> {
let ok = false;
await this._withLock(session.key, async () => {
const snapshot = session.messages.slice(session.lastConsolidated) as Array<Record<string, unknown>>;
if (snapshot.length === 0) { ok = true; return; }
const snapshot = session.messages.slice(session.lastConsolidated) as Array<
Record<string, unknown>
>;
if (snapshot.length === 0) {
ok = true;
return;
}
ok = await this._store.consolidate(snapshot, this._provider, this._model);
});
return ok;
@@ -219,7 +244,8 @@ export class MemoryConsolidator {
const history = session.getHistory(0) as Array<Record<string, unknown>>;
const probe = this._buildMessages({ history, currentMessage: '[token-probe]' });
const toolTokens = estimateTokens(JSON.stringify(this._getToolDefs()));
const estimated = estimateMessagesTokens(probe as Array<Record<string, unknown>>) + toolTokens;
const estimated =
estimateMessagesTokens(probe as Array<Record<string, unknown>>) + toolTokens;
if (estimated < this._contextWindowTokens) return; // fits — done
@@ -227,10 +253,14 @@ export class MemoryConsolidator {
const boundary = this._pickBoundary(session, Math.max(1, estimated - target));
if (boundary === null) return;
const chunk = session.messages.slice(session.lastConsolidated, boundary) as Array<Record<string, unknown>>;
const chunk = session.messages.slice(session.lastConsolidated, boundary) as Array<
Record<string, unknown>
>;
if (chunk.length === 0) return;
console.info(`[memory] Token consolidation round ${round}: ~${estimated} tokens, chunk=${chunk.length} msgs`);
console.info(
`[memory] Token consolidation round ${round}: ~${estimated} tokens, chunk=${chunk.length} msgs`,
);
if (!(await this._store.consolidate(chunk, this._provider, this._model))) return;
session.lastConsolidated = boundary;

View File

@@ -138,7 +138,10 @@ export class SkillsLoader {
const colon = line.indexOf(':');
if (colon < 0) continue;
const key = line.slice(0, colon).trim();
const val = line.slice(colon + 1).trim().replace(/^["']|["']$/g, '');
const val = line
.slice(colon + 1)
.trim()
.replace(/^["']|["']$/g, '');
if (key === 'description') meta.description = val;
if (key === 'always') meta.always = val === 'true';
if (key === 'metadata') meta.metadata = val;

View File

@@ -55,12 +55,16 @@ export class CronTool implements Tool {
case 'enable': {
const id = strArg(args, 'id');
if (!id) return 'Error: id is required for enable.';
return this._service.enableJob(id, true) ? `Job ${id} enabled.` : `Error: job ${id} not found.`;
return this._service.enableJob(id, true)
? `Job ${id} enabled.`
: `Error: job ${id} not found.`;
}
case 'disable': {
const id = strArg(args, 'id');
if (!id) return 'Error: id is required for disable.';
return this._service.enableJob(id, false) ? `Job ${id} disabled.` : `Error: job ${id} not found.`;
return this._service.enableJob(id, false)
? `Job ${id} disabled.`
: `Error: job ${id} not found.`;
}
case 'run': {
const id = strArg(args, 'id');

View File

@@ -5,7 +5,16 @@ import type { Tool } from './base.ts';
const MAX_READ_CHARS = 128_000;
const MAX_ENTRIES = 2000;
const IGNORED_DIRS = new Set(['.git', 'node_modules', '__pycache__', '.venv', 'venv', 'dist', '.next', 'build']);
const IGNORED_DIRS = new Set([
'.git',
'node_modules',
'__pycache__',
'.venv',
'venv',
'dist',
'.next',
'build',
]);
// ---------------------------------------------------------------------------
// read_file
@@ -55,7 +64,10 @@ export class ReadFileTool implements Tool {
const slice = lines.slice(start, end);
const numbered = slice.map((l, i) => `${start + i + 1}: ${l}`).join('\n');
const truncated = numbered.length > MAX_READ_CHARS ? numbered.slice(0, MAX_READ_CHARS) + '\n... (truncated)' : numbered;
const truncated =
numbered.length > MAX_READ_CHARS
? numbered.slice(0, MAX_READ_CHARS) + '\n... (truncated)'
: numbered;
const totalLines = lines.length;
const header = `File: ${absPath} (${totalLines} lines total)\n`;
@@ -160,7 +172,7 @@ export class EditFileTool implements Tool {
let updated: string;
if (replaceAll) {
updated = content.split(oldString).join(newString);
count = (content.split(oldString).length - 1);
count = content.split(oldString).length - 1;
} else {
const idx = content.indexOf(oldString);
if (idx === -1) return `Error: oldString not found in ${absPath}.`;

View File

@@ -7,12 +7,7 @@ const DEFAULT_TIMEOUT_S = 120;
const MAX_TIMEOUT_S = 600;
const OUTPUT_MAX_CHARS = 32_000;
const DEFAULT_DENY_PATTERNS = [
/rm\s+-rf\s+\/(?!\S)/,
/mkfs/,
/dd\s+if=/,
/:\(\)\s*\{.*\}/,
];
const DEFAULT_DENY_PATTERNS = [/rm\s+-rf\s+\/(?!\S)/, /mkfs/, /dd\s+if=/, /:\(\)\s*\{.*\}/];
export class ExecTool implements Tool {
readonly name = 'exec';

View File

@@ -7,7 +7,10 @@ export class SpawnTool implements Tool {
readonly description =
'Spawn a background subagent to handle a long-running task autonomously. The subagent has access to filesystem, shell, and web tools. It will report its result back when done.';
readonly parameters = {
task: { type: 'string', description: 'Full description of the task for the subagent to complete.' },
task: {
type: 'string',
description: 'Full description of the task for the subagent to complete.',
},
};
readonly required = ['task'];

View File

@@ -12,7 +12,8 @@ const MAX_CONTENT_CHARS = 50_000;
export class WebSearchTool implements Tool {
readonly name = 'web_search';
readonly description = 'Search the web using Brave Search. Returns a list of results with titles, URLs, and snippets.';
readonly description =
'Search the web using Brave Search. Returns a list of results with titles, URLs, and snippets.';
readonly parameters = {
query: { type: 'string', description: 'Search query.' },
count: { type: 'number', description: 'Number of results (default 10, max 20).' },
@@ -30,7 +31,8 @@ export class WebSearchTool implements Tool {
async execute(args: Record<string, unknown>): Promise<string> {
const query = strArg(args, 'query').trim();
if (!query) return 'Error: query is required.';
if (!this._apiKey) return 'Error: BRAVE_API_KEY not configured (set tools.web.braveApiKey in config).';
if (!this._apiKey)
return 'Error: BRAVE_API_KEY not configured (set tools.web.braveApiKey in config).';
const count = Math.min(Number(args['count'] ?? 10), 20);
const url = `https://api.search.brave.com/res/v1/web/search?q=${encodeURIComponent(query)}&count=${count}`;
@@ -38,7 +40,7 @@ export class WebSearchTool implements Tool {
try {
const res = await fetchWithTimeout(url, {
headers: {
'Accept': 'application/json',
Accept: 'application/json',
'Accept-Encoding': 'gzip',
'X-Subscription-Token': this._apiKey,
},
@@ -46,7 +48,9 @@ export class WebSearchTool implements Tool {
if (!res.ok) return `Error: Brave Search API returned ${res.status}: ${await res.text()}`;
const data = (await res.json()) as { web?: { results?: Array<{ title: string; url: string; description: string }> } };
const data = (await res.json()) as {
web?: { results?: Array<{ title: string; url: string; description: string }> };
};
const results = data.web?.results ?? [];
if (results.length === 0) return 'No results found.';
@@ -70,7 +74,11 @@ export class WebFetchTool implements Tool {
'Fetch a URL and return its content. HTML pages are extracted to readable text. Use mode="raw" for JSON/XML/plain text.';
readonly parameters = {
url: { type: 'string', description: 'URL to fetch.' },
mode: { type: 'string', enum: ['markdown', 'text', 'raw'], description: 'Output mode (default: text).' },
mode: {
type: 'string',
enum: ['markdown', 'text', 'raw'],
description: 'Output mode (default: text).',
},
};
readonly required = ['url'];
@@ -96,8 +104,14 @@ export class WebFetchTool implements Tool {
const contentType = res.headers.get('content-type') ?? '';
const body = await res.text();
if (mode === 'raw' || (!contentType.includes('text/html') && !body.trimStart().startsWith('<'))) {
const truncated = body.length > MAX_CONTENT_CHARS ? body.slice(0, MAX_CONTENT_CHARS) + '\n... (truncated)' : body;
if (
mode === 'raw' ||
(!contentType.includes('text/html') && !body.trimStart().startsWith('<'))
) {
const truncated =
body.length > MAX_CONTENT_CHARS
? body.slice(0, MAX_CONTENT_CHARS) + '\n... (truncated)'
: body;
return truncated;
}
@@ -114,9 +128,10 @@ export class WebFetchTool implements Tool {
const title = article?.title ?? '';
const textContent = article?.textContent ?? stripTags(body);
const trimmed = textContent.replace(/\n{3,}/g, '\n\n').trim();
const truncated = trimmed.length > MAX_CONTENT_CHARS
? trimmed.slice(0, MAX_CONTENT_CHARS) + '\n... (truncated)'
: trimmed;
const truncated =
trimmed.length > MAX_CONTENT_CHARS
? trimmed.slice(0, MAX_CONTENT_CHARS) + '\n... (truncated)'
: trimmed;
return title ? `# ${title}\n\n${truncated}` : truncated;
} catch (err) {
@@ -136,7 +151,10 @@ function fetchWithTimeout(url: string, init: RequestInit = {}): Promise<Response
}
function stripTags(html: string): string {
return html.replace(/<[^>]*>/g, ' ').replace(/\s+/g, ' ').trim();
return html
.replace(/<[^>]*>/g, ' ')
.replace(/\s+/g, ' ')
.trim();
}
/** Build a minimal pseudo-document that satisfies Readability's interface. */
@@ -166,7 +184,9 @@ function makePseudoDocument(
createTreeWalker: () => ({ nextNode: () => null }),
createRange: () => ({ selectNodeContents: () => {}, cloneContents: () => null }),
// biome-ignore lint/suspicious/noExplicitAny: Readability duck-typing
get innerHTML() { return html; },
get innerHTML() {
return html;
},
location: { href: url },
};

View File

@@ -60,7 +60,7 @@ export class ChannelManager {
}
const content = msg.content ?? '';
const chatId = msg.metadata?.['channel_id'] as string | undefined ?? msg.chatId;
const chatId = (msg.metadata?.['channel_id'] as string | undefined) ?? msg.chatId;
const rootId = msg.metadata?.['root_id'] as string | undefined;
try {

View File

@@ -165,7 +165,11 @@ export class MattermostChannel extends BaseChannel {
} else {
// Group channel
if (!this._shouldRespondInGroup(post.message, this._cfg.groupPolicy)) return;
if (this._cfg.groupPolicy === 'allowlist' && !this.isAllowed(post.user_id, this._cfg.groupAllowFrom)) return;
if (
this._cfg.groupPolicy === 'allowlist' &&
!this.isAllowed(post.user_id, this._cfg.groupAllowFrom)
)
return;
if (!this.isAllowed(post.user_id, this._cfg.allowFrom)) return;
}
@@ -226,7 +230,7 @@ export class MattermostChannel extends BaseChannel {
const res = await fetch(`${this._baseUrl}${path}`, {
method,
headers: {
'Authorization': `Bearer ${this._cfg.token}`,
Authorization: `Bearer ${this._cfg.token}`,
'Content-Type': 'application/json',
},
body: body !== undefined ? JSON.stringify(body) : undefined,

94
src/cli/agent.ts Normal file
View File

@@ -0,0 +1,94 @@
import { createInterface } from 'node:readline';
import { Command } from 'commander';
import pc from 'picocolors';
import { AgentLoop } from '../agent/loop.ts';
import { MessageBus } from '../bus/queue.ts';
import { makeProvider } from '../provider/index.ts';
import { loadConfig } from '../config/loader.ts';
import { ensureWorkspace } from './utils.ts';
/**
 * Register the `agent` subcommand on the CLI.
 *
 * Runs the agent either as a one-shot processor (`-m <text>` prints the
 * result and returns) or as an interactive readline REPL. Config is loaded
 * via loadConfig (which handles a missing/invalid config) and the workspace
 * must already exist — ensureWorkspace exits otherwise.
 */
export function agentCommand(program: Command): void {
  program
    .command('agent')
    .description('Run the agent interactively or send a single message.')
    .option('-c, --config <path>', 'Path to config.json')
    .option('-m, --message <text>', 'Single message to process (non-interactive)')
    .option('-M, --model <model>', 'Model override')
    .action(async (opts: { config?: string; message?: string; model?: string }) => {
      const config = loadConfig(opts.config);
      const workspace = config.agent.workspacePath;
      ensureWorkspace(workspace);
      console.info(pc.magenta(`workspace path: ${workspace}`));
      // CLI -M flag takes precedence over the configured model.
      const model = opts.model ?? config.agent.model;
      const provider = makeProvider(
        config.providers,
        config.agent.provider,
        model,
        config.agent.maxTokens,
        config.agent.temperature,
      );
      const bus = new MessageBus();
      const agentLoop = new AgentLoop({
        bus,
        provider,
        workspace,
        model,
        maxIterations: config.agent.maxToolIterations,
        contextWindowTokens: config.agent.contextWindowTokens,
        braveApiKey: config.tools.web.braveApiKey,
        webProxy: config.tools.web.proxy,
        execConfig: config.tools.exec,
        restrictToWorkspace: config.tools.restrictToWorkspace,
      });
      // Single-shot mode
      if (opts.message) {
        const result = await agentLoop.processDirect(opts.message);
        console.log(result);
        return;
      }
      // Interactive mode
      console.info(pc.green('nanobot interactive mode. Type your message, Ctrl+C to exit.'));
      const rl = createInterface({ input: process.stdin, output: process.stdout });
      // Recursive prompt loop: each completed answer re-arms the prompt.
      const promptUser = () => {
        rl.question(pc.cyan('You: '), async (input) => {
          const text = input.trim();
          if (!text) {
            promptUser();
            return;
          }
          // Streams intermediate agent output dimmed; tool hints are bracketed.
          const onProgress = async (content: string, opts?: { toolHint?: boolean }) => {
            if (opts?.toolHint) {
              process.stdout.write(pc.dim(` [${content}]\n`));
            } else {
              process.stdout.write(pc.dim(` ${content}\n`));
            }
          };
          const result = await agentLoop.processDirect(
            text,
            'cli:interactive',
            'cli',
            'interactive',
            onProgress,
          );
          console.log(pc.bold('Bot:'), result);
          promptUser();
        });
      };
      // Ctrl+C / stdin close: stop the agent loop and exit cleanly.
      rl.on('close', () => {
        agentLoop.stop();
        process.exit(0);
      });
      promptUser();
    });
}

View File

@@ -1,178 +1,16 @@
import { mkdirSync } from 'node:fs';
import { createInterface } from 'node:readline';
import { Command } from 'commander';
import pc from 'picocolors';
import { AgentLoop } from '../agent/loop.ts';
import { MessageBus } from '../bus/queue.ts';
import { MattermostChannel } from '../channels/mattermost.ts';
import { ChannelManager } from '../channels/manager.ts';
import { loadConfig, resolveWorkspacePath } from '../config/loader.ts';
import { CronService } from '../cron/service.ts';
import { HeartbeatService } from '../heartbeat/service.ts';
import { makeProvider } from '../provider/index.ts';
import { agentCommand } from './agent.ts';
import { gatewayCommand } from './gateway.ts';
import { onboardCommand } from './onboard.ts';
export function createCli(): Command {
const program = new Command('nanobot').description('nanobot — personal AI assistant').version('1.0.0');
const program = new Command('nanobot')
.description('nanobot — personal AI assistant')
.version('1.0.0');
// ---------------------------------------------------------------------------
// gateway — full runtime: Mattermost + cron + heartbeat
// ---------------------------------------------------------------------------
program
.command('gateway')
.description('Start the full gateway: Mattermost channel, agent loop, cron, and heartbeat.')
.option('-c, --config <path>', 'Path to config.json')
.action(async (opts: { config?: string }) => {
const config = loadConfig(opts.config);
const workspace = resolveWorkspacePath(config.agent.workspacePath);
mkdirSync(workspace, { recursive: true });
const provider = makeProvider(config.providers, config.agent.model, config.agent.maxTokens, config.agent.temperature);
const bus = new MessageBus();
const channelManager = new ChannelManager(bus);
// Cron service
const cronService = new CronService(workspace, async (job) => {
bus.publishInbound({
channel: 'system',
senderId: 'cron',
chatId: `cli:cron_${job.id}`,
content: job.payload.message || `Cron job "${job.name}" triggered.`,
metadata: { cronJobId: job.id },
});
});
const agentLoop = new AgentLoop({
bus,
provider,
workspace,
model: config.agent.model,
maxIterations: config.agent.maxToolIterations,
contextWindowTokens: config.agent.contextWindowTokens,
braveApiKey: config.tools.web.braveApiKey,
webProxy: config.tools.web.proxy,
execConfig: config.tools.exec,
cronService,
restrictToWorkspace: config.tools.restrictToWorkspace,
sendProgress: config.channels.sendProgress,
sendToolHints: config.channels.sendToolHints,
});
// Mattermost
if (config.channels.mattermost) {
const mm = new MattermostChannel(bus, config.channels.mattermost);
channelManager.register(mm);
} else {
console.warn(pc.yellow('[gateway] No Mattermost config found. Running without channels.'));
}
// Heartbeat
let heartbeat: HeartbeatService | null = null;
if (config.heartbeat.enabled) {
heartbeat = new HeartbeatService({
workspace,
provider,
model: config.agent.model,
intervalMinutes: config.heartbeat.intervalMinutes,
onExecute: async (tasks) => {
const content = tasks.length > 0 ? `Heartbeat tasks:\n${tasks.map((t, i) => `${i + 1}. ${t}`).join('\n')}` : 'Heartbeat tick — check for anything to do.';
return agentLoop.processDirect(content, 'system:heartbeat', 'system', 'heartbeat');
},
onNotify: async (_result) => {
// Result already delivered via processDirect / message tool
},
});
}
// Graceful shutdown
const shutdown = () => {
console.info('\n[gateway] Shutting down...');
agentLoop.stop();
channelManager.stopAll();
heartbeat?.stop();
cronService.stop();
process.exit(0);
};
process.on('SIGINT', shutdown);
process.on('SIGTERM', shutdown);
console.info(pc.green('[gateway] Starting...'));
cronService.start();
heartbeat?.start();
await Promise.all([agentLoop.run(), channelManager.startAll()]);
});
// ---------------------------------------------------------------------------
// agent — interactive CLI or single-shot mode
// ---------------------------------------------------------------------------
program
.command('agent')
.description('Run the agent interactively or send a single message.')
.option('-c, --config <path>', 'Path to config.json')
.option('-m, --message <text>', 'Single message to process (non-interactive)')
.option('-w, --workspace <path>', 'Workspace path override')
.option('-M, --model <model>', 'Model override')
.action(async (opts: { config?: string; message?: string; workspace?: string; model?: string }) => {
const config = loadConfig(opts.config);
const workspaceRaw = opts.workspace ?? config.agent.workspacePath;
const workspace = resolveWorkspacePath(workspaceRaw);
mkdirSync(workspace, { recursive: true });
const model = opts.model ?? config.agent.model;
const provider = makeProvider(config.providers, model, config.agent.maxTokens, config.agent.temperature);
const bus = new MessageBus();
const agentLoop = new AgentLoop({
bus,
provider,
workspace,
model,
maxIterations: config.agent.maxToolIterations,
contextWindowTokens: config.agent.contextWindowTokens,
braveApiKey: config.tools.web.braveApiKey,
webProxy: config.tools.web.proxy,
execConfig: config.tools.exec,
restrictToWorkspace: config.tools.restrictToWorkspace,
});
// Single-shot mode
if (opts.message) {
const result = await agentLoop.processDirect(opts.message);
console.log(result);
return;
}
// Interactive mode
console.info(pc.green('nanobot interactive mode. Type your message, Ctrl+C to exit.'));
const rl = createInterface({ input: process.stdin, output: process.stdout });
const promptUser = () => {
rl.question(pc.cyan('You: '), async (input) => {
const text = input.trim();
if (!text) { promptUser(); return; }
const onProgress = async (content: string, opts?: { toolHint?: boolean }) => {
if (opts?.toolHint) {
process.stdout.write(pc.dim(` [${content}]\n`));
} else {
process.stdout.write(pc.dim(` ${content}\n`));
}
};
const result = await agentLoop.processDirect(text, 'cli:interactive', 'cli', 'interactive', onProgress);
console.log(pc.bold('Bot:'), result);
promptUser();
});
};
rl.on('close', () => {
agentLoop.stop();
process.exit(0);
});
promptUser();
});
onboardCommand(program);
gatewayCommand(program);
agentCommand(program);
return program;
}

109
src/cli/gateway.ts Normal file
View File

@@ -0,0 +1,109 @@
import { ChannelManager } from '../channels/manager.ts';
import { Command } from 'commander';
import pc from 'picocolors';
import { AgentLoop } from '../agent/loop.ts';
import { MessageBus } from '../bus/queue.ts';
import { MattermostChannel } from '../channels/mattermost.ts';
import { CronService } from '../cron/service.ts';
import { HeartbeatService } from '../heartbeat/service.ts';
import { makeProvider } from '../provider/index.ts';
import { loadConfig } from '../config/loader.ts';
import { ensureWorkspace } from './utils.ts';
/**
 * Register the `gateway` subcommand on the CLI.
 *
 * The gateway is the long-running runtime: it wires the message bus to the
 * agent loop, an optional Mattermost channel, the cron service, and an
 * optional heartbeat, then runs until SIGINT/SIGTERM.
 */
export function gatewayCommand(program: Command): void {
  program
    .command('gateway')
    .option('-c, --config <path>', 'Path to config.json')
    .description('Start the full gateway: Mattermost channel, agent loop, cron, and heartbeat.')
    .action(async (opts: { config?: string }) => {
      const config = loadConfig(opts.config);
      const workspace = config.agent.workspacePath;
      ensureWorkspace(workspace);
      console.info(pc.magenta(`workspace path: ${workspace}`));
      const provider = makeProvider(
        config.providers,
        config.agent.provider,
        config.agent.model,
        config.agent.maxTokens,
        config.agent.temperature,
      );
      const bus = new MessageBus();
      const channelManager = new ChannelManager(bus);
      // Cron service
      // Fired jobs are republished onto the bus as inbound system messages,
      // so the agent loop handles them like any other message.
      const cronService = new CronService(workspace, async (job) => {
        bus.publishInbound({
          channel: 'system',
          senderId: 'cron',
          chatId: `cli:cron_${job.id}`,
          content: job.payload.message || `Cron job "${job.name}" triggered.`,
          metadata: { cronJobId: job.id },
        });
      });
      const agentLoop = new AgentLoop({
        bus,
        provider,
        workspace,
        model: config.agent.model,
        maxIterations: config.agent.maxToolIterations,
        contextWindowTokens: config.agent.contextWindowTokens,
        braveApiKey: config.tools.web.braveApiKey,
        webProxy: config.tools.web.proxy,
        execConfig: config.tools.exec,
        cronService,
        restrictToWorkspace: config.tools.restrictToWorkspace,
        sendProgress: config.channels.sendProgress,
        sendToolHints: config.channels.sendToolHints,
      });
      // Mattermost — the only external channel; absence is non-fatal.
      if (config.channels.mattermost) {
        const mm = new MattermostChannel(bus, config.channels.mattermost);
        channelManager.register(mm);
      } else {
        console.warn(pc.yellow('[gateway] No Mattermost config found. Running without channels.'));
      }
      // Heartbeat — periodic self-prompt, only built when enabled in config.
      let heartbeat: HeartbeatService | null = null;
      if (config.heartbeat.enabled) {
        heartbeat = new HeartbeatService({
          workspace,
          provider,
          model: config.agent.model,
          intervalMinutes: config.heartbeat.intervalMinutes,
          onExecute: async (tasks) => {
            // With no queued tasks the tick still pings the agent.
            const content =
              tasks.length > 0
                ? `Heartbeat tasks:\n${tasks.map((t, i) => `${i + 1}. ${t}`).join('\n')}`
                : 'Heartbeat tick — check for anything to do.';
            return agentLoop.processDirect(content, 'system:heartbeat', 'system', 'heartbeat');
          },
          onNotify: async (_result) => {
            // Result already delivered via processDirect / message tool
          },
        });
      }
      // Graceful shutdown: stop every service before exiting.
      const shutdown = () => {
        console.info('\n[gateway] Shutting down...');
        agentLoop.stop();
        channelManager.stopAll();
        heartbeat?.stop();
        cronService.stop();
        process.exit(0);
      };
      process.on('SIGINT', shutdown);
      process.on('SIGTERM', shutdown);
      console.info(pc.green('[gateway] Starting...'));
      cronService.start();
      heartbeat?.start();
      // Both run until shutdown() calls process.exit.
      await Promise.all([agentLoop.run(), channelManager.startAll()]);
    });
}

69
src/cli/onboard.ts Normal file
View File

@@ -0,0 +1,69 @@
import { writeFileSync } from 'node:fs';
import { join } from 'node:path';
import { Command } from 'commander';
import pc from 'picocolors';
import { WORKSPACE_PATH } from '../config/constants.ts';
import { ensureWorkspace, resolvePath, checkWorkspaceEmpty, syncTemplates } from './utils.ts';
// Print a green checkmark line for each file/directory created during onboarding.
function logCreated(item: string) {
  console.info(pc.green(` ✓ Created ${item}`));
}
/**
 * Register the `onboard [path]` subcommand on the CLI.
 *
 * Scaffolds a new workspace at `path` (default: WORKSPACE_PATH): writes a
 * minimal config.json, copies the bundled templates, and prints next steps.
 * Aborts with exit code 1 if the target directory already contains files
 * (checkWorkspaceEmpty throws into the catch below).
 */
export function onboardCommand(program: Command): void {
  program
    .command('onboard [path]')
    .description('Initialize a new nanobot workspace with config and templates')
    .action(async (rawPath?: string) => {
      try {
        // Create a minimal config template - users must fill in provider and model
        const defaultConfig = {
          providers: {},
          agent: {
            provider: '',
            model: '',
          },
        };
        const targetPath = resolvePath(rawPath ?? WORKSPACE_PATH);
        const configPath = join(targetPath, 'config.json');
        console.info(pc.blue('Initializing nanobot workspace...'));
        console.info(pc.dim(`Target path: ${targetPath}`));
        // Check if directory exists and is not empty
        checkWorkspaceEmpty(targetPath);
        // Create workspace directory
        ensureWorkspace(targetPath, true);
        logCreated('workspace directory');
        // Write default config
        writeFileSync(configPath, JSON.stringify(defaultConfig, null, 2), 'utf8');
        logCreated('config.json');
        // Sync templates (only files missing at the destination are written)
        const createdFiles = syncTemplates(targetPath);
        for (const file of createdFiles) {
          logCreated(file);
        }
        console.info();
        console.info(pc.green('nanobot workspace initialized successfully!'));
        console.info();
        console.info(pc.bold('Next steps:'));
        console.info(` 1. Edit ${pc.cyan(configPath)} to add your API keys`);
        console.info(
          ` 2. Customize ${pc.cyan(join(targetPath, 'USER.md'))} and ${pc.cyan(join(targetPath, 'SOUL.md'))} with your preferences`,
        );
        console.info(` 3. Start chatting: ${pc.cyan('bun run nanobot agent')}`);
        console.info();
        console.info(` -- For gateway mode:`);
        console.info(` 1. Edit ${pc.cyan(configPath)} to add your channel config (Mattermost)`);
        console.info(` 2. Connect your agent: ${pc.cyan('bun run nanobot gateway')}`);
        console.info();
      } catch (err) {
        // Any failure (non-empty target, missing templates dir) aborts onboarding.
        console.error(pc.red(String(err)));
        process.exit(1);
      }
    });
}

4
src/cli/types.ts Normal file
View File

@@ -0,0 +1,4 @@
import type { Command } from 'commander';
import type { Config } from '../config/types.ts';

/**
 * Shape of a CLI subcommand registrar: receives the root commander program,
 * the loaded config, and the resolved workspace path, and attaches a command.
 */
export type CommandHandler = (program: Command, config: Config, workspace: string) => void;

89
src/cli/utils.ts Normal file
View File

@@ -0,0 +1,89 @@
import { existsSync, mkdirSync, readdirSync, readFileSync, writeFileSync } from 'node:fs';
import { dirname, join, resolve } from 'node:path';
import { fileURLToPath } from 'node:url';
import { homedir } from 'node:os';
import pc from 'picocolors';
/**
 * Resolve a user-supplied path to an absolute path, expanding a leading
 * `~` or `~/` to the current user's home directory.
 */
export function resolvePath(raw: string): string {
  const usesHomeShorthand = raw === '~' || raw.startsWith('~/');
  return usesHomeShorthand ? resolve(homedir(), raw.slice(2)) : resolve(raw);
}
/**
 * Resolve a workspace path and guarantee it exists on disk.
 *
 * When `createIfMissing` is true the directory (and parents) are created;
 * otherwise a missing workspace is fatal: an error is printed and the
 * process exits with code 1.
 *
 * @returns the absolute, resolved workspace path
 */
export function ensureWorkspace(rawPath: string, createIfMissing = false): string {
  const path = resolvePath(rawPath);
  if (existsSync(path)) return path;
  if (!createIfMissing) {
    console.error(
      pc.red(`Workspace does not exist: ${path}\nRun 'nanobot onboard' to initialize.`),
    );
    process.exit(1);
  }
  mkdirSync(path, { recursive: true });
  return path;
}
/**
 * Copy the project's bundled markdown templates into a workspace.
 *
 * Only files missing at the destination are written, so re-running is
 * non-destructive. Also seeds an empty memory/HISTORY.md and a skills/
 * directory.
 *
 * @param workspacePath absolute workspace root (already resolved)
 * @returns workspace-relative paths of the files actually created
 * @throws Error if the project's templates/ directory cannot be found
 */
export function syncTemplates(workspacePath: string): string[] {
  // Locate the project root relative to this file (src/cli/utils.ts).
  const currentFile = fileURLToPath(import.meta.url);
  const projectRoot = resolve(dirname(currentFile), '..', '..');
  const templatesDir = resolve(projectRoot, 'templates');
  if (!existsSync(templatesDir)) {
    throw new Error(`Templates directory not found at ${templatesDir}`);
  }
  const created: string[] = [];
  // Write one template file, skipping any the user already has.
  function copyTemplate(src: string, dest: string) {
    if (existsSync(dest)) return;
    mkdirSync(dirname(dest), { recursive: true });
    const content = readFileSync(src, 'utf8');
    writeFileSync(dest, content, 'utf8');
    // Record the path relative to the workspace root (strip root + separator).
    created.push(dest.slice(workspacePath.length + 1));
  }
  // Recursively mirror .md files from `from` into `to`.
  // Parameters renamed (were srcDir/destDir): the old names shadowed the
  // enclosing scope's variables, which was easy to misread.
  function copyDir(from: string, to: string) {
    if (!existsSync(from)) return;
    const entries = readdirSync(from, { withFileTypes: true });
    for (const entry of entries) {
      const srcPath = join(from, entry.name);
      const destPath = join(to, entry.name);
      if (entry.isDirectory()) {
        copyDir(srcPath, destPath);
      } else if (entry.name.endsWith('.md')) {
        copyTemplate(srcPath, destPath);
      }
    }
  }
  copyDir(templatesDir, workspacePath);
  // Create empty HISTORY.md
  const historyPath = join(workspacePath, 'memory', 'HISTORY.md');
  if (!existsSync(historyPath)) {
    mkdirSync(dirname(historyPath), { recursive: true });
    writeFileSync(historyPath, '# Conversation History\n\n', 'utf8');
    created.push('memory/HISTORY.md');
  }
  // Create skills directory
  const skillsPath = join(workspacePath, 'skills');
  if (!existsSync(skillsPath)) {
    mkdirSync(skillsPath, { recursive: true });
  }
  return created;
}
/**
 * Assert that a prospective workspace directory is safe to initialize.
 *
 * A nonexistent path is fine (it will be created later); an existing path
 * must be empty.
 *
 * @throws Error when the directory exists and contains any entries.
 *   Note: the message is plain text — callers (e.g. onboard.ts) colorize
 *   when printing. The previous version baked pc.red's ANSI escape codes
 *   into the Error message itself, which double-colored terminal output
 *   and polluted non-TTY logs.
 */
export function checkWorkspaceEmpty(path: string): void {
  if (!existsSync(path)) return;
  const entries = readdirSync(path);
  if (entries.length > 0) {
    throw new Error(`Directory not empty: ${path}`);
  }
}

1
src/config/constants.ts Normal file
View File

@@ -0,0 +1 @@
// Default workspace location, following the XDG config-home convention.
// The leading '~' is expanded at the point of use (see cli/utils resolvePath).
export const WORKSPACE_PATH = '~/.config/nanobot';

View File

@@ -1,9 +1,10 @@
import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'node:fs';
import { homedir } from 'node:os';
import { dirname, resolve } from 'node:path';
import pc from 'picocolors';
import { type Config, ConfigSchema } from './types.ts';
const DEFAULT_CONFIG_PATH = resolve(homedir(), '.nanobot', 'config.json');
const DEFAULT_CONFIG_PATH = resolve(homedir(), '.config', 'nanobot', 'config.json');
export function getConfigPath(override?: string): string {
return override ?? process.env['NANOBOT_CONFIG'] ?? DEFAULT_CONFIG_PATH;
@@ -13,16 +14,17 @@ export function loadConfig(configPath?: string): Config {
const path = getConfigPath(configPath);
if (!existsSync(path)) {
return ConfigSchema.parse({});
console.error(pc.red(`Failed to load config from ${configPath}`));
process.exit(1);
}
const raw = readFileSync(path, 'utf8');
let json: unknown;
try {
json = JSON.parse(raw);
} catch {
} catch (error) {
console.error(`Failed to parse config at ${path}`);
return ConfigSchema.parse({});
throw error;
}
// Apply NANOBOT_ env var overrides before validation

View File

@@ -1,4 +1,5 @@
import { z } from 'zod';
import { WORKSPACE_PATH } from './constants.ts';
// ---------------------------------------------------------------------------
// Mattermost
@@ -39,9 +40,19 @@ export type ChannelsConfig = z.infer<typeof ChannelsConfigSchema>;
// Agent
// ---------------------------------------------------------------------------
export const AgentProviderSchema = z.enum([
'anthropic',
'openai',
'google',
'openrouter',
'ollama',
]);
export type AgentProvider = z.infer<typeof AgentProviderSchema>;
export const AgentConfigSchema = z.object({
model: z.string().default('anthropic/claude-sonnet-4-5'),
workspacePath: z.string().default('~/.nanobot'),
provider: AgentProviderSchema,
model: z.string(),
workspacePath: z.string().default(WORKSPACE_PATH),
maxTokens: z.number().int().default(4096),
contextWindowTokens: z.number().int().default(65536),
temperature: z.number().default(0.7),
@@ -87,7 +98,11 @@ export const WebToolConfigSchema = z.object({
export type WebToolConfig = z.infer<typeof WebToolConfigSchema>;
export const ToolsConfigSchema = z.object({
exec: ExecToolConfigSchema.default(() => ({ timeout: 120, denyPatterns: [], restrictToWorkspace: false })),
exec: ExecToolConfigSchema.default(() => ({
timeout: 120,
denyPatterns: [],
restrictToWorkspace: false,
})),
web: WebToolConfigSchema.default(() => ({})),
restrictToWorkspace: z.boolean().default(false),
});
@@ -108,21 +123,14 @@ export type HeartbeatConfig = z.infer<typeof HeartbeatConfigSchema>;
// ---------------------------------------------------------------------------
export const ConfigSchema = z.object({
agent: AgentConfigSchema.default(() => ({
model: 'anthropic/claude-sonnet-4-5',
workspacePath: '~/.nanobot',
maxTokens: 4096,
contextWindowTokens: 65536,
temperature: 0.7,
maxToolIterations: 40,
})),
providers: ProvidersConfigSchema.default(() => ({})),
agent: AgentConfigSchema,
heartbeat: HeartbeatConfigSchema.default(() => ({ enabled: false, intervalMinutes: 30 })),
channels: ChannelsConfigSchema.default(() => ({ sendProgress: true, sendToolHints: true })),
tools: ToolsConfigSchema.default(() => ({
exec: { timeout: 120, denyPatterns: [], restrictToWorkspace: false },
web: {},
restrictToWorkspace: false,
})),
heartbeat: HeartbeatConfigSchema.default(() => ({ enabled: false, intervalMinutes: 30 })),
});
export type Config = z.infer<typeof ConfigSchema>;

View File

@@ -125,7 +125,11 @@ export class CronService {
if (delayMs === null) return;
const nextRunAtMs = Date.now() + delayMs;
const updated: CronJob = { ...job, state: { ...job.state, nextRunAtMs }, updatedAtMs: Date.now() };
const updated: CronJob = {
...job,
state: { ...job.state, nextRunAtMs },
updatedAtMs: Date.now(),
};
this._jobs.set(job.id, updated);
this._save();
@@ -159,7 +163,12 @@ export class CronService {
} catch (err) {
const updated: CronJob = {
...job,
state: { ...job.state, lastRunAtMs: Date.now(), lastStatus: 'error', lastError: String(err) },
state: {
...job.state,
lastRunAtMs: Date.now(),
lastStatus: 'error',
lastError: String(err),
},
updatedAtMs: Date.now(),
};
this._jobs.set(job.id, updated);

View File

@@ -29,8 +29,17 @@ export const CronJobSchema = z.object({
name: z.string(),
enabled: z.boolean().default(true),
schedule: CronScheduleSchema,
payload: CronPayloadSchema.default(() => ({ kind: 'agent_turn' as const, message: '', deliver: false })),
state: CronJobStateSchema.default(() => ({ nextRunAtMs: null, lastRunAtMs: null, lastStatus: null, lastError: null })),
payload: CronPayloadSchema.default(() => ({
kind: 'agent_turn' as const,
message: '',
deliver: false,
})),
state: CronJobStateSchema.default(() => ({
nextRunAtMs: null,
lastRunAtMs: null,
lastStatus: null,
lastError: null,
})),
createdAtMs: z.number().int().default(0),
updatedAtMs: z.number().int().default(0),
deleteAfterRun: z.boolean().default(false),

View File

@@ -117,9 +117,11 @@ export class HeartbeatService {
return;
}
const action = typeof decision.arguments['action'] === 'string' ? decision.arguments['action'] : 'skip';
const action =
typeof decision.arguments['action'] === 'string' ? decision.arguments['action'] : 'skip';
if (action !== 'run') {
const reason = typeof decision.arguments['reason'] === 'string' ? decision.arguments['reason'] : '';
const reason =
typeof decision.arguments['reason'] === 'string' ? decision.arguments['reason'] : '';
console.debug(`[heartbeat] Decision: skip (${reason})`);
return;
}

View File

@@ -4,8 +4,8 @@ import { createOpenAI } from '@ai-sdk/openai';
import { createOpenRouter } from '@openrouter/ai-sdk-provider';
import { type ModelMessage, generateText, stepCountIs } from 'ai';
import { jsonrepair } from 'jsonrepair';
import { createOllama } from 'ollama-ai-provider';
import type { ProvidersConfig } from '../config/types.ts';
import { createOllama } from 'ai-sdk-ollama';
import type { AgentProvider, ProvidersConfig } from '../config/types.ts';
import type { ChatOptions, LLMResponse, ToolDefinition } from './types.ts';
export type { ToolDefinition };
@@ -66,12 +66,20 @@ import type { LanguageModel } from 'ai';
export class LLMProvider {
private _providers: ProvidersConfig;
private _provider: AgentProvider;
private _defaultModel: string;
private _maxTokens: number;
private _temperature: number;
constructor(providers: ProvidersConfig, defaultModel: string, maxTokens = 4096, temperature = 0.7) {
constructor(
providers: ProvidersConfig,
provider: AgentProvider,
defaultModel: string,
maxTokens = 4096,
temperature = 0.7,
) {
this._providers = providers;
this._provider = provider;
this._defaultModel = defaultModel;
this._maxTokens = maxTokens;
this._temperature = temperature;
@@ -82,41 +90,33 @@ export class LLMProvider {
}
private _resolveModel(model: string): LanguageModel {
const slashIdx = model.indexOf('/');
const prefix = slashIdx >= 0 ? model.slice(0, slashIdx) : model;
const remainder = slashIdx >= 0 ? model.slice(slashIdx + 1) : model;
switch (prefix) {
switch (this._provider) {
case 'anthropic': {
const cfg = this._providers.anthropic;
return createAnthropic({ apiKey: cfg?.apiKey, baseURL: cfg?.apiBase })(remainder);
return createAnthropic({ apiKey: cfg?.apiKey, baseURL: cfg?.apiBase })(model);
}
case 'openai': {
const cfg = this._providers.openai;
return createOpenAI({ apiKey: cfg?.apiKey, baseURL: cfg?.apiBase })(remainder);
return createOpenAI({ apiKey: cfg?.apiKey, baseURL: cfg?.apiBase })(model);
}
case 'google': {
const cfg = this._providers.google;
return createGoogleGenerativeAI({ apiKey: cfg?.apiKey, baseURL: cfg?.apiBase })(remainder);
return createGoogleGenerativeAI({ apiKey: cfg?.apiKey, baseURL: cfg?.apiBase })(model);
}
case 'openrouter': {
const cfg = this._providers.openrouter;
return createOpenRouter({ apiKey: cfg?.apiKey, baseURL: cfg?.apiBase })(remainder);
return createOpenRouter({ apiKey: cfg?.apiKey, baseURL: cfg?.apiBase })(model);
}
case 'ollama': {
const cfg = this._providers.ollama;
// ollama-ai-provider returns LanguageModelV1; cast to LanguageModel (compatible at runtime)
return createOllama({ baseURL: cfg?.apiBase ?? 'http://localhost:11434/api' })(remainder) as unknown as LanguageModel;
}
default: {
// No recognized prefix — fall through to openai-compatible
const cfg = this._providers.openai;
return createOpenAI({ apiKey: cfg?.apiKey, baseURL: cfg?.apiBase })(model);
return createOllama({ apiKey: cfg?.apiKey, baseURL: cfg?.apiBase })(model);
}
}
}
async chat(opts: ChatOptions): Promise<{ response: LLMResponse; responseMessages: ModelMessage[] }> {
async chat(
opts: ChatOptions,
): Promise<{ response: LLMResponse; responseMessages: ModelMessage[] }> {
const model = this._resolveModel(opts.model ?? this._defaultModel);
const maxTokens = opts.maxTokens ?? this._maxTokens;
const temperature = opts.temperature ?? this._temperature;
@@ -137,12 +137,15 @@ export class LLMProvider {
: undefined;
try {
let toolChoice: 'required' | 'none' | 'auto' = 'auto';
if (opts.toolChoice === 'required' || opts.toolChoice === 'none')
toolChoice = opts.toolChoice;
const result = await generateText({
model,
messages: opts.messages as ModelMessage[],
// biome-ignore lint/suspicious/noExplicitAny: AI SDK tools type is complex
tools: aiTools as any,
toolChoice: opts.toolChoice === 'required' ? 'required' : opts.toolChoice === 'none' ? 'none' : 'auto',
toolChoice,
maxOutputTokens: maxTokens,
temperature,
stopWhen: stepCountIs(1),
@@ -182,7 +185,9 @@ export class LLMProvider {
}
}
async chatWithRetry(opts: ChatOptions): Promise<{ response: LLMResponse; responseMessages: ModelMessage[] }> {
async chatWithRetry(
opts: ChatOptions,
): Promise<{ response: LLMResponse; responseMessages: ModelMessage[] }> {
for (const delay of RETRY_DELAYS_MS) {
const result = await this.chat(opts);
if (result.response.finishReason !== 'error') return result;
@@ -199,15 +204,20 @@ export class LLMProvider {
export function makeProvider(
providers: ProvidersConfig,
provider: AgentProvider,
model: string,
maxTokens: number,
temperature: number,
): LLMProvider {
return new LLMProvider(providers, model, maxTokens, temperature);
return new LLMProvider(providers, provider, model, maxTokens, temperature);
}
/** Build a tool-result message to append after executing a tool call. */
export function toolResultMessage(toolCallId: string, toolName: string, result: string): ModelMessage {
export function toolResultMessage(
toolCallId: string,
toolName: string,
result: string,
): ModelMessage {
return {
role: 'tool',
content: [

View File

@@ -126,10 +126,7 @@ export class SessionManager {
save(session: Session): void {
session.updatedAt = new Date().toISOString();
const lines = [
JSON.stringify(session.meta),
...session.messages.map((m) => JSON.stringify(m)),
];
const lines = [JSON.stringify(session.meta), ...session.messages.map((m) => JSON.stringify(m))];
writeFileSync(this._filePath(session.key), lines.join('\n') + '\n', 'utf8');
}