Compare commits

...

4 Commits

Author SHA1 Message Date
Joe Fleming
c65a7160ba fix: use updated ollama ai package 2026-03-13 21:16:00 -06:00
Joe Fleming
74a5e70322 fix: require user to specify the provider 2026-03-13 20:49:21 -06:00
Joe Fleming
b6df31fcbf fix: require a model, default to empty value during onboarding
and add a WORKSPACE_PATH constant to make tracking that path easier
2026-03-13 20:42:05 -06:00
Joe Fleming
14aa1c1e7f fix: correctly load config path from args
log error and exit if config can not be loaded, don't use defaults
2026-03-13 20:29:02 -06:00
15 changed files with 131 additions and 133 deletions

View File

@@ -18,13 +18,17 @@ bun install # or use `mise install`
## Quick start
**1. Create a config file**
**1. Initialize workspace**
```bash
mkdir -p ~/.config/nanobot
bun run nanobot onboard
```
`~/.config/nanobot/config.json`:
This creates `~/.config/nanobot/` with a config file and templates.
**2. Edit config**
Add your API key and set provider/model:
```json
{
@@ -34,12 +38,13 @@ mkdir -p ~/.config/nanobot
}
},
"agent": {
"model": "openrouter/anthropic/claude-sonnet-4-5"
"provider": "openrouter",
"model": "anthropic/claude-sonnet-4-5"
}
}
```
**2. Chat**
**3. Chat**
```bash
bun run nanobot agent
@@ -115,7 +120,8 @@ Environment variable overrides:
```json
{
"agent": {
"model": "openrouter/anthropic/claude-sonnet-4-5",
"provider": "openrouter",
"model": "anthropic/claude-sonnet-4-5",
"workspacePath": "~/.config/nanobot",
"maxTokens": 4096,
"contextWindowTokens": 65536,
@@ -127,7 +133,7 @@ Environment variable overrides:
"openai": { "apiKey": "..." },
"google": { "apiKey": "..." },
"openrouter": { "apiKey": "..." },
"ollama": { "apiBase": "http://localhost:11434/api" }
"ollama": { "apiBase": "http://localhost:11434" }
},
"channels": {
"sendProgress": true,
@@ -164,19 +170,29 @@ Environment variable overrides:
}
```
### Providers
### Provider
Model names use a `provider/model` prefix scheme:
The `agent.provider` field is **required** and must be one of:
| Prefix | Provider | Example |
|--------|----------|---------|
| `anthropic/` | Anthropic direct | `anthropic/claude-opus-4-5` |
| `openai/` | OpenAI direct | `openai/gpt-4o` |
| `google/` | Google direct | `google/gemini-2.5-pro` |
| `openrouter/` | OpenRouter (any model) | `openrouter/anthropic/claude-sonnet-4-5` |
| `ollama/` | Local Ollama | `ollama/llama3.2` |
| Provider | Description |
|----------|-------------|
| `anthropic` | Anthropic direct (Claude models) |
| `openai` | OpenAI direct (GPT models) |
| `google` | Google direct (Gemini models) |
| `openrouter` | OpenRouter (access to many models) |
| `ollama` | Local Ollama instance |
For Ollama, set `providers.ollama.apiBase` (default: `http://localhost:11434/api`).
The `agent.model` field is also **required** and should be the model ID without any provider prefix:
| Provider | Example Model |
|----------|---------------|
| `anthropic` | `claude-sonnet-4-5`, `claude-opus-4-5` |
| `openai` | `gpt-4o`, `gpt-4o-mini` |
| `google` | `gemini-2.5-pro`, `gemini-2.0-flash` |
| `openrouter` | `anthropic/claude-sonnet-4-5` (OpenRouter uses its own model IDs) |
| `ollama` | `llama3.2`, `qwen2.5` |
For Ollama, set `providers.ollama.apiBase` (default: `http://localhost:11434`).
### Mattermost setup

View File

@@ -11,12 +11,12 @@
"@mozilla/readability": "^0.6.0",
"@openrouter/ai-sdk-provider": "^2.3.0",
"ai": "^6.0.116",
"ai-sdk-ollama": "^3.8.0",
"commander": "^14.0.3",
"cron-parser": "^5.5.0",
"js-tiktoken": "^1.0.21",
"jsonrepair": "^3.13.3",
"node-html-parser": "^7.1.0",
"ollama-ai-provider": "^1.2.0",
"picocolors": "^1.1.1",
"zod": "^4.3.6",
},
@@ -151,6 +151,8 @@
"ai": ["ai@6.0.116", "", { "dependencies": { "@ai-sdk/gateway": "3.0.66", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.19", "@opentelemetry/api": "1.9.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-7yM+cTmyRLeNIXwt4Vj+mrrJgVQ9RMIW5WO0ydoLoYkewIvsMcvUmqS4j2RJTUXaF1HphwmSKUMQ/HypNRGOmA=="],
"ai-sdk-ollama": ["ai-sdk-ollama@3.8.0", "", { "dependencies": { "@ai-sdk/provider": "^3.0.8", "@ai-sdk/provider-utils": "^4.0.15", "jsonrepair": "^3.13.2", "ollama": "^0.6.3" }, "peerDependencies": { "ai": "^6.0.89" } }, "sha512-Nlla8FpK8QFMNh9m8sPCZoNqnr+n+Ud0QTqpXNds4j/b/lbVZGaji13ZcRuuFvBwPwd4xnFkNrijJzi70Ih1Tg=="],
"base64-js": ["base64-js@1.5.1", "", {}, "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA=="],
"boolbase": ["boolbase@1.0.0", "", {}, "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww=="],
@@ -187,13 +189,11 @@
"luxon": ["luxon@3.7.2", "", {}, "sha512-vtEhXh/gNjI9Yg1u4jX/0YVPMvxzHuGgCm6tC5kZyb08yjGWGnqAjGJvcXbqQR2P3MyMEFnRbpcdFS6PBcLqew=="],
"nanoid": ["nanoid@3.3.11", "", { "bin": { "nanoid": "bin/nanoid.cjs" } }, "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w=="],
"node-html-parser": ["node-html-parser@7.1.0", "", { "dependencies": { "css-select": "^5.1.0", "he": "1.2.0" } }, "sha512-iJo8b2uYGT40Y8BTyy5ufL6IVbN8rbm/1QK2xffXU/1a/v3AAa0d1YAoqBNYqaS4R/HajkWIpIfdE6KcyFh1AQ=="],
"nth-check": ["nth-check@2.1.1", "", { "dependencies": { "boolbase": "^1.0.0" } }, "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w=="],
"ollama-ai-provider": ["ollama-ai-provider@1.2.0", "", { "dependencies": { "@ai-sdk/provider": "^1.0.0", "@ai-sdk/provider-utils": "^2.0.0", "partial-json": "0.1.7" }, "peerDependencies": { "zod": "^3.0.0" }, "optionalPeers": ["zod"] }, "sha512-jTNFruwe3O/ruJeppI/quoOUxG7NA6blG3ZyQj3lei4+NnJo7bi3eIRWqlVpRlu/mbzbFXeJSBuYQWF6pzGKww=="],
"ollama": ["ollama@0.6.3", "", { "dependencies": { "whatwg-fetch": "^3.6.20" } }, "sha512-KEWEhIqE5wtfzEIZbDCLH51VFZ6Z3ZSa6sIOg/E/tBV8S51flyqBOXi+bRxlOYKDf8i327zG9eSTb8IJxvm3Zg=="],
"oxfmt": ["oxfmt@0.40.0", "", { "dependencies": { "tinypool": "2.1.0" }, "optionalDependencies": { "@oxfmt/binding-android-arm-eabi": "0.40.0", "@oxfmt/binding-android-arm64": "0.40.0", "@oxfmt/binding-darwin-arm64": "0.40.0", "@oxfmt/binding-darwin-x64": "0.40.0", "@oxfmt/binding-freebsd-x64": "0.40.0", "@oxfmt/binding-linux-arm-gnueabihf": "0.40.0", "@oxfmt/binding-linux-arm-musleabihf": "0.40.0", "@oxfmt/binding-linux-arm64-gnu": "0.40.0", "@oxfmt/binding-linux-arm64-musl": "0.40.0", "@oxfmt/binding-linux-ppc64-gnu": "0.40.0", "@oxfmt/binding-linux-riscv64-gnu": "0.40.0", "@oxfmt/binding-linux-riscv64-musl": "0.40.0", "@oxfmt/binding-linux-s390x-gnu": "0.40.0", "@oxfmt/binding-linux-x64-gnu": "0.40.0", "@oxfmt/binding-linux-x64-musl": "0.40.0", "@oxfmt/binding-openharmony-arm64": "0.40.0", "@oxfmt/binding-win32-arm64-msvc": "0.40.0", "@oxfmt/binding-win32-ia32-msvc": "0.40.0", "@oxfmt/binding-win32-x64-msvc": "0.40.0" }, "bin": { "oxfmt": "bin/oxfmt" } }, "sha512-g0C3I7xUj4b4DcagevM9kgH6+pUHytikxUcn3/VUkvzTNaaXBeyZqb7IBsHwojeXm4mTBEC/aBjBTMVUkZwWUQ=="],
@@ -201,22 +201,16 @@
"oxlint-tsgolint": ["oxlint-tsgolint@0.16.0", "", { "optionalDependencies": { "@oxlint-tsgolint/darwin-arm64": "0.16.0", "@oxlint-tsgolint/darwin-x64": "0.16.0", "@oxlint-tsgolint/linux-arm64": "0.16.0", "@oxlint-tsgolint/linux-x64": "0.16.0", "@oxlint-tsgolint/win32-arm64": "0.16.0", "@oxlint-tsgolint/win32-x64": "0.16.0" }, "bin": { "tsgolint": "bin/tsgolint.js" } }, "sha512-4RuJK2jP08XwqtUu+5yhCbxEauCm6tv2MFHKEMsjbosK2+vy5us82oI3VLuHwbNyZG7ekZA26U2LLHnGR4frIA=="],
"partial-json": ["partial-json@0.1.7", "", {}, "sha512-Njv/59hHaokb/hRUjce3Hdv12wd60MtM9Z5Olmn+nehe0QDAsRtRbJPvJ0Z91TusF0SuZRIvnM+S4l6EIP8leA=="],
"picocolors": ["picocolors@1.1.1", "", {}, "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA=="],
"secure-json-parse": ["secure-json-parse@2.7.0", "", {}, "sha512-6aU+Rwsezw7VR8/nyvKTx8QpWH9FrcYiXXlqC4z5d5XQBDRqtbfsRjnwGyqbi3gddNtWHuEk9OANUotL26qKUw=="],
"tinypool": ["tinypool@2.1.0", "", {}, "sha512-Pugqs6M0m7Lv1I7FtxN4aoyToKg1C4tu+/381vH35y8oENM/Ai7f7C4StcoK4/+BSw9ebcS8jRiVrORFKCALLw=="],
"typescript": ["typescript@5.9.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw=="],
"undici-types": ["undici-types@7.18.2", "", {}, "sha512-AsuCzffGHJybSaRrmr5eHr81mwJU3kjw6M+uprWvCXiNeN9SOGwQ3Jn8jb8m3Z6izVgknn1R0FTCEAP2QrLY/w=="],
"whatwg-fetch": ["whatwg-fetch@3.6.20", "", {}, "sha512-EqhiFU6daOA8kpjOWTL0olhVOF3i7OrFzSYiGsEMB8GcXS+RrzauAERX65xMeNWVqxA6HXH2m69Z9LaKKdisfg=="],
"zod": ["zod@4.3.6", "", {}, "sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg=="],
"ollama-ai-provider/@ai-sdk/provider": ["@ai-sdk/provider@1.1.3", "", { "dependencies": { "json-schema": "^0.4.0" } }, "sha512-qZMxYJ0qqX/RfnuIaab+zp8UAeJn/ygXXAffR5I4N0n1IrvA6qBsjc8hXLmBiMV2zoXlifkacF7sEFnYnjBcqg=="],
"ollama-ai-provider/@ai-sdk/provider-utils": ["@ai-sdk/provider-utils@2.2.8", "", { "dependencies": { "@ai-sdk/provider": "1.1.3", "nanoid": "^3.3.8", "secure-json-parse": "^2.7.0" }, "peerDependencies": { "zod": "^3.23.8" } }, "sha512-fqhG+4sCVv8x7nFzYnFo19ryhAa3w096Kmc3hWxMQfW/TubPOmt3A6tYZhl4mUfQWWQMsuSkLrtjlWuXBVSGQA=="],
}
}

View File

@@ -19,18 +19,22 @@ Docs directory created with 4 files (PRD.md, Architecture.md, API.md, Discoverie
- **lint**: all `${err}` in template literals → `${String(err)}`; `String(args['key'] ?? '')` → `strArg(args, 'key')` helper; unused `onProgress` param → `_onProgress`; WebSocket `onerror` `err` type is `Event` → use `err.type`
## Work Queue (next steps)
1. [x] Create workspace helper module (src/workspace.ts) with ensureWorkspace() and syncTemplates()
1. [x] Create workspace helper module (src/cli/utils.ts) with ensureWorkspace() and syncTemplates()
2. [x] Create onboard command (src/cli/onboard.ts) with path argument and directory-not-empty guard
3. [x] Update src/cli/commands.ts to use ensureWorkspace() instead of inline mkdirSync
4. [x] Typecheck and lint pass (0 errors)
5. [x] Runtime smoke test: `bun run nanobot --help`
6. [x] Test onboard command: `bun run nanobot onboard [path]`
7. [ ] Test with a real Mattermost config (optional — user can do this)
3. [x] Agent/gateway commands check workspace exists (throw if not found)
4. [x] Added required `provider` field to agent config (values: anthropic, openai, google, openrouter, ollama)
5. [x] Provider resolution uses explicit provider from config (no model prefix parsing)
6. [x] Typecheck and lint pass (0 errors)
7. [x] Test onboard and agent commands work correctly
8. [x] Updated Ollama provider from `ollama-ai-provider` to `ai-sdk-ollama`
9. [ ] Test with a real Mattermost config (optional — user can do this)
## Key Decisions Made
- Mattermost channel uses raw WebSocket + fetch (no mattermostdriver, no SSL hack)
- No MCP support (use shell tools / CLI instead)
- No reasoning/thinking token handling (can add later)
- Config is fresh Zod schema (no migration from Python config needed)
- `ollama-ai-provider` package (not `@ai-sdk/ollama` which 404s on npm)
- `ai-sdk-ollama` package for Ollama provider (replaced old `ollama-ai-provider`)
- `strArg(args, key, fallback?)` helper exported from `agent/tools/base.ts` for safe unknown→string extraction
- Agent config requires explicit `provider` field (no more model prefix like "anthropic/claude-...")
- Model names are now just the raw model ID (e.g., "claude-sonnet-4-5" not "anthropic/claude-sonnet-4-5")

View File

@@ -8,49 +8,20 @@
- All dependencies installed
- `src/` directory structure scaffolded
- Memory bank initialized
- All source files written (first pass):
- `src/config/types.ts` + `src/config/loader.ts`
- `src/bus/types.ts` + `src/bus/queue.ts`
- `src/provider/types.ts` + `src/provider/index.ts`
- `src/session/types.ts` + `src/session/manager.ts`
- `src/agent/tools/base.ts` (+ `strArg` helper)
- `src/agent/tools/filesystem.ts`
- `src/agent/tools/shell.ts`
- `src/agent/tools/web.ts`
- `src/agent/tools/message.ts`
- `src/agent/tools/spawn.ts` + `src/agent/subagent.ts`
- `src/agent/tools/cron.ts`
- `src/cron/types.ts` + `src/cron/service.ts`
- `src/heartbeat/service.ts`
- `src/agent/memory.ts`
- `src/agent/skills.ts`
- `src/agent/context.ts`
- `src/agent/loop.ts`
- `src/channels/base.ts` + `src/channels/mattermost.ts`
- `src/channels/manager.ts`
- `src/cli/commands.ts`
- `index.ts`
- All source files written (first pass)
- Templates and skills copied from Python repo
- **Full typecheck pass**: `tsc --noEmit` → 0 errors
- **Full lint pass**: `oxlint` → 0 errors, 0 warnings
- `package.json` scripts added: `start`, `dev`, `typecheck`
- **Docs created**: `/docs/PRD.md`, `Architecture.md`, `API.md`, `Discoveries.md`
- **Onboard command**: Created `src/cli/onboard.ts` with workspace initialization
- **Provider config**: Added required `provider` field to agent config
- **Workspace validation**: Agent/gateway commands throw if workspace doesn't exist
### 🔄 In Progress
- Nothing
### ✅ Done
- Created src/workspace.ts with ensureWorkspace(), syncTemplates(), checkWorkspaceEmpty()
- Created src/cli/onboard.ts command with path argument
- Updated src/cli/commands.ts to use ensureWorkspace() helper
- Typecheck: 0 errors
- Lint: 0 warnings
### 🔄 In Progress
- Testing onboard command
### ⏳ Pending
- Runtime smoke test: `bun run nanobot --help`
- Integration test with a real Mattermost server
## Known Issues / Risks

View File

@@ -10,7 +10,8 @@
"fmt": "oxfmt --check",
"fmt:fix": "oxfmt",
"lint": "oxlint",
"lint:fix": "oxlint --fix"
"lint:fix": "oxlint --fix",
"checks": "bun run lint && bun run fmt"
},
"dependencies": {
"@ai-sdk/anthropic": "^3.0.58",
@@ -19,12 +20,12 @@
"@mozilla/readability": "^0.6.0",
"@openrouter/ai-sdk-provider": "^2.3.0",
"ai": "^6.0.116",
"ai-sdk-ollama": "^3.8.0",
"commander": "^14.0.3",
"cron-parser": "^5.5.0",
"js-tiktoken": "^1.0.21",
"jsonrepair": "^3.13.3",
"node-html-parser": "^7.1.0",
"ollama-ai-provider": "^1.2.0",
"picocolors": "^1.1.1",
"zod": "^4.3.6"
},

View File

@@ -364,11 +364,11 @@ export class AgentLoop {
if (response.content) await onProgress(response.content);
const hint = response.toolCalls
.map((tc) => {
let display = ''
let display = '';
const firstVal = Object.values(tc.arguments)[0];
if (typeof firstVal === 'string') {
display = `"${firstVal.slice(0, 40) + (firstVal.length > 40 ? '…' : '')}"`
display = `"${firstVal.slice(0, 40) + (firstVal.length > 40 ? '…' : '')}"`;
}
return `${tc.name}(${display})`;

View File

@@ -4,10 +4,10 @@ import pc from 'picocolors';
import { AgentLoop } from '../agent/loop.ts';
import { MessageBus } from '../bus/queue.ts';
import { makeProvider } from '../provider/index.ts';
import { loadConfig } from '../config/loader.ts';
import { ensureWorkspace } from './utils.ts';
import type { Config } from '../config/types.ts';
export function agentCommand(program: Command, config: Config, workspace: string): void {
export function agentCommand(program: Command): void {
program
.command('agent')
.description('Run the agent interactively or send a single message.')
@@ -15,12 +15,16 @@ export function agentCommand(program: Command, config: Config, workspace: string
.option('-m, --message <text>', 'Single message to process (non-interactive)')
.option('-M, --model <model>', 'Model override')
.action(async (opts: { config?: string; message?: string; model?: string }) => {
const config = loadConfig(opts.config);
const workspace = config.agent.workspacePath;
ensureWorkspace(workspace);
console.info(pc.magenta(`workspace path: ${workspace}`));
const model = opts.model ?? config.agent.model;
const provider = makeProvider(
config.providers,
config.agent.provider,
model,
config.agent.maxTokens,
config.agent.temperature,

View File

@@ -1,5 +1,4 @@
import { Command } from 'commander';
import { loadConfig, resolveWorkspacePath } from '../config/loader.ts';
import { agentCommand } from './agent.ts';
import { gatewayCommand } from './gateway.ts';
import { onboardCommand } from './onboard.ts';
@@ -10,13 +9,8 @@ export function createCli(): Command {
.version('1.0.0');
onboardCommand(program);
const globalOpts = program.opts();
const config = loadConfig(globalOpts.config);
const workspace = resolveWorkspacePath(config.agent.workspacePath);
gatewayCommand(program, config, workspace);
agentCommand(program, config, workspace);
gatewayCommand(program);
agentCommand(program);
return program;
}

View File

@@ -7,21 +7,24 @@ import { MattermostChannel } from '../channels/mattermost.ts';
import { CronService } from '../cron/service.ts';
import { HeartbeatService } from '../heartbeat/service.ts';
import { makeProvider } from '../provider/index.ts';
import { loadConfig } from '../config/loader.ts';
import { ensureWorkspace } from './utils.ts';
import type { Config } from '../config/types.ts';
export function gatewayCommand(program: Command, config: Config, workspace: string): void {
export function gatewayCommand(program: Command): void {
program
.command('gateway')
.option('-c, --config <path>', 'Path to config.json')
.description('Start the full gateway: Mattermost channel, agent loop, cron, and heartbeat.')
.action(async (_opts: { config?: string }) => {
.action(async (opts: { config?: string }) => {
const config = loadConfig(opts.config);
const workspace = config.agent.workspacePath;
ensureWorkspace(workspace);
console.info(pc.magenta(`workspace path: ${workspace}`));
const provider = makeProvider(
config.providers,
config.agent.provider,
config.agent.model,
config.agent.maxTokens,
config.agent.temperature,

View File

@@ -2,7 +2,7 @@ import { writeFileSync } from 'node:fs';
import { join } from 'node:path';
import { Command } from 'commander';
import pc from 'picocolors';
import { ConfigSchema, type Config } from '../config/types.ts';
import { WORKSPACE_PATH } from '../config/constants.ts';
import { ensureWorkspace, resolvePath, checkWorkspaceEmpty, syncTemplates } from './utils.ts';
function logCreated(item: string) {
@@ -15,9 +15,16 @@ export function onboardCommand(program: Command): void {
.description('Initialize a new nanobot workspace with config and templates')
.action(async (rawPath?: string) => {
try {
const defaultConfig: Config = ConfigSchema.parse({});
// Create a minimal config template - users must fill in provider and model
const defaultConfig = {
providers: {},
agent: {
provider: '',
model: '',
},
};
const targetPath = resolvePath(rawPath ?? defaultConfig.agent.workspacePath);
const targetPath = resolvePath(rawPath ?? WORKSPACE_PATH);
const configPath = join(targetPath, 'config.json');
console.info(pc.blue('Initializing nanobot workspace...'));
@@ -28,16 +35,16 @@ export function onboardCommand(program: Command): void {
// Create workspace directory
ensureWorkspace(targetPath, true);
logCreated('workspace directory')
logCreated('workspace directory');
// Write default config
writeFileSync(configPath, JSON.stringify(defaultConfig, null, 2), 'utf8');
logCreated('config.json')
logCreated('config.json');
// Sync templates
const createdFiles = syncTemplates(targetPath);
for (const file of createdFiles) {
logCreated(file)
logCreated(file);
}
console.info();
@@ -45,7 +52,9 @@ export function onboardCommand(program: Command): void {
console.info();
console.info(pc.bold('Next steps:'));
console.info(` 1. Edit ${pc.cyan(configPath)} to add your API keys`);
console.info(` 2. Customize ${pc.cyan(join(targetPath, 'USER.md'))} and ${pc.cyan(join(targetPath, 'SOUL.md'))} with your preferences`);
console.info(
` 2. Customize ${pc.cyan(join(targetPath, 'USER.md'))} and ${pc.cyan(join(targetPath, 'SOUL.md'))} with your preferences`,
);
console.info(` 3. Start chatting: ${pc.cyan('bun run nanobot agent')}`);
console.info();
console.info(` -- For gateway mode:`);

View File

@@ -17,8 +17,10 @@ export function ensureWorkspace(rawPath: string, createIfMissing = false): strin
if (createIfMissing) {
mkdirSync(path, { recursive: true });
} else {
console.error(pc.red(`Workspace does not exist: ${path}\nRun 'nanobot onboard' to initialize.`))
process.exit(1)
console.error(
pc.red(`Workspace does not exist: ${path}\nRun 'nanobot onboard' to initialize.`),
);
process.exit(1);
}
}
return path;

1
src/config/constants.ts Normal file
View File

@@ -0,0 +1 @@
export const WORKSPACE_PATH = '~/.config/nanobot';

View File

@@ -1,6 +1,7 @@
import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'node:fs';
import { homedir } from 'node:os';
import { dirname, resolve } from 'node:path';
import pc from 'picocolors';
import { type Config, ConfigSchema } from './types.ts';
const DEFAULT_CONFIG_PATH = resolve(homedir(), '.config', 'nanobot', 'config.json');
@@ -13,16 +14,17 @@ export function loadConfig(configPath?: string): Config {
const path = getConfigPath(configPath);
if (!existsSync(path)) {
return ConfigSchema.parse({});
console.error(pc.red(`Failed to load config from ${configPath}`));
process.exit(1);
}
const raw = readFileSync(path, 'utf8');
let json: unknown;
try {
json = JSON.parse(raw);
} catch(error) {
} catch (error) {
console.error(`Failed to parse config at ${path}`);
throw error
throw error;
}
// Apply NANOBOT_ env var overrides before validation

View File

@@ -1,4 +1,5 @@
import { z } from 'zod';
import { WORKSPACE_PATH } from './constants.ts';
// ---------------------------------------------------------------------------
// Mattermost
@@ -39,9 +40,19 @@ export type ChannelsConfig = z.infer<typeof ChannelsConfigSchema>;
// Agent
// ---------------------------------------------------------------------------
export const AgentProviderSchema = z.enum([
'anthropic',
'openai',
'google',
'openrouter',
'ollama',
]);
export type AgentProvider = z.infer<typeof AgentProviderSchema>;
export const AgentConfigSchema = z.object({
model: z.string().default('anthropic/claude-sonnet-4-5'),
workspacePath: z.string().default('~/.config/nanobot'),
provider: AgentProviderSchema,
model: z.string(),
workspacePath: z.string().default(WORKSPACE_PATH),
maxTokens: z.number().int().default(4096),
contextWindowTokens: z.number().int().default(65536),
temperature: z.number().default(0.7),
@@ -113,14 +124,7 @@ export type HeartbeatConfig = z.infer<typeof HeartbeatConfigSchema>;
export const ConfigSchema = z.object({
providers: ProvidersConfigSchema.default(() => ({})),
agent: AgentConfigSchema.default(() => ({
model: 'anthropic/claude-sonnet-4-5',
workspacePath: '~/.config/nanobot',
maxTokens: 4096,
contextWindowTokens: 65536,
temperature: 0.7,
maxToolIterations: 40,
})),
agent: AgentConfigSchema,
heartbeat: HeartbeatConfigSchema.default(() => ({ enabled: false, intervalMinutes: 30 })),
channels: ChannelsConfigSchema.default(() => ({ sendProgress: true, sendToolHints: true })),
tools: ToolsConfigSchema.default(() => ({

View File

@@ -4,8 +4,8 @@ import { createOpenAI } from '@ai-sdk/openai';
import { createOpenRouter } from '@openrouter/ai-sdk-provider';
import { type ModelMessage, generateText, stepCountIs } from 'ai';
import { jsonrepair } from 'jsonrepair';
import { createOllama } from 'ollama-ai-provider';
import type { ProvidersConfig } from '../config/types.ts';
import { createOllama } from 'ai-sdk-ollama';
import type { AgentProvider, ProvidersConfig } from '../config/types.ts';
import type { ChatOptions, LLMResponse, ToolDefinition } from './types.ts';
export type { ToolDefinition };
@@ -66,17 +66,20 @@ import type { LanguageModel } from 'ai';
export class LLMProvider {
private _providers: ProvidersConfig;
private _provider: AgentProvider;
private _defaultModel: string;
private _maxTokens: number;
private _temperature: number;
constructor(
providers: ProvidersConfig,
provider: AgentProvider,
defaultModel: string,
maxTokens = 4096,
temperature = 0.7,
) {
this._providers = providers;
this._provider = provider;
this._defaultModel = defaultModel;
this._maxTokens = maxTokens;
this._temperature = temperature;
@@ -87,38 +90,26 @@ export class LLMProvider {
}
private _resolveModel(model: string): LanguageModel {
const slashIdx = model.indexOf('/');
const prefix = slashIdx >= 0 ? model.slice(0, slashIdx) : model;
const remainder = slashIdx >= 0 ? model.slice(slashIdx + 1) : model;
switch (prefix) {
switch (this._provider) {
case 'anthropic': {
const cfg = this._providers.anthropic;
return createAnthropic({ apiKey: cfg?.apiKey, baseURL: cfg?.apiBase })(remainder);
return createAnthropic({ apiKey: cfg?.apiKey, baseURL: cfg?.apiBase })(model);
}
case 'openai': {
const cfg = this._providers.openai;
return createOpenAI({ apiKey: cfg?.apiKey, baseURL: cfg?.apiBase })(remainder);
return createOpenAI({ apiKey: cfg?.apiKey, baseURL: cfg?.apiBase })(model);
}
case 'google': {
const cfg = this._providers.google;
return createGoogleGenerativeAI({ apiKey: cfg?.apiKey, baseURL: cfg?.apiBase })(remainder);
return createGoogleGenerativeAI({ apiKey: cfg?.apiKey, baseURL: cfg?.apiBase })(model);
}
case 'openrouter': {
const cfg = this._providers.openrouter;
return createOpenRouter({ apiKey: cfg?.apiKey, baseURL: cfg?.apiBase })(remainder);
return createOpenRouter({ apiKey: cfg?.apiKey, baseURL: cfg?.apiBase })(model);
}
case 'ollama': {
const cfg = this._providers.ollama;
// ollama-ai-provider returns LanguageModelV1; cast to LanguageModel (compatible at runtime)
return createOllama({ baseURL: cfg?.apiBase ?? 'http://localhost:11434/api' })(
remainder,
) as unknown as LanguageModel;
}
default: {
// No recognized prefix — fall through to openai-compatible
const cfg = this._providers.openai;
return createOpenAI({ apiKey: cfg?.apiKey, baseURL: cfg?.apiBase })(model);
return createOllama({ apiKey: cfg?.apiKey, baseURL: cfg?.apiBase })(model);
}
}
}
@@ -146,8 +137,9 @@ export class LLMProvider {
: undefined;
try {
let toolChoice: 'required' | 'none' | 'auto' = 'auto'
if (opts.toolChoice === 'required' || opts.toolChoice === 'none') toolChoice = opts.toolChoice
let toolChoice: 'required' | 'none' | 'auto' = 'auto';
if (opts.toolChoice === 'required' || opts.toolChoice === 'none')
toolChoice = opts.toolChoice;
const result = await generateText({
model,
messages: opts.messages as ModelMessage[],
@@ -212,11 +204,12 @@ export class LLMProvider {
export function makeProvider(
providers: ProvidersConfig,
provider: AgentProvider,
model: string,
maxTokens: number,
temperature: number,
): LLMProvider {
return new LLMProvider(providers, model, maxTokens, temperature);
return new LLMProvider(providers, provider, model, maxTokens, temperature);
}
/** Build a tool-result message to append after executing a tool call. */