chore: format code

This commit is contained in:
Joe Fleming
2026-03-13 14:46:15 -06:00
parent 345cfef425
commit 7e28a09345
18 changed files with 263 additions and 93 deletions

View File

@@ -12,7 +12,8 @@ const MAX_CONTENT_CHARS = 50_000;
export class WebSearchTool implements Tool {
readonly name = 'web_search';
readonly description = 'Search the web using Brave Search. Returns a list of results with titles, URLs, and snippets.';
readonly description =
'Search the web using Brave Search. Returns a list of results with titles, URLs, and snippets.';
readonly parameters = {
query: { type: 'string', description: 'Search query.' },
count: { type: 'number', description: 'Number of results (default 10, max 20).' },
@@ -30,7 +31,8 @@ export class WebSearchTool implements Tool {
async execute(args: Record<string, unknown>): Promise<string> {
const query = strArg(args, 'query').trim();
if (!query) return 'Error: query is required.';
if (!this._apiKey) return 'Error: BRAVE_API_KEY not configured (set tools.web.braveApiKey in config).';
if (!this._apiKey)
return 'Error: BRAVE_API_KEY not configured (set tools.web.braveApiKey in config).';
const count = Math.min(Number(args['count'] ?? 10), 20);
const url = `https://api.search.brave.com/res/v1/web/search?q=${encodeURIComponent(query)}&count=${count}`;
@@ -38,7 +40,7 @@ export class WebSearchTool implements Tool {
try {
const res = await fetchWithTimeout(url, {
headers: {
'Accept': 'application/json',
Accept: 'application/json',
'Accept-Encoding': 'gzip',
'X-Subscription-Token': this._apiKey,
},
@@ -46,7 +48,9 @@ export class WebSearchTool implements Tool {
if (!res.ok) return `Error: Brave Search API returned ${res.status}: ${await res.text()}`;
const data = (await res.json()) as { web?: { results?: Array<{ title: string; url: string; description: string }> } };
const data = (await res.json()) as {
web?: { results?: Array<{ title: string; url: string; description: string }> };
};
const results = data.web?.results ?? [];
if (results.length === 0) return 'No results found.';
@@ -70,7 +74,11 @@ export class WebFetchTool implements Tool {
'Fetch a URL and return its content. HTML pages are extracted to readable text. Use mode="raw" for JSON/XML/plain text.';
readonly parameters = {
url: { type: 'string', description: 'URL to fetch.' },
mode: { type: 'string', enum: ['markdown', 'text', 'raw'], description: 'Output mode (default: text).' },
mode: {
type: 'string',
enum: ['markdown', 'text', 'raw'],
description: 'Output mode (default: text).',
},
};
readonly required = ['url'];
@@ -96,8 +104,14 @@ export class WebFetchTool implements Tool {
const contentType = res.headers.get('content-type') ?? '';
const body = await res.text();
if (mode === 'raw' || (!contentType.includes('text/html') && !body.trimStart().startsWith('<'))) {
const truncated = body.length > MAX_CONTENT_CHARS ? body.slice(0, MAX_CONTENT_CHARS) + '\n... (truncated)' : body;
if (
mode === 'raw' ||
(!contentType.includes('text/html') && !body.trimStart().startsWith('<'))
) {
const truncated =
body.length > MAX_CONTENT_CHARS
? body.slice(0, MAX_CONTENT_CHARS) + '\n... (truncated)'
: body;
return truncated;
}
@@ -114,9 +128,10 @@ export class WebFetchTool implements Tool {
const title = article?.title ?? '';
const textContent = article?.textContent ?? stripTags(body);
const trimmed = textContent.replace(/\n{3,}/g, '\n\n').trim();
const truncated = trimmed.length > MAX_CONTENT_CHARS
? trimmed.slice(0, MAX_CONTENT_CHARS) + '\n... (truncated)'
: trimmed;
const truncated =
trimmed.length > MAX_CONTENT_CHARS
? trimmed.slice(0, MAX_CONTENT_CHARS) + '\n... (truncated)'
: trimmed;
return title ? `# ${title}\n\n${truncated}` : truncated;
} catch (err) {
@@ -136,7 +151,10 @@ function fetchWithTimeout(url: string, init: RequestInit = {}): Promise<Response
}
/**
 * Crude fallback HTML-to-text conversion: removes all tags and collapses
 * whitespace. Used when Readability cannot extract an article body.
 *
 * Note: regex-based tag stripping does not handle <script>/<style> contents
 * or malformed markup; it is intentionally a best-effort fallback only.
 *
 * @param html - Raw HTML string.
 * @returns The text content with tags replaced by spaces and runs of
 *   whitespace collapsed to a single space, trimmed at both ends.
 */
function stripTags(html: string): string {
  // Replace each tag with a space (not empty string) so adjacent text
  // nodes don't fuse together, then normalize the whitespace.
  return html
    .replace(/<[^>]*>/g, ' ')
    .replace(/\s+/g, ' ')
    .trim();
}
/** Build a minimal pseudo-document that satisfies Readability's interface. */
@@ -166,7 +184,9 @@ function makePseudoDocument(
createTreeWalker: () => ({ nextNode: () => null }),
createRange: () => ({ selectNodeContents: () => {}, cloneContents: () => null }),
// biome-ignore lint/suspicious/noExplicitAny: Readability duck-typing
get innerHTML() { return html; },
get innerHTML() {
return html;
},
location: { href: url },
};