From bac25c9173dfee4a509c135d1005853a597d20ce Mon Sep 17 00:00:00 2001 From: Elian Doran Date: Tue, 31 Mar 2026 15:04:02 +0300 Subject: [PATCH 01/19] feat(llm): basic tool to get child notes --- .../src/services/llm/providers/anthropic.ts | 3 +- .../src/services/llm/tools/hierarchy_tools.ts | 35 +++++++++++++++++++ apps/server/src/services/llm/tools/index.ts | 1 + 3 files changed, 38 insertions(+), 1 deletion(-) create mode 100644 apps/server/src/services/llm/tools/hierarchy_tools.ts diff --git a/apps/server/src/services/llm/providers/anthropic.ts b/apps/server/src/services/llm/providers/anthropic.ts index e8fc71cce1..af415dd020 100644 --- a/apps/server/src/services/llm/providers/anthropic.ts +++ b/apps/server/src/services/llm/providers/anthropic.ts @@ -3,7 +3,7 @@ import { generateText, streamText, stepCountIs, type CoreMessage, type ToolSet } import type { LlmMessage } from "@triliumnext/commons"; import becca from "../../../becca/becca.js"; -import { noteTools, attributeTools, currentNoteTools } from "../tools/index.js"; +import { noteTools, attributeTools, hierarchyTools, currentNoteTools } from "../tools/index.js"; import type { LlmProvider, LlmProviderConfig, ModelInfo, ModelPricing, StreamResult } from "../types.js"; const DEFAULT_MODEL = "claude-sonnet-4-6"; @@ -206,6 +206,7 @@ export class AnthropicProvider implements LlmProvider { if (config.enableNoteTools) { Object.assign(tools, noteTools); Object.assign(tools, attributeTools); + Object.assign(tools, hierarchyTools); } if (Object.keys(tools).length > 0) { diff --git a/apps/server/src/services/llm/tools/hierarchy_tools.ts b/apps/server/src/services/llm/tools/hierarchy_tools.ts new file mode 100644 index 0000000000..8da42900b9 --- /dev/null +++ b/apps/server/src/services/llm/tools/hierarchy_tools.ts @@ -0,0 +1,35 @@ +/** + * LLM tools for navigating the note hierarchy (tree structure, branches). 
+ */ + +import { tool } from "ai"; +import { z } from "zod"; + +import becca from "../../../becca/becca.js"; + +/** + * Get the child notes of a given note. + */ +export const getChildNotes = tool({ + description: "Get the immediate child notes of a note. Returns each child's ID, title, type, and whether it has children of its own. Use noteId 'root' to list top-level notes.", + inputSchema: z.object({ + noteId: z.string().describe("The ID of the parent note (use 'root' for top-level)") + }), + execute: async ({ noteId }) => { + const note = becca.getNote(noteId); + if (!note) { + return { error: "Note not found" }; + } + + return note.getChildNotes().map((child) => ({ + noteId: child.noteId, + title: child.getTitleOrProtected(), + type: child.type, + childCount: child.getChildNotes().length + })); + } +}); + +export const hierarchyTools = { + get_child_notes: getChildNotes +}; diff --git a/apps/server/src/services/llm/tools/index.ts b/apps/server/src/services/llm/tools/index.ts index dc2257ef1b..0540b9d617 100644 --- a/apps/server/src/services/llm/tools/index.ts +++ b/apps/server/src/services/llm/tools/index.ts @@ -5,3 +5,4 @@ export { noteTools, currentNoteTools } from "./note_tools.js"; export { attributeTools } from "./attribute_tools.js"; +export { hierarchyTools } from "./hierarchy_tools.js"; From c617bea45afd6d5012d267e2018c2483dc61a4d6 Mon Sep 17 00:00:00 2001 From: Elian Doran Date: Tue, 31 Mar 2026 15:15:14 +0300 Subject: [PATCH 02/19] feat(llm): basic tool to get subtree --- .../src/services/llm/tools/hierarchy_tools.ts | 69 ++++++++++++++++++- 1 file changed, 68 insertions(+), 1 deletion(-) diff --git a/apps/server/src/services/llm/tools/hierarchy_tools.ts b/apps/server/src/services/llm/tools/hierarchy_tools.ts index 8da42900b9..9bfe60afe2 100644 --- a/apps/server/src/services/llm/tools/hierarchy_tools.ts +++ b/apps/server/src/services/llm/tools/hierarchy_tools.ts @@ -6,6 +6,7 @@ import { tool } from "ai"; import { z } from "zod"; import becca from 
"../../../becca/becca.js"; +import type BNote from "../../../becca/entities/bnote.js"; /** * Get the child notes of a given note. @@ -30,6 +31,72 @@ export const getChildNotes = tool({ } }); +//#region Subtree tool implementation +const MAX_DEPTH = 5; +const MAX_CHILDREN_PER_LEVEL = 10; + +interface SubtreeNode { + noteId: string; + title: string; + type: string; + children?: SubtreeNode[] | string; +} + +function buildSubtree(note: BNote, depth: number, maxDepth: number): SubtreeNode { + const node: SubtreeNode = { + noteId: note.noteId, + title: note.getTitleOrProtected(), + type: note.type + }; + + if (depth >= maxDepth) { + const childCount = note.getChildNotes().length; + if (childCount > 0) { + node.children = `${childCount} children not shown (depth limit reached)`; + } + return node; + } + + const children = note.getChildNotes(); + if (children.length === 0) { + return node; + } + + const shown = children.slice(0, MAX_CHILDREN_PER_LEVEL); + node.children = shown.map((child) => buildSubtree(child, depth + 1, maxDepth)); + + if (children.length > MAX_CHILDREN_PER_LEVEL) { + node.children.push({ + noteId: "", + title: `... and ${children.length - MAX_CHILDREN_PER_LEVEL} more`, + type: "truncated" + }); + } + + return node; +} + +/** + * Get a subtree of notes up to a specified depth. + */ +export const getSubtree = tool({ + description: "Get a nested subtree of notes starting from a given note, traversing multiple levels deep. Useful for understanding the structure of a section of the note tree. Each level shows up to 10 children.", + inputSchema: z.object({ + noteId: z.string().describe("The ID of the root note for the subtree (use 'root' for the entire tree)"), + depth: z.number().min(1).max(MAX_DEPTH).optional().describe(`How many levels deep to traverse (1-${MAX_DEPTH}). 
Defaults to 2.`) + }), + execute: async ({ noteId, depth = 2 }) => { + const note = becca.getNote(noteId); + if (!note) { + return { error: "Note not found" }; + } + + return buildSubtree(note, 0, depth); + } +}); +//#endregion + export const hierarchyTools = { - get_child_notes: getChildNotes + get_child_notes: getChildNotes, + get_subtree: getSubtree }; From 8c0dacd6d7c0b17c6e25009ac2e98d73fee47108 Mon Sep 17 00:00:00 2001 From: Elian Doran Date: Tue, 31 Mar 2026 15:36:50 +0300 Subject: [PATCH 03/19] feat(llm): basic skill to do search --- .../src/services/llm/providers/anthropic.ts | 3 +- apps/server/src/services/llm/skills/index.ts | 66 +++++++++++++++++++ .../src/services/llm/skills/search_syntax.md | 50 ++++++++++++++ apps/server/src/services/llm/tools/index.ts | 1 + .../src/services/llm/tools/note_tools.ts | 18 +++-- 5 files changed, 132 insertions(+), 6 deletions(-) create mode 100644 apps/server/src/services/llm/skills/index.ts create mode 100644 apps/server/src/services/llm/skills/search_syntax.md diff --git a/apps/server/src/services/llm/providers/anthropic.ts b/apps/server/src/services/llm/providers/anthropic.ts index af415dd020..50298038c1 100644 --- a/apps/server/src/services/llm/providers/anthropic.ts +++ b/apps/server/src/services/llm/providers/anthropic.ts @@ -3,7 +3,7 @@ import { generateText, streamText, stepCountIs, type CoreMessage, type ToolSet } import type { LlmMessage } from "@triliumnext/commons"; import becca from "../../../becca/becca.js"; -import { noteTools, attributeTools, hierarchyTools, currentNoteTools } from "../tools/index.js"; +import { noteTools, attributeTools, hierarchyTools, skillTools, currentNoteTools } from "../tools/index.js"; import type { LlmProvider, LlmProviderConfig, ModelInfo, ModelPricing, StreamResult } from "../types.js"; const DEFAULT_MODEL = "claude-sonnet-4-6"; @@ -207,6 +207,7 @@ export class AnthropicProvider implements LlmProvider { Object.assign(tools, noteTools); Object.assign(tools, attributeTools); 
Object.assign(tools, hierarchyTools); + Object.assign(tools, skillTools); } if (Object.keys(tools).length > 0) { diff --git a/apps/server/src/services/llm/skills/index.ts b/apps/server/src/services/llm/skills/index.ts new file mode 100644 index 0000000000..11f04fef7b --- /dev/null +++ b/apps/server/src/services/llm/skills/index.ts @@ -0,0 +1,66 @@ +/** + * LLM skills — on-demand instruction sets that an LLM can load when it needs + * specialized knowledge (e.g. search syntax). Only names and descriptions are + * included in the system prompt; full content is fetched via the load_skill tool. + */ + +import { tool } from "ai"; +import { readFileSync } from "fs"; +import { dirname, join } from "path"; +import { fileURLToPath } from "url"; +import { z } from "zod"; + +const __dirname = dirname(fileURLToPath(import.meta.url)); + +interface SkillDefinition { + name: string; + description: string; + file: string; +} + +const SKILLS: SkillDefinition[] = [ + { + name: "search_syntax", + description: "Trilium search query syntax reference — labels, relations, note properties, boolean logic, ordering, and more.", + file: "search_syntax.md" + } +]; + +function loadSkillContent(name: string): string | null { + const skill = SKILLS.find((s) => s.name === name); + if (!skill) { + return null; + } + return readFileSync(join(__dirname, skill.file), "utf-8"); +} + +/** + * Returns a summary of available skills for inclusion in the system prompt. + */ +export function getSkillsSummary(): string { + return SKILLS + .map((s) => `- **${s.name}**: ${s.description}`) + .join("\n"); +} + +/** + * The load_skill tool — lets the LLM fetch full instructions on demand. + */ +export const loadSkill = tool({ + description: "Load a skill to get specialized instructions. 
Available skills:\n" + + SKILLS.map((s) => `- ${s.name}: ${s.description}`).join("\n"), + inputSchema: z.object({ + name: z.string().describe("The skill name to load") + }), + execute: async ({ name }) => { + const content = loadSkillContent(name); + if (!content) { + return { error: `Unknown skill: '${name}'. Available: ${SKILLS.map((s) => s.name).join(", ")}` }; + } + return { skill: name, instructions: content }; + } +}); + +export const skillTools = { + load_skill: loadSkill +}; diff --git a/apps/server/src/services/llm/skills/search_syntax.md b/apps/server/src/services/llm/skills/search_syntax.md new file mode 100644 index 0000000000..a43b6b25d7 --- /dev/null +++ b/apps/server/src/services/llm/skills/search_syntax.md @@ -0,0 +1,50 @@ +# Trilium Search Syntax + +## Full-text search +- `rings tolkien` — notes containing both words +- `"The Lord of the Rings"` — exact phrase match + +## Label filters +- `#book` — notes with the "book" label +- `#!book` — notes WITHOUT the "book" label +- `#publicationYear = 1954` — exact value +- `#genre *=* fan` — contains substring +- `#title =* The` — starts with +- `#title *= Rings` — ends with +- `#publicationYear >= 1950` — numeric comparison (>, >=, <, <=) +- `#dateNote >= TODAY-30` — date keywords: NOW+-seconds, TODAY+-days, MONTH+-months, YEAR+-years +- `#phone %= '\d{3}-\d{4}'` — regex match +- `#title ~= trilim` — fuzzy exact match (tolerates typos, min 3 chars) +- `#content ~* progra` — fuzzy contains match + +## Relation filters +- `~author` — notes with an "author" relation +- `~author.title *=* Tolkien` — relation target's title contains "Tolkien" +- `~author.relations.son.title = 'Christopher Tolkien'` — deep relation traversal + +## Note properties +Access via `note.` prefix: noteId, title, type, mime, text, content, rawContent, dateCreated, dateModified, isProtected, isArchived, parentCount, childrenCount, attributeCount, labelCount, relationCount, contentSize, revisionCount. 
+- `note.type = code AND note.mime = 'application/json'` +- `note.content *=* searchTerm` + +## Hierarchy +- `note.parents.title = 'Books'` — parent named "Books" +- `note.ancestors.title = 'Books'` — any ancestor named "Books" +- `note.children.title = 'sub-note'` — child named "sub-note" + +## Boolean logic +- AND: `#book AND #fantasy` (implicit between adjacent expressions) +- OR: `#book OR #author` +- NOT: `not(note.ancestors.title = 'Tolkien')` +- Parentheses: `(#genre = "fantasy" AND #year >= 1950) OR #award` + +## Combining full-text and attributes +- `towers #book` — full-text "towers" AND has #book label +- `tolkien #book or #author` — full-text with OR on labels + +## Ordering and limiting +- `#author=Tolkien orderBy #publicationDate desc, note.title limit 10` + +## Escaping +- `\#hash` — literal # in full-text +- Three quote types: single, double, backtick diff --git a/apps/server/src/services/llm/tools/index.ts b/apps/server/src/services/llm/tools/index.ts index 0540b9d617..615067674b 100644 --- a/apps/server/src/services/llm/tools/index.ts +++ b/apps/server/src/services/llm/tools/index.ts @@ -6,3 +6,4 @@ export { noteTools, currentNoteTools } from "./note_tools.js"; export { attributeTools } from "./attribute_tools.js"; export { hierarchyTools } from "./hierarchy_tools.js"; +export { skillTools } from "../skills/index.js"; diff --git a/apps/server/src/services/llm/tools/note_tools.ts b/apps/server/src/services/llm/tools/note_tools.ts index 90eb475e16..c3703b5a80 100644 --- a/apps/server/src/services/llm/tools/note_tools.ts +++ b/apps/server/src/services/llm/tools/note_tools.ts @@ -43,15 +43,23 @@ function setNoteContentFromLlm(note: { type: string; title: string; setContent: * Search for notes in the knowledge base. */ export const searchNotes = tool({ - description: "Search for notes in the user's knowledge base. 
Returns note metadata including title, type, and IDs.", + description: "Search for notes in the user's knowledge base using Trilium search syntax. Load the 'search_syntax' skill first if unsure about query format. Returns note metadata including title, type, and IDs.", inputSchema: z.object({ - query: z.string().describe("Search query (supports Trilium search syntax)") + query: z.string().describe("Search query in Trilium search syntax (e.g. '#book #year >= 2000', 'tolkien #fantasy')"), + fastSearch: z.boolean().optional().describe("If true, skip content search (only titles and attributes). Faster for large databases."), + includeArchivedNotes: z.boolean().optional().describe("If true, include archived notes in results."), + ancestorNoteId: z.string().optional().describe("Limit search to a subtree rooted at this note ID."), + limit: z.number().optional().describe("Maximum number of results to return. Defaults to 10.") }), - execute: async ({ query }) => { - const searchContext = new SearchContext({}); + execute: async ({ query, fastSearch, includeArchivedNotes, ancestorNoteId, limit = 10 }) => { + const searchContext = new SearchContext({ + fastSearch, + includeArchivedNotes, + ancestorNoteId + }); const results = searchService.findResultsWithQuery(query, searchContext); - return results.slice(0, 10).map(sr => { + return results.slice(0, limit).map(sr => { const note = becca.notes[sr.noteId]; if (!note) return null; return { From 90930e19e7c27df6d1b51b4d3655d2f672f7530e Mon Sep 17 00:00:00 2001 From: Elian Doran Date: Tue, 31 Mar 2026 15:41:56 +0300 Subject: [PATCH 04/19] feat(llm): improve search discoverability --- .../server/src/services/llm/providers/anthropic.ts | 9 +++++++++ apps/server/src/services/llm/tools/note_tools.ts | 14 ++++++++++++-- 2 files changed, 21 insertions(+), 2 deletions(-) diff --git a/apps/server/src/services/llm/providers/anthropic.ts b/apps/server/src/services/llm/providers/anthropic.ts index 50298038c1..688ef9afa4 100644 --- 
a/apps/server/src/services/llm/providers/anthropic.ts +++ b/apps/server/src/services/llm/providers/anthropic.ts @@ -3,6 +3,7 @@ import { generateText, streamText, stepCountIs, type CoreMessage, type ToolSet } import type { LlmMessage } from "@triliumnext/commons"; import becca from "../../../becca/becca.js"; +import { getSkillsSummary } from "../skills/index.js"; import { noteTools, attributeTools, hierarchyTools, skillTools, currentNoteTools } from "../tools/index.js"; import type { LlmProvider, LlmProviderConfig, ModelInfo, ModelPricing, StreamResult } from "../types.js"; @@ -135,6 +136,14 @@ export class AnthropicProvider implements LlmProvider { } } + // Add skills hint so the LLM knows to load skills before complex operations + if (config.enableNoteTools) { + const skillsHint = `You have access to skills that provide specialized instructions. Load a skill with the load_skill tool before performing complex operations.\n\nAvailable skills:\n${getSkillsSummary()}`; + systemPrompt = systemPrompt + ? `${systemPrompt}\n\n${skillsHint}` + : skillsHint; + } + // Convert to AI SDK message format with cache control breakpoints. // The system prompt and conversation history (all but the last user message) // are stable across turns, so we mark them for caching to reduce costs. diff --git a/apps/server/src/services/llm/tools/note_tools.ts b/apps/server/src/services/llm/tools/note_tools.ts index c3703b5a80..4e86b6aa1e 100644 --- a/apps/server/src/services/llm/tools/note_tools.ts +++ b/apps/server/src/services/llm/tools/note_tools.ts @@ -43,9 +43,19 @@ function setNoteContentFromLlm(note: { type: string; title: string; setContent: * Search for notes in the knowledge base. */ export const searchNotes = tool({ - description: "Search for notes in the user's knowledge base using Trilium search syntax. Load the 'search_syntax' skill first if unsure about query format. 
Returns note metadata including title, type, and IDs.", + description: [ + "Search for notes in the user's knowledge base using Trilium search syntax.", + "For complex queries (boolean logic, relations, regex, ordering), load the 'search_syntax' skill first via load_skill.", + "Common patterns:", + "- Full-text: 'rings tolkien' (notes containing both words)", + "- By label: '#book', '#status = done', '#year >= 2000'", + "- By type: 'note.type = code'", + "- By relation: '~author', '~author.title *= Tolkien'", + "- Combined: 'tolkien #book' (full-text + label filter)", + "- Negation: '#!archived' (notes WITHOUT label)" + ].join(" "), inputSchema: z.object({ - query: z.string().describe("Search query in Trilium search syntax (e.g. '#book #year >= 2000', 'tolkien #fantasy')"), + query: z.string().describe("Search query in Trilium search syntax"), fastSearch: z.boolean().optional().describe("If true, skip content search (only titles and attributes). Faster for large databases."), includeArchivedNotes: z.boolean().optional().describe("If true, include archived notes in results."), ancestorNoteId: z.string().optional().describe("Limit search to a subtree rooted at this note ID."), From c13b68ef4280f599cfea0610caefc1b5ae4ee445 Mon Sep 17 00:00:00 2001 From: Elian Doran Date: Tue, 31 Mar 2026 16:01:20 +0300 Subject: [PATCH 05/19] feat(llm): basic skill to write scripts --- .../services/llm/skills/backend_scripting.md | 156 ++++++++++++++ .../services/llm/skills/frontend_scripting.md | 197 ++++++++++++++++++ apps/server/src/services/llm/skills/index.ts | 10 + 3 files changed, 363 insertions(+) create mode 100644 apps/server/src/services/llm/skills/backend_scripting.md create mode 100644 apps/server/src/services/llm/skills/frontend_scripting.md diff --git a/apps/server/src/services/llm/skills/backend_scripting.md b/apps/server/src/services/llm/skills/backend_scripting.md new file mode 100644 index 0000000000..59ec934ece --- /dev/null +++ 
b/apps/server/src/services/llm/skills/backend_scripting.md @@ -0,0 +1,156 @@ +# Trilium Backend Scripting + +Backend scripts run in Node.js on the server. They have direct access to notes in memory and can interact with the system (files, processes). + +## Creating a backend script + +1. Create a Code note with language "JS backend". +2. The script can be run manually (Execute button) or triggered automatically. + +## Script API (`api` global) + +### Note retrieval +- `api.getNote(noteId)` - get note by ID +- `api.searchForNotes(query, searchParams)` - search notes (returns array) +- `api.searchForNote(query)` - search notes (returns first match) +- `api.getNotesWithLabel(name, value?)` - find notes by label +- `api.getNoteWithLabel(name, value?)` - find first note by label +- `api.getBranch(branchId)` - get branch by ID +- `api.getAttribute(attributeId)` - get attribute by ID + +### Note creation +- `api.createTextNote(parentNoteId, title, content)` - create text note +- `api.createDataNote(parentNoteId, title, content)` - create JSON note +- `api.createNewNote({ parentNoteId, title, content, type })` - create note with full options + +### Branch management +- `api.ensureNoteIsPresentInParent(noteId, parentNoteId, prefix?)` - create or reuse branch +- `api.ensureNoteIsAbsentFromParent(noteId, parentNoteId)` - remove branch if exists +- `api.toggleNoteInParent(present, noteId, parentNoteId, prefix?)` - toggle branch + +### Calendar/date notes +- `api.getTodayNote()` - get/create today's day note +- `api.getDayNote(date)` - get/create day note (YYYY-MM-DD) +- `api.getWeekNote(date)` - get/create week note +- `api.getMonthNote(date)` - get/create month note (YYYY-MM) +- `api.getYearNote(year)` - get/create year note (YYYY) + +### Utilities +- `api.log(message)` - log to Trilium logs and UI +- `api.randomString(length)` - generate random string +- `api.escapeHtml(string)` / `api.unescapeHtml(string)` +- `api.getInstanceName()` - get instance name +- `api.getAppInfo()` 
- get application info + +### Libraries +- `api.axios` - HTTP client +- `api.dayjs` - date manipulation +- `api.xml2js` - XML parser +- `api.cheerio` - HTML/XML parser + +### Advanced +- `api.transactional(func)` - wrap code in a database transaction +- `api.sql` - direct SQL access +- `api.sortNotes(parentNoteId, sortConfig)` - sort child notes +- `api.runOnFrontend(script, params)` - execute code on all connected frontends +- `api.backupNow(backupName)` - create a backup +- `api.exportSubtreeToZipFile(noteId, format, zipFilePath)` - export subtree (format: "markdown" or "html") +- `api.duplicateSubtree(origNoteId, newParentNoteId)` - clone note and children + +## BNote object + +Available on notes returned from API methods (`api.getNote()`, `api.originEntity`, etc.). + +### Content +- `note.getContent()` / `note.setContent(content)` +- `note.getJsonContent()` / `note.setJsonContent(obj)` +- `note.getJsonContentSafely()` - returns null on parse error + +### Properties +- `note.noteId`, `note.title`, `note.type`, `note.mime` +- `note.dateCreated`, `note.dateModified` +- `note.isProtected`, `note.isArchived` + +### Hierarchy +- `note.getParentNotes()` / `note.getChildNotes()` +- `note.getParentBranches()` / `note.getChildBranches()` +- `note.hasChildren()`, `note.getAncestors()` +- `note.getSubtreeNoteIds()` - all descendant IDs +- `note.hasAncestor(ancestorNoteId)` + +### Attributes (including inherited) +- `note.getLabels(name?)` / `note.getLabelValue(name)` +- `note.getRelations(name?)` / `note.getRelation(name)` +- `note.hasLabel(name, value?)` / `note.hasRelation(name, value?)` + +### Attribute modification +- `note.setLabel(name, value?)` / `note.removeLabel(name, value?)` +- `note.setRelation(name, targetNoteId)` / `note.removeRelation(name, value?)` +- `note.addLabel(name, value?, isInheritable?)` / `note.addRelation(name, targetNoteId, isInheritable?)` +- `note.toggleLabel(enabled, name, value?)` + +### Operations +- `note.save()` - persist changes +- 
`note.deleteNote()` - soft delete +- `note.cloneTo(parentNoteId)` - clone to another parent + +### Type checks +- `note.isJson()`, `note.isJavaScript()`, `note.isHtml()`, `note.isImage()` +- `note.hasStringContent()` - true if not binary + +## Events and triggers + +### Global events (via `#run` label on the script note) +- `#run=backendStartup` - run when server starts +- `#run=hourly` - run once per hour (use `#runAtHour=N` to specify which hours) +- `#run=daily` - run once per day + +### Entity events (via relation from the entity to the script note) +These are defined as relations. `api.originEntity` contains the entity that triggered the event. + +| Relation | Trigger | originEntity | +|---|---|---| +| `~runOnNoteCreation` | note created | BNote | +| `~runOnChildNoteCreation` | child note created under this note | BNote (child) | +| `~runOnNoteTitleChange` | note title changed | BNote | +| `~runOnNoteContentChange` | note content changed | BNote | +| `~runOnNoteChange` | note metadata changed (not content) | BNote | +| `~runOnNoteDeletion` | note deleted | BNote | +| `~runOnBranchCreation` | branch created (clone/move) | BBranch | +| `~runOnBranchChange` | branch updated | BBranch | +| `~runOnBranchDeletion` | branch deleted | BBranch | +| `~runOnAttributeCreation` | attribute created on this note | BAttribute | +| `~runOnAttributeChange` | attribute changed/deleted on this note | BAttribute | + +Relations can be inheritable — when set, they apply to all descendant notes. 
+ +## Example: auto-color notes by category + +```javascript +// Attach via ~runOnAttributeChange relation +const attr = api.originEntity; +if (attr.name !== "mycategory") return; +const note = api.getNote(attr.noteId); +if (attr.value === "Health") { + note.setLabel("color", "green"); +} else { + note.removeLabel("color"); +} +``` + +## Example: create a daily summary + +```javascript +// Attach #run=daily label +const today = api.getTodayNote(); +const tasks = api.searchForNotes('#task #!completed'); +let summary = "## Open Tasks\n"; +for (const task of tasks) { + summary += `- ${task.title}\n`; +} +api.createTextNote(today.noteId, "Daily Summary", summary); +``` + +## Module system + +Child notes of a script act as modules. Export with `module.exports = ...` and import via function parameters matching the child note title, or use `require('noteName')`. diff --git a/apps/server/src/services/llm/skills/frontend_scripting.md b/apps/server/src/services/llm/skills/frontend_scripting.md new file mode 100644 index 0000000000..2d17e04e56 --- /dev/null +++ b/apps/server/src/services/llm/skills/frontend_scripting.md @@ -0,0 +1,197 @@ +# Trilium Frontend Scripting + +Frontend scripts run in the browser. They can manipulate the UI, navigate notes, show dialogs, and create custom widgets. + +## Creating a frontend script + +1. Create a Code note with language "JS frontend" (or "JSX" for Preact widgets). +2. Run manually (Execute button) or set `#run=frontendStartup` to auto-run on startup. +3. For mobile, use `#run=mobileStartup` instead. 
+ +## Script types + +| Type | Description | Required attribute | +|---|---|---| +| Regular script | Runs with current app/note context | `#run=frontendStartup` (optional) | +| Custom widget | UI element in various positions | `#widget` | +| Launch bar widget | Button in the launch bar | `#widget` | +| Render note | Custom content inside a note | None (used via render relation) | + +## Script API (`api` global) + +### Navigation & tabs +- `api.activateNote(notePath)` - navigate to a note +- `api.activateNewNote(notePath)` - navigate and wait for sync +- `api.openTabWithNote(notePath, activate?)` - open in new tab +- `api.openSplitWithNote(notePath, activate?)` - open in new split +- `api.getActiveContextNote()` - get currently active note +- `api.getActiveContextNotePath()` - get path of active note +- `api.setHoistedNoteId(noteId)` - hoist/unhoist note in current tab + +### Note access & search +- `api.getNote(noteId)` - get note by ID +- `api.getNotes(noteIds)` - bulk fetch notes +- `api.searchForNotes(searchString)` - search with full query syntax +- `api.searchForNote(searchString)` - search returning first result +- `api.reloadNotes(noteIds)` - refresh cache from backend + +### Calendar/date notes +- `api.getTodayNote()` - get/create today's note +- `api.getDayNote(date)` - get/create day note for date +- `api.getWeekNote(date)` / `api.getMonthNote(month)` / `api.getYearNote(year)` + +### Editor access +- `api.getActiveContextTextEditor()` - get CKEditor instance +- `api.getActiveContextCodeEditor()` - get CodeMirror instance +- `api.addTextToActiveContextEditor(text)` - insert text into active editor + +### Dialogs & notifications +- `api.showMessage(msg)` - show info toast +- `api.showError(msg)` - show error toast +- `api.showInfoDialog(msg)` - show info dialog +- `api.showConfirmDialog(msg)` - show confirm dialog (returns boolean) +- `api.showPromptDialog(msg)` - show prompt dialog (returns user input) + +### Links +- `api.createLink(notePath, { title?, 
showTooltip?, showNoteIcon? })` - create jQuery link element + +### Backend integration +- `api.runOnBackend(func, params)` - execute a function on the backend (sync) + +### Protection +- `api.protectNote(noteId, protect)` - protect/unprotect note +- `api.protectSubTree(noteId, protect)` - protect/unprotect subtree + +### UI interaction +- `api.triggerCommand(name, data)` - trigger a command +- `api.triggerEvent(name, data)` - trigger an event +- `api.bindGlobalShortcut(shortcut, handler, namespace?)` - add keyboard shortcut + +### Utilities +- `api.formatDateISO(date)` - format as YYYY-MM-DD +- `api.randomString(length)` - generate random string +- `api.dayjs` - day.js library +- `api.log(message)` - log to script log pane + +### Widget base classes +- `api.BasicWidget` - base widget class +- `api.NoteContextAwareWidget` - widget aware of note context changes +- `api.RightPanelWidget` - right sidebar widget + +## FNote object + +Available via `api.getNote()`, `api.getActiveContextNote()`, etc. + +### Properties +- `note.noteId`, `note.title`, `note.type`, `note.mime` +- `note.isProtected`, `note.isArchived` + +### Content +- `note.getContent()` - get note content +- `note.getJsonContent()` - parse content as JSON + +### Hierarchy +- `note.getParentNotes()` / `note.getChildNotes()` +- `note.hasChildren()`, `note.getSubtreeNoteIds()` + +### Attributes +- `note.getAttributes(type?, name?)` - get all attributes (including inherited) +- `note.getOwnedAttributes(type?, name?)` - get only owned attributes +- `note.hasAttribute(type, name)` - check for attribute + +## Custom widgets (legacy jQuery) + +```javascript +class MyWidget extends api.BasicWidget { + get position() { return 1; } + get parentWidget() { return "center-pane"; } + + doRender() { + this.$widget = $("
"); + this.$widget.append($("") + .on("click", () => api.showMessage("Hello!"))); + return this.$widget; + } +} + +module.exports = new MyWidget(); +``` + +### Widget locations (`parentWidget` values) +- `left-pane` - alongside the note tree +- `center-pane` - in the content area, spanning all splits +- `note-detail-pane` - inside a note (split-aware, export class not instance, use static parentWidget) +- `right-pane` - in the right sidebar (use `RightPanelWidget`) + +### Note context aware widget + +```javascript +class MyWidget extends api.NoteContextAwareWidget { + static get parentWidget() { return "note-detail-pane"; } + get position() { return 100; } + + doRender() { + this.$widget = $("
"); + return this.$widget; + } + + async refreshWithNote(note) { + // Called when the active note changes + this.$widget.text(`Current note: ${note.title}`); + } +} + +module.exports = MyWidget; // Export class, not instance! +``` + +## Custom widgets (Preact JSX) + +Requires JSX language enabled in Options -> Code Notes. + +```jsx +import { defineWidget } from "trilium:preact"; +import { useState } from "trilium:preact"; + +export default defineWidget({ + parent: "center-pane", + position: 10, + render: () => { + const [count, setCount] = useState(0); + return ( +
+      <button onClick={() => setCount(count + 1)}>Count: {count}</button>
+ ); + } +}); +``` + +### Preact imports +- `import { showMessage, getNote, ... } from "trilium:api"` - API methods +- `import { useState, useEffect, ... } from "trilium:preact"` - hooks +- `import { defineWidget, defineLauncherWidget } from "trilium:preact"` - widget helpers +- Built-in components: Button, ActionButton, Modal, NoteAutocomplete, FormTextBox, FormToggle, etc. + +## Example: launcher button + +```javascript +// Set #run=frontendStartup +api.createOrUpdateLauncher({ + id: "my-task-button", + type: "customWidget", + title: "New Task", + icon: "bx bx-task", + action: async () => { + const todayNote = await api.getTodayNote(); + await api.runOnBackend(async (parentNoteId) => { + api.createTextNote(parentNoteId, "New Task", ""); + }, [todayNote.noteId]); + } +}); +``` + +## Module system + +Child notes of a script act as modules. For JS frontend, use `module.exports` and function parameters. For JSX, use `import`/`export` syntax. diff --git a/apps/server/src/services/llm/skills/index.ts b/apps/server/src/services/llm/skills/index.ts index 11f04fef7b..ff22097111 100644 --- a/apps/server/src/services/llm/skills/index.ts +++ b/apps/server/src/services/llm/skills/index.ts @@ -23,6 +23,16 @@ const SKILLS: SkillDefinition[] = [ name: "search_syntax", description: "Trilium search query syntax reference — labels, relations, note properties, boolean logic, ordering, and more.", file: "search_syntax.md" + }, + { + name: "backend_scripting", + description: "Backend (Node.js) scripting API — creating notes, handling events, accessing entities, database operations, and automation.", + file: "backend_scripting.md" + }, + { + name: "frontend_scripting", + description: "Frontend (browser) scripting API — UI widgets, navigation, dialogs, editor access, Preact/JSX components, and keyboard shortcuts.", + file: "frontend_scripting.md" } ]; From 20311d31f6bd43ba830ae2fb2ff1efa8e973eaa1 Mon Sep 17 00:00:00 2001 From: Elian Doran Date: Tue, 31 Mar 2026 16:04:48 +0300 Subject: 
[PATCH 06/19] chore(llm): modify frontend script to prefer Preact --- .../services/llm/skills/frontend_scripting.md | 348 ++++++++++-------- 1 file changed, 193 insertions(+), 155 deletions(-) diff --git a/apps/server/src/services/llm/skills/frontend_scripting.md b/apps/server/src/services/llm/skills/frontend_scripting.md index 2d17e04e56..826f98dad9 100644 --- a/apps/server/src/services/llm/skills/frontend_scripting.md +++ b/apps/server/src/services/llm/skills/frontend_scripting.md @@ -2,151 +2,25 @@ Frontend scripts run in the browser. They can manipulate the UI, navigate notes, show dialogs, and create custom widgets. +IMPORTANT: Always prefer Preact JSX widgets over legacy jQuery widgets. Use JSX code notes with `import`/`export` syntax. + ## Creating a frontend script -1. Create a Code note with language "JS frontend" (or "JSX" for Preact widgets). -2. Run manually (Execute button) or set `#run=frontendStartup` to auto-run on startup. +1. Create a Code note with language "JSX" (preferred) or "JS frontend" (legacy only). +2. Add `#widget` label for widgets, or `#run=frontendStartup` for auto-run scripts. 3. For mobile, use `#run=mobileStartup` instead. 
## Script types -| Type | Description | Required attribute | +| Type | Language | Required attribute | |---|---|---| -| Regular script | Runs with current app/note context | `#run=frontendStartup` (optional) | -| Custom widget | UI element in various positions | `#widget` | -| Launch bar widget | Button in the launch bar | `#widget` | -| Render note | Custom content inside a note | None (used via render relation) | +| Custom widget | JSX (preferred) | `#widget` | +| Regular script | JS frontend | `#run=frontendStartup` (optional) | +| Render note | JSX | None (used via `~renderNote` relation) | -## Script API (`api` global) +## Custom widgets (Preact JSX) — preferred -### Navigation & tabs -- `api.activateNote(notePath)` - navigate to a note -- `api.activateNewNote(notePath)` - navigate and wait for sync -- `api.openTabWithNote(notePath, activate?)` - open in new tab -- `api.openSplitWithNote(notePath, activate?)` - open in new split -- `api.getActiveContextNote()` - get currently active note -- `api.getActiveContextNotePath()` - get path of active note -- `api.setHoistedNoteId(noteId)` - hoist/unhoist note in current tab - -### Note access & search -- `api.getNote(noteId)` - get note by ID -- `api.getNotes(noteIds)` - bulk fetch notes -- `api.searchForNotes(searchString)` - search with full query syntax -- `api.searchForNote(searchString)` - search returning first result -- `api.reloadNotes(noteIds)` - refresh cache from backend - -### Calendar/date notes -- `api.getTodayNote()` - get/create today's note -- `api.getDayNote(date)` - get/create day note for date -- `api.getWeekNote(date)` / `api.getMonthNote(month)` / `api.getYearNote(year)` - -### Editor access -- `api.getActiveContextTextEditor()` - get CKEditor instance -- `api.getActiveContextCodeEditor()` - get CodeMirror instance -- `api.addTextToActiveContextEditor(text)` - insert text into active editor - -### Dialogs & notifications -- `api.showMessage(msg)` - show info toast -- `api.showError(msg)` - show 
error toast -- `api.showInfoDialog(msg)` - show info dialog -- `api.showConfirmDialog(msg)` - show confirm dialog (returns boolean) -- `api.showPromptDialog(msg)` - show prompt dialog (returns user input) - -### Links -- `api.createLink(notePath, { title?, showTooltip?, showNoteIcon? })` - create jQuery link element - -### Backend integration -- `api.runOnBackend(func, params)` - execute a function on the backend (sync) - -### Protection -- `api.protectNote(noteId, protect)` - protect/unprotect note -- `api.protectSubTree(noteId, protect)` - protect/unprotect subtree - -### UI interaction -- `api.triggerCommand(name, data)` - trigger a command -- `api.triggerEvent(name, data)` - trigger an event -- `api.bindGlobalShortcut(shortcut, handler, namespace?)` - add keyboard shortcut - -### Utilities -- `api.formatDateISO(date)` - format as YYYY-MM-DD -- `api.randomString(length)` - generate random string -- `api.dayjs` - day.js library -- `api.log(message)` - log to script log pane - -### Widget base classes -- `api.BasicWidget` - base widget class -- `api.NoteContextAwareWidget` - widget aware of note context changes -- `api.RightPanelWidget` - right sidebar widget - -## FNote object - -Available via `api.getNote()`, `api.getActiveContextNote()`, etc. 
- -### Properties -- `note.noteId`, `note.title`, `note.type`, `note.mime` -- `note.isProtected`, `note.isArchived` - -### Content -- `note.getContent()` - get note content -- `note.getJsonContent()` - parse content as JSON - -### Hierarchy -- `note.getParentNotes()` / `note.getChildNotes()` -- `note.hasChildren()`, `note.getSubtreeNoteIds()` - -### Attributes -- `note.getAttributes(type?, name?)` - get all attributes (including inherited) -- `note.getOwnedAttributes(type?, name?)` - get only owned attributes -- `note.hasAttribute(type, name)` - check for attribute - -## Custom widgets (legacy jQuery) - -```javascript -class MyWidget extends api.BasicWidget { - get position() { return 1; } - get parentWidget() { return "center-pane"; } - - doRender() { - this.$widget = $("
"); - this.$widget.append($("") - .on("click", () => api.showMessage("Hello!"))); - return this.$widget; - } -} - -module.exports = new MyWidget(); -``` - -### Widget locations (`parentWidget` values) -- `left-pane` - alongside the note tree -- `center-pane` - in the content area, spanning all splits -- `note-detail-pane` - inside a note (split-aware, export class not instance, use static parentWidget) -- `right-pane` - in the right sidebar (use `RightPanelWidget`) - -### Note context aware widget - -```javascript -class MyWidget extends api.NoteContextAwareWidget { - static get parentWidget() { return "note-detail-pane"; } - get position() { return 100; } - - doRender() { - this.$widget = $("
"); - return this.$widget; - } - - async refreshWithNote(note) { - // Called when the active note changes - this.$widget.text(`Current note: ${note.title}`); - } -} - -module.exports = MyWidget; // Export class, not instance! -``` - -## Custom widgets (Preact JSX) - -Requires JSX language enabled in Options -> Code Notes. +### Basic widget ```jsx import { defineWidget } from "trilium:preact"; @@ -168,30 +42,194 @@ export default defineWidget({ }); ``` -### Preact imports -- `import { showMessage, getNote, ... } from "trilium:api"` - API methods -- `import { useState, useEffect, ... } from "trilium:preact"` - hooks -- `import { defineWidget, defineLauncherWidget } from "trilium:preact"` - widget helpers -- Built-in components: Button, ActionButton, Modal, NoteAutocomplete, FormTextBox, FormToggle, etc. +### Note context aware widget (reacts to active note) -## Example: launcher button +```jsx +import { defineWidget, useNoteContext, useNoteProperty } from "trilium:preact"; -```javascript -// Set #run=frontendStartup -api.createOrUpdateLauncher({ - id: "my-task-button", - type: "customWidget", - title: "New Task", - icon: "bx bx-task", - action: async () => { - const todayNote = await api.getTodayNote(); - await api.runOnBackend(async (parentNoteId) => { - api.createTextNote(parentNoteId, "New Task", ""); - }, [todayNote.noteId]); +export default defineWidget({ + parent: "note-detail-pane", + position: 10, + render: () => { + const { note } = useNoteContext(); + const title = useNoteProperty(note, "title"); + return Current note: {title}; } }); ``` +### Right panel widget (sidebar) + +```jsx +import { defineWidget, RightPanelWidget, useState, useEffect } from "trilium:preact"; + +export default defineWidget({ + parent: "right-pane", + position: 1, + render() { + const [time, setTime] = useState(); + useEffect(() => { + const interval = setInterval(() => { + setTime(new Date().toLocaleString()); + }, 1000); + return () => clearInterval(interval); + }); + return ( + +

The time is: {time}

+
+ ); + } +}); +``` + +### Widget locations (`parent` values) + +| Value | Description | Notes | +|---|---|---| +| `left-pane` | Alongside the note tree | | +| `center-pane` | Content area, spanning all splits | | +| `note-detail-pane` | Inside a note, split-aware | Use `useNoteContext()` hook | +| `right-pane` | Right sidebar section | Wrap in `` | + +### Preact imports + +```jsx +// API methods +import { showMessage, showError, getNote, searchForNotes, activateNote, + runOnBackend, getActiveContextNote } from "trilium:api"; + +// Hooks and components +import { defineWidget, defineLauncherWidget, + useState, useEffect, useCallback, useMemo, useRef, + useNoteContext, useActiveNoteContext, useNoteProperty, + RightPanelWidget } from "trilium:preact"; + +// Built-in UI components +import { ActionButton, Button, LinkButton, Modal, + NoteAutocomplete, FormTextBox, FormToggle, FormCheckbox, + FormDropdownList, FormGroup, FormText, FormTextArea, + Icon, LoadingSpinner, Slider, Collapsible } from "trilium:preact"; +``` + +### Custom hooks + +- `useNoteContext()` - returns `{ note }` for the current note context (use in `note-detail-pane`) +- `useActiveNoteContext()` - returns `{ note, noteId }` for the active note (works from any widget location) +- `useNoteProperty(note, propName)` - reactively watches a note property (e.g. "title", "type") + +### Render notes (JSX) + +For rendering custom content inside a note: +1. Create a JSX code note that exports a default component. +2. Create a parent note and set `~renderNote` relation pointing to the JSX note. + +```jsx +export default function MyRenderNote() { + return ( + <> +

Custom rendered content

+

This appears inside the note.

+ + ); +} +``` + +## Script API + +In JSX, use `import { method } from "trilium:api"`. In JS frontend, use the `api` global. + +### Navigation & tabs +- `activateNote(notePath)` - navigate to a note +- `activateNewNote(notePath)` - navigate and wait for sync +- `openTabWithNote(notePath, activate?)` - open in new tab +- `openSplitWithNote(notePath, activate?)` - open in new split +- `getActiveContextNote()` - get currently active note +- `getActiveContextNotePath()` - get path of active note +- `setHoistedNoteId(noteId)` - hoist/unhoist note + +### Note access & search +- `getNote(noteId)` - get note by ID +- `getNotes(noteIds)` - bulk fetch notes +- `searchForNotes(searchString)` - search with full query syntax +- `searchForNote(searchString)` - search returning first result + +### Calendar/date notes +- `getTodayNote()` - get/create today's note +- `getDayNote(date)` / `getWeekNote(date)` / `getMonthNote(month)` / `getYearNote(year)` + +### Editor access +- `getActiveContextTextEditor()` - get CKEditor instance +- `getActiveContextCodeEditor()` - get CodeMirror instance +- `addTextToActiveContextEditor(text)` - insert text into active editor + +### Dialogs & notifications +- `showMessage(msg)` - info toast +- `showError(msg)` - error toast +- `showConfirmDialog(msg)` - confirm dialog (returns boolean) +- `showPromptDialog(msg)` - prompt dialog (returns user input) + +### Backend integration +- `runOnBackend(func, params)` - execute a function on the backend + +### UI interaction +- `triggerCommand(name, data)` - trigger a command +- `bindGlobalShortcut(shortcut, handler, namespace?)` - add keyboard shortcut + +### Utilities +- `formatDateISO(date)` - format as YYYY-MM-DD +- `randomString(length)` - generate random string +- `dayjs` - day.js library +- `log(message)` - log to script log pane + +## FNote object + +Available via `getNote()`, `getActiveContextNote()`, `useNoteContext()`, etc. 
+ +### Properties +- `note.noteId`, `note.title`, `note.type`, `note.mime` +- `note.isProtected`, `note.isArchived` + +### Content +- `note.getContent()` - get note content +- `note.getJsonContent()` - parse content as JSON + +### Hierarchy +- `note.getParentNotes()` / `note.getChildNotes()` +- `note.hasChildren()`, `note.getSubtreeNoteIds()` + +### Attributes +- `note.getAttributes(type?, name?)` - all attributes (including inherited) +- `note.getOwnedAttributes(type?, name?)` - only owned attributes +- `note.hasAttribute(type, name)` - check for attribute + +## Legacy jQuery widgets (avoid if possible) + +Only use legacy widgets if you specifically need jQuery or cannot use JSX. + +```javascript +// Language: JS frontend, Label: #widget +class MyWidget extends api.BasicWidget { + get position() { return 1; } + get parentWidget() { return "center-pane"; } + + doRender() { + this.$widget = $("
"); + this.$widget.append($("") + .on("click", () => api.showMessage("Hello!"))); + return this.$widget; + } +} +module.exports = new MyWidget(); +``` + +Key differences from Preact: +- Use `api.` global instead of imports +- `get parentWidget()` instead of `parent` field +- `module.exports = new MyWidget()` (instance) for most widgets +- `module.exports = MyWidget` (class, no `new`) for `note-detail-pane` +- Right pane: extend `api.RightPanelWidget`, override `doRenderBody()` instead of `doRender()` + ## Module system -Child notes of a script act as modules. For JS frontend, use `module.exports` and function parameters. For JSX, use `import`/`export` syntax. +For JSX, use `import`/`export` syntax between notes. For JS frontend, use `module.exports` and function parameters matching child note titles. From 2929d64fa00b569eab12f004ddb3e9fcbc751865 Mon Sep 17 00:00:00 2001 From: Elian Doran Date: Tue, 31 Mar 2026 18:07:28 +0300 Subject: [PATCH 07/19] chore(llm): improve TSX import skill --- apps/server/src/services/llm/skills/frontend_scripting.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/apps/server/src/services/llm/skills/frontend_scripting.md b/apps/server/src/services/llm/skills/frontend_scripting.md index 826f98dad9..dd59e608c3 100644 --- a/apps/server/src/services/llm/skills/frontend_scripting.md +++ b/apps/server/src/services/llm/skills/frontend_scripting.md @@ -4,6 +4,8 @@ Frontend scripts run in the browser. They can manipulate the UI, navigate notes, IMPORTANT: Always prefer Preact JSX widgets over legacy jQuery widgets. Use JSX code notes with `import`/`export` syntax. +CRITICAL: In JSX notes, always use top-level `import` statements (e.g. `import { useState } from "trilium:preact"`). NEVER use dynamic `await import()` for Preact imports — this will break hooks and components. Dynamic imports are not needed because JSX notes natively support ES module `import`/`export` syntax. + ## Creating a frontend script 1. 
Create a Code note with language "JSX" (preferred) or "JS frontend" (legacy only). From 4d169809bdad8500792199e2f0ffc6e8a1f1f0da Mon Sep 17 00:00:00 2001 From: Elian Doran Date: Tue, 31 Mar 2026 18:12:42 +0300 Subject: [PATCH 08/19] chore(llm): improve render notes skill --- apps/server/src/services/llm/skills/frontend_scripting.md | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/apps/server/src/services/llm/skills/frontend_scripting.md b/apps/server/src/services/llm/skills/frontend_scripting.md index dd59e608c3..493b4446f8 100644 --- a/apps/server/src/services/llm/skills/frontend_scripting.md +++ b/apps/server/src/services/llm/skills/frontend_scripting.md @@ -123,8 +123,11 @@ import { ActionButton, Button, LinkButton, Modal, ### Render notes (JSX) For rendering custom content inside a note: -1. Create a JSX code note that exports a default component. -2. Create a parent note and set `~renderNote` relation pointing to the JSX note. +1. Create a "render note" (type: Render Note) where you want the content to appear. +2. Create a JSX code note **as a child** of the render note, exporting a default component. +3. On the render note, add a `~renderNote` relation pointing to the child JSX note. + +IMPORTANT: Always create the JSX code note as a child of the render note, not as a sibling or at the root. This keeps them organized together. 
```jsx export default function MyRenderNote() { From 5f669684c483063c998d61b8fa86c0a8707e6522 Mon Sep 17 00:00:00 2001 From: Elian Doran Date: Tue, 31 Mar 2026 18:39:47 +0300 Subject: [PATCH 09/19] feat(llm): enforce MIME type in code notes --- .../src/services/llm/tools/note_tools.ts | 24 +++++++++++++++---- 1 file changed, 19 insertions(+), 5 deletions(-) diff --git a/apps/server/src/services/llm/tools/note_tools.ts b/apps/server/src/services/llm/tools/note_tools.ts index 4e86b6aa1e..7e65c7ebde 100644 --- a/apps/server/src/services/llm/tools/note_tools.ts +++ b/apps/server/src/services/llm/tools/note_tools.ts @@ -186,14 +186,27 @@ export const appendToNote = tool({ * Create a new note. */ export const createNote = tool({ - description: "Create a new note in the user's knowledge base. Returns the created note's ID and title.", + description: [ + "Create a new note in the user's knowledge base. Returns the created note's ID and title.", + "Set type to 'text' for rich text notes (content in Markdown) or 'code' for code notes (must also set mime).", + "Common mime values for code notes:", + "'application/javascript;env=frontend' (JS frontend),", + "'application/javascript;env=backend' (JS backend),", + "'text/jsx' (Preact JSX, preferred for frontend widgets),", + "'text/css', 'text/html', 'application/json', 'text/x-python', 'text/x-sh'." + ].join(" "), inputSchema: z.object({ - parentNoteId: z.string().describe("The ID of the parent note where the new note will be created. Use 'root' for top-level notes."), + parentNoteId: z.string().describe("The ID of the parent note. Use 'root' for top-level notes."), title: z.string().describe("The title of the new note"), content: z.string().describe("The content of the note (Markdown for text notes, plain text for code notes)"), - type: z.enum(["text", "code"]).optional().describe("The type of note to create. 
Defaults to 'text'.") + type: z.enum(["text", "code"]).describe("The type of note to create."), + mime: z.string().optional().describe("MIME type, REQUIRED for code notes (e.g. 'application/javascript;env=backend', 'text/jsx'). Ignored for text notes.") }), - execute: async ({ parentNoteId, title, content, type = "text" }) => { + execute: async ({ parentNoteId, title, content, type, mime }) => { + if (type === "code" && !mime) { + return { error: "mime is required when creating code notes" }; + } + const parentNote = becca.getNote(parentNoteId); if (!parentNote) { return { error: "Parent note not found" }; @@ -211,7 +224,8 @@ export const createNote = tool({ parentNoteId, title, content: htmlContent, - type + type, + ...(mime ? { mime } : {}) }); return { From cabce14a49bba3e552435b333fc4cbb92a75b324 Mon Sep 17 00:00:00 2001 From: Elian Doran Date: Tue, 31 Mar 2026 18:51:19 +0300 Subject: [PATCH 10/19] chore(llm): set up for ChatGPT --- .../options/llm/AddProviderModal.tsx | 3 +- apps/server/package.json | 1 + apps/server/src/services/llm/index.ts | 4 +- .../src/services/llm/providers/openai.ts | 202 ++++++++++++++++++ pnpm-lock.yaml | 33 ++- 5 files changed, 233 insertions(+), 10 deletions(-) create mode 100644 apps/server/src/services/llm/providers/openai.ts diff --git a/apps/client/src/widgets/type_widgets/options/llm/AddProviderModal.tsx b/apps/client/src/widgets/type_widgets/options/llm/AddProviderModal.tsx index 6e6c455979..e81be7643a 100644 --- a/apps/client/src/widgets/type_widgets/options/llm/AddProviderModal.tsx +++ b/apps/client/src/widgets/type_widgets/options/llm/AddProviderModal.tsx @@ -19,7 +19,8 @@ export interface ProviderType { } export const PROVIDER_TYPES: ProviderType[] = [ - { id: "anthropic", name: "Anthropic" } + { id: "anthropic", name: "Anthropic" }, + { id: "openai", name: "OpenAI" } ]; interface AddProviderModalProps { diff --git a/apps/server/package.json b/apps/server/package.json index 76d0c06dec..2fc03d8b16 100644 --- 
a/apps/server/package.json +++ b/apps/server/package.json @@ -31,6 +31,7 @@ }, "dependencies": { "@ai-sdk/anthropic": "^2.0.0", + "@ai-sdk/openai": "2.0.101", "ai": "^5.0.0", "better-sqlite3": "12.8.0", "html-to-text": "9.0.5", diff --git a/apps/server/src/services/llm/index.ts b/apps/server/src/services/llm/index.ts index 4d04acb485..2727dc428b 100644 --- a/apps/server/src/services/llm/index.ts +++ b/apps/server/src/services/llm/index.ts @@ -1,5 +1,6 @@ import type { LlmProvider } from "./types.js"; import { AnthropicProvider } from "./providers/anthropic.js"; +import { OpenAiProvider } from "./providers/openai.js"; import optionService from "../options.js"; import log from "../log.js"; @@ -16,7 +17,8 @@ export interface LlmProviderSetup { /** Factory functions for creating provider instances */ const providerFactories: Record LlmProvider> = { - anthropic: (apiKey) => new AnthropicProvider(apiKey) + anthropic: (apiKey) => new AnthropicProvider(apiKey), + openai: (apiKey) => new OpenAiProvider(apiKey) }; /** Cache of instantiated providers by their config ID */ diff --git a/apps/server/src/services/llm/providers/openai.ts b/apps/server/src/services/llm/providers/openai.ts new file mode 100644 index 0000000000..c58399aa94 --- /dev/null +++ b/apps/server/src/services/llm/providers/openai.ts @@ -0,0 +1,202 @@ +import { createOpenAI, type OpenAIProvider as OpenAISDKProvider } from "@ai-sdk/openai"; +import { generateText, streamText, stepCountIs, type CoreMessage, type ToolSet } from "ai"; +import type { LlmMessage } from "@triliumnext/commons"; + +import becca from "../../../becca/becca.js"; +import { getSkillsSummary } from "../skills/index.js"; +import { noteTools, attributeTools, hierarchyTools, skillTools, currentNoteTools } from "../tools/index.js"; +import type { LlmProvider, LlmProviderConfig, ModelInfo, ModelPricing, StreamResult } from "../types.js"; + +const DEFAULT_MODEL = "gpt-4.1"; +const DEFAULT_MAX_TOKENS = 8096; +const TITLE_MODEL = "gpt-4.1-mini"; 
+const TITLE_MAX_TOKENS = 30; + +/** + * Calculate effective cost for comparison (weighted average: 1 input + 3 output). + */ +function effectiveCost(pricing: ModelPricing): number { + return (pricing.input + 3 * pricing.output) / 4; +} + +/** + * Available OpenAI models with pricing (USD per million tokens). + * Source: https://platform.openai.com/docs/pricing + */ +const BASE_MODELS: Omit[] = [ + // ===== Current Models ===== + { + id: "gpt-4.1", + name: "GPT-4.1", + pricing: { input: 2, output: 8 }, + contextWindow: 1047576, + isDefault: true + }, + { + id: "gpt-4.1-mini", + name: "GPT-4.1 Mini", + pricing: { input: 0.4, output: 1.6 }, + contextWindow: 1047576 + }, + { + id: "gpt-4.1-nano", + name: "GPT-4.1 Nano", + pricing: { input: 0.1, output: 0.4 }, + contextWindow: 1047576 + }, + { + id: "o3", + name: "o3", + pricing: { input: 2, output: 8 }, + contextWindow: 200000 + }, + { + id: "o4-mini", + name: "o4-mini", + pricing: { input: 1.1, output: 4.4 }, + contextWindow: 200000 + }, + // ===== Legacy Models ===== + { + id: "gpt-4o", + name: "GPT-4o", + pricing: { input: 2.5, output: 10 }, + contextWindow: 128000, + isLegacy: true + }, + { + id: "gpt-4o-mini", + name: "GPT-4o Mini", + pricing: { input: 0.15, output: 0.6 }, + contextWindow: 128000, + isLegacy: true + } +]; + +const baselineModel = BASE_MODELS.find(m => m.isDefault) || BASE_MODELS[0]; +const baselineCost = effectiveCost(baselineModel.pricing); + +const AVAILABLE_MODELS: ModelInfo[] = BASE_MODELS.map(m => ({ + ...m, + costMultiplier: Math.round((effectiveCost(m.pricing) / baselineCost) * 10) / 10 +})); + +const MODEL_PRICING: Record = Object.fromEntries( + AVAILABLE_MODELS.map(m => [m.id, m.pricing]) +); + +/** + * Build a lightweight context hint about the current note. + */ +function buildNoteHint(noteId: string): string | null { + const note = becca.getNote(noteId); + if (!note) { + return null; + } + + return `The user is currently viewing a ${note.type} note titled "${note.title}". 
Use the get_current_note tool to read its content if needed.`; +} + +export class OpenAiProvider implements LlmProvider { + name = "openai"; + private openai: OpenAISDKProvider; + + constructor(apiKey: string) { + if (!apiKey) { + throw new Error("API key is required for OpenAI provider"); + } + this.openai = createOpenAI({ apiKey }); + } + + chat(messages: LlmMessage[], config: LlmProviderConfig): StreamResult { + let systemPrompt = config.systemPrompt || messages.find(m => m.role === "system")?.content; + const chatMessages = messages.filter(m => m.role !== "system"); + + // Add a lightweight hint about the current note + if (config.contextNoteId) { + const noteHint = buildNoteHint(config.contextNoteId); + if (noteHint) { + systemPrompt = systemPrompt + ? `${systemPrompt}\n\n${noteHint}` + : noteHint; + } + } + + // Add skills hint + if (config.enableNoteTools) { + const skillsHint = `You have access to skills that provide specialized instructions. Load a skill with the load_skill tool before performing complex operations.\n\nAvailable skills:\n${getSkillsSummary()}`; + systemPrompt = systemPrompt + ? 
`${systemPrompt}\n\n${skillsHint}` + : skillsHint; + } + + const coreMessages: CoreMessage[] = []; + + if (systemPrompt) { + coreMessages.push({ + role: "system", + content: systemPrompt + }); + } + + for (const m of chatMessages) { + coreMessages.push({ + role: m.role as "user" | "assistant", + content: m.content + }); + } + + const model = this.openai(config.model || DEFAULT_MODEL); + + const streamOptions: Parameters[0] = { + model, + messages: coreMessages, + maxOutputTokens: config.maxTokens || DEFAULT_MAX_TOKENS + }; + + // Build tools object + const tools: ToolSet = {}; + + if (config.contextNoteId) { + Object.assign(tools, currentNoteTools(config.contextNoteId)); + } + + if (config.enableNoteTools) { + Object.assign(tools, noteTools); + Object.assign(tools, attributeTools); + Object.assign(tools, hierarchyTools); + Object.assign(tools, skillTools); + } + + if (Object.keys(tools).length > 0) { + streamOptions.tools = tools; + streamOptions.stopWhen = stepCountIs(5); + streamOptions.toolChoice = "auto"; + } + + return streamText(streamOptions); + } + + getModelPricing(model: string): ModelPricing | undefined { + return MODEL_PRICING[model]; + } + + getAvailableModels(): ModelInfo[] { + return AVAILABLE_MODELS; + } + + async generateTitle(firstMessage: string): Promise { + const { text } = await generateText({ + model: this.openai(TITLE_MODEL), + maxOutputTokens: TITLE_MAX_TOKENS, + messages: [ + { + role: "user", + content: `Summarize the following message as a very short chat title (max 6 words). 
Reply with ONLY the title, no quotes or punctuation at the end.\n\nMessage: ${firstMessage}` + } + ] + }); + + return text.trim(); + } +} diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index fe92e7e940..a52665529e 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -559,6 +559,9 @@ importers: '@ai-sdk/anthropic': specifier: ^2.0.0 version: 2.0.71(zod@4.3.6) + '@ai-sdk/openai': + specifier: 2.0.101 + version: 2.0.101(zod@4.3.6) ai: specifier: ^5.0.0 version: 5.0.161(zod@4.3.6) @@ -1548,6 +1551,12 @@ packages: peerDependencies: zod: ^3.25.76 || ^4.1.8 + '@ai-sdk/openai@2.0.101': + resolution: {integrity: sha512-kQ52HLV45T3bQbRzWExXW6+pkg3Nvq4dUnZHUPJXWgkUUsAhZjxHrXqPOc/0yfn/4+Dn2uLmIgAkP9IfzMMcNg==} + engines: {node: '>=18'} + peerDependencies: + zod: ^3.25.76 || ^4.1.8 + '@ai-sdk/provider-utils@3.0.22': resolution: {integrity: sha512-fFT1KfUUKktfAFm5mClJhS1oux9tP2qgzmEZVl5UdwltQ1LO/s8hd7znVrgKzivwv1s1FIPza0s9OpJaNB/vHw==} engines: {node: '>=18'} @@ -16040,6 +16049,12 @@ snapshots: '@vercel/oidc': 3.1.0 zod: 4.3.6 + '@ai-sdk/openai@2.0.101(zod@4.3.6)': + dependencies: + '@ai-sdk/provider': 2.0.1 + '@ai-sdk/provider-utils': 3.0.22(zod@4.3.6) + zod: 4.3.6 + '@ai-sdk/provider-utils@3.0.22(zod@4.3.6)': dependencies: '@ai-sdk/provider': 2.0.1 @@ -17247,8 +17262,6 @@ snapshots: '@ckeditor/ckeditor5-table': 47.6.1 '@ckeditor/ckeditor5-utils': 47.6.1 ckeditor5: 47.6.1 - transitivePeerDependencies: - - supports-color '@ckeditor/ckeditor5-emoji@47.6.1': dependencies: @@ -17384,6 +17397,8 @@ snapshots: '@ckeditor/ckeditor5-ui': 47.6.1 '@ckeditor/ckeditor5-utils': 47.6.1 ckeditor5: 47.6.1 + transitivePeerDependencies: + - supports-color '@ckeditor/ckeditor5-highlight@47.6.1': dependencies: @@ -17393,6 +17408,8 @@ snapshots: '@ckeditor/ckeditor5-ui': 47.6.1 '@ckeditor/ckeditor5-utils': 47.6.1 ckeditor5: 47.6.1 + transitivePeerDependencies: + - supports-color '@ckeditor/ckeditor5-horizontal-line@47.6.1': dependencies: @@ -17402,6 +17419,8 @@ snapshots: '@ckeditor/ckeditor5-utils': 
47.6.1 '@ckeditor/ckeditor5-widget': 47.6.1 ckeditor5: 47.6.1 + transitivePeerDependencies: + - supports-color '@ckeditor/ckeditor5-html-embed@47.6.1': dependencies: @@ -17411,6 +17430,8 @@ snapshots: '@ckeditor/ckeditor5-utils': 47.6.1 '@ckeditor/ckeditor5-widget': 47.6.1 ckeditor5: 47.6.1 + transitivePeerDependencies: + - supports-color '@ckeditor/ckeditor5-html-support@47.6.1': dependencies: @@ -17468,6 +17489,8 @@ snapshots: '@ckeditor/ckeditor5-ui': 47.6.1 '@ckeditor/ckeditor5-utils': 47.6.1 ckeditor5: 47.6.1 + transitivePeerDependencies: + - supports-color '@ckeditor/ckeditor5-inspector@5.0.0': {} @@ -17592,8 +17615,6 @@ snapshots: '@ckeditor/ckeditor5-ui': 47.6.1 '@ckeditor/ckeditor5-utils': 47.6.1 ckeditor5: 47.6.1 - transitivePeerDependencies: - - supports-color '@ckeditor/ckeditor5-operations-compressor@47.6.1': dependencies: @@ -17709,8 +17730,6 @@ snapshots: '@ckeditor/ckeditor5-ui': 47.6.1 '@ckeditor/ckeditor5-utils': 47.6.1 ckeditor5: 47.6.1 - transitivePeerDependencies: - - supports-color '@ckeditor/ckeditor5-restricted-editing@47.6.1': dependencies: @@ -17899,8 +17918,6 @@ snapshots: '@ckeditor/ckeditor5-icons': 47.6.1 '@ckeditor/ckeditor5-ui': 47.6.1 '@ckeditor/ckeditor5-utils': 47.6.1 - transitivePeerDependencies: - - supports-color '@ckeditor/ckeditor5-upload@47.6.1': dependencies: From f04f47d17a1b34784a374fffc338ad91e911c814 Mon Sep 17 00:00:00 2001 From: Elian Doran Date: Tue, 31 Mar 2026 18:59:02 +0300 Subject: [PATCH 11/19] fix(llm): not returning full list of models --- apps/client/src/services/llm_chat.ts | 6 ++-- .../type_widgets/llm_chat/useLlmChat.ts | 2 ++ apps/server/src/routes/api/llm_chat.ts | 13 +++----- apps/server/src/services/llm/index.ts | 31 ++++++++++++++++++- apps/server/src/services/llm/types.ts | 2 ++ packages/commons/src/lib/llm_api.ts | 2 ++ 6 files changed, 43 insertions(+), 13 deletions(-) diff --git a/apps/client/src/services/llm_chat.ts b/apps/client/src/services/llm_chat.ts index 13f282fe02..e4263aa896 100644 --- 
a/apps/client/src/services/llm_chat.ts +++ b/apps/client/src/services/llm_chat.ts @@ -3,10 +3,10 @@ import type { LlmChatConfig, LlmCitation, LlmMessage, LlmModelInfo,LlmUsage } fr import server from "./server.js"; /** - * Fetch available models for a provider. + * Fetch available models from all configured providers. */ -export async function getAvailableModels(provider: string = "anthropic"): Promise { - const response = await server.get<{ models?: LlmModelInfo[] }>(`llm-chat/models?provider=${encodeURIComponent(provider)}`); +export async function getAvailableModels(): Promise { + const response = await server.get<{ models?: LlmModelInfo[] }>("llm-chat/models"); return response.models ?? []; } diff --git a/apps/client/src/widgets/type_widgets/llm_chat/useLlmChat.ts b/apps/client/src/widgets/type_widgets/llm_chat/useLlmChat.ts index eb5033cc73..ddca7deb98 100644 --- a/apps/client/src/widgets/type_widgets/llm_chat/useLlmChat.ts +++ b/apps/client/src/widgets/type_widgets/llm_chat/useLlmChat.ts @@ -239,8 +239,10 @@ export function useLlmChat( .join("") })); + const selectedModelProvider = availableModels.find(m => m.id === selectedModel)?.provider; const streamOptions: Parameters[1] = { model: selectedModel || undefined, + provider: selectedModelProvider, enableWebSearch, enableNoteTools, contextNoteId, diff --git a/apps/server/src/routes/api/llm_chat.ts b/apps/server/src/routes/api/llm_chat.ts index 8302687e13..dd5bf149c8 100644 --- a/apps/server/src/routes/api/llm_chat.ts +++ b/apps/server/src/routes/api/llm_chat.ts @@ -2,7 +2,7 @@ import type { LlmMessage } from "@triliumnext/commons"; import type { Request, Response } from "express"; import { generateChatTitle } from "../../services/llm/chat_title.js"; -import { getProviderByType, hasConfiguredProviders, type LlmProviderConfig } from "../../services/llm/index.js"; +import { getAllModels, getProviderByType, hasConfiguredProviders, type LlmProviderConfig } from "../../services/llm/index.js"; import { 
streamToChunks } from "../../services/llm/stream.js"; import log from "../../services/log.js"; import { safeExtractMessageAndStackFromError } from "../../services/utils.js"; @@ -88,19 +88,14 @@ async function streamChat(req: Request, res: Response) { } /** - * Get available models for a provider. + * Get available models from all configured providers. */ -function getModels(req: Request, _res: Response) { - const providerType = req.query.provider as string || "anthropic"; - - // Return empty array when no providers configured - client handles this gracefully +function getModels(_req: Request, _res: Response) { if (!hasConfiguredProviders()) { return { models: [] }; } - const llmProvider = getProviderByType(providerType); - const models = llmProvider.getAvailableModels(); - return { models }; + return { models: getAllModels() }; } export default { diff --git a/apps/server/src/services/llm/index.ts b/apps/server/src/services/llm/index.ts index 2727dc428b..4a29c37a6e 100644 --- a/apps/server/src/services/llm/index.ts +++ b/apps/server/src/services/llm/index.ts @@ -1,4 +1,4 @@ -import type { LlmProvider } from "./types.js"; +import type { LlmProvider, ModelInfo } from "./types.js"; import { AnthropicProvider } from "./providers/anthropic.js"; import { OpenAiProvider } from "./providers/openai.js"; import optionService from "../options.js"; @@ -97,6 +97,35 @@ export function hasConfiguredProviders(): boolean { return getConfiguredProviders().length > 0; } +/** + * Get all models from all configured providers, tagged with their provider type. 
+ */ +export function getAllModels(): ModelInfo[] { + const configs = getConfiguredProviders(); + const seenProviderTypes = new Set(); + const allModels: ModelInfo[] = []; + + for (const config of configs) { + // Only include models once per provider type (not per config instance) + if (seenProviderTypes.has(config.provider)) { + continue; + } + seenProviderTypes.add(config.provider); + + try { + const provider = getProvider(config.id); + const models = provider.getAvailableModels(); + for (const model of models) { + allModels.push({ ...model, provider: config.provider }); + } + } catch (e) { + log.error(`Failed to get models from provider ${config.provider}: ${e}`); + } + } + + return allModels; +} + /** * Clear the provider cache. Call this when provider configurations change. */ diff --git a/apps/server/src/services/llm/types.ts b/apps/server/src/services/llm/types.ts index 0e4ae71b32..3924d8601c 100644 --- a/apps/server/src/services/llm/types.ts +++ b/apps/server/src/services/llm/types.ts @@ -38,6 +38,8 @@ export interface ModelInfo { id: string; /** Human-readable name (e.g., "Claude Sonnet 4") */ name: string; + /** Provider type that owns this model (e.g., "anthropic", "openai") */ + provider?: string; /** Pricing per million tokens */ pricing: ModelPricing; /** Whether this is the default model */ diff --git a/packages/commons/src/lib/llm_api.ts b/packages/commons/src/lib/llm_api.ts index 5f6525bcc6..7554d9d40c 100644 --- a/packages/commons/src/lib/llm_api.ts +++ b/packages/commons/src/lib/llm_api.ts @@ -63,6 +63,8 @@ export interface LlmModelInfo { id: string; /** Human-readable name (e.g., "Claude Sonnet 4") */ name: string; + /** Provider type that owns this model (e.g., "anthropic", "openai") */ + provider?: string; /** Pricing per million tokens */ pricing: LlmModelPricing; /** Whether this is the default model */ From a45c1818a5a8f758f79c2439c5f73d4ba973ea7c Mon Sep 17 00:00:00 2001 From: Elian Doran Date: Tue, 31 Mar 2026 19:05:38 +0300 Subject: 
[PATCH 12/19] refactor(llm): deduplicate logic between providers --- .../src/services/llm/providers/anthropic.ts | 156 ++++----------- .../services/llm/providers/base_provider.ts | 179 ++++++++++++++++++ .../src/services/llm/providers/openai.ts | 147 ++------------ 3 files changed, 224 insertions(+), 258 deletions(-) create mode 100644 apps/server/src/services/llm/providers/base_provider.ts diff --git a/apps/server/src/services/llm/providers/anthropic.ts b/apps/server/src/services/llm/providers/anthropic.ts index 688ef9afa4..d444b34a51 100644 --- a/apps/server/src/services/llm/providers/anthropic.ts +++ b/apps/server/src/services/llm/providers/anthropic.ts @@ -1,30 +1,15 @@ import { createAnthropic, type AnthropicProvider as AnthropicSDKProvider } from "@ai-sdk/anthropic"; -import { generateText, streamText, stepCountIs, type CoreMessage, type ToolSet } from "ai"; +import { stepCountIs, streamText, type CoreMessage, type ToolSet } from "ai"; import type { LlmMessage } from "@triliumnext/commons"; -import becca from "../../../becca/becca.js"; -import { getSkillsSummary } from "../skills/index.js"; -import { noteTools, attributeTools, hierarchyTools, skillTools, currentNoteTools } from "../tools/index.js"; -import type { LlmProvider, LlmProviderConfig, ModelInfo, ModelPricing, StreamResult } from "../types.js"; - -const DEFAULT_MODEL = "claude-sonnet-4-6"; -const DEFAULT_MAX_TOKENS = 8096; -const TITLE_MODEL = "claude-haiku-4-5-20251001"; -const TITLE_MAX_TOKENS = 30; - -/** - * Calculate effective cost for comparison (weighted average: 1 input + 3 output). - * Output is weighted more heavily as it's typically the dominant cost factor. - */ -function effectiveCost(pricing: ModelPricing): number { - return (pricing.input + 3 * pricing.output) / 4; -} +import type { LlmProviderConfig, StreamResult } from "../types.js"; +import { BaseProvider, buildModelList } from "./base_provider.js"; /** * Available Anthropic models with pricing (USD per million tokens). 
* Source: https://docs.anthropic.com/en/docs/about-claude/models */ -const BASE_MODELS: Omit[] = [ +const { models: AVAILABLE_MODELS, pricing: MODEL_PRICING } = buildModelList([ // ===== Current Models ===== { id: "claude-sonnet-4-6", @@ -50,7 +35,7 @@ const BASE_MODELS: Omit[] = [ id: "claude-sonnet-4-5-20250929", name: "Claude Sonnet 4.5", pricing: { input: 3, output: 15 }, - contextWindow: 200000, // 1M available with beta header + contextWindow: 200000, isLegacy: true }, { @@ -71,7 +56,7 @@ const BASE_MODELS: Omit[] = [ id: "claude-sonnet-4-20250514", name: "Claude Sonnet 4.0", pricing: { input: 3, output: 15 }, - contextWindow: 200000, // 1M available with beta header + contextWindow: 200000, isLegacy: true }, { @@ -81,77 +66,44 @@ const BASE_MODELS: Omit[] = [ contextWindow: 200000, isLegacy: true } -]; +]); -// Use default model (Sonnet) as baseline for cost multiplier -const baselineModel = BASE_MODELS.find(m => m.isDefault) || BASE_MODELS[0]; -const baselineCost = effectiveCost(baselineModel.pricing); - -// Build models with cost multipliers -const AVAILABLE_MODELS: ModelInfo[] = BASE_MODELS.map(m => ({ - ...m, - costMultiplier: Math.round((effectiveCost(m.pricing) / baselineCost) * 10) / 10 -})); - -// Build pricing lookup from available models -const MODEL_PRICING: Record = Object.fromEntries( - AVAILABLE_MODELS.map(m => [m.id, m.pricing]) -); - -/** - * Build a lightweight context hint about the current note (title + type only, no content). - * The full content is available via the get_current_note tool. - */ -function buildNoteHint(noteId: string): string | null { - const note = becca.getNote(noteId); - if (!note) { - return null; - } - - return `The user is currently viewing a ${note.type} note titled "${note.title}". 
Use the get_current_note tool to read its content if needed.`; -} - -export class AnthropicProvider implements LlmProvider { +export class AnthropicProvider extends BaseProvider { name = "anthropic"; + protected defaultModel = "claude-sonnet-4-6"; + protected titleModel = "claude-haiku-4-5-20251001"; + protected availableModels = AVAILABLE_MODELS; + protected modelPricing = MODEL_PRICING; + private anthropic: AnthropicSDKProvider; constructor(apiKey: string) { + super(); if (!apiKey) { throw new Error("API key is required for Anthropic provider"); } this.anthropic = createAnthropic({ apiKey }); } - chat(messages: LlmMessage[], config: LlmProviderConfig): StreamResult { - let systemPrompt = config.systemPrompt || messages.find(m => m.role === "system")?.content; + protected createModel(modelId: string) { + return this.anthropic(modelId); + } + + /** + * Override chat to add Anthropic-specific features: + * - Prompt caching via providerOptions + * - Extended thinking + * - Web search tool + */ + override chat(messages: LlmMessage[], config: LlmProviderConfig): StreamResult { + const systemPrompt = this.buildSystemPrompt(messages, config); const chatMessages = messages.filter(m => m.role !== "system"); - // Add a lightweight hint about the current note (content available via tool) - if (config.contextNoteId) { - const noteHint = buildNoteHint(config.contextNoteId); - if (noteHint) { - systemPrompt = systemPrompt - ? `${systemPrompt}\n\n${noteHint}` - : noteHint; - } - } - - // Add skills hint so the LLM knows to load skills before complex operations - if (config.enableNoteTools) { - const skillsHint = `You have access to skills that provide specialized instructions. Load a skill with the load_skill tool before performing complex operations.\n\nAvailable skills:\n${getSkillsSummary()}`; - systemPrompt = systemPrompt - ? `${systemPrompt}\n\n${skillsHint}` - : skillsHint; - } - - // Convert to AI SDK message format with cache control breakpoints. 
- // The system prompt and conversation history (all but the last user message) - // are stable across turns, so we mark them for caching to reduce costs. + // Anthropic-specific: cache control breakpoints on system prompt and conversation history const CACHE_CONTROL = { anthropic: { cacheControl: { type: "ephemeral" as const } } }; const coreMessages: CoreMessage[] = []; - // System prompt as a cacheable message if (systemPrompt) { coreMessages.push({ role: "system", @@ -160,29 +112,23 @@ export class AnthropicProvider implements LlmProvider { }); } - // Conversation messages for (let i = 0; i < chatMessages.length; i++) { const m = chatMessages[i]; const isLastBeforeNewTurn = i === chatMessages.length - 2; coreMessages.push({ role: m.role as "user" | "assistant", content: m.content, - // Cache breakpoint on the second-to-last message: - // everything up to here is identical across consecutive turns. ...(isLastBeforeNewTurn && { providerOptions: CACHE_CONTROL }) }); } - const model = this.anthropic(config.model || DEFAULT_MODEL); - - // Build options for streamText const streamOptions: Parameters[0] = { - model, + model: this.createModel(config.model || this.defaultModel), messages: coreMessages, - maxOutputTokens: config.maxTokens || DEFAULT_MAX_TOKENS + maxOutputTokens: config.maxTokens || 8096 }; - // Enable extended thinking for deeper reasoning + // Anthropic-specific: extended thinking if (config.enableExtendedThinking) { const thinkingBudget = config.thinkingBudget || 10000; streamOptions.providerOptions = { @@ -194,13 +140,13 @@ export class AnthropicProvider implements LlmProvider { } }; streamOptions.maxOutputTokens = Math.max( - streamOptions.maxOutputTokens || DEFAULT_MAX_TOKENS, + streamOptions.maxOutputTokens || 8096, thinkingBudget + 4000 ); } - // Build tools object - const tools: ToolSet = {}; + // Build tools (shared + Anthropic-specific web search) + const tools: ToolSet = this.buildTools(config); if (config.enableWebSearch) { tools.web_search = 
this.anthropic.tools.webSearch_20250305({ @@ -208,48 +154,12 @@ export class AnthropicProvider implements LlmProvider { }); } - if (config.contextNoteId) { - Object.assign(tools, currentNoteTools(config.contextNoteId)); - } - - if (config.enableNoteTools) { - Object.assign(tools, noteTools); - Object.assign(tools, attributeTools); - Object.assign(tools, hierarchyTools); - Object.assign(tools, skillTools); - } - if (Object.keys(tools).length > 0) { streamOptions.tools = tools; - // Allow multiple tool use cycles before final response streamOptions.stopWhen = stepCountIs(5); - // Let model decide when to use tools vs respond with text streamOptions.toolChoice = "auto"; } return streamText(streamOptions); } - - getModelPricing(model: string): ModelPricing | undefined { - return MODEL_PRICING[model]; - } - - getAvailableModels(): ModelInfo[] { - return AVAILABLE_MODELS; - } - - async generateTitle(firstMessage: string): Promise { - const { text } = await generateText({ - model: this.anthropic(TITLE_MODEL), - maxOutputTokens: TITLE_MAX_TOKENS, - messages: [ - { - role: "user", - content: `Summarize the following message as a very short chat title (max 6 words). Reply with ONLY the title, no quotes or punctuation at the end.\n\nMessage: ${firstMessage}` - } - ] - }); - - return text.trim(); - } } diff --git a/apps/server/src/services/llm/providers/base_provider.ts b/apps/server/src/services/llm/providers/base_provider.ts new file mode 100644 index 0000000000..bcc22bd3c0 --- /dev/null +++ b/apps/server/src/services/llm/providers/base_provider.ts @@ -0,0 +1,179 @@ +/** + * Base class for LLM providers. Handles shared logic for system prompt building, + * tool assembly, model pricing, and title generation. 
+ */ + +import { generateText, streamText, stepCountIs, type CoreMessage, type ToolSet } from "ai"; +import type { LanguageModel } from "ai"; +import type { LlmMessage } from "@triliumnext/commons"; + +import becca from "../../../becca/becca.js"; +import { getSkillsSummary } from "../skills/index.js"; +import { noteTools, attributeTools, hierarchyTools, skillTools, currentNoteTools } from "../tools/index.js"; +import type { LlmProvider, LlmProviderConfig, ModelInfo, ModelPricing, StreamResult } from "../types.js"; + +const DEFAULT_MAX_TOKENS = 8096; +const TITLE_MAX_TOKENS = 30; + +/** + * Calculate effective cost for comparison (weighted average: 1 input + 3 output). + * Output is weighted more heavily as it's typically the dominant cost factor. + */ +function effectiveCost(pricing: ModelPricing): number { + return (pricing.input + 3 * pricing.output) / 4; +} + +/** + * Build a lightweight context hint about the current note (title + type only, no content). + */ +function buildNoteHint(noteId: string): string | null { + const note = becca.getNote(noteId); + if (!note) { + return null; + } + + return `The user is currently viewing a ${note.type} note titled "${note.title}". Use the get_current_note tool to read its content if needed.`; +} + +/** + * Build the model list with cost multipliers from a base model definition array. 
+ */ +export function buildModelList(baseModels: Omit[]): { + models: ModelInfo[]; + pricing: Record; +} { + const baselineModel = baseModels.find(m => m.isDefault) || baseModels[0]; + const baselineCost = effectiveCost(baselineModel.pricing); + + const models = baseModels.map(m => ({ + ...m, + costMultiplier: Math.round((effectiveCost(m.pricing) / baselineCost) * 10) / 10 + })); + + const pricing = Object.fromEntries( + models.map(m => [m.id, m.pricing]) + ); + + return { models, pricing }; +} + +export abstract class BaseProvider implements LlmProvider { + abstract name: string; + + protected abstract defaultModel: string; + protected abstract titleModel: string; + protected abstract availableModels: ModelInfo[]; + protected abstract modelPricing: Record; + + /** Create a language model instance for the given model ID. */ + protected abstract createModel(modelId: string): LanguageModel; + + /** + * Build the system prompt with note hints and skills summary. + */ + protected buildSystemPrompt(messages: LlmMessage[], config: LlmProviderConfig): string | undefined { + let systemPrompt = config.systemPrompt || messages.find(m => m.role === "system")?.content; + + if (config.contextNoteId) { + const noteHint = buildNoteHint(config.contextNoteId); + if (noteHint) { + systemPrompt = systemPrompt + ? `${systemPrompt}\n\n${noteHint}` + : noteHint; + } + } + + if (config.enableNoteTools) { + const skillsHint = `You have access to skills that provide specialized instructions. Load a skill with the load_skill tool before performing complex operations.\n\nAvailable skills:\n${getSkillsSummary()}`; + systemPrompt = systemPrompt + ? `${systemPrompt}\n\n${skillsHint}` + : skillsHint; + } + + return systemPrompt; + } + + /** + * Build the CoreMessage array from LlmMessages (no provider-specific options). 
+ */ + protected buildMessages(chatMessages: LlmMessage[], systemPrompt: string | undefined): CoreMessage[] { + const coreMessages: CoreMessage[] = []; + + if (systemPrompt) { + coreMessages.push({ role: "system", content: systemPrompt }); + } + + for (const m of chatMessages) { + coreMessages.push({ + role: m.role as "user" | "assistant", + content: m.content + }); + } + + return coreMessages; + } + + /** + * Build the tool set based on config. + */ + protected buildTools(config: LlmProviderConfig): ToolSet { + const tools: ToolSet = {}; + + if (config.contextNoteId) { + Object.assign(tools, currentNoteTools(config.contextNoteId)); + } + + if (config.enableNoteTools) { + Object.assign(tools, noteTools); + Object.assign(tools, attributeTools); + Object.assign(tools, hierarchyTools); + Object.assign(tools, skillTools); + } + + return tools; + } + + chat(messages: LlmMessage[], config: LlmProviderConfig): StreamResult { + const systemPrompt = this.buildSystemPrompt(messages, config); + const chatMessages = messages.filter(m => m.role !== "system"); + const coreMessages = this.buildMessages(chatMessages, systemPrompt); + + const streamOptions: Parameters[0] = { + model: this.createModel(config.model || this.defaultModel), + messages: coreMessages, + maxOutputTokens: config.maxTokens || DEFAULT_MAX_TOKENS + }; + + const tools = this.buildTools(config); + if (Object.keys(tools).length > 0) { + streamOptions.tools = tools; + streamOptions.stopWhen = stepCountIs(5); + streamOptions.toolChoice = "auto"; + } + + return streamText(streamOptions); + } + + getModelPricing(model: string): ModelPricing | undefined { + return this.modelPricing[model]; + } + + getAvailableModels(): ModelInfo[] { + return this.availableModels; + } + + async generateTitle(firstMessage: string): Promise { + const { text } = await generateText({ + model: this.createModel(this.titleModel), + maxOutputTokens: TITLE_MAX_TOKENS, + messages: [ + { + role: "user", + content: `Summarize the following message 
as a very short chat title (max 6 words). Reply with ONLY the title, no quotes or punctuation at the end.\n\nMessage: ${firstMessage}` + } + ] + }); + + return text.trim(); + } +} diff --git a/apps/server/src/services/llm/providers/openai.ts b/apps/server/src/services/llm/providers/openai.ts index c58399aa94..a44ac9505b 100644 --- a/apps/server/src/services/llm/providers/openai.ts +++ b/apps/server/src/services/llm/providers/openai.ts @@ -1,29 +1,12 @@ import { createOpenAI, type OpenAIProvider as OpenAISDKProvider } from "@ai-sdk/openai"; -import { generateText, streamText, stepCountIs, type CoreMessage, type ToolSet } from "ai"; -import type { LlmMessage } from "@triliumnext/commons"; -import becca from "../../../becca/becca.js"; -import { getSkillsSummary } from "../skills/index.js"; -import { noteTools, attributeTools, hierarchyTools, skillTools, currentNoteTools } from "../tools/index.js"; -import type { LlmProvider, LlmProviderConfig, ModelInfo, ModelPricing, StreamResult } from "../types.js"; - -const DEFAULT_MODEL = "gpt-4.1"; -const DEFAULT_MAX_TOKENS = 8096; -const TITLE_MODEL = "gpt-4.1-mini"; -const TITLE_MAX_TOKENS = 30; - -/** - * Calculate effective cost for comparison (weighted average: 1 input + 3 output). - */ -function effectiveCost(pricing: ModelPricing): number { - return (pricing.input + 3 * pricing.output) / 4; -} +import { BaseProvider, buildModelList } from "./base_provider.js"; /** * Available OpenAI models with pricing (USD per million tokens). 
* Source: https://platform.openai.com/docs/pricing */ -const BASE_MODELS: Omit[] = [ +const { models: AVAILABLE_MODELS, pricing: MODEL_PRICING } = buildModelList([ // ===== Current Models ===== { id: "gpt-4.1", @@ -71,132 +54,26 @@ const BASE_MODELS: Omit[] = [ contextWindow: 128000, isLegacy: true } -]; +]); -const baselineModel = BASE_MODELS.find(m => m.isDefault) || BASE_MODELS[0]; -const baselineCost = effectiveCost(baselineModel.pricing); - -const AVAILABLE_MODELS: ModelInfo[] = BASE_MODELS.map(m => ({ - ...m, - costMultiplier: Math.round((effectiveCost(m.pricing) / baselineCost) * 10) / 10 -})); - -const MODEL_PRICING: Record = Object.fromEntries( - AVAILABLE_MODELS.map(m => [m.id, m.pricing]) -); - -/** - * Build a lightweight context hint about the current note. - */ -function buildNoteHint(noteId: string): string | null { - const note = becca.getNote(noteId); - if (!note) { - return null; - } - - return `The user is currently viewing a ${note.type} note titled "${note.title}". Use the get_current_note tool to read its content if needed.`; -} - -export class OpenAiProvider implements LlmProvider { +export class OpenAiProvider extends BaseProvider { name = "openai"; + protected defaultModel = "gpt-4.1"; + protected titleModel = "gpt-4.1-mini"; + protected availableModels = AVAILABLE_MODELS; + protected modelPricing = MODEL_PRICING; + private openai: OpenAISDKProvider; constructor(apiKey: string) { + super(); if (!apiKey) { throw new Error("API key is required for OpenAI provider"); } this.openai = createOpenAI({ apiKey }); } - chat(messages: LlmMessage[], config: LlmProviderConfig): StreamResult { - let systemPrompt = config.systemPrompt || messages.find(m => m.role === "system")?.content; - const chatMessages = messages.filter(m => m.role !== "system"); - - // Add a lightweight hint about the current note - if (config.contextNoteId) { - const noteHint = buildNoteHint(config.contextNoteId); - if (noteHint) { - systemPrompt = systemPrompt - ? 
`${systemPrompt}\n\n${noteHint}` - : noteHint; - } - } - - // Add skills hint - if (config.enableNoteTools) { - const skillsHint = `You have access to skills that provide specialized instructions. Load a skill with the load_skill tool before performing complex operations.\n\nAvailable skills:\n${getSkillsSummary()}`; - systemPrompt = systemPrompt - ? `${systemPrompt}\n\n${skillsHint}` - : skillsHint; - } - - const coreMessages: CoreMessage[] = []; - - if (systemPrompt) { - coreMessages.push({ - role: "system", - content: systemPrompt - }); - } - - for (const m of chatMessages) { - coreMessages.push({ - role: m.role as "user" | "assistant", - content: m.content - }); - } - - const model = this.openai(config.model || DEFAULT_MODEL); - - const streamOptions: Parameters[0] = { - model, - messages: coreMessages, - maxOutputTokens: config.maxTokens || DEFAULT_MAX_TOKENS - }; - - // Build tools object - const tools: ToolSet = {}; - - if (config.contextNoteId) { - Object.assign(tools, currentNoteTools(config.contextNoteId)); - } - - if (config.enableNoteTools) { - Object.assign(tools, noteTools); - Object.assign(tools, attributeTools); - Object.assign(tools, hierarchyTools); - Object.assign(tools, skillTools); - } - - if (Object.keys(tools).length > 0) { - streamOptions.tools = tools; - streamOptions.stopWhen = stepCountIs(5); - streamOptions.toolChoice = "auto"; - } - - return streamText(streamOptions); - } - - getModelPricing(model: string): ModelPricing | undefined { - return MODEL_PRICING[model]; - } - - getAvailableModels(): ModelInfo[] { - return AVAILABLE_MODELS; - } - - async generateTitle(firstMessage: string): Promise { - const { text } = await generateText({ - model: this.openai(TITLE_MODEL), - maxOutputTokens: TITLE_MAX_TOKENS, - messages: [ - { - role: "user", - content: `Summarize the following message as a very short chat title (max 6 words). 
Reply with ONLY the title, no quotes or punctuation at the end.\n\nMessage: ${firstMessage}` - } - ] - }); - - return text.trim(); + protected createModel(modelId: string) { + return this.openai(modelId); } } From 0e2c96d544d7aae6fe4278e41b238dfb815de3ce Mon Sep 17 00:00:00 2001 From: Elian Doran Date: Tue, 31 Mar 2026 19:08:41 +0300 Subject: [PATCH 13/19] feat(llm): add web search to OpenAI --- .../src/services/llm/providers/anthropic.ts | 63 +++++++++---------- .../services/llm/providers/base_provider.ts | 9 +++ .../src/services/llm/providers/openai.ts | 5 ++ 3 files changed, 45 insertions(+), 32 deletions(-) diff --git a/apps/server/src/services/llm/providers/anthropic.ts b/apps/server/src/services/llm/providers/anthropic.ts index d444b34a51..5175ed2370 100644 --- a/apps/server/src/services/llm/providers/anthropic.ts +++ b/apps/server/src/services/llm/providers/anthropic.ts @@ -89,19 +89,17 @@ export class AnthropicProvider extends BaseProvider { return this.anthropic(modelId); } + protected override addWebSearchTool(tools: ToolSet): void { + tools.web_search = this.anthropic.tools.webSearch_20250305({ + maxUses: 5 + }); + } + /** - * Override chat to add Anthropic-specific features: - * - Prompt caching via providerOptions - * - Extended thinking - * - Web search tool + * Override buildMessages to add Anthropic-specific cache control breakpoints. 
*/ - override chat(messages: LlmMessage[], config: LlmProviderConfig): StreamResult { - const systemPrompt = this.buildSystemPrompt(messages, config); - const chatMessages = messages.filter(m => m.role !== "system"); - - // Anthropic-specific: cache control breakpoints on system prompt and conversation history + protected override buildMessages(chatMessages: LlmMessage[], systemPrompt: string | undefined): CoreMessage[] { const CACHE_CONTROL = { anthropic: { cacheControl: { type: "ephemeral" as const } } }; - const coreMessages: CoreMessage[] = []; if (systemPrompt) { @@ -122,38 +120,39 @@ export class AnthropicProvider extends BaseProvider { }); } + return coreMessages; + } + + /** + * Override chat to add Anthropic-specific extended thinking support. + */ + override chat(messages: LlmMessage[], config: LlmProviderConfig): StreamResult { + if (!config.enableExtendedThinking) { + return super.chat(messages, config); + } + + const systemPrompt = this.buildSystemPrompt(messages, config); + const chatMessages = messages.filter(m => m.role !== "system"); + const coreMessages = this.buildMessages(chatMessages, systemPrompt); + + const thinkingBudget = config.thinkingBudget || 10000; + const maxTokens = Math.max(config.maxTokens || 8096, thinkingBudget + 4000); + const streamOptions: Parameters[0] = { model: this.createModel(config.model || this.defaultModel), messages: coreMessages, - maxOutputTokens: config.maxTokens || 8096 - }; - - // Anthropic-specific: extended thinking - if (config.enableExtendedThinking) { - const thinkingBudget = config.thinkingBudget || 10000; - streamOptions.providerOptions = { + maxOutputTokens: maxTokens, + providerOptions: { anthropic: { thinking: { type: "enabled", budgetTokens: thinkingBudget } } - }; - streamOptions.maxOutputTokens = Math.max( - streamOptions.maxOutputTokens || 8096, - thinkingBudget + 4000 - ); - } - - // Build tools (shared + Anthropic-specific web search) - const tools: ToolSet = this.buildTools(config); - - if 
(config.enableWebSearch) { - tools.web_search = this.anthropic.tools.webSearch_20250305({ - maxUses: 5 - }); - } + } + }; + const tools = this.buildTools(config); if (Object.keys(tools).length > 0) { streamOptions.tools = tools; streamOptions.stopWhen = stepCountIs(5); diff --git a/apps/server/src/services/llm/providers/base_provider.ts b/apps/server/src/services/llm/providers/base_provider.ts index bcc22bd3c0..02fd4c6bb3 100644 --- a/apps/server/src/services/llm/providers/base_provider.ts +++ b/apps/server/src/services/llm/providers/base_provider.ts @@ -113,12 +113,21 @@ export abstract class BaseProvider implements LlmProvider { return coreMessages; } + /** + * Add provider-specific web search tool. Override in subclasses that support it. + */ + protected addWebSearchTool(_tools: ToolSet): void {} + /** * Build the tool set based on config. */ protected buildTools(config: LlmProviderConfig): ToolSet { const tools: ToolSet = {}; + if (config.enableWebSearch) { + this.addWebSearchTool(tools); + } + if (config.contextNoteId) { Object.assign(tools, currentNoteTools(config.contextNoteId)); } diff --git a/apps/server/src/services/llm/providers/openai.ts b/apps/server/src/services/llm/providers/openai.ts index a44ac9505b..759d31c7e7 100644 --- a/apps/server/src/services/llm/providers/openai.ts +++ b/apps/server/src/services/llm/providers/openai.ts @@ -1,4 +1,5 @@ import { createOpenAI, type OpenAIProvider as OpenAISDKProvider } from "@ai-sdk/openai"; +import type { ToolSet } from "ai"; import { BaseProvider, buildModelList } from "./base_provider.js"; @@ -76,4 +77,8 @@ export class OpenAiProvider extends BaseProvider { protected createModel(modelId: string) { return this.openai(modelId); } + + protected override addWebSearchTool(tools: ToolSet): void { + tools.web_search = this.openai.tools.webSearch(); + } } From 04efa2742cf9da3337aaca2b96317f4ad9146a2c Mon Sep 17 00:00:00 2001 From: Elian Doran Date: Tue, 31 Mar 2026 19:28:42 +0300 Subject: [PATCH 14/19] feat(llm): 
basic support for Google Gemini --- .../options/llm/AddProviderModal.tsx | 3 +- apps/server/package.json | 1 + apps/server/src/services/llm/index.ts | 4 +- .../src/services/llm/providers/google.ts | 102 ++++++++++++++++++ pnpm-lock.yaml | 25 +++-- 5 files changed, 123 insertions(+), 12 deletions(-) create mode 100644 apps/server/src/services/llm/providers/google.ts diff --git a/apps/client/src/widgets/type_widgets/options/llm/AddProviderModal.tsx b/apps/client/src/widgets/type_widgets/options/llm/AddProviderModal.tsx index e81be7643a..4538cde3b8 100644 --- a/apps/client/src/widgets/type_widgets/options/llm/AddProviderModal.tsx +++ b/apps/client/src/widgets/type_widgets/options/llm/AddProviderModal.tsx @@ -20,7 +20,8 @@ export interface ProviderType { export const PROVIDER_TYPES: ProviderType[] = [ { id: "anthropic", name: "Anthropic" }, - { id: "openai", name: "OpenAI" } + { id: "openai", name: "OpenAI" }, + { id: "google", name: "Google Gemini" } ]; interface AddProviderModalProps { diff --git a/apps/server/package.json b/apps/server/package.json index 2fc03d8b16..b9147bfde0 100644 --- a/apps/server/package.json +++ b/apps/server/package.json @@ -31,6 +31,7 @@ }, "dependencies": { "@ai-sdk/anthropic": "^2.0.0", + "@ai-sdk/google": "^2.0.64", "@ai-sdk/openai": "2.0.101", "ai": "^5.0.0", "better-sqlite3": "12.8.0", diff --git a/apps/server/src/services/llm/index.ts b/apps/server/src/services/llm/index.ts index 4a29c37a6e..ebf0a06639 100644 --- a/apps/server/src/services/llm/index.ts +++ b/apps/server/src/services/llm/index.ts @@ -1,5 +1,6 @@ import type { LlmProvider, ModelInfo } from "./types.js"; import { AnthropicProvider } from "./providers/anthropic.js"; +import { GoogleProvider } from "./providers/google.js"; import { OpenAiProvider } from "./providers/openai.js"; import optionService from "../options.js"; import log from "../log.js"; @@ -18,7 +19,8 @@ export interface LlmProviderSetup { /** Factory functions for creating provider instances */ const 
providerFactories: Record LlmProvider> = { anthropic: (apiKey) => new AnthropicProvider(apiKey), - openai: (apiKey) => new OpenAiProvider(apiKey) + openai: (apiKey) => new OpenAiProvider(apiKey), + google: (apiKey) => new GoogleProvider(apiKey) }; /** Cache of instantiated providers by their config ID */ diff --git a/apps/server/src/services/llm/providers/google.ts b/apps/server/src/services/llm/providers/google.ts new file mode 100644 index 0000000000..0902986d10 --- /dev/null +++ b/apps/server/src/services/llm/providers/google.ts @@ -0,0 +1,102 @@ +import { createGoogleGenerativeAI, type GoogleGenerativeAIProvider } from "@ai-sdk/google"; +import { streamText, stepCountIs, type ToolSet } from "ai"; +import type { LlmMessage } from "@triliumnext/commons"; + +import type { LlmProviderConfig, StreamResult } from "../types.js"; +import { BaseProvider, buildModelList } from "./base_provider.js"; + +/** + * Available Google Gemini models with pricing (USD per million tokens). + * Source: https://ai.google.dev/gemini-api/docs/pricing + */ +const { models: AVAILABLE_MODELS, pricing: MODEL_PRICING } = buildModelList([ + // ===== Current Models ===== + { + id: "gemini-2.5-pro", + name: "Gemini 2.5 Pro", + pricing: { input: 1.25, output: 10 }, + contextWindow: 1048576, + isDefault: true + }, + { + id: "gemini-2.5-flash", + name: "Gemini 2.5 Flash", + pricing: { input: 0.3, output: 2.5 }, + contextWindow: 1048576 + }, + { + id: "gemini-2.5-flash-lite", + name: "Gemini 2.5 Flash-Lite", + pricing: { input: 0.1, output: 0.4 }, + contextWindow: 1048576 + }, + { + id: "gemini-2.0-flash", + name: "Gemini 2.0 Flash", + pricing: { input: 0.1, output: 0.4 }, + contextWindow: 1048576, + isLegacy: true + } +]); + +export class GoogleProvider extends BaseProvider { + name = "google"; + protected defaultModel = "gemini-2.5-flash"; + protected titleModel = "gemini-2.5-flash-lite"; + protected availableModels = AVAILABLE_MODELS; + protected modelPricing = MODEL_PRICING; + + private google: 
GoogleGenerativeAIProvider; + + constructor(apiKey: string) { + super(); + if (!apiKey) { + throw new Error("API key is required for Google provider"); + } + this.google = createGoogleGenerativeAI({ apiKey }); + } + + protected createModel(modelId: string) { + return this.google(modelId); + } + + protected override addWebSearchTool(tools: ToolSet): void { + tools.google_search = this.google.tools.googleSearch({}); + } + + /** + * Override chat to add Google-specific extended thinking support. + * Gemini 2.5 uses thinkingBudget, Gemini 3.x uses thinkingLevel. + */ + override chat(messages: LlmMessage[], config: LlmProviderConfig): StreamResult { + if (!config.enableExtendedThinking) { + return super.chat(messages, config); + } + + const systemPrompt = this.buildSystemPrompt(messages, config); + const chatMessages = messages.filter(m => m.role !== "system"); + const coreMessages = this.buildMessages(chatMessages, systemPrompt); + + const streamOptions: Parameters[0] = { + model: this.createModel(config.model || this.defaultModel), + messages: coreMessages, + maxOutputTokens: config.maxTokens || 8096, + providerOptions: { + google: { + thinkingConfig: { + thinkingBudget: config.thinkingBudget || 10000 + } + } + } + }; + + const tools = this.buildTools(config); + if (Object.keys(tools).length > 0) { + streamOptions.tools = tools; + streamOptions.stopWhen = stepCountIs(5); + streamOptions.toolChoice = "auto"; + } + + return streamText(streamOptions); + } +} diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index a52665529e..f4fe3771ae 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -559,6 +559,9 @@ importers: '@ai-sdk/anthropic': specifier: ^2.0.0 version: 2.0.71(zod@4.3.6) + '@ai-sdk/google': + specifier: ^2.0.64 + version: 2.0.64(zod@4.3.6) '@ai-sdk/openai': specifier: 2.0.101 version: 2.0.101(zod@4.3.6) @@ -1551,6 +1554,12 @@ packages: peerDependencies: zod: ^3.25.76 || ^4.1.8 + '@ai-sdk/google@2.0.64': + resolution: {integrity: 
sha512-FUVSkdpC+j2o3anRHabJ5UXXPfnqs8uRkv5zh5x4u8p1e7C4y+YtTxeTD2aSSMGV+8ef+VNEAp5gponXpwKk0g==} + engines: {node: '>=18'} + peerDependencies: + zod: ^3.25.76 || ^4.1.8 + '@ai-sdk/openai@2.0.101': resolution: {integrity: sha512-kQ52HLV45T3bQbRzWExXW6+pkg3Nvq4dUnZHUPJXWgkUUsAhZjxHrXqPOc/0yfn/4+Dn2uLmIgAkP9IfzMMcNg==} engines: {node: '>=18'} @@ -16049,6 +16058,12 @@ snapshots: '@vercel/oidc': 3.1.0 zod: 4.3.6 + '@ai-sdk/google@2.0.64(zod@4.3.6)': + dependencies: + '@ai-sdk/provider': 2.0.1 + '@ai-sdk/provider-utils': 3.0.22(zod@4.3.6) + zod: 4.3.6 + '@ai-sdk/openai@2.0.101(zod@4.3.6)': dependencies: '@ai-sdk/provider': 2.0.1 @@ -17408,8 +17423,6 @@ snapshots: '@ckeditor/ckeditor5-ui': 47.6.1 '@ckeditor/ckeditor5-utils': 47.6.1 ckeditor5: 47.6.1 - transitivePeerDependencies: - - supports-color '@ckeditor/ckeditor5-horizontal-line@47.6.1': dependencies: @@ -17419,8 +17432,6 @@ snapshots: '@ckeditor/ckeditor5-utils': 47.6.1 '@ckeditor/ckeditor5-widget': 47.6.1 ckeditor5: 47.6.1 - transitivePeerDependencies: - - supports-color '@ckeditor/ckeditor5-html-embed@47.6.1': dependencies: @@ -17430,8 +17441,6 @@ snapshots: '@ckeditor/ckeditor5-utils': 47.6.1 '@ckeditor/ckeditor5-widget': 47.6.1 ckeditor5: 47.6.1 - transitivePeerDependencies: - - supports-color '@ckeditor/ckeditor5-html-support@47.6.1': dependencies: @@ -17447,8 +17456,6 @@ snapshots: '@ckeditor/ckeditor5-widget': 47.6.1 ckeditor5: 47.6.1 es-toolkit: 1.39.5 - transitivePeerDependencies: - - supports-color '@ckeditor/ckeditor5-icons@47.6.1': {} @@ -17489,8 +17496,6 @@ snapshots: '@ckeditor/ckeditor5-ui': 47.6.1 '@ckeditor/ckeditor5-utils': 47.6.1 ckeditor5: 47.6.1 - transitivePeerDependencies: - - supports-color '@ckeditor/ckeditor5-inspector@5.0.0': {} From 708180a0373989abfcba6d10e4e2765fc601ed25 Mon Sep 17 00:00:00 2001 From: Elian Doran Date: Tue, 31 Mar 2026 19:47:39 +0300 Subject: [PATCH 15/19] fix(llm): sending empty messages crashes on Anthropic --- apps/server/src/services/llm/providers/anthropic.ts | 2 
+- apps/server/src/services/llm/providers/base_provider.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/apps/server/src/services/llm/providers/anthropic.ts b/apps/server/src/services/llm/providers/anthropic.ts index 5175ed2370..bfcd434ea7 100644 --- a/apps/server/src/services/llm/providers/anthropic.ts +++ b/apps/server/src/services/llm/providers/anthropic.ts @@ -132,7 +132,7 @@ export class AnthropicProvider extends BaseProvider { } const systemPrompt = this.buildSystemPrompt(messages, config); - const chatMessages = messages.filter(m => m.role !== "system"); + const chatMessages = messages.filter(m => m.role !== "system" && m.content); const coreMessages = this.buildMessages(chatMessages, systemPrompt); const thinkingBudget = config.thinkingBudget || 10000; diff --git a/apps/server/src/services/llm/providers/base_provider.ts b/apps/server/src/services/llm/providers/base_provider.ts index 02fd4c6bb3..47842013d2 100644 --- a/apps/server/src/services/llm/providers/base_provider.ts +++ b/apps/server/src/services/llm/providers/base_provider.ts @@ -144,7 +144,7 @@ export abstract class BaseProvider implements LlmProvider { chat(messages: LlmMessage[], config: LlmProviderConfig): StreamResult { const systemPrompt = this.buildSystemPrompt(messages, config); - const chatMessages = messages.filter(m => m.role !== "system"); + const chatMessages = messages.filter(m => m.role !== "system" && m.content); const coreMessages = this.buildMessages(chatMessages, systemPrompt); const streamOptions: Parameters[0] = { From 6e7a14fb3ee8074502889cd15f1d65d8d88620df Mon Sep 17 00:00:00 2001 From: Elian Doran Date: Tue, 31 Mar 2026 20:23:45 +0300 Subject: [PATCH 16/19] chore(llm): update to AI SDK 6 --- apps/server/package.json | 8 +- .../src/services/llm/providers/anthropic.ts | 6 +- .../services/llm/providers/base_provider.ts | 8 +- pnpm-lock.yaml | 98 +++++++++++-------- 4 files changed, 68 insertions(+), 52 deletions(-) diff --git a/apps/server/package.json 
b/apps/server/package.json index b9147bfde0..e15726253d 100644 --- a/apps/server/package.json +++ b/apps/server/package.json @@ -30,10 +30,10 @@ "proxy-nginx-subdir": "docker run --name trilium-nginx-subdir --rm --network=host -v ./docker/nginx.conf:/etc/nginx/conf.d/default.conf:ro nginx:latest" }, "dependencies": { - "@ai-sdk/anthropic": "^2.0.0", - "@ai-sdk/google": "^2.0.64", - "@ai-sdk/openai": "2.0.101", - "ai": "^5.0.0", + "@ai-sdk/anthropic": "3.0.64", + "@ai-sdk/google": "3.0.54", + "@ai-sdk/openai": "3.0.49", + "ai": "6.0.142", "better-sqlite3": "12.8.0", "html-to-text": "9.0.5", "node-html-parser": "7.1.0", diff --git a/apps/server/src/services/llm/providers/anthropic.ts b/apps/server/src/services/llm/providers/anthropic.ts index bfcd434ea7..48a06bbc4e 100644 --- a/apps/server/src/services/llm/providers/anthropic.ts +++ b/apps/server/src/services/llm/providers/anthropic.ts @@ -1,5 +1,5 @@ import { createAnthropic, type AnthropicProvider as AnthropicSDKProvider } from "@ai-sdk/anthropic"; -import { stepCountIs, streamText, type CoreMessage, type ToolSet } from "ai"; +import { stepCountIs, streamText, type ModelMessage, type ToolSet } from "ai"; import type { LlmMessage } from "@triliumnext/commons"; import type { LlmProviderConfig, StreamResult } from "../types.js"; @@ -98,9 +98,9 @@ export class AnthropicProvider extends BaseProvider { /** * Override buildMessages to add Anthropic-specific cache control breakpoints. 
*/ - protected override buildMessages(chatMessages: LlmMessage[], systemPrompt: string | undefined): CoreMessage[] { + protected override buildMessages(chatMessages: LlmMessage[], systemPrompt: string | undefined): ModelMessage[] { const CACHE_CONTROL = { anthropic: { cacheControl: { type: "ephemeral" as const } } }; - const coreMessages: CoreMessage[] = []; + const coreMessages: ModelMessage[] = []; if (systemPrompt) { coreMessages.push({ diff --git a/apps/server/src/services/llm/providers/base_provider.ts b/apps/server/src/services/llm/providers/base_provider.ts index 47842013d2..ca975fa895 100644 --- a/apps/server/src/services/llm/providers/base_provider.ts +++ b/apps/server/src/services/llm/providers/base_provider.ts @@ -3,7 +3,7 @@ * tool assembly, model pricing, and title generation. */ -import { generateText, streamText, stepCountIs, type CoreMessage, type ToolSet } from "ai"; +import { generateText, streamText, stepCountIs, type ModelMessage, type ToolSet } from "ai"; import type { LanguageModel } from "ai"; import type { LlmMessage } from "@triliumnext/commons"; @@ -94,10 +94,10 @@ export abstract class BaseProvider implements LlmProvider { } /** - * Build the CoreMessage array from LlmMessages (no provider-specific options). + * Build the ModelMessage array from LlmMessages (no provider-specific options). 
*/ - protected buildMessages(chatMessages: LlmMessage[], systemPrompt: string | undefined): CoreMessage[] { - const coreMessages: CoreMessage[] = []; + protected buildMessages(chatMessages: LlmMessage[], systemPrompt: string | undefined): ModelMessage[] { + const coreMessages: ModelMessage[] = []; if (systemPrompt) { coreMessages.push({ role: "system", content: systemPrompt }); diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index f4fe3771ae..e5f083fc40 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -557,17 +557,17 @@ importers: apps/server: dependencies: '@ai-sdk/anthropic': - specifier: ^2.0.0 - version: 2.0.71(zod@4.3.6) + specifier: ^3.0.64 + version: 3.0.64(zod@4.3.6) '@ai-sdk/google': - specifier: ^2.0.64 - version: 2.0.64(zod@4.3.6) + specifier: ^3.0.54 + version: 3.0.54(zod@4.3.6) '@ai-sdk/openai': - specifier: 2.0.101 - version: 2.0.101(zod@4.3.6) + specifier: 3.0.49 + version: 3.0.49(zod@4.3.6) ai: - specifier: ^5.0.0 - version: 5.0.161(zod@4.3.6) + specifier: ^6.0.142 + version: 6.0.142(zod@4.3.6) better-sqlite3: specifier: 12.8.0 version: 12.8.0 @@ -1542,38 +1542,38 @@ packages: '@adobe/css-tools@4.4.4': resolution: {integrity: sha512-Elp+iwUx5rN5+Y8xLt5/GRoG20WGoDCQ/1Fb+1LiGtvwbDavuSk0jhD/eZdckHAuzcDzccnkv+rEjyWfRx18gg==} - '@ai-sdk/anthropic@2.0.71': - resolution: {integrity: sha512-JXTtAwlyxGzzRtpiAXk/O93aOTgdfoVX28EoUuRNVqZRgtkoniLQTtqeb8uZ4oXljNJlXzaJLNasS/U90w/wjw==} + '@ai-sdk/anthropic@3.0.64': + resolution: {integrity: sha512-rwLi/Rsuj2pYniQXIrvClHvXDzgM4UQHHnvHTWEF14efnlKclG/1ghpNC+adsRujAbCTr6gRsSbDE2vEqriV7g==} engines: {node: '>=18'} peerDependencies: zod: ^3.25.76 || ^4.1.8 - '@ai-sdk/gateway@2.0.65': - resolution: {integrity: sha512-yaWzvQQWgAzV0m3eidfpRub1+PggDOr2hLnSOI+L2ZispyJ/7EoSzhjKzNCADj6PHnnPaOMH933Xhl1Z/NSxJw==} + '@ai-sdk/gateway@3.0.84': + resolution: {integrity: sha512-RnUw6UNvkaw9MEaJU9cIjA+WBP+ZR5+M/9nfbfJHcGKtTbcWXijJuYKx9nYRnm+qU+iiakb0XvQA/vvho6lTsw==} engines: {node: '>=18'} peerDependencies: zod: ^3.25.76 || ^4.1.8 - 
'@ai-sdk/google@2.0.64': - resolution: {integrity: sha512-FUVSkdpC+j2o3anRHabJ5UXXPfnqs8uRkv5zh5x4u8p1e7C4y+YtTxeTD2aSSMGV+8ef+VNEAp5gponXpwKk0g==} + '@ai-sdk/google@3.0.54': + resolution: {integrity: sha512-EgYYdA2LpHZefLDU/FIpmeTlL5Hi4WKQZY3nACMh0wVhrS1fAvlfrdwnD1G4ISCOKWMWrMcRZX9ubs3NM/KHfA==} engines: {node: '>=18'} peerDependencies: zod: ^3.25.76 || ^4.1.8 - '@ai-sdk/openai@2.0.101': - resolution: {integrity: sha512-kQ52HLV45T3bQbRzWExXW6+pkg3Nvq4dUnZHUPJXWgkUUsAhZjxHrXqPOc/0yfn/4+Dn2uLmIgAkP9IfzMMcNg==} + '@ai-sdk/openai@3.0.49': + resolution: {integrity: sha512-U2f0pCyNn/jQH3wjgxr8o9VvCkuDFTtXbIhbFFtgXqCzMbed6rBnvzQcAMEK0/Pa44byL9zfcvCOFOflvkRA8w==} engines: {node: '>=18'} peerDependencies: zod: ^3.25.76 || ^4.1.8 - '@ai-sdk/provider-utils@3.0.22': - resolution: {integrity: sha512-fFT1KfUUKktfAFm5mClJhS1oux9tP2qgzmEZVl5UdwltQ1LO/s8hd7znVrgKzivwv1s1FIPza0s9OpJaNB/vHw==} + '@ai-sdk/provider-utils@4.0.21': + resolution: {integrity: sha512-MtFUYI1/8mgDvRmaBDjbLJPFFrMG777AvSgyIFQtZHIMzm88R/12vYBBpnk7pfiWLFE1DSZzY4WDYzGbKAcmiw==} engines: {node: '>=18'} peerDependencies: zod: ^3.25.76 || ^4.1.8 - '@ai-sdk/provider@2.0.1': - resolution: {integrity: sha512-KCUwswvsC5VsW2PWFqF8eJgSCu5Ysj7m1TxiHTVA6g7k360bk0RNQENT8KTMAYEs+8fWPD3Uu4dEmzGHc+jGng==} + '@ai-sdk/provider@3.0.8': + resolution: {integrity: sha512-oGMAgGoQdBXbZqNG0Ze56CHjDZ1IDYOwGYxYjO5KLSlz5HiNQ9udIXsPZ61VWaHGZ5XW/jyjmr6t2xz2jGVwbQ==} engines: {node: '>=18'} '@aklinker1/rollup-plugin-visualizer@5.12.0': @@ -7368,8 +7368,8 @@ packages: resolution: {integrity: sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==} engines: {node: '>=8'} - ai@5.0.161: - resolution: {integrity: sha512-CVANs7auUNEi/hRhdJDKcPYaCLWXveIfmoiekNSRel3i8WUieB6iEncDS5smcubWsx7hGtTgXxNRTg0YG0ljtA==} + ai@6.0.142: + resolution: {integrity: sha512-ZoxAsnTL/dFg5WdcwC8QNhKVlLtqwwT3I7p/4i8IJJP+6ZwqF1ljuwMsAsPYYvppZ+RzUxjxxFGb1cbEhNH3dg==} engines: {node: '>=18'} peerDependencies: zod: ^3.25.76 || 
^4.1.8 @@ -16045,39 +16045,39 @@ snapshots: '@adobe/css-tools@4.4.4': {} - '@ai-sdk/anthropic@2.0.71(zod@4.3.6)': + '@ai-sdk/anthropic@3.0.64(zod@4.3.6)': dependencies: - '@ai-sdk/provider': 2.0.1 - '@ai-sdk/provider-utils': 3.0.22(zod@4.3.6) + '@ai-sdk/provider': 3.0.8 + '@ai-sdk/provider-utils': 4.0.21(zod@4.3.6) zod: 4.3.6 - '@ai-sdk/gateway@2.0.65(zod@4.3.6)': + '@ai-sdk/gateway@3.0.84(zod@4.3.6)': dependencies: - '@ai-sdk/provider': 2.0.1 - '@ai-sdk/provider-utils': 3.0.22(zod@4.3.6) + '@ai-sdk/provider': 3.0.8 + '@ai-sdk/provider-utils': 4.0.21(zod@4.3.6) '@vercel/oidc': 3.1.0 zod: 4.3.6 - '@ai-sdk/google@2.0.64(zod@4.3.6)': + '@ai-sdk/google@3.0.54(zod@4.3.6)': dependencies: - '@ai-sdk/provider': 2.0.1 - '@ai-sdk/provider-utils': 3.0.22(zod@4.3.6) + '@ai-sdk/provider': 3.0.8 + '@ai-sdk/provider-utils': 4.0.21(zod@4.3.6) zod: 4.3.6 - '@ai-sdk/openai@2.0.101(zod@4.3.6)': + '@ai-sdk/openai@3.0.49(zod@4.3.6)': dependencies: - '@ai-sdk/provider': 2.0.1 - '@ai-sdk/provider-utils': 3.0.22(zod@4.3.6) + '@ai-sdk/provider': 3.0.8 + '@ai-sdk/provider-utils': 4.0.21(zod@4.3.6) zod: 4.3.6 - '@ai-sdk/provider-utils@3.0.22(zod@4.3.6)': + '@ai-sdk/provider-utils@4.0.21(zod@4.3.6)': dependencies: - '@ai-sdk/provider': 2.0.1 + '@ai-sdk/provider': 3.0.8 '@standard-schema/spec': 1.1.0 eventsource-parser: 3.0.6 zod: 4.3.6 - '@ai-sdk/provider@2.0.1': + '@ai-sdk/provider@3.0.8': dependencies: json-schema: 0.4.0 @@ -17028,6 +17028,8 @@ snapshots: '@ckeditor/ckeditor5-utils': 47.6.1 '@ckeditor/ckeditor5-widget': 47.6.1 es-toolkit: 1.39.5 + transitivePeerDependencies: + - supports-color '@ckeditor/ckeditor5-cloud-services@47.6.1': dependencies: @@ -17423,6 +17425,8 @@ snapshots: '@ckeditor/ckeditor5-ui': 47.6.1 '@ckeditor/ckeditor5-utils': 47.6.1 ckeditor5: 47.6.1 + transitivePeerDependencies: + - supports-color '@ckeditor/ckeditor5-horizontal-line@47.6.1': dependencies: @@ -17432,6 +17436,8 @@ snapshots: '@ckeditor/ckeditor5-utils': 47.6.1 '@ckeditor/ckeditor5-widget': 47.6.1 
ckeditor5: 47.6.1 + transitivePeerDependencies: + - supports-color '@ckeditor/ckeditor5-html-embed@47.6.1': dependencies: @@ -17441,6 +17447,8 @@ snapshots: '@ckeditor/ckeditor5-utils': 47.6.1 '@ckeditor/ckeditor5-widget': 47.6.1 ckeditor5: 47.6.1 + transitivePeerDependencies: + - supports-color '@ckeditor/ckeditor5-html-support@47.6.1': dependencies: @@ -17456,6 +17464,8 @@ snapshots: '@ckeditor/ckeditor5-widget': 47.6.1 ckeditor5: 47.6.1 es-toolkit: 1.39.5 + transitivePeerDependencies: + - supports-color '@ckeditor/ckeditor5-icons@47.6.1': {} @@ -17473,6 +17483,8 @@ snapshots: '@ckeditor/ckeditor5-widget': 47.6.1 ckeditor5: 47.6.1 es-toolkit: 1.39.5 + transitivePeerDependencies: + - supports-color '@ckeditor/ckeditor5-import-word@47.6.1': dependencies: @@ -17496,6 +17508,8 @@ snapshots: '@ckeditor/ckeditor5-ui': 47.6.1 '@ckeditor/ckeditor5-utils': 47.6.1 ckeditor5: 47.6.1 + transitivePeerDependencies: + - supports-color '@ckeditor/ckeditor5-inspector@5.0.0': {} @@ -17506,6 +17520,8 @@ snapshots: '@ckeditor/ckeditor5-ui': 47.6.1 '@ckeditor/ckeditor5-utils': 47.6.1 ckeditor5: 47.6.1 + transitivePeerDependencies: + - supports-color '@ckeditor/ckeditor5-line-height@47.6.1': dependencies: @@ -24533,11 +24549,11 @@ snapshots: clean-stack: 2.2.0 indent-string: 4.0.0 - ai@5.0.161(zod@4.3.6): + ai@6.0.142(zod@4.3.6): dependencies: - '@ai-sdk/gateway': 2.0.65(zod@4.3.6) - '@ai-sdk/provider': 2.0.1 - '@ai-sdk/provider-utils': 3.0.22(zod@4.3.6) + '@ai-sdk/gateway': 3.0.84(zod@4.3.6) + '@ai-sdk/provider': 3.0.8 + '@ai-sdk/provider-utils': 4.0.21(zod@4.3.6) '@opentelemetry/api': 1.9.0 zod: 4.3.6 From a6b8785341da72a42a732395709d9950d1cc0e1d Mon Sep 17 00:00:00 2001 From: Elian Doran Date: Tue, 31 Mar 2026 20:32:19 +0300 Subject: [PATCH 17/19] chore(llm): address requested changes --- apps/server/src/services/llm/providers/anthropic.ts | 8 ++++++-- apps/server/src/services/llm/providers/base_provider.ts | 2 +- apps/server/src/services/llm/providers/google.ts | 6 +++--- 
apps/server/src/services/llm/skills/index.ts | 8 ++++---- 4 files changed, 14 insertions(+), 10 deletions(-) diff --git a/apps/server/src/services/llm/providers/anthropic.ts b/apps/server/src/services/llm/providers/anthropic.ts index 48a06bbc4e..aef87f8258 100644 --- a/apps/server/src/services/llm/providers/anthropic.ts +++ b/apps/server/src/services/llm/providers/anthropic.ts @@ -113,9 +113,13 @@ export class AnthropicProvider extends BaseProvider { for (let i = 0; i < chatMessages.length; i++) { const m = chatMessages[i]; const isLastBeforeNewTurn = i === chatMessages.length - 2; + // Anthropic rejects empty text content blocks. Replace empty + // content (e.g. tool-only assistant turns) with a placeholder + // to preserve conversation flow. + const content = m.content || "(tool use)"; coreMessages.push({ role: m.role as "user" | "assistant", - content: m.content, + content, ...(isLastBeforeNewTurn && { providerOptions: CACHE_CONTROL }) }); } @@ -132,7 +136,7 @@ export class AnthropicProvider extends BaseProvider { } const systemPrompt = this.buildSystemPrompt(messages, config); - const chatMessages = messages.filter(m => m.role !== "system" && m.content); + const chatMessages = messages.filter(m => m.role !== "system"); const coreMessages = this.buildMessages(chatMessages, systemPrompt); const thinkingBudget = config.thinkingBudget || 10000; diff --git a/apps/server/src/services/llm/providers/base_provider.ts b/apps/server/src/services/llm/providers/base_provider.ts index ca975fa895..fda95856c3 100644 --- a/apps/server/src/services/llm/providers/base_provider.ts +++ b/apps/server/src/services/llm/providers/base_provider.ts @@ -144,7 +144,7 @@ export abstract class BaseProvider implements LlmProvider { chat(messages: LlmMessage[], config: LlmProviderConfig): StreamResult { const systemPrompt = this.buildSystemPrompt(messages, config); - const chatMessages = messages.filter(m => m.role !== "system" && m.content); + const chatMessages = messages.filter(m => m.role 
!== "system"); const coreMessages = this.buildMessages(chatMessages, systemPrompt); const streamOptions: Parameters[0] = { diff --git a/apps/server/src/services/llm/providers/google.ts b/apps/server/src/services/llm/providers/google.ts index 0902986d10..e33b1bccca 100644 --- a/apps/server/src/services/llm/providers/google.ts +++ b/apps/server/src/services/llm/providers/google.ts @@ -15,14 +15,14 @@ const { models: AVAILABLE_MODELS, pricing: MODEL_PRICING } = buildModelList([ id: "gemini-2.5-pro", name: "Gemini 2.5 Pro", pricing: { input: 1.25, output: 10 }, - contextWindow: 1048576, - isDefault: true + contextWindow: 1048576 }, { id: "gemini-2.5-flash", name: "Gemini 2.5 Flash", pricing: { input: 0.3, output: 2.5 }, - contextWindow: 1048576 + contextWindow: 1048576, + isDefault: true }, { id: "gemini-2.5-flash-lite", diff --git a/apps/server/src/services/llm/skills/index.ts b/apps/server/src/services/llm/skills/index.ts index ff22097111..a38ee13a81 100644 --- a/apps/server/src/services/llm/skills/index.ts +++ b/apps/server/src/services/llm/skills/index.ts @@ -5,7 +5,7 @@ */ import { tool } from "ai"; -import { readFileSync } from "fs"; +import { readFile } from "fs/promises"; import { dirname, join } from "path"; import { fileURLToPath } from "url"; import { z } from "zod"; @@ -36,12 +36,12 @@ const SKILLS: SkillDefinition[] = [ } ]; -function loadSkillContent(name: string): string | null { +async function loadSkillContent(name: string): Promise { const skill = SKILLS.find((s) => s.name === name); if (!skill) { return null; } - return readFileSync(join(__dirname, skill.file), "utf-8"); + return readFile(join(__dirname, skill.file), "utf-8"); } /** @@ -63,7 +63,7 @@ export const loadSkill = tool({ name: z.string().describe("The skill name to load") }), execute: async ({ name }) => { - const content = loadSkillContent(name); + const content = await loadSkillContent(name); if (!content) { return { error: `Unknown skill: '${name}'. 
Available: ${SKILLS.map((s) => s.name).join(", ")}` }; } From f528833232ff073fa77468b11c6a33516c859451 Mon Sep 17 00:00:00 2001 From: Elian Doran Date: Tue, 31 Mar 2026 20:52:17 +0300 Subject: [PATCH 18/19] chore(llm): relocate skills to assets --- CLAUDE.md | 6 ++++++ .../llm/skills/backend_scripting.md | 0 .../llm/skills/frontend_scripting.md | 0 .../{services => assets}/llm/skills/search_syntax.md | 0 apps/server/src/services/llm/skills/index.ts | 12 +++++++----- 5 files changed, 13 insertions(+), 5 deletions(-) rename apps/server/src/{services => assets}/llm/skills/backend_scripting.md (100%) rename apps/server/src/{services => assets}/llm/skills/frontend_scripting.md (100%) rename apps/server/src/{services => assets}/llm/skills/search_syntax.md (100%) diff --git a/CLAUDE.md b/CLAUDE.md index be265e5bd0..7e03baa2fb 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -153,6 +153,12 @@ Trilium provides powerful user scripting capabilities: - Add migration scripts in `apps/server/src/migrations/` - Update schema in `apps/server/src/assets/db/schema.sql` +### Server-Side Static Assets +- Static assets (templates, SQL, translations, etc.) go in `apps/server/src/assets/` +- Access them at runtime via `RESOURCE_DIR` from `apps/server/src/services/resource_dir.ts` (e.g. 
`path.join(RESOURCE_DIR, "llm", "skills", "file.md")`) +- **Do not use `import.meta.url`/`fileURLToPath`** to resolve file paths — the server is bundled into CJS for production, so `import.meta.url` will not point to the source directory +- **Do not use `__dirname` with relative paths** from source files — after bundling, `__dirname` points to the bundle output, not the original source tree + ## Build System Notes - Uses pnpm for monorepo management - Vite for fast development builds diff --git a/apps/server/src/services/llm/skills/backend_scripting.md b/apps/server/src/assets/llm/skills/backend_scripting.md similarity index 100% rename from apps/server/src/services/llm/skills/backend_scripting.md rename to apps/server/src/assets/llm/skills/backend_scripting.md diff --git a/apps/server/src/services/llm/skills/frontend_scripting.md b/apps/server/src/assets/llm/skills/frontend_scripting.md similarity index 100% rename from apps/server/src/services/llm/skills/frontend_scripting.md rename to apps/server/src/assets/llm/skills/frontend_scripting.md diff --git a/apps/server/src/services/llm/skills/search_syntax.md b/apps/server/src/assets/llm/skills/search_syntax.md similarity index 100% rename from apps/server/src/services/llm/skills/search_syntax.md rename to apps/server/src/assets/llm/skills/search_syntax.md diff --git a/apps/server/src/services/llm/skills/index.ts b/apps/server/src/services/llm/skills/index.ts index a38ee13a81..614820a5fa 100644 --- a/apps/server/src/services/llm/skills/index.ts +++ b/apps/server/src/services/llm/skills/index.ts @@ -4,13 +4,15 @@ * included in the system prompt; full content is fetched via the load_skill tool. 
*/ -import { tool } from "ai"; import { readFile } from "fs/promises"; -import { dirname, join } from "path"; -import { fileURLToPath } from "url"; +import { join } from "path"; + +import { tool } from "ai"; import { z } from "zod"; -const __dirname = dirname(fileURLToPath(import.meta.url)); +import resourceDir from "../../resource_dir.js"; + +const SKILLS_DIR = join(resourceDir.RESOURCE_DIR, "llm", "skills"); interface SkillDefinition { name: string; @@ -41,7 +43,7 @@ async function loadSkillContent(name: string): Promise { if (!skill) { return null; } - return readFile(join(__dirname, skill.file), "utf-8"); + return readFile(join(SKILLS_DIR, skill.file), "utf-8"); } /** From dffdeff798af77f41c59b1e7216d847fdf04487e Mon Sep 17 00:00:00 2001 From: Elian Doran Date: Tue, 31 Mar 2026 21:52:55 +0300 Subject: [PATCH 19/19] chore(deps): fix flake lock --- pnpm-lock.yaml | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index e5f083fc40..f57d8e6ba3 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -557,16 +557,16 @@ importers: apps/server: dependencies: '@ai-sdk/anthropic': - specifier: ^3.0.64 + specifier: 3.0.64 version: 3.0.64(zod@4.3.6) '@ai-sdk/google': - specifier: ^3.0.54 + specifier: 3.0.54 version: 3.0.54(zod@4.3.6) '@ai-sdk/openai': specifier: 3.0.49 version: 3.0.49(zod@4.3.6) ai: - specifier: ^6.0.142 + specifier: 6.0.142 version: 6.0.142(zod@4.3.6) better-sqlite3: specifier: 12.8.0 @@ -17049,6 +17049,8 @@ snapshots: '@ckeditor/ckeditor5-ui': 47.6.1 '@ckeditor/ckeditor5-utils': 47.6.1 ckeditor5: 47.6.1 + transitivePeerDependencies: + - supports-color '@ckeditor/ckeditor5-collaboration-core@47.6.1': dependencies: @@ -17546,6 +17548,8 @@ snapshots: '@ckeditor/ckeditor5-widget': 47.6.1 ckeditor5: 47.6.1 es-toolkit: 1.39.5 + transitivePeerDependencies: + - supports-color '@ckeditor/ckeditor5-list-multi-level@47.6.1': dependencies: @@ -17570,6 +17574,8 @@ snapshots: '@ckeditor/ckeditor5-utils': 
47.6.1 ckeditor5: 47.6.1 es-toolkit: 1.39.5 + transitivePeerDependencies: + - supports-color '@ckeditor/ckeditor5-markdown-gfm@47.6.1': dependencies: @@ -17607,6 +17613,8 @@ snapshots: '@ckeditor/ckeditor5-utils': 47.6.1 '@ckeditor/ckeditor5-widget': 47.6.1 ckeditor5: 47.6.1 + transitivePeerDependencies: + - supports-color '@ckeditor/ckeditor5-mention@47.6.1(patch_hash=5981fb59ba35829e4dff1d39cf771000f8a8fdfa7a34b51d8af9549541f2d62d)': dependencies: @@ -25396,6 +25404,8 @@ snapshots: ckeditor5-collaboration@47.6.1: dependencies: '@ckeditor/ckeditor5-collaboration-core': 47.6.1 + transitivePeerDependencies: + - supports-color ckeditor5-premium-features@47.6.1(bufferutil@4.0.9)(ckeditor5@47.6.1)(utf-8-validate@6.0.5): dependencies: