diff --git a/apps/server/src/services/llm/providers/anthropic.ts b/apps/server/src/services/llm/providers/anthropic.ts index 48a06bbc4e..aef87f8258 100644 --- a/apps/server/src/services/llm/providers/anthropic.ts +++ b/apps/server/src/services/llm/providers/anthropic.ts @@ -113,9 +113,13 @@ export class AnthropicProvider extends BaseProvider { for (let i = 0; i < chatMessages.length; i++) { const m = chatMessages[i]; const isLastBeforeNewTurn = i === chatMessages.length - 2; + // Anthropic rejects empty text content blocks. Replace empty + // content (e.g. tool-only assistant turns) with a placeholder + // to preserve conversation flow. + const content = m.content || "(tool use)"; coreMessages.push({ role: m.role as "user" | "assistant", - content: m.content, + content, ...(isLastBeforeNewTurn && { providerOptions: CACHE_CONTROL }) }); } @@ -132,7 +136,7 @@ export class AnthropicProvider extends BaseProvider { } const systemPrompt = this.buildSystemPrompt(messages, config); - const chatMessages = messages.filter(m => m.role !== "system" && m.content); + const chatMessages = messages.filter(m => m.role !== "system"); const coreMessages = this.buildMessages(chatMessages, systemPrompt); const thinkingBudget = config.thinkingBudget || 10000; diff --git a/apps/server/src/services/llm/providers/base_provider.ts b/apps/server/src/services/llm/providers/base_provider.ts index ca975fa895..fda95856c3 100644 --- a/apps/server/src/services/llm/providers/base_provider.ts +++ b/apps/server/src/services/llm/providers/base_provider.ts @@ -144,7 +144,7 @@ export abstract class BaseProvider implements LlmProvider { chat(messages: LlmMessage[], config: LlmProviderConfig): StreamResult { const systemPrompt = this.buildSystemPrompt(messages, config); - const chatMessages = messages.filter(m => m.role !== "system" && m.content); + const chatMessages = messages.filter(m => m.role !== "system"); const coreMessages = this.buildMessages(chatMessages, systemPrompt); const streamOptions: 
Parameters[0] = { diff --git a/apps/server/src/services/llm/providers/google.ts b/apps/server/src/services/llm/providers/google.ts index 0902986d10..e33b1bccca 100644 --- a/apps/server/src/services/llm/providers/google.ts +++ b/apps/server/src/services/llm/providers/google.ts @@ -15,14 +15,14 @@ const { models: AVAILABLE_MODELS, pricing: MODEL_PRICING } = buildModelList([ id: "gemini-2.5-pro", name: "Gemini 2.5 Pro", pricing: { input: 1.25, output: 10 }, - contextWindow: 1048576, - isDefault: true + contextWindow: 1048576 }, { id: "gemini-2.5-flash", name: "Gemini 2.5 Flash", pricing: { input: 0.3, output: 2.5 }, - contextWindow: 1048576 + contextWindow: 1048576, + isDefault: true }, { id: "gemini-2.5-flash-lite", diff --git a/apps/server/src/services/llm/skills/index.ts b/apps/server/src/services/llm/skills/index.ts index ff22097111..a38ee13a81 100644 --- a/apps/server/src/services/llm/skills/index.ts +++ b/apps/server/src/services/llm/skills/index.ts @@ -5,7 +5,7 @@ */ import { tool } from "ai"; -import { readFileSync } from "fs"; +import { readFile } from "fs/promises"; import { dirname, join } from "path"; import { fileURLToPath } from "url"; import { z } from "zod"; @@ -36,12 +36,12 @@ const SKILLS: SkillDefinition[] = [ } ]; -function loadSkillContent(name: string): string | null { +async function loadSkillContent(name: string): Promise<string | null> { const skill = SKILLS.find((s) => s.name === name); if (!skill) { return null; } - return readFileSync(join(__dirname, skill.file), "utf-8"); + return readFile(join(__dirname, skill.file), "utf-8"); } /** @@ -63,7 +63,7 @@ export const loadSkill = tool({ name: z.string().describe("The skill name to load") }), execute: async ({ name }) => { - const content = loadSkillContent(name); + const content = await loadSkillContent(name); if (!content) { return { error: `Unknown skill: '${name}'. Available: ${SKILLS.map((s) => s.name).join(", ")}` }; }