import options from '../../options.js';
import { BaseAIService } from '../base_ai_service.js';
import type { ChatCompletionOptions, ChatResponse, Message, StreamChunk } from '../ai_interface.js';
import { PROVIDER_CONSTANTS } from '../constants/provider_constants.js';
import type { AnthropicOptions } from './provider_options.js';
import { getAnthropicOptions } from './providers.js';
import log from '../../log.js';
import Anthropic from '@anthropic-ai/sdk';

export class AnthropicService extends BaseAIService {
    private client: any = null;

    constructor() {
        super('Anthropic');
    }

    isAvailable(): boolean {
        return super.isAvailable() && !!options.getOption('anthropicApiKey');
    }
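
    /**
     * Lazily create and cache the Anthropic SDK client.
     *
     * The client is constructed once with the given API key, base URL and
     * version headers; subsequent calls return the cached instance and
     * ignore any new arguments.
     */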
    private getClient(apiKey: string, baseUrl: string, apiVersion?: string, betaVersion?: string): any {
        if (!this.client) {
            this.client = new Anthropic({
                apiKey,
                baseURL: baseUrl,
                defaultHeaders: {
                    'anthropic-version': apiVersion || PROVIDER_CONSTANTS.ANTHROPIC.API_VERSION,
                    'anthropic-beta': betaVersion || PROVIDER_CONSTANTS.ANTHROPIC.BETA_VERSION
                }
            });
        }

        return this.client;
    }
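
    /**
     * Generate a chat completion using the Anthropic Messages API.
     *
     * Resolves provider options via getAnthropicOptions, formats the
     * conversation for Anthropic, and either returns the full completion
     * or, when streaming is requested, a ChatResponse whose `stream`
     * handler forwards chunks to the caller's callback.
     */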
    async generateChatCompletion(messages: Message[], opts: ChatCompletionOptions = {}): Promise<ChatResponse> {
        if (!this.isAvailable()) {
            throw new Error('Anthropic service is not available. Check API key and AI settings.');
        }

        // Get provider-specific options from the central provider manager
        const providerOptions = getAnthropicOptions(opts);

        // Log provider metadata if available
        if (providerOptions.providerMetadata) {
            log.info(`Using model ${providerOptions.model} from provider ${providerOptions.providerMetadata.provider}`);

            // Log capabilities if available
            const capabilities = providerOptions.providerMetadata.capabilities;
            if (capabilities) {
                log.info(`Model capabilities: ${JSON.stringify(capabilities)}`);
            }
        }

        // Get system prompt
        const systemPrompt = this.getSystemPrompt(providerOptions.systemPrompt || options.getOption('aiSystemPrompt'));

        // Format messages for Anthropic's API
        const anthropicMessages = this.formatMessages(messages);

        try {
            // Initialize the Anthropic client
            const client = this.getClient(
                providerOptions.apiKey,
                providerOptions.baseUrl,
                providerOptions.apiVersion,
                providerOptions.betaVersion
            );

            log.info(`Using Anthropic API with model: ${providerOptions.model}`);

            // Configure request parameters
            const requestParams = {
                model: providerOptions.model,
                messages: anthropicMessages,
                system: systemPrompt,
                max_tokens: providerOptions.max_tokens || 4096,
                temperature: providerOptions.temperature,
                top_p: providerOptions.top_p,
                stream: !!providerOptions.stream
            };

            // Handle streaming responses
            if (providerOptions.stream) {
                return this.handleStreamingResponse(client, requestParams, opts, providerOptions);
            } else {
                // Non-streaming request
                const response = await client.messages.create(requestParams);

                // Get the assistant's response text from the content blocks
                const textContent = response.content
                    .filter((block: any) => block.type === 'text')
                    .map((block: any) => block.text)
                    .join('');

                return {
                    text: textContent,
                    model: response.model,
                    provider: this.getName(),
                    usage: {
                        // Anthropic provides token counts in the response
                        promptTokens: response.usage?.input_tokens,
                        completionTokens: response.usage?.output_tokens,
                        totalTokens: (response.usage?.input_tokens || 0) + (response.usage?.output_tokens || 0)
                    }
                };
            }
        } catch (error) {
            log.error(`Anthropic service error: ${error}`);
            throw error;
        }
    }

    /**
     * Handle streaming response from Anthropic
     *
     * Simplified implementation that leverages the Anthropic SDK's streaming capabilities
     */
    private async handleStreamingResponse(
        client: any,
        params: any,
        opts: ChatCompletionOptions,
        providerOptions: AnthropicOptions
    ): Promise<ChatResponse> {
        // Create a stream handler function that processes the SDK's stream
        const streamHandler = async (callback: (chunk: StreamChunk) => Promise<void> | void): Promise<string> => {
            let completeText = '';

            try {
                // Request a streaming response from Anthropic
                const streamResponse = await client.messages.create({
                    ...params,
                    stream: true
                });

                // Process each chunk in the stream
                for await (const chunk of streamResponse) {
                    // Only process text content deltas
                    if (chunk.type === 'content_block_delta' && chunk.delta?.type === 'text_delta') {
                        const text = chunk.delta.text || '';
                        completeText += text;

                        // Send the chunk to the caller
                        await callback({
                            text,
                            done: false,
                            raw: chunk // Include the raw chunk for advanced processing
                        });
                    }
                }

                // Signal completion
                await callback({
                    text: '',
                    done: true
                });

                return completeText;
            } catch (error) {
                log.error(`Error in Anthropic streaming: ${error}`);
                throw error;
            }
        };

        // Return a response object with the stream handler
        return {
            text: '', // Initial text is empty, will be populated during streaming
            model: providerOptions.model,
            provider: this.getName(),
            stream: streamHandler
        };
    }

    /**
     * Format messages for the Anthropic API
     */
    private formatMessages(messages: Message[]): any[] {
        const anthropicMessages: any[] = [];

        // Process each message
        for (const msg of messages) {
            if (msg.role === 'system') {
                // System messages are handled separately in the API call
                continue;
            } else if (msg.role === 'user' || msg.role === 'assistant') {
                // Convert to Anthropic format
                anthropicMessages.push({
                    role: msg.role,
                    content: msg.content
                });
            } else if (msg.role === 'tool') {
                // Tool response messages - typically follow a tool call from the assistant
                anthropicMessages.push({
                    role: 'user',
                    content: msg.content
                });
            }
        }

        return anthropicMessages;
    }
}
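
// Usage sketch (illustrative only, not wired into the app): shows how a caller
// might use AnthropicService for both non-streaming and streaming completions.
// It assumes the Message, ChatCompletionOptions and ChatResponse shapes used
// above; the prompt text is a hypothetical example.
export async function exampleAnthropicChatUsage(): Promise<void> {
    const service = new AnthropicService();

    if (!service.isAvailable()) {
        log.info('Anthropic is not configured; skipping example.');
        return;
    }

    const messages: Message[] = [
        { role: 'user', content: 'Summarize the attached note in one sentence.' }
    ];

    // Non-streaming: the full completion is returned in `text`.
    const completion = await service.generateChatCompletion(messages, { stream: false });
    log.info(`Completion (${completion.model}): ${completion.text}`);

    // Streaming: the returned `stream` handler feeds chunks to the callback and
    // resolves with the complete text once the stream is finished.
    const streaming = await service.generateChatCompletion(messages, { stream: true });

    if (streaming.stream) {
        let received = 0;

        const fullText = await streaming.stream((chunk: StreamChunk) => {
            if (!chunk.done) {
                received += chunk.text.length;
            }
        });

        log.info(`Streamed ${received} characters; final length: ${fullText.length}`);
    }
}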