| 
									
										
										
										
											2025-03-11 17:30:50 +00:00
										 |  |  | import options from '../../options.js'; | 
					
						
							|  |  |  | import { BaseAIService } from '../base_ai_service.js'; | 
					
						
							| 
									
										
										
										
											2025-04-14 20:15:14 +00:00
										 |  |  | import type { ChatCompletionOptions, ChatResponse, Message, StreamChunk } from '../ai_interface.js'; | 
					
						
							| 
									
										
										
										
											2025-04-09 19:11:27 +00:00
										 |  |  | import { getOpenAIOptions } from './providers.js'; | 
					
						
							| 
									
										
										
										
											2025-04-09 21:16:29 +00:00
										 |  |  | import OpenAI from 'openai'; | 
					
						
							| 
									
										
										
										
											2025-05-29 21:58:03 +00:00
										 |  |  | import { PROVIDER_PROMPTS } from '../constants/llm_prompt_constants.js'; | 
					
						
							|  |  |  | import log from '../../log.js'; | 
					
						
							| 
									
										
										
										
											2025-03-02 19:39:10 -08:00
										 |  |  | 
 | 
					
						
/**
 * AI provider backed by the official OpenAI SDK (also used for OpenAI-compatible
 * endpoints via a custom `baseUrl`). Builds chat-completion requests from
 * centrally-resolved provider options and supports both streaming and
 * non-streaming responses, including tool (function) calls.
 */
export class OpenAIService extends BaseAIService {
    // Lazily-created SDK client, cached until clearCache() resets it.
    private openai: OpenAI | null = null;

    constructor() {
        super('OpenAI');
    }

    /**
     * Availability is delegated entirely to the base class.
     *
     * @returns true when the base-class provider checks pass.
     */
    override isAvailable(): boolean {
        // Make API key optional to support OpenAI-compatible endpoints that don't require authentication
        // The provider is considered available as long as the parent checks pass
        return super.isAvailable();
    }

    /**
     * Return the cached OpenAI client, creating it on first use.
     *
     * NOTE(review): the client is created once and cached — `apiKey`/`baseUrl`
     * passed on later calls are ignored until clearCache() is invoked, so a
     * settings change only takes effect after an explicit cache clear.
     *
     * @param apiKey  API key for the endpoint (may be a placeholder for
     *                compatible endpoints that skip auth — see isAvailable()).
     * @param baseUrl optional override for the API base URL.
     */
    private getClient(apiKey: string, baseUrl?: string): OpenAI {
        if (!this.openai) {
            this.openai = new OpenAI({
                apiKey,
                baseURL: baseUrl
            });
        }
        return this.openai;
    }

    /**
     * Generate a chat completion for the given conversation.
     *
     * Resolves provider options via getOpenAIOptions(), prepends a system
     * message (with tool instructions appended when tools are enabled) if the
     * conversation does not already contain one, then either:
     *  - streaming: returns immediately with a ChatResponse whose `stream`
     *    callback consumes the SDK stream, accumulating text and partial
     *    tool-call deltas into the same response object; or
     *  - non-streaming: awaits the completion and maps it to a ChatResponse.
     *
     * NOTE(review): full request messages and every stream chunk are logged at
     * info level — this may expose sensitive conversation content in logs.
     * NOTE(review): logging is split between `log.info` and `console.*`;
     * presumably historical — consider unifying.
     *
     * @param messages conversation history; a system message is injected only
     *                 when none is present.
     * @param opts     per-request options forwarded to the provider-options
     *                 resolver (model, temperature, tools, stream, …).
     * @returns the completed ChatResponse; in streaming mode `text` and
     *          `tool_calls` are populated as the stream is consumed.
     * @throws Error when the service is unavailable or the API response has an
     *         unexpected shape; API/stream errors are logged and rethrown.
     */
    async generateChatCompletion(messages: Message[], opts: ChatCompletionOptions = {}): Promise<ChatResponse> {
        if (!this.isAvailable()) {
            throw new Error('OpenAI service is not available. Check AI settings.');
        }

        // Get provider-specific options from the central provider manager
        const providerOptions = getOpenAIOptions(opts);

        // Initialize the OpenAI client
        const client = this.getClient(providerOptions.apiKey, providerOptions.baseUrl);

        // Get base system prompt (per-request override wins over the stored option)
        let systemPrompt = this.getSystemPrompt(providerOptions.systemPrompt || options.getOption('aiSystemPrompt'));

        // Check if tools are enabled for this request
        const willUseTools = providerOptions.enableTools && providerOptions.tools && providerOptions.tools.length > 0;

        // Add tool instructions to system prompt if tools are enabled
        if (willUseTools && PROVIDER_PROMPTS.OPENAI.TOOL_INSTRUCTIONS) {
            log.info('Adding tool instructions to system prompt for OpenAI');
            systemPrompt = `${systemPrompt}\n\n${PROVIDER_PROMPTS.OPENAI.TOOL_INSTRUCTIONS}`;
        }

        // Ensure we have a system message; an existing one is left untouched
        // (the tool-instruction suffix above is NOT added to it in that case).
        const systemMessageExists = messages.some(m => m.role === 'system');
        const messagesWithSystem = systemMessageExists
            ? messages
            : [{ role: 'system', content: systemPrompt }, ...messages];

        try {
            // Create params object for the OpenAI SDK
            const params: OpenAI.Chat.ChatCompletionCreateParams = {
                model: providerOptions.model,
                messages: messagesWithSystem as OpenAI.Chat.ChatCompletionMessageParam[],
                temperature: providerOptions.temperature,
                max_tokens: providerOptions.max_tokens,
                stream: providerOptions.stream,
                top_p: providerOptions.top_p,
                frequency_penalty: providerOptions.frequency_penalty,
                presence_penalty: providerOptions.presence_penalty
            };

            // Add tools if enabled
            if (providerOptions.enableTools && providerOptions.tools && providerOptions.tools.length > 0) {
                params.tools = providerOptions.tools as OpenAI.Chat.ChatCompletionTool[];
            }

            if (providerOptions.tool_choice) {
                params.tool_choice = providerOptions.tool_choice as OpenAI.Chat.ChatCompletionToolChoiceOption;
            }

            // Log the request parameters
            // NOTE(review): this includes the full message contents at info level.
            log.info(`OpenAI API Request: ${JSON.stringify({
                endpoint: 'chat.completions.create',
                model: params.model,
                messages: params.messages,
                temperature: params.temperature,
                max_tokens: params.max_tokens,
                stream: params.stream,
                tools: params.tools,
                tool_choice: params.tool_choice
            }, null, 2)}`);

            // If streaming is requested
            if (providerOptions.stream) {
                params.stream = true;

                // Get stream from OpenAI SDK
                const stream = await client.chat.completions.create(params);
                log.info('OpenAI API Stream Started');

                // Create a closure to hold accumulated tool calls.
                // Indexed sparsely by each delta's `index`, so holes are possible;
                // filter(Boolean) below strips them when exposing the array.
                const accumulatedToolCalls: OpenAI.Chat.ChatCompletionMessageToolCall[] = [];

                // Return a response with the stream handler
                const response: ChatResponse = {
                    text: '', // Initial empty text, will be populated during streaming
                    model: params.model,
                    provider: this.getName(),
                    // Add tool_calls property that will be populated during streaming
                    tool_calls: [],
                    // The caller drives consumption: the stream is only read when
                    // this callback-based handler is invoked.
                    stream: async (callback) => {
                        let completeText = '';

                        try {
                            // Process the stream
                            if (Symbol.asyncIterator in stream) {
                                for await (const chunk of stream as AsyncIterable<OpenAI.Chat.ChatCompletionChunk>) {
                                    // Log each chunk received from OpenAI
                                    // Use info level as debug is not available
                                    log.info(`OpenAI API Stream Chunk: ${JSON.stringify(chunk, null, 2)}`);

                                    const content = chunk.choices[0]?.delta?.content || '';
                                    const isDone = !!chunk.choices[0]?.finish_reason;

                                    // Check for tool calls in the delta
                                    const deltaToolCalls = chunk.choices[0]?.delta?.tool_calls;

                                    if (deltaToolCalls) {
                                        // Process and accumulate tool calls from this chunk
                                        for (const deltaToolCall of deltaToolCalls) {
                                            // The delta's `index` identifies which tool call this
                                            // fragment belongs to across chunks.
                                            const toolCallId = deltaToolCall.index;

                                            // Initialize or update the accumulated tool call
                                            if (!accumulatedToolCalls[toolCallId]) {
                                                accumulatedToolCalls[toolCallId] = {
                                                    id: deltaToolCall.id || `call_${toolCallId}`,
                                                    type: deltaToolCall.type || 'function',
                                                    function: {
                                                        name: '',
                                                        arguments: ''
                                                    }
                                                };
                                            }

                                            // Update function name if present
                                            if (deltaToolCall.function?.name) {
                                                accumulatedToolCalls[toolCallId].function.name =
                                                    deltaToolCall.function.name;
                                            }

                                            // Append to function arguments if present
                                            // (arguments arrive as JSON fragments across chunks)
                                            if (deltaToolCall.function?.arguments) {
                                                accumulatedToolCalls[toolCallId].function.arguments +=
                                                    deltaToolCall.function.arguments;
                                            }
                                        }

                                        // Important: Update the response's tool_calls with accumulated tool calls
                                        response.tool_calls = accumulatedToolCalls.filter(Boolean);
                                    }

                                    if (content) {
                                        completeText += content;
                                    }

                                    // Send the chunk to the caller with raw data and any accumulated tool calls
                                    const streamChunk: StreamChunk = {
                                        text: content,
                                        done: isDone,
                                        raw: chunk as unknown as Record<string, unknown>
                                    };

                                    // Add accumulated tool calls to raw data for compatibility with tool execution display
                                    if (accumulatedToolCalls.length > 0) {
                                        // Add tool calls to raw data for proper display
                                        streamChunk.raw = {
                                            ...streamChunk.raw as object,
                                            tool_calls: accumulatedToolCalls.filter(Boolean)
                                        } as Record<string, unknown>;
                                    }

                                    await callback(streamChunk);

                                    if (isDone) {
                                        console.log('OpenAI API Stream Complete. Final text length:', completeText.length);
                                        if (accumulatedToolCalls.length > 0) {
                                            console.log('OpenAI API Tool Calls:', JSON.stringify(accumulatedToolCalls, null, 2));
                                        }
                                        // Stop consuming once a finish_reason is seen.
                                        break;
                                    }
                                }
                            } else {
                                // Fallback for non-iterable response
                                console.warn('Stream is not iterable, falling back to non-streaming response');
                                console.log('OpenAI API Non-iterable Stream Response:', JSON.stringify(stream, null, 2));

                                if ('choices' in stream) {
                                    const content = stream.choices[0]?.message?.content || '';
                                    completeText = content;

                                    // Check if there are tool calls in the non-stream response
                                    const toolCalls = stream.choices[0]?.message?.tool_calls;
                                    if (toolCalls) {
                                        response.tool_calls = toolCalls;
                                        console.log('OpenAI API Tool Calls in Non-iterable Response:', JSON.stringify(toolCalls, null, 2));
                                    }

                                    // Deliver the whole response as a single, final chunk.
                                    await callback({
                                        text: content,
                                        done: true,
                                        raw: stream as unknown as Record<string, unknown>,
                                        tool_calls: toolCalls
                                    });
                                }
                            }
                        } catch (error) {
                            console.error('Error processing stream:', error);
                            throw error;
                        }

                        // Update the response's text with the complete text
                        response.text = completeText;

                        // Return the complete text
                        return completeText;
                    }
                };

                return response;
            } else {
                // Non-streaming response
                params.stream = false;

                const completion = await client.chat.completions.create(params);

                // Log the full response from OpenAI
                console.log('OpenAI API Response:', JSON.stringify(completion, null, 2));

                // Guard against a streaming-shaped object sneaking through.
                if (!('choices' in completion)) {
                    throw new Error('Unexpected response format from OpenAI API');
                }

                return {
                    text: completion.choices[0].message.content || '',
                    model: completion.model,
                    provider: this.getName(),
                    usage: {
                        promptTokens: completion.usage?.prompt_tokens,
                        completionTokens: completion.usage?.completion_tokens,
                        totalTokens: completion.usage?.total_tokens
                    },
                    tool_calls: completion.choices[0].message.tool_calls
                };
            }
        } catch (error) {
            console.error('OpenAI service error:', error);
            throw error;
        }
    }

    /**
     * Clear cached OpenAI client to force recreation with new settings
     */
    clearCache(): void {
        this.openai = null;
        log.info('OpenAI client cache cleared');
    }
}