import options from '../../options.js';
import { BaseAIService } from '../base_ai_service.js';
import type { ChatCompletionOptions, ChatResponse, Message } from '../ai_interface.js';
import { getOpenAIOptions } from './providers.js';
import OpenAI from 'openai';

export class OpenAIService extends BaseAIService {
    private openai: OpenAI | null = null;

    constructor() {
        super('OpenAI');
    }

    isAvailable(): boolean {
        return super.isAvailable() && !!options.getOption('openaiApiKey');
    }

    private getClient(apiKey: string, baseUrl?: string): OpenAI {
        if (!this.openai) {
            this.openai = new OpenAI({
                apiKey,
                baseURL: baseUrl
            });
        }
        return this.openai;
    }

    async generateChatCompletion(messages: Message[], opts: ChatCompletionOptions = {}): Promise<ChatResponse> {
        if (!this.isAvailable()) {
            throw new Error('OpenAI service is not available. Check API key and AI settings.');
        }

        // Get provider-specific options from the central provider manager
        const providerOptions = getOpenAIOptions(opts);

        // Initialize the OpenAI client
        const client = this.getClient(providerOptions.apiKey, providerOptions.baseUrl);

        const systemPrompt = this.getSystemPrompt(providerOptions.systemPrompt || options.getOption('aiSystemPrompt'));

        // Ensure we have a system message
        const systemMessageExists = messages.some(m => m.role === 'system');
        const messagesWithSystem = systemMessageExists
            ? messages
            : [{ role: 'system', content: systemPrompt }, ...messages];

        try {
            // Create params object for the OpenAI SDK
            const params: OpenAI.Chat.ChatCompletionCreateParams = {
                model: providerOptions.model,
                messages: messagesWithSystem as OpenAI.Chat.ChatCompletionMessageParam[],
                temperature: providerOptions.temperature,
                max_tokens: providerOptions.max_tokens,
                stream: providerOptions.stream,
                top_p: providerOptions.top_p,
                frequency_penalty: providerOptions.frequency_penalty,
                presence_penalty: providerOptions.presence_penalty
            };

            // Add tools if enabled
            if (providerOptions.enableTools && providerOptions.tools && providerOptions.tools.length > 0) {
                params.tools = providerOptions.tools as OpenAI.Chat.ChatCompletionTool[];
            }

            if (providerOptions.tool_choice) {
                params.tool_choice = providerOptions.tool_choice as OpenAI.Chat.ChatCompletionToolChoiceOption;
            }

            // If streaming is requested
            if (providerOptions.stream) {
                params.stream = true;

                // Get stream from OpenAI SDK
                const stream = await client.chat.completions.create(params);

                // Return a response with the stream handler
                return {
                    text: '', // Initial empty text, will be populated during streaming
                    model: params.model,
                    provider: this.getName(),
                    stream: async (callback) => {
                        let completeText = '';

                        try {
                            // Process the stream
                            if (Symbol.asyncIterator in stream) {
                                for await (const chunk of stream as AsyncIterable<OpenAI.Chat.ChatCompletionChunk>) {
                                    const content = chunk.choices[0]?.delta?.content || '';
                                    const isDone = !!chunk.choices[0]?.finish_reason;

                                    if (content) {
                                        completeText += content;
                                    }

                                    // Send the chunk to the caller with raw data
                                    await callback({
                                        text: content,
                                        done: isDone,
                                        raw: chunk // Include the raw chunk for advanced processing
                                    });

                                    if (isDone) {
                                        break;
                                    }
                                }
                            } else {
                                // Fallback for non-iterable response
                                console.warn('Stream is not iterable, falling back to non-streaming response');

                                if ('choices' in stream) {
                                    const content = stream.choices[0]?.message?.content || '';
                                    completeText = content;
                                    await callback({
                                        text: content,
                                        done: true,
                                        raw: stream
                                    });
                                }
                            }
                        } catch (error) {
                            console.error('Error processing stream:', error);
                            throw error;
                        }

                        return completeText;
                    }
                };
            } else {
                // Non-streaming response
                params.stream = false;

                const completion = await client.chat.completions.create(params);

                if (!('choices' in completion)) {
                    throw new Error('Unexpected response format from OpenAI API');
                }

                return {
                    text: completion.choices[0].message.content || '',
                    model: completion.model,
                    provider: this.getName(),
                    usage: {
                        promptTokens: completion.usage?.prompt_tokens,
                        completionTokens: completion.usage?.completion_tokens,
                        totalTokens: completion.usage?.total_tokens
                    },
                    tool_calls: completion.choices[0].message.tool_calls
                };
            }
        } catch (error) {
            console.error('OpenAI service error:', error);
            throw error;
        }
    }
}
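
// --- Usage sketch (illustrative only; not part of the original file) ---
// A minimal example of how a caller might drive this service. Only the members
// used below (isAvailable(), generateChatCompletion(), and the returned
// text/stream fields) come from this file; the import path and the `stream`
// option name are assumptions about the surrounding codebase.
//
//     import { OpenAIService } from './openai_service.js'; // hypothetical path
//
//     const service = new OpenAIService();
//     if (service.isAvailable()) {
//         // Non-streaming call: the full reply is available on `text`.
//         const response = await service.generateChatCompletion([
//             { role: 'user', content: 'Summarize this note.' }
//         ]);
//         console.log(response.text);
//
//         // Streaming variant (assumes ChatCompletionOptions exposes `stream`):
//         const streamed = await service.generateChatCompletion(
//             [{ role: 'user', content: 'Summarize this note.' }],
//             { stream: true }
//         );
//         if (streamed.stream) {
//             // The handler is invoked once per chunk and resolves to the full text.
//             const fullText = await streamed.stream(async (chunk) => {
//                 process.stdout.write(chunk.text);
//             });
//             console.log('\nComplete response:', fullText);
//         }
//     }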