Mirror of https://github.com/zadam/trilium.git, synced 2025-10-31 18:36:30 +01:00.
			
		
		
		
	feat(react/settings): port LLM settings
This commit is contained in:
		| @@ -1253,7 +1253,12 @@ | ||||
|     "selected_provider": "Selected Provider", | ||||
|     "selected_provider_description": "Choose the AI provider for chat and completion features", | ||||
|     "select_model": "Select model...", | ||||
|     "select_provider": "Select provider..." | ||||
|     "select_provider": "Select provider...", | ||||
|     "ai_enabled": "AI features enabled", | ||||
|     "ai_disabled": "AI features disabled", | ||||
|     "no_models_found_online": "No models found. Please check your API key and settings.", | ||||
|     "no_models_found_ollama": "No Ollama models found. Please check if Ollama is running.", | ||||
|     "error_fetching": "Error fetching models: {{error}}" | ||||
|   }, | ||||
|   "zoom_factor": { | ||||
|     "title": "Zoom Factor (desktop build only)", | ||||
|   | ||||
							
								
								
									
										15
									
								
								apps/client/src/widgets/react/FormTextArea.tsx
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										15
									
								
								apps/client/src/widgets/react/FormTextArea.tsx
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,15 @@ | ||||
| interface FormTextAreaProps { | ||||
|     currentValue: string; | ||||
|     onBlur?(newValue: string): void; | ||||
|     rows: number; | ||||
| } | ||||
| export default function FormTextArea({ onBlur, rows, currentValue }: FormTextAreaProps) { | ||||
|     return ( | ||||
|         <textarea | ||||
|             rows={rows} | ||||
|             onBlur={(e) => { | ||||
|                 onBlur?.(e.currentTarget.value); | ||||
|             }} | ||||
|         >{currentValue}</textarea> | ||||
|     ) | ||||
| } | ||||
| @@ -1,5 +1,4 @@ | ||||
| import TypeWidget from "./type_widget.js"; | ||||
| import AiSettingsOptions from "./options/ai_settings.js"; | ||||
| import type FNote from "../../entities/fnote.js"; | ||||
| import type NoteContextAwareWidget from "../note_context_aware_widget.js"; | ||||
| import { t } from "../../services/i18n.js"; | ||||
| @@ -21,6 +20,7 @@ import CodeNoteSettings from "./options/code_notes.jsx"; | ||||
| import OtherSettings from "./options/other.jsx"; | ||||
| import BackendLogWidget from "./content/backend_log.js"; | ||||
| import MultiFactorAuthenticationSettings from "./options/multi_factor_authentication.js"; | ||||
| import AiSettings from "./options/ai_settings.jsx"; | ||||
|  | ||||
| const TPL = /*html*/`<div class="note-detail-content-widget note-detail-printable"> | ||||
|     <style> | ||||
| @@ -59,7 +59,7 @@ const CONTENT_WIDGETS: Record<OptionPages | "_backendLog", ((typeof NoteContextA | ||||
|     _optionsEtapi: <EtapiSettings />, | ||||
|     _optionsBackup: <BackupSettings />, | ||||
|     _optionsSync: <SyncOptions />, | ||||
|     _optionsAi: [AiSettingsOptions], | ||||
|     _optionsAi: <AiSettings />, | ||||
|     _optionsOther: <OtherSettings />, | ||||
|     _optionsLocalization: <InternationalizationOptions />, | ||||
|     _optionsAdvanced: <AdvancedSettings />, | ||||
|   | ||||
| @@ -1,2 +0,0 @@ | ||||
| import AiSettingsWidget from './ai_settings/index.js'; | ||||
| export default AiSettingsWidget; | ||||
							
								
								
									
										238
									
								
								apps/client/src/widgets/type_widgets/options/ai_settings.tsx
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										238
									
								
								apps/client/src/widgets/type_widgets/options/ai_settings.tsx
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,238 @@ | ||||
| import { useCallback, useEffect, useState } from "preact/hooks"; | ||||
| import { t } from "../../../services/i18n"; | ||||
| import toast from "../../../services/toast"; | ||||
| import FormCheckbox from "../../react/FormCheckbox"; | ||||
| import FormGroup from "../../react/FormGroup"; | ||||
| import { useTriliumOption, useTriliumOptionBool } from "../../react/hooks"; | ||||
| import OptionsSection from "./components/OptionsSection"; | ||||
| import Admonition from "../../react/Admonition"; | ||||
| import FormSelect from "../../react/FormSelect"; | ||||
| import FormTextBox from "../../react/FormTextBox"; | ||||
| import type { OllamaModelResponse, OpenAiOrAnthropicModelResponse, OptionNames } from "@triliumnext/commons"; | ||||
| import server from "../../../services/server"; | ||||
| import Button from "../../react/Button"; | ||||
| import FormTextArea from "../../react/FormTextArea"; | ||||
|  | ||||
/**
 * Root component of the AI/LLM options page: a global enable toggle
 * followed by the per-provider configuration section.
 */
export default function AiSettings() {
    const sections = (
        <>
            <EnableAiSettings />
            <ProviderSettings />
        </>
    );
    return sections;
}
|  | ||||
/**
 * Options section with the master checkbox that turns AI features on or off.
 * Shows a confirmation toast on every toggle and an experimental-feature
 * warning while enabled.
 */
function EnableAiSettings() {
    const [ aiEnabled, setAiEnabled ] = useTriliumOptionBool("aiEnabled");

    function onToggle(isEnabled: boolean) {
        // Inform the user immediately, then persist the option.
        const message = isEnabled ? t("ai_llm.ai_enabled") : t("ai_llm.ai_disabled");
        toast.showMessage(message);
        setAiEnabled(isEnabled);
    }

    return (
        <OptionsSection title={t("ai_llm.title")}>
            <FormGroup description={t("ai_llm.enable_ai_description")}>
                <FormCheckbox
                    name="ai-enabled"
                    label={t("ai_llm.enable_ai_features")}
                    currentValue={aiEnabled}
                    onChange={onToggle}
                />
            </FormGroup>
            {aiEnabled && <Admonition type="warning">{t("ai_llm.experimental_warning")}</Admonition>}
        </OptionsSection>
    );
}
|  | ||||
| function ProviderSettings() { | ||||
|     const [ aiSelectedProvider, setAiSelectedProvider ] = useTriliumOption("aiSelectedProvider"); | ||||
|     const [ aiTemperature, setAiTemperature ] = useTriliumOption("aiTemperature"); | ||||
|     const [ aiSystemPrompt, setAiSystemPrompt ] = useTriliumOption("aiSystemPrompt"); | ||||
|  | ||||
|     return ( | ||||
|         <OptionsSection title={t("ai_llm.provider_configuration")}> | ||||
|             <FormGroup label={t("ai_llm.selected_provider")} description={t("ai_llm.selected_provider_description")}> | ||||
|                 <FormSelect | ||||
|                     values={[ | ||||
|                         { value: "", text: t("ai_llm.select_provider") }, | ||||
|                         { value: "openai", text: "OpenAI" }, | ||||
|                         { value: "anthropic", text: "Anthropic" }, | ||||
|                         { value: "ollama", text: "Ollama" } | ||||
|                     ]} | ||||
|                     currentValue={aiSelectedProvider} onChange={setAiSelectedProvider} | ||||
|                     keyProperty="value" titleProperty="text" | ||||
|                 /> | ||||
|             </FormGroup> | ||||
|  | ||||
|             { | ||||
|                 aiSelectedProvider === "openai" ? | ||||
|                     <SingleProviderSettings | ||||
|                         title={t("ai_llm.openai_settings")} | ||||
|                         apiKeyDescription={t("ai_llm.openai_api_key_description")} | ||||
|                         baseUrlDescription={t("ai_llm.openai_url_description")} | ||||
|                         modelDescription={t("ai_llm.openai_model_description")} | ||||
|                         validationErrorMessage={t("ai_llm.empty_key_warning.openai")} | ||||
|                         apiKeyOption="openaiApiKey" baseUrlOption="openaiBaseUrl" modelOption="openaiDefaultModel" | ||||
|                         provider={aiSelectedProvider} | ||||
|                     /> | ||||
|                 : aiSelectedProvider === "anthropic" ? | ||||
|                     <SingleProviderSettings | ||||
|                         title={t("ai_llm.anthropic_settings")} | ||||
|                         apiKeyDescription={t("ai_llm.anthropic_api_key_description")} | ||||
|                         modelDescription={t("ai_llm.anthropic_model_description")} | ||||
|                         baseUrlDescription={t("ai_llm.anthropic_url_description")} | ||||
|                         validationErrorMessage={t("ai_llm.empty_key_warning.anthropic")} | ||||
|                         apiKeyOption="anthropicApiKey" baseUrlOption="anthropicBaseUrl" modelOption="anthropicDefaultModel" | ||||
|                         provider={aiSelectedProvider} | ||||
|                     /> | ||||
|                 : aiSelectedProvider === "ollama" ? | ||||
|                     <SingleProviderSettings | ||||
|                         title={t("ai_llm.ollama_settings")} | ||||
|                         baseUrlDescription={t("ai_llm.ollama_url_description")} | ||||
|                         modelDescription={t("ai_llm.ollama_model_description")} | ||||
|                         validationErrorMessage={t("ai_llm.ollama_no_url")} | ||||
|                         baseUrlOption="ollamaBaseUrl" | ||||
|                         provider={aiSelectedProvider} modelOption="ollamaDefaultModel" | ||||
|                     /> | ||||
|                 : | ||||
|                     <></> | ||||
|             }     | ||||
|  | ||||
|             <FormGroup label={t("ai_llm.temperature")} description={t("ai_llm.temperature_description")}> | ||||
|                 <FormTextBox | ||||
|                     name="ai-temperature" | ||||
|                     type="number" min="0" max="2" step="0.1" | ||||
|                     currentValue={aiTemperature} onChange={setAiTemperature} | ||||
|                 /> | ||||
|             </FormGroup> | ||||
|  | ||||
|             <FormGroup label={t("ai_llm.system_prompt")} description={t("ai_llm.system_prompt_description")}> | ||||
|                 <FormTextArea | ||||
|                     rows={3} | ||||
|                     currentValue={aiSystemPrompt} onBlur={setAiSystemPrompt} | ||||
|                 /> | ||||
|             </FormGroup> | ||||
|         </OptionsSection> | ||||
|     ) | ||||
| } | ||||
|  | ||||
/** Props for the per-provider configuration card (`SingleProviderSettings`). */
interface SingleProviderSettingsProps {
    /** Provider identifier, e.g. "openai", "anthropic" or "ollama". */
    provider: string;
    /** Title shown in the card header. */
    title: string;
    /** Help text for the API key field; omitted for providers without API keys (e.g. Ollama). */
    apiKeyDescription?: string;
    /** Help text for the base URL field. */
    baseUrlDescription: string;
    /** Help text for the model selector. */
    modelDescription: string;
    /** Warning shown while the provider is not yet minimally configured. */
    validationErrorMessage: string;
    /** Option storing the API key; omitted when the provider needs none. */
    apiKeyOption?: OptionNames;
    /** Option storing the provider's base URL. */
    baseUrlOption: OptionNames;
    /** Option storing the provider's default model. */
    modelOption: OptionNames;
}
|  | ||||
/**
 * Configuration card for a single AI provider: optional API key, base URL
 * and — once the provider is minimally configured — the model selector.
 */
function SingleProviderSettings({ provider, title, apiKeyDescription, baseUrlDescription, modelDescription, validationErrorMessage, apiKeyOption, baseUrlOption, modelOption }: SingleProviderSettingsProps) {
    // BUG FIX: hooks must be called unconditionally in the same order on every
    // render. The original called useTriliumOption inside a ternary, so the
    // hook count changed when switching between providers with and without an
    // API key (e.g. OpenAI -> Ollama) while this component stayed mounted.
    // For key-less providers we still call the hook — on the always-present
    // base URL option — and simply ignore that extra subscription.
    const [ apiKeyValue, setApiKey ] = useTriliumOption(apiKeyOption ?? baseUrlOption);
    const [ baseUrl, setBaseUrl ] = useTriliumOption(baseUrlOption);
    const apiKey = apiKeyOption ? apiKeyValue : undefined;

    // Key-based providers are valid once a key is set; URL-based providers
    // (Ollama) once a base URL is set.
    const isValid = (apiKeyOption ? !!apiKey : !!baseUrl);

    return (
        <div class="provider-settings">
            <div class="card mt-3">
                <div class="card-header">
                    <h5>{title}</h5>
                </div>

                <div class="card-body">
                    {!isValid && <Admonition type="caution">{validationErrorMessage}</Admonition> }

                    {apiKeyOption && (
                        <FormGroup label={t("ai_llm.api_key")} description={apiKeyDescription}>
                            <FormTextBox
                                type="password" autoComplete="off"
                                currentValue={apiKey} onChange={setApiKey}
                            />
                        </FormGroup>
                    )}

                    <FormGroup label={t("ai_llm.url")} description={baseUrlDescription}>
                        {/* NOTE(review): the displayed fallback URL is OpenAI-specific
                            even for Anthropic/Ollama — confirm this is intended. */}
                        <FormTextBox
                            currentValue={baseUrl ?? "https://api.openai.com/v1"} onChange={setBaseUrl}
                        />
                    </FormGroup>

                    {isValid &&
                        <FormGroup label={t("ai_llm.model")} description={modelDescription}>
                            <ModelSelector provider={provider} baseUrl={baseUrl} modelOption={modelOption} />
                        </FormGroup>
                    }
                </div>
            </div>
        </div>
    );
}
|  | ||||
| function ModelSelector({ provider, baseUrl, modelOption }: { provider: string; baseUrl: string, modelOption: OptionNames }) { | ||||
|     const [ model, setModel ] = useTriliumOption(modelOption); | ||||
|     const [ models, setModels ] = useState<{ name: string, id: string }[]>([]); | ||||
|  | ||||
|     const loadProviders = useCallback(async () => { | ||||
|         switch (provider) { | ||||
|             case "openai": | ||||
|             case "anthropic": { | ||||
|                 try { | ||||
|                     const response = await server.get<OpenAiOrAnthropicModelResponse>(`llm/providers/${provider}/models?baseUrl=${encodeURIComponent(baseUrl)}`); | ||||
|                     if (response.success) { | ||||
|                         setModels(response.chatModels.toSorted((a, b) => a.name.localeCompare(b.name))); | ||||
|                     } else { | ||||
|                         toast.showError(t("ai_llm.no_models_found_online")); | ||||
|                     } | ||||
|                 } catch (e) { | ||||
|                     toast.showError(t("ai_llm.error_fetching", { error: e })); | ||||
|                 } | ||||
|                 break; | ||||
|             } | ||||
|             case "ollama": { | ||||
|                 try { | ||||
|                     const response = await server.get<OllamaModelResponse>(`llm/providers/ollama/models?baseUrl=${encodeURIComponent(baseUrl)}`); | ||||
|                     if (response.success) { | ||||
|                         setModels(response.models | ||||
|                             .map(model => ({ | ||||
|                                 name: model.name, | ||||
|                                 id: model.model | ||||
|                             })) | ||||
|                             .toSorted((a, b) => a.name.localeCompare(b.name))); | ||||
|                     } else { | ||||
|                         toast.showError(t("ai_llm.no_models_found_ollama")); | ||||
|                     } | ||||
|                 } catch (e) { | ||||
|                     toast.showError(t("ai_llm.error_fetching", { error: e })); | ||||
|                 } | ||||
|                 break; | ||||
|             } | ||||
|         } | ||||
|     }, [provider]); | ||||
|  | ||||
|     useEffect(() => { | ||||
|         loadProviders(); | ||||
|     }, [provider]); | ||||
|  | ||||
|     return ( | ||||
|         <> | ||||
|             <FormSelect | ||||
|                 values={models} | ||||
|                 keyProperty="id" titleProperty="name" | ||||
|                 currentValue={model} onChange={setModel} | ||||
|             /> | ||||
|  | ||||
|             <Button | ||||
|                 text={t("ai_llm.refresh_models")} | ||||
|                 onClick={loadProviders} | ||||
|                 size="small" | ||||
|                 style={{ marginTop: "0.5em" }} | ||||
|             /> | ||||
|         </> | ||||
|     ) | ||||
| } | ||||
| @@ -1,362 +0,0 @@ | ||||
| import OptionsWidget from "../options_widget.js"; | ||||
| import { TPL } from "./template.js"; | ||||
| import { t } from "../../../../services/i18n.js"; | ||||
| import type { OptionDefinitions, OptionMap } from "@triliumnext/commons"; | ||||
| import server from "../../../../services/server.js"; | ||||
| import toastService from "../../../../services/toast.js"; | ||||
| import { ProviderService } from "./providers.js"; | ||||
|  | ||||
| export default class AiSettingsWidget extends OptionsWidget { | ||||
|     private ollamaModelsRefreshed = false; | ||||
|     private openaiModelsRefreshed = false; | ||||
|     private anthropicModelsRefreshed = false; | ||||
|     private providerService: ProviderService | null = null; | ||||
|  | ||||
    /**
     * Builds the widget DOM from the static HTML template, attaches the
     * ProviderService (which fetches model lists per provider) and wires
     * up all option change handlers.
     */
    doRender() {
        this.$widget = $(TPL);
        this.providerService = new ProviderService(this.$widget);

        // Setup event handlers for options
        this.setupEventHandlers();

        return this.$widget;
    }
|  | ||||
    /**
     * Helper method to set up a change event handler for an option.
     *
     * On change, reads the element's value (checkbox state for checkboxes),
     * persists it via `updateOption`, shows a toast for the special
     * `aiEnabled` option, and optionally re-runs provider validation.
     *
     * @param selector The jQuery selector for the element
     * @param optionName The name of the option to update
     * @param validateAfter Whether to run validation after the update
     * @param isCheckbox Whether the element is a checkbox
     */
    setupChangeHandler(selector: string, optionName: keyof OptionDefinitions, validateAfter: boolean = false, isCheckbox: boolean = false) {
        if (!this.$widget) return;

        const $element = this.$widget.find(selector);
        $element.on('change', async () => {
            let value: string;

            if (isCheckbox) {
                // Options are stored as strings, so booleans become 'true'/'false'.
                value = $element.prop('checked') ? 'true' : 'false';
            } else {
                value = $element.val() as string;
            }

            await this.updateOption(optionName, value);

            // Special handling for aiEnabled option
            if (optionName === 'aiEnabled') {
                try {
                    const isEnabled = value === 'true';

                    if (isEnabled) {
                        toastService.showMessage(t("ai_llm.ai_enabled") || "AI features enabled");
                    } else {
                        toastService.showMessage(t("ai_llm.ai_disabled") || "AI features disabled");
                    }
                } catch (error) {
                    console.error('Error toggling AI:', error);
                    toastService.showError(t("ai_llm.ai_toggle_error") || "Error toggling AI features");
                }
            }

            if (validateAfter) {
                await this.displayValidationWarnings();
            }
        });
    }
|  | ||||
    /**
     * Set up all event handlers for options.
     *
     * Wires change handlers for every option field, refresh buttons and
     * Bootstrap tab-shown events for model fetching, and shows/hides the
     * per-provider settings pane when the selected provider changes.
     */
    setupEventHandlers() {
        if (!this.$widget) return;

        // Core AI options
        this.setupChangeHandler('.ai-enabled', 'aiEnabled', true, true);
        this.setupChangeHandler('.ai-selected-provider', 'aiSelectedProvider', true);
        this.setupChangeHandler('.ai-temperature', 'aiTemperature');
        this.setupChangeHandler('.ai-system-prompt', 'aiSystemPrompt');

        // OpenAI options
        this.setupChangeHandler('.openai-api-key', 'openaiApiKey', true);
        this.setupChangeHandler('.openai-base-url', 'openaiBaseUrl', true);
        this.setupChangeHandler('.openai-default-model', 'openaiDefaultModel');

        // Anthropic options
        this.setupChangeHandler('.anthropic-api-key', 'anthropicApiKey', true);
        this.setupChangeHandler('.anthropic-default-model', 'anthropicDefaultModel');
        this.setupChangeHandler('.anthropic-base-url', 'anthropicBaseUrl');

        // Voyage options
        this.setupChangeHandler('.voyage-api-key', 'voyageApiKey');

        // Ollama options
        this.setupChangeHandler('.ollama-base-url', 'ollamaBaseUrl');
        this.setupChangeHandler('.ollama-default-model', 'ollamaDefaultModel');

        // Manual refresh button for Ollama models (force refresh).
        const $refreshModels = this.$widget.find('.refresh-models');
        $refreshModels.on('click', async () => {
            this.ollamaModelsRefreshed = await this.providerService?.refreshOllamaModels(true, this.ollamaModelsRefreshed) || false;
        });

        // Add tab change handler for Ollama tab
        const $ollamaTab = this.$widget.find('#nav-ollama-tab');
        $ollamaTab.on('shown.bs.tab', async () => {
            // Only refresh the models if we haven't done it before
            this.ollamaModelsRefreshed = await this.providerService?.refreshOllamaModels(false, this.ollamaModelsRefreshed) || false;
        });

        // OpenAI models refresh button
        const $refreshOpenAIModels = this.$widget.find('.refresh-openai-models');
        $refreshOpenAIModels.on('click', async () => {
            this.openaiModelsRefreshed = await this.providerService?.refreshOpenAIModels(true, this.openaiModelsRefreshed) || false;
        });

        // Add tab change handler for OpenAI tab
        const $openaiTab = this.$widget.find('#nav-openai-tab');
        $openaiTab.on('shown.bs.tab', async () => {
            // Only refresh the models if we haven't done it before
            this.openaiModelsRefreshed = await this.providerService?.refreshOpenAIModels(false, this.openaiModelsRefreshed) || false;
        });

        // Anthropic models refresh button
        const $refreshAnthropicModels = this.$widget.find('.refresh-anthropic-models');
        $refreshAnthropicModels.on('click', async () => {
            this.anthropicModelsRefreshed = await this.providerService?.refreshAnthropicModels(true, this.anthropicModelsRefreshed) || false;
        });

        // Add tab change handler for Anthropic tab
        const $anthropicTab = this.$widget.find('#nav-anthropic-tab');
        $anthropicTab.on('shown.bs.tab', async () => {
            // Only refresh the models if we haven't done it before
            this.anthropicModelsRefreshed = await this.providerService?.refreshAnthropicModels(false, this.anthropicModelsRefreshed) || false;
        });


        // Add provider selection change handlers for dynamic settings visibility
        this.$widget.find('.ai-selected-provider').on('change', async () => {
            const selectedProvider = this.$widget.find('.ai-selected-provider').val() as string;
            this.$widget.find('.provider-settings').hide();
            if (selectedProvider) {
                this.$widget.find(`.${selectedProvider}-provider-settings`).show();
                // Automatically fetch models for the newly selected provider
                await this.fetchModelsForProvider(selectedProvider, 'chat');
            }
        });


        // Add base URL change handlers to trigger model fetching
        // (only refetch when the edited provider is the selected one).
        this.$widget.find('.openai-base-url').on('change', async () => {
            const selectedProvider = this.$widget.find('.ai-selected-provider').val() as string;
            if (selectedProvider === 'openai') {
                await this.fetchModelsForProvider('openai', 'chat');
            }
        });

        this.$widget.find('.anthropic-base-url').on('change', async () => {
            const selectedProvider = this.$widget.find('.ai-selected-provider').val() as string;
            if (selectedProvider === 'anthropic') {
                await this.fetchModelsForProvider('anthropic', 'chat');
            }
        });

        this.$widget.find('.ollama-base-url').on('change', async () => {
            const selectedProvider = this.$widget.find('.ai-selected-provider').val() as string;
            if (selectedProvider === 'ollama') {
                await this.fetchModelsForProvider('ollama', 'chat');
            }
        });

        // Add API key change handlers to trigger model fetching
        this.$widget.find('.openai-api-key').on('change', async () => {
            const selectedProvider = this.$widget.find('.ai-selected-provider').val() as string;
            if (selectedProvider === 'openai') {
                await this.fetchModelsForProvider('openai', 'chat');
            }
        });

        this.$widget.find('.anthropic-api-key').on('change', async () => {
            const selectedProvider = this.$widget.find('.ai-selected-provider').val() as string;
            if (selectedProvider === 'anthropic') {
                await this.fetchModelsForProvider('anthropic', 'chat');
            }
        });

    }
|  | ||||
    /**
     * Display warnings for validation issues with providers.
     *
     * Hidden entirely while AI is disabled; otherwise shows the experimental
     * warning plus any configuration problems for the selected provider
     * (missing API key, or missing base URL for Ollama).
     */
    async displayValidationWarnings() {
        if (!this.$widget) return;

        const $warningDiv = this.$widget.find('.provider-validation-warning');

        // Check if AI is enabled
        const aiEnabled = this.$widget.find('.ai-enabled').prop('checked');
        if (!aiEnabled) {
            $warningDiv.hide();
            return;
        }

        // Get selected provider
        const selectedProvider = this.$widget.find('.ai-selected-provider').val() as string;

        // Start with experimental warning
        const allWarnings = [
            t("ai_llm.experimental_warning")
        ];

        // Check for selected provider configuration
        const providerWarnings: string[] = [];
        if (selectedProvider === 'openai') {
            const openaiApiKey = this.$widget.find('.openai-api-key').val();
            if (!openaiApiKey) {
                providerWarnings.push(t("ai_llm.empty_key_warning.openai"));
            }
        } else if (selectedProvider === 'anthropic') {
            const anthropicApiKey = this.$widget.find('.anthropic-api-key').val();
            if (!anthropicApiKey) {
                providerWarnings.push(t("ai_llm.empty_key_warning.anthropic"));
            }
        } else if (selectedProvider === 'ollama') {
            const ollamaBaseUrl = this.$widget.find('.ollama-base-url').val();
            if (!ollamaBaseUrl) {
                providerWarnings.push(t("ai_llm.ollama_no_url"));
            }
        }

        // Add provider warnings to all warnings
        allWarnings.push(...providerWarnings);

        // Show or hide warnings
        // NOTE(review): allWarnings always contains the experimental warning,
        // so this condition is always true and the else branch is dead code.
        // NOTE(review): warning strings are interpolated into HTML unescaped —
        // confirm all t() values here are trusted/escaped upstream.
        if (allWarnings.length > 0) {
            const warningHtml = '<strong>' + t("ai_llm.configuration_warnings") + '</strong><ul>' +
                allWarnings.map(warning => `<li>${warning}</li>`).join('') + '</ul>';
            $warningDiv.html(warningHtml).show();
        } else {
            $warningDiv.hide();
        }
    }
|  | ||||
|  | ||||
|     /** | ||||
|      * Helper to get display name for providers | ||||
|      */ | ||||
|     getProviderDisplayName(provider: string): string { | ||||
|         switch(provider) { | ||||
|             case 'openai': return 'OpenAI'; | ||||
|             case 'anthropic': return 'Anthropic'; | ||||
|             case 'ollama': return 'Ollama'; | ||||
|             case 'voyage': return 'Voyage'; | ||||
|             case 'local': return 'Local'; | ||||
|             default: return provider.charAt(0).toUpperCase() + provider.slice(1); | ||||
|         } | ||||
|     } | ||||
|  | ||||
    /**
     * Set model dropdown value, adding the option if it doesn't exist.
     *
     * No-op when the widget isn't rendered or the value is empty/undefined.
     */
    setModelDropdownValue(selector: string, value: string | undefined) {
        if (!this.$widget || !value) return;

        const $dropdown = this.$widget.find(selector);

        // Check if the value already exists as an option
        // NOTE(review): `value` is interpolated unescaped into both the
        // attribute selector and the <option> HTML — fine for known model ids,
        // but confirm values can't contain quotes/HTML.
        if ($dropdown.find(`option[value="${value}"]`).length === 0) {
            // Add the custom value as an option
            $dropdown.append(`<option value="${value}">${value} (current)</option>`);
        }

        // Set the value
        $dropdown.val(value);
    }
|  | ||||
    /**
     * Fetch models for a specific provider and model type.
     *
     * Delegates to the ProviderService refresh method for the provider and
     * records whether a refresh has happened (so tab handlers can skip
     * redundant fetches). Errors are logged, never thrown.
     */
    async fetchModelsForProvider(provider: string, modelType: 'chat') {
        if (!this.providerService) return;

        try {
            switch (provider) {
                case 'openai':
                    this.openaiModelsRefreshed = await this.providerService.refreshOpenAIModels(false, this.openaiModelsRefreshed);
                    break;
                case 'anthropic':
                    this.anthropicModelsRefreshed = await this.providerService.refreshAnthropicModels(false, this.anthropicModelsRefreshed);
                    break;
                case 'ollama':
                    this.ollamaModelsRefreshed = await this.providerService.refreshOllamaModels(false, this.ollamaModelsRefreshed);
                    break;
                default:
                    console.log(`Model fetching not implemented for provider: ${provider}`);
            }
        } catch (error) {
            console.error(`Error fetching models for ${provider}:`, error);
        }
    }
|  | ||||
|     /** | ||||
|      * Update provider settings visibility based on selected providers | ||||
|      */ | ||||
|     updateProviderSettingsVisibility() { | ||||
|         if (!this.$widget) return; | ||||
|  | ||||
|         // Update AI provider settings visibility | ||||
|         const selectedAiProvider = this.$widget.find('.ai-selected-provider').val() as string; | ||||
|         this.$widget.find('.provider-settings').hide(); | ||||
|         if (selectedAiProvider) { | ||||
|             this.$widget.find(`.${selectedAiProvider}-provider-settings`).show(); | ||||
|         } | ||||
|  | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Called when the options have been loaded from the server | ||||
|      */ | ||||
|     async optionsLoaded(options: OptionMap) { | ||||
|         if (!this.$widget) return; | ||||
|  | ||||
|         // AI Options | ||||
|         this.$widget.find('.ai-enabled').prop('checked', options.aiEnabled !== 'false'); | ||||
|         this.$widget.find('.ai-temperature').val(options.aiTemperature || '0.7'); | ||||
|         this.$widget.find('.ai-system-prompt').val(options.aiSystemPrompt || ''); | ||||
|         this.$widget.find('.ai-selected-provider').val(options.aiSelectedProvider || 'openai'); | ||||
|  | ||||
|         // OpenAI Section | ||||
|         this.$widget.find('.openai-api-key').val(options.openaiApiKey || ''); | ||||
|         this.$widget.find('.openai-base-url').val(options.openaiBaseUrl || 'https://api.openai.com/v1'); | ||||
|         this.setModelDropdownValue('.openai-default-model', options.openaiDefaultModel); | ||||
|  | ||||
|         // Anthropic Section | ||||
|         this.$widget.find('.anthropic-api-key').val(options.anthropicApiKey || ''); | ||||
|         this.$widget.find('.anthropic-base-url').val(options.anthropicBaseUrl || 'https://api.anthropic.com'); | ||||
|         this.setModelDropdownValue('.anthropic-default-model', options.anthropicDefaultModel); | ||||
|  | ||||
|         // Voyage Section | ||||
|         this.$widget.find('.voyage-api-key').val(options.voyageApiKey || ''); | ||||
|  | ||||
|         // Ollama Section | ||||
|         this.$widget.find('.ollama-base-url').val(options.ollamaBaseUrl || 'http://localhost:11434'); | ||||
|         this.setModelDropdownValue('.ollama-default-model', options.ollamaDefaultModel); | ||||
|  | ||||
|         // Show/hide provider settings based on selected providers | ||||
|         this.updateProviderSettingsVisibility(); | ||||
|  | ||||
|         // Automatically fetch models for currently selected providers | ||||
|         const selectedAiProvider = this.$widget.find('.ai-selected-provider').val() as string; | ||||
|  | ||||
|         if (selectedAiProvider) { | ||||
|             await this.fetchModelsForProvider(selectedAiProvider, 'chat'); | ||||
|         } | ||||
|  | ||||
|         // Display validation warnings | ||||
|         this.displayValidationWarnings(); | ||||
|     } | ||||
|  | ||||
    cleanup() {
        // Cleanup method for widget. Intentionally empty: this widget holds no
        // timers, listeners or other external resources that need releasing.
        // NOTE(review): presumably required by the widget lifecycle contract —
        // confirm against the base widget class before removing.
    }
| } | ||||
| @@ -1,2 +0,0 @@ | ||||
| import AiSettingsWidget from './ai_settings_widget.js'; | ||||
| export default AiSettingsWidget; | ||||
| @@ -1,31 +0,0 @@ | ||||
// Shape of the response returned by the `llm/providers/ollama/models` endpoint.
export interface OllamaModelResponse {
    // Whether the server-side model listing succeeded.
    success: boolean;
    models: Array<{
        // Model name; used as both value and label of the dropdown option.
        name: string;
        model: string;
        details?: {
            family?: string;
            // e.g. a size string — presumably passed through from Ollama's
            // model metadata; verify against the server-side handler.
            parameter_size?: string;
        }
    }>;
}
|  | ||||
|  | ||||
// Shape of the response returned by the `llm/providers/openai/models` endpoint.
export interface OpenAIModelResponse {
    // Whether the server-side model listing succeeded.
    success: boolean;
    // Chat-capable models; `id` is the option value, `name` the display label.
    chatModels: Array<{
        id: string;
        name: string;
        type: string;
    }>;
}
|  | ||||
// Shape of the response returned by the `llm/providers/anthropic/models`
// endpoint. Structurally identical to OpenAIModelResponse.
export interface AnthropicModelResponse {
    // Whether the server-side model listing succeeded.
    success: boolean;
    // Chat-capable models; `id` is the option value, `name` the display label.
    chatModels: Array<{
        id: string;
        name: string;
        type: string;
    }>;
}
| @@ -1,252 +0,0 @@ | ||||
| import server from "../../../../services/server.js"; | ||||
| import toastService from "../../../../services/toast.js"; | ||||
| import { t } from "../../../../services/i18n.js"; | ||||
| import options from "../../../../services/options.js"; | ||||
| import type { OpenAIModelResponse, AnthropicModelResponse, OllamaModelResponse } from "./interfaces.js"; | ||||
|  | ||||
| export class ProviderService { | ||||
|     constructor(private $widget: JQuery<HTMLElement>) { | ||||
|         // AI provider settings | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Ensures the dropdown has the correct value set, prioritizing: | ||||
|      * 1. Current UI value if present | ||||
|      * 2. Value from database options if available | ||||
|      * 3. Falling back to first option if neither is available | ||||
|      */ | ||||
|     private ensureSelectedValue($select: JQuery<HTMLElement>, currentValue: string | number | string[] | undefined | null, optionName: string) { | ||||
|         if (currentValue) { | ||||
|             $select.val(currentValue); | ||||
|             // If the value doesn't exist anymore, select the first option | ||||
|             if (!$select.val()) { | ||||
|                 $select.prop('selectedIndex', 0); | ||||
|             } | ||||
|         } else { | ||||
|             // If no current value exists in the dropdown but there's a default in the database | ||||
|             const savedModel = options.get(optionName); | ||||
|             if (savedModel) { | ||||
|                 $select.val(savedModel); | ||||
|                 // If the saved model isn't in the dropdown, select the first option | ||||
|                 if (!$select.val()) { | ||||
|                     $select.prop('selectedIndex', 0); | ||||
|                 } | ||||
|             } | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Refreshes the list of OpenAI models | ||||
|      * @param showLoading Whether to show loading indicators and toasts | ||||
|      * @param openaiModelsRefreshed Reference to track if models have been refreshed | ||||
|      * @returns Promise that resolves when the refresh is complete | ||||
|      */ | ||||
|     async refreshOpenAIModels(showLoading: boolean, openaiModelsRefreshed: boolean): Promise<boolean> { | ||||
|         if (!this.$widget) return false; | ||||
|  | ||||
|         const $refreshOpenAIModels = this.$widget.find('.refresh-openai-models'); | ||||
|  | ||||
|         // If we've already refreshed and we're not forcing a refresh, don't do it again | ||||
|         if (openaiModelsRefreshed && !showLoading) { | ||||
|             return openaiModelsRefreshed; | ||||
|         } | ||||
|  | ||||
|         if (showLoading) { | ||||
|             $refreshOpenAIModels.prop('disabled', true); | ||||
|             $refreshOpenAIModels.html(`<i class="spinner-border spinner-border-sm"></i>`); | ||||
|         } | ||||
|  | ||||
|         try { | ||||
|             const openaiBaseUrl = this.$widget.find('.openai-base-url').val() as string; | ||||
|             const response = await server.get<OpenAIModelResponse>(`llm/providers/openai/models?baseUrl=${encodeURIComponent(openaiBaseUrl)}`); | ||||
|  | ||||
|             if (response && response.success) { | ||||
|                 // Update the chat models dropdown | ||||
|                 if (response.chatModels?.length > 0) { | ||||
|                     const $chatModelSelect = this.$widget.find('.openai-default-model'); | ||||
|                     const currentChatValue = $chatModelSelect.val(); | ||||
|  | ||||
|                     // Clear existing options | ||||
|                     $chatModelSelect.empty(); | ||||
|  | ||||
|                     // Sort models by name | ||||
|                     const sortedChatModels = [...response.chatModels].sort((a, b) => a.name.localeCompare(b.name)); | ||||
|  | ||||
|                     // Add models to the dropdown | ||||
|                     sortedChatModels.forEach(model => { | ||||
|                         $chatModelSelect.append(`<option value="${model.id}">${model.name}</option>`); | ||||
|                     }); | ||||
|  | ||||
|                     // Try to restore the previously selected value | ||||
|                     this.ensureSelectedValue($chatModelSelect, currentChatValue, 'openaiDefaultModel'); | ||||
|                 } | ||||
|  | ||||
|  | ||||
|                 if (showLoading) { | ||||
|                     // Show success message | ||||
|                     const totalModels = (response.chatModels?.length || 0); | ||||
|                     toastService.showMessage(`${totalModels} OpenAI models found.`); | ||||
|                 } | ||||
|  | ||||
|                 return true; | ||||
|             } else if (showLoading) { | ||||
|                 toastService.showError(`No OpenAI models found. Please check your API key and settings.`); | ||||
|             } | ||||
|  | ||||
|             return openaiModelsRefreshed; | ||||
|         } catch (e) { | ||||
|             console.error(`Error fetching OpenAI models:`, e); | ||||
|             if (showLoading) { | ||||
|                 toastService.showError(`Error fetching OpenAI models: ${e}`); | ||||
|             } | ||||
|             return openaiModelsRefreshed; | ||||
|         } finally { | ||||
|             if (showLoading) { | ||||
|                 $refreshOpenAIModels.prop('disabled', false); | ||||
|                 $refreshOpenAIModels.html(`<span class="bx bx-refresh"></span>`); | ||||
|             } | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Refreshes the list of Anthropic models | ||||
|      * @param showLoading Whether to show loading indicators and toasts | ||||
|      * @param anthropicModelsRefreshed Reference to track if models have been refreshed | ||||
|      * @returns Promise that resolves when the refresh is complete | ||||
|      */ | ||||
|     async refreshAnthropicModels(showLoading: boolean, anthropicModelsRefreshed: boolean): Promise<boolean> { | ||||
|         if (!this.$widget) return false; | ||||
|  | ||||
|         const $refreshAnthropicModels = this.$widget.find('.refresh-anthropic-models'); | ||||
|  | ||||
|         // If we've already refreshed and we're not forcing a refresh, don't do it again | ||||
|         if (anthropicModelsRefreshed && !showLoading) { | ||||
|             return anthropicModelsRefreshed; | ||||
|         } | ||||
|  | ||||
|         if (showLoading) { | ||||
|             $refreshAnthropicModels.prop('disabled', true); | ||||
|             $refreshAnthropicModels.html(`<i class="spinner-border spinner-border-sm"></i>`); | ||||
|         } | ||||
|  | ||||
|         try { | ||||
|             const anthropicBaseUrl = this.$widget.find('.anthropic-base-url').val() as string; | ||||
|             const response = await server.get<AnthropicModelResponse>(`llm/providers/anthropic/models?baseUrl=${encodeURIComponent(anthropicBaseUrl)}`); | ||||
|  | ||||
|             if (response && response.success) { | ||||
|                 // Update the chat models dropdown | ||||
|                 if (response.chatModels?.length > 0) { | ||||
|                     const $chatModelSelect = this.$widget.find('.anthropic-default-model'); | ||||
|                     const currentChatValue = $chatModelSelect.val(); | ||||
|  | ||||
|                     // Clear existing options | ||||
|                     $chatModelSelect.empty(); | ||||
|  | ||||
|                     // Sort models by name | ||||
|                     const sortedChatModels = [...response.chatModels].sort((a, b) => a.name.localeCompare(b.name)); | ||||
|  | ||||
|                     // Add models to the dropdown | ||||
|                     sortedChatModels.forEach(model => { | ||||
|                         $chatModelSelect.append(`<option value="${model.id}">${model.name}</option>`); | ||||
|                     }); | ||||
|  | ||||
|                     // Try to restore the previously selected value | ||||
|                     this.ensureSelectedValue($chatModelSelect, currentChatValue, 'anthropicDefaultModel'); | ||||
|                 } | ||||
|  | ||||
|                 if (showLoading) { | ||||
|                     // Show success message | ||||
|                     const totalModels = (response.chatModels?.length || 0); | ||||
|                     toastService.showMessage(`${totalModels} Anthropic models found.`); | ||||
|                 } | ||||
|  | ||||
|                 return true; | ||||
|             } else if (showLoading) { | ||||
|                 toastService.showError(`No Anthropic models found. Please check your API key and settings.`); | ||||
|             } | ||||
|  | ||||
|             return anthropicModelsRefreshed; | ||||
|         } catch (e) { | ||||
|             console.error(`Error fetching Anthropic models:`, e); | ||||
|             if (showLoading) { | ||||
|                 toastService.showError(`Error fetching Anthropic models: ${e}`); | ||||
|             } | ||||
|             return anthropicModelsRefreshed; | ||||
|         } finally { | ||||
|             if (showLoading) { | ||||
|                 $refreshAnthropicModels.prop('disabled', false); | ||||
|                 $refreshAnthropicModels.html(`<span class="bx bx-refresh"></span>`); | ||||
|             } | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Refreshes the list of Ollama models | ||||
|      * @param showLoading Whether to show loading indicators and toasts | ||||
|      * @param ollamaModelsRefreshed Reference to track if models have been refreshed | ||||
|      * @returns Promise that resolves when the refresh is complete | ||||
|      */ | ||||
|     async refreshOllamaModels(showLoading: boolean, ollamaModelsRefreshed: boolean): Promise<boolean> { | ||||
|         if (!this.$widget) return false; | ||||
|  | ||||
|         const $refreshModels = this.$widget.find('.refresh-models'); | ||||
|  | ||||
|         // If we've already refreshed and we're not forcing a refresh, don't do it again | ||||
|         if (ollamaModelsRefreshed && !showLoading) { | ||||
|             return ollamaModelsRefreshed; | ||||
|         } | ||||
|  | ||||
|         if (showLoading) { | ||||
|             $refreshModels.prop('disabled', true); | ||||
|             $refreshModels.text(t("ai_llm.refreshing_models")); | ||||
|         } | ||||
|  | ||||
|         try { | ||||
|             // Use the general Ollama base URL | ||||
|             const ollamaBaseUrl = this.$widget.find('.ollama-base-url').val() as string; | ||||
|  | ||||
|             const response = await server.get<OllamaModelResponse>(`llm/providers/ollama/models?baseUrl=${encodeURIComponent(ollamaBaseUrl)}`); | ||||
|  | ||||
|             if (response && response.success && response.models && response.models.length > 0) { | ||||
|                 // Update the LLM model dropdown | ||||
|                 const $modelSelect = this.$widget.find('.ollama-default-model'); | ||||
|                 const currentModelValue = $modelSelect.val(); | ||||
|  | ||||
|                 // Clear existing options | ||||
|                 $modelSelect.empty(); | ||||
|  | ||||
|                 // Sort models by name to make them easier to find | ||||
|                 const sortedModels = [...response.models].sort((a, b) => a.name.localeCompare(b.name)); | ||||
|  | ||||
|                 // Add all models to the dropdown | ||||
|                 sortedModels.forEach(model => { | ||||
|                     $modelSelect.append(`<option value="${model.name}">${model.name}</option>`); | ||||
|                 }); | ||||
|  | ||||
|                 // Try to restore the previously selected value | ||||
|                 this.ensureSelectedValue($modelSelect, currentModelValue, 'ollamaDefaultModel'); | ||||
|  | ||||
|                 if (showLoading) { | ||||
|                     toastService.showMessage(`${response.models.length} Ollama models found.`); | ||||
|                 } | ||||
|  | ||||
|                 return true; | ||||
|             } else if (showLoading) { | ||||
|                 toastService.showError(`No Ollama models found. Please check if Ollama is running.`); | ||||
|             } | ||||
|  | ||||
|             return ollamaModelsRefreshed; | ||||
|         } catch (e) { | ||||
|             console.error(`Error fetching Ollama models:`, e); | ||||
|             if (showLoading) { | ||||
|                 toastService.showError(`Error fetching Ollama models: ${e}`); | ||||
|             } | ||||
|             return ollamaModelsRefreshed; | ||||
|         } finally { | ||||
|             if (showLoading) { | ||||
|                 $refreshModels.prop('disabled', false); | ||||
|                 $refreshModels.html(`<span class="bx bx-refresh"></span>`); | ||||
|             } | ||||
|         } | ||||
|     } | ||||
| } | ||||
| @@ -1,135 +0,0 @@ | ||||
| import { t } from "../../../../services/i18n.js"; | ||||
|  | ||||
/**
 * HTML template for the AI/LLM options section.
 *
 * Renders the global "enable AI features" checkbox plus a provider selector
 * with one settings panel per provider (OpenAI, Anthropic, Ollama); the
 * panels start hidden (`display: none`) and are toggled by the widget based
 * on the selected provider. The CSS classes (e.g. `.ai-enabled`,
 * `.openai-api-key`, `.refresh-models`) are the hooks the widget code uses
 * to read and write values — keep them in sync with the widget.
 */
export const TPL = `
<div class="options-section">
    <h4>${t("ai_llm.title")}</h4>

    <!-- Add warning alert div -->
    <div class="provider-validation-warning alert alert-warning" style="display: none;"></div>

    <div class="form-group">
        <label class="tn-checkbox">
            <input class="ai-enabled form-check-input" type="checkbox">
            ${t("ai_llm.enable_ai_features")}
        </label>
        <div class="form-text">${t("ai_llm.enable_ai_description")}</div>
    </div>
</div>

<!-- AI settings template -->

<div class="ai-providers-section options-section">
    <h4>${t("ai_llm.provider_configuration")}</h4>

    <div class="form-group">
        <label>${t("ai_llm.selected_provider")}</label>
        <select class="ai-selected-provider form-control">
            <option value="">${t("ai_llm.select_provider")}</option>
            <option value="openai">OpenAI</option>
            <option value="anthropic">Anthropic</option>
            <option value="ollama">Ollama</option>
        </select>
        <div class="form-text">${t("ai_llm.selected_provider_description")}</div>
    </div>

    <!-- OpenAI Provider Settings -->
    <div class="provider-settings openai-provider-settings" style="display: none;">
        <div class="card mt-3">
            <div class="card-header">
                <h5>${t("ai_llm.openai_settings")}</h5>
            </div>
            <div class="card-body">
                <div class="form-group">
                    <label>${t("ai_llm.api_key")}</label>
                    <input type="password" class="openai-api-key form-control" autocomplete="off" />
                    <div class="form-text">${t("ai_llm.openai_api_key_description")}</div>
                </div>

                <div class="form-group">
                    <label>${t("ai_llm.url")}</label>
                    <input type="text" class="openai-base-url form-control" />
                    <div class="form-text">${t("ai_llm.openai_url_description")}</div>
                </div>

                <div class="form-group">
                    <label>${t("ai_llm.model")}</label>
                    <select class="openai-default-model form-control">
                        <option value="">${t("ai_llm.select_model")}</option>
                    </select>
                    <div class="form-text">${t("ai_llm.openai_model_description")}</div>
                    <button class="btn btn-sm btn-outline-secondary refresh-openai-models">${t("ai_llm.refresh_models")}</button>
                </div>
            </div>
        </div>
    </div>

    <!-- Anthropic Provider Settings -->
    <div class="provider-settings anthropic-provider-settings" style="display: none;">
        <div class="card mt-3">
            <div class="card-header">
                <h5>${t("ai_llm.anthropic_settings")}</h5>
            </div>
            <div class="card-body">
                <div class="form-group">
                    <label>${t("ai_llm.api_key")}</label>
                    <input type="password" class="anthropic-api-key form-control" autocomplete="off" />
                    <div class="form-text">${t("ai_llm.anthropic_api_key_description")}</div>
                </div>

                <div class="form-group">
                    <label>${t("ai_llm.url")}</label>
                    <input type="text" class="anthropic-base-url form-control" />
                    <div class="form-text">${t("ai_llm.anthropic_url_description")}</div>
                </div>

                <div class="form-group">
                    <label>${t("ai_llm.model")}</label>
                    <select class="anthropic-default-model form-control">
                        <option value="">${t("ai_llm.select_model")}</option>
                    </select>
                    <div class="form-text">${t("ai_llm.anthropic_model_description")}</div>
                    <button class="btn btn-sm btn-outline-secondary refresh-anthropic-models">${t("ai_llm.refresh_models")}</button>
                </div>
            </div>
        </div>
    </div>

    <!-- Ollama Provider Settings -->
    <div class="provider-settings ollama-provider-settings" style="display: none;">
        <div class="card mt-3">
            <div class="card-header">
                <h5>${t("ai_llm.ollama_settings")}</h5>
            </div>
            <div class="card-body">
                <div class="form-group">
                    <label>${t("ai_llm.url")}</label>
                    <input type="text" class="ollama-base-url form-control" />
                    <div class="form-text">${t("ai_llm.ollama_url_description")}</div>
                </div>

                <div class="form-group">
                    <label>${t("ai_llm.model")}</label>
                    <select class="ollama-default-model form-control">
                        <option value="">${t("ai_llm.select_model")}</option>
                    </select>
                    <div class="form-text">${t("ai_llm.ollama_model_description")}</div>
                    <button class="btn btn-sm btn-outline-secondary refresh-models"><span class="bx bx-refresh"></span></button>
                </div>
            </div>
        </div>
    </div>

    <div class="form-group">
        <label>${t("ai_llm.temperature")}</label>
        <input class="ai-temperature form-control" type="number" min="0" max="2" step="0.1">
        <div class="form-text">${t("ai_llm.temperature_description")}</div>
    </div>

    <div class="form-group">
        <label>${t("ai_llm.system_prompt")}</label>
        <textarea class="ai-system-prompt form-control" rows="3"></textarea>
        <div class="form-text">${t("ai_llm.system_prompt_description")}</div>
    </div>
</div>

`;
| @@ -132,3 +132,26 @@ export interface OAuthStatus { | ||||
|     email?: string; | ||||
|     missingVars?: string[]; | ||||
| } | ||||
|  | ||||
// Shape of the response returned by the `llm/providers/ollama/models` endpoint.
export interface OllamaModelResponse {
    // Whether the server-side model listing succeeded.
    success: boolean;
    models: Array<{
        // Model name; used to populate the model dropdown.
        name: string;
        model: string;
        details?: {
            family?: string;
            // Presumably passed through from Ollama's model metadata —
            // verify against the server-side handler.
            parameter_size?: string;
        }
    }>;
}
|  | ||||
|  | ||||
// Shared shape of the OpenAI and Anthropic model-listing responses
// (both endpoints return the same `chatModels` structure).
export interface OpenAiOrAnthropicModelResponse {
    // Whether the server-side model listing succeeded.
    success: boolean;
    // Chat-capable models; `id` is the option value, `name` the display label.
    chatModels: Array<{
        id: string;
        name: string;
        type: string;
    }>;
}
|   | ||||
		Reference in New Issue
	
	Block a user