mirror of https://github.com/zadam/trilium.git (synced 2025-11-18 03:00:41 +01:00)
feat(llm): add warning to the top of LLM Chat Notes and LLM settings that the feature is experimental
@@ -2,6 +2,7 @@
  * Validation functions for LLM Chat
  */
 import options from "../../services/options.js";
+import { t } from "../../services/i18n.js";
 
 /**
  * Validate providers configuration
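For context, t from ../../services/i18n.js is assumed to behave like an i18next-style lookup: it takes a translation key and returns the localized string. A minimal sketch under that assumption; the resolved English text in the comment is illustrative, not the actual locale entry:

    import { t } from "../../services/i18n.js";

    // Assumed i18next-style behaviour: t() resolves the key from the translation
    // resources; the English text below is illustrative only.
    const experimentalNotice: string = t("ai_llm.experimental_warning");
    // e.g. "The AI/LLM features are experimental ..." (illustrative)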
@@ -37,6 +38,9 @@ export async function validateProviders(validationWarning: HTMLElement): Promise
     // Check for configuration issues with providers in the precedence list
     const configIssues: string[] = [];
 
+    // Always add experimental warning as the first item
+    configIssues.push(t("ai_llm.experimental_warning"));
+
     // Check each provider in the precedence list for proper configuration
     for (const provider of precedenceList) {
         if (provider === 'openai') {
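For context, a minimal sketch of how a list like configIssues could end up in the validationWarning element that validateProviders receives. The rendering below is an assumption for illustration; the actual widget markup is not part of this diff:

    // Hypothetical rendering helper (assumption): list the collected issues inside the
    // warning element and unhide it; hide the element when there is nothing to report.
    function showConfigIssues(validationWarning: HTMLElement, configIssues: string[]): void {
        if (configIssues.length === 0) {
            validationWarning.style.display = "none";
            return;
        }
        const list = document.createElement("ul");
        for (const issue of configIssues) {
            const item = document.createElement("li");
            item.textContent = issue; // textContent avoids injecting HTML from translated strings
            list.appendChild(item);
        }
        validationWarning.replaceChildren(list);
        validationWarning.style.display = "block";
    }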
@@ -203,6 +203,11 @@ export default class AiSettingsWidget extends OptionsWidget {
         // Get selected provider
         const selectedProvider = this.$widget.find('.ai-selected-provider').val() as string;
 
+        // Start with experimental warning
+        const allWarnings = [
+            t("ai_llm.experimental_warning")
+        ];
+
         // Check for selected provider configuration
         const providerWarnings: string[] = [];
         if (selectedProvider === 'openai') {
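Seeding allWarnings this way guarantees that the experimental notice is always the first entry, with provider-specific issues appended after it (see the next hunk). A minimal standalone sketch of that ordering, with placeholder strings standing in for the t() lookup and the provider checks:

    // Placeholders only; the point is the resulting order of the warnings.
    const experimentalWarning = "LLM features are experimental";   // stands in for t("ai_llm.experimental_warning")
    const providerWarnings = ["OpenAI API key is not configured"]; // whatever the provider checks produce

    const allWarnings = [experimentalWarning];
    allWarnings.push(...providerWarnings);
    // => ["LLM features are experimental", "OpenAI API key is not configured"]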
@@ -222,10 +227,8 @@ export default class AiSettingsWidget extends OptionsWidget {
             }
         }
 
-        // Combine all warnings
-        const allWarnings = [
-            ...providerWarnings
-        ];
+        // Add provider warnings to all warnings
+        allWarnings.push(...providerWarnings);
 
         // Show or hide warnings
         if (allWarnings.length > 0) {
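The hunk ends just before the show-or-hide branch. Since allWarnings now always contains at least the experimental notice, the length check always passes and the warning area is always shown in the LLM settings. A minimal sketch of what such a branch could look like; the element handling is an assumption for illustration, not the widget's actual code:

    // Assumed shape of the logic behind "if (allWarnings.length > 0)": show the warning
    // box when there is anything to say, hide it otherwise.
    function toggleWarnings(warningBox: HTMLElement, allWarnings: string[]): void {
        if (allWarnings.length > 0) {
            warningBox.textContent = allWarnings.join(" ");
            warningBox.style.display = "block";
        } else {
            warningBox.style.display = "none";
        }
    }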