feat(llm): add a warning to the top of LLM Chat Notes and LLM settings that the feature is experimental

This commit is contained in:
perf3ct
2025-06-08 04:44:57 +00:00
parent ebb1654d0e
commit 29d9c9834a
3 changed files with 12 additions and 4 deletions

View File

@@ -2,6 +2,7 @@
* Validation functions for LLM Chat
*/
import options from "../../services/options.js";
import { t } from "../../services/i18n.js";
/**
* Validate providers configuration
@@ -37,6 +38,9 @@ export async function validateProviders(validationWarning: HTMLElement): Promise
// Check for configuration issues with providers in the precedence list
const configIssues: string[] = [];
// Always add experimental warning as the first item
configIssues.push(t("ai_llm.experimental_warning"));
// Check each provider in the precedence list for proper configuration
for (const provider of precedenceList) {
if (provider === 'openai') {

View File

@@ -203,6 +203,11 @@ export default class AiSettingsWidget extends OptionsWidget {
// Get selected provider
const selectedProvider = this.$widget.find('.ai-selected-provider').val() as string;
// Start with experimental warning
const allWarnings = [
t("ai_llm.experimental_warning")
];
// Check for selected provider configuration
const providerWarnings: string[] = [];
if (selectedProvider === 'openai') {
@@ -222,10 +227,8 @@ export default class AiSettingsWidget extends OptionsWidget {
}
}
// Combine all warnings
const allWarnings = [
...providerWarnings
];
// Add provider warnings to all warnings
allWarnings.push(...providerWarnings);
// Show or hide warnings
if (allWarnings.length > 0) {