feat(llm): remove everything to do with embeddings, part 3

This commit is contained in:
perf3ct
2025-06-07 18:30:46 +00:00
parent 44a2e7df21
commit 4550c12c6e
17 changed files with 44 additions and 221 deletions

View File

@@ -44,18 +44,7 @@ interface OptionRow {}
// Intentionally empty marker row type — presumably note-reordering events carry
// no column payload of their own; TODO confirm against the entity-change handler.
interface NoteReorderingRow {}
/**
 * Row shape for a stored note embedding.
 *
 * NOTE(review): per the surrounding diff this interface is being deleted by the
 * "remove embeddings" commit; documented here only as it appears in the hunk.
 */
interface NoteEmbeddingRow {
// Primary identifier of the embedding record.
embedId: string;
// Note this embedding belongs to — presumably a foreign key into `notes`; verify.
noteId: string;
// Provider that produced the embedding (e.g. an LLM backend id) — assumed; confirm.
providerId: string;
// Model identifier within the provider — assumed; confirm against provider config.
modelId: string;
// Dimensionality of the embedding vector.
dimension: number;
// Record version — likely bumped on re-embedding; TODO confirm semantics.
version: number;
// Creation timestamp (local) — string-encoded; format not visible here.
dateCreated: string;
// Creation timestamp in UTC.
utcDateCreated: string;
// Last-modification timestamp (local).
dateModified: string;
// Last-modification timestamp in UTC.
utcDateModified: string;
}
type EntityRowMappings = {
notes: NoteRow;

View File

@@ -1195,7 +1195,7 @@
"restore_provider": "Restore provider to search",
"similarity_threshold": "Similarity Threshold",
"similarity_threshold_description": "Minimum similarity score (0-1) for notes to be included in context for LLM queries",
"reprocess_started": "Embedding reprocessing started in the background",
"reprocess_index": "Rebuild Search Index",
"reprocessing_index": "Rebuilding...",
"reprocess_index_started": "Search index optimization started in the background",

View File

@@ -6,7 +6,7 @@ import type { OpenAIModelResponse, AnthropicModelResponse, OllamaModelResponse }
export class ProviderService {
constructor(private $widget: JQuery<HTMLElement>) {
// Embedding functionality removed
// AI provider settings
}
/**
@@ -204,7 +204,7 @@ export class ProviderService {
try {
// Use the general Ollama base URL
const ollamaBaseUrl = this.$widget.find('.ollama-base-url').val() as string;
const response = await server.get<OllamaModelResponse>(`llm/providers/ollama/models?baseUrl=${encodeURIComponent(ollamaBaseUrl)}`);
if (response && response.success && response.models && response.models.length > 0) {

View File

@@ -16,7 +16,7 @@ export const TPL = `
</div>
</div>
<!-- Embedding statistics section removed -->
<!-- AI settings template -->
<div class="ai-providers-section options-section">
<h4>${t("ai_llm.provider_configuration")}</h4>