saving chats finally works again, even if the UI is kinda...broken

wow
perf3ct
2025-04-13 21:16:18 +00:00
parent f252f53e82
commit 9a68155edc
9 changed files with 912 additions and 41 deletions


@@ -42,6 +42,31 @@ export default class LlmChatPanel extends BasicWidget {
private onSaveData: ((data: any) => Promise<void>) | null = null;
private onGetData: (() => Promise<any>) | null = null;
private messages: MessageData[] = [];
private sources: Array<{noteId: string; title: string; similarity?: number; content?: string}> = [];
private metadata: {
model?: string;
provider?: string;
temperature?: number;
maxTokens?: number;
toolExecutions?: Array<{
id: string;
name: string;
arguments: any;
result: any;
error?: string;
timestamp: string;
}>;
lastUpdated?: string;
usage?: {
promptTokens?: number;
completionTokens?: number;
totalTokens?: number;
};
} = {
model: 'default',
temperature: 0.7,
toolExecutions: []
};
// Public getters and setters for private properties
public getCurrentNoteId(): string | null {
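Editor's note: the metadata shape added in this hunk recurs verbatim in several later hunks of the same diff. A minimal sketch of how it could be pulled out into named types; ToolExecution and ChatMetadata are hypothetical names, not part of this commit:

    // Hypothetical extraction of the repeated inline type literals.
    interface ToolExecution {
        id: string;
        name: string;
        arguments: any;
        result: any;
        error?: string;
        timestamp: string;
    }

    interface ChatMetadata {
        model?: string;
        provider?: string;
        temperature?: number;
        maxTokens?: number;
        toolExecutions?: ToolExecution[];
        lastUpdated?: string;
        usage?: {
            promptTokens?: number;
            completionTokens?: number;
            totalTokens?: number;
        };
    }

    // The field above would then read:
    // private metadata: ChatMetadata = { model: 'default', temperature: 0.7, toolExecutions: [] };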
@@ -136,13 +161,92 @@ export default class LlmChatPanel extends BasicWidget {
// Extract current tool execution steps if any exist
const toolSteps = extractInChatToolSteps(this.noteContextChatMessages);
// Get tool executions from both UI and any cached executions in metadata
let toolExecutions: Array<{
id: string;
name: string;
arguments: any;
result: any;
error?: string;
timestamp: string;
}> = [];
// First include any tool executions already in metadata (from streaming events)
if (this.metadata?.toolExecutions && Array.isArray(this.metadata.toolExecutions)) {
toolExecutions = [...this.metadata.toolExecutions];
console.log(`Including ${toolExecutions.length} tool executions from metadata`);
}
// Also extract any visible tool steps from the UI
const extractedExecutions = toolSteps.map(step => {
// Parse tool execution information
if (step.type === 'tool-execution') {
try {
const content = JSON.parse(step.content);
return {
id: content.toolCallId || `tool-${Date.now()}-${Math.random().toString(36).substring(2, 7)}`,
name: content.tool || 'unknown',
arguments: content.args || {},
result: content.result || {},
error: content.error,
timestamp: new Date().toISOString()
};
} catch (e) {
// If we can't parse it, create a basic record
return {
id: `tool-${Date.now()}-${Math.random().toString(36).substring(2, 7)}`,
name: 'unknown',
arguments: {},
result: step.content,
timestamp: new Date().toISOString()
};
}
} else if (step.type === 'result' && step.name) {
// Handle result steps with a name
return {
id: `tool-${Date.now()}-${Math.random().toString(36).substring(2, 7)}`,
name: step.name,
arguments: {},
result: step.content,
timestamp: new Date().toISOString()
};
}
return {
id: `tool-${Date.now()}-${Math.random().toString(36).substring(2, 7)}`,
name: 'unknown',
arguments: {},
result: 'Unrecognized tool step',
timestamp: new Date().toISOString()
};
});
// Merge the tool executions, keeping only unique IDs
const existingIds = new Set(toolExecutions.map((t: {id: string}) => t.id));
for (const exec of extractedExecutions) {
if (!existingIds.has(exec.id)) {
toolExecutions.push(exec);
existingIds.add(exec.id);
}
}
const dataToSave: ChatData = {
messages: this.messages,
sessionId: this.sessionId,
-            toolSteps: toolSteps
+            toolSteps: toolSteps,
// Add sources if we have them
sources: this.sources || [],
// Add metadata
metadata: {
model: this.metadata?.model || 'default',
provider: this.metadata?.provider || undefined,
temperature: this.metadata?.temperature || 0.7,
lastUpdated: new Date().toISOString(),
// Add tool executions
toolExecutions: toolExecutions
}
};
-        console.log(`Saving chat data with sessionId: ${this.sessionId} and ${toolSteps.length} tool steps`);
+        console.log(`Saving chat data with sessionId: ${this.sessionId}, ${toolSteps.length} tool steps, ${this.sources?.length || 0} sources, ${toolExecutions.length} tool executions`);
await this.onSaveData(dataToSave);
} catch (error) {
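Editor's note: the merge at the end of this hunk keeps the first record seen for each id. A sketch of the same logic as a standalone helper, reusing the hypothetical ToolExecution type from the earlier sketch:

    function mergeToolExecutions(
        existing: ToolExecution[],
        extracted: ToolExecution[]
    ): ToolExecution[] {
        // Keep the first record seen for each id; later duplicates are skipped.
        const merged = [...existing];
        const seen = new Set(merged.map(t => t.id));
        for (const exec of extracted) {
            if (!seen.has(exec.id)) {
                merged.push(exec);
                seen.add(exec.id);
            }
        }
        return merged;
    }

Note that UI-extracted records fall back to freshly generated ids (embedding Date.now() and random characters), so they will rarely collide with ids already in metadata; the dedup mainly protects records that carry a server-provided toolCallId.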
@@ -179,6 +283,39 @@ export default class LlmChatPanel extends BasicWidget {
this.restoreInChatToolSteps(savedData.toolSteps);
}
// Load sources if available
if (savedData.sources && Array.isArray(savedData.sources)) {
this.sources = savedData.sources;
console.log(`Loaded ${this.sources.length} sources from saved data`);
// Show sources in the UI if they exist
if (this.sources.length > 0) {
this.showSources(this.sources);
}
}
// Load metadata if available
if (savedData.metadata) {
this.metadata = {
...this.metadata,
...savedData.metadata
};
// Ensure tool executions are loaded
if (savedData.metadata.toolExecutions && Array.isArray(savedData.metadata.toolExecutions)) {
console.log(`Loaded ${savedData.metadata.toolExecutions.length} tool executions from saved data`);
if (!this.metadata.toolExecutions) {
this.metadata.toolExecutions = [];
}
// Make sure we don't lose any tool executions
this.metadata.toolExecutions = savedData.metadata.toolExecutions;
}
console.log(`Loaded metadata from saved data:`, this.metadata);
}
// Load session ID if available
if (savedData.sessionId) {
try {
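Editor's note: a sketch of the ChatData shape this load path expects, with field types inferred from how the object is constructed and read in this diff (the actual interface and MessageData import live elsewhere in the codebase):

    interface ChatData {
        messages: MessageData[];
        sessionId: string | null;
        toolSteps: Array<{ type: string; name?: string; content: string }>;
        sources?: Array<{ noteId: string; title: string; similarity?: number; content?: string }>;
        metadata?: ChatMetadata; // hypothetical named type from the first sketch
    }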
@@ -188,6 +325,53 @@ export default class LlmChatPanel extends BasicWidget {
if (sessionExists) {
console.log(`Restored session ${savedData.sessionId}`);
this.sessionId = savedData.sessionId;
// If we successfully restored a session, also fetch the latest session data
try {
const sessionData = await server.get<{
metadata?: {
model?: string;
provider?: string;
temperature?: number;
maxTokens?: number;
toolExecutions?: Array<{
id: string;
name: string;
arguments: any;
result: any;
error?: string;
timestamp: string;
}>;
lastUpdated?: string;
usage?: {
promptTokens?: number;
completionTokens?: number;
totalTokens?: number;
};
};
sources?: Array<{
noteId: string;
title: string;
similarity?: number;
content?: string;
}>;
}>(`llm/sessions/${savedData.sessionId}`);
if (sessionData && sessionData.metadata) {
// Update our metadata with the latest from the server
this.metadata = {
...this.metadata,
...sessionData.metadata
};
console.log(`Updated metadata from server for session ${savedData.sessionId}`);
// If server has sources, update those too
if (sessionData.sources && sessionData.sources.length > 0) {
this.sources = sessionData.sources;
}
}
} catch (fetchError) {
console.warn(`Could not fetch latest session data: ${fetchError}`);
}
} else {
console.log(`Saved session ${savedData.sessionId} not found, will create new one`);
this.sessionId = null;
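Editor's note: the typed server.get call in this hunk is repeated almost verbatim in the streaming completion handler later in the diff. A sketch of how it could be factored out; fetchSessionData and SessionResponse are hypothetical names:

    interface SessionResponse {
        metadata?: ChatMetadata; // see the earlier sketch
        sources?: Array<{ noteId: string; title: string; similarity?: number; content?: string }>;
    }

    async function fetchSessionData(sessionId: string): Promise<SessionResponse | null> {
        try {
            return await server.get<SessionResponse>(`llm/sessions/${sessionId}`);
        } catch (fetchError) {
            console.warn(`Could not fetch latest session data: ${fetchError}`);
            return null;
        }
    }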
@@ -466,12 +650,24 @@ export default class LlmChatPanel extends BasicWidget {
// If the POST request returned content directly, display it
if (postResponse && postResponse.content) {
-            this.processAssistantResponse(postResponse.content);
-            // If there are sources, show them
// Store metadata from the response
if (postResponse.metadata) {
console.log("Received metadata from response:", postResponse.metadata);
this.metadata = {
...this.metadata,
...postResponse.metadata
};
}
// Store sources from the response
if (postResponse.sources && postResponse.sources.length > 0) {
console.log(`Received ${postResponse.sources.length} sources from response`);
this.sources = postResponse.sources;
this.showSources(postResponse.sources);
}
+            // Process the assistant response
+            this.processAssistantResponse(postResponse.content, postResponse);
hideLoadingIndicator(this.loadingIndicator);
return true;
@@ -487,7 +683,7 @@ export default class LlmChatPanel extends BasicWidget {
/**
* Process an assistant response - add to UI and save
*/
-    private async processAssistantResponse(content: string) {
+    private async processAssistantResponse(content: string, fullResponse?: any) {
// Add the response to the chat UI
this.addMessageToChat('assistant', content);
@@ -497,6 +693,21 @@ export default class LlmChatPanel extends BasicWidget {
content,
timestamp: new Date()
});
// If we received tool execution information, add it to metadata
if (fullResponse?.metadata?.toolExecutions) {
console.log(`Storing ${fullResponse.metadata.toolExecutions.length} tool executions from response`);
// Make sure our metadata has toolExecutions
if (!this.metadata.toolExecutions) {
this.metadata.toolExecutions = [];
}
// Add new tool executions
this.metadata.toolExecutions = [
...this.metadata.toolExecutions,
...fullResponse.metadata.toolExecutions
];
}
// Save to note
this.saveCurrentData().catch(err => {
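Editor's note: unlike the save path earlier in the diff, the append in this hunk does not deduplicate by id, so an execution that arrives both via a streaming event and in the final response metadata could be stored twice. A sketch of a guarded variant inside processAssistantResponse, reusing the hypothetical mergeToolExecutions helper from the earlier sketch:

    // Merge instead of blind concat so executions seen both during streaming
    // and in the final response metadata are stored once.
    if (fullResponse?.metadata?.toolExecutions) {
        this.metadata.toolExecutions = mergeToolExecutions(
            this.metadata.toolExecutions ?? [],
            fullResponse.metadata.toolExecutions
        );
    }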
@@ -512,12 +723,94 @@ export default class LlmChatPanel extends BasicWidget {
throw new Error("No session ID available");
}
// Store tool executions captured during streaming
const toolExecutionsCache: Array<{
id: string;
name: string;
arguments: any;
result: any;
error?: string;
timestamp: string;
}> = [];
return setupStreamingResponse(
this.sessionId,
messageParams,
// Content update handler
-            (content: string) => {
-                this.updateStreamingUI(content);
+            (content: string, isDone: boolean = false) => {
+                this.updateStreamingUI(content, isDone);
// Update session data with additional metadata when streaming is complete
if (isDone) {
// Update our metadata with info from the server
server.get<{
metadata?: {
model?: string;
provider?: string;
temperature?: number;
maxTokens?: number;
toolExecutions?: Array<{
id: string;
name: string;
arguments: any;
result: any;
error?: string;
timestamp: string;
}>;
lastUpdated?: string;
usage?: {
promptTokens?: number;
completionTokens?: number;
totalTokens?: number;
};
};
sources?: Array<{
noteId: string;
title: string;
similarity?: number;
content?: string;
}>;
}>(`llm/sessions/${this.sessionId}`)
.then((sessionData) => {
console.log("Got updated session data:", sessionData);
// Store metadata
if (sessionData.metadata) {
this.metadata = {
...this.metadata,
...sessionData.metadata
};
}
// Store sources
if (sessionData.sources && sessionData.sources.length > 0) {
this.sources = sessionData.sources;
this.showSources(sessionData.sources);
}
// Make sure we include the cached tool executions
if (toolExecutionsCache.length > 0) {
console.log(`Including ${toolExecutionsCache.length} cached tool executions in metadata`);
if (!this.metadata.toolExecutions) {
this.metadata.toolExecutions = [];
}
// Add any tool executions from our cache that aren't already in metadata
const existingIds = new Set((this.metadata.toolExecutions || []).map((t: {id: string}) => t.id));
for (const toolExec of toolExecutionsCache) {
if (!existingIds.has(toolExec.id)) {
this.metadata.toolExecutions.push(toolExec);
existingIds.add(toolExec.id);
}
}
}
// Save the updated data to the note
this.saveCurrentData()
.catch(err => console.error("Failed to save data after streaming completed:", err));
})
.catch(err => console.error("Error fetching session data after streaming:", err));
}
},
// Thinking update handler
(thinking: string) => {
@@ -526,6 +819,38 @@ export default class LlmChatPanel extends BasicWidget {
// Tool execution handler
(toolData: any) => {
this.showToolExecutionInfo(toolData);
// Cache tools we see during streaming to include them in the final saved data
if (toolData && toolData.action === 'result' && toolData.tool) {
// Create a tool execution record
const toolExec = {
id: toolData.toolCallId || `tool-${Date.now()}-${Math.random().toString(36).substring(2, 7)}`,
name: toolData.tool,
arguments: toolData.args || {},
result: toolData.result || {},
error: toolData.error,
timestamp: new Date().toISOString()
};
// Add to both our local cache for immediate saving and to metadata for later saving
toolExecutionsCache.push(toolExec);
// Initialize toolExecutions array if it doesn't exist
if (!this.metadata.toolExecutions) {
this.metadata.toolExecutions = [];
}
// Add tool execution to our metadata
this.metadata.toolExecutions.push(toolExec);
console.log(`Cached tool execution for ${toolData.tool} to be saved later`);
// Save immediately after receiving a tool execution
// This ensures we don't lose tool execution data if streaming fails
this.saveCurrentData().catch(err => {
console.error("Failed to save tool execution data:", err);
});
}
},
// Complete handler
() => {
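Editor's note: the fallback id pattern `tool-${Date.now()}-${Math.random().toString(36).substring(2, 7)}` appears five times in this diff. Because it embeds a timestamp and random characters, two extractions of the same step always get different ids, so the Set-based dedup only catches records that share a server-provided toolCallId. A sketch of a shared helper; makeToolExecutionId is a hypothetical name:

    function makeToolExecutionId(toolCallId?: string): string {
        // Prefer the server-provided id so the same execution dedupes across
        // streaming events, UI extraction, and response metadata.
        return toolCallId ?? `tool-${Date.now()}-${Math.random().toString(36).substring(2, 7)}`;
    }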
@@ -541,9 +866,9 @@ export default class LlmChatPanel extends BasicWidget {
/**
* Update the UI with streaming content
*/
-    private updateStreamingUI(assistantResponse: string) {
+    private updateStreamingUI(assistantResponse: string, isDone: boolean = false) {
        const logId = `LlmChatPanel-${Date.now()}`;
-        console.log(`[${logId}] Updating UI with response text: ${assistantResponse.length} chars`);
+        console.log(`[${logId}] Updating UI with response text: ${assistantResponse.length} chars, isDone=${isDone}`);
if (!this.noteContextChatMessages) {
console.error(`[${logId}] noteContextChatMessages element not available`);
@@ -587,6 +912,31 @@ export default class LlmChatPanel extends BasicWidget {
this.addMessageToChat('assistant', assistantResponse);
console.log(`[${logId}] Successfully added new assistant message`);
}
// Update messages array only if this is the first update or the final update
if (!this.messages.some(m => m.role === 'assistant') || isDone) {
// Add or update the assistant message in our local array
const existingIndex = this.messages.findIndex(m => m.role === 'assistant');
if (existingIndex >= 0) {
// Update existing message
this.messages[existingIndex].content = assistantResponse;
} else {
// Add new message
this.messages.push({
role: 'assistant',
content: assistantResponse,
timestamp: new Date()
});
}
// If this is the final update, save the data
if (isDone) {
console.log(`[${logId}] Streaming finished, saving data to note`);
this.saveCurrentData().catch(err => {
console.error(`[${logId}] Failed to save streaming response to note:`, err);
});
}
}
}
// Always try to scroll to the latest content
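Editor's note: one caveat with the upsert in the last hunk: findIndex locates the first assistant message in the array, so in a multi-turn chat a streaming update would overwrite the oldest assistant reply rather than the newest. If multi-turn histories are expected, a last-match variant may be safer; a minimal sketch of the method body, under that assumption:

    // Sketch: target the last assistant message rather than the first.
    let lastIndex = -1;
    for (let i = this.messages.length - 1; i >= 0; i--) {
        if (this.messages[i].role === 'assistant') {
            lastIndex = i;
            break;
        }
    }
    if (lastIndex >= 0) {
        this.messages[lastIndex].content = assistantResponse;
    } else {
        this.messages.push({
            role: 'assistant',
            content: assistantResponse,
            timestamp: new Date()
        });
    }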