Explicitly set the stream option to true for chat completion requests

This commit is contained in:
perf3ct
2025-04-09 19:53:45 +00:00
parent 59a358a3ee
commit b05b88dd76
9 changed files with 134 additions and 19 deletions

View File

@@ -20,6 +20,18 @@ export interface StreamChunk {
};
}
/**
* Options for chat completion requests
*
* Key properties:
* - stream: If true, the response will be streamed
* - model: Model name to use
* - provider: Provider to use (openai, anthropic, ollama, etc.)
* - enableTools: If true, enables tool support
*
* The stream option is particularly important and should be consistently handled
* throughout the pipeline. It should be explicitly set to true or false.
*/
export interface ChatCompletionOptions {
model?: string;
temperature?: number;