streamingLlmCallFn
streamingLlmCallFn: ((args: {
input: any[];
max_tokens?: number;
model: string;
signal?: AbortSignal;
temperature?: number;
text_format?: {
name: string;
schema: Record<string, any>;
strict: boolean;
type: "json_schema";
};
}) => Promise<{
data: Promise<any>;
stream: AsyncIterable<string>;
}>)
The streaming LLM call function. Must resolve to an object with a `stream` property (an `AsyncIterable<string>` of response chunks) and a `data` property (a `Promise` resolving to the final result).