/**
 * Options for constructing a streaming runner.
 */
interface StreamingRunnerOptions {
    /**
     * The streaming LLM call function. Must resolve to an object with
     * `stream` (an `AsyncIterable` yielding text chunks as they arrive)
     * and `data` (a `Promise` resolving to the final aggregated result).
     *
     * NOTE(review): `input: any[]` — presumably the message/prompt list
     * sent to the model; exact element shape is not shown here, confirm
     * against the caller. Likewise `data: Promise<any>` — the resolved
     * payload type is provider-specific and untyped here.
     */
    streamingLlmCallFn: ((args: {
        input: any[];
        max_tokens?: number;
        model: string;
        stop?: string[];
        temperature?: number;
    }) => Promise<{
        data: Promise<any>;
        stream: AsyncIterable<string>;
    }>);
    /**
     * When true, log every raw chunk to the console. Toggleable at
     * runtime via `runner.verbose`.
     */
    verbose?: boolean;
}

Properties

streamingLlmCallFn: ((args: {
    input: any[];
    max_tokens?: number;
    model: string;
    stop?: string[];
    temperature?: number;
}) => Promise<{
    data: Promise<any>;
    stream: AsyncIterable<string>;
}>)

The streaming LLM call function. It must return a promise that resolves to an object with two members: `stream`, an `AsyncIterable` yielding text chunks as they arrive, and `data`, a `Promise` resolving to the final result.

verbose?: boolean

When true, logs every raw chunk to the console (toggleable at runtime via `runner.verbose`).