/**
 * Normalized response envelope for a single LLM call.
 *
 * NOTE(review): member semantics below are inferred from the field names —
 * this declaration alone does not establish them; confirm against the
 * implementation that populates this type.
 */
interface LLMResponse {
    /** Why generation stopped — presumably a provider-reported reason such as "stop" or "length"; verify. */
    finish_reason: string;
    /** Request duration in milliseconds (per the `_ms` suffix). */
    latency_ms: number;
    /** Identifier of the model that produced the output. */
    model: string;
    /** The generated text. */
    output_text: string;
    /** Provider that served this request. */
    provider: node.apis.llm_service.Provider;
    /**
     * Raw, provider-specific response payload — presumably kept for
     * debugging/auditing; shape varies by provider.
     * NOTE(review): `unknown` would be safer than `any`, but changing it
     * would break callers that dereference `raw` without narrowing.
     */
    raw?: any;
    /** Token-usage accounting for this call (see `LLMUsage`). */
    usage: LLMUsage;
}

Properties

finish_reason: string
latency_ms: number
model: string
output_text: string
provider: node.apis.llm_service.Provider
raw?: any
usage: LLMUsage