LLM Invocation Data (convenience interface)

/**
 * Record of a single LLM invocation (convenience interface).
 *
 * NOTE(review): field semantics below are inferred from field names only —
 * the producer of this data is not visible here; confirm against it.
 */
interface LLMInvocationData {
    /** Token count of the model's completion (output), per the field name. */
    completion_tokens: number;
    /** Optional free-form metadata; key/value schema is producer-defined and not visible here. */
    context?: Record<string, any>;
    /** Error message for a failed invocation; presumably absent on success — confirm. */
    error?: string;
    /** Invocation latency; the `_ms` suffix indicates milliseconds. */
    latency_ms: number;
    /** Optional mode identifier; allowed values not visible from this declaration. */
    mode?: string;
    /** Identifier of the model that was invoked. */
    model: string;
    /** Token count of the prompt (input), per the field name. */
    prompt_tokens: number;
    /** Identifier of the LLM provider/service. */
    provider: string;
    /** Invocation status; value set not visible here — a string-literal union would be safer if known. */
    status: string;
}

Properties

completion_tokens: number
context?: Record<string, any>
error?: string
latency_ms: number
mode?: string
model: string
prompt_tokens: number
provider: string
status: string