`LlmInferenceBaseOptions` constructor — implementation:
/// Creates the JS-interop base-options object passed to the MediaPipe LLM
/// Inference Web API.
///
/// Two mutually exclusive ways of supplying the model are exposed (per the
/// inline notes below; presumably callers set exactly one — verify against
/// the call sites):
/// - [modelAssetPath]: model location as a Blob URL, used for the
///   cacheApi/none loading modes.
/// - [modelAssetBuffer]: an opaque JS value — per the note, a
///   ReadableStreamDefaultReader backed by OPFS — used for streaming mode.
///   Typed [JSAny?] because the concrete reader type is not modeled here.
external factory LlmInferenceBaseOptions({
String? modelAssetPath, // For cacheApi/none modes (Blob URL)
JSAny?
modelAssetBuffer, // For streaming mode (ReadableStreamDefaultReader from OPFS)
});