diff --git a/src/index.html b/src/index.html index 174c5a7..5e3eb5f 100644 --- a/src/index.html +++ b/src/index.html @@ -19,6 +19,9 @@ + + + diff --git a/src/libav-6.5.7.1-webm-vp9.js b/src/libav-6.5.7.1-webm-vp9.js new file mode 100644 index 0000000..78a7117 --- /dev/null +++ b/src/libav-6.5.7.1-webm-vp9.js @@ -0,0 +1 @@ +(function(){function isWebAssemblySupported(module){module=module||[0,97,115,109,1,0,0,0];if(typeof WebAssembly!=="object"||typeof WebAssembly.instantiate!=="function")return false;try{var module=new WebAssembly.Module(new Uint8Array(module));if(module instanceof WebAssembly.Module)return new WebAssembly.Instance(module)instanceof WebAssembly.Instance}catch(e){}return false}function isThreadingSupported(){try{var mem=new WebAssembly.Memory({initial:1,maximum:1,shared:true});if(!(mem.buffer instanceof SharedArrayBuffer))return false;return true}catch(e){}return false}var libav;var nodejs=typeof process!=="undefined";if(typeof LibAV==="undefined")LibAV={};libav=LibAV;if(!libav.base){if(typeof __dirname==="string"){libav.base=__dirname}else{if(typeof document!=="undefined"&&document&&document.currentScript)libav.base=document.currentScript.src;else if(typeof self!=="undefined"&&self&&self.location)libav.base=self.location.href;else libav.base="./.";libav.base=libav.base.replace(/\/[^\/]*$/,"")}}libav.isWebAssemblySupported=isWebAssemblySupported;libav.isThreadingSupported=isThreadingSupported;function target(opts){opts=opts||{};var wasm=!opts.nowasm&&isWebAssemblySupported();var thr=opts.yesthreads&&wasm&&!opts.nothreads&&isThreadingSupported();if(!wasm)return"asm";else if(thr)return"thr";else return"wasm"}libav.target=target;libav.VER="6.5.7.1";libav.CONFIG="webm-vp9";libav.DBG="";libav.factories={};var libavStatics={};libavStatics.i64tof64=function(lo,hi){if(!hi&&lo>=0)return lo;if(hi===-1&&lo<0)return lo;return 
hi*4294967296+lo+(lo<0?4294967296:0)};libavStatics.f64toi64=function(val){return[~~val,Math.floor(val/4294967296)]};libavStatics.i64ToBigInt=function(lo,hi){var dv=new DataView(new ArrayBuffer(8));dv.setInt32(0,lo,true);dv.setInt32(4,hi,true);return dv.getBigInt64(0,true)};libavStatics.bigIntToi64=function(val){var dv=new DataView(new ArrayBuffer(8));dv.setBigInt64(0,val,true);return[dv.getInt32(0,true),dv.getInt32(4,true)]};libavStatics.ff_channel_layout=function(frame){if(frame.channel_layout)return frame.channel_layout;else if(frame.channels&&frame.channels!==1)return(1<>=1}return channels}else{return 1}};function enume(vals,first){if(typeof first===undefined)first=0;var i=first;vals.forEach((function(val){libavStatics[val]=i++}))}libavStatics.AV_TIME_BASE=1e6;libavStatics.AV_OPT_SEARCH_CHILDREN=1;enume(["AVMEDIA_TYPE_UNKNOWN","AVMEDIA_TYPE_VIDEO","AVMEDIA_TYPE_AUDIO","AVMEDIA_TYPE_DATA","AVMEDIA_TYPE_SUBTITLE","AVMEDIA_TYPE_ATTACHMENT"],-1);enume(["AV_SAMPLE_FMT_NONE","AV_SAMPLE_FMT_U8","AV_SAMPLE_FMT_S16","AV_SAMPLE_FMT_S32","AV_SAMPLE_FMT_FLT","AV_SAMPLE_FMT_DBL","AV_SAMPLE_FMT_U8P","AV_SAMPLE_FMT_S16P","AV_SAMPLE_FMT_S32P","AV_SAMPLE_FMT_FLTP","AV_SAMPLE_FMT_DBLP","AV_SAMPLE_FMT_S64","AV_SAMPLE_FMT_S64P","AV_SAMPLE_FMT_NB"],-1);enume(["AV_PIX_FMT_NONE","AV_PIX_FMT_YUV420P","AV_PIX_FMT_YUYV422","AV_PIX_FMT_RGB24","AV_PIX_FMT_BGR24","AV_PIX_FMT_YUV422P","AV_PIX_FMT_YUV444P","AV_PIX_FMT_YUV410P","AV_PIX_FMT_YUV411P","AV_PIX_FMT_GRAY8","AV_PIX_FMT_MONOWHITE","AV_PIX_FMT_MONOBLACK","AV_PIX_FMT_PAL8","AV_PIX_FMT_YUVJ420P","AV_PIX_FMT_YUVJ422P","AV_PIX_FMT_YUVJ444P","AV_PIX_FMT_UYVY422","AV_PIX_FMT_UYYVYY411","AV_PIX_FMT_BGR8","AV_PIX_FMT_BGR4","AV_PIX_FMT_BGR4_BYTE","AV_PIX_FMT_RGB8","AV_PIX_FMT_RGB4","AV_PIX_FMT_RGB4_BYTE","AV_PIX_FMT_NV12","AV_PIX_FMT_NV21","AV_PIX_FMT_ARGB","AV_PIX_FMT_RGBA","AV_PIX_FMT_ABGR","AV_PIX_FMT_BGRA","AV_PIX_FMT_GRAY16BE","AV_PIX_FMT_GRAY16LE","AV_PIX_FMT_YUV440P","AV_PIX_FMT_YUVJ440P","AV_PIX_FMT_YUVA420P","AV_PIX_FMT_RGB48BE","AV_PIX
_FMT_RGB48LE","AV_PIX_FMT_RGB565BE","AV_PIX_FMT_RGB565LE","AV_PIX_FMT_RGB555BE","AV_PIX_FMT_RGB555LE","AV_PIX_FMT_BGR565BE","AV_PIX_FMT_BGR565LE","AV_PIX_FMT_BGR555BE","AV_PIX_FMT_BGR555LE"],-1);libavStatics.AVIO_FLAG_READ=1;libavStatics.AVIO_FLAG_WRITE=2;libavStatics.AVIO_FLAG_READ_WRITE=3;libavStatics.AVIO_FLAG_NONBLOCK=8;libavStatics.AVIO_FLAG_DIRECT=32768;libavStatics.AVFMT_FLAG_NOBUFFER=64;libavStatics.AVFMT_FLAG_FLUSH_PACKETS=512;libavStatics.AVSEEK_FLAG_BACKWARD=1;libavStatics.AVSEEK_FLAG_BYTE=2;libavStatics.AVSEEK_FLAG_ANY=4;libavStatics.AVSEEK_FLAG_FRAME=8;libavStatics.AVDISCARD_NONE=-16;libavStatics.AVDISCARD_DEFAULT=0;libavStatics.AVDISCARD_NONREF=8;libavStatics.AVDISCARD_BIDIR=16;libavStatics.AVDISCARD_NONINTRA=24;libavStatics.AVDISCARD_NONKEY=32;libavStatics.AVDISCARD_ALL=48;libavStatics.AV_LOG_QUIET=-8;libavStatics.AV_LOG_PANIC=0;libavStatics.AV_LOG_FATAL=8;libavStatics.AV_LOG_ERROR=16;libavStatics.AV_LOG_WARNING=24;libavStatics.AV_LOG_INFO=32;libavStatics.AV_LOG_VERBOSE=40;libavStatics.AV_LOG_DEBUG=48;libavStatics.AV_LOG_TRACE=56;libavStatics.AV_PKT_FLAG_KEY=1;libavStatics.AV_PKT_FLAG_CORRUPT=2;libavStatics.AV_PKT_FLAG_DISCARD=4;libavStatics.AV_PKT_FLAG_TRUSTED=8;libavStatics.AV_PKT_FLAG_DISPOSABLE=16;enume(["E2BIG","EPERM","EADDRINUSE","EADDRNOTAVAIL","EAFNOSUPPORT","EAGAIN","EALREADY","EBADF","EBADMSG","EBUSY","ECANCELED","ECHILD","ECONNABORTED","ECONNREFUSED","ECONNRESET","EDEADLOCK","EDESTADDRREQ","EDOM","EDQUOT","EEXIST","EFAULT","EFBIG","EHOSTUNREACH","EIDRM","EILSEQ","EINPROGRESS","EINTR","EINVAL","EIO","EISCONN","EISDIR","ELOOP","EMFILE","EMLINK","EMSGSIZE","EMULTIHOP","ENAMETOOLONG","ENETDOWN","ENETRESET","ENETUNREACH","ENFILE","ENOBUFS","ENODEV","ENOENT"],1);libavStatics.AVERROR_EOF=-541478725;Object.assign(libav,libavStatics);libav.LibAV=function(opts){opts=opts||{};var base=opts.base||libav.base;var t=target(opts);var variant="webm-vp9";if(t==="asm"){variant=opts.variant||libav.variant||"webm-vp9"}var 
toImport=opts.toImport||libav.toImport||base+"/libav-6.5.7.1-"+variant+"."+t+"."+"js";var ret;var mode="direct";if(t==="thr")mode="threads";else if(!nodejs&&!opts.noworker&&typeof Worker!=="undefined")mode="worker";return Promise.all([]).then((function(){if(opts.factory||libav.factory)return opts.factory||libav.factory;if(libav.factories[toImport])return libav.factories[toImport];if(mode==="worker"){}else if(nodejs){return libav.factories[toImport]=require(toImport)}else if(typeof importScripts!=="undefined"){importScripts(toImport);return libav.factories[toImport]=LibAVFactory}else{return new Promise((function(res,rej){var scr=document.createElement("script");scr.src=toImport;scr.addEventListener("load",res);scr.addEventListener("error",rej);scr.async=true;document.body.appendChild(scr)})).then((function(){return libav.factories[toImport]=LibAVFactory}))}})).then((function(factory){if(mode==="worker"){ret={};ret.worker=new Worker(toImport);ret.worker.postMessage({config:{variant:opts.variant||libav.variant,wasmurl:opts.wasmurl||libav.wasmurl}});return new Promise((function(res,rej){ret.on=1;ret.handlers={onready:[function(){res()},null],onwrite:[function(args){if(ret.onwrite)ret.onwrite.apply(ret,args)},null],onread:[function(args){try{var rr=null;if(ret.onread)rr=ret.onread.apply(ret,args);if(rr&&rr.then&&rr.catch){rr.catch((function(ex){ret.ff_reader_dev_send(args[0],null,{error:ex})}))}}catch(ex){ret.ff_reader_dev_send(args[0],null,{error:ex})}},null],onblockread:[function(args){try{var brr=null;if(ret.onblockread)brr=ret.onblockread.apply(ret,args);if(brr&&brr.then&&brr.catch){brr.catch((function(ex){ret.ff_block_reader_dev_send(args[0],args[1],null,{error:ex})}))}}catch(ex){ret.ff_block_reader_dev_send(args[0],args[1],null,{error:ex})}},null]};ret.c=function(){var msg=Array.prototype.slice.call(arguments);var transfer=[];for(var i=0;i + * Copyright (C) + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the 
GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + * + * Also add information on how to contact you by electronic and paper mail. + * + * You should also get your employer (if you work as a programmer) or your + * school, if any, to sign a "copyright disclaimer" for the library, if + * necessary. Here is a sample; alter the names: + * + * Yoyodyne, Inc., hereby disclaims all copyright interest in the + * library `Frob' (a library for tweaking knobs) written by James Random Hacker. + * + * , 1 April 1990 + * Ty Coon, President of Vice + * + * That's all there is to it! + * + * + * --- + * + * ffmpeg oggdec: + * + * Copyright (C) 2005 Michael Ahlberg, Måns Rullgård + * Copyright (C) 2005 Matthieu CASTET, Alex Beregszaszi + * Copyright (C) 2008 Reimar Döffinger + * + * Permission is hereby granted, free of charge, to any person obtaining a + * copy of this software and associated documentation files (the + * "Software"), to deal in the Software without restriction, including + * without limitation the rights to use, copy, modify, merge, publish, + * distribute, sublicense, and/or sell copies of the Software, and to + * permit persons to whom the Software is furnished to do so, subject to + * the following conditions: + * + * The above copyright notice and this permission notice shall be included + * in all copies or substantial portions of the Software. 
+ * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. + * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY + * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, + * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE + * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + * + * + * --- + * + * opus: + * + * Copyright 2001-2011 Xiph.Org, Skype Limited, Octasic, + * Jean-Marc Valin, Timothy B. Terriberry, + * CSIRO, Gregory Maxwell, Mark Borgerding, + * Erik de Castro Lopo + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * - Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * - Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * - Neither the name of Internet Society, IETF or IETF Trust, nor the names of + * specific contributors, may be used to endorse or promote products derived + * from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS + * IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + * PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; + * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, + * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR + * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF + * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + * + * Opus is subject to the royalty-free patent licenses which are specified at: + * + * Xiph.Org Foundation: + * https://datatracker.ietf.org/ipr/1524/ + * + * Microsoft Corporation: + * https://datatracker.ietf.org/ipr/1914/ + * + * Broadcom Corporation: + * https://datatracker.ietf.org/ipr/1526/ + * + * + * --- + * + * libvpx: + * + * Copyright (c) 2010, The WebM Project authors. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * + * * Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in + * the documentation and/or other materials provided with the + * distribution. + * + * * Neither the name of Google, nor the WebM Project, nor the names + * of its contributors may be used to endorse or promote products + * derived from this software without specific prior written + * permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT + * HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + * + * + * --- + * + * emscripten and musl: + * + * Copyright (c) 2010-2024 Emscripten authors, see AUTHORS file. + * Copyright © 2005-2024 Rich Felker, et al. + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to + * deal in the Software without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + * IN THE SOFTWARE. 
+ * + * emcc (Emscripten gcc/clang-like replacement + linker emulating GNU ld) 3.1.70 (b53978ee3f540dc74761eba127aa7f1b8761a125) + * + */ + +var LibAVFactory = (() => { + var _scriptName = typeof document != 'undefined' ? document.currentScript?.src : undefined; + if (typeof __filename != 'undefined') _scriptName = _scriptName || __filename; + return ( +function(moduleArg = {}) { + var moduleRtn; + +var Module=moduleArg;var readyPromiseResolve,readyPromiseReject;var readyPromise=new Promise((resolve,reject)=>{readyPromiseResolve=resolve;readyPromiseReject=reject});var ENVIRONMENT_IS_WEB=typeof window=="object";var ENVIRONMENT_IS_WORKER=typeof importScripts=="function";var ENVIRONMENT_IS_NODE=typeof process=="object"&&typeof process.versions=="object"&&typeof process.versions.node=="string"&&process.type!="renderer";if(ENVIRONMENT_IS_NODE){}if(typeof _scriptName==="undefined"){if(typeof LibAV==="object"&&LibAV&&LibAV.base)_scriptName=LibAV.base+"/libav-6.5.7.1-webm-vp9.wasm.js";else if(typeof self==="object"&&self&&self.location)_scriptName=self.location.href}Module.locateFile=function(path,prefix){if(path.lastIndexOf(".wasm")===path.length-5&&path.indexOf("libav-")!==-1){if(Module.wasmurl)return Module.wasmurl;if(Module.variant)return prefix+"libav-6.5.7.1-"+Module.variant+".wasm.wasm"}return prefix+path};var moduleOverrides=Object.assign({},Module);var arguments_=[];var thisProgram="./this.program";var quit_=(status,toThrow)=>{throw toThrow};var scriptDirectory="";function locateFile(path){if(Module["locateFile"]){return Module["locateFile"](path,scriptDirectory)}return scriptDirectory+path}var readAsync,readBinary;if(ENVIRONMENT_IS_NODE){var fs=require("fs");var nodePath=require("path");scriptDirectory=__dirname+"/";readBinary=filename=>{filename=isFileURI(filename)?new URL(filename):nodePath.normalize(filename);var ret=fs.readFileSync(filename);return ret};readAsync=(filename,binary=true)=>{filename=isFileURI(filename)?new 
URL(filename):nodePath.normalize(filename);return new Promise((resolve,reject)=>{fs.readFile(filename,binary?undefined:"utf8",(err,data)=>{if(err)reject(err);else resolve(binary?data.buffer:data)})})};if(!Module["thisProgram"]&&process.argv.length>1){thisProgram=process.argv[1].replace(/\\/g,"/")}arguments_=process.argv.slice(2);quit_=(status,toThrow)=>{process.exitCode=status;throw toThrow}}else if(ENVIRONMENT_IS_WEB||ENVIRONMENT_IS_WORKER){if(ENVIRONMENT_IS_WORKER){scriptDirectory=self.location.href}else if(typeof document!="undefined"&&document.currentScript){scriptDirectory=document.currentScript.src}if(_scriptName){scriptDirectory=_scriptName}if(scriptDirectory.startsWith("blob:")){scriptDirectory=""}else{scriptDirectory=scriptDirectory.substr(0,scriptDirectory.replace(/[?#].*/,"").lastIndexOf("/")+1)}{if(ENVIRONMENT_IS_WORKER){readBinary=url=>{var xhr=new XMLHttpRequest;xhr.open("GET",url,false);xhr.responseType="arraybuffer";xhr.send(null);return new Uint8Array(xhr.response)}}readAsync=url=>{if(isFileURI(url)){return new Promise((resolve,reject)=>{var xhr=new XMLHttpRequest;xhr.open("GET",url,true);xhr.responseType="arraybuffer";xhr.onload=()=>{if(xhr.status==200||xhr.status==0&&xhr.response){resolve(xhr.response);return}reject(xhr.status)};xhr.onerror=reject;xhr.send(null)})}return fetch(url,{credentials:"same-origin"}).then(response=>{if(response.ok){return response.arrayBuffer()}return Promise.reject(new Error(response.status+" : "+response.url))})}}}else{}var out=Module["print"]||console.log.bind(console);var err=Module["printErr"]||console.error.bind(console);Object.assign(Module,moduleOverrides);moduleOverrides=null;if(Module["arguments"])arguments_=Module["arguments"];if(Module["thisProgram"])thisProgram=Module["thisProgram"];var wasmBinary=Module["wasmBinary"];var wasmMemory;var ABORT=false;var EXITSTATUS;var HEAP8,HEAPU8,HEAP16,HEAPU16,HEAP32,HEAPU32,HEAPF32,HEAPF64;function updateMemoryViews(){var b=wasmMemory.buffer;Module["HEAP8"]=HEAP8=new 
Int8Array(b);Module["HEAP16"]=HEAP16=new Int16Array(b);Module["HEAPU8"]=HEAPU8=new Uint8Array(b);Module["HEAPU16"]=HEAPU16=new Uint16Array(b);Module["HEAP32"]=HEAP32=new Int32Array(b);Module["HEAPU32"]=HEAPU32=new Uint32Array(b);Module["HEAPF32"]=HEAPF32=new Float32Array(b);Module["HEAPF64"]=HEAPF64=new Float64Array(b)}var __ATPRERUN__=[];var __ATINIT__=[];var __ATPOSTRUN__=[];var runtimeInitialized=false;function preRun(){var preRuns=Module["preRun"];if(preRuns){if(typeof preRuns=="function")preRuns=[preRuns];preRuns.forEach(addOnPreRun)}callRuntimeCallbacks(__ATPRERUN__)}function initRuntime(){runtimeInitialized=true;if(!Module["noFSInit"]&&!FS.initialized)FS.init();FS.ignorePermissions=false;TTY.init();callRuntimeCallbacks(__ATINIT__)}function postRun(){var postRuns=Module["postRun"];if(postRuns){if(typeof postRuns=="function")postRuns=[postRuns];postRuns.forEach(addOnPostRun)}callRuntimeCallbacks(__ATPOSTRUN__)}function addOnPreRun(cb){__ATPRERUN__.unshift(cb)}function addOnInit(cb){__ATINIT__.unshift(cb)}function addOnPostRun(cb){__ATPOSTRUN__.unshift(cb)}var runDependencies=0;var runDependencyWatcher=null;var dependenciesFulfilled=null;function getUniqueRunDependency(id){return id}function addRunDependency(id){runDependencies++;Module["monitorRunDependencies"]?.(runDependencies)}function removeRunDependency(id){runDependencies--;Module["monitorRunDependencies"]?.(runDependencies);if(runDependencies==0){if(runDependencyWatcher!==null){clearInterval(runDependencyWatcher);runDependencyWatcher=null}if(dependenciesFulfilled){var callback=dependenciesFulfilled;dependenciesFulfilled=null;callback()}}}function abort(what){Module["onAbort"]?.(what);what="Aborted("+what+")";err(what);ABORT=true;what+=". 
Build with -sASSERTIONS for more info.";var e=new WebAssembly.RuntimeError(what);readyPromiseReject(e);throw e}var dataURIPrefix="data:application/octet-stream;base64,";var isDataURI=filename=>filename.startsWith(dataURIPrefix);var isFileURI=filename=>filename.startsWith("file://");function findWasmBinary(){var f="libav-6.5.7.1-webm-vp9.wasm.wasm";if(!isDataURI(f)){return locateFile(f)}return f}var wasmBinaryFile;function getBinarySync(file){if(file==wasmBinaryFile&&wasmBinary){return new Uint8Array(wasmBinary)}if(readBinary){return readBinary(file)}throw"both async and sync fetching of the wasm failed"}function getBinaryPromise(binaryFile){if(!wasmBinary){return readAsync(binaryFile).then(response=>new Uint8Array(response),()=>getBinarySync(binaryFile))}return Promise.resolve().then(()=>getBinarySync(binaryFile))}function instantiateArrayBuffer(binaryFile,imports,receiver){return getBinaryPromise(binaryFile).then(binary=>WebAssembly.instantiate(binary,imports)).then(receiver,reason=>{err(`failed to asynchronously prepare wasm: ${reason}`);abort(reason)})}function instantiateAsync(binary,binaryFile,imports,callback){if(!binary&&typeof WebAssembly.instantiateStreaming=="function"&&!isDataURI(binaryFile)&&!isFileURI(binaryFile)&&!ENVIRONMENT_IS_NODE&&typeof fetch=="function"){return fetch(binaryFile,{credentials:"same-origin"}).then(response=>{var result=WebAssembly.instantiateStreaming(response,imports);return result.then(callback,function(reason){err(`wasm streaming compile failed: ${reason}`);err("falling back to ArrayBuffer instantiation");return instantiateArrayBuffer(binaryFile,imports,callback)})})}return instantiateArrayBuffer(binaryFile,imports,callback)}function getWasmImports(){return{a:wasmImports}}function createWasm(){var info=getWasmImports();function 
receiveInstance(instance,module){wasmExports=instance.exports;wasmExports=Asyncify.instrumentWasmExports(wasmExports);wasmMemory=wasmExports["_"];updateMemoryViews();wasmTable=wasmExports["Hf"];addOnInit(wasmExports["$"]);removeRunDependency("wasm-instantiate");return wasmExports}addRunDependency("wasm-instantiate");function receiveInstantiationResult(result){receiveInstance(result["instance"])}if(Module["instantiateWasm"]){try{return Module["instantiateWasm"](info,receiveInstance)}catch(e){err(`Module.instantiateWasm callback failed with error: ${e}`);readyPromiseReject(e)}}wasmBinaryFile??=findWasmBinary();instantiateAsync(wasmBinary,wasmBinaryFile,info,receiveInstantiationResult).catch(readyPromiseReject);return{}}var tempDouble;var tempI64;function writeoutEmscriptenOOM(){throw new Error("Out of memory")}function libavjs_wait_reader(fd){return Asyncify.handleAsync(function(){return new Promise(function(res){var name=Module.fdName(fd);var waiters=Module.ff_reader_dev_waiters[name];if(!waiters)waiters=Module.ff_reader_dev_waiters[name]=[];waiters.push(res)})})}function ExitStatus(status){this.name="ExitStatus";this.message=`Program terminated with exit(${status})`;this.status=status}var callRuntimeCallbacks=callbacks=>{callbacks.forEach(f=>f(Module))};var noExitRuntime=Module["noExitRuntime"]||true;var stackRestore=val=>__emscripten_stack_restore(val);var stackSave=()=>_emscripten_stack_get_current();var PATH={isAbs:path=>path.charAt(0)==="/",splitPath:filename=>{var splitPathRe=/^(\/?|)([\s\S]*?)((?:\.{1,2}|[^\/]+?|)(\.[^.\/]*|))(?:[\/]*)$/;return splitPathRe.exec(filename).slice(1)},normalizeArray:(parts,allowAboveRoot)=>{var up=0;for(var i=parts.length-1;i>=0;i--){var last=parts[i];if(last==="."){parts.splice(i,1)}else if(last===".."){parts.splice(i,1);up++}else if(up){parts.splice(i,1);up--}}if(allowAboveRoot){for(;up;up--){parts.unshift("..")}}return parts},normalize:path=>{var 
isAbsolute=PATH.isAbs(path),trailingSlash=path.substr(-1)==="/";path=PATH.normalizeArray(path.split("/").filter(p=>!!p),!isAbsolute).join("/");if(!path&&!isAbsolute){path="."}if(path&&trailingSlash){path+="/"}return(isAbsolute?"/":"")+path},dirname:path=>{var result=PATH.splitPath(path),root=result[0],dir=result[1];if(!root&&!dir){return"."}if(dir){dir=dir.substr(0,dir.length-1)}return root+dir},basename:path=>{if(path==="/")return"/";path=PATH.normalize(path);path=path.replace(/\/$/,"");var lastSlash=path.lastIndexOf("/");if(lastSlash===-1)return path;return path.substr(lastSlash+1)},join:(...paths)=>PATH.normalize(paths.join("/")),join2:(l,r)=>PATH.normalize(l+"/"+r)};var initRandomFill=()=>{if(typeof crypto=="object"&&typeof crypto["getRandomValues"]=="function"){return view=>crypto.getRandomValues(view)}else if(ENVIRONMENT_IS_NODE){try{var crypto_module=require("crypto");var randomFillSync=crypto_module["randomFillSync"];if(randomFillSync){return view=>crypto_module["randomFillSync"](view)}var randomBytes=crypto_module["randomBytes"];return view=>(view.set(randomBytes(view.byteLength)),view)}catch(e){}}abort("initRandomDevice")};var randomFill=view=>(randomFill=initRandomFill())(view);var PATH_FS={resolve:(...args)=>{var resolvedPath="",resolvedAbsolute=false;for(var i=args.length-1;i>=-1&&!resolvedAbsolute;i--){var path=i>=0?args[i]:FS.cwd();if(typeof path!="string"){throw new TypeError("Arguments to path.resolve must be strings")}else if(!path){return""}resolvedPath=path+"/"+resolvedPath;resolvedAbsolute=PATH.isAbs(path)}resolvedPath=PATH.normalizeArray(resolvedPath.split("/").filter(p=>!!p),!resolvedAbsolute).join("/");return(resolvedAbsolute?"/":"")+resolvedPath||"."},relative:(from,to)=>{from=PATH_FS.resolve(from).substr(1);to=PATH_FS.resolve(to).substr(1);function trim(arr){var start=0;for(;start=0;end--){if(arr[end]!=="")break}if(start>end)return[];return arr.slice(start,end-start+1)}var fromParts=trim(from.split("/"));var toParts=trim(to.split("/"));var 
length=Math.min(fromParts.length,toParts.length);var samePartsLength=length;for(var i=0;i{var endIdx=idx+maxBytesToRead;var endPtr=idx;while(heapOrArray[endPtr]&&!(endPtr>=endIdx))++endPtr;if(endPtr-idx>16&&heapOrArray.buffer&&UTF8Decoder){return UTF8Decoder.decode(heapOrArray.subarray(idx,endPtr))}var str="";while(idx>10,56320|ch&1023)}}return str};var FS_stdin_getChar_buffer=[];var lengthBytesUTF8=str=>{var len=0;for(var i=0;i=55296&&c<=57343){len+=4;++i}else{len+=3}}return len};var stringToUTF8Array=(str,heap,outIdx,maxBytesToWrite)=>{if(!(maxBytesToWrite>0))return 0;var startIdx=outIdx;var endIdx=outIdx+maxBytesToWrite-1;for(var i=0;i=55296&&u<=57343){var u1=str.charCodeAt(++i);u=65536+((u&1023)<<10)|u1&1023}if(u<=127){if(outIdx>=endIdx)break;heap[outIdx++]=u}else if(u<=2047){if(outIdx+1>=endIdx)break;heap[outIdx++]=192|u>>6;heap[outIdx++]=128|u&63}else if(u<=65535){if(outIdx+2>=endIdx)break;heap[outIdx++]=224|u>>12;heap[outIdx++]=128|u>>6&63;heap[outIdx++]=128|u&63}else{if(outIdx+3>=endIdx)break;heap[outIdx++]=240|u>>18;heap[outIdx++]=128|u>>12&63;heap[outIdx++]=128|u>>6&63;heap[outIdx++]=128|u&63}}heap[outIdx]=0;return outIdx-startIdx};function intArrayFromString(stringy,dontAddNull,length){var len=length>0?length:lengthBytesUTF8(stringy)+1;var u8array=new Array(len);var numBytesWritten=stringToUTF8Array(stringy,u8array,0,u8array.length);if(dontAddNull)u8array.length=numBytesWritten;return u8array}var FS_stdin_getChar=()=>{if(!FS_stdin_getChar_buffer.length){var result=null;if(ENVIRONMENT_IS_NODE){var BUFSIZE=256;var buf=Buffer.alloc(BUFSIZE);var bytesRead=0;var fd=process.stdin.fd;try{bytesRead=fs.readSync(fd,buf,0,BUFSIZE)}catch(e){if(e.toString().includes("EOF"))bytesRead=0;else throw e}if(bytesRead>0){result=buf.slice(0,bytesRead).toString("utf-8")}}else if(typeof window!="undefined"&&typeof window.prompt=="function"){result=window.prompt("Input: ");if(result!==null){result+="\n"}}else{}if(!result){return 
null}FS_stdin_getChar_buffer=intArrayFromString(result,true)}return FS_stdin_getChar_buffer.shift()};var TTY={ttys:[],init(){},shutdown(){},register(dev,ops){TTY.ttys[dev]={input:[],output:[],ops};FS.registerDevice(dev,TTY.stream_ops)},stream_ops:{open(stream){var tty=TTY.ttys[stream.node.rdev];if(!tty){throw new FS.ErrnoError(43)}stream.tty=tty;stream.seekable=false},close(stream){stream.tty.ops.fsync(stream.tty)},fsync(stream){stream.tty.ops.fsync(stream.tty)},read(stream,buffer,offset,length,pos){if(!stream.tty||!stream.tty.ops.get_char){throw new FS.ErrnoError(60)}var bytesRead=0;for(var i=0;i0){out(UTF8ArrayToString(tty.output));tty.output=[]}},ioctl_tcgets(tty){return{c_iflag:25856,c_oflag:5,c_cflag:191,c_lflag:35387,c_cc:[3,28,127,21,4,0,1,0,17,19,26,0,18,15,23,22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]}},ioctl_tcsets(tty,optional_actions,data){return 0},ioctl_tiocgwinsz(tty){return[24,80]}},default_tty1_ops:{put_char(tty,val){if(val===null||val===10){err(UTF8ArrayToString(tty.output));tty.output=[]}else{if(val!=0)tty.output.push(val)}},fsync(tty){if(tty.output&&tty.output.length>0){err(UTF8ArrayToString(tty.output));tty.output=[]}}}};var alignMemory=(size,alignment)=>Math.ceil(size/alignment)*alignment;var mmapAlloc=size=>{abort()};var MEMFS={ops_table:null,mount(mount){return MEMFS.createNode(null,"/",16384|511,0)},createNode(parent,name,mode,dev){if(FS.isBlkdev(mode)||FS.isFIFO(mode)){throw new 
FS.ErrnoError(63)}MEMFS.ops_table||={dir:{node:{getattr:MEMFS.node_ops.getattr,setattr:MEMFS.node_ops.setattr,lookup:MEMFS.node_ops.lookup,mknod:MEMFS.node_ops.mknod,rename:MEMFS.node_ops.rename,unlink:MEMFS.node_ops.unlink,rmdir:MEMFS.node_ops.rmdir,readdir:MEMFS.node_ops.readdir,symlink:MEMFS.node_ops.symlink},stream:{llseek:MEMFS.stream_ops.llseek}},file:{node:{getattr:MEMFS.node_ops.getattr,setattr:MEMFS.node_ops.setattr},stream:{llseek:MEMFS.stream_ops.llseek,read:MEMFS.stream_ops.read,write:MEMFS.stream_ops.write,allocate:MEMFS.stream_ops.allocate,mmap:MEMFS.stream_ops.mmap,msync:MEMFS.stream_ops.msync}},link:{node:{getattr:MEMFS.node_ops.getattr,setattr:MEMFS.node_ops.setattr,readlink:MEMFS.node_ops.readlink},stream:{}},chrdev:{node:{getattr:MEMFS.node_ops.getattr,setattr:MEMFS.node_ops.setattr},stream:FS.chrdev_stream_ops}};var node=FS.createNode(parent,name,mode,dev);if(FS.isDir(node.mode)){node.node_ops=MEMFS.ops_table.dir.node;node.stream_ops=MEMFS.ops_table.dir.stream;node.contents={}}else if(FS.isFile(node.mode)){node.node_ops=MEMFS.ops_table.file.node;node.stream_ops=MEMFS.ops_table.file.stream;node.usedBytes=0;node.contents=null}else if(FS.isLink(node.mode)){node.node_ops=MEMFS.ops_table.link.node;node.stream_ops=MEMFS.ops_table.link.stream}else if(FS.isChrdev(node.mode)){node.node_ops=MEMFS.ops_table.chrdev.node;node.stream_ops=MEMFS.ops_table.chrdev.stream}node.timestamp=Date.now();if(parent){parent.contents[name]=node;parent.timestamp=node.timestamp}return node},getFileDataAsTypedArray(node){if(!node.contents)return new Uint8Array(0);if(node.contents.subarray)return node.contents.subarray(0,node.usedBytes);return new Uint8Array(node.contents)},expandFileStorage(node,newCapacity){var prevCapacity=node.contents?node.contents.length:0;if(prevCapacity>=newCapacity)return;var CAPACITY_DOUBLING_MAX=1024*1024;newCapacity=Math.max(newCapacity,prevCapacity*(prevCapacity>>0);if(prevCapacity!=0)newCapacity=Math.max(newCapacity,256);var 
oldContents=node.contents;node.contents=new Uint8Array(newCapacity);if(node.usedBytes>0)node.contents.set(oldContents.subarray(0,node.usedBytes),0)},resizeFileStorage(node,newSize){if(node.usedBytes==newSize)return;if(newSize==0){node.contents=null;node.usedBytes=0}else{var oldContents=node.contents;node.contents=new Uint8Array(newSize);if(oldContents){node.contents.set(oldContents.subarray(0,Math.min(newSize,node.usedBytes)))}node.usedBytes=newSize}},node_ops:{getattr(node){var attr={};attr.dev=FS.isChrdev(node.mode)?node.id:1;attr.ino=node.id;attr.mode=node.mode;attr.nlink=1;attr.uid=0;attr.gid=0;attr.rdev=node.rdev;if(FS.isDir(node.mode)){attr.size=4096}else if(FS.isFile(node.mode)){attr.size=node.usedBytes}else if(FS.isLink(node.mode)){attr.size=node.link.length}else{attr.size=0}attr.atime=new Date(node.timestamp);attr.mtime=new Date(node.timestamp);attr.ctime=new Date(node.timestamp);attr.blksize=4096;attr.blocks=Math.ceil(attr.size/attr.blksize);return attr},setattr(node,attr){if(attr.mode!==undefined){node.mode=attr.mode}if(attr.timestamp!==undefined){node.timestamp=attr.timestamp}if(attr.size!==undefined){MEMFS.resizeFileStorage(node,attr.size)}},lookup(parent,name){throw FS.genericErrors[44]},mknod(parent,name,mode,dev){return MEMFS.createNode(parent,name,mode,dev)},rename(old_node,new_dir,new_name){if(FS.isDir(old_node.mode)){var new_node;try{new_node=FS.lookupNode(new_dir,new_name)}catch(e){}if(new_node){for(var i in new_node.contents){throw new FS.ErrnoError(55)}}}delete old_node.parent.contents[old_node.name];old_node.parent.timestamp=Date.now();old_node.name=new_name;new_dir.contents[new_name]=old_node;new_dir.timestamp=old_node.parent.timestamp},unlink(parent,name){delete parent.contents[name];parent.timestamp=Date.now()},rmdir(parent,name){var node=FS.lookupNode(parent,name);for(var i in node.contents){throw new FS.ErrnoError(55)}delete parent.contents[name];parent.timestamp=Date.now()},readdir(node){var entries=[".",".."];for(var key of 
Object.keys(node.contents)){entries.push(key)}return entries},symlink(parent,newname,oldpath){var node=MEMFS.createNode(parent,newname,511|40960,0);node.link=oldpath;return node},readlink(node){if(!FS.isLink(node.mode)){throw new FS.ErrnoError(28)}return node.link}},stream_ops:{read(stream,buffer,offset,length,position){var contents=stream.node.contents;if(position>=stream.node.usedBytes)return 0;var size=Math.min(stream.node.usedBytes-position,length);if(size>8&&contents.subarray){buffer.set(contents.subarray(position,position+size),offset)}else{for(var i=0;i0||position+length{var dep=!noRunDep?getUniqueRunDependency(`al ${url}`):"";readAsync(url).then(arrayBuffer=>{onload(new Uint8Array(arrayBuffer));if(dep)removeRunDependency(dep)},err=>{if(onerror){onerror()}else{throw`Loading data file "${url}" failed.`}});if(dep)addRunDependency(dep)};var FS_createDataFile=(parent,name,fileData,canRead,canWrite,canOwn)=>{FS.createDataFile(parent,name,fileData,canRead,canWrite,canOwn)};var preloadPlugins=Module["preloadPlugins"]||[];var FS_handledByPreloadPlugin=(byteArray,fullname,finish,onerror)=>{if(typeof Browser!="undefined")Browser.init();var handled=false;preloadPlugins.forEach(plugin=>{if(handled)return;if(plugin["canHandle"](fullname)){plugin["handle"](byteArray,fullname,finish,onerror);handled=true}});return handled};var FS_createPreloadedFile=(parent,name,url,canRead,canWrite,onload,onerror,dontCreateFile,canOwn,preFinish)=>{var fullname=name?PATH_FS.resolve(PATH.join2(parent,name)):parent;var dep=getUniqueRunDependency(`cp ${fullname}`);function processData(byteArray){function finish(byteArray){preFinish?.();if(!dontCreateFile){FS_createDataFile(parent,name,byteArray,canRead,canWrite,canOwn)}onload?.();removeRunDependency(dep)}if(FS_handledByPreloadPlugin(byteArray,fullname,finish,()=>{onerror?.();removeRunDependency(dep)})){return}finish(byteArray)}addRunDependency(dep);if(typeof url=="string"){asyncLoad(url,processData,onerror)}else{processData(url)}};var 
FS_modeStringToFlags=str=>{var flagModes={r:0,"r+":2,w:512|64|1,"w+":512|64|2,a:1024|64|1,"a+":1024|64|2};var flags=flagModes[str];if(typeof flags=="undefined"){throw new Error(`Unknown file open mode: ${str}`)}return flags};var FS_getMode=(canRead,canWrite)=>{var mode=0;if(canRead)mode|=292|73;if(canWrite)mode|=146;return mode};var FS={root:null,mounts:[],devices:{},streams:[],nextInode:1,nameTable:null,currentPath:"/",initialized:false,ignorePermissions:true,ErrnoError:class{constructor(errno){this.name="ErrnoError";this.errno=errno}},genericErrors:{},filesystems:null,syncFSRequests:0,readFiles:{},FSStream:class{constructor(){this.shared={}}get object(){return this.node}set object(val){this.node=val}get isRead(){return(this.flags&2097155)!==1}get isWrite(){return(this.flags&2097155)!==0}get isAppend(){return this.flags&1024}get flags(){return this.shared.flags}set flags(val){this.shared.flags=val}get position(){return this.shared.position}set position(val){this.shared.position=val}},FSNode:class{constructor(parent,name,mode,rdev){if(!parent){parent=this}this.parent=parent;this.mount=parent.mount;this.mounted=null;this.id=FS.nextInode++;this.name=name;this.mode=mode;this.node_ops={};this.stream_ops={};this.rdev=rdev;this.readMode=292|73;this.writeMode=146}get read(){return(this.mode&this.readMode)===this.readMode}set read(val){val?this.mode|=this.readMode:this.mode&=~this.readMode}get write(){return(this.mode&this.writeMode)===this.writeMode}set write(val){val?this.mode|=this.writeMode:this.mode&=~this.writeMode}get isFolder(){return FS.isDir(this.mode)}get isDevice(){return FS.isChrdev(this.mode)}},lookupPath(path,opts={}){path=PATH_FS.resolve(path);if(!path)return{path:"",node:null};var defaults={follow_mount:true,recurse_count:0};opts=Object.assign(defaults,opts);if(opts.recurse_count>8){throw new FS.ErrnoError(32)}var parts=path.split("/").filter(p=>!!p);var current=FS.root;var current_path="/";for(var i=0;i40){throw new 
FS.ErrnoError(32)}}}}return{path:current_path,node:current}},getPath(node){var path;while(true){if(FS.isRoot(node)){var mount=node.mount.mountpoint;if(!path)return mount;return mount[mount.length-1]!=="/"?`${mount}/${path}`:mount+path}path=path?`${node.name}/${path}`:node.name;node=node.parent}},hashName(parentid,name){var hash=0;for(var i=0;i>>0)%FS.nameTable.length},hashAddNode(node){var hash=FS.hashName(node.parent.id,node.name);node.name_next=FS.nameTable[hash];FS.nameTable[hash]=node},hashRemoveNode(node){var hash=FS.hashName(node.parent.id,node.name);if(FS.nameTable[hash]===node){FS.nameTable[hash]=node.name_next}else{var current=FS.nameTable[hash];while(current){if(current.name_next===node){current.name_next=node.name_next;break}current=current.name_next}}},lookupNode(parent,name){var errCode=FS.mayLookup(parent);if(errCode){throw new FS.ErrnoError(errCode)}var hash=FS.hashName(parent.id,name);for(var node=FS.nameTable[hash];node;node=node.name_next){var nodeName=node.name;if(node.parent.id===parent.id&&nodeName===name){return node}}return FS.lookup(parent,name)},createNode(parent,name,mode,rdev){var node=new FS.FSNode(parent,name,mode,rdev);FS.hashAddNode(node);return node},destroyNode(node){FS.hashRemoveNode(node)},isRoot(node){return node===node.parent},isMountpoint(node){return!!node.mounted},isFile(mode){return(mode&61440)===32768},isDir(mode){return(mode&61440)===16384},isLink(mode){return(mode&61440)===40960},isChrdev(mode){return(mode&61440)===8192},isBlkdev(mode){return(mode&61440)===24576},isFIFO(mode){return(mode&61440)===4096},isSocket(mode){return(mode&49152)===49152},flagsToPermissionString(flag){var perms=["r","w","rw"][flag&3];if(flag&512){perms+="w"}return perms},nodePermissions(node,perms){if(FS.ignorePermissions){return 0}if(perms.includes("r")&&!(node.mode&292)){return 2}else if(perms.includes("w")&&!(node.mode&146)){return 2}else if(perms.includes("x")&&!(node.mode&73)){return 2}return 0},mayLookup(dir){if(!FS.isDir(dir.mode))return 
54;var errCode=FS.nodePermissions(dir,"x");if(errCode)return errCode;if(!dir.node_ops.lookup)return 2;return 0},mayCreate(dir,name){try{var node=FS.lookupNode(dir,name);return 20}catch(e){}return FS.nodePermissions(dir,"wx")},mayDelete(dir,name,isdir){var node;try{node=FS.lookupNode(dir,name)}catch(e){return e.errno}var errCode=FS.nodePermissions(dir,"wx");if(errCode){return errCode}if(isdir){if(!FS.isDir(node.mode)){return 54}if(FS.isRoot(node)||FS.getPath(node)===FS.cwd()){return 10}}else{if(FS.isDir(node.mode)){return 31}}return 0},mayOpen(node,flags){if(!node){return 44}if(FS.isLink(node.mode)){return 32}else if(FS.isDir(node.mode)){if(FS.flagsToPermissionString(flags)!=="r"||flags&512){return 31}}return FS.nodePermissions(node,FS.flagsToPermissionString(flags))},MAX_OPEN_FDS:4096,nextfd(){for(var fd=0;fd<=FS.MAX_OPEN_FDS;fd++){if(!FS.streams[fd]){return fd}}throw new FS.ErrnoError(33)},getStreamChecked(fd){var stream=FS.getStream(fd);if(!stream){throw new FS.ErrnoError(8)}return stream},getStream:fd=>FS.streams[fd],createStream(stream,fd=-1){stream=Object.assign(new FS.FSStream,stream);if(fd==-1){fd=FS.nextfd()}stream.fd=fd;FS.streams[fd]=stream;return stream},closeStream(fd){FS.streams[fd]=null},dupStream(origStream,fd=-1){var stream=FS.createStream(origStream,fd);stream.stream_ops?.dup?.(stream);return stream},chrdev_stream_ops:{open(stream){var device=FS.getDevice(stream.node.rdev);stream.stream_ops=device.stream_ops;stream.stream_ops.open?.(stream)},llseek(){throw new FS.ErrnoError(70)}},major:dev=>dev>>8,minor:dev=>dev&255,makedev:(ma,mi)=>ma<<8|mi,registerDevice(dev,ops){FS.devices[dev]={stream_ops:ops}},getDevice:dev=>FS.devices[dev],getMounts(mount){var mounts=[];var check=[mount];while(check.length){var m=check.pop();mounts.push(m);check.push(...m.mounts)}return mounts},syncfs(populate,callback){if(typeof populate=="function"){callback=populate;populate=false}FS.syncFSRequests++;if(FS.syncFSRequests>1){err(`warning: ${FS.syncFSRequests} FS.syncfs 
operations in flight at once, probably just doing extra work`)}var mounts=FS.getMounts(FS.root.mount);var completed=0;function doCallback(errCode){FS.syncFSRequests--;return callback(errCode)}function done(errCode){if(errCode){if(!done.errored){done.errored=true;return doCallback(errCode)}return}if(++completed>=mounts.length){doCallback(null)}}mounts.forEach(mount=>{if(!mount.type.syncfs){return done(null)}mount.type.syncfs(mount,populate,done)})},mount(type,opts,mountpoint){var root=mountpoint==="/";var pseudo=!mountpoint;var node;if(root&&FS.root){throw new FS.ErrnoError(10)}else if(!root&&!pseudo){var lookup=FS.lookupPath(mountpoint,{follow_mount:false});mountpoint=lookup.path;node=lookup.node;if(FS.isMountpoint(node)){throw new FS.ErrnoError(10)}if(!FS.isDir(node.mode)){throw new FS.ErrnoError(54)}}var mount={type,opts,mountpoint,mounts:[]};var mountRoot=type.mount(mount);mountRoot.mount=mount;mount.root=mountRoot;if(root){FS.root=mountRoot}else if(node){node.mounted=mount;if(node.mount){node.mount.mounts.push(mount)}}return mountRoot},unmount(mountpoint){var lookup=FS.lookupPath(mountpoint,{follow_mount:false});if(!FS.isMountpoint(lookup.node)){throw new FS.ErrnoError(28)}var node=lookup.node;var mount=node.mounted;var mounts=FS.getMounts(mount);Object.keys(FS.nameTable).forEach(hash=>{var current=FS.nameTable[hash];while(current){var next=current.name_next;if(mounts.includes(current.mount)){FS.destroyNode(current)}current=next}});node.mounted=null;var idx=node.mount.mounts.indexOf(mount);node.mount.mounts.splice(idx,1)},lookup(parent,name){return parent.node_ops.lookup(parent,name)},mknod(path,mode,dev){var lookup=FS.lookupPath(path,{parent:true});var parent=lookup.node;var name=PATH.basename(path);if(!name||name==="."||name===".."){throw new FS.ErrnoError(28)}var errCode=FS.mayCreate(parent,name);if(errCode){throw new FS.ErrnoError(errCode)}if(!parent.node_ops.mknod){throw new FS.ErrnoError(63)}return 
parent.node_ops.mknod(parent,name,mode,dev)},create(path,mode){mode=mode!==undefined?mode:438;mode&=4095;mode|=32768;return FS.mknod(path,mode,0)},mkdir(path,mode){mode=mode!==undefined?mode:511;mode&=511|512;mode|=16384;return FS.mknod(path,mode,0)},mkdirTree(path,mode){var dirs=path.split("/");var d="";for(var i=0;iFS.currentPath,chdir(path){var lookup=FS.lookupPath(path,{follow:true});if(lookup.node===null){throw new FS.ErrnoError(44)}if(!FS.isDir(lookup.node.mode)){throw new FS.ErrnoError(54)}var errCode=FS.nodePermissions(lookup.node,"x");if(errCode){throw new FS.ErrnoError(errCode)}FS.currentPath=lookup.path},createDefaultDirectories(){FS.mkdir("/tmp");FS.mkdir("/home");FS.mkdir("/home/web_user")},createDefaultDevices(){FS.mkdir("/dev");FS.registerDevice(FS.makedev(1,3),{read:()=>0,write:(stream,buffer,offset,length,pos)=>length});FS.mkdev("/dev/null",FS.makedev(1,3));TTY.register(FS.makedev(5,0),TTY.default_tty_ops);TTY.register(FS.makedev(6,0),TTY.default_tty1_ops);FS.mkdev("/dev/tty",FS.makedev(5,0));FS.mkdev("/dev/tty1",FS.makedev(6,0));var randomBuffer=new Uint8Array(1024),randomLeft=0;var randomByte=()=>{if(randomLeft===0){randomLeft=randomFill(randomBuffer).byteLength}return randomBuffer[--randomLeft]};FS.createDevice("/dev","random",randomByte);FS.createDevice("/dev","urandom",randomByte);FS.mkdir("/dev/shm");FS.mkdir("/dev/shm/tmp")},createSpecialDirectories(){FS.mkdir("/proc");var proc_self=FS.mkdir("/proc/self");FS.mkdir("/proc/self/fd");FS.mount({mount(){var node=FS.createNode(proc_self,"fd",16384|511,73);node.node_ops={lookup(parent,name){var fd=+name;var stream=FS.getStreamChecked(fd);var ret={parent:null,mount:{mountpoint:"fake"},node_ops:{readlink:()=>stream.path}};ret.parent=ret;return ret}};return 
node}},{},"/proc/self/fd")},createStandardStreams(input,output,error){if(input){FS.createDevice("/dev","stdin",input)}else{FS.symlink("/dev/tty","/dev/stdin")}if(output){FS.createDevice("/dev","stdout",null,output)}else{FS.symlink("/dev/tty","/dev/stdout")}if(error){FS.createDevice("/dev","stderr",null,error)}else{FS.symlink("/dev/tty1","/dev/stderr")}var stdin=FS.open("/dev/stdin",0);var stdout=FS.open("/dev/stdout",1);var stderr=FS.open("/dev/stderr",1)},staticInit(){[44].forEach(code=>{FS.genericErrors[code]=new FS.ErrnoError(code);FS.genericErrors[code].stack=""});FS.nameTable=new Array(4096);FS.mount(MEMFS,{},"/");FS.createDefaultDirectories();FS.createDefaultDevices();FS.createSpecialDirectories();FS.filesystems={MEMFS}},init(input,output,error){FS.initialized=true;input??=Module["stdin"];output??=Module["stdout"];error??=Module["stderr"];FS.createStandardStreams(input,output,error)},quit(){FS.initialized=false;for(var i=0;ithis.length-1||idx<0){return undefined}var chunkOffset=idx%this.chunkSize;var chunkNum=idx/this.chunkSize|0;return this.getter(chunkNum)[chunkOffset]}setDataGetter(getter){this.getter=getter}cacheLength(){var xhr=new XMLHttpRequest;xhr.open("HEAD",url,false);xhr.send(null);if(!(xhr.status>=200&&xhr.status<300||xhr.status===304))throw new Error("Couldn't load "+url+". Status: "+xhr.status);var datalength=Number(xhr.getResponseHeader("Content-length"));var header;var hasByteServing=(header=xhr.getResponseHeader("Accept-Ranges"))&&header==="bytes";var usesGzip=(header=xhr.getResponseHeader("Content-Encoding"))&&header==="gzip";var chunkSize=1024*1024;if(!hasByteServing)chunkSize=datalength;var doXHR=(from,to)=>{if(from>to)throw new Error("invalid range ("+from+", "+to+") or no bytes requested!");if(to>datalength-1)throw new Error("only "+datalength+" bytes available! 
programmer error!");var xhr=new XMLHttpRequest;xhr.open("GET",url,false);if(datalength!==chunkSize)xhr.setRequestHeader("Range","bytes="+from+"-"+to);xhr.responseType="arraybuffer";if(xhr.overrideMimeType){xhr.overrideMimeType("text/plain; charset=x-user-defined")}xhr.send(null);if(!(xhr.status>=200&&xhr.status<300||xhr.status===304))throw new Error("Couldn't load "+url+". Status: "+xhr.status);if(xhr.response!==undefined){return new Uint8Array(xhr.response||[])}return intArrayFromString(xhr.responseText||"",true)};var lazyArray=this;lazyArray.setDataGetter(chunkNum=>{var start=chunkNum*chunkSize;var end=(chunkNum+1)*chunkSize-1;end=Math.min(end,datalength-1);if(typeof lazyArray.chunks[chunkNum]=="undefined"){lazyArray.chunks[chunkNum]=doXHR(start,end)}if(typeof lazyArray.chunks[chunkNum]=="undefined")throw new Error("doXHR failed!");return lazyArray.chunks[chunkNum]});if(usesGzip||!datalength){chunkSize=datalength=1;datalength=this.getter(0).length;chunkSize=datalength;out("LazyFiles on gzip forces download of the whole file when length is accessed")}this._length=datalength;this._chunkSize=chunkSize;this.lengthKnown=true}get length(){if(!this.lengthKnown){this.cacheLength()}return this._length}get chunkSize(){if(!this.lengthKnown){this.cacheLength()}return this._chunkSize}}if(typeof XMLHttpRequest!="undefined"){if(!ENVIRONMENT_IS_WORKER)throw"Cannot do synchronous binary XHRs outside webworkers in modern browsers. 
Use --embed-file or --preload-file in emcc";var lazyArray=new LazyUint8Array;var properties={isDevice:false,contents:lazyArray}}else{var properties={isDevice:false,url}}var node=FS.createFile(parent,name,properties,canRead,canWrite);if(properties.contents){node.contents=properties.contents}else if(properties.url){node.contents=null;node.url=properties.url}Object.defineProperties(node,{usedBytes:{get:function(){return this.contents.length}}});var stream_ops={};var keys=Object.keys(node.stream_ops);keys.forEach(key=>{var fn=node.stream_ops[key];stream_ops[key]=(...args)=>{FS.forceLoadFile(node);return fn(...args)}});function writeChunks(stream,buffer,offset,length,position){var contents=stream.node.contents;if(position>=contents.length)return 0;var size=Math.min(contents.length-position,length);if(contents.slice){for(var i=0;i{FS.forceLoadFile(node);return writeChunks(stream,buffer,offset,length,position)};stream_ops.mmap=(stream,length,position,prot,flags)=>{FS.forceLoadFile(node);var ptr=mmapAlloc(length);if(!ptr){throw new FS.ErrnoError(48)}writeChunks(stream,HEAP8,ptr,length,position);return{ptr,allocated:true}};node.stream_ops=stream_ops;return node}};var UTF8ToString=(ptr,maxBytesToRead)=>ptr?UTF8ArrayToString(HEAPU8,ptr,maxBytesToRead):"";var SYSCALLS={DEFAULT_POLLMASK:5,calculateAt(dirfd,path,allowEmpty){if(PATH.isAbs(path)){return path}var dir;if(dirfd===-100){dir=FS.cwd()}else{var dirstream=SYSCALLS.getStreamFromFD(dirfd);dir=dirstream.path}if(path.length==0){if(!allowEmpty){throw new FS.ErrnoError(44)}return dir}return PATH.join2(dir,path)},doStat(func,path,buf){var 
stat=func(path);HEAP32[buf>>2]=stat.dev;HEAP32[buf+4>>2]=stat.mode;HEAPU32[buf+8>>2]=stat.nlink;HEAP32[buf+12>>2]=stat.uid;HEAP32[buf+16>>2]=stat.gid;HEAP32[buf+20>>2]=stat.rdev;tempI64=[stat.size>>>0,(tempDouble=stat.size,+Math.abs(tempDouble)>=1?tempDouble>0?+Math.floor(tempDouble/4294967296)>>>0:~~+Math.ceil((tempDouble-+(~~tempDouble>>>0))/4294967296)>>>0:0)],HEAP32[buf+24>>2]=tempI64[0],HEAP32[buf+28>>2]=tempI64[1];HEAP32[buf+32>>2]=4096;HEAP32[buf+36>>2]=stat.blocks;var atime=stat.atime.getTime();var mtime=stat.mtime.getTime();var ctime=stat.ctime.getTime();tempI64=[Math.floor(atime/1e3)>>>0,(tempDouble=Math.floor(atime/1e3),+Math.abs(tempDouble)>=1?tempDouble>0?+Math.floor(tempDouble/4294967296)>>>0:~~+Math.ceil((tempDouble-+(~~tempDouble>>>0))/4294967296)>>>0:0)],HEAP32[buf+40>>2]=tempI64[0],HEAP32[buf+44>>2]=tempI64[1];HEAPU32[buf+48>>2]=atime%1e3*1e3*1e3;tempI64=[Math.floor(mtime/1e3)>>>0,(tempDouble=Math.floor(mtime/1e3),+Math.abs(tempDouble)>=1?tempDouble>0?+Math.floor(tempDouble/4294967296)>>>0:~~+Math.ceil((tempDouble-+(~~tempDouble>>>0))/4294967296)>>>0:0)],HEAP32[buf+56>>2]=tempI64[0],HEAP32[buf+60>>2]=tempI64[1];HEAPU32[buf+64>>2]=mtime%1e3*1e3*1e3;tempI64=[Math.floor(ctime/1e3)>>>0,(tempDouble=Math.floor(ctime/1e3),+Math.abs(tempDouble)>=1?tempDouble>0?+Math.floor(tempDouble/4294967296)>>>0:~~+Math.ceil((tempDouble-+(~~tempDouble>>>0))/4294967296)>>>0:0)],HEAP32[buf+72>>2]=tempI64[0],HEAP32[buf+76>>2]=tempI64[1];HEAPU32[buf+80>>2]=ctime%1e3*1e3*1e3;tempI64=[stat.ino>>>0,(tempDouble=stat.ino,+Math.abs(tempDouble)>=1?tempDouble>0?+Math.floor(tempDouble/4294967296)>>>0:~~+Math.ceil((tempDouble-+(~~tempDouble>>>0))/4294967296)>>>0:0)],HEAP32[buf+88>>2]=tempI64[0],HEAP32[buf+92>>2]=tempI64[1];return 0},doMsync(addr,stream,len,flags,offset){if(!FS.isFile(stream.node.mode)){throw new FS.ErrnoError(43)}if(flags&2){return 0}var buffer=HEAPU8.slice(addr,addr+len);FS.msync(stream,buffer,offset,len,flags)},getStreamFromFD(fd){var 
stream=FS.getStreamChecked(fd);return stream},varargs:undefined,getStr(ptr){var ret=UTF8ToString(ptr);return ret}};function ___syscall_dup3(fd,newfd,flags){try{var old=SYSCALLS.getStreamFromFD(fd);if(old.fd===newfd)return-28;if(newfd<0||newfd>=FS.MAX_OPEN_FDS)return-8;var existing=FS.getStream(newfd);if(existing)FS.close(existing);return FS.dupStream(old,newfd).fd}catch(e){if(typeof FS=="undefined"||!(e.name==="ErrnoError"))throw e;return-e.errno}}function ___syscall_faccessat(dirfd,path,amode,flags){try{path=SYSCALLS.getStr(path);path=SYSCALLS.calculateAt(dirfd,path);if(amode&~7){return-28}var lookup=FS.lookupPath(path,{follow:true});var node=lookup.node;if(!node){return-44}var perms="";if(amode&4)perms+="r";if(amode&2)perms+="w";if(amode&1)perms+="x";if(perms&&FS.nodePermissions(node,perms)){return-2}return 0}catch(e){if(typeof FS=="undefined"||!(e.name==="ErrnoError"))throw e;return-e.errno}}function syscallGetVarargI(){var ret=HEAP32[+SYSCALLS.varargs>>2];SYSCALLS.varargs+=4;return ret}var syscallGetVarargP=syscallGetVarargI;function ___syscall_fcntl64(fd,cmd,varargs){SYSCALLS.varargs=varargs;try{var stream=SYSCALLS.getStreamFromFD(fd);switch(cmd){case 0:{var arg=syscallGetVarargI();if(arg<0){return-28}while(FS.streams[arg]){arg++}var newStream;newStream=FS.dupStream(stream,arg);return newStream.fd}case 1:case 2:return 0;case 3:return stream.flags;case 4:{var arg=syscallGetVarargI();stream.flags|=arg;return 0}case 12:{var arg=syscallGetVarargP();var offset=0;HEAP16[arg+offset>>1]=2;return 0}case 13:case 14:return 0}return-28}catch(e){if(typeof FS=="undefined"||!(e.name==="ErrnoError"))throw e;return-e.errno}}function ___syscall_fstat64(fd,buf){try{var stream=SYSCALLS.getStreamFromFD(fd);return SYSCALLS.doStat(FS.stat,stream.path,buf)}catch(e){if(typeof FS=="undefined"||!(e.name==="ErrnoError"))throw e;return-e.errno}}var stringToUTF8=(str,outPtr,maxBytesToWrite)=>stringToUTF8Array(str,HEAPU8,outPtr,maxBytesToWrite);function 
___syscall_getdents64(fd,dirp,count){try{var stream=SYSCALLS.getStreamFromFD(fd);stream.getdents||=FS.readdir(stream.path);var struct_size=280;var pos=0;var off=FS.llseek(stream,0,1);var idx=Math.floor(off/struct_size);while(idx>>0,(tempDouble=id,+Math.abs(tempDouble)>=1?tempDouble>0?+Math.floor(tempDouble/4294967296)>>>0:~~+Math.ceil((tempDouble-+(~~tempDouble>>>0))/4294967296)>>>0:0)],HEAP32[dirp+pos>>2]=tempI64[0],HEAP32[dirp+pos+4>>2]=tempI64[1];tempI64=[(idx+1)*struct_size>>>0,(tempDouble=(idx+1)*struct_size,+Math.abs(tempDouble)>=1?tempDouble>0?+Math.floor(tempDouble/4294967296)>>>0:~~+Math.ceil((tempDouble-+(~~tempDouble>>>0))/4294967296)>>>0:0)],HEAP32[dirp+pos+8>>2]=tempI64[0],HEAP32[dirp+pos+12>>2]=tempI64[1];HEAP16[dirp+pos+16>>1]=280;HEAP8[dirp+pos+18]=type;stringToUTF8(name,dirp+pos+19,256);pos+=struct_size;idx+=1}FS.llseek(stream,idx*struct_size,0);return pos}catch(e){if(typeof FS=="undefined"||!(e.name==="ErrnoError"))throw e;return-e.errno}}function ___syscall_ioctl(fd,op,varargs){SYSCALLS.varargs=varargs;try{var stream=SYSCALLS.getStreamFromFD(fd);switch(op){case 21509:{if(!stream.tty)return-59;return 0}case 21505:{if(!stream.tty)return-59;if(stream.tty.ops.ioctl_tcgets){var termios=stream.tty.ops.ioctl_tcgets(stream);var argp=syscallGetVarargP();HEAP32[argp>>2]=termios.c_iflag||0;HEAP32[argp+4>>2]=termios.c_oflag||0;HEAP32[argp+8>>2]=termios.c_cflag||0;HEAP32[argp+12>>2]=termios.c_lflag||0;for(var i=0;i<32;i++){HEAP8[argp+i+17]=termios.c_cc[i]||0}return 0}return 0}case 21510:case 21511:case 21512:{if(!stream.tty)return-59;return 0}case 21506:case 21507:case 21508:{if(!stream.tty)return-59;if(stream.tty.ops.ioctl_tcsets){var argp=syscallGetVarargP();var c_iflag=HEAP32[argp>>2];var c_oflag=HEAP32[argp+4>>2];var c_cflag=HEAP32[argp+8>>2];var c_lflag=HEAP32[argp+12>>2];var c_cc=[];for(var i=0;i<32;i++){c_cc.push(HEAP8[argp+i+17])}return stream.tty.ops.ioctl_tcsets(stream.tty,op,{c_iflag,c_oflag,c_cflag,c_lflag,c_cc})}return 0}case 
21519:{if(!stream.tty)return-59;var argp=syscallGetVarargP();HEAP32[argp>>2]=0;return 0}case 21520:{if(!stream.tty)return-59;return-28}case 21531:{var argp=syscallGetVarargP();return FS.ioctl(stream,op,argp)}case 21523:{if(!stream.tty)return-59;if(stream.tty.ops.ioctl_tiocgwinsz){var winsize=stream.tty.ops.ioctl_tiocgwinsz(stream.tty);var argp=syscallGetVarargP();HEAP16[argp>>1]=winsize[0];HEAP16[argp+2>>1]=winsize[1]}return 0}case 21524:{if(!stream.tty)return-59;return 0}case 21515:{if(!stream.tty)return-59;return 0}default:return-28}}catch(e){if(typeof FS=="undefined"||!(e.name==="ErrnoError"))throw e;return-e.errno}}function ___syscall_lstat64(path,buf){try{path=SYSCALLS.getStr(path);return SYSCALLS.doStat(FS.lstat,path,buf)}catch(e){if(typeof FS=="undefined"||!(e.name==="ErrnoError"))throw e;return-e.errno}}function ___syscall_newfstatat(dirfd,path,buf,flags){try{path=SYSCALLS.getStr(path);var nofollow=flags&256;var allowEmpty=flags&4096;flags=flags&~6400;path=SYSCALLS.calculateAt(dirfd,path,allowEmpty);return SYSCALLS.doStat(nofollow?FS.lstat:FS.stat,path,buf)}catch(e){if(typeof FS=="undefined"||!(e.name==="ErrnoError"))throw e;return-e.errno}}function ___syscall_openat(dirfd,path,flags,varargs){SYSCALLS.varargs=varargs;try{path=SYSCALLS.getStr(path);path=SYSCALLS.calculateAt(dirfd,path);var mode=varargs?syscallGetVarargI():0;return FS.open(path,flags,mode).fd}catch(e){if(typeof FS=="undefined"||!(e.name==="ErrnoError"))throw e;return-e.errno}}function ___syscall_renameat(olddirfd,oldpath,newdirfd,newpath){try{oldpath=SYSCALLS.getStr(oldpath);newpath=SYSCALLS.getStr(newpath);oldpath=SYSCALLS.calculateAt(olddirfd,oldpath);newpath=SYSCALLS.calculateAt(newdirfd,newpath);FS.rename(oldpath,newpath);return 0}catch(e){if(typeof FS=="undefined"||!(e.name==="ErrnoError"))throw e;return-e.errno}}function ___syscall_rmdir(path){try{path=SYSCALLS.getStr(path);FS.rmdir(path);return 0}catch(e){if(typeof FS=="undefined"||!(e.name==="ErrnoError"))throw 
e;return-e.errno}}function ___syscall_stat64(path,buf){try{path=SYSCALLS.getStr(path);return SYSCALLS.doStat(FS.stat,path,buf)}catch(e){if(typeof FS=="undefined"||!(e.name==="ErrnoError"))throw e;return-e.errno}}function ___syscall_unlinkat(dirfd,path,flags){try{path=SYSCALLS.getStr(path);path=SYSCALLS.calculateAt(dirfd,path);if(flags===0){FS.unlink(path)}else if(flags===512){FS.rmdir(path)}else{abort("Invalid flags passed to unlinkat")}return 0}catch(e){if(typeof FS=="undefined"||!(e.name==="ErrnoError"))throw e;return-e.errno}}var __abort_js=()=>{abort("")};var nowIsMonotonic=1;var __emscripten_get_now_is_monotonic=()=>nowIsMonotonic;var __emscripten_throw_longjmp=()=>{throw Infinity};var convertI32PairToI53Checked=(lo,hi)=>hi+2097152>>>0<4194305-!!lo?(lo>>>0)+hi*4294967296:NaN;function __gmtime_js(time_low,time_high,tmPtr){var time=convertI32PairToI53Checked(time_low,time_high);var date=new Date(time*1e3);HEAP32[tmPtr>>2]=date.getUTCSeconds();HEAP32[tmPtr+4>>2]=date.getUTCMinutes();HEAP32[tmPtr+8>>2]=date.getUTCHours();HEAP32[tmPtr+12>>2]=date.getUTCDate();HEAP32[tmPtr+16>>2]=date.getUTCMonth();HEAP32[tmPtr+20>>2]=date.getUTCFullYear()-1900;HEAP32[tmPtr+24>>2]=date.getUTCDay();var start=Date.UTC(date.getUTCFullYear(),0,1,0,0,0,0);var yday=(date.getTime()-start)/(1e3*60*60*24)|0;HEAP32[tmPtr+28>>2]=yday}var isLeapYear=year=>year%4===0&&(year%100!==0||year%400===0);var MONTH_DAYS_LEAP_CUMULATIVE=[0,31,60,91,121,152,182,213,244,274,305,335];var MONTH_DAYS_REGULAR_CUMULATIVE=[0,31,59,90,120,151,181,212,243,273,304,334];var ydayFromDate=date=>{var leap=isLeapYear(date.getFullYear());var monthDaysCumulative=leap?MONTH_DAYS_LEAP_CUMULATIVE:MONTH_DAYS_REGULAR_CUMULATIVE;var yday=monthDaysCumulative[date.getMonth()]+date.getDate()-1;return yday};function __localtime_js(time_low,time_high,tmPtr){var time=convertI32PairToI53Checked(time_low,time_high);var date=new 
Date(time*1e3);HEAP32[tmPtr>>2]=date.getSeconds();HEAP32[tmPtr+4>>2]=date.getMinutes();HEAP32[tmPtr+8>>2]=date.getHours();HEAP32[tmPtr+12>>2]=date.getDate();HEAP32[tmPtr+16>>2]=date.getMonth();HEAP32[tmPtr+20>>2]=date.getFullYear()-1900;HEAP32[tmPtr+24>>2]=date.getDay();var yday=ydayFromDate(date)|0;HEAP32[tmPtr+28>>2]=yday;HEAP32[tmPtr+36>>2]=-(date.getTimezoneOffset()*60);var start=new Date(date.getFullYear(),0,1);var summerOffset=new Date(date.getFullYear(),6,1).getTimezoneOffset();var winterOffset=start.getTimezoneOffset();var dst=(summerOffset!=winterOffset&&date.getTimezoneOffset()==Math.min(winterOffset,summerOffset))|0;HEAP32[tmPtr+32>>2]=dst}var setTempRet0=val=>__emscripten_tempret_set(val);var __mktime_js=function(tmPtr){var ret=(()=>{var date=new Date(HEAP32[tmPtr+20>>2]+1900,HEAP32[tmPtr+16>>2],HEAP32[tmPtr+12>>2],HEAP32[tmPtr+8>>2],HEAP32[tmPtr+4>>2],HEAP32[tmPtr>>2],0);var dst=HEAP32[tmPtr+32>>2];var guessedOffset=date.getTimezoneOffset();var start=new Date(date.getFullYear(),0,1);var summerOffset=new Date(date.getFullYear(),6,1).getTimezoneOffset();var winterOffset=start.getTimezoneOffset();var dstOffset=Math.min(winterOffset,summerOffset);if(dst<0){HEAP32[tmPtr+32>>2]=Number(summerOffset!=winterOffset&&dstOffset==guessedOffset)}else if(dst>0!=(dstOffset==guessedOffset)){var nonDstOffset=Math.max(winterOffset,summerOffset);var trueOffset=dst>0?dstOffset:nonDstOffset;date.setTime(date.getTime()+(trueOffset-guessedOffset)*6e4)}HEAP32[tmPtr+24>>2]=date.getDay();var yday=ydayFromDate(date)|0;HEAP32[tmPtr+28>>2]=yday;HEAP32[tmPtr>>2]=date.getSeconds();HEAP32[tmPtr+4>>2]=date.getMinutes();HEAP32[tmPtr+8>>2]=date.getHours();HEAP32[tmPtr+12>>2]=date.getDate();HEAP32[tmPtr+16>>2]=date.getMonth();HEAP32[tmPtr+20>>2]=date.getYear();var timeMs=date.getTime();if(isNaN(timeMs)){return-1}return timeMs/1e3})();return 
setTempRet0((tempDouble=ret,+Math.abs(tempDouble)>=1?tempDouble>0?+Math.floor(tempDouble/4294967296)>>>0:~~+Math.ceil((tempDouble-+(~~tempDouble>>>0))/4294967296)>>>0:0)),ret>>>0};var __tzset_js=(timezone,daylight,std_name,dst_name)=>{var currentYear=(new Date).getFullYear();var winter=new Date(currentYear,0,1);var summer=new Date(currentYear,6,1);var winterOffset=winter.getTimezoneOffset();var summerOffset=summer.getTimezoneOffset();var stdTimezoneOffset=Math.max(winterOffset,summerOffset);HEAPU32[timezone>>2]=stdTimezoneOffset*60;HEAP32[daylight>>2]=Number(winterOffset!=summerOffset);var extractZone=timezoneOffset=>{var sign=timezoneOffset>=0?"-":"+";var absOffset=Math.abs(timezoneOffset);var hours=String(Math.floor(absOffset/60)).padStart(2,"0");var minutes=String(absOffset%60).padStart(2,"0");return`UTC${sign}${hours}${minutes}`};var winterName=extractZone(winterOffset);var summerName=extractZone(summerOffset);if(summerOffsetDate.now();var getHeapMax=()=>2147483648;var _emscripten_get_heap_max=()=>getHeapMax();var _emscripten_get_now=()=>performance.now();var growMemory=size=>{var b=wasmMemory.buffer;var pages=(size-b.byteLength+65535)/65536|0;try{wasmMemory.grow(pages);updateMemoryViews();return 1}catch(e){}};var _emscripten_resize_heap=requestedSize=>{var oldSize=HEAPU8.length;requestedSize>>>=0;var maxHeapSize=getHeapMax();if(requestedSize>maxHeapSize){return false}for(var cutDown=1;cutDown<=4;cutDown*=2){var overGrownHeapSize=oldSize*(1+.2/cutDown);overGrownHeapSize=Math.min(overGrownHeapSize,requestedSize+100663296);var newSize=Math.min(maxHeapSize,alignMemory(Math.max(requestedSize,overGrownHeapSize),65536));var replacement=growMemory(newSize);if(replacement){return true}}return false};var ENV={};var getExecutableName=()=>thisProgram||"./this.program";var getEnvStrings=()=>{if(!getEnvStrings.strings){var lang=(typeof navigator=="object"&&navigator.languages&&navigator.languages[0]||"C").replace("-","_")+".UTF-8";var 
// --- WASI/libc syscall shims: environment strings and file-descriptor I/O ---
// getEnvStrings (continued): merges the synthetic defaults with user-supplied ENV
// overrides (ENV[x]===undefined deletes a key) into cached "KEY=value" strings.
// _environ_get / _environ_sizes_get implement the WASI environ ABI: pointer table +
// packed NUL-terminated strings, and their counts/sizes.
// NOTE(review): this line appears extraction-garbled — `for(var i=0;i{var bufSize=0;`
// fuses the body of stringToAscii with the start of _environ_get (a `<`-prefixed span
// was dropped). Recover from the pristine build artifact; do not hand-patch.
// _fd_close/_fd_fdstat_get map WASI fds onto Emscripten's FS, translating FS ErrnoError
// into numeric errno returns; _fd_fdstat_get classifies the fd (tty=2, dir=3, link=7,
// regular=4) and writes 64-bit rights fields via the tempDouble split idiom.
// doReadv/_fd_read implement scatter reads over an iovec array (8 bytes per entry:
// ptr,len) — the `if(curr>2]=num` fragment at the end of this line is also garbled.
env={USER:"web_user",LOGNAME:"web_user",PATH:"/",PWD:"/",HOME:"/home/web_user",LANG:lang,_:getExecutableName()};for(var x in ENV){if(ENV[x]===undefined)delete env[x];else env[x]=ENV[x]}var strings=[];for(var x in env){strings.push(`${x}=${env[x]}`)}getEnvStrings.strings=strings}return getEnvStrings.strings};var stringToAscii=(str,buffer)=>{for(var i=0;i{var bufSize=0;getEnvStrings().forEach((string,i)=>{var ptr=environ_buf+bufSize;HEAPU32[__environ+i*4>>2]=ptr;stringToAscii(string,ptr);bufSize+=string.length+1});return 0};var _environ_sizes_get=(penviron_count,penviron_buf_size)=>{var strings=getEnvStrings();HEAPU32[penviron_count>>2]=strings.length;var bufSize=0;strings.forEach(string=>bufSize+=string.length+1);HEAPU32[penviron_buf_size>>2]=bufSize;return 0};function _fd_close(fd){try{var stream=SYSCALLS.getStreamFromFD(fd);FS.close(stream);return 0}catch(e){if(typeof FS=="undefined"||!(e.name==="ErrnoError"))throw e;return e.errno}}function _fd_fdstat_get(fd,pbuf){try{var rightsBase=0;var rightsInheriting=0;var flags=0;{var stream=SYSCALLS.getStreamFromFD(fd);var type=stream.tty?2:FS.isDir(stream.mode)?3:FS.isLink(stream.mode)?7:4}HEAP8[pbuf]=type;HEAP16[pbuf+2>>1]=flags;tempI64=[rightsBase>>>0,(tempDouble=rightsBase,+Math.abs(tempDouble)>=1?tempDouble>0?+Math.floor(tempDouble/4294967296)>>>0:~~+Math.ceil((tempDouble-+(~~tempDouble>>>0))/4294967296)>>>0:0)],HEAP32[pbuf+8>>2]=tempI64[0],HEAP32[pbuf+12>>2]=tempI64[1];tempI64=[rightsInheriting>>>0,(tempDouble=rightsInheriting,+Math.abs(tempDouble)>=1?tempDouble>0?+Math.floor(tempDouble/4294967296)>>>0:~~+Math.ceil((tempDouble-+(~~tempDouble>>>0))/4294967296)>>>0:0)],HEAP32[pbuf+16>>2]=tempI64[0],HEAP32[pbuf+20>>2]=tempI64[1];return 0}catch(e){if(typeof FS=="undefined"||!(e.name==="ErrnoError"))throw e;return e.errno}}var doReadv=(stream,iov,iovcnt,offset)=>{var ret=0;for(var i=0;i>2];var len=HEAPU32[iov+4>>2];iov+=8;var curr=FS.read(stream,HEAP8,ptr,len,offset);if(curr<0)return-1;ret+=curr;if(curr>2]=num;return 
// _fd_seek converts the i32 offset pair to an i53 (errno 61/EOVERFLOW if NaN), calls
// FS.llseek, writes the new 64-bit position back, and resets any in-progress getdents
// iteration on a rewind-to-start. doWritev/_fd_write mirror the readv path for gather
// writes (this region also shows `<`-drop garbling — see NOTE above).
// handleException treats ExitStatus/"unwind" as a clean exit; _proc_exit/exitJS/maybeExit
// implement the keepalive-counted runtime exit protocol; callUserCallback guards user
// callbacks against running after ABORT and exits afterwards if nothing keeps the
// runtime alive. Asyncify.instrumentWasmImports begins here: it pattern-matches import
// names (libavjs_wait_reader, invoke_*, __asyncjs__*) that may suspend.
0}catch(e){if(typeof FS=="undefined"||!(e.name==="ErrnoError"))throw e;return e.errno}}function _fd_seek(fd,offset_low,offset_high,whence,newOffset){var offset=convertI32PairToI53Checked(offset_low,offset_high);try{if(isNaN(offset))return 61;var stream=SYSCALLS.getStreamFromFD(fd);FS.llseek(stream,offset,whence);tempI64=[stream.position>>>0,(tempDouble=stream.position,+Math.abs(tempDouble)>=1?tempDouble>0?+Math.floor(tempDouble/4294967296)>>>0:~~+Math.ceil((tempDouble-+(~~tempDouble>>>0))/4294967296)>>>0:0)],HEAP32[newOffset>>2]=tempI64[0],HEAP32[newOffset+4>>2]=tempI64[1];if(stream.getdents&&offset===0&&whence===0)stream.getdents=null;return 0}catch(e){if(typeof FS=="undefined"||!(e.name==="ErrnoError"))throw e;return e.errno}}var doWritev=(stream,iov,iovcnt,offset)=>{var ret=0;for(var i=0;i>2];var len=HEAPU32[iov+4>>2];iov+=8;var curr=FS.write(stream,HEAP8,ptr,len,offset);if(curr<0)return-1;ret+=curr;if(curr>2]=num;return 0}catch(e){if(typeof FS=="undefined"||!(e.name==="ErrnoError"))throw e;return e.errno}}var wasmTable;var runAndAbortIfError=func=>{try{return func()}catch(e){abort(e)}};var handleException=e=>{if(e instanceof ExitStatus||e=="unwind"){return EXITSTATUS}quit_(1,e)};var runtimeKeepaliveCounter=0;var keepRuntimeAlive=()=>noExitRuntime||runtimeKeepaliveCounter>0;var _proc_exit=code=>{EXITSTATUS=code;if(!keepRuntimeAlive()){Module["onExit"]?.(code);ABORT=true}quit_(code,new ExitStatus(code))};var exitJS=(status,implicit)=>{EXITSTATUS=status;_proc_exit(status)};var _exit=exitJS;var maybeExit=()=>{if(!keepRuntimeAlive()){try{_exit(EXITSTATUS)}catch(e){handleException(e)}}};var callUserCallback=func=>{if(ABORT){return}try{func();maybeExit()}catch(e){handleException(e)}};var runtimeKeepalivePush=()=>{runtimeKeepaliveCounter+=1};var runtimeKeepalivePop=()=>{runtimeKeepaliveCounter-=1};var Asyncify={instrumentWasmImports(imports){var importPattern=/^(libavjs_wait_reader|invoke_.*|__asyncjs__.*)$/;for(let[x,original]of Object.entries(imports)){if(typeof 
// --- Asyncify runtime: lets synchronous-looking WASM calls suspend into JS promises ---
// instrumentWasmExports wraps every exported function so the export call stack is
// tracked (push on entry, pop+maybeStopUnwind on exit); the stack bottom identifies the
// rewind entry point. State machine: Normal -> Unwinding (stack is serialized into a
// malloc'd data block: [stackPtr, stackEnd, rewindId] header + 4 KiB asyncify stack)
// -> Rewinding (the same export is re-entered and fast-forwarded to the suspension
// point). getCallStackId/callStackIdToName provide the stable function-name <-> id
// mapping stored in the data block.
original=="function"){let isAsyncifyImport=original.isAsync||importPattern.test(x)}}},instrumentWasmExports(exports){var ret={};for(let[x,original]of Object.entries(exports)){if(typeof original=="function"){ret[x]=(...args)=>{Asyncify.exportCallStack.push(x);try{return original(...args)}finally{if(!ABORT){var y=Asyncify.exportCallStack.pop();Asyncify.maybeStopUnwind()}}}}else{ret[x]=original}}return ret},State:{Normal:0,Unwinding:1,Rewinding:2,Disabled:3},state:0,StackSize:4096,currData:null,handleSleepReturnValue:0,exportCallStack:[],callStackNameToId:{},callStackIdToName:{},callStackId:0,asyncPromiseHandlers:null,sleepCallbacks:[],getCallStackId(funcName){var id=Asyncify.callStackNameToId[funcName];if(id===undefined){id=Asyncify.callStackId++;Asyncify.callStackNameToId[funcName]=id;Asyncify.callStackIdToName[id]=funcName}return id},maybeStopUnwind(){if(Asyncify.currData&&Asyncify.state===Asyncify.State.Unwinding&&Asyncify.exportCallStack.length===0){Asyncify.state=Asyncify.State.Normal;runAndAbortIfError(_asyncify_stop_unwind);if(typeof Fibers!="undefined"){Fibers.trampoline()}}},whenDone(){return new Promise((resolve,reject)=>{Asyncify.asyncPromiseHandlers={resolve,reject}})},allocateData(){var ptr=_malloc(12+Asyncify.StackSize);Asyncify.setDataHeader(ptr,ptr+12,Asyncify.StackSize);Asyncify.setDataRewindFunc(ptr);return ptr},setDataHeader(ptr,stack,stackSize){HEAPU32[ptr>>2]=stack;HEAPU32[ptr+4>>2]=stack+stackSize},setDataRewindFunc(ptr){var bottomOfCallStack=Asyncify.exportCallStack[0];var rewindId=Asyncify.getCallStackId(bottomOfCallStack);HEAP32[ptr+8>>2]=rewindId},getDataRewindFuncName(ptr){var id=HEAP32[ptr+8>>2];var name=Asyncify.callStackIdToName[id];return name},getDataRewindFunc(name){var func=wasmExports[name];return func},doRewind(ptr){var name=Asyncify.getDataRewindFuncName(ptr);var func=Asyncify.getDataRewindFunc(name);return func()},handleSleep(startAsync){if(ABORT)return;if(Asyncify.state===Asyncify.State.Normal){var reachedCallback=false;var 
// handleSleep (continued): startAsync receives a wakeUp callback. If wakeUp fires
// before startAsync returns (reachedCallback && !reachedAfterCallback) the operation
// completed synchronously and no unwind happens. Otherwise the stack is unwound
// (pausing MainLoop if present); when wakeUp later fires, the state flips to
// Rewinding, doRewind re-enters WASM, and the final result (or a thrown error) is
// delivered through asyncPromiseHandlers once currData is cleared. In the Rewinding
// branch the data block is freed and queued sleepCallbacks run via callUserCallback.
// handleAsync adapts a promise-returning startAsync onto handleSleep.
reachedAfterCallback=false;startAsync((handleSleepReturnValue=0)=>{if(ABORT)return;Asyncify.handleSleepReturnValue=handleSleepReturnValue;reachedCallback=true;if(!reachedAfterCallback){return}Asyncify.state=Asyncify.State.Rewinding;runAndAbortIfError(()=>_asyncify_start_rewind(Asyncify.currData));if(typeof MainLoop!="undefined"&&MainLoop.func){MainLoop.resume()}var asyncWasmReturnValue,isError=false;try{asyncWasmReturnValue=Asyncify.doRewind(Asyncify.currData)}catch(err){asyncWasmReturnValue=err;isError=true}var handled=false;if(!Asyncify.currData){var asyncPromiseHandlers=Asyncify.asyncPromiseHandlers;if(asyncPromiseHandlers){Asyncify.asyncPromiseHandlers=null;(isError?asyncPromiseHandlers.reject:asyncPromiseHandlers.resolve)(asyncWasmReturnValue);handled=true}}if(isError&&!handled){throw asyncWasmReturnValue}});reachedAfterCallback=true;if(!reachedCallback){Asyncify.state=Asyncify.State.Unwinding;Asyncify.currData=Asyncify.allocateData();if(typeof MainLoop!="undefined"&&MainLoop.func){MainLoop.pause()}runAndAbortIfError(()=>_asyncify_start_unwind(Asyncify.currData))}}else if(Asyncify.state===Asyncify.State.Rewinding){Asyncify.state=Asyncify.State.Normal;runAndAbortIfError(_asyncify_stop_rewind);_free(Asyncify.currData);Asyncify.currData=null;Asyncify.sleepCallbacks.forEach(callUserCallback)}else{abort(`invalid state: ${Asyncify.state}`)}return Asyncify.handleSleepReturnValue},handleAsync(startAsync){return Asyncify.handleSleep(wakeUp=>{startAsync().then(wakeUp)})}};var getCFunc=ident=>{var func=Module["_"+ident];return func};var writeArrayToMemory=(array,buffer)=>{HEAP8.set(array,buffer)};var stackAlloc=sz=>__emscripten_stack_alloc(sz);var stringToUTF8OnStack=str=>{var size=lengthBytesUTF8(str)+1;var ret=stackAlloc(size);stringToUTF8(str,ret,size);return ret};var ccall=(ident,returnType,argTypes,args,opts)=>{var toC={string:str=>{var ret=0;if(str!==null&&str!==undefined&&str!==0){ret=stringToUTF8OnStack(str)}return ret},array:arr=>{var 
// --- ccall/cwrap marshalling, the WASM import table, and module instantiation ---
// ccall (continued): the `array` converter copies a JS array onto the stack;
// convertReturnValue maps "string" returns through UTF8ToString and "boolean" through
// Boolean().
// NOTE(review): `for(var i=0;i{var numericArgs` is extraction-garbled — the ccall
// argument-marshalling loop, its stack save/restore, and the start of cwrap were
// dropped (a `<`-prefixed span was eaten). Restore from the pristine build artifact.
// cwrap returns the raw export directly when all arg/return types are numeric,
// otherwise a closure over ccall. wasmImports maps the single-letter minified import
// names expected by the .wasm binary onto these JS shims (syscalls, time, env, fd I/O,
// invoke_* trampolines, libavjs_wait_reader). createWasm instantiates the module, and
// the `var _name=Module["_name"]=(...)=>(... = wasmExports["xx"])(...)` pattern below
// is a lazy self-replacing thunk: the first call rebinds the name straight to the
// export, so later calls skip the indirection.
ret=stackAlloc(arr.length);writeArrayToMemory(arr,ret);return ret}};function convertReturnValue(ret){if(returnType==="string"){return UTF8ToString(ret)}if(returnType==="boolean")return Boolean(ret);return ret}var func=getCFunc(ident);var cArgs=[];var stack=0;if(args){for(var i=0;i{var numericArgs=!argTypes||argTypes.every(type=>type==="number"||type==="boolean");var numericRet=returnType!=="string";if(numericRet&&numericArgs&&!opts){return getCFunc(ident)}return(...args)=>ccall(ident,returnType,argTypes,args,opts)};FS.createPreloadedFile=FS_createPreloadedFile;FS.staticInit();var wasmImports={C:___syscall_dup3,S:___syscall_faccessat,n:___syscall_fcntl64,Q:___syscall_fstat64,J:___syscall_getdents64,U:___syscall_ioctl,N:___syscall_lstat64,O:___syscall_newfstatat,q:___syscall_openat,I:___syscall_renameat,H:___syscall_rmdir,P:___syscall_stat64,F:___syscall_unlinkat,T:__abort_js,R:__emscripten_get_now_is_monotonic,D:__emscripten_throw_longjmp,w:__gmtime_js,x:__localtime_js,y:__mktime_js,K:__tzset_js,k:_emscripten_date_now,G:_emscripten_get_heap_max,j:_emscripten_get_now,E:_emscripten_resize_heap,L:_environ_get,M:_environ_sizes_get,h:_fd_close,p:_fd_fdstat_get,r:_fd_read,z:_fd_seek,l:_fd_write,o:invoke_i,d:invoke_ii,c:invoke_iii,g:invoke_iiii,m:invoke_iiiii,W:invoke_iiiiii,v:invoke_iiiiiiiii,u:invoke_iiiiiiiiii,B:invoke_iiiijj,A:invoke_jij,a:invoke_vi,f:invoke_vii,e:invoke_viii,X:invoke_viiid,b:invoke_viiii,i:invoke_viiiii,V:invoke_viiiiii,s:invoke_viiiiiii,t:invoke_viiiiiiii,Y:libavjs_wait_reader,Z:writeoutEmscriptenOOM};var wasmExports=createWasm();var ___wasm_call_ctors=()=>(___wasm_call_ctors=wasmExports["$"])();var _ff_nothing=Module["_ff_nothing"]=()=>(_ff_nothing=Module["_ff_nothing"]=wasmExports["aa"])();var _AVFrame_crop_bottom=Module["_AVFrame_crop_bottom"]=a0=>(_AVFrame_crop_bottom=Module["_AVFrame_crop_bottom"]=wasmExports["ba"])(a0);var 
// --- Generated accessor thunks for WASM exports (struct field getters/setters) ---
// Every definition below follows one mechanical, build-generated pattern:
//   var _Name = Module["_Name"] = (args) => (_Name = Module["_Name"] = wasmExports["xy"])(args);
// i.e. a lazily self-replacing forwarder from a readable name to a minified export key.
// Naming conventions (from the visible pairs): `Struct_field` reads a field given a
// struct pointer; the `_s` suffix writes it; an `_a`/`_a_s` suffix indexes into an
// array field (extra index argument); an `hi` suffix carries the high 32 bits of a
// 64-bit field (e.g. pts/ptshi, channel_layout/channel_layouthi), since values cross
// the JS boundary as i32 pairs. Do not edit these by hand — regenerate the build.
_AVFrame_crop_bottom_s=Module["_AVFrame_crop_bottom_s"]=(a0,a1)=>(_AVFrame_crop_bottom_s=Module["_AVFrame_crop_bottom_s"]=wasmExports["ca"])(a0,a1);var _AVFrame_crop_left=Module["_AVFrame_crop_left"]=a0=>(_AVFrame_crop_left=Module["_AVFrame_crop_left"]=wasmExports["da"])(a0);var _AVFrame_crop_left_s=Module["_AVFrame_crop_left_s"]=(a0,a1)=>(_AVFrame_crop_left_s=Module["_AVFrame_crop_left_s"]=wasmExports["ea"])(a0,a1);var _AVFrame_crop_right=Module["_AVFrame_crop_right"]=a0=>(_AVFrame_crop_right=Module["_AVFrame_crop_right"]=wasmExports["fa"])(a0);var _AVFrame_crop_right_s=Module["_AVFrame_crop_right_s"]=(a0,a1)=>(_AVFrame_crop_right_s=Module["_AVFrame_crop_right_s"]=wasmExports["ga"])(a0,a1);var _AVFrame_crop_top=Module["_AVFrame_crop_top"]=a0=>(_AVFrame_crop_top=Module["_AVFrame_crop_top"]=wasmExports["ha"])(a0);var _AVFrame_crop_top_s=Module["_AVFrame_crop_top_s"]=(a0,a1)=>(_AVFrame_crop_top_s=Module["_AVFrame_crop_top_s"]=wasmExports["ia"])(a0,a1);var _AVFrame_data_a=Module["_AVFrame_data_a"]=(a0,a1)=>(_AVFrame_data_a=Module["_AVFrame_data_a"]=wasmExports["ja"])(a0,a1);var _AVFrame_data_a_s=Module["_AVFrame_data_a_s"]=(a0,a1,a2)=>(_AVFrame_data_a_s=Module["_AVFrame_data_a_s"]=wasmExports["ka"])(a0,a1,a2);var _AVFrame_format=Module["_AVFrame_format"]=a0=>(_AVFrame_format=Module["_AVFrame_format"]=wasmExports["la"])(a0);var _AVFrame_format_s=Module["_AVFrame_format_s"]=(a0,a1)=>(_AVFrame_format_s=Module["_AVFrame_format_s"]=wasmExports["ma"])(a0,a1);var _AVFrame_height=Module["_AVFrame_height"]=a0=>(_AVFrame_height=Module["_AVFrame_height"]=wasmExports["na"])(a0);var _AVFrame_height_s=Module["_AVFrame_height_s"]=(a0,a1)=>(_AVFrame_height_s=Module["_AVFrame_height_s"]=wasmExports["oa"])(a0,a1);var _AVFrame_key_frame=Module["_AVFrame_key_frame"]=a0=>(_AVFrame_key_frame=Module["_AVFrame_key_frame"]=wasmExports["pa"])(a0);var _AVFrame_key_frame_s=Module["_AVFrame_key_frame_s"]=(a0,a1)=>(_AVFrame_key_frame_s=Module["_AVFrame_key_frame_s"]=wasmExports["qa"])(a0,a1);var 
_AVFrame_linesize_a=Module["_AVFrame_linesize_a"]=(a0,a1)=>(_AVFrame_linesize_a=Module["_AVFrame_linesize_a"]=wasmExports["ra"])(a0,a1);var _AVFrame_linesize_a_s=Module["_AVFrame_linesize_a_s"]=(a0,a1,a2)=>(_AVFrame_linesize_a_s=Module["_AVFrame_linesize_a_s"]=wasmExports["sa"])(a0,a1,a2);var _AVFrame_nb_samples=Module["_AVFrame_nb_samples"]=a0=>(_AVFrame_nb_samples=Module["_AVFrame_nb_samples"]=wasmExports["ta"])(a0);var _AVFrame_nb_samples_s=Module["_AVFrame_nb_samples_s"]=(a0,a1)=>(_AVFrame_nb_samples_s=Module["_AVFrame_nb_samples_s"]=wasmExports["ua"])(a0,a1);var _AVFrame_pict_type=Module["_AVFrame_pict_type"]=a0=>(_AVFrame_pict_type=Module["_AVFrame_pict_type"]=wasmExports["va"])(a0);var _AVFrame_pict_type_s=Module["_AVFrame_pict_type_s"]=(a0,a1)=>(_AVFrame_pict_type_s=Module["_AVFrame_pict_type_s"]=wasmExports["wa"])(a0,a1);var _AVFrame_pts=Module["_AVFrame_pts"]=a0=>(_AVFrame_pts=Module["_AVFrame_pts"]=wasmExports["xa"])(a0);var _AVFrame_ptshi=Module["_AVFrame_ptshi"]=a0=>(_AVFrame_ptshi=Module["_AVFrame_ptshi"]=wasmExports["ya"])(a0);var _AVFrame_pts_s=Module["_AVFrame_pts_s"]=(a0,a1)=>(_AVFrame_pts_s=Module["_AVFrame_pts_s"]=wasmExports["za"])(a0,a1);var _AVFrame_ptshi_s=Module["_AVFrame_ptshi_s"]=(a0,a1)=>(_AVFrame_ptshi_s=Module["_AVFrame_ptshi_s"]=wasmExports["Aa"])(a0,a1);var _AVFrame_sample_rate=Module["_AVFrame_sample_rate"]=a0=>(_AVFrame_sample_rate=Module["_AVFrame_sample_rate"]=wasmExports["Ba"])(a0);var _AVFrame_sample_rate_s=Module["_AVFrame_sample_rate_s"]=(a0,a1)=>(_AVFrame_sample_rate_s=Module["_AVFrame_sample_rate_s"]=wasmExports["Ca"])(a0,a1);var _AVFrame_width=Module["_AVFrame_width"]=a0=>(_AVFrame_width=Module["_AVFrame_width"]=wasmExports["Da"])(a0);var _AVFrame_width_s=Module["_AVFrame_width_s"]=(a0,a1)=>(_AVFrame_width_s=Module["_AVFrame_width_s"]=wasmExports["Ea"])(a0,a1);var 
// AVFrame rational fields (sample_aspect_ratio, time_base) expose num/den separately
// plus a combined `_s(ptr, num, den)` setter; channel layout has both the legacy
// 64-bit mask API (channel_layout/channel_layouthi, channel_layoutmask) and the new
// ch_layout_nb_channels API, mirroring FFmpeg's old/new channel-layout split.
_AVFrame_sample_aspect_ratio_num=Module["_AVFrame_sample_aspect_ratio_num"]=a0=>(_AVFrame_sample_aspect_ratio_num=Module["_AVFrame_sample_aspect_ratio_num"]=wasmExports["Fa"])(a0);var _AVFrame_sample_aspect_ratio_den=Module["_AVFrame_sample_aspect_ratio_den"]=a0=>(_AVFrame_sample_aspect_ratio_den=Module["_AVFrame_sample_aspect_ratio_den"]=wasmExports["Ga"])(a0);var _AVFrame_sample_aspect_ratio_num_s=Module["_AVFrame_sample_aspect_ratio_num_s"]=(a0,a1)=>(_AVFrame_sample_aspect_ratio_num_s=Module["_AVFrame_sample_aspect_ratio_num_s"]=wasmExports["Ha"])(a0,a1);var _AVFrame_sample_aspect_ratio_den_s=Module["_AVFrame_sample_aspect_ratio_den_s"]=(a0,a1)=>(_AVFrame_sample_aspect_ratio_den_s=Module["_AVFrame_sample_aspect_ratio_den_s"]=wasmExports["Ia"])(a0,a1);var _AVFrame_sample_aspect_ratio_s=Module["_AVFrame_sample_aspect_ratio_s"]=(a0,a1,a2)=>(_AVFrame_sample_aspect_ratio_s=Module["_AVFrame_sample_aspect_ratio_s"]=wasmExports["Ja"])(a0,a1,a2);var _AVFrame_time_base_num=Module["_AVFrame_time_base_num"]=a0=>(_AVFrame_time_base_num=Module["_AVFrame_time_base_num"]=wasmExports["Ka"])(a0);var _AVFrame_time_base_den=Module["_AVFrame_time_base_den"]=a0=>(_AVFrame_time_base_den=Module["_AVFrame_time_base_den"]=wasmExports["La"])(a0);var _AVFrame_time_base_num_s=Module["_AVFrame_time_base_num_s"]=(a0,a1)=>(_AVFrame_time_base_num_s=Module["_AVFrame_time_base_num_s"]=wasmExports["Ma"])(a0,a1);var _AVFrame_time_base_den_s=Module["_AVFrame_time_base_den_s"]=(a0,a1)=>(_AVFrame_time_base_den_s=Module["_AVFrame_time_base_den_s"]=wasmExports["Na"])(a0,a1);var _AVFrame_time_base_s=Module["_AVFrame_time_base_s"]=(a0,a1,a2)=>(_AVFrame_time_base_s=Module["_AVFrame_time_base_s"]=wasmExports["Oa"])(a0,a1,a2);var _AVFrame_channel_layoutmask_s=Module["_AVFrame_channel_layoutmask_s"]=(a0,a1,a2)=>(_AVFrame_channel_layoutmask_s=Module["_AVFrame_channel_layoutmask_s"]=wasmExports["Pa"])(a0,a1,a2);var 
_AVFrame_channel_layoutmask=Module["_AVFrame_channel_layoutmask"]=a0=>(_AVFrame_channel_layoutmask=Module["_AVFrame_channel_layoutmask"]=wasmExports["Qa"])(a0);var _AVFrame_channels=Module["_AVFrame_channels"]=a0=>(_AVFrame_channels=Module["_AVFrame_channels"]=wasmExports["Ra"])(a0);var _AVFrame_channels_s=Module["_AVFrame_channels_s"]=(a0,a1)=>(_AVFrame_channels_s=Module["_AVFrame_channels_s"]=wasmExports["Sa"])(a0,a1);var _AVFrame_ch_layout_nb_channels=Module["_AVFrame_ch_layout_nb_channels"]=a0=>(_AVFrame_ch_layout_nb_channels=Module["_AVFrame_ch_layout_nb_channels"]=wasmExports["Ta"])(a0);var _AVFrame_ch_layout_nb_channels_s=Module["_AVFrame_ch_layout_nb_channels_s"]=(a0,a1)=>(_AVFrame_ch_layout_nb_channels_s=Module["_AVFrame_ch_layout_nb_channels_s"]=wasmExports["Ua"])(a0,a1);var _AVFrame_channel_layout=Module["_AVFrame_channel_layout"]=a0=>(_AVFrame_channel_layout=Module["_AVFrame_channel_layout"]=wasmExports["Va"])(a0);var _AVFrame_channel_layouthi=Module["_AVFrame_channel_layouthi"]=a0=>(_AVFrame_channel_layouthi=Module["_AVFrame_channel_layouthi"]=wasmExports["Wa"])(a0);var _AVFrame_channel_layout_s=Module["_AVFrame_channel_layout_s"]=(a0,a1)=>(_AVFrame_channel_layout_s=Module["_AVFrame_channel_layout_s"]=wasmExports["Xa"])(a0,a1);var _AVFrame_channel_layouthi_s=Module["_AVFrame_channel_layouthi_s"]=(a0,a1)=>(_AVFrame_channel_layouthi_s=Module["_AVFrame_channel_layouthi_s"]=wasmExports["Ya"])(a0,a1);var _ff_frame_rescale_ts_js=Module["_ff_frame_rescale_ts_js"]=(a0,a1,a2,a3,a4)=>(_ff_frame_rescale_ts_js=Module["_ff_frame_rescale_ts_js"]=wasmExports["Za"])(a0,a1,a2,a3,a4);var _AVPixFmtDescriptor_flags=Module["_AVPixFmtDescriptor_flags"]=a0=>(_AVPixFmtDescriptor_flags=Module["_AVPixFmtDescriptor_flags"]=wasmExports["_a"])(a0);var _AVPixFmtDescriptor_flags_s=Module["_AVPixFmtDescriptor_flags_s"]=(a0,a1,a2)=>(_AVPixFmtDescriptor_flags_s=Module["_AVPixFmtDescriptor_flags_s"]=wasmExports["$a"])(a0,a1,a2);var 
_AVPixFmtDescriptor_nb_components=Module["_AVPixFmtDescriptor_nb_components"]=a0=>(_AVPixFmtDescriptor_nb_components=Module["_AVPixFmtDescriptor_nb_components"]=wasmExports["ab"])(a0);var _AVPixFmtDescriptor_nb_components_s=Module["_AVPixFmtDescriptor_nb_components_s"]=(a0,a1)=>(_AVPixFmtDescriptor_nb_components_s=Module["_AVPixFmtDescriptor_nb_components_s"]=wasmExports["bb"])(a0,a1);var _AVPixFmtDescriptor_log2_chroma_h=Module["_AVPixFmtDescriptor_log2_chroma_h"]=a0=>(_AVPixFmtDescriptor_log2_chroma_h=Module["_AVPixFmtDescriptor_log2_chroma_h"]=wasmExports["cb"])(a0);var _AVPixFmtDescriptor_log2_chroma_h_s=Module["_AVPixFmtDescriptor_log2_chroma_h_s"]=(a0,a1)=>(_AVPixFmtDescriptor_log2_chroma_h_s=Module["_AVPixFmtDescriptor_log2_chroma_h_s"]=wasmExports["db"])(a0,a1);var _AVPixFmtDescriptor_log2_chroma_w=Module["_AVPixFmtDescriptor_log2_chroma_w"]=a0=>(_AVPixFmtDescriptor_log2_chroma_w=Module["_AVPixFmtDescriptor_log2_chroma_w"]=wasmExports["eb"])(a0);var _AVPixFmtDescriptor_log2_chroma_w_s=Module["_AVPixFmtDescriptor_log2_chroma_w_s"]=(a0,a1)=>(_AVPixFmtDescriptor_log2_chroma_w_s=Module["_AVPixFmtDescriptor_log2_chroma_w_s"]=wasmExports["fb"])(a0,a1);var _AVPixFmtDescriptor_comp_depth=Module["_AVPixFmtDescriptor_comp_depth"]=(a0,a1)=>(_AVPixFmtDescriptor_comp_depth=Module["_AVPixFmtDescriptor_comp_depth"]=wasmExports["gb"])(a0,a1);var _av_opt_set_int_list_js=Module["_av_opt_set_int_list_js"]=(a0,a1,a2,a3,a4,a5)=>(_av_opt_set_int_list_js=Module["_av_opt_set_int_list_js"]=wasmExports["hb"])(a0,a1,a2,a3,a4,a5);var _AVCodec_name=Module["_AVCodec_name"]=a0=>(_AVCodec_name=Module["_AVCodec_name"]=wasmExports["ib"])(a0);var _AVCodec_sample_fmts=Module["_AVCodec_sample_fmts"]=a0=>(_AVCodec_sample_fmts=Module["_AVCodec_sample_fmts"]=wasmExports["jb"])(a0);var _AVCodec_sample_fmts_s=Module["_AVCodec_sample_fmts_s"]=(a0,a1)=>(_AVCodec_sample_fmts_s=Module["_AVCodec_sample_fmts_s"]=wasmExports["kb"])(a0,a1);var 
_AVCodec_sample_fmts_a=Module["_AVCodec_sample_fmts_a"]=(a0,a1)=>(_AVCodec_sample_fmts_a=Module["_AVCodec_sample_fmts_a"]=wasmExports["lb"])(a0,a1);var _AVCodec_sample_fmts_a_s=Module["_AVCodec_sample_fmts_a_s"]=(a0,a1,a2)=>(_AVCodec_sample_fmts_a_s=Module["_AVCodec_sample_fmts_a_s"]=wasmExports["mb"])(a0,a1,a2);var _AVCodec_supported_samplerates=Module["_AVCodec_supported_samplerates"]=a0=>(_AVCodec_supported_samplerates=Module["_AVCodec_supported_samplerates"]=wasmExports["nb"])(a0);var _AVCodec_supported_samplerates_s=Module["_AVCodec_supported_samplerates_s"]=(a0,a1)=>(_AVCodec_supported_samplerates_s=Module["_AVCodec_supported_samplerates_s"]=wasmExports["ob"])(a0,a1);var _AVCodec_supported_samplerates_a=Module["_AVCodec_supported_samplerates_a"]=(a0,a1)=>(_AVCodec_supported_samplerates_a=Module["_AVCodec_supported_samplerates_a"]=wasmExports["pb"])(a0,a1);var _AVCodec_supported_samplerates_a_s=Module["_AVCodec_supported_samplerates_a_s"]=(a0,a1,a2)=>(_AVCodec_supported_samplerates_a_s=Module["_AVCodec_supported_samplerates_a_s"]=wasmExports["qb"])(a0,a1,a2);var _AVCodec_type=Module["_AVCodec_type"]=a0=>(_AVCodec_type=Module["_AVCodec_type"]=wasmExports["rb"])(a0);var _AVCodec_type_s=Module["_AVCodec_type_s"]=(a0,a1)=>(_AVCodec_type_s=Module["_AVCodec_type_s"]=wasmExports["sb"])(a0,a1);var _AVCodecContext_codec_id=Module["_AVCodecContext_codec_id"]=a0=>(_AVCodecContext_codec_id=Module["_AVCodecContext_codec_id"]=wasmExports["tb"])(a0);var _AVCodecContext_codec_id_s=Module["_AVCodecContext_codec_id_s"]=(a0,a1)=>(_AVCodecContext_codec_id_s=Module["_AVCodecContext_codec_id_s"]=wasmExports["ub"])(a0,a1);var _AVCodecContext_codec_type=Module["_AVCodecContext_codec_type"]=a0=>(_AVCodecContext_codec_type=Module["_AVCodecContext_codec_type"]=wasmExports["vb"])(a0);var _AVCodecContext_codec_type_s=Module["_AVCodecContext_codec_type_s"]=(a0,a1)=>(_AVCodecContext_codec_type_s=Module["_AVCodecContext_codec_type_s"]=wasmExports["wb"])(a0,a1);var 
// AVCodecContext accessors: 64-bit fields (bit_rate, rc_max_rate, rc_min_rate,
// channel_layout) again use the lo/hi i32-pair convention.
_AVCodecContext_bit_rate=Module["_AVCodecContext_bit_rate"]=a0=>(_AVCodecContext_bit_rate=Module["_AVCodecContext_bit_rate"]=wasmExports["xb"])(a0);var _AVCodecContext_bit_ratehi=Module["_AVCodecContext_bit_ratehi"]=a0=>(_AVCodecContext_bit_ratehi=Module["_AVCodecContext_bit_ratehi"]=wasmExports["yb"])(a0);var _AVCodecContext_bit_rate_s=Module["_AVCodecContext_bit_rate_s"]=(a0,a1)=>(_AVCodecContext_bit_rate_s=Module["_AVCodecContext_bit_rate_s"]=wasmExports["zb"])(a0,a1);var _AVCodecContext_bit_ratehi_s=Module["_AVCodecContext_bit_ratehi_s"]=(a0,a1)=>(_AVCodecContext_bit_ratehi_s=Module["_AVCodecContext_bit_ratehi_s"]=wasmExports["Ab"])(a0,a1);var _AVCodecContext_extradata=Module["_AVCodecContext_extradata"]=a0=>(_AVCodecContext_extradata=Module["_AVCodecContext_extradata"]=wasmExports["Bb"])(a0);var _AVCodecContext_extradata_s=Module["_AVCodecContext_extradata_s"]=(a0,a1)=>(_AVCodecContext_extradata_s=Module["_AVCodecContext_extradata_s"]=wasmExports["Cb"])(a0,a1);var _AVCodecContext_extradata_size=Module["_AVCodecContext_extradata_size"]=a0=>(_AVCodecContext_extradata_size=Module["_AVCodecContext_extradata_size"]=wasmExports["Db"])(a0);var _AVCodecContext_extradata_size_s=Module["_AVCodecContext_extradata_size_s"]=(a0,a1)=>(_AVCodecContext_extradata_size_s=Module["_AVCodecContext_extradata_size_s"]=wasmExports["Eb"])(a0,a1);var _AVCodecContext_frame_size=Module["_AVCodecContext_frame_size"]=a0=>(_AVCodecContext_frame_size=Module["_AVCodecContext_frame_size"]=wasmExports["Fb"])(a0);var _AVCodecContext_frame_size_s=Module["_AVCodecContext_frame_size_s"]=(a0,a1)=>(_AVCodecContext_frame_size_s=Module["_AVCodecContext_frame_size_s"]=wasmExports["Gb"])(a0,a1);var _AVCodecContext_gop_size=Module["_AVCodecContext_gop_size"]=a0=>(_AVCodecContext_gop_size=Module["_AVCodecContext_gop_size"]=wasmExports["Hb"])(a0);var 
_AVCodecContext_gop_size_s=Module["_AVCodecContext_gop_size_s"]=(a0,a1)=>(_AVCodecContext_gop_size_s=Module["_AVCodecContext_gop_size_s"]=wasmExports["Ib"])(a0,a1);var _AVCodecContext_height=Module["_AVCodecContext_height"]=a0=>(_AVCodecContext_height=Module["_AVCodecContext_height"]=wasmExports["Jb"])(a0);var _AVCodecContext_height_s=Module["_AVCodecContext_height_s"]=(a0,a1)=>(_AVCodecContext_height_s=Module["_AVCodecContext_height_s"]=wasmExports["Kb"])(a0,a1);var _AVCodecContext_keyint_min=Module["_AVCodecContext_keyint_min"]=a0=>(_AVCodecContext_keyint_min=Module["_AVCodecContext_keyint_min"]=wasmExports["Lb"])(a0);var _AVCodecContext_keyint_min_s=Module["_AVCodecContext_keyint_min_s"]=(a0,a1)=>(_AVCodecContext_keyint_min_s=Module["_AVCodecContext_keyint_min_s"]=wasmExports["Mb"])(a0,a1);var _AVCodecContext_level=Module["_AVCodecContext_level"]=a0=>(_AVCodecContext_level=Module["_AVCodecContext_level"]=wasmExports["Nb"])(a0);var _AVCodecContext_level_s=Module["_AVCodecContext_level_s"]=(a0,a1)=>(_AVCodecContext_level_s=Module["_AVCodecContext_level_s"]=wasmExports["Ob"])(a0,a1);var _AVCodecContext_max_b_frames=Module["_AVCodecContext_max_b_frames"]=a0=>(_AVCodecContext_max_b_frames=Module["_AVCodecContext_max_b_frames"]=wasmExports["Pb"])(a0);var _AVCodecContext_max_b_frames_s=Module["_AVCodecContext_max_b_frames_s"]=(a0,a1)=>(_AVCodecContext_max_b_frames_s=Module["_AVCodecContext_max_b_frames_s"]=wasmExports["Qb"])(a0,a1);var _AVCodecContext_pix_fmt=Module["_AVCodecContext_pix_fmt"]=a0=>(_AVCodecContext_pix_fmt=Module["_AVCodecContext_pix_fmt"]=wasmExports["Rb"])(a0);var _AVCodecContext_pix_fmt_s=Module["_AVCodecContext_pix_fmt_s"]=(a0,a1)=>(_AVCodecContext_pix_fmt_s=Module["_AVCodecContext_pix_fmt_s"]=wasmExports["Sb"])(a0,a1);var _AVCodecContext_profile=Module["_AVCodecContext_profile"]=a0=>(_AVCodecContext_profile=Module["_AVCodecContext_profile"]=wasmExports["Tb"])(a0);var 
_AVCodecContext_profile_s=Module["_AVCodecContext_profile_s"]=(a0,a1)=>(_AVCodecContext_profile_s=Module["_AVCodecContext_profile_s"]=wasmExports["Ub"])(a0,a1);var _AVCodecContext_rc_max_rate=Module["_AVCodecContext_rc_max_rate"]=a0=>(_AVCodecContext_rc_max_rate=Module["_AVCodecContext_rc_max_rate"]=wasmExports["Vb"])(a0);var _AVCodecContext_rc_max_ratehi=Module["_AVCodecContext_rc_max_ratehi"]=a0=>(_AVCodecContext_rc_max_ratehi=Module["_AVCodecContext_rc_max_ratehi"]=wasmExports["Wb"])(a0);var _AVCodecContext_rc_max_rate_s=Module["_AVCodecContext_rc_max_rate_s"]=(a0,a1)=>(_AVCodecContext_rc_max_rate_s=Module["_AVCodecContext_rc_max_rate_s"]=wasmExports["Xb"])(a0,a1);var _AVCodecContext_rc_max_ratehi_s=Module["_AVCodecContext_rc_max_ratehi_s"]=(a0,a1)=>(_AVCodecContext_rc_max_ratehi_s=Module["_AVCodecContext_rc_max_ratehi_s"]=wasmExports["Yb"])(a0,a1);var _AVCodecContext_rc_min_rate=Module["_AVCodecContext_rc_min_rate"]=a0=>(_AVCodecContext_rc_min_rate=Module["_AVCodecContext_rc_min_rate"]=wasmExports["Zb"])(a0);var _AVCodecContext_rc_min_ratehi=Module["_AVCodecContext_rc_min_ratehi"]=a0=>(_AVCodecContext_rc_min_ratehi=Module["_AVCodecContext_rc_min_ratehi"]=wasmExports["_b"])(a0);var _AVCodecContext_rc_min_rate_s=Module["_AVCodecContext_rc_min_rate_s"]=(a0,a1)=>(_AVCodecContext_rc_min_rate_s=Module["_AVCodecContext_rc_min_rate_s"]=wasmExports["$b"])(a0,a1);var _AVCodecContext_rc_min_ratehi_s=Module["_AVCodecContext_rc_min_ratehi_s"]=(a0,a1)=>(_AVCodecContext_rc_min_ratehi_s=Module["_AVCodecContext_rc_min_ratehi_s"]=wasmExports["ac"])(a0,a1);var _AVCodecContext_sample_fmt=Module["_AVCodecContext_sample_fmt"]=a0=>(_AVCodecContext_sample_fmt=Module["_AVCodecContext_sample_fmt"]=wasmExports["bc"])(a0);var _AVCodecContext_sample_fmt_s=Module["_AVCodecContext_sample_fmt_s"]=(a0,a1)=>(_AVCodecContext_sample_fmt_s=Module["_AVCodecContext_sample_fmt_s"]=wasmExports["cc"])(a0,a1);var 
_AVCodecContext_sample_rate=Module["_AVCodecContext_sample_rate"]=a0=>(_AVCodecContext_sample_rate=Module["_AVCodecContext_sample_rate"]=wasmExports["dc"])(a0);var _AVCodecContext_sample_rate_s=Module["_AVCodecContext_sample_rate_s"]=(a0,a1)=>(_AVCodecContext_sample_rate_s=Module["_AVCodecContext_sample_rate_s"]=wasmExports["ec"])(a0,a1);var _AVCodecContext_qmax=Module["_AVCodecContext_qmax"]=a0=>(_AVCodecContext_qmax=Module["_AVCodecContext_qmax"]=wasmExports["fc"])(a0);var _AVCodecContext_qmax_s=Module["_AVCodecContext_qmax_s"]=(a0,a1)=>(_AVCodecContext_qmax_s=Module["_AVCodecContext_qmax_s"]=wasmExports["gc"])(a0,a1);var _AVCodecContext_qmin=Module["_AVCodecContext_qmin"]=a0=>(_AVCodecContext_qmin=Module["_AVCodecContext_qmin"]=wasmExports["hc"])(a0);var _AVCodecContext_qmin_s=Module["_AVCodecContext_qmin_s"]=(a0,a1)=>(_AVCodecContext_qmin_s=Module["_AVCodecContext_qmin_s"]=wasmExports["ic"])(a0,a1);var _AVCodecContext_width=Module["_AVCodecContext_width"]=a0=>(_AVCodecContext_width=Module["_AVCodecContext_width"]=wasmExports["jc"])(a0);var _AVCodecContext_width_s=Module["_AVCodecContext_width_s"]=(a0,a1)=>(_AVCodecContext_width_s=Module["_AVCodecContext_width_s"]=wasmExports["kc"])(a0,a1);var _AVCodecContext_framerate_num=Module["_AVCodecContext_framerate_num"]=a0=>(_AVCodecContext_framerate_num=Module["_AVCodecContext_framerate_num"]=wasmExports["lc"])(a0);var _AVCodecContext_framerate_den=Module["_AVCodecContext_framerate_den"]=a0=>(_AVCodecContext_framerate_den=Module["_AVCodecContext_framerate_den"]=wasmExports["mc"])(a0);var _AVCodecContext_framerate_num_s=Module["_AVCodecContext_framerate_num_s"]=(a0,a1)=>(_AVCodecContext_framerate_num_s=Module["_AVCodecContext_framerate_num_s"]=wasmExports["nc"])(a0,a1);var _AVCodecContext_framerate_den_s=Module["_AVCodecContext_framerate_den_s"]=(a0,a1)=>(_AVCodecContext_framerate_den_s=Module["_AVCodecContext_framerate_den_s"]=wasmExports["oc"])(a0,a1);var 
_AVCodecContext_framerate_s=Module["_AVCodecContext_framerate_s"]=(a0,a1,a2)=>(_AVCodecContext_framerate_s=Module["_AVCodecContext_framerate_s"]=wasmExports["pc"])(a0,a1,a2);var _AVCodecContext_sample_aspect_ratio_num=Module["_AVCodecContext_sample_aspect_ratio_num"]=a0=>(_AVCodecContext_sample_aspect_ratio_num=Module["_AVCodecContext_sample_aspect_ratio_num"]=wasmExports["qc"])(a0);var _AVCodecContext_sample_aspect_ratio_den=Module["_AVCodecContext_sample_aspect_ratio_den"]=a0=>(_AVCodecContext_sample_aspect_ratio_den=Module["_AVCodecContext_sample_aspect_ratio_den"]=wasmExports["rc"])(a0);var _AVCodecContext_sample_aspect_ratio_num_s=Module["_AVCodecContext_sample_aspect_ratio_num_s"]=(a0,a1)=>(_AVCodecContext_sample_aspect_ratio_num_s=Module["_AVCodecContext_sample_aspect_ratio_num_s"]=wasmExports["sc"])(a0,a1);var _AVCodecContext_sample_aspect_ratio_den_s=Module["_AVCodecContext_sample_aspect_ratio_den_s"]=(a0,a1)=>(_AVCodecContext_sample_aspect_ratio_den_s=Module["_AVCodecContext_sample_aspect_ratio_den_s"]=wasmExports["tc"])(a0,a1);var _AVCodecContext_sample_aspect_ratio_s=Module["_AVCodecContext_sample_aspect_ratio_s"]=(a0,a1,a2)=>(_AVCodecContext_sample_aspect_ratio_s=Module["_AVCodecContext_sample_aspect_ratio_s"]=wasmExports["uc"])(a0,a1,a2);var _AVCodecContext_time_base_num=Module["_AVCodecContext_time_base_num"]=a0=>(_AVCodecContext_time_base_num=Module["_AVCodecContext_time_base_num"]=wasmExports["vc"])(a0);var _AVCodecContext_time_base_den=Module["_AVCodecContext_time_base_den"]=a0=>(_AVCodecContext_time_base_den=Module["_AVCodecContext_time_base_den"]=wasmExports["wc"])(a0);var _AVCodecContext_time_base_num_s=Module["_AVCodecContext_time_base_num_s"]=(a0,a1)=>(_AVCodecContext_time_base_num_s=Module["_AVCodecContext_time_base_num_s"]=wasmExports["xc"])(a0,a1);var _AVCodecContext_time_base_den_s=Module["_AVCodecContext_time_base_den_s"]=(a0,a1)=>(_AVCodecContext_time_base_den_s=Module["_AVCodecContext_time_base_den_s"]=wasmExports["yc"])(a0,a1);var 
_AVCodecContext_time_base_s=Module["_AVCodecContext_time_base_s"]=(a0,a1,a2)=>(_AVCodecContext_time_base_s=Module["_AVCodecContext_time_base_s"]=wasmExports["zc"])(a0,a1,a2);var _AVCodecContext_channel_layoutmask_s=Module["_AVCodecContext_channel_layoutmask_s"]=(a0,a1,a2)=>(_AVCodecContext_channel_layoutmask_s=Module["_AVCodecContext_channel_layoutmask_s"]=wasmExports["Ac"])(a0,a1,a2);var _AVCodecContext_channel_layoutmask=Module["_AVCodecContext_channel_layoutmask"]=a0=>(_AVCodecContext_channel_layoutmask=Module["_AVCodecContext_channel_layoutmask"]=wasmExports["Bc"])(a0);var _AVCodecContext_channels=Module["_AVCodecContext_channels"]=a0=>(_AVCodecContext_channels=Module["_AVCodecContext_channels"]=wasmExports["Cc"])(a0);var _AVCodecContext_channels_s=Module["_AVCodecContext_channels_s"]=(a0,a1)=>(_AVCodecContext_channels_s=Module["_AVCodecContext_channels_s"]=wasmExports["Dc"])(a0,a1);var _AVCodecContext_ch_layout_nb_channels=Module["_AVCodecContext_ch_layout_nb_channels"]=a0=>(_AVCodecContext_ch_layout_nb_channels=Module["_AVCodecContext_ch_layout_nb_channels"]=wasmExports["Ec"])(a0);var _AVCodecContext_ch_layout_nb_channels_s=Module["_AVCodecContext_ch_layout_nb_channels_s"]=(a0,a1)=>(_AVCodecContext_ch_layout_nb_channels_s=Module["_AVCodecContext_ch_layout_nb_channels_s"]=wasmExports["Fc"])(a0,a1);var _AVCodecContext_channel_layout=Module["_AVCodecContext_channel_layout"]=a0=>(_AVCodecContext_channel_layout=Module["_AVCodecContext_channel_layout"]=wasmExports["Gc"])(a0);var _AVCodecContext_channel_layouthi=Module["_AVCodecContext_channel_layouthi"]=a0=>(_AVCodecContext_channel_layouthi=Module["_AVCodecContext_channel_layouthi"]=wasmExports["Hc"])(a0);var _AVCodecContext_channel_layout_s=Module["_AVCodecContext_channel_layout_s"]=(a0,a1)=>(_AVCodecContext_channel_layout_s=Module["_AVCodecContext_channel_layout_s"]=wasmExports["Ic"])(a0,a1);var 
// AVCodecDescriptor / AVCodecParameters accessors follow the same generated pattern.
_AVCodecContext_channel_layouthi_s=Module["_AVCodecContext_channel_layouthi_s"]=(a0,a1)=>(_AVCodecContext_channel_layouthi_s=Module["_AVCodecContext_channel_layouthi_s"]=wasmExports["Jc"])(a0,a1);var _AVCodecDescriptor_id=Module["_AVCodecDescriptor_id"]=a0=>(_AVCodecDescriptor_id=Module["_AVCodecDescriptor_id"]=wasmExports["Kc"])(a0);var _AVCodecDescriptor_id_s=Module["_AVCodecDescriptor_id_s"]=(a0,a1)=>(_AVCodecDescriptor_id_s=Module["_AVCodecDescriptor_id_s"]=wasmExports["Lc"])(a0,a1);var _AVCodecDescriptor_long_name=Module["_AVCodecDescriptor_long_name"]=a0=>(_AVCodecDescriptor_long_name=Module["_AVCodecDescriptor_long_name"]=wasmExports["Mc"])(a0);var _AVCodecDescriptor_long_name_s=Module["_AVCodecDescriptor_long_name_s"]=(a0,a1)=>(_AVCodecDescriptor_long_name_s=Module["_AVCodecDescriptor_long_name_s"]=wasmExports["Nc"])(a0,a1);var _AVCodecDescriptor_mime_types_a=Module["_AVCodecDescriptor_mime_types_a"]=(a0,a1)=>(_AVCodecDescriptor_mime_types_a=Module["_AVCodecDescriptor_mime_types_a"]=wasmExports["Oc"])(a0,a1);var _AVCodecDescriptor_mime_types_a_s=Module["_AVCodecDescriptor_mime_types_a_s"]=(a0,a1,a2)=>(_AVCodecDescriptor_mime_types_a_s=Module["_AVCodecDescriptor_mime_types_a_s"]=wasmExports["Pc"])(a0,a1,a2);var _AVCodecDescriptor_name=Module["_AVCodecDescriptor_name"]=a0=>(_AVCodecDescriptor_name=Module["_AVCodecDescriptor_name"]=wasmExports["Qc"])(a0);var _AVCodecDescriptor_name_s=Module["_AVCodecDescriptor_name_s"]=(a0,a1)=>(_AVCodecDescriptor_name_s=Module["_AVCodecDescriptor_name_s"]=wasmExports["Rc"])(a0,a1);var _AVCodecDescriptor_props=Module["_AVCodecDescriptor_props"]=a0=>(_AVCodecDescriptor_props=Module["_AVCodecDescriptor_props"]=wasmExports["Sc"])(a0);var _AVCodecDescriptor_props_s=Module["_AVCodecDescriptor_props_s"]=(a0,a1)=>(_AVCodecDescriptor_props_s=Module["_AVCodecDescriptor_props_s"]=wasmExports["Tc"])(a0,a1);var 
_AVCodecDescriptor_type=Module["_AVCodecDescriptor_type"]=a0=>(_AVCodecDescriptor_type=Module["_AVCodecDescriptor_type"]=wasmExports["Uc"])(a0);var _AVCodecDescriptor_type_s=Module["_AVCodecDescriptor_type_s"]=(a0,a1)=>(_AVCodecDescriptor_type_s=Module["_AVCodecDescriptor_type_s"]=wasmExports["Vc"])(a0,a1);var _AVCodecParameters_codec_id=Module["_AVCodecParameters_codec_id"]=a0=>(_AVCodecParameters_codec_id=Module["_AVCodecParameters_codec_id"]=wasmExports["Wc"])(a0);var _AVCodecParameters_codec_id_s=Module["_AVCodecParameters_codec_id_s"]=(a0,a1)=>(_AVCodecParameters_codec_id_s=Module["_AVCodecParameters_codec_id_s"]=wasmExports["Xc"])(a0,a1);var _AVCodecParameters_codec_tag=Module["_AVCodecParameters_codec_tag"]=a0=>(_AVCodecParameters_codec_tag=Module["_AVCodecParameters_codec_tag"]=wasmExports["Yc"])(a0);var _AVCodecParameters_codec_tag_s=Module["_AVCodecParameters_codec_tag_s"]=(a0,a1)=>(_AVCodecParameters_codec_tag_s=Module["_AVCodecParameters_codec_tag_s"]=wasmExports["Zc"])(a0,a1);var _AVCodecParameters_codec_type=Module["_AVCodecParameters_codec_type"]=a0=>(_AVCodecParameters_codec_type=Module["_AVCodecParameters_codec_type"]=wasmExports["_c"])(a0);var _AVCodecParameters_codec_type_s=Module["_AVCodecParameters_codec_type_s"]=(a0,a1)=>(_AVCodecParameters_codec_type_s=Module["_AVCodecParameters_codec_type_s"]=wasmExports["$c"])(a0,a1);var _AVCodecParameters_extradata=Module["_AVCodecParameters_extradata"]=a0=>(_AVCodecParameters_extradata=Module["_AVCodecParameters_extradata"]=wasmExports["ad"])(a0);var _AVCodecParameters_extradata_s=Module["_AVCodecParameters_extradata_s"]=(a0,a1)=>(_AVCodecParameters_extradata_s=Module["_AVCodecParameters_extradata_s"]=wasmExports["bd"])(a0,a1);var _AVCodecParameters_extradata_size=Module["_AVCodecParameters_extradata_size"]=a0=>(_AVCodecParameters_extradata_size=Module["_AVCodecParameters_extradata_size"]=wasmExports["cd"])(a0);var 
_AVCodecParameters_extradata_size_s=Module["_AVCodecParameters_extradata_size_s"]=(a0,a1)=>(_AVCodecParameters_extradata_size_s=Module["_AVCodecParameters_extradata_size_s"]=wasmExports["dd"])(a0,a1);var _AVCodecParameters_format=Module["_AVCodecParameters_format"]=a0=>(_AVCodecParameters_format=Module["_AVCodecParameters_format"]=wasmExports["ed"])(a0);var _AVCodecParameters_format_s=Module["_AVCodecParameters_format_s"]=(a0,a1)=>(_AVCodecParameters_format_s=Module["_AVCodecParameters_format_s"]=wasmExports["fd"])(a0,a1);var _AVCodecParameters_bit_rate=Module["_AVCodecParameters_bit_rate"]=a0=>(_AVCodecParameters_bit_rate=Module["_AVCodecParameters_bit_rate"]=wasmExports["gd"])(a0);var _AVCodecParameters_bit_rate_s=Module["_AVCodecParameters_bit_rate_s"]=(a0,a1,a2)=>(_AVCodecParameters_bit_rate_s=Module["_AVCodecParameters_bit_rate_s"]=wasmExports["hd"])(a0,a1,a2);var _AVCodecParameters_profile=Module["_AVCodecParameters_profile"]=a0=>(_AVCodecParameters_profile=Module["_AVCodecParameters_profile"]=wasmExports["id"])(a0);var _AVCodecParameters_profile_s=Module["_AVCodecParameters_profile_s"]=(a0,a1)=>(_AVCodecParameters_profile_s=Module["_AVCodecParameters_profile_s"]=wasmExports["jd"])(a0,a1);var _AVCodecParameters_level=Module["_AVCodecParameters_level"]=a0=>(_AVCodecParameters_level=Module["_AVCodecParameters_level"]=wasmExports["kd"])(a0);var _AVCodecParameters_level_s=Module["_AVCodecParameters_level_s"]=(a0,a1)=>(_AVCodecParameters_level_s=Module["_AVCodecParameters_level_s"]=wasmExports["ld"])(a0,a1);var _AVCodecParameters_width=Module["_AVCodecParameters_width"]=a0=>(_AVCodecParameters_width=Module["_AVCodecParameters_width"]=wasmExports["md"])(a0);var _AVCodecParameters_width_s=Module["_AVCodecParameters_width_s"]=(a0,a1)=>(_AVCodecParameters_width_s=Module["_AVCodecParameters_width_s"]=wasmExports["nd"])(a0,a1);var 
_AVCodecParameters_height=Module["_AVCodecParameters_height"]=a0=>(_AVCodecParameters_height=Module["_AVCodecParameters_height"]=wasmExports["od"])(a0);var _AVCodecParameters_height_s=Module["_AVCodecParameters_height_s"]=(a0,a1)=>(_AVCodecParameters_height_s=Module["_AVCodecParameters_height_s"]=wasmExports["pd"])(a0,a1);var _AVCodecParameters_color_range=Module["_AVCodecParameters_color_range"]=a0=>(_AVCodecParameters_color_range=Module["_AVCodecParameters_color_range"]=wasmExports["qd"])(a0);var _AVCodecParameters_color_range_s=Module["_AVCodecParameters_color_range_s"]=(a0,a1)=>(_AVCodecParameters_color_range_s=Module["_AVCodecParameters_color_range_s"]=wasmExports["rd"])(a0,a1);var _AVCodecParameters_color_primaries=Module["_AVCodecParameters_color_primaries"]=a0=>(_AVCodecParameters_color_primaries=Module["_AVCodecParameters_color_primaries"]=wasmExports["sd"])(a0);var _AVCodecParameters_color_primaries_s=Module["_AVCodecParameters_color_primaries_s"]=(a0,a1)=>(_AVCodecParameters_color_primaries_s=Module["_AVCodecParameters_color_primaries_s"]=wasmExports["td"])(a0,a1);var _AVCodecParameters_color_trc=Module["_AVCodecParameters_color_trc"]=a0=>(_AVCodecParameters_color_trc=Module["_AVCodecParameters_color_trc"]=wasmExports["ud"])(a0);var _AVCodecParameters_color_trc_s=Module["_AVCodecParameters_color_trc_s"]=(a0,a1)=>(_AVCodecParameters_color_trc_s=Module["_AVCodecParameters_color_trc_s"]=wasmExports["vd"])(a0,a1);var _AVCodecParameters_color_space=Module["_AVCodecParameters_color_space"]=a0=>(_AVCodecParameters_color_space=Module["_AVCodecParameters_color_space"]=wasmExports["wd"])(a0);var _AVCodecParameters_color_space_s=Module["_AVCodecParameters_color_space_s"]=(a0,a1)=>(_AVCodecParameters_color_space_s=Module["_AVCodecParameters_color_space_s"]=wasmExports["xd"])(a0,a1);var 
_AVCodecParameters_chroma_location=Module["_AVCodecParameters_chroma_location"]=a0=>(_AVCodecParameters_chroma_location=Module["_AVCodecParameters_chroma_location"]=wasmExports["yd"])(a0);var _AVCodecParameters_chroma_location_s=Module["_AVCodecParameters_chroma_location_s"]=(a0,a1)=>(_AVCodecParameters_chroma_location_s=Module["_AVCodecParameters_chroma_location_s"]=wasmExports["zd"])(a0,a1);var _AVCodecParameters_sample_rate=Module["_AVCodecParameters_sample_rate"]=a0=>(_AVCodecParameters_sample_rate=Module["_AVCodecParameters_sample_rate"]=wasmExports["Ad"])(a0);var _AVCodecParameters_sample_rate_s=Module["_AVCodecParameters_sample_rate_s"]=(a0,a1)=>(_AVCodecParameters_sample_rate_s=Module["_AVCodecParameters_sample_rate_s"]=wasmExports["Bd"])(a0,a1);var _AVCodecParameters_framerate_num=Module["_AVCodecParameters_framerate_num"]=a0=>(_AVCodecParameters_framerate_num=Module["_AVCodecParameters_framerate_num"]=wasmExports["Cd"])(a0);var _AVCodecParameters_framerate_den=Module["_AVCodecParameters_framerate_den"]=a0=>(_AVCodecParameters_framerate_den=Module["_AVCodecParameters_framerate_den"]=wasmExports["Dd"])(a0);var _AVCodecParameters_framerate_num_s=Module["_AVCodecParameters_framerate_num_s"]=(a0,a1)=>(_AVCodecParameters_framerate_num_s=Module["_AVCodecParameters_framerate_num_s"]=wasmExports["Ed"])(a0,a1);var _AVCodecParameters_framerate_den_s=Module["_AVCodecParameters_framerate_den_s"]=(a0,a1)=>(_AVCodecParameters_framerate_den_s=Module["_AVCodecParameters_framerate_den_s"]=wasmExports["Fd"])(a0,a1);var _AVCodecParameters_framerate_s=Module["_AVCodecParameters_framerate_s"]=(a0,a1,a2)=>(_AVCodecParameters_framerate_s=Module["_AVCodecParameters_framerate_s"]=wasmExports["Gd"])(a0,a1,a2);var _AVCodecParameters_channel_layoutmask_s=Module["_AVCodecParameters_channel_layoutmask_s"]=(a0,a1,a2)=>(_AVCodecParameters_channel_layoutmask_s=Module["_AVCodecParameters_channel_layoutmask_s"]=wasmExports["Hd"])(a0,a1,a2);var 
_AVCodecParameters_channel_layoutmask=Module["_AVCodecParameters_channel_layoutmask"]=a0=>(_AVCodecParameters_channel_layoutmask=Module["_AVCodecParameters_channel_layoutmask"]=wasmExports["Id"])(a0);var _AVCodecParameters_channels=Module["_AVCodecParameters_channels"]=a0=>(_AVCodecParameters_channels=Module["_AVCodecParameters_channels"]=wasmExports["Jd"])(a0);var _AVCodecParameters_channels_s=Module["_AVCodecParameters_channels_s"]=(a0,a1)=>(_AVCodecParameters_channels_s=Module["_AVCodecParameters_channels_s"]=wasmExports["Kd"])(a0,a1);var _AVCodecParameters_ch_layout_nb_channels=Module["_AVCodecParameters_ch_layout_nb_channels"]=a0=>(_AVCodecParameters_ch_layout_nb_channels=Module["_AVCodecParameters_ch_layout_nb_channels"]=wasmExports["Ld"])(a0);var _AVCodecParameters_ch_layout_nb_channels_s=Module["_AVCodecParameters_ch_layout_nb_channels_s"]=(a0,a1)=>(_AVCodecParameters_ch_layout_nb_channels_s=Module["_AVCodecParameters_ch_layout_nb_channels_s"]=wasmExports["Md"])(a0,a1);var _AVPacket_data=Module["_AVPacket_data"]=a0=>(_AVPacket_data=Module["_AVPacket_data"]=wasmExports["Nd"])(a0);var _AVPacket_data_s=Module["_AVPacket_data_s"]=(a0,a1)=>(_AVPacket_data_s=Module["_AVPacket_data_s"]=wasmExports["Od"])(a0,a1);var _AVPacket_dts=Module["_AVPacket_dts"]=a0=>(_AVPacket_dts=Module["_AVPacket_dts"]=wasmExports["Pd"])(a0);var _AVPacket_dtshi=Module["_AVPacket_dtshi"]=a0=>(_AVPacket_dtshi=Module["_AVPacket_dtshi"]=wasmExports["Qd"])(a0);var _AVPacket_dts_s=Module["_AVPacket_dts_s"]=(a0,a1)=>(_AVPacket_dts_s=Module["_AVPacket_dts_s"]=wasmExports["Rd"])(a0,a1);var _AVPacket_dtshi_s=Module["_AVPacket_dtshi_s"]=(a0,a1)=>(_AVPacket_dtshi_s=Module["_AVPacket_dtshi_s"]=wasmExports["Sd"])(a0,a1);var _AVPacket_duration=Module["_AVPacket_duration"]=a0=>(_AVPacket_duration=Module["_AVPacket_duration"]=wasmExports["Td"])(a0);var _AVPacket_durationhi=Module["_AVPacket_durationhi"]=a0=>(_AVPacket_durationhi=Module["_AVPacket_durationhi"]=wasmExports["Ud"])(a0);var 
_AVPacket_duration_s=Module["_AVPacket_duration_s"]=(a0,a1)=>(_AVPacket_duration_s=Module["_AVPacket_duration_s"]=wasmExports["Vd"])(a0,a1);var _AVPacket_durationhi_s=Module["_AVPacket_durationhi_s"]=(a0,a1)=>(_AVPacket_durationhi_s=Module["_AVPacket_durationhi_s"]=wasmExports["Wd"])(a0,a1);var _AVPacket_flags=Module["_AVPacket_flags"]=a0=>(_AVPacket_flags=Module["_AVPacket_flags"]=wasmExports["Xd"])(a0);var _AVPacket_flags_s=Module["_AVPacket_flags_s"]=(a0,a1)=>(_AVPacket_flags_s=Module["_AVPacket_flags_s"]=wasmExports["Yd"])(a0,a1);var _AVPacket_pos=Module["_AVPacket_pos"]=a0=>(_AVPacket_pos=Module["_AVPacket_pos"]=wasmExports["Zd"])(a0);var _AVPacket_poshi=Module["_AVPacket_poshi"]=a0=>(_AVPacket_poshi=Module["_AVPacket_poshi"]=wasmExports["_d"])(a0);var _AVPacket_pos_s=Module["_AVPacket_pos_s"]=(a0,a1)=>(_AVPacket_pos_s=Module["_AVPacket_pos_s"]=wasmExports["$d"])(a0,a1);var _AVPacket_poshi_s=Module["_AVPacket_poshi_s"]=(a0,a1)=>(_AVPacket_poshi_s=Module["_AVPacket_poshi_s"]=wasmExports["ae"])(a0,a1);var _AVPacket_pts=Module["_AVPacket_pts"]=a0=>(_AVPacket_pts=Module["_AVPacket_pts"]=wasmExports["be"])(a0);var _AVPacket_ptshi=Module["_AVPacket_ptshi"]=a0=>(_AVPacket_ptshi=Module["_AVPacket_ptshi"]=wasmExports["ce"])(a0);var _AVPacket_pts_s=Module["_AVPacket_pts_s"]=(a0,a1)=>(_AVPacket_pts_s=Module["_AVPacket_pts_s"]=wasmExports["de"])(a0,a1);var _AVPacket_ptshi_s=Module["_AVPacket_ptshi_s"]=(a0,a1)=>(_AVPacket_ptshi_s=Module["_AVPacket_ptshi_s"]=wasmExports["ee"])(a0,a1);var _AVPacket_side_data=Module["_AVPacket_side_data"]=a0=>(_AVPacket_side_data=Module["_AVPacket_side_data"]=wasmExports["fe"])(a0);var _AVPacket_side_data_s=Module["_AVPacket_side_data_s"]=(a0,a1)=>(_AVPacket_side_data_s=Module["_AVPacket_side_data_s"]=wasmExports["ge"])(a0,a1);var _AVPacket_side_data_elems=Module["_AVPacket_side_data_elems"]=a0=>(_AVPacket_side_data_elems=Module["_AVPacket_side_data_elems"]=wasmExports["he"])(a0);var 
_AVPacket_side_data_elems_s=Module["_AVPacket_side_data_elems_s"]=(a0,a1)=>(_AVPacket_side_data_elems_s=Module["_AVPacket_side_data_elems_s"]=wasmExports["ie"])(a0,a1);var _AVPacket_size=Module["_AVPacket_size"]=a0=>(_AVPacket_size=Module["_AVPacket_size"]=wasmExports["je"])(a0);var _AVPacket_size_s=Module["_AVPacket_size_s"]=(a0,a1)=>(_AVPacket_size_s=Module["_AVPacket_size_s"]=wasmExports["ke"])(a0,a1);var _AVPacket_stream_index=Module["_AVPacket_stream_index"]=a0=>(_AVPacket_stream_index=Module["_AVPacket_stream_index"]=wasmExports["le"])(a0);var _AVPacket_stream_index_s=Module["_AVPacket_stream_index_s"]=(a0,a1)=>(_AVPacket_stream_index_s=Module["_AVPacket_stream_index_s"]=wasmExports["me"])(a0,a1);var _AVPacket_time_base_num=Module["_AVPacket_time_base_num"]=a0=>(_AVPacket_time_base_num=Module["_AVPacket_time_base_num"]=wasmExports["ne"])(a0);var _AVPacket_time_base_den=Module["_AVPacket_time_base_den"]=a0=>(_AVPacket_time_base_den=Module["_AVPacket_time_base_den"]=wasmExports["oe"])(a0);var _AVPacket_time_base_num_s=Module["_AVPacket_time_base_num_s"]=(a0,a1)=>(_AVPacket_time_base_num_s=Module["_AVPacket_time_base_num_s"]=wasmExports["pe"])(a0,a1);var _AVPacket_time_base_den_s=Module["_AVPacket_time_base_den_s"]=(a0,a1)=>(_AVPacket_time_base_den_s=Module["_AVPacket_time_base_den_s"]=wasmExports["qe"])(a0,a1);var _AVPacket_time_base_s=Module["_AVPacket_time_base_s"]=(a0,a1,a2)=>(_AVPacket_time_base_s=Module["_AVPacket_time_base_s"]=wasmExports["re"])(a0,a1,a2);var _AVPacketSideData_data=Module["_AVPacketSideData_data"]=(a0,a1)=>(_AVPacketSideData_data=Module["_AVPacketSideData_data"]=wasmExports["se"])(a0,a1);var _AVPacketSideData_size=Module["_AVPacketSideData_size"]=(a0,a1)=>(_AVPacketSideData_size=Module["_AVPacketSideData_size"]=wasmExports["te"])(a0,a1);var _AVPacketSideData_type=Module["_AVPacketSideData_type"]=(a0,a1)=>(_AVPacketSideData_type=Module["_AVPacketSideData_type"]=wasmExports["ue"])(a0,a1);var 
_avcodec_open2_js=Module["_avcodec_open2_js"]=(a0,a1,a2)=>(_avcodec_open2_js=Module["_avcodec_open2_js"]=wasmExports["ve"])(a0,a1,a2);var _avcodec_open2=Module["_avcodec_open2"]=(a0,a1,a2)=>(_avcodec_open2=Module["_avcodec_open2"]=wasmExports["we"])(a0,a1,a2);var _av_packet_rescale_ts_js=Module["_av_packet_rescale_ts_js"]=(a0,a1,a2,a3,a4)=>(_av_packet_rescale_ts_js=Module["_av_packet_rescale_ts_js"]=wasmExports["xe"])(a0,a1,a2,a3,a4);var _AVFormatContext_duration=Module["_AVFormatContext_duration"]=a0=>(_AVFormatContext_duration=Module["_AVFormatContext_duration"]=wasmExports["ye"])(a0);var _AVFormatContext_durationhi=Module["_AVFormatContext_durationhi"]=a0=>(_AVFormatContext_durationhi=Module["_AVFormatContext_durationhi"]=wasmExports["ze"])(a0);var _AVFormatContext_duration_s=Module["_AVFormatContext_duration_s"]=(a0,a1)=>(_AVFormatContext_duration_s=Module["_AVFormatContext_duration_s"]=wasmExports["Ae"])(a0,a1);var _AVFormatContext_durationhi_s=Module["_AVFormatContext_durationhi_s"]=(a0,a1)=>(_AVFormatContext_durationhi_s=Module["_AVFormatContext_durationhi_s"]=wasmExports["Be"])(a0,a1);var _AVFormatContext_flags=Module["_AVFormatContext_flags"]=a0=>(_AVFormatContext_flags=Module["_AVFormatContext_flags"]=wasmExports["Ce"])(a0);var _AVFormatContext_flags_s=Module["_AVFormatContext_flags_s"]=(a0,a1)=>(_AVFormatContext_flags_s=Module["_AVFormatContext_flags_s"]=wasmExports["De"])(a0,a1);var _AVFormatContext_nb_streams=Module["_AVFormatContext_nb_streams"]=a0=>(_AVFormatContext_nb_streams=Module["_AVFormatContext_nb_streams"]=wasmExports["Ee"])(a0);var _AVFormatContext_nb_streams_s=Module["_AVFormatContext_nb_streams_s"]=(a0,a1)=>(_AVFormatContext_nb_streams_s=Module["_AVFormatContext_nb_streams_s"]=wasmExports["Fe"])(a0,a1);var _AVFormatContext_oformat=Module["_AVFormatContext_oformat"]=a0=>(_AVFormatContext_oformat=Module["_AVFormatContext_oformat"]=wasmExports["Ge"])(a0);var 
_AVFormatContext_oformat_s=Module["_AVFormatContext_oformat_s"]=(a0,a1)=>(_AVFormatContext_oformat_s=Module["_AVFormatContext_oformat_s"]=wasmExports["He"])(a0,a1);var _AVFormatContext_pb=Module["_AVFormatContext_pb"]=a0=>(_AVFormatContext_pb=Module["_AVFormatContext_pb"]=wasmExports["Ie"])(a0);var _AVFormatContext_pb_s=Module["_AVFormatContext_pb_s"]=(a0,a1)=>(_AVFormatContext_pb_s=Module["_AVFormatContext_pb_s"]=wasmExports["Je"])(a0,a1);var _AVFormatContext_start_time=Module["_AVFormatContext_start_time"]=a0=>(_AVFormatContext_start_time=Module["_AVFormatContext_start_time"]=wasmExports["Ke"])(a0);var _AVFormatContext_start_timehi=Module["_AVFormatContext_start_timehi"]=a0=>(_AVFormatContext_start_timehi=Module["_AVFormatContext_start_timehi"]=wasmExports["Le"])(a0);var _AVFormatContext_start_time_s=Module["_AVFormatContext_start_time_s"]=(a0,a1)=>(_AVFormatContext_start_time_s=Module["_AVFormatContext_start_time_s"]=wasmExports["Me"])(a0,a1);var _AVFormatContext_start_timehi_s=Module["_AVFormatContext_start_timehi_s"]=(a0,a1)=>(_AVFormatContext_start_timehi_s=Module["_AVFormatContext_start_timehi_s"]=wasmExports["Ne"])(a0,a1);var _AVFormatContext_streams_a=Module["_AVFormatContext_streams_a"]=(a0,a1)=>(_AVFormatContext_streams_a=Module["_AVFormatContext_streams_a"]=wasmExports["Oe"])(a0,a1);var _AVFormatContext_streams_a_s=Module["_AVFormatContext_streams_a_s"]=(a0,a1,a2)=>(_AVFormatContext_streams_a_s=Module["_AVFormatContext_streams_a_s"]=wasmExports["Pe"])(a0,a1,a2);var _AVStream_codecpar=Module["_AVStream_codecpar"]=a0=>(_AVStream_codecpar=Module["_AVStream_codecpar"]=wasmExports["Qe"])(a0);var _AVStream_codecpar_s=Module["_AVStream_codecpar_s"]=(a0,a1)=>(_AVStream_codecpar_s=Module["_AVStream_codecpar_s"]=wasmExports["Re"])(a0,a1);var _AVStream_discard=Module["_AVStream_discard"]=a0=>(_AVStream_discard=Module["_AVStream_discard"]=wasmExports["Se"])(a0);var 
_AVStream_discard_s=Module["_AVStream_discard_s"]=(a0,a1)=>(_AVStream_discard_s=Module["_AVStream_discard_s"]=wasmExports["Te"])(a0,a1);var _AVStream_duration=Module["_AVStream_duration"]=a0=>(_AVStream_duration=Module["_AVStream_duration"]=wasmExports["Ue"])(a0);var _AVStream_durationhi=Module["_AVStream_durationhi"]=a0=>(_AVStream_durationhi=Module["_AVStream_durationhi"]=wasmExports["Ve"])(a0);var _AVStream_duration_s=Module["_AVStream_duration_s"]=(a0,a1)=>(_AVStream_duration_s=Module["_AVStream_duration_s"]=wasmExports["We"])(a0,a1);var _AVStream_durationhi_s=Module["_AVStream_durationhi_s"]=(a0,a1)=>(_AVStream_durationhi_s=Module["_AVStream_durationhi_s"]=wasmExports["Xe"])(a0,a1);var _AVStream_time_base_num=Module["_AVStream_time_base_num"]=a0=>(_AVStream_time_base_num=Module["_AVStream_time_base_num"]=wasmExports["Ye"])(a0);var _AVStream_time_base_den=Module["_AVStream_time_base_den"]=a0=>(_AVStream_time_base_den=Module["_AVStream_time_base_den"]=wasmExports["Ze"])(a0);var _AVStream_time_base_num_s=Module["_AVStream_time_base_num_s"]=(a0,a1)=>(_AVStream_time_base_num_s=Module["_AVStream_time_base_num_s"]=wasmExports["_e"])(a0,a1);var _AVStream_time_base_den_s=Module["_AVStream_time_base_den_s"]=(a0,a1)=>(_AVStream_time_base_den_s=Module["_AVStream_time_base_den_s"]=wasmExports["$e"])(a0,a1);var _AVStream_time_base_s=Module["_AVStream_time_base_s"]=(a0,a1,a2)=>(_AVStream_time_base_s=Module["_AVStream_time_base_s"]=wasmExports["af"])(a0,a1,a2);var _avformat_seek_file_min=Module["_avformat_seek_file_min"]=(a0,a1,a2,a3,a4)=>(_avformat_seek_file_min=Module["_avformat_seek_file_min"]=wasmExports["bf"])(a0,a1,a2,a3,a4);var _avformat_seek_file=Module["_avformat_seek_file"]=(a0,a1,a2,a3,a4,a5,a6,a7,a8)=>(_avformat_seek_file=Module["_avformat_seek_file"]=wasmExports["cf"])(a0,a1,a2,a3,a4,a5,a6,a7,a8);var 
_avformat_seek_file_max=Module["_avformat_seek_file_max"]=(a0,a1,a2,a3,a4)=>(_avformat_seek_file_max=Module["_avformat_seek_file_max"]=wasmExports["df"])(a0,a1,a2,a3,a4);var _avformat_seek_file_approx=Module["_avformat_seek_file_approx"]=(a0,a1,a2,a3,a4)=>(_avformat_seek_file_approx=Module["_avformat_seek_file_approx"]=wasmExports["ef"])(a0,a1,a2,a3,a4);var _AVFilterInOut_filter_ctx=Module["_AVFilterInOut_filter_ctx"]=a0=>(_AVFilterInOut_filter_ctx=Module["_AVFilterInOut_filter_ctx"]=wasmExports["ff"])(a0);var _AVFilterInOut_filter_ctx_s=Module["_AVFilterInOut_filter_ctx_s"]=(a0,a1)=>(_AVFilterInOut_filter_ctx_s=Module["_AVFilterInOut_filter_ctx_s"]=wasmExports["gf"])(a0,a1);var _AVFilterInOut_name=Module["_AVFilterInOut_name"]=a0=>(_AVFilterInOut_name=Module["_AVFilterInOut_name"]=wasmExports["hf"])(a0);var _AVFilterInOut_name_s=Module["_AVFilterInOut_name_s"]=(a0,a1)=>(_AVFilterInOut_name_s=Module["_AVFilterInOut_name_s"]=wasmExports["jf"])(a0,a1);var _AVFilterInOut_next=Module["_AVFilterInOut_next"]=a0=>(_AVFilterInOut_next=Module["_AVFilterInOut_next"]=wasmExports["kf"])(a0);var _AVFilterInOut_next_s=Module["_AVFilterInOut_next_s"]=(a0,a1)=>(_AVFilterInOut_next_s=Module["_AVFilterInOut_next_s"]=wasmExports["lf"])(a0,a1);var _AVFilterInOut_pad_idx=Module["_AVFilterInOut_pad_idx"]=a0=>(_AVFilterInOut_pad_idx=Module["_AVFilterInOut_pad_idx"]=wasmExports["mf"])(a0);var _AVFilterInOut_pad_idx_s=Module["_AVFilterInOut_pad_idx_s"]=(a0,a1)=>(_AVFilterInOut_pad_idx_s=Module["_AVFilterInOut_pad_idx_s"]=wasmExports["nf"])(a0,a1);var _av_buffersink_get_time_base_num=Module["_av_buffersink_get_time_base_num"]=a0=>(_av_buffersink_get_time_base_num=Module["_av_buffersink_get_time_base_num"]=wasmExports["of"])(a0);var _av_buffersink_get_time_base_den=Module["_av_buffersink_get_time_base_den"]=a0=>(_av_buffersink_get_time_base_den=Module["_av_buffersink_get_time_base_den"]=wasmExports["pf"])(a0);var 
_ff_buffersink_set_ch_layout=Module["_ff_buffersink_set_ch_layout"]=(a0,a1,a2)=>(_ff_buffersink_set_ch_layout=Module["_ff_buffersink_set_ch_layout"]=wasmExports["qf"])(a0,a1,a2);var _av_opt_set=Module["_av_opt_set"]=(a0,a1,a2,a3)=>(_av_opt_set=Module["_av_opt_set"]=wasmExports["rf"])(a0,a1,a2,a3);var _libavjs_with_swscale=Module["_libavjs_with_swscale"]=()=>(_libavjs_with_swscale=Module["_libavjs_with_swscale"]=wasmExports["sf"])();var _ffmpeg_main=Module["_ffmpeg_main"]=()=>(_ffmpeg_main=Module["_ffmpeg_main"]=wasmExports["tf"])();var _ffprobe_main=Module["_ffprobe_main"]=()=>(_ffprobe_main=Module["_ffprobe_main"]=wasmExports["uf"])();var _libavjs_create_main_thread=Module["_libavjs_create_main_thread"]=()=>(_libavjs_create_main_thread=Module["_libavjs_create_main_thread"]=wasmExports["vf"])();var _avformat_alloc_output_context2_js=Module["_avformat_alloc_output_context2_js"]=(a0,a1,a2)=>(_avformat_alloc_output_context2_js=Module["_avformat_alloc_output_context2_js"]=wasmExports["wf"])(a0,a1,a2);var _avformat_open_input_js=Module["_avformat_open_input_js"]=(a0,a1,a2)=>(_avformat_open_input_js=Module["_avformat_open_input_js"]=wasmExports["xf"])(a0,a1,a2);var _avformat_open_input=Module["_avformat_open_input"]=(a0,a1,a2,a3)=>(_avformat_open_input=Module["_avformat_open_input"]=wasmExports["yf"])(a0,a1,a2,a3);var _avio_open2_js=Module["_avio_open2_js"]=(a0,a1,a2,a3)=>(_avio_open2_js=Module["_avio_open2_js"]=wasmExports["zf"])(a0,a1,a2,a3);var _avfilter_graph_create_filter_js=Module["_avfilter_graph_create_filter_js"]=(a0,a1,a2,a3,a4)=>(_avfilter_graph_create_filter_js=Module["_avfilter_graph_create_filter_js"]=wasmExports["Af"])(a0,a1,a2,a3,a4);var _av_dict_copy_js=Module["_av_dict_copy_js"]=(a0,a1,a2)=>(_av_dict_copy_js=Module["_av_dict_copy_js"]=wasmExports["Bf"])(a0,a1,a2);var _av_dict_set_js=Module["_av_dict_set_js"]=(a0,a1,a2,a3)=>(_av_dict_set_js=Module["_av_dict_set_js"]=wasmExports["Cf"])(a0,a1,a2,a3);var 
_av_compare_ts_js=Module["_av_compare_ts_js"]=(a0,a1,a2,a3,a4,a5,a6,a7)=>(_av_compare_ts_js=Module["_av_compare_ts_js"]=wasmExports["Df"])(a0,a1,a2,a3,a4,a5,a6,a7);var _ff_error=Module["_ff_error"]=a0=>(_ff_error=Module["_ff_error"]=wasmExports["Ef"])(a0);var _mallinfo_uordblks=Module["_mallinfo_uordblks"]=()=>(_mallinfo_uordblks=Module["_mallinfo_uordblks"]=wasmExports["Ff"])();var _av_strdup=Module["_av_strdup"]=a0=>(_av_strdup=Module["_av_strdup"]=wasmExports["Gf"])(a0);var _av_dict_free=Module["_av_dict_free"]=a0=>(_av_dict_free=Module["_av_dict_free"]=wasmExports["If"])(a0);var _av_frame_alloc=Module["_av_frame_alloc"]=()=>(_av_frame_alloc=Module["_av_frame_alloc"]=wasmExports["Jf"])();var _av_frame_free=Module["_av_frame_free"]=a0=>(_av_frame_free=Module["_av_frame_free"]=wasmExports["Kf"])(a0);var _av_frame_unref=Module["_av_frame_unref"]=a0=>(_av_frame_unref=Module["_av_frame_unref"]=wasmExports["Lf"])(a0);var _av_frame_get_buffer=Module["_av_frame_get_buffer"]=(a0,a1)=>(_av_frame_get_buffer=Module["_av_frame_get_buffer"]=wasmExports["Mf"])(a0,a1);var _av_frame_ref=Module["_av_frame_ref"]=(a0,a1)=>(_av_frame_ref=Module["_av_frame_ref"]=wasmExports["Nf"])(a0,a1);var _av_frame_clone=Module["_av_frame_clone"]=a0=>(_av_frame_clone=Module["_av_frame_clone"]=wasmExports["Of"])(a0);var _av_frame_make_writable=Module["_av_frame_make_writable"]=a0=>(_av_frame_make_writable=Module["_av_frame_make_writable"]=wasmExports["Pf"])(a0);var _av_log_get_level=Module["_av_log_get_level"]=()=>(_av_log_get_level=Module["_av_log_get_level"]=wasmExports["Qf"])();var _av_log_set_level=Module["_av_log_set_level"]=a0=>(_av_log_set_level=Module["_av_log_set_level"]=wasmExports["Rf"])(a0);var _free=Module["_free"]=a0=>(_free=Module["_free"]=wasmExports["Sf"])(a0);var _av_get_sample_fmt_name=Module["_av_get_sample_fmt_name"]=a0=>(_av_get_sample_fmt_name=Module["_av_get_sample_fmt_name"]=wasmExports["Tf"])(a0);var 
_av_pix_fmt_desc_get=Module["_av_pix_fmt_desc_get"]=a0=>(_av_pix_fmt_desc_get=Module["_av_pix_fmt_desc_get"]=wasmExports["Uf"])(a0);var _open=Module["_open"]=(a0,a1,a2)=>(_open=Module["_open"]=wasmExports["Vf"])(a0,a1,a2);var _av_get_bytes_per_sample=Module["_av_get_bytes_per_sample"]=a0=>(_av_get_bytes_per_sample=Module["_av_get_bytes_per_sample"]=wasmExports["Wf"])(a0);var _avformat_free_context=Module["_avformat_free_context"]=a0=>(_avformat_free_context=Module["_avformat_free_context"]=wasmExports["Xf"])(a0);var _av_find_best_stream=Module["_av_find_best_stream"]=(a0,a1,a2,a3,a4,a5)=>(_av_find_best_stream=Module["_av_find_best_stream"]=wasmExports["Yf"])(a0,a1,a2,a3,a4,a5);var _avio_close=Module["_avio_close"]=a0=>(_avio_close=Module["_avio_close"]=wasmExports["Zf"])(a0);var _avio_flush=Module["_avio_flush"]=a0=>(_avio_flush=Module["_avio_flush"]=wasmExports["_f"])(a0);var _avformat_alloc_context=Module["_avformat_alloc_context"]=()=>(_avformat_alloc_context=Module["_avformat_alloc_context"]=wasmExports["$f"])();var _avcodec_parameters_to_context=Module["_avcodec_parameters_to_context"]=(a0,a1)=>(_avcodec_parameters_to_context=Module["_avcodec_parameters_to_context"]=wasmExports["ag"])(a0,a1);var _avcodec_descriptor_get=Module["_avcodec_descriptor_get"]=a0=>(_avcodec_descriptor_get=Module["_avcodec_descriptor_get"]=wasmExports["bg"])(a0);var _av_packet_unref=Module["_av_packet_unref"]=a0=>(_av_packet_unref=Module["_av_packet_unref"]=wasmExports["cg"])(a0);var _avcodec_free_context=Module["_avcodec_free_context"]=a0=>(_avcodec_free_context=Module["_avcodec_free_context"]=wasmExports["dg"])(a0);var _avcodec_parameters_free=Module["_avcodec_parameters_free"]=a0=>(_avcodec_parameters_free=Module["_avcodec_parameters_free"]=wasmExports["eg"])(a0);var _av_packet_free=Module["_av_packet_free"]=a0=>(_av_packet_free=Module["_av_packet_free"]=wasmExports["fg"])(a0);var 
_avformat_new_stream=Module["_avformat_new_stream"]=(a0,a1)=>(_avformat_new_stream=Module["_avformat_new_stream"]=wasmExports["gg"])(a0,a1);var _avcodec_parameters_copy=Module["_avcodec_parameters_copy"]=(a0,a1)=>(_avcodec_parameters_copy=Module["_avcodec_parameters_copy"]=wasmExports["hg"])(a0,a1);var _av_packet_ref=Module["_av_packet_ref"]=(a0,a1)=>(_av_packet_ref=Module["_av_packet_ref"]=wasmExports["ig"])(a0,a1);var _avcodec_find_decoder=Module["_avcodec_find_decoder"]=a0=>(_avcodec_find_decoder=Module["_avcodec_find_decoder"]=wasmExports["jg"])(a0);var _avformat_close_input=Module["_avformat_close_input"]=a0=>(_avformat_close_input=Module["_avformat_close_input"]=wasmExports["kg"])(a0);var _av_read_frame=Module["_av_read_frame"]=(a0,a1)=>(_av_read_frame=Module["_av_read_frame"]=wasmExports["lg"])(a0,a1);var _avcodec_get_name=Module["_avcodec_get_name"]=a0=>(_avcodec_get_name=Module["_avcodec_get_name"]=wasmExports["mg"])(a0);var _av_packet_new_side_data=Module["_av_packet_new_side_data"]=(a0,a1,a2)=>(_av_packet_new_side_data=Module["_av_packet_new_side_data"]=wasmExports["ng"])(a0,a1,a2);var _avformat_find_stream_info=Module["_avformat_find_stream_info"]=(a0,a1)=>(_avformat_find_stream_info=Module["_avformat_find_stream_info"]=wasmExports["og"])(a0,a1);var _avcodec_parameters_from_context=Module["_avcodec_parameters_from_context"]=(a0,a1)=>(_avcodec_parameters_from_context=Module["_avcodec_parameters_from_context"]=wasmExports["pg"])(a0,a1);var _avcodec_send_packet=Module["_avcodec_send_packet"]=(a0,a1)=>(_avcodec_send_packet=Module["_avcodec_send_packet"]=wasmExports["qg"])(a0,a1);var _avcodec_receive_frame=Module["_avcodec_receive_frame"]=(a0,a1)=>(_avcodec_receive_frame=Module["_avcodec_receive_frame"]=wasmExports["rg"])(a0,a1);var _avcodec_alloc_context3=Module["_avcodec_alloc_context3"]=a0=>(_avcodec_alloc_context3=Module["_avcodec_alloc_context3"]=wasmExports["sg"])(a0);var 
_avcodec_parameters_alloc=Module["_avcodec_parameters_alloc"]=()=>(_avcodec_parameters_alloc=Module["_avcodec_parameters_alloc"]=wasmExports["tg"])();var _av_find_input_format=Module["_av_find_input_format"]=a0=>(_av_find_input_format=Module["_av_find_input_format"]=wasmExports["ug"])(a0);var _av_packet_clone=Module["_av_packet_clone"]=a0=>(_av_packet_clone=Module["_av_packet_clone"]=wasmExports["vg"])(a0);var _avformat_write_header=Module["_avformat_write_header"]=(a0,a1)=>(_avformat_write_header=Module["_avformat_write_header"]=wasmExports["wg"])(a0,a1);var _av_write_frame=Module["_av_write_frame"]=(a0,a1)=>(_av_write_frame=Module["_av_write_frame"]=wasmExports["xg"])(a0,a1);var _av_interleaved_write_frame=Module["_av_interleaved_write_frame"]=(a0,a1)=>(_av_interleaved_write_frame=Module["_av_interleaved_write_frame"]=wasmExports["yg"])(a0,a1);var _av_write_trailer=Module["_av_write_trailer"]=a0=>(_av_write_trailer=Module["_av_write_trailer"]=wasmExports["zg"])(a0);var _av_packet_alloc=Module["_av_packet_alloc"]=()=>(_av_packet_alloc=Module["_av_packet_alloc"]=wasmExports["Ag"])();var _close=Module["_close"]=a0=>(_close=Module["_close"]=wasmExports["Bg"])(a0);var _av_shrink_packet=Module["_av_shrink_packet"]=(a0,a1)=>(_av_shrink_packet=Module["_av_shrink_packet"]=wasmExports["Cg"])(a0,a1);var _av_seek_frame=Module["_av_seek_frame"]=(a0,a1,a2,a3,a4)=>(_av_seek_frame=Module["_av_seek_frame"]=wasmExports["Dg"])(a0,a1,a2,a3,a4);var _avformat_flush=Module["_avformat_flush"]=a0=>(_avformat_flush=Module["_avformat_flush"]=wasmExports["Eg"])(a0);var _av_grow_packet=Module["_av_grow_packet"]=(a0,a1)=>(_av_grow_packet=Module["_av_grow_packet"]=wasmExports["Fg"])(a0,a1);var _avcodec_find_encoder=Module["_avcodec_find_encoder"]=a0=>(_avcodec_find_encoder=Module["_avcodec_find_encoder"]=wasmExports["Gg"])(a0);var 
_avcodec_find_encoder_by_name=Module["_avcodec_find_encoder_by_name"]=a0=>(_avcodec_find_encoder_by_name=Module["_avcodec_find_encoder_by_name"]=wasmExports["Hg"])(a0);var _avcodec_find_decoder_by_name=Module["_avcodec_find_decoder_by_name"]=a0=>(_avcodec_find_decoder_by_name=Module["_avcodec_find_decoder_by_name"]=wasmExports["Ig"])(a0);var _avcodec_flush_buffers=Module["_avcodec_flush_buffers"]=a0=>(_avcodec_flush_buffers=Module["_avcodec_flush_buffers"]=wasmExports["Jg"])(a0);var _avcodec_close=Module["_avcodec_close"]=a0=>(_avcodec_close=Module["_avcodec_close"]=wasmExports["Kg"])(a0);var _avcodec_descriptor_next=Module["_avcodec_descriptor_next"]=a0=>(_avcodec_descriptor_next=Module["_avcodec_descriptor_next"]=wasmExports["Lg"])(a0);var _avcodec_descriptor_get_by_name=Module["_avcodec_descriptor_get_by_name"]=a0=>(_avcodec_descriptor_get_by_name=Module["_avcodec_descriptor_get_by_name"]=wasmExports["Mg"])(a0);var _avcodec_send_frame=Module["_avcodec_send_frame"]=(a0,a1)=>(_avcodec_send_frame=Module["_avcodec_send_frame"]=wasmExports["Ng"])(a0,a1);var _avcodec_receive_packet=Module["_avcodec_receive_packet"]=(a0,a1)=>(_avcodec_receive_packet=Module["_avcodec_receive_packet"]=wasmExports["Og"])(a0,a1);var _av_packet_make_writable=Module["_av_packet_make_writable"]=a0=>(_av_packet_make_writable=Module["_av_packet_make_writable"]=wasmExports["Pg"])(a0);var _avfilter_get_by_name=Module["_avfilter_get_by_name"]=a0=>(_avfilter_get_by_name=Module["_avfilter_get_by_name"]=wasmExports["Qg"])(a0);var _avfilter_link=Module["_avfilter_link"]=(a0,a1,a2,a3)=>(_avfilter_link=Module["_avfilter_link"]=wasmExports["Rg"])(a0,a1,a2,a3);var _avfilter_free=Module["_avfilter_free"]=a0=>(_avfilter_free=Module["_avfilter_free"]=wasmExports["Sg"])(a0);var _avfilter_graph_alloc=Module["_avfilter_graph_alloc"]=()=>(_avfilter_graph_alloc=Module["_avfilter_graph_alloc"]=wasmExports["Tg"])();var 
_avfilter_graph_free=Module["_avfilter_graph_free"]=a0=>(_avfilter_graph_free=Module["_avfilter_graph_free"]=wasmExports["Ug"])(a0);var _avfilter_graph_config=Module["_avfilter_graph_config"]=(a0,a1)=>(_avfilter_graph_config=Module["_avfilter_graph_config"]=wasmExports["Vg"])(a0,a1);var _av_buffersink_get_frame=Module["_av_buffersink_get_frame"]=(a0,a1)=>(_av_buffersink_get_frame=Module["_av_buffersink_get_frame"]=wasmExports["Wg"])(a0,a1);var _av_buffersink_set_frame_size=Module["_av_buffersink_set_frame_size"]=(a0,a1)=>(_av_buffersink_set_frame_size=Module["_av_buffersink_set_frame_size"]=wasmExports["Xg"])(a0,a1);var _av_buffersrc_add_frame_flags=Module["_av_buffersrc_add_frame_flags"]=(a0,a1,a2)=>(_av_buffersrc_add_frame_flags=Module["_av_buffersrc_add_frame_flags"]=wasmExports["Yg"])(a0,a1,a2);var _avfilter_inout_alloc=Module["_avfilter_inout_alloc"]=()=>(_avfilter_inout_alloc=Module["_avfilter_inout_alloc"]=wasmExports["Zg"])();var _avfilter_inout_free=Module["_avfilter_inout_free"]=a0=>(_avfilter_inout_free=Module["_avfilter_inout_free"]=wasmExports["_g"])(a0);var _avfilter_graph_parse=Module["_avfilter_graph_parse"]=(a0,a1,a2,a3,a4)=>(_avfilter_graph_parse=Module["_avfilter_graph_parse"]=wasmExports["$g"])(a0,a1,a2,a3,a4);var _sws_freeContext=Module["_sws_freeContext"]=a0=>(_sws_freeContext=Module["_sws_freeContext"]=wasmExports["ah"])(a0);var _sws_scale_frame=Module["_sws_scale_frame"]=(a0,a1,a2)=>(_sws_scale_frame=Module["_sws_scale_frame"]=wasmExports["bh"])(a0,a1,a2);var _sws_getContext=Module["_sws_getContext"]=(a0,a1,a2,a3,a4,a5,a6,a7,a8,a9)=>(_sws_getContext=Module["_sws_getContext"]=wasmExports["ch"])(a0,a1,a2,a3,a4,a5,a6,a7,a8,a9);var _malloc=Module["_malloc"]=a0=>(_malloc=Module["_malloc"]=wasmExports["dh"])(a0);var _calloc=Module["_calloc"]=(a0,a1)=>(_calloc=Module["_calloc"]=wasmExports["eh"])(a0,a1);var 
_emfiberthreads_timeout_expiry=Module["_emfiberthreads_timeout_expiry"]=(a0,a1)=>(_emfiberthreads_timeout_expiry=Module["_emfiberthreads_timeout_expiry"]=wasmExports["fh"])(a0,a1);var _dup2=Module["_dup2"]=(a0,a1)=>(_dup2=Module["_dup2"]=wasmExports["gh"])(a0,a1);var _strerror=Module["_strerror"]=a0=>(_strerror=Module["_strerror"]=wasmExports["hh"])(a0);var _setThrew=(a0,a1)=>(_setThrew=wasmExports["ih"])(a0,a1);var __emscripten_tempret_set=a0=>(__emscripten_tempret_set=wasmExports["jh"])(a0);var __emscripten_stack_restore=a0=>(__emscripten_stack_restore=wasmExports["kh"])(a0);var __emscripten_stack_alloc=a0=>(__emscripten_stack_alloc=wasmExports["lh"])(a0);var _emscripten_stack_get_current=()=>(_emscripten_stack_get_current=wasmExports["mh"])();var dynCall_iiii=Module["dynCall_iiii"]=(a0,a1,a2,a3)=>(dynCall_iiii=Module["dynCall_iiii"]=wasmExports["nh"])(a0,a1,a2,a3);var dynCall_ii=Module["dynCall_ii"]=(a0,a1)=>(dynCall_ii=Module["dynCall_ii"]=wasmExports["oh"])(a0,a1);var dynCall_viiii=Module["dynCall_viiii"]=(a0,a1,a2,a3,a4)=>(dynCall_viiii=Module["dynCall_viiii"]=wasmExports["ph"])(a0,a1,a2,a3,a4);var dynCall_iiiiii=Module["dynCall_iiiiii"]=(a0,a1,a2,a3,a4,a5)=>(dynCall_iiiiii=Module["dynCall_iiiiii"]=wasmExports["qh"])(a0,a1,a2,a3,a4,a5);var dynCall_iii=Module["dynCall_iii"]=(a0,a1,a2)=>(dynCall_iii=Module["dynCall_iii"]=wasmExports["rh"])(a0,a1,a2);var dynCall_vii=Module["dynCall_vii"]=(a0,a1,a2)=>(dynCall_vii=Module["dynCall_vii"]=wasmExports["sh"])(a0,a1,a2);var dynCall_viiiiii=Module["dynCall_viiiiii"]=(a0,a1,a2,a3,a4,a5,a6)=>(dynCall_viiiiii=Module["dynCall_viiiiii"]=wasmExports["th"])(a0,a1,a2,a3,a4,a5,a6);var dynCall_vi=Module["dynCall_vi"]=(a0,a1)=>(dynCall_vi=Module["dynCall_vi"]=wasmExports["uh"])(a0,a1);var dynCall_iiiii=Module["dynCall_iiiii"]=(a0,a1,a2,a3,a4)=>(dynCall_iiiii=Module["dynCall_iiiii"]=wasmExports["vh"])(a0,a1,a2,a3,a4);var 
dynCall_viii=Module["dynCall_viii"]=(a0,a1,a2,a3)=>(dynCall_viii=Module["dynCall_viii"]=wasmExports["wh"])(a0,a1,a2,a3);var dynCall_viiiii=Module["dynCall_viiiii"]=(a0,a1,a2,a3,a4,a5)=>(dynCall_viiiii=Module["dynCall_viiiii"]=wasmExports["xh"])(a0,a1,a2,a3,a4,a5);var dynCall_viiiiiii=Module["dynCall_viiiiiii"]=(a0,a1,a2,a3,a4,a5,a6,a7)=>(dynCall_viiiiiii=Module["dynCall_viiiiiii"]=wasmExports["yh"])(a0,a1,a2,a3,a4,a5,a6,a7);var dynCall_jij=Module["dynCall_jij"]=(a0,a1,a2,a3)=>(dynCall_jij=Module["dynCall_jij"]=wasmExports["zh"])(a0,a1,a2,a3);var dynCall_viiiiiiii=Module["dynCall_viiiiiiii"]=(a0,a1,a2,a3,a4,a5,a6,a7,a8)=>(dynCall_viiiiiiii=Module["dynCall_viiiiiiii"]=wasmExports["Ah"])(a0,a1,a2,a3,a4,a5,a6,a7,a8);var dynCall_iiiijj=Module["dynCall_iiiijj"]=(a0,a1,a2,a3,a4,a5,a6,a7)=>(dynCall_iiiijj=Module["dynCall_iiiijj"]=wasmExports["Bh"])(a0,a1,a2,a3,a4,a5,a6,a7);var dynCall_iiiiiiiii=Module["dynCall_iiiiiiiii"]=(a0,a1,a2,a3,a4,a5,a6,a7,a8)=>(dynCall_iiiiiiiii=Module["dynCall_iiiiiiiii"]=wasmExports["Ch"])(a0,a1,a2,a3,a4,a5,a6,a7,a8);var dynCall_viiid=Module["dynCall_viiid"]=(a0,a1,a2,a3,a4)=>(dynCall_viiid=Module["dynCall_viiid"]=wasmExports["Dh"])(a0,a1,a2,a3,a4);var dynCall_iiiiiiiiii=Module["dynCall_iiiiiiiiii"]=(a0,a1,a2,a3,a4,a5,a6,a7,a8,a9)=>(dynCall_iiiiiiiiii=Module["dynCall_iiiiiiiiii"]=wasmExports["Eh"])(a0,a1,a2,a3,a4,a5,a6,a7,a8,a9);var dynCall_i=Module["dynCall_i"]=a0=>(dynCall_i=Module["dynCall_i"]=wasmExports["Fh"])(a0);var _asyncify_start_unwind=a0=>(_asyncify_start_unwind=wasmExports["Gh"])(a0);var _asyncify_stop_unwind=()=>(_asyncify_stop_unwind=wasmExports["Hh"])();var _asyncify_start_rewind=a0=>(_asyncify_start_rewind=wasmExports["Ih"])(a0);var _asyncify_stop_rewind=()=>(_asyncify_stop_rewind=wasmExports["Jh"])();function invoke_iiiii(index,a1,a2,a3,a4){var sp=stackSave();try{return dynCall_iiiii(index,a1,a2,a3,a4)}catch(e){stackRestore(sp);if(e!==e+0)throw e;_setThrew(1,0)}}function invoke_vii(index,a1,a2){var 
sp=stackSave();try{dynCall_vii(index,a1,a2)}catch(e){stackRestore(sp);if(e!==e+0)throw e;_setThrew(1,0)}}function invoke_iii(index,a1,a2){var sp=stackSave();try{return dynCall_iii(index,a1,a2)}catch(e){stackRestore(sp);if(e!==e+0)throw e;_setThrew(1,0)}}function invoke_viiii(index,a1,a2,a3,a4){var sp=stackSave();try{dynCall_viiii(index,a1,a2,a3,a4)}catch(e){stackRestore(sp);if(e!==e+0)throw e;_setThrew(1,0)}}function invoke_iiiiiiiii(index,a1,a2,a3,a4,a5,a6,a7,a8){var sp=stackSave();try{return dynCall_iiiiiiiii(index,a1,a2,a3,a4,a5,a6,a7,a8)}catch(e){stackRestore(sp);if(e!==e+0)throw e;_setThrew(1,0)}}function invoke_viii(index,a1,a2,a3){var sp=stackSave();try{dynCall_viii(index,a1,a2,a3)}catch(e){stackRestore(sp);if(e!==e+0)throw e;_setThrew(1,0)}}function invoke_ii(index,a1){var sp=stackSave();try{return dynCall_ii(index,a1)}catch(e){stackRestore(sp);if(e!==e+0)throw e;_setThrew(1,0)}}function invoke_vi(index,a1){var sp=stackSave();try{dynCall_vi(index,a1)}catch(e){stackRestore(sp);if(e!==e+0)throw e;_setThrew(1,0)}}function invoke_viiid(index,a1,a2,a3,a4){var sp=stackSave();try{dynCall_viiid(index,a1,a2,a3,a4)}catch(e){stackRestore(sp);if(e!==e+0)throw e;_setThrew(1,0)}}function invoke_iiiiii(index,a1,a2,a3,a4,a5){var sp=stackSave();try{return dynCall_iiiiii(index,a1,a2,a3,a4,a5)}catch(e){stackRestore(sp);if(e!==e+0)throw e;_setThrew(1,0)}}function invoke_iiii(index,a1,a2,a3){var sp=stackSave();try{return dynCall_iiii(index,a1,a2,a3)}catch(e){stackRestore(sp);if(e!==e+0)throw e;_setThrew(1,0)}}function invoke_i(index){var sp=stackSave();try{return dynCall_i(index)}catch(e){stackRestore(sp);if(e!==e+0)throw e;_setThrew(1,0)}}function invoke_iiiiiiiiii(index,a1,a2,a3,a4,a5,a6,a7,a8,a9){var sp=stackSave();try{return dynCall_iiiiiiiiii(index,a1,a2,a3,a4,a5,a6,a7,a8,a9)}catch(e){stackRestore(sp);if(e!==e+0)throw e;_setThrew(1,0)}}function invoke_viiiii(index,a1,a2,a3,a4,a5){var 
sp=stackSave();try{dynCall_viiiii(index,a1,a2,a3,a4,a5)}catch(e){stackRestore(sp);if(e!==e+0)throw e;_setThrew(1,0)}}function invoke_viiiiiiii(index,a1,a2,a3,a4,a5,a6,a7,a8){var sp=stackSave();try{dynCall_viiiiiiii(index,a1,a2,a3,a4,a5,a6,a7,a8)}catch(e){stackRestore(sp);if(e!==e+0)throw e;_setThrew(1,0)}}function invoke_viiiiiii(index,a1,a2,a3,a4,a5,a6,a7){var sp=stackSave();try{dynCall_viiiiiii(index,a1,a2,a3,a4,a5,a6,a7)}catch(e){stackRestore(sp);if(e!==e+0)throw e;_setThrew(1,0)}}function invoke_viiiiii(index,a1,a2,a3,a4,a5,a6){var sp=stackSave();try{dynCall_viiiiii(index,a1,a2,a3,a4,a5,a6)}catch(e){stackRestore(sp);if(e!==e+0)throw e;_setThrew(1,0)}}function invoke_iiiijj(index,a1,a2,a3,a4,a5,a6,a7){var sp=stackSave();try{return dynCall_iiiijj(index,a1,a2,a3,a4,a5,a6,a7)}catch(e){stackRestore(sp);if(e!==e+0)throw e;_setThrew(1,0)}}function invoke_jij(index,a1,a2,a3){var sp=stackSave();try{return dynCall_jij(index,a1,a2,a3)}catch(e){stackRestore(sp);if(e!==e+0)throw e;_setThrew(1,0)}}Module["ccall"]=ccall;Module["cwrap"]=cwrap;var calledRun;var calledPrerun;dependenciesFulfilled=function runCaller(){if(!calledRun)run();if(!calledRun)dependenciesFulfilled=runCaller};function run(){if(runDependencies>0){return}if(!calledPrerun){calledPrerun=1;preRun();if(runDependencies>0){return}}function doRun(){if(calledRun)return;calledRun=1;Module["calledRun"]=1;if(ABORT)return;initRuntime();readyPromiseResolve(Module);Module["onRuntimeInitialized"]?.();postRun()}if(Module["setStatus"]){Module["setStatus"]("Running...");setTimeout(()=>{setTimeout(()=>Module["setStatus"](""),1);doRun()},1)}else{doRun()}}if(Module["preInit"]){if(typeof Module["preInit"]=="function")Module["preInit"]=[Module["preInit"]];while(Module["preInit"].length>0){Module["preInit"].pop()()}}run();var serializationPromise=null;function serially(f){var p;if(serializationPromise){p=serializationPromise.catch(function(){}).then(function(){return 
f()})}else{p=f()}serializationPromise=p=p.finally(function(){if(serializationPromise===p)serializationPromise=null});return p}Module.fsThrownError=null;var ERRNO_CODES={EPERM:1,EIO:5,EAGAIN:6,ECANCELED:11,ESPIPE:29};var readerCallbacks={open:function(stream){if(stream.flags&3){throw new FS.ErrnoError(ERRNO_CODES.EPERM)}},close:function(){},read:function(stream,buffer,offset,length,position){var data=Module.readBuffers[stream.node.name];if(!data||data.buf.length===0&&!data.eof){if(Module.onread){try{var rr=Module.onread(stream.node.name,position,length);if(rr&&rr.then&&rr.catch){rr.catch(function(ex){ff_reader_dev_send(stream.node.name,null,{error:ex})})}}catch(ex){ff_reader_dev_send(stream.node.name,null,{error:ex})}}data=Module.readBuffers[stream.node.name]}if(!data)throw new FS.ErrnoError(ERRNO_CODES.EAGAIN);if(data.error){Module.fsThrownError=data.error;throw new FS.ErrnoError(ERRNO_CODES.ECANCELED)}if(data.errorCode)throw new FS.ErrnoError(data.errorCode);if(data.buf.length===0){if(data.eof){return 0}else{data.ready=false;throw new FS.ErrnoError(ERRNO_CODES.EAGAIN)}}var ret;if(length=bufMax){if(position>=stream.node.ff_block_reader_dev_size)return 0;if(!Module.onblockread)throw new FS.ErrnoError(ERRNO_CODES.EIO);try{var brr=Module.onblockread(stream.node.name,position,length);if(brr&&brr.then&&brr.catch){brr.catch(function(ex){ff_block_reader_dev_send(stream.node.name,position,null,{error:ex})})}}catch(ex){Module.fsThrownError=ex;throw new FS.ErrnoError(ERRNO_CODES.ECANCELED)}bufMin=data.position;bufMax=data.position+data.buf.length;if(position=bufMax){data.ready=false;throw new FS.ErrnoError(ERRNO_CODES.EAGAIN)}}var bufPos=position-bufMin;var ret;if(bufPos+lengthdata.length)av_shrink_packet(pkt,data.length)}var ptr=AVPacket_data(pkt);Module.HEAPU8.set(data,ptr)};var ff_init_muxer=Module.ff_init_muxer=function(opts,streamCtxs){var oformat=opts.oformat?opts.oformat:0;var format_name=opts.format_name?opts.format_name:null;var 
filename=opts.filename?opts.filename:null;var oc=avformat_alloc_output_context2_js(oformat,format_name,filename);if(oc===0)throw new Error("Failed to allocate output context");var fmt=AVFormatContext_oformat(oc);var sts=[];streamCtxs.forEach(function(ctx){var st=avformat_new_stream(oc,0);if(st===0)throw new Error("Could not allocate stream");sts.push(st);var codecpar=AVStream_codecpar(st);var ret;if(opts.codecpars){ret=avcodec_parameters_copy(codecpar,ctx[0]);AVCodecParameters_codec_tag_s(codecpar,0)}else{ret=avcodec_parameters_from_context(codecpar,ctx[0])}if(ret<0)throw new Error("Could not copy the stream parameters: "+ff_error(ret));AVStream_time_base_s(st,ctx[1],ctx[2])});if(opts.device)FS.mkdev(opts.filename,511,writerDev);var pb=null;if(opts.open){pb=avio_open2_js(opts.filename,2,0,0);if(pb===0)throw new Error("Could not open file");AVFormatContext_pb_s(oc,pb)}return[oc,fmt,pb,sts]};var ff_free_muxer=Module.ff_free_muxer=function(oc,pb){avformat_free_context(oc);if(pb)avio_close(pb)};function ff_init_demuxer_file(filename,fmt){var fmt_ctx;return avformat_open_input_js(filename,fmt?fmt:null,null).then(function(ret){fmt_ctx=ret;if(fmt_ctx===0)throw new Error("Could not open source file");return avformat_find_stream_info(fmt_ctx,0)}).then(function(){var nb_streams=AVFormatContext_nb_streams(fmt_ctx);var streams=[];for(var i=0;i=opts.limit)return[-6,outPackets];return Promise.all([]).then(step)})}return step()}Module.ff_read_frame_multi=function(){var args=arguments;return serially(function(){return ff_read_frame_multi.apply(void 0,args)})};Module.ff_read_multi=function(fmt_ctx,pkt,devfile,opts){console.log("[libav.js] ff_read_multi is deprecated. 
Use ff_read_frame_multi.");return Module.ff_read_frame_multi(fmt_ctx,pkt,opts)};var ff_init_filter_graph=Module.ff_init_filter_graph=function(filters_descr,input,output){var buffersrc,abuffersrc,buffersink,abuffersink,filter_graph,tmp_src_ctx,tmp_sink_ctx,src_ctxs,sink_ctxs,io_outputs,io_inputs,int32s;var instr,outstr;var multiple_inputs=!!input.length;if(!multiple_inputs)input=[input];var multiple_outputs=!!output.length;if(!multiple_outputs)output=[output];src_ctxs=[];sink_ctxs=[];try{buffersrc=avfilter_get_by_name("buffer");abuffersrc=avfilter_get_by_name("abuffer");buffersink=avfilter_get_by_name("buffersink");abuffersink=avfilter_get_by_name("abuffersink");filter_graph=avfilter_graph_alloc();if(filter_graph===0)throw new Error("Failed to allocate filter graph");io_outputs=0;var ii=0;input.forEach(function(input){var next_io_outputs=avfilter_inout_alloc();if(next_io_outputs===0)throw new Error("Failed to allocate outputs");AVFilterInOut_next_s(next_io_outputs,io_outputs);io_outputs=next_io_outputs;var nm="in"+(multiple_inputs?ii:"");if(input.type===0){if(buffersrc===0)throw new Error("Failed to load buffer filter");var frame_rate=input.frame_rate;var time_base=input.time_base;if(typeof frame_rate==="undefined")frame_rate=30;if(typeof time_base==="undefined")time_base=[1,frame_rate];tmp_src_ctx=avfilter_graph_create_filter_js(buffersrc,nm,"time_base="+time_base[0]+"/"+time_base[1]+":frame_rate="+frame_rate+":pix_fmt="+(input.pix_fmt?input.pix_fmt:0)+":width="+(input.width?input.width:640)+":height="+(input.height?input.height:360),null,filter_graph)}else{if(abuffersrc===0)throw new Error("Failed to load abuffer filter");var sample_rate=input.sample_rate;var time_base=input.time_base;if(typeof sample_rate==="undefined")sample_rate=48e3;if(typeof 
time_base==="undefined")time_base=[1,sample_rate];tmp_src_ctx=avfilter_graph_create_filter_js(abuffersrc,nm,"time_base="+time_base[0]+"/"+time_base[1]+":sample_rate="+sample_rate+":sample_fmt="+(input.sample_fmt?input.sample_fmt:3)+":channel_layout=0x"+(input.channel_layout?input.channel_layout:4).toString(16),null,filter_graph)}if(tmp_src_ctx===0)throw new Error("Cannot create buffer source");src_ctxs.push(tmp_src_ctx);instr=av_strdup(nm);if(instr===0)throw new Error("Failed to allocate output");AVFilterInOut_name_s(io_outputs,instr);instr=0;AVFilterInOut_filter_ctx_s(io_outputs,tmp_src_ctx);tmp_src_ctx=0;AVFilterInOut_pad_idx_s(io_outputs,0);ii++});io_inputs=0;var oi=0;output.forEach(function(output){var next_io_inputs=avfilter_inout_alloc();if(next_io_inputs===0)throw new Error("Failed to allocate inputs");AVFilterInOut_next_s(next_io_inputs,io_inputs);io_inputs=next_io_inputs;var nm="out"+(multiple_outputs?oi:"");if(output.type===0){if(buffersink===0)throw new Error("Failed to load buffersink filter");tmp_sink_ctx=avfilter_graph_create_filter_js(buffersink,nm,null,null,filter_graph)}else{tmp_sink_ctx=avfilter_graph_create_filter_js(abuffersink,nm,null,null,filter_graph)}if(tmp_sink_ctx===0)throw new Error("Cannot create buffer sink");sink_ctxs.push(tmp_sink_ctx);if(output.type===0){int32s=ff_malloc_int32_list([output.pix_fmt?output.pix_fmt:0,-1]);if(int32s===0)throw new Error("Failed to transfer parameters");if(av_opt_set_int_list_js(tmp_sink_ctx,"pix_fmts",4,int32s,-1,1)<0){throw new Error("Failed to set filter parameters")}free(int32s);int32s=0}else{int32s=ff_malloc_int32_list([output.sample_fmt?output.sample_fmt:3,-1,output.sample_rate?output.sample_rate:48e3,-1]);if(int32s===0)throw new Error("Failed to transfer parameters");var ch_layout=output.channel_layout?output.channel_layout:4;var 
ch_layout_i64=[~~ch_layout,Math.floor(ch_layout/4294967296)];if(av_opt_set_int_list_js(tmp_sink_ctx,"sample_fmts",4,int32s,-1,1)<0||ff_buffersink_set_ch_layout(tmp_sink_ctx,ch_layout_i64[0],ch_layout_i64[1])<0||av_opt_set_int_list_js(tmp_sink_ctx,"sample_rates",4,int32s+8,-1,1)<0){throw new Error("Failed to set filter parameters")}free(int32s);int32s=0}outstr=av_strdup(nm);if(outstr===0)throw new Error("Failed to transfer parameters");AVFilterInOut_name_s(io_inputs,outstr);outstr=0;AVFilterInOut_filter_ctx_s(io_inputs,tmp_sink_ctx);tmp_sink_ctx=0;AVFilterInOut_pad_idx_s(io_inputs,0);oi++});var ret=avfilter_graph_parse(filter_graph,filters_descr,io_inputs,io_outputs,0);if(ret<0)throw new Error("Failed to initialize filters: "+ff_error(ret));io_inputs=io_outputs=0;var oi=0;output.forEach(function(output){if(output.frame_size)av_buffersink_set_frame_size(sink_ctxs[oi],output.frame_size);oi++});ret=avfilter_graph_config(filter_graph,0);if(ret<0)throw new Error("Failed to configure filter graph: "+ff_error(ret))}catch(ex){if(io_outputs)avfilter_inout_free(io_outputs);if(io_inputs)avfilter_inout_free(io_inputs);if(filter_graph)avfilter_graph_free(filter_graph);if(tmp_src_ctx)avfilter_free(tmp_src_ctx);if(tmp_sink_ctx)avfilter_free(tmp_sink_ctx);if(int32s)free(int32s);if(instr)free(instr);if(outstr)free(outstr);throw ex}return[filter_graph,multiple_inputs?src_ctxs:src_ctxs[0],multiple_outputs?sink_ctxs:sink_ctxs[0]]};var ff_filter_multi=Module.ff_filter_multi=function(srcs,buffersink_ctx,framePtr,inFrames,config){var outFrames=[];var transfer=[];var tbNum=-1,tbDen=-1;if(!srcs.length){srcs=[srcs];inFrames=[inFrames];config=[config]}config=config.map(function(config){if(config===true)return{fin:true};return config||{}});var max=inFrames.map(function(srcFrames){return srcFrames.length}).reduce(function(a,b){return Math.max(a,b)});function handleFrame(buffersrc_ctx,inFrame,copyoutFrame,config){if(inFrame!==null)ff_copyin_frame(framePtr,inFrame);var 
ret=av_buffersrc_add_frame_flags(buffersrc_ctx,inFrame?framePtr:0,8);if(ret<0)throw new Error("Error while feeding the audio filtergraph: "+ff_error(ret));av_frame_unref(framePtr);while(true){ret=av_buffersink_get_frame(buffersink_ctx,framePtr);if(ret===-6||ret===-541478725)break;if(ret<0)throw new Error("Error while receiving a frame from the filtergraph: "+ff_error(ret));if(tbNum<0){tbNum=av_buffersink_get_time_base_num(buffersink_ctx);tbDen=av_buffersink_get_time_base_den(buffersink_ctx)}var outFrame=copyoutFrame(framePtr);if(tbNum&&!config.ignoreSinkTimebase){if(typeof outFrame==="number"){AVFrame_time_base_s(outFrame,tbNum,tbDen)}else if(outFrame){outFrame.time_base_num=tbNum;outFrame.time_base_den=tbDen}}if(outFrame&&outFrame.libavjsTransfer&&outFrame.libavjsTransfer.length)transfer.push.apply(transfer,outFrame.libavjsTransfer);outFrames.push(outFrame);av_frame_unref(framePtr)}}var copyoutFrames=[];for(var ti=0;ti=5){var data=[];for(var ci=0;ci>=log2ch;plane+=linesize*h;if(plane>dataHi)dataHi=plane}outFrame.data=Module.HEAPU8.slice(dataLo,dataHi);transfer.push(outFrame.data.buffer);for(var p=0;p<8;p++){var linesize=AVFrame_linesize_a(frame,p);if(!linesize)break;var plane=AVFrame_data_a(frame,p);layout.push({offset:plane-dataLo,stride:linesize})}return outFrame};var ff_frame_video_packed_size=Module.ff_frame_video_packed_size=function(frame){var width=AVFrame_width(frame);var height=AVFrame_height(frame);var format=AVFrame_format(frame);var desc=av_pix_fmt_desc_get(format);var bpp=1;if(!(AVPixFmtDescriptor_flags(desc)&16))bpp*=AVPixFmtDescriptor_nb_components(desc);var dataSz=0;for(var i=0;i<8;i++){var linesize=AVFrame_linesize_a(frame,i);if(!linesize)break;var w=width*bpp;var h=height;if(i===1||i===2){w>>=AVPixFmtDescriptor_log2_chroma_w(desc);h>>=AVPixFmtDescriptor_log2_chroma_h(desc)}dataSz+=w*h}return dataSz};function ff_copyout_frame_data_packed(data,layout,frame){var width=AVFrame_width(frame);var height=AVFrame_height(frame);var 
format=AVFrame_format(frame);var desc=av_pix_fmt_desc_get(format);var bpp=1;if(!(AVPixFmtDescriptor_flags(desc)&16))bpp*=AVPixFmtDescriptor_nb_components(desc);var dIdx=0;for(var i=0;i<8;i++){var linesize=AVFrame_linesize_a(frame,i);if(!linesize)break;var inData=AVFrame_data_a(frame,i);var w=width*bpp;var h=height;if(i===1||i===2){w>>=AVPixFmtDescriptor_log2_chroma_w(desc);h>>=AVPixFmtDescriptor_log2_chroma_h(desc)}layout.push({offset:dIdx,stride:w});for(var y=0;y>>=1}}["channel_layout","channels","format","pts","ptshi","sample_rate","time_base_num","time_base_den"].forEach(function(key){if(key in frame)CAccessors["AVFrame_"+key+"_s"](framePtr,frame[key])});var nb_samples;if(format>=5){nb_samples=frame.data[0].length}else{nb_samples=frame.data.length/channels}AVFrame_nb_samples_s(framePtr,nb_samples);if(av_frame_make_writable(framePtr)<0){var ret=av_frame_get_buffer(framePtr,0);if(ret<0)throw new Error("Failed to allocate frame buffers: "+ff_error(ret))}if(format>=5){for(var ci=0;ci>=log2cw;h>>=log2ch}layout.push({offset:off,stride:w*bpp});off+=w*h*bpp}}for(var p=0;p>=log2ch;var ioff=lplane.offset;var ooff=0;var stride=Math.min(lplane.stride,linesize);for(var y=0;y LibAVFactory); +/* + * Copyright (C) 2019-2024 Yahweasel + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY + * SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION + * OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN + * CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ */ + +if (/* We're in a worker */ + typeof importScripts !== "undefined" && + /* We're not being loaded with noworker from the main code */ + typeof LibAV === "undefined" && + /* We're not being loaded as a thread */ + ( + (typeof self === "undefined" && typeof Module === "undefined") || + (typeof self !== "undefined" && self.name !== "em-pthread") + ) + ) (function() { + var libav; + + Promise.all([]).then(function() { + /* We're the primary code for this worker. The host should ask us to + * load immediately. */ + return new Promise(function(res, rej) { + onmessage = function(e) { + if (e && e.data && e.data.config) { + LibAVFactory({ + wasmurl: e.data.config.wasmurl, + variant: e.data.config.variant + }).then(res).catch(rej); + } + }; + }); + + }).then(function(lib) { + libav = lib; + + // Now we're ready for normal messages + onmessage = function(e) { + var id = e.data[0]; + var fun = e.data[1]; + var args = e.data.slice(2); + var ret = void 0; + var succ = true; + + function reply() { + var transfer = []; + if (ret && ret.libavjsTransfer) + transfer = ret.libavjsTransfer + try { + postMessage([id, fun, succ, ret], transfer); + } catch (ex) { + try { + ret = JSON.parse(JSON.stringify( + ret, function(k, v) { return v; } + )); + postMessage([id, fun, succ, ret], transfer); + } catch (ex) { + postMessage([id, fun, succ, "" + ret]); + } + } + } + + try { + ret = libav[fun].apply(libav, args); + } catch (ex) { + succ = false; + ret = ex; + } + if (succ && ret && ret.then) { + // Let the promise resolve + ret.then(function(res) { + ret = res; + }).catch(function(ex) { + succ = false; + ret = ex; + }).then(reply); + + } else reply(); + }; + + libav.onwrite = function(name, pos, buf) { + /* We have to buf.slice(0) so we don't duplicate the entire heap just + * to get one part of it in postMessage */ + buf = buf.slice(0); + postMessage(["onwrite", "onwrite", true, [name, pos, buf]], [buf.buffer]); + }; + + libav.onread = function(name, pos, len) { + 
postMessage(["onread", "onread", true, [name, pos, len]]); + }; + + libav.onblockread = function(name, pos, len) { + postMessage(["onblockread", "onblockread", true, [name, pos, len]]); + }; + + postMessage(["onready", "onready", true, null]); + + }).catch(function(ex) { + console.log("Loading LibAV failed\n" + ex + "\n" + ex.stack); + }); +})(); diff --git a/src/libav-6.5.7.1-webm-vp9.wasm.wasm b/src/libav-6.5.7.1-webm-vp9.wasm.wasm new file mode 100644 index 0000000..06e51c1 Binary files /dev/null and b/src/libav-6.5.7.1-webm-vp9.wasm.wasm differ diff --git a/src/libavjs-webcodecs-polyfill.js b/src/libavjs-webcodecs-polyfill.js new file mode 100644 index 0000000..88861e2 --- /dev/null +++ b/src/libavjs-webcodecs-polyfill.js @@ -0,0 +1,4207 @@ +(function (global, factory) { + typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) : + typeof define === 'function' && define.amd ? define(['exports'], factory) : + (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.LibAVWebCodecs = {})); +})(this, (function (exports) { 'use strict'; + + /* + * This file is part of the libav.js WebCodecs Polyfill implementation. The + * interface implemented is derived from the W3C standard. No attribution is + * required when using this library. + * + * Copyright (c) 2021-2024 Yahweasel + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY + * SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION + * OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN + * CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + let EncodedAudioChunk$1 = class EncodedAudioChunk { + constructor(init) { + /* 1. If init.transfer contains more than one reference to the same + * ArrayBuffer, then throw a DataCloneError DOMException. */ + // 2. For each transferable in init.transfer: + /* 1. If [[Detached]] internal slot is true, then throw a + * DataCloneError DOMException. */ + // (not worth checking in a polyfill) + /* 3. Let chunk be a new EncodedAudioChunk object, initialized as + * follows */ + { + // 1. Assign init.type to [[type]]. + this.type = init.type; + // 2. Assign init.timestamp to [[timestamp]]. + this.timestamp = init.timestamp; + /* 3. If init.duration exists, assign it to [[duration]], or assign + * null otherwise. */ + if (typeof init.duration === "number") + this.duration = init.duration; + else + this.duration = null; + // 4. Assign init.data.byteLength to [[byte length]]; + this.byteLength = init.data.byteLength; + /* 5. If init.transfer contains an ArrayBuffer referenced by + * init.data the User Agent MAY choose to: */ + let transfer = false; + if (init.transfer) { + /* 1. Let resource be a new media resource referencing sample + * data in init.data. */ + let inBuffer; + if (init.data.buffer) + inBuffer = init.data.buffer; + else + inBuffer = init.data; + let t; + if (init.transfer instanceof Array) + t = init.transfer; + else + t = Array.from(init.transfer); + for (const b of t) { + if (b === inBuffer) { + transfer = true; + break; + } + } + } + // 6. Otherwise: + // 1. Assign a copy of init.data to [[internal data]]. 
+ const data = new Uint8Array(init.data.buffer || init.data, init.data.byteOffset || 0, init.data.BYTES_PER_ELEMENT + ? (init.data.BYTES_PER_ELEMENT * init.data.length) + : init.data.byteLength); + if (transfer) + this._data = data; + else + this._data = data.slice(0); + } + // 4. For each transferable in init.transfer: + // 1. Perform DetachArrayBuffer on transferable + // (already done by transferring) + // 5. Return chunk. + } + // Internal + _libavGetData() { return this._data; } + copyTo(destination) { + (new Uint8Array(destination.buffer || destination, destination.byteOffset || 0)).set(this._data); + } + }; + + (function (Object) { + typeof globalThis !== 'object' && ( + this ? + get() : + (Object.defineProperty(Object.prototype, '_T_', { + configurable: true, + get: get + }), _T_) + ); + function get() { + var global = this || self; + global.globalThis = global; + delete Object.prototype._T_; + } + }(Object)); + + /* + * This file is part of the libav.js WebCodecs Polyfill implementation. The + * interface implemented is derived from the W3C standard. No attribution is + * required when using this library. + * + * Copyright (c) 2021-2024 Yahweasel + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY + * SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION + * OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN + * CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + let AudioData$1 = class AudioData { + constructor(init) { + // 1. If init is not a valid AudioDataInit, throw a TypeError. 
+ AudioData._checkValidAudioDataInit(init); + /* 2. If init.transfer contains more than one reference to the same + * ArrayBuffer, then throw a DataCloneError DOMException. */ + // 3. For each transferable in init.transfer: + // 1. If [[Detached]] internal slot is true, then throw a DataCloneError DOMException. + // (Not worth doing in polyfill) + // 4. Let frame be a new AudioData object, initialized as follows: + { + // 1. Assign false to [[Detached]]. + // (not doable in polyfill) + // 2. Assign init.format to [[format]]. + this.format = init.format; + // 3. Assign init.sampleRate to [[sample rate]]. + this.sampleRate = init.sampleRate; + // 4. Assign init.numberOfFrames to [[number of frames]]. + this.numberOfFrames = init.numberOfFrames; + // 5. Assign init.numberOfChannels to [[number of channels]]. + this.numberOfChannels = init.numberOfChannels; + // 6. Assign init.timestamp to [[timestamp]]. + this.timestamp = init.timestamp; + /* 7. If init.transfer contains an ArrayBuffer referenced by + * init.data the User Agent MAY choose to: */ + let transfer = false; + if (init.transfer) { + // 1. Let resource be a new media resource referencing sample data in data. + let inBuffer; + if (init.data.buffer) + inBuffer = init.data.buffer; + else + inBuffer = init.data; + let t; + if (init.transfer instanceof Array) + t = init.transfer; + else + t = Array.from(init.transfer); + for (const b of t) { + if (b === inBuffer) { + transfer = true; + break; + } + } + } + // 8. Otherwise: + // 1. Let resource be a media resource containing a copy of init.data. + // 9. Let resourceReference be a reference to resource. + let inData, byteOffset = 0; + if (transfer) { + inData = init.data; + byteOffset = init.data.byteOffset || 0; + } + else { + inData = init.data.slice(0); + } + const resourceReference = audioView(init.format, inData.buffer || inData, byteOffset); + // 10. Assign resourceReference to [[resource reference]]. + this._data = resourceReference; + } + // 5. 
For each transferable in init.transfer: + // 1. Perform DetachArrayBuffer on transferable + // (Already done by transferring) + // 6. Return frame. + // Duration not calculated in spec? + this.duration = init.numberOfFrames / init.sampleRate * 1000000; + } + /** + * Convert a polyfill AudioData to a native AudioData. + * @param opts Conversion options + */ + toNative(opts = {}) { + const ret = new globalThis.AudioData({ + data: this._data, + format: this.format, + sampleRate: this.sampleRate, + numberOfFrames: this.numberOfFrames, + numberOfChannels: this.numberOfChannels, + timestamp: this.timestamp, + transfer: opts.transfer ? [this._data.buffer] : [] + }); + if (opts.transfer) + this.close(); + return ret; + } + /** + * Convert a native AudioData to a polyfill AudioData. WARNING: Inefficient, + * as the data cannot be transferred out. + * @param from AudioData to copy in + */ + static fromNative(from /* native AudioData */) { + const ad = from; + const isInterleaved_ = isInterleaved(ad.format); + const planes = isInterleaved_ ? 1 : ad.numberOfChannels; + const sizePerPlane = ad.allocationSize({ + format: ad.format, + planeIndex: 0 + }); + const data = new Uint8Array(sizePerPlane); + for (let p = 0; p < planes; p++) { + ad.copyTo(data.subarray(p * sizePerPlane), { + format: ad.format, + planeIndex: p + }); + } + return new AudioData({ + data, + format: ad.format, + sampleRate: ad.sampleRate, + numberOfFrames: ad.numberOfFrames, + numberOfChannels: ad.numberOfChannels, + timestamp: ad.timestamp, + transfer: [data.buffer] + }); + } + // Internal + _libavGetData() { return this._data; } + static _checkValidAudioDataInit(init) { + // 1. If sampleRate less than or equal to 0, return false. + if (init.sampleRate <= 0) + throw new TypeError(`Invalid sample rate ${init.sampleRate}`); + // 2. If numberOfFrames = 0, return false. + if (init.numberOfFrames <= 0) + throw new TypeError(`Invalid number of frames ${init.numberOfFrames}`); + // 3. 
If numberOfChannels = 0, return false. + if (init.numberOfChannels <= 0) + throw new TypeError(`Invalid number of channels ${init.numberOfChannels}`); + // 4. Verify data has enough data by running the following steps: + { + // 1. Let totalSamples be the product of multiplying numberOfFrames by numberOfChannels. + const totalSamples = init.numberOfFrames * init.numberOfChannels; + // 2. Let bytesPerSample be the number of bytes per sample, as defined by the format. + const bytesPerSample_ = bytesPerSample(init.format); + // 3. Let totalSize be the product of multiplying bytesPerSample with totalSamples. + const totalSize = bytesPerSample_ * totalSamples; + // 4. Let dataSize be the size in bytes of data. + const dataSize = init.data.byteLength; + // 5. If dataSize is less than totalSize, return false. + if (dataSize < totalSize) + throw new TypeError(`This audio data must be at least ${totalSize} bytes`); + } + // 5. Return true. + } + allocationSize(options) { + // 1. If [[Detached]] is true, throw an InvalidStateError DOMException. + if (this._data === null) + throw new DOMException("Detached", "InvalidStateError"); + /* 2. Let copyElementCount be the result of running the Compute Copy + * Element Count algorithm with options. */ + const copyElementCount = this._computeCopyElementCount(options); + // 3. Let destFormat be the value of [[format]]. + let destFormat = this.format; + // 4. If options.format exists, assign options.format to destFormat. + if (options.format) + destFormat = options.format; + /* 5. Let bytesPerSample be the number of bytes per sample, as defined + * by the destFormat. */ + const bytesPerSample_ = bytesPerSample(destFormat); + /* 6. Return the product of multiplying bytesPerSample by + * copyElementCount. */ + return bytesPerSample_ * copyElementCount; + } + _computeCopyElementCount(options) { + // 1. Let destFormat be the value of [[format]]. + let destFormat = this.format; + // 2. 
If options.format exists, assign options.format to destFormat. + if (options.format) + destFormat = options.format; + /* 3. If destFormat describes an interleaved AudioSampleFormat and + * options.planeIndex is greater than 0, throw a RangeError. */ + const isInterleaved_ = isInterleaved(destFormat); + if (isInterleaved_) { + if (options.planeIndex > 0) + throw new RangeError("Invalid plane"); + } + /* 4. Otherwise, if destFormat describes a planar AudioSampleFormat and + * if options.planeIndex is greater or equal to [[number of channels]], + * throw a RangeError. */ + else if (options.planeIndex >= this.numberOfChannels) + throw new RangeError("Invalid plane"); + /* 5. If [[format]] does not equal destFormat and the User Agent does + * not support the requested AudioSampleFormat conversion, throw a + * NotSupportedError DOMException. Conversion to f32-planar MUST always + * be supported. */ + if (this.format !== destFormat && + destFormat !== "f32-planar") + throw new DOMException("Only conversion to f32-planar is supported", "NotSupportedError"); + /* 6. Let frameCount be the number of frames in the plane identified by + * options.planeIndex. */ + const frameCount = this.numberOfFrames; // All planes have the same number of frames + /* 7. If options.frameOffset is greater than or equal to frameCount, + * throw a RangeError. */ + const frameOffset = options.frameOffset || 0; + if (frameOffset >= frameCount) + throw new RangeError("Frame offset out of range"); + /* 8. Let copyFrameCount be the difference of subtracting + * options.frameOffset from frameCount. */ + let copyFrameCount = frameCount - frameOffset; + // 9. If options.frameCount exists: + if (typeof options.frameCount === "number") { + /* 1. If options.frameCount is greater than copyFrameCount, throw a + * RangeError. */ + if (options.frameCount >= copyFrameCount) + throw new RangeError("Frame count out of range"); + // 2. Otherwise, assign options.frameCount to copyFrameCount. 
+ copyFrameCount = options.frameCount; + } + // 10. Let elementCount be copyFrameCount. + let elementCount = copyFrameCount; + /* 11. If destFormat describes an interleaved AudioSampleFormat, + * mutliply elementCount by [[number of channels]] */ + if (isInterleaved_) + elementCount *= this.numberOfChannels; + // 12. return elementCount. + return elementCount; + } + copyTo(destination, options) { + // 1. If [[Detached]] is true, throw an InvalidStateError DOMException. + if (this._data === null) + throw new DOMException("Detached", "InvalidStateError"); + /* 2. Let copyElementCount be the result of running the Compute Copy + * Element Count algorithm with options. */ + const copyElementCount = this._computeCopyElementCount(options); + // 3. Let destFormat be the value of [[format]]. + let destFormat = this.format; + // 4. If options.format exists, assign options.format to destFormat. + if (options.format) + destFormat = options.format; + /* 5. Let bytesPerSample be the number of bytes per sample, as defined + * by the destFormat. */ + const bytesPerSample_ = bytesPerSample(destFormat); + /* 6. If the product of multiplying bytesPerSample by copyElementCount + * is greater than destination.byteLength, throw a RangeError. */ + if (bytesPerSample_ * copyElementCount > destination.byteLength) + throw new RangeError("Buffer too small"); + /* 7. Let resource be the media resource referenced by [[resource + * reference]]. */ + const resource = this._data; + /* 8. Let planeFrames be the region of resource corresponding to + * options.planeIndex. */ + const planeFrames = resource.subarray(options.planeIndex * this.numberOfFrames); + const frameOffset = options.frameOffset || 0; + const numberOfChannels = this.numberOfChannels; + /* 9. Copy elements of planeFrames into destination, starting with the + * frame positioned at options.frameOffset and stopping after + * copyElementCount samples have been copied. 
If destFormat does not + * equal [[format]], convert elements to the destFormat + * AudioSampleFormat while making the copy. */ + if (this.format === destFormat) { + const dest = audioView(destFormat, destination.buffer || destination, destination.byteOffset || 0); + if (isInterleaved(destFormat)) { + dest.set(planeFrames.subarray(frameOffset * numberOfChannels, frameOffset * numberOfChannels + copyElementCount)); + } + else { + dest.set(planeFrames.subarray(frameOffset, frameOffset + copyElementCount)); + } + } + else { + // Actual conversion necessary. Always to f32-planar. + const out = audioView(destFormat, destination.buffer || destination, destination.byteOffset || 0); + // First work out the conversion + let sub = 0; + let div = 1; + switch (this.format) { + case "u8": + case "u8-planar": + sub = 0x80; + div = 0x80; + break; + case "s16": + case "s16-planar": + div = 0x8000; + break; + case "s32": + case "s32-planar": + div = 0x80000000; + break; + } + // Then do it + if (isInterleaved(this.format)) { + for (let i = options.planeIndex + frameOffset * numberOfChannels, o = 0; o < copyElementCount; i += numberOfChannels, o++) + out[o] = (planeFrames[i] - sub) / div; + } + else { + for (let i = frameOffset, o = 0; o < copyElementCount; i++, o++) + out[o] = (planeFrames[i] - sub) / div; + } + } + } + clone() { + // 1. If [[Detached]] is true, throw an InvalidStateError DOMException. + if (this._data === null) + throw new DOMException("Detached", "InvalidStateError"); + /* 2. Return the result of running the Clone AudioData algorithm with + * this. */ + return new AudioData({ + format: this.format, + sampleRate: this.sampleRate, + numberOfFrames: this.numberOfFrames, + numberOfChannels: this.numberOfChannels, + timestamp: this.timestamp, + data: this._data + }); + } + close() { + this._data = null; + } + }; + /** + * Construct the appropriate type of ArrayBufferView for the given sample + * format and buffer. 
+ * @param format Sample format + * @param buffer ArrayBuffer (NOT view) + * @param byteOffset Offset into the buffer + */ + function audioView(format, buffer, byteOffset) { + switch (format) { + case "u8": + case "u8-planar": + return new Uint8Array(buffer, byteOffset); + case "s16": + case "s16-planar": + return new Int16Array(buffer, byteOffset); + case "s32": + case "s32-planar": + return new Int32Array(buffer, byteOffset); + case "f32": + case "f32-planar": + return new Float32Array(buffer, byteOffset); + default: + throw new TypeError("Invalid AudioSampleFormat"); + } + } + /** + * Number of bytes per sample of this format. + * @param format Sample format + */ + function bytesPerSample(format) { + switch (format) { + case "u8": + case "u8-planar": + return 1; + case "s16": + case "s16-planar": + return 2; + case "s32": + case "s32-planar": + case "f32": + case "f32-planar": + return 4; + default: + throw new TypeError("Invalid AudioSampleFormat"); + } + } + /** + * Is this format interleaved? + * @param format Sample format + */ + function isInterleaved(format) { + switch (format) { + case "u8": + case "s16": + case "s32": + case "f32": + return true; + case "u8-planar": + case "s16-planar": + case "s32-planar": + case "f32-planar": + return false; + default: + throw new TypeError("Invalid AudioSampleFormat"); + } + } + + /* + * This file is part of the libav.js WebCodecs Polyfill implementation. The + * interface implemented is derived from the W3C standard. No attribution is + * required when using this library. + * + * Copyright (c) 2024 Yahweasel + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY + * SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION + * OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN + * CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + /* Unfortunately, browsers don't let us extend EventTarget. So, we implement an + * EventTarget interface with a “has-a” relationship instead of an “is-a” + * relationship. We have an event target, and expose its event functions as our + * own. */ + class HasAEventTarget { + constructor() { + const ev = this._eventer = new EventTarget(); + this.addEventListener = ev.addEventListener.bind(ev); + this.removeEventListener = ev.removeEventListener.bind(ev); + this.dispatchEvent = ev.dispatchEvent.bind(ev); + } + } + class DequeueEventTarget extends HasAEventTarget { + constructor() { + super(); + this.addEventListener("dequeue", ev => { + if (this.ondequeue) + this.ondequeue(ev); + }); + } + } + + /* + * This file is part of the libav.js WebCodecs Polyfill implementation. The + * interface implemented is derived from the W3C standard. No attribution is + * required when using this library. + * + * Copyright (c) 2021-2024 Yahweasel + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY + * SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION + * OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN + * CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
 */
/* TypeScript's downlevel emit helper for async/await: wraps a generator
 * function into a Promise, stepping it on each resolved awaited value.
 * Generated code — do not edit by hand. */
var __awaiter$8 = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// Wrapper function to use (set via setLibAV; falls back to the global LibAV)
let LibAVWrapper = null;
// Currently available libav instances (a reuse pool fed by free())
const libavs = [];
// Options required to create a LibAV instance (set via setLibAVOptions)
let libavOptions = {};
/**
 * Supported decoders. null until load populates it via codecs().
 */
let decoders = null;
/**
 * Supported encoders. null until load populates it via codecs().
 */
let encoders = null;
/**
 * Set the libav wrapper to use.
 */
function setLibAV(to) {
    LibAVWrapper = to;
}
/**
 * Set the libav loading options.
 */
function setLibAVOptions(to) {
    libavOptions = to;
}
/**
 * Get a libav instance: reuses one from the pool if available, otherwise
 * creates a fresh instance with the configured options.
 */
function get() {
    return __awaiter$8(this, void 0, void 0, function* () {
        if (libavs.length)
            return libavs.shift();
        return yield LibAVWrapper.LibAV(libavOptions);
    });
}
/**
 * Free a libav instance for later reuse.
+ */ + function free(libav) { + libavs.push(libav); + } + /** + * Get the list of encoders/decoders supported by libav (which are also + * supported by this polyfill) + * @param encoders Check for encoders instead of decoders + */ + function codecs(encoders) { + return __awaiter$8(this, void 0, void 0, function* () { + const libav = yield get(); + const ret = []; + for (const [avname, codec] of [ + ["flac", "flac"], + ["libopus", "opus"], + ["libvorbis", "vorbis"], + ["libaom-av1", "av01"], + ["libvpx-vp9", "vp09"], + ["libvpx", "vp8"] + ]) { + if (encoders) { + if (yield libav.avcodec_find_encoder_by_name(avname)) + ret.push(codec); + } + else { + if (yield libav.avcodec_find_decoder_by_name(avname)) + ret.push(codec); + } + } + free(libav); + return ret; + }); + } + /** + * Load the lists of supported decoders and encoders. + */ + function load$2() { + return __awaiter$8(this, void 0, void 0, function* () { + LibAVWrapper = LibAVWrapper || LibAV; + decoders = yield codecs(false); + encoders = yield codecs(true); + }); + } + /** + * Convert a decoder from the codec registry (or libav.js-specific parameters) + * to libav.js. Returns null if unsupported. 
+ */ + function decoder(codec, config) { + if (typeof codec === "string") { + codec = codec.replace(/\..*/, ""); + let outCodec = codec; + switch (codec) { + // Audio + case "flac": + if (typeof config.description === "undefined") { + // description is required per spec, but one can argue, if this limitation makes sense + return null; + } + break; + case "opus": + if (typeof config.description !== "undefined") { + // ogg bitstream is not supported by the current implementation + return null; + } + outCodec = "libopus"; + break; + case "vorbis": + if (typeof config.description === "undefined") { + // description is required per spec, but one can argue, if this limitation makes sense + return null; + } + outCodec = "libvorbis"; + break; + // Video + case "av01": + outCodec = "libaom-av1"; + break; + case "vp09": + outCodec = "libvpx-vp9"; + break; + case "vp8": + outCodec = "libvpx"; + break; + // Unsupported + case "mp3": + case "mp4a": + case "ulaw": + case "alaw": + case "avc1": + case "avc3": + case "hev1": + case "hvc1": + return null; + // Unrecognized + default: + throw new TypeError("Unrecognized codec"); + } + // Check whether we actually support this codec + if (!(decoders.indexOf(codec) >= 0)) + return null; + return { codec: outCodec }; + } + else { + return codec.libavjs; + } + } + /** + * Convert an encoder from the codec registry (or libav.js-specific parameters) + * to libav.js. Returns null if unsupported. 
+ */ + function encoder(codec, config) { + if (typeof codec === "string") { + const codecParts = codec.split("."); + codec = codecParts[0]; + let outCodec = codec; + const ctx = {}; + const options = {}; + let video = false; + switch (codec) { + // Audio + case "flac": + ctx.sample_fmt = 2 /* S32 */; + ctx.bit_rate = 0; + if (typeof config.flac === "object" && + config.flac !== null) { + const flac = config.flac; + // FIXME: Check block size + if (typeof flac.blockSize === "number") + ctx.frame_size = flac.blockSize; + if (typeof flac.compressLevel === "number") { + // Not supported + return null; + } + } + break; + case "opus": + outCodec = "libopus"; + ctx.sample_fmt = 3 /* FLT */; + ctx.sample_rate = 48000; + if (typeof config.opus === "object" && + config.opus !== null) { + const opus = config.opus; + // FIXME: Check frame duration + if (typeof opus.frameDuration === "number") + options.frame_duration = "" + (opus.frameDuration / 1000); + if (typeof opus.complexity !== "undefined") { + // We don't support the complexity option + return null; + } + if (typeof opus.packetlossperc === "number") { + if (opus.packetlossperc < 0 || opus.packetlossperc > 100) + return null; + options.packet_loss = "" + opus.packetlossperc; + } + if (typeof opus.useinbandfec === "boolean") + options.fec = opus.useinbandfec ? 
"1" : "0"; + if (typeof opus.usedtx === "boolean") { + // We don't support the usedtx option + return null; + } + if (typeof opus.format === "string") { + // ogg bitstream is not supported + if (opus.format !== "opus") + return null; + } + } + break; + case "vorbis": + outCodec = "libvorbis"; + ctx.sample_fmt = 8 /* FLTP */; + break; + // Video + case "av01": + video = true; + outCodec = "libaom-av1"; + if (config.latencyMode === "realtime") { + options.usage = "realtime"; + options["cpu-used"] = "8"; + } + // Check for advanced options + if (!av1Advanced(codecParts, ctx)) + return null; + break; + case "vp09": + video = true; + outCodec = "libvpx-vp9"; + if (config.latencyMode === "realtime") { + options.quality = "realtime"; + options["cpu-used"] = "8"; + } + // Check for advanced options + if (!vp9Advanced(codecParts, ctx)) + return null; + break; + case "vp8": + video = true; + outCodec = "libvpx"; + if (config.latencyMode === "realtime") { + options.quality = "realtime"; + options["cpu-used"] = "8"; + } + break; + // Unsupported + case "mp3": + case "mp4a": + case "ulaw": + case "alaw": + case "avc1": + return null; + // Unrecognized + default: + throw new TypeError("Unrecognized codec"); + } + // Check whether we actually support this codec + if (!(encoders.indexOf(codec) >= 0)) + return null; + if (video) { + if (typeof ctx.pix_fmt !== "number") + ctx.pix_fmt = 0 /* YUV420P */; + const width = ctx.width = config.width; + const height = ctx.height = config.height; + if (config.framerate) { + /* FIXME: We need this as a rational, not a floating point, and + * this is obviously not the right way to do it */ + ctx.framerate_num = Math.round(config.framerate); + ctx.framerate_den = 1; + } + // Check for non-square pixels + const dWidth = config.displayWidth || config.width; + const dHeight = config.displayHeight || config.height; + if (dWidth !== width || dHeight !== height) { + ctx.sample_aspect_ratio_num = dWidth * height; + ctx.sample_aspect_ratio_den = 
dHeight * width; + } + } + else { + if (!ctx.sample_rate) + ctx.sample_rate = config.sampleRate || 48000; + if (config.numberOfChannels) { + const n = config.numberOfChannels; + ctx.channel_layout = (n === 1) ? 4 : ((1 << n) - 1); + } + } + if (typeof ctx.bit_rate !== "number" && config.bitrate) { + // NOTE: CBR requests are, quite rightly, ignored + ctx.bit_rate = config.bitrate; + } + return { + codec: outCodec, + ctx, options + }; + } + else { + return codec.libavjs; + } + } + /** + * Handler for advanced options for AV1. + * @param codecParts .-separated parts of the codec string. + * @param ctx Context to populate with advanced options. + */ + function av1Advanced(codecParts, ctx) { + if (codecParts[1]) { + const profile = +codecParts[1]; + if (profile >= 0 && profile <= 2) + ctx.profile = profile; + else + throw new TypeError("Invalid AV1 profile"); + } + if (codecParts[2]) { + const level = +codecParts[2]; + if (level >= 0 && level <= 23) + ctx.level = level; + else + throw new TypeError("Invalid AV1 level"); + } + if (codecParts[3]) { + switch (codecParts[3]) { + case "M": + // Default + break; + case "H": + if (ctx.level && ctx.level >= 8) { + // Valid but unsupported + return false; + } + else { + throw new TypeError("The AV1 high tier is only available for level 4.0 and up"); + } + default: + throw new TypeError("Invalid AV1 tier"); + } + } + if (codecParts[4]) { + const depth = +codecParts[3]; + if (depth === 10 || depth === 12) { + // Valid but unsupported + return false; + } + else if (depth !== 8) { + throw new TypeError("Invalid AV1 bit depth"); + } + } + if (codecParts[5]) { + // Monochrome + switch (codecParts[5]) { + case "0": + // Default + break; + case "1": + // Valid but unsupported + return false; + default: + throw new TypeError("Invalid AV1 monochrome flag"); + } + } + if (codecParts[6]) { + // Subsampling mode + switch (codecParts[6]) { + case "000": // YUV444 + ctx.pix_fmt = 5 /* YUV444P */; + break; + case "100": // YUV422 + ctx.pix_fmt 
= 4 /* YUV422P */; + break; + case "110": // YUV420P (default) + ctx.pix_fmt = 0 /* YUV420P */; + break; + case "111": // Monochrome + return false; + default: + throw new TypeError("Invalid AV1 subsampling mode"); + } + } + /* The remaining values have to do with color formats, which we don't + * support correctly anyway */ + return true; + } + /** + * Handler for advanced options for VP9. + * @param codecParts .-separated parts of the codec string. + * @param ctx Context to populate with advanced options. + */ + function vp9Advanced(codecParts, ctx) { + if (codecParts[1]) { + const profile = +codecParts[1]; + if (profile >= 0 && profile <= 3) + ctx.profile = profile; + else + throw new TypeError("Invalid VP9 profile"); + } + if (codecParts[2]) { + const level = [+codecParts[2][0], +codecParts[2][1]]; + if (level[0] >= 1 && level[0] <= 4) { + if (level[1] >= 0 && level[1] <= 1) ; + else { + throw new TypeError("Invalid VP9 level"); + } + } + else if (level[0] >= 5 && level[0] <= 6) { + if (level[1] >= 0 && level[1] <= 2) ; + else { + throw new TypeError("Invalid VP9 level"); + } + } + else { + throw new TypeError("Invalid VP9 level"); + } + ctx.level = +codecParts[2]; + } + if (codecParts[3]) { + const depth = +codecParts[3]; + if (depth === 10 || depth === 12) { + // Valid but unsupported + return false; + } + else if (depth !== 8) { + throw new TypeError("Invalid VP9 bit depth"); + } + } + if (codecParts[4]) { + const chromaMode = +codecParts[4]; + switch (chromaMode) { + case 0: + case 1: + // FIXME: These are subtly different YUV420P modes, but we treat them the same + ctx.pix_fmt = 0 /* YUV420P */; + break; + case 2: // YUV422 + ctx.pix_fmt = 4 /* YUV422P */; + break; + case 3: // YUV444 + ctx.pix_fmt = 5 /* YUV444P */; + break; + default: + throw new TypeError("Invalid VP9 chroma subsampling format"); + } + } + /* The remaining values have to do with color formats, which we don't + * support correctly anyway */ + return true; + } + + /* + * This file is part 
of the libav.js WebCodecs Polyfill implementation. The + * interface implemented is derived from the W3C standard. No attribution is + * required when using this library. + * + * Copyright (c) 2021 Yahweasel + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY + * SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION + * OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN + * CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + /** + * Clone this configuration. Just copies over the supported/recognized fields. + */ + function cloneConfig(config, fields) { + const ret = {}; + for (const field of fields) { + if (field in config) + ret[field] = config[field]; + } + return ret; + } + + /* + * This file is part of the libav.js WebCodecs Polyfill implementation. The + * interface implemented is derived from the W3C standard. No attribution is + * required when using this library. + * + * Copyright (c) 2021-2024 Yahweasel + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY + * SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION + * OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN + * CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + var __awaiter$7 = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + let AudioDecoder$1 = class AudioDecoder extends DequeueEventTarget { + constructor(init) { + super(); + // 1. Let d be a new AudioDecoder object. + // 2. Assign a new queue to [[control message queue]]. + this._p = Promise.all([]); + // 3. Assign false to [[message queue blocked]]. + // (unused in polyfill) + // 4. Assign null to [[codec implementation]]. + this._libav = null; + this._codec = this._c = this._pkt = this._frame = 0; + // 5. Assign the result of starting a new parallel queue to [[codec work queue]]. + // (shared with control message queue) + // 6. Assign false to [[codec saturated]]. + // (codec is never saturated) + // 7. Assign init.output to [[output callback]]. + this._output = init.output; + // 8. Assign init.error to [[error callback]]. + this._error = init.error; + // 9. Assign true to [[key chunk required]]. + // (implicit part of the underlying codec) + // 10. Assign "unconfigured" to [[state]] + this.state = "unconfigured"; + // 11. 
Assign 0 to [[decodeQueueSize]]. + this.decodeQueueSize = 0; + // 12. Assign a new list to [[pending flush promises]]. + // (shared with control message queue) + // 13. Assign false to [[dequeue event scheduled]]. + // (shared with control message queue) + // 14. Return d. + } + configure(config) { + // 1. If config is not a valid AudioDecoderConfig, throw a TypeError. + // NOTE: We don't support sophisticated codec string parsing (yet) + // 2. If [[state]] is “closed”, throw an InvalidStateError DOMException. + if (this.state === "closed") + throw new DOMException("Decoder is closed", "InvalidStateError"); + // Free any internal state + if (this._libav) + this._p = this._p.then(() => this._free()); + // 3. Set [[state]] to "configured". + this.state = "configured"; + // 4. Set [[key chunk required]] to true. + // (implicit part of underlying codecs) + // 5. Queue a control message to configure the decoder with config. + this._p = this._p.then(() => __awaiter$7(this, void 0, void 0, function* () { + /* 1. Let supported be the result of running the Check + * Configuration Support algorithm with config. */ + let udesc = void 0; + if (config.description) { + if (ArrayBuffer.isView(config.description)) { + const descView = config.description; + udesc = new Uint8Array(descView.buffer, descView.byteOffset, descView.byteLength); + } + else { + const descBuf = config.description; + udesc = new Uint8Array(descBuf); + } + } + const supported = decoder(config.codec, config); + /* 2. If supported is false, queue a task to run the Close + * AudioDecoder algorithm with NotSupportedError and abort these + * steps. */ + if (!supported) { + this._closeAudioDecoder(new DOMException("Unsupported codec", "NotSupportedError")); + return; + } + /* 3. If needed, assign [[codec implementation]] with an + * implementation supporting config. 
*/ + const libav = this._libav = yield get(); + const codecpara = yield libav.avcodec_parameters_alloc(); + const ps = [ + libav.AVCodecParameters_channels_s(codecpara, config.numberOfChannels), + libav.AVCodecParameters_sample_rate_s(codecpara, config.sampleRate), + libav.AVCodecParameters_codec_type_s(codecpara, 1 /* AVMEDIA_TYPE_AUDIO */) + ]; + let extraDataPtr = 0; + if (!udesc) { + ps.push(libav.AVCodecParameters_extradata_s(codecpara, 0)); + ps.push(libav.AVCodecParameters_extradata_size_s(codecpara, 0)); + } + else { + ps.push(libav.AVCodecParameters_extradata_size_s(codecpara, udesc.byteLength)); + extraDataPtr = yield libav.calloc(udesc.byteLength + 64 /* AV_INPUT_BUFFER_PADDING_SIZE */, 1); + ps.push(libav.copyin_u8(extraDataPtr, udesc)); + ps.push(libav.AVCodecParameters_extradata_s(codecpara, extraDataPtr)); + } + yield Promise.all(ps); + // 4. Configure [[codec implementation]] with config. + [this._codec, this._c, this._pkt, this._frame] = + yield libav.ff_init_decoder(supported.codec, codecpara); + const fps = [ + libav.AVCodecContext_time_base_s(this._c, 1, 1000), + libav.avcodec_parameters_free_js(codecpara) + ]; + if (extraDataPtr) + fps.push(libav.free(extraDataPtr)); + yield Promise.all(fps); + // 5. queue a task to run the following steps: + // 1. Assign false to [[message queue blocked]]. + // 2. Queue a task to Process the control message queue. + // (shared queue) + })).catch(this._error); + } + // Our own algorithm, close libav + _free() { + return __awaiter$7(this, void 0, void 0, function* () { + if (this._c) { + yield this._libav.ff_free_decoder(this._c, this._pkt, this._frame); + this._codec = this._c = this._pkt = this._frame = 0; + } + if (this._libav) { + free(this._libav); + this._libav = null; + } + }); + } + _closeAudioDecoder(exception) { + // 1. Run the Reset AudioDecoder algorithm with exception. + this._resetAudioDecoder(exception); + // 2. Set [[state]] to "closed". + this.state = "closed"; + /* 3. 
Clear [[codec implementation]] and release associated system + * resources. */ + this._p = this._p.then(() => this._free()); + /* 4. If exception is not an AbortError DOMException, queue a task on + * the control thread event loop to invoke the [[error callback]] with + * exception. */ + if (exception.name !== "AbortError") + this._p = this._p.then(() => { this._error(exception); }); + } + _resetAudioDecoder(exception) { + // 1. If [[state]] is "closed", throw an InvalidStateError. + if (this.state === "closed") + throw new DOMException("Decoder closed", "InvalidStateError"); + // 2. Set [[state]] to "unconfigured". + this.state = "unconfigured"; + // ... really, we're just going to free it now + this._p = this._p.then(() => this._free()); + } + decode(chunk) { + // 1. If [[state]] is not "configured", throw an InvalidStateError. + if (this.state !== "configured") + throw new DOMException("Unconfigured", "InvalidStateError"); + // 2. If [[key chunk required]] is true: + // 1. If chunk.[[type]] is not key, throw a DataError. + /* 2. Implementers SHOULD inspect the chunk’s [[internal data]] to + * verify that it is truly a key chunk. If a mismatch is detected, + * throw a DataError. */ + // 3. Otherwise, assign false to [[key chunk required]]. + // (handled within the codec) + // 3. Increment [[decodeQueueSize]]. + this.decodeQueueSize++; + // 4. Queue a control message to decode the chunk. + this._p = this._p.then(() => __awaiter$7(this, void 0, void 0, function* () { + const libav = this._libav; + const c = this._c; + const pkt = this._pkt; + const frame = this._frame; + let decodedOutputs = null; + // (1. and 2. relate to saturation) + // 3. Decrement [[decodeQueueSize]] and run the Schedule Dequeue Event algorithm. + this.decodeQueueSize--; + this.dispatchEvent(new CustomEvent("dequeue")); + // 1. Attempt to use [[codec implementation]] to decode the chunk. 
+ try { + // Convert to a libav packet + const ptsFull = Math.floor(chunk.timestamp / 1000); + const [pts, ptshi] = libav.f64toi64(ptsFull); + const packet = { + data: chunk._libavGetData(), + pts, + ptshi, + dts: pts, + dtshi: ptshi + }; + if (chunk.duration) { + packet.duration = Math.floor(chunk.duration / 1000); + packet.durationhi = 0; + } + decodedOutputs = yield libav.ff_decode_multi(c, pkt, frame, [packet]); + /* 2. If decoding results in an error, queue a task to run the Close + * AudioDecoder algorithm with EncodingError and return. */ + } + catch (ex) { + this._p = this._p.then(() => { + this._closeAudioDecoder(ex); + }); + return; + } + /* 3. If [[codec saturated]] equals true and + * [[codec implementation]] is no longer saturated, queue a task + * to perform the following steps: */ + // 1. Assign false to [[codec saturated]]. + // 2. Process the control message queue. + // (no saturation) + /* 4. Let decoded outputs be a list of decoded audio data outputs + * emitted by [[codec implementation]]. */ + /* 5. If decoded outputs is not empty, queue a task to run the + * Output AudioData algorithm with decoded outputs. */ + if (decodedOutputs) + this._outputAudioData(decodedOutputs); + })).catch(this._error); + } + _outputAudioData(outputs) { + const libav = this._libav; + for (const frame of outputs) { + // 1. 
format + let format; + let planar = false; + switch (frame.format) { + case libav.AV_SAMPLE_FMT_U8: + format = "u8"; + break; + case libav.AV_SAMPLE_FMT_S16: + format = "s16"; + break; + case libav.AV_SAMPLE_FMT_S32: + format = "s32"; + break; + case libav.AV_SAMPLE_FMT_FLT: + format = "f32"; + break; + case libav.AV_SAMPLE_FMT_U8P: + format = "u8"; + planar = true; + break; + case libav.AV_SAMPLE_FMT_S16P: + format = "s16"; + planar = true; + break; + case libav.AV_SAMPLE_FMT_S32P: + format = "s32"; + planar = true; + break; + case libav.AV_SAMPLE_FMT_FLTP: + format = "f32"; + planar = true; + break; + default: + throw new DOMException("Unsupported libav format!", "EncodingError"); + } + // 2. sampleRate + const sampleRate = frame.sample_rate; + // 3. numberOfFrames + const numberOfFrames = frame.nb_samples; + // 4. numberOfChannels + const numberOfChannels = frame.channels; + // 5. timestamp + const timestamp = libav.i64tof64(frame.pts, frame.ptshi) * 1000; + // 6. data + let raw; + if (planar) { + let ct = 0; + for (let i = 0; i < frame.data.length; i++) + ct += frame.data[i].length; + raw = new (frame.data[0].constructor)(ct); + ct = 0; + for (let i = 0; i < frame.data.length; i++) { + const part = frame.data[i]; + raw.set(part, ct); + ct += part.length; + } + } + else { + raw = frame.data; + } + const data = new AudioData$1({ + format, sampleRate, numberOfFrames, numberOfChannels, + timestamp, data: raw + }); + this._output(data); + } + } + flush() { + /* 1. If [[state]] is not "configured", return a promise rejected with + * InvalidStateError DOMException. */ + if (this.state !== "configured") + throw new DOMException("Invalid state", "InvalidStateError"); + // 2. Set [[key chunk required]] to true. + // (part of the codec) + // 3. Let promise be a new Promise. + // 4. Append promise to [[pending flush promises]]. + // 5. Queue a control message to flush the codec with promise. + // 6. Process the control message queue. + // 7. Return promise. 
+ const ret = this._p.then(() => __awaiter$7(this, void 0, void 0, function* () { + // 1. Signal [[codec implementation]] to emit all internal pending outputs. + if (!this._c) + return; + // Make sure any last data is flushed + const libav = this._libav; + const c = this._c; + const pkt = this._pkt; + const frame = this._frame; + let decodedOutputs = null; + try { + decodedOutputs = yield libav.ff_decode_multi(c, pkt, frame, [], true); + } + catch (ex) { + this._p = this._p.then(() => { + this._closeAudioDecoder(ex); + }); + } + /* 2. Let decoded outputs be a list of decoded audio data outputs + * emitted by [[codec implementation]]. */ + // 3. Queue a task to perform these steps: + { + /* 1. If decoded outputs is not empty, run the Output AudioData + * algorithm with decoded outputs. */ + if (decodedOutputs) + this._outputAudioData(decodedOutputs); + // 2. Remove promise from [[pending flush promises]]. + // 3. Resolve promise. + } + })); + this._p = ret; + return ret; + } + reset() { + this._resetAudioDecoder(new DOMException("Reset", "AbortError")); + } + close() { + this._closeAudioDecoder(new DOMException("Close", "AbortError")); + } + static isConfigSupported(config) { + return __awaiter$7(this, void 0, void 0, function* () { + const dec = decoder(config.codec, config); + let supported = false; + if (dec) { + const libav = yield get(); + try { + const [, c, pkt, frame] = yield libav.ff_init_decoder(dec.codec); + yield libav.ff_free_decoder(c, pkt, frame); + supported = true; + } + catch (ex) { } + yield free(libav); + } + return { + supported, + config: cloneConfig(config, ["codec", "sampleRate", "numberOfChannels"]) + }; + }); + } + }; + + /* + * This file is part of the libav.js WebCodecs Polyfill implementation. The + * interface implemented is derived from the W3C standard. No attribution is + * required when using this library. 
+ * + * Copyright (c) 2021-2024 Yahweasel + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY + * SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION + * OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN + * CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + var __awaiter$6 = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + let AudioEncoder$1 = class AudioEncoder extends DequeueEventTarget { + constructor(init) { + super(); + // Metadata argument for output + this._outputMetadata = null; + this._outputMetadataFilled = false; + this._pts = null; + // 1. Let e be a new AudioEncoder object. + // 2. Assign a new queue to [[control message queue]]. + this._p = Promise.all([]); + // 3. Assign false to [[message queue blocked]]. + // (unused in polyfill) + // 4. Assign null to [[codec implementation]]. 
+ this._libav = null; + this._codec = this._c = this._frame = this._pkt = 0; + this._filter_in_ctx = this._filter_out_ctx = null; + this._filter_graph = this._buffersrc_ctx = this._buffersink_ctx = 0; + /* 5. Assign the result of starting a new parallel queue to + * [[codec work queue]]. */ + // (shared queue) + // 6. Assign false to [[codec saturated]]. + // (saturation unneeded in the polyfill) + // 7. Assign init.output to [[output callback]]. + this._output = init.output; + // 8. Assign init.error to [[error callback]]. + this._error = init.error; + // 9. Assign null to [[active encoder config]]. + // 10. Assign null to [[active output config]]. + // (both part of the codec) + // 11. Assign "unconfigured" to [[state]] + this.state = "unconfigured"; + // 12. Assign 0 to [[encodeQueueSize]]. + this.encodeQueueSize = 0; + // 13. Assign a new list to [[pending flush promises]]. + // 14. Assign false to [[dequeue event scheduled]]. + // (shared queue) + // 15. Return e. + } + configure(config) { + const self = this; + // 1. If config is not a valid AudioEncoderConfig, throw a TypeError. + // NOTE: We don't support sophisticated codec string parsing (yet) + // 2. If [[state]] is "closed", throw an InvalidStateError. + if (this.state === "closed") + throw new DOMException("Encoder is closed", "InvalidStateError"); + // Free any internal state + if (this._libav) + this._p = this._p.then(() => this._free()); + // 3. Set [[state]] to "configured". + this.state = "configured"; + // 4. Queue a control message to configure the encoder using config. + this._p = this._p.then(function () { + return __awaiter$6(this, void 0, void 0, function* () { + /* 1. Let supported be the result of running the Check + * Configuration Support algorithm with config. 
*/ + const supported = encoder(config.codec, config); + // Get the output metadata now + self._outputMetadata = { decoderConfig: { + codec: config.codec, + // Rest will be filled in when we get data + sampleRate: 0, + numberOfChannels: 0 + } }; + self._outputMetadataFilled = false; + /* 2. If supported is false, queue a task to run the Close + * AudioEncoder algorithm with NotSupportedError and abort these + * steps. */ + if (!supported) { + self._closeAudioEncoder(new DOMException("Unsupported codec", "NotSupportedError")); + return; + } + /* 3. If needed, assign [[codec implementation]] with an + * implementation supporting config. */ + // 4. Configure [[codec implementation]] with config. + const libav = self._libav = yield get(); + // And initialize + let frame_size; + [self._codec, self._c, self._frame, self._pkt, frame_size] = + yield libav.ff_init_encoder(supported.codec, supported); + self._pts = null; + yield libav.AVCodecContext_time_base_s(self._c, 1, supported.ctx.sample_rate); + // Be ready to set up the filter + self._filter_out_ctx = { + sample_rate: supported.ctx.sample_rate, + sample_fmt: supported.ctx.sample_fmt, + channel_layout: supported.ctx.channel_layout, + frame_size + }; + // 5. queue a task to run the following steps: + // 1. Assign false to [[message queue blocked]]. + // 2. Queue a task to Process the control message queue. 
+ // (shared queue) + }); + }).catch(this._error); + } + // Our own algorithm, close libav + _free() { + return __awaiter$6(this, void 0, void 0, function* () { + if (this._filter_graph) { + yield this._libav.avfilter_graph_free_js(this._filter_graph); + this._filter_in_ctx = this._filter_out_ctx = null; + this._filter_graph = this._buffersrc_ctx = this._buffersink_ctx = + 0; + } + if (this._c) { + yield this._libav.ff_free_encoder(this._c, this._frame, this._pkt); + this._codec = this._c = this._frame = this._pkt = 0; + } + if (this._libav) { + free(this._libav); + this._libav = null; + } + }); + } + _closeAudioEncoder(exception) { + // 1. Run the Reset AudioEncoder algorithm with exception. + this._resetAudioEncoder(exception); + // 2. Set [[state]] to "closed". + this.state = "closed"; + /* 3. Clear [[codec implementation]] and release associated system + * resources. */ + this._p = this._p.then(() => this._free()); + /* 4. If exception is not an AbortError DOMException, invoke the + * [[error callback]] with exception. */ + if (exception.name !== "AbortError") + this._p = this._p.then(() => { this._error(exception); }); + } + _resetAudioEncoder(exception) { + // 1. If [[state]] is "closed", throw an InvalidStateError. + if (this.state === "closed") + throw new DOMException("Encoder closed", "InvalidStateError"); + // 2. Set [[state]] to "unconfigured". + this.state = "unconfigured"; + // ... really, we're just going to free it now + this._p = this._p.then(() => this._free()); + } + encode(data) { + /* 1. If the value of data’s [[Detached]] internal slot is true, throw + * a TypeError. */ + if (data._libavGetData() === null) + throw new TypeError("Detached"); + // 2. If [[state]] is not "configured", throw an InvalidStateError. + if (this.state !== "configured") + throw new DOMException("Unconfigured", "InvalidStateError"); + /* 3. Let dataClone hold the result of running the Clone AudioData + * algorithm with data. */ + const dataClone = data.clone(); + // 4. 
Increment [[encodeQueueSize]]. + this.encodeQueueSize++; + // 5. Queue a control message to encode dataClone. + this._p = this._p.then(() => __awaiter$6(this, void 0, void 0, function* () { + const libav = this._libav; + const c = this._c; + const pkt = this._pkt; + const framePtr = this._frame; + let encodedOutputs = null; + /* 3. Decrement [[encodeQueueSize]] and run the Schedule Dequeue + * Event algorithm. */ + this.encodeQueueSize--; + this.dispatchEvent(new CustomEvent("dequeue")); + /* 1. Attempt to use [[codec implementation]] to encode the media + * resource described by dataClone. */ + try { + // Arrange the data + let raw = dataClone._libavGetData(); + const nb_samples = dataClone.numberOfFrames; + if (!isInterleaved(dataClone.format)) { + let split = []; + for (let i = 0; i < dataClone.numberOfChannels; i++) + split.push(raw.subarray(i * nb_samples, (i + 1) * nb_samples)); + raw = split; + } + // Convert the format + let format; + switch (dataClone.format) { + case "u8": + format = libav.AV_SAMPLE_FMT_U8; + break; + case "s16": + format = libav.AV_SAMPLE_FMT_S16; + break; + case "s32": + format = libav.AV_SAMPLE_FMT_S32; + break; + case "f32": + format = libav.AV_SAMPLE_FMT_FLT; + break; + case "u8-planar": + format = libav.AV_SAMPLE_FMT_U8P; + break; + case "s16-planar": + format = libav.AV_SAMPLE_FMT_S16P; + break; + case "s32-planar": + format = libav.AV_SAMPLE_FMT_S32P; + break; + case "f32-planar": + format = libav.AV_SAMPLE_FMT_FLTP; + break; + default: + throw new TypeError("Invalid AudioSampleFormat"); + } + // Convert the timestamp + const ptsFull = Math.floor(dataClone.timestamp / 1000); + const [pts, ptshi] = libav.f64toi64(ptsFull); + // Convert the channel layout + const cc = dataClone.numberOfChannels; + const channel_layout = (cc === 1) ? 
4 : ((1 << cc) - 1); + // Make the frame + const sample_rate = dataClone.sampleRate; + const frame = { + data: raw, + format, pts, ptshi, channel_layout, sample_rate + }; + // Check if the filter needs to be reconfigured + let preOutputs = null; + if (this._filter_in_ctx) { + const filter_ctx = this._filter_in_ctx; + if (filter_ctx.sample_fmt !== frame.format || + filter_ctx.channel_layout !== frame.channel_layout || + filter_ctx.sample_rate !== frame.sample_rate) { + // Need a new filter! First, get anything left in the filter + let fframes = yield this._filter([], true); + // Can't send partial frames through the encoder + fframes = fframes.filter(x => { + let frame_size; + if (x.data[0].length) { + // Planar + frame_size = x.data[0].length; + } + else { + frame_size = x.data.length / x.channels; + } + return frame_size === this._filter_out_ctx.frame_size; + }); + if (fframes.length) { + preOutputs = + yield libav.ff_encode_multi(c, framePtr, pkt, fframes); + } + yield libav.avfilter_graph_free_js(this._filter_graph); + this._filter_in_ctx = null; + this._filter_graph = this._buffersrc_ctx = + this._buffersink_ctx = 0; + } + } + // Set up the filter + if (!this._filter_graph) { + const filter_ctx = this._filter_in_ctx = { + sample_rate: frame.sample_rate, + sample_fmt: frame.format, + channel_layout: frame.channel_layout + }; + [this._filter_graph, this._buffersrc_ctx, this._buffersink_ctx] = + yield libav.ff_init_filter_graph("aresample", filter_ctx, this._filter_out_ctx); + } + // Filter + const fframes = yield this._filter([frame]); + // And encode + encodedOutputs = + yield libav.ff_encode_multi(c, framePtr, pkt, fframes); + if (preOutputs) + encodedOutputs = preOutputs.concat(encodedOutputs); + if (encodedOutputs.length && !this._outputMetadataFilled && + fframes && fframes.length) + yield this._getOutputMetadata(fframes[0]); + /* 2. 
If encoding results in an error, queue a task on the control + * thread event loop to run the Close AudioEncoder algorithm with + * EncodingError. */ + } + catch (ex) { + this._p = this._p.then(() => { + this._closeAudioEncoder(ex); + }); + } + /* 3. If [[codec saturated]] equals true and + * [[codec implementation]] is no longer saturated, queue a task + * to perform the following steps: */ + // 1. Assign false to [[codec saturated]]. + // 2. Process the control message queue. + // (no saturation) + /* 4. Let encoded outputs be a list of encoded audio data outputs + * emitted by [[codec implementation]]. */ + /* 5. If encoded outputs is not empty, queue a task to run the + * Output EncodedAudioChunks algorithm with encoded outputs. */ + if (encodedOutputs) + this._outputEncodedAudioChunks(encodedOutputs); + })).catch(this._error); + } + // Internal: Filter the given audio + _filter(frames, fin = false) { + return __awaiter$6(this, void 0, void 0, function* () { + /* The specification does not state how timestamps should be related + * between input and output. It's obvious that the timestamps should + * increase at the appropriate rate based on the number of samples seen, + * but where they should start is not stated. Google Chrome starts with + * the timestamp of the first input frame, and ignores all other input + * frame timestamps. We follow that convention as well. 
*/ + if (frames.length && this._pts === null) + this._pts = (frames[0].pts || 0); + const fframes = yield this._libav.ff_filter_multi(this._buffersrc_ctx, this._buffersink_ctx, this._frame, frames, fin); + for (const frame of fframes) { + frame.pts = this._pts; + frame.ptshi = 0; + this._pts += frame.nb_samples; + } + return fframes; + }); + } + // Internal: Get output metadata + _getOutputMetadata(frame) { + return __awaiter$6(this, void 0, void 0, function* () { + const libav = this._libav; + const c = this._c; + const extradataPtr = yield libav.AVCodecContext_extradata(c); + const extradata_size = yield libav.AVCodecContext_extradata_size(c); + let extradata = null; + if (extradataPtr && extradata_size) + extradata = yield libav.copyout_u8(extradataPtr, extradata_size); + this._outputMetadata.decoderConfig.sampleRate = frame.sample_rate; + this._outputMetadata.decoderConfig.numberOfChannels = frame.channels; + if (extradata) + this._outputMetadata.decoderConfig.description = extradata; + this._outputMetadataFilled = true; + }); + } + _outputEncodedAudioChunks(packets) { + const libav = this._libav; + const sampleRate = this._filter_out_ctx.sample_rate; + for (const packet of packets) { + // 1. type + const type = (packet.flags & 1) ? "key" : "delta"; + // 2. timestamp + let timestamp = libav.i64tof64(packet.pts, packet.ptshi); + timestamp = Math.floor(timestamp / sampleRate * 1000000); + const chunk = new EncodedAudioChunk$1({ + type, timestamp, + data: packet.data + }); + if (this._outputMetadataFilled) + this._output(chunk, this._outputMetadata || void 0); + else + this._output(chunk); + } + } + flush() { + /* 1. If [[state]] is not "configured", return a promise rejected with + * InvalidStateError DOMException. */ + if (this.state !== "configured") + throw new DOMException("Invalid state", "InvalidStateError"); + // 2. Let promise be a new Promise. + // 3. Append promise to [[pending flush promises]]. + // 4. 
Queue a control message to flush the codec with promise. + // 5. Process the control message queue. + // 6. Return promise. + const ret = this._p.then(() => __awaiter$6(this, void 0, void 0, function* () { + if (!this._c) + return; + /* 1. Signal [[codec implementation]] to emit all internal pending + * outputs. */ + // Make sure any last data is flushed + const libav = this._libav; + const c = this._c; + const frame = this._frame; + const pkt = this._pkt; + const buffersrc_ctx = this._buffersrc_ctx; + this._buffersink_ctx; + let encodedOutputs = null; + try { + let fframes = null; + if (buffersrc_ctx) + fframes = yield this._filter([], true); + encodedOutputs = + yield libav.ff_encode_multi(c, frame, pkt, fframes || [], true); + if (!this._outputMetadataFilled && fframes && fframes.length) + yield this._getOutputMetadata(fframes[0]); + } + catch (ex) { + this._p = this._p.then(() => { + this._closeAudioEncoder(ex); + }); + } + /* 2. Let encoded outputs be a list of encoded audio data outputs + * emitted by [[codec implementation]]. */ + // 3. Queue a task to perform these steps: + { + /* 1. If encoded outputs is not empty, run the Output + * EncodedAudioChunks algorithm with encoded outputs. */ + if (encodedOutputs) + this._outputEncodedAudioChunks(encodedOutputs); + // 2. Remove promise from [[pending flush promises]]. + // 3. Resolve promise. 
+ // (shared queue) + } + })); + this._p = ret; + return ret; + } + reset() { + this._resetAudioEncoder(new DOMException("Reset", "AbortError")); + } + close() { + this._closeAudioEncoder(new DOMException("Close", "AbortError")); + } + static isConfigSupported(config) { + return __awaiter$6(this, void 0, void 0, function* () { + const enc = encoder(config.codec, config); + let supported = false; + if (enc) { + const libav = yield get(); + try { + const [, c, frame, pkt] = yield libav.ff_init_encoder(enc.codec, enc); + yield libav.ff_free_encoder(c, frame, pkt); + supported = true; + } + catch (ex) { } + yield free(libav); + } + return { + supported, + config: cloneConfig(config, ["codec", "sampleRate", "numberOfChannels", "bitrate"]) + }; + }); + } + }; + + /* + * This file is part of the libav.js WebCodecs Polyfill implementation. The + * interface implemented is derived from the W3C standard. No attribution is + * required when using this library. + * + * Copyright (c) 2021 Yahweasel + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY + * SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION + * OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN + * CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + const EncodedVideoChunk$1 = EncodedAudioChunk$1; + + /* + * This file is part of the libav.js WebCodecs Polyfill implementation. The + * interface implemented is derived from the W3C standard. No attribution is + * required when using this library. 
+ * + * Copyright (c) 2021-2024 Yahweasel + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY + * SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION + * OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN + * CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + var __awaiter$5 = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + // A canvas element used to convert CanvasImageSources to buffers + let offscreenCanvas = null; + let VideoFrame$1 = class VideoFrame { + constructor(data, init) { + /* NOTE: These should all be readonly, but the constructor style above + * doesn't work with that */ + this.format = "I420"; + this.codedWidth = 0; + this.codedHeight = 0; + this.codedRect = null; + this.visibleRect = null; + this.displayWidth = 0; + this.displayHeight = 0; + this.timestamp = 0; // microseconds + this._layout = null; + this._data = null; + /** + * (Internal) Does this use non-square pixels? 
+ */ + this._nonSquarePixels = false; + /** + * (Internal) If non-square pixels, the SAR (sample/pixel aspect ratio) + */ + this._sar_num = 1; + this._sar_den = 1; + if (data instanceof ArrayBuffer || + data.buffer instanceof ArrayBuffer) { + this._constructBuffer(data, init); + } + else if (data instanceof VideoFrame || + (globalThis.VideoFrame && data instanceof globalThis.VideoFrame)) { + const array = new Uint8Array(data.allocationSize()); + data.copyTo(array); + this._constructBuffer(array, { + transfer: [array.buffer], + // 1. Let format be otherFrame.format. + /* 2. FIXME: If init.alpha is discard, assign + * otherFrame.format's equivalent opaque format format. */ + format: data.format, + /* 3. Let validInit be the result of running the Validate + * VideoFrameInit algorithm with format and otherFrame’s + * [[coded width]] and [[coded height]]. */ + // 4. If validInit is false, throw a TypeError. + /* 7. Assign the following attributes from otherFrame to frame: + * codedWidth, codedHeight, colorSpace. */ + codedHeight: data.codedHeight, + codedWidth: data.codedWidth, + colorSpace: data.colorSpace, + /* 8. Let defaultVisibleRect be the result of performing the + * getter steps for visibleRect on otherFrame. */ + /* 9. Let defaultDisplayWidth, and defaultDisplayHeight be + * otherFrame’s [[display width]], and [[display height]] + * respectively. */ + /* 10. Run the Initialize Visible Rect and Display Size + * algorithm with init, frame, defaultVisibleRect, + * defaultDisplayWidth, and defaultDisplayHeight. */ + visibleRect: (init === null || init === void 0 ? void 0 : init.visibleRect) || data.visibleRect, + displayHeight: (init === null || init === void 0 ? void 0 : init.displayHeight) || data.displayHeight, + displayWidth: (init === null || init === void 0 ? void 0 : init.displayWidth) || data.displayWidth, + /* 11. If duration exists in init, assign it to frame’s + * [[duration]]. Otherwise, assign otherFrame.duration to + * frame’s [[duration]]. 
*/ + duration: (init === null || init === void 0 ? void 0 : init.duration) || data.duration, + /* 12. If timestamp exists in init, assign it to frame’s + * [[timestamp]]. Otherwise, assign otherFrame’s timestamp to + * frame’s [[timestamp]]. */ + timestamp: (init === null || init === void 0 ? void 0 : init.timestamp) || data.timestamp, + /* Assign the result of calling Copy VideoFrame metadata with + * init’s metadata to frame.[[metadata]]. */ + metadata: JSON.parse(JSON.stringify(init === null || init === void 0 ? void 0 : init.metadata)) + }); + } + else if (data instanceof HTMLVideoElement) { + /* Check the usability of the image argument. If this throws an + * exception or returns bad, then throw an InvalidStateError + * DOMException. */ + if (data.readyState === HTMLVideoElement.prototype.HAVE_NOTHING + || data.readyState === HTMLVideoElement.prototype.HAVE_METADATA) { + throw new DOMException("Video is not ready for reading frames", "InvalidStateError"); + } + // If image’s networkState attribute is NETWORK_EMPTY, then throw an InvalidStateError DOMException. + if (data.networkState === data.NETWORK_EMPTY) { + throw new DOMException("Video network state is empty", "InvalidStateError"); + } + this._constructCanvas(data, Object.assign(Object.assign({}, init), { timestamp: (init === null || init === void 0 ? void 0 : init.timestamp) || data.currentTime * 1e6 })); + } + else { + this._constructCanvas(data, init); + } + } + _constructCanvas(image, init) { + /* The spec essentially re-specifies “draw it”, and has specific + * instructions for each sort of thing it might be. So, we don't + * document all the steps here, we just... draw it. 
*/ + // Get the width and height + let width = 0, height = 0; + if (image.naturalWidth) { + width = image.naturalWidth; + height = image.naturalHeight; + } + else if (image.videoWidth) { + width = image.videoWidth; + height = image.videoHeight; + } + else if (image.width) { + width = image.width; + height = image.height; + } + if (!width || !height) + throw new DOMException("Could not determine dimensions", "InvalidStateError"); + if (offscreenCanvas === null) { + if (typeof OffscreenCanvas !== "undefined") { + offscreenCanvas = new OffscreenCanvas(width, height); + } + else { + offscreenCanvas = document.createElement("canvas"); + offscreenCanvas.style.display = "none"; + document.body.appendChild(offscreenCanvas); + } + } + offscreenCanvas.width = width; + offscreenCanvas.height = height; + const options = { desynchronized: true, willReadFrequently: true }; + const ctx = offscreenCanvas.getContext("2d", options); + ctx.clearRect(0, 0, width, height); + ctx.drawImage(image, 0, 0); + this._constructBuffer(ctx.getImageData(0, 0, width, height).data, { + format: "RGBA", + codedWidth: width, + codedHeight: height, + timestamp: (init === null || init === void 0 ? void 0 : init.timestamp) || 0, + duration: (init === null || init === void 0 ? void 0 : init.duration) || 0, + layout: [{ offset: 0, stride: width * 4 }], + displayWidth: (init === null || init === void 0 ? void 0 : init.displayWidth) || width, + displayHeight: (init === null || init === void 0 ? void 0 : init.displayHeight) || height + }); + } + _constructBuffer(data, init) { + // 1. If init is not a valid VideoFrameBufferInit, throw a TypeError. + VideoFrame._checkValidVideoFrameBufferInit(init); + /* 2. Let defaultRect be «[ "x:" → 0, "y" → 0, "width" → + * init.codedWidth, "height" → init.codedWidth ]». */ + const defaultRect = new DOMRect(0, 0, init.codedWidth, init.codedHeight); + // 3. Let overrideRect be undefined. + let overrideRect = void 0; + // 4. 
If init.visibleRect exists, assign its value to overrideRect. + if (init.visibleRect) + overrideRect = DOMRect.fromRect(init.visibleRect); + /* 5. Let parsedRect be the result of running the Parse Visible Rect + * algorithm with defaultRect, overrideRect, init.codedWidth, + * init.codedHeight, and init.format. */ + // 6. If parsedRect is an exception, return parsedRect. + this.codedWidth = init.codedWidth; // (for _parseVisibleRect) + this.codedHeight = init.codedHeight; + const parsedRect = this._parseVisibleRect(defaultRect, overrideRect || null); + // 7. Let optLayout be undefined. + let optLayout = void 0; + // 8. If init.layout exists, assign its value to optLayout. + if (init.layout) { + if (init.layout instanceof Array) + optLayout = init.layout; + else + optLayout = Array.from(init.layout); + } + /* 9. Let combinedLayout be the result of running the Compute Layout + * and Allocation Size algorithm with parsedRect, init.format, and + * optLayout. */ + // 10. If combinedLayout is an exception, throw combinedLayout. + this.format = init.format; // (needed for _computeLayoutAndAllocationSize) + const combinedLayout = this._computeLayoutAndAllocationSize(parsedRect, optLayout || null); + /* 11. If data.byteLength is less than combinedLayout’s allocationSize, + * throw a TypeError. */ + if (data.byteLength < combinedLayout.allocationSize) + throw new TypeError("data is too small for layout"); + /* 12. If init.transfer contains more than one reference to the same + * ArrayBuffer, then throw a DataCloneError DOMException. */ + // 13. For each transferable in init.transfer: + // 1. If [[Detached]] internal slot is true, then throw a DataCloneError DOMException. + // (not checked in polyfill) + /* 14. If init.transfer contains an ArrayBuffer referenced by data the + * User Agent MAY choose to: */ + let transfer = false; + if (init.transfer) { + /* 1. Let resource be a new media resource referencing pixel data + * in data. 
*/ + let inBuffer; + if (data.buffer) + inBuffer = data.buffer; + else + inBuffer = data; + let t; + if (init.transfer instanceof Array) + t = init.transfer; + else + t = Array.from(init.transfer); + for (const b of t) { + if (b === inBuffer) { + transfer = true; + break; + } + } + } + // 15. Otherwise: + /* 1. Let resource be a new media resource containing a copy of + * data. Use visibleRect and layout to determine where in data + * the pixels for each plane reside. */ + /* The User Agent MAY choose to allocate resource with a larger + * coded size and plane strides to improve memory alignment. + * Increases will be reflected by codedWidth and codedHeight. + * Additionally, the User Agent MAY use visibleRect to copy only + * the visible rectangle. It MAY also reposition the visible + * rectangle within resource. The final position will be + * reflected by visibleRect. */ + /* NOTE: The spec seems to be missing the step where you actually use + * the resource to define the [[resource reference]]. 
*/ + const format = init.format; + if (init.layout) { + // FIXME: Make sure it's the right size + if (init.layout instanceof Array) + this._layout = init.layout; + else + this._layout = Array.from(init.layout); + } + else { + const numPlanes_ = numPlanes(format); + const layout = []; + let offset = 0; + for (let i = 0; i < numPlanes_; i++) { + const sampleWidth = horizontalSubSamplingFactor(format, i); + const sampleHeight = verticalSubSamplingFactor(format, i); + const stride = ~~(this.codedWidth / sampleWidth); + layout.push({ offset, stride }); + offset += stride * (~~(this.codedHeight / sampleHeight)); + } + this._layout = layout; + } + this._data = new Uint8Array(data.buffer || data, data.byteOffset || 0); + if (!transfer) { + const numPlanes_ = numPlanes(format); + // Only copy the relevant part + let layout = this._layout; + let lo = 1 / 0; + let hi = 0; + for (let i = 0; i < numPlanes_; i++) { + const plane = layout[i]; + let offset = plane.offset; + if (offset < lo) + lo = offset; + const sampleHeight = verticalSubSamplingFactor(format, i); + offset += plane.stride * (~~(this.codedHeight / sampleHeight)); + if (offset > hi) + hi = offset; + } + // Fix the layout to compensate + if (lo !== 0) { + layout = this._layout = layout.map(x => ({ + offset: x.offset - lo, + stride: x.stride + })); + } + this._data = this._data.slice(lo, hi); + } + // 16. For each transferable in init.transfer: + // 1. Perform DetachArrayBuffer on transferable + // (not doable in polyfill) + // 17. Let resourceCodedWidth be the coded width of resource. + const resourceCodedWidth = init.codedWidth; + // 18. Let resourceCodedHeight be the coded height of resource. + const resourceCodedHeight = init.codedHeight; + /* 19. Let resourceVisibleLeft be the left offset for the visible + * rectangle of resource. */ + parsedRect.left; + /* 20. Let resourceVisibleTop be the top offset for the visible + * rectangle of resource. */ + parsedRect.top; + // 21. 
Let frame be a new VideoFrame object initialized as follows: + { + /* 1. Assign resourceCodedWidth, resourceCodedHeight, + * resourceVisibleLeft, and resourceVisibleTop to + * [[coded width]], [[coded height]], [[visible left]], and + * [[visible top]] respectively. */ + // (codedWidth/codedHeight done earlier) + this.codedRect = new DOMRect(0, 0, resourceCodedWidth, resourceCodedHeight); + this.visibleRect = parsedRect; + // 2. If init.visibleRect exists: + if (init.visibleRect) { + // 1. Let truncatedVisibleWidth be the value of visibleRect.width after truncating. + // 2. Assign truncatedVisibleWidth to [[visible width]]. + // 3. Let truncatedVisibleHeight be the value of visibleRect.height after truncating. + // 4. Assign truncatedVisibleHeight to [[visible height]]. + this.visibleRect = DOMRect.fromRect(init.visibleRect); + // 3. Otherwise: + } + else { + // 1. Assign [[coded width]] to [[visible width]]. + // 2. Assign [[coded height]] to [[visible height]]. + this.visibleRect = new DOMRect(0, 0, resourceCodedWidth, resourceCodedHeight); + } + /* 4. If init.displayWidth exists, assign it to [[display width]]. + * Otherwise, assign [[visible width]] to [[display width]]. */ + if (typeof init.displayWidth === "number") + this.displayWidth = init.displayWidth; + else + this.displayWidth = this.visibleRect.width; + /* 5. If init.displayHeight exists, assign it to [[display height]]. + * Otherwise, assign [[visible height]] to [[display height]]. 
*/ + if (typeof init.displayHeight === "number") + this.displayHeight = init.displayHeight; + else + this.displayHeight = this.visibleRect.height; + // Account for non-square pixels + if (this.displayWidth !== this.visibleRect.width || + this.displayHeight !== this.visibleRect.height) { + // Dubious (but correct) SAR calculation + this._nonSquarePixels = true; + this._sar_num = this.displayWidth * this.visibleRect.width; + this._sar_den = this.displayHeight * this.visibleRect.height; + } + else { + this._nonSquarePixels = false; + this._sar_num = this._sar_den = 1; + } + /* 6. Assign init’s timestamp and duration to [[timestamp]] and + * [[duration]] respectively. */ + this.timestamp = init.timestamp; + this.duration = init.duration; + // 7. Let colorSpace be undefined. + // 8. If init.colorSpace exists, assign its value to colorSpace. + // (color spaces not supported) + // 9. Assign init’s format to [[format]]. + // (done earlier) + /* 10. Assign the result of running the Pick Color Space algorithm, + * with colorSpace and [[format]], to [[color space]]. */ + // (color spaces not supported) + /* 11. Assign the result of calling Copy VideoFrame metadata with + * init’s metadata to frame.[[metadata]]. */ + // (no actual metadata is yet described by the spec) + } + // 22. Return frame. + } + /** + * Convert a polyfill VideoFrame to a native VideoFrame. + * @param opts Conversion options + */ + toNative(opts = {}) { + const ret = new globalThis.VideoFrame(this._data, { + layout: this._layout, + format: this.format, + codedWidth: this.codedWidth, + codedHeight: this.codedHeight, + visibleRect: this.visibleRect, + displayWidth: this.displayWidth, + displayHeight: this.displayHeight, + duration: this.duration, + timestamp: this.timestamp, + transfer: opts.transfer ? [this._data.buffer] : [] + }); + if (opts.transfer) + this.close(); + return ret; + } + /** + * Convert a native VideoFrame to a polyfill VideoFrame. 
WARNING: Inefficient, + * as the data cannot be transferred out. + * @param from VideoFrame to copy in + */ + static fromNative(from /* native VideoFrame */) { + const vf = from; + const data = new Uint8Array(vf.allocationSize()); + vf.copyTo(data); + return new VideoFrame(data, { + format: vf.format, + codedWidth: vf.codedWidth, + codedHeight: vf.codedHeight, + visibleRect: vf.visibleRect, + displayWidth: vf.displayWidth, + displayHeight: vf.displayHeight, + duration: vf.duration, + timestamp: vf.timestamp + }); + } + // Internal + _libavGetData() { return this._data; } + _libavGetLayout() { return this._layout; } + static _checkValidVideoFrameBufferInit(init) { + // 1. If codedWidth = 0 or codedHeight = 0,return false. + if (!init.codedWidth || !init.codedHeight) + throw new TypeError("Invalid coded dimensions"); + if (init.visibleRect) { + /* 2. If any attribute of visibleRect is negative or not finite, return + * false. */ + const vr = DOMRect.fromRect(init.visibleRect); + if (vr.x < 0 || !Number.isFinite(vr.x) || + vr.y < 0 || !Number.isFinite(vr.y) || + vr.width < 0 || !Number.isFinite(vr.width) || + vr.height < 0 || !Number.isFinite(vr.height)) { + throw new TypeError("Invalid visible rectangle"); + } + // 3. If visibleRect.y + visibleRect.height > codedHeight, return false. + if (vr.y + vr.height > init.codedHeight) + throw new TypeError("Visible rectangle outside of coded height"); + // 4. If visibleRect.x + visibleRect.width > codedWidth, return false. + if (vr.x + vr.width > init.codedWidth) + throw new TypeError("Visible rectangle outside of coded width"); + // 5. If only one of displayWidth or displayHeight exists, return false. + // 6. If displayWidth = 0 or displayHeight = 0, return false. + if ((init.displayWidth && !init.displayHeight) || + (!init.displayWidth && !init.displayHeight) || + (init.displayWidth === 0 || init.displayHeight === 0)) + throw new TypeError("Invalid display dimensions"); + } + // 7. Return true. + } + metadata() { + // 1. 
If [[Detached]] is true, throw an InvalidStateError DOMException. + if (this._data === null) + throw new DOMException("Detached", "InvalidStateError"); + /* 2. Return the result of calling Copy VideoFrame metadata with + * [[metadata]]. */ + // No actual metadata is yet defined in the spec + return null; + } + allocationSize(options = {}) { + // 1. If [[Detached]] is true, throw an InvalidStateError DOMException. + if (this._data === null) + throw new DOMException("Detached", "InvalidStateError"); + // 2. If [[format]] is null, throw a NotSupportedError DOMException. + if (this.format === null) + throw new DOMException("Not supported", "NotSupportedError"); + /* 3. Let combinedLayout be the result of running the Parse + * VideoFrameCopyToOptions algorithm with options. */ + // 4. If combinedLayout is an exception, throw combinedLayout. + const combinedLayout = this._parseVideoFrameCopyToOptions(options); + // 5. Return combinedLayout’s allocationSize. + return combinedLayout.allocationSize; + } + _parseVideoFrameCopyToOptions(options) { + /* 1. Let defaultRect be the result of performing the getter steps for + * visibleRect. */ + const defaultRect = this.visibleRect; + // 2. Let overrideRect be undefined. + // 3. If options.rect exists, assign its value to overrideRect. + let overrideRect = options.rect ? + new DOMRect(options.rect.x, options.rect.y, options.rect.width, options.rect.height) + : null; + /* 4. Let parsedRect be the result of running the Parse Visible Rect + * algorithm with defaultRect, overrideRect, [[coded width]], [[coded + * height]], and [[format]]. */ + // 5. If parsedRect is an exception, return parsedRect. + const parsedRect = this._parseVisibleRect(defaultRect, overrideRect); + // 6. Let optLayout be undefined. + // 7. If options.layout exists, assign its value to optLayout. 
+ let optLayout = null; + if (options.layout) { + if (options.layout instanceof Array) + optLayout = options.layout; + else + optLayout = Array.from(options.layout); + } + /* 8. Let combinedLayout be the result of running the Compute Layout + * and Allocation Size algorithm with parsedRect, [[format]], and + * optLayout. */ + const combinedLayout = this._computeLayoutAndAllocationSize(parsedRect, optLayout); + // 9. Return combinedLayout. + return combinedLayout; + } + _parseVisibleRect(defaultRect, overrideRect) { + // 1. Let sourceRect be defaultRect + let sourceRect = defaultRect; + // 2. If overrideRect is not undefined: + if (overrideRect) { + /* 1. If either of overrideRect.width or height is 0, return a + * TypeError. */ + if (overrideRect.width === 0 || overrideRect.height === 0) + throw new TypeError("Invalid rectangle"); + /* 2. If the sum of overrideRect.x and overrideRect.width is + * greater than [[coded width]], return a TypeError. */ + if (overrideRect.x + overrideRect.width > this.codedWidth) + throw new TypeError("Invalid rectangle"); + /* 3. If the sum of overrideRect.y and overrideRect.height is + * greater than [[coded height]], return a TypeError. */ + if (overrideRect.y + overrideRect.height > this.codedHeight) + throw new TypeError("Invalid rectangle"); + // 4. Assign overrideRect to sourceRect. + sourceRect = overrideRect; + } + /* 3. Let validAlignment be the result of running the Verify Rect Offset + * Alignment algorithm with format and sourceRect. */ + const validAlignment = this._verifyRectOffsetAlignment(sourceRect); + // 4. If validAlignment is false, throw a TypeError. + if (!validAlignment) + throw new TypeError("Invalid alignment"); + // 5. Return sourceRect. + return sourceRect; + } + _computeLayoutAndAllocationSize(parsedRect, layout) { + // 1. Let numPlanes be the number of planes as defined by format. + let numPlanes_ = numPlanes(this.format); + /* 2. 
If layout is not undefined and its length does not equal + * numPlanes, throw a TypeError. */ + if (layout && layout.length !== numPlanes_) + throw new TypeError("Invalid layout"); + // 3. Let minAllocationSize be 0. + let minAllocationSize = 0; + // 4. Let computedLayouts be a new list. + let computedLayouts = []; + // 5. Let endOffsets be a new list. + let endOffsets = []; + // 6. Let planeIndex be 0. + let planeIndex = 0; + // 7. While planeIndex < numPlanes: + while (planeIndex < numPlanes_) { + /* 1. Let plane be the Plane identified by planeIndex as defined by + * format. */ + // 2. Let sampleBytes be the number of bytes per sample for plane. + const sampleBytes_ = sampleBytes(this.format, planeIndex); + /* 3. Let sampleWidth be the horizontal sub-sampling factor of each + * subsample for plane. */ + const sampleWidth = horizontalSubSamplingFactor(this.format, planeIndex); + /* 4. Let sampleHeight be the vertical sub-sampling factor of each + * subsample for plane. */ + const sampleHeight = verticalSubSamplingFactor(this.format, planeIndex); + // 5. Let computedLayout be a new computed plane layout. + const computedLayout = { + destinationOffset: 0, + destinationStride: 0, + /* 6. Set computedLayout’s sourceTop to the result of the division + * of truncated parsedRect.y by sampleHeight, rounded up to the + * nearest integer. */ + sourceTop: Math.ceil(~~parsedRect.y / sampleHeight), + /* 7. Set computedLayout’s sourceHeight to the result of the + * division of truncated parsedRect.height by sampleHeight, + * rounded up to the nearest integer. */ + sourceHeight: Math.ceil(~~parsedRect.height / sampleHeight), + /* 8. Set computedLayout’s sourceLeftBytes to the result of the + * integer division of truncated parsedRect.x by sampleWidth, + * multiplied by sampleBytes. */ + sourceLeftBytes: ~~(parsedRect.x / sampleWidth * sampleBytes_), + /* 9. 
Set computedLayout’s sourceWidthBytes to the result of the + * integer division of truncated parsedRect.width by + * sampleHeight, multiplied by sampleBytes. */ + sourceWidthBytes: ~~(parsedRect.width / sampleWidth * sampleBytes_) + }; + // 10. If layout is not undefined: + if (layout) { + /* 1. Let planeLayout be the PlaneLayout in layout at position + * planeIndex. */ + const planeLayout = layout[planeIndex]; + /* 2. If planeLayout.stride is less than computedLayout’s + * sourceWidthBytes, return a TypeError. */ + if (planeLayout.stride < computedLayout.sourceWidthBytes) + throw new TypeError("Invalid stride"); + /* 3. Assign planeLayout.offset to computedLayout’s + * destinationOffset. */ + computedLayout.destinationOffset = planeLayout.offset; + /* 4. Assign planeLayout.stride to computedLayout’s + * destinationStride. */ + computedLayout.destinationStride = planeLayout.stride; + // 11. Otherwise: + } + else { + /* 1. Assign minAllocationSize to computedLayout’s + * destinationOffset. */ + computedLayout.destinationOffset = minAllocationSize; + /* 2. Assign computedLayout’s sourceWidthBytes to + * computedLayout’s destinationStride. */ + computedLayout.destinationStride = computedLayout.sourceWidthBytes; + } + /* 12. Let planeSize be the product of multiplying computedLayout’s + * destinationStride and sourceHeight. */ + const planeSize = computedLayout.destinationStride * computedLayout.sourceHeight; + /* 13. Let planeEnd be the sum of planeSize and computedLayout’s + * destinationOffset. */ + const planeEnd = planeSize + computedLayout.destinationOffset; + /* 14. If planeSize or planeEnd is greater than maximum range of + * unsigned long, return a TypeError. */ + if (planeSize >= 0x100000000 || + planeEnd >= 0x100000000) + throw new TypeError("Plane too large"); + // 15. Append planeEnd to endOffsets. + endOffsets.push(planeEnd); + /* 16. Assign the maximum of minAllocationSize and planeEnd to + * minAllocationSize. 
*/ + if (planeEnd > minAllocationSize) + minAllocationSize = planeEnd; + // 17. Let earlierPlaneIndex be 0. + let earlierPlaneIndex = 0; + // 18. While earlierPlaneIndex is less than planeIndex. + while (earlierPlaneIndex < planeIndex) { + // 1. Let earlierLayout be computedLayouts[earlierPlaneIndex]. + const earlierLayout = computedLayouts[earlierPlaneIndex]; + /* 2. If endOffsets[planeIndex] is less than or equal to + * earlierLayout’s destinationOffset or if + * endOffsets[earlierPlaneIndex] is less than or equal to + * computedLayout’s destinationOffset, continue. */ + if (planeEnd <= earlierLayout.destinationOffset || + endOffsets[earlierPlaneIndex] <= computedLayout.destinationOffset) ; + else + throw new TypeError("Invalid plane layout"); + // 4. Increment earlierPlaneIndex by 1. + earlierPlaneIndex++; + } + // 19. Append computedLayout to computedLayouts. + computedLayouts.push(computedLayout); + // 20. Increment planeIndex by 1. + planeIndex++; + } + /* 8. Let combinedLayout be a new combined buffer layout, initialized + * as follows: */ + const combinedLayout = { + // 1. Assign computedLayouts to computedLayouts. + computedLayouts, + // 2. Assign minAllocationSize to allocationSize. + allocationSize: minAllocationSize + }; + // 9. Return combinedLayout. + return combinedLayout; + } + _verifyRectOffsetAlignment(rect) { + // 1. If format is null, return true. + if (!this.format) + return true; + // 2. Let planeIndex be 0. + let planeIndex = 0; + // 3. Let numPlanes be the number of planes as defined by format. + const numPlanes_ = numPlanes(this.format); + // 4. While planeIndex is less than numPlanes: + while (planeIndex < numPlanes_) { + /* 1. Let plane be the Plane identified by planeIndex as defined by + * format. */ + /* 2. Let sampleWidth be the horizontal sub-sampling factor of each + * subsample for plane. */ + const sampleWidth = horizontalSubSamplingFactor(this.format, planeIndex); + /* 3. 
Let sampleHeight be the vertical sub-sampling factor of each + * subsample for plane. */ + const sampleHeight = verticalSubSamplingFactor(this.format, planeIndex); + // 4. If rect.x is not a multiple of sampleWidth, return false. + const xw = rect.x / sampleWidth; + if (xw !== ~~xw) + return false; + // 5. If rect.y is not a multiple of sampleHeight, return false. + const yh = rect.y / sampleHeight; + if (yh !== ~~yh) + return false; + // 6. Increment planeIndex by 1. + planeIndex++; + } + // 5. Return true. + return true; + } + copyTo(destination, options = {}) { + return __awaiter$5(this, void 0, void 0, function* () { + const destBuf = new Uint8Array(destination.buffer || destination, destination.byteOffset || 0); + // 1. If [[Detached]] is true, throw an InvalidStateError DOMException. + if (this._data === null) + throw new DOMException("Detached", "InvalidStateError"); + // 2. If [[format]] is null, throw a NotSupportedError DOMException. + if (!this.format) + throw new DOMException("No format", "NotSupportedError"); + /* 3. Let combinedLayout be the result of running the Parse + * VideoFrameCopyToOptions algorithm with options. */ + /* 4. If combinedLayout is an exception, return a promise rejected with + * combinedLayout. */ + const combinedLayout = this._parseVideoFrameCopyToOptions(options); + /* 5. If destination.byteLength is less than combinedLayout’s + * allocationSize, return a promise rejected with a TypeError. */ + if (destination.byteLength < combinedLayout.allocationSize) + throw new TypeError("Insufficient space"); + // 6. Let p be a new Promise. + /* 7. Let copyStepsQueue be the result of starting a new parallel + * queue. */ + // 8. Let planeLayouts be a new list. + let planeLayouts = []; + // 9. Enqueue the following steps to copyStepsQueue: + { + /* 1. Let resource be the media resource referenced by [[resource + * reference]]. */ + /* 2. Let numPlanes be the number of planes as defined by + * [[format]]. */ + numPlanes(this.format); + // 3. 
Let planeIndex be 0. + let planeIndex = 0; + // 4. While planeIndex is less than combinedLayout’s numPlanes: + while (planeIndex < combinedLayout.computedLayouts.length) { + /* 1. Let sourceStride be the stride of the plane in resource as + * identified by planeIndex. */ + const sourceStride = this._layout[planeIndex].stride; + /* 2. Let computedLayout be the computed plane layout in + * combinedLayout’s computedLayouts at the position of planeIndex */ + const computedLayout = combinedLayout.computedLayouts[planeIndex]; + /* 3. Let sourceOffset be the product of multiplying + * computedLayout’s sourceTop by sourceStride */ + let sourceOffset = computedLayout.sourceTop * sourceStride; + // 4. Add computedLayout’s sourceLeftBytes to sourceOffset. + sourceOffset += computedLayout.sourceLeftBytes; + // 5. Let destinationOffset be computedLayout’s destinationOffset. + let destinationOffset = computedLayout.destinationOffset; + // 6. Let rowBytes be computedLayout’s sourceWidthBytes. + const rowBytes = computedLayout.sourceWidthBytes; + /* 7. Let layout be a new PlaneLayout, with offset set to + * destinationOffset and stride set to rowBytes. */ + const layout = { + offset: computedLayout.destinationOffset, + stride: computedLayout.destinationStride + }; + // 8. Let row be 0. + let row = 0; + // 9. While row is less than computedLayout’s sourceHeight: + while (row < computedLayout.sourceHeight) { + /* 1. Copy rowBytes bytes from resource starting at + * sourceOffset to destination starting at destinationOffset. */ + destBuf.set(this._data.subarray(sourceOffset, sourceOffset + rowBytes), destinationOffset); + // 2. Increment sourceOffset by sourceStride. + sourceOffset += sourceStride; + /* 3. Increment destinationOffset by computedLayout’s + * destinationStride. */ + destinationOffset += computedLayout.destinationStride; + // 4. Increment row by 1. + row++; + } + // 10. Increment planeIndex by 1. + planeIndex++; + // 11. Append layout to planeLayouts. 
+ planeLayouts.push(layout); + } + // 5. Queue a task to resolve p with planeLayouts. + } + // 10. Return p. + return planeLayouts; + }); + } + clone() { + return new VideoFrame(this._data, { + format: this.format, + codedWidth: this.codedWidth, + codedHeight: this.codedHeight, + timestamp: this.timestamp, + duration: this.duration, + layout: this._layout, + transfer: [this._data.buffer] + }); + } + close() { + this._data = null; + } + }; + /** + * Convert a WebCodecs pixel format to a libav pixel format. + * @param libav LibAV instance for constants + * @param wcFormat WebCodecs format + */ + function wcFormatToLibAVFormat(libav, wcFormat) { + let format = libav.AV_PIX_FMT_RGBA; + switch (wcFormat) { + case "I420": + format = libav.AV_PIX_FMT_YUV420P; + break; + case "I420P10": + format = 0x3E; /* AV_PIX_FMT_YUV420P10 */ + break; + case "I420P12": + format = 0x7B; /* AV_PIX_FMT_YUV420P12 */ + break; + case "I420A": + format = libav.AV_PIX_FMT_YUVA420P; + break; + case "I420AP10": + format = 0x57; /* AV_PIX_FMT_YUVA420P10 */ + break; + case "I420AP12": + throw new TypeError("YUV420P12 is not supported by libav"); + case "I422": + format = libav.AV_PIX_FMT_YUV422P; + break; + case "I422P10": + format = 0x40; /* AV_PIX_FMT_YUV422P10 */ + break; + case "I422P12": + format = 0x7F; /* AV_PIX_FMT_YUV422P12 */ + break; + case "I422A": + format = 0x4E; /* AV_PIX_FMT_YUVA422P */ + break; + case "I422AP10": + format = 0x59; /* AV_PIX_FMT_YUVA422P10 */ + break; + case "I422AP10": + format = 0xBA; /* AV_PIX_FMT_YUVA422P12 */ + break; + case "I444": + format = libav.AV_PIX_FMT_YUV444P; + break; + case "I444P10": + format = 0x44; /* AV_PIX_FMT_YUV444P10 */ + break; + case "I444P12": + format = 0x83; /* AV_PIX_FMT_YUV444P12 */ + break; + case "I444A": + format = 0x4F; /* AV_PIX_FMT_YUVA444P */ + break; + case "I444AP10": + format = 0x5B; /* AV_PIX_FMT_YUVA444P10 */ + break; + case "I444AP12": + format = 0xBC; /* AV_PIX_FMT_YUVA444P10 */ + break; + case "NV12": + format = 
libav.AV_PIX_FMT_NV12; + break; + case "RGBA": + format = libav.AV_PIX_FMT_RGBA; + break; + case "RGBX": + format = 0x77; /* AV_PIX_FMT_RGB0 */ + break; + case "BGRA": + format = libav.AV_PIX_FMT_BGRA; + break; + case "BGRX": + format = 0x79; /* AV_PIX_FMT_BGR0 */ + break; + default: + throw new TypeError("Invalid VideoPixelFormat"); + } + return format; + } + /** + * Number of planes in the given format. + * @param format The format + */ + function numPlanes(format) { + switch (format) { + case "I420": + case "I420P10": + case "I420P12": + case "I422": + case "I422P10": + case "I422P12": + case "I444": + case "I444P10": + case "I444P12": + return 3; + case "I420A": + case "I420AP10": + case "I420AP12": + case "I422A": + case "I422AP10": + case "I422AP12": + case "I444A": + case "I444AP10": + case "I444AP12": + return 4; + case "NV12": + return 2; + case "RGBA": + case "RGBX": + case "BGRA": + case "BGRX": + return 1; + default: + throw new DOMException("Unsupported video pixel format", "NotSupportedError"); + } + } + /** + * Number of bytes per sample in the given format and plane. + * @param format The format + * @param planeIndex The plane index + */ + function sampleBytes(format, planeIndex) { + switch (format) { + case "I420": + case "I420A": + case "I422": + case "I422A": + case "I444": + case "I444A": + return 1; + case "I420P10": + case "I420AP10": + case "I422P10": + case "I422AP10": + case "I444P10": + case "I444AP10": + case "I420P12": + case "I420AP12": + case "I422P12": + case "I422AP12": + case "I444P12": + case "I444AP12": + return 2; + case "NV12": + if (planeIndex === 1) + return 2; + else + return 1; + case "RGBA": + case "RGBX": + case "BGRA": + case "BGRX": + return 4; + default: + throw new DOMException("Unsupported video pixel format", "NotSupportedError"); + } + } + /** + * Horizontal sub-sampling factor for the given format and plane. 
+ * @param format The format + * @param planeIndex The plane index + */ + function horizontalSubSamplingFactor(format, planeIndex) { + // First plane (often luma) is always full + if (planeIndex === 0) + return 1; + // Plane 3 (alpha if present) is always full + if (planeIndex === 3) + return 1; + switch (format) { + case "I420": + case "I420P10": + case "I420P12": + case "I420A": + case "I420AP10": + case "I420AP12": + case "I422": + case "I422P10": + case "I422P12": + case "I422A": + case "I422AP10": + case "I422AP12": + return 2; + case "I444": + case "I444P10": + case "I444P12": + case "I444A": + case "I444AP10": + case "I444AP12": + return 1; + case "NV12": + return 2; + case "RGBA": + case "RGBX": + case "BGRA": + case "BGRX": + return 1; + default: + throw new DOMException("Unsupported video pixel format", "NotSupportedError"); + } + } + /** + * Vertical sub-sampling factor for the given format and plane. + * @param format The format + * @param planeIndex The plane index + */ + function verticalSubSamplingFactor(format, planeIndex) { + // First plane (often luma) is always full + if (planeIndex === 0) + return 1; + // Plane 3 (alpha if present) is always full + if (planeIndex === 3) + return 1; + switch (format) { + case "I420": + case "I420P10": + case "I420P12": + case "I420A": + case "I420AP10": + case "I420AP12": + return 2; + case "I422": + case "I422P10": + case "I422P12": + case "I422A": + case "I422AP10": + case "I422AP12": + case "I444": + case "I444P10": + case "I444P12": + case "I444A": + case "I444AP10": + case "I444AP12": + return 1; + case "NV12": + return 2; + case "RGBA": + case "RGBX": + case "BGRA": + case "BGRX": + return 1; + default: + throw new DOMException("Unsupported video pixel format", "NotSupportedError"); + } + } + + /* + * This file is part of the libav.js WebCodecs Polyfill implementation. The + * interface implemented is derived from the W3C standard. No attribution is + * required when using this library. 
+ * + * Copyright (c) 2021-2024 Yahweasel + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY + * SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION + * OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN + * CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + var __awaiter$4 = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + let VideoDecoder$1 = class VideoDecoder extends DequeueEventTarget { + constructor(init) { + super(); + // 1. Let d be a new VideoDecoder object. + // 2. Assign a new queue to [[control message queue]]. + this._p = Promise.all([]); + // 3. Assign false to [[message queue blocked]]. + // (unneeded in polyfill) + // 4. Assign null to [[codec implementation]]. + this._libav = null; + this._codec = this._c = this._pkt = this._frame = 0; + /* 5. Assign the result of starting a new parallel queue to + * [[codec work queue]]. */ + // (shared queue) + // 6. Assign false to [[codec saturated]]. + // (saturation not needed) + // 7. 
Assign init.output to [[output callback]]. + this._output = init.output; + // 8. Assign init.error to [[error callback]]. + this._error = init.error; + // 9. Assign null to [[active decoder config]]. + // (part of codec) + // 10. Assign true to [[key chunk required]]. + // (part of codec) + // 11. Assign "unconfigured" to [[state]] + this.state = "unconfigured"; + // 12. Assign 0 to [[decodeQueueSize]]. + this.decodeQueueSize = 0; + // 13. Assign a new list to [[pending flush promises]]. + // (shared queue) + // 14. Assign false to [[dequeue event scheduled]]. + // (not needed in polyfill) + // 15. Return d. + } + configure(config) { + // 1. If config is not a valid VideoDecoderConfig, throw a TypeError. + // NOTE: We don't support sophisticated codec string parsing (yet) + // 2. If [[state]] is “closed”, throw an InvalidStateError DOMException. + if (this.state === "closed") + throw new DOMException("Decoder is closed", "InvalidStateError"); + // Free any internal state + if (this._libav) + this._p = this._p.then(() => this._free()); + // 3. Set [[state]] to "configured". + this.state = "configured"; + // 4. Set [[key chunk required]] to true. + // (part of the codec) + // 5. Queue a control message to configure the decoder with config. + this._p = this._p.then(() => __awaiter$4(this, void 0, void 0, function* () { + /* 1. Let supported be the result of running the Check + * Configuration Support algorithm with config. */ + const supported = decoder(config.codec, config); + /* 2. If supported is false, queue a task to run the Close + * VideoDecoder algorithm with NotSupportedError and abort these + * steps. */ + if (!supported) { + this._closeVideoDecoder(new DOMException("Unsupported codec", "NotSupportedError")); + return; + } + /* 3. If needed, assign [[codec implementation]] with an + * implementation supporting config. */ + // 4. Configure [[codec implementation]] with config. 
+ const libav = this._libav = yield get(); + // Initialize + [this._codec, this._c, this._pkt, this._frame] = + yield libav.ff_init_decoder(supported.codec); + yield libav.AVCodecContext_time_base_s(this._c, 1, 1000); + // 5. queue a task to run the following steps: + // 1. Assign false to [[message queue blocked]]. + // 2. Queue a task to Process the control message queue. + })).catch(this._error); + } + // Our own algorithm, close libav + _free() { + return __awaiter$4(this, void 0, void 0, function* () { + if (this._c) { + yield this._libav.ff_free_decoder(this._c, this._pkt, this._frame); + this._codec = this._c = this._pkt = this._frame = 0; + } + if (this._libav) { + free(this._libav); + this._libav = null; + } + }); + } + _closeVideoDecoder(exception) { + // 1. Run the Reset VideoDecoder algorithm with exception. + this._resetVideoDecoder(exception); + // 2. Set [[state]] to "closed". + this.state = "closed"; + /* 3. Clear [[codec implementation]] and release associated system + * resources. */ + this._p = this._p.then(() => this._free()); + /* 4. If exception is not an AbortError DOMException, invoke the + * [[error callback]] with exception. */ + if (exception.name !== "AbortError") + this._p = this._p.then(() => { this._error(exception); }); + } + _resetVideoDecoder(exception) { + // 1. If [[state]] is "closed", throw an InvalidStateError. + if (this.state === "closed") + throw new DOMException("Decoder closed", "InvalidStateError"); + // 2. Set [[state]] to "unconfigured". + this.state = "unconfigured"; + // ... really, we're just going to free it now + this._p = this._p.then(() => this._free()); + } + decode(chunk) { + const self = this; + // 1. If [[state]] is not "configured", throw an InvalidStateError. + if (this.state !== "configured") + throw new DOMException("Unconfigured", "InvalidStateError"); + // 2. If [[key chunk required]] is true: + // 1. If chunk.[[type]] is not key, throw a DataError. + /* 2. 
Implementers SHOULD inspect the chunk’s [[internal data]] to + * verify that it is truly a key chunk. If a mismatch is detected, + * throw a DataError. */ + // 3. Otherwise, assign false to [[key chunk required]]. + // 3. Increment [[decodeQueueSize]]. + this.decodeQueueSize++; + // 4. Queue a control message to decode the chunk. + this._p = this._p.then(function () { + return __awaiter$4(this, void 0, void 0, function* () { + const libav = self._libav; + const c = self._c; + const pkt = self._pkt; + const frame = self._frame; + let decodedOutputs = null; + /* 3. Decrement [[decodeQueueSize]] and run the Schedule Dequeue + * Event algorithm. */ + self.decodeQueueSize--; + self.dispatchEvent(new CustomEvent("dequeue")); + // 1. Attempt to use [[codec implementation]] to decode the chunk. + try { + // Convert to a libav packet + const ptsFull = Math.floor(chunk.timestamp / 1000); + const [pts, ptshi] = libav.f64toi64(ptsFull); + const packet = { + data: chunk._libavGetData(), + pts, + ptshi, + dts: pts, + dtshi: ptshi + }; + if (chunk.duration) { + packet.duration = Math.floor(chunk.duration / 1000); + packet.durationhi = 0; + } + decodedOutputs = yield libav.ff_decode_multi(c, pkt, frame, [packet]); + /* 2. If decoding results in an error, queue a task on the control + * thread event loop to run the Close VideoDecoder algorithm with + * EncodingError. */ + } + catch (ex) { + self._p = self._p.then(() => { + self._closeVideoDecoder(ex); + }); + } + /* 3. If [[codec saturated]] equals true and + * [[codec implementation]] is no longer saturated, queue a task + * to perform the following steps: */ + // 1. Assign false to [[codec saturated]]. + // 2. Process the control message queue. + // (unneeded) + /* 4. Let decoded outputs be a list of decoded video data outputs + * emitted by [[codec implementation]] in presentation order. */ + /* 5. If decoded outputs is not empty, queue a task to run the + * Output VideoFrame algorithm with decoded outputs. 
*/ + if (decodedOutputs) + self._outputVideoFrames(decodedOutputs); + }); + }).catch(this._error); + } + _outputVideoFrames(frames) { + const libav = this._libav; + for (const frame of frames) { + // 1. format + let format; + switch (frame.format) { + case libav.AV_PIX_FMT_YUV420P: + format = "I420"; + break; + case 0x3E: /* AV_PIX_FMT_YUV420P10 */ + format = "I420P10"; + break; + case 0x7B: /* AV_PIX_FMT_YUV420P12 */ + format = "I420P12"; + break; + case libav.AV_PIX_FMT_YUVA420P: + format = "I420A"; + break; + case 0x57: /* AV_PIX_FMT_YUVA420P10 */ + format = "I420AP10"; + break; + case libav.AV_PIX_FMT_YUV422P: + format = "I422"; + break; + case 0x40: /* AV_PIX_FMT_YUV422P10 */ + format = "I422P10"; + break; + case 0x7F: /* AV_PIX_FMT_YUV422P12 */ + format = "I422P12"; + break; + case 0x4E: /* AV_PIX_FMT_YUVA422P */ + format = "I422A"; + break; + case 0x59: /* AV_PIX_FMT_YUVA422P10 */ + format = "I422AP10"; + break; + case 0xBA: /* AV_PIX_FMT_YUVA422P12 */ + format = "I422AP12"; + break; + case libav.AV_PIX_FMT_YUV444P: + format = "I444"; + break; + case 0x44: /* AV_PIX_FMT_YUV444P10 */ + format = "I444P10"; + break; + case 0x83: /* AV_PIX_FMT_YUV444P12 */ + format = "I444P12"; + break; + case 0x4F: /* AV_PIX_FMT_YUVA444P */ + format = "I444A"; + break; + case 0x5B: /* AV_PIX_FMT_YUVA444P10 */ + format = "I444AP10"; + break; + case 0xBC: /* AV_PIX_FMT_YUVA444P12 */ + format = "I444AP12"; + break; + case libav.AV_PIX_FMT_NV12: + format = "NV12"; + break; + case libav.AV_PIX_FMT_RGBA: + format = "RGBA"; + break; + case 0x77: /* AV_PIX_FMT_RGB0 */ + format = "RGBX"; + break; + case libav.AV_PIX_FMT_BGRA: + format = "BGRA"; + break; + case 0x79: /* AV_PIX_FMT_BGR0 */ + format = "BGRX"; + break; + default: + throw new DOMException("Unsupported libav format!", "EncodingError"); + } + // 2. width and height + const codedWidth = frame.width; + const codedHeight = frame.height; + // 3. 
cropping + let visibleRect; + if (frame.crop) { + visibleRect = new DOMRect(frame.crop.left, frame.crop.top, codedWidth - frame.crop.left - frame.crop.right, codedHeight - frame.crop.top - frame.crop.bottom); + } + else { + visibleRect = new DOMRect(0, 0, codedWidth, codedHeight); + } + // Check for non-square pixels + let displayWidth = codedWidth; + let displayHeight = codedHeight; + if (frame.sample_aspect_ratio && frame.sample_aspect_ratio[0]) { + const sar = frame.sample_aspect_ratio; + if (sar[0] > sar[1]) + displayWidth = ~~(codedWidth * sar[0] / sar[1]); + else + displayHeight = ~~(codedHeight * sar[1] / sar[0]); + } + // 3. timestamp + const timestamp = libav.i64tof64(frame.pts, frame.ptshi) * 1000; + const data = new VideoFrame$1(frame.data, { + layout: frame.layout, + format, codedWidth, codedHeight, visibleRect, displayWidth, displayHeight, + timestamp + }); + this._output(data); + } + } + flush() { + /* 1. If [[state]] is not "configured", return a promise rejected with + * InvalidStateError DOMException. */ + if (this.state !== "configured") + throw new DOMException("Invalid state", "InvalidStateError"); + // 2. Set [[key chunk required]] to true. + // (handled by codec) + // 3. Let promise be a new Promise. + // 4. Append promise to [[pending flush promises]]. + // 5. Queue a control message to flush the codec with promise. + // 6. Process the control message queue. + const ret = this._p.then(() => __awaiter$4(this, void 0, void 0, function* () { + /* 1. Signal [[codec implementation]] to emit all internal pending + * outputs. */ + if (!this._c) + return; + // Make sure any last data is flushed + const libav = this._libav; + const c = this._c; + const pkt = this._pkt; + const frame = this._frame; + let decodedOutputs = null; + try { + decodedOutputs = yield libav.ff_decode_multi(c, pkt, frame, [], true); + } + catch (ex) { + this._p = this._p.then(() => { + this._closeVideoDecoder(ex); + }); + } + /* 2. 
Let decoded outputs be a list of decoded video data outputs + * emitted by [[codec implementation]]. */ + // 3. Queue a task to perform these steps: + { + /* 1. If decoded outputs is not empty, run the Output VideoFrame + * algorithm with decoded outputs. */ + if (decodedOutputs) + this._outputVideoFrames(decodedOutputs); + // 2. Remove promise from [[pending flush promises]]. + // 3. Resolve promise. + } + })); + this._p = ret; + // 7. Return promise. + return ret; + } + reset() { + this._resetVideoDecoder(new DOMException("Reset", "AbortError")); + } + close() { + this._closeVideoDecoder(new DOMException("Close", "AbortError")); + } + static isConfigSupported(config) { + return __awaiter$4(this, void 0, void 0, function* () { + const dec = decoder(config.codec, config); + let supported = false; + if (dec) { + const libav = yield get(); + try { + const [, c, pkt, frame] = yield libav.ff_init_decoder(dec.codec); + yield libav.ff_free_decoder(c, pkt, frame); + supported = true; + } + catch (ex) { } + yield free(libav); + } + return { + supported, + config: cloneConfig(config, ["codec", "codedWidth", "codedHeight"]) + }; + }); + } + }; + + /* + * This file is part of the libav.js WebCodecs Polyfill implementation. The + * interface implemented is derived from the W3C standard. No attribution is + * required when using this library. + * + * Copyright (c) 2021-2024 Yahweasel + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY + * SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION + * OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN + * CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + var __awaiter$3 = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + let VideoEncoder$1 = class VideoEncoder extends DequeueEventTarget { + constructor(init) { + super(); + this._extradataSet = false; + this._extradata = null; + // If our output uses non-square pixels, that information + this._nonSquarePixels = false; + this._sar_num = 1; + this._sar_den = 1; + // 1. Let e be a new VideoEncoder object. + // 2. Assign a new queue to [[control message queue]]. + this._p = Promise.all([]); + // 3. Assign false to [[message queue blocked]]. + // (unneeded in polyfill) + // 4. Assign null to [[codec implementation]]. + this._libav = null; + this._codec = this._c = this._frame = this._pkt = 0; + /* 5. Assign the result of starting a new parallel queue to + * [[codec work queue]]. */ + // (shared queue) + // 6. Assign false to [[codec saturated]]. + // (saturation unneeded) + // 7. Assign init.output to [[output callback]]. + this._output = init.output; + // 8. Assign init.error to [[error callback]]. + this._error = init.error; + // 9. 
Assign null to [[active encoder config]]. + // (part of codec) + // 10. Assign null to [[active output config]]. + this._metadata = null; + // 11. Assign "unconfigured" to [[state]] + this.state = "unconfigured"; + // 12. Assign 0 to [[encodeQueueSize]]. + this.encodeQueueSize = 0; + // 13. Assign a new list to [[pending flush promises]]. + // (shared queue) + // 14. Assign false to [[dequeue event scheduled]]. + // (shared queue) + // 15. Return e. + } + configure(config) { + // 1. If config is not a valid VideoEncoderConfig, throw a TypeError. + // NOTE: We don't support sophisticated codec string parsing (yet) + // 2. If [[state]] is "closed", throw an InvalidStateError. + if (this.state === "closed") + throw new DOMException("Encoder is closed", "InvalidStateError"); + // Free any internal state + if (this._libav) + this._p = this._p.then(() => this._free()); + // 3. Set [[state]] to "configured". + this.state = "configured"; + // 4. Queue a control message to configure the encoder using config. + this._p = this._p.then(() => __awaiter$3(this, void 0, void 0, function* () { + /* 1. Let supported be the result of running the Check + * Configuration Support algorithm with config. */ + const supported = encoder(config.codec, config); + /* 2. If supported is false, queue a task to run the Close + * VideoEncoder algorithm with NotSupportedError and abort these + * steps. */ + if (!supported) { + this._closeVideoEncoder(new DOMException("Unsupported codec", "NotSupportedError")); + return; + } + /* 3. If needed, assign [[codec implementation]] with an + * implementation supporting config. */ + // 4. Configure [[codec implementation]] with config. 
+ const libav = this._libav = yield get(); + this._metadata = { + decoderConfig: { + codec: supported.codec + } + }; + // And initialize + [this._codec, this._c, this._frame, this._pkt] = + yield libav.ff_init_encoder(supported.codec, supported); + this._extradataSet = false; + this._extradata = null; + yield libav.AVCodecContext_time_base_s(this._c, 1, 1000); + const width = config.width; + const height = config.height; + this._sws = 0; + this._swsFrame = 0; + this._swsOut = { + width, height, + format: supported.ctx.pix_fmt + }; + // Check for non-square pixels + const dWidth = config.displayWidth || width; + const dHeight = config.displayHeight || height; + if (dWidth !== width || dHeight !== height) { + this._nonSquarePixels = true; + this._sar_num = dWidth * height; + this._sar_den = dHeight * width; + } + else { + this._nonSquarePixels = false; + } + // 5. queue a task to run the following steps: + // 1. Assign false to [[message queue blocked]]. + // 2. Queue a task to Process the control message queue. + })).catch(this._error); + } + // Our own algorithm, close libav + _free() { + return __awaiter$3(this, void 0, void 0, function* () { + if (this._sws) { + yield this._libav.av_frame_free_js(this._swsFrame); + yield this._libav.sws_freeContext(this._sws); + this._sws = this._swsFrame = 0; + this._swsIn = this._swsOut = void 0; + } + if (this._c) { + yield this._libav.ff_free_encoder(this._c, this._frame, this._pkt); + this._codec = this._c = this._frame = this._pkt = 0; + } + if (this._libav) { + free(this._libav); + this._libav = null; + } + }); + } + _closeVideoEncoder(exception) { + // 1. Run the Reset VideoEncoder algorithm with exception. + this._resetVideoEncoder(exception); + // 2. Set [[state]] to "closed". + this.state = "closed"; + /* 3. Clear [[codec implementation]] and release associated system + * resources. */ + this._p = this._p.then(() => this._free()); + /* 4. 
If exception is not an AbortError DOMException, invoke the + * [[error callback]] with exception. */ + if (exception.name !== "AbortError") + this._p = this._p.then(() => { this._error(exception); }); + } + _resetVideoEncoder(exception) { + // 1. If [[state]] is "closed", throw an InvalidStateError. + if (this.state === "closed") + throw new DOMException("Encoder closed", "InvalidStateError"); + // 2. Set [[state]] to "unconfigured". + this.state = "unconfigured"; + // ... really, we're just going to free it now + this._p = this._p.then(() => this._free()); + } + encode(frame, options = {}) { + /* 1. If the value of frame’s [[Detached]] internal slot is true, throw + * a TypeError. */ + if (frame._libavGetData() === null) + throw new TypeError("Detached"); + // 2. If [[state]] is not "configured", throw an InvalidStateError. + if (this.state !== "configured") + throw new DOMException("Unconfigured", "InvalidStateError"); + /* 3. Let frameClone hold the result of running the Clone VideoFrame + * algorithm with frame. */ + const frameClone = frame.clone(); + // 4. Increment [[encodeQueueSize]]. + this.encodeQueueSize++; + // 5. Queue a control message to encode frameClone. + this._p = this._p.then(() => __awaiter$3(this, void 0, void 0, function* () { + const libav = this._libav; + const c = this._c; + const pkt = this._pkt; + const framePtr = this._frame; + const swsOut = this._swsOut; + let encodedOutputs = null; + /* 3. Decrement [[encodeQueueSize]] and run the Schedule Dequeue + * Event algorithm. */ + this.encodeQueueSize--; + this.dispatchEvent(new CustomEvent("dequeue")); + /* 1. Attempt to use [[codec implementation]] to encode frameClone + * according to options. 
*/ + try { + // Convert the format + const format = wcFormatToLibAVFormat(libav, frameClone.format); + // Convert the data + const rawU8 = frameClone._libavGetData(); + const layout = frameClone._libavGetLayout(); + // Convert the timestamp + const ptsFull = Math.floor(frameClone.timestamp / 1000); + const [pts, ptshi] = libav.f64toi64(ptsFull); + // Make the frame + const frame = { + data: rawU8, layout, + format, pts, ptshi, + width: frameClone.codedWidth, + height: frameClone.codedHeight, + crop: { + left: frameClone.visibleRect.left, + right: frameClone.visibleRect.right, + top: frameClone.visibleRect.top, + bottom: frameClone.visibleRect.bottom + }, + key_frame: options.keyFrame ? 1 : 0, + pict_type: options.keyFrame ? 1 : 0 + }; + // Possibly scale + if (frame.width !== swsOut.width || + frame.height !== swsOut.height || + frame.format !== swsOut.format) { + if (frameClone._nonSquarePixels) { + frame.sample_aspect_ratio = [ + frameClone._sar_num, + frameClone._sar_den + ]; + } + // Need a scaler + let sws = this._sws, swsIn = this._swsIn, swsFrame = this._swsFrame; + if (!sws || + frame.width !== swsIn.width || + frame.height !== swsIn.height || + frame.format !== swsIn.format) { + // Need to allocate the scaler + if (sws) + yield libav.sws_freeContext(sws); + swsIn = { + width: frame.width, + height: frame.height, + format: frame.format + }; + sws = yield libav.sws_getContext(swsIn.width, swsIn.height, swsIn.format, swsOut.width, swsOut.height, swsOut.format, 2, 0, 0, 0); + this._sws = sws; + this._swsIn = swsIn; + // Maybe need a frame + if (!swsFrame) + this._swsFrame = swsFrame = yield libav.av_frame_alloc(); + } + // Scale and encode the frame + const [, swsRes, , , , , , encRes] = yield Promise.all([ + libav.ff_copyin_frame(framePtr, frame), + libav.sws_scale_frame(sws, swsFrame, framePtr), + this._nonSquarePixels ? 
+ libav.AVFrame_sample_aspect_ratio_s(swsFrame, this._sar_num, this._sar_den) : + null, + libav.AVFrame_pts_s(swsFrame, pts), + libav.AVFrame_ptshi_s(swsFrame, ptshi), + libav.AVFrame_key_frame_s(swsFrame, options.keyFrame ? 1 : 0), + libav.AVFrame_pict_type_s(swsFrame, options.keyFrame ? 1 : 0), + libav.avcodec_send_frame(c, swsFrame) + ]); + if (swsRes < 0 || encRes < 0) + throw new Error("Encoding failed!"); + encodedOutputs = []; + while (true) { + const recv = yield libav.avcodec_receive_packet(c, pkt); + if (recv === -libav.EAGAIN) + break; + else if (recv < 0) + throw new Error("Encoding failed!"); + encodedOutputs.push(yield libav.ff_copyout_packet(pkt)); + } + } + else { + if (this._nonSquarePixels) { + frame.sample_aspect_ratio = [ + this._sar_num, + this._sar_den + ]; + } + // Encode directly + encodedOutputs = + yield libav.ff_encode_multi(c, framePtr, pkt, [frame]); + } + if (encodedOutputs.length && !this._extradataSet) + yield this._getExtradata(); + /* 2. If encoding results in an error, queue a task to run the + * Close VideoEncoder algorithm with EncodingError and return. */ + } + catch (ex) { + this._p = this._p.then(() => { + this._closeVideoEncoder(ex); + }); + return; + } + /* 3. If [[codec saturated]] equals true and + * [[codec implementation]] is no longer saturated, queue a task + * to perform the following steps: */ + // 1. Assign false to [[codec saturated]]. + // 2. Process the control message queue. + // (unneeded in polyfill) + /* 4. Let encoded outputs be a list of encoded video data outputs + * emitted by [[codec implementation]]. */ + /* 5. If encoded outputs is not empty, queue a task to run the + * Output EncodedVideoChunks algorithm with encoded outputs. 
*/ + if (encodedOutputs) + this._outputEncodedVideoChunks(encodedOutputs); + })).catch(this._error); + } + // Internal: Get extradata + _getExtradata() { + return __awaiter$3(this, void 0, void 0, function* () { + const libav = this._libav; + const c = this._c; + const extradata = yield libav.AVCodecContext_extradata(c); + const extradata_size = yield libav.AVCodecContext_extradata_size(c); + if (extradata && extradata_size) { + this._metadata.decoderConfig.description = this._extradata = + yield libav.copyout_u8(extradata, extradata_size); + } + this._extradataSet = true; + }); + } + _outputEncodedVideoChunks(packets) { + const libav = this._libav; + for (const packet of packets) { + // 1. type + const type = (packet.flags & 1) ? "key" : "delta"; + // 2. timestamp + const timestamp = libav.i64tof64(packet.pts, packet.ptshi) * 1000; + const chunk = new EncodedVideoChunk$1({ + type: type, timestamp, + data: packet.data + }); + if (this._extradataSet) + this._output(chunk, this._metadata || void 0); + else + this._output(chunk); + } + } + flush() { + /* 1. If [[state]] is not "configured", return a promise rejected with + * InvalidStateError DOMException. */ + if (this.state !== "configured") + throw new DOMException("Invalid state", "InvalidStateError"); + // 2. Let promise be a new Promise. + // 3. Append promise to [[pending flush promises]]. + // 4. Queue a control message to flush the codec with promise. + // 5. Process the control message queue. + const ret = this._p.then(() => __awaiter$3(this, void 0, void 0, function* () { + /* 1. Signal [[codec implementation]] to emit all internal pending + * outputs. 
*/ + if (!this._c) + return; + // Make sure any last data is flushed + const libav = this._libav; + const c = this._c; + const frame = this._frame; + const pkt = this._pkt; + let encodedOutputs = null; + try { + encodedOutputs = + yield libav.ff_encode_multi(c, frame, pkt, [], true); + if (!this._extradataSet) + yield this._getExtradata(); + } + catch (ex) { + this._p = this._p.then(() => { + this._closeVideoEncoder(ex); + }); + } + /* 2. Let encoded outputs be a list of encoded video data outputs + * emitted by [[codec implementation]]. */ + // 3. Queue a task to perform these steps: + { + /* 1. If encoded outputs is not empty, run the Output + * EncodedVideoChunks algorithm with encoded outputs. */ + if (encodedOutputs) + this._outputEncodedVideoChunks(encodedOutputs); + // 2. Remove promise from [[pending flush promises]]. + // 3. Resolve promise. + } + })); + this._p = ret; + // 6. Return promise. + return ret; + } + reset() { + this._resetVideoEncoder(new DOMException("Reset", "AbortError")); + } + close() { + this._closeVideoEncoder(new DOMException("Close", "AbortError")); + } + static isConfigSupported(config) { + return __awaiter$3(this, void 0, void 0, function* () { + const enc = encoder(config.codec, config); + let supported = false; + if (enc) { + const libav = yield get(); + try { + const [, c, frame, pkt] = yield libav.ff_init_encoder(enc.codec, enc); + yield libav.ff_free_encoder(c, frame, pkt); + supported = true; + } + catch (ex) { } + yield free(libav); + } + return { + supported, + config: cloneConfig(config, ["codec", "width", "height", "bitrate", "framerate", "latencyMode"]) + }; + }); + } + }; + + /* + * This file is part of the libav.js WebCodecs Polyfill implementation. The + * interface implemented is derived from the W3C standard. No attribution is + * required when using this library. 
+ * + * Copyright (c) 2021-2024 Yahweasel + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY + * SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION + * OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN + * CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + var __awaiter$2 = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + // A non-threaded libav.js instance for scaling. + let scalerSync = null; + // A synchronous libav.js instance for scaling. + let scalerAsync = null; + // The original drawImage + let origDrawImage = null; + // The original drawImage Offscreen + let origDrawImageOffscreen = null; + // The original createImageBitmap + let origCreateImageBitmap = null; + /** + * Load rendering capability. 
+ * @param libavOptions Options to use while loading libav, only asynchronous + * @param polyfill Set to polyfill CanvasRenderingContext2D.drawImage + */ + function load$1(libavOptions, polyfill) { + return __awaiter$2(this, void 0, void 0, function* () { + // Get our scalers + if ("importScripts" in globalThis) { + // Make sure the worker code doesn't run + LibAVWrapper.nolibavworker = true; + } + scalerSync = yield LibAVWrapper.LibAV({ noworker: true }); + scalerAsync = yield LibAVWrapper.LibAV(libavOptions); + // Polyfill drawImage + if ('CanvasRenderingContext2D' in globalThis) { + origDrawImage = CanvasRenderingContext2D.prototype.drawImage; + if (polyfill) + CanvasRenderingContext2D.prototype.drawImage = drawImagePolyfill; + } + if ('OffscreenCanvasRenderingContext2D' in globalThis) { + origDrawImageOffscreen = OffscreenCanvasRenderingContext2D.prototype.drawImage; + if (polyfill) + OffscreenCanvasRenderingContext2D.prototype.drawImage = drawImagePolyfillOffscreen; + } + // Polyfill createImageBitmap + origCreateImageBitmap = globalThis.createImageBitmap; + if (polyfill) + globalThis.createImageBitmap = createImageBitmap$1; + }); + } + /** + * Draw this video frame on this canvas, synchronously. 
+ * @param ctx CanvasRenderingContext2D to draw on + * @param image VideoFrame (or anything else) to draw + * @param sx Source X position OR destination X position + * @param sy Source Y position OR destination Y position + * @param sWidth Source width OR destination width + * @param sHeight Source height OR destination height + * @param dx Destination X position + * @param dy Destination Y position + * @param dWidth Destination width + * @param dHeight Destination height + */ + function canvasDrawImage$1(ctx, image, ax, ay, sWidth, sHeight, dx, dy, dWidth, dHeight) { + if (!(image._data)) { + // Just use the original + return origDrawImage.apply(ctx, Array.prototype.slice.call(arguments, 1)); + } + // Normalize the arguments + if (typeof sWidth === "undefined") { + // dx, dy + dx = ax; + dy = ay; + } + else if (typeof dx === "undefined") { + // dx, dy, dWidth, dHeight + dx = ax; + dy = ay; + dWidth = sWidth; + dHeight = sHeight; + sWidth = void 0; + sHeight = void 0; + } + else ; + if (typeof dWidth === "undefined") { + dWidth = image.displayWidth; + dHeight = image.displayHeight; + } + // Convert the format to libav.js + const format = wcFormatToLibAVFormat(scalerSync, image.format); + // Convert the frame synchronously + const sctx = scalerSync.sws_getContext_sync(image.visibleRect.width, image.visibleRect.height, format, dWidth, dHeight, scalerSync.AV_PIX_FMT_RGBA, 2, 0, 0, 0); + const inFrame = scalerSync.av_frame_alloc_sync(); + const outFrame = scalerSync.av_frame_alloc_sync(); + let rawU8; + let layout; + if (image._libavGetData) { + rawU8 = image._libavGetData(); + layout = image._libavGetLayout(); + } + else { + // Just have to hope this is a polyfill VideoFrame copied weirdly! 
+ rawU8 = image._data; + layout = image._layout; + } + // Copy it in + scalerSync.ff_copyin_frame_sync(inFrame, { + data: rawU8, + layout, + format, + width: image.codedWidth, + height: image.codedHeight, + crop: { + left: image.visibleRect.left, + right: image.visibleRect.right, + top: image.visibleRect.top, + bottom: image.visibleRect.bottom + } + }); + // Rescale + scalerSync.sws_scale_frame_sync(sctx, outFrame, inFrame); + // Get the data back out again + const frameData = scalerSync.ff_copyout_frame_video_imagedata_sync(outFrame); + // Finally, draw it + ctx.putImageData(frameData, dx, dy); + // And clean up + scalerSync.av_frame_free_js_sync(outFrame); + scalerSync.av_frame_free_js_sync(inFrame); + scalerSync.sws_freeContext_sync(sctx); + } + /** + * Polyfill version of canvasDrawImage. + */ + function drawImagePolyfill(image, sx, sy, sWidth, sHeight, dx, dy, dWidth, dHeight) { + if (image instanceof VideoFrame$1) { + return canvasDrawImage$1(this, image, sx, sy, sWidth, sHeight, dx, dy, dWidth, dHeight); + } + return origDrawImage.apply(this, arguments); + } + /** + * Polyfill version of offscreenCanvasDrawImage. + */ + function drawImagePolyfillOffscreen(image, sx, sy, sWidth, sHeight, dx, dy, dWidth, dHeight) { + if (image instanceof VideoFrame$1) { + return canvasDrawImage$1(this, image, sx, sy, sWidth, sHeight, dx, dy, dWidth, dHeight); + } + return origDrawImageOffscreen.apply(this, arguments); + } + /** + * Create an ImageBitmap from this drawable, asynchronously. NOTE: + * Sub-rectangles are not implemented for VideoFrames, so only options is + * available, and there, only scaling is available. 
+ * @param image VideoFrame (or anything else) to draw + * @param options Other options + */ + function createImageBitmap$1(image, opts = {}) { + if (!(image._data)) { + // Just use the original + return origCreateImageBitmap.apply(globalThis, arguments); + } + // Convert the format to libav.js + const format = wcFormatToLibAVFormat(scalerAsync, image.format); + // Normalize arguments + const dWidth = (typeof opts.resizeWidth === "number") + ? opts.resizeWidth : image.displayWidth; + const dHeight = (typeof opts.resizeHeight === "number") + ? opts.resizeHeight : image.displayHeight; + // Convert the frame + return (() => __awaiter$2(this, void 0, void 0, function* () { + const [sctx, inFrame, outFrame] = yield Promise.all([ + scalerAsync.sws_getContext(image.visibleRect.width, image.visibleRect.height, format, dWidth, dHeight, scalerAsync.AV_PIX_FMT_RGBA, 2, 0, 0, 0), + scalerAsync.av_frame_alloc(), + scalerAsync.av_frame_alloc() + ]); + // Convert the data + let rawU8; + let layout = void 0; + if (image._libavGetData) { + rawU8 = image._libavGetData(); + layout = image._libavGetLayout(); + } + else if (image._data) { + // Assume a VideoFrame weirdly serialized + rawU8 = image._data; + layout = image._layout; + } + else { + rawU8 = new Uint8Array(image.allocationSize()); + yield image.copyTo(rawU8); + } + // Copy it in + yield scalerAsync.ff_copyin_frame(inFrame, { + data: rawU8, + layout, + format, + width: image.codedWidth, + height: image.codedHeight, + crop: { + left: image.visibleRect.left, + right: image.visibleRect.right, + top: image.visibleRect.top, + bottom: image.visibleRect.bottom + } + }), + // Rescale + yield scalerAsync.sws_scale_frame(sctx, outFrame, inFrame); + // Get the data back out again + const frameData = yield scalerAsync.ff_copyout_frame_video_imagedata(outFrame); + // And clean up + yield Promise.all([ + scalerAsync.av_frame_free_js(outFrame), + scalerAsync.av_frame_free_js(inFrame), + scalerAsync.sws_freeContext(sctx) + ]); + // Make the 
ImageBitmap + return yield origCreateImageBitmap(frameData); + }))(); + } + + /* + * This file is part of the libav.js WebCodecs Polyfill implementation. The + * interface implemented is derived from the W3C standard. No attribution is + * required when using this library. + * + * Copyright (c) 2021 Yahweasel + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY + * SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION + * OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN + * CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + var __awaiter$1 = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + /** + * Error thrown to indicate a configuration is unsupported. + */ + let UnsupportedException$1 = class UnsupportedException extends Error { + constructor() { + super("The requested configuration is not supported"); + } + }; + /** + * Get an AudioDecoder environment that supports this configuration. 
Throws an + * UnsupportedException if no environment supports the configuration. + * @param config Audio decoder configuration + */ + function getAudioDecoder$1(config) { + return __awaiter$1(this, void 0, void 0, function* () { + try { + if (typeof globalThis.AudioDecoder !== "undefined" && + (yield globalThis.AudioDecoder.isConfigSupported(config)).supported) { + return { + AudioDecoder: globalThis.AudioDecoder, + EncodedAudioChunk: globalThis.EncodedAudioChunk, + AudioData: globalThis.AudioData + }; + } + } + catch (ex) { } + if ((yield AudioDecoder$1.isConfigSupported(config)).supported) { + return { + AudioDecoder: AudioDecoder$1, + EncodedAudioChunk: EncodedAudioChunk$1, + AudioData: AudioData$1 + }; + } + throw new UnsupportedException$1(); + }); + } + /** + * Get an VideoDecoder environment that supports this configuration. Throws an + * UnsupportedException if no environment supports the configuration. + * @param config Video decoder configuration + */ + function getVideoDecoder$1(config) { + return __awaiter$1(this, void 0, void 0, function* () { + try { + if (typeof globalThis.VideoDecoder !== "undefined" && + (yield globalThis.VideoDecoder.isConfigSupported(config)).supported) { + return { + VideoDecoder: globalThis.VideoDecoder, + EncodedVideoChunk: globalThis.EncodedVideoChunk, + VideoFrame: globalThis.VideoFrame + }; + } + } + catch (ex) { } + if ((yield VideoDecoder$1.isConfigSupported(config)).supported) { + return { + VideoDecoder: VideoDecoder$1, + EncodedVideoChunk: EncodedVideoChunk$1, + VideoFrame: VideoFrame$1 + }; + } + throw new UnsupportedException$1(); + }); + } + /** + * Get an AudioEncoder environment that supports this configuration. Throws an + * UnsupportedException if no environment supports the configuration. 
+ * @param config Audio encoder configuration + */ + function getAudioEncoder$1(config) { + return __awaiter$1(this, void 0, void 0, function* () { + try { + if (typeof globalThis.AudioEncoder !== "undefined" && + (yield globalThis.AudioEncoder.isConfigSupported(config)).supported) { + return { + AudioEncoder: globalThis.AudioEncoder, + EncodedAudioChunk: globalThis.EncodedAudioChunk, + AudioData: globalThis.AudioData + }; + } + } + catch (ex) { } + if ((yield AudioEncoder$1.isConfigSupported(config)).supported) { + return { + AudioEncoder: AudioEncoder$1, + EncodedAudioChunk: EncodedAudioChunk$1, + AudioData: AudioData$1 + }; + } + throw new UnsupportedException$1(); + }); + } + /** + * Get an VideoEncoder environment that supports this configuration. Throws an + * UnsupportedException if no environment supports the configuration. + * @param config Video encoder configuration + */ + function getVideoEncoder$1(config) { + return __awaiter$1(this, void 0, void 0, function* () { + try { + if (typeof globalThis.VideoEncoder !== "undefined" && + (yield globalThis.VideoEncoder.isConfigSupported(config)).supported) { + return { + VideoEncoder: globalThis.VideoEncoder, + EncodedVideoChunk: globalThis.EncodedVideoChunk, + VideoFrame: globalThis.VideoFrame + }; + } + } + catch (ex) { } + if ((yield VideoEncoder$1.isConfigSupported(config)).supported) { + return { + VideoEncoder: VideoEncoder$1, + EncodedVideoChunk: EncodedVideoChunk$1, + VideoFrame: VideoFrame$1 + }; + } + throw new UnsupportedException$1(); + }); + } + + /* + * This file is part of the libav.js WebCodecs Polyfill implementation. The + * interface implemented is derived from the W3C standard. No attribution is + * required when using this library. + * + * Copyright (c) 2021-2024 Yahweasel + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted. 
+ * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY + * SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION + * OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN + * CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + /** + * Load LibAV-WebCodecs-Polyfill. 
+ */ + function load(options = {}) { + return __awaiter(this, void 0, void 0, function* () { + // Set up libavOptions + let libavOptions = {}; + if (options.libavOptions) + Object.assign(libavOptions, options.libavOptions); + // Maybe load libav + if (!options.LibAV && typeof globalThis.LibAV === "undefined") { + yield new Promise((res, rej) => { + // Can't load workers from another origin + libavOptions.noworker = true; + // Load libav + const libavBase = "https://cdn.jsdelivr.net/npm/@libav.js/variant-webm-vp9@6.5.7/dist"; + globalThis.LibAV = { base: libavBase }; + const libavVar = "libav-6.0.7.0.2-webm-vp9.js"; + if (typeof importScripts !== "undefined") { + importScripts(`${libavBase}/${libavVar}`); + res(void 0); + } + else { + const scr = document.createElement("script"); + scr.src = `${libavBase}/${libavVar}`; + scr.onload = res; + scr.onerror = rej; + document.body.appendChild(scr); + } + }); + } + // And load the libav handler + if (options.LibAV) + setLibAV(options.LibAV); + setLibAVOptions(libavOptions); + yield load$2(); + if (options.polyfill) { + for (const exp of [ + ["EncodedAudioChunk", EncodedAudioChunk$1], + ["AudioData", AudioData$1], + ["AudioDecoder", AudioDecoder$1], + ["AudioEncoder", AudioEncoder$1], + ["EncodedVideoChunk", EncodedVideoChunk$1], + ["VideoFrame", VideoFrame$1], + ["VideoDecoder", VideoDecoder$1], + ["VideoEncoder", VideoEncoder$1] + ]) { + if (!globalThis[exp[0]]) + globalThis[exp[0]] = exp[1]; + } + } + yield load$1(libavOptions, !!options.polyfill); + }); + } + const EncodedAudioChunk = EncodedAudioChunk$1; + const AudioData = AudioData$1; + const AudioDecoder = AudioDecoder$1; + const AudioEncoder = AudioEncoder$1; + const EncodedVideoChunk = EncodedVideoChunk$1; + const VideoFrame = VideoFrame$1; + const VideoDecoder = VideoDecoder$1; + const VideoEncoder = VideoEncoder$1; + // Rendering + const canvasDrawImage = canvasDrawImage$1; + const createImageBitmap = createImageBitmap$1; + const UnsupportedException = 
UnsupportedException$1; + const getAudioDecoder = getAudioDecoder$1; + const getVideoDecoder = getVideoDecoder$1; + const getAudioEncoder = getAudioEncoder$1; + const getVideoEncoder = getVideoEncoder$1; + + exports.AudioData = AudioData; + exports.AudioDecoder = AudioDecoder; + exports.AudioEncoder = AudioEncoder; + exports.EncodedAudioChunk = EncodedAudioChunk; + exports.EncodedVideoChunk = EncodedVideoChunk; + exports.UnsupportedException = UnsupportedException; + exports.VideoDecoder = VideoDecoder; + exports.VideoEncoder = VideoEncoder; + exports.VideoFrame = VideoFrame; + exports.canvasDrawImage = canvasDrawImage; + exports.createImageBitmap = createImageBitmap; + exports.getAudioDecoder = getAudioDecoder; + exports.getAudioEncoder = getAudioEncoder; + exports.getVideoDecoder = getVideoDecoder; + exports.getVideoEncoder = getVideoEncoder; + exports.load = load; + +})); diff --git a/src/main.js b/src/main.js index 0c696bc..96cb31c 100644 --- a/src/main.js +++ b/src/main.js @@ -5159,12 +5159,15 @@ async function exportMp4(path) { }); } } -// exportMp4() async function render() { document.querySelector("body").style.cursor = "wait"; const path = await saveFileDialog({ filters: [ + { + name: "WebM files (.webm)", + extensions: ["webm"], + }, { name: "MP4 files (.mp4)", extensions: ["mp4"], @@ -5178,7 +5181,7 @@ async function render() { extensions: ["html"], }, ], - defaultPath: await join(await documentDir(), "untitled.mp4"), + defaultPath: await join(await documentDir(), "untitled.webm"), }); if (path != undefined) { // SVG balks on images @@ -5192,11 +5195,122 @@ async function render() { const ext = path.split(".").pop().toLowerCase(); + const canvas = document.createElement("canvas"); + canvas.width = config.fileWidth; // Set desired width + canvas.height = config.fileHeight; // Set desired height + let exportContext = { + ...context, + ctx: canvas.getContext("2d"), + selectionRect: undefined, + selection: [], + shapeselection: [], + }; + + switch (ext) { 
case "mp4": exportMp4(path) return break + case "webm": + + createProgressModal(); + + // Store the original context + const oldContext = context; + context = exportContext; + + let currentFrame = 0; + const bitrate = 1e6 + const frameTimeMicroseconds = parseInt(1_000_000 / config.framerate) + + await LibAVWebCodecs.load() + console.log("Codecs loaded") + const target = new WebMMuxer.ArrayBufferTarget() + const muxer = new WebMMuxer.Muxer({ + target: target, + video: { + codec: 'V_VP9', + width: config.fileWidth, + height: config.fileHeight, + frameRate: config.framerate, + }, + firstTimestampBehavior: 'offset', + }) + let videoEncoder = new VideoEncoder({ + output: (chunk, meta) => muxer.addVideoChunk(chunk, meta),//, currentFrame * frameTimeMicroseconds), + error: (e) => console.error(e), + }) + + videoEncoder.configure({ + codec: 'vp09.00.10.08', + width: config.fileWidth, + height: config.fileHeight, + bitrate, + bitrateMode: "constant" + }) + + async function finishEncoding() { + const progressText = document.getElementById('progressText'); + progressText.innerText = 'Finalizing...'; + const progressBar = document.getElementById('progressBar'); + progressBar.value = 100; + await videoEncoder.flush() + muxer.finalize() + await writeFile( + path, + new Uint8Array(target.buffer), + ); + const modal = document.getElementById('progressModal'); + modal.style.display = 'none'; + document.querySelector("body").style.cursor = "default"; + } + + const processFrame = async () => { + if (currentFrame < root.maxFrame) { + // Update progress bar + const progressText = document.getElementById('progressText'); + progressText.innerText = `Rendering frame ${currentFrame + 1} of ${root.maxFrame}`; + const progressBar = document.getElementById('progressBar'); + const progress = Math.round(((currentFrame + 1) / root.maxFrame) * 100); + progressBar.value = progress; + + root.setFrameNum(currentFrame) + exportContext.ctx.fillStyle = "white"; + exportContext.ctx.rect(0, 0, 
config.fileWidth, config.fileHeight); + exportContext.ctx.fill(); + root.draw(exportContext.ctx); + const frame = new VideoFrame( + await LibAVWebCodecs.createImageBitmap(canvas), + { timestamp: currentFrame * frameTimeMicroseconds } + ); + + async function encodeFrame(frame) { + // const keyFrame = true + const keyFrame = currentFrame % 60 === 0 + videoEncoder.encode(frame, { keyFrame }) + frame.close() + } + + await encodeFrame(frame) + + frame.close() + + + currentFrame++; + setTimeout(processFrame, 4); + } else { + // Once all frames are processed, reset context and export + context = oldContext; + finishEncoding() + } + }; + + processFrame(); + return + + + break; case "html": fetch("/player.html") .then((response) => { @@ -5226,23 +5340,17 @@ async function render() { break; case "png": const frames = []; - const canvas = document.createElement("canvas"); + canvas = document.createElement("canvas"); canvas.width = config.fileWidth; // Set desired width canvas.height = config.fileHeight; // Set desired height - let exportContext = { - ...context, - ctx: canvas.getContext("2d"), - selectionRect: undefined, - selection: [], - shapeselection: [], - }; + for (let i = 0; i < root.maxFrame; i++) { root.currentFrameNum = i; exportContext.ctx.fillStyle = "white"; exportContext.ctx.rect(0, 0, config.fileWidth, config.fileHeight); exportContext.ctx.fill(); - await root.draw(exportContext); + root.draw(exportContext); // Convert the canvas content to a PNG image (this is the "frame" we add to the APNG) const imageData = exportContext.ctx.getImageData( diff --git a/src/webm-muxer.js b/src/webm-muxer.js new file mode 100644 index 0000000..64ac6dd --- /dev/null +++ b/src/webm-muxer.js @@ -0,0 +1,1489 @@ +"use strict"; +var WebMMuxer = (() => { + var __defProp = Object.defineProperty; + var __getOwnPropDesc = Object.getOwnPropertyDescriptor; + var __getOwnPropNames = Object.getOwnPropertyNames; + var __hasOwnProp = Object.prototype.hasOwnProperty; + var __export = (target, 
all) => {
    for (var name in all)
      __defProp(target, name, { get: all[name], enumerable: true });
  };
  // esbuild module helper: copy enumerable getters between module objects.
  var __copyProps = (to, from, except, desc) => {
    if (from && typeof from === "object" || typeof from === "function") {
      for (let key of __getOwnPropNames(from))
        if (!__hasOwnProp.call(to, key) && key !== except)
          __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
    }
    return to;
  };
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
  // esbuild private-member helpers (WeakMap/WeakSet-lowered #fields/#methods).
  var __accessCheck = (obj, member, msg) => {
    if (!member.has(obj))
      throw TypeError("Cannot " + msg);
  };
  var __privateGet = (obj, member, getter) => {
    __accessCheck(obj, member, "read from private field");
    return getter ? getter.call(obj) : member.get(obj);
  };
  var __privateAdd = (obj, member, value) => {
    if (member.has(obj))
      throw TypeError("Cannot add the same private member more than once");
    member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
  };
  var __privateSet = (obj, member, value, setter) => {
    __accessCheck(obj, member, "write to private field");
    setter ? setter.call(obj, value) : member.set(obj, value);
    return value;
  };
  var __privateMethod = (obj, member, method) => {
    __accessCheck(obj, member, "access private method");
    return method;
  };

  // src/index.ts
  // Public module surface; getters resolve lazily, so forward references to
  // Muxer/SubtitleEncoder (defined later in the bundle) are fine.
  var src_exports = {};
  __export(src_exports, {
    ArrayBufferTarget: () => ArrayBufferTarget,
    FileSystemWritableFileStreamTarget: () => FileSystemWritableFileStreamTarget,
    Muxer: () => Muxer,
    StreamTarget: () => StreamTarget,
    SubtitleEncoder: () => SubtitleEncoder
  });

  // src/ebml.ts
  // Wrapper marking a number to be serialized as a 4-byte EBML float.
  var EBMLFloat32 = class {
    constructor(value) {
      this.value = value;
    }
  };
  // Wrapper marking a number to be serialized as an 8-byte EBML float.
  var EBMLFloat64 = class {
    constructor(value) {
      this.value = value;
    }
  };
  // Number of bytes needed to store `value` as an unsigned integer (max 6).
  var measureUnsignedInt = (value) => {
    if (value < 1 << 8) {
      return 1;
    } else if (value < 1 << 16) {
      return 2;
    } else if (value < 1 << 24) {
      return 3;
    } else if (value < 2 ** 32) {
      return 4;
    } else if (value < 2 ** 40) {
      return 5;
    } else {
      return 6;
    }
  };
  // Width in bytes of the EBML variable-size integer (VINT) encoding of
  // `value`; the all-ones payload per width is reserved, hence the `- 1`s.
  var measureEBMLVarInt = (value) => {
    if (value < (1 << 7) - 1) {
      return 1;
    } else if (value < (1 << 14) - 1) {
      return 2;
    } else if (value < (1 << 21) - 1) {
      return 3;
    } else if (value < (1 << 28) - 1) {
      return 4;
    } else if (value < 2 ** 35 - 1) {
      return 5;
    } else if (value < 2 ** 42 - 1) {
      return 6;
    } else {
      throw new Error("EBML VINT size not supported " + value);
    }
  };

  // src/misc.ts
  // Read bits [start, end) of `bytes` (MSB-first) as an unsigned integer.
  var readBits = (bytes, start, end) => {
    let result = 0;
    for (let i = start; i < end; i++) {
      let byteIndex = Math.floor(i / 8);
      let byte = bytes[byteIndex];
      let bitIndex = 7 - (i & 7);
      let bit = (byte & 1 << bitIndex) >> bitIndex;
      result <<= 1;
      result |= bit;
    }
    return result;
  };
  // Write the low (end - start) bits of `value` into bits [start, end) of
  // `bytes`, MSB-first.
  var writeBits = (bytes, start, end, value) => {
    for (let i = start; i < end; i++) {
      let byteIndex = Math.floor(i / 8);
      let byte = bytes[byteIndex];
      let bitIndex = 7 - (i & 7);
      byte &= ~(1 << bitIndex);
      byte |= (value & 1 << end - i - 1) >> end - i - 1 << bitIndex;
      bytes[byteIndex] = byte;
    }
  };

  // src/target.ts
  var isTarget = Symbol("isTarget");
  var Target = class {
  };
  // NOTE(review): bare expression statement — a no-op artifact of bundling
  // (likely a declaration-only member in the TypeScript source).
  isTarget;
  // Target that accumulates the muxed file into one ArrayBuffer, populated
  // when the writer finalizes.
  var ArrayBufferTarget = class extends Target {
    constructor() {
      super(...arguments);
      this.buffer = null;
    }
  };
  // Target that streams chunks to user callbacks; the constructor only
  // validates the options shape.
  var StreamTarget = class extends Target {
    constructor(options) {
      super();
      this.options = options;
      if (typeof options !== "object") {
        throw new TypeError("StreamTarget requires an options object to be passed to its constructor.");
      }
      if (options.onData) {
        if (typeof options.onData !== "function") {
          throw new TypeError("options.onData, when provided, must be a function.");
        }
        // Arity check: ignoring the byte-offset argument breaks outputs.
        if (options.onData.length < 2) {
          throw new TypeError(
            "options.onData, when provided, must be a function that takes in at least two arguments (data and position). Ignoring the position argument, which specifies the byte offset at which the data is to be written, can lead to broken outputs."
          );
        }
      }
      if (options.onHeader && typeof options.onHeader !== "function") {
        throw new TypeError("options.onHeader, when provided, must be a function.");
      }
      if (options.onCluster && typeof options.onCluster !== "function") {
        throw new TypeError("options.onCluster, when provided, must be a function.");
      }
      if (options.chunked !== void 0 && typeof options.chunked !== "boolean") {
        throw new TypeError("options.chunked, when provided, must be a boolean.");
      }
      if (options.chunkSize !== void 0 && (!Number.isInteger(options.chunkSize) || options.chunkSize <= 0)) {
        throw new TypeError("options.chunkSize, when provided, must be a positive integer.");
      }
    }
  };
  // Target backed by a FileSystemWritableFileStream (File System Access API).
  var FileSystemWritableFileStreamTarget = class extends Target {
    constructor(stream, options) {
      super();
      this.stream = stream;
      this.options = options;
      if (!(stream instanceof FileSystemWritableFileStream)) {
        throw new TypeError("FileSystemWritableFileStreamTarget requires a FileSystemWritableFileStream instance.");
      }
      if (options !== void 0 && typeof options !==
"object") {
        throw new TypeError("FileSystemWritableFileStreamTarget's options, when provided, must be an object.");
      }
      if (options) {
        if (options.chunkSize !== void 0 && (!Number.isInteger(options.chunkSize) || options.chunkSize <= 0)) {
          throw new TypeError("options.chunkSize, when provided, must be a positive integer");
        }
      }
    }
  };

  // src/writer.ts
  // Private storage for Writer (esbuild-lowered #fields / #methods).
  var _helper, _helperView, _writeByte, writeByte_fn, _writeFloat32, writeFloat32_fn, _writeFloat64, writeFloat64_fn, _writeUnsignedInt, writeUnsignedInt_fn, _writeString, writeString_fn;
  // Abstract seekable byte sink with EBML serialization; subclasses provide
  // write().
  var Writer = class {
    constructor() {
      __privateAdd(this, _writeByte);
      __privateAdd(this, _writeFloat32);
      __privateAdd(this, _writeFloat64);
      __privateAdd(this, _writeUnsignedInt);
      __privateAdd(this, _writeString);
      // Current absolute write position in the output.
      this.pos = 0;
      // 8-byte scratch buffer shared by the number serializers below.
      __privateAdd(this, _helper, new Uint8Array(8));
      __privateAdd(this, _helperView, new DataView(__privateGet(this, _helper).buffer));
      // Map EBML element objects to the offsets they were written at
      // (element start and start of its data, respectively).
      this.offsets = /* @__PURE__ */ new WeakMap();
      this.dataOffsets = /* @__PURE__ */ new WeakMap();
    }
    seek(newPos) {
      this.pos = newPos;
    }
    // Write an EBML variable-size integer: a width-marker bit followed by the
    // value, big-endian.
    writeEBMLVarInt(value, width = measureEBMLVarInt(value)) {
      let pos = 0;
      switch (width) {
        case 1:
          __privateGet(this, _helperView).setUint8(pos++, 1 << 7 | value);
          break;
        case 2:
          __privateGet(this, _helperView).setUint8(pos++, 1 << 6 | value >> 8);
          __privateGet(this, _helperView).setUint8(pos++, value);
          break;
        case 3:
          __privateGet(this, _helperView).setUint8(pos++, 1 << 5 | value >> 16);
          __privateGet(this, _helperView).setUint8(pos++, value >> 8);
          __privateGet(this, _helperView).setUint8(pos++, value);
          break;
        case 4:
          __privateGet(this, _helperView).setUint8(pos++, 1 << 4 | value >> 24);
          __privateGet(this, _helperView).setUint8(pos++, value >> 16);
          __privateGet(this, _helperView).setUint8(pos++, value >> 8);
          __privateGet(this, _helperView).setUint8(pos++, value);
          break;
        case 5:
          // Division rather than shifting: the value may exceed 32 bits.
          __privateGet(this, _helperView).setUint8(pos++, 1 << 3 | value / 2 ** 32 & 7);
          __privateGet(this, _helperView).setUint8(pos++, value >> 24);
          __privateGet(this, _helperView).setUint8(pos++, value >> 16);
          __privateGet(this, _helperView).setUint8(pos++, value >> 8);
          __privateGet(this, _helperView).setUint8(pos++, value);
          break;
        case 6:
          __privateGet(this, _helperView).setUint8(pos++, 1 << 2 | value / 2 ** 40 & 3);
          __privateGet(this, _helperView).setUint8(pos++, value / 2 ** 32 | 0);
          __privateGet(this, _helperView).setUint8(pos++, value >> 24);
          __privateGet(this, _helperView).setUint8(pos++, value >> 16);
          __privateGet(this, _helperView).setUint8(pos++, value >> 8);
          __privateGet(this, _helperView).setUint8(pos++, value);
          break;
        default:
          throw new Error("Bad EBML VINT size " + width);
      }
      this.write(__privateGet(this, _helper).subarray(0, pos));
    }
    // Recursively serialize an EBML element tree (or raw bytes / arrays of
    // elements).
    writeEBML(data) {
      if (data === null)
        return;
      if (data instanceof Uint8Array) {
        this.write(data);
      } else if (Array.isArray(data)) {
        for (let elem of data) {
          this.writeEBML(elem);
        }
      } else {
        this.offsets.set(data, this.pos);
        __privateMethod(this, _writeUnsignedInt, writeUnsignedInt_fn).call(this, data.id);
        if (Array.isArray(data.data)) {
          let sizePos = this.pos;
          // size === -1 means "unknown size" (single 0xFF size byte).
          let sizeSize = data.size === -1 ? 1 : data.size ?? 4;
          if (data.size === -1) {
            __privateMethod(this, _writeByte, writeByte_fn).call(this, 255);
          } else {
            // Reserve room for the size; it is backpatched below.
            this.seek(this.pos + sizeSize);
          }
          let startPos = this.pos;
          this.dataOffsets.set(data, startPos);
          this.writeEBML(data.data);
          if (data.size !== -1) {
            let size = this.pos - startPos;
            let endPos = this.pos;
            this.seek(sizePos);
            this.writeEBMLVarInt(size, sizeSize);
            this.seek(endPos);
          }
        } else if (typeof data.data === "number") {
          let size = data.size ?? measureUnsignedInt(data.data);
          this.writeEBMLVarInt(size);
          __privateMethod(this, _writeUnsignedInt, writeUnsignedInt_fn).call(this, data.data, size);
        } else if (typeof data.data === "string") {
          this.writeEBMLVarInt(data.data.length);
          __privateMethod(this, _writeString, writeString_fn).call(this, data.data);
        } else if (data.data instanceof Uint8Array) {
          this.writeEBMLVarInt(data.data.byteLength, data.size);
          this.write(data.data);
        } else if (data.data instanceof EBMLFloat32) {
          this.writeEBMLVarInt(4);
          __privateMethod(this, _writeFloat32, writeFloat32_fn).call(this, data.data.value);
        } else if (data.data instanceof EBMLFloat64) {
          this.writeEBMLVarInt(8);
          __privateMethod(this, _writeFloat64, writeFloat64_fn).call(this, data.data.value);
        }
      }
    }
  };
  _helper = new WeakMap();
  _helperView = new WeakMap();
  _writeByte = new WeakSet();
  writeByte_fn = function(value) {
    __privateGet(this, _helperView).setUint8(0, value);
    this.write(__privateGet(this, _helper).subarray(0, 1));
  };
  _writeFloat32 = new WeakSet();
  // Big-endian (`false`), matching EBML's byte order.
  writeFloat32_fn = function(value) {
    __privateGet(this, _helperView).setFloat32(0, value, false);
    this.write(__privateGet(this, _helper).subarray(0, 4));
  };
  _writeFloat64 = new WeakSet();
  writeFloat64_fn = function(value) {
    __privateGet(this, _helperView).setFloat64(0, value, false);
    this.write(__privateGet(this, _helper));
  };
  _writeUnsignedInt = new WeakSet();
  // Big-endian unsigned int of the given width; the cases deliberately fall
  // through so each width writes all of its bytes.
  writeUnsignedInt_fn = function(value, width = measureUnsignedInt(value)) {
    let pos = 0;
    switch (width) {
      case 6:
        __privateGet(this, _helperView).setUint8(pos++, value / 2 ** 40 | 0);
      case 5:
        __privateGet(this, _helperView).setUint8(pos++, value / 2 ** 32 | 0);
      case 4:
        __privateGet(this, _helperView).setUint8(pos++, value >> 24);
      case 3:
        __privateGet(this, _helperView).setUint8(pos++, value >> 16);
      case 2:
        __privateGet(this, _helperView).setUint8(pos++, value >> 8);
      case 1:
        __privateGet(this, _helperView).setUint8(pos++, value);
        break;
      default:
        throw new Error("Bad UINT size " + width);
    }
    this.write(__privateGet(this, _helper).subarray(0, pos));
  };
  _writeString = new WeakSet();
  // One byte per char (charCodeAt low byte) — effectively Latin-1 only.
  writeString_fn = function(str) {
    this.write(new Uint8Array(str.split("").map((x) => x.charCodeAt(0))));
  };
  // Private storage for the concrete writers below.
  var _target, _buffer, _bytes, _ensureSize, ensureSize_fn;
  // Writer that buffers the whole output in memory, doubling a backing
  // ArrayBuffer as needed; finalize() copies it into the target.
  var ArrayBufferTargetWriter = class extends Writer {
    constructor(target) {
      super();
      __privateAdd(this, _ensureSize);
      __privateAdd(this, _target, void 0);
      __privateAdd(this, _buffer, new ArrayBuffer(2 ** 16));
      __privateAdd(this, _bytes, new Uint8Array(__privateGet(this, _buffer)));
      __privateSet(this, _target, target);
    }
    write(data) {
      __privateMethod(this, _ensureSize, ensureSize_fn).call(this, this.pos + data.byteLength);
      __privateGet(this, _bytes).set(data, this.pos);
      this.pos += data.byteLength;
    }
    finalize() {
      __privateMethod(this, _ensureSize, ensureSize_fn).call(this, this.pos);
      __privateGet(this, _target).buffer = __privateGet(this, _buffer).slice(0, this.pos);
    }
  };
  _target = new WeakMap();
  _buffer = new WeakMap();
  _bytes = new WeakMap();
  _ensureSize = new WeakSet();
  // Grow the backing buffer (doubling) until it can hold `size` bytes.
  ensureSize_fn = function(size) {
    let newLength = __privateGet(this, _buffer).byteLength;
    while (newLength < size)
      newLength *= 2;
    if (newLength === __privateGet(this, _buffer).byteLength)
      return;
    let newBuffer = new ArrayBuffer(newLength);
    let newBytes = new Uint8Array(newBuffer);
    newBytes.set(__privateGet(this, _bytes), 0);
    __privateSet(this, _buffer, newBuffer);
    __privateSet(this, _bytes, newBytes);
  };
  var _trackingWrites, _trackedWrites, _trackedStart, _trackedEnd;
  // Base class for streaming writers; can additionally record ("track") a
  // span of writes so it can be handed to onHeader/onCluster callbacks.
  var BaseStreamTargetWriter = class extends Writer {
    constructor(target) {
      super();
      this.target = target;
      __privateAdd(this, _trackingWrites, false);
      __privateAdd(this, _trackedWrites, void 0);
      __privateAdd(this, _trackedStart, void 0);
      __privateAdd(this, _trackedEnd, void 0);
    }
    write(data) {
      if
(!__privateGet(this, _trackingWrites))
        return;
      // Clip the incoming data to the tracked region.
      let pos = this.pos;
      if (pos < __privateGet(this, _trackedStart)) {
        if (pos + data.byteLength <= __privateGet(this, _trackedStart))
          return;
        data = data.subarray(__privateGet(this, _trackedStart) - pos);
        // NOTE(review): `pos = 0` looks suspect — the set() below indexes by
        // pos - trackedStart, which is negative whenever trackedStart > 0;
        // verify against upstream webm-muxer (expected: pos = trackedStart).
        pos = 0;
      }
      let neededSize = pos + data.byteLength - __privateGet(this, _trackedStart);
      let newLength = __privateGet(this, _trackedWrites).byteLength;
      // Double the tracking buffer until the write fits.
      while (newLength < neededSize)
        newLength *= 2;
      if (newLength !== __privateGet(this, _trackedWrites).byteLength) {
        let copy = new Uint8Array(newLength);
        copy.set(__privateGet(this, _trackedWrites), 0);
        __privateSet(this, _trackedWrites, copy);
      }
      __privateGet(this, _trackedWrites).set(data, pos - __privateGet(this, _trackedStart));
      __privateSet(this, _trackedEnd, Math.max(__privateGet(this, _trackedEnd), pos + data.byteLength));
    }
    // Begin recording writes starting at the current position.
    startTrackingWrites() {
      __privateSet(this, _trackingWrites, true);
      __privateSet(this, _trackedWrites, new Uint8Array(2 ** 10));
      __privateSet(this, _trackedStart, this.pos);
      __privateSet(this, _trackedEnd, this.pos);
    }
    // Stop recording and return { data, start, end } for the tracked span.
    getTrackedWrites() {
      if (!__privateGet(this, _trackingWrites)) {
        throw new Error("Can't get tracked writes since nothing was tracked.");
      }
      let slice = __privateGet(this, _trackedWrites).subarray(0, __privateGet(this, _trackedEnd) - __privateGet(this, _trackedStart));
      let result = {
        data: slice,
        start: __privateGet(this, _trackedStart),
        end: __privateGet(this, _trackedEnd)
      };
      __privateSet(this, _trackedWrites, void 0);
      __privateSet(this, _trackingWrites, false);
      return result;
    }
  };
  _trackingWrites = new WeakMap();
  _trackedWrites = new WeakMap();
  _trackedStart = new WeakMap();
  _trackedEnd = new WeakMap();
  var _sections, _lastFlushEnd, _ensureMonotonicity;
  // Streaming writer that records every write as a section and, on flush(),
  // coalesces overlapping/adjacent sections into maximal chunks for onData.
  var StreamTargetWriter = class extends BaseStreamTargetWriter {
    constructor(target, ensureMonotonicity) {
      super(target);
      __privateAdd(this, _sections, []);
      __privateAdd(this, _lastFlushEnd, 0);
      __privateAdd(this, _ensureMonotonicity, void 0);
      __privateSet(this, _ensureMonotonicity, ensureMonotonicity);
    }
    write(data) {
      super.write(data);
      // data.slice() copies: the caller may reuse its buffer.
      __privateGet(this, _sections).push({
        data: data.slice(),
        start: this.pos
      });
      this.pos += data.byteLength;
    }
    flush() {
      if (__privateGet(this, _sections).length === 0)
        return;
      let chunks = [];
      let sorted = [...__privateGet(this, _sections)].sort((a, b) => a.start - b.start);
      chunks.push({
        start: sorted[0].start,
        size: sorted[0].data.byteLength
      });
      // Merge sections that touch or overlap into single chunks.
      for (let i = 1; i < sorted.length; i++) {
        let lastChunk = chunks[chunks.length - 1];
        let section = sorted[i];
        if (section.start <= lastChunk.start + lastChunk.size) {
          lastChunk.size = Math.max(lastChunk.size, section.start + section.data.byteLength - lastChunk.start);
        } else {
          chunks.push({
            start: section.start,
            size: section.data.byteLength
          });
        }
      }
      for (let chunk of chunks) {
        // Materialize each chunk by replaying all sections that fall in it.
        chunk.data = new Uint8Array(chunk.size);
        for (let section of __privateGet(this, _sections)) {
          if (chunk.start <= section.start && section.start < chunk.start + chunk.size) {
            chunk.data.set(section.data, section.start - chunk.start);
          }
        }
        if (__privateGet(this, _ensureMonotonicity) && chunk.start < __privateGet(this, _lastFlushEnd)) {
          throw new Error("Internal error: Monotonicity violation.");
        }
        this.target.options.onData?.(chunk.data, chunk.start);
        __privateSet(this, _lastFlushEnd, chunk.start + chunk.data.byteLength);
      }
      __privateGet(this, _sections).length = 0;
    }
    finalize() {
    }
  };
  _sections = new WeakMap();
  _lastFlushEnd = new WeakMap();
  _ensureMonotonicity = new WeakMap();
  var DEFAULT_CHUNK_SIZE = 2 ** 24;
  var MAX_CHUNKS_AT_ONCE = 2;
  var _chunkSize, _chunks, _lastFlushEnd2, _ensureMonotonicity2, _writeDataIntoChunks, writeDataIntoChunks_fn, _insertSectionIntoChunk, insertSectionIntoChunk_fn, _createChunk, createChunk_fn, _flushChunks, flushChunks_fn;
  // Streaming writer that buffers output in fixed-size aligned chunks and
  // flushes a chunk once fully written (or when too many are in memory).
  var ChunkedStreamTargetWriter = class extends BaseStreamTargetWriter {
    constructor(target, ensureMonotonicity) {
      super(target);
      __privateAdd(this, _writeDataIntoChunks);
      __privateAdd(this, _insertSectionIntoChunk);
      __privateAdd(this, _createChunk);
      __privateAdd(this, _flushChunks);
      __privateAdd(this, _chunkSize, void 0);
      __privateAdd(this, _chunks, []);
      __privateAdd(this, _lastFlushEnd2, 0);
      __privateAdd(this, _ensureMonotonicity2, void 0);
      __privateSet(this, _chunkSize, target.options?.chunkSize ?? DEFAULT_CHUNK_SIZE);
      __privateSet(this, _ensureMonotonicity2, ensureMonotonicity);
      if (!Number.isInteger(__privateGet(this, _chunkSize)) || __privateGet(this, _chunkSize) < 2 ** 10) {
        throw new Error("Invalid StreamTarget options: chunkSize must be an integer not smaller than 1024.");
      }
    }
    write(data) {
      super.write(data);
      __privateMethod(this, _writeDataIntoChunks, writeDataIntoChunks_fn).call(this, data, this.pos);
      __privateMethod(this, _flushChunks, flushChunks_fn).call(this);
      this.pos += data.byteLength;
    }
    finalize() {
      __privateMethod(this, _flushChunks, flushChunks_fn).call(this, true);
    }
  };
  _chunkSize = new WeakMap();
  _chunks = new WeakMap();
  _lastFlushEnd2 = new WeakMap();
  _ensureMonotonicity2 = new WeakMap();
  _writeDataIntoChunks = new WeakSet();
  // Route `data` into the chunk covering `position`, recursing for the part
  // that spills past the chunk boundary.
  writeDataIntoChunks_fn = function(data, position) {
    let chunkIndex = __privateGet(this, _chunks).findIndex((x) => x.start <= position && position < x.start + __privateGet(this, _chunkSize));
    if (chunkIndex === -1)
      chunkIndex = __privateMethod(this, _createChunk, createChunk_fn).call(this, position);
    let chunk = __privateGet(this, _chunks)[chunkIndex];
    let relativePosition = position - chunk.start;
    let toWrite = data.subarray(0, Math.min(__privateGet(this, _chunkSize) - relativePosition, data.byteLength));
    chunk.data.set(toWrite, relativePosition);
    let section = {
      start: relativePosition,
      end: relativePosition + toWrite.byteLength
    };
    __privateMethod(this, _insertSectionIntoChunk, insertSectionIntoChunk_fn).call(this, chunk, section);
    // A chunk whose written ranges cover it entirely can be flushed.
    if (chunk.written[0].start === 0 && chunk.written[0].end === __privateGet(this, _chunkSize)) {
      chunk.shouldFlush = true;
    }
    if (__privateGet(this, _chunks).length > MAX_CHUNKS_AT_ONCE) {
      // Memory pressure: flush everything but the newest chunk.
      for (let i = 0; i < __privateGet(this, _chunks).length - 1; i++) {
        __privateGet(this, _chunks)[i].shouldFlush = true;
      }
      __privateMethod(this, _flushChunks, flushChunks_fn).call(this);
    }
    if (toWrite.byteLength < data.byteLength) {
      __privateMethod(this, _writeDataIntoChunks, writeDataIntoChunks_fn).call(this, data.subarray(toWrite.byteLength), position + toWrite.byteLength);
    }
  };
  _insertSectionIntoChunk = new WeakSet();
  // Binary-insert a written range into chunk.written (kept sorted by start),
  // then merge any ranges that now touch or overlap.
  insertSectionIntoChunk_fn = function(chunk, section) {
    let low = 0;
    let high = chunk.written.length - 1;
    let index = -1;
    while (low <= high) {
      let mid = Math.floor(low + (high - low + 1) / 2);
      if (chunk.written[mid].start <= section.start) {
        low = mid + 1;
        index = mid;
      } else {
        high = mid - 1;
      }
    }
    chunk.written.splice(index + 1, 0, section);
    if (index === -1 || chunk.written[index].end < section.start)
      index++;
    while (index < chunk.written.length - 1 && chunk.written[index].end >= chunk.written[index + 1].start) {
      chunk.written[index].end = Math.max(chunk.written[index].end, chunk.written[index + 1].end);
      chunk.written.splice(index + 1, 1);
    }
  };
  _createChunk = new WeakSet();
  // Allocate the chunk whose chunkSize-aligned range contains
  // `includesPosition`; returns its index in the sorted chunk list.
  createChunk_fn = function(includesPosition) {
    let start = Math.floor(includesPosition / __privateGet(this, _chunkSize)) * __privateGet(this, _chunkSize);
    let chunk = {
      start,
      data: new Uint8Array(__privateGet(this, _chunkSize)),
      written: [],
      shouldFlush: false
    };
    __privateGet(this, _chunks).push(chunk);
    __privateGet(this, _chunks).sort((a, b) => a.start - b.start);
    return __privateGet(this, _chunks).indexOf(chunk);
  };
  _flushChunks = new WeakSet();
  // Emit (and drop) every chunk marked shouldFlush; `force` flushes all.
  flushChunks_fn = function(force = false) {
    for (let
i = 0; i < __privateGet(this, _chunks).length; i++) { + let chunk = __privateGet(this, _chunks)[i]; + if (!chunk.shouldFlush && !force) + continue; + for (let section of chunk.written) { + if (__privateGet(this, _ensureMonotonicity2) && chunk.start + section.start < __privateGet(this, _lastFlushEnd2)) { + throw new Error("Internal error: Monotonicity violation."); + } + this.target.options.onData?.( + chunk.data.subarray(section.start, section.end), + chunk.start + section.start + ); + __privateSet(this, _lastFlushEnd2, chunk.start + section.end); + } + __privateGet(this, _chunks).splice(i--, 1); + } + }; + var FileSystemWritableFileStreamTargetWriter = class extends ChunkedStreamTargetWriter { + constructor(target, ensureMonotonicity) { + super(new StreamTarget({ + onData: (data, position) => target.stream.write({ + type: "write", + data, + position + }), + chunked: true, + chunkSize: target.options?.chunkSize + }), ensureMonotonicity); + } + }; + + // src/muxer.ts + var VIDEO_TRACK_NUMBER = 1; + var AUDIO_TRACK_NUMBER = 2; + var SUBTITLE_TRACK_NUMBER = 3; + var VIDEO_TRACK_TYPE = 1; + var AUDIO_TRACK_TYPE = 2; + var SUBTITLE_TRACK_TYPE = 17; + var MAX_CHUNK_LENGTH_MS = 2 ** 15; + var CODEC_PRIVATE_MAX_SIZE = 2 ** 12; + var APP_NAME = "https://github.com/Vanilagy/webm-muxer"; + var SEGMENT_SIZE_BYTES = 6; + var CLUSTER_SIZE_BYTES = 5; + var FIRST_TIMESTAMP_BEHAVIORS = ["strict", "offset", "permissive"]; + var _options, _writer, _segment, _segmentInfo, _seekHead, _tracksElement, _segmentDuration, _colourElement, _videoCodecPrivate, _audioCodecPrivate, _subtitleCodecPrivate, _cues, _currentCluster, _currentClusterTimestamp, _duration, _videoChunkQueue, _audioChunkQueue, _subtitleChunkQueue, _firstVideoTimestamp, _firstAudioTimestamp, _lastVideoTimestamp, _lastAudioTimestamp, _lastSubtitleTimestamp, _colorSpace, _finalized, _validateOptions, validateOptions_fn, _createFileHeader, createFileHeader_fn, _writeEBMLHeader, writeEBMLHeader_fn, 
_createCodecPrivatePlaceholders, createCodecPrivatePlaceholders_fn, _createColourElement, createColourElement_fn, _createSeekHead, createSeekHead_fn, _createSegmentInfo, createSegmentInfo_fn, _createTracks, createTracks_fn, _createSegment, createSegment_fn, _createCues, createCues_fn, _maybeFlushStreamingTargetWriter, maybeFlushStreamingTargetWriter_fn, _segmentDataOffset, segmentDataOffset_get, _writeVideoDecoderConfig, writeVideoDecoderConfig_fn, _fixVP9ColorSpace, fixVP9ColorSpace_fn, _writeSubtitleChunks, writeSubtitleChunks_fn, _createInternalChunk, createInternalChunk_fn, _validateTimestamp, validateTimestamp_fn, _writeBlock, writeBlock_fn, _createCodecPrivateElement, createCodecPrivateElement_fn, _writeCodecPrivate, writeCodecPrivate_fn, _createNewCluster, createNewCluster_fn, _finalizeCurrentCluster, finalizeCurrentCluster_fn, _ensureNotFinalized, ensureNotFinalized_fn; + var Muxer = class { + constructor(options) { + __privateAdd(this, _validateOptions); + __privateAdd(this, _createFileHeader); + __privateAdd(this, _writeEBMLHeader); + __privateAdd(this, _createCodecPrivatePlaceholders); + __privateAdd(this, _createColourElement); + __privateAdd(this, _createSeekHead); + __privateAdd(this, _createSegmentInfo); + __privateAdd(this, _createTracks); + __privateAdd(this, _createSegment); + __privateAdd(this, _createCues); + __privateAdd(this, _maybeFlushStreamingTargetWriter); + __privateAdd(this, _segmentDataOffset); + __privateAdd(this, _writeVideoDecoderConfig); + __privateAdd(this, _fixVP9ColorSpace); + __privateAdd(this, _writeSubtitleChunks); + __privateAdd(this, _createInternalChunk); + __privateAdd(this, _validateTimestamp); + __privateAdd(this, _writeBlock); + __privateAdd(this, _createCodecPrivateElement); + __privateAdd(this, _writeCodecPrivate); + __privateAdd(this, _createNewCluster); + __privateAdd(this, _finalizeCurrentCluster); + __privateAdd(this, _ensureNotFinalized); + __privateAdd(this, _options, void 0); + __privateAdd(this, _writer, void 
0); + __privateAdd(this, _segment, void 0); + __privateAdd(this, _segmentInfo, void 0); + __privateAdd(this, _seekHead, void 0); + __privateAdd(this, _tracksElement, void 0); + __privateAdd(this, _segmentDuration, void 0); + __privateAdd(this, _colourElement, void 0); + __privateAdd(this, _videoCodecPrivate, void 0); + __privateAdd(this, _audioCodecPrivate, void 0); + __privateAdd(this, _subtitleCodecPrivate, void 0); + __privateAdd(this, _cues, void 0); + __privateAdd(this, _currentCluster, void 0); + __privateAdd(this, _currentClusterTimestamp, void 0); + __privateAdd(this, _duration, 0); + __privateAdd(this, _videoChunkQueue, []); + __privateAdd(this, _audioChunkQueue, []); + __privateAdd(this, _subtitleChunkQueue, []); + __privateAdd(this, _firstVideoTimestamp, void 0); + __privateAdd(this, _firstAudioTimestamp, void 0); + __privateAdd(this, _lastVideoTimestamp, -1); + __privateAdd(this, _lastAudioTimestamp, -1); + __privateAdd(this, _lastSubtitleTimestamp, -1); + __privateAdd(this, _colorSpace, void 0); + __privateAdd(this, _finalized, false); + __privateMethod(this, _validateOptions, validateOptions_fn).call(this, options); + __privateSet(this, _options, { + type: "webm", + firstTimestampBehavior: "strict", + ...options + }); + this.target = options.target; + let ensureMonotonicity = !!__privateGet(this, _options).streaming; + if (options.target instanceof ArrayBufferTarget) { + __privateSet(this, _writer, new ArrayBufferTargetWriter(options.target)); + } else if (options.target instanceof StreamTarget) { + __privateSet(this, _writer, options.target.options?.chunked ? 
new ChunkedStreamTargetWriter(options.target, ensureMonotonicity) : new StreamTargetWriter(options.target, ensureMonotonicity)); + } else if (options.target instanceof FileSystemWritableFileStreamTarget) { + __privateSet(this, _writer, new FileSystemWritableFileStreamTargetWriter(options.target, ensureMonotonicity)); + } else { + throw new Error(`Invalid target: ${options.target}`); + } + __privateMethod(this, _createFileHeader, createFileHeader_fn).call(this); + } + addVideoChunk(chunk, meta, timestamp) { + if (!(chunk instanceof EncodedVideoChunk)) { + throw new TypeError("addVideoChunk's first argument (chunk) must be of type EncodedVideoChunk."); + } + if (meta && typeof meta !== "object") { + throw new TypeError("addVideoChunk's second argument (meta), when provided, must be an object."); + } + if (timestamp !== void 0 && (!Number.isFinite(timestamp) || timestamp < 0)) { + throw new TypeError( + "addVideoChunk's third argument (timestamp), when provided, must be a non-negative real number." + ); + } + let data = new Uint8Array(chunk.byteLength); + chunk.copyTo(data); + this.addVideoChunkRaw(data, chunk.type, timestamp ?? 
chunk.timestamp, meta); + } + addVideoChunkRaw(data, type, timestamp, meta) { + if (!(data instanceof Uint8Array)) { + throw new TypeError("addVideoChunkRaw's first argument (data) must be an instance of Uint8Array."); + } + if (type !== "key" && type !== "delta") { + throw new TypeError("addVideoChunkRaw's second argument (type) must be either 'key' or 'delta'."); + } + if (!Number.isFinite(timestamp) || timestamp < 0) { + throw new TypeError("addVideoChunkRaw's third argument (timestamp) must be a non-negative real number."); + } + if (meta && typeof meta !== "object") { + throw new TypeError("addVideoChunkRaw's fourth argument (meta), when provided, must be an object."); + } + __privateMethod(this, _ensureNotFinalized, ensureNotFinalized_fn).call(this); + if (!__privateGet(this, _options).video) + throw new Error("No video track declared."); + if (__privateGet(this, _firstVideoTimestamp) === void 0) + __privateSet(this, _firstVideoTimestamp, timestamp); + if (meta) + __privateMethod(this, _writeVideoDecoderConfig, writeVideoDecoderConfig_fn).call(this, meta); + let videoChunk = __privateMethod(this, _createInternalChunk, createInternalChunk_fn).call(this, data, type, timestamp, VIDEO_TRACK_NUMBER); + if (__privateGet(this, _options).video.codec === "V_VP9") + __privateMethod(this, _fixVP9ColorSpace, fixVP9ColorSpace_fn).call(this, videoChunk); + __privateSet(this, _lastVideoTimestamp, videoChunk.timestamp); + while (__privateGet(this, _audioChunkQueue).length > 0 && __privateGet(this, _audioChunkQueue)[0].timestamp <= videoChunk.timestamp) { + let audioChunk = __privateGet(this, _audioChunkQueue).shift(); + __privateMethod(this, _writeBlock, writeBlock_fn).call(this, audioChunk, false); + } + if (!__privateGet(this, _options).audio || videoChunk.timestamp <= __privateGet(this, _lastAudioTimestamp)) { + __privateMethod(this, _writeBlock, writeBlock_fn).call(this, videoChunk, true); + } else { + __privateGet(this, _videoChunkQueue).push(videoChunk); + } + 
__privateMethod(this, _writeSubtitleChunks, writeSubtitleChunks_fn).call(this); + __privateMethod(this, _maybeFlushStreamingTargetWriter, maybeFlushStreamingTargetWriter_fn).call(this); + } + addAudioChunk(chunk, meta, timestamp) { + if (!(chunk instanceof EncodedAudioChunk)) { + throw new TypeError("addAudioChunk's first argument (chunk) must be of type EncodedAudioChunk."); + } + if (meta && typeof meta !== "object") { + throw new TypeError("addAudioChunk's second argument (meta), when provided, must be an object."); + } + if (timestamp !== void 0 && (!Number.isFinite(timestamp) || timestamp < 0)) { + throw new TypeError( + "addAudioChunk's third argument (timestamp), when provided, must be a non-negative real number." + ); + } + let data = new Uint8Array(chunk.byteLength); + chunk.copyTo(data); + this.addAudioChunkRaw(data, chunk.type, timestamp ?? chunk.timestamp, meta); + } + addAudioChunkRaw(data, type, timestamp, meta) { + if (!(data instanceof Uint8Array)) { + throw new TypeError("addAudioChunkRaw's first argument (data) must be an instance of Uint8Array."); + } + if (type !== "key" && type !== "delta") { + throw new TypeError("addAudioChunkRaw's second argument (type) must be either 'key' or 'delta'."); + } + if (!Number.isFinite(timestamp) || timestamp < 0) { + throw new TypeError("addAudioChunkRaw's third argument (timestamp) must be a non-negative real number."); + } + if (meta && typeof meta !== "object") { + throw new TypeError("addAudioChunkRaw's fourth argument (meta), when provided, must be an object."); + } + __privateMethod(this, _ensureNotFinalized, ensureNotFinalized_fn).call(this); + if (!__privateGet(this, _options).audio) + throw new Error("No audio track declared."); + if (__privateGet(this, _firstAudioTimestamp) === void 0) + __privateSet(this, _firstAudioTimestamp, timestamp); + if (meta?.decoderConfig) { + if (__privateGet(this, _options).streaming) { + __privateSet(this, _audioCodecPrivate, __privateMethod(this, 
_createCodecPrivateElement, createCodecPrivateElement_fn).call(this, meta.decoderConfig.description)); + } else { + __privateMethod(this, _writeCodecPrivate, writeCodecPrivate_fn).call(this, __privateGet(this, _audioCodecPrivate), meta.decoderConfig.description); + } + } + let audioChunk = __privateMethod(this, _createInternalChunk, createInternalChunk_fn).call(this, data, type, timestamp, AUDIO_TRACK_NUMBER); + __privateSet(this, _lastAudioTimestamp, audioChunk.timestamp); + while (__privateGet(this, _videoChunkQueue).length > 0 && __privateGet(this, _videoChunkQueue)[0].timestamp <= audioChunk.timestamp) { + let videoChunk = __privateGet(this, _videoChunkQueue).shift(); + __privateMethod(this, _writeBlock, writeBlock_fn).call(this, videoChunk, true); + } + if (!__privateGet(this, _options).video || audioChunk.timestamp <= __privateGet(this, _lastVideoTimestamp)) { + __privateMethod(this, _writeBlock, writeBlock_fn).call(this, audioChunk, !__privateGet(this, _options).video); + } else { + __privateGet(this, _audioChunkQueue).push(audioChunk); + } + __privateMethod(this, _writeSubtitleChunks, writeSubtitleChunks_fn).call(this); + __privateMethod(this, _maybeFlushStreamingTargetWriter, maybeFlushStreamingTargetWriter_fn).call(this); + } + addSubtitleChunk(chunk, meta, timestamp) { + if (typeof chunk !== "object" || !chunk) { + throw new TypeError("addSubtitleChunk's first argument (chunk) must be an object."); + } else { + if (!(chunk.body instanceof Uint8Array)) { + throw new TypeError("body must be an instance of Uint8Array."); + } + if (!Number.isFinite(chunk.timestamp) || chunk.timestamp < 0) { + throw new TypeError("timestamp must be a non-negative real number."); + } + if (!Number.isFinite(chunk.duration) || chunk.duration < 0) { + throw new TypeError("duration must be a non-negative real number."); + } + if (chunk.additions && !(chunk.additions instanceof Uint8Array)) { + throw new TypeError("additions, when present, must be an instance of Uint8Array."); + } 
+ } + if (typeof meta !== "object") { + throw new TypeError("addSubtitleChunk's second argument (meta) must be an object."); + } + __privateMethod(this, _ensureNotFinalized, ensureNotFinalized_fn).call(this); + if (!__privateGet(this, _options).subtitles) + throw new Error("No subtitle track declared."); + if (meta?.decoderConfig) { + if (__privateGet(this, _options).streaming) { + __privateSet(this, _subtitleCodecPrivate, __privateMethod(this, _createCodecPrivateElement, createCodecPrivateElement_fn).call(this, meta.decoderConfig.description)); + } else { + __privateMethod(this, _writeCodecPrivate, writeCodecPrivate_fn).call(this, __privateGet(this, _subtitleCodecPrivate), meta.decoderConfig.description); + } + } + let subtitleChunk = __privateMethod(this, _createInternalChunk, createInternalChunk_fn).call(this, chunk.body, "key", timestamp ?? chunk.timestamp, SUBTITLE_TRACK_NUMBER, chunk.duration, chunk.additions); + __privateSet(this, _lastSubtitleTimestamp, subtitleChunk.timestamp); + __privateGet(this, _subtitleChunkQueue).push(subtitleChunk); + __privateMethod(this, _writeSubtitleChunks, writeSubtitleChunks_fn).call(this); + __privateMethod(this, _maybeFlushStreamingTargetWriter, maybeFlushStreamingTargetWriter_fn).call(this); + } + finalize() { + if (__privateGet(this, _finalized)) { + throw new Error("Cannot finalize a muxer more than once."); + } + while (__privateGet(this, _videoChunkQueue).length > 0) + __privateMethod(this, _writeBlock, writeBlock_fn).call(this, __privateGet(this, _videoChunkQueue).shift(), true); + while (__privateGet(this, _audioChunkQueue).length > 0) + __privateMethod(this, _writeBlock, writeBlock_fn).call(this, __privateGet(this, _audioChunkQueue).shift(), true); + while (__privateGet(this, _subtitleChunkQueue).length > 0 && __privateGet(this, _subtitleChunkQueue)[0].timestamp <= __privateGet(this, _duration)) { + __privateMethod(this, _writeBlock, writeBlock_fn).call(this, __privateGet(this, _subtitleChunkQueue).shift(), false); + 
} + if (__privateGet(this, _currentCluster) && !__privateGet(this, _options).streaming) { + __privateMethod(this, _finalizeCurrentCluster, finalizeCurrentCluster_fn).call(this); + } + __privateGet(this, _writer).writeEBML(__privateGet(this, _cues)); + if (!__privateGet(this, _options).streaming) { + let endPos = __privateGet(this, _writer).pos; + let segmentSize = __privateGet(this, _writer).pos - __privateGet(this, _segmentDataOffset, segmentDataOffset_get); + __privateGet(this, _writer).seek(__privateGet(this, _writer).offsets.get(__privateGet(this, _segment)) + 4); + __privateGet(this, _writer).writeEBMLVarInt(segmentSize, SEGMENT_SIZE_BYTES); + __privateGet(this, _segmentDuration).data = new EBMLFloat64(__privateGet(this, _duration)); + __privateGet(this, _writer).seek(__privateGet(this, _writer).offsets.get(__privateGet(this, _segmentDuration))); + __privateGet(this, _writer).writeEBML(__privateGet(this, _segmentDuration)); + __privateGet(this, _seekHead).data[0].data[1].data = __privateGet(this, _writer).offsets.get(__privateGet(this, _cues)) - __privateGet(this, _segmentDataOffset, segmentDataOffset_get); + __privateGet(this, _seekHead).data[1].data[1].data = __privateGet(this, _writer).offsets.get(__privateGet(this, _segmentInfo)) - __privateGet(this, _segmentDataOffset, segmentDataOffset_get); + __privateGet(this, _seekHead).data[2].data[1].data = __privateGet(this, _writer).offsets.get(__privateGet(this, _tracksElement)) - __privateGet(this, _segmentDataOffset, segmentDataOffset_get); + __privateGet(this, _writer).seek(__privateGet(this, _writer).offsets.get(__privateGet(this, _seekHead))); + __privateGet(this, _writer).writeEBML(__privateGet(this, _seekHead)); + __privateGet(this, _writer).seek(endPos); + } + __privateMethod(this, _maybeFlushStreamingTargetWriter, maybeFlushStreamingTargetWriter_fn).call(this); + __privateGet(this, _writer).finalize(); + __privateSet(this, _finalized, true); + } + }; + _options = new WeakMap(); + _writer = new WeakMap(); 
+ _segment = new WeakMap(); + _segmentInfo = new WeakMap(); + _seekHead = new WeakMap(); + _tracksElement = new WeakMap(); + _segmentDuration = new WeakMap(); + _colourElement = new WeakMap(); + _videoCodecPrivate = new WeakMap(); + _audioCodecPrivate = new WeakMap(); + _subtitleCodecPrivate = new WeakMap(); + _cues = new WeakMap(); + _currentCluster = new WeakMap(); + _currentClusterTimestamp = new WeakMap(); + _duration = new WeakMap(); + _videoChunkQueue = new WeakMap(); + _audioChunkQueue = new WeakMap(); + _subtitleChunkQueue = new WeakMap(); + _firstVideoTimestamp = new WeakMap(); + _firstAudioTimestamp = new WeakMap(); + _lastVideoTimestamp = new WeakMap(); + _lastAudioTimestamp = new WeakMap(); + _lastSubtitleTimestamp = new WeakMap(); + _colorSpace = new WeakMap(); + _finalized = new WeakMap(); + _validateOptions = new WeakSet(); + validateOptions_fn = function(options) { + if (typeof options !== "object") { + throw new TypeError("The muxer requires an options object to be passed to its constructor."); + } + if (!(options.target instanceof Target)) { + throw new TypeError("The target must be provided and an instance of Target."); + } + if (options.video) { + if (typeof options.video.codec !== "string") { + throw new TypeError(`Invalid video codec: ${options.video.codec}. Must be a string.`); + } + if (!Number.isInteger(options.video.width) || options.video.width <= 0) { + throw new TypeError(`Invalid video width: ${options.video.width}. Must be a positive integer.`); + } + if (!Number.isInteger(options.video.height) || options.video.height <= 0) { + throw new TypeError(`Invalid video height: ${options.video.height}. Must be a positive integer.`); + } + if (options.video.frameRate !== void 0) { + if (!Number.isFinite(options.video.frameRate) || options.video.frameRate <= 0) { + throw new TypeError( + `Invalid video frame rate: ${options.video.frameRate}. 
Must be a positive number.` + ); + } + } + if (options.video.alpha !== void 0 && typeof options.video.alpha !== "boolean") { + throw new TypeError(`Invalid video alpha: ${options.video.alpha}. Must be a boolean.`); + } + } + if (options.audio) { + if (typeof options.audio.codec !== "string") { + throw new TypeError(`Invalid audio codec: ${options.audio.codec}. Must be a string.`); + } + if (!Number.isInteger(options.audio.numberOfChannels) || options.audio.numberOfChannels <= 0) { + throw new TypeError( + `Invalid number of audio channels: ${options.audio.numberOfChannels}. Must be a positive integer.` + ); + } + if (!Number.isInteger(options.audio.sampleRate) || options.audio.sampleRate <= 0) { + throw new TypeError( + `Invalid audio sample rate: ${options.audio.sampleRate}. Must be a positive integer.` + ); + } + if (options.audio.bitDepth !== void 0) { + if (!Number.isInteger(options.audio.bitDepth) || options.audio.bitDepth <= 0) { + throw new TypeError( + `Invalid audio bit depth: ${options.audio.bitDepth}. Must be a positive integer.` + ); + } + } + } + if (options.subtitles) { + if (typeof options.subtitles.codec !== "string") { + throw new TypeError(`Invalid subtitles codec: ${options.subtitles.codec}. Must be a string.`); + } + } + if (options.type !== void 0 && !["webm", "matroska"].includes(options.type)) { + throw new TypeError(`Invalid type: ${options.type}. Must be 'webm' or 'matroska'.`); + } + if (options.firstTimestampBehavior && !FIRST_TIMESTAMP_BEHAVIORS.includes(options.firstTimestampBehavior)) { + throw new TypeError(`Invalid first timestamp behavior: ${options.firstTimestampBehavior}`); + } + if (options.streaming !== void 0 && typeof options.streaming !== "boolean") { + throw new TypeError(`Invalid streaming option: ${options.streaming}. 
Must be a boolean.`); + } + }; + _createFileHeader = new WeakSet(); + createFileHeader_fn = function() { + if (__privateGet(this, _writer) instanceof BaseStreamTargetWriter && __privateGet(this, _writer).target.options.onHeader) { + __privateGet(this, _writer).startTrackingWrites(); + } + __privateMethod(this, _writeEBMLHeader, writeEBMLHeader_fn).call(this); + if (!__privateGet(this, _options).streaming) { + __privateMethod(this, _createSeekHead, createSeekHead_fn).call(this); + } + __privateMethod(this, _createSegmentInfo, createSegmentInfo_fn).call(this); + __privateMethod(this, _createCodecPrivatePlaceholders, createCodecPrivatePlaceholders_fn).call(this); + __privateMethod(this, _createColourElement, createColourElement_fn).call(this); + if (!__privateGet(this, _options).streaming) { + __privateMethod(this, _createTracks, createTracks_fn).call(this); + __privateMethod(this, _createSegment, createSegment_fn).call(this); + } else { + } + __privateMethod(this, _createCues, createCues_fn).call(this); + __privateMethod(this, _maybeFlushStreamingTargetWriter, maybeFlushStreamingTargetWriter_fn).call(this); + }; + _writeEBMLHeader = new WeakSet(); + writeEBMLHeader_fn = function() { + let ebmlHeader = { id: 440786851 /* EBML */, data: [ + { id: 17030 /* EBMLVersion */, data: 1 }, + { id: 17143 /* EBMLReadVersion */, data: 1 }, + { id: 17138 /* EBMLMaxIDLength */, data: 4 }, + { id: 17139 /* EBMLMaxSizeLength */, data: 8 }, + { id: 17026 /* DocType */, data: __privateGet(this, _options).type ?? 
"webm" }, + { id: 17031 /* DocTypeVersion */, data: 2 }, + { id: 17029 /* DocTypeReadVersion */, data: 2 } + ] }; + __privateGet(this, _writer).writeEBML(ebmlHeader); + }; + _createCodecPrivatePlaceholders = new WeakSet(); + createCodecPrivatePlaceholders_fn = function() { + __privateSet(this, _videoCodecPrivate, { id: 236 /* Void */, size: 4, data: new Uint8Array(CODEC_PRIVATE_MAX_SIZE) }); + __privateSet(this, _audioCodecPrivate, { id: 236 /* Void */, size: 4, data: new Uint8Array(CODEC_PRIVATE_MAX_SIZE) }); + __privateSet(this, _subtitleCodecPrivate, { id: 236 /* Void */, size: 4, data: new Uint8Array(CODEC_PRIVATE_MAX_SIZE) }); + }; + _createColourElement = new WeakSet(); + createColourElement_fn = function() { + __privateSet(this, _colourElement, { id: 21936 /* Colour */, data: [ + { id: 21937 /* MatrixCoefficients */, data: 2 }, + { id: 21946 /* TransferCharacteristics */, data: 2 }, + { id: 21947 /* Primaries */, data: 2 }, + { id: 21945 /* Range */, data: 0 } + ] }); + }; + _createSeekHead = new WeakSet(); + createSeekHead_fn = function() { + const kaxCues = new Uint8Array([28, 83, 187, 107]); + const kaxInfo = new Uint8Array([21, 73, 169, 102]); + const kaxTracks = new Uint8Array([22, 84, 174, 107]); + let seekHead = { id: 290298740 /* SeekHead */, data: [ + { id: 19899 /* Seek */, data: [ + { id: 21419 /* SeekID */, data: kaxCues }, + { id: 21420 /* SeekPosition */, size: 5, data: 0 } + ] }, + { id: 19899 /* Seek */, data: [ + { id: 21419 /* SeekID */, data: kaxInfo }, + { id: 21420 /* SeekPosition */, size: 5, data: 0 } + ] }, + { id: 19899 /* Seek */, data: [ + { id: 21419 /* SeekID */, data: kaxTracks }, + { id: 21420 /* SeekPosition */, size: 5, data: 0 } + ] } + ] }; + __privateSet(this, _seekHead, seekHead); + }; + _createSegmentInfo = new WeakSet(); + createSegmentInfo_fn = function() { + let segmentDuration = { id: 17545 /* Duration */, data: new EBMLFloat64(0) }; + __privateSet(this, _segmentDuration, segmentDuration); + let segmentInfo = { id: 
357149030 /* Info */, data: [ + { id: 2807729 /* TimestampScale */, data: 1e6 }, + { id: 19840 /* MuxingApp */, data: APP_NAME }, + { id: 22337 /* WritingApp */, data: APP_NAME }, + !__privateGet(this, _options).streaming ? segmentDuration : null + ] }; + __privateSet(this, _segmentInfo, segmentInfo); + }; + _createTracks = new WeakSet(); + createTracks_fn = function() { + let tracksElement = { id: 374648427 /* Tracks */, data: [] }; + __privateSet(this, _tracksElement, tracksElement); + if (__privateGet(this, _options).video) { + tracksElement.data.push({ id: 174 /* TrackEntry */, data: [ + { id: 215 /* TrackNumber */, data: VIDEO_TRACK_NUMBER }, + { id: 29637 /* TrackUID */, data: VIDEO_TRACK_NUMBER }, + { id: 131 /* TrackType */, data: VIDEO_TRACK_TYPE }, + { id: 134 /* CodecID */, data: __privateGet(this, _options).video.codec }, + __privateGet(this, _videoCodecPrivate), + __privateGet(this, _options).video.frameRate ? { id: 2352003 /* DefaultDuration */, data: 1e9 / __privateGet(this, _options).video.frameRate } : null, + { id: 224 /* Video */, data: [ + { id: 176 /* PixelWidth */, data: __privateGet(this, _options).video.width }, + { id: 186 /* PixelHeight */, data: __privateGet(this, _options).video.height }, + __privateGet(this, _options).video.alpha ? { id: 21440 /* AlphaMode */, data: 1 } : null, + __privateGet(this, _colourElement) + ] } + ] }); + } + if (__privateGet(this, _options).audio) { + __privateSet(this, _audioCodecPrivate, __privateGet(this, _options).streaming ? 
__privateGet(this, _audioCodecPrivate) || null : { id: 236 /* Void */, size: 4, data: new Uint8Array(CODEC_PRIVATE_MAX_SIZE) }); + tracksElement.data.push({ id: 174 /* TrackEntry */, data: [ + { id: 215 /* TrackNumber */, data: AUDIO_TRACK_NUMBER }, + { id: 29637 /* TrackUID */, data: AUDIO_TRACK_NUMBER }, + { id: 131 /* TrackType */, data: AUDIO_TRACK_TYPE }, + { id: 134 /* CodecID */, data: __privateGet(this, _options).audio.codec }, + __privateGet(this, _audioCodecPrivate), + { id: 225 /* Audio */, data: [ + { id: 181 /* SamplingFrequency */, data: new EBMLFloat32(__privateGet(this, _options).audio.sampleRate) }, + { id: 159 /* Channels */, data: __privateGet(this, _options).audio.numberOfChannels }, + __privateGet(this, _options).audio.bitDepth ? { id: 25188 /* BitDepth */, data: __privateGet(this, _options).audio.bitDepth } : null + ] } + ] }); + } + if (__privateGet(this, _options).subtitles) { + tracksElement.data.push({ id: 174 /* TrackEntry */, data: [ + { id: 215 /* TrackNumber */, data: SUBTITLE_TRACK_NUMBER }, + { id: 29637 /* TrackUID */, data: SUBTITLE_TRACK_NUMBER }, + { id: 131 /* TrackType */, data: SUBTITLE_TRACK_TYPE }, + { id: 134 /* CodecID */, data: __privateGet(this, _options).subtitles.codec }, + __privateGet(this, _subtitleCodecPrivate) + ] }); + } + }; + _createSegment = new WeakSet(); + createSegment_fn = function() { + let segment = { + id: 408125543 /* Segment */, + size: __privateGet(this, _options).streaming ? -1 : SEGMENT_SIZE_BYTES, + data: [ + !__privateGet(this, _options).streaming ? 
__privateGet(this, _seekHead) : null, + __privateGet(this, _segmentInfo), + __privateGet(this, _tracksElement) + ] + }; + __privateSet(this, _segment, segment); + __privateGet(this, _writer).writeEBML(segment); + if (__privateGet(this, _writer) instanceof BaseStreamTargetWriter && __privateGet(this, _writer).target.options.onHeader) { + let { data, start } = __privateGet(this, _writer).getTrackedWrites(); + __privateGet(this, _writer).target.options.onHeader(data, start); + } + }; + _createCues = new WeakSet(); + createCues_fn = function() { + __privateSet(this, _cues, { id: 475249515 /* Cues */, data: [] }); + }; + _maybeFlushStreamingTargetWriter = new WeakSet(); + maybeFlushStreamingTargetWriter_fn = function() { + if (__privateGet(this, _writer) instanceof StreamTargetWriter) { + __privateGet(this, _writer).flush(); + } + }; + _segmentDataOffset = new WeakSet(); + segmentDataOffset_get = function() { + return __privateGet(this, _writer).dataOffsets.get(__privateGet(this, _segment)); + }; + _writeVideoDecoderConfig = new WeakSet(); + writeVideoDecoderConfig_fn = function(meta) { + if (!meta.decoderConfig) + return; + if (meta.decoderConfig.colorSpace) { + let colorSpace = meta.decoderConfig.colorSpace; + __privateSet(this, _colorSpace, colorSpace); + __privateGet(this, _colourElement).data = [ + { id: 21937 /* MatrixCoefficients */, data: { + "rgb": 1, + "bt709": 1, + "bt470bg": 5, + "smpte170m": 6 + }[colorSpace.matrix] }, + { id: 21946 /* TransferCharacteristics */, data: { + "bt709": 1, + "smpte170m": 6, + "iec61966-2-1": 13 + }[colorSpace.transfer] }, + { id: 21947 /* Primaries */, data: { + "bt709": 1, + "bt470bg": 5, + "smpte170m": 6 + }[colorSpace.primaries] }, + { id: 21945 /* Range */, data: [1, 2][Number(colorSpace.fullRange)] } + ]; + if (!__privateGet(this, _options).streaming) { + let endPos = __privateGet(this, _writer).pos; + __privateGet(this, _writer).seek(__privateGet(this, _writer).offsets.get(__privateGet(this, _colourElement))); + 
__privateGet(this, _writer).writeEBML(__privateGet(this, _colourElement)); + __privateGet(this, _writer).seek(endPos); + } + } + if (meta.decoderConfig.description) { + if (__privateGet(this, _options).streaming) { + __privateSet(this, _videoCodecPrivate, __privateMethod(this, _createCodecPrivateElement, createCodecPrivateElement_fn).call(this, meta.decoderConfig.description)); + } else { + __privateMethod(this, _writeCodecPrivate, writeCodecPrivate_fn).call(this, __privateGet(this, _videoCodecPrivate), meta.decoderConfig.description); + } + } + }; + _fixVP9ColorSpace = new WeakSet(); + fixVP9ColorSpace_fn = function(chunk) { + if (chunk.type !== "key") + return; + if (!__privateGet(this, _colorSpace)) + return; + let i = 0; + if (readBits(chunk.data, 0, 2) !== 2) + return; + i += 2; + let profile = (readBits(chunk.data, i + 1, i + 2) << 1) + readBits(chunk.data, i + 0, i + 1); + i += 2; + if (profile === 3) + i++; + let showExistingFrame = readBits(chunk.data, i + 0, i + 1); + i++; + if (showExistingFrame) + return; + let frameType = readBits(chunk.data, i + 0, i + 1); + i++; + if (frameType !== 0) + return; + i += 2; + let syncCode = readBits(chunk.data, i + 0, i + 24); + i += 24; + if (syncCode !== 4817730) + return; + if (profile >= 2) + i++; + let colorSpaceID = { + "rgb": 7, + "bt709": 2, + "bt470bg": 1, + "smpte170m": 3 + }[__privateGet(this, _colorSpace).matrix]; + writeBits(chunk.data, i + 0, i + 3, colorSpaceID); + }; + _writeSubtitleChunks = new WeakSet(); + writeSubtitleChunks_fn = function() { + let lastWrittenMediaTimestamp = Math.min( + __privateGet(this, _options).video ? __privateGet(this, _lastVideoTimestamp) : Infinity, + __privateGet(this, _options).audio ? 
__privateGet(this, _lastAudioTimestamp) : Infinity + ); + let queue = __privateGet(this, _subtitleChunkQueue); + while (queue.length > 0 && queue[0].timestamp <= lastWrittenMediaTimestamp) { + __privateMethod(this, _writeBlock, writeBlock_fn).call(this, queue.shift(), !__privateGet(this, _options).video && !__privateGet(this, _options).audio); + } + }; + _createInternalChunk = new WeakSet(); + createInternalChunk_fn = function(data, type, timestamp, trackNumber, duration, additions) { + let adjustedTimestamp = __privateMethod(this, _validateTimestamp, validateTimestamp_fn).call(this, timestamp, trackNumber); + let internalChunk = { + data, + additions, + type, + timestamp: adjustedTimestamp, + duration, + trackNumber + }; + return internalChunk; + }; + _validateTimestamp = new WeakSet(); + validateTimestamp_fn = function(timestamp, trackNumber) { + let lastTimestamp = trackNumber === VIDEO_TRACK_NUMBER ? __privateGet(this, _lastVideoTimestamp) : trackNumber === AUDIO_TRACK_NUMBER ? __privateGet(this, _lastAudioTimestamp) : __privateGet(this, _lastSubtitleTimestamp); + if (trackNumber !== SUBTITLE_TRACK_NUMBER) { + let firstTimestamp = trackNumber === VIDEO_TRACK_NUMBER ? __privateGet(this, _firstVideoTimestamp) : __privateGet(this, _firstAudioTimestamp); + if (__privateGet(this, _options).firstTimestampBehavior === "strict" && lastTimestamp === -1 && timestamp !== 0) { + throw new Error( + `The first chunk for your media track must have a timestamp of 0 (received ${timestamp}). Non-zero first timestamps are often caused by directly piping frames or audio data from a MediaStreamTrack into the encoder. Their timestamps are typically relative to the age of the document, which is probably what you want. + +If you want to offset all timestamps of a track such that the first one is zero, set firstTimestampBehavior: 'offset' in the options. +If you want to allow non-zero first timestamps, set firstTimestampBehavior: 'permissive'. 
+` + ); + } else if (__privateGet(this, _options).firstTimestampBehavior === "offset") { + timestamp -= firstTimestamp; + } + } + if (timestamp < lastTimestamp) { + throw new Error( + `Timestamps must be monotonically increasing (went from ${lastTimestamp} to ${timestamp}).` + ); + } + if (timestamp < 0) { + throw new Error(`Timestamps must be non-negative (received ${timestamp}).`); + } + return timestamp; + }; + _writeBlock = new WeakSet(); + writeBlock_fn = function(chunk, canCreateNewCluster) { + if (__privateGet(this, _options).streaming && !__privateGet(this, _tracksElement)) { + __privateMethod(this, _createTracks, createTracks_fn).call(this); + __privateMethod(this, _createSegment, createSegment_fn).call(this); + } + let msTimestamp = Math.floor(chunk.timestamp / 1e3); + let shouldCreateNewClusterFromKeyFrame = canCreateNewCluster && chunk.type === "key" && msTimestamp - __privateGet(this, _currentClusterTimestamp) >= 1e3; + if (!__privateGet(this, _currentCluster) || shouldCreateNewClusterFromKeyFrame) { + __privateMethod(this, _createNewCluster, createNewCluster_fn).call(this, msTimestamp); + } + let relativeTimestamp = msTimestamp - __privateGet(this, _currentClusterTimestamp); + if (relativeTimestamp < 0) { + return; + } + let clusterIsTooLong = relativeTimestamp >= MAX_CHUNK_LENGTH_MS; + if (clusterIsTooLong) { + throw new Error( + `Current Matroska cluster exceeded its maximum allowed length of ${MAX_CHUNK_LENGTH_MS} milliseconds. 
In order to produce a correct WebM file, you must pass in a key frame at least every ${MAX_CHUNK_LENGTH_MS} milliseconds.` + ); + } + let prelude = new Uint8Array(4); + let view = new DataView(prelude.buffer); + view.setUint8(0, 128 | chunk.trackNumber); + view.setInt16(1, relativeTimestamp, false); + if (chunk.duration === void 0 && !chunk.additions) { + view.setUint8(3, Number(chunk.type === "key") << 7); + let simpleBlock = { id: 163 /* SimpleBlock */, data: [ + prelude, + chunk.data + ] }; + __privateGet(this, _writer).writeEBML(simpleBlock); + } else { + let msDuration = Math.floor(chunk.duration / 1e3); + let blockGroup = { id: 160 /* BlockGroup */, data: [ + { id: 161 /* Block */, data: [ + prelude, + chunk.data + ] }, + chunk.duration !== void 0 ? { id: 155 /* BlockDuration */, data: msDuration } : null, + chunk.additions ? { id: 30113 /* BlockAdditions */, data: chunk.additions } : null + ] }; + __privateGet(this, _writer).writeEBML(blockGroup); + } + __privateSet(this, _duration, Math.max(__privateGet(this, _duration), msTimestamp)); + }; + _createCodecPrivateElement = new WeakSet(); + createCodecPrivateElement_fn = function(data) { + return { id: 25506 /* CodecPrivate */, size: 4, data: new Uint8Array(data) }; + }; + _writeCodecPrivate = new WeakSet(); + writeCodecPrivate_fn = function(element, data) { + let endPos = __privateGet(this, _writer).pos; + __privateGet(this, _writer).seek(__privateGet(this, _writer).offsets.get(element)); + let codecPrivateElementSize = 2 + 4 + data.byteLength; + let voidDataSize = CODEC_PRIVATE_MAX_SIZE - codecPrivateElementSize; + if (voidDataSize < 0) { + let newByteLength = data.byteLength + voidDataSize; + if (data instanceof ArrayBuffer) { + data = data.slice(0, newByteLength); + } else { + data = data.buffer.slice(0, newByteLength); + } + voidDataSize = 0; + } + element = [ + __privateMethod(this, _createCodecPrivateElement, createCodecPrivateElement_fn).call(this, data), + { id: 236 /* Void */, size: 4, data: new 
Uint8Array(voidDataSize) } + ]; + __privateGet(this, _writer).writeEBML(element); + __privateGet(this, _writer).seek(endPos); + }; + _createNewCluster = new WeakSet(); + createNewCluster_fn = function(timestamp) { + if (__privateGet(this, _currentCluster) && !__privateGet(this, _options).streaming) { + __privateMethod(this, _finalizeCurrentCluster, finalizeCurrentCluster_fn).call(this); + } + if (__privateGet(this, _writer) instanceof BaseStreamTargetWriter && __privateGet(this, _writer).target.options.onCluster) { + __privateGet(this, _writer).startTrackingWrites(); + } + __privateSet(this, _currentCluster, { + id: 524531317 /* Cluster */, + size: __privateGet(this, _options).streaming ? -1 : CLUSTER_SIZE_BYTES, + data: [ + { id: 231 /* Timestamp */, data: timestamp } + ] + }); + __privateGet(this, _writer).writeEBML(__privateGet(this, _currentCluster)); + __privateSet(this, _currentClusterTimestamp, timestamp); + let clusterOffsetFromSegment = __privateGet(this, _writer).offsets.get(__privateGet(this, _currentCluster)) - __privateGet(this, _segmentDataOffset, segmentDataOffset_get); + __privateGet(this, _cues).data.push({ id: 187 /* CuePoint */, data: [ + { id: 179 /* CueTime */, data: timestamp }, + __privateGet(this, _options).video ? { id: 183 /* CueTrackPositions */, data: [ + { id: 247 /* CueTrack */, data: VIDEO_TRACK_NUMBER }, + { id: 241 /* CueClusterPosition */, data: clusterOffsetFromSegment } + ] } : null, + __privateGet(this, _options).audio ? 
{ id: 183 /* CueTrackPositions */, data: [ + { id: 247 /* CueTrack */, data: AUDIO_TRACK_NUMBER }, + { id: 241 /* CueClusterPosition */, data: clusterOffsetFromSegment } + ] } : null + ] }); + }; + _finalizeCurrentCluster = new WeakSet(); + finalizeCurrentCluster_fn = function() { + let clusterSize = __privateGet(this, _writer).pos - __privateGet(this, _writer).dataOffsets.get(__privateGet(this, _currentCluster)); + let endPos = __privateGet(this, _writer).pos; + __privateGet(this, _writer).seek(__privateGet(this, _writer).offsets.get(__privateGet(this, _currentCluster)) + 4); + __privateGet(this, _writer).writeEBMLVarInt(clusterSize, CLUSTER_SIZE_BYTES); + __privateGet(this, _writer).seek(endPos); + if (__privateGet(this, _writer) instanceof BaseStreamTargetWriter && __privateGet(this, _writer).target.options.onCluster) { + let { data, start } = __privateGet(this, _writer).getTrackedWrites(); + __privateGet(this, _writer).target.options.onCluster(data, start, __privateGet(this, _currentClusterTimestamp)); + } + }; + _ensureNotFinalized = new WeakSet(); + ensureNotFinalized_fn = function() { + if (__privateGet(this, _finalized)) { + throw new Error("Cannot add new video or audio chunks after the file has been finalized."); + } + }; + + // src/subtitles.ts + var cueBlockHeaderRegex = /(?:(.+?)\n)?((?:\d{2}:)?\d{2}:\d{2}.\d{3})\s+-->\s+((?:\d{2}:)?\d{2}:\d{2}.\d{3})/g; + var preambleStartRegex = /^WEBVTT.*?\n{2}/; + var timestampRegex = /(?:(\d{2}):)?(\d{2}):(\d{2}).(\d{3})/; + var inlineTimestampRegex = /<(?:(\d{2}):)?(\d{2}):(\d{2}).(\d{3})>/g; + var textEncoder = new TextEncoder(); + var _options2, _config, _preambleSeen, _preambleBytes, _preambleEmitted, _parseTimestamp, parseTimestamp_fn, _formatTimestamp, formatTimestamp_fn; + var SubtitleEncoder = class { + constructor(options) { + __privateAdd(this, _parseTimestamp); + __privateAdd(this, _formatTimestamp); + __privateAdd(this, _options2, void 0); + __privateAdd(this, _config, void 0); + __privateAdd(this, 
_preambleSeen, false); + __privateAdd(this, _preambleBytes, void 0); + __privateAdd(this, _preambleEmitted, false); + __privateSet(this, _options2, options); + } + configure(config) { + if (config.codec !== "webvtt") { + throw new Error("Codec must be 'webvtt'."); + } + __privateSet(this, _config, config); + } + encode(text) { + if (!__privateGet(this, _config)) { + throw new Error("Encoder not configured."); + } + text = text.replace("\r\n", "\n").replace("\r", "\n"); + cueBlockHeaderRegex.lastIndex = 0; + let match; + if (!__privateGet(this, _preambleSeen)) { + if (!preambleStartRegex.test(text)) { + let error = new Error("WebVTT preamble incorrect."); + __privateGet(this, _options2).error(error); + throw error; + } + match = cueBlockHeaderRegex.exec(text); + let preamble = text.slice(0, match?.index ?? text.length).trimEnd(); + if (!preamble) { + let error = new Error("No WebVTT preamble provided."); + __privateGet(this, _options2).error(error); + throw error; + } + __privateSet(this, _preambleBytes, textEncoder.encode(preamble)); + __privateSet(this, _preambleSeen, true); + if (match) { + text = text.slice(match.index); + cueBlockHeaderRegex.lastIndex = 0; + } + } + while (match = cueBlockHeaderRegex.exec(text)) { + let notes = text.slice(0, match.index); + let cueIdentifier = match[1] || ""; + let matchEnd = match.index + match[0].length; + let bodyStart = text.indexOf("\n", matchEnd) + 1; + let cueSettings = text.slice(matchEnd, bodyStart).trim(); + let bodyEnd = text.indexOf("\n\n", matchEnd); + if (bodyEnd === -1) + bodyEnd = text.length; + let startTime = __privateMethod(this, _parseTimestamp, parseTimestamp_fn).call(this, match[2]); + let endTime = __privateMethod(this, _parseTimestamp, parseTimestamp_fn).call(this, match[3]); + let duration = endTime - startTime; + let body = text.slice(bodyStart, bodyEnd); + let additions = `${cueSettings} +${cueIdentifier} +${notes}`; + inlineTimestampRegex.lastIndex = 0; + body = body.replace(inlineTimestampRegex, 
(match2) => { + let time = __privateMethod(this, _parseTimestamp, parseTimestamp_fn).call(this, match2.slice(1, -1)); + let offsetTime = time - startTime; + return `<${__privateMethod(this, _formatTimestamp, formatTimestamp_fn).call(this, offsetTime)}>`; + }); + text = text.slice(bodyEnd).trimStart(); + cueBlockHeaderRegex.lastIndex = 0; + let chunk = { + body: textEncoder.encode(body), + additions: additions.trim() === "" ? void 0 : textEncoder.encode(additions), + timestamp: startTime * 1e3, + duration: duration * 1e3 + }; + let meta = {}; + if (!__privateGet(this, _preambleEmitted)) { + meta.decoderConfig = { + description: __privateGet(this, _preambleBytes) + }; + __privateSet(this, _preambleEmitted, true); + } + __privateGet(this, _options2).output(chunk, meta); + } + } + }; + _options2 = new WeakMap(); + _config = new WeakMap(); + _preambleSeen = new WeakMap(); + _preambleBytes = new WeakMap(); + _preambleEmitted = new WeakMap(); + _parseTimestamp = new WeakSet(); + parseTimestamp_fn = function(string) { + let match = timestampRegex.exec(string); + if (!match) + throw new Error("Expected match."); + return 60 * 60 * 1e3 * Number(match[1] || "0") + 60 * 1e3 * Number(match[2]) + 1e3 * Number(match[3]) + Number(match[4]); + }; + _formatTimestamp = new WeakSet(); + formatTimestamp_fn = function(timestamp) { + let hours = Math.floor(timestamp / (60 * 60 * 1e3)); + let minutes = Math.floor(timestamp % (60 * 60 * 1e3) / (60 * 1e3)); + let seconds = Math.floor(timestamp % (60 * 1e3) / 1e3); + let milliseconds = timestamp % 1e3; + return hours.toString().padStart(2, "0") + ":" + minutes.toString().padStart(2, "0") + ":" + seconds.toString().padStart(2, "0") + "." + milliseconds.toString().padStart(3, "0"); + }; + return __toCommonJS(src_exports); +})(); +if (typeof module === "object" && typeof module.exports === "object") Object.assign(module.exports, WebMMuxer)