diff --git a/samples/encode-decode-worker/css/main.css b/samples/encode-decode-worker/css/main.css
new file mode 100644
index 00000000..c79b1318
--- /dev/null
+++ b/samples/encode-decode-worker/css/main.css
@@ -0,0 +1,267 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree.
+ */
+.hidden {
+ display: none;
+}
+
+.highlight {
+ background-color: #eee;
+ font-size: 1.2em;
+ margin: 0 0 30px 0;
+ padding: 0.2em 1.5em;
+}
+
+.warning {
+ color: red;
+ font-weight: 400;
+}
+
+@media screen and (min-width: 1000px) {
+ /* hack! to detect non-touch devices */
+ div#links a {
+ line-height: 0.8em;
+ }
+}
+
+audio {
+ max-width: 100%;
+}
+
+body {
+ font-family: 'Roboto', sans-serif;
+ font-weight: 300;
+ margin: 0;
+ padding: 1em;
+ word-break: break-word;
+}
+
+button {
+ background-color: #d84a38;
+ border: none;
+ border-radius: 2px;
+ color: white;
+ font-family: 'Roboto', sans-serif;
+ font-size: 0.8em;
+ margin: 0 0 1em 0;
+ padding: 0.5em 0.7em 0.6em 0.7em;
+}
+
+button:active {
+ background-color: #cf402f;
+}
+
+button:hover {
+ background-color: #cf402f;
+}
+
+button[disabled] {
+ color: #ccc;
+}
+
+button[disabled]:hover {
+ background-color: #d84a38;
+}
+
+canvas {
+ background-color: #ccc;
+ max-width: 100%;
+ width: 100%;
+}
+
+code {
+ font-family: 'Roboto', sans-serif;
+ font-weight: 400;
+}
+
+div#container {
+ margin: 0 auto 0 auto;
+ max-width: 60em;
+ padding: 1em 1.5em 1.3em 1.5em;
+}
+
+div#links {
+ padding: 0.5em 0 0 0;
+}
+
+h1 {
+ border-bottom: 1px solid #ccc;
+ font-family: 'Roboto', sans-serif;
+ font-weight: 500;
+ margin: 0 0 0.8em 0;
+ padding: 0 0 0.2em 0;
+}
+
+h2 {
+ color: #444;
+ font-weight: 500;
+}
+
+h3 {
+ border-top: 1px solid #eee;
+ color: #666;
+ font-weight: 500;
+ margin: 10px 0 10px 0;
+ white-space: nowrap;
+}
+
+li {
+ margin: 0 0 0.4em 0;
+}
+
+html {
+ /* avoid annoying page width change
+ when moving from the home page */
+ overflow-y: scroll;
+}
+
+img {
+ border: none;
+ max-width: 100%;
+}
+
+input[type=radio] {
+ position: relative;
+ top: -1px;
+}
+
+p {
+ color: #444;
+ font-weight: 300;
+}
+
+p#data {
+ border-top: 1px dotted #666;
+ font-family: Courier New, monospace;
+ line-height: 1.3em;
+ max-height: 1000px;
+ overflow-y: auto;
+ padding: 1em 0 0 0;
+}
+
+p.borderBelow {
+ border-bottom: 1px solid #aaa;
+ padding: 0 0 20px 0;
+}
+
+section p:last-of-type {
+ margin: 0;
+}
+
+section {
+ border-bottom: 1px solid #eee;
+ margin: 0 0 30px 0;
+ padding: 0 0 20px 0;
+}
+
+section:last-of-type {
+ border-bottom: none;
+ padding: 0 0 1em 0;
+}
+
+select {
+ margin: 0 1em 1em 0;
+ position: relative;
+ top: -1px;
+}
+
+h1 span {
+ white-space: nowrap;
+}
+
+a {
+ color: #1D6EEE;
+ font-weight: 300;
+ text-decoration: none;
+}
+
+h1 a {
+ font-weight: 300;
+ margin: 0 10px 0 0;
+ white-space: nowrap;
+}
+
+a:hover {
+ color: #3d85c6;
+ text-decoration: underline;
+}
+
+a#viewSource {
+ display: block;
+ margin: 1.3em 0 0 0;
+ border-top: 1px solid #999;
+ padding: 1em 0 0 0;
+}
+
+div#errorMsg p {
+ color: #F00;
+}
+
+div#links a {
+ display: block;
+ line-height: 1.3em;
+ margin: 0 0 1.5em 0;
+}
+
+div.outputSelector {
+ margin: -1.3em 0 2em 0;
+}
+
+p.description {
+ margin: 0 0 0.5em 0;
+}
+
+strong {
+ font-weight: 500;
+}
+
+textarea {
+ resize: none;
+ font-family: 'Roboto', sans-serif;
+}
+
+video {
+  background: #222;
+  margin: 0 0 20px 0;
+  /* Derive height from the --width custom property to approximate a 4:3 box. */
+  --width: 100%;
+  width: var(--width);
+  height: calc(var(--width) * 0.75);
+}
+
+ul {
+ margin: 0 0 0.5em 0;
+}
+
+@media screen and (max-width: 650px) {
+ .highlight {
+ font-size: 1em;
+ margin: 0 0 20px 0;
+ padding: 0.2em 1em;
+ }
+
+ h1 {
+ font-size: 24px;
+ }
+}
+
+@media screen and (max-width: 550px) {
+ button:active {
+ background-color: darkRed;
+ }
+
+ h1 {
+ font-size: 22px;
+ }
+}
+
+@media screen and (max-width: 450px) {
+ h1 {
+ font-size: 20px;
+ }
+}
+
+
diff --git a/samples/encode-decode-worker/index.html b/samples/encode-decode-worker/index.html
new file mode 100644
index 00000000..25ff87ef
--- /dev/null
+++ b/samples/encode-decode-worker/index.html
@@ -0,0 +1,159 @@
+<!DOCTYPE html>
+<html lang="en">
+<head>
+  <meta charset="utf-8">
+  <meta name="viewport" content="width=device-width, initial-scale=1">
+  <title>WebCodecs in Worker</title>
+  <link rel="stylesheet" href="css/main.css">
+</head>
+<body>
+<div id="container">
+  <h1>WebCodecs in Worker</h1>
+  <section>
+    <select id="videoSource"></select>
+    <div id="hwButtons">
+      Hardware acceleration preference:
+      <label><input type="radio" name="hw" value="no-preference" onclick="getHwValue(this)" checked>no-preference</label>
+      <label><input type="radio" name="hw" value="prefer-hardware" onclick="getHwValue(this)">prefer-hardware</label>
+      <label><input type="radio" name="hw" value="prefer-software" onclick="getHwValue(this)">prefer-software</label>
+    </div>
+    <div id="prefButtons">
+      Latency mode:
+      <label><input type="radio" name="latency" value="realtime" onclick="getPrefValue(this)" checked>realtime</label>
+      <label><input type="radio" name="latency" value="quality" onclick="getPrefValue(this)">quality</label>
+    </div>
+    <div id="bitButtons">
+      Bitrate mode:
+      <label><input type="radio" name="bitmode" value="variable" onclick="getBitPrefValue(this)" checked>variable</label>
+      <label><input type="radio" name="bitmode" value="constant" onclick="getBitPrefValue(this)">constant</label>
+    </div>
+    <div id="codecButtons">
+      Codec:
+      <label><input type="radio" name="codec" value="VP8" onclick="getCodecValue(this)" checked>VP8</label>
+      <label><input type="radio" name="codec" value="VP9" onclick="getCodecValue(this)">VP9</label>
+      <label><input type="radio" name="codec" value="H264" onclick="getCodecValue(this)">H.264</label>
+      <label><input type="radio" name="codec" value="H265" onclick="getCodecValue(this)">H.265</label>
+      <label><input type="radio" name="codec" value="AV1" onclick="getCodecValue(this)">AV1</label>
+    </div>
+    <div id="resButtons">
+      Resolution:
+      <label><input type="radio" name="res" value="qvga" onclick="getResValue(this)" checked>QVGA</label>
+      <label><input type="radio" name="res" value="vga" onclick="getResValue(this)">VGA</label>
+      <label><input type="radio" name="res" value="hd" onclick="getResValue(this)">HD</label>
+      <label><input type="radio" name="res" value="full-hd" onclick="getResValue(this)">Full HD</label>
+      <label><input type="radio" name="res" value="tv4K" onclick="getResValue(this)">4K (TV)</label>
+      <label><input type="radio" name="res" value="cinema4K" onclick="getResValue(this)">4K (Cinema)</label>
+      <label><input type="radio" name="res" value="eightK" onclick="getResValue(this)">8K</label>
+    </div>
+    <div id="modeButtons">
+      Scalability mode:
+      <label><input type="radio" name="mode" value="L1T1" onclick="getModeValue(this)">L1T1</label>
+      <label><input type="radio" name="mode" value="L1T2" onclick="getModeValue(this)">L1T2</label>
+      <label><input type="radio" name="mode" value="L1T3" onclick="getModeValue(this)" checked>L1T3</label>
+    </div>
+    <div id="rateInput">
+      <label for="rate">Bitrate (bps): </label><input id="rate" value="100000">
+    </div>
+    <div id="keyInput">
+      <label for="keygap">Keyframe interval (frames): </label><input id="keygap" value="300">
+    </div>
+    <button id="connect">Connect</button>
+    <button id="stop">Stop</button>
+  </section>
+  <section>
+    <h2>Local Video</h2>
+    <video id="inputVideo" autoplay muted playsinline></video>
+    <h2>Encoded (and Decoded) Video via WebCodecs</h2>
+    <video id="outputVideo" autoplay muted playsinline></video>
+  </section>
+  <section>
+    <textarea rows="12" cols="100"></textarea>
+  </section>
+</div>
+<script src="js/main.js"></script>
+</body>
+</html>
diff --git a/samples/encode-decode-worker/js/main.js b/samples/encode-decode-worker/js/main.js
new file mode 100644
index 00000000..2e5c4eb0
--- /dev/null
+++ b/samples/encode-decode-worker/js/main.js
@@ -0,0 +1,291 @@
+'use strict';
+
+let preferredResolution;
+let mediaStream, bitrate = 100000;
+let stopped = false;
+let preferredCodec = "VP8";
+let mode = "L1T3";
+let latencyPref = "realtime", bitPref = "variable";
+let hw = "no-preference";
+let streamWorker;
+let inputStream, outputStream;
+let videoSource;
+const rate = document.querySelector('#rate');
+const connectButton = document.querySelector('#connect');
+const stopButton = document.querySelector('#stop');
+const codecButtons = document.querySelector('#codecButtons');
+const resButtons = document.querySelector('#resButtons');
+const modeButtons = document.querySelector('#modeButtons');
+const hwButtons = document.querySelector('#hwButtons');
+const prefButtons = document.querySelector('#prefButtons');
+const bitButtons = document.querySelector('#bitButtons');
+const rateInput = document.querySelector('#rateInput');
+const keyInput = document.querySelector('#keyInput');
+const videoSelect = document.querySelector('select#videoSource');
+const selectors = [videoSelect];
+connectButton.disabled = false;
+stopButton.disabled = true;
+
+videoSelect.onchange = function () {
+ videoSource = videoSelect.value;
+};
+
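+// Capture presets. Bare width/height numbers are treated as ideal values;
+// the exact/min constraints in the 4K/8K presets make getUserMedia() fail
+// with OverconstrainedError when the camera cannot satisfy them.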
+const qvgaConstraints = {video: {width: 320, height: 240}};
+const vgaConstraints = {video: {width: 640, height: 480}};
+const hdConstraints = {video: {width: 1280, height: 720}};
+const fullHdConstraints = {video: {width: {min: 1920}, height: {min: 1080}}};
+const tv4KConstraints = {video: {width: {exact: 3840}, height: {exact: 2160}}};
+const cinema4KConstraints = {video: {width: {exact: 4096}, height: {exact: 2160}}};
+const eightKConstraints = {video: {width: {min: 7680}, height: {min: 4320}}};
+
+let constraints = qvgaConstraints;
+
+function addToEventLog(text, severity = 'info') {
+ let log = document.querySelector('textarea');
+ log.value += 'log-' + severity + ': ' + text + '\n';
+ if (severity == 'fatal') stop();
+}
+
+function gotDevices(deviceInfos) {
+ // Handles being called several times to update labels. Preserve values.
+ const values = selectors.map(select => select.value);
+ selectors.forEach(select => {
+ while (select.firstChild) {
+ select.removeChild(select.firstChild);
+ }
+ });
+ for (let i = 0; i !== deviceInfos.length; ++i) {
+ const deviceInfo = deviceInfos[i];
+ const option = document.createElement('option');
+ option.value = deviceInfo.deviceId;
+ if (deviceInfo.kind === 'videoinput') {
+ option.text = deviceInfo.label || `camera ${videoSelect.length + 1}`;
+ videoSelect.appendChild(option);
+ }
+ }
+ selectors.forEach((select, selectorIndex) => {
+ if (Array.prototype.slice.call(select.childNodes).some(n => n.value === values[selectorIndex])) {
+ select.value = values[selectorIndex];
+ }
+ });
+}
+
+async function getResValue(radio) {
+ preferredResolution = radio.value;
+ addToEventLog('Resolution selected: ' + preferredResolution);
+ switch(preferredResolution) {
+ case "qvga":
+ constraints = qvgaConstraints;
+ break;
+ case "vga":
+ constraints = vgaConstraints;
+ break;
+ case "hd":
+ constraints = hdConstraints;
+ break;
+ case "full-hd":
+ constraints = fullHdConstraints;
+ break;
+ case "tv4K":
+ constraints = tv4KConstraints;
+ break;
+ case "cinema4K":
+ constraints = cinema4KConstraints;
+ break;
+ case "eightK":
+ constraints = eightKConstraints;
+ break;
+ default:
+ constraints = qvgaConstraints;
+ break;
+ }
+  // Get a MediaStream from the webcam, and reset the resolution.
+  try {
+    // Stop the tracks of the previous stream before switching.
+    if (mediaStream) {
+      mediaStream.getTracks().forEach(track => {
+        track.stop();
+      });
+    }
+    gotDevices(await navigator.mediaDevices.enumerateDevices());
+    // deviceId belongs in the video constraint set, not at the top level.
+    constraints.video.deviceId = videoSource ? {exact: videoSource} : undefined;
+    mediaStream = await navigator.mediaDevices.getUserMedia(constraints);
+    document.getElementById('inputVideo').srcObject = mediaStream;
+  } catch(e) {
+    addToEventLog(`EnumerateDevices or gUM error: ${e.message}`);
+  }
+}
+
+function getPrefValue(radio) {
+ latencyPref = radio.value;
+ addToEventLog('Latency preference selected: ' + latencyPref);
+}
+
+function getBitPrefValue(radio) {
+ bitPref = radio.value;
+ addToEventLog('Bitrate mode selected: ' + bitPref);
+}
+
+function getCodecValue(radio) {
+ preferredCodec = radio.value;
+ addToEventLog('Codec selected: ' + preferredCodec);
+}
+
+function getModeValue(radio) {
+ mode = radio.value;
+ addToEventLog('Mode selected: ' + mode);
+}
+
+function getHwValue(radio) {
+ hw = radio.value;
+ addToEventLog('Hardware Acceleration preference: ' + hw);
+}
+
+function stop() {
+ stopped = true;
+ stopButton.disabled = true;
+ connectButton.disabled = true;
+ streamWorker.postMessage({ type: "stop" });
+ try {
+ inputStream.cancel();
+ addToEventLog('inputStream cancelled');
+ } catch(e) {
+ addToEventLog(`Could not cancel inputStream: ${e.message}`);
+ }
+ try {
+ outputStream.abort();
+ addToEventLog('outputStream aborted');
+ } catch(e) {
+ addToEventLog(`Could not abort outputStream: ${e.message}`);
+ }
+}
+
+document.addEventListener('DOMContentLoaded', async function(event) {
+ if (stopped) return;
+ addToEventLog('DOM Content Loaded');
+
+ if (typeof MediaStreamTrackProcessor === 'undefined' ||
+ typeof MediaStreamTrackGenerator === 'undefined') {
+ addToEventLog('Your browser does not support the experimental Mediacapture-transform API.\n' +
+ 'Please launch with the --enable-blink-features=WebCodecs,MediaStreamInsertableStreams flag','fatal');
+ return;
+ }
+ try {
+ gotDevices(await navigator.mediaDevices.enumerateDevices());
+ } catch (e) {
+    addToEventLog('Error in device enumeration: ' + e.message);
+ }
+  // deviceId belongs in the video constraint set, not at the top level.
+  constraints.video.deviceId = videoSource ? {exact: videoSource} : undefined;
+ // Get a MediaStream from the webcam.
+ mediaStream = await navigator.mediaDevices.getUserMedia(constraints);
+ // Connect the webcam stream to the video element.
+ document.getElementById('inputVideo').srcObject = mediaStream;
+ // Create a new worker.
+ streamWorker = new Worker("js/stream_worker.js");
+ addToEventLog('Worker created.');
+ // Print messages from the worker in the text area.
+ streamWorker.addEventListener('message', function(e) {
+ addToEventLog('Worker msg: ' + e.data.text, e.data.severity);
+ }, false);
+
+ stopButton.onclick = () => {
+ addToEventLog('Stop button clicked.');
+ stop();
+ }
+
+ connectButton.onclick = () => {
+ connectButton.disabled = true;
+ stopButton.disabled = false;
+ hwButtons.style.display = "none";
+ prefButtons.style.display = "none";
+ bitButtons.style.display = "none";
+ codecButtons.style.display = "none";
+ resButtons.style.display = "none";
+ modeButtons.style.display = "none";
+ rateInput.style.display = "none";
+ keyInput.style.display = "none";
+ startMedia();
+ }
+
+ async function startMedia() {
+ if (stopped) return;
+ addToEventLog('startMedia called');
+ try {
+ // Collect the bitrate
+ const rate = document.getElementById('rate').value;
+
+ // Collect the keyframe gap
+ const keygap = document.getElementById('keygap').value;
+
+ // Create a MediaStreamTrackProcessor, which exposes frames from the track
+ // as a ReadableStream of VideoFrames, using non-standard Chrome API.
+ let [track] = mediaStream.getVideoTracks();
+ let ts = track.getSettings();
+ const processor = new MediaStreamTrackProcessor(track);
+ inputStream = processor.readable;
+
+ // Create a MediaStreamTrackGenerator, which exposes a track from a
+ // WritableStream of VideoFrames, using non-standard Chrome API.
+ const generator = new MediaStreamTrackGenerator({kind: 'video'});
+ outputStream = generator.writable;
+ document.getElementById('outputVideo').srcObject = new MediaStream([generator]);
+
+ //Create video Encoder configuration
+ const vConfig = {
+ keyInterval: keygap,
+ resolutionScale: 1,
+ framerateScale: 1.0,
+ };
+
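+      // Random 32-bit SSRC (RTP-style stream identifier) used to tag the
+      // chunks produced by this encoding session.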
+ let ssrcArr = new Uint32Array(1);
+ window.crypto.getRandomValues(ssrcArr);
+ const ssrc = ssrcArr[0];
+
+ const config = {
+ alpha: "discard",
+ latencyMode: latencyPref,
+ bitrateMode: bitPref,
+ codec: preferredCodec,
+ width: ts.width/vConfig.resolutionScale,
+ height: ts.height/vConfig.resolutionScale,
+ hardwareAcceleration: hw,
+ bitrate: rate,
+ framerate: ts.frameRate/vConfig.framerateScale,
+ keyInterval: vConfig.keyInterval,
+ ssrc: ssrc
+ };
+
+ if (mode != "L1T1") {
+ config.scalabilityMode = mode;
+ }
+
+ switch(preferredCodec){
+ case "H264":
+ config.codec = "avc1.42002A"; // baseline profile, level 4.2
+ config.avc = { format: "annexb" };
+ config.pt = 1;
+ break;
+ case "H265":
+ config.codec = "hvc1.1.6.L123.00"; // Main profile, level 4.1, main Tier
+ config.hevc = { format: "annexb" };
+ config.pt = 2;
+ break;
+ case "VP8":
+ config.codec = "vp8";
+ config.pt = 3;
+ break;
+ case "VP9":
+ config.codec = "vp09.00.10.08"; //VP9, Profile 0, level 1, bit depth 8
+ config.pt = 4;
+ break;
+ case "AV1":
+ config.codec = "av01.0.08M.10.0.110.09" // AV1 Main Profile, level 4.0, Main tier, 10-bit content, non-monochrome, with 4:2:0 chroma subsampling
+ config.pt = 5;
+ break;
+ }
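+      // NOTE: pt, ssrc and keyInterval are application-level fields, not part
+      // of the standard VideoEncoderConfig. isConfigSupported()/configure()
+      // ignore dictionary members they do not recognize, so these fields only
+      // travel in the copy of config posted to the worker.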
+
+ // Transfer the readable stream to the worker, as well as other info from the user interface.
+ // NOTE: transferring frameStream and reading it in the worker is more
+ // efficient than reading frameStream here and transferring VideoFrames individually.
+ streamWorker.postMessage({ type: "stream", config: config, streams: {input: inputStream, output: outputStream}}, [inputStream, outputStream]);
+
+ } catch(e) {
+ addToEventLog(e.name + ": " + e.message, 'fatal');
+ }
+ }
+}, false);
diff --git a/samples/encode-decode-worker/js/stream_worker.js b/samples/encode-decode-worker/js/stream_worker.js
new file mode 100644
index 00000000..6de09350
--- /dev/null
+++ b/samples/encode-decode-worker/js/stream_worker.js
@@ -0,0 +1,260 @@
+'use strict';
+
+let encoder, decoder, pl, started = false, stopped = false;
+
+let encqueue_aggregate = {
+ all: [],
+ min: Number.MAX_VALUE,
+ max: 0,
+ avg: 0,
+ sum: 0,
+};
+
+let decqueue_aggregate = {
+ all: [],
+ min: Number.MAX_VALUE,
+ max: 0,
+ avg: 0,
+ sum: 0,
+};
+
+function encqueue_update(duration) {
+ encqueue_aggregate.all.push(duration);
+ encqueue_aggregate.min = Math.min(encqueue_aggregate.min, duration);
+ encqueue_aggregate.max = Math.max(encqueue_aggregate.max, duration);
+ encqueue_aggregate.sum += duration;
+}
+
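+// Report order statistics over the sampled encoder queue sizes. After a
+// numeric sort, the quartiles are estimated by linear interpolation:
+//   Q1 ~ all[f] + alpha1 * (all[f+1] - all[f]), f = floor((n+1)/4)
+//   Q3 ~ all[t] + alpha3 * (all[t+1] - all[t]), t = floor(3*(n+1)/4)
+// where alpha is the fractional part of the quartile position. The median is
+// exact: the middle sample for odd n, the mean of the two middle ones for even n.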
+function encqueue_report() {
+  encqueue_aggregate.all.sort((a, b) => a - b); // numeric sort; the default sort is lexicographic
+ const len = encqueue_aggregate.all.length;
+ const half = len >> 1;
+ const f = (len + 1) >> 2;
+ const t = (3 * (len + 1)) >> 2;
+ const alpha1 = (len + 1)/4 - Math.trunc((len + 1)/4);
+ const alpha3 = (3 * (len + 1)/4) - Math.trunc(3 * (len + 1)/4);
+ const fquart = encqueue_aggregate.all[f] + alpha1 * (encqueue_aggregate.all[f + 1] - encqueue_aggregate.all[f]);
+ const tquart = encqueue_aggregate.all[t] + alpha3 * (encqueue_aggregate.all[t + 1] - encqueue_aggregate.all[t]);
+ const median = len % 2 === 1 ? encqueue_aggregate.all[len >> 1] : (encqueue_aggregate.all[half - 1] + encqueue_aggregate.all[half]) / 2;
+ return {
+ count: len,
+ min: encqueue_aggregate.min,
+ fquart: fquart,
+ avg: encqueue_aggregate.sum / len,
+ median: median,
+ tquart: tquart,
+ max: encqueue_aggregate.max,
+ };
+}
+
+function decqueue_update(duration) {
+ decqueue_aggregate.all.push(duration);
+ decqueue_aggregate.min = Math.min(decqueue_aggregate.min, duration);
+ decqueue_aggregate.max = Math.max(decqueue_aggregate.max, duration);
+ decqueue_aggregate.sum += duration;
+}
+
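+// Same order-statistics computation as encqueue_report(), applied to the
+// decoder queue samples.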
+function decqueue_report() {
+  decqueue_aggregate.all.sort((a, b) => a - b); // numeric sort; the default sort is lexicographic
+ const len = decqueue_aggregate.all.length;
+ const half = len >> 1;
+ const f = (len + 1) >> 2;
+ const t = (3 * (len + 1)) >> 2;
+ const alpha1 = (len + 1)/4 - Math.trunc((len + 1)/4);
+ const alpha3 = (3 * (len + 1)/4) - Math.trunc(3 * (len + 1)/4);
+ const fquart = decqueue_aggregate.all[f] + alpha1 * (decqueue_aggregate.all[f + 1] - decqueue_aggregate.all[f]);
+ const tquart = decqueue_aggregate.all[t] + alpha3 * (decqueue_aggregate.all[t + 1] - decqueue_aggregate.all[t]);
+ const median = len % 2 === 1 ? decqueue_aggregate.all[len >> 1] : (decqueue_aggregate.all[half - 1] + decqueue_aggregate.all[half]) / 2;
+ return {
+ count: len,
+ min: decqueue_aggregate.min,
+ fquart: fquart,
+ avg: decqueue_aggregate.sum / len,
+ median: median,
+ tquart: tquart,
+ max: decqueue_aggregate.max,
+ };
+}
+
+self.addEventListener('message', async function(e) {
+ if (stopped) return;
+ // In this demo, we expect at most two messages, one of each type.
+ let type = e.data.type;
+
+ if (type == "stop") {
+ self.postMessage({text: 'Stop message received.'});
+ if (started) pl.stop();
+ return;
+ } else if (type != "stream"){
+ self.postMessage({severity: 'fatal', text: 'Invalid message received.'});
+ return;
+ }
+ // We received a "stream" event
+ self.postMessage({text: 'Stream event received.'});
+
+ try {
+ pl = new pipeline(e.data);
+ pl.start();
+ } catch (e) {
+ self.postMessage({severity: 'fatal', text: `Pipeline creation failed: ${e.message}`})
+ return;
+ }
+}, false);
+
+class pipeline {
+
+ constructor(eventData) {
+ this.stopped = false;
+ this.inputStream = eventData.streams.input;
+ this.outputStream = eventData.streams.output;
+ this.config = eventData.config;
+ }
+
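+  // Returns a TransformStream that turns encoded chunks back into VideoFrames.
+  // The first chunk is a "config" pseudo-chunk carrying the serialized
+  // VideoDecoderConfig emitted by the encoder.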
+ DecodeVideoStream(self) {
+ return new TransformStream({
+ start(controller) {
+ this.decoder = decoder = new VideoDecoder({
+ output: frame => controller.enqueue(frame),
+ error: (e) => {
+ self.postMessage({severity: 'fatal', text: `Init Decoder error: ${e.message}`});
+ }
+ });
+ },
+ transform(chunk, controller) {
+ if (this.decoder.state != "closed") {
+ if (chunk.type == "config") {
+ let config = JSON.parse(chunk.config);
+ VideoDecoder.isConfigSupported(config).then((decoderSupport) => {
+ if(decoderSupport.supported) {
+ this.decoder.configure(decoderSupport.config);
+ self.postMessage({text: 'Decoder successfully configured:\n' + JSON.stringify(decoderSupport.config)});
+ } else {
+ self.postMessage({severity: 'fatal', text: 'Config not supported:\n' + JSON.stringify(decoderSupport.config)});
+ }
+ })
+ .catch((e) => {
+ self.postMessage({severity: 'fatal', text: `Configuration error: ${e.message}`});
+ })
+ } else {
+ try {
+ const queue = this.decoder.decodeQueueSize;
+ decqueue_update(queue);
+ this.decoder.decode(chunk);
+ } catch (e) {
+            self.postMessage({severity: 'fatal', text: 'Decode error: size: ' + chunk.byteLength + ' seq: ' + chunk.seqNo + ' kf: ' + chunk.keyframeIndex + ' delta: ' + chunk.deltaframeIndex + ' dur: ' + chunk.duration + ' ts: ' + chunk.timestamp + ' ssrc: ' + chunk.ssrc + ' pt: ' + chunk.pt + ' tid: ' + chunk.temporalLayerId + ' type: ' + chunk.type});
+ self.postMessage({severity: 'fatal', text: `Catch Decode error: ${e.message}`});
+ }
+ }
+ }
+ }
+ });
+ }
+
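+  // Returns a TransformStream that encodes incoming VideoFrames into
+  // EncodedVideoChunks, preceded by a "config" pseudo-chunk that carries the
+  // decoder configuration as JSON.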
+ EncodeVideoStream(self, config) {
+ return new TransformStream({
+ start(controller) {
+ this.frameCounter = 0;
+ this.seqNo = 0;
+ this.keyframeIndex = 0;
+ this.deltaframeIndex = 0;
+ this.pending_outputs = 0;
+ this.encoder = encoder = new VideoEncoder({
+ output: (chunk, cfg) => {
+ if (cfg.decoderConfig) {
+ const decoderConfig = JSON.stringify(cfg.decoderConfig);
+ self.postMessage({text: 'Configuration: ' + decoderConfig});
+ const configChunk =
+ {
+ type: "config",
+ seqNo: this.seqNo,
+ keyframeIndex: this.keyframeIndex,
+ deltaframeIndex: this.deltaframeIndex,
+ timestamp: 0,
+ pt: 0,
+ config: decoderConfig
+ };
+ controller.enqueue(configChunk);
+ }
+ chunk.temporalLayerId = 0;
+ if (cfg.svc) {
+ chunk.temporalLayerId = cfg.svc.temporalLayerId;
+ }
+ this.seqNo++;
+ if (chunk.type == 'key') {
+ this.keyframeIndex++;
+ this.deltaframeIndex = 0;
+ } else {
+ this.deltaframeIndex++;
+ }
+ this.pending_outputs--;
+ chunk.seqNo = this.seqNo;
+ chunk.keyframeIndex = this.keyframeIndex;
+ chunk.deltaframeIndex = this.deltaframeIndex;
+ controller.enqueue(chunk);
+ },
+ error: (e) => {
+ self.postMessage({severity: 'fatal', text: `Encoder error: ${e.message}`});
+ }
+ });
+ VideoEncoder.isConfigSupported(config).then((encoderSupport) => {
+ if(encoderSupport.supported) {
+ this.encoder.configure(encoderSupport.config);
+ self.postMessage({text: 'Encoder successfully configured:\n' + JSON.stringify(encoderSupport.config)});
+ } else {
+ self.postMessage({severity: 'fatal', text: 'Config not supported:\n' + JSON.stringify(encoderSupport.config)});
+ }
+ })
+ .catch((e) => {
+ self.postMessage({severity: 'fatal', text: `Configuration error: ${e.message}`});
+ })
+ },
+ transform(frame, controller) {
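+        // Crude backpressure: allow at most ~30 encode outputs in flight;
+        // when the window is full, incoming frames are dropped (closed
+        // without being encoded).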
+ if (this.pending_outputs <= 30) {
+ this.pending_outputs++;
+ const insert_keyframe = (this.frameCounter % config.keyInterval) == 0;
+ this.frameCounter++;
+ try {
+ if (this.encoder.state != "closed") {
+ const queue = this.encoder.encodeQueueSize;
+ encqueue_update(queue);
+ this.encoder.encode(frame, { keyFrame: insert_keyframe });
+ }
+ } catch(e) {
+ self.postMessage({severity: 'fatal', text: 'Encoder Error: ' + e.message});
+ }
+ }
+ frame.close();
+ }
+ });
+ }
+
+ stop() {
+ const encqueue_stats = encqueue_report();
+ const decqueue_stats = decqueue_report();
+ self.postMessage({text: 'Encoder Queue report: ' + JSON.stringify(encqueue_stats)});
+ self.postMessage({text: 'Decoder Queue report: ' + JSON.stringify(decqueue_stats)});
+ if (stopped) return;
+ stopped = true;
+ this.stopped = true;
+ self.postMessage({text: 'stop() called'});
+ if (encoder.state != "closed") encoder.close();
+ if (decoder.state != "closed") decoder.close();
+    self.postMessage({text: 'stop(): encoder and decoder closed'});
+ return;
+ }
+
+ async start() {
+ if (stopped) return;
+ started = true;
+ self.postMessage({text: 'Start method called.'});
+ try {
+ await this.inputStream
+ .pipeThrough(this.EncodeVideoStream(self,this.config))
+ .pipeThrough(this.DecodeVideoStream(self))
+ .pipeTo(this.outputStream);
+ } catch (e) {
+ self.postMessage({severity: 'fatal', text: `start error: ${e.message}`});
+ }
+ }
+}