ref: 3967fd3c5323ec35b5e9f39bbf736e7e9571db9d
parent: bb4b01a1f96ee6576fb0ccc8237cf635e89a01ec
author: Sam Leitch <sam@luceva.net>
date: Sat Mar 8 17:05:40 EST 2014
After discovering the exports from emscripten I cleaned up a bunch of code in the decoder. Decided to use a different approach to decoding that more accurately matched the original h264bsd API. Changed the names of the classes and made the canvas match the new API. Still needs testing.
--- a/js/h264bsdCanvas.js
+++ /dev/null
@@ -1,309 +1,0 @@
-//
-// Copyright (c) 2014 Sam Leitch. All rights reserved.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a copy
-// of this software and associated documentation files (the "Software"), to
-// deal in the Software without restriction, including without limitation the
-// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
-// sell copies of the Software, and to permit persons to whom the Software is
-// furnished to do so, subject to the following conditions:
-//
-// The above copyright notice and this permission notice shall be included in
-// all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
-// IN THE SOFTWARE.
-//
-// TODO: Incorporate cropping information
-
-/**
- * This class grabs content from a video element and feeds it to a canvas element.
- * If available the content is modified using a custom WebGL shader program.
- * This class depends on the h264bsd_asm.js Module implementation.
- */
-function H264bsdCanvas(canvas, Module, forceRGB) {
- this.Module = Module;
- this.canvasElement = canvas;
- this.initGlContext();
-
- if(this.contextGl && !forceRGB) {
- this.initProgram();
- this.initBuffers();
- this.initTextures();
- } else {
- this.context2D = canvas.getContext('2d');
- this.rgbBufferSize = 0;
- this.rgbBufferPtr = 0;
- }
-}
-
-/**
- * Create the GL context from the canvas element
- */
-H264bsdCanvas.prototype.initGlContext = function() {
- var canvas = this.canvasElement;
- var gl = null;
-
- var validContextNames = ["webgl", "experimental-webgl", "moz-webgl", "webkit-3d"];
- var i = 0;
-
- while(!gl && nameIndex < validNames.length) {
- var contextName = validContextNames[i];
-
- try {
- gl = canvas.getContext(contextName);
- } catch (e) {
- gl = null;
- }
-
- if(!gl || typeof gl.getParameter !== "function") {
- gl = null;
- }
-
- ++i;
- }
-
- this.contextGl = gl;
-}
-
-/**
- * Initialize GL shader program
- */
-H264bsdCanvas.prototype.initProgram = function() {
- var gl = this.contextGl;
-
- var vertexShaderScript = [
- 'attribute vec4 vertexPos;',
- 'attribute vec4 texturePos;',
- 'varying vec2 textureCoord;',
-
- 'void main()',
- '{',
- 'gl_Position = vertexPos;',
- 'textureCoord = texturePos.xy;',
- '}'
- ].join('\n');
-
- var fragmentShaderScript = [
- 'precision highp float;',
- 'varying highp vec2 textureCoord;',
- 'uniform sampler2D ySampler;',
- 'uniform sampler2D uSampler;',
- 'uniform sampler2D vSampler;',
- 'const mat4 YUV2RGB = mat4',
- '(',
- '1.1643828125, 0, 1.59602734375, -.87078515625,',
- '1.1643828125, -.39176171875, -.81296875, .52959375,',
- '1.1643828125, 2.017234375, 0, -1.081390625,',
- '0, 0, 0, 1',
- ');',
-
- 'void main(void) {',
- 'highp float y = texture2D(ySampler, textureCoord).r;'
- 'highp float u = texture2D(uSampler, textureCoord).r;'
- 'highp float v = texture2D(vSampler, textureCoord).r;'
- 'gl_FragColor = vec4(y, u, v, 1) * YUV2RGB;',
- '}'
- ].join('\n');
-
- var vertexShader = gl.createShader(gl.VERTEX_SHADER);
- gl.shaderSource(vertexShader, vertexShaderScript);
- gl.compileShader(vertexShader);
- if(!gl.getShaderParameter(vertexShader, gl.COMPILE_STATUS)) {
- console.log('Vertex shader failed to compile: ' + gl.getShaderInfoLog(vertexShader));
- }
-
- var fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
- gl.shaderSource(fragmentShader, fragmentShaderScript);
- gl.compileShader(fragmentShader);
- if(!gl.getShaderParameter(fragmentShader, gl.COMPILE_STATUS)) {
- console.log('Fragment shader failed to compile: ' + gl.getShaderInfoLog(fragmentShader));
- }
-
- var program = gl.createProgram();
- gl.attachShader(program, vertexShader);
- gl.attachShader(program, fragmentShader);
- gl.linkProgram(program);
- if(!gl.getProgramParameter(program, gl.LINK_STATUS)) {
- console.log('Program failed to compile: ' + gl.getProgramInfoLog(program));
- }
-
- gl.useProgram(program);
-
- this.shaderProgram = program;
-}
-
-/**
- * Initialize vertex buffers and attach to shader program
- */
-H264bsdCanvas.prototype.initBuffers = function() {
- var gl = this.contextGl;
- var program = this.shaderProgram;
-
- var vertexPosBuffer = gl.createBuffer();
- gl.bindBuffer(gl.ARRAY_BUFFER, vertexPosBuffer);
- gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([1, 1, -1, 1, 1, -1, -1, -1]), gl.STATIC_DRAW);
-
- var vertexPosRef = gl.getAttribLocation(program, 'vertexPos');
- gl.enableVertexAttribArray(vertexPosRef);
- gl.vertexAttribPointer(vertexPosRef, 2, gl.FLOAT, false, 0, 0);
-
- var texturePosBuffer = gl.createBuffer();
- gl.bindBuffer(gl.ARRAY_BUFFER, texturePosBuffer);
- gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([1, 0, 0, 0, 1, 1, 0, 1]), gl.STATIC_DRAW);
-
- var texturePosRef = gl.getAttribLocation(program, 'texturePos');
- gl.enableVertexAttribArray(texturePosRef);
- gl.vertexAttribPointer(texturePosRef, 2, gl.FLOAT, false, 0, 0);
-}
-
-/**
- * Initialize GL textures and attach to shader program
- */
-H264bsdCanvas.prototype.initTextures = function() {
- var gl = this.contextGl;
- var program = this.shaderProgram;
-
- var yTextureRef = this.initTexture();
- var ySamplerRef = gl.getUniformLocation(program, 'ySampler');
- gl.uniform1i(ySamplerRef, 0);
- this.yTextureRef = yTextureRef;
-
- var uTextureRef = this.initTexture();
- var uSamplerRef = gl.getUniformLocation(program, 'uSampler');
- gl.uniform1i(uSamplerRef, 1);
- this.uTextureRef = uTextureRef;
-
- var vTextureRef = this.initTexture();
- var vSamplerRef = gl.getUniformLocation(program, 'vSampler');
- gl.uniform1i(vSamplerRef, 2);
- this.vTextureRef = vTextureRef;
-}
-
-/**
- * Create and configure a single texture
- */
-H264bsdCanvas.prototype.initTexture = function() {
- var textureRef = gl.createTexture();
- gl.bindTexture(gl.TEXTURE_2D, textureRef);
- gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
- gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
- gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
- gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
- gl.bindTexture(gl.TEXTURE_2D, null);
-
- return textureRef;
-}
-
-/**
- * Draw yuvData in the best way possible
- */
-H264bsdCanvas.prototype.drawNextPicture = function(pStorage) {
- var gl = this.contextGl;
-
- if(gl) {
- this.drawNextPictureGl(pStorage);
- } else {
- this.drawNextPictureARGB(pStorage);
- }
-}
-
-/**
- * Setup GL viewport and draw the yuvData
- */
-H264bsdCanvas.prototype.drawNextPictureGl = function(pStorage) {
- var gl = this.contextGl;
- var yTextureRef = this.yTextureRef;
- var uTextureRef = this.uTextureRef;
- var vTextureRef = this.vTextureRef;
-
- gl.viewport(0, 0, size.w, size.h);
-
- gl.activeTexture(gl.TEXTURE0);
- gl.bindTexture(gl.TEXTURE_2D, yTextureRef);
- gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, size.w, size.h, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, pYuvData);
-
- gl.activeTexture(gl.TEXTURE1);
- gl.bindTexture(gl.TEXTURE_2D, uTextureRef);
- gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, size.w/2, size.h/2, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, pYuvData);
-
- gl.activeTexture(gl.TEXTURE2);
- gl.bindTexture(gl.TEXTURE_2D, vTextureRef);
- gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, size.w/2, size.h/2, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, pYuvData);
-
- gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
-}
-
-/**
- * Convert yuvData to ARGB data and draw to canvas
- */
-H264bsdCanvas.prototype.drawNextPictureARGB = function(pStorage) {
- var ctx = this.context2D;
- var rgbBufferSize = this.rgbBufferSize;
- var rgbBufferPtr = this.rgbBufferPtr;
- var imageData = this.imageData;
-
- var rgbSize = size.w * size.h * 4;
-
- if(rgbBufferSize < rgbSize) {
- if(rgbBufferPtr != 0) this.free(rgbBufferPtr);
-
- rgbBufferSize = rgbSize;
- rgbBufferPtr = this.malloc(rgbBufferSize);
-
- this.rgbBufferSize = rgbBufferSize;
- this.rgbBufferPtr = rgbBufferPtr;
- }
-
- this.h264bsdConvertToARGB(size.w, size.h, pYuvData, pRgbData);
-
- if(!imageData ||
- imageData.width != size.w ||
- imageData.height != size.h) {
- imageData = ctx.createImageData(size.w, size.h);
- this.imageData = imageData;
- }
-
- var rgbData = this.Module.HEAPU8.subarray(rgbBufferPtr, rgbBufferPtr + rgbSize);
- imageData.data.set(rgbData);
- ctx.putImageData(imageData, 0, 0);
-}
-
-//void h264bsdConvertToARGB(u32 width, u32 height, u8* data, u32 *rgbData);
-H264bsdCanvas.prototype.h264bsdConvertToARGB = function(width, height, pData, pRgbData) {
- this.Module.ccall('h264bsdConvertToARGB',
- Number,
- [Number, Number, Number, Number],
- [width, height, pData, pRgbData]);
-};
-
-// u8* h264bsdNextOutputPicture(storage_t *pStorage, u32 *picId, u32 *isIdrPic, u32 *numErrMbs);
-H264bsdCanvas.prototype.h264bsdNextOutputPicture_ = function(pStorage, pPicId, pIsIdrPic, pNumErrMbs) {
- return this.Module.ccall('h264bsdNextOutputPicture',
- Number,
- [Number, Number, Number, Number],
- [pStorage, pPicId, pIsIdrPic, pNumErrMbs]);
-};
-
-// u32* h264bsdNextOutputPictureARGB(storage_t *pStorage, u32 *picId, u32 *isIdrPic, u32 *numErrMbs);
-H264bsdCanvas.prototype.h264bsdNextOutputPictureARGB_ = function(pStorage, pPicId, pIsIdrPic, pNumErrMbs){
- return this.Module.ccall('h264bsdNextOutputPictureARGB',
- Number,
- [Number, Number, Number, Number],
- [pStorage, pPicId, pIsIdrPic, pNumErrMbs]);
-};
-
-// void* malloc(size_t size);
-H264bsdCanvas.prototype.malloc = function(size) {
- return this.Module.ccall('malloc', Number, [Number], [size]);
-};
-
-// void free(void* ptr);
-H264bsdCanvas.prototype.free = function(ptr) {
- this.Module.ccall('free', null, [Number], [ptr]);
-};
--- /dev/null
+++ b/js/h264bsd_canvas.js
@@ -1,0 +1,261 @@
+//
+// Copyright (c) 2014 Sam Leitch. All rights reserved.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+// IN THE SOFTWARE.
+//
+// TODO: Incorporate cropping information
+
+/**
+ * This class can be used to render output pictures from an H264bsdDecoder to a canvas element.
+ * If available, the content is rendered using WebGL.
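+ *
+ * A rough usage sketch (untested; assumes an H264bsdDecoder named `decoder` that has
+ * already produced a picture, and a <canvas> element with the hypothetical id 'video-canvas'):
+ *
+ *     var canvasElement = document.getElementById('video-canvas');
+ *     var display = new H264bsdCanvas(canvasElement);
+ *     display.drawNextOutputPicture(decoder);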
+ */
+function H264bsdCanvas(canvas, forceRGB) {
+ this.canvasElement = canvas;
+ this.initContextGL();
+
+ if(this.contextGL && !forceRGB) {
+ this.initProgram();
+ this.initBuffers();
+ this.initTextures();
+ } else {
+ this.context2D = canvas.getContext('2d');
+ }
+}
+
+/**
+ * Create the GL context from the canvas element
+ */
+H264bsdCanvas.prototype.initContextGL = function() {
+ var canvas = this.canvasElement;
+ var gl = null;
+
+ var validContextNames = ["webgl", "experimental-webgl", "moz-webgl", "webkit-3d"];
+ var i = 0;
+
+ while(!gl && i < validContextNames.length) {
+ var contextName = validContextNames[i];
+
+ try {
+ gl = canvas.getContext(contextName);
+ } catch (e) {
+ gl = null;
+ }
+
+ if(!gl || typeof gl.getParameter !== "function") {
+ gl = null;
+ }
+
+ ++i;
+ }
+
+ this.contextGL = gl;
+}
+
+/**
+ * Initialize GL shader program
+ */
+H264bsdCanvas.prototype.initProgram = function() {
+ var gl = this.contextGL;
+
+ var vertexShaderScript = [
+ 'attribute vec4 vertexPos;',
+ 'attribute vec4 texturePos;',
+ 'varying vec2 textureCoord;',
+
+ 'void main()',
+ '{',
+ 'gl_Position = vertexPos;',
+ 'textureCoord = texturePos.xy;',
+ '}'
+ ].join('\n');
+
+ var fragmentShaderScript = [
+ 'precision highp float;',
+ 'varying highp vec2 textureCoord;',
+ 'uniform sampler2D ySampler;',
+ 'uniform sampler2D uSampler;',
+ 'uniform sampler2D vSampler;',
+ 'const mat4 YUV2RGB = mat4',
+ '(',
+ '1.1643828125, 0, 1.59602734375, -.87078515625,',
+ '1.1643828125, -.39176171875, -.81296875, .52959375,',
+ '1.1643828125, 2.017234375, 0, -1.081390625,',
+ '0, 0, 0, 1',
+ ');',
+
+ 'void main(void) {',
+ 'highp float y = texture2D(ySampler, textureCoord).r;',
+ 'highp float u = texture2D(uSampler, textureCoord).r;',
+ 'highp float v = texture2D(vSampler, textureCoord).r;',
+ 'gl_FragColor = vec4(y, u, v, 1) * YUV2RGB;',
+ '}'
+ ].join('\n');
+
+ var vertexShader = gl.createShader(gl.VERTEX_SHADER);
+ gl.shaderSource(vertexShader, vertexShaderScript);
+ gl.compileShader(vertexShader);
+ if(!gl.getShaderParameter(vertexShader, gl.COMPILE_STATUS)) {
+ console.log('Vertex shader failed to compile: ' + gl.getShaderInfoLog(vertexShader));
+ }
+
+ var fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
+ gl.shaderSource(fragmentShader, fragmentShaderScript);
+ gl.compileShader(fragmentShader);
+ if(!gl.getShaderParameter(fragmentShader, gl.COMPILE_STATUS)) {
+ console.log('Fragment shader failed to compile: ' + gl.getShaderInfoLog(fragmentShader));
+ }
+
+ var program = gl.createProgram();
+ gl.attachShader(program, vertexShader);
+ gl.attachShader(program, fragmentShader);
+ gl.linkProgram(program);
+ if(!gl.getProgramParameter(program, gl.LINK_STATUS)) {
+ console.log('Program failed to link: ' + gl.getProgramInfoLog(program));
+ }
+
+ gl.useProgram(program);
+
+ this.shaderProgram = program;
+}
+
+/**
+ * Initialize vertex buffers and attach to shader program
+ */
+H264bsdCanvas.prototype.initBuffers = function() {
+ var gl = this.contextGL;
+ var program = this.shaderProgram;
+
+ var vertexPosBuffer = gl.createBuffer();
+ gl.bindBuffer(gl.ARRAY_BUFFER, vertexPosBuffer);
+ gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([1, 1, -1, 1, 1, -1, -1, -1]), gl.STATIC_DRAW);
+
+ var vertexPosRef = gl.getAttribLocation(program, 'vertexPos');
+ gl.enableVertexAttribArray(vertexPosRef);
+ gl.vertexAttribPointer(vertexPosRef, 2, gl.FLOAT, false, 0, 0);
+
+ var texturePosBuffer = gl.createBuffer();
+ gl.bindBuffer(gl.ARRAY_BUFFER, texturePosBuffer);
+ gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([1, 0, 0, 0, 1, 1, 0, 1]), gl.STATIC_DRAW);
+
+ var texturePosRef = gl.getAttribLocation(program, 'texturePos');
+ gl.enableVertexAttribArray(texturePosRef);
+ gl.vertexAttribPointer(texturePosRef, 2, gl.FLOAT, false, 0, 0);
+}
+
+/**
+ * Initialize GL textures and attach to shader program
+ */
+H264bsdCanvas.prototype.initTextures = function() {
+ var gl = this.contextGL;
+ var program = this.shaderProgram;
+
+ var yTextureRef = this.initTexture();
+ var ySamplerRef = gl.getUniformLocation(program, 'ySampler');
+ gl.uniform1i(ySamplerRef, 0);
+ this.yTextureRef = yTextureRef;
+
+ var uTextureRef = this.initTexture();
+ var uSamplerRef = gl.getUniformLocation(program, 'uSampler');
+ gl.uniform1i(uSamplerRef, 1);
+ this.uTextureRef = uTextureRef;
+
+ var vTextureRef = this.initTexture();
+ var vSamplerRef = gl.getUniformLocation(program, 'vSampler');
+ gl.uniform1i(vSamplerRef, 2);
+ this.vTextureRef = vTextureRef;
+}
+
+/**
+ * Create and configure a single texture
+ */
+H264bsdCanvas.prototype.initTexture = function() {
+ var gl = this.contextGL;
+
+ var textureRef = gl.createTexture();
+ gl.bindTexture(gl.TEXTURE_2D, textureRef);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
+ gl.bindTexture(gl.TEXTURE_2D, null);
+
+ return textureRef;
+}
+
+/**
+ * Draw the next output picture from the decoder in the best way available
+ */
+H264bsdCanvas.prototype.drawNextOutputPicture = function(decoder) {
+ var gl = this.contextGL;
+
+ if(gl) {
+ this.drawNextOutputPictureGL(decoder);
+ } else {
+ this.drawNextOutputPictureARGB(decoder);
+ }
+}
+
+/**
+ * Draw the next output picture using WebGL
+ */
+H264bsdCanvas.prototype.drawNextOutputPictureGL = function(decoder) {
+ var gl = this.contextGL;
+ var yTextureRef = this.yTextureRef;
+ var uTextureRef = this.uTextureRef;
+ var vTextureRef = this.vTextureRef;
+
+ var sizeMB = decoder.outputSizeMB();
+ var width = sizeMB.width * 16;
+ var height = sizeMB.height * 16;
+
+ gl.viewport(0, 0, width, height);
+
+ var i420Data = decoder.nextOutputPicture();
+
+ // I420 layout: a full-size Y plane followed by quarter-size U and V planes.
+ var lumaSize = width * height;
+ var chromaSize = lumaSize >> 2;
+
+ gl.activeTexture(gl.TEXTURE0);
+ gl.bindTexture(gl.TEXTURE_2D, yTextureRef);
+ gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, width, height, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, i420Data.subarray(0, lumaSize));
+
+ gl.activeTexture(gl.TEXTURE1);
+ gl.bindTexture(gl.TEXTURE_2D, uTextureRef);
+ gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, width/2, height/2, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, i420Data.subarray(lumaSize, lumaSize + chromaSize));
+
+ gl.activeTexture(gl.TEXTURE2);
+ gl.bindTexture(gl.TEXTURE_2D, vTextureRef);
+ gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, width/2, height/2, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, i420Data.subarray(lumaSize + chromaSize, lumaSize + 2 * chromaSize));
+
+ gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
+}
+
+/**
+ * Draw the next output picture as ARGB data on a 2D canvas.
+ */
+H264bsdCanvas.prototype.drawNextOutputPictureARGB = function(decoder) {
+ var ctx = this.context2D;
+
+ var sizeMB = decoder.outputSizeMB();
+ var width = sizeMB.width * 16;
+ var height = sizeMB.height * 16;
+
+ var argbData = decoder.nextOutputPictureARGB();
+
+ var imageData = ctx.createImageData(width, height);
+ imageData.data.set(argbData);
+ ctx.putImageData(imageData, 0, 0);
+}
--- /dev/null
+++ b/js/h264bsd_decoder.js
@@ -1,0 +1,258 @@
+//
+// Copyright (c) 2013 Sam Leitch. All rights reserved.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+// IN THE SOFTWARE.
+//
+
+/**
+ * This class wraps the details of the h264bsd library.
+ * The module argument is the Emscripten Module object provided globally by h264bsd_asm.js.
+ *
+ * In order to use this class, you first queue encoded data using queueData.
+ * Each call to decode() will decode a single encoded element.
+ * When decode() returns H264bsdDecoder.PIC_RDY, a picture is ready in the output buffer.
+ * The output buffer can be accessed by calling nextOutputPicture() or nextOutputPictureARGB().
+ * An output picture may also be rendered using an H264bsdCanvas.
+ * When you're done decoding, make sure to call release() to clean up internal buffers.
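+ *
+ * A minimal usage sketch (untested; assumes the Emscripten Module from h264bsd_asm.js
+ * and an ArrayBuffer of encoded data named `encodedData`, both hypothetical here):
+ *
+ *     var decoder = new H264bsdDecoder(Module);
+ *     decoder.queueData(encodedData);
+ *     var status = decoder.decode();
+ *     if(status === H264bsdDecoder.PIC_RDY) {
+ *         var i420Bytes = decoder.nextOutputPicture();
+ *     }
+ *     decoder.release();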
+ */
+function H264bsdDecoder(module) {
+ this.module = module;
+ this.released = false;
+
+ this.pInput = 0;
+ this.inputLength = 0;
+ this.inputOffset = 0;
+
+ this.pStorage = module._h264bsdAlloc();
+ module._h264bsdInit(this.pStorage, 0);
+};
+
+H264bsdDecoder.RDY = 0;
+H264bsdDecoder.PIC_RDY = 1;
+H264bsdDecoder.HDRS_RDY = 2;
+H264bsdDecoder.ERROR = 3;
+H264bsdDecoder.PARAM_SET_ERROR = 4;
+H264bsdDecoder.MEMALLOC_ERROR = 5;
+
+/**
+ * Clean up memory used by the decoder
+ */
+H264bsdDecoder.prototype.release = function() {
+ var module = this.module;
+ var pStorage = this.pStorage;
+ var pInput = this.pInput;
+
+ if(pStorage != 0) {
+ module._h264bsdShutdown(pStorage);
+ module._h264bsdFree(pStorage);
+ }
+
+ if(pInput != 0) {
+ module._free(pInput);
+ }
+
+ this.pStorage = 0;
+ this.pInput = 0;
+ this.inputLength = 0;
+ this.inputOffset = 0;
+ this.released = true;
+};
+
+/**
+ * Queue ArrayBuffer data to be decoded
+ */
+H264bsdDecoder.prototype.queueData = function(data) {
+ var module = this.module;
+ var pInput = this.pInput;
+ var inputLength = this.inputLength;
+ var inputOffset = this.inputOffset;
+
+ if(typeof data === 'undefined' || !(data instanceof ArrayBuffer)) {
+ throw new Error("data must be an ArrayBuffer instance");
+ }
+
+ data = new Uint8Array(data);
+
+ if(pInput === 0) {
+ inputLength = data.byteLength;
+ pInput = module._malloc(inputLength);
+ inputOffset = 0;
+
+ module.HEAPU8.set(data, pInput);
+ } else {
+ var remainingInputLength = inputLength - inputOffset;
+ var newInputLength = remainingInputLength + data.byteLength;
+ var pNewInput = module._malloc(newInputLength);
+
+ module._memcpy(pNewInput, pInput + inputOffset, remainingInputLength);
+ module.HEAPU8.set(data, pNewInput + remainingInputLength);
+
+ module._free(pInput);
+
+ pInput = pNewInput;
+ inputLength = newInputLength;
+ inputOffset = 0;
+ }
+
+ this.pInput = pInput;
+ this.inputLength = inputLength;
+ this.inputOffset = inputOffset;
+};
+
+/**
+ * Decodes the next NAL unit from the queued data.
+ * Returns H264bsdDecoder.PIC_RDY when a new picture is ready.
+ * Pictures can be accessed using nextOutputPicture() or nextOutputPictureARGB()
+ */
+H264bsdDecoder.prototype.decode = function(picId) {
+ var module = this.module;
+ var pStorage = this.pStorage;
+ var pInput = this.pInput;
+ var inputLength = this.inputLength;
+ var inputOffset = this.inputOffset;
+
+ if(pInput == 0) return H264bsdDecoder.ERROR;
+
+ var pBytesRead = module._malloc(4);
+
+ var retCode = module._h264bsdDecode(pStorage, pInput + inputOffset, inputLength - inputOffset, 0, pBytesRead);
+
+ var bytesRead = module.getValue(pBytesRead, 'i32');
+ module._free(pBytesRead);
+
+ inputOffset += bytesRead;
+
+ if(inputOffset >= inputLength) {
+ module._free(pInput);
+ pInput = 0;
+ inputOffset = 0;
+ inputLength = 0;
+ }
+
+ this.pInput = pInput;
+ this.inputLength = inputLength;
+ this.inputOffset = inputOffset;
+
+ return retCode;
+};
+
+/**
+ * Returns the next output picture as an I420 encoded image.
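+ * The returned buffer holds a full-size Y plane followed by quarter-size U and V
+ * planes, i.e. width * height * 3/2 bytes with width and height in pixels.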
+ */
+H264bsdDecoder.prototype.nextOutputPicture = function() {
+ var module = this.module;
+ var pStorage = this.pStorage;
+
+ var pPicId = module._malloc(4);
+ var pIsIdrPic = module._malloc(4);
+ var pNumErrMbs = module._malloc(4);
+
+ var pBytes = module._h264bsdNextOutputPicture(pStorage, pPicId, pIsIdrPic, pNumErrMbs);
+
+ // None of these values are currently used.
+ module._free(pPicId);
+ module._free(pIsIdrPic);
+ module._free(pNumErrMbs);
+
+ var outputSizeMB = this.outputSizeMB();
+ var outputLength = (outputSizeMB.width * 16) * (outputSizeMB.height * 16) * 3 / 2;
+
+ var outputBytes = new Uint8Array(module.HEAPU8.buffer, pBytes, outputLength);
+
+ return outputBytes;
+};
+
+/**
+ * Returns the next output picture as an ARGB encoded image.
+ * Note: There is extra overhead required to convert the image to ARGB.
+ * This method should be avoided if possible.
+ */
+H264bsdDecoder.prototype.nextOutputPictureARGB = function() {
+ var module = this.module;
+ var pStorage = this.pStorage;
+
+ var pPicId = module._malloc(4);
+ var pIsIdrPic = module._malloc(4);
+ var pNumErrMbs = module._malloc(4);
+
+ var pBytes = module._h264bsdNextOutputPictureARGB(pStorage, pPicId, pIsIdrPic, pNumErrMbs);
+
+ // None of these values are currently used.
+ module._free(pPicId);
+ module._free(pIsIdrPic);
+ module._free(pNumErrMbs);
+
+ var outputSizeMB = this.outputSizeMB();
+ var outputLength = (outputSizeMB.width * 16) * (outputSizeMB.height * 16) * 4;
+
+ var outputBytes = new Uint8Array(module.HEAPU8.buffer, pBytes, outputLength);
+
+ return outputBytes;
+};
+
+/**
+ * Returns an object containing the width and height of output pictures in MB.
+ * This value is only valid after at least one call to decode() has returned H264bsdDecoder.HDRS_RDY
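+ * For example, {width: 45, height: 30} corresponds to a 720x480 pixel picture, since each macroblock is 16x16 pixels.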
+ */
+H264bsdDecoder.prototype.outputSizeMB = function() {
+ var module = this.module;
+ var pStorage = this.pStorage;
+
+ var width = module._h264bsdPicWidth(pStorage);
+ var height = module._h264bsdPicHeight(pStorage);
+
+ return {'width': width, 'height': height};
+};
+
+/**
+ * Returns the info used to crop output images to their final viewing dimensions.
+ * If this method returns null no cropping info is provided and the full image should be presented.
+ */
+H264bsdDecoder.prototype.croppingInfo = function(){
+ var module = this.module;
+ var pStorage = this.pStorage;
+
+ var pCroppingFlag = module._malloc(4);
+ var pLeftOffset = module._malloc(4);
+ var pWidth = module._malloc(4);
+ var pTopOffset = module._malloc(4);
+ var pHeight = module._malloc(4);
+
+ module._h264bsdCroppingParams(pStorage, pCroppingFlag, pLeftOffset, pWidth, pTopOffset, pHeight);
+
+ var croppingFlag = module.getValue(pCroppingFlag, 'i32');
+ var leftOffset = module.getValue(pLeftOffset, 'i32');
+ var width = module.getValue(pWidth, 'i32');
+ var topOffset = module.getValue(pTopOffset, 'i32');
+ var height = module.getValue(pHeight, 'i32');
+
+ module._free(pCroppingFlag);
+ module._free(pLeftOffset);
+ module._free(pWidth);
+ module._free(pTopOffset);
+ module._free(pHeight);
+
+ if(croppingFlag === 0) return null;
+
+ return {
+ 'width': width,
+ 'height': height,
+ 'top': topOffset,
+ 'left': leftOffset
+ };
+};