| Index: content/test/data/media/getusermedia-depth-capture.html
|
| diff --git a/content/test/data/media/getusermedia-depth-capture.html b/content/test/data/media/getusermedia-depth-capture.html
|
| index 521fc0730c17322bf833ccb5588b0ac2c2d4e21f..5dfff759a37b827981fa447a5d123c0ecb9e9771 100644
|
| --- a/content/test/data/media/getusermedia-depth-capture.html
|
| +++ b/content/test/data/media/getusermedia-depth-capture.html
|
| @@ -4,6 +4,15 @@
|
| <script type="text/javascript" src="depth_stream_test_utilities.js"></script>
|
| <script type="text/javascript">
|
|
|
| + function cubemapFaces(gl) {
|
| + return [gl.TEXTURE_CUBE_MAP_POSITIVE_X,
|
| + gl.TEXTURE_CUBE_MAP_NEGATIVE_X,
|
| + gl.TEXTURE_CUBE_MAP_POSITIVE_Y,
|
| + gl.TEXTURE_CUBE_MAP_NEGATIVE_Y,
|
| + gl.TEXTURE_CUBE_MAP_POSITIVE_Z,
|
| + gl.TEXTURE_CUBE_MAP_NEGATIVE_Z];
|
| + }
|
| +
|
| $ = function(id) {
|
| return document.getElementById(id);
|
| };
|
| @@ -12,8 +21,8 @@
|
| reportTestSuccess();
|
| });
|
|
|
| - // testVideoToImageBitmap is a layout test that we run here because
|
| - // it requires --use-fake-device-for-media-capture.
|
| + // testVideoToImageBitmap and the tests below are layout tests that we
|
| + // run here because they require --use-fake-device-for-media-capture.
|
| function getDepthStreamAndCallCreateImageBitmap() {
|
| console.log('Calling getDepthStreamAndCallCreateImageBitmap');
|
| getFake16bitStream().then(function(stream) {
|
| @@ -27,6 +36,40 @@
|
| failedCallback);
|
| }
|
|
|
| + function depthStreamToRGBAUint8Texture() {
|
| + console.log('Calling depthStreamToRGBAUint8Texture');
|
| + getFake16bitStream().then(function(stream) {
|
| + detectVideoInLocalView1(stream, function() {
|
| + testVideoToRGBA8Texture('local-view-1', function(skip_info) {
|
| + if (skip_info) {
|
| + console.log("SKIP depthStreamToRGBAUint8Texture: " +
|
| + skip_info);
|
| + }
|
| + stream.getVideoTracks()[0].stop();
|
| + waitForVideoToStop('local-view-1');
|
| + }, failedCallback);
|
| + });
|
| + },
|
| + failedCallback);
|
| + }
|
| +
|
| + function depthStreamToRGBAFloatTexture() {
|
| + console.log('Calling depthStreamToRGBAFloatTexture');
|
| + getFake16bitStream().then(function(stream) {
|
| + detectVideoInLocalView1(stream, function() {
|
| + testVideoToRGBA32FTexture('local-view-1', function(skip_info) {
|
| + if (skip_info) {
|
| + console.log("SKIP depthStreamToRGBAFloatTexture: " +
|
| + skip_info);
|
| + }
|
| + stream.getVideoTracks()[0].stop();
|
| + waitForVideoToStop('local-view-1');
|
| + }, failedCallback);
|
| + });
|
| + },
|
| + failedCallback);
|
| + }
|
| +
|
| function failedCallback(error) {
|
| failTest('GetUserMedia call failed with error name ' + error.name);
|
| }
|
| @@ -40,14 +83,230 @@
|
| attachMediaStream(stream, 'local-view-1');
|
| detectVideoPlaying('local-view-1', callback);
|
| }
|
| +
|
| + function testVideoToImageBitmap(videoElementName, success, error) {
|
| + var bitmaps = {};
|
| + var video = $(videoElementName);
|
| + var canvas = document.createElement('canvas');
|
| + canvas.width = 96;
|
| + canvas.height = 96;
|
| + document.body.appendChild(canvas);
|
| + var p1 = createImageBitmap(video).then(function(imageBitmap) {
|
| + return runImageBitmapTest(imageBitmap, canvas, false); });
|
| + var p2 = createImageBitmap(video,
|
| + {imageOrientation: "none", premultiplyAlpha: "premultiply"}).then(
|
| + function(imageBitmap) {
|
| + return runImageBitmapTest(imageBitmap, canvas, false); });
|
| + var p3 = createImageBitmap(video,
|
| + {imageOrientation: "none", premultiplyAlpha: "default"}).then(
|
| + function(imageBitmap) {
|
| + return runImageBitmapTest(imageBitmap, canvas, false); });
|
| + var p4 = createImageBitmap(video,
|
| + {imageOrientation: "none", premultiplyAlpha: "none"}).then(
|
| + function(imageBitmap) {
|
| + return runImageBitmapTest(imageBitmap, canvas, false); });
|
| + var p5 = createImageBitmap(video,
|
| + {imageOrientation: "flipY", premultiplyAlpha: "premultiply"}).then(
|
| + function(imageBitmap) {
|
| + return runImageBitmapTest(imageBitmap, canvas, true); });
|
| + var p6 = createImageBitmap(video,
|
| + {imageOrientation: "flipY", premultiplyAlpha: "default"}).then(
|
| + function(imageBitmap) {
|
| + return runImageBitmapTest(imageBitmap, canvas, true); });
|
| + var p7 = createImageBitmap(video,
|
| + {imageOrientation: "flipY", premultiplyAlpha: "none"}).then(
|
| + function(imageBitmap) {
|
| + return runImageBitmapTest(imageBitmap, canvas, true); });
|
| + return Promise.all([p1, p2, p3, p4, p5, p6, p7]).then(success, reason => {
|
| + return error({name: reason});
|
| + });
|
| + }
|
| +
|
| + function runImageBitmapTest(bitmap, canvas, flip_y) {
|
| + var context = canvas.getContext('2d');
|
| + context.drawImage(bitmap, 0, 0);
|
| + var imageData = context.getImageData(0, 0, canvas.width, canvas.height);
|
| + // Fake capture device 96x96 depth image is gradient. See also
|
| + // Draw16BitGradient in fake_video_capture_device.cc.
|
| + var color_step = 255.0 / (canvas.width + canvas.height);
|
| + return verifyPixels(imageData.data, canvas.width, canvas.height, flip_y,
|
| + color_step, 255, 2, "ImageBitmap");
|
| + }
|
| +
|
| + function testVideoToRGBA32FTexture(videoElementName, success, error) {
|
| + var video = $(videoElementName);
|
| + var canvas = document.createElement('canvas');
|
| + canvas.width = 96;
|
| + canvas.height = 96;
|
| + var gl = canvas.getContext('webgl');
|
| + if (!gl)
|
| + return error({name: "WebGL is not available."});
|
| + if (!gl.getExtension("OES_texture_float"))
|
| + return error({name: "OES_texture_float extension is not available."});
|
| + return testVideoToTexture(gl, video, gl.RGBA, gl.RGBA, gl.FLOAT,
|
| + readAndVerifyRGBA32F, success, error);
|
| + }
|
| +
|
| + function testVideoToRGBA8Texture(videoElementName, success, error) {
|
| + var video = $(videoElementName);
|
| + var canvas = document.createElement('canvas');
|
| + canvas.width = 96;
|
| + canvas.height = 96;
|
| + var gl = canvas.getContext('webgl');
|
| + if (!gl)
|
| + return error({name: "WebGL is not available."});
|
| + return testVideoToTexture(gl, video, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE,
|
| + readAndVerifyRGBA8, success, error);
|
| + }
|
| +
|
| + function testVideoToTexture(gl, video, internalformat, format, type,
|
| + readAndVerifyFunction, success, error) {
|
| + // Create framebuffer that we will use for reading back the texture.
|
| + var fb = gl.createFramebuffer();
|
| + gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
|
| + var tests = [];
|
| + // Premultiply alpha is ignored here, but we test both values anyway.
|
| + var cases = [
|
| + {flip_y: false, premultiply_alpha: true},
|
| + {flip_y: true, premultiply_alpha: false}
|
| + ];
|
| + for (var c = 0; c < cases.length; ++c) {
|
| + var flip_y = cases[c].flip_y;
|
| + var premultiply = cases[c].premultiply_alpha;
|
| + uploadVideoToTexture2D(gl, video, internalformat, format, type, flip_y,
|
| + premultiply);
|
| + tests.push(readAndVerifyFunction(gl, video.width, video.height, flip_y,
|
| + "TexImage_TEXTURE_2D"));
|
| + uploadVideoToSubTexture2D(gl, video, internalformat, format, type, flip_y,
|
| + premultiply);
|
| + tests.push(readAndVerifyFunction(gl, video.width, video.height, flip_y,
|
| + "TexSubImage_TEXTURE_2D"));
|
| +
|
| + // cubemap texImage2D.
|
| + var tex = uploadVideoToTextureCubemap(gl, video, internalformat, format,
|
| + type, flip_y, premultiply);
|
| + for (var i = 0; i < cubemapFaces(gl).length; ++i) {
|
| + // Attach the texture to framebuffer for readback.
|
| + gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0,
|
| + cubemapFaces(gl)[i], tex, 0);
|
| + tests.push(readAndVerifyFunction(gl, video.width, video.height,
|
| + flip_y,
|
| + "TexImage_" + cubemapFaces(gl)[i]));
|
| + }
|
| +
|
| + // cubemap texSubImage2D.
|
| + tex = uploadVideoToSubTextureCubemap(gl, video, internalformat, format,
|
| + type, flip_y, premultiply);
|
| + for (var i = 0; i < cubemapFaces(gl).length; ++i) {
|
| + // Attach the texture to framebuffer for readback.
|
| + gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0,
|
| + cubemapFaces(gl)[i], tex, 0);
|
| + tests.push(readAndVerifyFunction(gl, video.width, video.height,
|
| + flip_y,
|
| + "TexSubImage_" + cubemapFaces(gl)[i]));
|
| + }
|
| + }
|
| + return Promise.all(tests).then(success, reason => {
|
| + return error({name: reason});
|
| + });
|
| + }
|
| +
|
| + // Test setup helper method: create the texture and set texture parameters.
|
| + // For cube maps, target is gl.TEXTURE_CUBE_MAP; for 2D textures, it is
|
| + // gl.TEXTURE_2D.
|
| + function createTexture(gl, target, video, flip_y, premultiply_alpha) {
|
| + var tex = gl.createTexture();
|
| + gl.bindTexture(target, tex);
|
| + gl.texParameteri(target, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
|
| + gl.texParameteri(target, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
|
| + gl.texParameteri(target, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
|
| + gl.texParameteri(target, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
|
| + gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, flip_y);
|
| + gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, premultiply_alpha);
|
| + return tex;
|
| + }
|
| +
|
| + function uploadVideoToTexture2D(gl, video, internalformat, format, type,
|
| + flip_y, premultiply_alpha) {
|
| + var tex = createTexture(gl, gl.TEXTURE_2D, video, flip_y,
|
| + premultiply_alpha);
|
| + // Attach the texture to framebuffer for readback.
|
| + gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D,
|
| + tex, 0);
|
| + gl.texImage2D(gl.TEXTURE_2D, 0, internalformat, format, type, video);
|
| + return tex;
|
| + }
|
| +
|
| + function uploadVideoToSubTexture2D(gl, video, internalformat, format, type,
|
| + flip_y, premultiply_alpha) {
|
| + var tex = createTexture(gl, gl.TEXTURE_2D, video, flip_y,
|
| + premultiply_alpha);
|
| + // Attach the texture to framebuffer for readback.
|
| + gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D,
|
| + tex, 0);
|
| + gl.texImage2D(gl.TEXTURE_2D, 0, internalformat, video.width, video.height,
|
| + 0, format, type, null);
|
| + gl.texSubImage2D(gl.TEXTURE_2D, 0, 0, 0, format, type, video);
|
| + return tex;
|
| + }
|
| +
|
| + function uploadVideoToTextureCubemap(gl, video, internalformat, format, type,
|
| + flip_y, premultiply_alpha) {
|
| + var tex = createTexture(gl, gl.TEXTURE_CUBE_MAP, video, flip_y,
|
| + premultiply_alpha);
|
| + for (var i = 0; i < cubemapFaces(gl).length; ++i) {
|
| + gl.texImage2D(cubemapFaces(gl)[i], 0, internalformat, format, type,
|
| + video);
|
| + }
|
| + return tex;
|
| + }
|
| +
|
| + function uploadVideoToSubTextureCubemap(gl, video, internalformat, format,
|
| + type, flip_y, premultiply_alpha) {
|
| + var tex = createTexture(gl, gl.TEXTURE_CUBE_MAP, video, flip_y,
|
| + premultiply_alpha);
|
| + for (var i = 0; i < cubemapFaces(gl).length; ++i) {
|
| + gl.texImage2D(cubemapFaces(gl)[i], 0, internalformat, video.width,
|
| + video.height, 0, format, type, null);
|
| + gl.texSubImage2D(cubemapFaces(gl)[i], 0, 0, 0, format, type, video);
|
| + }
|
| + return tex;
|
| + }
|
| +
|
| + function readAndVerifyRGBA8(gl, width, height, flip_y, test_name) {
|
| + var arr = new Uint8Array(width * height * 4);
|
| + gl.readPixels(0, 0, width, height, gl.RGBA, gl.UNSIGNED_BYTE, arr);
|
| + var color_step = 255.0 / (width + height);
|
| + return verifyPixels(arr, width, height, flip_y, color_step,
|
| + 255 /*wrap_around*/, 2 /*tolerance*/, test_name);
|
| + }
|
| +
|
| + function readAndVerifyRGBA32F(gl, width, height, flip_y, test_name) {
|
| + var arr = new Float32Array(width * height * 4);
|
| + gl.readPixels(0, 0, width, height, gl.RGBA, gl.FLOAT, arr);
|
| + var color_step = 1.0 / (width + height);
|
| + return verifyPixels(arr, width, height, flip_y, color_step,
|
| + 1.0 /*wrap_around*/, 1.5/65535 /*tolerance*/,
|
| + test_name);
|
| + }
|
| +
|
| + function onLoad() {
|
| + var query = /query=(.*)/.exec(window.location.href);
|
| + if (!query)
|
| + return;
|
| + if (query[1] == "RGBAUint8")
|
| + depthStreamToRGBAUint8Texture();
|
| + else if (query[1] == "RGBAFloat")
|
| + depthStreamToRGBAFloatTexture();
|
| + }
|
| </script>
|
| </head>
|
| -<body>
|
| +<body onload="onLoad()">
|
| <table border="0">
|
| - <!-- Canvases are named after their corresponding video elements. -->
|
| <tr>
|
| <td><video id="local-view-1" width="96" height="96" autoplay
|
| style="display:none"></video></td>
|
| + <!-- The canvas is used to detect when video starts and stops. -->
|
| <td><canvas id="local-view-1-canvas" width="96" height="96"
|
| style="display:none"></canvas></td>
|
| </tr>
|
|
|