| Index: content/test/data/media/getusermedia.html
|
| diff --git a/content/test/data/media/getusermedia.html b/content/test/data/media/getusermedia.html
|
| index 7481613291288c2a410d50ad87e206be27bfc33c..e248932fa757e9b3d06e40e7bfbe861d77664cfd 100644
|
| --- a/content/test/data/media/getusermedia.html
|
| +++ b/content/test/data/media/getusermedia.html
|
| @@ -10,15 +10,9 @@
|
|
|
| setAllEventsOccuredHandler(function() {
|
| gLocalStream.stop();
|
| - document.title = 'OK';
|
| - sendValueToTest(document.title);
|
| + reportTestSuccess();
|
| });
|
|
|
| - function sendValueToTest(value) {
|
| - window.domAutomationController.setAutomationId(0);
|
| - window.domAutomationController.send(value);
|
| - }
|
| -
|
| function getSources() {
|
| MediaStreamTrack.getSources(function(devices) {
|
| document.title = 'Sources Available';
|
| @@ -29,30 +23,42 @@
|
| // Creates a MediaStream and renders it locally. When the video is detected to
|
| // be rolling, the title is changed and the stream should be stopped.
|
| function getUserMediaAndStop(constraints) {
|
| - document.title = 'Calling GetUserMedia';
|
| + console.log('Calling getUserMediaAndStop.');
|
| navigator.webkitGetUserMedia(
|
| constraints,
|
| function(stream) { displayAndDetectVideo(stream, stopVideoTrack); },
|
| failedCallback);
|
| }
|
|
|
| + // Requests getUserMedia and expects it to fail.
|
| + function getUserMediaAndExpectFailure(constraints) {
|
| + console.log('Calling getUserMediaAndExpectFailure.');
|
| + navigator.webkitGetUserMedia(
|
| + constraints,
|
| + function(stream) { failTest('Unexpectedly succeeded getUserMedia.'); },
|
| + function(error) { reportTestSuccess(); });
|
| + }
|
| +
|
| // Creates a MediaStream and renders it locally. When the video is detected to
|
| - // be rolling, the title should be changed and the stream is let roll for a
|
| - // number |waitTimeInSeconds| and then it should be stopped.
|
| - function getUserMediaAndWaitAndStop(constraints, waitTimeInSeconds) {
|
| + // be rolling, we return 'ok-stream-running' through the automation controller.
|
| + function getUserMediaAndGetStreamUp(constraints, waitTimeInSeconds) {
|
| + console.log('Calling getUserMediaAndGetStreamUp.');
|
| navigator.webkitGetUserMedia(
|
| constraints,
|
| function(stream) {
|
| displayAndDetectVideo(
|
| stream,
|
| function() {
|
| - waitAndStopVideoTrack(waitTimeInSeconds);
|
| + sendValueToTest('ok-stream-running');
|
| });
|
| },
|
| failedCallback);
|
| }
|
|
|
| + // Gets a video stream up, analyses it and returns the aspect ratio to the
|
| + // test through the automation controller.
|
| function getUserMediaAndAnalyseAndStop(constraints) {
|
| + console.log('Calling getUserMediaAndAnalyseAndStop.');
|
| navigator.webkitGetUserMedia(
|
| constraints, displayDetectAndAnalyzeVideo, failedCallback);
|
| }
|
| @@ -60,6 +66,7 @@
|
| // This test that a MediaStream can be cloned and that the clone can
|
| // be rendered.
|
| function getUserMediaAndClone() {
|
| + console.log('Calling getUserMediaAndClone.');
|
| navigator.webkitGetUserMedia({video: true, audio: true},
|
| createAndRenderClone, failedCallback);
|
| }
|
| @@ -69,36 +76,35 @@
|
| // streams have the same source, both video streams should stop. If they do,
|
| // the test succeeds.
|
| function twoGetUserMediaAndStop(constraints) {
|
| - document.title = 'Calling Two GetUserMedia';
|
| + console.log('Calling Two GetUserMedia');
|
| navigator.webkitGetUserMedia(
|
| constraints,
|
| - function(stream) {
|
| - displayAndDetectVideo(stream, requestSecondGetUserMedia);
|
| + function(stream) {
|
| + displayAndDetectVideo(stream, requestSecondGetUserMedia);
|
| },
|
| failedCallback);
|
| var requestSecondGetUserMedia = function() {
|
| navigator.webkitGetUserMedia(
|
| constraints,
|
| - function(stream) {
|
| - displayIntoVideoElement(stream,
|
| + function(stream) {
|
| + displayIntoVideoElement(stream,
|
| stopStreamAndVerifyAllLocalViewsDontPlayVideo, 'local-view-2');
|
| },
|
| failedCallback);
|
| };
|
| -
|
| +
|
| var stopStreamAndVerifyAllLocalViewsDontPlayVideo = function() {
|
| gLocalStream.getVideoTracks()[0].stop();
|
| -
|
| +
|
| // Since local-view and local-view-2 are playing the video from the same
|
| // source, both of them should stop.
|
| waitForVideoToStop('local-view');
|
| waitForVideoToStop('local-view-2');
|
| - };
|
| + };
|
| }
|
|
|
| function failedCallback(error) {
|
| - document.title = 'GetUserMedia call failed with code ' + error.code;
|
| - sendValueToTest(document.title);
|
| + failTest('GetUserMedia call failed with code ' + error.code);
|
| }
|
|
|
| function plugStreamIntoVideoElement(stream, videoElement) {
|
| @@ -109,7 +115,6 @@
|
|
|
| function displayIntoVideoElement(stream, callback, videoElement) {
|
| plugStreamIntoVideoElement(stream, videoElement);
|
| - document.title = 'Waiting for video...';
|
| detectVideoPlaying(videoElement, callback);
|
| }
|
|
|
| @@ -128,12 +133,12 @@
|
| // work with audio devices and not all bots has a microphone.
|
| new_stream = new webkitMediaStream();
|
| new_stream.addTrack(stream.getVideoTracks()[0]);
|
| - expectEquals(new_stream.getVideoTracks().length, 1);
|
| + assertEquals(new_stream.getVideoTracks().length, 1);
|
| if (stream.getAudioTracks().length > 0) {
|
| new_stream.addTrack(stream.getAudioTracks()[0]);
|
| - expectEquals(new_stream.getAudioTracks().length, 1);
|
| + assertEquals(new_stream.getAudioTracks().length, 1);
|
| new_stream.removeTrack(new_stream.getAudioTracks()[0]);
|
| - expectEquals(new_stream.getAudioTracks().length, 0);
|
| + assertEquals(new_stream.getAudioTracks().length, 0);
|
| }
|
|
|
| var newStreamUrl = URL.createObjectURL(new_stream);
|
| @@ -147,19 +152,102 @@
|
| }
|
|
|
| function waitAndStopVideoTrack(waitTimeInSeconds) {
|
| - document.title = 'Running...';
|
| setTimeout(stopVideoTrack, waitTimeInSeconds * 1000);
|
| }
|
|
|
| function analyzeVideo() {
|
| - document.title = 'Waiting for video...';
|
| - addExpectedEvent();
|
| detectAspectRatio(function(aspectRatio) {
|
| - document.title = aspectRatio;
|
| - eventOccured();
|
| + sendValueToTest(aspectRatio);
|
| });
|
| }
|
|
|
| + // This function tries to calculate the aspect ratio shown by the fake capture
|
| + // device in the video tag. For this, we count the number of light green
|
| + // pixels along |aperture| pixels on the positive X and Y axis starting from
|
| + // the center of the image. In this very center there should be a time-varying
|
| + // pacman; the algorithm counts for a couple of iterations and keeps the
|
| + // maximum number of light green pixels in both directions. From this data
|
| + // the aspect ratio is calculated relative to a 320x240 window, so 4:3 would
|
| + // show as a 1. Furthermore, since an original non-4:3 might be letterboxed or
|
| + // cropped, the actual X and Y pixel amounts are compared with the fake video
|
| + // capture expected pacman radius (see further below).
|
| + function detectAspectRatio(callback) {
|
| + var width = VIDEO_TAG_WIDTH;
|
| + var height = VIDEO_TAG_HEIGHT;
|
| + var videoElement = $('local-view');
|
| + var canvas = $('local-view-canvas');
|
| +
|
| + var maxLightGreenPixelsX = 0;
|
| + var maxLightGreenPixelsY = 0;
|
| +
|
| + var aperture = Math.min(width, height) / 2;
|
| + var iterations = 0;
|
| + var maxIterations = 10;
|
| +
|
| + var detectorFunction = function() {
|
| + var context = canvas.getContext('2d');
|
| + context.drawImage(videoElement, 0, 0, width, height);
|
| +
|
| + // We are interested in a window starting from the center of the image
|
| + // where we expect the circle from the fake video capture to be rolling.
|
| + var pixels = context.getImageData(width / 2, height / 2,
|
| + aperture, aperture);
|
| +
|
| + var lightGreenPixelsX = 0;
|
| + var lightGreenPixelsY = 0;
|
| +
|
| + // Walk horizontally counting light green pixels.
|
| + for (var x = 0; x < aperture; ++x) {
|
| + if (pixels.data[4 * x + 1] != COLOR_BACKGROUND_GREEN)
|
| + lightGreenPixelsX++;
|
| + }
|
| + // Walk vertically counting light green pixels.
|
| + for (var y = 0; y < aperture; ++y) {
|
| + if (pixels.data[4 * y * aperture + 1] != 135)
|
| + lightGreenPixelsY++;
|
| + }
|
| + if (lightGreenPixelsX > maxLightGreenPixelsX &&
|
| + lightGreenPixelsX < aperture)
|
| + maxLightGreenPixelsX = lightGreenPixelsX;
|
| + if (lightGreenPixelsY > maxLightGreenPixelsY &&
|
| + lightGreenPixelsY < aperture)
|
| + maxLightGreenPixelsY = lightGreenPixelsY;
|
| +
|
| + var detectedAspectRatioString = "";
|
| + if (++iterations > maxIterations) {
|
| + clearInterval(detectorFunction);
|
| + observedAspectRatio = maxLightGreenPixelsY / maxLightGreenPixelsX;
|
| + // At this point the observed aspect ratio is either 1, for undistorted
|
| + // 4:3, or some other aspect ratio that is seen as distorted.
|
| + if (Math.abs(observedAspectRatio - 1.333) < 0.1)
|
| + detectedAspectRatioString = "16:9";
|
| + else if (Math.abs(observedAspectRatio - 1.20) < 0.1)
|
| + detectedAspectRatioString = "16:10";
|
| + else if (Math.abs(observedAspectRatio - 1.0) < 0.1)
|
| + detectedAspectRatioString = "4:3";
|
| + else
|
| + detectedAspectRatioString = "UNKNOWN aspect ratio";
|
| + console.log(detectedAspectRatioString + " observed aspect ratio (" +
|
| + observedAspectRatio + ")");
|
| +
|
| + // The FakeVideoCapture calculates the circle radius as
|
| + // std::min(capture_format_.width, capture_format_.height) / 4;
|
| + // we do the same and see if both dimensions are scaled, meaning
|
| + // we started from a cropped or stretched image.
|
| + var nonDistortedRadius = Math.min(width, height) / 4;
|
| + if ((maxLightGreenPixelsX != nonDistortedRadius) &&
|
| + (maxLightGreenPixelsY != nonDistortedRadius)) {
|
| + detectedAspectRatioString += " cropped";
|
| + } else
|
| + detectedAspectRatioString += " letterbox";
|
| +
|
| + console.log("Original image is: " + detectedAspectRatioString);
|
| + callback(detectedAspectRatioString);
|
| + }
|
| + }
|
| +
|
| + setInterval(detectorFunction, 50);
|
| + }
|
| </script>
|
| </head>
|
| <body>
|
|
|