| Index: chrome/test/data/extensions/api_test/tab_capture/end_to_end.js
|
| diff --git a/chrome/test/data/extensions/api_test/tab_capture/end_to_end.js b/chrome/test/data/extensions/api_test/tab_capture/end_to_end.js
|
| index 912d445a3bc3ae3dac4383ec5526d984e10e6d68..47c23772f4c142c51c67e530dd5a439a69c6da8b 100644
|
| --- a/chrome/test/data/extensions/api_test/tab_capture/end_to_end.js
|
| +++ b/chrome/test/data/extensions/api_test/tab_capture/end_to_end.js
|
| @@ -3,156 +3,207 @@
|
| // found in the LICENSE file.
|
|
|
| // The tests here cover the end-to-end functionality of tab capturing and
|
| -// playback as video. The page generates a test signal (a full color fill), and
|
| -// the rendering output of the tab is captured into a LocalMediaStream. Then,
|
| -// the LocalMediaStream is plugged into a video element for playback, and a
|
| -// canvas is used to examine the frames of the video for expected content.
|
| +// playback as video. The page generates video test patterns that rotate
|
| +// cyclically, and the rendering output of the tab is captured into a
|
| +// LocalMediaStream. This stream is then piped into a video element for
|
| +// playback, and a canvas is used to examine the frames of the video for
|
| +// expected content. The stream may be plumbed one of two ways, depending on
|
| +// the 'method' query param:
|
| //
|
| -// A previous version of this test used a polling scheme and two separate tabs
|
| -// with very little control logic. This setup resulted in flakiness, as there
|
| -// were numerous issues that could cause the test to time out. This new version
|
| -// uses an entirely event-based scheme, which ensures everything is synchronized
|
| -// as the test advances through its stages.
|
| -
|
| -
|
| -// Video needs to be global, or the big bad garbage collector will come and
|
| -// huff and puff it all away.
|
| -var video = null;
|
| -
|
| -function TestStream(stream) {
|
| - chrome.test.assertTrue(stream != null);
|
| -
|
| - // The test source is a color fill of red, then green, then blue.
|
| - var colors = [ [ 255, 0, 0 ], [ 0, 255, 0 ], [ 0, 0, 255 ] ];
|
| - var curColorIdx = 0;
|
| -
|
| - // Create video and canvas elements, but no need to append them to the
|
| - // DOM.
|
| - video = document.createElement("video");
|
| - video.width = 64;
|
| - video.height = 48;
|
| - video.addEventListener("error", chrome.test.fail);
|
| - var canvas = document.createElement("canvas");
|
| -
|
| - function updateTestDocument() {
|
| - document.body.style.backgroundColor =
|
| - "rgb(" + colors[curColorIdx] + ")";
|
| -
|
| - // Important: Blink the testing message so that the capture pipeline
|
| - // will observe drawing updates and continue to produce video frames.
|
| - var message = document.getElementById("message");
|
| - if (!message.blinkInterval) {
|
| - message.innerHTML = "Testing...";
|
| - message.blinkInterval = setInterval(function toggleVisibility() {
|
| - message.style.visibility =
|
| - message.style.visibility == "hidden" ? "visible" : "hidden";
|
| - }, 500);
|
| +// local: LocalMediaStream --> DOM Video Element
|
| +// webrtc: LocalMediaStream --> PeerConnection (sender)
|
| +// --> PeerConnection (receiver) --> DOM Video Element
|
| +
|
| +// The test pattern cycles as a color fill of red, then green, then blue.
|
| +var colors = [ [ 255, 0, 0 ], [ 0, 255, 0 ], [ 0, 0, 255 ] ];
|
| +var curIdx = 0;
|
| +
|
| +// Capture parameters.
|
| +var width = 64;
|
| +var height = 48;
|
| +var frameRate = 15;
|
| +
|
| +// The stream to playback in the video element.
|
| +var receiveStream = null;
|
| +
|
| +// waitForExpectedColors() removes elements from this array as each is observed.
|
| +// When it becomes empty, the test succeeds.
|
| +var expectedColors = [ [ 255, 0, 0 ], [ 0, 255, 0 ], [ 0, 0, 255 ] ];
|
| +
|
| +function updateTestPattern() {
|
| + if (!this.canvas) {
|
| + this.canvas = document.createElement("canvas");
|
| + this.canvas.width = 320;
|
| + this.canvas.height = 200;
|
| + this.canvas.style.position = "absolute";
|
| + this.canvas.style.top = "0px";
|
| + this.canvas.style.left = "0px";
|
| + this.canvas.style.width = "100%";
|
| + this.canvas.style.height = "100%";
|
| + document.body.appendChild(this.canvas);
|
| + }
|
| + var context = this.canvas.getContext("2d");
|
| + // Fill with solid color.
|
| + context.fillStyle = "rgb(" + colors[curIdx] + ")";
|
| + context.fillRect(0, 0, this.canvas.width, this.canvas.height);
|
| + // Draw the circle that moves around the page.
|
| + context.fillStyle = "rgb(" + colors[(curIdx + 1) % colors.length] + ")";
|
| + context.beginPath();
|
| + if (!this.frameNumber) {
|
| + this.frameNumber = 1;
|
| + } else {
|
| + ++this.frameNumber;
|
| + }
|
| + var i = this.frameNumber % 200;
|
| + var t = (this.frameNumber + 3000) * (0.01 + i / 8000.0);
|
| + var x = (Math.sin(t) * 0.45 + 0.5) * this.canvas.width;
|
| + var y = (Math.cos(t * 0.9) * 0.45 + 0.5) * this.canvas.height;
|
| + context.arc(x, y, 16, 0, 2 * Math.PI, false);
|
| + context.closePath();
|
| + context.fill();
|
| +}
|
| +
|
| +function renderTestPatternLoop() {
|
| + requestAnimationFrame(renderTestPatternLoop);
|
| + updateTestPattern();
|
| +
|
| + if (!this.stepTimeMillis) {
|
| + this.stepTimeMillis = 100;
|
| + }
|
| + var now = new Date().getTime();
|
| + if (!this.nextSteppingAt) {
|
| + this.nextSteppingAt = now + this.stepTimeMillis;
|
| + } else if (now >= this.nextSteppingAt) {
|
| + ++curIdx;
|
| + if (curIdx >= colors.length) { // Completed a cycle.
|
| + curIdx = 0;
|
| + // Increase the wait time between switching test patterns for overloaded
|
| + // bots that aren't capturing all the frames of video.
|
| + this.stepTimeMillis *= 1.25;
|
| }
|
| + this.nextSteppingAt = now + this.stepTimeMillis;
|
| + }
|
| +}
|
| +
|
| +function waitForExpectedColors(colorDeviation) {
|
| + // If needed, create the video and canvas elements, but no need to append them
|
| + // to the DOM.
|
| + if (!this.video) {
|
| + this.video = document.createElement("video");
|
| + this.video.width = width;
|
| + this.video.height = height;
|
| + this.video.addEventListener("error", chrome.test.fail);
|
| + this.video.src = URL.createObjectURL(receiveStream);
|
| + this.video.play();
|
| +
|
| + this.readbackCanvas = document.createElement("canvas");
|
| + this.readbackCanvas.width = width;
|
| + this.readbackCanvas.height = height;
|
| }
|
|
|
| - function checkVideoForFillColor(event) {
|
| - var curColor = colors[curColorIdx];
|
| - var width = video.videoWidth;
|
| - var height = video.videoHeight;
|
| + // Only bother examining a video frame if the video timestamp has advanced.
|
| + var currentVideoTimestamp = this.video.currentTime;
|
| + if (!this.lastVideoTimestamp ||
|
| + this.lastVideoTimestamp < currentVideoTimestamp) {
|
| + this.lastVideoTimestamp = currentVideoTimestamp;
|
|
|
| // Grab a snapshot of the center pixel of the video.
|
| - canvas.width = width;
|
| - canvas.height = height;
|
| - var ctx = canvas.getContext("2d");
|
| + var ctx = this.readbackCanvas.getContext("2d");
|
| ctx.drawImage(video, 0, 0, width, height);
|
| var imageData = ctx.getImageData(width / 2, height / 2, 1, 1);
|
| var pixel = [ imageData.data[0], imageData.data[1], imageData.data[2] ];
|
|
|
| - // Check whether the pixel is of the expected color value, and proceed
|
| - // to the next test stage when a match is encountered. Note: The video
|
| - // encode/decode process loses color accuracy, which is accounted for
|
| - // here.
|
| - if (Math.abs(pixel[0] - curColor[0]) < 10 &&
|
| - Math.abs(pixel[1] - curColor[1]) < 10 &&
|
| - Math.abs(pixel[2] - curColor[2]) < 10) {
|
| - console.debug("Observed expected color RGB(" + curColor +
|
| - ") in the video as RGB(" + pixel + ")");
|
| - // Continue with the next color; or, if there are no more colors,
|
| - // consider the test successful.
|
| - if (curColorIdx + 1 < colors.length) {
|
| - ++curColorIdx;
|
| - updateTestDocument();
|
| - } else {
|
| - video.removeEventListener("timeupdate", checkVideoForFillColor);
|
| - stream.stop();
|
| - chrome.test.succeed();
|
| + // Does the pixel match one of the expected colors?
|
| + for (var i = 0; i < expectedColors.length; ++i) {
|
| + var curColor = expectedColors[i];
|
| + if (Math.abs(pixel[0] - curColor[0]) <= colorDeviation &&
|
| + Math.abs(pixel[1] - curColor[1]) <= colorDeviation &&
|
| + Math.abs(pixel[2] - curColor[2]) <= colorDeviation) {
|
| + console.debug("Observed expected color RGB(" + curColor +
|
| + ") in the video as RGB(" + pixel + ")");
|
| + expectedColors.splice(i, 1);
|
| }
|
| }
|
| }
|
| - // Play the LocalMediaStream in the video element.
|
| - video.src = URL.createObjectURL(stream);
|
| - video.play();
|
|
|
| - // Kick it off.
|
| - updateTestDocument();
|
| - video.addEventListener("timeupdate", checkVideoForFillColor);
|
| + if (expectedColors.length == 0) {
|
| + chrome.test.succeed();
|
| + } else {
|
| + setTimeout(function () { waitForExpectedColors(colorDeviation); },
|
| + 1000 / frameRate);
|
| + }
|
| }
|
|
|
| -// Set up a WebRTC connection and pipe |stream| through it.
|
| -// Call TestStream with the new stream when done.
|
| -function testThroughWebRTC(stream) {
|
| - var sender = new webkitRTCPeerConnection(null);
|
| - var receiver = new webkitRTCPeerConnection(null);
|
| - sender.onicecandidate = function (event) {
|
| - if (event.candidate) {
|
| - receiver.addIceCandidate(new RTCIceCandidate(event.candidate));
|
| - }
|
| - };
|
| - receiver.onicecandidate = function (event) {
|
| - if (event.candidate) {
|
| - sender.addIceCandidate(new RTCIceCandidate(event.candidate));
|
| +chrome.test.runTests([
|
| + function endToEndTest() {
|
| +    // Each test parameter varies between browser_test invocations, and is passed
|
| +    // as a query parameter in the URL.
|
| + var transportMethod; // Should be: local or webrtc.
|
| + var colorDeviation; // How far from the expected intensity ([0,255] scale)?
|
| + try {
|
| + transportMethod = window.location.search.match(/(\?|&)method=(\w+)/)[2];
|
| + chrome.test.assertTrue(transportMethod == 'local' ||
|
| + transportMethod == 'webrtc');
|
| + colorDeviation = parseInt(
|
| + window.location.search.match(/(\?|&)colorDeviation=(\d+)/)[2]);
|
| + chrome.test.assertTrue(colorDeviation >= 0 && colorDeviation <= 255);
|
| + } catch (err) {
|
| + chrome.test.fail("Error parsing query params -- " + err.message);
|
| + return;
|
| }
|
| - };
|
| - receiver.onaddstream = function (event) {
|
| - TestStream(event.stream);
|
| - };
|
| - sender.addStream(stream);
|
| - sender.createOffer(function (sender_description) {
|
| - sender.setLocalDescription(sender_description);
|
| - receiver.setRemoteDescription(sender_description);
|
| - receiver.createAnswer(function (receiver_description) {
|
| - receiver.setLocalDescription(receiver_description);
|
| - sender.setRemoteDescription(receiver_description);
|
| - });
|
| - });
|
| -}
|
| -
|
| -function endToEndVideoTest() {
|
| - chrome.tabCapture.capture(
|
| - { video: true,
|
| - audio: false,
|
| - videoConstraints: {
|
| - mandatory: {
|
| - minWidth: 64,
|
| - minHeight: 48
|
| - }
|
| - }
|
| - },
|
| - TestStream);
|
| -}
|
|
|
| -function endToEndVideoTestWithWebRTC() {
|
| - chrome.tabCapture.capture(
|
| - { video: true,
|
| - audio: false,
|
| - videoConstraints: {
|
| - mandatory: {
|
| - minWidth: 64,
|
| - minHeight: 48
|
| - }
|
| - }
|
| - },
|
| - testThroughWebRTC);
|
| -}
|
| -
|
| -chrome.test.runTests([
|
| - endToEndVideoTest,
|
| - endToEndVideoTestWithWebRTC
|
| + // Start rendering test patterns.
|
| + renderTestPatternLoop();
|
| +
|
| + chrome.tabCapture.capture(
|
| + { video: true,
|
| + audio: true,
|
| + videoConstraints: {
|
| + mandatory: {
|
| + minWidth: width,
|
| + minHeight: height,
|
| + maxWidth: width,
|
| + maxHeight: height,
|
| + maxFrameRate: frameRate,
|
| + }
|
| + }
|
| + },
|
| + function remoteTheStream(captureStream) {
|
| + chrome.test.assertTrue(!!captureStream);
|
| + if (transportMethod == 'local') {
|
| + receiveStream = captureStream;
|
| + waitForExpectedColors(colorDeviation);
|
| + } else if (transportMethod == 'webrtc') {
|
| + var sender = new webkitRTCPeerConnection(null);
|
| + var receiver = new webkitRTCPeerConnection(null);
|
| + sender.onicecandidate = function (event) {
|
| + if (event.candidate) {
|
| + receiver.addIceCandidate(new RTCIceCandidate(event.candidate));
|
| + }
|
| + };
|
| + receiver.onicecandidate = function (event) {
|
| + if (event.candidate) {
|
| + sender.addIceCandidate(new RTCIceCandidate(event.candidate));
|
| + }
|
| + };
|
| + receiver.onaddstream = function (event) {
|
| + receiveStream = event.stream;
|
| + waitForExpectedColors(colorDeviation);
|
| + };
|
| + sender.addStream(captureStream);
|
| + sender.createOffer(function (sender_description) {
|
| + sender.setLocalDescription(sender_description);
|
| + receiver.setRemoteDescription(sender_description);
|
| + receiver.createAnswer(function (receiver_description) {
|
| + receiver.setLocalDescription(receiver_description);
|
| + sender.setRemoteDescription(receiver_description);
|
| + });
|
| + });
|
| + } else {
|
| + chrome.test.fail("Unknown transport method: " + transportMethod);
|
| + }
|
| + });
|
| + }
|
| ]);
|
|
|
| // TODO(miu): Once the WebAudio API is finalized, we should add a test to emit a
|
|
|