Chromium Code Reviews

Unified diff: chrome/test/data/extensions/api_test/tab_capture/end_to_end.js

Issue 713623002: Stabilize and re-enable TabCaptureApiPixelTest.EndToEnd. (Closed)
Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Created 6 years, 1 month ago
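
The patched page is driven entirely by its URL query string: the diff below parses a 'method' parameter ('local' or 'webrtc') and a 'colorDeviation' parameter, the allowed per-channel error on a 0-255 scale. The following minimal sketch shows that parsing contract in isolation; the regular expressions are copied from the patch, while exampleSearch and the values 'webrtc' and 50 are invented for illustration (the concrete values are supplied by the C++ side of TabCaptureApiPixelTest.EndToEnd and are not part of this file).

// Illustrative only: exampleSearch and the sample values are made up; the
// regular expressions are the ones used by the patch below.
var exampleSearch = "?method=webrtc&colorDeviation=50";
var method = exampleSearch.match(/(\?|&)method=(\w+)/)[2];  // "webrtc"
var deviation =
    parseInt(exampleSearch.match(/(\?|&)colorDeviation=(\d+)/)[2]);  // 50
console.log(method, deviation);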
--- a/chrome/test/data/extensions/api_test/tab_capture/end_to_end.js
+++ b/chrome/test/data/extensions/api_test/tab_capture/end_to_end.js
@@ -1,160 +1,211 @@
 // Copyright 2014 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 // The tests here cover the end-to-end functionality of tab capturing and
-// playback as video. The page generates a test signal (a full color fill), and
-// the rendering output of the tab is captured into a LocalMediaStream. Then,
-// the LocalMediaStream is plugged into a video element for playback, and a
-// canvas is used to examine the frames of the video for expected content.
+// playback as video. The page generates video test patterns that rotate
+// cyclicly, and the rendering output of the tab is captured into a
+// LocalMediaStream. This stream is then piped into a video element for
+// playback, and a canvas is used to examine the frames of the video for
+// expected content. The stream may be plumbed one of two ways, depending on
+// the 'method' query param:
 //
-// A previous version of this test used a polling scheme and two separate tabs
-// with very little control logic. This setup resulted in flakiness, as there
-// were numerous issues that could cause the test to time out. This new version
-// uses an entirely event-based scheme, which ensures everything is synchronized
-// as the test advances through its stages.
-
-
-// Video needs to be global, or the big bad garbage collector will come and
-// huff and puff it all away.
-var video = null;
-
-function TestStream(stream) {
-  chrome.test.assertTrue(stream != null);
-
-  // The test source is a color fill of red, then green, then blue.
-  var colors = [ [ 255, 0, 0 ], [ 0, 255, 0 ], [ 0, 0, 255 ] ];
-  var curColorIdx = 0;
-
-  // Create video and canvas elements, but no need to append them to the
-  // DOM.
-  video = document.createElement("video");
-  video.width = 64;
-  video.height = 48;
-  video.addEventListener("error", chrome.test.fail);
-  var canvas = document.createElement("canvas");
-
-  function updateTestDocument() {
-    document.body.style.backgroundColor =
-        "rgb(" + colors[curColorIdx] + ")";
-
-    // Important: Blink the testing message so that the capture pipeline
-    // will observe drawing updates and continue to produce video frames.
-    var message = document.getElementById("message");
-    if (!message.blinkInterval) {
-      message.innerHTML = "Testing...";
-      message.blinkInterval = setInterval(function toggleVisibility() {
-        message.style.visibility =
-            message.style.visibility == "hidden" ? "visible" : "hidden";
-      }, 500);
+//   local: LocalMediaStream --> DOM Video Element
+//   webrtc: LocalMediaStream --> PeerConnection (sender)
+//             --> PeerConnection (receiver) --> DOM Video Element
+
+// The test pattern cycles as a color fill of red, then green, then blue.
+var colors = [ [ 255, 0, 0 ], [ 0, 255, 0 ], [ 0, 0, 255 ] ];
+var curIdx = 0;
+
+// Capture parameters.
+var width = 64;
+var height = 48;
+var frameRate = 15;
+
+// The stream to playback in the video element.
+var receiveStream = null;
+
+// waitForExpectedColors() removes elements from this array as each is observed.
+// When it becomes empty, the test succeeds.
+var expectedColors = [ [ 255, 0, 0 ], [ 0, 255, 0 ], [ 0, 0, 255 ] ];
+
+function updateTestPattern() {
+  if (!this.canvas) {
+    this.canvas = document.createElement("canvas");
+    this.canvas.width = 320;
+    this.canvas.height = 200;
+    this.canvas.style.position = "absolute";
+    this.canvas.style.top = "0px";
+    this.canvas.style.left = "0px";
+    this.canvas.style.width = "100%";
+    this.canvas.style.height = "100%";
+    document.body.appendChild(this.canvas);
+  }
+  var context = this.canvas.getContext("2d");
+  // Fill with solid color.
+  context.fillStyle = "rgb(" + colors[curIdx] + ")";
+  context.fillRect(0, 0, this.canvas.width, this.canvas.height);
+  // Draw the circle that moves around the page.
+  context.fillStyle = "rgb(" + colors[(curIdx + 1) % colors.length] + ")";
+  context.beginPath();
+  if (!this.frameNumber) {
+    this.frameNumber = 1;
+  } else {
+    ++this.frameNumber;
+  }
+  var i = this.frameNumber % 200;
+  var t = (this.frameNumber + 3000) * (0.01 + i / 8000.0);
+  var x = (Math.sin(t) * 0.45 + 0.5) * this.canvas.width;
+  var y = (Math.cos(t * 0.9) * 0.45 + 0.5) * this.canvas.height;
+  context.arc(x, y, 16, 0, 2 * Math.PI, false);
+  context.closePath();
+  context.fill();
+}
+
+function renderTestPatternLoop() {
+  requestAnimationFrame(renderTestPatternLoop);
+  updateTestPattern();
+
+  if (!this.stepTimeMillis) {
+    this.stepTimeMillis = 100;
+  }
+  var now = new Date().getTime();
+  if (!this.nextSteppingAt) {
+    this.nextSteppingAt = now + this.stepTimeMillis;
+  } else if (now >= this.nextSteppingAt) {
+    ++curIdx;
+    if (curIdx >= colors.length) {  // Completed a cycle.
+      curIdx = 0;
+      // Increase the wait time between switching test patterns for overloaded
+      // bots that aren't capturing all the frames of video.
+      this.stepTimeMillis *= 1.25;
     }
-  }
-
-  function checkVideoForFillColor(event) {
-    var curColor = colors[curColorIdx];
-    var width = video.videoWidth;
-    var height = video.videoHeight;
+    this.nextSteppingAt = now + this.stepTimeMillis;
+  }
+}
+
+function waitForExpectedColors(colorDeviation) {
+  // If needed, create the video and canvas elements, but no need to append them
+  // to the DOM.
+  if (!this.video) {
+    this.video = document.createElement("video");
+    this.video.width = width;
+    this.video.height = height;
+    this.video.addEventListener("error", chrome.test.fail);
+    this.video.src = URL.createObjectURL(receiveStream);
+    this.video.play();
+
+    this.readbackCanvas = document.createElement("canvas");
+    this.readbackCanvas.width = width;
+    this.readbackCanvas.height = height;
+  }
+
+  // Only bother examining a video frame if the video timestamp has advanced.
+  var currentVideoTimestamp = this.video.currentTime;
+  if (!this.lastVideoTimestamp ||
+      this.lastVideoTimestamp < currentVideoTimestamp) {
+    this.lastVideoTimestamp = currentVideoTimestamp;

     // Grab a snapshot of the center pixel of the video.
-    canvas.width = width;
-    canvas.height = height;
-    var ctx = canvas.getContext("2d");
+    var ctx = this.readbackCanvas.getContext("2d");
     ctx.drawImage(video, 0, 0, width, height);
     var imageData = ctx.getImageData(width / 2, height / 2, 1, 1);
     var pixel = [ imageData.data[0], imageData.data[1], imageData.data[2] ];

-    // Check whether the pixel is of the expected color value, and proceed
-    // to the next test stage when a match is encountered. Note: The video
-    // encode/decode process loses color accuracy, which is accounted for
-    // here.
-    if (Math.abs(pixel[0] - curColor[0]) < 10 &&
-        Math.abs(pixel[1] - curColor[1]) < 10 &&
-        Math.abs(pixel[2] - curColor[2]) < 10) {
-      console.debug("Observed expected color RGB(" + curColor +
-                    ") in the video as RGB(" + pixel + ")");
-      // Continue with the next color; or, if there are no more colors,
-      // consider the test successful.
-      if (curColorIdx + 1 < colors.length) {
-        ++curColorIdx;
-        updateTestDocument();
-      } else {
-        video.removeEventListener("timeupdate", checkVideoForFillColor);
-        stream.stop();
-        chrome.test.succeed();
+    // Does the pixel match one of the expected colors?
+    for (var i = 0; i < expectedColors.length; ++i) {
+      var curColor = expectedColors[i];
+      if (Math.abs(pixel[0] - curColor[0]) <= colorDeviation &&
+          Math.abs(pixel[1] - curColor[1]) <= colorDeviation &&
+          Math.abs(pixel[2] - curColor[2]) <= colorDeviation) {
+        console.debug("Observed expected color RGB(" + curColor +
+            ") in the video as RGB(" + pixel + ")");
+        expectedColors.splice(i, 1);
       }
     }
   }
-  // Play the LocalMediaStream in the video element.
-  video.src = URL.createObjectURL(stream);
-  video.play();
-
-  // Kick it off.
-  updateTestDocument();
-  video.addEventListener("timeupdate", checkVideoForFillColor);
+
+  if (expectedColors.length == 0) {
+    chrome.test.succeed();
+  } else {
+    setTimeout(function () { waitForExpectedColors(colorDeviation); },
+               1000 / frameRate);
+  }
 }

-// Set up a WebRTC connection and pipe |stream| through it.
-// Call TestStream with the new stream when done.
-function testThroughWebRTC(stream) {
-  var sender = new webkitRTCPeerConnection(null);
-  var receiver = new webkitRTCPeerConnection(null);
-  sender.onicecandidate = function (event) {
-    if (event.candidate) {
-      receiver.addIceCandidate(new RTCIceCandidate(event.candidate));
+chrome.test.runTests([
+  function endToEndTest() {
+    // The receive port changes between browser_test invocations, and is passed
+    // as an query parameter in the URL.
+    var transportMethod;  // Should be: local or webrtc.
+    var colorDeviation;  // How far from the expected intensity ([0,255] scale)?
+    try {
+      transportMethod = window.location.search.match(/(\?|&)method=(\w+)/)[2];
+      chrome.test.assertTrue(transportMethod == 'local' ||
+                             transportMethod == 'webrtc');
+      colorDeviation = parseInt(
+          window.location.search.match(/(\?|&)colorDeviation=(\d+)/)[2]);
+      chrome.test.assertTrue(colorDeviation >= 0 && colorDeviation <= 255);
+    } catch (err) {
+      chrome.test.fail("Error parsing query params -- " + err.message);
+      return;
     }
-  };
-  receiver.onicecandidate = function (event) {
-    if (event.candidate) {
-      sender.addIceCandidate(new RTCIceCandidate(event.candidate));
-    }
-  };
-  receiver.onaddstream = function (event) {
-    TestStream(event.stream);
-  };
-  sender.addStream(stream);
-  sender.createOffer(function (sender_description) {
-    sender.setLocalDescription(sender_description);
-    receiver.setRemoteDescription(sender_description);
-    receiver.createAnswer(function (receiver_description) {
-      receiver.setLocalDescription(receiver_description);
-      sender.setRemoteDescription(receiver_description);
-    });
-  });
-}
-
-function endToEndVideoTest() {
-  chrome.tabCapture.capture(
-      { video: true,
-        audio: false,
-        videoConstraints: {
-          mandatory: {
-            minWidth: 64,
-            minHeight: 48
-          }
-        }
-      },
-      TestStream);
-}
-
-function endToEndVideoTestWithWebRTC() {
-  chrome.tabCapture.capture(
-      { video: true,
-        audio: false,
-        videoConstraints: {
-          mandatory: {
-            minWidth: 64,
-            minHeight: 48
-          }
-        }
-      },
-      testThroughWebRTC);
-}
-
-chrome.test.runTests([
-  endToEndVideoTest,
-  endToEndVideoTestWithWebRTC
+
+    // Start rendering test patterns.
+    renderTestPatternLoop();
+
+    chrome.tabCapture.capture(
+        { video: true,
+          audio: true,
+          videoConstraints: {
+            mandatory: {
+              minWidth: width,
+              minHeight: height,
+              maxWidth: width,
+              maxHeight: height,
+              maxFrameRate: frameRate,
+            }
+          }
+        },
+        function remoteTheStream(captureStream) {
+          chrome.test.assertTrue(!!captureStream);
+          if (transportMethod == 'local') {
+            receiveStream = captureStream;
+            waitForExpectedColors(colorDeviation);
+          } else if (transportMethod == 'webrtc') {
+            var sender = new webkitRTCPeerConnection(null);
+            var receiver = new webkitRTCPeerConnection(null);
+            sender.onicecandidate = function (event) {
+              if (event.candidate) {
+                receiver.addIceCandidate(new RTCIceCandidate(event.candidate));
+              }
+            };
+            receiver.onicecandidate = function (event) {
+              if (event.candidate) {
+                sender.addIceCandidate(new RTCIceCandidate(event.candidate));
+              }
+            };
+            receiver.onaddstream = function (event) {
+              receiveStream = event.stream;
+              waitForExpectedColors(colorDeviation);
+            };
+            sender.addStream(captureStream);
+            sender.createOffer(function (sender_description) {
+              sender.setLocalDescription(sender_description);
+              receiver.setRemoteDescription(sender_description);
+              receiver.createAnswer(function (receiver_description) {
+                receiver.setLocalDescription(receiver_description);
+                sender.setRemoteDescription(receiver_description);
+              });
+            });
+          } else {
+            chrome.test.fail("Unknown transport method: " + transportMethod);
+          }
+        });
+  }
 ]);

 // TODO(miu): Once the WebAudio API is finalized, we should add a test to emit a
 // tone from the sender page, and have the receiver page check for the audio
 // tone.
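
The color check in waitForExpectedColors() above declares a match when every RGB channel of the sampled pixel is within colorDeviation of an expected color, which is what tolerates the color loss introduced by the capture, encode, and decode steps. A small standalone sketch of that rule follows; matchesWithinDeviation is a hypothetical helper, and the pixel values and the deviation of 50 are made-up numbers rather than values used by the test harness.

// Sketch of the matching rule from waitForExpectedColors(): every channel must
// be within colorDeviation of the expected intensity.
function matchesWithinDeviation(pixel, expected, colorDeviation) {
  return Math.abs(pixel[0] - expected[0]) <= colorDeviation &&
         Math.abs(pixel[1] - expected[1]) <= colorDeviation &&
         Math.abs(pixel[2] - expected[2]) <= colorDeviation;
}
// Hypothetical numbers: an observed pixel of RGB(231, 18, 12) matches the
// expected RGB(255, 0, 0) with a colorDeviation of 50, since the channels
// differ by 24, 18, and 12 respectively.
console.log(matchesWithinDeviation([231, 18, 12], [255, 0, 0], 50));  // true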