| OLD | NEW |
| 1 <html> | 1 <html> |
| 2 <head> | 2 <head> |
| 3 <script type="text/javascript" src="webrtc_test_utilities.js"></script> | 3 <script type="text/javascript" src="webrtc_test_utilities.js"></script> |
| 4 <script type="text/javascript"> | 4 <script type="text/javascript"> |
| 5 $ = function(id) { | 5 $ = function(id) { |
| 6 return document.getElementById(id); | 6 return document.getElementById(id); |
| 7 }; | 7 }; |
| 8 | 8 |
| 9 var gLocalStream = null; | 9 var gLocalStream = null; |
| 10 | 10 |
| 11 setAllEventsOccuredHandler(function() { | 11 setAllEventsOccuredHandler(function() { |
| 12 gLocalStream.stop(); | 12 gLocalStream.stop(); |
| 13 document.title = 'OK'; | 13 reportTestSuccess(); |
| 14 sendValueToTest(document.title); | |
| 15 }); | 14 }); |
| 16 | 15 |
| 17 function sendValueToTest(value) { | |
| 18 window.domAutomationController.setAutomationId(0); | |
| 19 window.domAutomationController.send(value); | |
| 20 } | |
| 21 | |
| 22 function getSources() { | 16 function getSources() { |
| 23 MediaStreamTrack.getSources(function(devices) { | 17 MediaStreamTrack.getSources(function(devices) { |
| 24 document.title = 'Sources Available'; | 18 document.title = 'Sources Available'; |
| 25 sendValueToTest(JSON.stringify(devices)); | 19 sendValueToTest(JSON.stringify(devices)); |
| 26 }); | 20 }); |
| 27 } | 21 } |
| 28 | 22 |
| 29 // Creates a MediaStream and renders it locally. When the video is detected to | 23 // Creates a MediaStream and renders it locally. When the video is detected to |
| 30 // be rolling, the title is changed and the stream should be stopped. | 24 // be rolling, the title is changed and the stream should be stopped. |
| 31 function getUserMediaAndStop(constraints) { | 25 function getUserMediaAndStop(constraints) { |
| 32 document.title = 'Calling GetUserMedia'; | 26 console.log('Calling getUserMediaAndStop.'); |
| 33 navigator.webkitGetUserMedia( | 27 navigator.webkitGetUserMedia( |
| 34 constraints, | 28 constraints, |
| 35 function(stream) { displayAndDetectVideo(stream, stopVideoTrack); }, | 29 function(stream) { displayAndDetectVideo(stream, stopVideoTrack); }, |
| 36 failedCallback); | 30 failedCallback); |
| 37 } | 31 } |
| 38 | 32 |
| 33 // Requests getUserMedia and expects it to fail. |
| 34 function getUserMediaAndExpectFailure(constraints) { |
| 35 console.log('Calling getUserMediaAndExpectFailure.'); |
| 36 navigator.webkitGetUserMedia( |
| 37 constraints, |
| 38 function(stream) { failTest('Unexpectedly succeeded getUserMedia.'); }, |
| 39 function(error) { reportTestSuccess(); }); |
| 40 } |
| 41 |
| 39 // Creates a MediaStream and renders it locally. When the video is detected to | 42 // Creates a MediaStream and renders it locally. When the video is detected to |
| 40 // be rolling, the title should be changed and the stream is let roll for a | 43 // be rolling, we return ok-stream-running through the automation controller. |
| 41 // number |waitTimeInSeconds| and then it should be stopped. | 44 function getUserMediaAndGetStreamUp(constraints, waitTimeInSeconds) { |
| 42 function getUserMediaAndWaitAndStop(constraints, waitTimeInSeconds) { | 45 console.log('Calling getUserMediaAndGetStreamUp.'); |
| 43 navigator.webkitGetUserMedia( | 46 navigator.webkitGetUserMedia( |
| 44 constraints, | 47 constraints, |
| 45 function(stream) { | 48 function(stream) { |
| 46 displayAndDetectVideo( | 49 displayAndDetectVideo( |
| 47 stream, | 50 stream, |
| 48 function() { | 51 function() { |
| 49 waitAndStopVideoTrack(waitTimeInSeconds); | 52 sendValueToTest('ok-stream-running'); |
| 50 }); | 53 }); |
| 51 }, | 54 }, |
| 52 failedCallback); | 55 failedCallback); |
| 53 } | 56 } |
| 54 | 57 |
| 58 // Gets a video stream up, analyses it and returns the aspect ratio to the |
| 59 // test through the automation controller. |
| 55 function getUserMediaAndAnalyseAndStop(constraints) { | 60 function getUserMediaAndAnalyseAndStop(constraints) { |
| 61 console.log('Calling getUserMediaAndAnalyseAndStop.'); |
| 56 navigator.webkitGetUserMedia( | 62 navigator.webkitGetUserMedia( |
| 57 constraints, displayDetectAndAnalyzeVideo, failedCallback); | 63 constraints, displayDetectAndAnalyzeVideo, failedCallback); |
| 58 } | 64 } |
| 59 | 65 |
| 60 // This tests that a MediaStream can be cloned and that the clone can | 66 // This tests that a MediaStream can be cloned and that the clone can |
| 61 // be rendered. | 67 // be rendered. |
| 62 function getUserMediaAndClone() { | 68 function getUserMediaAndClone() { |
| 69 console.log('Calling getUserMediaAndClone.'); |
| 63 navigator.webkitGetUserMedia({video: true, audio: true}, | 70 navigator.webkitGetUserMedia({video: true, audio: true}, |
| 64 createAndRenderClone, failedCallback); | 71 createAndRenderClone, failedCallback); |
| 65 } | 72 } |
| 66 | 73 |
| 67 // Creates two MediaStream and renders them locally. When the video of both | 74 // Creates two MediaStream and renders them locally. When the video of both |
| 68 // streams are detected to be rolling, we stop the local stream. Since both | 75 // streams are detected to be rolling, we stop the local stream. Since both |
| 69 // streams have the same source, both video streams should stop. If they do, | 76 // streams have the same source, both video streams should stop. If they do, |
| 70 // the test succeeds. | 77 // the test succeeds. |
| 71 function twoGetUserMediaAndStop(constraints) { | 78 function twoGetUserMediaAndStop(constraints) { |
| 72 document.title = 'Calling Two GetUserMedia'; | 79 console.log('Calling Two GetUserMedia'); |
| 73 navigator.webkitGetUserMedia( | 80 navigator.webkitGetUserMedia( |
| 74 constraints, | 81 constraints, |
| 75 function(stream) { | 82 function(stream) { |
| 76 displayAndDetectVideo(stream, requestSecondGetUserMedia); | 83 displayAndDetectVideo(stream, requestSecondGetUserMedia); |
| 77 }, | 84 }, |
| 78 failedCallback); | 85 failedCallback); |
| 79 var requestSecondGetUserMedia = function() { | 86 var requestSecondGetUserMedia = function() { |
| 80 navigator.webkitGetUserMedia( | 87 navigator.webkitGetUserMedia( |
| 81 constraints, | 88 constraints, |
| 82 function(stream) { | 89 function(stream) { |
| 83 displayIntoVideoElement(stream, | 90 displayIntoVideoElement(stream, |
| 84 stopStreamAndVerifyAllLocalViewsDontPlayVideo, 'local-view-2'); | 91 stopStreamAndVerifyAllLocalViewsDontPlayVideo, 'local-view-2'); |
| 85 }, | 92 }, |
| 86 failedCallback); | 93 failedCallback); |
| 87 }; | 94 }; |
| 88 | 95 |
| 89 var stopStreamAndVerifyAllLocalViewsDontPlayVideo = function() { | 96 var stopStreamAndVerifyAllLocalViewsDontPlayVideo = function() { |
| 90 gLocalStream.getVideoTracks()[0].stop(); | 97 gLocalStream.getVideoTracks()[0].stop(); |
| 91 | 98 |
| 92 // Since local-view and local-view-2 are playing the video from the same | 99 // Since local-view and local-view-2 are playing the video from the same |
| 93 // source, both of them should stop. | 100 // source, both of them should stop. |
| 94 waitForVideoToStop('local-view'); | 101 waitForVideoToStop('local-view'); |
| 95 waitForVideoToStop('local-view-2'); | 102 waitForVideoToStop('local-view-2'); |
| 96 }; | 103 }; |
| 97 } | 104 } |
| 98 | 105 |
| 99 function failedCallback(error) { | 106 function failedCallback(error) { |
| 100 document.title = 'GetUserMedia call failed with code ' + error.code; | 107 failTest('GetUserMedia call failed with code ' + error.code); |
| 101 sendValueToTest(document.title); | |
| 102 } | 108 } |
| 103 | 109 |
| 104 function plugStreamIntoVideoElement(stream, videoElement) { | 110 function plugStreamIntoVideoElement(stream, videoElement) { |
| 105 gLocalStream = stream; | 111 gLocalStream = stream; |
| 106 var localStreamUrl = URL.createObjectURL(stream); | 112 var localStreamUrl = URL.createObjectURL(stream); |
| 107 $(videoElement).src = localStreamUrl; | 113 $(videoElement).src = localStreamUrl; |
| 108 } | 114 } |
| 109 | 115 |
| 110 function displayIntoVideoElement(stream, callback, videoElement) { | 116 function displayIntoVideoElement(stream, callback, videoElement) { |
| 111 plugStreamIntoVideoElement(stream, videoElement); | 117 plugStreamIntoVideoElement(stream, videoElement); |
| 112 document.title = 'Waiting for video...'; | |
| 113 detectVideoPlaying(videoElement, callback); | 118 detectVideoPlaying(videoElement, callback); |
| 114 } | 119 } |
| 115 | 120 |
| 116 function displayAndDetectVideo(stream, callback) { | 121 function displayAndDetectVideo(stream, callback) { |
| 117 displayIntoVideoElement(stream, callback, 'local-view'); | 122 displayIntoVideoElement(stream, callback, 'local-view'); |
| 118 } | 123 } |
| 119 | 124 |
| 120 function displayDetectAndAnalyzeVideo(stream) { | 125 function displayDetectAndAnalyzeVideo(stream) { |
| 121 plugStreamIntoVideoElement(stream, 'local-view'); | 126 plugStreamIntoVideoElement(stream, 'local-view'); |
| 122 analyzeVideo(); | 127 analyzeVideo(); |
| 123 } | 128 } |
| 124 | 129 |
| 125 function createAndRenderClone(stream) { | 130 function createAndRenderClone(stream) { |
| 126 gLocalStream = stream; | 131 gLocalStream = stream; |
| 127 // TODO(perkj): --use-fake-device-for-media-stream does not currently | 132 // TODO(perkj): --use-fake-device-for-media-stream does not currently |
| 128 // work with audio devices and not all bots have a microphone. | 133 // work with audio devices and not all bots have a microphone. |
| 129 new_stream = new webkitMediaStream(); | 134 new_stream = new webkitMediaStream(); |
| 130 new_stream.addTrack(stream.getVideoTracks()[0]); | 135 new_stream.addTrack(stream.getVideoTracks()[0]); |
| 131 expectEquals(new_stream.getVideoTracks().length, 1); | 136 assertEquals(new_stream.getVideoTracks().length, 1); |
| 132 if (stream.getAudioTracks().length > 0) { | 137 if (stream.getAudioTracks().length > 0) { |
| 133 new_stream.addTrack(stream.getAudioTracks()[0]); | 138 new_stream.addTrack(stream.getAudioTracks()[0]); |
| 134 expectEquals(new_stream.getAudioTracks().length, 1); | 139 assertEquals(new_stream.getAudioTracks().length, 1); |
| 135 new_stream.removeTrack(new_stream.getAudioTracks()[0]); | 140 new_stream.removeTrack(new_stream.getAudioTracks()[0]); |
| 136 expectEquals(new_stream.getAudioTracks().length, 0); | 141 assertEquals(new_stream.getAudioTracks().length, 0); |
| 137 } | 142 } |
| 138 | 143 |
| 139 var newStreamUrl = URL.createObjectURL(new_stream); | 144 var newStreamUrl = URL.createObjectURL(new_stream); |
| 140 $('local-view').src = newStreamUrl; | 145 $('local-view').src = newStreamUrl; |
| 141 waitForVideo('local-view'); | 146 waitForVideo('local-view'); |
| 142 } | 147 } |
| 143 | 148 |
| 144 function stopVideoTrack() { | 149 function stopVideoTrack() { |
| 145 gLocalStream.getVideoTracks()[0].stop(); | 150 gLocalStream.getVideoTracks()[0].stop(); |
| 146 waitForVideoToStop('local-view'); | 151 waitForVideoToStop('local-view'); |
| 147 } | 152 } |
| 148 | 153 |
| 149 function waitAndStopVideoTrack(waitTimeInSeconds) { | 154 function waitAndStopVideoTrack(waitTimeInSeconds) { |
| 150 document.title = 'Running...'; | |
| 151 setTimeout(stopVideoTrack, waitTimeInSeconds * 1000); | 155 setTimeout(stopVideoTrack, waitTimeInSeconds * 1000); |
| 152 } | 156 } |
| 153 | 157 |
| 154 function analyzeVideo() { | 158 function analyzeVideo() { |
| 155 document.title = 'Waiting for video...'; | |
| 156 addExpectedEvent(); | |
| 157 detectAspectRatio(function(aspectRatio) { | 159 detectAspectRatio(function(aspectRatio) { |
| 158 document.title = aspectRatio; | 160 sendValueToTest(aspectRatio); |
| 159 eventOccured(); | |
| 160 }); | 161 }); |
| 161 } | 162 } |
| 162 | 163 |
| 164 // This function tries to calculate the aspect ratio shown by the fake capture |
| 165 // device in the video tag. For this, we count the amount of light green |
| 166 // pixels along |aperture| pixels on the positive X and Y axis starting from |
| 167 // the center of the image. In this very center there should be a time-varying |
| 168 // pacman; the algorithm counts for a couple of iterations and keeps the |
| 169 // maximum amount of light green pixels on both directions. From this data |
| 170 // the aspect ratio is calculated relative to a 320x240 window, so 4:3 would |
| 171 // show as a 1. Furthermore, since an original non-4:3 might be letterboxed or |
| 172 // cropped, the actual X and Y pixel amounts are compared with the fake video |
| 173 // capture expected pacman radius (see further below). |
| 174 function detectAspectRatio(callback) { |
| 175 var width = VIDEO_TAG_WIDTH; |
| 176 var height = VIDEO_TAG_HEIGHT; |
| 177 var videoElement = $('local-view'); |
| 178 var canvas = $('local-view-canvas'); |
| 179 |
| 180 var maxLightGreenPixelsX = 0; |
| 181 var maxLightGreenPixelsY = 0; |
| 182 |
| 183 var aperture = Math.min(width, height) / 2; |
| 184 var iterations = 0; |
| 185 var maxIterations = 10; |
| 186 |
| 187 var detectorFunction = function() { |
| 188 var context = canvas.getContext('2d'); |
| 189 context.drawImage(videoElement, 0, 0, width, height); |
| 190 |
| 191 // We are interested in a window starting from the center of the image |
| 192 // where we expect the circle from the fake video capture to be rolling. |
| 193 var pixels = context.getImageData(width / 2, height / 2, |
| 194 aperture, aperture); |
| 195 |
| 196 var lightGreenPixelsX = 0; |
| 197 var lightGreenPixelsY = 0; |
| 198 |
| 199 // Walk horizontally counting light green pixels. |
| 200 for (var x = 0; x < aperture; ++x) { |
| 201 if (pixels.data[4 * x + 1] != COLOR_BACKGROUND_GREEN) |
| 202 lightGreenPixelsX++; |
| 203 } |
| 204 // Walk vertically counting light green pixels. |
| 205 for (var y = 0; y < aperture; ++y) { |
| 206 if (pixels.data[4 * y * aperture + 1] != 135) |
| 207 lightGreenPixelsY++; |
| 208 } |
| 209 if (lightGreenPixelsX > maxLightGreenPixelsX && |
| 210 lightGreenPixelsX < aperture) |
| 211 maxLightGreenPixelsX = lightGreenPixelsX; |
| 212 if (lightGreenPixelsY > maxLightGreenPixelsY && |
| 213 lightGreenPixelsY < aperture) |
| 214 maxLightGreenPixelsY = lightGreenPixelsY; |
| 215 |
| 216 var detectedAspectRatioString = ""; |
| 217 if (++iterations > maxIterations) { |
| 218 clearInterval(detectorFunction); |
| 219 observedAspectRatio = maxLightGreenPixelsY / maxLightGreenPixelsX; |
| 220 // At this point the observed aspect ratio is either 1, for undistorted |
| 221 // 4:3, or some other aspect ratio that is seen as distorted. |
| 222 if (Math.abs(observedAspectRatio - 1.333) < 0.1) |
| 223 detectedAspectRatioString = "16:9"; |
| 224 else if (Math.abs(observedAspectRatio - 1.20) < 0.1) |
| 225 detectedAspectRatioString = "16:10"; |
| 226 else if (Math.abs(observedAspectRatio - 1.0) < 0.1) |
| 227 detectedAspectRatioString = "4:3"; |
| 228 else |
| 229 detectedAspectRatioString = "UNKNOWN aspect ratio"; |
| 230 console.log(detectedAspectRatioString + " observed aspect ratio (" + |
| 231 observedAspectRatio + ")"); |
| 232 |
| 233 // The FakeVideoCapture calculates the circle radius as |
| 234 // std::min(capture_format_.width, capture_format_.height) / 4; |
| 235 // we do the same and see if both dimensions are scaled, meaning |
| 236 // we started from a cropped or stretched image. |
| 237 var nonDistortedRadius = Math.min(width, height) / 4; |
| 238 if ((maxLightGreenPixelsX != nonDistortedRadius) && |
| 239 (maxLightGreenPixelsY != nonDistortedRadius)) { |
| 240 detectedAspectRatioString += " cropped"; |
| 241 } else |
| 242 detectedAspectRatioString += " letterbox"; |
| 243 |
| 244 console.log("Original image is: " + detectedAspectRatioString); |
| 245 callback(detectedAspectRatioString); |
| 246 } |
| 247 } |
| 248 |
| 249 setInterval(detectorFunction, 50); |
| 250 } |
| 163 </script> | 251 </script> |
| 164 </head> | 252 </head> |
| 165 <body> | 253 <body> |
| 166 <table border="0"> | 254 <table border="0"> |
| 167 <tr> | 255 <tr> |
| 168 <td>Local Preview</td> | 256 <td>Local Preview</td> |
| 169 </tr> | 257 </tr> |
| 170 <tr> | 258 <tr> |
| 171 <td><video width="320" height="240" id="local-view" | 259 <td><video width="320" height="240" id="local-view" |
| 172 autoplay="autoplay"></video></td> | 260 autoplay="autoplay"></video></td> |
| 173 <td><canvas width="320" height="240" id="local-view-canvas" | 261 <td><canvas width="320" height="240" id="local-view-canvas" |
| 174 style="display:none"></canvas></td> | 262 style="display:none"></canvas></td> |
| 175 </tr> | 263 </tr> |
| 176 <tr> | 264 <tr> |
| 177 <td>Local Preview 2</td> | 265 <td>Local Preview 2</td> |
| 178 </tr> | 266 </tr> |
| 179 <tr> | 267 <tr> |
| 180 <td><video width="320" height="240" id="local-view-2" | 268 <td><video width="320" height="240" id="local-view-2" |
| 181 autoplay="autoplay"></video></td> | 269 autoplay="autoplay"></video></td> |
| 182 <!-- Canvases are named after their corresponding video elements. --> | 270 <!-- Canvases are named after their corresponding video elements. --> |
| 183 <td><canvas width="320" height="240" id="local-view-2-canvas" | 271 <td><canvas width="320" height="240" id="local-view-2-canvas" |
| 184 style="display:none"></canvas></td> | 272 style="display:none"></canvas></td> |
| 185 </tr> | 273 </tr> |
| 186 </table> | 274 </table> |
| 187 </body> | 275 </body> |
| 188 </html> | 276 </html> |
| OLD | NEW |