Chromium Code Reviews| OLD | NEW |
|---|---|
| (Empty) | |
| 1 <html> | |
| 2 <head> | |
| 3 <script type="text/javascript" src="webrtc_test_utilities.js"></script> | |
| 4 <script type="text/javascript" src="webrtc_test_common.js"></script> | |
| 5 <script type="text/javascript" src="webrtc_test_audio.js"></script> | |
| 6 <script type="text/javascript"> | |
// Shorthand for document.getElementById.
// Declared with |var|: the original assigned an implicit global, which
// throws a ReferenceError in strict mode and hides the declaration.
var $ = function(id) {
  return document.getElementById(id);
};
| 10 | |
// Surface any uncaught script error as a test failure, so a broken test
// reports immediately instead of hanging until timeout.
window.onerror = function(errorMsg, url, lineNumber, column, errorObj) {
  failTest(['Error: ' + errorMsg,
            'Script: ' + url,
            'Line: ' + lineNumber,
            'Column: ' + column,
            'StackTrace: ' + errorObj].join('\n'));
};
| 16 | |
// The two peer connections under test; created by createConnections().
var gFirstConnection = null;
var gSecondConnection = null;
// When true, onRemoteStream() fails the test unless the incoming remote
// stream has the id "default".
var gTestWithoutMsid = false;
|
phoglund_chromium
2016/07/22 07:28:14
This one is not used in this test; get rid of it.
| |
// The stream obtained from getUserMedia; stored by displayAndRemember() and
// later removed from both connections when renegotiating.
var gLocalStream = null;
// NOTE(review): not referenced anywhere in this file — looks dead; confirm
// against the shared webrtc_test_* helpers before removing.
var gSentTones = '';

// Remote streams received so far, keyed by remote <video> element id.
var gRemoteStreams = {};
|
phoglund_chromium
2016/07/22 07:28:14
Nit: just one blank line
| |
// By default, report success as soon as all expected events have occurred;
// individual tests override this handler to chain extra steps.
setAllEventsOccuredHandler(reportTestSuccess);
| 27 | |
// Sets up a two-way call, then renegotiates it into a one-way call using
// |renegotiationConstraints|. The second set of constraints should request
// audio (e.g. audio:true) since we expect audio to be playing after the
// second renegotiation.
function callAndRenegotiateToAudio(constraints, renegotiationConstraints) {
  createConnections(null);
  navigator.webkitGetUserMedia(constraints,
      addStreamToBothConnectionsAndNegotiate, printGetUserMediaError);

  var renegotiate = function() {
    // Drop the original stream from both ends before renegotiating.
    gFirstConnection.removeStream(gLocalStream);
    gSecondConnection.removeStream(gLocalStream);

    navigator.webkitGetUserMedia(renegotiationConstraints,
        addStreamToTheFirstConnectionAndNegotiate, printGetUserMediaError);

    waitForConnectionToStabilize(gFirstConnection, function() {
      ensureAudioPlaying(gSecondConnection);
    });
  };

  waitForConnectionToStabilize(gFirstConnection, renegotiate);
}
| 49 | |
// Establishes a one-way call (gFirstConnection sends, gSecondConnection
// receives) and verifies audio is playing out on the receiving side.
function callAndEnsureAudioIsPlaying(constraints) {
  createConnections(null);

  // Only the first connection gets the local stream: one-way audio.
  navigator.webkitGetUserMedia(constraints,
      addStreamToTheFirstConnectionAndNegotiate, printGetUserMediaError);

  waitForConnectionToStabilize(gFirstConnection, function() {
    ensureAudioPlaying(gSecondConnection);
  });
}
| 63 | |
// Like callAndEnsureAudioIsPlaying, but rewrites the offer SDP so that only
// iSAC 16K (payload 103) and payload 126 are offered for audio.
function callWithIsac16KAndEnsureAudioIsPlaying(constraints) {
  var forceIsac16K = function(sdp) {
    // Restrict the audio m-line to payloads 103 and 126.
    sdp = sdp.replace(/m=audio (\d+) RTP\/SAVPF.*\r\n/g,
                      'm=audio $1 RTP/SAVPF 103 126\r\n');
    sdp = sdp.replace('a=fmtp:111 minptime=10', 'a=fmtp:103 minptime=10');
    if (sdp.search('a=rtpmap:103 ISAC/16000') == -1)
      failTest('Missing iSAC 16K codec on Android; cannot force codec.');
    return sdp;
  };

  setOfferSdpTransform(forceIsac16K);
  callAndEnsureAudioIsPlaying(constraints);
}
| 76 | |
// Sets the enabled flag on the first video track of |peerConnection|'s first
// remote stream; enabled=false mutes the track.
function enableRemoteVideo(peerConnection, enabled) {
  // |var| added: the original assigned an implicit global, which leaks state
  // across calls and throws in strict mode.
  var remoteStream = peerConnection.getRemoteStreams()[0];
  remoteStream.getVideoTracks()[0].enabled = enabled;
}
| 81 | |
// Sets the enabled flag on the first audio track of |peerConnection|'s first
// remote stream; enabled=false mutes the track.
function enableRemoteAudio(peerConnection, enabled) {
  // |var| added: the original assigned an implicit global, which leaks state
  // across calls and throws in strict mode.
  var remoteStream = peerConnection.getRemoteStreams()[0];
  remoteStream.getAudioTracks()[0].enabled = enabled;
}
| 86 | |
// Sets the enabled flag on the first video track of |peerConnection|'s first
// local (sending) stream; enabled=false mutes the track.
function enableLocalVideo(peerConnection, enabled) {
  // |var| added: the original assigned an implicit global, which leaks state
  // across calls and throws in strict mode.
  var localStream = peerConnection.getLocalStreams()[0];
  localStream.getVideoTracks()[0].enabled = enabled;
}
| 91 | |
// Sets the enabled flag on the first audio track of |peerConnection|'s first
// local (sending) stream; enabled=false mutes the track.
function enableLocalAudio(peerConnection, enabled) {
  // |var| added: the original assigned an implicit global, which leaks state
  // across calls and throws in strict mode.
  var localStream = peerConnection.getLocalStreams()[0];
  localStream.getAudioTracks()[0].enabled = enabled;
}
| 96 | |
// Verifies that disabling the remote audio track silences the playout.
function callAndEnsureRemoteAudioTrackMutingWorks() {
  callAndEnsureAudioIsPlaying({audio: true, video: true});

  var onCallUp = function() {
    setAllEventsOccuredHandler(reportTestSuccess);

    // The call is up; mute the remote track and verify playout goes silent
    // (ensureSilence tolerates the mute not taking effect instantly).
    enableRemoteAudio(gSecondConnection, false);
    ensureSilence(gSecondConnection);
  };
  setAllEventsOccuredHandler(onCallUp);
}
| 108 | |
// Verifies that disabling the sender's local audio track stops audio from
// reaching the receiving side.
function callAndEnsureLocalAudioTrackMutingWorks() {
  callAndEnsureAudioIsPlaying({audio: true, video: true});

  var onCallUp = function() {
    setAllEventsOccuredHandler(reportTestSuccess);

    // Mute on the sending side and ensure the receiver stops getting audio.
    enableLocalAudio(gFirstConnection, false);
    ensureSilence(gSecondConnection);
  };
  setAllEventsOccuredHandler(onCallUp);
}
| 120 | |
// Verifies audio comes back after mute + unmute, and that muting the video
// track does not interfere with audio.
function callAndEnsureAudioTrackUnmutingWorks() {
  callAndEnsureAudioIsPlaying({audio: true, video: true});

  var onCallUp = function() {
    setAllEventsOccuredHandler(reportTestSuccess);

    // Mute both remote tracks; only audio is unmuted again below, which also
    // checks that video muting doesn't affect audio.
    enableRemoteAudio(gSecondConnection, false);
    enableRemoteVideo(gSecondConnection, false);

    setTimeout(function() {
      enableRemoteAudio(gSecondConnection, true);
    }, 500);
    setTimeout(function() {
      ensureAudioPlaying(gSecondConnection);
    }, 1500);
  };
  setAllEventsOccuredHandler(onCallUp);
}
| 140 | |
// Muting the local (sending-side) video track must leave audio playing.
function callAndEnsureLocalVideoMutingDoesntMuteAudio() {
  callAndEnsureAudioIsPlaying({audio: true, video: true});

  var onCallUp = function() {
    setAllEventsOccuredHandler(reportTestSuccess);
    enableLocalVideo(gFirstConnection, false);
    ensureAudioPlaying(gSecondConnection);
  };
  setAllEventsOccuredHandler(onCallUp);
}
| 149 | |
// Muting the remote (receiving-side) video track must leave audio playing.
function callAndEnsureRemoteVideoMutingDoesntMuteAudio() {
  callAndEnsureAudioIsPlaying({audio: true, video: true});

  var onCallUp = function() {
    setAllEventsOccuredHandler(reportTestSuccess);
    enableRemoteVideo(gSecondConnection, false);
    ensureAudioPlaying(gSecondConnection);
  };
  setAllEventsOccuredHandler(onCallUp);
}
| 158 | |
// Creates the two peer connections used by the tests and checks that both
// start out in the stable signaling state.
function createConnections(constraints) {
  gFirstConnection = createConnection(constraints, 'remote-view-1');
  gSecondConnection = createConnection(constraints, 'remote-view-2');

  assertEquals('stable', gFirstConnection.signalingState);
  assertEquals('stable', gSecondConnection.signalingState);
}
| 166 | |
// Creates one peer connection whose incoming remote stream is rendered into
// the <video> element with id |remoteView|.
function createConnection(constraints, remoteView) {
  var connection = new webkitRTCPeerConnection(null, constraints);
  connection.onaddstream = function(event) {
    onRemoteStream(event, remoteView);
  };
  return connection;
}
| 174 | |
// Renders |localStream| in the local <video> element and remembers it in
// gLocalStream so later renegotiations can remove it again.
function displayAndRemember(localStream) {
  gLocalStream = localStream;
  $('local-view').src = URL.createObjectURL(localStream);
}
| 181 | |
// getUserMedia success callback for two-way calls: both connections send the
// local stream, then offer/answer negotiation is started.
function addStreamToBothConnectionsAndNegotiate(localStream) {
  displayAndRemember(localStream);
  [gFirstConnection, gSecondConnection].forEach(function(connection) {
    connection.addStream(localStream);
  });
  negotiate();
}
| 189 | |
// getUserMedia success callback for one-way calls: only gFirstConnection
// sends the local stream, then offer/answer negotiation is started.
function addStreamToTheFirstConnectionAndNegotiate(localStream) {
  displayAndRemember(localStream);
  gFirstConnection.addStream(localStream);
  negotiate();
}
| 196 | |
// Runs offer/answer negotiation between the two test connections.
function negotiate() {
  negotiateBetween(gFirstConnection, gSecondConnection);
}
| 200 | |
// onaddstream handler: remembers the incoming remote stream and renders it
// in the <video> element whose id is |target|.
function onRemoteStream(e, target) {
  console.log("Receiving remote stream...");
  if (gTestWithoutMsid && e.stream.id != "default") {
    failTest('a default remote stream was expected but instead ' +
             e.stream.id + ' was received.');
  }
  gRemoteStreams[target] = e.stream;
  $(target).src = URL.createObjectURL(e.stream);
}
| 212 | |
| 213 </script> | |
| 214 </head> | |
| 215 <body> | |
| 216 <table border="0"> | |
| 217 <tr> | |
| 218 <td><video width="320" height="240" id="local-view" style="display:none" | |
| 219 autoplay muted></video></td> | |
| 220 <td><video width="320" height="240" id="remote-view-1" | |
|
phoglund_chromium
2016/07/22 07:28:14
You don't need all of these video tags and canvases
| |
| 221 style="display:none" autoplay></video></td> | |
| 222 <td><video width="320" height="240" id="remote-view-2" | |
| 223 style="display:none" autoplay></video></td> | |
| 224 <td><video width="320" height="240" id="remote-view-3" | |
| 225 style="display:none" autoplay></video></td> | |
| 226 <td><video width="320" height="240" id="remote-view-4" | |
| 227 style="display:none" autoplay></video></td> | |
| 228 <!-- Canvases are named after their corresponding video elements. --> | |
| 229 <td><canvas width="320" height="240" id="remote-view-1-canvas" | |
| 230 style="display:none"></canvas></td> | |
| 231 <td><canvas width="320" height="240" id="remote-view-2-canvas" | |
| 232 style="display:none"></canvas></td> | |
| 233 <td><canvas width="320" height="240" id="remote-view-3-canvas" | |
| 234 style="display:none"></canvas></td> | |
| 235 <td><canvas width="320" height="240" id="remote-view-4-canvas" | |
| 236 style="display:none"></canvas></td> | |
| 237 </tr> | |
| 238 </table> | |
| 239 </body> | |
| 240 </html> | |
| OLD | NEW |