OLD | NEW |
---|---|
(Empty) | |
1 // Copyright 2016 The Chromium Authors. All rights reserved. | |
2 // Use of this source code is governed by a BSD-style license that can be | |
3 // found in the LICENSE file. | |
4 | |
5 #include "media/blink/webmediaplayer_cast_android.h" | |
6 | |
7 #include "gpu/GLES2/gl2extchromium.h" | |
8 #include "gpu/blink/webgraphicscontext3d_impl.h" | |
9 #include "gpu/command_buffer/client/gles2_interface.h" | |
10 #include "gpu/command_buffer/common/sync_token.h" | |
11 #include "media/base/android/media_common_android.h" | |
12 #include "media/base/bind_to_current_loop.h" | |
13 #include "media/blink/webmediaplayer_impl.h" | |
14 #include "media/blink/webmediaplayer_params.h" | |
15 #include "third_party/WebKit/public/platform/WebMediaPlayerClient.h" | |
16 #include "third_party/WebKit/public/web/WebDocument.h" | |
17 #include "third_party/WebKit/public/web/WebLocalFrame.h" | |
18 #include "third_party/skia/include/core/SkCanvas.h" | |
19 #include "third_party/skia/include/core/SkPaint.h" | |
20 #include "third_party/skia/include/core/SkTypeface.h" | |
21 #include "third_party/skia/include/gpu/GrContext.h" | |
22 #include "third_party/skia/include/gpu/SkGrPixelRef.h" | |
23 | |
24 #if defined(OS_ANDROID) | |
25 | |
26 using gpu::gles2::GLES2Interface; | |
27 | |
28 namespace media { | |
29 | |
30 namespace { | |
31 // File-static function is to allow it to run even after WMPI is deleted. | |
32 void OnReleaseTexture(const WebMediaPlayerParams::Context3DCB& context_3d_cb, | |
33 GLuint texture_id, | |
34 const gpu::SyncToken& sync_token) { | |
35 Context3D context_3d; | |
36 if (!context_3d_cb.is_null()) | |
37 context_3d = context_3d_cb.Run(); | |
38 // GPU Process crashed. | |
39 if (!context_3d.gl) | |
40 return; | |
41 | |
42 GLES2Interface* gl = context_3d.gl; | |
43 gl->WaitSyncTokenCHROMIUM(sync_token.GetConstData()); | |
44 gl->DeleteTextures(1, &texture_id); | |
45 // Flush to ensure that the texture gets deleted in a timely fashion. | |
46 gl->ShallowFlushCHROMIUM(); | |
47 } | |
48 | |
49 } // namespace | |
50 | |
// Binds this cast helper to its owning WebMediaPlayerImpl, the Blink client
// used for remote-playback notifications, and the callback that supplies the
// 3D context used for placeholder-frame texture uploads.
WebMediaPlayerCast::WebMediaPlayerCast(
    WebMediaPlayerImpl* impl,
    blink::WebMediaPlayerClient* client,
    const WebMediaPlayerParams::Context3DCB& context_3d_cb)
    : webmediaplayer_(impl), client_(client), context_3d_cb_(context_3d_cb) {}
56 | |
WebMediaPlayerCast::~WebMediaPlayerCast() {
  // Tear-down only applies if a manager was attached via
  // set_media_player_manager(); DestroyPlayer is only needed once
  // Initialize() has actually created the browser-side player.
  if (player_manager_) {
    if (is_player_initialized_)
      player_manager_->DestroyPlayer(player_id_);

    player_manager_->UnregisterMediaPlayer(player_id_);
  }
}
65 | |
// Creates the browser-side URL media player for |url| within |frame|.
// Requires set_media_player_manager() to have been called first —
// player_manager_ is dereferenced without a null check.
void WebMediaPlayerCast::Initialize(const GURL& url,
                                    blink::WebLocalFrame* frame) {
  player_manager_->Initialize(MEDIA_PLAYER_TYPE_URL, player_id_, url,
                              frame->document().firstPartyForCookies(), 0,
                              frame->document().url(), true);
  is_player_initialized_ = true;
}
73 | |
// Attaches the renderer-side player manager and registers this object with
// it, obtaining the player id used on all subsequent manager calls.
void WebMediaPlayerCast::set_media_player_manager(
    RendererMediaPlayerManagerInterface* media_player_manager) {
  player_manager_ = media_player_manager;
  player_id_ = player_manager_->RegisterMediaPlayer(this);
}
79 | |
// Starts remote playback. Seeks the remote player to the current local
// position first, so the remote device resumes where local playback was.
void WebMediaPlayerCast::requestRemotePlayback() {
  player_manager_->Seek(player_id_, base::TimeDelta::FromSecondsD(
                                        webmediaplayer_->currentTime()));
  player_manager_->RequestRemotePlayback(player_id_);
}
85 | |
// Forwards a request to show the remote playback control UI to the manager.
void WebMediaPlayerCast::requestRemotePlaybackControl() {
  player_manager_->RequestRemotePlaybackControl(player_id_);
}
89 | |
// RendererMediaPlayerInterface implementation
// Intentionally a no-op: metadata changes are not acted on here.
void WebMediaPlayerCast::OnMediaMetadataChanged(base::TimeDelta duration,
                                                int width,
                                                int height,
                                                bool success) {}
// Remote playback reached end-of-stream; forward to the owning player.
void WebMediaPlayerCast::OnPlaybackComplete() {
  DVLOG(1) << __FUNCTION__;
  webmediaplayer_->OnRemotePlaybackEnded();
}
// Log-only stub; buffering progress from the remote player is ignored.
void WebMediaPlayerCast::OnBufferingUpdate(int percentage) {
  DVLOG(1) << __FUNCTION__;
}
// The remote device asked for a seek; forward the target position to Blink.
void WebMediaPlayerCast::OnSeekRequest(const base::TimeDelta& time_to_seek) {
  DVLOG(1) << __FUNCTION__;
  // DCHECK(main_thread_checker_.CalledOnValidThread());
  client_->requestSeek(time_to_seek.InSecondsF());
}
// Records a fresh (wall-clock, remote-position) sample used by currentTime()
// to extrapolate the playback position, then notifies the pipeline.
void WebMediaPlayerCast::OnSeekComplete(const base::TimeDelta& current_time) {
  DVLOG(1) << __FUNCTION__;
  remote_time_at_ = base::TimeTicks::Now();
  remote_time_ = current_time;
  webmediaplayer_->OnPipelineSeeked(true, PIPELINE_OK);
}
113 | |
// Log-only stub; remote player errors are not surfaced from here.
void WebMediaPlayerCast::OnMediaError(int error_type) {
  DVLOG(1) << __FUNCTION__;
}
// Log-only stub; the local player keeps its own notion of video size.
void WebMediaPlayerCast::OnVideoSizeChanged(int width, int height) {
  DVLOG(1) << __FUNCTION__;
}
120 | |
// Periodic position report from the remote player. Stores the
// (wall-clock, remote-position) pair that currentTime() extrapolates from.
void WebMediaPlayerCast::OnTimeUpdate(base::TimeDelta current_timestamp,
                                      base::TimeTicks current_time_ticks) {
  DVLOG(1) << __FUNCTION__ << " " << current_timestamp.InSecondsF();
  remote_time_at_ = current_time_ticks;
  remote_time_ = current_timestamp;
}
127 | |
// Log-only stub; no local state to release when the player is torn down.
void WebMediaPlayerCast::OnPlayerReleased() {
  DVLOG(1) << __FUNCTION__;
}
131 | |
// Remote playback session established. Captures the local position as the
// starting remote position, pauses local playback, switches to remote mode,
// and shows the "casting" placeholder frame.
void WebMediaPlayerCast::OnConnectedToRemoteDevice(
    const std::string& remote_playback_message) {
  DVLOG(1) << __FUNCTION__;
  remote_time_ = base::TimeDelta::FromSecondsD(webmediaplayer_->currentTime());
  // Set paused so that progress bar doesn't advance while remote playback
  // is starting.
  webmediaplayer_->pause();
  is_remote_ = true;
  DrawRemotePlaybackText(remote_playback_message);
  client_->connectedToRemoteDevice();
}
143 | |
144 double WebMediaPlayerCast::currentTime() const { | |
145 base::TimeDelta ret = remote_time_; | |
146 if (!paused_) { | |
147 ret += base::TimeTicks::Now() - remote_time_at_; | |
148 } | |
149 return ret.InSecondsF(); | |
150 } | |
151 | |
// Resumes remote playback. No-op when already playing; otherwise starts the
// remote player and re-bases the extrapolation clock (remote_time_at_) at
// the moment of resume so currentTime() advances from now.
void WebMediaPlayerCast::play() {
  if (!paused_)
    return;

  player_manager_->Start(player_id_);
  remote_time_at_ = base::TimeTicks::Now();
  paused_ = false;
}
// Pauses the remote player ('true' marks this as media-related/user pause).
// paused_ itself is not flipped here — presumably that happens when the
// OnMediaPlayerPause() notification arrives; verify against the manager.
void WebMediaPlayerCast::pause() {
  player_manager_->Pause(player_id_, true);
}
163 | |
// Seeks the remote player. Arms should_notify_time_changed_ so that the next
// play notification fires the timeChanged() Blink expects after a seek.
void WebMediaPlayerCast::seek(base::TimeDelta t) {
  should_notify_time_changed_ = true;
  player_manager_->Seek(player_id_, t);
}
168 | |
169 void WebMediaPlayerCast::OnDisconnectedFromRemoteDevice() { | |
170 DVLOG(1) << __FUNCTION__; | |
171 is_remote_ = false; | |
172 double t = currentTime(); | |
173 if (t + media::kTimeUpdateInterval * 2 / 1000 > webmediaplayer_->duration()) { | |
174 t = webmediaplayer_->duration(); | |
175 } | |
176 webmediaplayer_->OnDisconnectedFromRemoteDevice(t); | |
177 } | |
178 | |
// Log-only stub; fullscreen exit needs no handling in remote mode.
void WebMediaPlayerCast::OnDidExitFullscreen() {
  DVLOG(1) << __FUNCTION__;
}
// The remote player reported it started playing: update paused state, re-base
// the extrapolation clock, and tell Blink the playback state changed.
void WebMediaPlayerCast::OnMediaPlayerPlay() {
  DVLOG(1) << __FUNCTION__ << " is_remote_ = " << is_remote_;
  if (is_remote_ && paused_) {
    paused_ = false;
    remote_time_at_ = base::TimeTicks::Now();
    client_->playbackStateChanged();
  }
  // Blink expects a timeChanged() in response to a seek().
  // NOTE(review): should_notify_time_changed_ is never cleared here, so every
  // later play notification after a seek() re-fires timeChanged() — confirm
  // this is intended (or that it is reset elsewhere in the class).
  if (should_notify_time_changed_)
    client_->timeChanged();
}
// The remote player reported it paused: record it and notify Blink, but only
// when this is an actual transition during remote playback.
void WebMediaPlayerCast::OnMediaPlayerPause() {
  DVLOG(1) << __FUNCTION__ << " is_remote_ = " << is_remote_;
  if (is_remote_ && !paused_) {
    paused_ = true;
    client_->playbackStateChanged();
  }
}
// Forwards cast-route availability straight to Blink (controls the cast
// button visibility on the media element).
void WebMediaPlayerCast::OnRemoteRouteAvailabilityChanged(
    bool routes_available) {
  DVLOG(1) << __FUNCTION__;
  client_->remoteRouteAvailabilityChanged(routes_available);
}
205 | |
// Intentionally empty: this object holds no releasable media resources.
void WebMediaPlayerCast::ReleaseMediaResources() {}
// Intentionally empty: encrypted media is not handled on the cast path.
void WebMediaPlayerCast::OnWaitingForDecryptionKey() {}
// Always reports video present — the cast placeholder frame is video.
bool WebMediaPlayerCast::hasVideo() const {
  return true;
}
// Whether remote playback is currently paused (as last reported/requested).
bool WebMediaPlayerCast::paused() const {
  return paused_;
}
214 | |
#if defined(VIDEO_HOLE)
// Video-hole punching is not used during remote playback; report no change
// and an empty rectangle.
bool WebMediaPlayerCast::UpdateBoundaryRectangle() {
  return false;
}
const gfx::RectF WebMediaPlayerCast::GetBoundaryRectangle() {
  return gfx::RectF();
}
#endif  // defined(VIDEO_HOLE)
223 | |
// Renders |remote_playback_message| (e.g. "Casting to <device>") onto a
// black frame sized to the current canvas, uploads it as a GL texture, wraps
// it in a mailbox-backed VideoFrame, and hands it to WebMediaPlayerImpl to
// display while remote playback runs.
void WebMediaPlayerCast::DrawRemotePlaybackText(
    const std::string& remote_playback_message) {
  DVLOG(1) << __FUNCTION__;
  // DCHECK(main_thread_checker_.CalledOnValidThread());

  // TODO(johnme): Should redraw this frame if the layer bounds change; but
  // there seems no easy way to listen for the layer resizing (as opposed to
  // OnVideoSizeChanged, which is when the frame sizes of the video file
  // change). Perhaps have to poll (on main thread of course)?
  gfx::Size canvas_size = webmediaplayer_->GetCanvasSize();
  if (!canvas_size.width())
    return;

  SkBitmap bitmap;
  bitmap.allocN32Pixels(canvas_size.width(), canvas_size.height());

  // Create the canvas and draw the "Casting to <Chromecast>" text on it.
  SkCanvas canvas(bitmap);
  canvas.drawColor(SK_ColorBLACK);

  const SkScalar kTextSize(40);
  const SkScalar kMinPadding(40);

  SkPaint paint;
  paint.setAntiAlias(true);
  paint.setFilterQuality(kHigh_SkFilterQuality);
  paint.setColor(SK_ColorWHITE);
  paint.setTypeface(SkTypeface::CreateFromName("sans", SkTypeface::kBold));
  paint.setTextSize(kTextSize);

  // Calculate the vertical margin from the top
  SkPaint::FontMetrics font_metrics;
  paint.getFontMetrics(&font_metrics);
  // fAscent is negative, so subtracting it places the text baseline
  // kMinPadding plus one ascent below the top edge.
  SkScalar sk_vertical_margin = kMinPadding - font_metrics.fAscent;

  // Measure the width of the entire text to display
  // NOTE(review): measureText() returns SkScalar; storing it in size_t
  // truncates any fractional width — presumably acceptable here, confirm.
  size_t display_text_width = paint.measureText(remote_playback_message.c_str(),
                                                remote_playback_message.size());
  std::string display_text(remote_playback_message);

  if (display_text_width + (kMinPadding * 2) > canvas_size.width()) {
    // The text is too long to fit in one line, truncate it and append ellipsis
    // to the end.

    // First, figure out how much of the canvas the '...' will take up.
    const std::string kTruncationEllipsis("\xE2\x80\xA6");
    SkScalar sk_ellipse_width = paint.measureText(kTruncationEllipsis.c_str(),
                                                  kTruncationEllipsis.size());

    // Then calculate how much of the text can be drawn with the '...' appended
    // to the end of the string.
    SkScalar sk_max_original_text_width(canvas_size.width() -
                                        (kMinPadding * 2) - sk_ellipse_width);
    size_t sk_max_original_text_length = paint.breakText(
        remote_playback_message.c_str(), remote_playback_message.size(),
        sk_max_original_text_width);

    // Remove the part of the string that doesn't fit and append '...'.
    display_text.erase(
        sk_max_original_text_length,
        remote_playback_message.size() - sk_max_original_text_length);
    display_text.append(kTruncationEllipsis);
    display_text_width =
        paint.measureText(display_text.c_str(), display_text.size());
  }

  // Center the text horizontally.
  SkScalar sk_horizontal_margin =
      (canvas_size.width() - display_text_width) / 2.0;
  canvas.drawText(display_text.c_str(), display_text.size(),
                  sk_horizontal_margin, sk_vertical_margin, paint);

  // Obtain a GL context; a null interface means the GPU process crashed.
  Context3D context_3d;
  if (!context_3d_cb_.is_null())
    context_3d = context_3d_cb_.Run();
  if (!context_3d.gl)
    return;

  // Upload the rendered bitmap into a new 2D texture.
  GLES2Interface* gl = context_3d.gl;
  GLuint remote_playback_texture_id = 0;
  gl->GenTextures(1, &remote_playback_texture_id);
  GLuint texture_target = GL_TEXTURE_2D;
  gl->BindTexture(texture_target, remote_playback_texture_id);
  gl->TexParameteri(texture_target, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
  gl->TexParameteri(texture_target, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
  gl->TexParameteri(texture_target, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
  gl->TexParameteri(texture_target, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

  {
    // Pixels must stay locked for the duration of the TexImage2D upload.
    SkAutoLockPixels lock(bitmap);
    gl->TexImage2D(texture_target, 0 /* level */, GL_RGBA /* internalformat */,
                   bitmap.width(), bitmap.height(), 0 /* border */,
                   GL_RGBA /* format */, GL_UNSIGNED_BYTE /* type */,
                   bitmap.getPixels());
  }

  // Publish the texture through a mailbox so the compositor can consume it;
  // the sync token orders the consumer's reads after the upload above.
  gpu::Mailbox texture_mailbox;
  gl->GenMailboxCHROMIUM(texture_mailbox.name);
  gl->ProduceTextureCHROMIUM(texture_target, texture_mailbox.name);
  gl->Flush();
  gpu::SyncToken texture_mailbox_sync_token(gl->InsertSyncPointCHROMIUM());

  // Wrap the texture in a VideoFrame; OnReleaseTexture deletes the texture
  // (on the current loop) once the frame is no longer referenced.
  scoped_refptr<VideoFrame> new_frame = VideoFrame::WrapNativeTexture(
      media::PIXEL_FORMAT_ARGB,
      gpu::MailboxHolder(texture_mailbox, texture_mailbox_sync_token,
                         texture_target),
      media::BindToCurrentLoop(base::Bind(&OnReleaseTexture, context_3d_cb_,
                                          remote_playback_texture_id)),
      canvas_size /* coded_size */, gfx::Rect(canvas_size) /* visible_rect */,
      canvas_size /* natural_size */, base::TimeDelta() /* timestamp */);

  webmediaplayer_->SuspendForRemote(new_frame);
}
337 | |
338 } // namespace media | |
339 | |
340 #endif | |
OLD | NEW |