OLD | NEW |
---|---|
(Empty) | |
1 // Copyright 2016 The Chromium Authors. All rights reserved. | |
2 // Use of this source code is governed by a BSD-style license that can be | |
3 // found in the LICENSE file. | |
4 | |
5 #include "media/blink/webmediaplayer_cast_android.h" | |
6 | |
7 #include "gpu/GLES2/gl2extchromium.h" | |
8 #include "gpu/blink/webgraphicscontext3d_impl.h" | |
9 #include "gpu/command_buffer/client/gles2_interface.h" | |
10 #include "gpu/command_buffer/common/sync_token.h" | |
11 #include "media/base/android/media_common_android.h" | |
12 #include "media/base/bind_to_current_loop.h" | |
13 #include "media/blink/webmediaplayer_impl.h" | |
14 #include "media/blink/webmediaplayer_params.h" | |
15 #include "third_party/WebKit/public/platform/WebMediaPlayerClient.h" | |
16 #include "third_party/WebKit/public/web/WebDocument.h" | |
17 #include "third_party/WebKit/public/web/WebLocalFrame.h" | |
18 #include "third_party/skia/include/core/SkCanvas.h" | |
19 #include "third_party/skia/include/core/SkPaint.h" | |
20 #include "third_party/skia/include/core/SkTypeface.h" | |
21 #include "third_party/skia/include/gpu/GrContext.h" | |
22 #include "third_party/skia/include/gpu/SkGrPixelRef.h" | |
23 | |
24 #if defined(OS_ANDROID) | |
DaleCurtis
2016/01/14 02:01:41
Unnecessary?
hubbe
2016/01/14 18:05:00
I thought so too, but apparently other platforms w
DaleCurtis
2016/01/14 18:25:40
Probably you need to add the wmpcast_android to a
DaleCurtis
2016/01/15 19:55:44
Can you resolve this? Add a if (is_android) sectio
hubbe
2016/01/15 20:35:47
Done.
| |
25 | |
26 using gpu::gles2::GLES2Interface; | |
27 | |
28 namespace media { | |
29 | |
30 namespace { | |
31 // File-static function is to allow it to run even after WMPI is deleted. | |
32 void OnReleaseTexture(const WebMediaPlayerCast::GLContextCB& context_3d_cb, | |
33 GLuint texture_id, | |
34 const gpu::SyncToken& sync_token) { | |
35 GLES2Interface* gl = context_3d_cb.Run(); | |
36 if (!gl) | |
37 return; | |
38 | |
39 gl->WaitSyncTokenCHROMIUM(sync_token.GetConstData()); | |
40 gl->DeleteTextures(1, &texture_id); | |
41 // Flush to ensure that the texture gets deleted in a timely fashion. | |
42 gl->ShallowFlushCHROMIUM(); | |
43 } | |
44 | |
45 GLES2Interface* GLCBShim( | |
46 const WebMediaPlayerParams::Context3DCB& context_3d_cb) { | |
47 return context_3d_cb.Run().gl; | |
48 } | |
49 | |
50 } // namespace | |
51 | |
52 scoped_refptr<VideoFrame> WebMediaPlayerCast::MakeTextFrameForCast( | |
53 const std::string& remote_playback_message, | |
54 gfx::Size canvas_size, | |
55 const WebMediaPlayerCast::GLContextCB& context_3d_cb) { | |
56 SkBitmap bitmap; | |
57 bitmap.allocN32Pixels(canvas_size.width(), canvas_size.height()); | |
58 | |
59 // Create the canvas and draw the "Casting to <Chromecast>" text on it. | |
60 SkCanvas canvas(bitmap); | |
61 canvas.drawColor(SK_ColorBLACK); | |
62 | |
63 const SkScalar kTextSize(40); | |
64 const SkScalar kMinPadding(40); | |
65 | |
66 SkPaint paint; | |
67 paint.setAntiAlias(true); | |
68 paint.setFilterQuality(kHigh_SkFilterQuality); | |
69 paint.setColor(SK_ColorWHITE); | |
70 paint.setTypeface(SkTypeface::CreateFromName("sans", SkTypeface::kBold)); | |
71 paint.setTextSize(kTextSize); | |
72 | |
73 // Calculate the vertical margin from the top | |
74 SkPaint::FontMetrics font_metrics; | |
75 paint.getFontMetrics(&font_metrics); | |
76 SkScalar sk_vertical_margin = kMinPadding - font_metrics.fAscent; | |
77 | |
78 // Measure the width of the entire text to display | |
79 size_t display_text_width = paint.measureText(remote_playback_message.c_str(), | |
80 remote_playback_message.size()); | |
81 std::string display_text(remote_playback_message); | |
82 | |
83 if (display_text_width + (kMinPadding * 2) > canvas_size.width()) { | |
84 // The text is too long to fit in one line, truncate it and append ellipsis | |
85 // to the end. | |
86 | |
87 // First, figure out how much of the canvas the '...' will take up. | |
88 const std::string kTruncationEllipsis("\xE2\x80\xA6"); | |
89 SkScalar sk_ellipse_width = paint.measureText(kTruncationEllipsis.c_str(), | |
90 kTruncationEllipsis.size()); | |
91 | |
92 // Then calculate how much of the text can be drawn with the '...' appended | |
93 // to the end of the string. | |
94 SkScalar sk_max_original_text_width(canvas_size.width() - | |
95 (kMinPadding * 2) - sk_ellipse_width); | |
96 size_t sk_max_original_text_length = paint.breakText( | |
97 remote_playback_message.c_str(), remote_playback_message.size(), | |
98 sk_max_original_text_width); | |
99 | |
100 // Remove the part of the string that doesn't fit and append '...'. | |
101 display_text.erase( | |
102 sk_max_original_text_length, | |
103 remote_playback_message.size() - sk_max_original_text_length); | |
104 display_text.append(kTruncationEllipsis); | |
105 display_text_width = | |
106 paint.measureText(display_text.c_str(), display_text.size()); | |
107 } | |
108 | |
109 // Center the text horizontally. | |
110 SkScalar sk_horizontal_margin = | |
111 (canvas_size.width() - display_text_width) / 2.0; | |
112 canvas.drawText(display_text.c_str(), display_text.size(), | |
113 sk_horizontal_margin, sk_vertical_margin, paint); | |
114 | |
115 GLES2Interface* gl = context_3d_cb.Run(); | |
116 | |
117 // GPU Process crashed. | |
118 if (!gl) | |
119 return nullptr; | |
120 GLuint remote_playback_texture_id = 0; | |
121 gl->GenTextures(1, &remote_playback_texture_id); | |
122 GLuint texture_target = GL_TEXTURE_2D; | |
123 gl->BindTexture(texture_target, remote_playback_texture_id); | |
124 gl->TexParameteri(texture_target, GL_TEXTURE_MIN_FILTER, GL_LINEAR); | |
125 gl->TexParameteri(texture_target, GL_TEXTURE_MAG_FILTER, GL_LINEAR); | |
126 gl->TexParameteri(texture_target, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); | |
127 gl->TexParameteri(texture_target, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); | |
128 | |
129 { | |
130 SkAutoLockPixels lock(bitmap); | |
131 gl->TexImage2D(texture_target, 0 /* level */, GL_RGBA /* internalformat */, | |
132 bitmap.width(), bitmap.height(), 0 /* border */, | |
133 GL_RGBA /* format */, GL_UNSIGNED_BYTE /* type */, | |
134 bitmap.getPixels()); | |
135 } | |
136 | |
137 gpu::Mailbox texture_mailbox; | |
138 gl->GenMailboxCHROMIUM(texture_mailbox.name); | |
139 gl->ProduceTextureCHROMIUM(texture_target, texture_mailbox.name); | |
140 gl->Flush(); | |
141 gpu::SyncToken texture_mailbox_sync_token(gl->InsertSyncPointCHROMIUM()); | |
142 | |
143 return VideoFrame::WrapNativeTexture( | |
144 media::PIXEL_FORMAT_ARGB, | |
145 gpu::MailboxHolder(texture_mailbox, texture_mailbox_sync_token, | |
146 texture_target), | |
147 media::BindToCurrentLoop(base::Bind(&OnReleaseTexture, context_3d_cb, | |
148 remote_playback_texture_id)), | |
149 canvas_size /* coded_size */, gfx::Rect(canvas_size) /* visible_rect */, | |
150 canvas_size /* natural_size */, base::TimeDelta() /* timestamp */); | |
151 } | |
152 | |
153 WebMediaPlayerCast::WebMediaPlayerCast( | |
154 WebMediaPlayerImpl* impl, | |
155 blink::WebMediaPlayerClient* client, | |
156 const WebMediaPlayerParams::Context3DCB& context_3d_cb) | |
157 : webmediaplayer_(impl), client_(client), context_3d_cb_(context_3d_cb) {} | |
158 | |
159 WebMediaPlayerCast::~WebMediaPlayerCast() { | |
160 if (player_manager_) { | |
161 if (is_player_initialized_) | |
162 player_manager_->DestroyPlayer(player_id_); | |
163 | |
164 player_manager_->UnregisterMediaPlayer(player_id_); | |
165 } | |
166 } | |
167 | |
168 void WebMediaPlayerCast::Initialize(const GURL& url, | |
169 blink::WebLocalFrame* frame) { | |
170 player_manager_->Initialize(MEDIA_PLAYER_TYPE_URL, player_id_, url, | |
171 frame->document().firstPartyForCookies(), 0, | |
172 frame->document().url(), true); | |
173 is_player_initialized_ = true; | |
174 } | |
175 | |
176 void WebMediaPlayerCast::set_media_player_manager( | |
DaleCurtis
2016/01/14 02:01:41
SetMediaPlayerManager() no hacker_style() for non-
hubbe
2016/01/14 18:05:00
Done.
| |
177 RendererMediaPlayerManagerInterface* media_player_manager) { | |
178 player_manager_ = media_player_manager; | |
179 player_id_ = player_manager_->RegisterMediaPlayer(this); | |
180 } | |
181 | |
182 void WebMediaPlayerCast::requestRemotePlayback() { | |
183 player_manager_->Seek(player_id_, base::TimeDelta::FromSecondsD( | |
184 webmediaplayer_->currentTime())); | |
185 player_manager_->RequestRemotePlayback(player_id_); | |
186 } | |
187 | |
188 void WebMediaPlayerCast::requestRemotePlaybackControl() { | |
189 player_manager_->RequestRemotePlaybackControl(player_id_); | |
190 } | |
191 | |
192 // RendererMediaPlayerInterface implementation | |
DaleCurtis
2016/01/14 02:01:41
Remove.
hubbe
2016/01/14 18:05:00
Done.
| |
193 void WebMediaPlayerCast::OnMediaMetadataChanged(base::TimeDelta duration, | |
194 int width, | |
195 int height, | |
196 bool success) {} | |
197 | |
198 void WebMediaPlayerCast::OnPlaybackComplete() { | |
199 DVLOG(1) << __FUNCTION__; | |
200 webmediaplayer_->OnRemotePlaybackEnded(); | |
201 } | |
202 | |
203 void WebMediaPlayerCast::OnBufferingUpdate(int percentage) { | |
204 DVLOG(1) << __FUNCTION__; | |
205 } | |
206 | |
207 void WebMediaPlayerCast::OnSeekRequest(const base::TimeDelta& time_to_seek) { | |
208 DVLOG(1) << __FUNCTION__; | |
209 // DCHECK(main_thread_checker_.CalledOnValidThread()); | |
DaleCurtis
2016/01/14 02:01:41
?
hubbe
2016/01/14 18:05:00
Removed
| |
210 client_->requestSeek(time_to_seek.InSecondsF()); | |
211 } | |
212 | |
213 void WebMediaPlayerCast::OnSeekComplete(const base::TimeDelta& current_time) { | |
214 DVLOG(1) << __FUNCTION__; | |
215 remote_time_at_ = base::TimeTicks::Now(); | |
216 remote_time_ = current_time; | |
217 webmediaplayer_->OnPipelineSeeked(true, PIPELINE_OK); | |
218 } | |
219 | |
220 void WebMediaPlayerCast::OnMediaError(int error_type) { | |
221 DVLOG(1) << __FUNCTION__; | |
222 } | |
223 | |
224 void WebMediaPlayerCast::OnVideoSizeChanged(int width, int height) { | |
225 DVLOG(1) << __FUNCTION__; | |
226 } | |
227 | |
228 void WebMediaPlayerCast::OnTimeUpdate(base::TimeDelta current_timestamp, | |
229 base::TimeTicks current_time_ticks) { | |
230 DVLOG(1) << __FUNCTION__ << " " << current_timestamp.InSecondsF(); | |
231 remote_time_at_ = current_time_ticks; | |
232 remote_time_ = current_timestamp; | |
233 } | |
234 | |
235 void WebMediaPlayerCast::OnPlayerReleased() { | |
236 DVLOG(1) << __FUNCTION__; | |
237 } | |
238 | |
239 void WebMediaPlayerCast::OnConnectedToRemoteDevice( | |
240 const std::string& remote_playback_message) { | |
241 DVLOG(1) << __FUNCTION__; | |
242 remote_time_ = base::TimeDelta::FromSecondsD(webmediaplayer_->currentTime()); | |
243 // Set paused so that progress bar doesn't advance while remote playback | |
244 // is starting. | |
245 webmediaplayer_->pause(); | |
DaleCurtis
2016/01/14 02:01:41
Do we want the UI to reflect this pause?
hubbe
2016/01/14 18:05:00
I think so, it seems to make sense that the local
DaleCurtis
2016/01/14 18:25:40
You'll need to call playbackStateChanged() if so t
hubbe
2016/01/15 20:35:47
Actually, it seems the tests prefer it to be in "p
| |
246 is_remote_ = true; | |
247 remote_playback_message_ = remote_playback_message; | |
248 webmediaplayer_->SuspendForRemote(); | |
249 client_->connectedToRemoteDevice(); | |
250 } | |
251 | |
252 double WebMediaPlayerCast::currentTime() const { | |
253 base::TimeDelta ret = remote_time_; | |
254 if (!paused_) { | |
255 ret += base::TimeTicks::Now() - remote_time_at_; | |
256 } | |
257 return ret.InSecondsF(); | |
258 } | |
259 | |
260 void WebMediaPlayerCast::play() { | |
261 if (!paused_) | |
262 return; | |
263 | |
264 player_manager_->Start(player_id_); | |
265 remote_time_at_ = base::TimeTicks::Now(); | |
266 paused_ = false; | |
267 } | |
268 void WebMediaPlayerCast::pause() { | |
DaleCurtis
2016/01/14 02:01:41
insert line above.
hubbe
2016/01/14 18:05:00
Done.
| |
269 player_manager_->Pause(player_id_, true); | |
270 } | |
271 | |
272 void WebMediaPlayerCast::seek(base::TimeDelta t) { | |
273 should_notify_time_changed_ = true; | |
274 player_manager_->Seek(player_id_, t); | |
275 } | |
276 | |
277 void WebMediaPlayerCast::OnDisconnectedFromRemoteDevice() { | |
278 DVLOG(1) << __FUNCTION__; | |
279 is_remote_ = false; | |
280 double t = currentTime(); | |
281 if (t + media::kTimeUpdateInterval * 2 / 1000 > webmediaplayer_->duration()) { | |
282 t = webmediaplayer_->duration(); | |
283 } | |
284 webmediaplayer_->OnDisconnectedFromRemoteDevice(t); | |
285 } | |
286 | |
287 void WebMediaPlayerCast::OnDidExitFullscreen() { | |
288 DVLOG(1) << __FUNCTION__; | |
289 } | |
290 | |
291 void WebMediaPlayerCast::OnMediaPlayerPlay() { | |
292 DVLOG(1) << __FUNCTION__ << " is_remote_ = " << is_remote_; | |
293 if (is_remote_ && paused_) { | |
294 paused_ = false; | |
295 remote_time_at_ = base::TimeTicks::Now(); | |
296 client_->playbackStateChanged(); | |
297 } | |
298 // Blink expects a timeChanged() in response to a seek(). | |
299 if (should_notify_time_changed_) | |
300 client_->timeChanged(); | |
301 } | |
302 | |
303 void WebMediaPlayerCast::OnMediaPlayerPause() { | |
304 DVLOG(1) << __FUNCTION__ << " is_remote_ = " << is_remote_; | |
305 if (is_remote_ && !paused_) { | |
306 paused_ = true; | |
307 client_->playbackStateChanged(); | |
308 } | |
309 } | |
310 | |
311 void WebMediaPlayerCast::OnRemoteRouteAvailabilityChanged( | |
312 bool routes_available) { | |
313 DVLOG(1) << __FUNCTION__; | |
314 client_->remoteRouteAvailabilityChanged(routes_available); | |
315 } | |
316 | |
317 void WebMediaPlayerCast::SuspendAndReleaseResources() {} | |
318 void WebMediaPlayerCast::OnWaitingForDecryptionKey() {} | |
319 | |
320 bool WebMediaPlayerCast::hasVideo() const { | |
321 return true; | |
322 } | |
323 | |
324 bool WebMediaPlayerCast::paused() const { | |
325 return paused_; | |
326 } | |
327 | |
#if defined(VIDEO_HOLE)
// Video-hole punching is not used for the cast banner; report no boundary.
bool WebMediaPlayerCast::UpdateBoundaryRectangle() {
  return false;
}

const gfx::RectF WebMediaPlayerCast::GetBoundaryRectangle() {
  return gfx::RectF();
}
#endif  // defined(VIDEO_HOLE)
336 | |
337 scoped_refptr<VideoFrame> WebMediaPlayerCast::GetCastingBanner() { | |
338 DVLOG(1) << __FUNCTION__; | |
339 // DCHECK(main_thread_checker_.CalledOnValidThread()); | |
DaleCurtis
2016/01/14 02:01:41
?
hubbe
2016/01/14 18:05:00
Removed.
| |
340 | |
341 // TODO(johnme): Should redraw this frame if the layer bounds change; but | |
342 // there seems no easy way to listen for the layer resizing (as opposed to | |
343 // OnVideoSizeChanged, which is when the frame sizes of the video file | |
344 // change). Perhaps have to poll (on main thread of course)? | |
345 gfx::Size canvas_size = webmediaplayer_->GetCanvasSize(); | |
346 if (!canvas_size.width()) | |
347 return nullptr; | |
348 | |
349 return MakeTextFrameForCast(remote_playback_message_, canvas_size, | |
350 base::Bind(&GLCBShim, context_3d_cb_)); | |
351 } | |
352 | |
353 } // namespace media | |
354 | |
355 #endif // OS_ANDROID | |
OLD | NEW |