Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(60)

Side by Side Diff: media/gpu/ipc/client/gpu_jpeg_decode_accelerator_host.cc

Issue 2783273002: Fix DCHECK getting hit on destruction of GpuJpegDecodeAcceleratorHost (Closed)
Patch Set: Created 3 years, 8 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « no previous file | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2015 The Chromium Authors. All rights reserved. 1 // Copyright 2015 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "media/gpu/ipc/client/gpu_jpeg_decode_accelerator_host.h" 5 #include "media/gpu/ipc/client/gpu_jpeg_decode_accelerator_host.h"
6 6
7 #include <stddef.h> 7 #include <stddef.h>
8 8
9 #include "base/bind.h" 9 #include "base/bind.h"
10 #include "base/logging.h" 10 #include "base/logging.h"
(...skipping 13 matching lines...) Expand all
24 // Class to receive AcceleratedJpegDecoderHostMsg_DecodeAck IPC message on IO 24 // Class to receive AcceleratedJpegDecoderHostMsg_DecodeAck IPC message on IO
25 // thread. This does something very similar to what MessageFilter usually does. It is not 25 // thread. This does something very similar to what MessageFilter usually does. It is not
26 // MessageFilter because GpuChannelHost doesn't support AddFilter. 26 // MessageFilter because GpuChannelHost doesn't support AddFilter.
27 class GpuJpegDecodeAcceleratorHost::Receiver : public IPC::Listener, 27 class GpuJpegDecodeAcceleratorHost::Receiver : public IPC::Listener,
28 public base::NonThreadSafe { 28 public base::NonThreadSafe {
29 public: 29 public:
30 Receiver(Client* client, 30 Receiver(Client* client,
31 const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner) 31 const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner)
32 : client_(client), 32 : client_(client),
33 io_task_runner_(io_task_runner), 33 io_task_runner_(io_task_runner),
34 weak_factory_for_io_(this) { 34 weak_factory_for_io_(
35 base::MakeUnique<base::WeakPtrFactory<Receiver>>(this)) {
sandersd (OOO until July 31) 2017/03/30 20:58:50 I recommend creating a WeakPtr here in the constru
chfremer 2017/03/30 21:19:45 Done.
35 DCHECK(CalledOnValidThread()); 36 DCHECK(CalledOnValidThread());
36 } 37 }
37 38
38 ~Receiver() override { DCHECK(CalledOnValidThread()); } 39 ~Receiver() override {
40 DCHECK(CalledOnValidThread());
41 if (weak_factory_for_io_->HasWeakPtrs())
sandersd (OOO until July 31) 2017/03/30 20:58:49 I recommend always posting this task, it would be
chfremer 2017/03/30 21:19:45 Done.
42 io_task_runner_->DeleteSoon(FROM_HERE, weak_factory_for_io_.release());
43 }
39 44
40 void InvalidateWeakPtr(base::WaitableEvent* event) { 45 void InvalidateWeakPtrOnIOThread(base::WaitableEvent* event) {
41 DCHECK(io_task_runner_->BelongsToCurrentThread()); 46 DCHECK(io_task_runner_->BelongsToCurrentThread());
42 weak_factory_for_io_.InvalidateWeakPtrs(); 47 weak_factory_for_io_->InvalidateWeakPtrs();
43 event->Signal(); 48 event->Signal();
44 } 49 }
45 50
46 // IPC::Listener implementation. 51 // IPC::Listener implementation.
47 void OnChannelError() override { 52 void OnChannelError() override {
48 DCHECK(io_task_runner_->BelongsToCurrentThread()); 53 DCHECK(io_task_runner_->BelongsToCurrentThread());
49 54
50 OnDecodeAck(kInvalidBitstreamBufferId, PLATFORM_FAILURE); 55 OnDecodeAck(kInvalidBitstreamBufferId, PLATFORM_FAILURE);
51 } 56 }
52 57
53 bool OnMessageReceived(const IPC::Message& msg) override { 58 bool OnMessageReceived(const IPC::Message& msg) override {
54 DCHECK(io_task_runner_->BelongsToCurrentThread()); 59 DCHECK(io_task_runner_->BelongsToCurrentThread());
55 60
56 bool handled = true; 61 bool handled = true;
57 IPC_BEGIN_MESSAGE_MAP(GpuJpegDecodeAcceleratorHost::Receiver, msg) 62 IPC_BEGIN_MESSAGE_MAP(GpuJpegDecodeAcceleratorHost::Receiver, msg)
58 IPC_MESSAGE_HANDLER(AcceleratedJpegDecoderHostMsg_DecodeAck, OnDecodeAck) 63 IPC_MESSAGE_HANDLER(AcceleratedJpegDecoderHostMsg_DecodeAck, OnDecodeAck)
59 IPC_MESSAGE_UNHANDLED(handled = false) 64 IPC_MESSAGE_UNHANDLED(handled = false)
60 IPC_END_MESSAGE_MAP() 65 IPC_END_MESSAGE_MAP()
61 DCHECK(handled); 66 DCHECK(handled);
62 return handled; 67 return handled;
63 } 68 }
64 69
65 base::WeakPtr<IPC::Listener> AsWeakPtrForIO() { 70 base::WeakPtr<IPC::Listener> AsWeakPtrForIO() {
66 return weak_factory_for_io_.GetWeakPtr(); 71 return weak_factory_for_io_->GetWeakPtr();
67 } 72 }
68 73
69 private: 74 private:
70 void OnDecodeAck(int32_t bitstream_buffer_id, Error error) { 75 void OnDecodeAck(int32_t bitstream_buffer_id, Error error) {
71 DCHECK(io_task_runner_->BelongsToCurrentThread()); 76 DCHECK(io_task_runner_->BelongsToCurrentThread());
72 77
73 if (!client_) 78 if (!client_)
74 return; 79 return;
75 80
76 if (error == JpegDecodeAccelerator::NO_ERRORS) { 81 if (error == JpegDecodeAccelerator::NO_ERRORS) {
77 client_->VideoFrameReady(bitstream_buffer_id); 82 client_->VideoFrameReady(bitstream_buffer_id);
78 } else { 83 } else {
79 // Only NotifyError once. 84 // Only NotifyError once.
80 // Client::NotifyError() may trigger deletion of |this| (on another 85 // Client::NotifyError() may trigger deletion of |this| (on another
81 // thread), so calling it needs to be the last thing done on this stack! 86 // thread), so calling it needs to be the last thing done on this stack!
82 JpegDecodeAccelerator::Client* client = nullptr; 87 JpegDecodeAccelerator::Client* client = nullptr;
83 std::swap(client, client_); 88 std::swap(client, client_);
84 client->NotifyError(bitstream_buffer_id, error); 89 client->NotifyError(bitstream_buffer_id, error);
85 } 90 }
86 } 91 }
87 92
88 Client* client_; 93 Client* client_;
89 94
90 // GPU IO task runner. 95 // GPU IO task runner.
91 scoped_refptr<base::SingleThreadTaskRunner> io_task_runner_; 96 scoped_refptr<base::SingleThreadTaskRunner> io_task_runner_;
92 97
93 // Weak pointers will be invalidated on IO thread. 98 // Weak pointers will be invalidated on IO thread.
94 base::WeakPtrFactory<Receiver> weak_factory_for_io_; 99 std::unique_ptr<base::WeakPtrFactory<Receiver>> weak_factory_for_io_;
95 100
96 DISALLOW_COPY_AND_ASSIGN(Receiver); 101 DISALLOW_COPY_AND_ASSIGN(Receiver);
97 }; 102 };
98 103
99 GpuJpegDecodeAcceleratorHost::GpuJpegDecodeAcceleratorHost( 104 GpuJpegDecodeAcceleratorHost::GpuJpegDecodeAcceleratorHost(
100 scoped_refptr<gpu::GpuChannelHost> channel, 105 scoped_refptr<gpu::GpuChannelHost> channel,
101 int32_t route_id, 106 int32_t route_id,
102 const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner) 107 const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner)
103 : channel_(std::move(channel)), 108 : channel_(std::move(channel)),
104 decoder_route_id_(route_id), 109 decoder_route_id_(route_id),
105 io_task_runner_(io_task_runner) { 110 io_task_runner_(io_task_runner) {
106 DCHECK(channel_); 111 DCHECK(channel_);
107 DCHECK_NE(decoder_route_id_, MSG_ROUTING_NONE); 112 DCHECK_NE(decoder_route_id_, MSG_ROUTING_NONE);
108 } 113 }
109 114
110 GpuJpegDecodeAcceleratorHost::~GpuJpegDecodeAcceleratorHost() { 115 GpuJpegDecodeAcceleratorHost::~GpuJpegDecodeAcceleratorHost() {
111 DCHECK(CalledOnValidThread()); 116 DCHECK(CalledOnValidThread());
112 Send(new AcceleratedJpegDecoderMsg_Destroy(decoder_route_id_)); 117 Send(new AcceleratedJpegDecoderMsg_Destroy(decoder_route_id_));
113 118
114 if (receiver_) { 119 if (receiver_) {
115 channel_->RemoveRoute(decoder_route_id_); 120 channel_->RemoveRoute(decoder_route_id_);
116 121
117 // Invalidate weak ptr of |receiver_|. After that, no more messages will be 122 // Invalidate weak ptr of |receiver_|. After that, no more messages will be
118 // routed to |receiver_| on IO thread. 123 // routed to |receiver_| on IO thread.
119 base::WaitableEvent event(base::WaitableEvent::ResetPolicy::AUTOMATIC, 124 base::WaitableEvent event(base::WaitableEvent::ResetPolicy::AUTOMATIC,
120 base::WaitableEvent::InitialState::NOT_SIGNALED); 125 base::WaitableEvent::InitialState::NOT_SIGNALED);
121 bool task_expected_to_run = io_task_runner_->PostTask( 126 bool task_expected_to_run = io_task_runner_->PostTask(
122 FROM_HERE, base::Bind(&Receiver::InvalidateWeakPtr, 127 FROM_HERE, base::Bind(&Receiver::InvalidateWeakPtrOnIOThread,
123 base::Unretained(receiver_.get()), 128 base::Unretained(receiver_.get()),
sandersd (OOO until July 31) 2017/03/30 20:58:50 I'd add a comment here explaining that Unretained
chfremer 2017/03/30 21:19:45 Done.
124 base::Unretained(&event))); 129 base::Unretained(&event)));
125 // If the current call is happening during the browser shutdown, the 130 // If the current call is happening during the browser shutdown, the
126 // |io_task_runner_| may no longer be accepting tasks. 131 // |io_task_runner_| may no longer be accepting tasks.
127 if (task_expected_to_run) 132 if (task_expected_to_run)
128 event.Wait(); 133 event.Wait();
129 } 134 }
130 } 135 }
131 136
132 bool GpuJpegDecodeAcceleratorHost::Initialize( 137 bool GpuJpegDecodeAcceleratorHost::Initialize(
133 JpegDecodeAccelerator::Client* client) { 138 JpegDecodeAccelerator::Client* client) {
(...skipping 65 matching lines...) Expand 10 before | Expand all | Expand 10 after
199 if (!channel_->Send(message)) { 204 if (!channel_->Send(message)) {
200 DLOG(ERROR) << "Send(" << message->type() << ") failed"; 205 DLOG(ERROR) << "Send(" << message->type() << ") failed";
201 } 206 }
202 } 207 }
203 208
204 base::WeakPtr<IPC::Listener> GpuJpegDecodeAcceleratorHost::GetReceiver() { 209 base::WeakPtr<IPC::Listener> GpuJpegDecodeAcceleratorHost::GetReceiver() {
205 return receiver_->AsWeakPtrForIO(); 210 return receiver_->AsWeakPtrForIO();
206 } 211 }
207 212
208 } // namespace media 213 } // namespace media
OLDNEW
« no previous file with comments | « no previous file | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698