OLD | NEW |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef GPU_COMMAND_BUFFER_SERVICE_GPU_SCHEDULER_H_ | 5 #ifndef GPU_COMMAND_BUFFER_SERVICE_GPU_SCHEDULER_H_ |
6 #define GPU_COMMAND_BUFFER_SERVICE_GPU_SCHEDULER_H_ | 6 #define GPU_COMMAND_BUFFER_SERVICE_GPU_SCHEDULER_H_ |
7 | 7 |
8 #include <queue> | 8 #include <queue> |
9 | 9 |
10 #include "base/atomicops.h" | 10 #include "base/atomicops.h" |
11 #include "base/atomic_ref_count.h" | 11 #include "base/atomic_ref_count.h" |
12 #include "base/callback.h" | 12 #include "base/callback.h" |
13 #include "base/memory/linked_ptr.h" | 13 #include "base/memory/linked_ptr.h" |
14 #include "base/memory/ref_counted.h" | 14 #include "base/memory/ref_counted.h" |
15 #include "base/memory/scoped_ptr.h" | 15 #include "base/memory/scoped_ptr.h" |
16 #include "base/memory/weak_ptr.h" | 16 #include "base/memory/weak_ptr.h" |
17 #include "base/shared_memory.h" | 17 #include "base/shared_memory.h" |
18 #include "gpu/command_buffer/common/command_buffer.h" | 18 #include "gpu/command_buffer/common/command_buffer.h" |
19 #include "gpu/command_buffer/service/cmd_buffer_engine.h" | 19 #include "gpu/command_buffer/service/cmd_buffer_engine.h" |
20 #include "gpu/command_buffer/service/cmd_parser.h" | 20 #include "gpu/command_buffer/service/cmd_parser.h" |
21 #include "gpu/command_buffer/service/gles2_cmd_decoder.h" | 21 #include "gpu/command_buffer/service/gles2_cmd_decoder.h" |
22 #include "gpu/gpu_export.h" | 22 #include "gpu/gpu_export.h" |
23 | 23 |
24 namespace gfx { | 24 namespace gfx { |
25 class GLFence; | 25 class GLFence; |
26 } | 26 } |
27 | 27 |
28 namespace gpu { | 28 namespace gpu { |
29 | 29 |
30 struct RefCountedCounter | 30 class PreemptionFlag |
31 : public base::RefCountedThreadSafe<RefCountedCounter> { | 31 : public base::RefCountedThreadSafe<PreemptionFlag> { |
32 base::AtomicRefCount count; | 32 public: |
33 RefCountedCounter() : count(0) {} | 33 PreemptionFlag() : flag_(0) {} |
34 | 34 |
35 bool IsZero() { return base::AtomicRefCountIsZero(&count); } | 35 bool IsSet() { return !base::AtomicRefCountIsZero(&flag_); } |
36 void IncCount() { base::AtomicRefCountInc(&count); } | 36 void Set() { base::AtomicRefCountInc(&flag_); } |
37 void DecCount() { base::AtomicRefCountDec(&count); } | 37 void Reset() { base::subtle::NoBarrier_Store(&flag_, 0); } |
38 void Reset() { base::subtle::NoBarrier_Store(&count, 0); } | 38 |
39 private: | 39 private: |
40 ~RefCountedCounter() {} | 40 base::AtomicRefCount flag_; |
41 | 41 |
42 friend class base::RefCountedThreadSafe<RefCountedCounter>; | 42 ~PreemptionFlag() {} |
| 43 |
| 44 friend class base::RefCountedThreadSafe<PreemptionFlag>; |
43 }; | 45 }; |
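The new PreemptionFlag is a thread-safe boolean built on base::AtomicRefCount: Set() makes IsSet() return true until Reset() clears it. A minimal usage sketch, assuming only the API declared in this header; the wiring function and its name are hypothetical and not part of the patch:

  #include "gpu/command_buffer/service/gpu_scheduler.h"

  // Hypothetical wiring, not from this patch: the side that may preempt
  // shares one PreemptionFlag with the scheduler it wants to preempt.
  void WirePreemptionExample(gpu::GpuScheduler* scheduler) {
    scoped_refptr<gpu::PreemptionFlag> flag(new gpu::PreemptionFlag);
    scheduler->SetPreemptByFlag(flag);

    flag->Set();    // Ask the scheduler to exit PutChanged() early.
    flag->Reset();  // Withdraw the request so command processing resumes.
  }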
44 | 46 |
45 // This class schedules commands that have been flushed. They are received via | 47 // This class schedules commands that have been flushed. They are received via |
46 // a command buffer and forwarded to a command parser. TODO(apatrick): This | 48 // a command buffer and forwarded to a command parser. TODO(apatrick): This |
47 // class should not know about the decoder. Do not add additional dependencies | 49 // class should not know about the decoder. Do not add additional dependencies |
48 // on it. | 50 // on it. |
49 class GPU_EXPORT GpuScheduler | 51 class GPU_EXPORT GpuScheduler |
50 : NON_EXPORTED_BASE(public CommandBufferEngine), | 52 : NON_EXPORTED_BASE(public CommandBufferEngine), |
51 public base::SupportsWeakPtr<GpuScheduler> { | 53 public base::SupportsWeakPtr<GpuScheduler> { |
52 public: | 54 public: |
53 GpuScheduler(CommandBuffer* command_buffer, | 55 GpuScheduler(CommandBuffer* command_buffer, |
54 AsyncAPIInterface* handler, | 56 AsyncAPIInterface* handler, |
55 gles2::GLES2Decoder* decoder); | 57 gles2::GLES2Decoder* decoder); |
56 | 58 |
57 virtual ~GpuScheduler(); | 59 virtual ~GpuScheduler(); |
58 | 60 |
59 void PutChanged(); | 61 void PutChanged(); |
60 | 62 |
61 void SetPreemptByCounter(scoped_refptr<RefCountedCounter> counter) { | 63 void SetPreemptByFlag(scoped_refptr<PreemptionFlag> flag) { |
62 preempt_by_counter_ = counter; | 64 preemption_flag_ = flag; |
63 } | 65 } |
64 | 66 |
65 // Sets whether commands should be processed by this scheduler. Setting to | 67 // Sets whether commands should be processed by this scheduler. Setting to |
66 // false unschedules. Setting to true reschedules. Whether or not the | 68 // false unschedules. Setting to true reschedules. Whether or not the |
67 // scheduler is currently scheduled is "reference counted". Every call with | 69 // scheduler is currently scheduled is "reference counted". Every call with |
68 // false must eventually be paired by a call with true. | 70 // false must eventually be paired by a call with true. |
69 void SetScheduled(bool is_scheduled); | 71 void SetScheduled(bool is_scheduled); |
70 | 72 |
71 // Returns whether the scheduler is currently able to process more commands. | 73 // Returns whether the scheduler is currently able to process more commands. |
72 bool IsScheduled(); | 74 bool IsScheduled(); |
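The "reference counted" scheduling described above implies that unschedule requests nest. A hedged illustration of that pairing rule, based only on the comment (the function name is hypothetical and the expected IsScheduled() values are assumptions, not verified against gpu_scheduler.cc):

  // Assumed behavior: every SetScheduled(false) must be balanced by a
  // SetScheduled(true) before the scheduler reports itself scheduled again.
  void NestedUnscheduleExample(gpu::GpuScheduler* scheduler) {
    scheduler->SetScheduled(false);  // First blocker (e.g. waiting on a fence).
    scheduler->SetScheduled(false);  // Second, independent blocker.
    // scheduler->IsScheduled() should be false here.
    scheduler->SetScheduled(true);   // Balances the second call.
    scheduler->SetScheduled(true);   // Balances the first; scheduled again.
  }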
(...skipping 17 matching lines...) |
90 void DeferToFence(base::Closure task); | 92 void DeferToFence(base::Closure task); |
91 | 93 |
92 // Polls the fences, invoking callbacks that were waiting to be triggered | 94 // Polls the fences, invoking callbacks that were waiting to be triggered |
93 // by them and returns whether all fences were complete. | 95 // by them and returns whether all fences were complete. |
94 bool PollUnscheduleFences(); | 96 bool PollUnscheduleFences(); |
95 | 97 |
96 CommandParser* parser() const { | 98 CommandParser* parser() const { |
97 return parser_.get(); | 99 return parser_.get(); |
98 } | 100 } |
99 | 101 |
| 102 bool IsPreempted(); |
| 103 |
100 private: | 104 private: |
101 // Artificially reschedule if the scheduler is still unscheduled after a | 105 // Artificially reschedule if the scheduler is still unscheduled after a |
102 // timeout. | 106 // timeout. |
103 void RescheduleTimeOut(); | 107 void RescheduleTimeOut(); |
104 | 108 |
105 // The GpuScheduler holds a weak reference to the CommandBuffer. The | 109 // The GpuScheduler holds a weak reference to the CommandBuffer. The |
106 // CommandBuffer owns the GpuScheduler and holds a strong reference to it | 110 // CommandBuffer owns the GpuScheduler and holds a strong reference to it |
107 // through the ProcessCommands callback. | 111 // through the ProcessCommands callback. |
108 CommandBuffer* command_buffer_; | 112 CommandBuffer* command_buffer_; |
109 | 113 |
(...skipping 28 matching lines...) |
138 ~UnscheduleFence(); | 142 ~UnscheduleFence(); |
139 | 143 |
140 scoped_ptr<gfx::GLFence> fence; | 144 scoped_ptr<gfx::GLFence> fence; |
141 base::Closure task; | 145 base::Closure task; |
142 }; | 146 }; |
143 std::queue<linked_ptr<UnscheduleFence> > unschedule_fences_; | 147 std::queue<linked_ptr<UnscheduleFence> > unschedule_fences_; |
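A hedged sketch of the fence polling that PollUnscheduleFences() above describes; the real loop lives in gpu_scheduler.cc, which this patch does not touch, the helper name is hypothetical, and the sketch ignores that UnscheduleFence is private to GpuScheduler:

  // Run each deferred task once its GL fence has signaled, in FIFO order;
  // report whether the queue drained completely.
  bool PollFencesSketch(std::queue<linked_ptr<UnscheduleFence> >* fences) {
    while (!fences->empty()) {
      UnscheduleFence* front = fences->front().get();
      if (!front->fence->HasCompleted())
        return false;       // Oldest fence still pending; poll again later.
      front->task.Run();    // Fence signaled: run the deferred closure.
      fences->pop();
    }
    return true;            // Every deferred task ran.
  }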
144 | 148 |
145 base::Closure scheduled_callback_; | 149 base::Closure scheduled_callback_; |
146 base::Closure command_processed_callback_; | 150 base::Closure command_processed_callback_; |
147 | 151 |
148 // If non-NULL and preempt_by_counter_->count is non-zero, | 152 // If non-NULL and |preemption_flag_->IsSet()|, exit PutChanged early. |
149 // exit PutChanged early. | 153 scoped_refptr<PreemptionFlag> preemption_flag_; |
150 scoped_refptr<RefCountedCounter> preempt_by_counter_; | |
151 bool was_preempted_; | 154 bool was_preempted_; |
152 | 155 |
153 DISALLOW_COPY_AND_ASSIGN(GpuScheduler); | 156 DISALLOW_COPY_AND_ASSIGN(GpuScheduler); |
154 }; | 157 }; |
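For the |preemption_flag_| and |was_preempted_| members, a rough sketch of the early exit their comment describes; this is an assumption about the shape of the logic in gpu_scheduler.cc, not code from this patch, and the helper name is hypothetical:

  // Returns true when PutChanged() should stop feeding the parser because
  // another channel has requested preemption via the shared flag.
  bool ShouldYieldToPreemption(const scoped_refptr<gpu::PreemptionFlag>& flag,
                               bool* was_preempted) {
    if (flag.get() && flag->IsSet()) {
      *was_preempted = true;   // Remember why command processing stopped.
      return true;             // Exit PutChanged() before parsing more.
    }
    *was_preempted = false;    // Flag clear (or absent): process normally.
    return false;
  }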
155 | 158 |
156 } // namespace gpu | 159 } // namespace gpu |
157 | 160 |
158 #endif // GPU_COMMAND_BUFFER_SERVICE_GPU_SCHEDULER_H_ | 161 #endif // GPU_COMMAND_BUFFER_SERVICE_GPU_SCHEDULER_H_ |