| OLD | NEW |
| (Empty) |
| 1 // Copyright 2013 The Chromium Authors. All rights reserved. | |
| 2 // Use of this source code is governed by a BSD-style license that can be | |
| 3 // found in the LICENSE file. | |
| 4 | |
| 5 #include "cc/resources/raster_worker_pool.h" | |
| 6 | |
| 7 #include <algorithm> | |
| 8 | |
| 9 #include "base/debug/trace_event.h" | |
| 10 #include "base/lazy_instance.h" | |
| 11 #include "base/strings/stringprintf.h" | |
| 12 #include "base/threading/simple_thread.h" | |
| 13 #include "cc/base/scoped_ptr_deque.h" | |
| 14 #include "cc/resources/raster_source.h" | |
| 15 #include "skia/ext/refptr.h" | |
| 16 #include "third_party/skia/include/core/SkCanvas.h" | |
| 17 #include "third_party/skia/include/core/SkSurface.h" | |
| 18 | |
| 19 namespace cc { | |
| 20 namespace { | |
| 21 | |
| 22 class RasterTaskGraphRunner : public TaskGraphRunner, | |
| 23 public base::DelegateSimpleThread::Delegate { | |
| 24 public: | |
| 25 RasterTaskGraphRunner() { | |
| 26 size_t num_threads = RasterWorkerPool::GetNumRasterThreads(); | |
| 27 while (workers_.size() < num_threads) { | |
| 28 scoped_ptr<base::DelegateSimpleThread> worker = | |
| 29 make_scoped_ptr(new base::DelegateSimpleThread( | |
| 30 this, | |
| 31 base::StringPrintf("CompositorRasterWorker%u", | |
| 32 static_cast<unsigned>(workers_.size() + 1)) | |
| 33 .c_str())); | |
| 34 worker->Start(); | |
| 35 #if defined(OS_ANDROID) || defined(OS_LINUX) | |
| 36 worker->SetThreadPriority(base::kThreadPriority_Background); | |
| 37 #endif | |
| 38 workers_.push_back(worker.Pass()); | |
| 39 } | |
| 40 } | |
| 41 | |
| 42 ~RasterTaskGraphRunner() override { NOTREACHED(); } | |
| 43 | |
| 44 private: | |
| 45 // Overridden from base::DelegateSimpleThread::Delegate: | |
| 46 void Run() override { TaskGraphRunner::Run(); } | |
| 47 | |
| 48 ScopedPtrDeque<base::DelegateSimpleThread> workers_; | |
| 49 }; | |
| 50 | |
// Leaky on purpose: the raster worker threads are never joined (see
// ~RasterTaskGraphRunner), so the runner must outlive process shutdown.
base::LazyInstance<RasterTaskGraphRunner>::Leaky g_task_graph_runner =
    LAZY_INSTANCE_INITIALIZER;

// Fallback thread count used when SetNumRasterThreads() was never called.
const int kDefaultNumRasterThreads = 1;

// 0 means "not yet initialized"; latched lazily in GetNumRasterThreads().
int g_num_raster_threads = 0;
| 57 | |
// Worker-thread task whose only job is to post |on_raster_finished_callback|
// back to |task_runner| when it runs. Created via
// RasterWorkerPool::CreateRasterFinishedTask().
class RasterFinishedTaskImpl : public RasterizerTask {
 public:
  explicit RasterFinishedTaskImpl(
      base::SequencedTaskRunner* task_runner,
      const base::Closure& on_raster_finished_callback)
      : task_runner_(task_runner),
        on_raster_finished_callback_(on_raster_finished_callback) {}

  // Overridden from Task:
  void RunOnWorkerThread() override {
    TRACE_EVENT0("cc", "RasterFinishedTaskImpl::RunOnWorkerThread");
    RasterFinished();
  }

  // Overridden from RasterizerTask:
  // No per-task resource setup, teardown, or origin-thread reply is needed.
  void ScheduleOnOriginThread(RasterizerTaskClient* client) override {}
  void CompleteOnOriginThread(RasterizerTaskClient* client) override {}
  void RunReplyOnOriginThread() override {}

 protected:
  // Protected: lifetime is managed by refcounting (scoped_refptr).
  ~RasterFinishedTaskImpl() override {}

  // Bounces the completion callback to the origin thread's task runner.
  void RasterFinished() {
    task_runner_->PostTask(FROM_HERE, on_raster_finished_callback_);
  }

 private:
  scoped_refptr<base::SequencedTaskRunner> task_runner_;
  const base::Closure on_raster_finished_callback_;

  DISALLOW_COPY_AND_ASSIGN(RasterFinishedTaskImpl);
};
| 90 | |
| 91 } // namespace | |
| 92 | |
// This allows a micro benchmark system to run tasks with highest priority,
// since it should finish as quickly as possible.
unsigned RasterWorkerPool::kBenchmarkRasterTaskPriority = 0u;
// Task priorities that make sure raster finished tasks run before any
// remaining raster tasks. A lower numeric value means a higher priority.
unsigned RasterWorkerPool::kRasterFinishedTaskPriority = 1u;
// Regular raster tasks get priorities at or above this base value.
unsigned RasterWorkerPool::kRasterTaskPriorityBase = 2u;
| 100 | |
| 101 RasterWorkerPool::RasterWorkerPool() {} | |
| 102 | |
| 103 RasterWorkerPool::~RasterWorkerPool() {} | |
| 104 | |
// static
void RasterWorkerPool::SetNumRasterThreads(int num_threads) {
  // Must be called at most once, and before the first call to
  // GetNumRasterThreads() (which latches the default otherwise).
  DCHECK_LT(0, num_threads);
  DCHECK_EQ(0, g_num_raster_threads);

  g_num_raster_threads = num_threads;
}
| 112 | |
| 113 // static | |
| 114 int RasterWorkerPool::GetNumRasterThreads() { | |
| 115 if (!g_num_raster_threads) | |
| 116 g_num_raster_threads = kDefaultNumRasterThreads; | |
| 117 | |
| 118 return g_num_raster_threads; | |
| 119 } | |
| 120 | |
| 121 // static | |
| 122 TaskGraphRunner* RasterWorkerPool::GetTaskGraphRunner() { | |
| 123 return g_task_graph_runner.Pointer(); | |
| 124 } | |
| 125 | |
| 126 // static | |
| 127 scoped_refptr<RasterizerTask> RasterWorkerPool::CreateRasterFinishedTask( | |
| 128 base::SequencedTaskRunner* task_runner, | |
| 129 const base::Closure& on_raster_finished_callback) { | |
| 130 return make_scoped_refptr( | |
| 131 new RasterFinishedTaskImpl(task_runner, on_raster_finished_callback)); | |
| 132 } | |
| 133 | |
| 134 // static | |
| 135 void RasterWorkerPool::ScheduleTasksOnOriginThread(RasterizerTaskClient* client, | |
| 136 TaskGraph* graph) { | |
| 137 TRACE_EVENT0("cc", "Rasterizer::ScheduleTasksOnOriginThread"); | |
| 138 | |
| 139 for (TaskGraph::Node::Vector::iterator it = graph->nodes.begin(); | |
| 140 it != graph->nodes.end(); | |
| 141 ++it) { | |
| 142 TaskGraph::Node& node = *it; | |
| 143 RasterizerTask* task = static_cast<RasterizerTask*>(node.task); | |
| 144 | |
| 145 if (!task->HasBeenScheduled()) { | |
| 146 task->WillSchedule(); | |
| 147 task->ScheduleOnOriginThread(client); | |
| 148 task->DidSchedule(); | |
| 149 } | |
| 150 } | |
| 151 } | |
| 152 | |
// static
void RasterWorkerPool::InsertNodeForTask(TaskGraph* graph,
                                         RasterizerTask* task,
                                         unsigned priority,
                                         size_t dependencies) {
  // A task may appear in the graph at most once. The O(n) duplicate scan is
  // intentionally inside the DCHECK so it is compiled out of release builds.
  DCHECK(std::find_if(graph->nodes.begin(),
                      graph->nodes.end(),
                      TaskGraph::Node::TaskComparator(task)) ==
         graph->nodes.end());
  graph->nodes.push_back(TaskGraph::Node(task, priority, dependencies));
}
| 164 | |
| 165 // static | |
| 166 void RasterWorkerPool::InsertNodesForRasterTask( | |
| 167 TaskGraph* graph, | |
| 168 RasterTask* raster_task, | |
| 169 const ImageDecodeTask::Vector& decode_tasks, | |
| 170 unsigned priority) { | |
| 171 size_t dependencies = 0u; | |
| 172 | |
| 173 // Insert image decode tasks. | |
| 174 for (ImageDecodeTask::Vector::const_iterator it = decode_tasks.begin(); | |
| 175 it != decode_tasks.end(); | |
| 176 ++it) { | |
| 177 ImageDecodeTask* decode_task = it->get(); | |
| 178 | |
| 179 // Skip if already decoded. | |
| 180 if (decode_task->HasCompleted()) | |
| 181 continue; | |
| 182 | |
| 183 dependencies++; | |
| 184 | |
| 185 // Add decode task if it doesn't already exists in graph. | |
| 186 TaskGraph::Node::Vector::iterator decode_it = | |
| 187 std::find_if(graph->nodes.begin(), | |
| 188 graph->nodes.end(), | |
| 189 TaskGraph::Node::TaskComparator(decode_task)); | |
| 190 if (decode_it == graph->nodes.end()) | |
| 191 InsertNodeForTask(graph, decode_task, priority, 0u); | |
| 192 | |
| 193 graph->edges.push_back(TaskGraph::Edge(decode_task, raster_task)); | |
| 194 } | |
| 195 | |
| 196 InsertNodeForTask(graph, raster_task, priority, dependencies); | |
| 197 } | |
| 198 | |
// Returns true if PlaybackToMemory() can rasterize into a buffer of |format|.
static bool IsSupportedPlaybackToMemoryFormat(ResourceFormat format) {
  // No default case: the compiler can then warn if a new ResourceFormat
  // value is added without being handled here.
  switch (format) {
    case RGBA_4444:
    case RGBA_8888:
    case BGRA_8888:
      return true;
    case ALPHA_8:
    case LUMINANCE_8:
    case RGB_565:
    case ETC1:
    case RED_8:
      return false;
  }
  // Only reachable with an out-of-range enum value.
  NOTREACHED();
  return false;
}
| 215 | |
| 216 // static | |
| 217 void RasterWorkerPool::PlaybackToMemory(void* memory, | |
| 218 ResourceFormat format, | |
| 219 const gfx::Size& size, | |
| 220 int stride, | |
| 221 const RasterSource* raster_source, | |
| 222 const gfx::Rect& rect, | |
| 223 float scale) { | |
| 224 DCHECK(IsSupportedPlaybackToMemoryFormat(format)) << format; | |
| 225 | |
| 226 // Uses kPremul_SkAlphaType since the result is not known to be opaque. | |
| 227 SkImageInfo info = | |
| 228 SkImageInfo::MakeN32(size.width(), size.height(), kPremul_SkAlphaType); | |
| 229 SkColorType buffer_color_type = ResourceFormatToSkColorType(format); | |
| 230 bool needs_copy = buffer_color_type != info.colorType(); | |
| 231 | |
| 232 // Use unknown pixel geometry to disable LCD text. | |
| 233 SkSurfaceProps surface_props(0, kUnknown_SkPixelGeometry); | |
| 234 if (raster_source->CanUseLCDText()) { | |
| 235 // LegacyFontHost will get LCD text and skia figures out what type to use. | |
| 236 surface_props = SkSurfaceProps(SkSurfaceProps::kLegacyFontHost_InitType); | |
| 237 } | |
| 238 | |
| 239 if (!stride) | |
| 240 stride = info.minRowBytes(); | |
| 241 | |
| 242 if (!needs_copy) { | |
| 243 skia::RefPtr<SkSurface> surface = skia::AdoptRef( | |
| 244 SkSurface::NewRasterDirect(info, memory, stride, &surface_props)); | |
| 245 skia::RefPtr<SkCanvas> canvas = skia::SharePtr(surface->getCanvas()); | |
| 246 raster_source->PlaybackToCanvas(canvas.get(), rect, scale); | |
| 247 return; | |
| 248 } | |
| 249 | |
| 250 skia::RefPtr<SkSurface> surface = | |
| 251 skia::AdoptRef(SkSurface::NewRaster(info, &surface_props)); | |
| 252 skia::RefPtr<SkCanvas> canvas = skia::SharePtr(surface->getCanvas()); | |
| 253 raster_source->PlaybackToCanvas(canvas.get(), rect, scale); | |
| 254 | |
| 255 SkImageInfo dst_info = info; | |
| 256 dst_info.fColorType = buffer_color_type; | |
| 257 // TODO(kaanb): The GL pipeline assumes a 4-byte alignment for the | |
| 258 // bitmap data. There will be no need to call SkAlign4 once crbug.com/293728 | |
| 259 // is fixed. | |
| 260 const size_t dst_row_bytes = SkAlign4(dst_info.minRowBytes()); | |
| 261 DCHECK_EQ(0u, dst_row_bytes % 4); | |
| 262 bool success = canvas->readPixels(dst_info, memory, dst_row_bytes, 0, 0); | |
| 263 DCHECK_EQ(true, success); | |
| 264 } | |
| 265 | |
| 266 } // namespace cc | |
| OLD | NEW |