Chromium Code Reviews

Side by Side Diff: src/gpu/instanced/InstancedRendering.cpp

Issue 2066993003: Begin instanced rendering for simple shapes (Closed) Base URL: https://skia.googlesource.com/skia.git@master
Patch Set: get mixed samples and base instance paths working again Created 4 years, 6 months ago
/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "InstancedRendering.h"

#include "GrBatchFlushState.h"
#include "GrPipeline.h"
#include "GrResourceProvider.h"
#include "instanced/InstanceProcessor.h"

namespace gr_instanced {

InstancedRendering::InstancedRendering(GrGpu* gpu, AntialiasMode lastSupportedAAMode)
    : fGpu(SkRef(gpu)),
      fLastSupportedAAMode(lastSupportedAAMode),
      fState(State::kRecordingDraws) {
}

GrDrawBatch* InstancedRendering::recordRect(const SkRect& rect, const SkMatrix& viewMatrix,
                                            GrColor color, bool antialias,
                                            const GrInstancedPipelineInfo& info, bool* useHWAA) {
    return this->recordShape(ShapeType::kRect, rect, viewMatrix, color, rect, antialias, info,
                             useHWAA);
}

GrDrawBatch* InstancedRendering::recordRect(const SkRect& rect, const SkMatrix& viewMatrix,
                                            GrColor color, const SkRect& localRect, bool antialias,
                                            const GrInstancedPipelineInfo& info, bool* useHWAA) {
    return this->recordShape(ShapeType::kRect, rect, viewMatrix, color, localRect, antialias, info,
                             useHWAA);
}

GrDrawBatch* InstancedRendering::recordRect(const SkRect& rect, const SkMatrix& viewMatrix,
                                            GrColor color, const SkMatrix& localMatrix,
                                            bool antialias, const GrInstancedPipelineInfo& info,
                                            bool* useHWAA) {
    if (localMatrix.hasPerspective()) {
        return nullptr; // Perspective is not yet supported in the local matrix.
    }
    if (Batch* batch = this->recordShape(ShapeType::kRect, rect, viewMatrix, color, rect, antialias,
                                         info, useHWAA)) {
        fInstances.back().fInfo |= kLocalMatrix_InfoFlag;
        this->appendParamsTexel(localMatrix.getScaleX(), localMatrix.getSkewX(),
                                localMatrix.getTranslateX());
        this->appendParamsTexel(localMatrix.getSkewY(), localMatrix.getScaleY(),
                                localMatrix.getTranslateY());
        batch->fInfo.fHasLocalMatrix = true;
        batch->fInfo.fHasParams = true;
        return batch;
    }
    return nullptr;
}

GrDrawBatch* InstancedRendering::recordOval(const SkRect& oval, const SkMatrix& viewMatrix,
                                            GrColor color, bool antialias,
                                            const GrInstancedPipelineInfo& info, bool* useHWAA) {
    return this->recordShape(ShapeType::kOval, oval, viewMatrix, color, oval, antialias, info,
                             useHWAA);
}

GrDrawBatch* InstancedRendering::recordRRect(const SkRRect& rrect, const SkMatrix& viewMatrix,
                                             GrColor color, bool antialias,
                                             const GrInstancedPipelineInfo& info, bool* useHWAA) {
    if (Batch* batch = this->recordShape(GetRRectShapeType(rrect), rrect.rect(), viewMatrix, color,
                                         rrect.rect(), antialias, info, useHWAA)) {
        this->appendRRectParams(rrect, &batch->fInfo);
        return batch;
    }
    return nullptr;
}

GrDrawBatch* InstancedRendering::recordDRRect(const SkRRect& outer, const SkRRect& inner,
                                              const SkMatrix& viewMatrix, GrColor color,
                                              bool antialias, const GrInstancedPipelineInfo& info,
                                              bool* useHWAA) {
    if (inner.getType() > SkRRect::kSimple_Type) {
        return nullptr; // Complex inner round rects are not yet supported.
    }
    if (SkRRect::kEmpty_Type == inner.getType()) {
        return this->recordRRect(outer, viewMatrix, color, antialias, info, useHWAA);
    }
    if (Batch* batch = this->recordShape(GetRRectShapeType(outer), outer.rect(), viewMatrix, color,
                                         outer.rect(), antialias, info, useHWAA)) {
        this->appendRRectParams(outer, &batch->fInfo);
        ShapeType innerShapeType = GetRRectShapeType(inner);
        batch->fInfo.fInnerShapeTypes |= GetShapeFlag(innerShapeType);
        fInstances.back().fInfo |= ((int)innerShapeType << kInnerShapeType_InfoBit);
        this->appendParamsTexel(inner.rect().asScalars(), 4);
        this->appendRRectParams(inner, &batch->fInfo);
        batch->fInfo.fHasParams = true;
        return batch;
    }
    return nullptr;
}

InstancedRendering::Batch* InstancedRendering::recordShape(ShapeType type, const SkRect& bounds,
                                                           const SkMatrix& viewMatrix,
                                                           GrColor color, const SkRect& localRect,
                                                           bool antialias,
                                                           const GrInstancedPipelineInfo& info,
                                                           bool* useHWAA) {
    SkASSERT(State::kRecordingDraws == fState);

    uint32_t paramsIdx = fParams.count();
    if (paramsIdx > kParamsIdx_InfoMask) {
        return nullptr; // paramsIdx is too large for its allotted space.
    }

    AntialiasMode antialiasMode;
    if (!this->selectAntialiasMode(viewMatrix, antialias, info, useHWAA, &antialiasMode)) {
        return nullptr;
    }

    Batch* batch = this->createBatch(fInstances.count());
    batch->fInfo.fAntialiasMode = antialiasMode;
    batch->fInfo.fShapeTypes = GetShapeFlag(type);
    batch->fInfo.fCannotDiscard = !info.fCanDiscard;

    Instance& instance = fInstances.push_back();
    instance.fInfo = ((int)type << kShapeType_InfoBit) | paramsIdx;

    // The instanced shape renderer draws rectangles of [-1, -1, +1, +1], so we find the matrix that
    // will map this rectangle to the same device coordinates as "viewMatrix * bounds".
    float sx = 0.5f * bounds.width();
    float sy = 0.5f * bounds.height();
    float tx = sx + bounds.fLeft;
    float ty = sy + bounds.fTop;
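    // In the non-perspective case below, that matrix is simply viewMatrix * translate(tx, ty) *
    // scale(sx, sy), written out by hand as a 2x3 matrix; the perspective case builds the
    // equivalent SkMatrix instead and sends its perspective entries as an extra params texel.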
    if (!viewMatrix.hasPerspective()) {
        float* m = instance.fShapeMatrix2x3;
        m[0] = viewMatrix.getScaleX() * sx;
        m[1] = viewMatrix.getSkewX() * sy;
        m[2] = viewMatrix.getTranslateX() +
               viewMatrix.getScaleX() * tx + viewMatrix.getSkewX() * ty;

        m[3] = viewMatrix.getSkewY() * sx;
        m[4] = viewMatrix.getScaleY() * sy;
        m[5] = viewMatrix.getTranslateY() +
               viewMatrix.getSkewY() * tx + viewMatrix.getScaleY() * ty;

        // Since 'm' is a 2x3 matrix that maps the rect [-1, +1] into the shape's device-space quad,
        // it's quite simple to find the bounding rectangle:
        float devBoundsHalfWidth = fabsf(m[0]) + fabsf(m[1]);
        float devBoundsHalfHeight = fabsf(m[3]) + fabsf(m[4]);
        batch->fBounds.fLeft = m[2] - devBoundsHalfWidth;
        batch->fBounds.fRight = m[2] + devBoundsHalfWidth;
        batch->fBounds.fTop = m[5] - devBoundsHalfHeight;
        batch->fBounds.fBottom = m[5] + devBoundsHalfHeight;

        // TODO: Is this worth the CPU overhead?
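        // (The mapped quad is still a square only when 'm' is a uniform scale times a rotation,
        // i.e. its two rows (m[0], m[1]) and (m[3], m[4]) are orthogonal and of equal length; the
        // skew and length checks below test exactly that, after a cheap early-out on the bounds.)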
        batch->fInfo.fNonSquare =
            fabsf(devBoundsHalfHeight - devBoundsHalfWidth) > 0.5f ||             // Early out.
            fabs(m[0] * m[3] + m[1] * m[4]) > 1e-3f ||                            // Skew?
            fabs(m[0] * m[0] + m[1] * m[1] - m[3] * m[3] - m[4] * m[4]) > 1e-2f;  // Diff. lengths?
    } else {
        SkMatrix shapeMatrix(viewMatrix);
        shapeMatrix.preTranslate(tx, ty);
        shapeMatrix.preScale(sx, sy);
        instance.fInfo |= kPerspective_InfoFlag;

        float* m = instance.fShapeMatrix2x3;
        m[0] = SkScalarToFloat(shapeMatrix.getScaleX());
        m[1] = SkScalarToFloat(shapeMatrix.getSkewX());
        m[2] = SkScalarToFloat(shapeMatrix.getTranslateX());
        m[3] = SkScalarToFloat(shapeMatrix.getSkewY());
        m[4] = SkScalarToFloat(shapeMatrix.getScaleY());
        m[5] = SkScalarToFloat(shapeMatrix.getTranslateY());

        // Send the perspective column as a param.
        this->appendParamsTexel(shapeMatrix[SkMatrix::kMPersp0], shapeMatrix[SkMatrix::kMPersp1],
                                shapeMatrix[SkMatrix::kMPersp2]);
        batch->fInfo.fHasPerspective = true;
        batch->fInfo.fHasParams = true;

        viewMatrix.mapRect(&batch->fBounds, bounds);

        batch->fInfo.fNonSquare = true;
    }

    instance.fColor = color;

    const float* rectAsFloats = localRect.asScalars(); // Ensure SkScalar == float.
    memcpy(&instance.fLocalRect, rectAsFloats, 4 * sizeof(float));

    fBatchList.addToTail(batch);
    return batch;
}

inline bool InstancedRendering::selectAntialiasMode(const SkMatrix& viewMatrix, bool antialias,
                                                    const GrInstancedPipelineInfo& info,
                                                    bool* useHWAA, AntialiasMode* antialiasMode) {
    SkASSERT(!info.fColorDisabled || info.fDrawingShapeToStencil);
    SkASSERT(!info.fIsMixedSampled || info.fIsMultisampled);

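    // Modes are tried in order of preference: no AA, analytic coverage AA (only when
    // multisampling is off or can be disabled and the view matrix preserves right angles),
    // then full MSAA, and finally mixed samples, subject to fLastSupportedAAMode.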
    if (!info.fIsMultisampled || fGpu->caps()->multisampleDisableSupport()) {
        SkASSERT(fLastSupportedAAMode >= AntialiasMode::kCoverage);
        if (!antialias) {
            if (info.fDrawingShapeToStencil && !info.fCanDiscard) {
                // We can't draw to the stencil buffer without discard (or sample mask if MSAA).
                return false;
            }
            *antialiasMode = AntialiasMode::kNone;
            *useHWAA = false;
            return true;
        }

        if (info.canUseCoverageAA() && viewMatrix.preservesRightAngles()) {
            *antialiasMode = AntialiasMode::kCoverage;
            *useHWAA = false;
            return true;
        }
    }

    if (info.fIsMultisampled && fLastSupportedAAMode >= AntialiasMode::kMSAA) {
        if (!info.fIsMixedSampled || info.fColorDisabled) {
            *antialiasMode = AntialiasMode::kMSAA;
            *useHWAA = true;
            return true;
        }
        if (fLastSupportedAAMode >= AntialiasMode::kMixedSamples) {
            *antialiasMode = AntialiasMode::kMixedSamples;
            *useHWAA = true;
            return true;
        }
    }

    return false;
}

void InstancedRendering::appendRRectParams(const SkRRect& rrect, BatchInfo* batchInfo) {
    switch (rrect.getType()) {
        case SkRRect::kSimple_Type: {
            const SkVector& radii = rrect.getSimpleRadii();
            this->appendParamsTexel(radii.x(), radii.y(), rrect.width(), rrect.height());
            batchInfo->fHasParams = true;
            return;
        }
        case SkRRect::kNinePatch_Type: {
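            // Nine-patch (and complex) radii are appended pre-normalized to the [-1, +1] shape
            // space, i.e. divided by half the rect's width/height; the simple case above instead
            // sends the raw radii together with the rect dimensions.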
            float twoOverW = 2 / rrect.width();
            float twoOverH = 2 / rrect.height();
            const SkVector& radiiTL = rrect.radii(SkRRect::kUpperLeft_Corner);
            const SkVector& radiiBR = rrect.radii(SkRRect::kLowerRight_Corner);
            this->appendParamsTexel(radiiTL.x() * twoOverW, radiiBR.x() * twoOverW,
                                    radiiTL.y() * twoOverH, radiiBR.y() * twoOverH);
            batchInfo->fHasParams = true;
            return;
        }
        case SkRRect::kComplex_Type: {
            /**
             * The x and y radii of each arc are stored in separate vectors,
             * in the following order:
             *
             *        __x1 _ _ _ x3__
             *    y1 |               | y2
             *
             *       |               |
             *
             *    y3 |__   _ _ _   __| y4
             *          x2         x4
             *
             */
            float twoOverW = 2 / rrect.width();
            float twoOverH = 2 / rrect.height();
            const SkVector& radiiTL = rrect.radii(SkRRect::kUpperLeft_Corner);
            const SkVector& radiiTR = rrect.radii(SkRRect::kUpperRight_Corner);
            const SkVector& radiiBR = rrect.radii(SkRRect::kLowerRight_Corner);
            const SkVector& radiiBL = rrect.radii(SkRRect::kLowerLeft_Corner);
            this->appendParamsTexel(radiiTL.x() * twoOverW, radiiBL.x() * twoOverW,
                                    radiiTR.x() * twoOverW, radiiBR.x() * twoOverW);
            this->appendParamsTexel(radiiTL.y() * twoOverH, radiiTR.y() * twoOverH,
                                    radiiBL.y() * twoOverH, radiiBR.y() * twoOverH);
            batchInfo->fHasParams = true;
            return;
        }
        default: return;
    }
}

void InstancedRendering::appendParamsTexel(const SkScalar* vals, int count) {
    SkASSERT(count <= 4 && count >= 0);
    const float* valsAsFloats = vals; // Ensure SkScalar == float.
    memcpy(&fParams.push_back(), valsAsFloats, count * sizeof(float));
}

void InstancedRendering::appendParamsTexel(SkScalar x, SkScalar y, SkScalar z, SkScalar w) {
    ParamsTexel& texel = fParams.push_back();
    texel.fX = SkScalarToFloat(x);
    texel.fY = SkScalarToFloat(y);
    texel.fZ = SkScalarToFloat(z);
    texel.fW = SkScalarToFloat(w);
}

void InstancedRendering::appendParamsTexel(SkScalar x, SkScalar y, SkScalar z) {
    ParamsTexel& texel = fParams.push_back();
    texel.fX = SkScalarToFloat(x);
    texel.fY = SkScalarToFloat(y);
    texel.fZ = SkScalarToFloat(z);
}

InstancedRendering::Batch::Batch(uint32_t classID, InstancedRendering* ir, int instanceIdx)
    : INHERITED(classID),
      fInstancedRendering(ir) {
    fDrawCmds.push_back().fInstanceRange = {instanceIdx, 1};
}

void InstancedRendering::Batch::initBatchTracker(const GrXPOverridesForBatch& overrides) {
    DrawCmd& cmd = this->getSingleDrawCmd(); // This will assert if we have > 1 command.
    SkASSERT(1 == cmd.fInstanceRange.fCount); // The batch should not have been combined yet.
    SkASSERT(SkIsPow2(fInfo.fShapeTypes));
    SkASSERT(cmd.fGeometry.isEmpty());

    if (kRect_ShapeFlag == fInfo.fShapeTypes) {
        cmd.fGeometry = InstanceProcessor::GetIndexRangeForRect(fInfo.fAntialiasMode);
    } else if (kOval_ShapeFlag == fInfo.fShapeTypes) {
        cmd.fGeometry = InstanceProcessor::GetIndexRangeForOval(fInfo.fAntialiasMode, fBounds);
    } else {
        cmd.fGeometry = InstanceProcessor::GetIndexRangeForRRect(fInfo.fAntialiasMode);
    }

    GrColor overrideColor;
    if (overrides.getOverrideColorIfSet(&overrideColor)) {
        SkASSERT(State::kRecordingDraws == fInstancedRendering->fState);
        fInstancedRendering->fInstances[cmd.getSingleInstanceIdx()].fColor = overrideColor;
    }
    fInfo.fUsesLocalCoords = overrides.readsLocalCoords();
    fInfo.fCannotTweakAlphaForCoverage = !overrides.canTweakAlphaForCoverage();
}

bool InstancedRendering::Batch::onCombineIfPossible(GrBatch* other, const GrCaps& caps) {
    Batch* that = static_cast<Batch*>(other);
    SkASSERT(fInstancedRendering == that->fInstancedRendering);

    if (!fInfo.canJoin(that->fInfo) ||
        !GrPipeline::CanCombine(*this->pipeline(), this->bounds(),
                                *that->pipeline(), that->bounds(), caps)) {
        return false;
    }

    fBounds.join(that->fBounds);
    fInfo.join(that->fInfo);

    // Join the draw commands.
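    // If this batch's last command and that batch's first command use the same geometry and their
    // instance ranges are contiguous, merge them into a single command; any remaining commands
    // are appended unchanged.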
    int idx = 0;
    if (fDrawCmds.back().fGeometry == that->fDrawCmds.front().fGeometry) {
        SkASSERT(!fDrawCmds.back().fGeometry.isEmpty()); // Should have called initBatchTracker.
        InstanceRange& thisRange = fDrawCmds.back().fInstanceRange;
        const InstanceRange& thatRange = that->fDrawCmds.front().fInstanceRange;
        if (thisRange.end() == thatRange.fStart) {
            thisRange.fCount += thatRange.fCount;
            ++idx;
        }
    }
    if (idx < that->fDrawCmds.count()) {
        fDrawCmds.push_back_n(that->fDrawCmds.count() - idx, &that->fDrawCmds[idx]);
    }

    return true;
}

void InstancedRendering::Batch::computePipelineOptimizations(GrInitInvariantOutput* color,
                                                             GrInitInvariantOutput* coverage,
                                                             GrBatchToXPOverrides* overrides) const {
    // We need to be careful about fInfo here and consider how it might change as batches combine.
    // e.g. We can't make an assumption based on fInfo.isSimpleRects() because the batch might
    // later combine with a non-rect.
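    // Coverage is known to be full (255) for the MSAA-family modes, and for non-AA draws that are
    // able to discard; only coverage AA, or non-AA without discard, leaves it unknown.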
    color->setUnknownFourComponents();
    if (fInfo.fAntialiasMode >= AntialiasMode::kMSAA) {
        coverage->setKnownSingleComponent(255);
    } else if (AntialiasMode::kNone == fInfo.fAntialiasMode && !fInfo.fCannotDiscard) {
        coverage->setKnownSingleComponent(255);
    } else {
        coverage->setUnknownSingleComponent();
    }
}

void InstancedRendering::beginFlush(GrResourceProvider* rp) {
    SkASSERT(State::kRecordingDraws == fState);
    fState = State::kFlushing;

    if (fInstances.empty()) {
        return;
    }

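    // The vertex and index buffers hold the shared unit-shape geometry, so they are created
    // lazily and kept across flushes (endFlush intentionally does not reset them); the params
    // buffer is rebuilt every flush from whatever fParams has accumulated.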
    if (!fVertexBuffer) {
        fVertexBuffer.reset(InstanceProcessor::FindOrCreateVertexBuffer(fGpu));
        if (!fVertexBuffer) {
            return;
        }
    }

    if (!fIndexBuffer) {
        fIndexBuffer.reset(InstanceProcessor::FindOrCreateIndex8Buffer(fGpu));
        if (!fIndexBuffer) {
            return;
        }
    }

    if (!fParams.empty()) {
        fParamsBuffer.reset(rp->createBuffer(fParams.count() * sizeof(ParamsTexel),
                                             kTexel_GrBufferType, kDynamic_GrAccessPattern,
                                             GrResourceProvider::kNoPendingIO_Flag,
                                             fParams.begin()));
        if (!fParamsBuffer) {
            return;
        }
    }

    this->onBeginFlush(rp);
}

void InstancedRendering::Batch::onDraw(GrBatchFlushState* state) {
    SkASSERT(State::kFlushing == fInstancedRendering->fState);
    SkASSERT(state->gpu() == fInstancedRendering->gpu());

    InstanceProcessor instProc(fInfo, fInstancedRendering->fParamsBuffer);
    fInstancedRendering->onDraw(*this->pipeline(), instProc, this);
}

void InstancedRendering::endFlush() {
    // Caller is expected to delete all instanced batches before ending the flush.
    SkASSERT(fBatchList.isEmpty());
    fInstances.reset();
    fParams.reset();
    fParamsBuffer.reset();
    this->onEndFlush();
    fState = State::kRecordingDraws;
    // Hold on to the shape coords and index buffers.
}

void InstancedRendering::resetGpuResources(ResetType resetType) {
    fVertexBuffer.reset();
    fIndexBuffer.reset();
    fParamsBuffer.reset();
    this->onResetGpuResources(resetType);
}

}