OLD | NEW |
1 /* | 1 /* |
2 * Copyright 2011 Google Inc. | 2 * Copyright 2011 Google Inc. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license that can be | 4 * Use of this source code is governed by a BSD-style license that can be |
5 * found in the LICENSE file. | 5 * found in the LICENSE file. |
6 */ | 6 */ |
7 | 7 |
8 #include "GrInOrderDrawBuffer.h" | 8 #include "GrInOrderDrawBuffer.h" |
9 | 9 |
| 10 #include "GrBufferAllocPool.h" |
10 #include "GrDefaultGeoProcFactory.h" | 11 #include "GrDefaultGeoProcFactory.h" |
11 #include "GrDrawTargetCaps.h" | 12 #include "GrDrawTargetCaps.h" |
12 #include "GrGpu.h" | 13 #include "GrGpu.h" |
13 #include "GrTemplates.h" | 14 #include "GrTemplates.h" |
14 #include "GrFontCache.h" | 15 #include "GrFontCache.h" |
15 #include "GrTexture.h" | 16 #include "GrTexture.h" |
16 | 17 |
17 GrInOrderDrawBuffer::GrInOrderDrawBuffer(GrGpu* gpu, | 18 GrInOrderDrawBuffer::GrInOrderDrawBuffer(GrGpu* gpu, |
18 GrVertexBufferAllocPool* vertexPool, | 19 GrVertexBufferAllocPool* vertexPool, |
19 GrIndexBufferAllocPool* indexPool) | 20 GrIndexBufferAllocPool* indexPool) |
20 : INHERITED(gpu, vertexPool, indexPool) | 21 : INHERITED(gpu, vertexPool, indexPool) |
21 , fCmdBuffer(kCmdBufferInitialSizeInBytes) | 22 , fCmdBuffer(kCmdBufferInitialSizeInBytes) |
22 , fPrevState(NULL) | 23 , fPrevState(NULL) |
23 , fDrawID(0) { | 24 , fDrawID(0) |
| 25 , fBatchTarget(gpu, vertexPool, indexPool) |
| 26 , fDrawBatch(NULL) { |
24 | 27 |
25 SkASSERT(vertexPool); | 28 SkASSERT(vertexPool); |
26 SkASSERT(indexPool); | 29 SkASSERT(indexPool); |
27 | 30 |
28 fPathIndexBuffer.setReserve(kPathIdxBufferMinReserve); | 31 fPathIndexBuffer.setReserve(kPathIdxBufferMinReserve); |
29 fPathTransformBuffer.setReserve(kPathXformBufferMinReserve); | 32 fPathTransformBuffer.setReserve(kPathXformBufferMinReserve); |
30 } | 33 } |
31 | 34 |
32 GrInOrderDrawBuffer::~GrInOrderDrawBuffer() { | 35 GrInOrderDrawBuffer::~GrInOrderDrawBuffer() { |
33 this->reset(); | 36 this->reset(); |
(...skipping 169 matching lines...)
203 } | 206 } |
204 // Check if there is a draw info that is compatible that uses the same VB from the pool and | 207 // Check if there is a draw info that is compatible that uses the same VB from the pool and |
205 // the same IB | 208 // the same IB |
206 if (kDraw_Cmd != strip_trace_bit(fCmdBuffer.back().fType)) { | 209 if (kDraw_Cmd != strip_trace_bit(fCmdBuffer.back().fType)) { |
207 return 0; | 210 return 0; |
208 } | 211 } |
209 | 212 |
210 Draw* draw = static_cast<Draw*>(&fCmdBuffer.back()); | 213 Draw* draw = static_cast<Draw*>(&fCmdBuffer.back()); |
211 | 214 |
212 if (!draw->fInfo.isInstanced() || | 215 if (!draw->fInfo.isInstanced() || |
| 216 draw->fInfo.primitiveType() != info.primitiveType() || |
213 draw->fInfo.verticesPerInstance() != info.verticesPerInstance() || | 217 draw->fInfo.verticesPerInstance() != info.verticesPerInstance() || |
214 draw->fInfo.indicesPerInstance() != info.indicesPerInstance() || | 218 draw->fInfo.indicesPerInstance() != info.indicesPerInstance() || |
215 draw->fInfo.vertexBuffer() != info.vertexBuffer() || | 219 draw->fInfo.vertexBuffer() != info.vertexBuffer() || |
216 draw->fInfo.indexBuffer() != geomSrc.fIndexBuffer) { | 220 draw->fInfo.indexBuffer() != geomSrc.fIndexBuffer) { |
217 return 0; | 221 return 0; |
218 } | 222 } |
219 if (draw->fInfo.startVertex() + draw->fInfo.vertexCount() != info.startVertex()) { | 223 if (draw->fInfo.startVertex() + draw->fInfo.vertexCount() != info.startVertex()) { |
220 return 0; | 224 return 0; |
221 } | 225 } |
222 | 226 |
(...skipping 17 matching lines...)
240 return instancesToConcat; | 244 return instancesToConcat; |
241 } | 245 } |
242 | 246 |
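As an aside on the concatenation test above: an instanced draw can only be folded into the previous Draw command when both reference the same vertex/index buffers with the same per-instance layout, and the new vertices begin exactly where the previous draw ended. A minimal standalone sketch of that check, using a hypothetical DrawRecord struct rather than Skia's DrawInfo:

#include <algorithm>

// Hypothetical, simplified stand-in for the recorded draw state.
struct DrawRecord {
    const void* vertexBuffer;
    const void* indexBuffer;
    int         primitiveType;
    int         verticesPerInstance;
    int         indicesPerInstance;
    int         startVertex;
    int         vertexCount;
    int         instanceCount;
};

// Returns how many instances of 'next' can be folded into 'prev', given a
// per-primitive cap on how many instances one draw may accumulate.
int instancesToConcat(const DrawRecord& prev, const DrawRecord& next, int maxConcatCount) {
    // Must draw from the same buffers with an identical per-instance layout.
    if (prev.vertexBuffer != next.vertexBuffer ||
        prev.indexBuffer != next.indexBuffer ||
        prev.primitiveType != next.primitiveType ||
        prev.verticesPerInstance != next.verticesPerInstance ||
        prev.indicesPerInstance != next.indicesPerInstance) {
        return 0;
    }
    // The new instances must be contiguous with the previous draw's vertices.
    if (prev.startVertex + prev.vertexCount != next.startVertex) {
        return 0;
    }
    // Only concatenate as many instances as the cap still allows.
    int room = maxConcatCount - prev.instanceCount;
    return std::max(0, std::min(room, next.instanceCount));
}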
243 void GrInOrderDrawBuffer::onDraw(const GrPipelineBuilder& pipelineBuilder, | 247 void GrInOrderDrawBuffer::onDraw(const GrPipelineBuilder& pipelineBuilder, |
244 const GrGeometryProcessor* gp, | 248 const GrGeometryProcessor* gp, |
245 const DrawInfo& info, | 249 const DrawInfo& info, |
246 const GrScissorState& scissorState, | 250 const GrScissorState& scissorState, |
247 const GrDeviceCoordTexture* dstCopy) { | 251 const GrDeviceCoordTexture* dstCopy) { |
248 SkASSERT(info.vertexBuffer() && (!info.isIndexed() || info.indexBuffer())); | 252 SkASSERT(info.vertexBuffer() && (!info.isIndexed() || info.indexBuffer())); |
249 | 253 |
| 254 // This closeBatch call is required because we may introduce new draws when we setup clip |
| 255 this->closeBatch(); |
| 256 |
250 if (!this->recordStateAndShouldDraw(pipelineBuilder, gp, scissorState, dstCopy)) { | 257 if (!this->recordStateAndShouldDraw(pipelineBuilder, gp, scissorState, dstCopy)) { |
251 return; | 258 return; |
252 } | 259 } |
253 | 260 |
254 Draw* draw; | 261 Draw* draw; |
255 if (info.isInstanced()) { | 262 if (info.isInstanced()) { |
256 int instancesConcated = this->concatInstancedDraw(pipelineBuilder, info); | 263 int instancesConcated = this->concatInstancedDraw(pipelineBuilder, info); |
257 if (info.instanceCount() > instancesConcated) { | 264 if (info.instanceCount() > instancesConcated) { |
258 draw = GrNEW_APPEND_TO_RECORDER(fCmdBuffer, Draw, (info)); | 265 draw = GrNEW_APPEND_TO_RECORDER(fCmdBuffer, Draw, (info)); |
259 draw->fInfo.adjustInstanceCount(-instancesConcated); | 266 draw->fInfo.adjustInstanceCount(-instancesConcated); |
260 } else { | 267 } else { |
261 return; | 268 return; |
262 } | 269 } |
263 } else { | 270 } else { |
264 draw = GrNEW_APPEND_TO_RECORDER(fCmdBuffer, Draw, (info)); | 271 draw = GrNEW_APPEND_TO_RECORDER(fCmdBuffer, Draw, (info)); |
265 } | 272 } |
266 this->recordTraceMarkersIfNecessary(); | 273 this->recordTraceMarkersIfNecessary(); |
267 } | 274 } |
268 | 275 |
| 276 void GrInOrderDrawBuffer::onDrawBatch(GrBatch* batch, |
| 277 const GrPipelineBuilder& pipelineBuilder, |
| 278 const GrScissorState& scissorState, |
| 279 const GrDeviceCoordTexture* dstCopy) { |
| 280 if (!this->recordStateAndShouldDraw(batch, pipelineBuilder, scissorState, dstCopy)) { |
| 281 return; |
| 282 } |
| 283 |
| 284 // Check if there is a Batch Draw we can batch with |
| 285 if (kDrawBatch_Cmd != strip_trace_bit(fCmdBuffer.back().fType)) { |
| 286 fDrawBatch = GrNEW_APPEND_TO_RECORDER(fCmdBuffer, DrawBatch, (batch)); |
| 287 return; |
| 288 } |
| 289 |
| 290 DrawBatch* draw = static_cast<DrawBatch*>(&fCmdBuffer.back()); |
| 291 if (draw->fBatch->combineIfPossible(batch)) { |
| 292 return; |
| 293 } else { |
| 294 this->closeBatch(); |
| 295 fDrawBatch = GrNEW_APPEND_TO_RECORDER(fCmdBuffer, DrawBatch, (batch)); |
| 296 } |
| 297 this->recordTraceMarkersIfNecessary(); |
| 298 } |
| 299 |
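The new onDrawBatch path above leans on combineIfPossible: the incoming batch is first offered to the DrawBatch command already at the tail of fCmdBuffer, and only if that fails is the open batch closed and a fresh DrawBatch appended. A rough sketch of that record-time merging idiom, with a hypothetical Batch interface standing in for GrBatch:

#include <memory>
#include <vector>

// Hypothetical batch interface; GrBatch's real contract is much richer.
struct Batch {
    virtual ~Batch() {}
    // Returns true if 'other' was folded into this batch.
    virtual bool combineIfPossible(Batch* other) = 0;
};

struct BatchRecorder {
    std::vector<std::unique_ptr<Batch>> commands;

    void drawBatch(std::unique_ptr<Batch> batch) {
        // First try to merge into the batch most recently recorded.
        if (!commands.empty() && commands.back()->combineIfPossible(batch.get())) {
            return;  // merged; nothing new to record
        }
        // Couldn't merge: record it as a new command.
        commands.push_back(std::move(batch));
    }
};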
269 void GrInOrderDrawBuffer::onStencilPath(const GrPipelineBuilder& pipelineBuilder, | 300 void GrInOrderDrawBuffer::onStencilPath(const GrPipelineBuilder& pipelineBuilder, |
270 const GrPathProcessor* pathProc, | 301 const GrPathProcessor* pathProc, |
271 const GrPath* path, | 302 const GrPath* path, |
272 const GrScissorState& scissorState, | 303 const GrScissorState& scissorState, |
273 const GrStencilSettings& stencilSettings) { | 304 const GrStencilSettings& stencilSettings) { |
274 StencilPath* sp = GrNEW_APPEND_TO_RECORDER(fCmdBuffer, StencilPath, | 305 StencilPath* sp = GrNEW_APPEND_TO_RECORDER(fCmdBuffer, StencilPath, |
275 (path, pipelineBuilder.getRenderTarget())); | 306 (path, pipelineBuilder.getRenderTarget())); |
276 sp->fScissor = scissorState; | 307 sp->fScissor = scissorState; |
277 sp->fUseHWAA = pipelineBuilder.isHWAntialias(); | 308 sp->fUseHWAA = pipelineBuilder.isHWAntialias(); |
278 sp->fViewMatrix = pathProc->viewMatrix(); | 309 sp->fViewMatrix = pathProc->viewMatrix(); |
279 sp->fStencil = stencilSettings; | 310 sp->fStencil = stencilSettings; |
280 this->recordTraceMarkersIfNecessary(); | 311 this->recordTraceMarkersIfNecessary(); |
281 } | 312 } |
282 | 313 |
283 void GrInOrderDrawBuffer::onDrawPath(const GrPipelineBuilder& pipelineBuilder, | 314 void GrInOrderDrawBuffer::onDrawPath(const GrPipelineBuilder& pipelineBuilder, |
284 const GrPathProcessor* pathProc, | 315 const GrPathProcessor* pathProc, |
285 const GrPath* path, | 316 const GrPath* path, |
286 const GrScissorState& scissorState, | 317 const GrScissorState& scissorState, |
287 const GrStencilSettings& stencilSettings, | 318 const GrStencilSettings& stencilSettings, |
288 const GrDeviceCoordTexture* dstCopy) { | 319 const GrDeviceCoordTexture* dstCopy) { |
| 320 this->closeBatch(); |
| 321 |
289 // TODO: Only compare the subset of GrPipelineBuilder relevant to path covering? | 322 // TODO: Only compare the subset of GrPipelineBuilder relevant to path covering? |
290 if (!this->recordStateAndShouldDraw(pipelineBuilder, pathProc, scissorState, dstCopy)) { | 323 if (!this->recordStateAndShouldDraw(pipelineBuilder, pathProc, scissorState, dstCopy)) { |
291 return; | 324 return; |
292 } | 325 } |
293 DrawPath* dp = GrNEW_APPEND_TO_RECORDER(fCmdBuffer, DrawPath, (path)); | 326 DrawPath* dp = GrNEW_APPEND_TO_RECORDER(fCmdBuffer, DrawPath, (path)); |
294 dp->fStencilSettings = stencilSettings; | 327 dp->fStencilSettings = stencilSettings; |
295 this->recordTraceMarkersIfNecessary(); | 328 this->recordTraceMarkersIfNecessary(); |
296 } | 329 } |
297 | 330 |
298 void GrInOrderDrawBuffer::onDrawPaths(const GrPipelineBuilder& pipelineBuilder, | 331 void GrInOrderDrawBuffer::onDrawPaths(const GrPipelineBuilder& pipelineBuilder, |
299 const GrPathProcessor* pathProc, | 332 const GrPathProcessor* pathProc, |
300 const GrPathRange* pathRange, | 333 const GrPathRange* pathRange, |
301 const void* indices, | 334 const void* indices, |
302 PathIndexType indexType, | 335 PathIndexType indexType, |
303 const float transformValues[], | 336 const float transformValues[], |
304 PathTransformType transformType, | 337 PathTransformType transformType, |
305 int count, | 338 int count, |
306 const GrScissorState& scissorState, | 339 const GrScissorState& scissorState, |
307 const GrStencilSettings& stencilSettings, | 340 const GrStencilSettings& stencilSettings, |
308 const GrDeviceCoordTexture* dstCopy) { | 341 const GrDeviceCoordTexture* dstCopy) { |
309 SkASSERT(pathRange); | 342 SkASSERT(pathRange); |
310 SkASSERT(indices); | 343 SkASSERT(indices); |
311 SkASSERT(transformValues); | 344 SkASSERT(transformValues); |
312 | 345 |
| 346 this->closeBatch(); |
| 347 |
313 if (!this->recordStateAndShouldDraw(pipelineBuilder, pathProc, scissorState, dstCopy)) { | 348 if (!this->recordStateAndShouldDraw(pipelineBuilder, pathProc, scissorState, dstCopy)) { |
314 return; | 349 return; |
315 } | 350 } |
316 | 351 |
317 int indexBytes = GrPathRange::PathIndexSizeInBytes(indexType); | 352 int indexBytes = GrPathRange::PathIndexSizeInBytes(indexType); |
318 if (int misalign = fPathIndexBuffer.count() % indexBytes) { | 353 if (int misalign = fPathIndexBuffer.count() % indexBytes) { |
319 // Add padding to the index buffer so the indices are aligned properly. | 354 // Add padding to the index buffer so the indices are aligned properly. |
320 fPathIndexBuffer.append(indexBytes - misalign); | 355 fPathIndexBuffer.append(indexBytes - misalign); |
321 } | 356 } |
322 | 357 |
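The alignment step above pads the shared index buffer so that the next run of indices starts on a multiple of its element size. The same idea in isolation, as a small sketch over a plain byte vector (hypothetical helper, not Skia's SkTDArray):

#include <cstdint>
#include <vector>

// Hypothetical helper: pad a raw byte buffer so the next append starts on an
// 'elementSize' boundary, mirroring the index-buffer alignment step above.
void padToAlignment(std::vector<uint8_t>* buffer, int elementSize) {
    if (int misalign = static_cast<int>(buffer->size()) % elementSize) {
        buffer->insert(buffer->end(), elementSize - misalign, uint8_t{0});
    }
}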
(...skipping 73 matching lines...)
396 clr->fColor = GrColor_ILLEGAL; | 431 clr->fColor = GrColor_ILLEGAL; |
397 this->recordTraceMarkersIfNecessary(); | 432 this->recordTraceMarkersIfNecessary(); |
398 } | 433 } |
399 | 434 |
400 void GrInOrderDrawBuffer::onReset() { | 435 void GrInOrderDrawBuffer::onReset() { |
401 fCmdBuffer.reset(); | 436 fCmdBuffer.reset(); |
402 fPrevState = NULL; | 437 fPrevState = NULL; |
403 reset_data_buffer(&fPathIndexBuffer, kPathIdxBufferMinReserve); | 438 reset_data_buffer(&fPathIndexBuffer, kPathIdxBufferMinReserve); |
404 reset_data_buffer(&fPathTransformBuffer, kPathXformBufferMinReserve); | 439 reset_data_buffer(&fPathTransformBuffer, kPathXformBufferMinReserve); |
405 fGpuCmdMarkers.reset(); | 440 fGpuCmdMarkers.reset(); |
| 441 fDrawBatch = NULL; |
406 } | 442 } |
407 | 443 |
408 void GrInOrderDrawBuffer::onFlush() { | 444 void GrInOrderDrawBuffer::onFlush() { |
409 if (fCmdBuffer.empty()) { | 445 if (fCmdBuffer.empty()) { |
410 return; | 446 return; |
411 } | 447 } |
412 | 448 |
413 | |
414 CmdBuffer::Iter iter(fCmdBuffer); | |
415 | |
416 int currCmdMarker = 0; | |
417 | |
418 // Updated every time we find a set state cmd to reflect the current state in the playback | 449 // Updated every time we find a set state cmd to reflect the current state in the playback |
419 // stream. | 450 // stream. |
420 SetState* currentState = NULL; | 451 SetState* currentState = NULL; |
421 | 452 |
| 453 // TODO this is temporary while batch is being rolled out |
| 454 this->closeBatch(); |
| 455 this->getVertexAllocPool()->unmap(); |
| 456 this->getIndexAllocPool()->unmap(); |
| 457 fBatchTarget.preFlush(); |
| 458 |
| 459 currentState = NULL; |
| 460 CmdBuffer::Iter iter(fCmdBuffer); |
| 461 |
| 462 int currCmdMarker = 0; |
| 463 |
422 while (iter.next()) { | 464 while (iter.next()) { |
423 GrGpuTraceMarker newMarker("", -1); | 465 GrGpuTraceMarker newMarker("", -1); |
424 SkString traceString; | 466 SkString traceString; |
425 if (cmd_has_trace_marker(iter->fType)) { | 467 if (cmd_has_trace_marker(iter->fType)) { |
426 traceString = fGpuCmdMarkers[currCmdMarker].toString(); | 468 traceString = fGpuCmdMarkers[currCmdMarker].toString(); |
427 newMarker.fMarker = traceString.c_str(); | 469 newMarker.fMarker = traceString.c_str(); |
428 this->getGpu()->addGpuTraceMarker(&newMarker); | 470 this->getGpu()->addGpuTraceMarker(&newMarker); |
429 ++currCmdMarker; | 471 ++currCmdMarker; |
430 } | 472 } |
431 | 473 |
432 if (kSetState_Cmd == strip_trace_bit(iter->fType)) { | 474 // TODO temporary hack |
| 475 if (kDrawBatch_Cmd == strip_trace_bit(iter->fType)) { |
| 476 fBatchTarget.flushNext(); |
| 477 continue; |
| 478 } |
| 479 |
| 480 bool isSetState = kSetState_Cmd == strip_trace_bit(iter->fType); |
| 481 if (isSetState) { |
433 SetState* ss = reinterpret_cast<SetState*>(iter.get()); | 482 SetState* ss = reinterpret_cast<SetState*>(iter.get()); |
434 | 483 |
435 this->getGpu()->buildProgramDesc(&ss->fDesc, *ss->fPrimitiveProcessor, ss->fPipeline, | 484 // TODO sometimes we have a prim proc, othertimes we have a GrBatch. Eventually we will |
436 ss->fPipeline.descInfo(), ss->fBatchTracker); | 485 // only have GrBatch and we can delete this |
| 486 if (ss->fPrimitiveProcessor) { |
| 487 this->getGpu()->buildProgramDesc(&ss->fDesc, *ss->fPrimitiveProcessor, |
| 488 ss->fPipeline, |
| 489 ss->fPipeline.descInfo(), |
| 490 ss->fBatchTracker); |
| 491 } |
437 currentState = ss; | 492 currentState = ss; |
438 | |
439 } else { | 493 } else { |
440 iter->execute(this, currentState); | 494 iter->execute(this, currentState); |
441 } | 495 } |
442 | 496 |
443 if (cmd_has_trace_marker(iter->fType)) { | 497 if (cmd_has_trace_marker(iter->fType)) { |
444 this->getGpu()->removeGpuTraceMarker(&newMarker); | 498 this->getGpu()->removeGpuTraceMarker(&newMarker); |
445 } | 499 } |
446 } | 500 } |
447 | 501 |
| 502 // TODO see copious notes about hack |
| 503 fBatchTarget.postFlush(); |
| 504 |
448 SkASSERT(fGpuCmdMarkers.count() == currCmdMarker); | 505 SkASSERT(fGpuCmdMarkers.count() == currCmdMarker); |
449 ++fDrawID; | 506 ++fDrawID; |
450 } | 507 } |
451 | 508 |
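onFlush above is a straight replay loop: SetState commands only update the currentState pointer, while every other command executes against whatever state was most recently seen (the DrawBatch/flushNext branch is called out in the diff as a temporary hack). A compressed sketch of that replay structure, using hypothetical Cmd/State types rather than the real command classes:

#include <memory>
#include <vector>

// Hypothetical recorded-command types, for illustration only.
struct State { /* pipeline, program descriptor, batch tracker, ... */ };

struct Cmd {
    virtual ~Cmd() {}
    virtual bool isSetState() const { return false; }
    virtual void execute(const State* currentState) = 0;
};

struct SetStateCmd : public Cmd {
    State fState;
    bool isSetState() const override { return true; }
    void execute(const State*) override {}  // nothing to do at playback time
};

// Replay the recorded stream in order; non-state commands always see the
// most recently recorded state.
void replay(const std::vector<std::unique_ptr<Cmd>>& cmds) {
    const State* currentState = nullptr;
    for (const auto& cmd : cmds) {
        if (cmd->isSetState()) {
            currentState = &static_cast<SetStateCmd*>(cmd.get())->fState;
        } else {
            cmd->execute(currentState);
        }
    }
}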
452 void GrInOrderDrawBuffer::Draw::execute(GrInOrderDrawBuffer* buf, const SetState* state) { | 509 void GrInOrderDrawBuffer::Draw::execute(GrInOrderDrawBuffer* buf, const SetState* state) { |
453 SkASSERT(state); | 510 SkASSERT(state); |
454 DrawArgs args(state->fPrimitiveProcessor.get(), &state->fPipeline, &state->fDesc, | 511 DrawArgs args(state->fPrimitiveProcessor.get(), &state->fPipeline, &state->fDesc, |
455 &state->fBatchTracker); | 512 &state->fBatchTracker); |
456 buf->getGpu()->draw(args, fInfo); | 513 buf->getGpu()->draw(args, fInfo); |
457 } | 514 } |
(...skipping 19 matching lines...)
477 void GrInOrderDrawBuffer::DrawPaths::execute(GrInOrderDrawBuffer* buf, const SetState* state) { | 534 void GrInOrderDrawBuffer::DrawPaths::execute(GrInOrderDrawBuffer* buf, const SetState* state) { |
478 SkASSERT(state); | 535 SkASSERT(state); |
479 DrawArgs args(state->fPrimitiveProcessor.get(), &state->fPipeline, &state->fDesc, | 536 DrawArgs args(state->fPrimitiveProcessor.get(), &state->fPipeline, &state->fDesc, |
480 &state->fBatchTracker); | 537 &state->fBatchTracker); |
481 buf->getGpu()->drawPaths(args, this->pathRange(), | 538 buf->getGpu()->drawPaths(args, this->pathRange(), |
482 &buf->fPathIndexBuffer[fIndicesLocation], fIndexType, | 539 &buf->fPathIndexBuffer[fIndicesLocation], fIndexType, |
483 &buf->fPathTransformBuffer[fTransformsLocation], fTransformType, | 540 &buf->fPathTransformBuffer[fTransformsLocation], fTransformType, |
484 fCount, fStencilSettings); | 541 fCount, fStencilSettings); |
485 } | 542 } |
486 | 543 |
| 544 void GrInOrderDrawBuffer::DrawBatch::execute(GrInOrderDrawBuffer* buf, const SetState* state) { |
| 545 SkASSERT(state); |
| 546 fBatch->generateGeometry(buf->getBatchTarget(), &state->fPipeline); |
| 547 } |
| 548 |
487 void GrInOrderDrawBuffer::SetState::execute(GrInOrderDrawBuffer*, const SetState*) {} | 549 void GrInOrderDrawBuffer::SetState::execute(GrInOrderDrawBuffer*, const SetState*) {} |
488 | 550 |
489 void GrInOrderDrawBuffer::Clear::execute(GrInOrderDrawBuffer* buf, const SetState*) { | 551 void GrInOrderDrawBuffer::Clear::execute(GrInOrderDrawBuffer* buf, const SetState*) { |
490 if (GrColor_ILLEGAL == fColor) { | 552 if (GrColor_ILLEGAL == fColor) { |
491 buf->getGpu()->discard(this->renderTarget()); | 553 buf->getGpu()->discard(this->renderTarget()); |
492 } else { | 554 } else { |
493 buf->getGpu()->clear(&fRect, fColor, fCanIgnoreRect, this->renderTarget()); | 555 buf->getGpu()->clear(&fRect, fColor, fCanIgnoreRect, this->renderTarget()); |
494 } | 556 } |
495 } | 557 } |
496 | 558 |
(...skipping 27 matching lines...)
524 (pipelineBuilder, primProc, *this->getGpu()->caps(), | 586 (pipelineBuilder, primProc, *this->getGpu()->caps(), |
525 scissor, dstCopy)); | 587 scissor, dstCopy)); |
526 if (ss->fPipeline.mustSkip()) { | 588 if (ss->fPipeline.mustSkip()) { |
527 fCmdBuffer.pop_back(); | 589 fCmdBuffer.pop_back(); |
528 return false; | 590 return false; |
529 } | 591 } |
530 | 592 |
531 ss->fPrimitiveProcessor->initBatchTracker(&ss->fBatchTracker, | 593 ss->fPrimitiveProcessor->initBatchTracker(&ss->fBatchTracker, |
532 ss->fPipeline.getInitBatchTracker()); | 594 ss->fPipeline.getInitBatchTracker()); |
533 | 595 |
534 if (fPrevState && | 596 if (fPrevState && fPrevState->fPrimitiveProcessor.get() && |
535 fPrevState->fPrimitiveProcessor->canMakeEqual(fPrevState->fBatchTracker, | 597 fPrevState->fPrimitiveProcessor->canMakeEqual(fPrevState->fBatchTracker, |
536 *ss->fPrimitiveProcessor, | 598 *ss->fPrimitiveProcessor, |
537 ss->fBatchTracker) && | 599 ss->fBatchTracker) && |
538 fPrevState->fPipeline.isEqual(ss->fPipeline)) { | 600 fPrevState->fPipeline.isEqual(ss->fPipeline)) { |
539 fCmdBuffer.pop_back(); | 601 fCmdBuffer.pop_back(); |
540 } else { | 602 } else { |
541 fPrevState = ss; | 603 fPrevState = ss; |
542 this->recordTraceMarkersIfNecessary(); | 604 this->recordTraceMarkersIfNecessary(); |
543 } | 605 } |
544 return true; | 606 return true; |
545 } | 607 } |
546 | 608 |
| 609 bool GrInOrderDrawBuffer::recordStateAndShouldDraw(GrBatch* batch, |
| 610 const GrPipelineBuilder& pipelineBuilder, |
| 611 const GrScissorState& scissor, |
| 612 const GrDeviceCoordTexture* dstCopy) { |
| 613 // TODO this gets much simpler when we have batches everywhere. |
| 614 // If the previous command is also a set state, then we check to see if it has a Batch. If so, |
| 615 // and we can make the two batches equal, and we can combine the states, then we make them equal |
| 616 SetState* ss = GrNEW_APPEND_TO_RECORDER(fCmdBuffer, SetState, |
| 617 (batch, pipelineBuilder, *this->getGpu()->caps(), scissor, |
| 618 dstCopy)); |
| 619 if (ss->fPipeline.mustSkip()) { |
| 620 fCmdBuffer.pop_back(); |
| 621 return false; |
| 622 } |
| 623 |
| 624 batch->initBatchTracker(ss->fPipeline.getInitBatchTracker()); |
| 625 |
| 626 if (fPrevState && !fPrevState->fPrimitiveProcessor.get() && |
| 627 fPrevState->fPipeline.isEqual(ss->fPipeline)) { |
| 628 fCmdBuffer.pop_back(); |
| 629 } else { |
| 630 this->closeBatch(); |
| 631 fPrevState = ss; |
| 632 this->recordTraceMarkersIfNecessary(); |
| 633 } |
| 634 return true; |
| 635 } |
| 636 |
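Both recordStateAndShouldDraw overloads follow the same append-then-maybe-pop idiom: the candidate SetState is recorded optimistically, then popped back off the recorder if the pipeline must be skipped or if it is redundant with the previously recorded state. A small sketch of the idiom with hypothetical types:

#include <vector>

// Hypothetical pipeline state used only for this sketch.
struct PipelineState {
    int  key      = 0;
    bool mustSkip = false;
    bool operator==(const PipelineState& o) const { return key == o.key; }
};

struct StateRecorder {
    std::vector<PipelineState> commands;       // recorded SetState commands
    PipelineState              prevState;
    bool                       hasPrevState = false;

    // Returns false if the draw should be dropped entirely.
    bool recordStateAndShouldDraw(const PipelineState& candidate) {
        commands.push_back(candidate);         // record optimistically
        if (commands.back().mustSkip) {
            commands.pop_back();               // the draw would have no effect
            return false;
        }
        if (hasPrevState && prevState == commands.back()) {
            commands.pop_back();               // redundant: previous state still applies
        } else {
            prevState = commands.back();
            hasPrevState = true;
        }
        return true;
    }
};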
547 void GrInOrderDrawBuffer::recordTraceMarkersIfNecessary() { | 637 void GrInOrderDrawBuffer::recordTraceMarkersIfNecessary() { |
548 SkASSERT(!fCmdBuffer.empty()); | 638 SkASSERT(!fCmdBuffer.empty()); |
549 SkASSERT(!cmd_has_trace_marker(fCmdBuffer.back().fType)); | 639 SkASSERT(!cmd_has_trace_marker(fCmdBuffer.back().fType)); |
550 const GrTraceMarkerSet& activeTraceMarkers = this->getActiveTraceMarkers(); | 640 const GrTraceMarkerSet& activeTraceMarkers = this->getActiveTraceMarkers(); |
551 if (activeTraceMarkers.count() > 0) { | 641 if (activeTraceMarkers.count() > 0) { |
552 fCmdBuffer.back().fType = add_trace_bit(fCmdBuffer.back().fType); | 642 fCmdBuffer.back().fType = add_trace_bit(fCmdBuffer.back().fType); |
553 fGpuCmdMarkers.push_back(activeTraceMarkers); | 643 fGpuCmdMarkers.push_back(activeTraceMarkers); |
554 } | 644 } |
555 } | 645 } |
| 646 |
| 647 void GrInOrderDrawBuffer::closeBatch() { |
| 648 if (fDrawBatch) { |
| 649 fDrawBatch->execute(this, fPrevState); |
| 650 fDrawBatch = NULL; |
| 651 } |
| 652 } |
| 653 |
| 654 void GrInOrderDrawBuffer::willReserveVertexAndIndexSpace(int vertexCount, |
| 655 size_t vertexStride, |
| 656 int indexCount) { |
| 657 this->closeBatch(); |
| 658 |
| 659 // We use geometryHints() to know whether to flush the draw buffer. We |
| 660 // can't flush if we are inside an unbalanced pushGeometrySource. |
| 661 // Moreover, flushing blows away vertex and index data that was |
| 662 // previously reserved. So if the vertex or index data is pulled from |
| 663 // reserved space and won't be released by this request then we can't |
| 664 // flush. |
| 665 bool insideGeoPush = this->getGeoPoolStateStack().count() > 1; |
| 666 |
| 667 bool unreleasedVertexSpace = |
| 668 !vertexCount && |
| 669 kReserved_GeometrySrcType == this->getGeomSrc().fVertexSrc; |
| 670 |
| 671 bool unreleasedIndexSpace = |
| 672 !indexCount && |
| 673 kReserved_GeometrySrcType == this->getGeomSrc().fIndexSrc; |
| 674 |
| 675 int vcount = vertexCount; |
| 676 int icount = indexCount; |
| 677 |
| 678 if (!insideGeoPush && |
| 679 !unreleasedVertexSpace && |
| 680 !unreleasedIndexSpace && |
| 681 this->geometryHints(vertexStride, &vcount, &icount)) { |
| 682 this->flush(); |
| 683 } |
| 684 } |
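willReserveVertexAndIndexSpace only flushes when it is safe to do so: not inside an unbalanced geometry-source push, and not while previously reserved vertex or index space would be discarded by the flush. A condensed sketch of that guard, with hypothetical booleans standing in for the real queries (getGeoPoolStateStack, getGeomSrc, geometryHints):

// Hypothetical inputs standing in for the draw target's real queries.
struct ReserveRequest {
    bool insideGeoPush;           // unbalanced pushGeometrySource in progress
    bool reusesReservedVertices;  // reserved vertex space this request won't release
    bool reusesReservedIndices;   // reserved index space this request won't release
    bool hintsSayFlush;           // geometry hints suggest the buffer is worth flushing
};

// Flushing is allowed only when nothing still depends on un-flushed reserved data.
bool shouldFlushBeforeReserve(const ReserveRequest& r) {
    return !r.insideGeoPush &&
           !r.reusesReservedVertices &&
           !r.reusesReservedIndices &&
           r.hintsSayFlush;
}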