Chromium Code Reviews

Unified Diff: gpu/command_buffer/client/fenced_allocator_test.cc

Issue 1186393004: gpu: Remove async texture uploads. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: rebase Created 5 years, 3 months ago
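The change to this file: FencedAllocator and FencedAllocatorWrapper no longer take an asynchronous poll callback, so the file-local EmptyPoll() helper, the callback argument at both construction sites, and the FencedAllocatorPollTest fixture with its TestPoll test are deleted. The construction change, lifted straight from the hunks below (the surrounding SetUp() code is untouched):

// Old: a poll callback had to be supplied, even if it did nothing.
allocator_.reset(new FencedAllocator(kBufferSize,
                                     helper_.get(),
                                     base::Bind(&EmptyPoll)));
// New: a size and a CommandBufferHelper are all the allocator needs.
allocator_.reset(new FencedAllocator(kBufferSize, helper_.get()));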
 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 // This file contains the tests for the FencedAllocator class.

 #include "base/bind.h"
 #include "base/bind_helpers.h"
 #include "base/memory/aligned_memory.h"
 #include "gpu/command_buffer/client/cmd_buffer_helper.h"
(...skipping 62 matching lines...)
   scoped_refptr<TransferBufferManagerInterface> transfer_buffer_manager_;
   scoped_ptr<CommandBufferService> command_buffer_;
   scoped_ptr<GpuScheduler> gpu_scheduler_;
   scoped_ptr<CommandBufferHelper> helper_;
 };

 #ifndef _MSC_VER
 const unsigned int BaseFencedAllocatorTest::kBufferSize;
 #endif

-namespace {
-void EmptyPoll() {
-}
-}
-
 // Test fixture for FencedAllocator test - Creates a FencedAllocator, using a
 // CommandBufferHelper with a mock AsyncAPIInterface for its interface (calling
 // it directly, not through the RPC mechanism), making sure Noops are ignored
 // and SetToken are properly forwarded to the engine.
 class FencedAllocatorTest : public BaseFencedAllocatorTest {
  protected:
   void SetUp() override {
     BaseFencedAllocatorTest::SetUp();
-    allocator_.reset(new FencedAllocator(kBufferSize,
-                                         helper_.get(),
-                                         base::Bind(&EmptyPoll)));
+    allocator_.reset(new FencedAllocator(kBufferSize, helper_.get()));
   }

   void TearDown() override {
     // If the GpuScheduler posts any tasks, this forces them to run.
     base::MessageLoop::current()->RunUntilIdle();

     EXPECT_TRUE(allocator_->CheckConsistency());

     BaseFencedAllocatorTest::TearDown();
   }
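As the fixture comment above explains, the CommandBufferHelper drives a mock AsyncAPIInterface directly, so tests exercise nothing but the allocator itself. A minimal sketch of a test built on this fixture, using only calls that appear elsewhere in this file; the test name and the 16-byte size are made up for illustration:

TEST_F(FencedAllocatorTest, AllocFreeSketch) {
  // A successful allocation never returns kInvalidOffset.
  FencedAllocator::Offset offset = allocator_->Alloc(16u);
  EXPECT_NE(FencedAllocator::kInvalidOffset, offset);
  EXPECT_TRUE(allocator_->CheckConsistency());

  // Freeing the only block coalesces the buffer back into one free range.
  allocator_->Free(offset);
  EXPECT_EQ(kBufferSize, allocator_->GetLargestFreeSize());
}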
(...skipping 274 matching lines...)
   // Check that the token has indeed passed.
   EXPECT_LE(token, GetToken());
   allocator_->Free(offset);

   // Everything now has been freed...
   EXPECT_EQ(kBufferSize, allocator_->GetLargestFreeOrPendingSize());
   // ... for real.
   EXPECT_EQ(kBufferSize, allocator_->GetLargestFreeSize());
 }

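The pair of expectations above separates space that is merely "free or pending" from space that is free outright; the distinction only matters for blocks released against a token. A rough sketch of that flow, under the assumption that FreePendingToken(), InsertToken() and Finish() are available here as in the wider command-buffer client API; none of them appear in this hunk:

  FencedAllocator::Offset offset = allocator_->Alloc(16u);
  int32 token = helper_->InsertToken();
  allocator_->FreePendingToken(offset, token);  // reclaimable once |token| passes
  helper_->Finish();         // force the service side past the token
  allocator_->FreeUnused();  // fold pending blocks whose tokens have passed into free space
  EXPECT_EQ(kBufferSize, allocator_->GetLargestFreeOrPendingSize());
  EXPECT_EQ(kBufferSize, allocator_->GetLargestFreeSize());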
-class FencedAllocatorPollTest : public BaseFencedAllocatorTest {
- public:
-  static const unsigned int kAllocSize = 128;
-
-  MOCK_METHOD0(MockedPoll, void());
-
- protected:
-  virtual void TearDown() {
-    // If the GpuScheduler posts any tasks, this forces them to run.
-    base::MessageLoop::current()->RunUntilIdle();
-
-    BaseFencedAllocatorTest::TearDown();
-  }
-};
-
-TEST_F(FencedAllocatorPollTest, TestPoll) {
-  scoped_ptr<FencedAllocator> allocator(
-      new FencedAllocator(kBufferSize,
-                          helper_.get(),
-                          base::Bind(&FencedAllocatorPollTest::MockedPoll,
-                                     base::Unretained(this))));
-
-  FencedAllocator::Offset mem1 = allocator->Alloc(kAllocSize);
-  FencedAllocator::Offset mem2 = allocator->Alloc(kAllocSize);
-  EXPECT_NE(mem1, FencedAllocator::kInvalidOffset);
-  EXPECT_NE(mem2, FencedAllocator::kInvalidOffset);
-  EXPECT_TRUE(allocator->CheckConsistency());
-  EXPECT_EQ(allocator->bytes_in_use(), kAllocSize * 2);
-
-  // Check that no-op Poll doesn't affect the state.
-  EXPECT_CALL(*this, MockedPoll()).RetiresOnSaturation();
-  allocator->FreeUnused();
-  EXPECT_TRUE(allocator->CheckConsistency());
-  EXPECT_EQ(allocator->bytes_in_use(), kAllocSize * 2);
-
-  // Check that freeing in Poll works.
-  base::Closure free_mem1_closure =
-      base::Bind(&FencedAllocator::Free,
-                 base::Unretained(allocator.get()),
-                 mem1);
-  EXPECT_CALL(*this, MockedPoll())
-      .WillOnce(InvokeWithoutArgs(&free_mem1_closure, &base::Closure::Run))
-      .RetiresOnSaturation();
-  allocator->FreeUnused();
-  EXPECT_TRUE(allocator->CheckConsistency());
-  EXPECT_EQ(allocator->bytes_in_use(), kAllocSize * 1);
-
-  // Check that freeing still works.
-  EXPECT_CALL(*this, MockedPoll()).RetiresOnSaturation();
-  allocator->Free(mem2);
-  allocator->FreeUnused();
-  EXPECT_TRUE(allocator->CheckConsistency());
-  EXPECT_EQ(allocator->bytes_in_use(), 0u);
-
-  allocator.reset();
-}
-
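With the callback parameter gone there is nothing left for the deleted FencedAllocatorPollTest above to verify: it checked that FreeUnused() ran the poll callback and that the callback could itself free memory. Deferred cleanup now reduces to the same calls the deleted test already used, roughly as follows (the size and variable names are illustrative only):

  scoped_ptr<FencedAllocator> allocator(
      new FencedAllocator(kBufferSize, helper_.get()));
  FencedAllocator::Offset mem = allocator->Alloc(128u);
  EXPECT_NE(mem, FencedAllocator::kInvalidOffset);
  allocator->Free(mem);
  allocator->FreeUnused();  // no poll callback fires here any more
  EXPECT_EQ(allocator->bytes_in_use(), 0u);
  EXPECT_TRUE(allocator->CheckConsistency());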
 // Test fixture for FencedAllocatorWrapper test - Creates a
 // FencedAllocatorWrapper, using a CommandBufferHelper with a mock
 // AsyncAPIInterface for its interface (calling it directly, not through the
 // RPC mechanism), making sure Noops are ignored and SetToken are properly
 // forwarded to the engine.
 class FencedAllocatorWrapperTest : public BaseFencedAllocatorTest {
  protected:
   void SetUp() override {
     BaseFencedAllocatorTest::SetUp();

     // Though allocating this buffer isn't strictly necessary, it makes
     // allocations point to valid addresses, so they could be used for
     // something.
     buffer_.reset(static_cast<char*>(base::AlignedAlloc(
         kBufferSize, kAllocAlignment)));
     allocator_.reset(new FencedAllocatorWrapper(kBufferSize,
                                                 helper_.get(),
-                                                base::Bind(&EmptyPoll),
                                                 buffer_.get()));
   }

   void TearDown() override {
     // If the GpuScheduler posts any tasks, this forces them to run.
     base::MessageLoop::current()->RunUntilIdle();

     EXPECT_TRUE(allocator_->CheckConsistency());

     BaseFencedAllocatorTest::TearDown();
(...skipping 149 matching lines...)
   EXPECT_LE(token, GetToken());

   // Free up everything.
   for (unsigned int i = 0; i < kAllocCount; ++i) {
     allocator_->Free(pointers[i]);
     EXPECT_TRUE(allocator_->CheckConsistency());
   }
 }

 }  // namespace gpu
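One closing remark on the FencedAllocatorWrapperTest fixture further up: because its SetUp() backs the wrapper with a real aligned buffer, allocations hand back usable pointers rather than bare offsets (the final hunk frees pointers[i], for instance). A small sketch of that usage; treating Alloc() as returning a writable void* into buffer_ is an assumption here, since the allocation calls themselves sit in the elided lines:

  void* ptr = allocator_->Alloc(64u);
  ASSERT_TRUE(ptr != NULL);
  static_cast<char*>(ptr)[0] = 0;  // the memory is genuinely addressable
  allocator_->Free(ptr);
  EXPECT_TRUE(allocator_->CheckConsistency());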
