Diffstat (limited to 'chromium/gpu/command_buffer/client/fenced_allocator_test.cc')
-rw-r--r--  chromium/gpu/command_buffer/client/fenced_allocator_test.cc  75
1 file changed, 71 insertions, 4 deletions
diff --git a/chromium/gpu/command_buffer/client/fenced_allocator_test.cc b/chromium/gpu/command_buffer/client/fenced_allocator_test.cc
index cc99ed36522..7ac35d4435e 100644
--- a/chromium/gpu/command_buffer/client/fenced_allocator_test.cc
+++ b/chromium/gpu/command_buffer/client/fenced_allocator_test.cc
@@ -29,6 +29,7 @@ using testing::Truly;
using testing::Sequence;
using testing::DoAll;
using testing::Invoke;
+using testing::InvokeWithoutArgs;
using testing::_;
class BaseFencedAllocatorTest : public testing::Test {
@@ -70,7 +71,7 @@ class BaseFencedAllocatorTest : public testing::Test {
}
int32 GetToken() {
- return command_buffer_->GetState().token;
+ return command_buffer_->GetLastState().token;
}
#if defined(OS_MACOSX)
@@ -88,6 +89,11 @@ class BaseFencedAllocatorTest : public testing::Test {
const unsigned int BaseFencedAllocatorTest::kBufferSize;
#endif
+namespace {
+void EmptyPoll() {
+}
+}
+
// Test fixture for FencedAllocator test - Creates a FencedAllocator, using a
// CommandBufferHelper with a mock AsyncAPIInterface for its interface (calling
// it directly, not through the RPC mechanism), making sure Noops are ignored
@@ -96,7 +102,9 @@ class FencedAllocatorTest : public BaseFencedAllocatorTest {
protected:
virtual void SetUp() {
BaseFencedAllocatorTest::SetUp();
- allocator_.reset(new FencedAllocator(kBufferSize, helper_.get()));
+ allocator_.reset(new FencedAllocator(kBufferSize,
+ helper_.get(),
+ base::Bind(&EmptyPoll)));
}
virtual void TearDown() {
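The new third constructor argument is a no-argument callback that the tests build with base::Bind. A minimal sketch, outside this patch, of the two binding patterns used below (assuming base/bind.h and base/callback.h from the same Chromium revision; PollTarget and BindSketch are hypothetical names used only for illustration):

#include "base/bind.h"
#include "base/callback.h"

namespace {

void EmptyPoll() {}

class PollTarget {
 public:
  void Poll() {}
};

}  // namespace

void BindSketch() {
  // Binding a free function yields a base::Closure with no bound state.
  base::Closure noop_poll = base::Bind(&EmptyPoll);

  // Binding a member function with base::Unretained skips ref-counting; the
  // caller must keep |target| alive for every Run() of the closure.
  PollTarget target;
  base::Closure member_poll =
      base::Bind(&PollTarget::Poll, base::Unretained(&target));

  noop_poll.Run();
  member_poll.Run();
}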
@@ -391,6 +399,63 @@ TEST_F(FencedAllocatorTest, TestGetLargestFreeOrPendingSize) {
EXPECT_EQ(kBufferSize, allocator_->GetLargestFreeSize());
}
+class FencedAllocatorPollTest : public BaseFencedAllocatorTest {
+ public:
+ static const unsigned int kAllocSize = 128;
+
+ MOCK_METHOD0(MockedPoll, void());
+
+ protected:
+ virtual void TearDown() {
+ // If the GpuScheduler posts any tasks, this forces them to run.
+ base::MessageLoop::current()->RunUntilIdle();
+
+ BaseFencedAllocatorTest::TearDown();
+ }
+};
+
+TEST_F(FencedAllocatorPollTest, TestPoll) {
+ scoped_ptr<FencedAllocator> allocator(
+ new FencedAllocator(kBufferSize,
+ helper_.get(),
+ base::Bind(&FencedAllocatorPollTest::MockedPoll,
+ base::Unretained(this))));
+
+ FencedAllocator::Offset mem1 = allocator->Alloc(kAllocSize);
+ FencedAllocator::Offset mem2 = allocator->Alloc(kAllocSize);
+ EXPECT_NE(mem1, FencedAllocator::kInvalidOffset);
+ EXPECT_NE(mem2, FencedAllocator::kInvalidOffset);
+ EXPECT_TRUE(allocator->CheckConsistency());
+ EXPECT_EQ(allocator->bytes_in_use(), kAllocSize * 2);
+
+ // Check that no-op Poll doesn't affect the state.
+ EXPECT_CALL(*this, MockedPoll()).RetiresOnSaturation();
+ allocator->FreeUnused();
+ EXPECT_TRUE(allocator->CheckConsistency());
+ EXPECT_EQ(allocator->bytes_in_use(), kAllocSize * 2);
+
+ // Check that freeing in Poll works.
+ base::Closure free_mem1_closure =
+ base::Bind(&FencedAllocator::Free,
+ base::Unretained(allocator.get()),
+ mem1);
+ EXPECT_CALL(*this, MockedPoll())
+ .WillOnce(InvokeWithoutArgs(&free_mem1_closure, &base::Closure::Run))
+ .RetiresOnSaturation();
+ allocator->FreeUnused();
+ EXPECT_TRUE(allocator->CheckConsistency());
+ EXPECT_EQ(allocator->bytes_in_use(), kAllocSize * 1);
+
+ // Check that freeing still works.
+ EXPECT_CALL(*this, MockedPoll()).RetiresOnSaturation();
+ allocator->Free(mem2);
+ allocator->FreeUnused();
+ EXPECT_TRUE(allocator->CheckConsistency());
+ EXPECT_EQ(allocator->bytes_in_use(), 0u);
+
+ allocator.reset();
+}
+
// Test fixture for FencedAllocatorWrapper test - Creates a
// FencedAllocatorWrapper, using a CommandBufferHelper with a mock
// AsyncAPIInterface for its interface (calling it directly, not through the
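The TestPoll case above relies on a GoogleMock action to run a pre-bound closure from inside a mocked call. A minimal, self-contained sketch of that idiom under the same assumptions (gmock plus the era's base::Bind/base::Closure; MockClient and DoWork are hypothetical names for illustration):

#include "base/bind.h"
#include "base/callback.h"
#include "testing/gmock/include/gmock/gmock.h"

using testing::InvokeWithoutArgs;

namespace {

void DoWork() {}

class MockClient {
 public:
  MOCK_METHOD0(OnPoll, void());
};

}  // namespace

TEST(InvokeClosureSketchTest, RunsBoundClosure) {
  MockClient client;
  base::Closure work = base::Bind(&DoWork);
  // InvokeWithoutArgs(object, method) calls object->method() when the mocked
  // function fires, ignoring the mocked function's own arguments.
  EXPECT_CALL(client, OnPoll())
      .WillOnce(InvokeWithoutArgs(&work, &base::Closure::Run));
  client.OnPoll();  // Runs |work| via the mock action.
}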
@@ -406,7 +471,9 @@ class FencedAllocatorWrapperTest : public BaseFencedAllocatorTest {
// something.
buffer_.reset(static_cast<char*>(base::AlignedAlloc(
kBufferSize, kAllocAlignment)));
- allocator_.reset(new FencedAllocatorWrapper(kBufferSize, helper_.get(),
+ allocator_.reset(new FencedAllocatorWrapper(kBufferSize,
+ helper_.get(),
+ base::Bind(&EmptyPoll),
buffer_.get()));
}
@@ -420,7 +487,7 @@ class FencedAllocatorWrapperTest : public BaseFencedAllocatorTest {
}
scoped_ptr<FencedAllocatorWrapper> allocator_;
- scoped_ptr_malloc<char, base::ScopedPtrAlignedFree> buffer_;
+ scoped_ptr<char, base::AlignedFreeDeleter> buffer_;
};
// Checks basic alloc and free.
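The final hunk moves the wrapper fixture off scoped_ptr_malloc to scoped_ptr with an explicit deleter. A minimal sketch of the pairing the fixture now depends on (assuming base/memory/aligned_memory.h and base/memory/scoped_ptr.h from the same revision; the size and alignment values are placeholders):

#include "base/memory/aligned_memory.h"
#include "base/memory/scoped_ptr.h"

void AlignedBufferSketch() {
  // Memory from base::AlignedAlloc must be released with base::AlignedFree,
  // so the scoped_ptr needs base::AlignedFreeDeleter instead of the default
  // delete-based deleter.
  scoped_ptr<char, base::AlignedFreeDeleter> buffer(
      static_cast<char*>(base::AlignedAlloc(4096, 16)));
  // |buffer| is released via base::AlignedFree when it goes out of scope.
}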