//  Copyright (c) 2011-present, Facebook, Inc.  All rights reserved.
//  This source code is licensed under both the GPLv2 (found in the
//  COPYING file in the root directory) and Apache 2.0 License
//  (found in the LICENSE.Apache file in the root directory).
//
// Copyright (c) 2011 The LevelDB Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file. See the AUTHORS file for names of contributors.

#include "memory/arena.h"
#include "test_util/testharness.h"
#include "util/random.h"

namespace ROCKSDB_NAMESPACE {

namespace {
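// 2 MiB: the typical huge page size on x86-64 Linux.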
const size_t kHugePageSize = 2 * 1024 * 1024;
}  // namespace
class ArenaTest : public testing::Test {};

TEST_F(ArenaTest, Empty) { Arena arena0; }

namespace {
bool CheckMemoryAllocated(size_t allocated, size_t expected) {
  // The value returned by Arena::MemoryAllocatedBytes() may be greater than
  // the requested memory. We choose a somewhat arbitrary upper bound of
  // max_expected = expected * 1.1 to detect critical overallocation.
  size_t max_expected = expected + expected / 10;
  return allocated >= expected && allocated <= max_expected;
}

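// Exercises the three request-size regimes of Arena's allocation policy:
// larger than a quarter of a block (allocated separately), smaller than a
// quarter of a block (served from the current block), and larger than a
// whole block (allocated separately). Run with and without huge pages.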
void MemoryAllocatedBytesTest(size_t huge_page_size) {
  const int N = 17;
  size_t req_sz;  // requested size
  size_t bsz = 32 * 1024;  // block size
  size_t expected_memory_allocated;

  Arena arena(bsz, nullptr, huge_page_size);

  // requested size > quarter of a block:
  //   allocate requested size separately
  req_sz = 12 * 1024;
  for (int i = 0; i < N; i++) {
    arena.Allocate(req_sz);
  }
  expected_memory_allocated = req_sz * N + Arena::kInlineSize;
  ASSERT_PRED2(CheckMemoryAllocated, arena.MemoryAllocatedBytes(),
               expected_memory_allocated);

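  // Use up the inline block (all but one byte) so the next small request
  // cannot be served from it.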
  arena.Allocate(Arena::kInlineSize - 1);

  // requested size < quarter of a block:
  //   allocate a block with the default size, then try to use the unused
  //   part of the block. So one new block will be allocated for the first
  //   Allocate(99) call. All the remaining calls won't lead to new allocation.
  req_sz = 99;
  for (int i = 0; i < N; i++) {
    arena.Allocate(req_sz);
  }
  if (huge_page_size) {
    ASSERT_TRUE(
        CheckMemoryAllocated(arena.MemoryAllocatedBytes(),
                             expected_memory_allocated + bsz) ||
        CheckMemoryAllocated(arena.MemoryAllocatedBytes(),
                             expected_memory_allocated + huge_page_size));
  } else {
    expected_memory_allocated += bsz;
    ASSERT_PRED2(CheckMemoryAllocated, arena.MemoryAllocatedBytes(),
                 expected_memory_allocated);
  }

  // requested size > size of a block:
  //   allocate requested size separately
  expected_memory_allocated = arena.MemoryAllocatedBytes();
  req_sz = 8 * 1024 * 1024;
  for (int i = 0; i < N; i++) {
    arena.Allocate(req_sz);
  }
  expected_memory_allocated += req_sz * N;
  ASSERT_PRED2(CheckMemoryAllocated, arena.MemoryAllocatedBytes(),
               expected_memory_allocated);
}

// Make sure Arena::ApproximateMemoryUsage() does not count memory that has
// been allocated but not yet handed out.
static void ApproximateMemoryUsageTest(size_t huge_page_size) {
  const size_t kBlockSize = 4096;
  const size_t kEntrySize = kBlockSize / 8;
  const size_t kZero = 0;
  Arena arena(kBlockSize, nullptr, huge_page_size);
  ASSERT_EQ(kZero, arena.ApproximateMemoryUsage());

  // allocate inline bytes
  const size_t kAlignUnit = alignof(max_align_t);
  EXPECT_TRUE(arena.IsInInlineBlock());
  arena.AllocateAligned(kAlignUnit);
  EXPECT_TRUE(arena.IsInInlineBlock());
  arena.AllocateAligned(Arena::kInlineSize / 2 - (2 * kAlignUnit));
  EXPECT_TRUE(arena.IsInInlineBlock());
  arena.AllocateAligned(Arena::kInlineSize / 2);
  EXPECT_TRUE(arena.IsInInlineBlock());
  ASSERT_EQ(arena.ApproximateMemoryUsage(), Arena::kInlineSize - kAlignUnit);
  ASSERT_PRED2(CheckMemoryAllocated, arena.MemoryAllocatedBytes(),
               Arena::kInlineSize);

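  // Number of kEntrySize entries that fit in a single block.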
  auto num_blocks = kBlockSize / kEntrySize;

  // first allocation
  arena.AllocateAligned(kEntrySize);
  EXPECT_FALSE(arena.IsInInlineBlock());
  auto mem_usage = arena.MemoryAllocatedBytes();
  if (huge_page_size) {
    ASSERT_TRUE(
        CheckMemoryAllocated(mem_usage, kBlockSize + Arena::kInlineSize) ||
        CheckMemoryAllocated(mem_usage, huge_page_size + Arena::kInlineSize));
  } else {
    ASSERT_PRED2(CheckMemoryAllocated, mem_usage,
                 kBlockSize + Arena::kInlineSize);
  }
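  // Approximate usage counts only handed-out bytes, so it stays below the
  // total allocated while the current block still has unused space.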
  auto usage = arena.ApproximateMemoryUsage();
  ASSERT_LT(usage, mem_usage);
  for (size_t i = 1; i < num_blocks; ++i) {
    arena.AllocateAligned(kEntrySize);
    ASSERT_EQ(mem_usage, arena.MemoryAllocatedBytes());
    ASSERT_EQ(arena.ApproximateMemoryUsage(), usage + kEntrySize);
    EXPECT_FALSE(arena.IsInInlineBlock());
    usage = arena.ApproximateMemoryUsage();
  }
  if (huge_page_size) {
    ASSERT_TRUE(usage > mem_usage ||
                usage + huge_page_size - kBlockSize == mem_usage);
  } else {
    ASSERT_GT(usage, mem_usage);
  }
}

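// Randomized stress test: make N allocations of varying sizes, both aligned
// and unaligned, fill each with a known byte pattern, then verify that every
// pattern survived and that ApproximateMemoryUsage() tracks the requested
// bytes to within 10%.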
static void SimpleTest(size_t huge_page_size) {
  std::vector<std::pair<size_t, char*>> allocated;
  Arena arena(Arena::kMinBlockSize, nullptr, huge_page_size);
  const int N = 100000;
  size_t bytes = 0;
  Random rnd(301);
  for (int i = 0; i < N; i++) {
    size_t s;
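    // Pick a size: every (N / 10)th iteration uses i itself; otherwise
    // mostly tiny sizes (< 20), sometimes small (< 100), rarely large
    // (< 6000).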
    if (i % (N / 10) == 0) {
      s = i;
    } else {
      s = rnd.OneIn(4000)
              ? rnd.Uniform(6000)
              : (rnd.OneIn(10) ? rnd.Uniform(100) : rnd.Uniform(20));
    }
    if (s == 0) {
      // Our arena disallows size 0 allocations.
      s = 1;
    }
    char* r;
    if (rnd.OneIn(10)) {
      r = arena.AllocateAligned(s);
    } else {
      r = arena.Allocate(s);
    }

    for (unsigned int b = 0; b < s; b++) {
      // Fill the "i"th allocation with a known bit pattern
      r[b] = i % 256;
    }
    bytes += s;
    allocated.push_back(std::make_pair(s, r));
    ASSERT_GE(arena.ApproximateMemoryUsage(), bytes);
    if (i > N / 10) {
      ASSERT_LE(arena.ApproximateMemoryUsage(), bytes * 1.10);
    }
  }
  for (unsigned int i = 0; i < allocated.size(); i++) {
    size_t num_bytes = allocated[i].first;
    const char* p = allocated[i].second;
    for (unsigned int b = 0; b < num_bytes; b++) {
      // Check the "i"th allocation for the known bit pattern
      ASSERT_EQ(int(p[b]) & 0xff, (int)(i % 256));
    }
  }
}
}  // namespace

TEST_F(ArenaTest, MemoryAllocatedBytes) {
  MemoryAllocatedBytesTest(0);
  MemoryAllocatedBytesTest(kHugePageSize);
}

TEST_F(ArenaTest, ApproximateMemoryUsage) {
  ApproximateMemoryUsageTest(0);
  ApproximateMemoryUsageTest(kHugePageSize);
}

TEST_F(ArenaTest, Simple) {
  SimpleTest(0);
  SimpleTest(kHugePageSize);
}
}  // namespace ROCKSDB_NAMESPACE

int main(int argc, char** argv) {
  ::testing::InitGoogleTest(&argc, argv);
  return RUN_ALL_TESTS();
}