OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 103 matching lines...) |
114 isolate_->memory_allocator_ = old_allocator_; | 114 isolate_->memory_allocator_ = old_allocator_; |
115 } | 115 } |
116 | 116 |
117 private: | 117 private: |
118 Isolate* isolate_; | 118 Isolate* isolate_; |
119 MemoryAllocator* old_allocator_; | 119 MemoryAllocator* old_allocator_; |
120 | 120 |
121 DISALLOW_COPY_AND_ASSIGN(TestMemoryAllocatorScope); | 121 DISALLOW_COPY_AND_ASSIGN(TestMemoryAllocatorScope); |
122 }; | 122 }; |
123 | 123 |
124 | |
125 // Temporarily sets a given code range in an isolate. | |
126 class TestCodeRangeScope { | |
127 public: | |
128 TestCodeRangeScope(Isolate* isolate, CodeRange* code_range) | |
129 : isolate_(isolate), | |
130 old_code_range_(isolate->code_range_) { | |
131 isolate->code_range_ = code_range; | |
132 } | |
133 | |
134 ~TestCodeRangeScope() { | |
135 isolate_->code_range_ = old_code_range_; | |
136 } | |
137 | |
138 private: | |
139 Isolate* isolate_; | |
140 CodeRange* old_code_range_; | |
141 | |
142 DISALLOW_COPY_AND_ASSIGN(TestCodeRangeScope); | |
143 }; | |
144 | |
145 } } // namespace v8::internal | 124 } } // namespace v8::internal |
146 | 125 |
147 | 126 |
148 static void VerifyMemoryChunk(Isolate* isolate, | |
149 Heap* heap, | |
150 CodeRange* code_range, | |
151 size_t reserve_area_size, | |
152 size_t commit_area_size, | |
153 size_t second_commit_area_size, | |
154 Executability executable) { | |
155 MemoryAllocator* memory_allocator = new MemoryAllocator(isolate); | |
156 CHECK(memory_allocator->SetUp(heap->MaxReserved(), | |
157 heap->MaxExecutableSize())); | |
158 TestMemoryAllocatorScope test_allocator_scope(isolate, memory_allocator); | |
159 TestCodeRangeScope test_code_range_scope(isolate, code_range); | |
160 | |
161 size_t header_size = (executable == EXECUTABLE) | |
162 ? MemoryAllocator::CodePageGuardStartOffset() | |
163 : MemoryChunk::kObjectStartOffset; | |
164 size_t guard_size = (executable == EXECUTABLE) | |
165 ? MemoryAllocator::CodePageGuardSize() | |
166 : 0; | |
167 | |
168 MemoryChunk* memory_chunk = memory_allocator->AllocateChunk(reserve_area_size, | |
169 commit_area_size, | |
170 executable, | |
171 NULL); | |
172 size_t alignment = code_range->exists() ? | |
173 MemoryChunk::kAlignment : OS::CommitPageSize(); | |
174 size_t reserved_size = ((executable == EXECUTABLE)) | |
175 ? RoundUp(header_size + guard_size + reserve_area_size + guard_size, | |
176 alignment) | |
177 : RoundUp(header_size + reserve_area_size, OS::CommitPageSize()); | |
178 CHECK(memory_chunk->size() == reserved_size); | |
179 CHECK(memory_chunk->area_start() < memory_chunk->address() + | |
180 memory_chunk->size()); | |
181 CHECK(memory_chunk->area_end() <= memory_chunk->address() + | |
182 memory_chunk->size()); | |
183 CHECK(static_cast<size_t>(memory_chunk->area_size()) == commit_area_size); | |
184 | |
185 Address area_start = memory_chunk->area_start(); | |
186 | |
187 memory_chunk->CommitArea(second_commit_area_size); | |
188 CHECK(area_start == memory_chunk->area_start()); | |
189 CHECK(memory_chunk->area_start() < memory_chunk->address() + | |
190 memory_chunk->size()); | |
191 CHECK(memory_chunk->area_end() <= memory_chunk->address() + | |
192 memory_chunk->size()); | |
193 CHECK(static_cast<size_t>(memory_chunk->area_size()) == | |
194 second_commit_area_size); | |
195 | |
196 memory_allocator->Free(memory_chunk); | |
197 memory_allocator->TearDown(); | |
198 delete memory_allocator; | |
199 } | |
200 | |
201 | |
202 static unsigned int Pseudorandom() { | |
203 static uint32_t lo = 2345; | |
204 lo = 18273 * (lo & 0xFFFFF) + (lo >> 16); | |
205 return lo & 0xFFFFF; | |
206 } | |
207 | |
208 | |
209 TEST(MemoryChunk) { | |
210 OS::SetUp(); | |
211 Isolate* isolate = Isolate::Current(); | |
212 isolate->InitializeLoggingAndCounters(); | |
213 Heap* heap = isolate->heap(); | |
214 CHECK(heap->ConfigureHeapDefault()); | |
215 | |
216 size_t reserve_area_size = 1 * MB; | |
217 size_t initial_commit_area_size, second_commit_area_size; | |
218 | |
219 for (int i = 0; i < 100; i++) { | |
220 initial_commit_area_size = Pseudorandom(); | |
221 second_commit_area_size = Pseudorandom(); | |
222 | |
223 // With CodeRange. | |
224 CodeRange* code_range = new CodeRange(isolate); | |
225 const int code_range_size = 32 * MB; | |
226 if (!code_range->SetUp(code_range_size)) return; | |
227 | |
228 VerifyMemoryChunk(isolate, | |
229 heap, | |
230 code_range, | |
231 reserve_area_size, | |
232 initial_commit_area_size, | |
233 second_commit_area_size, | |
234 EXECUTABLE); | |
235 | |
236 VerifyMemoryChunk(isolate, | |
237 heap, | |
238 code_range, | |
239 reserve_area_size, | |
240 initial_commit_area_size, | |
241 second_commit_area_size, | |
242 NOT_EXECUTABLE); | |
243 delete code_range; | |
244 | |
245 // Without CodeRange. | |
246 code_range = NULL; | |
247 VerifyMemoryChunk(isolate, | |
248 heap, | |
249 code_range, | |
250 reserve_area_size, | |
251 initial_commit_area_size, | |
252 second_commit_area_size, | |
253 EXECUTABLE); | |
254 | |
255 VerifyMemoryChunk(isolate, | |
256 heap, | |
257 code_range, | |
258 reserve_area_size, | |
259 initial_commit_area_size, | |
260 second_commit_area_size, | |
261 NOT_EXECUTABLE); | |
262 } | |
263 } | |
264 | |
265 | |
266 TEST(MemoryAllocator) { | 127 TEST(MemoryAllocator) { |
267 OS::SetUp(); | 128 OS::SetUp(); |
268 Isolate* isolate = Isolate::Current(); | 129 Isolate* isolate = Isolate::Current(); |
269 isolate->InitializeLoggingAndCounters(); | 130 isolate->InitializeLoggingAndCounters(); |
270 Heap* heap = isolate->heap(); | 131 Heap* heap = isolate->heap(); |
271 CHECK(isolate->heap()->ConfigureHeapDefault()); | 132 CHECK(isolate->heap()->ConfigureHeapDefault()); |
272 | 133 |
273 MemoryAllocator* memory_allocator = new MemoryAllocator(isolate); | 134 MemoryAllocator* memory_allocator = new MemoryAllocator(isolate); |
274 CHECK(memory_allocator->SetUp(heap->MaxReserved(), | 135 CHECK(memory_allocator->SetUp(heap->MaxReserved(), |
275 heap->MaxExecutableSize())); | 136 heap->MaxExecutableSize())); |
(...skipping 121 matching lines...) |
397 { MaybeObject* maybe_obj = lo->AllocateRaw(lo_size, NOT_EXECUTABLE); | 258 { MaybeObject* maybe_obj = lo->AllocateRaw(lo_size, NOT_EXECUTABLE); |
398 if (!maybe_obj->ToObject(&obj)) break; | 259 if (!maybe_obj->ToObject(&obj)) break; |
399 } | 260 } |
400 CHECK(lo->Available() < available); | 261 CHECK(lo->Available() < available); |
401 }; | 262 }; |
402 | 263 |
403 CHECK(!lo->IsEmpty()); | 264 CHECK(!lo->IsEmpty()); |
404 | 265 |
405 CHECK(lo->AllocateRaw(lo_size, NOT_EXECUTABLE)->IsFailure()); | 266 CHECK(lo->AllocateRaw(lo_size, NOT_EXECUTABLE)->IsFailure()); |
406 } | 267 } |