OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 206 matching lines...)
217 const size_t code_range_size = 4 * pageSize; | 217 const size_t code_range_size = 4 * pageSize; |
218 if (!code_range->SetUp( | 218 if (!code_range->SetUp( |
219 code_range_size + | 219 code_range_size + |
220 RoundUp(v8::base::OS::CommitPageSize() * kReservedCodeRangePages, | 220 RoundUp(v8::base::OS::CommitPageSize() * kReservedCodeRangePages, |
221 MemoryChunk::kAlignment) + | 221 MemoryChunk::kAlignment) + |
222 v8::internal::MemoryAllocator::CodePageAreaSize())) { | 222 v8::internal::MemoryAllocator::CodePageAreaSize())) { |
223 return; | 223 return; |
224 } | 224 } |
225 Address address; | 225 Address address; |
226 size_t size; | 226 size_t size; |
| 227 size_t request_size = code_range_size - 2 * pageSize; |
227 address = code_range->AllocateRawMemory( | 228 address = code_range->AllocateRawMemory( |
228 code_range_size - 2 * pageSize, code_range_size - 2 * pageSize, &size); | 229 request_size, request_size - (2 * MemoryAllocator::CodePageGuardSize()), |
| 230 &size); |
229 CHECK(address != NULL); | 231 CHECK(address != NULL); |
230 Address null_address; | 232 Address null_address; |
231 size_t null_size; | 233 size_t null_size; |
232 null_address = code_range->AllocateRawMemory( | 234 null_address = code_range->AllocateRawMemory( |
233 code_range_size - pageSize, code_range_size - pageSize, &null_size); | 235 code_range_size - pageSize, code_range_size - pageSize, &null_size); |
234 CHECK(null_address == NULL); | 236 CHECK(null_address == NULL); |
235 code_range->FreeRawMemory(address, size); | 237 code_range->FreeRawMemory(address, size); |
236 delete code_range; | 238 delete code_range; |
237 memory_allocator->TearDown(); | 239 memory_allocator->TearDown(); |
238 delete memory_allocator; | 240 delete memory_allocator; |
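The substantive change in this hunk is the second argument to AllocateRawMemory: the commit size is now smaller than the reservation size by two guard-region sizes, reflecting that the guard pages at each end of a code page are reserved but never committed. A minimal sketch of that arithmetic, assuming hypothetical constants in place of the runtime values of v8::base::OS::CommitPageSize() and MemoryAllocator::CodePageGuardSize():

    #include <cassert>
    #include <cstddef>

    // Hypothetical values for illustration only; the real numbers come
    // from v8::base::OS::CommitPageSize() and
    // MemoryAllocator::CodePageGuardSize() at runtime.
    static const size_t kPageSize = 4096;
    static const size_t kGuardSize = kPageSize;

    int main() {
      const size_t code_range_size = 4 * kPageSize;
      // Reserve the whole block minus two pages, as the test does...
      const size_t request_size = code_range_size - 2 * kPageSize;
      // ...but commit only the span between the leading and trailing
      // guard regions, which stay reserved-but-uncommitted.
      const size_t commit_size = request_size - 2 * kGuardSize;
      assert(commit_size < request_size);
      return 0;
    }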
(...skipping 388 matching lines...)
627 | 629 |
628 // Turn the allocation into a proper object so isolate teardown won't | 630 // Turn the allocation into a proper object so isolate teardown won't |
629 // crash. | 631 // crash. |
630 HeapObject* free_space = NULL; | 632 HeapObject* free_space = NULL; |
631 CHECK(allocation.To(&free_space)); | 633 CHECK(allocation.To(&free_space)); |
632 new_space->heap()->CreateFillerObjectAt(free_space->address(), 80); | 634 new_space->heap()->CreateFillerObjectAt(free_space->address(), 80); |
633 } | 635 } |
634 } | 636 } |
635 isolate->Dispose(); | 637 isolate->Dispose(); |
636 } | 638 } |
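The comment in this second hunk explains the pattern: once raw bytes have been handed out by the allocator, the space must contain a valid object at that address or heap iteration during isolate teardown will crash. A sketch of the pattern, reusing the V8-internal names visible in the hunk (AllocationResult::To, CreateFillerObjectAt); the AllocateRaw call is an assumption about the elided context, and none of this runs outside the V8 tree:

    // Sketch of the teardown-safety pattern, assuming the V8-internal
    // API of this era. The AllocateRaw call stands in for the elided
    // allocation above the hunk.
    AllocationResult allocation = new_space->AllocateRaw(80);
    HeapObject* free_space = NULL;
    CHECK(allocation.To(&free_space));  // unwrap the raw allocation
    // Stamp a filler map onto the 80 raw bytes so the space remains
    // iterable when the isolate is disposed.
    new_space->heap()->CreateFillerObjectAt(free_space->address(), 80);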