| Index: src/spaces.cc
|
| ===================================================================
|
| --- src/spaces.cc (revision 10578)
|
| +++ src/spaces.cc (working copy)
|
| @@ -339,8 +339,9 @@
|
|
|
| Address MemoryAllocator::ReserveAlignedMemory(size_t size,
|
| size_t alignment,
|
| - VirtualMemory* controller) {
|
| - VirtualMemory reservation(size, alignment);
|
| + VirtualMemory* controller,
|
| + intptr_t preferred_placement) {
|
| + VirtualMemory reservation(size, alignment, preferred_placement);
|
|
|
| if (!reservation.IsReserved()) return NULL;
|
| size_ += reservation.size();
|
| @@ -354,9 +355,11 @@
|
| Address MemoryAllocator::AllocateAlignedMemory(size_t size,
|
| size_t alignment,
|
| Executability executable,
|
| - VirtualMemory* controller) {
|
| + VirtualMemory* controller,
|
| + intptr_t preferred_placement) {
|
| VirtualMemory reservation;
|
| - Address base = ReserveAlignedMemory(size, alignment, &reservation);
|
| + Address base = ReserveAlignedMemory(
|
| + size, alignment, &reservation, preferred_placement);
|
| if (base == NULL) return NULL;
|
| if (!reservation.Commit(base,
|
| size,
|
| @@ -368,6 +371,28 @@
|
| }
|
|
|
|
|
| +Map* Page::MapFromIntraPageOffset(int32_t offset) {
|
| + Heap* heap = HEAP;
|
| + MapSpace* space = heap->map_space();
|
| + Page* page = space->anchor()->next_page();
|
| + while (page != space->anchor()) {
|
| + uintptr_t page_number =
|
| + reinterpret_cast<uintptr_t>(page) >> kPageSizeBits;
|
| + // This method only works for the single even-numbered page in the map
|
| + // space, so skip past the odd-numbered pages.
|
| + if ((page_number & 1) == 0) break;
|
| + page = page->next_page();
|
| + }
|
| + Address start_of_map =
|
| + reinterpret_cast<Address>(page) + offset;
|
| + if (Map::kSize != (1 << Map::kMapSizeBits)) {
|
| + start_of_map = page->RoundUpToObjectAlignment(start_of_map);
|
| + }
|
| + Map* map = Map::cast(HeapObject::FromAddress(start_of_map));
|
| + return map;
|
| +}
|
| +
|
| +
|
| void Page::InitializeAsAnchor(PagedSpace* owner) {
|
| set_owner(owner);
|
| set_prev_page(this);
|
| @@ -459,6 +484,26 @@
|
| }
|
|
|
|
|
| +// Reverses the bit order of a word.  Assumes a 32-bit word (uintptr_t on ARM).
|
| +static uintptr_t InvertBits(uintptr_t x) {
|
| + uintptr_t answer = 0;
|
| + uintptr_t mask = 1;
|
| + uintptr_t shift = 31;
|
| + for (int i = 0; i < 16; i++) {
|
| + answer |= (x & mask) << shift;
|
| + mask <<= 1;
|
| + shift -= 2;
|
| + }
|
| + shift = 1;
|
| + for (int i = 0; i < 16; i++) {
|
| + answer |= (x & mask) >> shift;
|
| + mask <<= 1;
|
| + shift += 2;
|
| + }
|
| + return answer;
|
| +}
|
| +
|
| +
|
| MemoryChunk* MemoryAllocator::AllocateChunk(intptr_t body_size,
|
| Executability executable,
|
| Space* owner) {
|
| @@ -494,6 +539,33 @@
|
| // Update executable memory size.
|
| size_executable_ += reservation.size();
|
| }
|
| +#if defined(V8_TARGET_ARCH_ARM)
|
| + } else if (owner->identity() == MAP_SPACE) {
|
| + // Map space: try to place the page at an address whose evenness enables
|
| + // fast map comparisons on ARM (even for the first page, odd thereafter).
|
| + uintptr_t place = 1;
|
| + bool even = true;
|
| + if (reinterpret_cast<MapSpace*>(owner)->anchor()->next_page() !=
|
| + reinterpret_cast<MapSpace*>(owner)->anchor()) {
|
| + // Not the first page. We must allocate at an odd address.
|
| + place = 1 << (kBitsPerPointer - kPageSizeBits - 1);
|
| + even = false;
|
| + }
|
| + while (true) {
|
| + base = AllocateAlignedMemory(chunk_size,
|
| + MemoryChunk::kAlignment,
|
| + executable,
|
| + &reservation,
|
| + InvertBits(place));
|
| + if (base == NULL) return NULL;
|
| + uintptr_t base_pointer = reinterpret_cast<uintptr_t>(base);
|
| + // Stop once the page's evenness matches what we asked for.
|
| + if ((((base_pointer >> kPageSizeBits) & 1) == 0) == even) break;
|
| + // Wrong evenness: free the reservation and retry at the next placement.
|
| + FreeMemory(&reservation, NOT_EXECUTABLE);
|
| + place++;
|
| + }
|
| +#endif
|
| } else {
|
| base = AllocateAlignedMemory(chunk_size,
|
| MemoryChunk::kAlignment,
|
|
|