Chromium Code Reviews

Diff: src/heap.cc

Issue 23641009: Refactor and cleanup VirtualMemory. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Addressed nits. Created 7 years, 3 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 154 matching lines...)
       configured_(false),
       chunks_queued_for_free_(NULL),
       relocation_mutex_(NULL) {
   // Allow build-time customization of the max semispace size. Building
   // V8 with snapshots and a non-default max semispace size is much
   // easier if you can define it as part of the build environment.
 #if defined(V8_MAX_SEMISPACE_SIZE)
   max_semispace_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE;
 #endif

-  intptr_t max_virtual = OS::MaxVirtualMemory();
-
+  intptr_t max_virtual = static_cast<intptr_t>(VirtualMemory::GetLimit());
   if (max_virtual > 0) {
     if (code_range_size_ > 0) {
       // Reserve no more than 1/8 of the memory for the code range.
       code_range_size_ = Min(code_range_size_, max_virtual >> 3);
     }
   }

   memset(roots_, 0, sizeof(roots_[0]) * kRootListLength);
   native_contexts_list_ = NULL;
   array_buffers_list_ = Smi::FromInt(0);
(...skipping 3957 matching lines...)
   // Compute size.
   int body_size = RoundUp(desc.instr_size, kObjectAlignment);
   int obj_size = Code::SizeFor(body_size);
   ASSERT(IsAligned(static_cast<intptr_t>(obj_size), kCodeAlignment));
   MaybeObject* maybe_result;
   // Large code objects and code objects which should stay at a fixed address
   // are allocated in large object space.
   HeapObject* result;
   bool force_lo_space = obj_size > code_space()->AreaSize();
   if (force_lo_space) {
-    maybe_result = lo_space_->AllocateRaw(obj_size, EXECUTABLE);
+    maybe_result = lo_space_->AllocateRaw(obj_size, VirtualMemory::EXECUTABLE);
   } else {
     maybe_result = code_space_->AllocateRaw(obj_size);
   }
   if (!maybe_result->To<HeapObject>(&result)) return maybe_result;

   if (immovable && !force_lo_space &&
       // Objects on the first page of each space are never moved.
       !code_space_->FirstPage()->Contains(result->address())) {
     // Discard the first code allocation, which was on a page where it could be
     // moved.
     CreateFillerObjectAt(result->address(), obj_size);
-    maybe_result = lo_space_->AllocateRaw(obj_size, EXECUTABLE);
+    maybe_result = lo_space_->AllocateRaw(obj_size, VirtualMemory::EXECUTABLE);
     if (!maybe_result->To<HeapObject>(&result)) return maybe_result;
   }

   // Initialize the object
   result->set_map_no_write_barrier(code_map());
   Code* code = Code::cast(result);
   ASSERT(!isolate_->code_range()->exists() ||
          isolate_->code_range()->contains(code->address()));
   code->set_instruction_size(desc.instr_size);
   code->set_relocation_info(reloc_info);
(...skipping 30 matching lines...)
 #endif
   return code;
 }


 MaybeObject* Heap::CopyCode(Code* code) {
   // Allocate an object the same size as the code object.
   int obj_size = code->Size();
   MaybeObject* maybe_result;
   if (obj_size > code_space()->AreaSize()) {
-    maybe_result = lo_space_->AllocateRaw(obj_size, EXECUTABLE);
+    maybe_result = lo_space_->AllocateRaw(obj_size, VirtualMemory::EXECUTABLE);
   } else {
     maybe_result = code_space_->AllocateRaw(obj_size);
   }

   Object* result;
   if (!maybe_result->ToObject(&result)) return maybe_result;

   // Copy code object.
   Address old_addr = code->address();
   Address new_addr = reinterpret_cast<HeapObject*>(result)->address();
(...skipping 22 matching lines...)

   int new_obj_size = Code::SizeFor(new_body_size);

   Address old_addr = code->address();

   size_t relocation_offset =
       static_cast<size_t>(code->instruction_end() - old_addr);

   MaybeObject* maybe_result;
   if (new_obj_size > code_space()->AreaSize()) {
-    maybe_result = lo_space_->AllocateRaw(new_obj_size, EXECUTABLE);
+    maybe_result = lo_space_->AllocateRaw(
+        new_obj_size, VirtualMemory::EXECUTABLE);
   } else {
     maybe_result = code_space_->AllocateRaw(new_obj_size);
   }

   Object* result;
   if (!maybe_result->ToObject(&result)) return maybe_result;

   // Copy code object.
   Address new_addr = reinterpret_cast<HeapObject*>(result)->address();

(...skipping 1092 matching lines...)
     if (chars > SeqTwoByteString::kMaxLength) {
       return Failure::OutOfMemoryException(0xa);
     }
     map = internalized_string_map();
     size = SeqTwoByteString::SizeFor(chars);
   }

   // Allocate string.
   Object* result;
   { MaybeObject* maybe_result = (size > Page::kMaxNonCodeHeapObjectSize)
-                   ? lo_space_->AllocateRaw(size, NOT_EXECUTABLE)
+                   ? lo_space_->AllocateRaw(size, VirtualMemory::NOT_EXECUTABLE)
                    : old_data_space_->AllocateRaw(size);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }

   reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier(map);
   // Set length and hash fields of the allocated string.
   String* answer = String::cast(result);
   answer->set_length(chars);
   answer->set_hash_field(hash_field);

(...skipping 132 matching lines...)
   if (length < 0 || length > FixedArray::kMaxLength) {
     return Failure::OutOfMemoryException(0xd);
   }
   ASSERT(length > 0);
   // Use the general function if we're forced to always allocate.
   if (always_allocate()) return AllocateFixedArray(length, TENURED);
   // Allocate the raw data for a fixed array.
   int size = FixedArray::SizeFor(length);
   return size <= Page::kMaxNonCodeHeapObjectSize
       ? new_space_.AllocateRaw(size)
-      : lo_space_->AllocateRaw(size, NOT_EXECUTABLE);
+      : lo_space_->AllocateRaw(size, VirtualMemory::NOT_EXECUTABLE);
 }


 MaybeObject* Heap::CopyFixedArrayWithMap(FixedArray* src, Map* map) {
   int len = src->length();
   Object* obj;
   { MaybeObject* maybe_obj = AllocateRawFixedArray(len);
     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
   }
   if (InNewSpace(obj)) {
(...skipping 1334 matching lines...)
   // Set up new space.
   if (!new_space_.SetUp(reserved_semispace_size_, max_semispace_size_)) {
     return false;
   }

   // Initialize old pointer space.
   old_pointer_space_ =
       new OldSpace(this,
                    max_old_generation_size_,
                    OLD_POINTER_SPACE,
-                   NOT_EXECUTABLE);
+                   VirtualMemory::NOT_EXECUTABLE);
   if (old_pointer_space_ == NULL) return false;
   if (!old_pointer_space_->SetUp()) return false;

   // Initialize old data space.
   old_data_space_ =
       new OldSpace(this,
                    max_old_generation_size_,
                    OLD_DATA_SPACE,
-                   NOT_EXECUTABLE);
+                   VirtualMemory::NOT_EXECUTABLE);
   if (old_data_space_ == NULL) return false;
   if (!old_data_space_->SetUp()) return false;

   // Initialize the code space, set its maximum capacity to the old
   // generation size. It needs executable memory.
   // On 64-bit platform(s), we put all code objects in a 2 GB range of
   // virtual address space, so that they can call each other with near calls.
   if (code_range_size_ > 0) {
     if (!isolate_->code_range()->SetUp(code_range_size_)) {
       return false;
     }
   }

-  code_space_ =
-      new OldSpace(this, max_old_generation_size_, CODE_SPACE, EXECUTABLE);
+  code_space_ = new OldSpace(
+      this, max_old_generation_size_, CODE_SPACE, VirtualMemory::EXECUTABLE);
   if (code_space_ == NULL) return false;
   if (!code_space_->SetUp()) return false;

   // Initialize map space.
   map_space_ = new MapSpace(this, max_old_generation_size_, MAP_SPACE);
   if (map_space_ == NULL) return false;
   if (!map_space_->SetUp()) return false;

   // Initialize simple cell space.
   cell_space_ = new CellSpace(this, max_old_generation_size_, CELL_SPACE);
(...skipping 1076 matching lines...)
       // To work around this we split large chunk into normal kPageSize aligned
       // pieces and initialize size, owner and flags field of every piece.
       // If FromAnyPointerAddress encounters a slot that belongs to one of
       // these smaller pieces it will treat it as a slot on a normal Page.
       Address chunk_end = chunk->address() + chunk->size();
       MemoryChunk* inner = MemoryChunk::FromAddress(
           chunk->address() + Page::kPageSize);
       MemoryChunk* inner_last = MemoryChunk::FromAddress(chunk_end - 1);
       while (inner <= inner_last) {
         // Size of a large chunk is always a multiple of
-        // OS::AllocateAlignment() so there is always
-        // enough space for a fake MemoryChunk header.
+        // VirtualMemory::GetAllocationGranularity() so
+        // there is always enough space for a fake
+        // MemoryChunk header.
         Address area_end = Min(inner->address() + Page::kPageSize, chunk_end);
         // Guard against overflow.
         if (area_end < inner->address()) area_end = chunk_end;
         inner->SetArea(inner->address(), area_end);
         inner->set_size(Page::kPageSize);
         inner->set_owner(lo_space());
         inner->SetFlag(MemoryChunk::ABOUT_TO_BE_FREED);
         inner = MemoryChunk::FromAddress(
             inner->address() + Page::kPageSize);
       }
(...skipping 87 matching lines...)
   if (FLAG_concurrent_recompilation) {
     heap_->relocation_mutex_->Lock();
 #ifdef DEBUG
     heap_->relocation_mutex_locked_by_optimizer_thread_ =
         heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread();
 #endif  // DEBUG
   }
 }

 } }  // namespace v8::internal