Chromium Code Reviews

Unified Diff: src/heap.cc

Issue 325553002: --verify-predictable mode added to ensure that GC behaves deterministically. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Addressing review comments (created 6 years, 6 months ago)
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/v8.h"

 #include "src/accessors.h"
 #include "src/api.h"
 #include "src/base/once.h"
 #include "src/bootstrapper.h"
(...skipping 46 matching lines...)
       max_executable_size_(256ul * (kPointerSize / 4) * MB),
       // Variables set based on semispace_size_ and old_generation_size_ in
       // ConfigureHeap.
       // Will be 4 * reserved_semispace_size_ to ensure that young
       // generation can be aligned to its size.
       maximum_committed_(0),
       old_space_growing_factor_(4),
       survived_since_last_expansion_(0),
       sweep_generation_(0),
       always_allocate_scope_depth_(0),
-      linear_allocation_scope_depth_(0),
       contexts_disposed_(0),
       global_ic_age_(0),
       flush_monomorphic_ics_(false),
       scan_on_scavenge_pages_(0),
       new_space_(this),
       old_pointer_space_(NULL),
       old_data_space_(NULL),
       code_space_(NULL),
       map_space_(NULL),
       cell_space_(NULL),
       property_cell_space_(NULL),
       lo_space_(NULL),
       gc_state_(NOT_IN_GC),
       gc_post_processing_depth_(0),
+      allocations_count_(0),
+      raw_allocations_hash_(0),
+      dump_allocations_hash_countdown_(FLAG_dump_allocations_digest_at_alloc),
       ms_count_(0),
       gc_count_(0),
       remembered_unmapped_pages_index_(0),
       unflattened_strings_length_(0),
 #ifdef DEBUG
       allocation_timeout_(0),
 #endif // DEBUG
       old_generation_allocation_limit_(kMinimumOldGenerationAllocationLimit),
       size_of_old_gen_at_last_old_space_gc_(0),
       old_gen_exhausted_(false),
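The three initializers added above (allocations_count_, raw_allocations_hash_, dump_allocations_hash_countdown_) carry the bookkeeping for the new mode: each allocation presumably increments the counter and folds address-independent data into a running hash, which can be dumped every FLAG_dump_allocations_digest_at_alloc allocations and once more at TearDown (see the PrintAlloctionsHash call further down in this diff). Below is a minimal, self-contained sketch of such a digest, assuming an FNV-1a-style mixing step; the class and helper names are hypothetical and not taken from the patch.

#include <cstdint>
#include <cstdio>

// Hypothetical stand-in for the counters this patch adds to Heap.
class AllocationDigest {
 public:
  void OnAllocation(uint32_t size_in_bytes) {
    ++allocations_count_;
    // Mix only address-independent data (size, ordinal) so that two runs with
    // the same allocation sequence produce the same hash.
    raw_allocations_hash_ = Mix(raw_allocations_hash_, size_in_bytes);
    raw_allocations_hash_ = Mix(raw_allocations_hash_, allocations_count_);
  }

  void PrintHash() const {
    std::printf("### Allocations = %u, hash = 0x%08x\n",
                static_cast<unsigned>(allocations_count_),
                static_cast<unsigned>(raw_allocations_hash_));
  }

 private:
  static uint32_t Mix(uint32_t hash, uint32_t value) {
    const uint32_t kFnvPrime = 16777619u;  // FNV-1a style mixing step.
    return (hash ^ value) * kFnvPrime;
  }

  uint32_t allocations_count_ = 0;
  uint32_t raw_allocations_hash_ = 0;
};

int main() {
  AllocationDigest digest;
  const uint32_t sizes[] = {16, 24, 16, 128};
  for (uint32_t size : sizes) digest.OnAllocation(size);
  digest.PrintHash();  // Identical allocation sequences print identical hashes.
  return 0;
}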
(...skipping 1858 matching lines...)
                                    int size)) {
     // Copy the content of source to target.
     heap->CopyBlock(target->address(), source->address(), size);

     // Set the forwarding address.
     source->set_map_word(MapWord::FromForwardingAddress(target));

     if (logging_and_profiling_mode == LOGGING_AND_PROFILING_ENABLED) {
       // Update NewSpace stats if necessary.
       RecordCopiedObject(heap, target);
-      Isolate* isolate = heap->isolate();
-      HeapProfiler* heap_profiler = isolate->heap_profiler();
-      if (heap_profiler->is_tracking_object_moves()) {
-        heap_profiler->ObjectMoveEvent(source->address(), target->address(),
-                                       size);
-      }
-      if (isolate->logger()->is_logging_code_events() ||
-          isolate->cpu_profiler()->is_profiling()) {
-        if (target->IsSharedFunctionInfo()) {
-          PROFILE(isolate, SharedFunctionInfoMoveEvent(
-              source->address(), target->address()));
-        }
-      }
+      heap->OnMoveEvent(target, source, size);
     }

     if (marks_handling == TRANSFER_MARKS) {
       if (Marking::TransferColor(source, target)) {
         MemoryChunk::IncrementLiveBytesFromGC(target->address(), size);
       }
     }
   }

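The deleted block above inlined all of the move notifications (heap-profiler object-move events and SharedFunctionInfo move events); the patch replaces them with a single heap->OnMoveEvent(target, source, size) call. OnMoveEvent's body is not part of this file's diff, but a sketch of what such a centralized hook would have to cover, reconstructed from the deleted lines, looks like the following; the trailing digest update is an assumption about the new mode, not something shown in this hunk.

// Sketch only; reconstructed from the code removed above. The real
// Heap::OnMoveEvent is declared elsewhere (src/heap.h) and may differ.
void Heap::OnMoveEvent(HeapObject* target, HeapObject* source,
                       int size_in_bytes) {
  HeapProfiler* heap_profiler = isolate_->heap_profiler();
  if (heap_profiler->is_tracking_object_moves()) {
    heap_profiler->ObjectMoveEvent(source->address(), target->address(),
                                   size_in_bytes);
  }
  if (isolate_->logger()->is_logging_code_events() ||
      isolate_->cpu_profiler()->is_profiling()) {
    if (target->IsSharedFunctionInfo()) {
      PROFILE(isolate_, SharedFunctionInfoMoveEvent(source->address(),
                                                    target->address()));
    }
  }
  // Assumption: under --verify-predictable this hook would also fold the move
  // into the allocations digest so that object motion stays observable.
}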
(...skipping 234 matching lines...)
                     LOGGING_AND_PROFILING_DISABLED>::Initialize();
   ScavengingVisitor<IGNORE_MARKS, LOGGING_AND_PROFILING_DISABLED>::Initialize();
   ScavengingVisitor<TRANSFER_MARKS,
                     LOGGING_AND_PROFILING_ENABLED>::Initialize();
   ScavengingVisitor<IGNORE_MARKS, LOGGING_AND_PROFILING_ENABLED>::Initialize();
 }


 void Heap::SelectScavengingVisitorsTable() {
   bool logging_and_profiling =
+      FLAG_verify_predictable ||
       isolate()->logger()->is_logging() ||
       isolate()->cpu_profiler()->is_profiling() ||
       (isolate()->heap_profiler() != NULL &&
        isolate()->heap_profiler()->is_tracking_object_moves());

   if (!incremental_marking()->IsMarking()) {
     if (!logging_and_profiling) {
       scavenging_visitors_table_.CopyFrom(
           ScavengingVisitor<IGNORE_MARKS,
                             LOGGING_AND_PROFILING_DISABLED>::GetTable());
(...skipping 1094 matching lines...)
   }

   object->set_map(MapForFixedTypedArray(array_type));
   FixedTypedArrayBase* elements = FixedTypedArrayBase::cast(object);
   elements->set_length(length);
   memset(elements->DataPtr(), 0, elements->DataSize());
   return elements;
 }


-AllocationResult Heap::AllocateCode(int object_size,
-                                    bool immovable) {
+AllocationResult Heap::AllocateCode(int object_size, bool immovable) {
   ASSERT(IsAligned(static_cast<intptr_t>(object_size), kCodeAlignment));
-  AllocationResult allocation;
-  // Large code objects and code objects which should stay at a fixed address
-  // are allocated in large object space.
+  AllocationResult allocation =
+      AllocateRaw(object_size, CODE_SPACE, CODE_SPACE);
+
   HeapObject* result;
-  bool force_lo_space = object_size > code_space()->AreaSize();
-  if (force_lo_space) {
-    allocation = lo_space_->AllocateRaw(object_size, EXECUTABLE);
-  } else {
-    allocation = AllocateRaw(object_size, CODE_SPACE, CODE_SPACE);
-  }
   if (!allocation.To(&result)) return allocation;

-  if (immovable && !force_lo_space &&
-      // Objects on the first page of each space are never moved.
-      !code_space_->FirstPage()->Contains(result->address())) {
-    // Discard the first code allocation, which was on a page where it could be
-    // moved.
-    CreateFillerObjectAt(result->address(), object_size);
-    allocation = lo_space_->AllocateRaw(object_size, EXECUTABLE);
-    if (!allocation.To(&result)) return allocation;
+  if (immovable) {
+    Address address = result->address();
+    // Code objects which should stay at a fixed address are allocated either
+    // in the first page of code space (objects on the first page of each space
+    // are never moved) or in large object space.
+    if (!code_space_->FirstPage()->Contains(address) &&
+        MemoryChunk::FromAddress(address)->owner()->identity() != LO_SPACE) {
+      // Discard the first code allocation, which was on a page where it could
+      // be moved.
+      CreateFillerObjectAt(result->address(), object_size);
+      allocation = lo_space_->AllocateRaw(object_size, EXECUTABLE);
+      if (!allocation.To(&result)) return allocation;
+      OnAllocationEvent(result, object_size);
+    }
   }

   result->set_map_no_write_barrier(code_map());
   Code* code = Code::cast(result);
   ASSERT(!isolate_->code_range()->exists() ||
          isolate_->code_range()->contains(code->address()));
   code->set_gc_metadata(Smi::FromInt(0));
   code->set_ic_age(global_ic_age_);
   return code;
 }


 AllocationResult Heap::CopyCode(Code* code) {
   AllocationResult allocation;
   HeapObject* new_constant_pool;
   if (FLAG_enable_ool_constant_pool &&
       code->constant_pool() != empty_constant_pool_array()) {
     // Copy the constant pool, since edits to the copied code may modify
     // the constant pool.
     allocation = CopyConstantPoolArray(code->constant_pool());
     if (!allocation.To(&new_constant_pool)) return allocation;
   } else {
     new_constant_pool = empty_constant_pool_array();
   }

+  HeapObject* result;
   // Allocate an object the same size as the code object.
   int obj_size = code->Size();
-  if (obj_size > code_space()->AreaSize()) {
-    allocation = lo_space_->AllocateRaw(obj_size, EXECUTABLE);
-  } else {
-    allocation = AllocateRaw(obj_size, CODE_SPACE, CODE_SPACE);
-  }
-
-  HeapObject* result;
+  allocation = AllocateRaw(obj_size, CODE_SPACE, CODE_SPACE);
   if (!allocation.To(&result)) return allocation;

   // Copy code object.
   Address old_addr = code->address();
   Address new_addr = result->address();
   CopyBlock(new_addr, old_addr, obj_size);
   Code* new_code = Code::cast(result);

   // Update the constant pool.
   new_code->set_constant_pool(new_constant_pool);
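Both AllocateCode and CopyCode above drop the explicit "does it fit in code space?" branch and call AllocateRaw(size, CODE_SPACE, CODE_SPACE) unconditionally. Funnelling every code allocation through AllocateRaw means the new allocation accounting only has to live in one place; the assumption (the companion change is in src/heap-inl.h, which is not shown here) is that AllocateRaw itself now diverts oversized objects to large object space, roughly as in this hypothetical sketch.

// Hypothetical sketch of the size-based routing the callers above now rely
// on. The real change lives in Heap::AllocateRaw (src/heap-inl.h), which is
// not part of this file's diff; the names here only mirror the V8 spaces.
AllocationResult AllocateCodeRaw(Heap* heap, int size_in_bytes) {
  // Routing every code allocation through one choke point means the
  // allocation counting/hashing for --verify-predictable happens exactly
  // once per object, regardless of which space ends up holding it.
  if (size_in_bytes > heap->code_space()->AreaSize()) {
    // Too big for a regular code-space page: go to large object space,
    // exactly as the deleted per-caller branches used to do.
    return heap->lo_space()->AllocateRaw(size_in_bytes, EXECUTABLE);
  }
  return heap->code_space()->AllocateRaw(size_in_bytes);
}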
(...skipping 28 matching lines...)

   int new_body_size = RoundUp(code->instruction_size(), kObjectAlignment);

   int new_obj_size = Code::SizeFor(new_body_size);

   Address old_addr = code->address();

   size_t relocation_offset =
       static_cast<size_t>(code->instruction_end() - old_addr);

-  AllocationResult allocation;
-  if (new_obj_size > code_space()->AreaSize()) {
-    allocation = lo_space_->AllocateRaw(new_obj_size, EXECUTABLE);
-  } else {
-    allocation = AllocateRaw(new_obj_size, CODE_SPACE, CODE_SPACE);
-  }
-
   HeapObject* result;
+  AllocationResult allocation =
+      AllocateRaw(new_obj_size, CODE_SPACE, CODE_SPACE);
   if (!allocation.To(&result)) return allocation;

   // Copy code object.
   Address new_addr = result->address();

   // Copy header and instructions.
   CopyBytes(new_addr, old_addr, relocation_offset);

   Code* new_code = Code::cast(result);
   new_code->set_relocation_info(reloc_info_array);
(...skipping 1846 matching lines...)
            map_space_->MaximumCommittedMemory());
     PrintF("maximum_committed_by_cell_space=%" V8_PTR_PREFIX "d ",
            cell_space_->MaximumCommittedMemory());
     PrintF("maximum_committed_by_property_space=%" V8_PTR_PREFIX "d ",
            property_cell_space_->MaximumCommittedMemory());
     PrintF("maximum_committed_by_lo_space=%" V8_PTR_PREFIX "d ",
            lo_space_->MaximumCommittedMemory());
     PrintF("\n\n");
   }

+  if (FLAG_verify_predictable) {
+    PrintAlloctionsHash();
+  }
+
   TearDownArrayBuffers();

   isolate_->global_handles()->TearDown();

   external_string_table_.TearDown();

   mark_compact_collector()->TearDown();

   new_space_.TearDown();

(...skipping 1100 matching lines...)
                        static_cast<int>(object_sizes_last_time_[index]));
   CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT)
 #undef ADJUST_LAST_TIME_OBJECT_COUNT

   MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_));
   MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_));
   ClearObjectStats();
 }

 } }  // namespace v8::internal