Chromium Code Reviews

Unified Diff: src/heap.cc

Issue 325553002: --verify-predictable mode added for ensuring that GC behaves deterministically. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Addressing comments | Created 6 years, 6 months ago
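For orientation before the diff: the patch threads an allocation counter and a running hash through Heap (allocations_count_, raw_allocations_hash_, dump_allocations_hash_countdown_) and reports every allocation and object move through OnAllocationEvent/OnMoveEvent, so two runs of the same program can be compared for identical allocation behavior. Below is a minimal standalone sketch of that idea, not the code under review; the class name, the mixing function, and the output format are assumptions made purely for illustration.

    #include <cstdint>
    #include <cstdio>

    // Hypothetical tracker mirroring the fields added in this patch; not V8's
    // actual implementation.
    class AllocationTracker {
     public:
      explicit AllocationTracker(uint32_t dump_interval)
          : allocations_count_(0),
            raw_allocations_hash_(0),
            dump_allocations_hash_countdown_(dump_interval) {}

      // Called for every allocation and, symmetrically, for every object move.
      // Only values that are stable across runs are mixed in: a sequence number
      // and the object size. Addresses are deliberately excluded, since they
      // differ from run to run.
      void OnAllocationEvent(uint32_t size_in_bytes) {
        ++allocations_count_;
        raw_allocations_hash_ = Mix(raw_allocations_hash_, allocations_count_);
        raw_allocations_hash_ = Mix(raw_allocations_hash_, size_in_bytes);
        if (dump_allocations_hash_countdown_ > 0 &&
            --dump_allocations_hash_countdown_ == 0) {
          PrintAllocationsHash();
        }
      }

      void PrintAllocationsHash() const {
        std::printf("### Allocations = %u, hash = 0x%08x\n",
                    allocations_count_, raw_allocations_hash_);
      }

     private:
      // Any deterministic combiner works; a multiplicative mix keeps it short.
      static uint32_t Mix(uint32_t hash, uint32_t value) {
        return hash * 2654435761u + value;
      }

      uint32_t allocations_count_;
      uint32_t raw_allocations_hash_;
      uint32_t dump_allocations_hash_countdown_;
    };

Running the same deterministic workload twice and comparing the final hash line is then enough to flag a divergence in allocation order or size, which is what --verify-predictable is meant to catch.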
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/v8.h"

 #include "src/accessors.h"
 #include "src/api.h"
 #include "src/base/once.h"
 #include "src/bootstrapper.h"
(...skipping 61 matching lines...)
       new_space_(this),
       old_pointer_space_(NULL),
       old_data_space_(NULL),
       code_space_(NULL),
       map_space_(NULL),
       cell_space_(NULL),
       property_cell_space_(NULL),
       lo_space_(NULL),
       gc_state_(NOT_IN_GC),
       gc_post_processing_depth_(0),
+      allocations_count_(0),
+      raw_allocations_hash_(0),
+      dump_allocations_hash_countdown_(FLAG_dump_allocations_digest_at_alloc),
       ms_count_(0),
       gc_count_(0),
       remembered_unmapped_pages_index_(0),
       unflattened_strings_length_(0),
 #ifdef DEBUG
       allocation_timeout_(0),
 #endif  // DEBUG
       old_generation_allocation_limit_(kMinimumOldGenerationAllocationLimit),
       size_of_old_gen_at_last_old_space_gc_(0),
       old_gen_exhausted_(false),
(...skipping 1858 matching lines...)
                                   int size)) {
     // Copy the content of source to target.
     heap->CopyBlock(target->address(), source->address(), size);

     // Set the forwarding address.
     source->set_map_word(MapWord::FromForwardingAddress(target));

     if (logging_and_profiling_mode == LOGGING_AND_PROFILING_ENABLED) {
       // Update NewSpace stats if necessary.
       RecordCopiedObject(heap, target);
-      Isolate* isolate = heap->isolate();
-      HeapProfiler* heap_profiler = isolate->heap_profiler();
-      if (heap_profiler->is_tracking_object_moves()) {
-        heap_profiler->ObjectMoveEvent(source->address(), target->address(),
-                                       size);
-      }
-      if (isolate->logger()->is_logging_code_events() ||
-          isolate->cpu_profiler()->is_profiling()) {
-        if (target->IsSharedFunctionInfo()) {
-          PROFILE(isolate, SharedFunctionInfoMoveEvent(
-              source->address(), target->address()));
-        }
-      }
+      heap->OnMoveEvent(target, source, size);
     }

     if (marks_handling == TRANSFER_MARKS) {
       if (Marking::TransferColor(source, target)) {
         MemoryChunk::IncrementLiveBytesFromGC(target->address(), size);
       }
     }
   }

(...skipping 234 matching lines...)
                     LOGGING_AND_PROFILING_DISABLED>::Initialize();
   ScavengingVisitor<IGNORE_MARKS, LOGGING_AND_PROFILING_DISABLED>::Initialize();
   ScavengingVisitor<TRANSFER_MARKS,
                     LOGGING_AND_PROFILING_ENABLED>::Initialize();
   ScavengingVisitor<IGNORE_MARKS, LOGGING_AND_PROFILING_ENABLED>::Initialize();
 }


 void Heap::SelectScavengingVisitorsTable() {
   bool logging_and_profiling =
+      FLAG_verify_predictable ||
       isolate()->logger()->is_logging() ||
       isolate()->cpu_profiler()->is_profiling() ||
       (isolate()->heap_profiler() != NULL &&
        isolate()->heap_profiler()->is_tracking_object_moves());

   if (!incremental_marking()->IsMarking()) {
     if (!logging_and_profiling) {
       scavenging_visitors_table_.CopyFrom(
           ScavengingVisitor<IGNORE_MARKS,
                             LOGGING_AND_PROFILING_DISABLED>::GetTable());
(...skipping 1094 matching lines...)
   }

   object->set_map(MapForFixedTypedArray(array_type));
   FixedTypedArrayBase* elements = FixedTypedArrayBase::cast(object);
   elements->set_length(length);
   memset(elements->DataPtr(), 0, elements->DataSize());
   return elements;
 }


-AllocationResult Heap::AllocateCode(int object_size,
-                                    bool immovable) {
+AllocationResult Heap::AllocateCode(int object_size, bool immovable) {
   ASSERT(IsAligned(static_cast<intptr_t>(object_size), kCodeAlignment));
   AllocationResult allocation;
   // Large code objects and code objects which should stay at a fixed address
   // are allocated in large object space.
   HeapObject* result;
   bool force_lo_space = object_size > code_space()->AreaSize();
   if (force_lo_space) {
     allocation = lo_space_->AllocateRaw(object_size, EXECUTABLE);
+    if (!allocation.To(&result)) return allocation;
+    OnAllocationEvent(result, object_size);
Hannes Payer (out of office), 2014/06/17 12:54:49: As discussed offline: let's refactor large object
Igor Sheludko, 2014/06/17 14:08:10: Done.
   } else {
     allocation = AllocateRaw(object_size, CODE_SPACE, CODE_SPACE);
+    if (!allocation.To(&result)) return allocation;
   }
-  if (!allocation.To(&result)) return allocation;

   if (immovable && !force_lo_space &&
       // Objects on the first page of each space are never moved.
       !code_space_->FirstPage()->Contains(result->address())) {
     // Discard the first code allocation, which was on a page where it could be
     // moved.
     CreateFillerObjectAt(result->address(), object_size);
     allocation = lo_space_->AllocateRaw(object_size, EXECUTABLE);
     if (!allocation.To(&result)) return allocation;
+    OnAllocationEvent(result, object_size);
   }

   result->set_map_no_write_barrier(code_map());
   Code* code = Code::cast(result);
   ASSERT(!isolate_->code_range()->exists() ||
          isolate_->code_range()->contains(code->address()));
   code->set_gc_metadata(Smi::FromInt(0));
   code->set_ic_age(global_ic_age_);
   return code;
 }
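The inline review exchange above (about refactoring the large object allocation path) refers to the pattern visible in this hunk and repeated in the CopyCode hunks below: every lo_space_->AllocateRaw(...) call site is followed by its own explicit OnAllocationEvent(...) call. A hypothetical sketch of the suggested direction, with an assumed wrapper name and the event reported once inside the large-object path instead of at every caller:

    // Illustrative only; the actual follow-up would live in the large object
    // space code, not in heap.cc, and may differ in naming and signature.
    AllocationResult LargeObjectSpace::AllocateRawWithEvent(
        int object_size, Executability executable) {
      AllocationResult allocation = AllocateRaw(object_size, executable);
      HeapObject* object;
      if (allocation.To(&object)) {
        // Report the allocation in one place, so callers such as
        // Heap::AllocateCode and Heap::CopyCode no longer need to.
        heap()->OnAllocationEvent(object, object_size);
      }
      return allocation;
    }

The "Done." reply suggests a later patch set moved in this direction; the sketch only illustrates the single-call-site idea.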

 AllocationResult Heap::CopyCode(Code* code) {
   AllocationResult allocation;
   HeapObject* new_constant_pool;
   if (FLAG_enable_ool_constant_pool &&
       code->constant_pool() != empty_constant_pool_array()) {
     // Copy the constant pool, since edits to the copied code may modify
     // the constant pool.
     allocation = CopyConstantPoolArray(code->constant_pool());
     if (!allocation.To(&new_constant_pool)) return allocation;
   } else {
     new_constant_pool = empty_constant_pool_array();
   }

+  HeapObject* result;
   // Allocate an object the same size as the code object.
   int obj_size = code->Size();
   if (obj_size > code_space()->AreaSize()) {
     allocation = lo_space_->AllocateRaw(obj_size, EXECUTABLE);
+    if (!allocation.To(&result)) return allocation;
+    OnAllocationEvent(result, obj_size);
   } else {
     allocation = AllocateRaw(obj_size, CODE_SPACE, CODE_SPACE);
+    if (!allocation.To(&result)) return allocation;
   }

-  HeapObject* result;
-  if (!allocation.To(&result)) return allocation;
-
   // Copy code object.
   Address old_addr = code->address();
   Address new_addr = result->address();
   CopyBlock(new_addr, old_addr, obj_size);
   Code* new_code = Code::cast(result);

   // Update the constant pool.
   new_code->set_constant_pool(new_constant_pool);

   // Relocate the copy.
(...skipping 26 matching lines...)

   int new_body_size = RoundUp(code->instruction_size(), kObjectAlignment);

   int new_obj_size = Code::SizeFor(new_body_size);

   Address old_addr = code->address();

   size_t relocation_offset =
       static_cast<size_t>(code->instruction_end() - old_addr);

+  HeapObject* result;
   AllocationResult allocation;
   if (new_obj_size > code_space()->AreaSize()) {
     allocation = lo_space_->AllocateRaw(new_obj_size, EXECUTABLE);
+    if (!allocation.To(&result)) return allocation;
+    OnAllocationEvent(result, new_obj_size);
   } else {
     allocation = AllocateRaw(new_obj_size, CODE_SPACE, CODE_SPACE);
+    if (!allocation.To(&result)) return allocation;
   }

-  HeapObject* result;
-  if (!allocation.To(&result)) return allocation;
-
   // Copy code object.
   Address new_addr = result->address();

   // Copy header and instructions.
   CopyBytes(new_addr, old_addr, relocation_offset);

   Code* new_code = Code::cast(result);
   new_code->set_relocation_info(reloc_info_array);

   // Update constant pool.
(...skipping 1844 matching lines...)
            map_space_->MaximumCommittedMemory());
     PrintF("maximum_committed_by_cell_space=%" V8_PTR_PREFIX "d ",
            cell_space_->MaximumCommittedMemory());
     PrintF("maximum_committed_by_property_space=%" V8_PTR_PREFIX "d ",
            property_cell_space_->MaximumCommittedMemory());
     PrintF("maximum_committed_by_lo_space=%" V8_PTR_PREFIX "d ",
            lo_space_->MaximumCommittedMemory());
     PrintF("\n\n");
   }

+  if (FLAG_verify_predictable) {
+    PrintAlloctionsHash();
+  }
+
   TearDownArrayBuffers();

   isolate_->global_handles()->TearDown();

   external_string_table_.TearDown();

   mark_compact_collector()->TearDown();

   new_space_.TearDown();

(...skipping 1100 matching lines...)
                      static_cast<int>(object_sizes_last_time_[index]));
   CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT)
 #undef ADJUST_LAST_TIME_OBJECT_COUNT

   MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_));
   MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_));
   ClearObjectStats();
 }

 } }  // namespace v8::internal