
Unified Diff: src/heap.cc

Issue 11028027: Revert trunk to bleeding_edge at r12484 (Closed) Base URL: https://v8.googlecode.com/svn/trunk
Patch Set: Created 8 years, 2 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 30 matching lines...)
 #include "mark-compact.h"
 #include "natives.h"
 #include "objects-visiting.h"
 #include "objects-visiting-inl.h"
 #include "once.h"
 #include "runtime-profiler.h"
 #include "scopeinfo.h"
 #include "snapshot.h"
 #include "store-buffer.h"
 #include "v8threads.h"
-#include "v8utils.h"
 #include "vm-state-inl.h"
 #if V8_TARGET_ARCH_ARM && !V8_INTERPRETED_REGEXP
 #include "regexp-macro-assembler.h"
 #include "arm/regexp-macro-assembler-arm.h"
 #endif
 #if V8_TARGET_ARCH_MIPS && !V8_INTERPRETED_REGEXP
 #include "regexp-macro-assembler.h"
 #include "mips/regexp-macro-assembler-mips.h"
 #endif

(...skipping 302 matching lines...)
364 ", committed: %6" V8_PTR_PREFIX "d KB\n", 363 ", committed: %6" V8_PTR_PREFIX "d KB\n",
365 cell_space_->SizeOfObjects() / KB, 364 cell_space_->SizeOfObjects() / KB,
366 cell_space_->Available() / KB, 365 cell_space_->Available() / KB,
367 cell_space_->CommittedMemory() / KB); 366 cell_space_->CommittedMemory() / KB);
368 PrintPID("Large object space, used: %6" V8_PTR_PREFIX "d KB" 367 PrintPID("Large object space, used: %6" V8_PTR_PREFIX "d KB"
369 ", available: %6" V8_PTR_PREFIX "d KB" 368 ", available: %6" V8_PTR_PREFIX "d KB"
370 ", committed: %6" V8_PTR_PREFIX "d KB\n", 369 ", committed: %6" V8_PTR_PREFIX "d KB\n",
371 lo_space_->SizeOfObjects() / KB, 370 lo_space_->SizeOfObjects() / KB,
372 lo_space_->Available() / KB, 371 lo_space_->Available() / KB,
373 lo_space_->CommittedMemory() / KB); 372 lo_space_->CommittedMemory() / KB);
374 PrintPID("All spaces, used: %6" V8_PTR_PREFIX "d KB"
375 ", available: %6" V8_PTR_PREFIX "d KB"
376 ", committed: %6" V8_PTR_PREFIX "d KB\n",
377 this->SizeOfObjects() / KB,
378 this->Available() / KB,
379 this->CommittedMemory() / KB);
380 PrintPID("Total time spent in GC : %d ms\n", total_gc_time_ms_); 373 PrintPID("Total time spent in GC : %d ms\n", total_gc_time_ms_);
381 } 374 }
382 375
383 376
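For readers outside V8: the V8_PTR_PREFIX token in the format strings above expands to the printf length prefix for pointer-sized integers on the current platform, so one format string works on both 32-bit and 64-bit builds. A minimal standalone sketch of the same idiom using the standard <cinttypes> macros (V8's own macro is defined elsewhere in the tree and is not reproduced here):

#include <cinttypes>
#include <cstdio>

int main() {
  // Mirror of the PrintPID format strings above: "%6" V8_PTR_PREFIX "d"
  // selects a printf conversion matching the platform's pointer width.
  // Standard C++ offers the same idiom via the <cinttypes> PRI* macros.
  intptr_t used_kb = 12345;
  std::printf("New space, used: %6" PRIdPTR " KB\n", used_kb);
  return 0;
}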
 // TODO(1238405): Combine the infrastructure for --heap-stats and
 // --log-gc to avoid the complicated preprocessor and flag testing.
 void Heap::ReportStatisticsAfterGC() {
   // Similar to the before GC, we use some complicated logic to ensure that
   // NewSpace statistics are logged exactly once when --log-gc is turned on.
 #if defined(DEBUG)
(...skipping 26 matching lines...)
 #endif  // DEBUG

 #if defined(DEBUG)
   ReportStatisticsBeforeGC();
 #endif  // DEBUG

   LiveObjectList::GCPrologue();
   store_buffer()->GCPrologue();
 }

-
 intptr_t Heap::SizeOfObjects() {
   intptr_t total = 0;
   AllSpaces spaces;
   for (Space* space = spaces.next(); space != NULL; space = spaces.next()) {
     total += space->SizeOfObjects();
   }
   return total;
 }

-
-void Heap::RepairFreeListsAfterBoot() {
-  PagedSpaces spaces;
-  for (PagedSpace* space = spaces.next();
-       space != NULL;
-       space = spaces.next()) {
-    space->RepairFreeListsAfterBoot();
-  }
-}
-
-
 void Heap::GarbageCollectionEpilogue() {
   store_buffer()->GCEpilogue();
   LiveObjectList::GCEpilogue();
 #ifdef DEBUG
   allow_allocation(true);
   ZapFromSpace();

   if (FLAG_verify_heap) {
     Verify();
   }
(...skipping 161 matching lines...)
     tracer.set_collector(collector);

     HistogramTimer* rate = (collector == SCAVENGER)
         ? isolate_->counters()->gc_scavenger()
         : isolate_->counters()->gc_compactor();
     rate->Start();
     next_gc_likely_to_collect_more =
         PerformGarbageCollection(collector, &tracer);
     rate->Stop();

-    ASSERT(collector == SCAVENGER || incremental_marking()->IsStopped());
-
-    // This can do debug callbacks and restart incremental marking.
     GarbageCollectionEpilogue();
   }

+  ASSERT(collector == SCAVENGER || incremental_marking()->IsStopped());
   if (incremental_marking()->IsStopped()) {
     if (incremental_marking()->WorthActivating() && NextGCIsLikelyToBeFull()) {
       incremental_marking()->Start();
     }
   }

   return next_gc_likely_to_collect_more;
 }

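A side note on the rate->Start()/rate->Stop() pair bracketing PerformGarbageCollection above: this is the usual histogram-timer idiom. A hypothetical RAII rendering of the same pattern (ScopedHistogramTimer is not a V8 class; this is only a sketch of the technique, which cannot leave the timer running on an early return):

#include <chrono>
#include <cstdio>

// Hypothetical stand-in for V8's HistogramTimer: start on construction,
// record on scope exit, so every exit path stops the timer.
class ScopedHistogramTimer {
 public:
  explicit ScopedHistogramTimer(const char* name)
      : name_(name), start_(std::chrono::steady_clock::now()) {}
  ~ScopedHistogramTimer() {
    auto us = std::chrono::duration_cast<std::chrono::microseconds>(
        std::chrono::steady_clock::now() - start_).count();
    std::printf("%s: %lld us\n", name_, static_cast<long long>(us));
  }
 private:
  const char* name_;
  std::chrono::steady_clock::time_point start_;
};

void PerformGarbageCollection() { /* ... */ }

int main() {
  ScopedHistogramTimer rate("gc_scavenger");  // scavenger vs compactor bucket
  PerformGarbageCollection();
  return 0;  // timer records when the scope closes
}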
(...skipping 37 matching lines...)
                                           AllocationSpace space,
                                           const char* gc_reason = NULL) {
   heap->mark_compact_collector()->SetFlags(Heap::kAbortIncrementalMarkingMask);
   bool result = heap->CollectGarbage(space, gc_reason);
   heap->mark_compact_collector()->SetFlags(Heap::kNoGCFlags);
   return result;
 }


 void Heap::ReserveSpace(
-    int *sizes,
-    Address *locations_out) {
+    int new_space_size,
+    int pointer_space_size,
+    int data_space_size,
+    int code_space_size,
+    int map_space_size,
+    int cell_space_size,
+    int large_object_size) {
+  NewSpace* new_space = Heap::new_space();
+  PagedSpace* old_pointer_space = Heap::old_pointer_space();
+  PagedSpace* old_data_space = Heap::old_data_space();
+  PagedSpace* code_space = Heap::code_space();
+  PagedSpace* map_space = Heap::map_space();
+  PagedSpace* cell_space = Heap::cell_space();
+  LargeObjectSpace* lo_space = Heap::lo_space();
   bool gc_performed = true;
   int counter = 0;
   static const int kThreshold = 20;
   while (gc_performed && counter++ < kThreshold) {
     gc_performed = false;
-    ASSERT(NEW_SPACE == FIRST_PAGED_SPACE - 1);
-    for (int space = NEW_SPACE; space <= LAST_PAGED_SPACE; space++) {
-      if (sizes[space] != 0) {
-        MaybeObject* allocation;
-        if (space == NEW_SPACE) {
-          allocation = new_space()->AllocateRaw(sizes[space]);
-        } else {
-          allocation = paged_space(space)->AllocateRaw(sizes[space]);
-        }
-        FreeListNode* node;
-        if (!allocation->To<FreeListNode>(&node)) {
-          if (space == NEW_SPACE) {
-            Heap::CollectGarbage(NEW_SPACE,
-                                 "failed to reserve space in the new space");
-          } else {
-            AbortIncrementalMarkingAndCollectGarbage(
-                this,
-                static_cast<AllocationSpace>(space),
-                "failed to reserve space in paged space");
-          }
-          gc_performed = true;
-          break;
-        } else {
-          // Mark with a free list node, in case we have a GC before
-          // deserializing.
-          node->set_size(this, sizes[space]);
-          locations_out[space] = node->address();
-        }
-      }
-    }
+    if (!new_space->ReserveSpace(new_space_size)) {
+      Heap::CollectGarbage(NEW_SPACE,
+                           "failed to reserve space in the new space");
+      gc_performed = true;
+    }
+    if (!old_pointer_space->ReserveSpace(pointer_space_size)) {
+      AbortIncrementalMarkingAndCollectGarbage(this, OLD_POINTER_SPACE,
+          "failed to reserve space in the old pointer space");
+      gc_performed = true;
+    }
+    if (!(old_data_space->ReserveSpace(data_space_size))) {
+      AbortIncrementalMarkingAndCollectGarbage(this, OLD_DATA_SPACE,
+          "failed to reserve space in the old data space");
+      gc_performed = true;
+    }
+    if (!(code_space->ReserveSpace(code_space_size))) {
+      AbortIncrementalMarkingAndCollectGarbage(this, CODE_SPACE,
+          "failed to reserve space in the code space");
+      gc_performed = true;
+    }
+    if (!(map_space->ReserveSpace(map_space_size))) {
+      AbortIncrementalMarkingAndCollectGarbage(this, MAP_SPACE,
+          "failed to reserve space in the map space");
+      gc_performed = true;
+    }
+    if (!(cell_space->ReserveSpace(cell_space_size))) {
+      AbortIncrementalMarkingAndCollectGarbage(this, CELL_SPACE,
+          "failed to reserve space in the cell space");
+      gc_performed = true;
+    }
+    // We add a slack-factor of 2 in order to have space for a series of
+    // large-object allocations that are only just larger than the page size.
+    large_object_size *= 2;
+    // The ReserveSpace method on the large object space checks how much
+    // we can expand the old generation.  This includes expansion caused by
+    // allocation in the other spaces.
+    large_object_size += cell_space_size + map_space_size + code_space_size +
+        data_space_size + pointer_space_size;
+    if (!(lo_space->ReserveSpace(large_object_size))) {
+      AbortIncrementalMarkingAndCollectGarbage(this, LO_SPACE,
+          "failed to reserve space in the large object space");
+      gc_performed = true;
+    }
   }

   if (gc_performed) {
     // Failed to reserve the space after several attempts.
     V8::FatalProcessOutOfMemory("Heap::ReserveSpace");
   }
 }


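Both versions of Heap::ReserveSpace above follow the same retry discipline: attempt every reservation, run a collection for whatever failed, and retry up to kThreshold rounds before declaring fatal out-of-memory. A self-contained sketch of that discipline with toy Space/CollectGarbage stand-ins (all names and numbers below are hypothetical, not V8 API):

#include <cstdio>
#include <cstdlib>

// Toy space: a reservation succeeds if enough room is available, and a
// collection frees some fixed amount of room.
struct Space {
  int available;
  bool Reserve(int size) { return size <= available; }
  void CollectGarbage() { available += 64; }
};

void ReserveAll(Space* spaces, const int* sizes, int count) {
  static const int kThreshold = 20;  // same bound the V8 code uses
  bool gc_performed = true;
  for (int round = 0; gc_performed && round < kThreshold; round++) {
    gc_performed = false;
    for (int i = 0; i < count; i++) {
      if (!spaces[i].Reserve(sizes[i])) {
        spaces[i].CollectGarbage();
        gc_performed = true;  // re-check every space in the next round
      }
    }
  }
  if (gc_performed) {
    std::fprintf(stderr, "ReserveAll: failed after %d rounds\n", kThreshold);
    std::abort();  // mirrors V8::FatalProcessOutOfMemory
  }
}

int main() {
  Space spaces[2] = {{128}, {16}};
  const int sizes[2] = {64, 80};  // second space needs one GC to fit
  ReserveAll(spaces, sizes, 2);
  std::puts("all reservations satisfied");
  return 0;
}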
(...skipping 615 matching lines...)


 void Heap::UpdateNewSpaceReferencesInExternalStringTable(
     ExternalStringTableUpdaterCallback updater_func) {
   if (FLAG_verify_heap) {
     external_string_table_.Verify();
   }

   if (external_string_table_.new_space_strings_.is_empty()) return;

-  Object** start_slot = &external_string_table_.new_space_strings_[0];
-  Object** end_slot =
-      start_slot + external_string_table_.new_space_strings_.length();
-  Object** last = start_slot;
+  Object** start = &external_string_table_.new_space_strings_[0];
+  Object** end = start + external_string_table_.new_space_strings_.length();
+  Object** last = start;

-  for (Object** p = start_slot; p < end_slot; ++p) {
+  for (Object** p = start; p < end; ++p) {
     ASSERT(InFromSpace(*p));
     String* target = updater_func(this, p);

     if (target == NULL) continue;

     ASSERT(target->IsExternalString());

     if (InNewSpace(target)) {
       // String is still in new space.  Update the table entry.
       *last = target;
       ++last;
     } else {
       // String got promoted.  Move it to the old string list.
       external_string_table_.AddOldString(target);
     }
   }

-  ASSERT(last <= end_slot);
-  external_string_table_.ShrinkNewStrings(static_cast<int>(last - start_slot));
+  ASSERT(last <= end);
+  external_string_table_.ShrinkNewStrings(static_cast<int>(last - start));
 }


 void Heap::UpdateReferencesInExternalStringTable(
     ExternalStringTableUpdaterCallback updater_func) {

   // Update old space string references.
   if (external_string_table_.old_space_strings_.length() > 0) {
-    Object** start_slot = &external_string_table_.old_space_strings_[0];
-    Object** end_slot =
-        start_slot + external_string_table_.old_space_strings_.length();
-    for (Object** p = start_slot; p < end_slot; ++p) *p = updater_func(this, p);
+    Object** start = &external_string_table_.old_space_strings_[0];
+    Object** end = start + external_string_table_.old_space_strings_.length();
+    for (Object** p = start; p < end; ++p) *p = updater_func(this, p);
   }

   UpdateNewSpaceReferencesInExternalStringTable(updater_func);
 }


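The new-space loop above is a read-pointer/write-pointer compaction: p walks the table, survivors are copied down to last, and the table is then shrunk to the surviving prefix, all without allocating a second table. The same two-pointer idiom on a plain vector, for illustration only:

#include <cassert>
#include <cstdio>
#include <vector>

// Keep the entries that satisfy a predicate, in place; entries that fail
// the predicate drop out (in the V8 code above, they are "promoted" and
// moved to the old-strings table instead of being discarded).
int CompactKeepEven(std::vector<int>* table) {
  int* start = table->data();
  int* end = start + table->size();
  int* last = start;
  for (int* p = start; p < end; ++p) {
    if (*p % 2 != 0) continue;  // non-survivor: leaves this table
    *last = *p;                 // survivor: update the table entry
    ++last;
  }
  assert(last <= end);
  int kept = static_cast<int>(last - start);
  table->resize(kept);          // mirrors ShrinkNewStrings(last - start)
  return kept;
}

int main() {
  std::vector<int> table = {2, 7, 4, 9, 6};
  std::printf("kept %d entries\n", CompactKeepEven(&table));  // kept 3
  return 0;
}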
 static Object* ProcessFunctionWeakReferences(Heap* heap,
                                              Object* function,
                                              WeakObjectRetainer* retainer,
                                              bool record_slots) {
(...skipping 646 matching lines...)
   reinterpret_cast<Map*>(result)->set_map(raw_unchecked_meta_map());
   reinterpret_cast<Map*>(result)->set_instance_type(instance_type);
   reinterpret_cast<Map*>(result)->set_instance_size(instance_size);
   reinterpret_cast<Map*>(result)->set_visitor_id(
       StaticVisitorBase::GetVisitorId(instance_type, instance_size));
   reinterpret_cast<Map*>(result)->set_inobject_properties(0);
   reinterpret_cast<Map*>(result)->set_pre_allocated_property_fields(0);
   reinterpret_cast<Map*>(result)->set_unused_property_fields(0);
   reinterpret_cast<Map*>(result)->set_bit_field(0);
   reinterpret_cast<Map*>(result)->set_bit_field2(0);
-  int bit_field3 = Map::EnumLengthBits::encode(Map::kInvalidEnumCache) |
-                   Map::OwnsDescriptors::encode(true);
-  reinterpret_cast<Map*>(result)->set_bit_field3(bit_field3);
+  reinterpret_cast<Map*>(result)->set_bit_field3(0);
   return result;
 }


 MaybeObject* Heap::AllocateMap(InstanceType instance_type,
                                int instance_size,
                                ElementsKind elements_kind) {
   Object* result;
   MaybeObject* maybe_result = AllocateRawMap();
   if (!maybe_result->To(&result)) return maybe_result;

   Map* map = reinterpret_cast<Map*>(result);
   map->set_map_no_write_barrier(meta_map());
   map->set_instance_type(instance_type);
   map->set_visitor_id(
       StaticVisitorBase::GetVisitorId(instance_type, instance_size));
   map->set_prototype(null_value(), SKIP_WRITE_BARRIER);
   map->set_constructor(null_value(), SKIP_WRITE_BARRIER);
   map->set_instance_size(instance_size);
   map->set_inobject_properties(0);
   map->set_pre_allocated_property_fields(0);
   map->set_code_cache(empty_fixed_array(), SKIP_WRITE_BARRIER);
   map->init_back_pointer(undefined_value());
   map->set_unused_property_fields(0);
   map->set_bit_field(0);
   map->set_bit_field2(1 << Map::kIsExtensible);
-  int bit_field3 = Map::EnumLengthBits::encode(Map::kInvalidEnumCache) |
-                   Map::OwnsDescriptors::encode(true);
+  int bit_field3 = Map::EnumLengthBits::encode(Map::kInvalidEnumCache);
   map->set_bit_field3(bit_field3);
   map->set_elements_kind(elements_kind);

   // If the map object is aligned fill the padding area with Smi 0 objects.
   if (Map::kPadStart < Map::kSize) {
     memset(reinterpret_cast<byte*>(map) + Map::kPadStart - kHeapObjectTag,
            0,
            Map::kSize - Map::kPadStart);
   }
   return map;
(...skipping 1484 matching lines...)
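The bit_field3 lines in the hunk above rely on V8's BitField template: each logical field claims a shift/width slice of a single integer word, and fields are combined with bitwise OR. A minimal sketch of the encode/decode machinery (the field widths, positions, and the sentinel value below are assumptions for illustration, not V8's actual bit_field3 layout):

#include <cstdint>
#include <cstdio>

// Each field owns bits [kShift, kShift + kSize) of a 32-bit word.
template <typename T, int kShift, int kSize>
struct BitField {
  static const uint32_t kMask = ((1u << kSize) - 1) << kShift;
  static uint32_t encode(T value) {
    return static_cast<uint32_t>(value) << kShift;
  }
  static T decode(uint32_t word) {
    return static_cast<T>((word & kMask) >> kShift);
  }
};

typedef BitField<uint32_t, 0, 11> EnumLengthBits;  // assumed 11-bit field
typedef BitField<bool, 11, 1> OwnsDescriptors;     // assumed 1-bit flag

int main() {
  // 0x7FF stands in for Map::kInvalidEnumCache (assumed all-ones sentinel).
  uint32_t bit_field3 = EnumLengthBits::encode(0x7FF) |
                        OwnsDescriptors::encode(true);
  std::printf("enum length: %u, owns descriptors: %d\n",
              EnumLengthBits::decode(bit_field3),
              OwnsDescriptors::decode(bit_field3));
  return 0;
}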
   MaybeObject* maybe_reloc_info = AllocateByteArray(desc.reloc_size, TENURED);
   if (!maybe_reloc_info->To(&reloc_info)) return maybe_reloc_info;

   // Compute size.
   int body_size = RoundUp(desc.instr_size, kObjectAlignment);
   int obj_size = Code::SizeFor(body_size);
   ASSERT(IsAligned(static_cast<intptr_t>(obj_size), kCodeAlignment));
   MaybeObject* maybe_result;
   // Large code objects and code objects which should stay at a fixed address
   // are allocated in large object space.
-  HeapObject* result;
-  bool force_lo_space = obj_size > code_space()->AreaSize();
-  if (force_lo_space) {
+  if (obj_size > code_space()->AreaSize() || immovable) {
     maybe_result = lo_space_->AllocateRaw(obj_size, EXECUTABLE);
   } else {
     maybe_result = code_space_->AllocateRaw(obj_size);
   }
-  if (!maybe_result->To<HeapObject>(&result)) return maybe_result;

-  if (immovable && !force_lo_space &&
-      // Objects on the first page of each space are never moved.
-      !code_space_->FirstPage()->Contains(result->address())) {
-    // Discard the first code allocation, which was on a page where it could be
-    // moved.
-    CreateFillerObjectAt(result->address(), obj_size);
-    maybe_result = lo_space_->AllocateRaw(obj_size, EXECUTABLE);
-    if (!maybe_result->To<HeapObject>(&result)) return maybe_result;
-  }
+  Object* result;
+  if (!maybe_result->ToObject(&result)) return maybe_result;

   // Initialize the object
-  result->set_map_no_write_barrier(code_map());
+  HeapObject::cast(result)->set_map_no_write_barrier(code_map());
   Code* code = Code::cast(result);
   ASSERT(!isolate_->code_range()->exists() ||
          isolate_->code_range()->contains(code->address()));
   code->set_instruction_size(desc.instr_size);
   code->set_relocation_info(reloc_info);
   code->set_flags(flags);
   if (code->is_call_stub() || code->is_keyed_call_stub()) {
     code->set_check_type(RECEIVER_MAP_CHECK);
   }
   code->set_deoptimization_data(empty_fixed_array(), SKIP_WRITE_BARRIER);
(...skipping 559 matching lines...)

   // Initial size of the backing store to avoid resize of the storage during
   // bootstrapping. The size differs between the JS global object and the
   // builtins object.
   int initial_size = map->instance_type() == JS_GLOBAL_OBJECT_TYPE ? 64 : 512;

   // Allocate a dictionary object for backing storage.
   StringDictionary* dictionary;
   MaybeObject* maybe_dictionary =
       StringDictionary::Allocate(
-          map->NumberOfOwnDescriptors() * 2 + initial_size);
+          map->NumberOfDescribedProperties() * 2 + initial_size);
   if (!maybe_dictionary->To(&dictionary)) return maybe_dictionary;

   // The global object might be created from an object template with accessors.
   // Fill these accessors into the dictionary.
   DescriptorArray* descs = map->instance_descriptors();
   for (int i = 0; i < descs->number_of_descriptors(); i++) {
     PropertyDetails details = descs->GetDetails(i);
     ASSERT(details.type() == CALLBACKS);  // Only accessors are expected.
     PropertyDetails d = PropertyDetails(details.attributes(),
                                         CALLBACKS,
(...skipping 183 matching lines...)
   object->set_map(constructor->initial_map());

   // Reinitialize the object from the constructor map.
   InitializeJSObjectFromMap(object, FixedArray::cast(properties), map);
   return object;
 }


 MaybeObject* Heap::AllocateStringFromAscii(Vector<const char> string,
                                            PretenureFlag pretenure) {
-  int length = string.length();
-  if (length == 1) {
+  if (string.length() == 1) {
     return Heap::LookupSingleCharacterStringFromCode(string[0]);
   }
   Object* result;
   { MaybeObject* maybe_result =
         AllocateRawAsciiString(string.length(), pretenure);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }

   // Copy the characters into the new object.
-  CopyChars(SeqAsciiString::cast(result)->GetChars(), string.start(), length);
+  SeqAsciiString* string_result = SeqAsciiString::cast(result);
+  for (int i = 0; i < string.length(); i++) {
+    string_result->SeqAsciiStringSet(i, string[i]);
+  }
   return result;
 }


 MaybeObject* Heap::AllocateStringFromUtf8Slow(Vector<const char> string,
                                               PretenureFlag pretenure) {
   // Count the number of characters in the UTF-8 string and check if
   // it is an ASCII string.
   Access<UnicodeCache::Utf8Decoder>
       decoder(isolate_->unicode_cache()->utf8_decoder());
   decoder->Reset(string.start(), string.length());
   int chars = 0;
   while (decoder->has_more()) {
     uint32_t r = decoder->GetNext();
     if (r <= unibrow::Utf16::kMaxNonSurrogateCharCode) {
       chars++;
     } else {
       chars += 2;
     }
   }

   Object* result;
   { MaybeObject* maybe_result = AllocateRawTwoByteString(chars, pretenure);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }

   // Convert and copy the characters into the new object.
-  SeqTwoByteString* twobyte = SeqTwoByteString::cast(result);
+  String* string_result = String::cast(result);
   decoder->Reset(string.start(), string.length());
   int i = 0;
   while (i < chars) {
     uint32_t r = decoder->GetNext();
     if (r > unibrow::Utf16::kMaxNonSurrogateCharCode) {
-      twobyte->SeqTwoByteStringSet(i++, unibrow::Utf16::LeadSurrogate(r));
-      twobyte->SeqTwoByteStringSet(i++, unibrow::Utf16::TrailSurrogate(r));
+      string_result->Set(i++, unibrow::Utf16::LeadSurrogate(r));
+      string_result->Set(i++, unibrow::Utf16::TrailSurrogate(r));
     } else {
-      twobyte->SeqTwoByteStringSet(i++, r);
+      string_result->Set(i++, r);
     }
   }
   return result;
 }
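The function above makes two passes: the first counts UTF-16 code units (2 for code points above kMaxNonSurrogateCharCode), the second writes lead/trail surrogate pairs for those code points. The underlying arithmetic follows the UTF-16 definition (unibrow's LeadSurrogate/TrailSurrogate helpers are assumed to compute exactly this):

#include <cstdint>
#include <cstdio>

const uint32_t kMaxNonSurrogateCharCode = 0xFFFF;

// UTF-16 surrogate pair encoding: subtract 0x10000, then split the
// remaining 20 bits into a high half (lead) and a low half (trail).
uint16_t LeadSurrogate(uint32_t cp) {
  return static_cast<uint16_t>(0xD800 + (((cp - 0x10000) >> 10) & 0x3FF));
}
uint16_t TrailSurrogate(uint32_t cp) {
  return static_cast<uint16_t>(0xDC00 + ((cp - 0x10000) & 0x3FF));
}

int main() {
  uint32_t cp = 0x1F600;  // a code point outside the BMP (two code units)
  if (cp > kMaxNonSurrogateCharCode) {
    std::printf("U+%X -> %04X %04X\n", cp,
                static_cast<unsigned>(LeadSurrogate(cp)),
                static_cast<unsigned>(TrailSurrogate(cp)));
  }
  return 0;  // prints: U+1F600 -> D83D DE00
}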

 MaybeObject* Heap::AllocateStringFromTwoByte(Vector<const uc16> string,
                                              PretenureFlag pretenure) {
   // Check if the string is an ASCII string.
-  Object* result;
-  int length = string.length();
-  const uc16* start = string.start();
-
-  if (String::IsAscii(start, length)) {
-    MaybeObject* maybe_result = AllocateRawAsciiString(length, pretenure);
-    if (!maybe_result->ToObject(&result)) return maybe_result;
-    CopyChars(SeqAsciiString::cast(result)->GetChars(), start, length);
-  } else {  // It's not an ASCII string.
-    MaybeObject* maybe_result = AllocateRawTwoByteString(length, pretenure);
-    if (!maybe_result->ToObject(&result)) return maybe_result;
-    CopyChars(SeqTwoByteString::cast(result)->GetChars(), start, length);
-  }
+  MaybeObject* maybe_result;
+  if (String::IsAscii(string.start(), string.length())) {
+    maybe_result = AllocateRawAsciiString(string.length(), pretenure);
+  } else {  // It's not an ASCII string.
+    maybe_result = AllocateRawTwoByteString(string.length(), pretenure);
+  }
+  Object* result;
+  if (!maybe_result->ToObject(&result)) return maybe_result;
+
+  // Copy the characters into the new object, which may be either ASCII or
+  // UTF-16.
+  String* string_result = String::cast(result);
+  for (int i = 0; i < string.length(); i++) {
+    string_result->Set(i, string[i]);
+  }
   return result;
 }
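Both sides above pick the string representation up front: one scan over the two-byte input decides between a one-byte (ASCII) and a two-byte backing store. A standalone sketch of that check (V8's String::IsAscii differs in detail; the 0x7F bound here is the plain ASCII cut-off, used only for illustration):

#include <cstdint>
#include <cstdio>

// Return true if every UTF-16 code unit fits in the one-byte range, so the
// string can be stored in the narrow representation.
bool IsAscii(const uint16_t* chars, int length) {
  for (int i = 0; i < length; i++) {
    if (chars[i] > 0x7F) return false;
  }
  return true;
}

int main() {
  const uint16_t narrow[] = {'h', 'e', 'a', 'p'};
  const uint16_t wide[] = {'h', 0x00E9, 'a', 'p'};  // contains U+00E9
  std::printf("narrow -> %s\n", IsAscii(narrow, 4) ? "one-byte" : "two-byte");
  std::printf("wide   -> %s\n", IsAscii(wide, 4) ? "one-byte" : "two-byte");
  return 0;
}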


 Map* Heap::SymbolMapForString(String* string) {
   // If the string is in new space it cannot be used as a symbol.
   if (InNewSpace(string)) return NULL;

   // Find the corresponding symbol map for strings.
(...skipping 644 matching lines...)
 bool Heap::IdleNotification(int hint) {
   // Hints greater than this value indicate that
   // the embedder is requesting a lot of GC work.
   const int kMaxHint = 1000;
   // Minimal hint that allows to do full GC.
   const int kMinHintForFullGC = 100;
   intptr_t size_factor = Min(Max(hint, 20), kMaxHint) / 4;
   // The size factor is in range [5..250]. The numbers here are chosen from
   // experiments. If you change them, make sure to test with
   // chrome/performance_ui_tests --gtest_filter="GeneralMixMemoryTest.*
-  intptr_t step_size =
-      size_factor * IncrementalMarking::kAllocatedThreshold;
+  intptr_t step_size = size_factor * IncrementalMarking::kAllocatedThreshold;

   if (contexts_disposed_ > 0) {
     if (hint >= kMaxHint) {
       // The embedder is requesting a lot of GC work after context disposal,
       // we age inline caches so that they don't keep objects from
       // the old context alive.
       AgeInlineCaches();
     }
     int mark_sweep_time = Min(TimeMarkSweepWouldTakeInMs(), 1000);
     if (hint >= mark_sweep_time && !FLAG_expose_gc &&
(...skipping 1635 matching lines...)
     // not visited yet
     Map* map_p = reinterpret_cast<Map*>(HeapObject::cast(map));

     Address map_addr = map_p->address();

     obj->set_map_no_write_barrier(reinterpret_cast<Map*>(map_addr + kMarkTag));

     // Scan the object body.
     if (is_native_context && (visit_mode_ == VISIT_ONLY_STRONG)) {
       // This is specialized to scan Context's properly.
-      Object** start_slot = reinterpret_cast<Object**>(obj->address() +
-          Context::kHeaderSize);
-      Object** end_slot = reinterpret_cast<Object**>(obj->address() +
-          Context::kHeaderSize + Context::FIRST_WEAK_SLOT * kPointerSize);
-      mark_visitor->VisitPointers(start_slot, end_slot);
+      Object** start = reinterpret_cast<Object**>(obj->address() +
+          Context::kHeaderSize);
+      Object** end = reinterpret_cast<Object**>(obj->address() +
+          Context::kHeaderSize + Context::FIRST_WEAK_SLOT * kPointerSize);
+      mark_visitor->VisitPointers(start, end);
     } else {
       obj->IterateBody(map_p->instance_type(),
                        obj->SizeFromMap(map_p),
                        mark_visitor);
     }

     // Scan the map after the body because the body is a lot more interesting
     // when doing leak detection.
     MarkRecursively(&map, mark_visitor);

(...skipping 320 matching lines...)
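The set_map_no_write_barrier(map_addr + kMarkTag) line in the chunk above marks an object as visited by setting a tag bit in its stored map pointer: heap addresses are aligned, so the low bits of a map pointer are always zero and can carry the mark with no extra per-object storage. A toy version of the trick (the value of kMarkTag here is an assumption; V8's real constant lives in the marking code):

#include <cassert>
#include <cstdint>
#include <cstdio>

const uintptr_t kMarkTag = 2;  // assumed: a low bit freed up by alignment

struct Object {
  uintptr_t map_word;  // normally a pointer to the object's map
  bool IsMarked() const { return (map_word & kMarkTag) != 0; }
  void Mark()   { map_word |= kMarkTag; }   // visited: tag the map pointer
  void Unmark() { map_word &= ~kMarkTag; }  // restore for the unmark pass
  uintptr_t map() const { return map_word & ~kMarkTag; }  // strip the tag
};

int main() {
  alignas(8) static int fake_map;  // aligned, so low bits are free
  Object o = {reinterpret_cast<uintptr_t>(&fake_map)};
  assert(!o.IsMarked());
  o.Mark();
  assert(o.IsMarked() && o.map() == reinterpret_cast<uintptr_t>(&fake_map));
  o.Unmark();
  std::puts("mark/unmark round-trip ok");
  return 0;
}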
     }
   }


 void KeyedLookupCache::Clear() {
   for (int index = 0; index < kLength; index++) keys_[index].map = NULL;
 }


 void DescriptorLookupCache::Clear() {
-  for (int index = 0; index < kLength; index++) keys_[index].source = NULL;
+  for (int index = 0; index < kLength; index++) keys_[index].array = NULL;
 }


 #ifdef DEBUG
 void Heap::GarbageCollectionGreedyCheck() {
   ASSERT(FLAG_gc_greedy);
   if (isolate_->bootstrapper()->IsActive()) return;
   if (disallow_allocation_failure()) return;
   CollectGarbage(NEW_SPACE);
 }
(...skipping 178 matching lines...)
                            static_cast<int>(object_sizes_last_time_[index]));
   FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(ADJUST_LAST_TIME_OBJECT_COUNT)
 #undef ADJUST_LAST_TIME_OBJECT_COUNT

   memcpy(object_counts_last_time_, object_counts_, sizeof(object_counts_));
   memcpy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_));
   ClearObjectStats();
 }

 } }  // namespace v8::internal
