| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 596 matching lines...) |
| 607 | 607 |
| 608 AllowHeapAllocation for_the_rest_of_the_epilogue; | 608 AllowHeapAllocation for_the_rest_of_the_epilogue; |
| 609 | 609 |
| 610 #ifdef DEBUG | 610 #ifdef DEBUG |
| 611 if (FLAG_print_global_handles) isolate_->global_handles()->Print(); | 611 if (FLAG_print_global_handles) isolate_->global_handles()->Print(); |
| 612 if (FLAG_print_handles) PrintHandles(); | 612 if (FLAG_print_handles) PrintHandles(); |
| 613 if (FLAG_gc_verbose) Print(); | 613 if (FLAG_gc_verbose) Print(); |
| 614 if (FLAG_code_stats) ReportCodeStatistics("After GC"); | 614 if (FLAG_code_stats) ReportCodeStatistics("After GC"); |
| 615 #endif | 615 #endif |
| 616 if (FLAG_deopt_every_n_garbage_collections > 0) { | 616 if (FLAG_deopt_every_n_garbage_collections > 0) { |
| 617 // TODO(jkummerow/ulan/jarin): This is not safe! We can't assume that |
| 618 // the topmost optimized frame can be deoptimized safely, because it |
| 619 // might not have a lazy bailout point right after its current PC. |
| 617 if (++gcs_since_last_deopt_ == FLAG_deopt_every_n_garbage_collections) { | 620 if (++gcs_since_last_deopt_ == FLAG_deopt_every_n_garbage_collections) { |
| 618 Deoptimizer::DeoptimizeAll(isolate()); | 621 Deoptimizer::DeoptimizeAll(isolate()); |
| 619 gcs_since_last_deopt_ = 0; | 622 gcs_since_last_deopt_ = 0; |
| 620 } | 623 } |
| 621 } | 624 } |
| 622 | 625 |
| 623 UpdateMaximumCommitted(); | 626 UpdateMaximumCommitted(); |
| 624 | 627 |
| 625 isolate_->counters()->alive_after_last_gc()->Set( | 628 isolate_->counters()->alive_after_last_gc()->Set( |
| 626 static_cast<int>(SizeOfObjects())); | 629 static_cast<int>(SizeOfObjects())); |
| (...skipping 2050 matching lines...) |
| 2677 return accessors; | 2680 return accessors; |
| 2678 } | 2681 } |
| 2679 | 2682 |
| 2680 | 2683 |
| 2681 MaybeObject* Heap::AllocateTypeFeedbackInfo() { | 2684 MaybeObject* Heap::AllocateTypeFeedbackInfo() { |
| 2682 TypeFeedbackInfo* info; | 2685 TypeFeedbackInfo* info; |
| 2683 { MaybeObject* maybe_info = AllocateStruct(TYPE_FEEDBACK_INFO_TYPE); | 2686 { MaybeObject* maybe_info = AllocateStruct(TYPE_FEEDBACK_INFO_TYPE); |
| 2684 if (!maybe_info->To(&info)) return maybe_info; | 2687 if (!maybe_info->To(&info)) return maybe_info; |
| 2685 } | 2688 } |
| 2686 info->initialize_storage(); | 2689 info->initialize_storage(); |
| 2687 info->set_feedback_vector(empty_fixed_array(), SKIP_WRITE_BARRIER); | |
| 2688 return info; | 2690 return info; |
| 2689 } | 2691 } |
| 2690 | 2692 |
| 2691 | 2693 |
| 2692 MaybeObject* Heap::AllocateAliasedArgumentsEntry(int aliased_context_slot) { | 2694 MaybeObject* Heap::AllocateAliasedArgumentsEntry(int aliased_context_slot) { |
| 2693 AliasedArgumentsEntry* entry; | 2695 AliasedArgumentsEntry* entry; |
| 2694 { MaybeObject* maybe_entry = AllocateStruct(ALIASED_ARGUMENTS_ENTRY_TYPE); | 2696 { MaybeObject* maybe_entry = AllocateStruct(ALIASED_ARGUMENTS_ENTRY_TYPE); |
| 2695 if (!maybe_entry->To(&entry)) return maybe_entry; | 2697 if (!maybe_entry->To(&entry)) return maybe_entry; |
| 2696 } | 2698 } |
| 2697 entry->set_aliased_context_slot(aliased_context_slot); | 2699 entry->set_aliased_context_slot(aliased_context_slot); |
| (...skipping 162 matching lines...) |
| 2860 TYPED_ARRAYS(ALLOCATE_EXTERNAL_ARRAY_MAP) | 2862 TYPED_ARRAYS(ALLOCATE_EXTERNAL_ARRAY_MAP) |
| 2861 #undef ALLOCATE_EXTERNAL_ARRAY_MAP | 2863 #undef ALLOCATE_EXTERNAL_ARRAY_MAP |
| 2862 | 2864 |
| 2863 #define ALLOCATE_FIXED_TYPED_ARRAY_MAP(Type, type, TYPE, ctype, size) \ | 2865 #define ALLOCATE_FIXED_TYPED_ARRAY_MAP(Type, type, TYPE, ctype, size) \ |
| 2864 ALLOCATE_VARSIZE_MAP(FIXED_##TYPE##_ARRAY_TYPE, \ | 2866 ALLOCATE_VARSIZE_MAP(FIXED_##TYPE##_ARRAY_TYPE, \ |
| 2865 fixed_##type##_array) | 2867 fixed_##type##_array) |
| 2866 | 2868 |
| 2867 TYPED_ARRAYS(ALLOCATE_FIXED_TYPED_ARRAY_MAP) | 2869 TYPED_ARRAYS(ALLOCATE_FIXED_TYPED_ARRAY_MAP) |
| 2868 #undef ALLOCATE_FIXED_TYPED_ARRAY_MAP | 2870 #undef ALLOCATE_FIXED_TYPED_ARRAY_MAP |
| 2869 | 2871 |
| 2870 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, non_strict_arguments_elements) | 2872 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, sloppy_arguments_elements) |
| 2871 | 2873 |
| 2872 ALLOCATE_VARSIZE_MAP(CODE_TYPE, code) | 2874 ALLOCATE_VARSIZE_MAP(CODE_TYPE, code) |
| 2873 | 2875 |
| 2874 ALLOCATE_MAP(CELL_TYPE, Cell::kSize, cell) | 2876 ALLOCATE_MAP(CELL_TYPE, Cell::kSize, cell) |
| 2875 ALLOCATE_MAP(PROPERTY_CELL_TYPE, PropertyCell::kSize, global_property_cell) | 2877 ALLOCATE_MAP(PROPERTY_CELL_TYPE, PropertyCell::kSize, global_property_cell) |
| 2876 ALLOCATE_MAP(FILLER_TYPE, kPointerSize, one_pointer_filler) | 2878 ALLOCATE_MAP(FILLER_TYPE, kPointerSize, one_pointer_filler) |
| 2877 ALLOCATE_MAP(FILLER_TYPE, 2 * kPointerSize, two_pointer_filler) | 2879 ALLOCATE_MAP(FILLER_TYPE, 2 * kPointerSize, two_pointer_filler) |
| 2878 | 2880 |
| 2879 | 2881 |
| 2880 for (unsigned i = 0; i < ARRAY_SIZE(struct_table); i++) { | 2882 for (unsigned i = 0; i < ARRAY_SIZE(struct_table); i++) { |
| (...skipping 419 matching lines...) |
| 3300 { MaybeObject* maybe_obj = AllocateSymbol(); | 3302 { MaybeObject* maybe_obj = AllocateSymbol(); |
| 3301 if (!maybe_obj->ToObject(&obj)) return false; | 3303 if (!maybe_obj->ToObject(&obj)) return false; |
| 3302 } | 3304 } |
| 3303 Symbol::cast(obj)->set_is_private(true); | 3305 Symbol::cast(obj)->set_is_private(true); |
| 3304 set_frozen_symbol(Symbol::cast(obj)); | 3306 set_frozen_symbol(Symbol::cast(obj)); |
| 3305 | 3307 |
| 3306 { MaybeObject* maybe_obj = AllocateSymbol(); | 3308 { MaybeObject* maybe_obj = AllocateSymbol(); |
| 3307 if (!maybe_obj->ToObject(&obj)) return false; | 3309 if (!maybe_obj->ToObject(&obj)) return false; |
| 3308 } | 3310 } |
| 3309 Symbol::cast(obj)->set_is_private(true); | 3311 Symbol::cast(obj)->set_is_private(true); |
| 3312 set_nonexistent_symbol(Symbol::cast(obj)); |
| 3313 |
| 3314 { MaybeObject* maybe_obj = AllocateSymbol(); |
| 3315 if (!maybe_obj->ToObject(&obj)) return false; |
| 3316 } |
| 3317 Symbol::cast(obj)->set_is_private(true); |
| 3310 set_elements_transition_symbol(Symbol::cast(obj)); | 3318 set_elements_transition_symbol(Symbol::cast(obj)); |
| 3311 | 3319 |
| 3320 { MaybeObject* maybe_obj = AllocateSymbol(); |
| 3321 if (!maybe_obj->ToObject(&obj)) return false; |
| 3322 } |
| 3323 Symbol::cast(obj)->set_is_private(true); |
| 3324 set_uninitialized_symbol(Symbol::cast(obj)); |
| 3325 |
| 3326 { MaybeObject* maybe_obj = AllocateSymbol(); |
| 3327 if (!maybe_obj->ToObject(&obj)) return false; |
| 3328 } |
| 3329 Symbol::cast(obj)->set_is_private(true); |
| 3330 set_megamorphic_symbol(Symbol::cast(obj)); |
| 3331 |
| 3312 { MaybeObject* maybe_obj = SeededNumberDictionary::Allocate(this, 0, TENURED); | 3332 { MaybeObject* maybe_obj = SeededNumberDictionary::Allocate(this, 0, TENURED); |
| 3313 if (!maybe_obj->ToObject(&obj)) return false; | 3333 if (!maybe_obj->ToObject(&obj)) return false; |
| 3314 } | 3334 } |
| 3315 SeededNumberDictionary::cast(obj)->set_requires_slow_elements(); | 3335 SeededNumberDictionary::cast(obj)->set_requires_slow_elements(); |
| 3316 set_empty_slow_element_dictionary(SeededNumberDictionary::cast(obj)); | 3336 set_empty_slow_element_dictionary(SeededNumberDictionary::cast(obj)); |
| 3317 | 3337 |
| 3318 { MaybeObject* maybe_obj = AllocateSymbol(); | 3338 { MaybeObject* maybe_obj = AllocateSymbol(); |
| 3319 if (!maybe_obj->ToObject(&obj)) return false; | 3339 if (!maybe_obj->ToObject(&obj)) return false; |
| 3320 } | 3340 } |
| 3321 Symbol::cast(obj)->set_is_private(true); | 3341 Symbol::cast(obj)->set_is_private(true); |
| 3322 set_observed_symbol(Symbol::cast(obj)); | 3342 set_observed_symbol(Symbol::cast(obj)); |
| 3323 | 3343 |
| 3324 { MaybeObject* maybe_obj = AllocateFixedArray(0, TENURED); | 3344 { MaybeObject* maybe_obj = AllocateFixedArray(0, TENURED); |
| 3325 if (!maybe_obj->ToObject(&obj)) return false; | 3345 if (!maybe_obj->ToObject(&obj)) return false; |
| 3326 } | 3346 } |
| 3327 set_materialized_objects(FixedArray::cast(obj)); | 3347 set_materialized_objects(FixedArray::cast(obj)); |
| 3328 | 3348 |
| 3329 // Handling of script id generation is in Factory::NewScript. | 3349 // Handling of script id generation is in Factory::NewScript. |
| 3330 set_last_script_id(Smi::FromInt(v8::Script::kNoScriptId)); | 3350 set_last_script_id(Smi::FromInt(v8::UnboundScript::kNoScriptId)); |
| 3331 | 3351 |
| 3332 { MaybeObject* maybe_obj = AllocateAllocationSitesScratchpad(); | 3352 { MaybeObject* maybe_obj = AllocateAllocationSitesScratchpad(); |
| 3333 if (!maybe_obj->ToObject(&obj)) return false; | 3353 if (!maybe_obj->ToObject(&obj)) return false; |
| 3334 } | 3354 } |
| 3335 set_allocation_sites_scratchpad(FixedArray::cast(obj)); | 3355 set_allocation_sites_scratchpad(FixedArray::cast(obj)); |
| 3336 InitializeAllocationSitesScratchpad(); | 3356 InitializeAllocationSitesScratchpad(); |
| 3337 | 3357 |
| 3338 // Initialize keyed lookup cache. | 3358 // Initialize keyed lookup cache. |
| 3339 isolate_->keyed_lookup_cache()->Clear(); | 3359 isolate_->keyed_lookup_cache()->Clear(); |
| 3340 | 3360 |
| (...skipping 300 matching lines...) |
| 3641 | 3661 |
| 3642 void Heap::InitializeAllocationSitesScratchpad() { | 3662 void Heap::InitializeAllocationSitesScratchpad() { |
| 3643 ASSERT(allocation_sites_scratchpad()->length() == | 3663 ASSERT(allocation_sites_scratchpad()->length() == |
| 3644 kAllocationSiteScratchpadSize); | 3664 kAllocationSiteScratchpadSize); |
| 3645 for (int i = 0; i < kAllocationSiteScratchpadSize; i++) { | 3665 for (int i = 0; i < kAllocationSiteScratchpadSize; i++) { |
| 3646 allocation_sites_scratchpad()->set_undefined(i); | 3666 allocation_sites_scratchpad()->set_undefined(i); |
| 3647 } | 3667 } |
| 3648 } | 3668 } |
| 3649 | 3669 |
| 3650 | 3670 |
| 3651 void Heap::AddAllocationSiteToScratchpad(AllocationSite* site) { | 3671 void Heap::AddAllocationSiteToScratchpad(AllocationSite* site, |
| 3672 ScratchpadSlotMode mode) { |
| 3652 if (allocation_sites_scratchpad_length_ < kAllocationSiteScratchpadSize) { | 3673 if (allocation_sites_scratchpad_length_ < kAllocationSiteScratchpadSize) { |
| 3653 // We cannot use the normal write-barrier because slots need to be | 3674 // We cannot use the normal write-barrier because slots need to be |
| 3654 // recorded with non-incremental marking as well. We have to explicitly | 3675 // recorded with non-incremental marking as well. We have to explicitly |
| 3655 // record the slot to take evacuation candidates into account. | 3676 // record the slot to take evacuation candidates into account. |
| 3656 allocation_sites_scratchpad()->set( | 3677 allocation_sites_scratchpad()->set( |
| 3657 allocation_sites_scratchpad_length_, site, SKIP_WRITE_BARRIER); | 3678 allocation_sites_scratchpad_length_, site, SKIP_WRITE_BARRIER); |
| 3658 Object** slot = allocation_sites_scratchpad()->RawFieldOfElementAt( | 3679 Object** slot = allocation_sites_scratchpad()->RawFieldOfElementAt( |
| 3659 allocation_sites_scratchpad_length_); | 3680 allocation_sites_scratchpad_length_); |
| 3660 mark_compact_collector()->RecordSlot(slot, slot, *slot); | 3681 |
| 3682 if (mode == RECORD_SCRATCHPAD_SLOT) { |
| 3683 // We need to allow slots buffer overflow here since the evacuation |
| 3684 // candidates are not part of the global list of old space pages and |
| 3685 // releasing an evacuation candidate due to a slots buffer overflow |
| 3686 // results in lost pages. |
| 3687 mark_compact_collector()->RecordSlot( |
| 3688 slot, slot, *slot, SlotsBuffer::IGNORE_OVERFLOW); |
| 3689 } |
| 3661 allocation_sites_scratchpad_length_++; | 3690 allocation_sites_scratchpad_length_++; |
| 3662 } | 3691 } |
| 3663 } | 3692 } |
| 3664 | 3693 |
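Context for the hunk above: AddAllocationSiteToScratchpad now takes a ScratchpadSlotMode so callers can skip slot recording. A minimal standalone sketch of that pattern, in plain C++ with made-up types (Scratchpad, Site) rather than V8 internals; it only mirrors the shape of the change, not the real collector machinery.

    enum ScratchpadSlotMode { IGNORE_SCRATCHPAD_SLOT, RECORD_SCRATCHPAD_SLOT };

    const int kScratchpadSize = 256;  // stands in for kAllocationSiteScratchpadSize

    struct Site;  // placeholder for AllocationSite

    struct Scratchpad {
      Site* entries[kScratchpadSize];
      Site** recorded_slots[kScratchpadSize];  // slots remembered for a later fix-up pass
      int length;
      int recorded;
    };

    // Mirrors AddAllocationSiteToScratchpad's shape: silently drop when full,
    // and only note the written slot's address when the caller asks for it.
    void AddToScratchpad(Scratchpad* pad, Site* site, ScratchpadSlotMode mode) {
      if (pad->length >= kScratchpadSize) return;
      pad->entries[pad->length] = site;
      if (mode == RECORD_SCRATCHPAD_SLOT) {
        pad->recorded_slots[pad->recorded++] = &pad->entries[pad->length];
      }
      pad->length++;
    }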
| 3665 | 3694 |
| 3666 Map* Heap::MapForExternalArrayType(ExternalArrayType array_type) { | 3695 Map* Heap::MapForExternalArrayType(ExternalArrayType array_type) { |
| 3667 return Map::cast(roots_[RootIndexForExternalArrayType(array_type)]); | 3696 return Map::cast(roots_[RootIndexForExternalArrayType(array_type)]); |
| 3668 } | 3697 } |
| 3669 | 3698 |
| 3670 | 3699 |
| (...skipping 101 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3772 share->set_optimized_code_map(Smi::FromInt(0)); | 3801 share->set_optimized_code_map(Smi::FromInt(0)); |
| 3773 share->set_scope_info(ScopeInfo::Empty(isolate_)); | 3802 share->set_scope_info(ScopeInfo::Empty(isolate_)); |
| 3774 Code* construct_stub = | 3803 Code* construct_stub = |
| 3775 isolate_->builtins()->builtin(Builtins::kJSConstructStubGeneric); | 3804 isolate_->builtins()->builtin(Builtins::kJSConstructStubGeneric); |
| 3776 share->set_construct_stub(construct_stub); | 3805 share->set_construct_stub(construct_stub); |
| 3777 share->set_instance_class_name(Object_string()); | 3806 share->set_instance_class_name(Object_string()); |
| 3778 share->set_function_data(undefined_value(), SKIP_WRITE_BARRIER); | 3807 share->set_function_data(undefined_value(), SKIP_WRITE_BARRIER); |
| 3779 share->set_script(undefined_value(), SKIP_WRITE_BARRIER); | 3808 share->set_script(undefined_value(), SKIP_WRITE_BARRIER); |
| 3780 share->set_debug_info(undefined_value(), SKIP_WRITE_BARRIER); | 3809 share->set_debug_info(undefined_value(), SKIP_WRITE_BARRIER); |
| 3781 share->set_inferred_name(empty_string(), SKIP_WRITE_BARRIER); | 3810 share->set_inferred_name(empty_string(), SKIP_WRITE_BARRIER); |
| 3811 share->set_feedback_vector(empty_fixed_array(), SKIP_WRITE_BARRIER); |
| 3782 share->set_initial_map(undefined_value(), SKIP_WRITE_BARRIER); | 3812 share->set_initial_map(undefined_value(), SKIP_WRITE_BARRIER); |
| 3783 share->set_ast_node_count(0); | 3813 share->set_ast_node_count(0); |
| 3784 share->set_counters(0); | 3814 share->set_counters(0); |
| 3785 | 3815 |
| 3786 // Set integer fields (smi or int, depending on the architecture). | 3816 // Set integer fields (smi or int, depending on the architecture). |
| 3787 share->set_length(0); | 3817 share->set_length(0); |
| 3788 share->set_formal_parameter_count(0); | 3818 share->set_formal_parameter_count(0); |
| 3789 share->set_expected_nof_properties(0); | 3819 share->set_expected_nof_properties(0); |
| 3790 share->set_num_literals(0); | 3820 share->set_num_literals(0); |
| 3791 share->set_start_position_and_type(0); | 3821 share->set_start_position_and_type(0); |
| (...skipping 388 matching lines...) |
| 4180 AllocationSite* allocation_site) { | 4210 AllocationSite* allocation_site) { |
| 4181 memento->set_map_no_write_barrier(allocation_memento_map()); | 4211 memento->set_map_no_write_barrier(allocation_memento_map()); |
| 4182 ASSERT(allocation_site->map() == allocation_site_map()); | 4212 ASSERT(allocation_site->map() == allocation_site_map()); |
| 4183 memento->set_allocation_site(allocation_site, SKIP_WRITE_BARRIER); | 4213 memento->set_allocation_site(allocation_site, SKIP_WRITE_BARRIER); |
| 4184 if (FLAG_allocation_site_pretenuring) { | 4214 if (FLAG_allocation_site_pretenuring) { |
| 4185 allocation_site->IncrementMementoCreateCount(); | 4215 allocation_site->IncrementMementoCreateCount(); |
| 4186 } | 4216 } |
| 4187 } | 4217 } |
| 4188 | 4218 |
| 4189 | 4219 |
| 4190 MaybeObject* Heap::AllocateWithAllocationSite(Map* map, AllocationSpace space, | 4220 MaybeObject* Heap::Allocate(Map* map, AllocationSpace space, |
| 4191 Handle<AllocationSite> allocation_site) { | 4221 AllocationSite* allocation_site) { |
| 4192 ASSERT(gc_state_ == NOT_IN_GC); | 4222 ASSERT(gc_state_ == NOT_IN_GC); |
| 4193 ASSERT(map->instance_type() != MAP_TYPE); | 4223 ASSERT(map->instance_type() != MAP_TYPE); |
| 4194 // If allocation failures are disallowed, we may allocate in a different | 4224 // If allocation failures are disallowed, we may allocate in a different |
| 4195 // space when new space is full and the object is not a large object. | |
| 4196 AllocationSpace retry_space = | |
| 4197 (space != NEW_SPACE) ? space : TargetSpaceId(map->instance_type()); | |
| 4198 int size = map->instance_size() + AllocationMemento::kSize; | |
| 4199 Object* result; | |
| 4200 MaybeObject* maybe_result = AllocateRaw(size, space, retry_space); | |
| 4201 if (!maybe_result->ToObject(&result)) return maybe_result; | |
| 4202 // No need for write barrier since object is white and map is in old space. | |
| 4203 HeapObject::cast(result)->set_map_no_write_barrier(map); | |
| 4204 AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>( | |
| 4205 reinterpret_cast<Address>(result) + map->instance_size()); | |
| 4206 InitializeAllocationMemento(alloc_memento, *allocation_site); | |
| 4207 return result; | |
| 4208 } | |
| 4209 | |
| 4210 | |
| 4211 MaybeObject* Heap::Allocate(Map* map, AllocationSpace space) { | |
| 4212 ASSERT(gc_state_ == NOT_IN_GC); | |
| 4213 ASSERT(map->instance_type() != MAP_TYPE); | |
| 4214 // If allocation failures are disallowed, we may allocate in a different | |
| 4215 // space when new space is full and the object is not a large object. | 4225 // space when new space is full and the object is not a large object. |
| 4216 AllocationSpace retry_space = | 4226 AllocationSpace retry_space = |
| 4217 (space != NEW_SPACE) ? space : TargetSpaceId(map->instance_type()); | 4227 (space != NEW_SPACE) ? space : TargetSpaceId(map->instance_type()); |
| 4218 int size = map->instance_size(); | 4228 int size = map->instance_size(); |
| 4229 if (allocation_site != NULL) { |
| 4230 size += AllocationMemento::kSize; |
| 4231 } |
| 4219 Object* result; | 4232 Object* result; |
| 4220 MaybeObject* maybe_result = AllocateRaw(size, space, retry_space); | 4233 MaybeObject* maybe_result = AllocateRaw(size, space, retry_space); |
| 4221 if (!maybe_result->ToObject(&result)) return maybe_result; | 4234 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 4222 // No need for write barrier since object is white and map is in old space. | 4235 // No need for write barrier since object is white and map is in old space. |
| 4223 HeapObject::cast(result)->set_map_no_write_barrier(map); | 4236 HeapObject::cast(result)->set_map_no_write_barrier(map); |
| 4237 if (allocation_site != NULL) { |
| 4238 AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>( |
| 4239 reinterpret_cast<Address>(result) + map->instance_size()); |
| 4240 InitializeAllocationMemento(alloc_memento, allocation_site); |
| 4241 } |
| 4224 return result; | 4242 return result; |
| 4225 } | 4243 } |
| 4226 | 4244 |
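The hunk above folds the old AllocateWithAllocationSite into Allocate by making the allocation site an optional raw pointer. A self-contained sketch of that "optional trailing memento" layout, using plain malloc and invented Site/Memento types instead of V8's allocator; it assumes object_size is pointer-aligned and is only an illustration of the sizing logic.

    #include <cstdlib>
    #include <cstring>

    struct Site { int id; };
    struct Memento { Site* site; };

    // When a site is supplied, the raw allocation is enlarged so a small memento
    // can be written directly behind the object, as the merged Allocate() does
    // with AllocationMemento::kSize.
    void* AllocateWithOptionalMemento(size_t object_size, Site* site) {
      size_t size = object_size;
      if (site != NULL) size += sizeof(Memento);
      char* result = static_cast<char*>(malloc(size));
      if (result == NULL) return NULL;  // stands in for the MaybeObject failure path
      memset(result, 0, object_size);   // stands in for map/body initialization
      if (site != NULL) {
        Memento* memento = reinterpret_cast<Memento*>(result + object_size);
        memento->site = site;           // no write barrier needed in this toy version
      }
      return result;
    }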
| 4227 | 4245 |
| 4228 void Heap::InitializeFunction(JSFunction* function, | 4246 void Heap::InitializeFunction(JSFunction* function, |
| 4229 SharedFunctionInfo* shared, | 4247 SharedFunctionInfo* shared, |
| 4230 Object* prototype) { | 4248 Object* prototype) { |
| 4231 ASSERT(!prototype->IsMap()); | 4249 ASSERT(!prototype->IsMap()); |
| 4232 function->initialize_properties(); | 4250 function->initialize_properties(); |
| 4233 function->initialize_elements(); | 4251 function->initialize_elements(); |
| (...skipping 21 matching lines...) |
| 4255 } | 4273 } |
| 4256 | 4274 |
| 4257 | 4275 |
| 4258 MaybeObject* Heap::AllocateArgumentsObject(Object* callee, int length) { | 4276 MaybeObject* Heap::AllocateArgumentsObject(Object* callee, int length) { |
| 4259 // To get fast allocation and map sharing for arguments objects we | 4277 // To get fast allocation and map sharing for arguments objects we |
| 4260 // allocate them based on an arguments boilerplate. | 4278 // allocate them based on an arguments boilerplate. |
| 4261 | 4279 |
| 4262 JSObject* boilerplate; | 4280 JSObject* boilerplate; |
| 4263 int arguments_object_size; | 4281 int arguments_object_size; |
| 4264 bool strict_mode_callee = callee->IsJSFunction() && | 4282 bool strict_mode_callee = callee->IsJSFunction() && |
| 4265 !JSFunction::cast(callee)->shared()->is_classic_mode(); | 4283 JSFunction::cast(callee)->shared()->strict_mode() == STRICT; |
| 4266 if (strict_mode_callee) { | 4284 if (strict_mode_callee) { |
| 4267 boilerplate = | 4285 boilerplate = |
| 4268 isolate()->context()->native_context()-> | 4286 isolate()->context()->native_context()->strict_arguments_boilerplate(); |
| 4269 strict_mode_arguments_boilerplate(); | 4287 arguments_object_size = kStrictArgumentsObjectSize; |
| 4270 arguments_object_size = kArgumentsObjectSizeStrict; | |
| 4271 } else { | 4288 } else { |
| 4272 boilerplate = | 4289 boilerplate = |
| 4273 isolate()->context()->native_context()->arguments_boilerplate(); | 4290 isolate()->context()->native_context()->sloppy_arguments_boilerplate(); |
| 4274 arguments_object_size = kArgumentsObjectSize; | 4291 arguments_object_size = kSloppyArgumentsObjectSize; |
| 4275 } | 4292 } |
| 4276 | 4293 |
| 4277 // Check that the size of the boilerplate matches our | 4294 // Check that the size of the boilerplate matches our |
| 4278 // expectations. The ArgumentsAccessStub::GenerateNewObject relies | 4295 // expectations. The ArgumentsAccessStub::GenerateNewObject relies |
| 4279 // on the size being a known constant. | 4296 // on the size being a known constant. |
| 4280 ASSERT(arguments_object_size == boilerplate->map()->instance_size()); | 4297 ASSERT(arguments_object_size == boilerplate->map()->instance_size()); |
| 4281 | 4298 |
| 4282 // Do the allocation. | 4299 // Do the allocation. |
| 4283 Object* result; | 4300 Object* result; |
| 4284 { MaybeObject* maybe_result = | 4301 { MaybeObject* maybe_result = |
| 4285 AllocateRaw(arguments_object_size, NEW_SPACE, OLD_POINTER_SPACE); | 4302 AllocateRaw(arguments_object_size, NEW_SPACE, OLD_POINTER_SPACE); |
| 4286 if (!maybe_result->ToObject(&result)) return maybe_result; | 4303 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 4287 } | 4304 } |
| 4288 | 4305 |
| 4289 // Copy the content. The arguments boilerplate doesn't have any | 4306 // Copy the content. The arguments boilerplate doesn't have any |
| 4290 // fields that point to new space so it's safe to skip the write | 4307 // fields that point to new space so it's safe to skip the write |
| 4291 // barrier here. | 4308 // barrier here. |
| 4292 CopyBlock(HeapObject::cast(result)->address(), | 4309 CopyBlock(HeapObject::cast(result)->address(), |
| 4293 boilerplate->address(), | 4310 boilerplate->address(), |
| 4294 JSObject::kHeaderSize); | 4311 JSObject::kHeaderSize); |
| 4295 | 4312 |
| 4296 // Set the length property. | 4313 // Set the length property. |
| 4297 JSObject::cast(result)->InObjectPropertyAtPut(kArgumentsLengthIndex, | 4314 JSObject::cast(result)->InObjectPropertyAtPut(kArgumentsLengthIndex, |
| 4298 Smi::FromInt(length), | 4315 Smi::FromInt(length), |
| 4299 SKIP_WRITE_BARRIER); | 4316 SKIP_WRITE_BARRIER); |
| 4300 // Set the callee property for non-strict mode arguments object only. | 4317 // Set the callee property for sloppy mode arguments object only. |
| 4301 if (!strict_mode_callee) { | 4318 if (!strict_mode_callee) { |
| 4302 JSObject::cast(result)->InObjectPropertyAtPut(kArgumentsCalleeIndex, | 4319 JSObject::cast(result)->InObjectPropertyAtPut(kArgumentsCalleeIndex, |
| 4303 callee); | 4320 callee); |
| 4304 } | 4321 } |
| 4305 | 4322 |
| 4306 // Check the state of the object | 4323 // Check the state of the object |
| 4307 ASSERT(JSObject::cast(result)->HasFastProperties()); | 4324 ASSERT(JSObject::cast(result)->HasFastProperties()); |
| 4308 ASSERT(JSObject::cast(result)->HasFastObjectElements()); | 4325 ASSERT(JSObject::cast(result)->HasFastObjectElements()); |
| 4309 | 4326 |
| 4310 return result; | 4327 return result; |
| (...skipping 26 matching lines...) |
| 4337 ASSERT(obj->GetInternalFieldCount() == 0); | 4354 ASSERT(obj->GetInternalFieldCount() == 0); |
| 4338 filler = Heap::one_pointer_filler_map(); | 4355 filler = Heap::one_pointer_filler_map(); |
| 4339 } else { | 4356 } else { |
| 4340 filler = Heap::undefined_value(); | 4357 filler = Heap::undefined_value(); |
| 4341 } | 4358 } |
| 4342 obj->InitializeBody(map, Heap::undefined_value(), filler); | 4359 obj->InitializeBody(map, Heap::undefined_value(), filler); |
| 4343 } | 4360 } |
| 4344 | 4361 |
| 4345 | 4362 |
| 4346 MaybeObject* Heap::AllocateJSObjectFromMap( | 4363 MaybeObject* Heap::AllocateJSObjectFromMap( |
| 4347 Map* map, PretenureFlag pretenure, bool allocate_properties) { | 4364 Map* map, |
| 4365 PretenureFlag pretenure, |
| 4366 bool allocate_properties, |
| 4367 AllocationSite* allocation_site) { |
| 4348 // JSFunctions should be allocated using AllocateFunction to be | 4368 // JSFunctions should be allocated using AllocateFunction to be |
| 4349 // properly initialized. | 4369 // properly initialized. |
| 4350 ASSERT(map->instance_type() != JS_FUNCTION_TYPE); | 4370 ASSERT(map->instance_type() != JS_FUNCTION_TYPE); |
| 4351 | 4371 |
| 4352 // Both types of global objects should be allocated using | 4372 // Both types of global objects should be allocated using |
| 4353 // AllocateGlobalObject to be properly initialized. | 4373 // AllocateGlobalObject to be properly initialized. |
| 4354 ASSERT(map->instance_type() != JS_GLOBAL_OBJECT_TYPE); | 4374 ASSERT(map->instance_type() != JS_GLOBAL_OBJECT_TYPE); |
| 4355 ASSERT(map->instance_type() != JS_BUILTINS_OBJECT_TYPE); | 4375 ASSERT(map->instance_type() != JS_BUILTINS_OBJECT_TYPE); |
| 4356 | 4376 |
| 4357 // Allocate the backing storage for the properties. | 4377 // Allocate the backing storage for the properties. |
| 4358 FixedArray* properties; | 4378 FixedArray* properties; |
| 4359 if (allocate_properties) { | 4379 if (allocate_properties) { |
| 4360 int prop_size = map->InitialPropertiesLength(); | 4380 int prop_size = map->InitialPropertiesLength(); |
| 4361 ASSERT(prop_size >= 0); | 4381 ASSERT(prop_size >= 0); |
| 4362 { MaybeObject* maybe_properties = AllocateFixedArray(prop_size, pretenure); | 4382 { MaybeObject* maybe_properties = AllocateFixedArray(prop_size, pretenure); |
| 4363 if (!maybe_properties->To(&properties)) return maybe_properties; | 4383 if (!maybe_properties->To(&properties)) return maybe_properties; |
| 4364 } | 4384 } |
| 4365 } else { | 4385 } else { |
| 4366 properties = empty_fixed_array(); | 4386 properties = empty_fixed_array(); |
| 4367 } | 4387 } |
| 4368 | 4388 |
| 4369 // Allocate the JSObject. | 4389 // Allocate the JSObject. |
| 4370 int size = map->instance_size(); | 4390 int size = map->instance_size(); |
| 4371 AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, pretenure); | 4391 AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, pretenure); |
| 4372 Object* obj; | 4392 Object* obj; |
| 4373 MaybeObject* maybe_obj = Allocate(map, space); | 4393 MaybeObject* maybe_obj = Allocate(map, space, allocation_site); |
| 4374 if (!maybe_obj->To(&obj)) return maybe_obj; | 4394 if (!maybe_obj->To(&obj)) return maybe_obj; |
| 4375 | 4395 |
| 4376 // Initialize the JSObject. | 4396 // Initialize the JSObject. |
| 4377 InitializeJSObjectFromMap(JSObject::cast(obj), properties, map); | 4397 InitializeJSObjectFromMap(JSObject::cast(obj), properties, map); |
| 4378 ASSERT(JSObject::cast(obj)->HasFastElements() || | 4398 ASSERT(JSObject::cast(obj)->HasFastElements() || |
| 4379 JSObject::cast(obj)->HasExternalArrayElements()); | 4399 JSObject::cast(obj)->HasExternalArrayElements()); |
| 4380 return obj; | 4400 return obj; |
| 4381 } | 4401 } |
| 4382 | 4402 |
| 4383 | 4403 |
| 4384 MaybeObject* Heap::AllocateJSObjectFromMapWithAllocationSite( | |
| 4385 Map* map, Handle<AllocationSite> allocation_site) { | |
| 4386 // JSFunctions should be allocated using AllocateFunction to be | |
| 4387 // properly initialized. | |
| 4388 ASSERT(map->instance_type() != JS_FUNCTION_TYPE); | |
| 4389 | |
| 4390 // Both types of global objects should be allocated using | |
| 4391 // AllocateGlobalObject to be properly initialized. | |
| 4392 ASSERT(map->instance_type() != JS_GLOBAL_OBJECT_TYPE); | |
| 4393 ASSERT(map->instance_type() != JS_BUILTINS_OBJECT_TYPE); | |
| 4394 | |
| 4395 // Allocate the backing storage for the properties. | |
| 4396 int prop_size = map->InitialPropertiesLength(); | |
| 4397 ASSERT(prop_size >= 0); | |
| 4398 FixedArray* properties; | |
| 4399 { MaybeObject* maybe_properties = AllocateFixedArray(prop_size); | |
| 4400 if (!maybe_properties->To(&properties)) return maybe_properties; | |
| 4401 } | |
| 4402 | |
| 4403 // Allocate the JSObject. | |
| 4404 int size = map->instance_size(); | |
| 4405 AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, NOT_TENURED); | |
| 4406 Object* obj; | |
| 4407 MaybeObject* maybe_obj = | |
| 4408 AllocateWithAllocationSite(map, space, allocation_site); | |
| 4409 if (!maybe_obj->To(&obj)) return maybe_obj; | |
| 4410 | |
| 4411 // Initialize the JSObject. | |
| 4412 InitializeJSObjectFromMap(JSObject::cast(obj), properties, map); | |
| 4413 ASSERT(JSObject::cast(obj)->HasFastElements()); | |
| 4414 return obj; | |
| 4415 } | |
| 4416 | |
| 4417 | |
| 4418 MaybeObject* Heap::AllocateJSObject(JSFunction* constructor, | 4404 MaybeObject* Heap::AllocateJSObject(JSFunction* constructor, |
| 4419 PretenureFlag pretenure) { | 4405 PretenureFlag pretenure, |
| 4406 AllocationSite* allocation_site) { |
| 4420 ASSERT(constructor->has_initial_map()); | 4407 ASSERT(constructor->has_initial_map()); |
| 4408 |
| 4421 // Allocate the object based on the constructors initial map. | 4409 // Allocate the object based on the constructors initial map. |
| 4422 MaybeObject* result = AllocateJSObjectFromMap( | 4410 MaybeObject* result = AllocateJSObjectFromMap(constructor->initial_map(), |
| 4423 constructor->initial_map(), pretenure); | 4411 pretenure, |
| 4412 true, |
| 4413 allocation_site); |
| 4424 #ifdef DEBUG | 4414 #ifdef DEBUG |
| 4425 // Make sure result is NOT a global object if valid. | 4415 // Make sure result is NOT a global object if valid. |
| 4426 Object* non_failure; | 4416 Object* non_failure; |
| 4427 ASSERT(!result->ToObject(&non_failure) || !non_failure->IsGlobalObject()); | |
| 4428 #endif | |
| 4429 return result; | |
| 4430 } | |
| 4431 | |
| 4432 | |
| 4433 MaybeObject* Heap::AllocateJSObjectWithAllocationSite(JSFunction* constructor, | |
| 4434 Handle<AllocationSite> allocation_site) { | |
| 4435 ASSERT(constructor->has_initial_map()); | |
| 4436 // Allocate the object based on the constructors initial map, or the payload | |
| 4437 // advice | |
| 4438 Map* initial_map = constructor->initial_map(); | |
| 4439 | |
| 4440 ElementsKind to_kind = allocation_site->GetElementsKind(); | |
| 4441 AllocationSiteMode mode = TRACK_ALLOCATION_SITE; | |
| 4442 if (to_kind != initial_map->elements_kind()) { | |
| 4443 MaybeObject* maybe_new_map = initial_map->AsElementsKind(to_kind); | |
| 4444 if (!maybe_new_map->To(&initial_map)) return maybe_new_map; | |
| 4445 // Possibly alter the mode, since we found an updated elements kind | |
| 4446 // in the type info cell. | |
| 4447 mode = AllocationSite::GetMode(to_kind); | |
| 4448 } | |
| 4449 | |
| 4450 MaybeObject* result; | |
| 4451 if (mode == TRACK_ALLOCATION_SITE) { | |
| 4452 result = AllocateJSObjectFromMapWithAllocationSite(initial_map, | |
| 4453 allocation_site); | |
| 4454 } else { | |
| 4455 result = AllocateJSObjectFromMap(initial_map, NOT_TENURED); | |
| 4456 } | |
| 4457 #ifdef DEBUG | |
| 4458 // Make sure result is NOT a global object if valid. | |
| 4459 Object* non_failure; | |
| 4460 ASSERT(!result->ToObject(&non_failure) || !non_failure->IsGlobalObject()); | 4417 ASSERT(!result->ToObject(&non_failure) || !non_failure->IsGlobalObject()); |
| 4461 #endif | 4418 #endif |
| 4462 return result; | 4419 return result; |
| 4463 } | 4420 } |
| 4464 | 4421 |
| 4465 | 4422 |
| 4466 MaybeObject* Heap::AllocateJSModule(Context* context, ScopeInfo* scope_info) { | 4423 MaybeObject* Heap::AllocateJSModule(Context* context, ScopeInfo* scope_info) { |
| 4467 // Allocate a fresh map. Modules do not have a prototype. | 4424 // Allocate a fresh map. Modules do not have a prototype. |
| 4468 Map* map; | 4425 Map* map; |
| 4469 MaybeObject* maybe_map = AllocateMap(JS_MODULE_TYPE, JSModule::kSize); | 4426 MaybeObject* maybe_map = AllocateMap(JS_MODULE_TYPE, JSModule::kSize); |
| (...skipping 652 matching lines...) |
| 5122 dst->address() + FixedDoubleArray::kLengthOffset, | 5079 dst->address() + FixedDoubleArray::kLengthOffset, |
| 5123 src->address() + FixedDoubleArray::kLengthOffset, | 5080 src->address() + FixedDoubleArray::kLengthOffset, |
| 5124 FixedDoubleArray::SizeFor(len) - FixedDoubleArray::kLengthOffset); | 5081 FixedDoubleArray::SizeFor(len) - FixedDoubleArray::kLengthOffset); |
| 5125 return obj; | 5082 return obj; |
| 5126 } | 5083 } |
| 5127 | 5084 |
| 5128 | 5085 |
| 5129 MaybeObject* Heap::CopyConstantPoolArrayWithMap(ConstantPoolArray* src, | 5086 MaybeObject* Heap::CopyConstantPoolArrayWithMap(ConstantPoolArray* src, |
| 5130 Map* map) { | 5087 Map* map) { |
| 5131 int int64_entries = src->count_of_int64_entries(); | 5088 int int64_entries = src->count_of_int64_entries(); |
| 5132 int ptr_entries = src->count_of_ptr_entries(); | 5089 int code_ptr_entries = src->count_of_code_ptr_entries(); |
| 5090 int heap_ptr_entries = src->count_of_heap_ptr_entries(); |
| 5133 int int32_entries = src->count_of_int32_entries(); | 5091 int int32_entries = src->count_of_int32_entries(); |
| 5134 Object* obj; | 5092 Object* obj; |
| 5135 { MaybeObject* maybe_obj = | 5093 { MaybeObject* maybe_obj = |
| 5136 AllocateConstantPoolArray(int64_entries, ptr_entries, int32_entries); | 5094 AllocateConstantPoolArray(int64_entries, code_ptr_entries, |
| 5095 heap_ptr_entries, int32_entries); |
| 5137 if (!maybe_obj->ToObject(&obj)) return maybe_obj; | 5096 if (!maybe_obj->ToObject(&obj)) return maybe_obj; |
| 5138 } | 5097 } |
| 5139 HeapObject* dst = HeapObject::cast(obj); | 5098 HeapObject* dst = HeapObject::cast(obj); |
| 5140 dst->set_map_no_write_barrier(map); | 5099 dst->set_map_no_write_barrier(map); |
| 5100 int size = ConstantPoolArray::SizeFor( |
| 5101 int64_entries, code_ptr_entries, heap_ptr_entries, int32_entries); |
| 5141 CopyBlock( | 5102 CopyBlock( |
| 5142 dst->address() + ConstantPoolArray::kLengthOffset, | 5103 dst->address() + ConstantPoolArray::kLengthOffset, |
| 5143 src->address() + ConstantPoolArray::kLengthOffset, | 5104 src->address() + ConstantPoolArray::kLengthOffset, |
| 5144 ConstantPoolArray::SizeFor(int64_entries, ptr_entries, int32_entries) | 5105 size - ConstantPoolArray::kLengthOffset); |
| 5145 - ConstantPoolArray::kLengthOffset); | |
| 5146 return obj; | 5106 return obj; |
| 5147 } | 5107 } |
| 5148 | 5108 |
| 5149 | 5109 |
| 5150 MaybeObject* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) { | 5110 MaybeObject* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) { |
| 5151 if (length < 0 || length > FixedArray::kMaxLength) { | 5111 if (length < 0 || length > FixedArray::kMaxLength) { |
| 5152 return Failure::OutOfMemoryException(0xe); | 5112 return Failure::OutOfMemoryException(0xe); |
| 5153 } | 5113 } |
| 5154 int size = FixedArray::SizeFor(length); | 5114 int size = FixedArray::SizeFor(length); |
| 5155 AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, pretenure); | 5115 AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, pretenure); |
| (...skipping 116 matching lines...) |
| 5272 HeapObject* object; | 5232 HeapObject* object; |
| 5273 { MaybeObject* maybe_object = AllocateRaw(size, space, OLD_DATA_SPACE); | 5233 { MaybeObject* maybe_object = AllocateRaw(size, space, OLD_DATA_SPACE); |
| 5274 if (!maybe_object->To<HeapObject>(&object)) return maybe_object; | 5234 if (!maybe_object->To<HeapObject>(&object)) return maybe_object; |
| 5275 } | 5235 } |
| 5276 | 5236 |
| 5277 return EnsureDoubleAligned(this, object, size); | 5237 return EnsureDoubleAligned(this, object, size); |
| 5278 } | 5238 } |
| 5279 | 5239 |
| 5280 | 5240 |
| 5281 MaybeObject* Heap::AllocateConstantPoolArray(int number_of_int64_entries, | 5241 MaybeObject* Heap::AllocateConstantPoolArray(int number_of_int64_entries, |
| 5282 int number_of_ptr_entries, | 5242 int number_of_code_ptr_entries, |
| 5243 int number_of_heap_ptr_entries, |
| 5283 int number_of_int32_entries) { | 5244 int number_of_int32_entries) { |
| 5284 ASSERT(number_of_int64_entries > 0 || number_of_ptr_entries > 0 || | 5245 ASSERT(number_of_int64_entries > 0 || number_of_code_ptr_entries > 0 || |
| 5285 number_of_int32_entries > 0); | 5246 number_of_heap_ptr_entries > 0 || number_of_int32_entries > 0); |
| 5286 int size = ConstantPoolArray::SizeFor(number_of_int64_entries, | 5247 int size = ConstantPoolArray::SizeFor(number_of_int64_entries, |
| 5287 number_of_ptr_entries, | 5248 number_of_code_ptr_entries, |
| 5249 number_of_heap_ptr_entries, |
| 5288 number_of_int32_entries); | 5250 number_of_int32_entries); |
| 5289 #ifndef V8_HOST_ARCH_64_BIT | 5251 #ifndef V8_HOST_ARCH_64_BIT |
| 5290 size += kPointerSize; | 5252 size += kPointerSize; |
| 5291 #endif | 5253 #endif |
| 5292 AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, TENURED); | 5254 AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, TENURED); |
| 5293 | 5255 |
| 5294 HeapObject* object; | 5256 HeapObject* object; |
| 5295 { MaybeObject* maybe_object = AllocateRaw(size, space, OLD_POINTER_SPACE); | 5257 { MaybeObject* maybe_object = AllocateRaw(size, space, OLD_POINTER_SPACE); |
| 5296 if (!maybe_object->To<HeapObject>(&object)) return maybe_object; | 5258 if (!maybe_object->To<HeapObject>(&object)) return maybe_object; |
| 5297 } | 5259 } |
| 5298 object = EnsureDoubleAligned(this, object, size); | 5260 object = EnsureDoubleAligned(this, object, size); |
| 5299 HeapObject::cast(object)->set_map_no_write_barrier(constant_pool_array_map()); | 5261 HeapObject::cast(object)->set_map_no_write_barrier(constant_pool_array_map()); |
| 5300 | 5262 |
| 5301 ConstantPoolArray* constant_pool = | 5263 ConstantPoolArray* constant_pool = |
| 5302 reinterpret_cast<ConstantPoolArray*>(object); | 5264 reinterpret_cast<ConstantPoolArray*>(object); |
| 5303 constant_pool->SetEntryCounts(number_of_int64_entries, | 5265 constant_pool->SetEntryCounts(number_of_int64_entries, |
| 5304 number_of_ptr_entries, | 5266 number_of_code_ptr_entries, |
| 5267 number_of_heap_ptr_entries, |
| 5305 number_of_int32_entries); | 5268 number_of_int32_entries); |
| 5306 if (number_of_ptr_entries > 0) { | 5269 if (number_of_code_ptr_entries > 0) { |
| 5270 int offset = |
| 5271 constant_pool->OffsetOfElementAt(constant_pool->first_code_ptr_index()); |
| 5307 MemsetPointer( | 5272 MemsetPointer( |
| 5308 HeapObject::RawField( | 5273 reinterpret_cast<Address*>(HeapObject::RawField(constant_pool, offset)), |
| 5309 constant_pool, | 5274 isolate()->builtins()->builtin(Builtins::kIllegal)->entry(), |
| 5310 constant_pool->OffsetOfElementAt(constant_pool->first_ptr_index())), | 5275 number_of_code_ptr_entries); |
| 5276 } |
| 5277 if (number_of_heap_ptr_entries > 0) { |
| 5278 int offset = |
| 5279 constant_pool->OffsetOfElementAt(constant_pool->first_code_ptr_index()); |
| 5280 MemsetPointer( |
| 5281 HeapObject::RawField(constant_pool, offset), |
| 5311 undefined_value(), | 5282 undefined_value(), |
| 5312 number_of_ptr_entries); | 5283 number_of_heap_ptr_entries); |
| 5313 } | 5284 } |
| 5314 return constant_pool; | 5285 return constant_pool; |
| 5315 } | 5286 } |
| 5316 | 5287 |
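The hunk above grows ConstantPoolArray from three sections to four by splitting pointer entries into code pointers and heap pointers. A hedged sketch of how a four-section size computation could look; the header size, section order, and entry widths here are assumptions chosen for illustration, not ConstantPoolArray's actual layout.

    // All constants are local to this sketch, not V8's.
    const int kSketchInt64Size = 8;
    const int kSketchPointerSize = static_cast<int>(sizeof(void*));
    const int kSketchInt32Size = 4;
    const int kSketchHeaderSize = 16;  // assumed header, not ConstantPoolArray's real one

    // Widest entries first so each section stays naturally aligned; code and heap
    // pointer sections are the same width, so only their counts differ.
    int ConstantPoolSizeFor(int int64_entries, int code_ptr_entries,
                            int heap_ptr_entries, int int32_entries) {
      return kSketchHeaderSize +
             int64_entries * kSketchInt64Size +
             (code_ptr_entries + heap_ptr_entries) * kSketchPointerSize +
             int32_entries * kSketchInt32Size;
    }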
| 5317 | 5288 |
| 5318 MaybeObject* Heap::AllocateEmptyConstantPoolArray() { | 5289 MaybeObject* Heap::AllocateEmptyConstantPoolArray() { |
| 5319 int size = ConstantPoolArray::SizeFor(0, 0, 0); | 5290 int size = ConstantPoolArray::SizeFor(0, 0, 0, 0); |
| 5320 Object* result; | 5291 Object* result; |
| 5321 { MaybeObject* maybe_result = | 5292 { MaybeObject* maybe_result = |
| 5322 AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE); | 5293 AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE); |
| 5323 if (!maybe_result->ToObject(&result)) return maybe_result; | 5294 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 5324 } | 5295 } |
| 5325 HeapObject::cast(result)->set_map_no_write_barrier(constant_pool_array_map()); | 5296 HeapObject::cast(result)->set_map_no_write_barrier(constant_pool_array_map()); |
| 5326 ConstantPoolArray::cast(result)->SetEntryCounts(0, 0, 0); | 5297 ConstantPoolArray::cast(result)->SetEntryCounts(0, 0, 0, 0); |
| 5327 return result; | 5298 return result; |
| 5328 } | 5299 } |
| 5329 | 5300 |
| 5330 | 5301 |
| 5331 MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) { | 5302 MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) { |
| 5332 Object* result; | 5303 Object* result; |
| 5333 { MaybeObject* maybe_result = AllocateFixedArray(length, pretenure); | 5304 { MaybeObject* maybe_result = AllocateFixedArray(length, pretenure); |
| 5334 if (!maybe_result->ToObject(&result)) return maybe_result; | 5305 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 5335 } | 5306 } |
| 5336 reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier( | 5307 reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier( |
| (...skipping 2406 matching lines...) |
| 7743 static_cast<int>(object_sizes_last_time_[index])); | 7714 static_cast<int>(object_sizes_last_time_[index])); |
| 7744 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) | 7715 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) |
| 7745 #undef ADJUST_LAST_TIME_OBJECT_COUNT | 7716 #undef ADJUST_LAST_TIME_OBJECT_COUNT |
| 7746 | 7717 |
| 7747 OS::MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); | 7718 OS::MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); |
| 7748 OS::MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); | 7719 OS::MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); |
| 7749 ClearObjectStats(); | 7720 ClearObjectStats(); |
| 7750 } | 7721 } |
| 7751 | 7722 |
| 7752 } } // namespace v8::internal | 7723 } } // namespace v8::internal |