OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 14 matching lines...) |
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
27 | 27 |
28 #include "v8.h" | 28 #include "v8.h" |
29 | 29 |
30 #include "accessors.h" | 30 #include "accessors.h" |
31 #include "api.h" | 31 #include "api.h" |
32 #include "bootstrapper.h" | 32 #include "bootstrapper.h" |
33 #include "codegen.h" | 33 #include "codegen.h" |
34 #include "compilation-cache.h" | 34 #include "compilation-cache.h" |
| 35 #include "cpu-profiler.h" |
35 #include "debug.h" | 36 #include "debug.h" |
36 #include "deoptimizer.h" | 37 #include "deoptimizer.h" |
37 #include "global-handles.h" | 38 #include "global-handles.h" |
38 #include "heap-profiler.h" | 39 #include "heap-profiler.h" |
39 #include "incremental-marking.h" | 40 #include "incremental-marking.h" |
40 #include "mark-compact.h" | 41 #include "mark-compact.h" |
41 #include "natives.h" | 42 #include "natives.h" |
42 #include "objects-visiting.h" | 43 #include "objects-visiting.h" |
43 #include "objects-visiting-inl.h" | 44 #include "objects-visiting-inl.h" |
44 #include "once.h" | 45 #include "once.h" |
(...skipping 14 matching lines...) |
59 #endif | 60 #endif |
60 | 61 |
61 namespace v8 { | 62 namespace v8 { |
62 namespace internal { | 63 namespace internal { |
63 | 64 |
64 | 65 |
65 Heap::Heap() | 66 Heap::Heap() |
66 : isolate_(NULL), | 67 : isolate_(NULL), |
67 // semispace_size_ should be a power of 2 and old_generation_size_ should be | 68 // semispace_size_ should be a power of 2 and old_generation_size_ should be |
68 // a multiple of Page::kPageSize. | 69 // a multiple of Page::kPageSize. |
69 #if defined(V8_TARGET_ARCH_X64) | 70 #if V8_TARGET_ARCH_X64 |
70 #define LUMP_OF_MEMORY (2 * MB) | 71 #define LUMP_OF_MEMORY (2 * MB) |
71 code_range_size_(512*MB), | 72 code_range_size_(512*MB), |
72 #else | 73 #else |
73 #define LUMP_OF_MEMORY MB | 74 #define LUMP_OF_MEMORY MB |
74 code_range_size_(0), | 75 code_range_size_(0), |
75 #endif | 76 #endif |
76 #if defined(ANDROID) | 77 #if defined(ANDROID) |
77 reserved_semispace_size_(4 * Max(LUMP_OF_MEMORY, Page::kPageSize)), | 78 reserved_semispace_size_(4 * Max(LUMP_OF_MEMORY, Page::kPageSize)), |
78 max_semispace_size_(4 * Max(LUMP_OF_MEMORY, Page::kPageSize)), | 79 max_semispace_size_(4 * Max(LUMP_OF_MEMORY, Page::kPageSize)), |
79 initial_semispace_size_(Page::kPageSize), | 80 initial_semispace_size_(Page::kPageSize), |
(...skipping 2767 matching lines...) |
2847 if (!maybe_result->ToObject(&result)) return maybe_result; | 2848 if (!maybe_result->ToObject(&result)) return maybe_result; |
2848 } | 2849 } |
2849 HeapObject::cast(result)->set_map_no_write_barrier(cell_map()); | 2850 HeapObject::cast(result)->set_map_no_write_barrier(cell_map()); |
2850 Cell::cast(result)->set_value(value); | 2851 Cell::cast(result)->set_value(value); |
2851 return result; | 2852 return result; |
2852 } | 2853 } |
2853 | 2854 |
2854 | 2855 |
2855 MaybeObject* Heap::AllocatePropertyCell(Object* value) { | 2856 MaybeObject* Heap::AllocatePropertyCell(Object* value) { |
2856 Object* result; | 2857 Object* result; |
2857 { MaybeObject* maybe_result = AllocateRawPropertyCell(); | 2858 MaybeObject* maybe_result = AllocateRawPropertyCell(); |
2858 if (!maybe_result->ToObject(&result)) return maybe_result; | 2859 if (!maybe_result->ToObject(&result)) return maybe_result; |
2859 } | 2860 |
2860 HeapObject::cast(result)->set_map_no_write_barrier( | 2861 HeapObject::cast(result)->set_map_no_write_barrier( |
2861 global_property_cell_map()); | 2862 global_property_cell_map()); |
2862 PropertyCell* cell = PropertyCell::cast(result); | 2863 PropertyCell* cell = PropertyCell::cast(result); |
2863 cell->set_dependent_code(DependentCode::cast(empty_fixed_array()), | 2864 cell->set_dependent_code(DependentCode::cast(empty_fixed_array()), |
2864 SKIP_WRITE_BARRIER); | 2865 SKIP_WRITE_BARRIER); |
2865 cell->set_value(value); | 2866 cell->set_value(value); |
2866 cell->set_type(Type::None()); | 2867 cell->set_type(Type::None()); |
| 2868 maybe_result = cell->SetValueInferType(value); |
| 2869 if (maybe_result->IsFailure()) return maybe_result; |
2867 return result; | 2870 return result; |
2868 } | 2871 } |
2869 | 2872 |
2870 | 2873 |
2871 MaybeObject* Heap::AllocateBox(Object* value, PretenureFlag pretenure) { | 2874 MaybeObject* Heap::AllocateBox(Object* value, PretenureFlag pretenure) { |
2872 Box* result; | 2875 Box* result; |
2873 MaybeObject* maybe_result = AllocateStruct(BOX_TYPE); | 2876 MaybeObject* maybe_result = AllocateStruct(BOX_TYPE); |
2874 if (!maybe_result->To(&result)) return maybe_result; | 2877 if (!maybe_result->To(&result)) return maybe_result; |
2875 result->set_value(value); | 2878 result->set_value(value); |
2876 return result; | 2879 return result; |
2877 } | 2880 } |
2878 | 2881 |
2879 | 2882 |
| 2883 MaybeObject* Heap::AllocateAllocationSite() { |
| 2884 Object* result; |
| 2885 MaybeObject* maybe_result = Allocate(allocation_site_map(), |
| 2886 OLD_POINTER_SPACE); |
| 2887 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 2888 AllocationSite::cast(result)->Initialize(); |
| 2889 return result; |
| 2890 } |
| 2891 |
| 2892 |
2880 MaybeObject* Heap::CreateOddball(const char* to_string, | 2893 MaybeObject* Heap::CreateOddball(const char* to_string, |
2881 Object* to_number, | 2894 Object* to_number, |
2882 byte kind) { | 2895 byte kind) { |
2883 Object* result; | 2896 Object* result; |
2884 { MaybeObject* maybe_result = Allocate(oddball_map(), OLD_POINTER_SPACE); | 2897 { MaybeObject* maybe_result = Allocate(oddball_map(), OLD_POINTER_SPACE); |
2885 if (!maybe_result->ToObject(&result)) return maybe_result; | 2898 if (!maybe_result->ToObject(&result)) return maybe_result; |
2886 } | 2899 } |
2887 return Oddball::cast(result)->Initialize(to_string, to_number, kind); | 2900 return Oddball::cast(result)->Initialize(to_string, to_number, kind); |
2888 } | 2901 } |
2889 | 2902 |
(...skipping 608 matching lines...) |
3498 case EXTERNAL_DOUBLE_ELEMENTS: | 3511 case EXTERNAL_DOUBLE_ELEMENTS: |
3499 return kEmptyExternalDoubleArrayRootIndex; | 3512 return kEmptyExternalDoubleArrayRootIndex; |
3500 case EXTERNAL_PIXEL_ELEMENTS: | 3513 case EXTERNAL_PIXEL_ELEMENTS: |
3501 return kEmptyExternalPixelArrayRootIndex; | 3514 return kEmptyExternalPixelArrayRootIndex; |
3502 default: | 3515 default: |
3503 UNREACHABLE(); | 3516 UNREACHABLE(); |
3504 return kUndefinedValueRootIndex; | 3517 return kUndefinedValueRootIndex; |
3505 } | 3518 } |
3506 } | 3519 } |
3507 | 3520 |
| 3521 |
3508 ExternalArray* Heap::EmptyExternalArrayForMap(Map* map) { | 3522 ExternalArray* Heap::EmptyExternalArrayForMap(Map* map) { |
3509 return ExternalArray::cast( | 3523 return ExternalArray::cast( |
3510 roots_[RootIndexForEmptyExternalArray(map->elements_kind())]); | 3524 roots_[RootIndexForEmptyExternalArray(map->elements_kind())]); |
3511 } | 3525 } |
3512 | 3526 |
3513 | 3527 |
3514 | 3528 |
3515 | 3529 |
3516 MaybeObject* Heap::NumberFromDouble(double value, PretenureFlag pretenure) { | 3530 MaybeObject* Heap::NumberFromDouble(double value, PretenureFlag pretenure) { |
3517 // We need to distinguish the minus zero value and this cannot be | 3531 // We need to distinguish the minus zero value and this cannot be |
(...skipping 662 matching lines...) |
4180 #ifdef VERIFY_HEAP | 4194 #ifdef VERIFY_HEAP |
4181 if (FLAG_verify_heap) { | 4195 if (FLAG_verify_heap) { |
4182 code->Verify(); | 4196 code->Verify(); |
4183 } | 4197 } |
4184 #endif | 4198 #endif |
4185 return new_code; | 4199 return new_code; |
4186 } | 4200 } |
4187 | 4201 |
4188 | 4202 |
4189 MaybeObject* Heap::AllocateWithAllocationSite(Map* map, AllocationSpace space, | 4203 MaybeObject* Heap::AllocateWithAllocationSite(Map* map, AllocationSpace space, |
4190 Handle<Object> allocation_site_info_payload) { | 4204 Handle<AllocationSite> allocation_site) { |
4191 ASSERT(gc_state_ == NOT_IN_GC); | 4205 ASSERT(gc_state_ == NOT_IN_GC); |
4192 ASSERT(map->instance_type() != MAP_TYPE); | 4206 ASSERT(map->instance_type() != MAP_TYPE); |
4193 // If allocation failures are disallowed, we may allocate in a different | 4207 // If allocation failures are disallowed, we may allocate in a different |
4194 // space when new space is full and the object is not a large object. | 4208 // space when new space is full and the object is not a large object. |
4195 AllocationSpace retry_space = | 4209 AllocationSpace retry_space = |
4196 (space != NEW_SPACE) ? space : TargetSpaceId(map->instance_type()); | 4210 (space != NEW_SPACE) ? space : TargetSpaceId(map->instance_type()); |
4197 int size = map->instance_size() + AllocationSiteInfo::kSize; | 4211 int size = map->instance_size() + AllocationSiteInfo::kSize; |
4198 Object* result; | 4212 Object* result; |
4199 MaybeObject* maybe_result = AllocateRaw(size, space, retry_space); | 4213 MaybeObject* maybe_result = AllocateRaw(size, space, retry_space); |
4200 if (!maybe_result->ToObject(&result)) return maybe_result; | 4214 if (!maybe_result->ToObject(&result)) return maybe_result; |
4201 // No need for write barrier since object is white and map is in old space. | 4215 // No need for write barrier since object is white and map is in old space. |
4202 HeapObject::cast(result)->set_map_no_write_barrier(map); | 4216 HeapObject::cast(result)->set_map_no_write_barrier(map); |
4203 AllocationSiteInfo* alloc_info = reinterpret_cast<AllocationSiteInfo*>( | 4217 AllocationSiteInfo* alloc_info = reinterpret_cast<AllocationSiteInfo*>( |
4204 reinterpret_cast<Address>(result) + map->instance_size()); | 4218 reinterpret_cast<Address>(result) + map->instance_size()); |
4205 alloc_info->set_map_no_write_barrier(allocation_site_info_map()); | 4219 alloc_info->set_map_no_write_barrier(allocation_site_info_map()); |
4206 alloc_info->set_payload(*allocation_site_info_payload, SKIP_WRITE_BARRIER); | 4220 alloc_info->set_allocation_site(*allocation_site, SKIP_WRITE_BARRIER); |
4207 return result; | 4221 return result; |
4208 } | 4222 } |
4209 | 4223 |
4210 | 4224 |
4211 MaybeObject* Heap::Allocate(Map* map, AllocationSpace space) { | 4225 MaybeObject* Heap::Allocate(Map* map, AllocationSpace space) { |
4212 ASSERT(gc_state_ == NOT_IN_GC); | 4226 ASSERT(gc_state_ == NOT_IN_GC); |
4213 ASSERT(map->instance_type() != MAP_TYPE); | 4227 ASSERT(map->instance_type() != MAP_TYPE); |
4214 // If allocation failures are disallowed, we may allocate in a different | 4228 // If allocation failures are disallowed, we may allocate in a different |
4215 // space when new space is full and the object is not a large object. | 4229 // space when new space is full and the object is not a large object. |
4216 AllocationSpace retry_space = | 4230 AllocationSpace retry_space = |
(...skipping 238 matching lines...) |
4455 InitializeJSObjectFromMap(JSObject::cast(obj), | 4469 InitializeJSObjectFromMap(JSObject::cast(obj), |
4456 FixedArray::cast(properties), | 4470 FixedArray::cast(properties), |
4457 map); | 4471 map); |
4458 ASSERT(JSObject::cast(obj)->HasFastElements() || | 4472 ASSERT(JSObject::cast(obj)->HasFastElements() || |
4459 JSObject::cast(obj)->HasExternalArrayElements()); | 4473 JSObject::cast(obj)->HasExternalArrayElements()); |
4460 return obj; | 4474 return obj; |
4461 } | 4475 } |
4462 | 4476 |
4463 | 4477 |
4464 MaybeObject* Heap::AllocateJSObjectFromMapWithAllocationSite(Map* map, | 4478 MaybeObject* Heap::AllocateJSObjectFromMapWithAllocationSite(Map* map, |
4465 Handle<Object> allocation_site_info_payload) { | 4479 Handle<AllocationSite> allocation_site) { |
4466 // JSFunctions should be allocated using AllocateFunction to be | 4480 // JSFunctions should be allocated using AllocateFunction to be |
4467 // properly initialized. | 4481 // properly initialized. |
4468 ASSERT(map->instance_type() != JS_FUNCTION_TYPE); | 4482 ASSERT(map->instance_type() != JS_FUNCTION_TYPE); |
4469 | 4483 |
4470 // Both types of global objects should be allocated using | 4484 // Both types of global objects should be allocated using |
4471 // AllocateGlobalObject to be properly initialized. | 4485 // AllocateGlobalObject to be properly initialized. |
4472 ASSERT(map->instance_type() != JS_GLOBAL_OBJECT_TYPE); | 4486 ASSERT(map->instance_type() != JS_GLOBAL_OBJECT_TYPE); |
4473 ASSERT(map->instance_type() != JS_BUILTINS_OBJECT_TYPE); | 4487 ASSERT(map->instance_type() != JS_BUILTINS_OBJECT_TYPE); |
4474 | 4488 |
4475 // Allocate the backing storage for the properties. | 4489 // Allocate the backing storage for the properties. |
4476 int prop_size = | 4490 int prop_size = |
4477 map->pre_allocated_property_fields() + | 4491 map->pre_allocated_property_fields() + |
4478 map->unused_property_fields() - | 4492 map->unused_property_fields() - |
4479 map->inobject_properties(); | 4493 map->inobject_properties(); |
4480 ASSERT(prop_size >= 0); | 4494 ASSERT(prop_size >= 0); |
4481 Object* properties; | 4495 Object* properties; |
4482 { MaybeObject* maybe_properties = AllocateFixedArray(prop_size); | 4496 { MaybeObject* maybe_properties = AllocateFixedArray(prop_size); |
4483 if (!maybe_properties->ToObject(&properties)) return maybe_properties; | 4497 if (!maybe_properties->ToObject(&properties)) return maybe_properties; |
4484 } | 4498 } |
4485 | 4499 |
4486 // Allocate the JSObject. | 4500 // Allocate the JSObject. |
4487 AllocationSpace space = NEW_SPACE; | 4501 AllocationSpace space = NEW_SPACE; |
4488 if (map->instance_size() > Page::kMaxNonCodeHeapObjectSize) space = LO_SPACE; | 4502 if (map->instance_size() > Page::kMaxNonCodeHeapObjectSize) space = LO_SPACE; |
4489 Object* obj; | 4503 Object* obj; |
4490 MaybeObject* maybe_obj = AllocateWithAllocationSite(map, space, | 4504 MaybeObject* maybe_obj = |
4491 allocation_site_info_payload); | 4505 AllocateWithAllocationSite(map, space, allocation_site); |
4492 if (!maybe_obj->To(&obj)) return maybe_obj; | 4506 if (!maybe_obj->To(&obj)) return maybe_obj; |
4493 | 4507 |
4494 // Initialize the JSObject. | 4508 // Initialize the JSObject. |
4495 InitializeJSObjectFromMap(JSObject::cast(obj), | 4509 InitializeJSObjectFromMap(JSObject::cast(obj), |
4496 FixedArray::cast(properties), | 4510 FixedArray::cast(properties), |
4497 map); | 4511 map); |
4498 ASSERT(JSObject::cast(obj)->HasFastElements()); | 4512 ASSERT(JSObject::cast(obj)->HasFastElements()); |
4499 return obj; | 4513 return obj; |
4500 } | 4514 } |
4501 | 4515 |
(...skipping 15 matching lines...) |
4517 #ifdef DEBUG | 4531 #ifdef DEBUG |
4518 // Make sure result is NOT a global object if valid. | 4532 // Make sure result is NOT a global object if valid. |
4519 Object* non_failure; | 4533 Object* non_failure; |
4520 ASSERT(!result->ToObject(&non_failure) || !non_failure->IsGlobalObject()); | 4534 ASSERT(!result->ToObject(&non_failure) || !non_failure->IsGlobalObject()); |
4521 #endif | 4535 #endif |
4522 return result; | 4536 return result; |
4523 } | 4537 } |
4524 | 4538 |
4525 | 4539 |
4526 MaybeObject* Heap::AllocateJSObjectWithAllocationSite(JSFunction* constructor, | 4540 MaybeObject* Heap::AllocateJSObjectWithAllocationSite(JSFunction* constructor, |
4527 Handle<Object> allocation_site_info_payload) { | 4541 Handle<AllocationSite> allocation_site) { |
4528 // Allocate the initial map if absent. | 4542 // Allocate the initial map if absent. |
4529 if (!constructor->has_initial_map()) { | 4543 if (!constructor->has_initial_map()) { |
4530 Object* initial_map; | 4544 Object* initial_map; |
4531 { MaybeObject* maybe_initial_map = AllocateInitialMap(constructor); | 4545 { MaybeObject* maybe_initial_map = AllocateInitialMap(constructor); |
4532 if (!maybe_initial_map->ToObject(&initial_map)) return maybe_initial_map; | 4546 if (!maybe_initial_map->ToObject(&initial_map)) return maybe_initial_map; |
4533 } | 4547 } |
4534 constructor->set_initial_map(Map::cast(initial_map)); | 4548 constructor->set_initial_map(Map::cast(initial_map)); |
4535 Map::cast(initial_map)->set_constructor(constructor); | 4549 Map::cast(initial_map)->set_constructor(constructor); |
4536 } | 4550 } |
4537 // Allocate the object based on the constructors initial map, or the payload | 4551 // Allocate the object based on the constructors initial map, or the payload |
4538 // advice | 4552 // advice |
4539 Map* initial_map = constructor->initial_map(); | 4553 Map* initial_map = constructor->initial_map(); |
4540 | 4554 |
4541 Cell* cell = Cell::cast(*allocation_site_info_payload); | 4555 Smi* smi = Smi::cast(allocation_site->payload()); |
4542 Smi* smi = Smi::cast(cell->value()); | |
4543 ElementsKind to_kind = static_cast<ElementsKind>(smi->value()); | 4556 ElementsKind to_kind = static_cast<ElementsKind>(smi->value()); |
4544 AllocationSiteMode mode = TRACK_ALLOCATION_SITE; | 4557 AllocationSiteMode mode = TRACK_ALLOCATION_SITE; |
4545 if (to_kind != initial_map->elements_kind()) { | 4558 if (to_kind != initial_map->elements_kind()) { |
4546 MaybeObject* maybe_new_map = initial_map->AsElementsKind(to_kind); | 4559 MaybeObject* maybe_new_map = initial_map->AsElementsKind(to_kind); |
4547 if (!maybe_new_map->To(&initial_map)) return maybe_new_map; | 4560 if (!maybe_new_map->To(&initial_map)) return maybe_new_map; |
4548 // Possibly alter the mode, since we found an updated elements kind | 4561 // Possibly alter the mode, since we found an updated elements kind |
4549 // in the type info cell. | 4562 // in the type info cell. |
4550 mode = AllocationSiteInfo::GetMode(to_kind); | 4563 mode = AllocationSite::GetMode(to_kind); |
4551 } | 4564 } |
4552 | 4565 |
4553 MaybeObject* result; | 4566 MaybeObject* result; |
4554 if (mode == TRACK_ALLOCATION_SITE) { | 4567 if (mode == TRACK_ALLOCATION_SITE) { |
4555 result = AllocateJSObjectFromMapWithAllocationSite(initial_map, | 4568 result = AllocateJSObjectFromMapWithAllocationSite(initial_map, |
4556 allocation_site_info_payload); | 4569 allocation_site); |
4557 } else { | 4570 } else { |
4558 result = AllocateJSObjectFromMap(initial_map, NOT_TENURED); | 4571 result = AllocateJSObjectFromMap(initial_map, NOT_TENURED); |
4559 } | 4572 } |
4560 #ifdef DEBUG | 4573 #ifdef DEBUG |
4561 // Make sure result is NOT a global object if valid. | 4574 // Make sure result is NOT a global object if valid. |
4562 Object* non_failure; | 4575 Object* non_failure; |
4563 ASSERT(!result->ToObject(&non_failure) || !non_failure->IsGlobalObject()); | 4576 ASSERT(!result->ToObject(&non_failure) || !non_failure->IsGlobalObject()); |
4564 #endif | 4577 #endif |
4565 return result; | 4578 return result; |
4566 } | 4579 } |
(...skipping 74 matching lines...) |
4641 array->set_elements(elms); | 4654 array->set_elements(elms); |
4642 array->set_length(Smi::FromInt(length)); | 4655 array->set_length(Smi::FromInt(length)); |
4643 return array; | 4656 return array; |
4644 } | 4657 } |
4645 | 4658 |
4646 | 4659 |
4647 MaybeObject* Heap::AllocateJSArrayAndStorageWithAllocationSite( | 4660 MaybeObject* Heap::AllocateJSArrayAndStorageWithAllocationSite( |
4648 ElementsKind elements_kind, | 4661 ElementsKind elements_kind, |
4649 int length, | 4662 int length, |
4650 int capacity, | 4663 int capacity, |
4651 Handle<Object> allocation_site_payload, | 4664 Handle<AllocationSite> allocation_site, |
4652 ArrayStorageAllocationMode mode) { | 4665 ArrayStorageAllocationMode mode) { |
4653 MaybeObject* maybe_array = AllocateJSArrayWithAllocationSite(elements_kind, | 4666 MaybeObject* maybe_array = AllocateJSArrayWithAllocationSite(elements_kind, |
4654 allocation_site_payload); | 4667 allocation_site); |
4655 JSArray* array; | 4668 JSArray* array; |
4656 if (!maybe_array->To(&array)) return maybe_array; | 4669 if (!maybe_array->To(&array)) return maybe_array; |
4657 return AllocateJSArrayStorage(array, length, capacity, mode); | 4670 return AllocateJSArrayStorage(array, length, capacity, mode); |
4658 } | 4671 } |
4659 | 4672 |
4660 | 4673 |
4661 MaybeObject* Heap::AllocateJSArrayStorage( | 4674 MaybeObject* Heap::AllocateJSArrayStorage( |
4662 JSArray* array, | 4675 JSArray* array, |
4663 int length, | 4676 int length, |
4664 int capacity, | 4677 int capacity, |
(...skipping 228 matching lines...) |
4893 { MaybeObject* maybe_prop = CopyFixedArray(properties); | 4906 { MaybeObject* maybe_prop = CopyFixedArray(properties); |
4894 if (!maybe_prop->ToObject(&prop)) return maybe_prop; | 4907 if (!maybe_prop->ToObject(&prop)) return maybe_prop; |
4895 } | 4908 } |
4896 JSObject::cast(clone)->set_properties(FixedArray::cast(prop), wb_mode); | 4909 JSObject::cast(clone)->set_properties(FixedArray::cast(prop), wb_mode); |
4897 } | 4910 } |
4898 // Return the new clone. | 4911 // Return the new clone. |
4899 return clone; | 4912 return clone; |
4900 } | 4913 } |
4901 | 4914 |
4902 | 4915 |
4903 MaybeObject* Heap::CopyJSObjectWithAllocationSite(JSObject* source) { | 4916 MaybeObject* Heap::CopyJSObjectWithAllocationSite( |
| 4917 JSObject* source, |
| 4918 AllocationSite* site) { |
4904 // Never used to copy functions. If functions need to be copied we | 4919 // Never used to copy functions. If functions need to be copied we |
4905 // have to be careful to clear the literals array. | 4920 // have to be careful to clear the literals array. |
4906 SLOW_ASSERT(!source->IsJSFunction()); | 4921 SLOW_ASSERT(!source->IsJSFunction()); |
4907 | 4922 |
4908 // Make the clone. | 4923 // Make the clone. |
4909 Map* map = source->map(); | 4924 Map* map = source->map(); |
4910 int object_size = map->instance_size(); | 4925 int object_size = map->instance_size(); |
4911 Object* clone; | 4926 Object* clone; |
4912 | 4927 |
4913 ASSERT(map->CanTrackAllocationSite()); | 4928 ASSERT(map->CanTrackAllocationSite()); |
(...skipping 29 matching lines...) |
4943 } | 4958 } |
4944 | 4959 |
4945 // Track allocation site information, if we failed to allocate it inline. | 4960 // Track allocation site information, if we failed to allocate it inline. |
4946 if (InNewSpace(clone) && | 4961 if (InNewSpace(clone) && |
4947 adjusted_object_size == object_size) { | 4962 adjusted_object_size == object_size) { |
4948 MaybeObject* maybe_alloc_info = | 4963 MaybeObject* maybe_alloc_info = |
4949 AllocateStruct(ALLOCATION_SITE_INFO_TYPE); | 4964 AllocateStruct(ALLOCATION_SITE_INFO_TYPE); |
4950 AllocationSiteInfo* alloc_info; | 4965 AllocationSiteInfo* alloc_info; |
4951 if (maybe_alloc_info->To(&alloc_info)) { | 4966 if (maybe_alloc_info->To(&alloc_info)) { |
4952 alloc_info->set_map_no_write_barrier(allocation_site_info_map()); | 4967 alloc_info->set_map_no_write_barrier(allocation_site_info_map()); |
4953 alloc_info->set_payload(source, SKIP_WRITE_BARRIER); | 4968 alloc_info->set_allocation_site(site, SKIP_WRITE_BARRIER); |
4954 } | 4969 } |
4955 } | 4970 } |
4956 } else { | 4971 } else { |
4957 wb_mode = SKIP_WRITE_BARRIER; | 4972 wb_mode = SKIP_WRITE_BARRIER; |
4958 adjusted_object_size += AllocationSiteInfo::kSize; | 4973 adjusted_object_size += AllocationSiteInfo::kSize; |
4959 | 4974 |
4960 { MaybeObject* maybe_clone = new_space_.AllocateRaw(adjusted_object_size); | 4975 { MaybeObject* maybe_clone = new_space_.AllocateRaw(adjusted_object_size); |
4961 if (!maybe_clone->ToObject(&clone)) return maybe_clone; | 4976 if (!maybe_clone->ToObject(&clone)) return maybe_clone; |
4962 } | 4977 } |
4963 SLOW_ASSERT(InNewSpace(clone)); | 4978 SLOW_ASSERT(InNewSpace(clone)); |
4964 // Since we know the clone is allocated in new space, we can copy | 4979 // Since we know the clone is allocated in new space, we can copy |
4965 // the contents without worrying about updating the write barrier. | 4980 // the contents without worrying about updating the write barrier. |
4966 CopyBlock(HeapObject::cast(clone)->address(), | 4981 CopyBlock(HeapObject::cast(clone)->address(), |
4967 source->address(), | 4982 source->address(), |
4968 object_size); | 4983 object_size); |
4969 } | 4984 } |
4970 | 4985 |
4971 if (adjusted_object_size > object_size) { | 4986 if (adjusted_object_size > object_size) { |
4972 AllocationSiteInfo* alloc_info = reinterpret_cast<AllocationSiteInfo*>( | 4987 AllocationSiteInfo* alloc_info = reinterpret_cast<AllocationSiteInfo*>( |
4973 reinterpret_cast<Address>(clone) + object_size); | 4988 reinterpret_cast<Address>(clone) + object_size); |
4974 alloc_info->set_map_no_write_barrier(allocation_site_info_map()); | 4989 alloc_info->set_map_no_write_barrier(allocation_site_info_map()); |
4975 alloc_info->set_payload(source, SKIP_WRITE_BARRIER); | 4990 alloc_info->set_allocation_site(site, SKIP_WRITE_BARRIER); |
4976 } | 4991 } |
4977 | 4992 |
4978 SLOW_ASSERT( | 4993 SLOW_ASSERT( |
4979 JSObject::cast(clone)->GetElementsKind() == source->GetElementsKind()); | 4994 JSObject::cast(clone)->GetElementsKind() == source->GetElementsKind()); |
4980 FixedArrayBase* elements = FixedArrayBase::cast(source->elements()); | 4995 FixedArrayBase* elements = FixedArrayBase::cast(source->elements()); |
4981 FixedArray* properties = FixedArray::cast(source->properties()); | 4996 FixedArray* properties = FixedArray::cast(source->properties()); |
4982 // Update elements if necessary. | 4997 // Update elements if necessary. |
4983 if (elements->length() > 0) { | 4998 if (elements->length() > 0) { |
4984 Object* elem; | 4999 Object* elem; |
4985 { MaybeObject* maybe_elem; | 5000 { MaybeObject* maybe_elem; |
(...skipping 242 matching lines...) |
5228 ASSERT(stream_length == 0); | 5243 ASSERT(stream_length == 0); |
5229 ASSERT(len == 0); | 5244 ASSERT(len == 0); |
5230 } | 5245 } |
5231 | 5246 |
5232 | 5247 |
5233 static inline void WriteOneByteData(String* s, uint8_t* chars, int len) { | 5248 static inline void WriteOneByteData(String* s, uint8_t* chars, int len) { |
5234 ASSERT(s->length() == len); | 5249 ASSERT(s->length() == len); |
5235 String::WriteToFlat(s, chars, 0, len); | 5250 String::WriteToFlat(s, chars, 0, len); |
5236 } | 5251 } |
5237 | 5252 |
| 5253 |
5238 static inline void WriteTwoByteData(String* s, uint16_t* chars, int len) { | 5254 static inline void WriteTwoByteData(String* s, uint16_t* chars, int len) { |
5239 ASSERT(s->length() == len); | 5255 ASSERT(s->length() == len); |
5240 String::WriteToFlat(s, chars, 0, len); | 5256 String::WriteToFlat(s, chars, 0, len); |
5241 } | 5257 } |
5242 | 5258 |
5243 | 5259 |
5244 template<bool is_one_byte, typename T> | 5260 template<bool is_one_byte, typename T> |
5245 MaybeObject* Heap::AllocateInternalizedStringImpl( | 5261 MaybeObject* Heap::AllocateInternalizedStringImpl( |
5246 T t, int chars, uint32_t hash_field) { | 5262 T t, int chars, uint32_t hash_field) { |
5247 ASSERT(chars >= 0); | 5263 ASSERT(chars >= 0); |
(...skipping 133 matching lines...) |
5381 JSFunction* array_function = native_context->array_function(); | 5397 JSFunction* array_function = native_context->array_function(); |
5382 Map* map = array_function->initial_map(); | 5398 Map* map = array_function->initial_map(); |
5383 Map* transition_map = isolate()->get_initial_js_array_map(elements_kind); | 5399 Map* transition_map = isolate()->get_initial_js_array_map(elements_kind); |
5384 if (transition_map != NULL) map = transition_map; | 5400 if (transition_map != NULL) map = transition_map; |
5385 return AllocateJSObjectFromMap(map, pretenure); | 5401 return AllocateJSObjectFromMap(map, pretenure); |
5386 } | 5402 } |
5387 | 5403 |
5388 | 5404 |
5389 MaybeObject* Heap::AllocateJSArrayWithAllocationSite( | 5405 MaybeObject* Heap::AllocateJSArrayWithAllocationSite( |
5390 ElementsKind elements_kind, | 5406 ElementsKind elements_kind, |
5391 Handle<Object> allocation_site_info_payload) { | 5407 Handle<AllocationSite> allocation_site) { |
5392 Context* native_context = isolate()->context()->native_context(); | 5408 Context* native_context = isolate()->context()->native_context(); |
5393 JSFunction* array_function = native_context->array_function(); | 5409 JSFunction* array_function = native_context->array_function(); |
5394 Map* map = array_function->initial_map(); | 5410 Map* map = array_function->initial_map(); |
5395 Object* maybe_map_array = native_context->js_array_maps(); | 5411 Object* maybe_map_array = native_context->js_array_maps(); |
5396 if (!maybe_map_array->IsUndefined()) { | 5412 if (!maybe_map_array->IsUndefined()) { |
5397 Object* maybe_transitioned_map = | 5413 Object* maybe_transitioned_map = |
5398 FixedArray::cast(maybe_map_array)->get(elements_kind); | 5414 FixedArray::cast(maybe_map_array)->get(elements_kind); |
5399 if (!maybe_transitioned_map->IsUndefined()) { | 5415 if (!maybe_transitioned_map->IsUndefined()) { |
5400 map = Map::cast(maybe_transitioned_map); | 5416 map = Map::cast(maybe_transitioned_map); |
5401 } | 5417 } |
5402 } | 5418 } |
5403 return AllocateJSObjectFromMapWithAllocationSite(map, | 5419 return AllocateJSObjectFromMapWithAllocationSite(map, allocation_site); |
5404 allocation_site_info_payload); | |
5405 } | 5420 } |
5406 | 5421 |
5407 | 5422 |
5408 MaybeObject* Heap::AllocateEmptyFixedArray() { | 5423 MaybeObject* Heap::AllocateEmptyFixedArray() { |
5409 int size = FixedArray::SizeFor(0); | 5424 int size = FixedArray::SizeFor(0); |
5410 Object* result; | 5425 Object* result; |
5411 { MaybeObject* maybe_result = | 5426 { MaybeObject* maybe_result = |
5412 AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE); | 5427 AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE); |
5413 if (!maybe_result->ToObject(&result)) return maybe_result; | 5428 if (!maybe_result->ToObject(&result)) return maybe_result; |
5414 } | 5429 } |
5415 // Initialize the object. | 5430 // Initialize the object. |
5416 reinterpret_cast<FixedArray*>(result)->set_map_no_write_barrier( | 5431 reinterpret_cast<FixedArray*>(result)->set_map_no_write_barrier( |
5417 fixed_array_map()); | 5432 fixed_array_map()); |
5418 reinterpret_cast<FixedArray*>(result)->set_length(0); | 5433 reinterpret_cast<FixedArray*>(result)->set_length(0); |
5419 return result; | 5434 return result; |
5420 } | 5435 } |
5421 | 5436 |
| 5437 |
5422 MaybeObject* Heap::AllocateEmptyExternalArray(ExternalArrayType array_type) { | 5438 MaybeObject* Heap::AllocateEmptyExternalArray(ExternalArrayType array_type) { |
5423 return AllocateExternalArray(0, array_type, NULL, TENURED); | 5439 return AllocateExternalArray(0, array_type, NULL, TENURED); |
5424 } | 5440 } |
5425 | 5441 |
5426 | 5442 |
5427 MaybeObject* Heap::AllocateRawFixedArray(int length) { | 5443 MaybeObject* Heap::AllocateRawFixedArray(int length) { |
5428 if (length < 0 || length > FixedArray::kMaxLength) { | 5444 if (length < 0 || length > FixedArray::kMaxLength) { |
5429 return Failure::OutOfMemoryException(0xd); | 5445 return Failure::OutOfMemoryException(0xd); |
5430 } | 5446 } |
5431 ASSERT(length > 0); | 5447 ASSERT(length > 0); |
(...skipping 1316 matching lines...) |
6748 | 6764 |
6749 | 6765 |
6750 V8_DECLARE_ONCE(initialize_gc_once); | 6766 V8_DECLARE_ONCE(initialize_gc_once); |
6751 | 6767 |
6752 static void InitializeGCOnce() { | 6768 static void InitializeGCOnce() { |
6753 InitializeScavengingVisitorsTables(); | 6769 InitializeScavengingVisitorsTables(); |
6754 NewSpaceScavenger::Initialize(); | 6770 NewSpaceScavenger::Initialize(); |
6755 MarkCompactCollector::Initialize(); | 6771 MarkCompactCollector::Initialize(); |
6756 } | 6772 } |
6757 | 6773 |
| 6774 |
6758 bool Heap::SetUp() { | 6775 bool Heap::SetUp() { |
6759 #ifdef DEBUG | 6776 #ifdef DEBUG |
6760 allocation_timeout_ = FLAG_gc_interval; | 6777 allocation_timeout_ = FLAG_gc_interval; |
6761 #endif | 6778 #endif |
6762 | 6779 |
6763 // Initialize heap spaces and initial maps and objects. Whenever something | 6780 // Initialize heap spaces and initial maps and objects. Whenever something |
6764 // goes wrong, just return false. The caller should check the results and | 6781 // goes wrong, just return false. The caller should check the results and |
6765 // call Heap::TearDown() to release allocated memory. | 6782 // call Heap::TearDown() to release allocated memory. |
6766 // | 6783 // |
6767 // If the heap is not yet configured (e.g. through the API), configure it. | 6784 // If the heap is not yet configured (e.g. through the API), configure it. |
(...skipping 90 matching lines...) |
6858 store_buffer()->SetUp(); | 6875 store_buffer()->SetUp(); |
6859 | 6876 |
6860 if (FLAG_parallel_recompilation) relocation_mutex_ = OS::CreateMutex(); | 6877 if (FLAG_parallel_recompilation) relocation_mutex_ = OS::CreateMutex(); |
6861 #ifdef DEBUG | 6878 #ifdef DEBUG |
6862 relocation_mutex_locked_by_optimizer_thread_ = false; | 6879 relocation_mutex_locked_by_optimizer_thread_ = false; |
6863 #endif // DEBUG | 6880 #endif // DEBUG |
6864 | 6881 |
6865 return true; | 6882 return true; |
6866 } | 6883 } |
6867 | 6884 |
| 6885 |
6868 bool Heap::CreateHeapObjects() { | 6886 bool Heap::CreateHeapObjects() { |
6869 // Create initial maps. | 6887 // Create initial maps. |
6870 if (!CreateInitialMaps()) return false; | 6888 if (!CreateInitialMaps()) return false; |
6871 if (!CreateApiObjects()) return false; | 6889 if (!CreateApiObjects()) return false; |
6872 | 6890 |
6873 // Create initial objects | 6891 // Create initial objects |
6874 if (!CreateInitialObjects()) return false; | 6892 if (!CreateInitialObjects()) return false; |
6875 | 6893 |
6876 native_contexts_list_ = undefined_value(); | 6894 native_contexts_list_ = undefined_value(); |
6877 array_buffers_list_ = undefined_value(); | 6895 array_buffers_list_ = undefined_value(); |
(...skipping 145 matching lines...) |
7023 class PrintHandleVisitor: public ObjectVisitor { | 7041 class PrintHandleVisitor: public ObjectVisitor { |
7024 public: | 7042 public: |
7025 void VisitPointers(Object** start, Object** end) { | 7043 void VisitPointers(Object** start, Object** end) { |
7026 for (Object** p = start; p < end; p++) | 7044 for (Object** p = start; p < end; p++) |
7027 PrintF(" handle %p to %p\n", | 7045 PrintF(" handle %p to %p\n", |
7028 reinterpret_cast<void*>(p), | 7046 reinterpret_cast<void*>(p), |
7029 reinterpret_cast<void*>(*p)); | 7047 reinterpret_cast<void*>(*p)); |
7030 } | 7048 } |
7031 }; | 7049 }; |
7032 | 7050 |
| 7051 |
7033 void Heap::PrintHandles() { | 7052 void Heap::PrintHandles() { |
7034 PrintF("Handles:\n"); | 7053 PrintF("Handles:\n"); |
7035 PrintHandleVisitor v; | 7054 PrintHandleVisitor v; |
7036 isolate_->handle_scope_implementer()->Iterate(&v); | 7055 isolate_->handle_scope_implementer()->Iterate(&v); |
7037 } | 7056 } |
7038 | 7057 |
7039 #endif | 7058 #endif |
7040 | 7059 |
7041 | 7060 |
7042 Space* AllSpaces::next() { | 7061 Space* AllSpaces::next() { |
(...skipping 1077 matching lines...) |
8120 if (FLAG_parallel_recompilation) { | 8139 if (FLAG_parallel_recompilation) { |
8121 heap_->relocation_mutex_->Lock(); | 8140 heap_->relocation_mutex_->Lock(); |
8122 #ifdef DEBUG | 8141 #ifdef DEBUG |
8123 heap_->relocation_mutex_locked_by_optimizer_thread_ = | 8142 heap_->relocation_mutex_locked_by_optimizer_thread_ = |
8124 heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread(); | 8143 heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread(); |
8125 #endif // DEBUG | 8144 #endif // DEBUG |
8126 } | 8145 } |
8127 } | 8146 } |
8128 | 8147 |
8129 } } // namespace v8::internal | 8148 } } // namespace v8::internal |