Chromium Code Reviews| Index: src/heap.cc |
| diff --git a/src/heap.cc b/src/heap.cc |
| index 8cef1aea5394f07db103f61e639695339bcb8343..3312c445bd6be5853f5ff3763fe1e3c17e39cf02 100644 |
| --- a/src/heap.cc |
| +++ b/src/heap.cc |
| @@ -3926,6 +3926,36 @@ MaybeObject* Heap::Allocate(Map* map, AllocationSpace space) { |
| } |
| +// TODO(mvstanton): consolidate this with the function above. |
| +MaybeObject* Heap::AllocateWithAllocationSiteInfo(Map* map, |
| + AllocationSpace space, |
| + Handle<Object>* allocation_site_info_payload) { |
| + ASSERT(gc_state_ == NOT_IN_GC); |
| + ASSERT(map->instance_type() != MAP_TYPE); |
| + // If allocation failures are disallowed, we may allocate in a different |
| + // space when new space is full and the object is not a large object. |
| + AllocationSpace retry_space = |
| + (space != NEW_SPACE) ? space : TargetSpaceId(map->instance_type()); |
| + Object* result; |
| + { MaybeObject* maybe_result = |
|
Toon Verwaest
2013/02/13 15:14:51
Remove {...}
mvstanton
2013/02/19 11:04:08
Done, and I consolidated this function with the one above.
|
| + AllocateRaw(map->instance_size() + AllocationSiteInfo::kSize, |
| + space, |
| + retry_space); |
| + if (!maybe_result->ToObject(&result)) return maybe_result; |
|
Toon Verwaest
2013/02/13 15:14:51
->To(&
|
| + } |
| + // No need for write barrier since object is white and map is in old space. |
| + HeapObject::cast(result)->set_map_no_write_barrier(map); |
| + Object* allocation_pointer = reinterpret_cast<Object*>( |
| + reinterpret_cast<Address>(result) + map->instance_size()); |
| + HeapObject::cast(allocation_pointer)->set_map_no_write_barrier( |
| + allocation_site_info_map()); |
| + AllocationSiteInfo* allocation_site_info = |
| + AllocationSiteInfo::cast(allocation_pointer); |
| + allocation_site_info->set_payload(**allocation_site_info_payload); |
| + return result; |
| +} |
| + |
| + |
| void Heap::InitializeFunction(JSFunction* function, |
| SharedFunctionInfo* shared, |
| Object* prototype) { |
| @@ -4161,7 +4191,10 @@ void Heap::InitializeJSObjectFromMap(JSObject* obj, |
| } |
| -MaybeObject* Heap::AllocateJSObjectFromMap(Map* map, PretenureFlag pretenure) { |
| +MaybeObject* Heap::AllocateJSObjectFromMap(Map* map, PretenureFlag pretenure, |
| + AllocationSiteMode mode, |
| + Handle<Object>* allocation_site_info_payload) { |
| + ASSERT(pretenure == NOT_TENURED || mode == DONT_TRACK_ALLOCATION_SITE); |
| // JSFunctions should be allocated using AllocateFunction to be |
| // properly initialized. |
| ASSERT(map->instance_type() != JS_FUNCTION_TYPE); |
| @@ -4187,7 +4220,14 @@ MaybeObject* Heap::AllocateJSObjectFromMap(Map* map, PretenureFlag pretenure) { |
| (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE; |
| if (map->instance_size() > Page::kMaxNonCodeHeapObjectSize) space = LO_SPACE; |
| Object* obj; |
| - { MaybeObject* maybe_obj = Allocate(map, space); |
| + { |
|
Toon Verwaest
2013/02/13 15:14:51
Remove {. ->To(&.
mvstanton
2013/02/19 11:04:08
Done.
mvstanton
2013/02/19 11:04:08
Done.
|
| + MaybeObject* maybe_obj; |
| + if (mode == TRACK_ALLOCATION_SITE) { |
| + maybe_obj = AllocateWithAllocationSiteInfo(map, space, |
| + allocation_site_info_payload); |
| + } else { |
| + maybe_obj = Allocate(map, space); |
| + } |
| if (!maybe_obj->ToObject(&obj)) return maybe_obj; |
| } |
| @@ -4201,7 +4241,9 @@ MaybeObject* Heap::AllocateJSObjectFromMap(Map* map, PretenureFlag pretenure) { |
| MaybeObject* Heap::AllocateJSObject(JSFunction* constructor, |
| - PretenureFlag pretenure) { |
| + PretenureFlag pretenure, |
| + AllocationSiteMode mode, |
| + Handle<Object>* allocation_site_info_payload) { |
| // Allocate the initial map if absent. |
| if (!constructor->has_initial_map()) { |
| Object* initial_map; |
| @@ -4211,9 +4253,30 @@ MaybeObject* Heap::AllocateJSObject(JSFunction* constructor, |
| constructor->set_initial_map(Map::cast(initial_map)); |
| Map::cast(initial_map)->set_constructor(constructor); |
| } |
| - // Allocate the object based on the constructors initial map. |
| + // Allocate the object based on the constructors initial map, or the payload |
| + // advice |
| + Map* initial_map = constructor->initial_map(); |
| + if (mode == TRACK_ALLOCATION_SITE) { |
| + ASSERT(allocation_site_info_payload != NULL); |
| + ASSERT((*allocation_site_info_payload)->IsJSGlobalPropertyCell()); |
|
Toon Verwaest
2013/02/13 15:14:51
The cast below will take care of this assert.
mvstanton
2013/02/19 11:04:08
Done.
|
| + JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast( |
| + **allocation_site_info_payload); |
| + ASSERT(cell->value()->IsSmi()); |
|
Toon Verwaest
2013/02/13 15:14:51
The cast below will take care of this assert.
mvstanton
2013/02/19 11:04:08
Done.
|
| + Smi* smi = Smi::cast(cell->value()); |
| + ElementsKind to_kind = static_cast<ElementsKind>(smi->value()); |
| + if (to_kind != initial_map->elements_kind()) { |
| + initial_map = initial_map->LookupElementsTransitionMap(to_kind); |
| + // TODO(mvstanton): I may have to allocate this transition, right? |
| + ASSERT(initial_map != NULL); |
|
Toon Verwaest
2013/02/13 15:14:51
Yeah, it seems like you should ensure that the map
mvstanton
2013/02/19 11:04:08
Done.
|
| + // constructor->set_initial_map(Map::cast(initial_map)); |
|
Toon Verwaest
2013/02/13 15:14:51
omit Map::cast.
mvstanton
2013/02/19 11:04:08
Done.
|
| + // Map::cast(initial_map)->set_constructor(constructor); |
| + mode = DONT_TRACK_ALLOCATION_SITE; |
|
Toon Verwaest
2013/02/13 15:14:51
Weird;... you do all this work to set up the initial map, and then turn tracking off?
mvstanton
2013/02/19 11:04:08
The code is trying to express that the advice was
|
| + } |
| + } |
| + |
| MaybeObject* result = AllocateJSObjectFromMap( |
| - constructor->initial_map(), pretenure); |
| + initial_map, pretenure, |
| + mode, allocation_site_info_payload); |
| #ifdef DEBUG |
| // Make sure result is NOT a global object if valid. |
| Object* non_failure; |
| @@ -4242,10 +4305,20 @@ MaybeObject* Heap::AllocateJSArrayAndStorage( |
| ElementsKind elements_kind, |
| int length, |
| int capacity, |
| + AllocationSiteMode allocation_site_info_mode, |
| + Handle<Object> *allocation_site_payload, |
| ArrayStorageAllocationMode mode, |
| PretenureFlag pretenure) { |
| ASSERT(capacity >= length); |
| - MaybeObject* maybe_array = AllocateJSArray(elements_kind, pretenure); |
| + ASSERT(allocation_site_payload != NULL || |
| + allocation_site_info_mode == DONT_TRACK_ALLOCATION_SITE); |
| + if (pretenure == TENURED && |
| + allocation_site_info_mode == TRACK_ALLOCATION_SITE) { |
| + PrintF("Sorry, can't track yet in tenured space\n"); |
|
Toon Verwaest
2013/02/13 15:14:51
Euhm... Shouldn't we overwrite allocation_site_info_mode here?
mvstanton
2013/02/19 11:04:08
I changed this to an assert that if you want pre-tenuring, allocation site tracking must be off.
|
| + } |
| + MaybeObject* maybe_array = AllocateJSArray(elements_kind, pretenure, |
| + allocation_site_info_mode, |
| + allocation_site_payload); |
| JSArray* array; |
| if (!maybe_array->To(&array)) return maybe_array; |
| @@ -4424,7 +4497,6 @@ MaybeObject* Heap::CopyJSObject(JSObject* source, |
| bool track_origin = mode == TRACK_ALLOCATION_SITE && |
| map->CanTrackAllocationSite(); |
| - |
| WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER; |
| // If we're forced to always allocate, we use the general allocation |
| @@ -4451,8 +4523,20 @@ MaybeObject* Heap::CopyJSObject(JSObject* source, |
| RecordWrites(clone_address, |
| JSObject::kHeaderSize, |
| (object_size - JSObject::kHeaderSize) / kPointerSize); |
| + |
| + // Track allocation site information |
| + if (track_origin && InNewSpace(clone)) { |
| + MaybeObject* maybe_alloc_info = |
| + AllocateStruct(ALLOCATION_SITE_INFO_TYPE); |
| + AllocationSiteInfo* alloc_info; |
| + if (maybe_alloc_info->To(&alloc_info)) { |
| + alloc_info->set_map(allocation_site_info_map()); |
| + alloc_info->set_payload(source); |
| + } |
| + } |
| } else { |
| wb_mode = SKIP_WRITE_BARRIER; |
| + |
| if (track_origin) { |
| adjusted_object_size += AllocationSiteInfo::kSize; |
| } |
| @@ -4466,6 +4550,13 @@ MaybeObject* Heap::CopyJSObject(JSObject* source, |
| CopyBlock(HeapObject::cast(clone)->address(), |
| source->address(), |
| object_size); |
| + |
| + if (track_origin) { |
| + AllocationSiteInfo* alloc_info = reinterpret_cast<AllocationSiteInfo*>( |
| + reinterpret_cast<Address>(clone) + object_size); |
| + alloc_info->set_map(allocation_site_info_map()); |
| + alloc_info->set_payload(source); |
| + } |
| } |
| if (adjusted_object_size > object_size) { |
| @@ -4885,7 +4976,9 @@ MaybeObject* Heap::AllocateRawTwoByteString(int length, |
| MaybeObject* Heap::AllocateJSArray( |
| ElementsKind elements_kind, |
| - PretenureFlag pretenure) { |
| + PretenureFlag pretenure, |
| + AllocationSiteMode mode, |
| + Handle<Object>* allocation_site_info_payload) { |
| Context* native_context = isolate()->context()->native_context(); |
| JSFunction* array_function = native_context->array_function(); |
| Map* map = array_function->initial_map(); |
| @@ -4898,7 +4991,8 @@ MaybeObject* Heap::AllocateJSArray( |
| } |
| } |
| - return AllocateJSObjectFromMap(map, pretenure); |
| + return AllocateJSObjectFromMap(map, pretenure, mode, |
| + allocation_site_info_payload); |
| } |