Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 3908 matching lines...) | |
| 3919 { MaybeObject* maybe_result = | 3919 { MaybeObject* maybe_result = |
| 3920 AllocateRaw(map->instance_size(), space, retry_space); | 3920 AllocateRaw(map->instance_size(), space, retry_space); |
| 3921 if (!maybe_result->ToObject(&result)) return maybe_result; | 3921 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 3922 } | 3922 } |
| 3923 // No need for write barrier since object is white and map is in old space. | 3923 // No need for write barrier since object is white and map is in old space. |
| 3924 HeapObject::cast(result)->set_map_no_write_barrier(map); | 3924 HeapObject::cast(result)->set_map_no_write_barrier(map); |
| 3925 return result; | 3925 return result; |
| 3926 } | 3926 } |
| 3927 | 3927 |
| 3928 | 3928 |
| 3929 // TODO(mvstanton): consolidate this with the function above. | |
| 3930 MaybeObject* Heap::AllocateWithAllocationSiteInfo(Map* map, | |
| 3931 AllocationSpace space, | |
| 3932 Handle<Object>* allocation_site_info_payload) { | |
| 3933 ASSERT(gc_state_ == NOT_IN_GC); | |
| 3934 ASSERT(map->instance_type() != MAP_TYPE); | |
| 3935 // If allocation failures are disallowed, we may allocate in a different | |
| 3936 // space when new space is full and the object is not a large object. | |
| 3937 AllocationSpace retry_space = | |
| 3938 (space != NEW_SPACE) ? space : TargetSpaceId(map->instance_type()); | |
| 3939 Object* result; | |
| 3940 { MaybeObject* maybe_result = | |
|
Toon Verwaest
2013/02/13 15:14:51
Remove {...}
mvstanton
2013/02/19 11:04:08
Done, and I consolidated this function with the one above.
| |
| 3941 AllocateRaw(map->instance_size() + AllocationSiteInfo::kSize, | |
| 3942 space, | |
| 3943 retry_space); | |
| 3944 if (!maybe_result->ToObject(&result)) return maybe_result; | |
|
Toon Verwaest
2013/02/13 15:14:51
->To(&
| |
| 3945 } | |
| 3946 // No need for write barrier since object is white and map is in old space. | |
| 3947 HeapObject::cast(result)->set_map_no_write_barrier(map); | |
| 3948 Object* allocation_pointer = reinterpret_cast<Object*>( | |
| 3949 reinterpret_cast<Address>(result) + map->instance_size()); | |
| 3950 HeapObject::cast(allocation_pointer)->set_map_no_write_barrier( | |
| 3951 allocation_site_info_map()); | |
| 3952 AllocationSiteInfo* allocation_site_info = | |
| 3953 AllocationSiteInfo::cast(allocation_pointer); | |
| 3954 allocation_site_info->set_payload(**allocation_site_info_payload); | |
| 3955 return result; | |
| 3956 } | |
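
For context on the new function above: a single raw allocation carries both the object and its AllocationSiteInfo trailer, and the trailer is located by plain address arithmetic. A minimal standalone sketch of that layout, with FakeSiteInfo and AllocateWithTrailer as hypothetical stand-ins for the real V8 types and allocator:

```cpp
#include <cstddef>
#include <cstdint>
#include <cstdlib>

// Hypothetical stand-in for V8's AllocationSiteInfo; the real type lives in
// the V8 heap and is not used here.
struct FakeSiteInfo { void* map; void* payload; };

// One raw allocation holds the object followed immediately by its trailer,
// mirroring AllocateRaw(map->instance_size() + AllocationSiteInfo::kSize, ...).
void* AllocateWithTrailer(size_t instance_size, void* payload) {
  char* raw = static_cast<char*>(malloc(instance_size + sizeof(FakeSiteInfo)));
  if (raw == nullptr) return nullptr;  // models the MaybeObject failure path
  // The trailer starts exactly instance_size bytes past the object's start,
  // just as the patch computes result + map->instance_size().
  FakeSiteInfo* info = reinterpret_cast<FakeSiteInfo*>(raw + instance_size);
  info->map = nullptr;      // the real code installs allocation_site_info_map()
  info->payload = payload;  // set_payload(**allocation_site_info_payload)
  return raw;
}
```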
| 3957 | |
| 3958 | |
| 3929 void Heap::InitializeFunction(JSFunction* function, | 3959 void Heap::InitializeFunction(JSFunction* function, |
| 3930 SharedFunctionInfo* shared, | 3960 SharedFunctionInfo* shared, |
| 3931 Object* prototype) { | 3961 Object* prototype) { |
| 3932 ASSERT(!prototype->IsMap()); | 3962 ASSERT(!prototype->IsMap()); |
| 3933 function->initialize_properties(); | 3963 function->initialize_properties(); |
| 3934 function->initialize_elements(); | 3964 function->initialize_elements(); |
| 3935 function->set_shared(shared); | 3965 function->set_shared(shared); |
| 3936 function->set_code(shared->code()); | 3966 function->set_code(shared->code()); |
| 3937 function->set_prototype_or_initial_map(prototype); | 3967 function->set_prototype_or_initial_map(prototype); |
| 3938 function->set_context(undefined_value()); | 3968 function->set_context(undefined_value()); |
| (...skipping 215 matching lines...) | |
| 4154 // We might want to shrink the object later. | 4184 // We might want to shrink the object later. |
| 4155 ASSERT(obj->GetInternalFieldCount() == 0); | 4185 ASSERT(obj->GetInternalFieldCount() == 0); |
| 4156 filler = Heap::one_pointer_filler_map(); | 4186 filler = Heap::one_pointer_filler_map(); |
| 4157 } else { | 4187 } else { |
| 4158 filler = Heap::undefined_value(); | 4188 filler = Heap::undefined_value(); |
| 4159 } | 4189 } |
| 4160 obj->InitializeBody(map, Heap::undefined_value(), filler); | 4190 obj->InitializeBody(map, Heap::undefined_value(), filler); |
| 4161 } | 4191 } |
| 4162 | 4192 |
| 4163 | 4193 |
| 4164 MaybeObject* Heap::AllocateJSObjectFromMap(Map* map, PretenureFlag pretenure) { | 4194 MaybeObject* Heap::AllocateJSObjectFromMap(Map* map, PretenureFlag pretenure, |
| 4195 AllocationSiteMode mode, | |
| 4196 Handle<Object>* allocation_site_info_payload) { | |
| 4197 ASSERT(pretenure == NOT_TENURED || mode == DONT_TRACK_ALLOCATION_SITE); | |
| 4165 // JSFunctions should be allocated using AllocateFunction to be | 4198 // JSFunctions should be allocated using AllocateFunction to be |
| 4166 // properly initialized. | 4199 // properly initialized. |
| 4167 ASSERT(map->instance_type() != JS_FUNCTION_TYPE); | 4200 ASSERT(map->instance_type() != JS_FUNCTION_TYPE); |
| 4168 | 4201 |
| 4169 // Both types of global objects should be allocated using | 4202 // Both types of global objects should be allocated using |
| 4170 // AllocateGlobalObject to be properly initialized. | 4203 // AllocateGlobalObject to be properly initialized. |
| 4171 ASSERT(map->instance_type() != JS_GLOBAL_OBJECT_TYPE); | 4204 ASSERT(map->instance_type() != JS_GLOBAL_OBJECT_TYPE); |
| 4172 ASSERT(map->instance_type() != JS_BUILTINS_OBJECT_TYPE); | 4205 ASSERT(map->instance_type() != JS_BUILTINS_OBJECT_TYPE); |
| 4173 | 4206 |
| 4174 // Allocate the backing storage for the properties. | 4207 // Allocate the backing storage for the properties. |
| 4175 int prop_size = | 4208 int prop_size = |
| 4176 map->pre_allocated_property_fields() + | 4209 map->pre_allocated_property_fields() + |
| 4177 map->unused_property_fields() - | 4210 map->unused_property_fields() - |
| 4178 map->inobject_properties(); | 4211 map->inobject_properties(); |
| 4179 ASSERT(prop_size >= 0); | 4212 ASSERT(prop_size >= 0); |
| 4180 Object* properties; | 4213 Object* properties; |
| 4181 { MaybeObject* maybe_properties = AllocateFixedArray(prop_size, pretenure); | 4214 { MaybeObject* maybe_properties = AllocateFixedArray(prop_size, pretenure); |
| 4182 if (!maybe_properties->ToObject(&properties)) return maybe_properties; | 4215 if (!maybe_properties->ToObject(&properties)) return maybe_properties; |
| 4183 } | 4216 } |
| 4184 | 4217 |
| 4185 // Allocate the JSObject. | 4218 // Allocate the JSObject. |
| 4186 AllocationSpace space = | 4219 AllocationSpace space = |
| 4187 (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE; | 4220 (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE; |
| 4188 if (map->instance_size() > Page::kMaxNonCodeHeapObjectSize) space = LO_SPACE; | 4221 if (map->instance_size() > Page::kMaxNonCodeHeapObjectSize) space = LO_SPACE; |
| 4189 Object* obj; | 4222 Object* obj; |
| 4190 { MaybeObject* maybe_obj = Allocate(map, space); | 4223 { |
|
Toon Verwaest
2013/02/13 15:14:51
Remove {. ->To(&.
mvstanton
2013/02/19 11:04:08
Done.
| |
| 4224 MaybeObject* maybe_obj; | |
| 4225 if (mode == TRACK_ALLOCATION_SITE) { | |
| 4226 maybe_obj = AllocateWithAllocationSiteInfo(map, space, | |
| 4227 allocation_site_info_payload); | |
| 4228 } else { | |
| 4229 maybe_obj = Allocate(map, space); | |
| 4230 } | |
| 4191 if (!maybe_obj->ToObject(&obj)) return maybe_obj; | 4231 if (!maybe_obj->ToObject(&obj)) return maybe_obj; |
| 4192 } | 4232 } |
| 4193 | 4233 |
| 4194 // Initialize the JSObject. | 4234 // Initialize the JSObject. |
| 4195 InitializeJSObjectFromMap(JSObject::cast(obj), | 4235 InitializeJSObjectFromMap(JSObject::cast(obj), |
| 4196 FixedArray::cast(properties), | 4236 FixedArray::cast(properties), |
| 4197 map); | 4237 map); |
| 4198 ASSERT(JSObject::cast(obj)->HasFastElements()); | 4238 ASSERT(JSObject::cast(obj)->HasFastElements()); |
| 4199 return obj; | 4239 return obj; |
| 4200 } | 4240 } |
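
The change to AllocateJSObjectFromMap above amounts to a mode dispatch plus a new invariant (the ASSERT relating pretenure and mode). A hedged standalone model, with all enum and function names hypothetical:

```cpp
#include <cassert>
#include <cstddef>

// Hypothetical mirrors of the flags used in this patch.
enum PretenureFlag { NOT_TENURED, TENURED };
enum AllocationSiteMode { DONT_TRACK_ALLOCATION_SITE, TRACK_ALLOCATION_SITE };

void* PlainAllocate(size_t size);                   // stands in for Allocate()
void* AllocateTracked(size_t size, void* payload);  // ...WithAllocationSiteInfo()

void* AllocateObjectFromMap(size_t instance_size, PretenureFlag pretenure,
                            AllocationSiteMode mode, void* payload) {
  // The new invariant: sites are only tracked for non-tenured allocations.
  assert(pretenure == NOT_TENURED || mode == DONT_TRACK_ALLOCATION_SITE);
  return (mode == TRACK_ALLOCATION_SITE)
             ? AllocateTracked(instance_size, payload)  // object + trailer
             : PlainAllocate(instance_size);            // old path, unchanged
}
```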
| 4201 | 4241 |
| 4202 | 4242 |
| 4203 MaybeObject* Heap::AllocateJSObject(JSFunction* constructor, | 4243 MaybeObject* Heap::AllocateJSObject(JSFunction* constructor, |
| 4204 PretenureFlag pretenure) { | 4244 PretenureFlag pretenure, |
| 4245 AllocationSiteMode mode, | |
| 4246 Handle<Object>* allocation_site_info_payload) { | |
| 4205 // Allocate the initial map if absent. | 4247 // Allocate the initial map if absent. |
| 4206 if (!constructor->has_initial_map()) { | 4248 if (!constructor->has_initial_map()) { |
| 4207 Object* initial_map; | 4249 Object* initial_map; |
| 4208 { MaybeObject* maybe_initial_map = AllocateInitialMap(constructor); | 4250 { MaybeObject* maybe_initial_map = AllocateInitialMap(constructor); |
| 4209 if (!maybe_initial_map->ToObject(&initial_map)) return maybe_initial_map; | 4251 if (!maybe_initial_map->ToObject(&initial_map)) return maybe_initial_map; |
| 4210 } | 4252 } |
| 4211 constructor->set_initial_map(Map::cast(initial_map)); | 4253 constructor->set_initial_map(Map::cast(initial_map)); |
| 4212 Map::cast(initial_map)->set_constructor(constructor); | 4254 Map::cast(initial_map)->set_constructor(constructor); |
| 4213 } | 4255 } |
| 4214 // Allocate the object based on the constructor's initial map. | 4256 // Allocate the object based on the constructor's initial map, or the payload |
| 4257 // advice. | |
| 4258 Map* initial_map = constructor->initial_map(); | |
| 4259 if (mode == TRACK_ALLOCATION_SITE) { | |
| 4260 ASSERT(allocation_site_info_payload != NULL); | |
| 4261 ASSERT((*allocation_site_info_payload)->IsJSGlobalPropertyCell()); | |
|
Toon Verwaest
2013/02/13 15:14:51
The cast below will take care of this assert.
mvstanton
2013/02/19 11:04:08
Done.
| |
| 4262 JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast( | |
| 4263 **allocation_site_info_payload); | |
| 4264 ASSERT(cell->value()->IsSmi()); | |
|
Toon Verwaest
2013/02/13 15:14:51
The cast below will take care of this assert.
mvstanton
2013/02/19 11:04:08
Done.
| |
| 4265 Smi* smi = Smi::cast(cell->value()); | |
| 4266 ElementsKind to_kind = static_cast<ElementsKind>(smi->value()); | |
| 4267 if (to_kind != initial_map->elements_kind()) { | |
| 4268 initial_map = initial_map->LookupElementsTransitionMap(to_kind); | |
| 4269 // TODO(mvstanton): I may have to allocate this transition, right? | |
| 4270 ASSERT(initial_map != NULL); | |
|
Toon Verwaest
2013/02/13 15:14:51
Yeah, it seems like you should ensure that the map transition is allocated.
mvstanton
2013/02/19 11:04:08
Done.
| |
| 4271 // constructor->set_initial_map(Map::cast(initial_map)); | |
|
Toon Verwaest
2013/02/13 15:14:51
omit Map::cast.
mvstanton
2013/02/19 11:04:08
Done.
| |
| 4272 // Map::cast(initial_map)->set_constructor(constructor); | |
| 4273 mode = DONT_TRACK_ALLOCATION_SITE; | |
|
Toon Verwaest
2013/02/13 15:14:51
Weird;... you do all this work to set up the initial map, and then turn tracking off?
mvstanton
2013/02/19 11:04:08
The code is trying to express that the advice was already followed, so there is no need to keep tracking the allocation site.
| |
| 4274 } | |
| 4275 } | |
| 4276 | |
| 4215 MaybeObject* result = AllocateJSObjectFromMap( | 4277 MaybeObject* result = AllocateJSObjectFromMap( |
| 4216 constructor->initial_map(), pretenure); | 4278 initial_map, pretenure, |
| 4279 mode, allocation_site_info_payload); | |
| 4217 #ifdef DEBUG | 4280 #ifdef DEBUG |
| 4218 // Make sure result is NOT a global object if valid. | 4281 // Make sure result is NOT a global object if valid. |
| 4219 Object* non_failure; | 4282 Object* non_failure; |
| 4220 ASSERT(!result->ToObject(&non_failure) || !non_failure->IsGlobalObject()); | 4283 ASSERT(!result->ToObject(&non_failure) || !non_failure->IsGlobalObject()); |
| 4221 #endif | 4284 #endif |
| 4222 return result; | 4285 return result; |
| 4223 } | 4286 } |
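
The tracked branch in AllocateJSObject above reads its "advice" out of a global property cell: the cell's value is a Smi encoding an ElementsKind, which selects a transitioned map. A small model of the decoding step (the enum values below are placeholders, not V8's real numbering):

```cpp
// Placeholder elements kinds; V8's real ElementsKind numbering differs.
enum ElementsKind { KIND_SMI = 0, KIND_OBJECT = 1, KIND_DOUBLE = 2 };

struct FakeCell { int value; };  // stands in for JSGlobalPropertyCell + Smi

// Mirrors Smi::cast(cell->value())->value() followed by the kind comparison:
// if the advised kind differs from the initial map's elements kind, the
// caller swaps in the transitioned map and clears the tracking mode, since
// the advice has been consumed.
ElementsKind AdvisedKind(const FakeCell& cell) {
  return static_cast<ElementsKind>(cell.value);
}
```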
| 4224 | 4287 |
| 4225 | 4288 |
| 4226 MaybeObject* Heap::AllocateJSModule(Context* context, ScopeInfo* scope_info) { | 4289 MaybeObject* Heap::AllocateJSModule(Context* context, ScopeInfo* scope_info) { |
| 4227 // Allocate a fresh map. Modules do not have a prototype. | 4290 // Allocate a fresh map. Modules do not have a prototype. |
| 4228 Map* map; | 4291 Map* map; |
| 4229 MaybeObject* maybe_map = AllocateMap(JS_MODULE_TYPE, JSModule::kSize); | 4292 MaybeObject* maybe_map = AllocateMap(JS_MODULE_TYPE, JSModule::kSize); |
| 4230 if (!maybe_map->To(&map)) return maybe_map; | 4293 if (!maybe_map->To(&map)) return maybe_map; |
| 4231 // Allocate the object based on the map. | 4294 // Allocate the object based on the map. |
| 4232 JSModule* module; | 4295 JSModule* module; |
| 4233 MaybeObject* maybe_module = AllocateJSObjectFromMap(map, TENURED); | 4296 MaybeObject* maybe_module = AllocateJSObjectFromMap(map, TENURED); |
| 4234 if (!maybe_module->To(&module)) return maybe_module; | 4297 if (!maybe_module->To(&module)) return maybe_module; |
| 4235 module->set_context(context); | 4298 module->set_context(context); |
| 4236 module->set_scope_info(scope_info); | 4299 module->set_scope_info(scope_info); |
| 4237 return module; | 4300 return module; |
| 4238 } | 4301 } |
| 4239 | 4302 |
| 4240 | 4303 |
| 4241 MaybeObject* Heap::AllocateJSArrayAndStorage( | 4304 MaybeObject* Heap::AllocateJSArrayAndStorage( |
| 4242 ElementsKind elements_kind, | 4305 ElementsKind elements_kind, |
| 4243 int length, | 4306 int length, |
| 4244 int capacity, | 4307 int capacity, |
| 4308 AllocationSiteMode allocation_site_info_mode, | |
| 4309 Handle<Object> *allocation_site_payload, | |
| 4245 ArrayStorageAllocationMode mode, | 4310 ArrayStorageAllocationMode mode, |
| 4246 PretenureFlag pretenure) { | 4311 PretenureFlag pretenure) { |
| 4247 ASSERT(capacity >= length); | 4312 ASSERT(capacity >= length); |
| 4248 MaybeObject* maybe_array = AllocateJSArray(elements_kind, pretenure); | 4313 ASSERT(allocation_site_payload != NULL || |
| 4314 allocation_site_info_mode == DONT_TRACK_ALLOCATION_SITE); | |
| 4315 if (pretenure == TENURED && | |
| 4316 allocation_site_info_mode == TRACK_ALLOCATION_SITE) { | |
| 4317 PrintF("Sorry, can't track yet in tenured space\n"); | |
|
Toon Verwaest
2013/02/13 15:14:51
Euhm... Shouldn't we overwrite allocation_site_info_mode in this case?
mvstanton
2013/02/19 11:04:08
I changed this to an assert that if you want pre-tenuring, you can't track allocation sites.
| |
| 4318 } | |
| 4319 MaybeObject* maybe_array = AllocateJSArray(elements_kind, pretenure, | |
| 4320 allocation_site_info_mode, | |
| 4321 allocation_site_payload); | |
| 4249 JSArray* array; | 4322 JSArray* array; |
| 4250 if (!maybe_array->To(&array)) return maybe_array; | 4323 if (!maybe_array->To(&array)) return maybe_array; |
| 4251 | 4324 |
| 4252 if (capacity == 0) { | 4325 if (capacity == 0) { |
| 4253 array->set_length(Smi::FromInt(0)); | 4326 array->set_length(Smi::FromInt(0)); |
| 4254 array->set_elements(empty_fixed_array()); | 4327 array->set_elements(empty_fixed_array()); |
| 4255 return array; | 4328 return array; |
| 4256 } | 4329 } |
| 4257 | 4330 |
| 4258 FixedArrayBase* elms; | 4331 FixedArrayBase* elms; |
| (...skipping 158 matching lines...) | |
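
Per the exchange above, the PrintF guard in AllocateJSArrayAndStorage was later replaced by an assert. A sketch of the two entry invariants the function ends up enforcing (all names hypothetical, reusing the enums from the earlier sketch):

```cpp
#include <cassert>
#include <cstddef>

enum PretenureFlag { NOT_TENURED, TENURED };
enum AllocationSiteMode { DONT_TRACK_ALLOCATION_SITE, TRACK_ALLOCATION_SITE };

// Tracking needs a payload to record, and tenured allocations cannot be
// tracked yet (the PrintF above became an assert in a later patch set).
void CheckArrayAllocationArgs(const void* payload, AllocationSiteMode mode,
                              PretenureFlag pretenure) {
  assert(payload != nullptr || mode == DONT_TRACK_ALLOCATION_SITE);
  assert(pretenure == NOT_TENURED || mode == DONT_TRACK_ALLOCATION_SITE);
}
```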
| 4417 // have to be careful to clear the literals array. | 4490 // have to be careful to clear the literals array. |
| 4418 SLOW_ASSERT(!source->IsJSFunction()); | 4491 SLOW_ASSERT(!source->IsJSFunction()); |
| 4419 | 4492 |
| 4420 // Make the clone. | 4493 // Make the clone. |
| 4421 Map* map = source->map(); | 4494 Map* map = source->map(); |
| 4422 int object_size = map->instance_size(); | 4495 int object_size = map->instance_size(); |
| 4423 Object* clone; | 4496 Object* clone; |
| 4424 | 4497 |
| 4425 bool track_origin = mode == TRACK_ALLOCATION_SITE && | 4498 bool track_origin = mode == TRACK_ALLOCATION_SITE && |
| 4426 map->CanTrackAllocationSite(); | 4499 map->CanTrackAllocationSite(); |
| 4427 | |
| 4428 WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER; | 4500 WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER; |
| 4429 | 4501 |
| 4430 // If we're forced to always allocate, we use the general allocation | 4502 // If we're forced to always allocate, we use the general allocation |
| 4431 // functions which may leave us with an object in old space. | 4503 // functions which may leave us with an object in old space. |
| 4432 int adjusted_object_size = object_size; | 4504 int adjusted_object_size = object_size; |
| 4433 if (always_allocate()) { | 4505 if (always_allocate()) { |
| 4434 // We'll only track origin if we are certain to allocate in new space | 4506 // We'll only track origin if we are certain to allocate in new space |
| 4435 if (track_origin) { | 4507 if (track_origin) { |
| 4436 const int kMinFreeNewSpaceAfterGC = InitialSemiSpaceSize() * 3/4; | 4508 const int kMinFreeNewSpaceAfterGC = InitialSemiSpaceSize() * 3/4; |
| 4437 if ((object_size + AllocationSiteInfo::kSize) < kMinFreeNewSpaceAfterGC) { | 4509 if ((object_size + AllocationSiteInfo::kSize) < kMinFreeNewSpaceAfterGC) { |
| 4438 adjusted_object_size += AllocationSiteInfo::kSize; | 4510 adjusted_object_size += AllocationSiteInfo::kSize; |
| 4439 } | 4511 } |
| 4440 } | 4512 } |
| 4441 | 4513 |
| 4442 { MaybeObject* maybe_clone = | 4514 { MaybeObject* maybe_clone = |
| 4443 AllocateRaw(adjusted_object_size, NEW_SPACE, OLD_POINTER_SPACE); | 4515 AllocateRaw(adjusted_object_size, NEW_SPACE, OLD_POINTER_SPACE); |
| 4444 if (!maybe_clone->ToObject(&clone)) return maybe_clone; | 4516 if (!maybe_clone->ToObject(&clone)) return maybe_clone; |
| 4445 } | 4517 } |
| 4446 Address clone_address = HeapObject::cast(clone)->address(); | 4518 Address clone_address = HeapObject::cast(clone)->address(); |
| 4447 CopyBlock(clone_address, | 4519 CopyBlock(clone_address, |
| 4448 source->address(), | 4520 source->address(), |
| 4449 object_size); | 4521 object_size); |
| 4450 // Update write barrier for all fields that lie beyond the header. | 4522 // Update write barrier for all fields that lie beyond the header. |
| 4451 RecordWrites(clone_address, | 4523 RecordWrites(clone_address, |
| 4452 JSObject::kHeaderSize, | 4524 JSObject::kHeaderSize, |
| 4453 (object_size - JSObject::kHeaderSize) / kPointerSize); | 4525 (object_size - JSObject::kHeaderSize) / kPointerSize); |
| 4526 | |
| 4527 // Track allocation site information | |
| 4528 if (track_origin && InNewSpace(clone)) { | |
| 4529 MaybeObject* maybe_alloc_info = | |
| 4530 AllocateStruct(ALLOCATION_SITE_INFO_TYPE); | |
| 4531 AllocationSiteInfo* alloc_info; | |
| 4532 if (maybe_alloc_info->To(&alloc_info)) { | |
| 4533 alloc_info->set_map(allocation_site_info_map()); | |
| 4534 alloc_info->set_payload(source); | |
| 4535 } | |
| 4536 } | |
| 4454 } else { | 4537 } else { |
| 4455 wb_mode = SKIP_WRITE_BARRIER; | 4538 wb_mode = SKIP_WRITE_BARRIER; |
| 4539 | |
| 4456 if (track_origin) { | 4540 if (track_origin) { |
| 4457 adjusted_object_size += AllocationSiteInfo::kSize; | 4541 adjusted_object_size += AllocationSiteInfo::kSize; |
| 4458 } | 4542 } |
| 4459 | 4543 |
| 4460 { MaybeObject* maybe_clone = new_space_.AllocateRaw(adjusted_object_size); | 4544 { MaybeObject* maybe_clone = new_space_.AllocateRaw(adjusted_object_size); |
| 4461 if (!maybe_clone->ToObject(&clone)) return maybe_clone; | 4545 if (!maybe_clone->ToObject(&clone)) return maybe_clone; |
| 4462 } | 4546 } |
| 4463 SLOW_ASSERT(InNewSpace(clone)); | 4547 SLOW_ASSERT(InNewSpace(clone)); |
| 4464 // Since we know the clone is allocated in new space, we can copy | 4548 // Since we know the clone is allocated in new space, we can copy |
| 4465 // the contents without worrying about updating the write barrier. | 4549 // the contents without worrying about updating the write barrier. |
| 4466 CopyBlock(HeapObject::cast(clone)->address(), | 4550 CopyBlock(HeapObject::cast(clone)->address(), |
| 4467 source->address(), | 4551 source->address(), |
| 4468 object_size); | 4552 object_size); |
| 4553 | |
| 4554 if (track_origin) { | |
| 4555 AllocationSiteInfo* alloc_info = reinterpret_cast<AllocationSiteInfo*>( | |
| 4556 reinterpret_cast<Address>(clone) + object_size); | |
| 4557 alloc_info->set_map(allocation_site_info_map()); | |
| 4558 alloc_info->set_payload(source); | |
| 4559 } | |
| 4469 } | 4560 } |
| 4470 | 4561 |
| 4471 if (adjusted_object_size > object_size) { | 4562 if (adjusted_object_size > object_size) { |
| 4472 AllocationSiteInfo* alloc_info = reinterpret_cast<AllocationSiteInfo*>( | 4563 AllocationSiteInfo* alloc_info = reinterpret_cast<AllocationSiteInfo*>( |
| 4473 reinterpret_cast<Address>(clone) + object_size); | 4564 reinterpret_cast<Address>(clone) + object_size); |
| 4474 alloc_info->set_map(allocation_site_info_map()); | 4565 alloc_info->set_map(allocation_site_info_map()); |
| 4475 alloc_info->set_payload(source); | 4566 alloc_info->set_payload(source); |
| 4476 } | 4567 } |
| 4477 | 4568 |
| 4478 SLOW_ASSERT( | 4569 SLOW_ASSERT( |
| (...skipping 399 matching lines...) | |
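
The tracked paths added to CopyJSObject above all follow one pattern: copy the source object byte-for-byte, then write an AllocationSiteInfo trailer that points back at the source, so later code can find the site that produced the clone. A standalone model of that pattern (FakeSiteInfo is a stand-in, and the real code installs allocation_site_info_map() rather than null):

```cpp
#include <cstddef>
#include <cstdlib>
#include <cstring>

struct FakeSiteInfo { void* map; void* payload; };

// Copy the source, then place the trailer at clone + object_size, mirroring
// the adjusted_object_size > object_size branch in the patch.
void* CloneWithSiteInfo(const void* source, size_t object_size) {
  size_t adjusted = object_size + sizeof(FakeSiteInfo);  // adjusted_object_size
  char* clone = static_cast<char*>(malloc(adjusted));
  if (clone == nullptr) return nullptr;
  memcpy(clone, source, object_size);                    // CopyBlock analogue
  FakeSiteInfo* info = reinterpret_cast<FakeSiteInfo*>(clone + object_size);
  info->map = nullptr;                        // allocation_site_info_map()
  info->payload = const_cast<void*>(source);  // set_payload(source)
  return clone;
}
```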
| 4878 HeapObject::cast(result)->set_map_no_write_barrier(string_map()); | 4969 HeapObject::cast(result)->set_map_no_write_barrier(string_map()); |
| 4879 String::cast(result)->set_length(length); | 4970 String::cast(result)->set_length(length); |
| 4880 String::cast(result)->set_hash_field(String::kEmptyHashField); | 4971 String::cast(result)->set_hash_field(String::kEmptyHashField); |
| 4881 ASSERT_EQ(size, HeapObject::cast(result)->Size()); | 4972 ASSERT_EQ(size, HeapObject::cast(result)->Size()); |
| 4882 return result; | 4973 return result; |
| 4883 } | 4974 } |
| 4884 | 4975 |
| 4885 | 4976 |
| 4886 MaybeObject* Heap::AllocateJSArray( | 4977 MaybeObject* Heap::AllocateJSArray( |
| 4887 ElementsKind elements_kind, | 4978 ElementsKind elements_kind, |
| 4888 PretenureFlag pretenure) { | 4979 PretenureFlag pretenure, |
| 4980 AllocationSiteMode mode, | |
| 4981 Handle<Object>* allocation_site_info_payload) { | |
| 4889 Context* native_context = isolate()->context()->native_context(); | 4982 Context* native_context = isolate()->context()->native_context(); |
| 4890 JSFunction* array_function = native_context->array_function(); | 4983 JSFunction* array_function = native_context->array_function(); |
| 4891 Map* map = array_function->initial_map(); | 4984 Map* map = array_function->initial_map(); |
| 4892 Object* maybe_map_array = native_context->js_array_maps(); | 4985 Object* maybe_map_array = native_context->js_array_maps(); |
| 4893 if (!maybe_map_array->IsUndefined()) { | 4986 if (!maybe_map_array->IsUndefined()) { |
| 4894 Object* maybe_transitioned_map = | 4987 Object* maybe_transitioned_map = |
| 4895 FixedArray::cast(maybe_map_array)->get(elements_kind); | 4988 FixedArray::cast(maybe_map_array)->get(elements_kind); |
| 4896 if (!maybe_transitioned_map->IsUndefined()) { | 4989 if (!maybe_transitioned_map->IsUndefined()) { |
| 4897 map = Map::cast(maybe_transitioned_map); | 4990 map = Map::cast(maybe_transitioned_map); |
| 4898 } | 4991 } |
| 4899 } | 4992 } |
| 4900 | 4993 |
| 4901 return AllocateJSObjectFromMap(map, pretenure); | 4994 return AllocateJSObjectFromMap(map, pretenure, mode, |
| 4995 allocation_site_info_payload); | |
| 4902 } | 4996 } |
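
AllocateJSArray's map selection above consults a per-context cache (js_array_maps) indexed by elements kind, falling back to the array function's initial map when no transitioned map is cached. A simplified model, with the array size and types purely illustrative:

```cpp
#include <array>
#include <cstddef>

struct FakeMap {};  // stands in for v8::internal::Map

// A per-context cache indexed by elements kind supplies a transitioned map;
// a null entry models the IsUndefined() checks in the code above.
FakeMap* PickArrayMap(const std::array<FakeMap*, 8>& js_array_maps,
                      size_t elements_kind, FakeMap* initial_map) {
  FakeMap* transitioned = js_array_maps[elements_kind];
  return transitioned != nullptr ? transitioned : initial_map;
}
```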
| 4903 | 4997 |
| 4904 | 4998 |
| 4905 MaybeObject* Heap::AllocateEmptyFixedArray() { | 4999 MaybeObject* Heap::AllocateEmptyFixedArray() { |
| 4906 int size = FixedArray::SizeFor(0); | 5000 int size = FixedArray::SizeFor(0); |
| 4907 Object* result; | 5001 Object* result; |
| 4908 { MaybeObject* maybe_result = | 5002 { MaybeObject* maybe_result = |
| 4909 AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE); | 5003 AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE); |
| 4910 if (!maybe_result->ToObject(&result)) return maybe_result; | 5004 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 4911 } | 5005 } |
| (...skipping 2625 matching lines...) | |
| 7537 static_cast<int>(object_sizes_last_time_[index])); | 7631 static_cast<int>(object_sizes_last_time_[index])); |
| 7538 FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(ADJUST_LAST_TIME_OBJECT_COUNT) | 7632 FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(ADJUST_LAST_TIME_OBJECT_COUNT) |
| 7539 #undef ADJUST_LAST_TIME_OBJECT_COUNT | 7633 #undef ADJUST_LAST_TIME_OBJECT_COUNT |
| 7540 | 7634 |
| 7541 memcpy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); | 7635 memcpy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); |
| 7542 memcpy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); | 7636 memcpy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); |
| 7543 ClearObjectStats(); | 7637 ClearObjectStats(); |
| 7544 } | 7638 } |
| 7545 | 7639 |
| 7546 } } // namespace v8::internal | 7640 } } // namespace v8::internal |