Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(168)

Side by Side Diff: src/heap.cc

Issue 11818021: Allocation Info Tracking, continued. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: A partial delta against Toon's previous review Created 7 years, 9 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/heap.h ('k') | src/heap-inl.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 3885 matching lines...) Expand 10 before | Expand all | Expand 10 after
3896 3896
3897 #ifdef VERIFY_HEAP 3897 #ifdef VERIFY_HEAP
3898 if (FLAG_verify_heap) { 3898 if (FLAG_verify_heap) {
3899 code->Verify(); 3899 code->Verify();
3900 } 3900 }
3901 #endif 3901 #endif
3902 return new_code; 3902 return new_code;
3903 } 3903 }
3904 3904
3905 3905
3906 MaybeObject* Heap::AllocateWithAllocationSite(Map* map, AllocationSpace space,
3907 Handle<Object> allocation_site_info_payload) {
3908 ASSERT(gc_state_ == NOT_IN_GC);
3909 ASSERT(map->instance_type() != MAP_TYPE);
3910 // If allocation failures are disallowed, we may allocate in a different
3911 // space when new space is full and the object is not a large object.
3912 AllocationSpace retry_space =
3913 (space != NEW_SPACE) ? space : TargetSpaceId(map->instance_type());
3914 int size = map->instance_size() + AllocationSiteInfo::kSize;
3915 Object* result;
3916 MaybeObject* maybe_result = AllocateRaw(size, space, retry_space);
3917 if (!maybe_result->ToObject(&result)) return maybe_result;
3918 // No need for write barrier since object is white and map is in old space.
3919 HeapObject::cast(result)->set_map_no_write_barrier(map);
3920 AllocationSiteInfo* alloc_info = reinterpret_cast<AllocationSiteInfo*>(
3921 reinterpret_cast<Address>(result) + map->instance_size());
3922 alloc_info->set_map_no_write_barrier(allocation_site_info_map());
3923 alloc_info->set_payload(*allocation_site_info_payload, SKIP_WRITE_BARRIER);
3924 return result;
3925 }
3926
3927
3906 MaybeObject* Heap::Allocate(Map* map, AllocationSpace space) { 3928 MaybeObject* Heap::Allocate(Map* map, AllocationSpace space) {
3907 ASSERT(gc_state_ == NOT_IN_GC); 3929 ASSERT(gc_state_ == NOT_IN_GC);
3908 ASSERT(map->instance_type() != MAP_TYPE); 3930 ASSERT(map->instance_type() != MAP_TYPE);
3909 // If allocation failures are disallowed, we may allocate in a different 3931 // If allocation failures are disallowed, we may allocate in a different
3910 // space when new space is full and the object is not a large object. 3932 // space when new space is full and the object is not a large object.
3911 AllocationSpace retry_space = 3933 AllocationSpace retry_space =
3912 (space != NEW_SPACE) ? space : TargetSpaceId(map->instance_type()); 3934 (space != NEW_SPACE) ? space : TargetSpaceId(map->instance_type());
3935 int size = map->instance_size();
3913 Object* result; 3936 Object* result;
3914 { MaybeObject* maybe_result = 3937 MaybeObject* maybe_result = AllocateRaw(size, space, retry_space);
3915 AllocateRaw(map->instance_size(), space, retry_space); 3938 if (!maybe_result->ToObject(&result)) return maybe_result;
3916 if (!maybe_result->ToObject(&result)) return maybe_result;
3917 }
3918 // No need for write barrier since object is white and map is in old space. 3939 // No need for write barrier since object is white and map is in old space.
3919 HeapObject::cast(result)->set_map_no_write_barrier(map); 3940 HeapObject::cast(result)->set_map_no_write_barrier(map);
3920 return result; 3941 return result;
3921 } 3942 }
3922 3943
3923
3924 // TODO(mvstanton): consolidate this with the function above.
3925 MaybeObject* Heap::AllocateWithAllocationSiteInfo(Map* map,
3926 AllocationSpace space,
3927 Handle<Object>* allocation_site_info_payload) {
3928 ASSERT(gc_state_ == NOT_IN_GC);
3929 ASSERT(map->instance_type() != MAP_TYPE);
3930 // If allocation failures are disallowed, we may allocate in a different
3931 // space when new space is full and the object is not a large object.
3932 AllocationSpace retry_space =
3933 (space != NEW_SPACE) ? space : TargetSpaceId(map->instance_type());
3934 Object* result;
3935 { MaybeObject* maybe_result =
3936 AllocateRaw(map->instance_size() + AllocationSiteInfo::kSize,
3937 space,
3938 retry_space);
3939 if (!maybe_result->ToObject(&result)) return maybe_result;
3940 }
3941 // No need for write barrier since object is white and map is in old space.
3942 HeapObject::cast(result)->set_map_no_write_barrier(map);
3943 Object* allocation_pointer = reinterpret_cast<Object*>(
3944 reinterpret_cast<Address>(result) + map->instance_size());
3945 HeapObject::cast(allocation_pointer)->set_map_no_write_barrier(
3946 allocation_site_info_map());
3947 AllocationSiteInfo* allocation_site_info =
3948 AllocationSiteInfo::cast(allocation_pointer);
3949 allocation_site_info->set_payload(**allocation_site_info_payload);
3950 return result;
3951 }
3952
3953 3944
3954 void Heap::InitializeFunction(JSFunction* function, 3945 void Heap::InitializeFunction(JSFunction* function,
3955 SharedFunctionInfo* shared, 3946 SharedFunctionInfo* shared,
3956 Object* prototype) { 3947 Object* prototype) {
3957 ASSERT(!prototype->IsMap()); 3948 ASSERT(!prototype->IsMap());
3958 function->initialize_properties(); 3949 function->initialize_properties();
3959 function->initialize_elements(); 3950 function->initialize_elements();
3960 function->set_shared(shared); 3951 function->set_shared(shared);
3961 function->set_code(shared->code()); 3952 function->set_code(shared->code());
3962 function->set_prototype_or_initial_map(prototype); 3953 function->set_prototype_or_initial_map(prototype);
(...skipping 216 matching lines...) Expand 10 before | Expand all | Expand 10 after
4179 // We might want to shrink the object later. 4170 // We might want to shrink the object later.
4180 ASSERT(obj->GetInternalFieldCount() == 0); 4171 ASSERT(obj->GetInternalFieldCount() == 0);
4181 filler = Heap::one_pointer_filler_map(); 4172 filler = Heap::one_pointer_filler_map();
4182 } else { 4173 } else {
4183 filler = Heap::undefined_value(); 4174 filler = Heap::undefined_value();
4184 } 4175 }
4185 obj->InitializeBody(map, Heap::undefined_value(), filler); 4176 obj->InitializeBody(map, Heap::undefined_value(), filler);
4186 } 4177 }
4187 4178
4188 4179
4189 MaybeObject* Heap::AllocateJSObjectFromMap(Map* map, PretenureFlag pretenure, 4180 MaybeObject* Heap::AllocateJSObjectFromMap(Map* map, PretenureFlag pretenure) {
4190 AllocationSiteMode mode,
4191 Handle<Object>* allocation_site_info_payload) {
4192 ASSERT(pretenure == NOT_TENURED || mode == DONT_TRACK_ALLOCATION_SITE);
4193 // JSFunctions should be allocated using AllocateFunction to be 4181 // JSFunctions should be allocated using AllocateFunction to be
4194 // properly initialized. 4182 // properly initialized.
4195 ASSERT(map->instance_type() != JS_FUNCTION_TYPE); 4183 ASSERT(map->instance_type() != JS_FUNCTION_TYPE);
4196 4184
4197 // Both types of global objects should be allocated using 4185 // Both types of global objects should be allocated using
4198 // AllocateGlobalObject to be properly initialized. 4186 // AllocateGlobalObject to be properly initialized.
4199 ASSERT(map->instance_type() != JS_GLOBAL_OBJECT_TYPE); 4187 ASSERT(map->instance_type() != JS_GLOBAL_OBJECT_TYPE);
4200 ASSERT(map->instance_type() != JS_BUILTINS_OBJECT_TYPE); 4188 ASSERT(map->instance_type() != JS_BUILTINS_OBJECT_TYPE);
4201 4189
4202 // Allocate the backing storage for the properties. 4190 // Allocate the backing storage for the properties.
4203 int prop_size = 4191 int prop_size =
4204 map->pre_allocated_property_fields() + 4192 map->pre_allocated_property_fields() +
4205 map->unused_property_fields() - 4193 map->unused_property_fields() -
4206 map->inobject_properties(); 4194 map->inobject_properties();
4207 ASSERT(prop_size >= 0); 4195 ASSERT(prop_size >= 0);
4208 Object* properties; 4196 Object* properties;
4209 { MaybeObject* maybe_properties = AllocateFixedArray(prop_size, pretenure); 4197 { MaybeObject* maybe_properties = AllocateFixedArray(prop_size, pretenure);
4210 if (!maybe_properties->ToObject(&properties)) return maybe_properties; 4198 if (!maybe_properties->ToObject(&properties)) return maybe_properties;
4211 } 4199 }
4212 4200
4213 // Allocate the JSObject. 4201 // Allocate the JSObject.
4214 AllocationSpace space = 4202 AllocationSpace space =
4215 (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE; 4203 (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE;
4216 if (map->instance_size() > Page::kMaxNonCodeHeapObjectSize) space = LO_SPACE; 4204 if (map->instance_size() > Page::kMaxNonCodeHeapObjectSize) space = LO_SPACE;
4217 Object* obj; 4205 Object* obj;
4218 { 4206 MaybeObject* maybe_obj = Allocate(map, space);
4219 MaybeObject* maybe_obj; 4207 if (!maybe_obj->To(&obj)) return maybe_obj;
4220 if (mode == TRACK_ALLOCATION_SITE) {
4221 maybe_obj = AllocateWithAllocationSiteInfo(map, space,
4222 allocation_site_info_payload);
4223 } else {
4224 maybe_obj = Allocate(map, space);
4225 }
4226 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
4227 }
4228 4208
4229 // Initialize the JSObject. 4209 // Initialize the JSObject.
4230 InitializeJSObjectFromMap(JSObject::cast(obj), 4210 InitializeJSObjectFromMap(JSObject::cast(obj),
4231 FixedArray::cast(properties), 4211 FixedArray::cast(properties),
4212 map);
4213 ASSERT(JSObject::cast(obj)->HasFastElements());
4214 return obj;
4215 }
4216
4217
4218 MaybeObject* Heap::AllocateJSObjectFromMapWithAllocationSite(Map* map,
4219 Handle<Object> allocation_site_info_payload) {
4220 // JSFunctions should be allocated using AllocateFunction to be
4221 // properly initialized.
4222 ASSERT(map->instance_type() != JS_FUNCTION_TYPE);
4223
4224 // Both types of global objects should be allocated using
4225 // AllocateGlobalObject to be properly initialized.
4226 ASSERT(map->instance_type() != JS_GLOBAL_OBJECT_TYPE);
4227 ASSERT(map->instance_type() != JS_BUILTINS_OBJECT_TYPE);
4228
4229 // Allocate the backing storage for the properties.
4230 int prop_size =
4231 map->pre_allocated_property_fields() +
4232 map->unused_property_fields() -
4233 map->inobject_properties();
4234 ASSERT(prop_size >= 0);
4235 Object* properties;
4236 { MaybeObject* maybe_properties = AllocateFixedArray(prop_size);
4237 if (!maybe_properties->ToObject(&properties)) return maybe_properties;
4238 }
4239
4240 // Allocate the JSObject.
4241 AllocationSpace space = NEW_SPACE;
4242 if (map->instance_size() > Page::kMaxNonCodeHeapObjectSize) space = LO_SPACE;
4243 Object* obj;
4244 MaybeObject* maybe_obj = AllocateWithAllocationSite(map, space,
4245 allocation_site_info_payload);
4246 if (!maybe_obj->To(&obj)) return maybe_obj;
4247
4248 // Initialize the JSObject.
4249 InitializeJSObjectFromMap(JSObject::cast(obj),
4250 FixedArray::cast(properties),
4232 map); 4251 map);
4233 ASSERT(JSObject::cast(obj)->HasFastElements()); 4252 ASSERT(JSObject::cast(obj)->HasFastElements());
4234 return obj; 4253 return obj;
4235 } 4254 }
4236 4255
4237 4256
4238 MaybeObject* Heap::AllocateJSObject(JSFunction* constructor, 4257 MaybeObject* Heap::AllocateJSObject(JSFunction* constructor,
4239 PretenureFlag pretenure, 4258 PretenureFlag pretenure) {
4240 AllocationSiteMode mode,
4241 Handle<Object>* allocation_site_info_payload) {
4242 // Allocate the initial map if absent. 4259 // Allocate the initial map if absent.
4243 if (!constructor->has_initial_map()) { 4260 if (!constructor->has_initial_map()) {
4244 Object* initial_map; 4261 Object* initial_map;
4262 { MaybeObject* maybe_initial_map = AllocateInitialMap(constructor);
4263 if (!maybe_initial_map->ToObject(&initial_map)) return maybe_initial_map;
4264 }
4265 constructor->set_initial_map(Map::cast(initial_map));
4266 Map::cast(initial_map)->set_constructor(constructor);
4267 }
4268 // Allocate the object based on the constructors initial map.
4269 MaybeObject* result = AllocateJSObjectFromMap(
4270 constructor->initial_map(), pretenure);
4271 #ifdef DEBUG
4272 // Make sure result is NOT a global object if valid.
4273 Object* non_failure;
4274 ASSERT(!result->ToObject(&non_failure) || !non_failure->IsGlobalObject());
4275 #endif
4276 return result;
4277 }
4278
4279
4280 MaybeObject* Heap::AllocateJSObjectWithAllocationSite(JSFunction* constructor,
4281 Handle<Object> allocation_site_info_payload) {
4282 // Allocate the initial map if absent.
4283 if (!constructor->has_initial_map()) {
4284 Object* initial_map;
4245 { MaybeObject* maybe_initial_map = AllocateInitialMap(constructor); 4285 { MaybeObject* maybe_initial_map = AllocateInitialMap(constructor);
4246 if (!maybe_initial_map->ToObject(&initial_map)) return maybe_initial_map; 4286 if (!maybe_initial_map->ToObject(&initial_map)) return maybe_initial_map;
4247 } 4287 }
4248 constructor->set_initial_map(Map::cast(initial_map)); 4288 constructor->set_initial_map(Map::cast(initial_map));
4249 Map::cast(initial_map)->set_constructor(constructor); 4289 Map::cast(initial_map)->set_constructor(constructor);
4250 } 4290 }
4251 // Allocate the object based on the constructors initial map, or the payload 4291 // Allocate the object based on the constructors initial map, or the payload
4252 // advice 4292 // advice
4253 Map* initial_map = constructor->initial_map(); 4293 Map* initial_map = constructor->initial_map();
4254 if (mode == TRACK_ALLOCATION_SITE) { 4294
4255 ASSERT(allocation_site_info_payload != NULL); 4295 JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(
4256 ASSERT((*allocation_site_info_payload)->IsJSGlobalPropertyCell()); 4296 *allocation_site_info_payload);
4257 JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast( 4297 Smi* smi = Smi::cast(cell->value());
4258 **allocation_site_info_payload); 4298 ElementsKind to_kind = static_cast<ElementsKind>(smi->value());
4259 ASSERT(cell->value()->IsSmi()); 4299 AllocationSiteMode mode = TRACK_ALLOCATION_SITE;
4260 Smi* smi = Smi::cast(cell->value()); 4300 if (to_kind != initial_map->elements_kind()) {
4261 ElementsKind to_kind = static_cast<ElementsKind>(smi->value()); 4301 MaybeObject* maybe_new_map = constructor->GetElementsTransitionMap(
4262 if (to_kind != initial_map->elements_kind()) { 4302 isolate(), to_kind);
4263 initial_map = initial_map->LookupElementsTransitionMap(to_kind); 4303 if (!maybe_new_map->To(&initial_map)) return maybe_new_map;
4264 // TODO(mvstanton): I may have to allocate this transition, right? 4304 // Possibly alter the mode, since we found an updated elements kind
4265 ASSERT(initial_map != NULL); 4305 // in the type info cell.
4266 // constructor->set_initial_map(Map::cast(initial_map)); 4306 mode = AllocationSiteInfo::GetMode(to_kind);
4267 // Map::cast(initial_map)->set_constructor(constructor);
4268 mode = DONT_TRACK_ALLOCATION_SITE;
4269 }
4270 } 4307 }
4271 4308
4272 MaybeObject* result = AllocateJSObjectFromMap( 4309 MaybeObject* result;
4273 initial_map, pretenure, 4310 if (mode == TRACK_ALLOCATION_SITE) {
4274 mode, allocation_site_info_payload); 4311 result = AllocateJSObjectFromMapWithAllocationSite(initial_map,
4312 allocation_site_info_payload);
4313 } else {
4314 result = AllocateJSObjectFromMap(initial_map, NOT_TENURED);
4315 }
4275 #ifdef DEBUG 4316 #ifdef DEBUG
4276 // Make sure result is NOT a global object if valid. 4317 // Make sure result is NOT a global object if valid.
4277 Object* non_failure; 4318 Object* non_failure;
4278 ASSERT(!result->ToObject(&non_failure) || !non_failure->IsGlobalObject()); 4319 ASSERT(!result->ToObject(&non_failure) || !non_failure->IsGlobalObject());
4279 #endif 4320 #endif
4280 return result; 4321 return result;
4281 } 4322 }
4282 4323
4283 4324
4284 MaybeObject* Heap::AllocateJSModule(Context* context, ScopeInfo* scope_info) { 4325 MaybeObject* Heap::AllocateJSModule(Context* context, ScopeInfo* scope_info) {
4285 // Allocate a fresh map. Modules do not have a prototype. 4326 // Allocate a fresh map. Modules do not have a prototype.
4286 Map* map; 4327 Map* map;
4287 MaybeObject* maybe_map = AllocateMap(JS_MODULE_TYPE, JSModule::kSize); 4328 MaybeObject* maybe_map = AllocateMap(JS_MODULE_TYPE, JSModule::kSize);
4288 if (!maybe_map->To(&map)) return maybe_map; 4329 if (!maybe_map->To(&map)) return maybe_map;
4289 // Allocate the object based on the map. 4330 // Allocate the object based on the map.
4290 JSModule* module; 4331 JSModule* module;
4291 MaybeObject* maybe_module = AllocateJSObjectFromMap(map, TENURED); 4332 MaybeObject* maybe_module = AllocateJSObjectFromMap(map, TENURED);
4292 if (!maybe_module->To(&module)) return maybe_module; 4333 if (!maybe_module->To(&module)) return maybe_module;
4293 module->set_context(context); 4334 module->set_context(context);
4294 module->set_scope_info(scope_info); 4335 module->set_scope_info(scope_info);
4295 return module; 4336 return module;
4296 } 4337 }
4297 4338
4298 4339
4299 MaybeObject* Heap::AllocateJSArrayAndStorage( 4340 MaybeObject* Heap::AllocateJSArrayAndStorage(
4300 ElementsKind elements_kind, 4341 ElementsKind elements_kind,
4301 int length, 4342 int length,
4302 int capacity, 4343 int capacity,
4303 AllocationSiteMode allocation_site_info_mode,
4304 Handle<Object> *allocation_site_payload,
4305 ArrayStorageAllocationMode mode, 4344 ArrayStorageAllocationMode mode,
4306 PretenureFlag pretenure) { 4345 PretenureFlag pretenure) {
4307 ASSERT(capacity >= length); 4346 MaybeObject* maybe_array = AllocateJSArray(elements_kind, pretenure);
4308 ASSERT(allocation_site_payload != NULL ||
4309 allocation_site_info_mode == DONT_TRACK_ALLOCATION_SITE);
4310 if (pretenure == TENURED &&
4311 allocation_site_info_mode == TRACK_ALLOCATION_SITE) {
4312 PrintF("Sorry, can't track yet in tenured space\n");
4313 }
4314 MaybeObject* maybe_array = AllocateJSArray(elements_kind, pretenure,
4315 allocation_site_info_mode,
4316 allocation_site_payload);
4317 JSArray* array; 4347 JSArray* array;
4318 if (!maybe_array->To(&array)) return maybe_array; 4348 if (!maybe_array->To(&array)) return maybe_array;
4319 4349
4350 // TODO(mvstanton): this body of code is duplicate with AllocateJSArrayStorage
4351 // for performance reasons.
4352 ASSERT(capacity >= length);
4353
4320 if (capacity == 0) { 4354 if (capacity == 0) {
4321 array->set_length(Smi::FromInt(0)); 4355 array->set_length(Smi::FromInt(0));
4356 array->set_elements(empty_fixed_array());
4357 return array;
4358 }
4359
4360 FixedArrayBase* elms;
4361 MaybeObject* maybe_elms = NULL;
4362 if (IsFastDoubleElementsKind(elements_kind)) {
4363 if (mode == DONT_INITIALIZE_ARRAY_ELEMENTS) {
4364 maybe_elms = AllocateUninitializedFixedDoubleArray(capacity);
4365 } else {
4366 ASSERT(mode == INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE);
4367 maybe_elms = AllocateFixedDoubleArrayWithHoles(capacity);
4368 }
4369 } else {
4370 ASSERT(IsFastSmiOrObjectElementsKind(elements_kind));
4371 if (mode == DONT_INITIALIZE_ARRAY_ELEMENTS) {
4372 maybe_elms = AllocateUninitializedFixedArray(capacity);
4373 } else {
4374 ASSERT(mode == INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE);
4375 maybe_elms = AllocateFixedArrayWithHoles(capacity);
4376 }
4377 }
4378 if (!maybe_elms->To(&elms)) return maybe_elms;
4379
4380 array->set_elements(elms);
4381 array->set_length(Smi::FromInt(length));
4382 return array;
4383 }
4384
4385
4386 MaybeObject* Heap::AllocateJSArrayAndStorageWithAllocationSite(
4387 ElementsKind elements_kind,
4388 int length,
4389 int capacity,
4390 Handle<Object> allocation_site_payload,
4391 ArrayStorageAllocationMode mode) {
4392 MaybeObject* maybe_array = AllocateJSArrayWithAllocationSite(elements_kind,
4393 allocation_site_payload);
4394 JSArray* array;
4395 if (!maybe_array->To(&array)) return maybe_array;
4396 return AllocateJSArrayStorage(array, length, capacity, mode);
4397 }
4398
4399
4400 MaybeObject* Heap::AllocateJSArrayStorage(
4401 JSArray* array,
4402 int length,
4403 int capacity,
4404 ArrayStorageAllocationMode mode) {
4405 ASSERT(capacity >= length);
4406
4407 if (capacity == 0) {
4408 array->set_length(Smi::FromInt(0));
4322 array->set_elements(empty_fixed_array()); 4409 array->set_elements(empty_fixed_array());
4323 return array; 4410 return array;
4324 } 4411 }
4325 4412
4326 FixedArrayBase* elms; 4413 FixedArrayBase* elms;
4327 MaybeObject* maybe_elms = NULL; 4414 MaybeObject* maybe_elms = NULL;
4415 ElementsKind elements_kind = array->GetElementsKind();
4328 if (IsFastDoubleElementsKind(elements_kind)) { 4416 if (IsFastDoubleElementsKind(elements_kind)) {
4329 if (mode == DONT_INITIALIZE_ARRAY_ELEMENTS) { 4417 if (mode == DONT_INITIALIZE_ARRAY_ELEMENTS) {
4330 maybe_elms = AllocateUninitializedFixedDoubleArray(capacity); 4418 maybe_elms = AllocateUninitializedFixedDoubleArray(capacity);
4331 } else { 4419 } else {
4332 ASSERT(mode == INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE); 4420 ASSERT(mode == INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE);
4333 maybe_elms = AllocateFixedDoubleArrayWithHoles(capacity); 4421 maybe_elms = AllocateFixedDoubleArrayWithHoles(capacity);
4334 } 4422 }
4335 } else { 4423 } else {
4336 ASSERT(IsFastSmiOrObjectElementsKind(elements_kind)); 4424 ASSERT(IsFastSmiOrObjectElementsKind(elements_kind));
4337 if (mode == DONT_INITIALIZE_ARRAY_ELEMENTS) { 4425 if (mode == DONT_INITIALIZE_ARRAY_ELEMENTS) {
(...skipping 134 matching lines...) Expand 10 before | Expand all | Expand 10 after
4472 global->set_map(new_map); 4560 global->set_map(new_map);
4473 global->set_properties(dictionary); 4561 global->set_properties(dictionary);
4474 4562
4475 // Make sure result is a global object with properties in dictionary. 4563 // Make sure result is a global object with properties in dictionary.
4476 ASSERT(global->IsGlobalObject()); 4564 ASSERT(global->IsGlobalObject());
4477 ASSERT(!global->HasFastProperties()); 4565 ASSERT(!global->HasFastProperties());
4478 return global; 4566 return global;
4479 } 4567 }
4480 4568
4481 4569
4482 MaybeObject* Heap::CopyJSObject(JSObject* source, 4570 MaybeObject* Heap::CopyJSObject(JSObject* source) {
4483 AllocationSiteMode mode) {
4484 // Never used to copy functions. If functions need to be copied we 4571 // Never used to copy functions. If functions need to be copied we
4485 // have to be careful to clear the literals array. 4572 // have to be careful to clear the literals array.
4486 SLOW_ASSERT(!source->IsJSFunction()); 4573 SLOW_ASSERT(!source->IsJSFunction());
4487 4574
4488 // Make the clone. 4575 // Make the clone.
4489 Map* map = source->map(); 4576 Map* map = source->map();
4490 int object_size = map->instance_size(); 4577 int object_size = map->instance_size();
4491 Object* clone; 4578 Object* clone;
4492 4579
4493 bool track_origin = mode == TRACK_ALLOCATION_SITE &&
4494 map->CanTrackAllocationSite();
4495 WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER; 4580 WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER;
4496 4581
4497 // If we're forced to always allocate, we use the general allocation 4582 // If we're forced to always allocate, we use the general allocation
4583 // functions which may leave us with an object in old space.
4584 if (always_allocate()) {
4585 { MaybeObject* maybe_clone =
4586 AllocateRaw(object_size, NEW_SPACE, OLD_POINTER_SPACE);
4587 if (!maybe_clone->ToObject(&clone)) return maybe_clone;
4588 }
4589 Address clone_address = HeapObject::cast(clone)->address();
4590 CopyBlock(clone_address,
4591 source->address(),
4592 object_size);
4593 // Update write barrier for all fields that lie beyond the header.
4594 RecordWrites(clone_address,
4595 JSObject::kHeaderSize,
4596 (object_size - JSObject::kHeaderSize) / kPointerSize);
4597 } else {
4598 wb_mode = SKIP_WRITE_BARRIER;
4599
4600 { MaybeObject* maybe_clone = new_space_.AllocateRaw(object_size);
4601 if (!maybe_clone->ToObject(&clone)) return maybe_clone;
4602 }
4603 SLOW_ASSERT(InNewSpace(clone));
4604 // Since we know the clone is allocated in new space, we can copy
4605 // the contents without worrying about updating the write barrier.
4606 CopyBlock(HeapObject::cast(clone)->address(),
4607 source->address(),
4608 object_size);
4609 }
4610
4611 SLOW_ASSERT(
4612 JSObject::cast(clone)->GetElementsKind() == source->GetElementsKind());
4613 FixedArrayBase* elements = FixedArrayBase::cast(source->elements());
4614 FixedArray* properties = FixedArray::cast(source->properties());
4615 // Update elements if necessary.
4616 if (elements->length() > 0) {
4617 Object* elem;
4618 { MaybeObject* maybe_elem;
4619 if (elements->map() == fixed_cow_array_map()) {
4620 maybe_elem = FixedArray::cast(elements);
4621 } else if (source->HasFastDoubleElements()) {
4622 maybe_elem = CopyFixedDoubleArray(FixedDoubleArray::cast(elements));
4623 } else {
4624 maybe_elem = CopyFixedArray(FixedArray::cast(elements));
4625 }
4626 if (!maybe_elem->ToObject(&elem)) return maybe_elem;
4627 }
4628 JSObject::cast(clone)->set_elements(FixedArrayBase::cast(elem), wb_mode);
4629 }
4630 // Update properties if necessary.
4631 if (properties->length() > 0) {
4632 Object* prop;
4633 { MaybeObject* maybe_prop = CopyFixedArray(properties);
4634 if (!maybe_prop->ToObject(&prop)) return maybe_prop;
4635 }
4636 JSObject::cast(clone)->set_properties(FixedArray::cast(prop), wb_mode);
4637 }
4638 // Return the new clone.
4639 return clone;
4640 }
4641
4642
4643 MaybeObject* Heap::CopyJSObjectWithAllocationSite(JSObject* source) {
4644 // Never used to copy functions. If functions need to be copied we
4645 // have to be careful to clear the literals array.
4646 SLOW_ASSERT(!source->IsJSFunction());
4647
4648 // Make the clone.
4649 Map* map = source->map();
4650 int object_size = map->instance_size();
4651 Object* clone;
4652
4653 ASSERT(map->CanTrackAllocationSite());
4654 ASSERT(map->instance_type() == JS_ARRAY_TYPE);
4655 WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER;
4656
4657 // If we're forced to always allocate, we use the general allocation
4498 // functions which may leave us with an object in old space. 4658 // functions which may leave us with an object in old space.
4499 int adjusted_object_size = object_size; 4659 int adjusted_object_size = object_size;
4500 if (always_allocate()) { 4660 if (always_allocate()) {
4501 // We'll only track origin if we are certain to allocate in new space 4661 // We'll only track origin if we are certain to allocate in new space
4502 if (track_origin) { 4662 const int kMinFreeNewSpaceAfterGC = InitialSemiSpaceSize() * 3/4;
4503 const int kMinFreeNewSpaceAfterGC = InitialSemiSpaceSize() * 3/4; 4663 if ((object_size + AllocationSiteInfo::kSize) < kMinFreeNewSpaceAfterGC) {
4504 if ((object_size + AllocationSiteInfo::kSize) < kMinFreeNewSpaceAfterGC) { 4664 adjusted_object_size += AllocationSiteInfo::kSize;
4505 adjusted_object_size += AllocationSiteInfo::kSize;
4506 }
4507 } 4665 }
4508 4666
4509 { MaybeObject* maybe_clone = 4667 { MaybeObject* maybe_clone =
4510 AllocateRaw(adjusted_object_size, NEW_SPACE, OLD_POINTER_SPACE); 4668 AllocateRaw(adjusted_object_size, NEW_SPACE, OLD_POINTER_SPACE);
4511 if (!maybe_clone->ToObject(&clone)) return maybe_clone; 4669 if (!maybe_clone->ToObject(&clone)) return maybe_clone;
4512 } 4670 }
4513 Address clone_address = HeapObject::cast(clone)->address(); 4671 Address clone_address = HeapObject::cast(clone)->address();
4514 CopyBlock(clone_address, 4672 CopyBlock(clone_address,
4515 source->address(), 4673 source->address(),
4516 object_size); 4674 object_size);
4517 // Update write barrier for all fields that lie beyond the header. 4675 // Update write barrier for all fields that lie beyond the header.
4518 RecordWrites(clone_address, 4676 int write_barrier_offset = adjusted_object_size > object_size
4519 JSObject::kHeaderSize, 4677 ? JSArray::kSize + AllocationSiteInfo::kSize
4520 (object_size - JSObject::kHeaderSize) / kPointerSize); 4678 : JSObject::kHeaderSize;
4679 if (((object_size - write_barrier_offset) / kPointerSize) > 0) {
4680 RecordWrites(clone_address,
4681 write_barrier_offset,
4682 (object_size - write_barrier_offset) / kPointerSize);
4683 }
4521 4684
4522 // Track allocation site information 4685 // Track allocation site information, if we failed to allocate it inline.
4523 if (track_origin && InNewSpace(clone)) { 4686 if (InNewSpace(clone) &&
4687 adjusted_object_size == object_size) {
4524 MaybeObject* maybe_alloc_info = 4688 MaybeObject* maybe_alloc_info =
4525 AllocateStruct(ALLOCATION_SITE_INFO_TYPE); 4689 AllocateStruct(ALLOCATION_SITE_INFO_TYPE);
4526 AllocationSiteInfo* alloc_info; 4690 AllocationSiteInfo* alloc_info;
4527 if (maybe_alloc_info->To(&alloc_info)) { 4691 if (maybe_alloc_info->To(&alloc_info)) {
4528 alloc_info->set_map(allocation_site_info_map()); 4692 alloc_info->set_map_no_write_barrier(allocation_site_info_map());
4529 alloc_info->set_payload(source); 4693 alloc_info->set_payload(source, SKIP_WRITE_BARRIER);
4530 } 4694 }
4531 } 4695 }
4532 } else { 4696 } else {
4533 wb_mode = SKIP_WRITE_BARRIER; 4697 wb_mode = SKIP_WRITE_BARRIER;
4534 4698 adjusted_object_size += AllocationSiteInfo::kSize;
4535 if (track_origin) {
4536 adjusted_object_size += AllocationSiteInfo::kSize;
4537 }
4538 4699
4539 { MaybeObject* maybe_clone = new_space_.AllocateRaw(adjusted_object_size); 4700 { MaybeObject* maybe_clone = new_space_.AllocateRaw(adjusted_object_size);
4540 if (!maybe_clone->ToObject(&clone)) return maybe_clone; 4701 if (!maybe_clone->ToObject(&clone)) return maybe_clone;
4541 } 4702 }
4542 SLOW_ASSERT(InNewSpace(clone)); 4703 SLOW_ASSERT(InNewSpace(clone));
4543 // Since we know the clone is allocated in new space, we can copy 4704 // Since we know the clone is allocated in new space, we can copy
4544 // the contents without worrying about updating the write barrier. 4705 // the contents without worrying about updating the write barrier.
4545 CopyBlock(HeapObject::cast(clone)->address(), 4706 CopyBlock(HeapObject::cast(clone)->address(),
4546 source->address(), 4707 source->address(),
4547 object_size); 4708 object_size);
4548
4549 if (track_origin) {
4550 AllocationSiteInfo* alloc_info = reinterpret_cast<AllocationSiteInfo*>(
4551 reinterpret_cast<Address>(clone) + object_size);
4552 alloc_info->set_map(allocation_site_info_map());
4553 alloc_info->set_payload(source);
4554 }
4555 } 4709 }
4556 4710
4557 if (adjusted_object_size > object_size) { 4711 if (adjusted_object_size > object_size) {
4558 AllocationSiteInfo* alloc_info = reinterpret_cast<AllocationSiteInfo*>( 4712 AllocationSiteInfo* alloc_info = reinterpret_cast<AllocationSiteInfo*>(
4559 reinterpret_cast<Address>(clone) + object_size); 4713 reinterpret_cast<Address>(clone) + object_size);
4560 alloc_info->set_map(allocation_site_info_map()); 4714 alloc_info->set_map_no_write_barrier(allocation_site_info_map());
4561 alloc_info->set_payload(source); 4715 alloc_info->set_payload(source, SKIP_WRITE_BARRIER);
4562 } 4716 }
4563 4717
4564 SLOW_ASSERT( 4718 SLOW_ASSERT(
4565 JSObject::cast(clone)->GetElementsKind() == source->GetElementsKind()); 4719 JSObject::cast(clone)->GetElementsKind() == source->GetElementsKind());
4566 FixedArrayBase* elements = FixedArrayBase::cast(source->elements()); 4720 FixedArrayBase* elements = FixedArrayBase::cast(source->elements());
4567 FixedArray* properties = FixedArray::cast(source->properties()); 4721 FixedArray* properties = FixedArray::cast(source->properties());
4568 // Update elements if necessary. 4722 // Update elements if necessary.
4569 if (elements->length() > 0) { 4723 if (elements->length() > 0) {
4570 Object* elem; 4724 Object* elem;
4571 { MaybeObject* maybe_elem; 4725 { MaybeObject* maybe_elem;
(...skipping 392 matching lines...) Expand 10 before | Expand all | Expand 10 after
4964 HeapObject::cast(result)->set_map_no_write_barrier(string_map()); 5118 HeapObject::cast(result)->set_map_no_write_barrier(string_map());
4965 String::cast(result)->set_length(length); 5119 String::cast(result)->set_length(length);
4966 String::cast(result)->set_hash_field(String::kEmptyHashField); 5120 String::cast(result)->set_hash_field(String::kEmptyHashField);
4967 ASSERT_EQ(size, HeapObject::cast(result)->Size()); 5121 ASSERT_EQ(size, HeapObject::cast(result)->Size());
4968 return result; 5122 return result;
4969 } 5123 }
4970 5124
4971 5125
4972 MaybeObject* Heap::AllocateJSArray( 5126 MaybeObject* Heap::AllocateJSArray(
4973 ElementsKind elements_kind, 5127 ElementsKind elements_kind,
4974 PretenureFlag pretenure, 5128 PretenureFlag pretenure) {
4975 AllocationSiteMode mode,
4976 Handle<Object>* allocation_site_info_payload) {
4977 Context* native_context = isolate()->context()->native_context(); 5129 Context* native_context = isolate()->context()->native_context();
4978 JSFunction* array_function = native_context->array_function(); 5130 JSFunction* array_function = native_context->array_function();
4979 Map* map = array_function->initial_map(); 5131 Map* map = array_function->initial_map();
4980 Object* maybe_map_array = native_context->js_array_maps(); 5132 Object* maybe_map_array = native_context->js_array_maps();
4981 if (!maybe_map_array->IsUndefined()) { 5133 if (!maybe_map_array->IsUndefined()) {
4982 Object* maybe_transitioned_map = 5134 Object* maybe_transitioned_map =
4983 FixedArray::cast(maybe_map_array)->get(elements_kind); 5135 FixedArray::cast(maybe_map_array)->get(elements_kind);
4984 if (!maybe_transitioned_map->IsUndefined()) { 5136 if (!maybe_transitioned_map->IsUndefined()) {
4985 map = Map::cast(maybe_transitioned_map); 5137 map = Map::cast(maybe_transitioned_map);
4986 } 5138 }
4987 } 5139 }
4988 5140
4989 return AllocateJSObjectFromMap(map, pretenure, mode, 5141 return AllocateJSObjectFromMap(map, pretenure);
4990 allocation_site_info_payload);
4991 } 5142 }
4992 5143
4993 5144
// Allocate a fresh, empty JSArray whose elements are of |elements_kind|,
// delegating to AllocateJSObjectFromMapWithAllocationSite so that the
// allocation site (|allocation_site_info_payload|) can be recorded with the
// new object — presumably in an AllocationSiteInfo placed behind it, as the
// clone path above does; confirm against AllocateJSObjectFromMapWithAllocationSite.
//
// NOTE(review): the map-selection logic below duplicates
// Heap::AllocateJSArray — consider extracting a shared helper.
MaybeObject* Heap::AllocateJSArrayWithAllocationSite(
    ElementsKind elements_kind,
    Handle<Object> allocation_site_info_payload) {
  Context* native_context = isolate()->context()->native_context();
  JSFunction* array_function = native_context->array_function();
  Map* map = array_function->initial_map();
  // Prefer a cached map already transitioned to |elements_kind|, if the
  // native context's per-kind map cache has been populated.
  Object* maybe_map_array = native_context->js_array_maps();
  if (!maybe_map_array->IsUndefined()) {
    Object* maybe_transitioned_map =
        FixedArray::cast(maybe_map_array)->get(elements_kind);
    if (!maybe_transitioned_map->IsUndefined()) {
      map = Map::cast(maybe_transitioned_map);
    }
  }
  return AllocateJSObjectFromMapWithAllocationSite(map,
      allocation_site_info_payload);
}
5162
5163
4994 MaybeObject* Heap::AllocateEmptyFixedArray() { 5164 MaybeObject* Heap::AllocateEmptyFixedArray() {
4995 int size = FixedArray::SizeFor(0); 5165 int size = FixedArray::SizeFor(0);
4996 Object* result; 5166 Object* result;
4997 { MaybeObject* maybe_result = 5167 { MaybeObject* maybe_result =
4998 AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE); 5168 AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE);
4999 if (!maybe_result->ToObject(&result)) return maybe_result; 5169 if (!maybe_result->ToObject(&result)) return maybe_result;
5000 } 5170 }
5001 // Initialize the object. 5171 // Initialize the object.
5002 reinterpret_cast<FixedArray*>(result)->set_map_no_write_barrier( 5172 reinterpret_cast<FixedArray*>(result)->set_map_no_write_barrier(
5003 fixed_array_map()); 5173 fixed_array_map());
(...skipping 2608 matching lines...) Expand 10 before | Expand all | Expand 10 after
7612 static_cast<int>(object_sizes_last_time_[index])); 7782 static_cast<int>(object_sizes_last_time_[index]));
7613 FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(ADJUST_LAST_TIME_OBJECT_COUNT) 7783 FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(ADJUST_LAST_TIME_OBJECT_COUNT)
7614 #undef ADJUST_LAST_TIME_OBJECT_COUNT 7784 #undef ADJUST_LAST_TIME_OBJECT_COUNT
7615 7785
7616 memcpy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); 7786 memcpy(object_counts_last_time_, object_counts_, sizeof(object_counts_));
7617 memcpy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); 7787 memcpy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_));
7618 ClearObjectStats(); 7788 ClearObjectStats();
7619 } 7789 }
7620 7790
7621 } } // namespace v8::internal 7791 } } // namespace v8::internal
OLDNEW
« no previous file with comments | « src/heap.h ('k') | src/heap-inl.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698