OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 4376 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4387 global->set_map(new_map); | 4387 global->set_map(new_map); |
4388 global->set_properties(dictionary); | 4388 global->set_properties(dictionary); |
4389 | 4389 |
4390 // Make sure result is a global object with properties in dictionary. | 4390 // Make sure result is a global object with properties in dictionary. |
4391 ASSERT(global->IsGlobalObject()); | 4391 ASSERT(global->IsGlobalObject()); |
4392 ASSERT(!global->HasFastProperties()); | 4392 ASSERT(!global->HasFastProperties()); |
4393 return global; | 4393 return global; |
4394 } | 4394 } |
4395 | 4395 |
4396 | 4396 |
4397 MaybeObject* Heap::CopyJSObject(JSObject* source) { | 4397 MaybeObject* Heap::CopyJSObject(JSObject* source, |
| 4398 AllocationSiteMode mode) { |
4398 // Never used to copy functions. If functions need to be copied we | 4399 // Never used to copy functions. If functions need to be copied we |
4399 // have to be careful to clear the literals array. | 4400 // have to be careful to clear the literals array. |
4400 SLOW_ASSERT(!source->IsJSFunction()); | 4401 SLOW_ASSERT(!source->IsJSFunction()); |
4401 | 4402 |
4402 // Make the clone. | 4403 // Make the clone. |
4403 Map* map = source->map(); | 4404 Map* map = source->map(); |
4404 int object_size = map->instance_size(); | 4405 int object_size = map->instance_size(); |
4405 Object* clone; | 4406 Object* clone; |
4406 | 4407 |
| 4408 bool track_origin = mode == TRACK_ALLOCATION_SITE && |
| 4409 map->CanTrackAllocationSite(); |
| 4410 |
4407 WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER; | 4411 WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER; |
4408 | 4412 |
4409 // If we're forced to always allocate, we use the general allocation | 4413 // If we're forced to always allocate, we use the general allocation |
4410 // functions which may leave us with an object in old space. | 4414 // functions which may leave us with an object in old space. |
| 4415 int adjusted_object_size = object_size; |
4411 if (always_allocate()) { | 4416 if (always_allocate()) { |
 | 4417 // We'll only track origin if we are certain to allocate in new space. |
| 4418 if (track_origin) { |
| 4419 const int kMinFreeNewSpaceAfterGC = InitialSemiSpaceSize() * 3/4; |
| 4420 if ((object_size + AllocationSiteInfo::kSize) < kMinFreeNewSpaceAfterGC) { |
| 4421 adjusted_object_size += AllocationSiteInfo::kSize; |
| 4422 } |
| 4423 } |
| 4424 |
4412 { MaybeObject* maybe_clone = | 4425 { MaybeObject* maybe_clone = |
4413 AllocateRaw(object_size, NEW_SPACE, OLD_POINTER_SPACE); | 4426 AllocateRaw(adjusted_object_size, NEW_SPACE, OLD_POINTER_SPACE); |
4414 if (!maybe_clone->ToObject(&clone)) return maybe_clone; | 4427 if (!maybe_clone->ToObject(&clone)) return maybe_clone; |
4415 } | 4428 } |
4416 Address clone_address = HeapObject::cast(clone)->address(); | 4429 Address clone_address = HeapObject::cast(clone)->address(); |
4417 CopyBlock(clone_address, | 4430 CopyBlock(clone_address, |
4418 source->address(), | 4431 source->address(), |
4419 object_size); | 4432 object_size); |
4420 // Update write barrier for all fields that lie beyond the header. | 4433 // Update write barrier for all fields that lie beyond the header. |
4421 RecordWrites(clone_address, | 4434 RecordWrites(clone_address, |
4422 JSObject::kHeaderSize, | 4435 JSObject::kHeaderSize, |
4423 (object_size - JSObject::kHeaderSize) / kPointerSize); | 4436 (object_size - JSObject::kHeaderSize) / kPointerSize); |
4424 } else { | 4437 } else { |
4425 wb_mode = SKIP_WRITE_BARRIER; | 4438 wb_mode = SKIP_WRITE_BARRIER; |
4426 { MaybeObject* maybe_clone = new_space_.AllocateRaw(object_size); | 4439 if (track_origin) { |
| 4440 adjusted_object_size += AllocationSiteInfo::kSize; |
| 4441 } |
| 4442 |
| 4443 { MaybeObject* maybe_clone = new_space_.AllocateRaw(adjusted_object_size); |
4427 if (!maybe_clone->ToObject(&clone)) return maybe_clone; | 4444 if (!maybe_clone->ToObject(&clone)) return maybe_clone; |
4428 } | 4445 } |
4429 SLOW_ASSERT(InNewSpace(clone)); | 4446 SLOW_ASSERT(InNewSpace(clone)); |
4430 // Since we know the clone is allocated in new space, we can copy | 4447 // Since we know the clone is allocated in new space, we can copy |
4431 // the contents without worrying about updating the write barrier. | 4448 // the contents without worrying about updating the write barrier. |
4432 CopyBlock(HeapObject::cast(clone)->address(), | 4449 CopyBlock(HeapObject::cast(clone)->address(), |
4433 source->address(), | 4450 source->address(), |
4434 object_size); | 4451 object_size); |
4435 } | 4452 } |
4436 | 4453 |
| 4454 if (adjusted_object_size > object_size) { |
| 4455 AllocationSiteInfo* alloc_info = reinterpret_cast<AllocationSiteInfo*>( |
| 4456 reinterpret_cast<Address>(clone) + object_size); |
| 4457 alloc_info->set_map(allocation_site_info_map()); |
| 4458 alloc_info->set_payload(source); |
| 4459 } |
| 4460 |
4437 SLOW_ASSERT( | 4461 SLOW_ASSERT( |
4438 JSObject::cast(clone)->GetElementsKind() == source->GetElementsKind()); | 4462 JSObject::cast(clone)->GetElementsKind() == source->GetElementsKind()); |
4439 FixedArrayBase* elements = FixedArrayBase::cast(source->elements()); | 4463 FixedArrayBase* elements = FixedArrayBase::cast(source->elements()); |
4440 FixedArray* properties = FixedArray::cast(source->properties()); | 4464 FixedArray* properties = FixedArray::cast(source->properties()); |
4441 // Update elements if necessary. | 4465 // Update elements if necessary. |
4442 if (elements->length() > 0) { | 4466 if (elements->length() > 0) { |
4443 Object* elem; | 4467 Object* elem; |
4444 { MaybeObject* maybe_elem; | 4468 { MaybeObject* maybe_elem; |
4445 if (elements->map() == fixed_cow_array_map()) { | 4469 if (elements->map() == fixed_cow_array_map()) { |
4446 maybe_elem = FixedArray::cast(elements); | 4470 maybe_elem = FixedArray::cast(elements); |
(...skipping 3063 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
7510 static_cast<int>(object_sizes_last_time_[index])); | 7534 static_cast<int>(object_sizes_last_time_[index])); |
7511 FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(ADJUST_LAST_TIME_OBJECT_COUNT) | 7535 FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(ADJUST_LAST_TIME_OBJECT_COUNT) |
7512 #undef ADJUST_LAST_TIME_OBJECT_COUNT | 7536 #undef ADJUST_LAST_TIME_OBJECT_COUNT |
7513 | 7537 |
7514 memcpy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); | 7538 memcpy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); |
7515 memcpy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); | 7539 memcpy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); |
7516 ClearObjectStats(); | 7540 ClearObjectStats(); |
7517 } | 7541 } |
7518 | 7542 |
7519 } } // namespace v8::internal | 7543 } } // namespace v8::internal |
OLD | NEW |