| OLD | NEW |
| 1 // Copyright 2016 the V8 project authors. All rights reserved. | 1 // Copyright 2016 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/builtins/builtins-constructor-gen.h" | 5 #include "src/builtins/builtins-constructor-gen.h" |
| 6 | 6 |
| 7 #include "src/ast/ast.h" | 7 #include "src/ast/ast.h" |
| 8 #include "src/builtins/builtins-constructor.h" | 8 #include "src/builtins/builtins-constructor.h" |
| 9 #include "src/builtins/builtins-utils-gen.h" | 9 #include "src/builtins/builtins-utils-gen.h" |
| 10 #include "src/builtins/builtins.h" | 10 #include "src/builtins/builtins.h" |
| (...skipping 599 matching lines...) |
| 610 TF_BUILTIN(FastCloneShallowArrayTrack, ConstructorBuiltinsAssembler) { | 610 TF_BUILTIN(FastCloneShallowArrayTrack, ConstructorBuiltinsAssembler) { |
| 611 CreateFastCloneShallowArrayBuiltin(TRACK_ALLOCATION_SITE); | 611 CreateFastCloneShallowArrayBuiltin(TRACK_ALLOCATION_SITE); |
| 612 } | 612 } |
| 613 | 613 |
| 614 TF_BUILTIN(FastCloneShallowArrayDontTrack, ConstructorBuiltinsAssembler) { | 614 TF_BUILTIN(FastCloneShallowArrayDontTrack, ConstructorBuiltinsAssembler) { |
| 615 CreateFastCloneShallowArrayBuiltin(DONT_TRACK_ALLOCATION_SITE); | 615 CreateFastCloneShallowArrayBuiltin(DONT_TRACK_ALLOCATION_SITE); |
| 616 } | 616 } |
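
Note: the two builtins above are thin wrappers around one shared emitter, specialized only by the allocation-site tracking mode. A minimal standalone C++ sketch of that pattern (names and bodies here are illustrative, not V8's):

#include <cstdio>

// Hypothetical stand-in for V8's AllocationSiteMode values.
enum AllocationSiteMode { TRACK_ALLOCATION_SITE, DONT_TRACK_ALLOCATION_SITE };

// One shared emitter parameterized by the mode, mirroring how both
// TF_BUILTIN bodies reuse CreateFastCloneShallowArrayBuiltin.
void CreateFastCloneShallowArrayBuiltin(AllocationSiteMode mode) {
  if (mode == TRACK_ALLOCATION_SITE) {
    std::printf("emit clone code that also writes an allocation memento\n");
  } else {
    std::printf("emit clone code only\n");
  }
}

int main() {
  CreateFastCloneShallowArrayBuiltin(TRACK_ALLOCATION_SITE);
  CreateFastCloneShallowArrayBuiltin(DONT_TRACK_ALLOCATION_SITE);
}
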
| 617 | 617 |
| 618 Node* ConstructorBuiltinsAssembler::EmitFastCloneShallowObject( | 618 Node* ConstructorBuiltinsAssembler::EmitFastCloneShallowObject( |
| 619 Label* call_runtime, Node* closure, Node* literals_index, | 619 Label* call_runtime, Node* closure, Node* literals_index, |
| 620 Node* properties_count) { | 620 Node* fast_properties_count) { |
| 621 Node* cell = LoadObjectField(closure, JSFunction::kFeedbackVectorOffset); | 621 Node* cell = LoadObjectField(closure, JSFunction::kFeedbackVectorOffset); |
| 622 Node* feedback_vector = LoadObjectField(cell, Cell::kValueOffset); | 622 Node* feedback_vector = LoadObjectField(cell, Cell::kValueOffset); |
| 623 Node* allocation_site = LoadFixedArrayElement( | 623 Node* allocation_site = LoadFixedArrayElement( |
| 624 feedback_vector, literals_index, 0, CodeStubAssembler::SMI_PARAMETERS); | 624 feedback_vector, literals_index, 0, CodeStubAssembler::SMI_PARAMETERS); |
| 625 GotoIf(IsUndefined(allocation_site), call_runtime); | 625 GotoIf(IsUndefined(allocation_site), call_runtime); |
| 626 | 626 |
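
Note: lines 621-625 walk the chain closure -> feedback-vector cell -> feedback vector -> allocation site at the literal's slot, bailing to the runtime when the slot is still undefined. A plain-C++ model of that lookup, with all types hypothetical:

#include <cstddef>
#include <vector>

// Hypothetical stand-ins for the heap objects involved in the lookup.
struct AllocationSite {};
struct FeedbackVector { std::vector<AllocationSite*> slots; };
struct Cell { FeedbackVector* value; };
struct JSFunction { Cell* feedback_vector_cell; };

// Mirrors the loads above; a null result corresponds to the "undefined"
// slot that triggers GotoIf(..., call_runtime).
AllocationSite* LookupAllocationSite(JSFunction* closure, std::size_t literals_index) {
  FeedbackVector* vector = closure->feedback_vector_cell->value;
  return vector->slots[literals_index];
}

int main() {
  AllocationSite site;
  FeedbackVector vector{{&site}};
  Cell cell{&vector};
  JSFunction closure{&cell};
  return LookupAllocationSite(&closure, 0) == &site ? 0 : 1;
}
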
| 627 Node* boilerplate = |
| 628 LoadObjectField(allocation_site, AllocationSite::kTransitionInfoOffset); |
| 629 Node* boilerplate_map = LoadMap(boilerplate); |
| 630 Variable properties(this, MachineRepresentation::kTagged, |
| 631 EmptyFixedArrayConstant()); |
| 632 // TODO(cbruni): directly use the property count from the boilerplate map. |
| 633 Variable in_object_property_count(this, MachineType::PointerRepresentation(), |
| 634 fast_properties_count); |
| 635 // Directly copy over the property store for dict-mode boilerplates. |
| 636 Label dict_properties(this), allocate_object(this); |
| 637 Branch(IsDictionaryMap(boilerplate_map), &dict_properties, &allocate_object); |
| 638 Bind(&dict_properties); |
| 639 { |
| 640 properties.Bind( |
| 641 CopyNameDictionary(LoadProperties(boilerplate), call_runtime)); |
| 642 in_object_property_count.Bind(IntPtrConstant(0)); |
| 643 Goto(&allocate_object); |
| 644 } |
| 645 Bind(&allocate_object); |
| 646 |
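
Note: the new branch splits on the boilerplate map's mode. Dict-mode boilerplates get their out-of-line property dictionary copied up front (leaving zero in-object properties to copy); fast-mode boilerplates keep fast_properties_count, and their properties are picked up by the word-wise copy below. A sketch of that decision in ordinary C++, with illustrative types:

#include <cstdint>
#include <memory>
#include <string>
#include <unordered_map>

// Illustrative stand-ins; V8's real NameDictionary is a flat hash table, and
// CopyNameDictionary can additionally bail out to the runtime (omitted here).
using NameDictionary = std::unordered_map<std::string, std::intptr_t>;

struct Boilerplate {
  bool has_dictionary_map;                // IsDictionaryMap(boilerplate_map)
  std::shared_ptr<NameDictionary> props;  // LoadProperties(boilerplate)
};

struct CloneState {
  std::shared_ptr<NameDictionary> properties;  // starts out "empty"
  std::intptr_t in_object_property_count;      // starts at fast_properties_count
};

CloneState PrepareProperties(const Boilerplate& b, std::intptr_t fast_count) {
  CloneState state{std::make_shared<NameDictionary>(), fast_count};
  if (b.has_dictionary_map) {
    // Dict-mode: deep-copy the property store; nothing lives in-object.
    state.properties = std::make_shared<NameDictionary>(*b.props);
    state.in_object_property_count = 0;
  }
  return state;
}

int main() {
  Boilerplate fast_mode{false, nullptr};
  return PrepareProperties(fast_mode, 3).in_object_property_count == 3 ? 0 : 1;
}
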
| 627 // Calculate the object and allocation size based on the properties count. | 647 // Calculate the object and allocation size based on the properties count. |
| 628 Node* object_size = IntPtrAdd(WordShl(properties_count, kPointerSizeLog2), | 648 Node* object_size = |
| 629 IntPtrConstant(JSObject::kHeaderSize)); | 649 IntPtrAdd(WordShl(in_object_property_count.value(), kPointerSizeLog2), |
| 650 IntPtrConstant(JSObject::kHeaderSize)); |
| 630 Node* allocation_size = object_size; | 651 Node* allocation_size = object_size; |
| 631 if (FLAG_allocation_site_pretenuring) { | 652 if (FLAG_allocation_site_pretenuring) { |
| 632 allocation_size = | 653 allocation_size = |
| 633 IntPtrAdd(object_size, IntPtrConstant(AllocationMemento::kSize)); | 654 IntPtrAdd(object_size, IntPtrConstant(AllocationMemento::kSize)); |
| 634 } | 655 } |
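
Note: the arithmetic here is object_size = JSObject header + one pointer-sized word per in-object property, plus one AllocationMemento when pretenuring is enabled. A worked standalone version, assuming a 64-bit layout (the real constants come from V8's headers):

#include <cstdint>
#include <cstdio>

// Assumed 64-bit layout; the real constants come from V8's headers.
constexpr std::intptr_t kPointerSizeLog2 = 3;            // 8-byte words
constexpr std::intptr_t kJSObjectHeaderSize = 3 * 8;     // map, properties, elements
constexpr std::intptr_t kAllocationMementoSize = 2 * 8;  // map, allocation site

std::intptr_t ObjectSize(std::intptr_t in_object_property_count) {
  return (in_object_property_count << kPointerSizeLog2) + kJSObjectHeaderSize;
}

std::intptr_t AllocationSize(std::intptr_t object_size, bool pretenuring) {
  return pretenuring ? object_size + kAllocationMementoSize : object_size;
}

int main() {
  std::intptr_t object_size = ObjectSize(2);  // e.g. a two-property literal
  std::printf("object: %ld bytes, with memento: %ld bytes\n",
              (long)object_size, (long)AllocationSize(object_size, true));
}
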
| 635 Node* boilerplate = | 656 |
| 636 LoadObjectField(allocation_site, AllocationSite::kTransitionInfoOffset); | |
| 637 Node* boilerplate_map = LoadMap(boilerplate); | |
| 638 Node* instance_size = LoadMapInstanceSize(boilerplate_map); | 657 Node* instance_size = LoadMapInstanceSize(boilerplate_map); |
| 639 Node* size_in_words = WordShr(object_size, kPointerSizeLog2); | 658 Node* size_in_words = WordShr(object_size, kPointerSizeLog2); |
| 640 GotoIfNot(WordEqual(instance_size, size_in_words), call_runtime); | 659 GotoIfNot(WordEqual(instance_size, size_in_words), call_runtime); |
| 641 | 660 |
| 642 Node* copy = AllocateInNewSpace(allocation_size); | 661 Node* copy = AllocateInNewSpace(allocation_size); |
| 643 | |
| 644 // Copy boilerplate elements. | 662 // Copy boilerplate elements. |
| 645 VARIABLE(offset, MachineType::PointerRepresentation()); | 663 VARIABLE(offset, MachineType::PointerRepresentation()); |
| 646 offset.Bind(IntPtrConstant(-kHeapObjectTag)); | 664 offset.Bind(IntPtrConstant(-kHeapObjectTag)); |
| 647 Node* end_offset = IntPtrAdd(object_size, offset.value()); | 665 Node* end_offset = IntPtrAdd(object_size, offset.value()); |
| 648 Label loop_body(this, &offset), loop_check(this, &offset); | 666 Label loop_body(this, &offset), loop_check(this, &offset); |
| 649 // We should always have an object size greater than zero. | 667 // We should always have an object size greater than zero. |
| 650 Goto(&loop_body); | 668 Goto(&loop_body); |
| 651 BIND(&loop_body); | 669 BIND(&loop_body); |
| 652 { | 670 { |
| 653 // The Allocate above guarantees that the copy lies in new space. This | 671 // The Allocate above guarantees that the copy lies in new space. This |
| 654 // allows us to skip write barriers. This is necessary since we may also be | 672 // allows us to skip write barriers. This is necessary since we may also be |
| 655 // copying unboxed doubles. | 673 // copying unboxed doubles. |
| 656 Node* field = Load(MachineType::IntPtr(), boilerplate, offset.value()); | 674 Node* field = Load(MachineType::IntPtr(), boilerplate, offset.value()); |
| 657 StoreNoWriteBarrier(MachineType::PointerRepresentation(), copy, | 675 StoreNoWriteBarrier(MachineType::PointerRepresentation(), copy, |
| 658 offset.value(), field); | 676 offset.value(), field); |
| 659 Goto(&loop_check); | 677 Goto(&loop_check); |
| 660 } | 678 } |
| 661 BIND(&loop_check); | 679 BIND(&loop_check); |
| 662 { | 680 { |
| 663 offset.Bind(IntPtrAdd(offset.value(), IntPtrConstant(kPointerSize))); | 681 offset.Bind(IntPtrAdd(offset.value(), IntPtrConstant(kPointerSize))); |
| 664 GotoIfNot(IntPtrGreaterThanOrEqual(offset.value(), end_offset), &loop_body); | 682 GotoIfNot(IntPtrGreaterThanOrEqual(offset.value(), end_offset), &loop_body); |
| 665 } | 683 } |
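
Note: this loop copies the boilerplate word by word from offset 0 (the initial -kHeapObjectTag cancels the pointer tag) up to object_size, and deliberately skips write barriers: the copy is freshly allocated in new space, and some words may be raw unboxed doubles rather than tagged pointers. The same loop shape in plain C++, without tagging:

#include <cstddef>
#include <cstdint>

// Word-wise copy of one heap object into freshly allocated space, mirroring
// the Load/StoreNoWriteBarrier loop: no per-field type checks, no barriers.
void CopyObjectWords(void* copy, const void* boilerplate, std::size_t object_size) {
  auto* dst = static_cast<std::intptr_t*>(copy);
  auto* src = static_cast<const std::intptr_t*>(boilerplate);
  // object_size is a whole number of words and greater than zero, matching
  // the "object size greater than zero" invariant noted in the diff.
  for (std::size_t i = 0; i < object_size / sizeof(std::intptr_t); ++i) {
    dst[i] = src[i];  // raw word copy: safe for tagged and unboxed fields alike
  }
}

int main() {
  std::intptr_t src[3] = {1, 2, 3}, dst[3] = {0, 0, 0};
  CopyObjectWords(dst, src, sizeof src);
  return dst[2] == 3 ? 0 : 1;
}
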
| 666 | 684 |
| 685 StoreObjectFieldNoWriteBarrier(copy, JSObject::kPropertiesOffset, |
| 686 properties.value()); |
| 687 |
| 667 if (FLAG_allocation_site_pretenuring) { | 688 if (FLAG_allocation_site_pretenuring) { |
| 668 Node* memento = InnerAllocate(copy, object_size); | 689 Node* memento = InnerAllocate(copy, object_size); |
| 669 StoreMapNoWriteBarrier(memento, Heap::kAllocationMementoMapRootIndex); | 690 StoreMapNoWriteBarrier(memento, Heap::kAllocationMementoMapRootIndex); |
| 670 StoreObjectFieldNoWriteBarrier( | 691 StoreObjectFieldNoWriteBarrier( |
| 671 memento, AllocationMemento::kAllocationSiteOffset, allocation_site); | 692 memento, AllocationMemento::kAllocationSiteOffset, allocation_site); |
| 672 Node* memento_create_count = LoadObjectField( | 693 Node* memento_create_count = LoadObjectField( |
| 673 allocation_site, AllocationSite::kPretenureCreateCountOffset); | 694 allocation_site, AllocationSite::kPretenureCreateCountOffset); |
| 674 memento_create_count = | 695 memento_create_count = |
| 675 SmiAdd(memento_create_count, SmiConstant(Smi::FromInt(1))); | 696 SmiAdd(memento_create_count, SmiConstant(Smi::FromInt(1))); |
| 676 StoreObjectFieldNoWriteBarrier(allocation_site, | 697 StoreObjectFieldNoWriteBarrier(allocation_site, |
| (...skipping 38 matching lines...) |
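
Note on the pretenuring block just above the fold: the memento is inner-allocated directly behind the object inside the same new-space allocation, pointed at the allocation site, and the site's creation counter is bumped by one. Roughly, in illustrative C++:

#include <cstdint>

// Illustrative types only; the real layout lives in V8's heap objects.
struct AllocationSite { std::intptr_t pretenure_create_count = 0; };
struct AllocationMemento { AllocationSite* site; };

// The memento is "inner-allocated" directly behind the object inside the
// same new-space allocation, then the site's creation counter is bumped.
void InstallMemento(char* copy, std::intptr_t object_size, AllocationSite* site) {
  auto* memento = reinterpret_cast<AllocationMemento*>(copy + object_size);
  memento->site = site;               // the kAllocationSiteOffset store
  site->pretenure_create_count += 1;  // the SmiAdd(..., 1) update
}

int main() {
  AllocationSite site;
  alignas(AllocationMemento) char buffer[48] = {};
  InstallMemento(buffer, 32, &site);  // pretend the object occupies 32 bytes
  return site.pretenure_create_count == 1 ? 0 : 1;
}
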
| 715 SHALLOW_OBJECT_BUILTIN(0); | 736 SHALLOW_OBJECT_BUILTIN(0); |
| 716 SHALLOW_OBJECT_BUILTIN(1); | 737 SHALLOW_OBJECT_BUILTIN(1); |
| 717 SHALLOW_OBJECT_BUILTIN(2); | 738 SHALLOW_OBJECT_BUILTIN(2); |
| 718 SHALLOW_OBJECT_BUILTIN(3); | 739 SHALLOW_OBJECT_BUILTIN(3); |
| 719 SHALLOW_OBJECT_BUILTIN(4); | 740 SHALLOW_OBJECT_BUILTIN(4); |
| 720 SHALLOW_OBJECT_BUILTIN(5); | 741 SHALLOW_OBJECT_BUILTIN(5); |
| 721 SHALLOW_OBJECT_BUILTIN(6); | 742 SHALLOW_OBJECT_BUILTIN(6); |
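
Note: the SHALLOW_OBJECT_BUILTIN definition itself sits in the folded region, so only its call sites are visible here; presumably it stamps out one specialized FastCloneShallowObject builtin per supported in-object property count (0 through 6). The general preprocessor pattern, reconstructed with hypothetical names and bodies:

#include <cstdio>

// Hypothetical reconstruction of the stamping pattern only; the macro's real
// body sits in the elided part of this diff.
#define SHALLOW_OBJECT_BUILTIN(PropertyCount)                           \
  void FastCloneShallowObject##PropertyCount() {                        \
    std::printf("clone with %d in-object properties\n", PropertyCount); \
  }

SHALLOW_OBJECT_BUILTIN(0)
SHALLOW_OBJECT_BUILTIN(1)
SHALLOW_OBJECT_BUILTIN(2)

int main() {
  FastCloneShallowObject0();
  FastCloneShallowObject1();
  FastCloneShallowObject2();
}
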
| 722 | 743 |
| 723 } // namespace internal | 744 } // namespace internal |
| 724 } // namespace v8 | 745 } // namespace v8 |