Chromium Code Reviews

Side by Side Diff: src/code-stub-assembler.cc

Issue 1903723003: [stubs]: Implement ArrayNoArgumentConstructor as a TF stub (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Review feedback (created 4 years, 7 months ago)
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/code-stub-assembler.h"
#include "src/code-factory.h"

namespace v8 {
namespace internal {

(...skipping 537 matching lines...)

  return Load(MachineType::AnyTagged(), object, offset);
}

Node* CodeStubAssembler::LoadFixedArrayElementConstantIndex(Node* object,
                                                            int index) {
  Node* offset = IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag +
                                index * kPointerSize);
  return Load(MachineType::AnyTagged(), object, offset);
}

Node* CodeStubAssembler::LoadNativeContext(Node* context) {
  return LoadFixedArrayElementConstantIndex(context,
                                            Context::NATIVE_CONTEXT_INDEX);
}

Node* CodeStubAssembler::StoreHeapNumberValue(Node* object, Node* value) {
  return StoreNoWriteBarrier(
      MachineRepresentation::kFloat64, object,
      IntPtrConstant(HeapNumber::kValueOffset - kHeapObjectTag), value);
}

Node* CodeStubAssembler::StoreObjectField(
    Node* object, int offset, Node* value) {
  return Store(MachineRepresentation::kTagged, object,
               IntPtrConstant(offset - kHeapObjectTag), value);
}

Node* CodeStubAssembler::StoreObjectFieldNoWriteBarrier(
    Node* object, int offset, Node* value, MachineRepresentation rep) {
  return StoreNoWriteBarrier(rep, object,
                             IntPtrConstant(offset - kHeapObjectTag), value);
}

Node* CodeStubAssembler::StoreMapNoWriteBarrier(Node* object, Node* map) {
  return StoreNoWriteBarrier(
      MachineRepresentation::kTagged, object,
      IntPtrConstant(HeapNumber::kMapOffset - kHeapObjectTag), map);
}

Node* CodeStubAssembler::StoreFixedArrayElementNoWriteBarrier(Node* object,
                                                              Node* index,
                                                              Node* value) {
  if (Is64()) {
    index = ChangeInt32ToInt64(index);
  }
  Node* offset =
      IntPtrAdd(WordShl(index, IntPtrConstant(kPointerSizeLog2)),
                IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag));
  return StoreNoWriteBarrier(MachineRepresentation::kTagged, object, offset,
                             value);
}

Node* CodeStubAssembler::StoreFixedArrayElementInt32Index(Node* object,
                                                          Node* index,
                                                          Node* value) {
  if (Is64()) {
    index = ChangeInt32ToInt64(index);
  }
  Node* offset =
      IntPtrAdd(WordShl(index, IntPtrConstant(kPointerSizeLog2)),
                IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag));
  return Store(MachineRepresentation::kTagged, object, offset, value);
}

Node* CodeStubAssembler::StoreFixedDoubleArrayElementInt32Index(Node* object,
                                                                Node* index,
                                                                Node* value) {
  if (Is64()) {
    index = ChangeInt32ToInt64(index);

    Benedikt Meurer 2016/05/03 03:57:49: How about adding a ChangeInt32ToIntPtr helper that …
    danno 2016/05/03 07:10:48: Done.

  }
  Node* offset =
      IntPtrAdd(WordShl(index, IntPtrConstant(kPointerSizeLog2)),
                IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag));
  return StoreNoWriteBarrier(MachineRepresentation::kFloat64, object, offset,
                             value);
}
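
The comment thread above suggests folding the repeated Is64() check into a dedicated helper. A minimal sketch of what such a ChangeInt32ToIntPtr helper could look like, assuming it simply wraps the conversion pattern already used by the store helpers in this file (per the "Done." reply, a helper along these lines was added in a later patch set):

// Sketch only: the name ChangeInt32ToIntPtr comes from the review comment;
// the body just centralizes the Is64()/ChangeInt32ToInt64 pattern used above.
Node* CodeStubAssembler::ChangeInt32ToIntPtr(Node* value) {
  if (Is64()) {
    value = ChangeInt32ToInt64(value);
  }
  return value;
}

Callers such as StoreFixedArrayElementInt32Index could then replace their inline Is64() blocks with a single index = ChangeInt32ToIntPtr(index).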

Node* CodeStubAssembler::StoreFixedArrayElementInt32Index(Node* object,
                                                          int index,
                                                          Node* value) {
  Node* offset = IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag +
                                index * kPointerSize);
  return Store(MachineRepresentation::kTagged, object, offset, value);
}

Node* CodeStubAssembler::StoreFixedArrayElementNoWriteBarrier(Node* object,
                                                              int index,
                                                              Node* value) {
  Node* offset = IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag +
                                index * kPointerSize);
  return StoreNoWriteBarrier(MachineRepresentation::kTagged, object, offset,
                             value);
}

Node* CodeStubAssembler::StoreFixedDoubleArrayElementInt32Index(Node* object,
                                                                int index,
                                                                Node* value) {
  Node* offset = IntPtrConstant(FixedDoubleArray::kHeaderSize - kHeapObjectTag +
                                index * kDoubleSize);
  return StoreNoWriteBarrier(MachineRepresentation::kFloat64, object, offset,
                             value);
}

Node* CodeStubAssembler::AllocateHeapNumber() {
  Node* result = Allocate(HeapNumber::kSize, kNone);
  StoreMapNoWriteBarrier(result, HeapNumberMapConstant());
  return result;
}

Node* CodeStubAssembler::AllocateHeapNumberWithValue(Node* value) {
  Node* result = AllocateHeapNumber();
  StoreHeapNumberValue(result, value);
  return result;

(...skipping 12 matching lines...)

Node* CodeStubAssembler::AllocateSeqTwoByteString(int length) {
  Node* result = Allocate(SeqTwoByteString::SizeFor(length));
  StoreMapNoWriteBarrier(result, LoadRoot(Heap::kStringMapRootIndex));
  StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kLengthOffset,
                                 SmiConstant(Smi::FromInt(length)));
  StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldSlot,
                                 IntPtrConstant(String::kEmptyHashField));
  return result;
}

Node* CodeStubAssembler::AllocateJSArray(ElementsKind kind,
                                         Node* native_context, int capacity,
                                         int length,
                                         compiler::Node* allocation_site) {
  bool is_double = IsFastDoubleElementsKind(kind);
  int element_size = is_double ? kDoubleSize : kPointerSize;
  int total_size =
      JSArray::kSize + FixedArray::kHeaderSize + element_size * capacity;
  int elements_offset = JSArray::kSize;

  if (allocation_site != nullptr) {
    total_size += AllocationMemento::kSize;
    elements_offset += AllocationMemento::kSize;
  }

  // Allocate both array and elements object, and initialize the JSArray.
  Heap* heap = isolate()->heap();
  Node* array = Allocate(total_size);
  Node* array_map = LoadFixedArrayElementConstantIndex(
      native_context, Context::ArrayMapIndex(kind));
  StoreMapNoWriteBarrier(array, array_map);
  Node* empty_properties =
      HeapConstant(Handle<HeapObject>(heap->empty_fixed_array()));
  StoreObjectFieldNoWriteBarrier(array, JSArray::kPropertiesOffset,
                                 empty_properties);
  StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset,
                                 SmiConstant(Smi::FromInt(length)));

  if (allocation_site != nullptr) {
    CreateAllocationMemento(array, JSArray::kSize, allocation_site);
  }

  // Setup elements object.
  Node* elements = InnerAllocate(array, elements_offset);
  StoreObjectFieldNoWriteBarrier(array, JSArray::kElementsOffset, elements);
  Handle<Map> elements_map(is_double ? heap->fixed_double_array_map()
                                     : heap->fixed_array_map());
  StoreMapNoWriteBarrier(elements, HeapConstant(elements_map));
  StoreObjectFieldNoWriteBarrier(elements, FixedArray::kLengthOffset,
                                 SmiConstant(Smi::FromInt(capacity)));

  Node* double_hole = Float64Constant(bit_cast<double>(kHoleNanInt64));
  Node* hole = HeapConstant(Handle<HeapObject>(heap->the_hole_value()));
  if (capacity <= kElementLoopUnrollThreshold) {
    for (int i = 0; i < capacity; ++i) {
      if (is_double) {
        StoreFixedDoubleArrayElementInt32Index(elements, i, double_hole);
      } else {
        StoreFixedArrayElementNoWriteBarrier(elements, i, hole);
      }
    }
  } else {
    // TODO(danno): Add a loop for initialization
    UNIMPLEMENTED();
  }

  return array;
}
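
For capacities above kElementLoopUnrollThreshold, element initialization is left as UNIMPLEMENTED() with a TODO. A rough sketch of how such a loop could be written with the Variable/Label/Goto machinery used elsewhere in this file; Branch, Int32Add, and Int32LessThan are assumed to be available on the assembler, and this is only an illustration, not the patch's eventual implementation:

// Hypothetical sketch for the UNIMPLEMENTED() branch in AllocateJSArray,
// i.e. capacity > kElementLoopUnrollThreshold (so capacity >= 1).
Variable var_index(this, MachineRepresentation::kWord32);
Label init_loop(this, &var_index), init_done(this);
var_index.Bind(Int32Constant(0));
Goto(&init_loop);
Bind(&init_loop);
{
  Node* index = var_index.value();
  if (is_double) {
    StoreFixedDoubleArrayElementInt32Index(elements, index, double_hole);
  } else {
    StoreFixedArrayElementNoWriteBarrier(elements, index, hole);
  }
  var_index.Bind(Int32Add(index, Int32Constant(1)));
  // Keep looping while the incremented index is still below capacity.
  Branch(Int32LessThan(var_index.value(), Int32Constant(capacity)),
         &init_loop, &init_done);
}
Bind(&init_done);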

void CodeStubAssembler::CreateAllocationMemento(
    compiler::Node* base_allocation, int base_allocation_size,
    compiler::Node* allocation_site) {
  StoreObjectFieldNoWriteBarrier(
      base_allocation, AllocationMemento::kMapOffset + base_allocation_size,
      HeapConstant(Handle<Map>(isolate()->heap()->allocation_memento_map())));
  StoreObjectFieldNoWriteBarrier(
      base_allocation,
      AllocationMemento::kAllocationSiteOffset + base_allocation_size,
      allocation_site);
  if (FLAG_allocation_site_pretenuring) {
    Node* count = LoadObjectField(allocation_site,
                                  AllocationSite::kPretenureCreateCountOffset);
    Node* incremented_count = IntPtrAdd(count, SmiConstant(Smi::FromInt(1)));
    StoreObjectFieldNoWriteBarrier(allocation_site,
                                   AllocationSite::kPretenureCreateCountOffset,
                                   incremented_count);
  }
}

Node* CodeStubAssembler::TruncateTaggedToFloat64(Node* context, Node* value) {
  // We might need to loop once due to ToNumber conversion.
  Variable var_value(this, MachineRepresentation::kTagged),
      var_result(this, MachineRepresentation::kFloat64);
  Label loop(this, &var_value), done_loop(this, &var_result);
  var_value.Bind(value);
  Goto(&loop);
  Bind(&loop);
  {
    // Load the current {value}.

(...skipping 497 matching lines...)

}

Node* CodeStubAssembler::BitFieldDecode(Node* word32, uint32_t shift,
                                        uint32_t mask) {
  return Word32Shr(Word32And(word32, Int32Constant(mask)),
                   Int32Constant(shift));
}

}  // namespace internal
}  // namespace v8
