Chromium Code Reviews

Side by Side Diff: src/code-stub-assembler.cc

Issue 2146293003: [builtins] implement Array.prototype.includes in TurboFan (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: try lots of tight loops Created 4 years, 5 months ago
1 // Copyright 2016 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/code-stub-assembler.h"
6 #include "src/code-factory.h"
7 #include "src/frames-inl.h"
8 #include "src/frames.h"
9 #include "src/ic/stub-cache.h"
10
(...skipping 453 matching lines...)
464
465 Node* CodeStubAssembler::WordIsSmi(Node* a) {
466 return WordEqual(WordAnd(a, IntPtrConstant(kSmiTagMask)), IntPtrConstant(0));
467 }
468
469 Node* CodeStubAssembler::WordIsPositiveSmi(Node* a) {
470 return WordEqual(WordAnd(a, IntPtrConstant(kSmiTagMask | kSmiSignMask)),
471 IntPtrConstant(0));
472 }
473
474 void CodeStubAssembler::BranchIfSameValueZero(Node* a, Node* b, Node* context,
475 Label* if_true, Label* if_false) {
476 Node* number_map = HeapNumberMapConstant();
477 Label a_isnumber(this), a_isnotnumber(this), b_isnumber(this), a_isnan(this),
478 float_not_equal(this);
479 // If register A and register B are identical, goto `if_true`
480 GotoIf(WordEqual(a, b), if_true);
482 // If either register A or B is a Smi, goto `if_false`
482 GotoIf(Word32Or(WordIsSmi(a), WordIsSmi(b)), if_false);
483 // GotoIf(WordIsSmi(b), if_false);
484
485 Node* a_map = LoadMap(a);
486 Node* b_map = LoadMap(b);
487 Branch(WordEqual(a_map, number_map), &a_isnumber, &a_isnotnumber);
488
489 // If both registers A and B are HeapNumbers, return true if they are equal,
490 // or if both are NaN.
491 Bind(&a_isnumber);
492 {
493 Branch(WordEqual(b_map, number_map), &b_isnumber, if_false);
494
495 Bind(&b_isnumber);
496 Node* a_value = LoadHeapNumberValue(a);
497 Node* b_value = LoadHeapNumberValue(b);
498 BranchIfFloat64Equal(a_value, b_value, if_true, &float_not_equal);
499
500 Bind(&float_not_equal);
501 BranchIfFloat64IsNaN(a_value, &a_isnan, if_false);
502
503 Bind(&a_isnan);
504 BranchIfFloat64IsNaN(b_value, if_true, if_false);
505 }
506
507 Bind(&a_isnotnumber);
508 {
509 Label a_isstring(this), a_isnotstring(this);
510 Node* a_instance_type = LoadMapInstanceType(a_map);
511
512 Branch(Int32LessThan(a_instance_type, Int32Constant(FIRST_NONSTRING_TYPE)),
513 &a_isstring, &a_isnotstring);
514
515 Bind(&a_isstring);
516 {
517 Label b_isstring(this), b_isnotstring(this);
518 Node* b_instance_type = LoadMapInstanceType(b_map);
519
520 Branch(
521 Int32LessThan(b_instance_type, Int32Constant(FIRST_NONSTRING_TYPE)),
522 &b_isstring, if_false);
523
524 Bind(&b_isstring);
525 {
526 Callable callable = CodeFactory::StringEqual(isolate());
527 Node* result = CallStub(callable, context, a, b);
528 Branch(WordEqual(BooleanConstant(true), result), if_true, if_false);
529 }
530 }
531
532 Bind(&a_isnotstring);
533 {
534 // Check if {lhs} is a Simd128Value.
535 Label a_issimd128value(this);
536 Branch(Word32Equal(a_instance_type, Int32Constant(SIMD128_VALUE_TYPE)),
537 &a_issimd128value, if_false);
538
539 Bind(&a_issimd128value);
540 {
541 // Both {lhs} and {rhs} are Simd128Values, so compare them directly.
542 BranchIfSimd128Equal(a, a_map, b, b_map, if_true, if_false);
543 }
544 }
545 }
546 }
547
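Editor's note: the SameValueZero comparison built above differs from ordinary float64 equality only in that NaN compares equal to NaN (+0 and -0 already compare equal under ==). A minimal standalone sketch of that rule for two doubles, plain C++ rather than CodeStubAssembler code:

// Minimal sketch of the SameValueZero rule for two float64 values, as the
// graph above encodes it: ordinary equality, except NaN is equal to NaN.
// (+0 == -0 already holds under ordinary floating-point comparison.)
#include <cmath>

static bool SameValueZeroDouble(double a, double b) {
  if (a == b) return true;                // also treats +0 and -0 as equal
  return std::isnan(a) && std::isnan(b);  // NaN matches NaN under SameValueZero
}
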
548 void CodeStubAssembler::BranchIfSimd128Equal(Node* a, Node* a_map, Node* b,
549 Node* b_map, Label* if_true,
550 Label* if_false) {
551 Label if_mapsame(this);
552
553 Node* simd128_value_type = Int32Constant(SIMD128_VALUE_TYPE);
554 Assert(Word32Equal(LoadMapInstanceType(a_map), simd128_value_type));
555
556 Branch(Word32Equal(LoadMapInstanceType(b_map), simd128_value_type),
557 &if_mapsame, if_false);
558
559 Bind(&if_mapsame);
560 {
561 Label if_float32x4(this), if_notfloat32x4(this);
562 Node* float32x4_map = HeapConstant(factory()->float32x4_map());
563 Branch(WordEqual(a_map, float32x4_map), &if_float32x4, &if_notfloat32x4);
564
565 Bind(&if_float32x4);
566 {
567 for (int offset = Float32x4::kValueOffset - kHeapObjectTag;
568 offset < Float32x4::kSize - kHeapObjectTag;
569 offset += sizeof(float)) {
570 // Load the floating point values for {lhs} and {rhs}.
571 Node* a_value = Load(MachineType::Float32(), a, IntPtrConstant(offset));
572 Node* b_value = Load(MachineType::Float32(), b, IntPtrConstant(offset));
573
574 // Perform a floating point comparison.
575 Label if_valueequal(this);
576 Branch(Float32Equal(a_value, b_value), &if_valueequal, if_false);
577 Bind(&if_valueequal);
578 }
579 Goto(if_true);
580 }
581
582 Bind(&if_notfloat32x4);
583 {
584 // For other Simd128Values we just perform a bitwise comparison.
585 for (int offset = Simd128Value::kValueOffset - kHeapObjectTag;
586 offset < Simd128Value::kSize - kHeapObjectTag;
587 offset += kPointerSize) {
588 // Load the word values for {lhs} and {rhs}.
589 Node* a_value = Load(MachineType::Pointer(), a, IntPtrConstant(offset));
590 Node* b_value = Load(MachineType::Pointer(), b, IntPtrConstant(offset));
591
592 // Perform a bitwise word-comparison.
593 Label if_valueequal(this);
594 Branch(WordEqual(a_value, b_value), &if_valueequal, if_false);
595 Bind(&if_valueequal);
596 }
597
598 // Bitwise comparison succeeded, {lhs} and {rhs} considered equal.
599 Goto(if_true);
600 }
601 }
602 }
603
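Editor's note: the two loops above use different comparison strategies, lane-wise floating-point comparison for Float32x4 and bitwise comparison for every other Simd128 type. A standalone illustration (toy layout, not the real Simd128Value object):

// Toy illustration (not real V8 object layout) of the two strategies above:
// Float32x4 lanes compared as floats, other Simd128 payloads compared bitwise.
#include <cstring>

static bool Float32x4LanesEqual(const float a[4], const float b[4]) {
  for (int i = 0; i < 4; ++i) {
    if (!(a[i] == b[i])) return false;  // float comparison: +0 == -0, NaN != NaN
  }
  return true;
}

static bool Simd128BitwiseEqual(const void* a, const void* b) {
  return std::memcmp(a, b, 16) == 0;  // 16-byte payload compared bit-for-bit
}
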
604 void CodeStubAssembler::BranchIfFastJSArray(Node* object, Node* context,
605 Label* if_true, Label* if_false) {
606 Node* int32_zero = Int32Constant(0);
caitp 2016/07/17 03:26:47 Code to test if it's safe to iterate only over own
Benedikt Meurer 2016/07/17 06:03:18 Yeah, I think this needs to check for indexed interceptors
caitp 2016/07/18 14:57:44 added interceptors checks --- accessors are only p
caitp 2016/07/18 14:59:06 er, which _are_ handled in the slow case
607 Node* int32_one = Int32Constant(1);
608
609 Node* native_context = LoadNativeContext(context);
610 Node* array_prototype = LoadFixedArrayElement(
611 native_context, Int32Constant(Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
612
613 Variable last_map(this, MachineRepresentation::kTagged);
614 Label check_prototype(this);
615
616 // Bailout if Smi
617 GotoIf(WordIsSmi(object), if_false);
618
619 Node* map = LoadMap(object);
620 last_map.Bind(map);
621
622 // Bailout if instance type is not JS_ARRAY_TYPE
623 GotoIf(WordNotEqual(LoadMapInstanceType(map), Int32Constant(JS_ARRAY_TYPE)),
624 if_false);
625
626 // Bailout if access checks required
627 Node* bit_field = LoadMapBitField(map);
628 Node* is_access_check_needed = Int32Constant(1 << Map::kIsAccessCheckNeeded);
629 GotoIf(
630 Word32NotEqual(Word32And(bit_field, is_access_check_needed), int32_zero),
631 if_false);
632
633 Node* bit_field2 = LoadMapBitField2(map);
634 Node* elements_kind = BitFieldDecode<Map::ElementsKindBits>(bit_field2);
635
636 // Bailout if slow receiver elements
637 GotoIf(
638 Int32GreaterThan(elements_kind, Int32Constant(LAST_FAST_ELEMENTS_KIND)),
639 if_false);
640
641 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == (FAST_SMI_ELEMENTS | 1));
642 STATIC_ASSERT(FAST_HOLEY_ELEMENTS == (FAST_ELEMENTS | 1));
643 STATIC_ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == (FAST_DOUBLE_ELEMENTS | 1));
644
645 // If receiver has packed elements, don't check prototype
646 Node* holey_elements = Word32And(elements_kind, int32_one);
647 Branch(Word32Equal(holey_elements, int32_zero), if_true, &check_prototype);
648
649 Bind(&check_prototype);
650 {
651 Label prototype_checks(this), loop_body(this, &last_map);
652 Goto(&loop_body);
653 Bind(&loop_body);
654 Node* current_map = last_map.value();
655 Node* proto = LoadObjectField(current_map, Map::kPrototypeOffset);
656
657 // End loop
658 GotoIf(WordEqual(proto, NullConstant()), if_true);
659 GotoIf(WordNotEqual(array_prototype, proto), &prototype_checks);
660 Node* array_protector = LoadObjectField(
661 LoadRoot(Heap::kArrayProtectorRootIndex), PropertyCell::kValueOffset);
662 Branch(WordEqual(array_protector,
663 SmiConstant(Smi::FromInt(Isolate::kArrayProtectorValid))),
664 if_true, &prototype_checks);
665
666 Bind(&prototype_checks);
667 Node* proto_map = LoadMap(proto);
668
669 // Bailout if a Proxy is found on the prototype chain
670 GotoIf(Word32Equal(LoadMapInstanceType(proto_map),
671 Int32Constant(JS_PROXY_TYPE)),
672 if_false);
673
674 // Bailout if access checks are needed on the prototype
675 Node* bit_field = LoadMapBitField(proto_map);
676 GotoIf(Word32NotEqual(Word32And(bit_field, is_access_check_needed),
677 int32_zero),
678 if_false);
679
680 // Bailout if the prototype itself has elements
681 Node* bit_field2 = LoadMapBitField2(proto_map);
682 Node* elements_kind = BitFieldDecode<Map::ElementsKindBits>(bit_field2);
683 GotoUnless(Word32Equal(elements_kind, Int32Constant(NO_ELEMENTS)),
684 if_false);
685
686 last_map.Bind(proto_map);
687 Goto(&loop_body);
688 }
689 }
690
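Editor's note: restated in plain C++ with a toy ProtoInfo struct (not real V8 types), the prototype walk above for a holey receiver succeeds on reaching null, or on reaching the initial Array prototype while the array protector cell is still valid, and fails on a proxy, on a map that needs access checks, or on any prototype that has elements:

// Toy restatement (not real V8 types) of the prototype walk in
// BranchIfFastJSArray for a holey receiver.
#include <vector>

struct ProtoInfo {
  bool is_initial_array_prototype;
  bool is_proxy;
  bool needs_access_checks;
  bool has_elements;
};

static bool HoleyFastPathAllowed(const std::vector<ProtoInfo>& chain,
                                 bool array_protector_valid) {
  for (const ProtoInfo& proto : chain) {  // chain ends where the prototype is null
    if (proto.is_initial_array_prototype && array_protector_valid) return true;
    if (proto.is_proxy || proto.needs_access_checks || proto.has_elements)
      return false;
  }
  return true;  // walked to null without anything that could supply elements
}
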
691 Node* CodeStubAssembler::AllocateRawUnaligned(Node* size_in_bytes,
692 AllocationFlags flags,
693 Node* top_address,
694 Node* limit_address) {
695 Node* top = Load(MachineType::Pointer(), top_address);
696 Node* limit = Load(MachineType::Pointer(), limit_address);
697
698 // If there's not enough space, call the runtime.
699 Variable result(this, MachineRepresentation::kTagged);
700 Label runtime_call(this, Label::kDeferred), no_runtime_call(this);
(...skipping 2575 matching lines...)
3276 }
3277 Bind(&miss);
3278 {
3279 TailCallRuntime(Runtime::kLoadGlobalIC_Miss, p->context, p->slot,
3280 p->vector);
3281 }
3282 }
3283
3284 }  // namespace internal
3285 }  // namespace v8