OLD | NEW |
1 // Copyright 2016 the V8 project authors. All rights reserved. | 1 // Copyright 2016 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/code-stub-assembler.h" | 5 #include "src/code-stub-assembler.h" |
6 #include "src/code-factory.h" | 6 #include "src/code-factory.h" |
7 #include "src/frames-inl.h" | 7 #include "src/frames-inl.h" |
8 #include "src/frames.h" | 8 #include "src/frames.h" |
9 #include "src/ic/handler-configuration.h" | 9 #include "src/ic/handler-configuration.h" |
10 #include "src/ic/stub-cache.h" | 10 #include "src/ic/stub-cache.h" |
(...skipping 1034 matching lines...)
1045 Node* CodeStubAssembler::LoadFixedArrayElement(Node* object, Node* index_node, | 1045 Node* CodeStubAssembler::LoadFixedArrayElement(Node* object, Node* index_node, |
1046 int additional_offset, | 1046 int additional_offset, |
1047 ParameterMode parameter_mode) { | 1047 ParameterMode parameter_mode) { |
1048 int32_t header_size = | 1048 int32_t header_size = |
1049 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag; | 1049 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag; |
1050 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS, | 1050 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS, |
1051 parameter_mode, header_size); | 1051 parameter_mode, header_size); |
1052 return Load(MachineType::AnyTagged(), object, offset); | 1052 return Load(MachineType::AnyTagged(), object, offset); |
1053 } | 1053 } |
1054 | 1054 |
| 1055 Node* CodeStubAssembler::LoadFixedTypedArrayElement( |
| 1056 Node* data_pointer, Node* index_node, ElementsKind elements_kind, |
| 1057 ParameterMode parameter_mode) { |
| 1058 Node* offset = |
| 1059 ElementOffsetFromIndex(index_node, elements_kind, parameter_mode, 0); |
| 1060 MachineType type; |
| 1061 switch (elements_kind) { |
| 1062 case UINT8_ELEMENTS: /* fall through */ |
| 1063 case UINT8_CLAMPED_ELEMENTS: |
| 1064 type = MachineType::Uint8(); |
| 1065 break; |
| 1066 case INT8_ELEMENTS: |
| 1067 type = MachineType::Int8(); |
| 1068 break; |
| 1069 case UINT16_ELEMENTS: |
| 1070 type = MachineType::Uint16(); |
| 1071 break; |
| 1072 case INT16_ELEMENTS: |
| 1073 type = MachineType::Int16(); |
| 1074 break; |
| 1075 case UINT32_ELEMENTS: |
| 1076 type = MachineType::Uint32(); |
| 1077 break; |
| 1078 case INT32_ELEMENTS: |
| 1079 type = MachineType::Int32(); |
| 1080 break; |
| 1081 case FLOAT32_ELEMENTS: |
| 1082 type = MachineType::Float32(); |
| 1083 break; |
| 1084 case FLOAT64_ELEMENTS: |
| 1085 type = MachineType::Float64(); |
| 1086 break; |
| 1087 default: |
| 1088 UNREACHABLE(); |
| 1089 } |
| 1090 return Load(type, data_pointer, offset); |
| 1091 } |
| 1092 |
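A hypothetical call site, shown only for illustration (the data_pointer, index, and assembler names are assumptions, not part of this CL): loading one element out of a FLOAT64_ELEMENTS backing store and boxing it for JavaScript.

    // Sketch: read a double from the typed array's external backing store.
    Node* value = assembler->LoadFixedTypedArrayElement(
        data_pointer, index, FLOAT64_ELEMENTS,
        CodeStubAssembler::INTPTR_PARAMETERS);
    // The load yields an untagged Float64 word; box it before handing it back
    // to JavaScript.
    Node* boxed = assembler->ChangeFloat64ToTagged(value);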
1055 Node* CodeStubAssembler::LoadAndUntagToWord32FixedArrayElement( | 1093 Node* CodeStubAssembler::LoadAndUntagToWord32FixedArrayElement( |
1056 Node* object, Node* index_node, int additional_offset, | 1094 Node* object, Node* index_node, int additional_offset, |
1057 ParameterMode parameter_mode) { | 1095 ParameterMode parameter_mode) { |
1058 int32_t header_size = | 1096 int32_t header_size = |
1059 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag; | 1097 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag; |
1060 #if V8_TARGET_LITTLE_ENDIAN | 1098 #if V8_TARGET_LITTLE_ENDIAN |
1061 if (Is64()) { | 1099 if (Is64()) { |
1062 header_size += kPointerSize / 2; | 1100 header_size += kPointerSize / 2; |
1063 } | 1101 } |
1064 #endif | 1102 #endif |
(...skipping 5074 matching lines...)
6139 BuildFastLoop( | 6177 BuildFastLoop( |
6140 MachineType::PointerRepresentation(), start, limit, | 6178 MachineType::PointerRepresentation(), start, limit, |
6141 [fixed_array, body](CodeStubAssembler* assembler, Node* offset) { | 6179 [fixed_array, body](CodeStubAssembler* assembler, Node* offset) { |
6142 body(assembler, fixed_array, offset); | 6180 body(assembler, fixed_array, offset); |
6143 }, | 6181 }, |
6144 direction == ForEachDirection::kReverse ? -increment : increment, | 6182 direction == ForEachDirection::kReverse ? -increment : increment, |
6145 direction == ForEachDirection::kReverse ? IndexAdvanceMode::kPre | 6183 direction == ForEachDirection::kReverse ? IndexAdvanceMode::kPre |
6146 : IndexAdvanceMode::kPost); | 6184 : IndexAdvanceMode::kPost); |
6147 } | 6185 } |
6148 | 6186 |
| 6187 void CodeStubAssembler::BranchIfNumericRelationalComparison( |
| 6188 RelationalComparisonMode mode, compiler::Node* lhs, compiler::Node* rhs, |
| 6189 Label* if_true, Label* if_false) { |
| 6190 typedef compiler::Node Node; |
| 6191 |
| 6192 Label end(this); |
| 6193 Variable result(this, MachineRepresentation::kTagged); |
| 6194 |
| 6195 // Shared entry for floating point comparison. |
| 6196 Label do_fcmp(this); |
| 6197 Variable var_fcmp_lhs(this, MachineRepresentation::kFloat64), |
| 6198 var_fcmp_rhs(this, MachineRepresentation::kFloat64); |
| 6199 |
| 6200 // Check if the {lhs} is a Smi or a HeapObject. |
| 6201 Label if_lhsissmi(this), if_lhsisnotsmi(this); |
| 6202 Branch(TaggedIsSmi(lhs), &if_lhsissmi, &if_lhsisnotsmi); |
| 6203 |
| 6204 Bind(&if_lhsissmi); |
| 6205 { |
| 6206 // Check if {rhs} is a Smi or a HeapObject. |
| 6207 Label if_rhsissmi(this), if_rhsisnotsmi(this); |
| 6208 Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi); |
| 6209 |
| 6210 Bind(&if_rhsissmi); |
| 6211 { |
| 6212 // Both {lhs} and {rhs} are Smi, so just perform a fast Smi comparison. |
| 6213 switch (mode) { |
| 6214 case kLessThan: |
| 6215 BranchIfSmiLessThan(lhs, rhs, if_true, if_false); |
| 6216 break; |
| 6217 case kLessThanOrEqual: |
| 6218 BranchIfSmiLessThanOrEqual(lhs, rhs, if_true, if_false); |
| 6219 break; |
| 6220 case kGreaterThan: |
| 6221 BranchIfSmiLessThan(rhs, lhs, if_true, if_false); |
| 6222 break; |
| 6223 case kGreaterThanOrEqual: |
| 6224 BranchIfSmiLessThanOrEqual(rhs, lhs, if_true, if_false); |
| 6225 break; |
| 6226 } |
| 6227 } |
| 6228 |
| 6229 Bind(&if_rhsisnotsmi); |
| 6230 { |
| 6231 Assert(WordEqual(LoadMap(rhs), HeapNumberMapConstant())); |
| 6232 // Convert the {lhs} and {rhs} to floating point values, and |
| 6233 // perform a floating point comparison. |
| 6234 var_fcmp_lhs.Bind(SmiToFloat64(lhs)); |
| 6235 var_fcmp_rhs.Bind(LoadHeapNumberValue(rhs)); |
| 6236 Goto(&do_fcmp); |
| 6237 } |
| 6238 } |
| 6239 |
| 6240 Bind(&if_lhsisnotsmi); |
| 6241 { |
| 6242 Assert(WordEqual(LoadMap(lhs), HeapNumberMapConstant())); |
| 6243 |
| 6244 // Check if {rhs} is a Smi or a HeapObject. |
| 6245 Label if_rhsissmi(this), if_rhsisnotsmi(this); |
| 6246 Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi); |
| 6247 |
| 6248 Bind(&if_rhsissmi); |
| 6249 { |
| 6250 // Convert the {lhs} and {rhs} to floating point values, and |
| 6251 // perform a floating point comparison. |
| 6252 var_fcmp_lhs.Bind(LoadHeapNumberValue(lhs)); |
| 6253 var_fcmp_rhs.Bind(SmiToFloat64(rhs)); |
| 6254 Goto(&do_fcmp); |
| 6255 } |
| 6256 |
| 6257 Bind(&if_rhsisnotsmi); |
| 6258 { |
| 6259 Assert(WordEqual(LoadMap(rhs), HeapNumberMapConstant())); |
| 6260 |
| 6261 // Convert the {lhs} and {rhs} to floating point values, and |
| 6262 // perform a floating point comparison. |
| 6263 var_fcmp_lhs.Bind(LoadHeapNumberValue(lhs)); |
| 6264 var_fcmp_rhs.Bind(LoadHeapNumberValue(rhs)); |
| 6265 Goto(&do_fcmp); |
| 6266 } |
| 6267 } |
| 6268 |
| 6269 Bind(&do_fcmp); |
| 6270 { |
| 6271 // Load the {lhs} and {rhs} floating point values. |
| 6272 Node* lhs = var_fcmp_lhs.value(); |
| 6273 Node* rhs = var_fcmp_rhs.value(); |
| 6274 |
| 6275 // Perform a fast floating point comparison. |
| 6276 switch (mode) { |
| 6277 case kLessThan: |
| 6278 BranchIfFloat64LessThan(lhs, rhs, if_true, if_false); |
| 6279 break; |
| 6280 case kLessThanOrEqual: |
| 6281 BranchIfFloat64LessThanOrEqual(lhs, rhs, if_true, if_false); |
| 6282 break; |
| 6283 case kGreaterThan: |
| 6284 BranchIfFloat64GreaterThan(lhs, rhs, if_true, if_false); |
| 6285 break; |
| 6286 case kGreaterThanOrEqual: |
| 6287 BranchIfFloat64GreaterThanOrEqual(lhs, rhs, if_true, if_false); |
| 6288 break; |
| 6289 } |
| 6290 } |
| 6291 } |
| 6292 |
| 6293 void CodeStubAssembler::GotoUnlessNumberLessThan(compiler::Node* lhs, |
| 6294 compiler::Node* rhs, |
| 6295 Label* if_false) { |
| 6296 Label if_true(this); |
| 6297 BranchIfNumericRelationalComparison(kLessThan, lhs, rhs, &if_true, if_false); |
| 6298 Bind(&if_true); |
| 6299 } |
| 6300 |
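GotoUnlessNumberLessThan above is a thin wrapper over the branch helper; a hypothetical call site (the index and length nodes and the assembler pointer are assumptions) would look like the sketch below.

    // Sketch: bail out to |out_of_bounds| unless index < length. Both inputs
    // must already be Numbers (Smi or HeapNumber); the helper Asserts this on
    // its HeapObject paths.
    Label out_of_bounds(assembler);
    assembler->GotoUnlessNumberLessThan(index, length, &out_of_bounds);
    // Control falls through here only when index < length.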
6149 compiler::Node* CodeStubAssembler::RelationalComparison( | 6301 compiler::Node* CodeStubAssembler::RelationalComparison( |
6150 RelationalComparisonMode mode, compiler::Node* lhs, compiler::Node* rhs, | 6302 RelationalComparisonMode mode, compiler::Node* lhs, compiler::Node* rhs, |
6151 compiler::Node* context) { | 6303 compiler::Node* context) { |
6152 typedef compiler::Node Node; | 6304 typedef compiler::Node Node; |
6153 | 6305 |
6154 Label return_true(this), return_false(this), end(this); | 6306 Label return_true(this), return_false(this), end(this); |
6155 Variable result(this, MachineRepresentation::kTagged); | 6307 Variable result(this, MachineRepresentation::kTagged); |
6156 | 6308 |
6157 // Shared entry for floating point comparison. | 6309 // Shared entry for floating point comparison. |
6158 Label do_fcmp(this); | 6310 Label do_fcmp(this); |
(...skipping 1324 matching lines...)
7483 Bind(&return_runtime); | 7635 Bind(&return_runtime); |
7484 { | 7636 { |
7485 result.Bind(CallRuntime(Runtime::kInstanceOf, context, object, callable)); | 7637 result.Bind(CallRuntime(Runtime::kInstanceOf, context, object, callable)); |
7486 Goto(&end); | 7638 Goto(&end); |
7487 } | 7639 } |
7488 | 7640 |
7489 Bind(&end); | 7641 Bind(&end); |
7490 return result.value(); | 7642 return result.value(); |
7491 } | 7643 } |
7492 | 7644 |
| 7645 compiler::Node* CodeStubAssembler::NumberInc(compiler::Node* value) { |
| 7646 Variable var_result(this, MachineRepresentation::kTagged), |
| 7647 var_finc_value(this, MachineRepresentation::kFloat64); |
| 7648 Label if_issmi(this), if_isnotsmi(this), do_finc(this), end(this); |
| 7649 Branch(TaggedIsSmi(value), &if_issmi, &if_isnotsmi); |
| 7650 |
| 7651 Bind(&if_issmi); |
| 7652 { |
| 7653 // Try fast Smi addition first. |
| 7654 Node* one = SmiConstant(Smi::FromInt(1)); |
| 7655 Node* pair = IntPtrAddWithOverflow(BitcastTaggedToWord(value), |
| 7656 BitcastTaggedToWord(one)); |
| 7657 Node* overflow = Projection(1, pair); |
| 7658 |
| 7659 // Check if the Smi addition overflowed. |
| 7660 Label if_overflow(this), if_notoverflow(this); |
| 7661 Branch(overflow, &if_overflow, &if_notoverflow); |
| 7662 |
| 7663 Bind(&if_notoverflow); |
| 7664 var_result.Bind(Projection(0, pair)); |
| 7665 Goto(&end); |
| 7666 |
| 7667 Bind(&if_overflow); |
| 7668 { |
| 7669 var_finc_value.Bind(SmiToFloat64(value)); |
| 7670 Goto(&do_finc); |
| 7671 } |
| 7672 } |
| 7673 |
| 7674 Bind(&if_isnotsmi); |
| 7675 { |
| 7676 // Check if the value is a HeapNumber. |
| 7677 Assert(IsHeapNumberMap(LoadMap(value))); |
| 7678 |
| 7679 // Load the HeapNumber value. |
| 7680 var_finc_value.Bind(LoadHeapNumberValue(value)); |
| 7681 Goto(&do_finc); |
| 7682 } |
| 7683 |
| 7684 Bind(&do_finc); |
| 7685 { |
| 7686 Node* finc_value = var_finc_value.value(); |
| 7687 Node* one = Float64Constant(1.0); |
| 7688 Node* finc_result = Float64Add(finc_value, one); |
| 7689 var_result.Bind(ChangeFloat64ToTagged(finc_result)); |
| 7690 Goto(&end); |
| 7691 } |
| 7692 |
| 7693 Bind(&end); |
| 7694 return var_result.value(); |
| 7695 } |
| 7696 |
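The fast path in NumberInc leans on a standard Smi encoding property: the tag bits of a Smi are zero (32-bit) or the payload sits in the upper half of the word (64-bit), so adding the two bitcast tagged words adds the payloads directly, and signed overflow of that word addition occurs exactly when the result leaves Smi range. A hypothetical caller (the index node is an assumption):

    // Sketch: advance a Number-typed index by one. When |index| is the
    // maximal Smi, the overflow path above boxes the result as a HeapNumber
    // instead of wrapping around.
    Node* next_index = assembler->NumberInc(index);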
| 7697 compiler::Node* CodeStubAssembler::CreateArrayIterator( |
| 7698 compiler::Node* array, compiler::Node* array_map, |
| 7699 compiler::Node* array_type, compiler::Node* context, IterationKind mode) { |
| 7700 int kBaseMapIndex; |
| 7701 switch (mode) { |
| 7702 case IterationKind::kKeys: |
| 7703 kBaseMapIndex = Context::TYPED_ARRAY_KEY_ITERATOR_MAP_INDEX; |
| 7704 break; |
| 7705 case IterationKind::kValues: |
| 7706 kBaseMapIndex = Context::UINT8_ARRAY_VALUE_ITERATOR_MAP_INDEX; |
| 7707 break; |
| 7708 case IterationKind::kEntries: |
| 7709 kBaseMapIndex = Context::UINT8_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX; |
| 7710 break; |
| 7711 } |
| 7712 |
| 7713 // Fast Array iterator map index: |
| 7714 // (kBaseMapIndex + kFastIteratorOffset) + ElementsKind (for JSArrays) |
| 7715 // kBaseMapIndex + (ElementsKind - UINT8_ELEMENTS) (for JSTypedArrays) |
| 7716 const int kFastIteratorOffset = |
| 7717 Context::FAST_SMI_ARRAY_VALUE_ITERATOR_MAP_INDEX - |
| 7718 Context::UINT8_ARRAY_VALUE_ITERATOR_MAP_INDEX; |
| 7719 STATIC_ASSERT(kFastIteratorOffset == |
| 7720 (Context::FAST_SMI_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX - |
| 7721 Context::UINT8_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX)); |
| 7722 |
| 7723 // Slow Array iterator map index: (kBaseMapIndex + kSlowIteratorOffset) |
| 7724 const int kSlowIteratorOffset = |
| 7725 Context::GENERIC_ARRAY_VALUE_ITERATOR_MAP_INDEX - |
| 7726 Context::UINT8_ARRAY_VALUE_ITERATOR_MAP_INDEX; |
| 7727 STATIC_ASSERT(kSlowIteratorOffset == |
| 7728 (Context::GENERIC_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX - |
| 7729 Context::UINT8_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX)); |
| 7730 |
| 7731 // Assert: Type(array) is Object |
| 7732 Assert(IsJSReceiverInstanceType(array_type)); |
| 7733 |
| 7734 Variable var_result(this, MachineRepresentation::kTagged); |
| 7735 Variable var_map_index(this, MachineType::PointerRepresentation()); |
| 7736 Variable var_array_map(this, MachineRepresentation::kTagged); |
| 7737 |
| 7738 Label return_result(this); |
| 7739 Label allocate_iterator(this); |
| 7740 |
| 7741 if (mode == IterationKind::kKeys) { |
| 7742 // The key iterator map does not depend on the elements kind; branch on |
| 7743 // whether the receiver is a JSArray, a JSTypedArray, or something else. |
| 7744 |
| 7745 Label if_isarray(this), if_istypedarray(this), if_isgeneric(this); |
| 7746 Label* kInstanceTypeHandlers[] = {&if_isarray, &if_istypedarray}; |
| 7747 |
| 7748 static int32_t kInstanceType[] = {JS_ARRAY_TYPE, JS_TYPED_ARRAY_TYPE}; |
| 7749 |
| 7750 Switch(array_type, &if_isgeneric, kInstanceType, kInstanceTypeHandlers, |
| 7751 arraysize(kInstanceType)); |
| 7752 |
| 7753 Bind(&if_isarray); |
| 7754 { |
| 7755 var_map_index.Bind( |
| 7756 IntPtrConstant(Context::FAST_ARRAY_KEY_ITERATOR_MAP_INDEX)); |
| 7757 var_array_map.Bind(array_map); |
| 7758 Goto(&allocate_iterator); |
| 7759 } |
| 7760 |
| 7761 Bind(&if_istypedarray); |
| 7762 { |
| 7763 var_map_index.Bind( |
| 7764 IntPtrConstant(Context::TYPED_ARRAY_KEY_ITERATOR_MAP_INDEX)); |
| 7765 var_array_map.Bind(UndefinedConstant()); |
| 7766 Goto(&allocate_iterator); |
| 7767 } |
| 7768 |
| 7769 Bind(&if_isgeneric); |
| 7770 { |
| 7771 var_map_index.Bind( |
| 7772 IntPtrConstant(Context::GENERIC_ARRAY_KEY_ITERATOR_MAP_INDEX)); |
| 7773 var_array_map.Bind(UndefinedConstant()); |
| 7774 Goto(&allocate_iterator); |
| 7775 } |
| 7776 } else { |
| 7777 Label if_istypedarray(this), if_isgeneric(this); |
| 7778 Branch(Word32Equal(array_type, Int32Constant(JS_TYPED_ARRAY_TYPE)), |
| 7779 &if_istypedarray, &if_isgeneric); |
| 7780 |
| 7781 Bind(&if_isgeneric); |
| 7782 { |
| 7783 Label if_isfast(this), if_isslow(this); |
| 7784 BranchIfFastJSArray(array, context, &if_isfast, &if_isslow); |
| 7785 |
| 7786 Bind(&if_isfast); |
| 7787 { |
| 7788 Node* map_index = |
| 7789 IntPtrAdd(IntPtrConstant(kBaseMapIndex + kFastIteratorOffset), |
| 7790 LoadMapElementsKind(array_map)); |
| 7791 Assert(IntPtrGreaterThanOrEqual( |
| 7792 map_index, IntPtrConstant(kBaseMapIndex + kFastIteratorOffset))); |
| 7793 Assert(IntPtrLessThan( |
| 7794 map_index, IntPtrConstant(kBaseMapIndex + kSlowIteratorOffset))); |
| 7795 |
| 7796 var_map_index.Bind(map_index); |
| 7797 var_array_map.Bind(array_map); |
| 7798 Goto(&allocate_iterator); |
| 7799 } |
| 7800 |
| 7801 Bind(&if_isslow); |
| 7802 { |
| 7803 Node* map_index = IntPtrAdd(IntPtrConstant(kBaseMapIndex), |
| 7804 IntPtrConstant(kSlowIteratorOffset)); |
| 7805 var_map_index.Bind(map_index); |
| 7806 var_array_map.Bind(UndefinedConstant()); |
| 7807 Goto(&allocate_iterator); |
| 7808 } |
| 7809 } |
| 7810 |
| 7811 Bind(&if_istypedarray); |
| 7812 { |
| 7813 Node* map_index = |
| 7814 IntPtrAdd(IntPtrConstant(kBaseMapIndex - UINT8_ELEMENTS), |
| 7815 LoadMapElementsKind(array_map)); |
| 7816 Assert(IntPtrLessThan( |
| 7817 map_index, IntPtrConstant(kBaseMapIndex + kFastIteratorOffset))); |
| 7818 Assert( |
| 7819 IntPtrGreaterThanOrEqual(map_index, IntPtrConstant(kBaseMapIndex))); |
| 7820 var_map_index.Bind(map_index); |
| 7821 var_array_map.Bind(UndefinedConstant()); |
| 7822 Goto(&allocate_iterator); |
| 7823 } |
| 7824 } |
| 7825 |
| 7826 Bind(&allocate_iterator); |
| 7827 { |
| 7828 Node* map = |
| 7829 LoadFixedArrayElement(LoadNativeContext(context), var_map_index.value(), |
| 7830 0, CodeStubAssembler::INTPTR_PARAMETERS); |
| 7831 var_result.Bind(AllocateJSArrayIterator(array, var_array_map.value(), map)); |
| 7832 Goto(&return_result); |
| 7833 } |
| 7834 |
| 7835 Bind(&return_result); |
| 7836 return var_result.value(); |
| 7837 } |
| 7838 |
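A worked restatement of the map-index arithmetic above, purely for illustration (the concrete values come from the Context slot enum); writing B for the kBaseMapIndex picked from the IterationKind:

    // Value/entry iteration over a fast JSArray with elements kind k:
    //   map_index = B + kFastIteratorOffset + k
    // Value/entry iteration over a JSTypedArray with elements kind k:
    //   map_index = B + (k - UINT8_ELEMENTS)
    // Any other receiver (the generic, slow path):
    //   map_index = B + kSlowIteratorOffset
    // The Asserts in each branch check that map_index stays inside the band
    // of native-context slots reserved for that receiver class before it is
    // used to fetch the iterator map.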
| 7839 compiler::Node* CodeStubAssembler::AllocateJSArrayIterator( |
| 7840 compiler::Node* array, compiler::Node* array_map, compiler::Node* map) { |
| 7841 Node* iterator = Allocate(JSArrayIterator::kSize); |
| 7842 StoreMapNoWriteBarrier(iterator, map); |
| 7843 StoreObjectFieldRoot(iterator, JSArrayIterator::kPropertiesOffset, |
| 7844 Heap::kEmptyFixedArrayRootIndex); |
| 7845 StoreObjectFieldRoot(iterator, JSArrayIterator::kElementsOffset, |
| 7846 Heap::kEmptyFixedArrayRootIndex); |
| 7847 StoreObjectFieldNoWriteBarrier(iterator, |
| 7848 JSArrayIterator::kIteratedObjectOffset, array); |
| 7849 StoreObjectFieldNoWriteBarrier(iterator, JSArrayIterator::kNextIndexOffset, |
| 7850 SmiConstant(Smi::FromInt(0))); |
| 7851 StoreObjectFieldNoWriteBarrier( |
| 7852 iterator, JSArrayIterator::kIteratedObjectMapOffset, array_map); |
| 7853 return iterator; |
| 7854 } |
| 7855 |
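The initializing stores in AllocateJSArrayIterator can skip the write barrier because the iterator was just allocated in new space, so none of these stores can create an old-to-new pointer the barrier would need to record. The resulting field layout, sketched from the stores above:

    // map                  <- |map| (iterator map fetched from the native context)
    // properties           <- empty_fixed_array
    // elements             <- empty_fixed_array
    // iterated object      <- |array|
    // next index           <- Smi 0
    // iterated object map  <- |array_map| (undefined on the typed/generic paths)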
| 7856 compiler::Node* CodeStubAssembler::IsDetachedBuffer(compiler::Node* buffer) { |
| 7857 AssertInstanceType(buffer, JS_ARRAY_BUFFER_TYPE); |
| 7858 |
| 7859 Node* buffer_bit_field = LoadObjectField( |
| 7860 buffer, JSArrayBuffer::kBitFieldOffset, MachineType::Uint32()); |
| 7861 Node* was_neutered_mask = Int32Constant(JSArrayBuffer::WasNeutered::kMask); |
| 7862 |
| 7863 return Word32NotEqual(Word32And(buffer_bit_field, was_neutered_mask), |
| 7864 Int32Constant(0)); |
| 7865 } |
| 7866 |
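At the heap-object level, the load-and-mask in IsDetachedBuffer amounts to testing a single bit; a comment-only sketch of the equivalent predicate (the C++ accessor spelling is not part of this CL):

    // A detached (neutered) JSArrayBuffer has the WasNeutered bit set in its
    // bit field:
    //   (bit_field & JSArrayBuffer::WasNeutered::kMask) != 0
    // which is exactly the Word32And / Word32NotEqual sequence above.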
7493 } // namespace internal | 7867 } // namespace internal |
7494 } // namespace v8 | 7868 } // namespace v8 |