OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_MIPS64 | 5 #if V8_TARGET_ARCH_MIPS64 |
6 | 6 |
7 #include "src/codegen.h" | 7 #include "src/codegen.h" |
8 #include "src/debug/debug.h" | 8 #include "src/debug/debug.h" |
9 #include "src/deoptimizer.h" | 9 #include "src/deoptimizer.h" |
10 #include "src/full-codegen/full-codegen.h" | 10 #include "src/full-codegen/full-codegen.h" |
(...skipping 2863 matching lines...)
2874 __ ld(scratch2, | 2874 __ ld(scratch2, |
2875 ContextMemOperand(native_context, | 2875 ContextMemOperand(native_context, |
2876 Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX)); | 2876 Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX)); |
2877 __ Branch(&runtime_call, ne, scratch, Operand(scratch2)); | 2877 __ Branch(&runtime_call, ne, scratch, Operand(scratch2)); |
2878 | 2878 |
2879 // For FastPacked kinds, iteration will have the same effect as simply | 2879 // For FastPacked kinds, iteration will have the same effect as simply |
2880 // accessing each property in order. | 2880 // accessing each property in order. |
2881 Label no_protector_check; | 2881 Label no_protector_check; |
2882 __ lbu(scratch, FieldMemOperand(spread_map, Map::kBitField2Offset)); | 2882 __ lbu(scratch, FieldMemOperand(spread_map, Map::kBitField2Offset)); |
2883 __ DecodeField<Map::ElementsKindBits>(scratch); | 2883 __ DecodeField<Map::ElementsKindBits>(scratch); |
2884 __ Branch(&runtime_call, hi, scratch, Operand(LAST_FAST_ELEMENTS_KIND)); | 2884 __ Branch(&runtime_call, hi, scratch, Operand(FAST_HOLEY_ELEMENTS)); |
2885 // For non-FastHoley kinds, we can skip the protector check. | 2885 // For non-FastHoley kinds, we can skip the protector check. |
2886 __ Branch(&no_protector_check, eq, scratch, Operand(FAST_SMI_ELEMENTS)); | 2886 __ Branch(&no_protector_check, eq, scratch, Operand(FAST_SMI_ELEMENTS)); |
2887 __ Branch(&no_protector_check, eq, scratch, Operand(FAST_ELEMENTS)); | 2887 __ Branch(&no_protector_check, eq, scratch, Operand(FAST_ELEMENTS)); |
2888 __ Branch(&no_protector_check, eq, scratch, Operand(FAST_DOUBLE_ELEMENTS)); | |
2889 // Check the ArrayProtector cell. | 2888 // Check the ArrayProtector cell. |
2890 __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex); | 2889 __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex); |
2891 __ lw(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset)); | 2890 __ lw(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset)); |
2892 __ Branch(&runtime_call, ne, scratch, | 2891 __ Branch(&runtime_call, ne, scratch, |
2893 Operand(Smi::FromInt(Isolate::kProtectorValid))); | 2892 Operand(Smi::FromInt(Isolate::kProtectorValid))); |
2894 | 2893 |
2895 __ bind(&no_protector_check); | 2894 __ bind(&no_protector_check); |
2896 // Load the FixedArray backing store. | 2895 // Load the FixedArray backing store. |
2897 __ ld(spread, FieldMemOperand(spread, JSArray::kElementsOffset)); | 2896 __ ld(spread, FieldMemOperand(spread, JSArray::kElementsOffset)); |
2898 __ Branch(&push_args); | 2897 __ Branch(&push_args); |
(...skipping 224 matching lines...)
3123 __ break_(0xCC); | 3122 __ break_(0xCC); |
3124 } | 3123 } |
3125 } | 3124 } |
3126 | 3125 |
3127 #undef __ | 3126 #undef __ |
3128 | 3127 |
3129 } // namespace internal | 3128 } // namespace internal |
3130 } // namespace v8 | 3129 } // namespace v8 |
3131 | 3130 |
3132 #endif // V8_TARGET_ARCH_MIPS64 | 3131 #endif // V8_TARGET_ARCH_MIPS64 |
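
For reference, below is a minimal standalone sketch (not V8 code) of the elements-kind classification that the new branch sequence in the hunk above performs, assuming the FAST_* ElementsKind ordering V8 used at the time (packed/holey pairs first, with the double kinds last among the fast kinds). The names ClassifySpread and SpreadPath are hypothetical and exist only for illustration.

// Hypothetical model of the spread fast-path check; the enum values mirror the
// assumed V8 ElementsKind ordering of this era, not an actual V8 header.
#include <cstdio>

enum ElementsKind {
  FAST_SMI_ELEMENTS = 0,
  FAST_HOLEY_SMI_ELEMENTS = 1,
  FAST_ELEMENTS = 2,
  FAST_HOLEY_ELEMENTS = 3,
  FAST_DOUBLE_ELEMENTS = 4,
  FAST_HOLEY_DOUBLE_ELEMENTS = 5,
};

enum class SpreadPath {
  kRuntimeCall,             // bail out to the runtime
  kFastNoProtectorCheck,    // packed SMI/object kinds: skip the ArrayProtector cell
  kFastWithProtectorCheck,  // holey SMI/object kinds: must check the ArrayProtector cell
};

// Mirrors the new branches: kinds above FAST_HOLEY_ELEMENTS (the double kinds
// here) take the runtime call, packed kinds skip the protector check, and the
// remaining holey kinds fall through to the protector-cell comparison.
SpreadPath ClassifySpread(ElementsKind kind) {
  if (kind > FAST_HOLEY_ELEMENTS) return SpreadPath::kRuntimeCall;
  if (kind == FAST_SMI_ELEMENTS || kind == FAST_ELEMENTS)
    return SpreadPath::kFastNoProtectorCheck;
  return SpreadPath::kFastWithProtectorCheck;
}

int main() {
  printf("%d\n", static_cast<int>(ClassifySpread(FAST_DOUBLE_ELEMENTS)));  // 0: runtime call
  printf("%d\n", static_cast<int>(ClassifySpread(FAST_ELEMENTS)));         // 1: skip protector
  printf("%d\n", static_cast<int>(ClassifySpread(FAST_HOLEY_ELEMENTS)));   // 2: check protector
  return 0;
}

Under this ordering, tightening the upper bound from LAST_FAST_ELEMENTS_KIND to FAST_HOLEY_ELEMENTS sends double-element arrays to the runtime call, which is why the separate FAST_DOUBLE_ELEMENTS skip on old line 2888 is dropped.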