| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_MIPS | 5 #if V8_TARGET_ARCH_MIPS |
| 6 | 6 |
| 7 #include "src/codegen.h" | 7 #include "src/codegen.h" |
| 8 #include "src/debug/debug.h" | 8 #include "src/debug/debug.h" |
| 9 #include "src/deoptimizer.h" | 9 #include "src/deoptimizer.h" |
| 10 #include "src/full-codegen/full-codegen.h" | 10 #include "src/full-codegen/full-codegen.h" |
| (...skipping 2840 matching lines...) | |
| 2851 __ lw(scratch2, | 2851 __ lw(scratch2, |
| 2852 ContextMemOperand(native_context, | 2852 ContextMemOperand(native_context, |
| 2853 Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX)); | 2853 Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX)); |
| 2854 __ Branch(&runtime_call, ne, scratch, Operand(scratch2)); | 2854 __ Branch(&runtime_call, ne, scratch, Operand(scratch2)); |
| 2855 | 2855 |
| 2856 // For FastPacked kinds, iteration will have the same effect as simply | 2856 // For FastPacked kinds, iteration will have the same effect as simply |
| 2857 // accessing each property in order. | 2857 // accessing each property in order. |
| 2858 Label no_protector_check; | 2858 Label no_protector_check; |
| 2859 __ lbu(scratch, FieldMemOperand(spread_map, Map::kBitField2Offset)); | 2859 __ lbu(scratch, FieldMemOperand(spread_map, Map::kBitField2Offset)); |
| 2860 __ DecodeField<Map::ElementsKindBits>(scratch); | 2860 __ DecodeField<Map::ElementsKindBits>(scratch); |
| 2861 __ Branch(&runtime_call, hi, scratch, Operand(LAST_FAST_ELEMENTS_KIND)); | 2861 __ Branch(&runtime_call, hi, scratch, Operand(FAST_HOLEY_ELEMENTS)); |
| 2862 // For non-FastHoley kinds, we can skip the protector check. | 2862 // For non-FastHoley kinds, we can skip the protector check. |
| 2863 __ Branch(&no_protector_check, eq, scratch, Operand(FAST_SMI_ELEMENTS)); | 2863 __ Branch(&no_protector_check, eq, scratch, Operand(FAST_SMI_ELEMENTS)); |
| 2864 __ Branch(&no_protector_check, eq, scratch, Operand(FAST_ELEMENTS)); | 2864 __ Branch(&no_protector_check, eq, scratch, Operand(FAST_ELEMENTS)); |
| 2865 __ Branch(&no_protector_check, eq, scratch, Operand(FAST_DOUBLE_ELEMENTS)); | |
| 2866 // Check the ArrayProtector cell. | 2865 // Check the ArrayProtector cell. |
| 2867 __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex); | 2866 __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex); |
| 2868 __ lw(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset)); | 2867 __ lw(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset)); |
| 2869 __ Branch(&runtime_call, ne, scratch, | 2868 __ Branch(&runtime_call, ne, scratch, |
| 2870 Operand(Smi::FromInt(Isolate::kProtectorValid))); | 2869 Operand(Smi::FromInt(Isolate::kProtectorValid))); |
| 2871 | 2870 |
| 2872 __ bind(&no_protector_check); | 2871 __ bind(&no_protector_check); |
| 2873 // Load the FixedArray backing store. | 2872 // Load the FixedArray backing store. |
| 2874 __ lw(spread, FieldMemOperand(spread, JSArray::kElementsOffset)); | 2873 __ lw(spread, FieldMemOperand(spread, JSArray::kElementsOffset)); |
| 2875 __ Branch(&push_args); | 2874 __ Branch(&push_args); |
| (...skipping 222 matching lines...) | |
| 3098 __ break_(0xCC); | 3097 __ break_(0xCC); |
| 3099 } | 3098 } |
| 3100 } | 3099 } |
| 3101 | 3100 |
| 3102 #undef __ | 3101 #undef __ |
| 3103 | 3102 |
| 3104 } // namespace internal | 3103 } // namespace internal |
| 3105 } // namespace v8 | 3104 } // namespace v8 |
| 3106 | 3105 |
| 3107 #endif // V8_TARGET_ARCH_MIPS | 3106 #endif // V8_TARGET_ARCH_MIPS |
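For readers skimming the hunk above, here is a minimal C++ sketch of the spread fast-path gate as it reads after this change. The enum ordering mirrors V8's ElementsKind of this era (packed and holey Smi/object kinds below the double kinds); the standalone enum and the helper name SpreadTakesFastPath are illustrative only, not V8 API. Comparing against FAST_HOLEY_ELEMENTS instead of LAST_FAST_ELEMENTS_KIND sends both double kinds to the runtime call, which appears to be why the FAST_DOUBLE_ELEMENTS skip of the protector check is deleted as dead code.

// Illustrative sketch only; enum values mirror V8's ElementsKind ordering
// at the time of this patch, and the helper is not a V8 function.
enum ElementsKind {
  FAST_SMI_ELEMENTS = 0,
  FAST_HOLEY_SMI_ELEMENTS = 1,
  FAST_ELEMENTS = 2,
  FAST_HOLEY_ELEMENTS = 3,        // NEW upper bound for the fast path.
  FAST_DOUBLE_ELEMENTS = 4,       // Now falls through to the runtime call.
  FAST_HOLEY_DOUBLE_ELEMENTS = 5  // Previously reachable via LAST_FAST_ELEMENTS_KIND.
};

// Mirrors the branch structure of the hunk above.
bool SpreadTakesFastPath(ElementsKind kind, bool array_protector_valid) {
  // __ Branch(&runtime_call, hi, scratch, Operand(FAST_HOLEY_ELEMENTS));
  if (kind > FAST_HOLEY_ELEMENTS) return false;
  // Packed (non-holey) kinds never read a hole, so the protector check is skipped.
  if (kind == FAST_SMI_ELEMENTS || kind == FAST_ELEMENTS) return true;
  // Holey kinds stay on the fast path only while the ArrayProtector cell is intact.
  return array_protector_valid;
}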