OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_X87 | 5 #if V8_TARGET_ARCH_X87 |
6 | 6 |
7 #include "src/code-factory.h" | 7 #include "src/code-factory.h" |
8 #include "src/codegen.h" | 8 #include "src/codegen.h" |
9 #include "src/deoptimizer.h" | 9 #include "src/deoptimizer.h" |
10 #include "src/full-codegen/full-codegen.h" | 10 #include "src/full-codegen/full-codegen.h" |
(...skipping 2869 matching lines...)
2880 __ cmp(scratch, | 2880 __ cmp(scratch, |
2881 ContextOperand(scratch2, | 2881 ContextOperand(scratch2, |
2882 Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX)); | 2882 Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX)); |
2883 __ j(not_equal, &runtime_call); | 2883 __ j(not_equal, &runtime_call); |
2884 | 2884 |
2885 // For FastPacked kinds, iteration will have the same effect as simply | 2885 // For FastPacked kinds, iteration will have the same effect as simply |
2886 // accessing each property in order. | 2886 // accessing each property in order. |
2887 Label no_protector_check; | 2887 Label no_protector_check; |
2888 __ mov(scratch, FieldOperand(spread_map, Map::kBitField2Offset)); | 2888 __ mov(scratch, FieldOperand(spread_map, Map::kBitField2Offset)); |
2889 __ DecodeField<Map::ElementsKindBits>(scratch); | 2889 __ DecodeField<Map::ElementsKindBits>(scratch); |
2890 __ cmp(scratch, Immediate(LAST_FAST_ELEMENTS_KIND)); | 2890 __ cmp(scratch, Immediate(FAST_HOLEY_ELEMENTS)); |
2891 __ j(above, &runtime_call); | 2891 __ j(above, &runtime_call); |
2892 // For non-FastHoley kinds, we can skip the protector check. | 2892 // For non-FastHoley kinds, we can skip the protector check. |
2893 __ cmp(scratch, Immediate(FAST_SMI_ELEMENTS)); | 2893 __ cmp(scratch, Immediate(FAST_SMI_ELEMENTS)); |
2894 __ j(equal, &no_protector_check); | 2894 __ j(equal, &no_protector_check); |
2895 __ cmp(scratch, Immediate(FAST_ELEMENTS)); | 2895 __ cmp(scratch, Immediate(FAST_ELEMENTS)); |
2896 __ j(equal, &no_protector_check); | 2896 __ j(equal, &no_protector_check); |
2897 __ cmp(scratch, Immediate(FAST_DOUBLE_ELEMENTS)); | |
2898 __ j(equal, &no_protector_check); | |
2899 // Check the ArrayProtector cell. | 2897 // Check the ArrayProtector cell. |
2900 __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex); | 2898 __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex); |
2901 __ cmp(FieldOperand(scratch, PropertyCell::kValueOffset), | 2899 __ cmp(FieldOperand(scratch, PropertyCell::kValueOffset), |
2902 Immediate(Smi::FromInt(Isolate::kProtectorValid))); | 2900 Immediate(Smi::FromInt(Isolate::kProtectorValid))); |
2903 __ j(not_equal, &runtime_call); | 2901 __ j(not_equal, &runtime_call); |
2904 | 2902 |
2905 __ bind(&no_protector_check); | 2903 __ bind(&no_protector_check); |
2906 // Load the FixedArray backing store. | 2904 // Load the FixedArray backing store. |
2907 __ mov(spread, FieldOperand(spread, JSArray::kElementsOffset)); | 2905 __ mov(spread, FieldOperand(spread, JSArray::kElementsOffset)); |
2908 // Free up some registers. | 2906 // Free up some registers. |
(...skipping 415 matching lines...)
3324 | 3322 |
3325 void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) { | 3323 void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) { |
3326 Generate_OnStackReplacementHelper(masm, true); | 3324 Generate_OnStackReplacementHelper(masm, true); |
3327 } | 3325 } |
3328 | 3326 |
3329 #undef __ | 3327 #undef __ |
3330 } // namespace internal | 3328 } // namespace internal |
3331 } // namespace v8 | 3329 } // namespace v8 |
3332 | 3330 |
3333 #endif // V8_TARGET_ARCH_X87 | 3331 #endif // V8_TARGET_ARCH_X87 |