| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 2788 matching lines...) |
| 2799 b(cond, &L); | 2799 b(cond, &L); |
| 2800 Abort(reason); | 2800 Abort(reason); |
| 2801 // will not return here | 2801 // will not return here |
| 2802 bind(&L); | 2802 bind(&L); |
| 2803 } | 2803 } |
| 2804 | 2804 |
| 2805 | 2805 |
| 2806 void MacroAssembler::Abort(BailoutReason reason) { | 2806 void MacroAssembler::Abort(BailoutReason reason) { |
| 2807 Label abort_start; | 2807 Label abort_start; |
| 2808 bind(&abort_start); | 2808 bind(&abort_start); |
| | 2809 // We want to pass the msg string like a smi to avoid GC |
| | 2810 // problems, however msg is not guaranteed to be aligned |
| | 2811 // properly. Instead, we pass an aligned pointer that is |
| | 2812 // a proper v8 smi, but also pass the alignment difference |
| | 2813 // from the real pointer as a smi. |
| | 2814 const char* msg = GetBailoutReason(reason); |
| | 2815 intptr_t p1 = reinterpret_cast<intptr_t>(msg); |
| | 2816 intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag; |
| | 2817 ASSERT(reinterpret_cast<Object*>(p0)->IsSmi()); |
| 2809 #ifdef DEBUG | 2818 #ifdef DEBUG |
| 2810 const char* msg = GetBailoutReason(reason); | |
| 2811 if (msg != NULL) { | 2819 if (msg != NULL) { |
| 2812 RecordComment("Abort message: "); | 2820 RecordComment("Abort message: "); |
| 2813 RecordComment(msg); | 2821 RecordComment(msg); |
| 2814 } | 2822 } |
| 2815 | 2823 |
| 2816 if (FLAG_trap_on_abort) { | 2824 if (FLAG_trap_on_abort) { |
| 2817 stop(msg); | 2825 stop(msg); |
| 2818 return; | 2826 return; |
| 2819 } | 2827 } |
| 2820 #endif | 2828 #endif |
| 2821 | 2829 |
| 2822 mov(r0, Operand(Smi::FromInt(reason))); | 2830 mov(r0, Operand(p0)); |
| 2823 push(r0); | 2831 push(r0); |
| 2824 | 2832 mov(r0, Operand(Smi::FromInt(p1 - p0))); |
| | 2833 push(r0); |
| 2825 // Disable stub call restrictions to always allow calls to abort. | 2834 // Disable stub call restrictions to always allow calls to abort. |
| 2826 if (!has_frame_) { | 2835 if (!has_frame_) { |
| 2827 // We don't actually want to generate a pile of code for this, so just | 2836 // We don't actually want to generate a pile of code for this, so just |
| 2828 // claim there is a stack frame, without generating one. | 2837 // claim there is a stack frame, without generating one. |
| 2829 FrameScope scope(this, StackFrame::NONE); | 2838 FrameScope scope(this, StackFrame::NONE); |
| 2830 CallRuntime(Runtime::kAbort, 1); | 2839 CallRuntime(Runtime::kAbort, 2); |
| 2831 } else { | 2840 } else { |
| 2832 CallRuntime(Runtime::kAbort, 1); | 2841 CallRuntime(Runtime::kAbort, 2); |
| 2833 } | 2842 } |
| 2834 // will not return here | 2843 // will not return here |
| 2835 if (is_const_pool_blocked()) { | 2844 if (is_const_pool_blocked()) { |
| 2836 // If the calling code cares about the exact number of | 2845 // If the calling code cares about the exact number of |
| 2837 // instructions generated, we insert padding here to keep the size | 2846 // instructions generated, we insert padding here to keep the size |
| 2838 // of the Abort macro constant. | 2847 // of the Abort macro constant. |
| 2839 static const int kExpectedAbortInstructions = 7; | 2848 static const int kExpectedAbortInstructions = 10; |
| 2840 int abort_instructions = InstructionsGeneratedSince(&abort_start); | 2849 int abort_instructions = InstructionsGeneratedSince(&abort_start); |
| 2841 ASSERT(abort_instructions <= kExpectedAbortInstructions); | 2850 ASSERT(abort_instructions <= kExpectedAbortInstructions); |
| 2842 while (abort_instructions++ < kExpectedAbortInstructions) { | 2851 while (abort_instructions++ < kExpectedAbortInstructions) { |
| 2843 nop(); | 2852 nop(); |
| 2844 } | 2853 } |
| 2845 } | 2854 } |
| 2846 } | 2855 } |
| 2847 | 2856 |
| 2848 | 2857 |
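The comment reintroduced at new lines 2809-2813 describes the trick this patch brings back: the message pointer is split into an aligned value that already looks like a smi plus a small alignment delta, and the runtime side adds the delta back. A minimal standalone sketch of that arithmetic, assuming the 32-bit tagging constants kSmiTag == 0 and kSmiTagMask == 1 (not the real V8 code path):

```cpp
// Sketch only: the pointer-splitting idea from the comment above, outside V8.
// The tagging constants below are assumptions, not taken from V8 headers.
#include <cstdint>
#include <cstdio>

const intptr_t kSmiTag = 0;      // assumed: smis carry a zero tag
const intptr_t kSmiTagMask = 1;  // assumed: the low bit distinguishes smis

int main() {
  const char* msg = "hypothetical bailout message";
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;  // aligned, passes the smi check
  intptr_t delta = p1 - p0;                     // 0 or 1 under this tagging
  // The receiving side adds the delta back to recover the original pointer.
  const char* recovered = reinterpret_cast<const char*>(p0 + delta);
  std::printf("%s\n", recovered);
  return 0;
}
```

Because both pushed values are valid smis, a GC that scans the stack during the runtime call cannot misinterpret the raw C string pointer.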
| 2849 void MacroAssembler::LoadContext(Register dst, int context_chain_length) { | 2858 void MacroAssembler::LoadContext(Register dst, int context_chain_length) { |
| (...skipping 33 matching lines...) |
| 2883 cmp(map_in_out, ip); | 2892 cmp(map_in_out, ip); |
| 2884 b(ne, no_map_match); | 2893 b(ne, no_map_match); |
| 2885 | 2894 |
| 2886 // Use the transitioned cached map. | 2895 // Use the transitioned cached map. |
| 2887 offset = transitioned_kind * kPointerSize + | 2896 offset = transitioned_kind * kPointerSize + |
| 2888 FixedArrayBase::kHeaderSize; | 2897 FixedArrayBase::kHeaderSize; |
| 2889 ldr(map_in_out, FieldMemOperand(scratch, offset)); | 2898 ldr(map_in_out, FieldMemOperand(scratch, offset)); |
| 2890 } | 2899 } |
| 2891 | 2900 |
| 2892 | 2901 |
| | 2902 void MacroAssembler::LoadInitialArrayMap( |
| | 2903 Register function_in, Register scratch, |
| | 2904 Register map_out, bool can_have_holes) { |
| | 2905 ASSERT(!function_in.is(map_out)); |
| | 2906 Label done; |
| | 2907 ldr(map_out, FieldMemOperand(function_in, |
| | 2908 JSFunction::kPrototypeOrInitialMapOffset)); |
| | 2909 if (!FLAG_smi_only_arrays) { |
| | 2910 ElementsKind kind = can_have_holes ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS; |
| | 2911 LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS, |
| | 2912 kind, |
| | 2913 map_out, |
| | 2914 scratch, |
| | 2915 &done); |
| | 2916 } else if (can_have_holes) { |
| | 2917 LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS, |
| | 2918 FAST_HOLEY_SMI_ELEMENTS, |
| | 2919 map_out, |
| | 2920 scratch, |
| | 2921 &done); |
| | 2922 } |
| | 2923 bind(&done); |
| | 2924 } |
| | 2925 |
| | 2926 |
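The added LoadInitialArrayMap attempts at most one elements-kind transition, chosen from FLAG_smi_only_arrays and can_have_holes. A plain-C++ sketch of just that selection logic, with the ElementsKind enum trimmed to the four values used above (illustrative, not the generated code):

```cpp
// Sketch only: which cached map, if any, replaces the initial array map.
#include <cstdio>

enum ElementsKind {
  FAST_SMI_ELEMENTS,        // packed smi-only: the initial map's kind
  FAST_HOLEY_SMI_ELEMENTS,
  FAST_ELEMENTS,
  FAST_HOLEY_ELEMENTS
};

// Returns the kind whose transitioned map should be loaded;
// FAST_SMI_ELEMENTS means the initial map is kept unchanged.
ElementsKind TargetKind(bool smi_only_arrays, bool can_have_holes) {
  if (!smi_only_arrays) {
    return can_have_holes ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS;
  }
  return can_have_holes ? FAST_HOLEY_SMI_ELEMENTS : FAST_SMI_ELEMENTS;
}

int main() {
  std::printf("%d\n", TargetKind(false, true));  // FAST_HOLEY_ELEMENTS
  return 0;
}
```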
| 2893 void MacroAssembler::LoadGlobalFunction(int index, Register function) { | 2927 void MacroAssembler::LoadGlobalFunction(int index, Register function) { |
| 2894 // Load the global or builtins object from the current context. | 2928 // Load the global or builtins object from the current context. |
| 2895 ldr(function, | 2929 ldr(function, |
| 2896 MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); | 2930 MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); |
| 2897 // Load the native context from the global or builtins object. | 2931 // Load the native context from the global or builtins object. |
| 2898 ldr(function, FieldMemOperand(function, | 2932 ldr(function, FieldMemOperand(function, |
| 2899 GlobalObject::kNativeContextOffset)); | 2933 GlobalObject::kNativeContextOffset)); |
| 2900 // Load the function from the native context. | 2934 // Load the function from the native context. |
| 2901 ldr(function, MemOperand(function, Context::SlotOffset(index))); | 2935 ldr(function, MemOperand(function, Context::SlotOffset(index))); |
| 2902 } | 2936 } |
| (...skipping 1138 matching lines...) |
| 4041 void CodePatcher::EmitCondition(Condition cond) { | 4075 void CodePatcher::EmitCondition(Condition cond) { |
| 4042 Instr instr = Assembler::instr_at(masm_.pc_); | 4076 Instr instr = Assembler::instr_at(masm_.pc_); |
| 4043 instr = (instr & ~kCondMask) | cond; | 4077 instr = (instr & ~kCondMask) | cond; |
| 4044 masm_.emit(instr); | 4078 masm_.emit(instr); |
| 4045 } | 4079 } |
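CodePatcher::EmitCondition rewrites only the condition field of the instruction at the patch site. On ARM that field sits in bits 31:28 of every instruction, so the patch is a single mask-and-or; a standalone sketch with the mask and encodings assumed here rather than taken from V8 headers:

```cpp
// Sketch only: swapping the ARM condition code of one encoded instruction.
#include <cstdint>
#include <cstdio>

typedef uint32_t Instr;

const Instr kCondMask = 0xF0000000u;  // assumed: condition code in bits 31:28
const Instr kAlways   = 0xE0000000u;  // AL
const Instr kEqual    = 0x00000000u;  // EQ

Instr SetCondition(Instr instr, Instr cond) {
  return (instr & ~kCondMask) | cond;  // clear old condition, insert new one
}

int main() {
  Instr mov_r0_r1 = 0xE1A00001u;                   // mov r0, r1 (always)
  Instr patched = SetCondition(mov_r0_r1, kEqual); // becomes moveq r0, r1
  std::printf("0x%08X -> 0x%08X\n", mov_r0_r1, patched);
  return 0;
}
```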
| 4046 | 4080 |
| 4047 | 4081 |
| 4048 } } // namespace v8::internal | 4082 } } // namespace v8::internal |
| 4049 | 4083 |
| 4050 #endif // V8_TARGET_ARCH_ARM | 4084 #endif // V8_TARGET_ARCH_ARM |