| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 11 matching lines...) |
| 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
| 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
| 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
| 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
| 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 27 | 27 |
| 28 #include <limits.h> // For LONG_MIN, LONG_MAX. | 28 #include <limits.h> // For LONG_MIN, LONG_MAX. |
| 29 | 29 |
| 30 #include "v8.h" | 30 #include "v8.h" |
| 31 | 31 |
| 32 #if defined(V8_TARGET_ARCH_ARM) | 32 #if V8_TARGET_ARCH_ARM |
| 33 | 33 |
| 34 #include "bootstrapper.h" | 34 #include "bootstrapper.h" |
| 35 #include "codegen.h" | 35 #include "codegen.h" |
| 36 #include "cpu-profiler.h" |
| 36 #include "debug.h" | 37 #include "debug.h" |
| 37 #include "runtime.h" | 38 #include "runtime.h" |
| 38 | 39 |
| 39 namespace v8 { | 40 namespace v8 { |
| 40 namespace internal { | 41 namespace internal { |
| 41 | 42 |
| 42 MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size) | 43 MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size) |
| 43 : Assembler(arg_isolate, buffer, size), | 44 : Assembler(arg_isolate, buffer, size), |
| 44 generating_stub_(false), | 45 generating_stub_(false), |
| 45 allow_stub_calls_(true), | 46 allow_stub_calls_(true), |
| (...skipping 932 matching lines...) |
| 978 SmiTag(scratch1, length); | 979 SmiTag(scratch1, length); |
| 979 LoadRoot(scratch2, map_index); | 980 LoadRoot(scratch2, map_index); |
| 980 str(scratch1, FieldMemOperand(string, String::kLengthOffset)); | 981 str(scratch1, FieldMemOperand(string, String::kLengthOffset)); |
| 981 mov(scratch1, Operand(String::kEmptyHashField)); | 982 mov(scratch1, Operand(String::kEmptyHashField)); |
| 982 str(scratch2, FieldMemOperand(string, HeapObject::kMapOffset)); | 983 str(scratch2, FieldMemOperand(string, HeapObject::kMapOffset)); |
| 983 str(scratch1, FieldMemOperand(string, String::kHashFieldOffset)); | 984 str(scratch1, FieldMemOperand(string, String::kHashFieldOffset)); |
| 984 } | 985 } |
| 985 | 986 |
| 986 | 987 |
| 987 int MacroAssembler::ActivationFrameAlignment() { | 988 int MacroAssembler::ActivationFrameAlignment() { |
| 988 #if defined(V8_HOST_ARCH_ARM) | 989 #if V8_HOST_ARCH_ARM |
| 989 // Running on the real platform. Use the alignment as mandated by the local | 990 // Running on the real platform. Use the alignment as mandated by the local |
| 990 // environment. | 991 // environment. |
| 991 // Note: This will break if we ever start generating snapshots on one ARM | 992 // Note: This will break if we ever start generating snapshots on one ARM |
| 992 // platform for another ARM platform with a different alignment. | 993 // platform for another ARM platform with a different alignment. |
| 993 return OS::ActivationFrameAlignment(); | 994 return OS::ActivationFrameAlignment(); |
| 994 #else // defined(V8_HOST_ARCH_ARM) | 995 #else // V8_HOST_ARCH_ARM |
| 995 // If we are using the simulator then we should always align to the expected | 996 // If we are using the simulator then we should always align to the expected |
| 996 // alignment. As the simulator is used to generate snapshots we do not know | 997 // alignment. As the simulator is used to generate snapshots we do not know |
| 997 // if the target platform will need alignment, so this is controlled from a | 998 // if the target platform will need alignment, so this is controlled from a |
| 998 // flag. | 999 // flag. |
| 999 return FLAG_sim_stack_alignment; | 1000 return FLAG_sim_stack_alignment; |
| 1000 #endif // defined(V8_HOST_ARCH_ARM) | 1001 #endif // V8_HOST_ARCH_ARM |
| 1001 } | 1002 } |
| 1002 | 1003 |
| 1003 | 1004 |
| 1004 void MacroAssembler::LeaveExitFrame(bool save_doubles, | 1005 void MacroAssembler::LeaveExitFrame(bool save_doubles, |
| 1005 Register argument_count) { | 1006 Register argument_count) { |
| 1006 // Optionally restore all double registers. | 1007 // Optionally restore all double registers. |
| 1007 if (save_doubles) { | 1008 if (save_doubles) { |
| 1008 // Calculate the stack location of the saved doubles and restore them. | 1009 // Calculate the stack location of the saved doubles and restore them. |
| 1009 const int offset = 2 * kPointerSize; | 1010 const int offset = 2 * kPointerSize; |
| 1010 sub(r3, fp, | 1011 sub(r3, fp, |
| (...skipping 14 matching lines...) |
| 1025 #endif | 1026 #endif |
| 1026 | 1027 |
| 1027 // Tear down the exit frame, pop the arguments, and return. | 1028 // Tear down the exit frame, pop the arguments, and return. |
| 1028 mov(sp, Operand(fp)); | 1029 mov(sp, Operand(fp)); |
| 1029 ldm(ia_w, sp, fp.bit() | lr.bit()); | 1030 ldm(ia_w, sp, fp.bit() | lr.bit()); |
| 1030 if (argument_count.is_valid()) { | 1031 if (argument_count.is_valid()) { |
| 1031 add(sp, sp, Operand(argument_count, LSL, kPointerSizeLog2)); | 1032 add(sp, sp, Operand(argument_count, LSL, kPointerSizeLog2)); |
| 1032 } | 1033 } |
| 1033 } | 1034 } |
| 1034 | 1035 |
| 1036 |
| 1035 void MacroAssembler::GetCFunctionDoubleResult(const DwVfpRegister dst) { | 1037 void MacroAssembler::GetCFunctionDoubleResult(const DwVfpRegister dst) { |
| 1036 if (use_eabi_hardfloat()) { | 1038 if (use_eabi_hardfloat()) { |
| 1037 Move(dst, d0); | 1039 Move(dst, d0); |
| 1038 } else { | 1040 } else { |
| 1039 vmov(dst, r0, r1); | 1041 vmov(dst, r0, r1); |
| 1040 } | 1042 } |
| 1041 } | 1043 } |
| 1042 | 1044 |
| 1043 | 1045 |
| 1044 void MacroAssembler::SetCallKind(Register dst, CallKind call_kind) { | 1046 void MacroAssembler::SetCallKind(Register dst, CallKind call_kind) { |
| (...skipping 2351 matching lines...) |
| 3396 } | 3398 } |
| 3397 | 3399 |
| 3398 | 3400 |
| 3399 void MacroAssembler::CallCFunctionHelper(Register function, | 3401 void MacroAssembler::CallCFunctionHelper(Register function, |
| 3400 int num_reg_arguments, | 3402 int num_reg_arguments, |
| 3401 int num_double_arguments) { | 3403 int num_double_arguments) { |
| 3402 ASSERT(has_frame()); | 3404 ASSERT(has_frame()); |
| 3403 // Make sure that the stack is aligned before calling a C function unless | 3405 // Make sure that the stack is aligned before calling a C function unless |
| 3404 // running in the simulator. The simulator has its own alignment check which | 3406 // running in the simulator. The simulator has its own alignment check which |
| 3405 // provides more information. | 3407 // provides more information. |
| 3406 #if defined(V8_HOST_ARCH_ARM) | 3408 #if V8_HOST_ARCH_ARM |
| 3407 if (emit_debug_code()) { | 3409 if (emit_debug_code()) { |
| 3408 int frame_alignment = OS::ActivationFrameAlignment(); | 3410 int frame_alignment = OS::ActivationFrameAlignment(); |
| 3409 int frame_alignment_mask = frame_alignment - 1; | 3411 int frame_alignment_mask = frame_alignment - 1; |
| 3410 if (frame_alignment > kPointerSize) { | 3412 if (frame_alignment > kPointerSize) { |
| 3411 ASSERT(IsPowerOf2(frame_alignment)); | 3413 ASSERT(IsPowerOf2(frame_alignment)); |
| 3412 Label alignment_as_expected; | 3414 Label alignment_as_expected; |
| 3413 tst(sp, Operand(frame_alignment_mask)); | 3415 tst(sp, Operand(frame_alignment_mask)); |
| 3414 b(eq, &alignment_as_expected); | 3416 b(eq, &alignment_as_expected); |
| 3415 // Don't use Check here, as it will call Runtime_Abort possibly | 3417 // Don't use Check here, as it will call Runtime_Abort possibly |
| 3416 // re-entering here. | 3418 // re-entering here. |
| (...skipping 409 matching lines...) |
| 3826 void CodePatcher::EmitCondition(Condition cond) { | 3828 void CodePatcher::EmitCondition(Condition cond) { |
| 3827 Instr instr = Assembler::instr_at(masm_.pc_); | 3829 Instr instr = Assembler::instr_at(masm_.pc_); |
| 3828 instr = (instr & ~kCondMask) | cond; | 3830 instr = (instr & ~kCondMask) | cond; |
| 3829 masm_.emit(instr); | 3831 masm_.emit(instr); |
| 3830 } | 3832 } |
| 3831 | 3833 |
| 3832 | 3834 |
| 3833 } } // namespace v8::internal | 3835 } } // namespace v8::internal |
| 3834 | 3836 |
| 3835 #endif // V8_TARGET_ARCH_ARM | 3837 #endif // V8_TARGET_ARCH_ARM |
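
The SmiTag(scratch1, length) call in the string-initialization rows above stores the length as a small integer (Smi). On 32-bit ARM, V8 encodes a Smi as the payload shifted left by one bit with a zero tag bit, while heap pointers carry a set low bit. A minimal standalone sketch of that encoding, assuming the 32-bit layout (kSmiTagSize == 1, tag == 0); these helpers are illustrative models, not V8's API:

#include <cassert>
#include <cstdint>

// Tag: shift the payload into the upper 31 bits, leaving tag bit 0 clear.
// The unsigned cast avoids undefined behavior when shifting negatives.
static inline int32_t SmiTagSketch(int32_t value) {
  assert(value >= -(1 << 30) && value < (1 << 30));  // must fit in 31 bits
  return static_cast<int32_t>(static_cast<uint32_t>(value) << 1);
}

// Untag: an arithmetic right shift restores the signed payload.
static inline int32_t SmiUntagSketch(int32_t smi) {
  return smi >> 1;
}

// A word with a clear low bit is a Smi; heap object pointers have it set.
static inline bool IsSmiSketch(int32_t word) {
  return (word & 1) == 0;
}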
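GetCFunctionDoubleResult handles the two ARM EABI variants: with hardfloat, the C callee leaves its double result in d0; with softfloat, it comes back split across the r0/r1 core-register pair, which vmov(dst, r0, r1) reassembles. A hedged host-side illustration of that reassembly, assuming little-endian word order (r0 carries the low word):

#include <cstdint>
#include <cstring>

// Rebuild a double from two 32-bit halves, mirroring what
// vmov(dst, r0, r1) does in the generated code.
static inline double DoubleFromWordPair(uint32_t lo, uint32_t hi) {
  uint64_t bits = (static_cast<uint64_t>(hi) << 32) | lo;
  double result;
  std::memcpy(&result, &bits, sizeof(result));  // type-pun safely
  return result;
}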
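The debug-mode check in CallCFunctionHelper verifies that sp is a multiple of the platform's frame alignment before the call. Because the alignment is a power of two, masking with (alignment - 1) tests divisibility without any division; this is exactly what the generated tst/b(eq) pair does. A minimal C++ model of the same test (names here are illustrative, not V8 identifiers):

#include <cassert>
#include <cstdint>

// (addr & (alignment - 1)) == 0 exactly when addr is a multiple of a
// power-of-two alignment -- the condition the tst instruction checks.
static inline bool IsAlignedSketch(uintptr_t addr, uintptr_t alignment) {
  assert((alignment & (alignment - 1)) == 0);  // power of two required
  return (addr & (alignment - 1)) == 0;
}

// Rounding down is the corresponding fix-up a frame prologue applies
// when the stack pointer is not yet aligned.
static inline uintptr_t AlignDownSketch(uintptr_t addr, uintptr_t alignment) {
  return addr & ~(alignment - 1);
}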
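Finally, CodePatcher::EmitCondition rewrites the 4-bit condition field of an already-emitted ARM instruction in place. A sketch of the bit manipulation, assuming the standard ARM encoding where the condition occupies bits 31:28 (so kCondMask is the top nibble); the constants below are illustrative stand-ins for V8's:

#include <cstdint>

constexpr uint32_t kCondMaskSketch = 0xF0000000u;  // condition bits 31:28
constexpr uint32_t kEqSketch = 0x0u << 28;         // condition code 0000
constexpr uint32_t kNeSketch = 0x1u << 28;         // condition code 0001

// Clear the old condition bits, then OR in the new ones -- the same
// (instr & ~kCondMask) | cond computation EmitCondition performs.
static inline uint32_t PatchCondition(uint32_t instr, uint32_t cond) {
  return (instr & ~kCondMaskSketch) | cond;
}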