| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 584 matching lines...) |
| 595 StoreBufferOverflowStub(fp_mode); | 595 StoreBufferOverflowStub(fp_mode); |
| 596 CallStub(&store_buffer_overflow); | 596 CallStub(&store_buffer_overflow); |
| 597 pop(lr); | 597 pop(lr); |
| 598 bind(&done); | 598 bind(&done); |
| 599 if (and_then == kReturnAtEnd) { | 599 if (and_then == kReturnAtEnd) { |
| 600 Ret(); | 600 Ret(); |
| 601 } | 601 } |
| 602 } | 602 } |
| 603 | 603 |
| 604 | 604 |
| 605 void MacroAssembler::PushFixedFrame(Register marker_reg) { |
| 606 ASSERT(!marker_reg.is_valid() || marker_reg.code() < pp.code()); |
| 607 stm(db_w, sp, (marker_reg.is_valid() ? marker_reg.bit() : 0) | |
| 608 (FLAG_enable_ool_constant_pool ? pp.bit() : 0) | |
| 609 cp.bit() | fp.bit() | lr.bit()); |
| 610 } |
| 611 |
| 612 |
| 613 void MacroAssembler::PopFixedFrame(Register marker_reg) { |
| 614 ASSERT(!marker_reg.is_valid() || marker_reg.code() < pp.code()); |
| 615 ldm(ia_w, sp, (marker_reg.is_valid() ? marker_reg.bit() : 0) | |
| 616 (FLAG_enable_ool_constant_pool ? pp.bit() : 0) | |
| 617 cp.bit() | fp.bit() | lr.bit()); |
| 618 } |
| 619 |
| 620 |
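The new PushFixedFrame/PopFixedFrame helpers fold the standard frame push/pop into one place and optionally include pp when --enable-ool-constant-pool is on. As a minimal sketch (not V8 code, with placeholder register codes): stm/ldm transfer registers in ascending register-number order regardless of how the mask is written, so the optional marker register only lands in the lowest slot if its code is below the others, which is what the ASSERT above guards.

```cpp
// Sketch only: how the PushFixedFrame/PopFixedFrame register mask is built.
// Register codes below are placeholders, not the ARM port's real assignments.
#include <cstdint>
#include <cstdio>

constexpr uint32_t Bit(int code) { return 1u << code; }

constexpr int kMarkerCode = 1;   // e.g. r1, as used by Prologue below
constexpr int kCpCode = 7;       // placeholder for cp
constexpr int kPpCode = 8;       // placeholder for pp
constexpr int kFpCode = 11;      // fp
constexpr int kLrCode = 14;      // lr

uint32_t FixedFrameMask(bool has_marker, bool ool_constant_pool) {
  // stm/ldm order the transfer by register number, not by mask order.
  return (has_marker ? Bit(kMarkerCode) : 0) |
         (ool_constant_pool ? Bit(kPpCode) : 0) |
         Bit(kCpCode) | Bit(kFpCode) | Bit(kLrCode);
}

int main() {
  std::printf("fixed frame mask, no marker/pp: 0x%04x\n",
              FixedFrameMask(false, false));
  std::printf("fixed frame mask, marker + pp:  0x%04x\n",
              FixedFrameMask(true, true));
  return 0;
}
```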
| 605 // Push and pop all registers that can hold pointers. | 621 // Push and pop all registers that can hold pointers. |
| 606 void MacroAssembler::PushSafepointRegisters() { | 622 void MacroAssembler::PushSafepointRegisters() { |
| 607 // Safepoints expect a block of contiguous register values starting with r0: | 623 // Safepoints expect a block of contiguous register values starting with r0: |
| 608 ASSERT(((1 << kNumSafepointSavedRegisters) - 1) == kSafepointSavedRegisters); | 624 ASSERT(((1 << kNumSafepointSavedRegisters) - 1) == kSafepointSavedRegisters); |
| 609 // Safepoints expect a block of kNumSafepointRegisters values on the | 625 // Safepoints expect a block of kNumSafepointRegisters values on the |
| 610 // stack, so adjust the stack for unsaved registers. | 626 // stack, so adjust the stack for unsaved registers. |
| 611 const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters; | 627 const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters; |
| 612 ASSERT(num_unsaved >= 0); | 628 ASSERT(num_unsaved >= 0); |
| 613 sub(sp, sp, Operand(num_unsaved * kPointerSize)); | 629 sub(sp, sp, Operand(num_unsaved * kPointerSize)); |
| 614 stm(db_w, sp, kSafepointSavedRegisters); | 630 stm(db_w, sp, kSafepointSavedRegisters); |
| (...skipping 248 matching lines...) |
| 863 const LowDwVfpRegister loc = LowDwVfpRegister::from_code(dst.code()); | 879 const LowDwVfpRegister loc = LowDwVfpRegister::from_code(dst.code()); |
| 864 vmov(loc.low(), src); | 880 vmov(loc.low(), src); |
| 865 } else { | 881 } else { |
| 866 vmov(dst, VmovIndexLo, src); | 882 vmov(dst, VmovIndexLo, src); |
| 867 } | 883 } |
| 868 } | 884 } |
| 869 | 885 |
| 870 | 886 |
| 871 void MacroAssembler::Prologue(PrologueFrameMode frame_mode) { | 887 void MacroAssembler::Prologue(PrologueFrameMode frame_mode) { |
| 872 if (frame_mode == BUILD_STUB_FRAME) { | 888 if (frame_mode == BUILD_STUB_FRAME) { |
| 873 stm(db_w, sp, cp.bit() | fp.bit() | lr.bit()); | 889 PushFixedFrame(); |
| 874 Push(Smi::FromInt(StackFrame::STUB)); | 890 Push(Smi::FromInt(StackFrame::STUB)); |
| 875 // Adjust FP to point to saved FP. | 891 // Adjust FP to point to saved FP. |
| 876 add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); | 892 add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); |
| 877 } else { | 893 } else { |
| 878 PredictableCodeSizeScope predictible_code_size_scope( | 894 PredictableCodeSizeScope predictible_code_size_scope( |
| 879 this, kNoCodeAgeSequenceLength * Assembler::kInstrSize); | 895 this, kNoCodeAgeSequenceLength * Assembler::kInstrSize); |
| 880 // The following three instructions must remain together and unmodified | 896 // The following three instructions must remain together and unmodified |
| 881 // for code aging to work properly. | 897 // for code aging to work properly. |
| 882 if (isolate()->IsCodePreAgingActive()) { | 898 if (isolate()->IsCodePreAgingActive()) { |
| 883 // Pre-age the code. | 899 // Pre-age the code. |
| 884 Code* stub = Code::GetPreAgedCodeAgeStub(isolate()); | 900 Code* stub = Code::GetPreAgedCodeAgeStub(isolate()); |
| 885 add(r0, pc, Operand(-8)); | 901 add(r0, pc, Operand(-8)); |
| 886 ldr(pc, MemOperand(pc, -4)); | 902 ldr(pc, MemOperand(pc, -4)); |
| 887 emit_code_stub_address(stub); | 903 emit_code_stub_address(stub); |
| 888 } else { | 904 } else { |
| 889 stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit()); | 905 PushFixedFrame(r1); |
| 890 nop(ip.code()); | 906 nop(ip.code()); |
| 891 // Adjust FP to point to saved FP. | 907 // Adjust FP to point to saved FP. |
| 892 add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); | 908 add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); |
| 893 } | 909 } |
| 894 } | 910 } |
| 895 } | 911 } |
| 896 | 912 |
| 897 | 913 |
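The pre-aging branch of Prologue emits a fixed three-instruction sequence whose arithmetic relies on A32 reads of pc returning the current instruction's address plus 8. A sketch of what each instruction computes, assuming the sequence starts at a hypothetical base address:

```cpp
// Sketch only: the addresses the pre-aging sequence computes, assuming it
// starts at a hypothetical `base`.
//   base + 0: add r0, pc, #-8    ; r0 = (base + 8) - 8 = base
//   base + 4: ldr pc, [pc, #-4]  ; loads the word at (base + 4 + 8) - 4 = base + 8
//   base + 8: <stub address>     ; written by emit_code_stub_address(stub)
#include <cstdint>
#include <cstdio>

int main() {
  const uint32_t kInstrSize = 4;
  const uint32_t base = 0x1000;  // hypothetical start of the sequence
  uint32_t r0 = (base + 0 * kInstrSize + 8) - 8;         // add r0, pc, #-8
  uint32_t load_from = (base + 1 * kInstrSize + 8) - 4;  // ldr pc, [pc, #-4]
  std::printf("r0 = %#x (start of the sequence)\n", r0);
  std::printf("stub address word read from %#x\n", load_from);
  return 0;
}
```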
| 898 void MacroAssembler::EnterFrame(StackFrame::Type type) { | 914 void MacroAssembler::EnterFrame(StackFrame::Type type) { |
| 899 // r0-r3: preserved | 915 // r0-r3: preserved |
| 900 stm(db_w, sp, cp.bit() | fp.bit() | lr.bit()); | 916 PushFixedFrame(); |
| 901 mov(ip, Operand(Smi::FromInt(type))); | 917 mov(ip, Operand(Smi::FromInt(type))); |
| 902 push(ip); | 918 push(ip); |
| 903 mov(ip, Operand(CodeObject())); | 919 mov(ip, Operand(CodeObject())); |
| 904 push(ip); | 920 push(ip); |
| 905 // Adjust FP to point to saved FP. | 921 // Adjust FP to point to saved FP. |
| 906 add(fp, sp, | 922 add(fp, sp, |
| 907 Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize)); | 923 Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize)); |
| 908 } | 924 } |
| 909 | 925 |
| 910 | 926 |
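For reference, the slot layout EnterFrame leaves behind can be read off the pushes themselves. The sketch below assumes a 32-bit target with --enable-ool-constant-pool off (so PushFixedFrame() stores only cp, fp and lr); it is derived from this diff, not from V8's frame-constant headers.

```cpp
// Sketch of the frame EnterFrame builds, assuming a 32-bit pointer size and
// no out-of-line constant pool. Offsets are relative to sp after the pushes.
#include <cstdio>

int main() {
  const int kPointerSize = 4;
  const char* slots[] = {
      "code object",         // pushed last, lowest address
      "frame type (Smi)",
      "cp",
      "saved fp",            // fp is adjusted to point here
      "lr (return address)"  // highest address
  };
  for (int i = 0; i < 5; ++i) {
    std::printf("[sp + %2d] %s\n", i * kPointerSize, slots[i]);
  }
  // fp ends up at sp + 3 * kPointerSize, i.e. the "saved fp" slot, which is
  // what add(fp, sp, kFixedFrameSizeFromFp + kPointerSize) achieves above.
  return 0;
}
```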
| (...skipping 116 matching lines...) |
| 1027 | 1043 |
| 1028 // Tear down the exit frame, pop the arguments, and return. | 1044 // Tear down the exit frame, pop the arguments, and return. |
| 1029 mov(sp, Operand(fp)); | 1045 mov(sp, Operand(fp)); |
| 1030 ldm(ia_w, sp, fp.bit() | lr.bit()); | 1046 ldm(ia_w, sp, fp.bit() | lr.bit()); |
| 1031 if (argument_count.is_valid()) { | 1047 if (argument_count.is_valid()) { |
| 1032 add(sp, sp, Operand(argument_count, LSL, kPointerSizeLog2)); | 1048 add(sp, sp, Operand(argument_count, LSL, kPointerSizeLog2)); |
| 1033 } | 1049 } |
| 1034 } | 1050 } |
| 1035 | 1051 |
| 1036 | 1052 |
| 1053 void MacroAssembler::RestoreConstantPoolPointer() { |
| 1054 if (FLAG_enable_ool_constant_pool) { |
| 1055 ldr(pp, MemOperand(fp, StandardFrameConstants::kConstantPoolOffset)); |
| 1056 } |
| 1057 } |
| 1058 |
| 1059 |
| 1060 void MacroAssembler::LoadConstantPoolPointer(Register js_function) { |
| 1061 if (FLAG_enable_ool_constant_pool) { |
| 1062 ldr(pp, FieldMemOperand(js_function, JSFunction::kConstantPoolOffset)); |
| 1063 } |
| 1064 } |
| 1065 |
| 1066 |
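The two helpers are no-ops unless out-of-line constant pools are enabled: LoadConstantPoolPointer repoints pp at a callee's pool before a JS call (see its use in InvokeFunction below), while RestoreConstantPoolPointer reloads the caller's pp from its frame slot. A rough model of that discipline, using hypothetical stand-in types rather than V8's:

```cpp
// Rough model only, with hypothetical stand-ins (ConstantPool, FunctionLike,
// FrameLike): every function owns its constant pool, so pp must be re-pointed
// at the callee's pool before entering its code and reloaded from the
// caller's frame slot afterwards.
#include <cstdio>
#include <vector>

struct ConstantPool {
  std::vector<long> entries;
};

struct FunctionLike {
  ConstantPool* constant_pool;      // analogue of the kConstantPoolOffset field
  void (*entry)(ConstantPool* pp);  // analogue of the code entry
};

struct FrameLike {
  ConstantPool* saved_pp;           // analogue of the frame's constant pool slot
};

void Invoke(FunctionLike* callee, FrameLike* caller_frame, ConstantPool* pp) {
  caller_frame->saved_pp = pp;           // frame building saves the caller's pp
  callee->entry(callee->constant_pool);  // LoadConstantPoolPointer + jump
  pp = caller_frame->saved_pp;           // RestoreConstantPoolPointer on return
  std::printf("caller pp restored: %p\n", static_cast<void*>(pp));
}

int main() {
  ConstantPool caller_pool, callee_pool;
  FrameLike frame{};
  FunctionLike callee{&callee_pool,
                      [](ConstantPool*) { std::printf("in callee\n"); }};
  Invoke(&callee, &frame, &caller_pool);
  return 0;
}
```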
| 1037 void MacroAssembler::GetCFunctionDoubleResult(const DwVfpRegister dst) { | 1067 void MacroAssembler::GetCFunctionDoubleResult(const DwVfpRegister dst) { |
| 1038 if (use_eabi_hardfloat()) { | 1068 if (use_eabi_hardfloat()) { |
| 1039 Move(dst, d0); | 1069 Move(dst, d0); |
| 1040 } else { | 1070 } else { |
| 1041 vmov(dst, r0, r1); | 1071 vmov(dst, r0, r1); |
| 1042 } | 1072 } |
| 1043 } | 1073 } |
| 1044 | 1074 |
| 1045 | 1075 |
| 1046 void MacroAssembler::SetCallKind(Register dst, CallKind call_kind) { | 1076 void MacroAssembler::SetCallKind(Register dst, CallKind call_kind) { |
| (...skipping 163 matching lines...) |
| 1210 ASSERT(flag == JUMP_FUNCTION || has_frame()); | 1240 ASSERT(flag == JUMP_FUNCTION || has_frame()); |
| 1211 | 1241 |
| 1212 // Contract with called JS functions requires that function is passed in r1. | 1242 // Contract with called JS functions requires that function is passed in r1. |
| 1213 ASSERT(fun.is(r1)); | 1243 ASSERT(fun.is(r1)); |
| 1214 | 1244 |
| 1215 Register expected_reg = r2; | 1245 Register expected_reg = r2; |
| 1216 Register code_reg = r3; | 1246 Register code_reg = r3; |
| 1217 | 1247 |
| 1218 ldr(code_reg, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); | 1248 ldr(code_reg, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); |
| 1219 ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); | 1249 ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); |
| 1250 LoadConstantPoolPointer(r1); |
| 1220 ldr(expected_reg, | 1251 ldr(expected_reg, |
| 1221 FieldMemOperand(code_reg, | 1252 FieldMemOperand(code_reg, |
| 1222 SharedFunctionInfo::kFormalParameterCountOffset)); | 1253 SharedFunctionInfo::kFormalParameterCountOffset)); |
| 1223 SmiUntag(expected_reg); | 1254 SmiUntag(expected_reg); |
| 1224 ldr(code_reg, | 1255 ldr(code_reg, |
| 1225 FieldMemOperand(r1, JSFunction::kCodeEntryOffset)); | 1256 FieldMemOperand(r1, JSFunction::kCodeEntryOffset)); |
| 1226 | 1257 |
| 1227 ParameterCount expected(expected_reg); | 1258 ParameterCount expected(expected_reg); |
| 1228 InvokeCode(code_reg, expected, actual, flag, call_wrapper, call_kind); | 1259 InvokeCode(code_reg, expected, actual, flag, call_wrapper, call_kind); |
| 1229 } | 1260 } |
| 1230 | 1261 |
| 1231 | 1262 |
| 1232 void MacroAssembler::InvokeFunction(Handle<JSFunction> function, | 1263 void MacroAssembler::InvokeFunction(Handle<JSFunction> function, |
| 1233 const ParameterCount& expected, | 1264 const ParameterCount& expected, |
| 1234 const ParameterCount& actual, | 1265 const ParameterCount& actual, |
| 1235 InvokeFlag flag, | 1266 InvokeFlag flag, |
| 1236 const CallWrapper& call_wrapper, | 1267 const CallWrapper& call_wrapper, |
| 1237 CallKind call_kind) { | 1268 CallKind call_kind) { |
| 1238 // You can't call a function without a valid frame. | 1269 // You can't call a function without a valid frame. |
| 1239 ASSERT(flag == JUMP_FUNCTION || has_frame()); | 1270 ASSERT(flag == JUMP_FUNCTION || has_frame()); |
| 1240 | 1271 |
| 1241 // Get the function and setup the context. | 1272 // Get the function and setup the context. |
| 1242 Move(r1, function); | 1273 Move(r1, function); |
| 1243 ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); | 1274 ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); |
| 1275 LoadConstantPoolPointer(r1); |
| 1244 | 1276 |
| 1245 // We call indirectly through the code field in the function to | 1277 // We call indirectly through the code field in the function to |
| 1246 // allow recompilation to take effect without changing any of the | 1278 // allow recompilation to take effect without changing any of the |
| 1247 // call sites. | 1279 // call sites. |
| 1248 ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset)); | 1280 ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset)); |
| 1249 InvokeCode(r3, expected, actual, flag, call_wrapper, call_kind); | 1281 InvokeCode(r3, expected, actual, flag, call_wrapper, call_kind); |
| 1250 } | 1282 } |
| 1251 | 1283 |
| 1252 | 1284 |
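The comment about calling indirectly through the code field is why r3 is reloaded on every invocation. A sketch of the idea with hypothetical stand-in types (not V8's): swapping the single code pointer after recompilation retargets all future calls without patching any call site.

```cpp
// Sketch only: indirect calls through the function object's code pointer,
// so replacing that one pointer redirects every future call.
#include <cstdio>

struct CodeLike { void (*entry)(); };

struct FunctionLike { CodeLike* code; };  // analogue of kCodeEntryOffset

void InvokeLike(FunctionLike* f) { f->code->entry(); }  // ldr r3, [...]; call

int main() {
  CodeLike unoptimized{[] { std::printf("unoptimized\n"); }};
  CodeLike optimized{[] { std::printf("optimized\n"); }};
  FunctionLike f{&unoptimized};
  InvokeLike(&f);        // runs the unoptimized entry
  f.code = &optimized;   // "recompilation" swaps the code object
  InvokeLike(&f);        // same call site now runs the optimized entry
  return 0;
}
```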
| 1253 void MacroAssembler::IsObjectJSObjectType(Register heap_object, | 1285 void MacroAssembler::IsObjectJSObjectType(Register heap_object, |
| (...skipping 2802 matching lines...) |
| 4056 void CodePatcher::EmitCondition(Condition cond) { | 4088 void CodePatcher::EmitCondition(Condition cond) { |
| 4057 Instr instr = Assembler::instr_at(masm_.pc_); | 4089 Instr instr = Assembler::instr_at(masm_.pc_); |
| 4058 instr = (instr & ~kCondMask) | cond; | 4090 instr = (instr & ~kCondMask) | cond; |
| 4059 masm_.emit(instr); | 4091 masm_.emit(instr); |
| 4060 } | 4092 } |
| 4061 | 4093 |
| 4062 | 4094 |
| 4063 } } // namespace v8::internal | 4095 } } // namespace v8::internal |
| 4064 | 4096 |
| 4065 #endif // V8_TARGET_ARCH_ARM | 4097 #endif // V8_TARGET_ARCH_ARM |