OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 115 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
126 // Block constant pool for the call instruction sequence. | 126 // Block constant pool for the call instruction sequence. |
127 BlockConstPoolScope block_const_pool(this); | 127 BlockConstPoolScope block_const_pool(this); |
128 Label start; | 128 Label start; |
129 bind(&start); | 129 bind(&start); |
130 | 130 |
131 bool old_predictable_code_size = predictable_code_size(); | 131 bool old_predictable_code_size = predictable_code_size(); |
132 if (mode == NEVER_INLINE_TARGET_ADDRESS) { | 132 if (mode == NEVER_INLINE_TARGET_ADDRESS) { |
133 set_predictable_code_size(true); | 133 set_predictable_code_size(true); |
134 } | 134 } |
135 | 135 |
136 // Check the expected size before generating code to ensure we assume the same | |
137 // constant pool availability (e.g., whether constant pool is full or not). | |
138 int expected_size = CallSize(target, rmode, cond); | |
139 | |
136 // Call sequence on V7 or later may be : | 140 // Call sequence on V7 or later may be : |
137 // movw ip, #... @ call address low 16 | 141 // movw ip, #... @ call address low 16 |
138 // movt ip, #... @ call address high 16 | 142 // movt ip, #... @ call address high 16 |
139 // blx ip | 143 // blx ip |
140 // @ return address | 144 // @ return address |
141 // Or for pre-V7 or values that may be back-patched | 145 // Or for pre-V7 or values that may be back-patched |
142 // to avoid ICache flushes: | 146 // to avoid ICache flushes: |
143 // ldr ip, [pc, #...] @ call address | 147 // ldr ip, [pc, #...] @ call address |
144 // blx ip | 148 // blx ip |
145 // @ return address | 149 // @ return address |
146 | 150 |
147 // Statement positions are expected to be recorded when the target | 151 // Statement positions are expected to be recorded when the target |
148 // address is loaded. The mov method will automatically record | 152 // address is loaded. The mov method will automatically record |
149 // positions when pc is the target, since this is not the case here | 153 // positions when pc is the target, since this is not the case here |
150 // we have to do it explicitly. | 154 // we have to do it explicitly. |
151 positions_recorder()->WriteRecordedPositions(); | 155 positions_recorder()->WriteRecordedPositions(); |
152 | 156 |
153 mov(ip, Operand(reinterpret_cast<int32_t>(target), rmode)); | 157 mov(ip, Operand(reinterpret_cast<int32_t>(target), rmode)); |
154 blx(ip, cond); | 158 blx(ip, cond); |
155 | 159 |
156 ASSERT_EQ(CallSize(target, rmode, cond), SizeOfCodeGeneratedSince(&start)); | 160 ASSERT_EQ(expected_size, SizeOfCodeGeneratedSince(&start)); |
157 if (mode == NEVER_INLINE_TARGET_ADDRESS) { | 161 if (mode == NEVER_INLINE_TARGET_ADDRESS) { |
158 set_predictable_code_size(old_predictable_code_size); | 162 set_predictable_code_size(old_predictable_code_size); |
159 } | 163 } |
160 } | 164 } |
161 | 165 |
162 | 166 |
163 int MacroAssembler::CallSize(Handle<Code> code, | 167 int MacroAssembler::CallSize(Handle<Code> code, |
164 RelocInfo::Mode rmode, | 168 RelocInfo::Mode rmode, |
165 TypeFeedbackId ast_id, | 169 TypeFeedbackId ast_id, |
166 Condition cond) { | 170 Condition cond) { |
(...skipping 751 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
918 } else { | 922 } else { |
919 PushFixedFrame(r1); | 923 PushFixedFrame(r1); |
920 nop(ip.code()); | 924 nop(ip.code()); |
921 // Adjust FP to point to saved FP. | 925 // Adjust FP to point to saved FP. |
922 add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); | 926 add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); |
923 } | 927 } |
924 } | 928 } |
925 if (FLAG_enable_ool_constant_pool) { | 929 if (FLAG_enable_ool_constant_pool) { |
926 LoadConstantPoolPointerRegister(); | 930 LoadConstantPoolPointerRegister(); |
927 set_constant_pool_available(true); | 931 set_constant_pool_available(true); |
932 set_constant_pool_available(true); | |
Rodolph Perfetta (ARM)
2014/03/12 14:46:20
duplication
rmcilroy
2014/03/17 18:31:34
Thanks (merge errors...)
| |
928 } | 933 } |
929 } | 934 } |
930 | 935 |
931 | 936 |
932 void MacroAssembler::EnterFrame(StackFrame::Type type, | 937 void MacroAssembler::EnterFrame(StackFrame::Type type, |
933 bool load_constant_pool) { | 938 bool load_constant_pool) { |
934 // r0-r3: preserved | 939 // r0-r3: preserved |
935 PushFixedFrame(); | 940 PushFixedFrame(); |
936 if (FLAG_enable_ool_constant_pool && load_constant_pool) { | 941 if (FLAG_enable_ool_constant_pool && load_constant_pool) { |
937 LoadConstantPoolPointerRegister(); | 942 LoadConstantPoolPointerRegister(); |
(...skipping 109 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1047 // if the target platform will need alignment, so this is controlled from a | 1052 // if the target platform will need alignment, so this is controlled from a |
1048 // flag. | 1053 // flag. |
1049 return FLAG_sim_stack_alignment; | 1054 return FLAG_sim_stack_alignment; |
1050 #endif // V8_HOST_ARCH_ARM | 1055 #endif // V8_HOST_ARCH_ARM |
1051 } | 1056 } |
1052 | 1057 |
1053 | 1058 |
1054 void MacroAssembler::LeaveExitFrame(bool save_doubles, | 1059 void MacroAssembler::LeaveExitFrame(bool save_doubles, |
1055 Register argument_count, | 1060 Register argument_count, |
1056 bool restore_context) { | 1061 bool restore_context) { |
1062 ConstantPoolUnavailableScope constant_pool_unavailable(this); | |
1063 | |
1057 // Optionally restore all double registers. | 1064 // Optionally restore all double registers. |
1058 if (save_doubles) { | 1065 if (save_doubles) { |
1059 // Calculate the stack location of the saved doubles and restore them. | 1066 // Calculate the stack location of the saved doubles and restore them. |
1060 const int offset = ExitFrameConstants::kFrameSize; | 1067 const int offset = ExitFrameConstants::kFrameSize; |
1061 sub(r3, fp, | 1068 sub(r3, fp, |
1062 Operand(offset + DwVfpRegister::kMaxNumRegisters * kDoubleSize)); | 1069 Operand(offset + DwVfpRegister::kMaxNumRegisters * kDoubleSize)); |
1063 RestoreFPRegs(r3, ip); | 1070 RestoreFPRegs(r3, ip); |
1064 } | 1071 } |
1065 | 1072 |
1066 // Clear top frame. | 1073 // Clear top frame. |
1067 mov(r3, Operand::Zero()); | 1074 mov(r3, Operand::Zero()); |
1068 mov(ip, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate()))); | 1075 mov(ip, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate()))); |
1069 str(r3, MemOperand(ip)); | 1076 str(r3, MemOperand(ip)); |
1070 | 1077 |
1071 | |
1072 // Restore current context from top and clear it in debug mode. | 1078 // Restore current context from top and clear it in debug mode. |
1073 if (restore_context) { | 1079 if (restore_context) { |
1074 mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate()))); | 1080 mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate()))); |
1075 ldr(cp, MemOperand(ip)); | 1081 ldr(cp, MemOperand(ip)); |
1076 } | 1082 } |
1077 #ifdef DEBUG | 1083 #ifdef DEBUG |
1078 mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate()))); | 1084 mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate()))); |
1079 str(r3, MemOperand(ip)); | 1085 str(r3, MemOperand(ip)); |
1080 #endif | 1086 #endif |
1081 | 1087 |
(...skipping 286 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1368 mov(ip, Operand(ExternalReference(Isolate::kHandlerAddress, isolate()))); | 1374 mov(ip, Operand(ExternalReference(Isolate::kHandlerAddress, isolate()))); |
1369 add(sp, sp, Operand(StackHandlerConstants::kSize - kPointerSize)); | 1375 add(sp, sp, Operand(StackHandlerConstants::kSize - kPointerSize)); |
1370 str(r1, MemOperand(ip)); | 1376 str(r1, MemOperand(ip)); |
1371 } | 1377 } |
1372 | 1378 |
1373 | 1379 |
void MacroAssembler::JumpToHandlerEntry() {
  // Compute the handler entry address and jump to it. The handler table is
  // a fixed array of (smi-tagged) code offsets.
  // r0 = exception, r1 = code object, r2 = state.

  // The handler's code may use an out-of-line constant pool; until pp is
  // reloaded below, this frame's constant pool must not be used.
  ConstantPoolUnavailableScope constant_pool_unavailable(this);
  if (FLAG_enable_ool_constant_pool) {
    ldr(pp, FieldMemOperand(r1, Code::kConstantPoolOffset));  // Constant pool.
  }
  ldr(r3, FieldMemOperand(r1, Code::kHandlerTableOffset));  // Handler table.
  add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  // Extract the handler index from the state word in r2.
  mov(r2, Operand(r2, LSR, StackHandler::kKindWidth));  // Handler index.
  ldr(r2, MemOperand(r3, r2, LSL, kPointerSizeLog2));  // Smi-tagged offset.
  add(r1, r1, Operand(Code::kHeaderSize - kHeapObjectTag));  // Code start.
  // Writing to pc performs the jump: code start + untagged handler offset.
  add(pc, r1, Operand::SmiUntag(r2));  // Jump
}
1385 | 1396 |
1386 | 1397 |
1387 void MacroAssembler::Throw(Register value) { | 1398 void MacroAssembler::Throw(Register value) { |
(...skipping 2146 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
3534 num_reg_arguments, num_double_arguments); | 3545 num_reg_arguments, num_double_arguments); |
3535 if (ActivationFrameAlignment() > kPointerSize) { | 3546 if (ActivationFrameAlignment() > kPointerSize) { |
3536 ldr(sp, MemOperand(sp, stack_passed_arguments * kPointerSize)); | 3547 ldr(sp, MemOperand(sp, stack_passed_arguments * kPointerSize)); |
3537 } else { | 3548 } else { |
3538 add(sp, sp, Operand(stack_passed_arguments * sizeof(kPointerSize))); | 3549 add(sp, sp, Operand(stack_passed_arguments * sizeof(kPointerSize))); |
3539 } | 3550 } |
3540 } | 3551 } |
3541 | 3552 |
3542 | 3553 |
3543 void MacroAssembler::GetRelocatedValueLocation(Register ldr_location, | 3554 void MacroAssembler::GetRelocatedValueLocation(Register ldr_location, |
3544 Register result) { | 3555 Register result) { |
3545 const uint32_t kLdrOffsetMask = (1 << 12) - 1; | 3556 const uint32_t kLdrOffsetMask = (1 << 12) - 1; |
3546 const int32_t kPCRegOffset = 2 * kPointerSize; | |
3547 ldr(result, MemOperand(ldr_location)); | 3557 ldr(result, MemOperand(ldr_location)); |
3548 if (emit_debug_code()) { | 3558 if (emit_debug_code()) { |
3549 // Check that the instruction is a ldr reg, [pc + offset] . | 3559 // Check that the instruction is a ldr reg, [<pc or pp> + offset] . |
3550 and_(result, result, Operand(kLdrPCPattern)); | 3560 if (FLAG_enable_ool_constant_pool) { |
3551 cmp(result, Operand(kLdrPCPattern)); | 3561 and_(result, result, Operand(kLdrPpPattern)); |
3552 Check(eq, kTheInstructionToPatchShouldBeALoadFromPc); | 3562 cmp(result, Operand(kLdrPpPattern)); |
3563 Check(eq, kTheInstructionToPatchShouldBeALoadFromPp); | |
3564 } else { | |
3565 and_(result, result, Operand(kLdrPCPattern)); | |
3566 cmp(result, Operand(kLdrPCPattern)); | |
3567 Check(eq, kTheInstructionToPatchShouldBeALoadFromPc); | |
3568 } | |
3553 // Result was clobbered. Restore it. | 3569 // Result was clobbered. Restore it. |
3554 ldr(result, MemOperand(ldr_location)); | 3570 ldr(result, MemOperand(ldr_location)); |
3555 } | 3571 } |
3556 // Get the address of the constant. | 3572 // Get the address of the constant. |
3557 and_(result, result, Operand(kLdrOffsetMask)); | 3573 and_(result, result, Operand(kLdrOffsetMask)); |
3558 add(result, ldr_location, Operand(result)); | 3574 if (FLAG_enable_ool_constant_pool) { |
3559 add(result, result, Operand(kPCRegOffset)); | 3575 add(result, pp, Operand(result)); |
3576 } else { | |
3577 const int32_t kPCRegOffset = 2 * kPointerSize; | |
Rodolph Perfetta (ARM)
2014/03/12 14:46:20
use kPCReadOffset instead
rmcilroy
2014/03/17 18:31:34
Done.
| |
3578 add(result, ldr_location, Operand(result)); | |
3579 add(result, result, Operand(kPCRegOffset)); | |
3580 } | |
3560 } | 3581 } |
3561 | 3582 |
3562 | 3583 |
3563 void MacroAssembler::CheckPageFlag( | 3584 void MacroAssembler::CheckPageFlag( |
3564 Register object, | 3585 Register object, |
3565 Register scratch, | 3586 Register scratch, |
3566 int mask, | 3587 int mask, |
3567 Condition cc, | 3588 Condition cc, |
3568 Label* condition_met) { | 3589 Label* condition_met) { |
3569 Bfc(scratch, object, 0, kPageSizeBits); | 3590 Bfc(scratch, object, 0, kPageSizeBits); |
(...skipping 467 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
4037 void CodePatcher::EmitCondition(Condition cond) { | 4058 void CodePatcher::EmitCondition(Condition cond) { |
4038 Instr instr = Assembler::instr_at(masm_.pc_); | 4059 Instr instr = Assembler::instr_at(masm_.pc_); |
4039 instr = (instr & ~kCondMask) | cond; | 4060 instr = (instr & ~kCondMask) | cond; |
4040 masm_.emit(instr); | 4061 masm_.emit(instr); |
4041 } | 4062 } |
4042 | 4063 |
4043 | 4064 |
4044 } } // namespace v8::internal | 4065 } } // namespace v8::internal |
4045 | 4066 |
4046 #endif // V8_TARGET_ARCH_ARM | 4067 #endif // V8_TARGET_ARCH_ARM |
OLD | NEW |