OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 115 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
126 // Block constant pool for the call instruction sequence. | 126 // Block constant pool for the call instruction sequence. |
127 BlockConstPoolScope block_const_pool(this); | 127 BlockConstPoolScope block_const_pool(this); |
128 Label start; | 128 Label start; |
129 bind(&start); | 129 bind(&start); |
130 | 130 |
131 bool old_predictable_code_size = predictable_code_size(); | 131 bool old_predictable_code_size = predictable_code_size(); |
132 if (mode == NEVER_INLINE_TARGET_ADDRESS) { | 132 if (mode == NEVER_INLINE_TARGET_ADDRESS) { |
133 set_predictable_code_size(true); | 133 set_predictable_code_size(true); |
134 } | 134 } |
135 | 135 |
136 // Check the expected size before generating code to ensure we assume the same | |
137 // constant pool availability (e.g., whether constant pool is full or not). | |
138 int expected_size = CallSize(target, rmode, cond); | |
139 | |
140 // Call sequence on V7 or later may be: | 136 // Call sequence on V7 or later may be: |
141 // movw ip, #... @ call address low 16 | 137 // movw ip, #... @ call address low 16 |
142 // movt ip, #... @ call address high 16 | 138 // movt ip, #... @ call address high 16 |
143 // blx ip | 139 // blx ip |
144 // @ return address | 140 // @ return address |
145 // Or for pre-V7 or values that may be back-patched | 141 // Or for pre-V7 or values that may be back-patched |
146 // to avoid ICache flushes: | 142 // to avoid ICache flushes: |
147 // ldr ip, [pc, #...] @ call address | 143 // ldr ip, [pc, #...] @ call address |
148 // blx ip | 144 // blx ip |
149 // @ return address | 145 // @ return address |
150 | 146 |
151 // Statement positions are expected to be recorded when the target | 147 // Statement positions are expected to be recorded when the target |
152 // address is loaded. The mov method will automatically record | 148 // address is loaded. The mov method will automatically record |
153 // positions when pc is the target, since this is not the case here | 149 // positions when pc is the target, since this is not the case here |
154 // we have to do it explicitly. | 150 // we have to do it explicitly. |
155 positions_recorder()->WriteRecordedPositions(); | 151 positions_recorder()->WriteRecordedPositions(); |
156 | 152 |
157 mov(ip, Operand(reinterpret_cast<int32_t>(target), rmode)); | 153 mov(ip, Operand(reinterpret_cast<int32_t>(target), rmode)); |
158 blx(ip, cond); | 154 blx(ip, cond); |
159 | 155 |
160 ASSERT_EQ(expected_size, SizeOfCodeGeneratedSince(&start)); | 156 ASSERT_EQ(CallSize(target, rmode, cond), SizeOfCodeGeneratedSince(&start)); |
161 if (mode == NEVER_INLINE_TARGET_ADDRESS) { | 157 if (mode == NEVER_INLINE_TARGET_ADDRESS) { |
162 set_predictable_code_size(old_predictable_code_size); | 158 set_predictable_code_size(old_predictable_code_size); |
163 } | 159 } |
164 } | 160 } |
165 | 161 |
166 | 162 |
167 int MacroAssembler::CallSize(Handle<Code> code, | 163 int MacroAssembler::CallSize(Handle<Code> code, |
168 RelocInfo::Mode rmode, | 164 RelocInfo::Mode rmode, |
169 TypeFeedbackId ast_id, | 165 TypeFeedbackId ast_id, |
170 Condition cond) { | 166 Condition cond) { |
(...skipping 880 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1051 // if the target platform will need alignment, so this is controlled from a | 1047 // if the target platform will need alignment, so this is controlled from a |
1052 // flag. | 1048 // flag. |
1053 return FLAG_sim_stack_alignment; | 1049 return FLAG_sim_stack_alignment; |
1054 #endif // V8_HOST_ARCH_ARM | 1050 #endif // V8_HOST_ARCH_ARM |
1055 } | 1051 } |
1056 | 1052 |
1057 | 1053 |
1058 void MacroAssembler::LeaveExitFrame(bool save_doubles, | 1054 void MacroAssembler::LeaveExitFrame(bool save_doubles, |
1059 Register argument_count, | 1055 Register argument_count, |
1060 bool restore_context) { | 1056 bool restore_context) { |
1061 ConstantPoolUnavailableScope constant_pool_unavailable(this); | |
1062 | |
1063 // Optionally restore all double registers. | 1057 // Optionally restore all double registers. |
1064 if (save_doubles) { | 1058 if (save_doubles) { |
1065 // Calculate the stack location of the saved doubles and restore them. | 1059 // Calculate the stack location of the saved doubles and restore them. |
1066 const int offset = ExitFrameConstants::kFrameSize; | 1060 const int offset = ExitFrameConstants::kFrameSize; |
1067 sub(r3, fp, | 1061 sub(r3, fp, |
1068 Operand(offset + DwVfpRegister::kMaxNumRegisters * kDoubleSize)); | 1062 Operand(offset + DwVfpRegister::kMaxNumRegisters * kDoubleSize)); |
1069 RestoreFPRegs(r3, ip); | 1063 RestoreFPRegs(r3, ip); |
1070 } | 1064 } |
1071 | 1065 |
1072 // Clear top frame. | 1066 // Clear top frame. |
1073 mov(r3, Operand::Zero()); | 1067 mov(r3, Operand::Zero()); |
1074 mov(ip, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate()))); | 1068 mov(ip, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate()))); |
1075 str(r3, MemOperand(ip)); | 1069 str(r3, MemOperand(ip)); |
1076 | 1070 |
| 1071 |
1077 // Restore current context from top and clear it in debug mode. | 1072 // Restore current context from top and clear it in debug mode. |
1078 if (restore_context) { | 1073 if (restore_context) { |
1079 mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate()))); | 1074 mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate()))); |
1080 ldr(cp, MemOperand(ip)); | 1075 ldr(cp, MemOperand(ip)); |
1081 } | 1076 } |
1082 #ifdef DEBUG | 1077 #ifdef DEBUG |
1083 mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate()))); | 1078 mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate()))); |
1084 str(r3, MemOperand(ip)); | 1079 str(r3, MemOperand(ip)); |
1085 #endif | 1080 #endif |
1086 | 1081 |
(...skipping 286 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1373 mov(ip, Operand(ExternalReference(Isolate::kHandlerAddress, isolate()))); | 1368 mov(ip, Operand(ExternalReference(Isolate::kHandlerAddress, isolate()))); |
1374 add(sp, sp, Operand(StackHandlerConstants::kSize - kPointerSize)); | 1369 add(sp, sp, Operand(StackHandlerConstants::kSize - kPointerSize)); |
1375 str(r1, MemOperand(ip)); | 1370 str(r1, MemOperand(ip)); |
1376 } | 1371 } |
1377 | 1372 |
1378 | 1373 |
1379 void MacroAssembler::JumpToHandlerEntry() { | 1374 void MacroAssembler::JumpToHandlerEntry() { |
1380 // Compute the handler entry address and jump to it. The handler table is | 1375 // Compute the handler entry address and jump to it. The handler table is |
1381 // a fixed array of (smi-tagged) code offsets. | 1376 // a fixed array of (smi-tagged) code offsets. |
1382 // r0 = exception, r1 = code object, r2 = state. | 1377 // r0 = exception, r1 = code object, r2 = state. |
1383 | |
1384 ConstantPoolUnavailableScope constant_pool_unavailable(this); | |
1385 if (FLAG_enable_ool_constant_pool) { | |
1386 ldr(pp, FieldMemOperand(r1, Code::kConstantPoolOffset)); // Constant pool. | |
1387 } | |
1388 ldr(r3, FieldMemOperand(r1, Code::kHandlerTableOffset)); // Handler table. | 1378 ldr(r3, FieldMemOperand(r1, Code::kHandlerTableOffset)); // Handler table. |
1389 add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 1379 add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
1390 mov(r2, Operand(r2, LSR, StackHandler::kKindWidth)); // Handler index. | 1380 mov(r2, Operand(r2, LSR, StackHandler::kKindWidth)); // Handler index. |
1391 ldr(r2, MemOperand(r3, r2, LSL, kPointerSizeLog2)); // Smi-tagged offset. | 1381 ldr(r2, MemOperand(r3, r2, LSL, kPointerSizeLog2)); // Smi-tagged offset. |
1392 add(r1, r1, Operand(Code::kHeaderSize - kHeapObjectTag)); // Code start. | 1382 add(r1, r1, Operand(Code::kHeaderSize - kHeapObjectTag)); // Code start. |
1393 add(pc, r1, Operand::SmiUntag(r2)); // Jump | 1383 add(pc, r1, Operand::SmiUntag(r2)); // Jump |
1394 } | 1384 } |
1395 | 1385 |
1396 | 1386 |
1397 void MacroAssembler::Throw(Register value) { | 1387 void MacroAssembler::Throw(Register value) { |
(...skipping 2160 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3558 num_reg_arguments, num_double_arguments); | 3548 num_reg_arguments, num_double_arguments); |
3559 if (ActivationFrameAlignment() > kPointerSize) { | 3549 if (ActivationFrameAlignment() > kPointerSize) { |
3560 ldr(sp, MemOperand(sp, stack_passed_arguments * kPointerSize)); | 3550 ldr(sp, MemOperand(sp, stack_passed_arguments * kPointerSize)); |
3561 } else { | 3551 } else { |
3562 add(sp, sp, Operand(stack_passed_arguments * sizeof(kPointerSize))); | 3552 add(sp, sp, Operand(stack_passed_arguments * sizeof(kPointerSize))); |
3563 } | 3553 } |
3564 } | 3554 } |
3565 | 3555 |
3566 | 3556 |
3567 void MacroAssembler::GetRelocatedValueLocation(Register ldr_location, | 3557 void MacroAssembler::GetRelocatedValueLocation(Register ldr_location, |
3568 Register result) { | 3558 Register result) { |
3569 const uint32_t kLdrOffsetMask = (1 << 12) - 1; | 3559 const uint32_t kLdrOffsetMask = (1 << 12) - 1; |
| 3560 const int32_t kPCRegOffset = 2 * kPointerSize; |
3570 ldr(result, MemOperand(ldr_location)); | 3561 ldr(result, MemOperand(ldr_location)); |
3571 if (emit_debug_code()) { | 3562 if (emit_debug_code()) { |
3572 // Check that the instruction is a ldr reg, [<pc or pp> + offset] . | 3563 // Check that the instruction is a ldr reg, [pc + offset] . |
3573 if (FLAG_enable_ool_constant_pool) { | 3564 and_(result, result, Operand(kLdrPCPattern)); |
3574 and_(result, result, Operand(kLdrPpPattern)); | 3565 cmp(result, Operand(kLdrPCPattern)); |
3575 cmp(result, Operand(kLdrPpPattern)); | 3566 Check(eq, kTheInstructionToPatchShouldBeALoadFromPc); |
3576 Check(eq, kTheInstructionToPatchShouldBeALoadFromPp); | |
3577 } else { | |
3578 and_(result, result, Operand(kLdrPCPattern)); | |
3579 cmp(result, Operand(kLdrPCPattern)); | |
3580 Check(eq, kTheInstructionToPatchShouldBeALoadFromPc); | |
3581 } | |
3582 // Result was clobbered. Restore it. | 3567 // Result was clobbered. Restore it. |
3583 ldr(result, MemOperand(ldr_location)); | 3568 ldr(result, MemOperand(ldr_location)); |
3584 } | 3569 } |
3585 // Get the address of the constant. | 3570 // Get the address of the constant. |
3586 and_(result, result, Operand(kLdrOffsetMask)); | 3571 and_(result, result, Operand(kLdrOffsetMask)); |
3587 if (FLAG_enable_ool_constant_pool) { | 3572 add(result, ldr_location, Operand(result)); |
3588 add(result, pp, Operand(result)); | 3573 add(result, result, Operand(kPCRegOffset)); |
3589 } else { | |
3590 add(result, ldr_location, Operand(result)); | |
3591 add(result, result, Operand(Instruction::kPCReadOffset)); | |
3592 } | |
3593 } | 3574 } |
3594 | 3575 |
3595 | 3576 |
3596 void MacroAssembler::CheckPageFlag( | 3577 void MacroAssembler::CheckPageFlag( |
3597 Register object, | 3578 Register object, |
3598 Register scratch, | 3579 Register scratch, |
3599 int mask, | 3580 int mask, |
3600 Condition cc, | 3581 Condition cc, |
3601 Label* condition_met) { | 3582 Label* condition_met) { |
3602 Bfc(scratch, object, 0, kPageSizeBits); | 3583 Bfc(scratch, object, 0, kPageSizeBits); |
(...skipping 487 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4090 sub(result, result, Operand(dividend)); | 4071 sub(result, result, Operand(dividend)); |
4091 } | 4072 } |
4092 if (ms.shift() > 0) mov(result, Operand(result, ASR, ms.shift())); | 4073 if (ms.shift() > 0) mov(result, Operand(result, ASR, ms.shift())); |
4093 add(result, result, Operand(dividend, LSR, 31)); | 4074 add(result, result, Operand(dividend, LSR, 31)); |
4094 } | 4075 } |
4095 | 4076 |
4096 | 4077 |
4097 } } // namespace v8::internal | 4078 } } // namespace v8::internal |
4098 | 4079 |
4099 #endif // V8_TARGET_ARCH_ARM | 4080 #endif // V8_TARGET_ARCH_ARM |
OLD | NEW |