OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 115 matching lines...)
126 // Block constant pool for the call instruction sequence. | 126 // Block constant pool for the call instruction sequence. |
127 BlockConstPoolScope block_const_pool(this); | 127 BlockConstPoolScope block_const_pool(this); |
128 Label start; | 128 Label start; |
129 bind(&start); | 129 bind(&start); |
130 | 130 |
131 bool old_predictable_code_size = predictable_code_size(); | 131 bool old_predictable_code_size = predictable_code_size(); |
132 if (mode == NEVER_INLINE_TARGET_ADDRESS) { | 132 if (mode == NEVER_INLINE_TARGET_ADDRESS) { |
133 set_predictable_code_size(true); | 133 set_predictable_code_size(true); |
134 } | 134 } |
135 | 135 |
| 136 // Check the expected size before generating code to ensure we assume the same |
 | 137 // constant pool availability (e.g., whether the constant pool is full or not). |
| 138 int expected_size = CallSize(target, rmode, cond); |
| 139 |
136 // Call sequence on V7 or later may be: | 140 // Call sequence on V7 or later may be: |
137 // movw ip, #... @ call address low 16 | 141 // movw ip, #... @ call address low 16 |
138 // movt ip, #... @ call address high 16 | 142 // movt ip, #... @ call address high 16 |
139 // blx ip | 143 // blx ip |
140 // @ return address | 144 // @ return address |
141 // Or for pre-V7 or values that may be back-patched | 145 // Or for pre-V7 or values that may be back-patched |
142 // to avoid ICache flushes: | 146 // to avoid ICache flushes: |
143 // ldr ip, [pc, #...] @ call address | 147 // ldr ip, [pc, #...] @ call address |
144 // blx ip | 148 // blx ip |
145 // @ return address | 149 // @ return address |
146 | 150 |
147 // Statement positions are expected to be recorded when the target | 151 // Statement positions are expected to be recorded when the target |
148 // address is loaded. The mov method will automatically record | 152 // address is loaded. The mov method will automatically record |
149 // positions when pc is the target; since this is not the case here, | 153 // positions when pc is the target; since this is not the case here, |
150 // we have to do it explicitly. | 154 // we have to do it explicitly. |
151 positions_recorder()->WriteRecordedPositions(); | 155 positions_recorder()->WriteRecordedPositions(); |
152 | 156 |
153 mov(ip, Operand(reinterpret_cast<int32_t>(target), rmode)); | 157 mov(ip, Operand(reinterpret_cast<int32_t>(target), rmode)); |
154 blx(ip, cond); | 158 blx(ip, cond); |
155 | 159 |
156 ASSERT_EQ(CallSize(target, rmode, cond), SizeOfCodeGeneratedSince(&start)); | 160 ASSERT_EQ(expected_size, SizeOfCodeGeneratedSince(&start)); |
157 if (mode == NEVER_INLINE_TARGET_ADDRESS) { | 161 if (mode == NEVER_INLINE_TARGET_ADDRESS) { |
158 set_predictable_code_size(old_predictable_code_size); | 162 set_predictable_code_size(old_predictable_code_size); |
159 } | 163 } |
160 } | 164 } |
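
The only functional change in this hunk is that CallSize() is now evaluated before any instructions are emitted. CallSize() depends on assembler state that the emission itself can change (in particular whether a constant pool is still available), so measuring afterwards could predict a different sequence than the one actually generated. A minimal sketch of the pattern, with EmitCallSequence() as a hypothetical stand-in for the mov/blx pair above:

  Label start;
  bind(&start);

  // Capture the prediction while the assembler is still in its pre-call state.
  int expected_size = CallSize(target, rmode, cond);

  // Emitting the sequence may itself flip constant pool availability, so a
  // CallSize() call made after this point could disagree with what was emitted.
  EmitCallSequence(target, rmode, cond);  // hypothetical: mov(ip, ...); blx(ip, cond);

  // Compare against the value captured up front, not a fresh CallSize() call.
  ASSERT_EQ(expected_size, SizeOfCodeGeneratedSince(&start));
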
161 | 165 |
162 | 166 |
163 int MacroAssembler::CallSize(Handle<Code> code, | 167 int MacroAssembler::CallSize(Handle<Code> code, |
164 RelocInfo::Mode rmode, | 168 RelocInfo::Mode rmode, |
165 TypeFeedbackId ast_id, | 169 TypeFeedbackId ast_id, |
166 Condition cond) { | 170 Condition cond) { |
(...skipping 880 matching lines...)
1047 // if the target platform will need alignment, so this is controlled from a | 1051 // if the target platform will need alignment, so this is controlled from a |
1048 // flag. | 1052 // flag. |
1049 return FLAG_sim_stack_alignment; | 1053 return FLAG_sim_stack_alignment; |
1050 #endif // V8_HOST_ARCH_ARM | 1054 #endif // V8_HOST_ARCH_ARM |
1051 } | 1055 } |
1052 | 1056 |
1053 | 1057 |
1054 void MacroAssembler::LeaveExitFrame(bool save_doubles, | 1058 void MacroAssembler::LeaveExitFrame(bool save_doubles, |
1055 Register argument_count, | 1059 Register argument_count, |
1056 bool restore_context) { | 1060 bool restore_context) { |
| 1061 ConstantPoolUnavailableScope constant_pool_unavailable(this); |
| 1062 |
1057 // Optionally restore all double registers. | 1063 // Optionally restore all double registers. |
1058 if (save_doubles) { | 1064 if (save_doubles) { |
1059 // Calculate the stack location of the saved doubles and restore them. | 1065 // Calculate the stack location of the saved doubles and restore them. |
1060 const int offset = ExitFrameConstants::kFrameSize; | 1066 const int offset = ExitFrameConstants::kFrameSize; |
1061 sub(r3, fp, | 1067 sub(r3, fp, |
1062 Operand(offset + DwVfpRegister::kMaxNumRegisters * kDoubleSize)); | 1068 Operand(offset + DwVfpRegister::kMaxNumRegisters * kDoubleSize)); |
1063 RestoreFPRegs(r3, ip); | 1069 RestoreFPRegs(r3, ip); |
1064 } | 1070 } |
1065 | 1071 |
1066 // Clear top frame. | 1072 // Clear top frame. |
1067 mov(r3, Operand::Zero()); | 1073 mov(r3, Operand::Zero()); |
1068 mov(ip, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate()))); | 1074 mov(ip, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate()))); |
1069 str(r3, MemOperand(ip)); | 1075 str(r3, MemOperand(ip)); |
1070 | 1076 |
1071 | |
1072 // Restore current context from top and clear it in debug mode. | 1077 // Restore current context from top and clear it in debug mode. |
1073 if (restore_context) { | 1078 if (restore_context) { |
1074 mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate()))); | 1079 mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate()))); |
1075 ldr(cp, MemOperand(ip)); | 1080 ldr(cp, MemOperand(ip)); |
1076 } | 1081 } |
1077 #ifdef DEBUG | 1082 #ifdef DEBUG |
1078 mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate()))); | 1083 mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate()))); |
1079 str(r3, MemOperand(ip)); | 1084 str(r3, MemOperand(ip)); |
1080 #endif | 1085 #endif |
1081 | 1086 |
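
LeaveExitFrame now opens a ConstantPoolUnavailableScope for its whole body: while the exit frame is being torn down, pp cannot be relied on to point at a valid constant pool, so the assembler must materialize constants without pp-relative loads. A minimal RAII sketch of what such a scope amounts to, assuming is_constant_pool_available()/set_constant_pool_available() accessors on the assembler (names assumed here, not quoted from the patch):

  class ConstantPoolUnavailableScope {
   public:
    explicit ConstantPoolUnavailableScope(Assembler* assembler)
        : assembler_(assembler),
          old_available_(assembler->is_constant_pool_available()) {
      // While this scope is live, mov()/ldr() must not emit pp-relative
      // constant pool loads; constants are built some other way (e.g. movw/movt).
      assembler_->set_constant_pool_available(false);
    }
    ~ConstantPoolUnavailableScope() {
      assembler_->set_constant_pool_available(old_available_);
    }

   private:
    Assembler* assembler_;
    bool old_available_;
  };
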
(...skipping 286 matching lines...)
1368 mov(ip, Operand(ExternalReference(Isolate::kHandlerAddress, isolate()))); | 1373 mov(ip, Operand(ExternalReference(Isolate::kHandlerAddress, isolate()))); |
1369 add(sp, sp, Operand(StackHandlerConstants::kSize - kPointerSize)); | 1374 add(sp, sp, Operand(StackHandlerConstants::kSize - kPointerSize)); |
1370 str(r1, MemOperand(ip)); | 1375 str(r1, MemOperand(ip)); |
1371 } | 1376 } |
1372 | 1377 |
1373 | 1378 |
1374 void MacroAssembler::JumpToHandlerEntry() { | 1379 void MacroAssembler::JumpToHandlerEntry() { |
1375 // Compute the handler entry address and jump to it. The handler table is | 1380 // Compute the handler entry address and jump to it. The handler table is |
1376 // a fixed array of (smi-tagged) code offsets. | 1381 // a fixed array of (smi-tagged) code offsets. |
1377 // r0 = exception, r1 = code object, r2 = state. | 1382 // r0 = exception, r1 = code object, r2 = state. |
| 1383 |
| 1384 ConstantPoolUnavailableScope constant_pool_unavailable(this); |
| 1385 if (FLAG_enable_ool_constant_pool) { |
| 1386 ldr(pp, FieldMemOperand(r1, Code::kConstantPoolOffset)); // Constant pool. |
| 1387 } |
1378 ldr(r3, FieldMemOperand(r1, Code::kHandlerTableOffset)); // Handler table. | 1388 ldr(r3, FieldMemOperand(r1, Code::kHandlerTableOffset)); // Handler table. |
1379 add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 1389 add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
1380 mov(r2, Operand(r2, LSR, StackHandler::kKindWidth)); // Handler index. | 1390 mov(r2, Operand(r2, LSR, StackHandler::kKindWidth)); // Handler index. |
1381 ldr(r2, MemOperand(r3, r2, LSL, kPointerSizeLog2)); // Smi-tagged offset. | 1391 ldr(r2, MemOperand(r3, r2, LSL, kPointerSizeLog2)); // Smi-tagged offset. |
1382 add(r1, r1, Operand(Code::kHeaderSize - kHeapObjectTag)); // Code start. | 1392 add(r1, r1, Operand(Code::kHeaderSize - kHeapObjectTag)); // Code start. |
1383 add(pc, r1, Operand::SmiUntag(r2)); // Jump | 1393 add(pc, r1, Operand::SmiUntag(r2)); // Jump |
1384 } | 1394 } |
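
Two steps are worth spelling out here. First, with FLAG_enable_ool_constant_pool the jump lands in a different code object, so pp is reloaded from that object's Code::kConstantPoolOffset field before anything else. Second, the handler entry is computed from the code object's handler table, a FixedArray of smi-tagged code offsets indexed by the handler index kept above the kind bits of the state word. A sketch of that arithmetic (Load32() and the three parameters are illustrative stand-ins for the values in r1-r3, assuming 32-bit smis):

  uint32_t HandlerEntry(uint32_t code_object, uint32_t handler_table,
                        uint32_t state) {
    uint32_t index = state >> StackHandler::kKindWidth;        // drop the kind bits
    uint32_t smi_offset = Load32(handler_table - kHeapObjectTag +
                                 FixedArray::kHeaderSize +
                                 index * kPointerSize);         // smi-tagged offset
    uint32_t code_offset = smi_offset >> kSmiTagSize;           // SmiUntag
    return code_object - kHeapObjectTag + Code::kHeaderSize + code_offset;
  }
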
1385 | 1395 |
1386 | 1396 |
1387 void MacroAssembler::Throw(Register value) { | 1397 void MacroAssembler::Throw(Register value) { |
(...skipping 2160 matching lines...)
3548 num_reg_arguments, num_double_arguments); | 3558 num_reg_arguments, num_double_arguments); |
3549 if (ActivationFrameAlignment() > kPointerSize) { | 3559 if (ActivationFrameAlignment() > kPointerSize) { |
3550 ldr(sp, MemOperand(sp, stack_passed_arguments * kPointerSize)); | 3560 ldr(sp, MemOperand(sp, stack_passed_arguments * kPointerSize)); |
3551 } else { | 3561 } else { |
3552 add(sp, sp, Operand(stack_passed_arguments * sizeof(kPointerSize))); | 3562 add(sp, sp, Operand(stack_passed_arguments * sizeof(kPointerSize))); |
3553 } | 3563 } |
3554 } | 3564 } |
3555 | 3565 |
3556 | 3566 |
3557 void MacroAssembler::GetRelocatedValueLocation(Register ldr_location, | 3567 void MacroAssembler::GetRelocatedValueLocation(Register ldr_location, |
3558 Register result) { | 3568 Register result) { |
3559 const uint32_t kLdrOffsetMask = (1 << 12) - 1; | 3569 const uint32_t kLdrOffsetMask = (1 << 12) - 1; |
3560 const int32_t kPCRegOffset = 2 * kPointerSize; | |
3561 ldr(result, MemOperand(ldr_location)); | 3570 ldr(result, MemOperand(ldr_location)); |
3562 if (emit_debug_code()) { | 3571 if (emit_debug_code()) { |
3563 // Check that the instruction is a ldr reg, [pc + offset]. | 3572 // Check that the instruction is a ldr reg, [<pc or pp> + offset].
3564 and_(result, result, Operand(kLdrPCPattern)); | 3573 if (FLAG_enable_ool_constant_pool) { |
3565 cmp(result, Operand(kLdrPCPattern)); | 3574 and_(result, result, Operand(kLdrPpPattern)); |
3566 Check(eq, kTheInstructionToPatchShouldBeALoadFromPc); | 3575 cmp(result, Operand(kLdrPpPattern)); |
| 3576 Check(eq, kTheInstructionToPatchShouldBeALoadFromPp); |
| 3577 } else { |
| 3578 and_(result, result, Operand(kLdrPCPattern)); |
| 3579 cmp(result, Operand(kLdrPCPattern)); |
| 3580 Check(eq, kTheInstructionToPatchShouldBeALoadFromPc); |
| 3581 } |
3567 // Result was clobbered. Restore it. | 3582 // Result was clobbered. Restore it. |
3568 ldr(result, MemOperand(ldr_location)); | 3583 ldr(result, MemOperand(ldr_location)); |
3569 } | 3584 } |
3570 // Get the address of the constant. | 3585 // Get the address of the constant. |
3571 and_(result, result, Operand(kLdrOffsetMask)); | 3586 and_(result, result, Operand(kLdrOffsetMask)); |
3572 add(result, ldr_location, Operand(result)); | 3587 if (FLAG_enable_ool_constant_pool) { |
3573 add(result, result, Operand(kPCRegOffset)); | 3588 add(result, pp, Operand(result)); |
| 3589 } else { |
| 3590 add(result, ldr_location, Operand(result)); |
| 3591 add(result, result, Operand(Instruction::kPCReadOffset)); |
| 3592 } |
3574 } | 3593 } |
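
GetRelocatedValueLocation now handles both addressing forms of the literal load it inspects. With the out-of-line constant pool the instruction is pp-relative and the constant lives at pp plus the 12-bit immediate; the in-line form is pc-relative, where reading pc on ARM observes the instruction address plus 8, which is what Instruction::kPCReadOffset expresses (replacing the old hand-rolled 2 * kPointerSize). The same decoding written out in plain C++ as an illustrative helper (not part of the patch, and assuming a positive/add offset, which the checked ldr patterns guarantee):

  #include <cstdint>

  // Given an "ldr rX, [pc, #imm12]" or "ldr rX, [pp, #imm12]" encoding, return
  // the address of the constant it loads. instr_address is the address of the
  // ldr instruction itself; pp_value is the current constant pool pointer.
  uint32_t ConstantAddress(uint32_t instr, uint32_t instr_address,
                           uint32_t pp_value, bool ool_constant_pool) {
    const uint32_t kLdrOffsetMask = (1 << 12) - 1;
    uint32_t offset = instr & kLdrOffsetMask;
    if (ool_constant_pool) {
      return pp_value + offset;         // pp already points at the pool base
    }
    return instr_address + 8 + offset;  // pc reads as instruction address + 8
  }
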
3575 | 3594 |
3576 | 3595 |
3577 void MacroAssembler::CheckPageFlag( | 3596 void MacroAssembler::CheckPageFlag( |
3578 Register object, | 3597 Register object, |
3579 Register scratch, | 3598 Register scratch, |
3580 int mask, | 3599 int mask, |
3581 Condition cc, | 3600 Condition cc, |
3582 Label* condition_met) { | 3601 Label* condition_met) { |
3583 Bfc(scratch, object, 0, kPageSizeBits); | 3602 Bfc(scratch, object, 0, kPageSizeBits); |
(...skipping 486 matching lines...)
4070 if (divisor < 0 && ms.multiplier() > 0) { | 4089 if (divisor < 0 && ms.multiplier() > 0) { |
4071 sub(result, result, Operand(dividend)); | 4090 sub(result, result, Operand(dividend)); |
4072 } | 4091 } |
4073 if (ms.shift() > 0) mov(result, Operand(result, ASR, ms.shift())); | 4092 if (ms.shift() > 0) mov(result, Operand(result, ASR, ms.shift())); |
4074 } | 4093 } |
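
The fragment visible here is the tail of a divide-by-constant sequence built on a magic multiplier: result is assumed to already hold the high 32 bits of dividend * multiplier, and when the divisor is negative while the multiplier came out positive, the dividend has to be subtracted before the final arithmetic shift (the usual Hacker's Delight construction). In plain C++, under that assumption (all names here are stand-ins for the register contents, not code from the patch):

  int32_t quotient = high_product;                  // high half of dividend * multiplier
  if (divisor < 0 && multiplier > 0) quotient -= dividend;
  if (shift > 0) quotient >>= shift;                // arithmetic shift right by ms.shift()
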
4075 | 4094 |
4076 | 4095 |
4077 } } // namespace v8::internal | 4096 } } // namespace v8::internal |
4078 | 4097 |
4079 #endif // V8_TARGET_ARCH_ARM | 4098 #endif // V8_TARGET_ARCH_ARM |