Chromium Code Reviews

Diff: src/arm/macro-assembler-arm.cc

Issue 204323004: Reland "Add out-of-line constant pool support to Arm." (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 6 years, 9 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 115 matching lines...)
   // Block constant pool for the call instruction sequence.
   BlockConstPoolScope block_const_pool(this);
   Label start;
   bind(&start);

   bool old_predictable_code_size = predictable_code_size();
   if (mode == NEVER_INLINE_TARGET_ADDRESS) {
     set_predictable_code_size(true);
   }

+#ifdef DEBUG
+  // Check the expected size before generating code to ensure we assume the same
+  // constant pool availability (e.g., whether constant pool is full or not).
+  int expected_size = CallSize(target, rmode, cond);
+#endif
+
   // Call sequence on V7 or later may be :
   //  movw ip, #...        @ call address low 16
   //  movt ip, #...        @ call address high 16
   //  blx  ip
   //                       @ return address
   // Or for pre-V7 or values that may be back-patched
   // to avoid ICache flushes:
   //  ldr  ip, [pc, #...]  @ call address
   //  blx  ip
   //                       @ return address

   // Statement positions are expected to be recorded when the target
   // address is loaded. The mov method will automatically record
   // positions when pc is the target, since this is not the case here
   // we have to do it explicitly.
   positions_recorder()->WriteRecordedPositions();

   mov(ip, Operand(reinterpret_cast<int32_t>(target), rmode));
   blx(ip, cond);

-  ASSERT_EQ(CallSize(target, rmode, cond), SizeOfCodeGeneratedSince(&start));
+  ASSERT_EQ(expected_size, SizeOfCodeGeneratedSince(&start));
   if (mode == NEVER_INLINE_TARGET_ADDRESS) {
     set_predictable_code_size(old_predictable_code_size);
   }
 }
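Note: the ASSERT_EQ above only holds because CallSize() mirrors the emitted sequence byte for byte: three instructions for the movw/movt/blx form, two for the ldr/blx form described in the comment. A minimal standalone sketch of that size arithmetic (illustrative only, not V8 code; ExpectedCallSequenceSize is a hypothetical helper):

    #include <cstdio>

    constexpr int kArmInstrSize = 4;  // every ARM instruction is 4 bytes

    // use_movw_movt: true for the V7 sequence (movw + movt + blx),
    // false for the constant-pool sequence (ldr + blx).
    int ExpectedCallSequenceSize(bool use_movw_movt) {
      return use_movw_movt ? 3 * kArmInstrSize   // movw, movt, blx
                           : 2 * kArmInstrSize;  // ldr ip, [pc/pp, #...]; blx
    }

    int main() {
      std::printf("V7 call: %d bytes, pre-V7 call: %d bytes\n",
                  ExpectedCallSequenceSize(true),
                  ExpectedCallSequenceSize(false));
      return 0;
    }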


 int MacroAssembler::CallSize(Handle<Code> code,
                              RelocInfo::Mode rmode,
                              TypeFeedbackId ast_id,
                              Condition cond) {
(...skipping 880 matching lines...)
   // if the target platform will need alignment, so this is controlled from a
   // flag.
   return FLAG_sim_stack_alignment;
 #endif  // V8_HOST_ARCH_ARM
 }


 void MacroAssembler::LeaveExitFrame(bool save_doubles,
                                     Register argument_count,
                                     bool restore_context) {
+  ConstantPoolUnavailableScope constant_pool_unavailable(this);
+
   // Optionally restore all double registers.
   if (save_doubles) {
     // Calculate the stack location of the saved doubles and restore them.
     const int offset = ExitFrameConstants::kFrameSize;
     sub(r3, fp,
         Operand(offset + DwVfpRegister::kMaxNumRegisters * kDoubleSize));
     RestoreFPRegs(r3, ip);
   }

   // Clear top frame.
   mov(r3, Operand::Zero());
   mov(ip, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
   str(r3, MemOperand(ip));

-
   // Restore current context from top and clear it in debug mode.
   if (restore_context) {
     mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
     ldr(cp, MemOperand(ip));
   }
 #ifdef DEBUG
   mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
   str(r3, MemOperand(ip));
 #endif

(...skipping 286 matching lines...)
   mov(ip, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
   add(sp, sp, Operand(StackHandlerConstants::kSize - kPointerSize));
   str(r1, MemOperand(ip));
 }


 void MacroAssembler::JumpToHandlerEntry() {
   // Compute the handler entry address and jump to it. The handler table is
   // a fixed array of (smi-tagged) code offsets.
   // r0 = exception, r1 = code object, r2 = state.
+
+  ConstantPoolUnavailableScope constant_pool_unavailable(this);
+  if (FLAG_enable_ool_constant_pool) {
+    ldr(pp, FieldMemOperand(r1, Code::kConstantPoolOffset));  // Constant pool.
+  }
   ldr(r3, FieldMemOperand(r1, Code::kHandlerTableOffset));  // Handler table.
   add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
   mov(r2, Operand(r2, LSR, StackHandler::kKindWidth));  // Handler index.
   ldr(r2, MemOperand(r3, r2, LSL, kPointerSizeLog2));  // Smi-tagged offset.
   add(r1, r1, Operand(Code::kHeaderSize - kHeapObjectTag));  // Code start.
   add(pc, r1, Operand::SmiUntag(r2));  // Jump
 }
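Note: the last two instructions compute code start = r1 + Code::kHeaderSize - kHeapObjectTag and then jump to code start plus the smi-untagged handler-table entry. A standalone sketch of that arithmetic (illustrative only, not V8 code; the two *Sketch constants are assumed placeholder values, HandlerEntryAddress is a hypothetical helper, and on 32-bit ARM smi-untagging is an arithmetic shift right by one):

    #include <cstdint>
    #include <cstdio>

    constexpr uint32_t kHeapObjectTagSketch = 1;    // assumed heap-pointer tag
    constexpr uint32_t kCodeHeaderSizeSketch = 32;  // assumed Code header size

    uint32_t HandlerEntryAddress(uint32_t code_object, int32_t smi_tagged_offset) {
      uint32_t code_start = code_object + kCodeHeaderSizeSketch - kHeapObjectTagSketch;
      return code_start + (smi_tagged_offset >> 1);  // SmiUntag(r2), then add pc
    }

    int main() {
      // Handler 0x40 bytes into the instructions of a code object at 0x40000001.
      std::printf("entry at 0x%x\n", HandlerEntryAddress(0x40000001u, 0x40 << 1));
      return 0;
    }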


 void MacroAssembler::Throw(Register value) {
(...skipping 2160 matching lines...)
       num_reg_arguments, num_double_arguments);
   if (ActivationFrameAlignment() > kPointerSize) {
     ldr(sp, MemOperand(sp, stack_passed_arguments * kPointerSize));
   } else {
     add(sp, sp, Operand(stack_passed_arguments * sizeof(kPointerSize)));
   }
 }


 void MacroAssembler::GetRelocatedValueLocation(Register ldr_location,
                                                Register result) {
   const uint32_t kLdrOffsetMask = (1 << 12) - 1;
-  const int32_t kPCRegOffset = 2 * kPointerSize;
   ldr(result, MemOperand(ldr_location));
   if (emit_debug_code()) {
-    // Check that the instruction is a ldr reg, [pc + offset] .
-    and_(result, result, Operand(kLdrPCPattern));
-    cmp(result, Operand(kLdrPCPattern));
-    Check(eq, kTheInstructionToPatchShouldBeALoadFromPc);
+    // Check that the instruction is a ldr reg, [<pc or pp> + offset] .
+    if (FLAG_enable_ool_constant_pool) {
+      and_(result, result, Operand(kLdrPpPattern));
+      cmp(result, Operand(kLdrPpPattern));
+      Check(eq, kTheInstructionToPatchShouldBeALoadFromPp);
+    } else {
+      and_(result, result, Operand(kLdrPCPattern));
+      cmp(result, Operand(kLdrPCPattern));
+      Check(eq, kTheInstructionToPatchShouldBeALoadFromPc);
+    }
     // Result was clobbered. Restore it.
     ldr(result, MemOperand(ldr_location));
   }
   // Get the address of the constant.
   and_(result, result, Operand(kLdrOffsetMask));
-  add(result, ldr_location, Operand(result));
-  add(result, result, Operand(kPCRegOffset));
+  if (FLAG_enable_ool_constant_pool) {
+    add(result, pp, Operand(result));
+  } else {
+    add(result, ldr_location, Operand(result));
+    add(result, result, Operand(Instruction::kPCReadOffset));
+  }
 }
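Note: concretely, the low 12 bits of the ldr encoding hold the immediate offset. With the inline constant pool the constant sits at the ldr's own address plus that offset plus 8 (Instruction::kPCReadOffset, since ARM reads pc 8 bytes ahead of the current instruction); with the out-of-line constant pool it sits at pp plus the offset. A standalone sketch (illustrative only, not V8 code; ConstantAddress is a hypothetical helper):

    #include <cstdint>
    #include <cstdio>

    constexpr uint32_t kLdrOffsetMask = (1u << 12) - 1;  // imm12 field of ldr
    constexpr uint32_t kPCReadOffset = 8;                // pc reads 8 bytes ahead

    uint32_t ConstantAddress(uint32_t ldr_address, uint32_t ldr_instruction,
                             bool ool_constant_pool, uint32_t pp) {
      uint32_t offset = ldr_instruction & kLdrOffsetMask;
      return ool_constant_pool
                 ? pp + offset                            // ldr rX, [pp, #imm]
                 : ldr_address + offset + kPCReadOffset;  // ldr rX, [pc, #imm]
    }

    int main() {
      // 0xe59f0010 encodes "ldr r0, [pc, #16]": the constant is 16 + 8 bytes past the ldr.
      std::printf("constant at 0x%x\n", ConstantAddress(0x1000, 0xe59f0010u, false, 0));
      return 0;
    }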


 void MacroAssembler::CheckPageFlag(
     Register object,
     Register scratch,
     int mask,
     Condition cc,
     Label* condition_met) {
   Bfc(scratch, object, 0, kPageSizeBits);
(...skipping 487 matching lines...)
     sub(result, result, Operand(dividend));
   }
   if (ms.shift() > 0) mov(result, Operand(result, ASR, ms.shift()));
   add(result, result, Operand(dividend, LSR, 31));
 }


 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_ARM
