OLD | NEW |
---|---|
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/compiler/code-generator.h" | 5 #include "src/compiler/code-generator.h" |
6 | 6 |
7 #include "src/arm/macro-assembler-arm.h" | 7 #include "src/arm/macro-assembler-arm.h" |
8 #include "src/ast/scopes.h" | 8 #include "src/ast/scopes.h" |
9 #include "src/compiler/code-generator-impl.h" | 9 #include "src/compiler/code-generator-impl.h" |
10 #include "src/compiler/gap-resolver.h" | 10 #include "src/compiler/gap-resolver.h" |
(...skipping 226 matching lines...) | |
237 __ JumpIfSmi(value_, exit()); | 237 __ JumpIfSmi(value_, exit()); |
238 } | 238 } |
239 __ CheckPageFlag(value_, scratch0_, | 239 __ CheckPageFlag(value_, scratch0_, |
240 MemoryChunk::kPointersToHereAreInterestingMask, eq, | 240 MemoryChunk::kPointersToHereAreInterestingMask, eq, |
241 exit()); | 241 exit()); |
242 RememberedSetAction const remembered_set_action = | 242 RememberedSetAction const remembered_set_action = |
243 mode_ > RecordWriteMode::kValueIsMap ? EMIT_REMEMBERED_SET | 243 mode_ > RecordWriteMode::kValueIsMap ? EMIT_REMEMBERED_SET |
244 : OMIT_REMEMBERED_SET; | 244 : OMIT_REMEMBERED_SET; |
245 SaveFPRegsMode const save_fp_mode = | 245 SaveFPRegsMode const save_fp_mode = |
246 frame()->DidAllocateDoubleRegisters() ? kSaveFPRegs : kDontSaveFPRegs; | 246 frame()->DidAllocateDoubleRegisters() ? kSaveFPRegs : kDontSaveFPRegs; |
247 if (!frame()->needs_frame()) { | 247 if (!frame_access_state()->access_frame_with_fp()) { |
danno, 2016/03/16 18:18:11: Here and below, and on other ARM-like platforms, I…
Mircea Trofin, 2016/03/16 20:20:46: Ah, yes! I started from Intel and ended up cargo-c…
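For context on the exchange above: the Push(lr)/Pop(lr) pair around the CallStub below exists because the write-barrier stub call clobbers the link register, so lr only has to be preserved explicitly when the frame was elided (see the comment on line 248). The standalone sketch below mirrors that shape in isolation; the Emitter type and its methods are hypothetical stand-ins for the macro assembler, not real V8 API.

// Illustration only: Emitter is a hypothetical stand-in for V8's MacroAssembler.
#include <iostream>
#include <string>

struct Emitter {
  void Push(const std::string& reg) { std::cout << "  push {" << reg << "}\n"; }
  void Pop(const std::string& reg) { std::cout << "  pop {" << reg << "}\n"; }
  void CallStub(const std::string& stub) { std::cout << "  bl " << stub << "\n"; }
};

// Mirrors the shape of the out-of-line write-barrier code above: the stub
// call clobbers lr, so lr is saved and restored only when no frame was set
// up, i.e. when the frame was elided.
void EmitRecordWriteCall(Emitter& masm, bool frame_elided) {
  if (frame_elided) masm.Push("lr");
  masm.CallStub("RecordWriteStub");
  if (frame_elided) masm.Pop("lr");
}

int main() {
  Emitter masm;
  std::cout << "frame elided:\n";
  EmitRecordWriteCall(masm, true);
  std::cout << "frame present:\n";
  EmitRecordWriteCall(masm, false);
  return 0;
}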
248 // We need to save and restore lr if the frame was elided. | 248 // We need to save and restore lr if the frame was elided. |
249 __ Push(lr); | 249 __ Push(lr); |
250 } | 250 } |
251 RecordWriteStub stub(isolate(), object_, scratch0_, scratch1_, | 251 RecordWriteStub stub(isolate(), object_, scratch0_, scratch1_, |
252 remembered_set_action, save_fp_mode); | 252 remembered_set_action, save_fp_mode); |
253 if (index_.is(no_reg)) { | 253 if (index_.is(no_reg)) { |
254 __ add(scratch1_, object_, Operand(index_immediate_)); | 254 __ add(scratch1_, object_, Operand(index_immediate_)); |
255 } else { | 255 } else { |
256 DCHECK_EQ(0, index_immediate_); | 256 DCHECK_EQ(0, index_immediate_); |
257 __ add(scratch1_, object_, Operand(index_)); | 257 __ add(scratch1_, object_, Operand(index_)); |
258 } | 258 } |
259 __ CallStub(&stub); | 259 __ CallStub(&stub); |
260 if (!frame()->needs_frame()) { | 260 if (!frame_access_state()->access_frame_with_fp()) { |
261 __ Pop(lr); | 261 __ Pop(lr); |
262 } | 262 } |
263 } | 263 } |
264 | 264 |
265 private: | 265 private: |
266 Register const object_; | 266 Register const object_; |
267 Register const index_; | 267 Register const index_; |
268 int32_t const index_immediate_; // Valid if index_.is(no_reg). | 268 int32_t const index_immediate_; // Valid if index_.is(no_reg). |
269 Register const value_; | 269 Register const value_; |
270 Register const scratch0_; | 270 Register const scratch0_; |
(...skipping 108 matching lines...) | |
379 if (instr->InputAt(1)->IsRegister()) { \ | 379 if (instr->InputAt(1)->IsRegister()) { \ |
380 __ cmp(offset, i.InputRegister(1)); \ | 380 __ cmp(offset, i.InputRegister(1)); \ |
381 } else { \ | 381 } else { \ |
382 __ cmp(offset, i.InputImmediate(1)); \ | 382 __ cmp(offset, i.InputImmediate(1)); \ |
383 } \ | 383 } \ |
384 auto value = i.InputRegister(2); \ | 384 auto value = i.InputRegister(2); \ |
385 __ asm_instr(value, i.InputOffset(3), lo); \ | 385 __ asm_instr(value, i.InputOffset(3), lo); \ |
386 DCHECK_EQ(LeaveCC, i.OutputSBit()); \ | 386 DCHECK_EQ(LeaveCC, i.OutputSBit()); \ |
387 } while (0) | 387 } while (0) |
388 | 388 |
| 389 void CodeGenerator::AssembleDeconstructFrame() { |
| 390 __ LeaveFrame(StackFrame::MANUAL); |
| 391 } |
389 | 392 |
390 void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) { | 393 void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) { |
391 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); | 394 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); |
392 if (sp_slot_delta > 0) { | 395 if (sp_slot_delta > 0) { |
393 __ add(sp, sp, Operand(sp_slot_delta * kPointerSize)); | 396 __ add(sp, sp, Operand(sp_slot_delta * kPointerSize)); |
394 } | 397 } |
395 frame_access_state()->SetFrameAccessToDefault(); | 398 frame_access_state()->SetFrameAccessToDefault(); |
396 } | 399 } |
397 | 400 |
398 | 401 |
399 void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) { | 402 void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) { |
400 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); | 403 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); |
401 if (sp_slot_delta < 0) { | 404 if (sp_slot_delta < 0) { |
402 __ sub(sp, sp, Operand(-sp_slot_delta * kPointerSize)); | 405 __ sub(sp, sp, Operand(-sp_slot_delta * kPointerSize)); |
403 frame_access_state()->IncreaseSPDelta(-sp_slot_delta); | 406 frame_access_state()->IncreaseSPDelta(-sp_slot_delta); |
404 } | 407 } |
405 if (frame()->needs_frame()) { | 408 if (frame_access_state()->access_frame_with_fp()) { |
406 if (FLAG_enable_embedded_constant_pool) { | 409 if (FLAG_enable_embedded_constant_pool) { |
407 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kConstantPoolOffset)); | 410 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kConstantPoolOffset)); |
408 } | 411 } |
409 __ ldr(lr, MemOperand(fp, StandardFrameConstants::kCallerPCOffset)); | 412 __ ldr(lr, MemOperand(fp, StandardFrameConstants::kCallerPCOffset)); |
410 __ ldr(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | 413 __ ldr(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); |
411 } | 414 } |
412 frame_access_state()->SetFrameAccessToSP(); | 415 frame_access_state()->SetFrameAccessToSP(); |
413 } | 416 } |
414 | 417 |
415 void CodeGenerator::AssemblePopArgumentsAdaptorFrame(Register args_reg, | 418 void CodeGenerator::AssemblePopArgumentsAdaptorFrame(Register args_reg, |
(...skipping 157 matching lines...) | |
573 break; | 576 break; |
574 case kArchStackPointer: | 577 case kArchStackPointer: |
575 __ mov(i.OutputRegister(), sp); | 578 __ mov(i.OutputRegister(), sp); |
576 DCHECK_EQ(LeaveCC, i.OutputSBit()); | 579 DCHECK_EQ(LeaveCC, i.OutputSBit()); |
577 break; | 580 break; |
578 case kArchFramePointer: | 581 case kArchFramePointer: |
579 __ mov(i.OutputRegister(), fp); | 582 __ mov(i.OutputRegister(), fp); |
580 DCHECK_EQ(LeaveCC, i.OutputSBit()); | 583 DCHECK_EQ(LeaveCC, i.OutputSBit()); |
581 break; | 584 break; |
582 case kArchParentFramePointer: | 585 case kArchParentFramePointer: |
583 if (frame_access_state()->frame()->needs_frame()) { | 586 if (frame_access_state()->access_frame_with_fp()) { |
584 __ ldr(i.OutputRegister(), MemOperand(fp, 0)); | 587 __ ldr(i.OutputRegister(), MemOperand(fp, 0)); |
585 } else { | 588 } else { |
586 __ mov(i.OutputRegister(), fp); | 589 __ mov(i.OutputRegister(), fp); |
587 } | 590 } |
588 break; | 591 break; |
589 case kArchTruncateDoubleToI: | 592 case kArchTruncateDoubleToI: |
590 __ TruncateDoubleToI(i.OutputRegister(), i.InputFloat64Register(0)); | 593 __ TruncateDoubleToI(i.OutputRegister(), i.InputFloat64Register(0)); |
591 DCHECK_EQ(LeaveCC, i.OutputSBit()); | 594 DCHECK_EQ(LeaveCC, i.OutputSBit()); |
592 break; | 595 break; |
593 case kArchStoreWithWriteBarrier: { | 596 case kArchStoreWithWriteBarrier: { |
(...skipping 598 matching lines...) | |
1192 ArmOperandConverter i(this, instr); | 1195 ArmOperandConverter i(this, instr); |
1193 Label* tlabel = branch->true_label; | 1196 Label* tlabel = branch->true_label; |
1194 Label* flabel = branch->false_label; | 1197 Label* flabel = branch->false_label; |
1195 Condition cc = FlagsConditionToCondition(branch->condition); | 1198 Condition cc = FlagsConditionToCondition(branch->condition); |
1196 __ b(cc, tlabel); | 1199 __ b(cc, tlabel); |
1197 if (!branch->fallthru) __ b(flabel); // no fallthru to flabel. | 1200 if (!branch->fallthru) __ b(flabel); // no fallthru to flabel. |
1198 } | 1201 } |
1199 | 1202 |
1200 | 1203 |
1201 void CodeGenerator::AssembleArchJump(RpoNumber target) { | 1204 void CodeGenerator::AssembleArchJump(RpoNumber target) { |
| 1205 AssembleDeconstructFrameBetweenBlocks(); |
1202 if (!IsNextInAssemblyOrder(target)) __ b(GetLabel(target)); | 1206 if (!IsNextInAssemblyOrder(target)) __ b(GetLabel(target)); |
1203 } | 1207 } |
1204 | 1208 |
1205 | 1209 |
1206 // Assembles boolean materializations after an instruction. | 1210 // Assembles boolean materializations after an instruction. |
1207 void CodeGenerator::AssembleArchBoolean(Instruction* instr, | 1211 void CodeGenerator::AssembleArchBoolean(Instruction* instr, |
1208 FlagsCondition condition) { | 1212 FlagsCondition condition) { |
1209 ArmOperandConverter i(this, instr); | 1213 ArmOperandConverter i(this, instr); |
1210 | 1214 |
1211 // Materialize a full 32-bit 1 or 0 value. The result register is always the | 1215 // Materialize a full 32-bit 1 or 0 value. The result register is always the |
(...skipping 40 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1252 // TODO(turbofan): We should be able to generate better code by sharing the | 1256 // TODO(turbofan): We should be able to generate better code by sharing the |
1253 // actual final call site and just bl'ing to it here, similar to what we do | 1257 // actual final call site and just bl'ing to it here, similar to what we do |
1254 // in the lithium backend. | 1258 // in the lithium backend. |
1255 __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY); | 1259 __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY); |
1256 __ CheckConstPool(false, false); | 1260 __ CheckConstPool(false, false); |
1257 } | 1261 } |
1258 | 1262 |
1259 | 1263 |
1260 void CodeGenerator::AssemblePrologue() { | 1264 void CodeGenerator::AssemblePrologue() { |
1261 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); | 1265 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); |
1262 if (frame()->needs_frame()) { | 1266 if (frame_access_state()->access_frame_with_fp()) { |
1263 if (descriptor->IsCFunctionCall()) { | 1267 if (descriptor->IsCFunctionCall()) { |
1264 if (FLAG_enable_embedded_constant_pool) { | 1268 if (FLAG_enable_embedded_constant_pool) { |
1265 __ Push(lr, fp, pp); | 1269 __ Push(lr, fp, pp); |
1266 // Adjust FP to point to saved FP. | 1270 // Adjust FP to point to saved FP. |
1267 __ sub(fp, sp, Operand(StandardFrameConstants::kConstantPoolOffset)); | 1271 __ sub(fp, sp, Operand(StandardFrameConstants::kConstantPoolOffset)); |
1268 } else { | 1272 } else { |
1269 __ Push(lr, fp); | 1273 __ Push(lr, fp); |
1270 __ mov(fp, sp); | 1274 __ mov(fp, sp); |
1271 } | 1275 } |
1272 } else if (descriptor->IsJSFunctionCall()) { | 1276 } else if (descriptor->IsJSFunctionCall()) { |
1273 __ Prologue(this->info()->GeneratePreagedPrologue()); | 1277 __ Prologue(this->info()->GeneratePreagedPrologue()); |
1274 } else { | 1278 } else { |
1275 __ StubPrologue(info()->GetOutputStackFrameType()); | 1279 __ StubPrologue(info()->GetOutputStackFrameType()); |
1276 } | 1280 } |
1277 } else { | |
1278 frame()->SetElidedFrameSizeInSlots(0); | |
1279 } | 1281 } |
1280 frame_access_state()->SetFrameAccessToDefault(); | |
1281 | 1282 |
1282 int stack_shrink_slots = frame()->GetSpillSlotCount(); | 1283 int stack_shrink_slots = frame()->GetSpillSlotCount(); |
1283 if (info()->is_osr()) { | 1284 if (info()->is_osr()) { |
1284 // TurboFan OSR-compiled functions cannot be entered directly. | 1285 // TurboFan OSR-compiled functions cannot be entered directly. |
1285 __ Abort(kShouldNotDirectlyEnterOsrFunction); | 1286 __ Abort(kShouldNotDirectlyEnterOsrFunction); |
1286 | 1287 |
1287 // Unoptimized code jumps directly to this entrypoint while the unoptimized | 1288 // Unoptimized code jumps directly to this entrypoint while the unoptimized |
1288 // frame is still on the stack. Optimized code uses OSR values directly from | 1289 // frame is still on the stack. Optimized code uses OSR values directly from |
1289 // the unoptimized frame. Thus, all that needs to be done is to allocate the | 1290 // the unoptimized frame. Thus, all that needs to be done is to allocate the |
1290 // remaining stack slots. | 1291 // remaining stack slots. |
(...skipping 49 matching lines...) | |
1340 const RegList saves_fp = descriptor->CalleeSavedFPRegisters(); | 1341 const RegList saves_fp = descriptor->CalleeSavedFPRegisters(); |
1341 if (saves_fp != 0) { | 1342 if (saves_fp != 0) { |
1342 STATIC_ASSERT(DwVfpRegister::kMaxNumRegisters == 32); | 1343 STATIC_ASSERT(DwVfpRegister::kMaxNumRegisters == 32); |
1343 uint32_t last = base::bits::CountLeadingZeros32(saves_fp) - 1; | 1344 uint32_t last = base::bits::CountLeadingZeros32(saves_fp) - 1; |
1344 uint32_t first = base::bits::CountTrailingZeros32(saves_fp); | 1345 uint32_t first = base::bits::CountTrailingZeros32(saves_fp); |
1345 __ vldm(ia_w, sp, DwVfpRegister::from_code(first), | 1346 __ vldm(ia_w, sp, DwVfpRegister::from_code(first), |
1346 DwVfpRegister::from_code(last)); | 1347 DwVfpRegister::from_code(last)); |
1347 } | 1348 } |
1348 | 1349 |
1349 if (descriptor->IsCFunctionCall()) { | 1350 if (descriptor->IsCFunctionCall()) { |
1350 __ LeaveFrame(StackFrame::MANUAL); | 1351 AssembleDeconstructFrameWhenLeaving(); |
1351 } else if (frame()->needs_frame()) { | 1352 } else if (frame_access_state()->access_frame_with_fp()) { |
1352 // Canonicalize JSFunction return sites for now. | 1353 // Canonicalize JSFunction return sites for now. |
1353 if (return_label_.is_bound()) { | 1354 if (return_label_.is_bound()) { |
| 1355 AssembleDeconstructFrameBetweenBlocks(); |
1354 __ b(&return_label_); | 1356 __ b(&return_label_); |
1355 return; | 1357 return; |
1356 } else { | 1358 } else { |
1357 __ bind(&return_label_); | 1359 __ bind(&return_label_); |
1358 __ LeaveFrame(StackFrame::MANUAL); | 1360 AssembleDeconstructFrameWhenLeaving(); |
1359 } | 1361 } |
1360 } | 1362 } |
1361 __ Ret(pop_count); | 1363 __ Ret(pop_count); |
1362 } | 1364 } |
1363 | 1365 |
1364 | 1366 |
1365 void CodeGenerator::AssembleMove(InstructionOperand* source, | 1367 void CodeGenerator::AssembleMove(InstructionOperand* source, |
1366 InstructionOperand* destination) { | 1368 InstructionOperand* destination) { |
1367 ArmOperandConverter g(this, nullptr); | 1369 ArmOperandConverter g(this, nullptr); |
1368 // Dispatch on the source and destination operand kinds. Not all | 1370 // Dispatch on the source and destination operand kinds. Not all |
(...skipping 198 matching lines...) | |
1567 padding_size -= v8::internal::Assembler::kInstrSize; | 1569 padding_size -= v8::internal::Assembler::kInstrSize; |
1568 } | 1570 } |
1569 } | 1571 } |
1570 } | 1572 } |
1571 | 1573 |
1572 #undef __ | 1574 #undef __ |
1573 | 1575 |
1574 } // namespace compiler | 1576 } // namespace compiler |
1575 } // namespace internal | 1577 } // namespace internal |
1576 } // namespace v8 | 1578 } // namespace v8 |