OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/compiler/code-generator.h" | 5 #include "src/compiler/code-generator.h" |
6 | 6 |
7 #include "src/compiler/code-generator-impl.h" | 7 #include "src/compiler/code-generator-impl.h" |
8 #include "src/compiler/gap-resolver.h" | 8 #include "src/compiler/gap-resolver.h" |
9 #include "src/compiler/node-matchers.h" | 9 #include "src/compiler/node-matchers.h" |
10 #include "src/ppc/macro-assembler-ppc.h" | 10 #include "src/ppc/macro-assembler-ppc.h" |
(...skipping 34 matching lines...)
45 case kUnsignedLessThanOrEqual: | 45 case kUnsignedLessThanOrEqual: |
46 case kUnsignedGreaterThan: | 46 case kUnsignedGreaterThan: |
47 return true; | 47 return true; |
48 default: | 48 default: |
49 return false; | 49 return false; |
50 } | 50 } |
51 UNREACHABLE(); | 51 UNREACHABLE(); |
52 return false; | 52 return false; |
53 } | 53 } |
54 | 54 |
55 Operand InputImmediate(int index) { | 55 Operand InputImmediate(size_t index) { |
56 Constant constant = ToConstant(instr_->InputAt(index)); | 56 Constant constant = ToConstant(instr_->InputAt(index)); |
57 switch (constant.type()) { | 57 switch (constant.type()) { |
58 case Constant::kInt32: | 58 case Constant::kInt32: |
59 return Operand(constant.ToInt32()); | 59 return Operand(constant.ToInt32()); |
60 case Constant::kFloat32: | 60 case Constant::kFloat32: |
61 return Operand( | 61 return Operand( |
62 isolate()->factory()->NewNumber(constant.ToFloat32(), TENURED)); | 62 isolate()->factory()->NewNumber(constant.ToFloat32(), TENURED)); |
63 case Constant::kFloat64: | 63 case Constant::kFloat64: |
64 return Operand( | 64 return Operand( |
65 isolate()->factory()->NewNumber(constant.ToFloat64(), TENURED)); | 65 isolate()->factory()->NewNumber(constant.ToFloat64(), TENURED)); |
66 case Constant::kInt64: | 66 case Constant::kInt64: |
67 #if V8_TARGET_ARCH_PPC64 | 67 #if V8_TARGET_ARCH_PPC64 |
68 return Operand(constant.ToInt64()); | 68 return Operand(constant.ToInt64()); |
69 #endif | 69 #endif |
70 case Constant::kExternalReference: | 70 case Constant::kExternalReference: |
71 case Constant::kHeapObject: | 71 case Constant::kHeapObject: |
72 case Constant::kRpoNumber: | 72 case Constant::kRpoNumber: |
73 break; | 73 break; |
74 } | 74 } |
75 UNREACHABLE(); | 75 UNREACHABLE(); |
76 return Operand::Zero(); | 76 return Operand::Zero(); |
77 } | 77 } |
78 | 78 |
79 MemOperand MemoryOperand(AddressingMode* mode, int* first_index) { | 79 MemOperand MemoryOperand(AddressingMode* mode, size_t* first_index) { |
80 const int index = *first_index; | 80 const size_t index = *first_index; |
81 *mode = AddressingModeField::decode(instr_->opcode()); | 81 *mode = AddressingModeField::decode(instr_->opcode()); |
82 switch (*mode) { | 82 switch (*mode) { |
83 case kMode_None: | 83 case kMode_None: |
84 break; | 84 break; |
85 case kMode_MRI: | 85 case kMode_MRI: |
86 *first_index += 2; | 86 *first_index += 2; |
87 return MemOperand(InputRegister(index + 0), InputInt32(index + 1)); | 87 return MemOperand(InputRegister(index + 0), InputInt32(index + 1)); |
88 case kMode_MRR: | 88 case kMode_MRR: |
89 *first_index += 2; | 89 *first_index += 2; |
90 return MemOperand(InputRegister(index + 0), InputRegister(index + 1)); | 90 return MemOperand(InputRegister(index + 0), InputRegister(index + 1)); |
91 } | 91 } |
92 UNREACHABLE(); | 92 UNREACHABLE(); |
93 return MemOperand(r0); | 93 return MemOperand(r0); |
94 } | 94 } |
95 | 95 |
96 MemOperand MemoryOperand(AddressingMode* mode, int first_index = 0) { | 96 MemOperand MemoryOperand(AddressingMode* mode, size_t first_index = 0) { |
97 return MemoryOperand(mode, &first_index); | 97 return MemoryOperand(mode, &first_index); |
98 } | 98 } |
99 | 99 |
100 MemOperand ToMemOperand(InstructionOperand* op) const { | 100 MemOperand ToMemOperand(InstructionOperand* op) const { |
101 DCHECK(op != NULL); | 101 DCHECK(op != NULL); |
102 DCHECK(!op->IsRegister()); | 102 DCHECK(!op->IsRegister()); |
103 DCHECK(!op->IsDoubleRegister()); | 103 DCHECK(!op->IsDoubleRegister()); |
104 DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot()); | 104 DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot()); |
105 // The linkage computes where all spill slots are located. | 105 // The linkage computes where all spill slots are located. |
106 FrameOffset offset = linkage()->GetFrameOffset(op->index(), frame(), 0); | 106 FrameOffset offset = linkage()->GetFrameOffset(op->index(), frame(), 0); |
107 return MemOperand(offset.from_stack_pointer() ? sp : fp, offset.offset()); | 107 return MemOperand(offset.from_stack_pointer() ? sp : fp, offset.offset()); |
108 } | 108 } |
109 }; | 109 }; |
110 | 110 |
111 | 111 |
112 static inline bool HasRegisterInput(Instruction* instr, int index) { | 112 static inline bool HasRegisterInput(Instruction* instr, size_t index) { |
113 return instr->InputAt(index)->IsRegister(); | 113 return instr->InputAt(index)->IsRegister(); |
114 } | 114 } |
115 | 115 |
116 | 116 |
117 namespace { | 117 namespace { |
118 | 118 |
119 class OutOfLineLoadNAN32 FINAL : public OutOfLineCode { | 119 class OutOfLineLoadNAN32 FINAL : public OutOfLineCode { |
120 public: | 120 public: |
121 OutOfLineLoadNAN32(CodeGenerator* gen, DoubleRegister result) | 121 OutOfLineLoadNAN32(CodeGenerator* gen, DoubleRegister result) |
122 : OutOfLineCode(gen), result_(result) {} | 122 : OutOfLineCode(gen), result_(result) {} |
(...skipping 239 matching lines...)
362 __ asm_instr(result, operand); \ | 362 __ asm_instr(result, operand); \ |
363 } else { \ | 363 } else { \ |
364 __ asm_instrx(result, operand); \ | 364 __ asm_instrx(result, operand); \ |
365 } \ | 365 } \ |
366 DCHECK_EQ(LeaveRC, i.OutputRCBit()); \ | 366 DCHECK_EQ(LeaveRC, i.OutputRCBit()); \ |
367 } while (0) | 367 } while (0) |
368 | 368 |
369 | 369 |
370 #define ASSEMBLE_STORE_FLOAT(asm_instr, asm_instrx) \ | 370 #define ASSEMBLE_STORE_FLOAT(asm_instr, asm_instrx) \ |
371 do { \ | 371 do { \ |
372 int index = 0; \ | 372 size_t index = 0; \ |
373 AddressingMode mode = kMode_None; \ | 373 AddressingMode mode = kMode_None; \ |
374 MemOperand operand = i.MemoryOperand(&mode, &index); \ | 374 MemOperand operand = i.MemoryOperand(&mode, &index); \ |
375 DoubleRegister value = i.InputDoubleRegister(index); \ | 375 DoubleRegister value = i.InputDoubleRegister(index); \ |
376 if (mode == kMode_MRI) { \ | 376 if (mode == kMode_MRI) { \ |
377 __ asm_instr(value, operand); \ | 377 __ asm_instr(value, operand); \ |
378 } else { \ | 378 } else { \ |
379 __ asm_instrx(value, operand); \ | 379 __ asm_instrx(value, operand); \ |
380 } \ | 380 } \ |
381 DCHECK_EQ(LeaveRC, i.OutputRCBit()); \ | 381 DCHECK_EQ(LeaveRC, i.OutputRCBit()); \ |
382 } while (0) | 382 } while (0) |
383 | 383 |
384 | 384 |
385 #define ASSEMBLE_STORE_INTEGER(asm_instr, asm_instrx) \ | 385 #define ASSEMBLE_STORE_INTEGER(asm_instr, asm_instrx) \ |
386 do { \ | 386 do { \ |
387 int index = 0; \ | 387 size_t index = 0; \ |
388 AddressingMode mode = kMode_None; \ | 388 AddressingMode mode = kMode_None; \ |
389 MemOperand operand = i.MemoryOperand(&mode, &index); \ | 389 MemOperand operand = i.MemoryOperand(&mode, &index); \ |
390 Register value = i.InputRegister(index); \ | 390 Register value = i.InputRegister(index); \ |
391 if (mode == kMode_MRI) { \ | 391 if (mode == kMode_MRI) { \ |
392 __ asm_instr(value, operand); \ | 392 __ asm_instr(value, operand); \ |
393 } else { \ | 393 } else { \ |
394 __ asm_instrx(value, operand); \ | 394 __ asm_instrx(value, operand); \ |
395 } \ | 395 } \ |
396 DCHECK_EQ(LeaveRC, i.OutputRCBit()); \ | 396 DCHECK_EQ(LeaveRC, i.OutputRCBit()); \ |
397 } while (0) | 397 } while (0) |
398 | 398 |
399 | 399 |
400 // TODO(mbrandy): fix paths that produce garbage in offset's upper 32-bits. | 400 // TODO(mbrandy): fix paths that produce garbage in offset's upper 32-bits. |
401 #define ASSEMBLE_CHECKED_LOAD_FLOAT(asm_instr, asm_instrx, width) \ | 401 #define ASSEMBLE_CHECKED_LOAD_FLOAT(asm_instr, asm_instrx, width) \ |
402 do { \ | 402 do { \ |
403 DoubleRegister result = i.OutputDoubleRegister(); \ | 403 DoubleRegister result = i.OutputDoubleRegister(); \ |
| 404 size_t index = 0; \ |
404 AddressingMode mode = kMode_None; \ | 405 AddressingMode mode = kMode_None; \ |
405 MemOperand operand = i.MemoryOperand(&mode, 0); \ | 406 MemOperand operand = i.MemoryOperand(&mode, index); \ |
406 DCHECK_EQ(kMode_MRR, mode); \ | 407 DCHECK_EQ(kMode_MRR, mode); \ |
407 Register offset = operand.rb(); \ | 408 Register offset = operand.rb(); \ |
408 __ extsw(offset, offset); \ | 409 __ extsw(offset, offset); \ |
409 if (HasRegisterInput(instr, 2)) { \ | 410 if (HasRegisterInput(instr, 2)) { \ |
410 __ cmplw(offset, i.InputRegister(2)); \ | 411 __ cmplw(offset, i.InputRegister(2)); \ |
411 } else { \ | 412 } else { \ |
412 __ cmplwi(offset, i.InputImmediate(2)); \ | 413 __ cmplwi(offset, i.InputImmediate(2)); \ |
413 } \ | 414 } \ |
414 auto ool = new (zone()) OutOfLineLoadNAN##width(this, result); \ | 415 auto ool = new (zone()) OutOfLineLoadNAN##width(this, result); \ |
415 __ bge(ool->entry()); \ | 416 __ bge(ool->entry()); \ |
416 if (mode == kMode_MRI) { \ | 417 if (mode == kMode_MRI) { \ |
417 __ asm_instr(result, operand); \ | 418 __ asm_instr(result, operand); \ |
418 } else { \ | 419 } else { \ |
419 __ asm_instrx(result, operand); \ | 420 __ asm_instrx(result, operand); \ |
420 } \ | 421 } \ |
421 __ bind(ool->exit()); \ | 422 __ bind(ool->exit()); \ |
422 DCHECK_EQ(LeaveRC, i.OutputRCBit()); \ | 423 DCHECK_EQ(LeaveRC, i.OutputRCBit()); \ |
423 } while (0) | 424 } while (0) |
424 | 425 |
425 | 426 |
426 // TODO(mbrandy): fix paths that produce garbage in offset's upper 32-bits. | 427 // TODO(mbrandy): fix paths that produce garbage in offset's upper 32-bits. |
427 #define ASSEMBLE_CHECKED_LOAD_INTEGER(asm_instr, asm_instrx) \ | 428 #define ASSEMBLE_CHECKED_LOAD_INTEGER(asm_instr, asm_instrx) \ |
428 do { \ | 429 do { \ |
429 Register result = i.OutputRegister(); \ | 430 Register result = i.OutputRegister(); \ |
| 431 size_t index = 0; \ |
430 AddressingMode mode = kMode_None; \ | 432 AddressingMode mode = kMode_None; \ |
431 MemOperand operand = i.MemoryOperand(&mode, 0); \ | 433 MemOperand operand = i.MemoryOperand(&mode, index); \ |
432 DCHECK_EQ(kMode_MRR, mode); \ | 434 DCHECK_EQ(kMode_MRR, mode); \ |
433 Register offset = operand.rb(); \ | 435 Register offset = operand.rb(); \ |
434 __ extsw(offset, offset); \ | 436 __ extsw(offset, offset); \ |
435 if (HasRegisterInput(instr, 2)) { \ | 437 if (HasRegisterInput(instr, 2)) { \ |
436 __ cmplw(offset, i.InputRegister(2)); \ | 438 __ cmplw(offset, i.InputRegister(2)); \ |
437 } else { \ | 439 } else { \ |
438 __ cmplwi(offset, i.InputImmediate(2)); \ | 440 __ cmplwi(offset, i.InputImmediate(2)); \ |
439 } \ | 441 } \ |
440 auto ool = new (zone()) OutOfLineLoadZero(this, result); \ | 442 auto ool = new (zone()) OutOfLineLoadZero(this, result); \ |
441 __ bge(ool->entry()); \ | 443 __ bge(ool->entry()); \ |
442 if (mode == kMode_MRI) { \ | 444 if (mode == kMode_MRI) { \ |
443 __ asm_instr(result, operand); \ | 445 __ asm_instr(result, operand); \ |
444 } else { \ | 446 } else { \ |
445 __ asm_instrx(result, operand); \ | 447 __ asm_instrx(result, operand); \ |
446 } \ | 448 } \ |
447 __ bind(ool->exit()); \ | 449 __ bind(ool->exit()); \ |
448 DCHECK_EQ(LeaveRC, i.OutputRCBit()); \ | 450 DCHECK_EQ(LeaveRC, i.OutputRCBit()); \ |
449 } while (0) | 451 } while (0) |
450 | 452 |
451 | 453 |
452 // TODO(mbrandy): fix paths that produce garbage in offset's upper 32-bits. | 454 // TODO(mbrandy): fix paths that produce garbage in offset's upper 32-bits. |
453 #define ASSEMBLE_CHECKED_STORE_FLOAT(asm_instr, asm_instrx) \ | 455 #define ASSEMBLE_CHECKED_STORE_FLOAT(asm_instr, asm_instrx) \ |
454 do { \ | 456 do { \ |
455 Label done; \ | 457 Label done; \ |
| 458 size_t index = 0; \ |
456 AddressingMode mode = kMode_None; \ | 459 AddressingMode mode = kMode_None; \ |
457 MemOperand operand = i.MemoryOperand(&mode, 0); \ | 460 MemOperand operand = i.MemoryOperand(&mode, index); \ |
458 DCHECK_EQ(kMode_MRR, mode); \ | 461 DCHECK_EQ(kMode_MRR, mode); \ |
459 Register offset = operand.rb(); \ | 462 Register offset = operand.rb(); \ |
460 __ extsw(offset, offset); \ | 463 __ extsw(offset, offset); \ |
461 if (HasRegisterInput(instr, 2)) { \ | 464 if (HasRegisterInput(instr, 2)) { \ |
462 __ cmplw(offset, i.InputRegister(2)); \ | 465 __ cmplw(offset, i.InputRegister(2)); \ |
463 } else { \ | 466 } else { \ |
464 __ cmplwi(offset, i.InputImmediate(2)); \ | 467 __ cmplwi(offset, i.InputImmediate(2)); \ |
465 } \ | 468 } \ |
466 __ bge(&done); \ | 469 __ bge(&done); \ |
467 DoubleRegister value = i.InputDoubleRegister(3); \ | 470 DoubleRegister value = i.InputDoubleRegister(3); \ |
468 if (mode == kMode_MRI) { \ | 471 if (mode == kMode_MRI) { \ |
469 __ asm_instr(value, operand); \ | 472 __ asm_instr(value, operand); \ |
470 } else { \ | 473 } else { \ |
471 __ asm_instrx(value, operand); \ | 474 __ asm_instrx(value, operand); \ |
472 } \ | 475 } \ |
473 __ bind(&done); \ | 476 __ bind(&done); \ |
474 DCHECK_EQ(LeaveRC, i.OutputRCBit()); \ | 477 DCHECK_EQ(LeaveRC, i.OutputRCBit()); \ |
475 } while (0) | 478 } while (0) |
476 | 479 |
477 | 480 |
478 // TODO(mbrandy): fix paths that produce garbage in offset's upper 32-bits. | 481 // TODO(mbrandy): fix paths that produce garbage in offset's upper 32-bits. |
479 #define ASSEMBLE_CHECKED_STORE_INTEGER(asm_instr, asm_instrx) \ | 482 #define ASSEMBLE_CHECKED_STORE_INTEGER(asm_instr, asm_instrx) \ |
480 do { \ | 483 do { \ |
481 Label done; \ | 484 Label done; \ |
| 485 size_t index = 0; \ |
482 AddressingMode mode = kMode_None; \ | 486 AddressingMode mode = kMode_None; \ |
483 MemOperand operand = i.MemoryOperand(&mode, 0); \ | 487 MemOperand operand = i.MemoryOperand(&mode, index); \ |
484 DCHECK_EQ(kMode_MRR, mode); \ | 488 DCHECK_EQ(kMode_MRR, mode); \ |
485 Register offset = operand.rb(); \ | 489 Register offset = operand.rb(); \ |
486 __ extsw(offset, offset); \ | 490 __ extsw(offset, offset); \ |
487 if (HasRegisterInput(instr, 2)) { \ | 491 if (HasRegisterInput(instr, 2)) { \ |
488 __ cmplw(offset, i.InputRegister(2)); \ | 492 __ cmplw(offset, i.InputRegister(2)); \ |
489 } else { \ | 493 } else { \ |
490 __ cmplwi(offset, i.InputImmediate(2)); \ | 494 __ cmplwi(offset, i.InputImmediate(2)); \ |
491 } \ | 495 } \ |
492 __ bge(&done); \ | 496 __ bge(&done); \ |
493 Register value = i.InputRegister(3); \ | 497 Register value = i.InputRegister(3); \ |
(...skipping 586 matching lines...)
1080 break; | 1084 break; |
1081 } | 1085 } |
1082 __ bind(&done); | 1086 __ bind(&done); |
1083 } | 1087 } |
1084 | 1088 |
1085 | 1089 |
1086 void CodeGenerator::AssembleArchLookupSwitch(Instruction* instr) { | 1090 void CodeGenerator::AssembleArchLookupSwitch(Instruction* instr) { |
1087 PPCOperandConverter i(this, instr); | 1091 PPCOperandConverter i(this, instr); |
1088 Register input = i.InputRegister(0); | 1092 Register input = i.InputRegister(0); |
1089 for (size_t index = 2; index < instr->InputCount(); index += 2) { | 1093 for (size_t index = 2; index < instr->InputCount(); index += 2) { |
1090 __ Cmpi(input, Operand(i.InputInt32(static_cast<int>(index + 0))), r0); | 1094 __ Cmpi(input, Operand(i.InputInt32(index + 0)), r0); |
1091 __ beq(GetLabel(i.InputRpo(static_cast<int>(index + 1)))); | 1095 __ beq(GetLabel(i.InputRpo(index + 1))); |
1092 } | 1096 } |
1093 AssembleArchJump(i.InputRpo(1)); | 1097 AssembleArchJump(i.InputRpo(1)); |
1094 } | 1098 } |
1095 | 1099 |
1096 | 1100 |
1097 void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) { | 1101 void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) { |
1098 PPCOperandConverter i(this, instr); | 1102 PPCOperandConverter i(this, instr); |
1099 Register input = i.InputRegister(0); | 1103 Register input = i.InputRegister(0); |
1100 int32_t const case_count = static_cast<int32_t>(instr->InputCount() - 2); | 1104 int32_t const case_count = static_cast<int32_t>(instr->InputCount() - 2); |
1101 Label** cases = zone()->NewArray<Label*>(case_count); | 1105 Label** cases = zone()->NewArray<Label*>(case_count); |
(...skipping 18 matching lines...)
1120 | 1124 |
1121 | 1125 |
1122 void CodeGenerator::AssemblePrologue() { | 1126 void CodeGenerator::AssemblePrologue() { |
1123 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); | 1127 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); |
1124 int stack_slots = frame()->GetSpillSlotCount(); | 1128 int stack_slots = frame()->GetSpillSlotCount(); |
1125 if (descriptor->kind() == CallDescriptor::kCallAddress) { | 1129 if (descriptor->kind() == CallDescriptor::kCallAddress) { |
1126 __ function_descriptor(); | 1130 __ function_descriptor(); |
1127 int register_save_area_size = 0; | 1131 int register_save_area_size = 0; |
1128 RegList frame_saves = fp.bit(); | 1132 RegList frame_saves = fp.bit(); |
1129 __ mflr(r0); | 1133 __ mflr(r0); |
1130 #if V8_OOL_CONSTANT_POOL | |
1131 __ Push(r0, fp, kConstantPoolRegister); | |
1132 // Adjust FP to point to saved FP. | |
1133 __ subi(fp, sp, Operand(StandardFrameConstants::kConstantPoolOffset)); | |
1134 register_save_area_size += kPointerSize; | |
1135 frame_saves |= kConstantPoolRegister.bit(); | |
1136 #else | |
1137 __ Push(r0, fp); | 1134 __ Push(r0, fp); |
1138 __ mr(fp, sp); | 1135 __ mr(fp, sp); |
1139 #endif | |
1140 // Save callee-saved registers. | 1136 // Save callee-saved registers. |
1141 const RegList saves = descriptor->CalleeSavedRegisters() & ~frame_saves; | 1137 const RegList saves = descriptor->CalleeSavedRegisters() & ~frame_saves; |
1142 for (int i = Register::kNumRegisters - 1; i >= 0; i--) { | 1138 for (int i = Register::kNumRegisters - 1; i >= 0; i--) { |
1143 if (!((1 << i) & saves)) continue; | 1139 if (!((1 << i) & saves)) continue; |
1144 register_save_area_size += kPointerSize; | 1140 register_save_area_size += kPointerSize; |
1145 } | 1141 } |
1146 frame()->SetRegisterSaveAreaSize(register_save_area_size); | 1142 frame()->SetRegisterSaveAreaSize(register_save_area_size); |
1147 __ MultiPush(saves); | 1143 __ MultiPush(saves); |
1148 } else if (descriptor->IsJSFunctionCall()) { | 1144 } else if (descriptor->IsJSFunctionCall()) { |
1149 CompilationInfo* info = this->info(); | 1145 CompilationInfo* info = this->info(); |
(...skipping 30 matching lines...)
1180 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); | 1176 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); |
1181 int stack_slots = frame()->GetSpillSlotCount(); | 1177 int stack_slots = frame()->GetSpillSlotCount(); |
1182 if (descriptor->kind() == CallDescriptor::kCallAddress) { | 1178 if (descriptor->kind() == CallDescriptor::kCallAddress) { |
1183 if (frame()->GetRegisterSaveAreaSize() > 0) { | 1179 if (frame()->GetRegisterSaveAreaSize() > 0) { |
1184 // Remove this frame's spill slots first. | 1180 // Remove this frame's spill slots first. |
1185 if (stack_slots > 0) { | 1181 if (stack_slots > 0) { |
1186 __ Add(sp, sp, stack_slots * kPointerSize, r0); | 1182 __ Add(sp, sp, stack_slots * kPointerSize, r0); |
1187 } | 1183 } |
1188 // Restore registers. | 1184 // Restore registers. |
1189 RegList frame_saves = fp.bit(); | 1185 RegList frame_saves = fp.bit(); |
1190 #if V8_OOL_CONSTANT_POOL | |
1191 frame_saves |= kConstantPoolRegister.bit(); | |
1192 #endif | |
1193 const RegList saves = descriptor->CalleeSavedRegisters() & ~frame_saves; | 1186 const RegList saves = descriptor->CalleeSavedRegisters() & ~frame_saves; |
1194 if (saves != 0) { | 1187 if (saves != 0) { |
1195 __ MultiPop(saves); | 1188 __ MultiPop(saves); |
1196 } | 1189 } |
1197 } | 1190 } |
1198 __ LeaveFrame(StackFrame::MANUAL); | 1191 __ LeaveFrame(StackFrame::MANUAL); |
1199 __ Ret(); | 1192 __ Ret(); |
1200 } else if (descriptor->IsJSFunctionCall() || stack_slots > 0) { | 1193 } else if (descriptor->IsJSFunctionCall() || stack_slots > 0) { |
1201 int pop_count = descriptor->IsJSFunctionCall() | 1194 int pop_count = descriptor->IsJSFunctionCall() |
1202 ? static_cast<int>(descriptor->JSParameterCount()) | 1195 ? static_cast<int>(descriptor->JSParameterCount()) |
(...skipping 195 matching lines...)
1398 } | 1391 } |
1399 } | 1392 } |
1400 MarkLazyDeoptSite(); | 1393 MarkLazyDeoptSite(); |
1401 } | 1394 } |
1402 | 1395 |
1403 #undef __ | 1396 #undef __ |
1404 | 1397 |
1405 } // namespace compiler | 1398 } // namespace compiler |
1406 } // namespace internal | 1399 } // namespace internal |
1407 } // namespace v8 | 1400 } // namespace v8 |