| OLD | NEW | 
|---|---|
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. | 
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be | 
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. | 
| 4 | 4 | 
| 5 #include "src/compiler/code-generator.h" | 5 #include "src/compiler/code-generator.h" | 
| 6 | 6 | 
| 7 #include "src/arm64/frames-arm64.h" | 7 #include "src/arm64/frames-arm64.h" | 
| 8 #include "src/arm64/macro-assembler-arm64.h" | 8 #include "src/arm64/macro-assembler-arm64.h" | 
| 9 #include "src/compiler/code-generator-impl.h" | 9 #include "src/compiler/code-generator-impl.h" | 
| 10 #include "src/compiler/gap-resolver.h" | 10 #include "src/compiler/gap-resolver.h" | 
| (...skipping 187 matching lines...) | (...skipping 187 matching lines...) | 
| 198         UNREACHABLE();  // TODO(dcarney): RPO immediates on arm64. | 198         UNREACHABLE();  // TODO(dcarney): RPO immediates on arm64. | 
| 199         break; | 199         break; | 
| 200     } | 200     } | 
| 201     UNREACHABLE(); | 201     UNREACHABLE(); | 
| 202     return Operand(-1); | 202     return Operand(-1); | 
| 203   } | 203   } | 
| 204 | 204 | 
| 205   MemOperand ToMemOperand(InstructionOperand* op, MacroAssembler* masm) const { | 205   MemOperand ToMemOperand(InstructionOperand* op, MacroAssembler* masm) const { | 
| 206     DCHECK(op != NULL); | 206     DCHECK(op != NULL); | 
| 207     DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot()); | 207     DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot()); | 
| 208     FrameOffset offset = | 208     FrameOffset offset = linkage()->GetFrameOffset( | 
| 209         linkage()->GetFrameOffset(AllocatedOperand::cast(op)->index(), frame()); | 209         AllocatedOperand::cast(op)->index(), frame_access_state()); | 
| 210     if (offset.from_frame_pointer()) { | 210     if (offset.from_frame_pointer()) { | 
| 211       int from_sp = | 211       int from_sp = | 
| 212           offset.offset() + (frame()->GetSpToFpSlotCount() * kPointerSize); | 212           offset.offset() + | 
|  | 213           ((frame()->GetSpToFpSlotCount() + frame_access_state()->sp_delta()) * | 
|  | 214            kPointerSize); | 
| 213       // Convert FP-offsets to SP-offsets if it results in better code. | 215       // Convert FP-offsets to SP-offsets if it results in better code. | 
| 214       if (Assembler::IsImmLSUnscaled(from_sp) || | 216       if (Assembler::IsImmLSUnscaled(from_sp) || | 
| 215           Assembler::IsImmLSScaled(from_sp, LSDoubleWord)) { | 217           Assembler::IsImmLSScaled(from_sp, LSDoubleWord)) { | 
| 216         offset = FrameOffset::FromStackPointer(from_sp); | 218         offset = FrameOffset::FromStackPointer(from_sp); | 
| 217       } | 219       } | 
| 218     } | 220     } | 
| 219     return MemOperand(offset.from_stack_pointer() ? masm->StackPointer() : fp, | 221     return MemOperand(offset.from_stack_pointer() ? masm->StackPointer() : fp, | 
| 220                       offset.offset()); | 222                       offset.offset()); | 
| 221   } | 223   } | 
| 222 }; | 224 }; | 
| (...skipping 232 matching lines...) | (...skipping 232 matching lines...) | 
| 455                    imm % (width));                                          \ | 457                    imm % (width));                                          \ | 
| 456     }                                                                       \ | 458     }                                                                       \ | 
| 457   } while (0) | 459   } while (0) | 
| 458 | 460 | 
| 459 | 461 | 
| 460 void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) { | 462 void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) { | 
| 461   int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); | 463   int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); | 
| 462   if (sp_slot_delta > 0) { | 464   if (sp_slot_delta > 0) { | 
| 463     __ Add(jssp, jssp, Operand(sp_slot_delta * kPointerSize)); | 465     __ Add(jssp, jssp, Operand(sp_slot_delta * kPointerSize)); | 
| 464   } | 466   } | 
| 465   CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); | 467   if (frame()->needs_frame()) { | 
| 466   int spill_slots = frame()->GetSpillSlotCount(); |  | 
| 467   bool has_frame = descriptor->IsJSFunctionCall() || spill_slots > 0; |  | 
| 468   if (has_frame) { |  | 
| 469     __ Pop(fp, lr); | 468     __ Pop(fp, lr); | 
| 470   } | 469   } | 
|  | 470   frame_access_state()->UseDefaultFrameAccess(); | 
| 471 } | 471 } | 
| 472 | 472 | 
| 473 | 473 | 
| 474 void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) { | 474 void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) { | 
| 475   int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); | 475   int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); | 
| 476   if (sp_slot_delta < 0) { | 476   if (sp_slot_delta < 0) { | 
| 477     __ Sub(jssp, jssp, Operand(-sp_slot_delta * kPointerSize)); | 477     __ Sub(jssp, jssp, Operand(-sp_slot_delta * kPointerSize)); | 
| 478     frame()->AllocateOutgoingParameterSlots(-sp_slot_delta); | 478     frame_access_state()->IncreaseSPDelta(-sp_slot_delta); | 
| 479   } | 479   } | 
|  | 480   frame_access_state()->UseSPToAccessFrame(); | 
| 480 } | 481 } | 
| 481 | 482 | 
| 482 | 483 | 
| 483 // Assembles an instruction after register allocation, producing machine code. | 484 // Assembles an instruction after register allocation, producing machine code. | 
| 484 void CodeGenerator::AssembleArchInstruction(Instruction* instr) { | 485 void CodeGenerator::AssembleArchInstruction(Instruction* instr) { | 
| 485   Arm64OperandConverter i(this, instr); | 486   Arm64OperandConverter i(this, instr); | 
| 486   InstructionCode opcode = instr->opcode(); | 487   InstructionCode opcode = instr->opcode(); | 
| 487   switch (ArchOpcodeField::decode(opcode)) { | 488   switch (ArchOpcodeField::decode(opcode)) { | 
| 488     case kArchCallCodeObject: { | 489     case kArchCallCodeObject: { | 
| 489       EnsureSpaceForLazyDeopt(); | 490       EnsureSpaceForLazyDeopt(); | 
| 490       if (instr->InputAt(0)->IsImmediate()) { | 491       if (instr->InputAt(0)->IsImmediate()) { | 
| 491         __ Call(Handle<Code>::cast(i.InputHeapObject(0)), | 492         __ Call(Handle<Code>::cast(i.InputHeapObject(0)), | 
| 492                 RelocInfo::CODE_TARGET); | 493                 RelocInfo::CODE_TARGET); | 
| 493       } else { | 494       } else { | 
| 494         Register target = i.InputRegister(0); | 495         Register target = i.InputRegister(0); | 
| 495         __ Add(target, target, Code::kHeaderSize - kHeapObjectTag); | 496         __ Add(target, target, Code::kHeaderSize - kHeapObjectTag); | 
| 496         __ Call(target); | 497         __ Call(target); | 
| 497       } | 498       } | 
| 498       frame()->ClearOutgoingParameterSlots(); | 499       frame_access_state()->ClearSPDelta(); | 
| 499       RecordCallPosition(instr); | 500       RecordCallPosition(instr); | 
| 500       break; | 501       break; | 
| 501     } | 502     } | 
| 502     case kArchTailCallCodeObject: { | 503     case kArchTailCallCodeObject: { | 
| 503       int stack_param_delta = i.InputInt32(instr->InputCount() - 1); | 504       int stack_param_delta = i.InputInt32(instr->InputCount() - 1); | 
| 504       AssembleDeconstructActivationRecord(stack_param_delta); | 505       AssembleDeconstructActivationRecord(stack_param_delta); | 
| 505       if (instr->InputAt(0)->IsImmediate()) { | 506       if (instr->InputAt(0)->IsImmediate()) { | 
| 506         __ Jump(Handle<Code>::cast(i.InputHeapObject(0)), | 507         __ Jump(Handle<Code>::cast(i.InputHeapObject(0)), | 
| 507                 RelocInfo::CODE_TARGET); | 508                 RelocInfo::CODE_TARGET); | 
| 508       } else { | 509       } else { | 
| 509         Register target = i.InputRegister(0); | 510         Register target = i.InputRegister(0); | 
| 510         __ Add(target, target, Code::kHeaderSize - kHeapObjectTag); | 511         __ Add(target, target, Code::kHeaderSize - kHeapObjectTag); | 
| 511         __ Jump(target); | 512         __ Jump(target); | 
| 512       } | 513       } | 
| 513       frame()->ClearOutgoingParameterSlots(); | 514       frame_access_state()->ClearSPDelta(); | 
| 514       break; | 515       break; | 
| 515     } | 516     } | 
| 516     case kArchCallJSFunction: { | 517     case kArchCallJSFunction: { | 
| 517       EnsureSpaceForLazyDeopt(); | 518       EnsureSpaceForLazyDeopt(); | 
| 518       Register func = i.InputRegister(0); | 519       Register func = i.InputRegister(0); | 
| 519       if (FLAG_debug_code) { | 520       if (FLAG_debug_code) { | 
| 520         // Check the function's context matches the context argument. | 521         // Check the function's context matches the context argument. | 
| 521         UseScratchRegisterScope scope(masm()); | 522         UseScratchRegisterScope scope(masm()); | 
| 522         Register temp = scope.AcquireX(); | 523         Register temp = scope.AcquireX(); | 
| 523         __ Ldr(temp, FieldMemOperand(func, JSFunction::kContextOffset)); | 524         __ Ldr(temp, FieldMemOperand(func, JSFunction::kContextOffset)); | 
| 524         __ cmp(cp, temp); | 525         __ cmp(cp, temp); | 
| 525         __ Assert(eq, kWrongFunctionContext); | 526         __ Assert(eq, kWrongFunctionContext); | 
| 526       } | 527       } | 
| 527       __ Ldr(x10, FieldMemOperand(func, JSFunction::kCodeEntryOffset)); | 528       __ Ldr(x10, FieldMemOperand(func, JSFunction::kCodeEntryOffset)); | 
| 528       __ Call(x10); | 529       __ Call(x10); | 
| 529       frame()->ClearOutgoingParameterSlots(); | 530       frame_access_state()->ClearSPDelta(); | 
| 530       RecordCallPosition(instr); | 531       RecordCallPosition(instr); | 
| 531       break; | 532       break; | 
| 532     } | 533     } | 
| 533     case kArchTailCallJSFunction: { | 534     case kArchTailCallJSFunction: { | 
| 534       Register func = i.InputRegister(0); | 535       Register func = i.InputRegister(0); | 
| 535       if (FLAG_debug_code) { | 536       if (FLAG_debug_code) { | 
| 536         // Check the function's context matches the context argument. | 537         // Check the function's context matches the context argument. | 
| 537         UseScratchRegisterScope scope(masm()); | 538         UseScratchRegisterScope scope(masm()); | 
| 538         Register temp = scope.AcquireX(); | 539         Register temp = scope.AcquireX(); | 
| 539         __ Ldr(temp, FieldMemOperand(func, JSFunction::kContextOffset)); | 540         __ Ldr(temp, FieldMemOperand(func, JSFunction::kContextOffset)); | 
| 540         __ cmp(cp, temp); | 541         __ cmp(cp, temp); | 
| 541         __ Assert(eq, kWrongFunctionContext); | 542         __ Assert(eq, kWrongFunctionContext); | 
| 542       } | 543       } | 
| 543       int stack_param_delta = i.InputInt32(instr->InputCount() - 1); | 544       int stack_param_delta = i.InputInt32(instr->InputCount() - 1); | 
| 544       AssembleDeconstructActivationRecord(stack_param_delta); | 545       AssembleDeconstructActivationRecord(stack_param_delta); | 
| 545       __ Ldr(x10, FieldMemOperand(func, JSFunction::kCodeEntryOffset)); | 546       __ Ldr(x10, FieldMemOperand(func, JSFunction::kCodeEntryOffset)); | 
| 546       __ Jump(x10); | 547       __ Jump(x10); | 
| 547       frame()->ClearOutgoingParameterSlots(); | 548       frame_access_state()->ClearSPDelta(); | 
| 548       break; | 549       break; | 
| 549     } | 550     } | 
| 550     case kArchLazyBailout: { | 551     case kArchLazyBailout: { | 
| 551       EnsureSpaceForLazyDeopt(); | 552       EnsureSpaceForLazyDeopt(); | 
| 552       RecordCallPosition(instr); | 553       RecordCallPosition(instr); | 
| 553       break; | 554       break; | 
| 554     } | 555     } | 
| 555     case kArchPrepareCallCFunction: | 556     case kArchPrepareCallCFunction: | 
| 556       // We don't need kArchPrepareCallCFunction on arm64 as the instruction | 557       // We don't need kArchPrepareCallCFunction on arm64 as the instruction | 
| 557       // selector already perform a Claim to reserve space on the stack and | 558       // selector already perform a Claim to reserve space on the stack and | 
| 558       // guarantee correct alignment of stack pointer. | 559       // guarantee correct alignment of stack pointer. | 
| 559       UNREACHABLE(); | 560       UNREACHABLE(); | 
| 560       break; | 561       break; | 
| 561     case kArchPrepareTailCall: | 562     case kArchPrepareTailCall: | 
| 562       AssemblePrepareTailCall(i.InputInt32(instr->InputCount() - 1)); | 563       AssemblePrepareTailCall(i.InputInt32(instr->InputCount() - 1)); | 
| 563       break; | 564       break; | 
| 564     case kArchCallCFunction: { | 565     case kArchCallCFunction: { | 
| 565       int const num_parameters = MiscField::decode(instr->opcode()); | 566       int const num_parameters = MiscField::decode(instr->opcode()); | 
| 566       if (instr->InputAt(0)->IsImmediate()) { | 567       if (instr->InputAt(0)->IsImmediate()) { | 
| 567         ExternalReference ref = i.InputExternalReference(0); | 568         ExternalReference ref = i.InputExternalReference(0); | 
| 568         __ CallCFunction(ref, num_parameters, 0); | 569         __ CallCFunction(ref, num_parameters, 0); | 
| 569       } else { | 570       } else { | 
| 570         Register func = i.InputRegister(0); | 571         Register func = i.InputRegister(0); | 
| 571         __ CallCFunction(func, num_parameters, 0); | 572         __ CallCFunction(func, num_parameters, 0); | 
| 572       } | 573       } | 
| 573       // CallCFunction only supports register arguments so we never need to call | 574       // CallCFunction only supports register arguments so we never need to call | 
| 574       // frame()->ClearOutgoingParameterSlots() here. | 575       // frame()->ClearOutgoingParameterSlots() here. | 
| 575       DCHECK(frame()->GetOutgoingParameterSlotCount() == 0); | 576       DCHECK(frame_access_state()->sp_delta() == 0); | 
| 576       break; | 577       break; | 
| 577     } | 578     } | 
| 578     case kArchJmp: | 579     case kArchJmp: | 
| 579       AssembleArchJump(i.InputRpo(0)); | 580       AssembleArchJump(i.InputRpo(0)); | 
| 580       break; | 581       break; | 
| 581     case kArchTableSwitch: | 582     case kArchTableSwitch: | 
| 582       AssembleArchTableSwitch(instr); | 583       AssembleArchTableSwitch(instr); | 
| 583       break; | 584       break; | 
| 584     case kArchLookupSwitch: | 585     case kArchLookupSwitch: | 
| 585       AssembleArchLookupSwitch(instr); | 586       AssembleArchLookupSwitch(instr); | 
| (...skipping 265 matching lines...) | (...skipping 265 matching lines...) | 
| 851       break; | 852       break; | 
| 852     case kArm64TestAndBranch32: | 853     case kArm64TestAndBranch32: | 
| 853     case kArm64TestAndBranch: | 854     case kArm64TestAndBranch: | 
| 854       // Pseudo instructions turned into tbz/tbnz in AssembleArchBranch. | 855       // Pseudo instructions turned into tbz/tbnz in AssembleArchBranch. | 
| 855       break; | 856       break; | 
| 856     case kArm64CompareAndBranch32: | 857     case kArm64CompareAndBranch32: | 
| 857       // Pseudo instruction turned into cbz/cbnz in AssembleArchBranch. | 858       // Pseudo instruction turned into cbz/cbnz in AssembleArchBranch. | 
| 858       break; | 859       break; | 
| 859     case kArm64ClaimForCallArguments: { | 860     case kArm64ClaimForCallArguments: { | 
| 860       __ Claim(i.InputInt32(0)); | 861       __ Claim(i.InputInt32(0)); | 
| 861       frame()->AllocateOutgoingParameterSlots(i.InputInt32(0)); | 862       frame_access_state()->IncreaseSPDelta(i.InputInt32(0)); | 
| 862       break; | 863       break; | 
| 863     } | 864     } | 
| 864     case kArm64Poke: { | 865     case kArm64Poke: { | 
| 865       Operand operand(i.InputInt32(1) * kPointerSize); | 866       Operand operand(i.InputInt32(1) * kPointerSize); | 
| 866       __ Poke(i.InputRegister(0), operand); | 867       __ Poke(i.InputRegister(0), operand); | 
| 867       break; | 868       break; | 
| 868     } | 869     } | 
| 869     case kArm64PokePair: { | 870     case kArm64PokePair: { | 
| 870       int slot = i.InputInt32(2) - 1; | 871       int slot = i.InputInt32(2) - 1; | 
| 871       __ PokePair(i.InputRegister(1), i.InputRegister(0), slot * kPointerSize); | 872       __ PokePair(i.InputRegister(1), i.InputRegister(0), slot * kPointerSize); | 
| (...skipping 384 matching lines...) | (...skipping 384 matching lines...) | 
| 1256 void CodeGenerator::AssembleDeoptimizerCall( | 1257 void CodeGenerator::AssembleDeoptimizerCall( | 
| 1257     int deoptimization_id, Deoptimizer::BailoutType bailout_type) { | 1258     int deoptimization_id, Deoptimizer::BailoutType bailout_type) { | 
| 1258   Address deopt_entry = Deoptimizer::GetDeoptimizationEntry( | 1259   Address deopt_entry = Deoptimizer::GetDeoptimizationEntry( | 
| 1259       isolate(), deoptimization_id, bailout_type); | 1260       isolate(), deoptimization_id, bailout_type); | 
| 1260   __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY); | 1261   __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY); | 
| 1261 } | 1262 } | 
| 1262 | 1263 | 
| 1263 | 1264 | 
| 1264 void CodeGenerator::AssemblePrologue() { | 1265 void CodeGenerator::AssemblePrologue() { | 
| 1265   CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); | 1266   CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); | 
| 1266   if (descriptor->kind() == CallDescriptor::kCallAddress) { | 1267   if (descriptor->IsCFunctionCall()) { | 
| 1267     __ SetStackPointer(csp); | 1268     __ SetStackPointer(csp); | 
| 1268     __ Push(lr, fp); | 1269     __ Push(lr, fp); | 
| 1269     __ Mov(fp, csp); | 1270     __ Mov(fp, csp); | 
| 1270   } else if (descriptor->IsJSFunctionCall()) { | 1271   } else if (descriptor->IsJSFunctionCall()) { | 
| 1271     CompilationInfo* info = this->info(); | 1272     CompilationInfo* info = this->info(); | 
| 1272     __ SetStackPointer(jssp); | 1273     __ SetStackPointer(jssp); | 
| 1273     __ Prologue(info->IsCodePreAgingActive()); | 1274     __ Prologue(info->IsCodePreAgingActive()); | 
| 1274   } else if (needs_frame_) { | 1275   } else if (frame()->needs_frame()) { | 
| 1275     __ SetStackPointer(jssp); | 1276     __ SetStackPointer(jssp); | 
| 1276     __ StubPrologue(); | 1277     __ StubPrologue(); | 
| 1277   } else { | 1278   } else { | 
| 1278     frame()->SetElidedFrameSizeInSlots(0); | 1279     frame()->SetElidedFrameSizeInSlots(0); | 
| 1279   } | 1280   } | 
|  | 1281   frame_access_state()->UseDefaultFrameAccess(); | 
| 1280 | 1282 | 
| 1281   int stack_shrink_slots = frame()->GetSpillSlotCount(); | 1283   int stack_shrink_slots = frame()->GetSpillSlotCount(); | 
| 1282   if (info()->is_osr()) { | 1284   if (info()->is_osr()) { | 
| 1283     // TurboFan OSR-compiled functions cannot be entered directly. | 1285     // TurboFan OSR-compiled functions cannot be entered directly. | 
| 1284     __ Abort(kShouldNotDirectlyEnterOsrFunction); | 1286     __ Abort(kShouldNotDirectlyEnterOsrFunction); | 
| 1285 | 1287 | 
| 1286     // Unoptimized code jumps directly to this entrypoint while the unoptimized | 1288     // Unoptimized code jumps directly to this entrypoint while the unoptimized | 
| 1287     // frame is still on the stack. Optimized code uses OSR values directly from | 1289     // frame is still on the stack. Optimized code uses OSR values directly from | 
| 1288     // the unoptimized frame. Thus, all that needs to be done is to allocate the | 1290     // the unoptimized frame. Thus, all that needs to be done is to allocate the | 
| 1289     // remaining stack slots. | 1291     // remaining stack slots. | 
| (...skipping 46 matching lines...) | (...skipping 46 matching lines...) | 
| 1336   } | 1338   } | 
| 1337 | 1339 | 
| 1338   // Restore fp registers. | 1340   // Restore fp registers. | 
| 1339   CPURegList saves_fp = CPURegList(CPURegister::kFPRegister, kDRegSizeInBits, | 1341   CPURegList saves_fp = CPURegList(CPURegister::kFPRegister, kDRegSizeInBits, | 
| 1340                                    descriptor->CalleeSavedFPRegisters()); | 1342                                    descriptor->CalleeSavedFPRegisters()); | 
| 1341   if (saves_fp.Count() != 0) { | 1343   if (saves_fp.Count() != 0) { | 
| 1342     __ PopCPURegList(saves_fp); | 1344     __ PopCPURegList(saves_fp); | 
| 1343   } | 1345   } | 
| 1344 | 1346 | 
| 1345   int pop_count = static_cast<int>(descriptor->StackParameterCount()); | 1347   int pop_count = static_cast<int>(descriptor->StackParameterCount()); | 
| 1346   if (descriptor->kind() == CallDescriptor::kCallAddress) { | 1348   if (descriptor->IsCFunctionCall()) { | 
| 1347     __ Mov(csp, fp); | 1349     __ Mov(csp, fp); | 
| 1348     __ Pop(fp, lr); | 1350     __ Pop(fp, lr); | 
| 1349   } else if (descriptor->IsJSFunctionCall() || needs_frame_) { | 1351   } else if (frame()->needs_frame()) { | 
| 1350     // Canonicalize JSFunction return sites for now. | 1352     // Canonicalize JSFunction return sites for now. | 
| 1351     if (return_label_.is_bound()) { | 1353     if (return_label_.is_bound()) { | 
| 1352       __ B(&return_label_); | 1354       __ B(&return_label_); | 
| 1353       return; | 1355       return; | 
| 1354     } else { | 1356     } else { | 
| 1355       __ Bind(&return_label_); | 1357       __ Bind(&return_label_); | 
| 1356       __ Mov(jssp, fp); | 1358       __ Mov(jssp, fp); | 
| 1357       __ Pop(fp, lr); | 1359       __ Pop(fp, lr); | 
| 1358     } | 1360     } | 
| 1359   } | 1361   } | 
| (...skipping 184 matching lines...) | (...skipping 184 matching lines...) | 
| 1544       padding_size -= kInstructionSize; | 1546       padding_size -= kInstructionSize; | 
| 1545     } | 1547     } | 
| 1546   } | 1548   } | 
| 1547 } | 1549 } | 
| 1548 | 1550 | 
| 1549 #undef __ | 1551 #undef __ | 
| 1550 | 1552 | 
| 1551 }  // namespace compiler | 1553 }  // namespace compiler | 
| 1552 }  // namespace internal | 1554 }  // namespace internal | 
| 1553 }  // namespace v8 | 1555 }  // namespace v8 | 
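
The gist of the patch: outgoing-parameter bookkeeping moves off `frame()` (`AllocateOutgoingParameterSlots` / `ClearOutgoingParameterSlots`) and onto a `frame_access_state()` object that tracks an SP delta, so frame slots can still be addressed through the stack pointer after a `Claim` for call arguments. Below is a minimal, hypothetical C++ sketch of that bookkeeping; `FrameAccessStateSketch` and its accessors are illustrative stand-ins, not the real V8 `FrameAccessState` API, and only mirror how the delta feeds the FP-to-SP offset conversion done in `ToMemOperand` above.

```cpp
#include <cassert>

// Simplified stand-in for the frame-access-state bookkeeping in the patch.
// It records how many slots the stack pointer has moved below its canonical
// position (sp_delta) and whether frame slots are currently reached via SP.
class FrameAccessStateSketch {
 public:
  explicit FrameAccessStateSketch(int sp_to_fp_slots)
      : sp_to_fp_slots_(sp_to_fp_slots), sp_delta_(0), access_via_sp_(false) {}

  // Called when slots are claimed for outgoing call arguments
  // (cf. kArm64ClaimForCallArguments / AssemblePrepareTailCall in the diff).
  void IncreaseSPDelta(int slots) { sp_delta_ += slots; }
  // Called once a call has consumed the claimed slots (cf. ClearSPDelta).
  void ClearSPDelta() { sp_delta_ = 0; }

  // Illustrative versions of the mode switches in the diff; the real class
  // may do more than toggle a flag.
  void UseSPToAccessFrame() { access_via_sp_ = true; }
  void UseDefaultFrameAccess() { access_via_sp_ = false; }

  int sp_delta() const { return sp_delta_; }
  bool accesses_frame_via_sp() const { return access_via_sp_; }

  // Mirror of the ToMemOperand change: an FP-relative byte offset becomes an
  // SP-relative one by adding the fixed SP-to-FP distance plus the current
  // SP delta, both in slots scaled by the pointer size.
  int FpOffsetToSpOffset(int fp_offset_bytes, int pointer_size) const {
    return fp_offset_bytes + (sp_to_fp_slots_ + sp_delta_) * pointer_size;
  }

 private:
  int sp_to_fp_slots_;
  int sp_delta_;
  bool access_via_sp_;
};

int main() {
  const int kPointerSize = 8;  // arm64 pointer width
  FrameAccessStateSketch state(/*sp_to_fp_slots=*/4);

  int before = state.FpOffsetToSpOffset(-16, kPointerSize);
  state.IncreaseSPDelta(2);  // e.g. after a Claim(2) for outgoing arguments
  int after = state.FpOffsetToSpOffset(-16, kPointerSize);
  // Same slot is still reached even though SP moved down by two slots.
  assert(after == before + 2 * kPointerSize);

  state.ClearSPDelta();        // call done, SP back at its canonical height
  state.UseSPToAccessFrame();  // e.g. while preparing a tail call
  assert(state.accesses_frame_via_sp());
  return 0;
}
```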