OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/compiler/code-generator.h" | 5 #include "src/compiler/code-generator.h" |
6 | 6 |
7 #include "src/compiler/code-generator-impl.h" | 7 #include "src/compiler/code-generator-impl.h" |
8 #include "src/compiler/gap-resolver.h" | 8 #include "src/compiler/gap-resolver.h" |
9 #include "src/compiler/node-matchers.h" | 9 #include "src/compiler/node-matchers.h" |
10 #include "src/compiler/osr.h" | 10 #include "src/compiler/osr.h" |
(...skipping 30 matching lines...) Expand all Loading... |
41 | 41 |
42 Operand ToOperand(InstructionOperand* op, int extra = 0) { | 42 Operand ToOperand(InstructionOperand* op, int extra = 0) { |
43 if (op->IsRegister()) { | 43 if (op->IsRegister()) { |
44 DCHECK(extra == 0); | 44 DCHECK(extra == 0); |
45 return Operand(ToRegister(op)); | 45 return Operand(ToRegister(op)); |
46 } else if (op->IsDoubleRegister()) { | 46 } else if (op->IsDoubleRegister()) { |
47 DCHECK(extra == 0); | 47 DCHECK(extra == 0); |
48 return Operand(ToDoubleRegister(op)); | 48 return Operand(ToDoubleRegister(op)); |
49 } | 49 } |
50 DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot()); | 50 DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot()); |
51 FrameOffset offset = | 51 FrameOffset offset = linkage()->GetFrameOffset( |
52 linkage()->GetFrameOffset(AllocatedOperand::cast(op)->index(), frame()); | 52 AllocatedOperand::cast(op)->index(), frame_access_state()); |
53 return Operand(offset.from_stack_pointer() ? esp : ebp, | 53 return Operand(offset.from_stack_pointer() ? esp : ebp, |
54 offset.offset() + extra); | 54 offset.offset() + extra); |
55 } | 55 } |
56 | 56 |
| 57 Operand ToMaterializableOperand(int materializable_offset) { |
| 58 FrameOffset offset = linkage()->GetFrameOffset( |
| 59 Frame::FPOffsetToSlot(materializable_offset), frame_access_state()); |
| 60 return Operand(offset.from_stack_pointer() ? esp : ebp, offset.offset()); |
| 61 } |
| 62 |
57 Operand HighOperand(InstructionOperand* op) { | 63 Operand HighOperand(InstructionOperand* op) { |
58 DCHECK(op->IsDoubleStackSlot()); | 64 DCHECK(op->IsDoubleStackSlot()); |
59 return ToOperand(op, kPointerSize); | 65 return ToOperand(op, kPointerSize); |
60 } | 66 } |
61 | 67 |
62 Immediate ToImmediate(InstructionOperand* operand) { | 68 Immediate ToImmediate(InstructionOperand* operand) { |
63 Constant constant = ToConstant(operand); | 69 Constant constant = ToConstant(operand); |
64 switch (constant.type()) { | 70 switch (constant.type()) { |
65 case Constant::kInt32: | 71 case Constant::kInt32: |
66 return Immediate(constant.ToInt32()); | 72 return Immediate(constant.ToInt32()); |
(...skipping 272 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
339 // Discard only slots that won't be used by new parameters. | 345 // Discard only slots that won't be used by new parameters. |
340 total_discarded_slots -= stack_param_delta; | 346 total_discarded_slots -= stack_param_delta; |
341 if (total_discarded_slots > 0) { | 347 if (total_discarded_slots > 0) { |
342 __ add(esp, Immediate(total_discarded_slots * kPointerSize)); | 348 __ add(esp, Immediate(total_discarded_slots * kPointerSize)); |
343 } | 349 } |
344 } else { | 350 } else { |
345 __ mov(esp, ebp); | 351 __ mov(esp, ebp); |
346 } | 352 } |
347 __ pop(ebp); | 353 __ pop(ebp); |
348 } | 354 } |
| 355 frame_access_state()->UseDefaultFrameAccess(); |
349 } | 356 } |
350 | 357 |
351 | 358 |
352 void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) { | 359 void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) { |
353 if (stack_param_delta > 0) { | 360 if (stack_param_delta > 0) { |
354 int total_discarded_slots = frame()->GetTotalFrameSlotCount(); | 361 int total_discarded_slots = frame()->GetTotalFrameSlotCount(); |
355 // Leave the PC and saved frame pointer on the stack. | 362 // Leave the PC and saved frame pointer on the stack. |
356 total_discarded_slots -= | 363 total_discarded_slots -= |
357 StandardFrameConstants::kFixedFrameSizeFromFp / kPointerSize; | 364 StandardFrameConstants::kFixedFrameSizeFromFp / kPointerSize; |
358 // Discard only slots that won't be used by new parameters. | 365 // Discard only slots that won't be used by new parameters. |
359 total_discarded_slots -= stack_param_delta; | 366 total_discarded_slots -= stack_param_delta; |
360 if (total_discarded_slots < 0) { | 367 if (total_discarded_slots < 0) { |
361 __ sub(esp, Immediate(-total_discarded_slots * kPointerSize)); | 368 __ sub(esp, Immediate(-total_discarded_slots * kPointerSize)); |
| 369 frame_access_state()->IncreaseSPDelta(-total_discarded_slots); |
362 } | 370 } |
363 } | 371 } |
| 372 frame_access_state()->UseSPToAccessFrame(); |
364 } | 373 } |
365 | 374 |
366 | 375 |
367 // Assembles an instruction after register allocation, producing machine code. | 376 // Assembles an instruction after register allocation, producing machine code. |
368 void CodeGenerator::AssembleArchInstruction(Instruction* instr) { | 377 void CodeGenerator::AssembleArchInstruction(Instruction* instr) { |
369 IA32OperandConverter i(this, instr); | 378 IA32OperandConverter i(this, instr); |
370 | 379 |
371 switch (ArchOpcodeField::decode(instr->opcode())) { | 380 switch (ArchOpcodeField::decode(instr->opcode())) { |
372 case kArchCallCodeObject: { | 381 case kArchCallCodeObject: { |
373 EnsureSpaceForLazyDeopt(); | 382 EnsureSpaceForLazyDeopt(); |
374 if (HasImmediateInput(instr, 0)) { | 383 if (HasImmediateInput(instr, 0)) { |
375 Handle<Code> code = Handle<Code>::cast(i.InputHeapObject(0)); | 384 Handle<Code> code = Handle<Code>::cast(i.InputHeapObject(0)); |
376 __ call(code, RelocInfo::CODE_TARGET); | 385 __ call(code, RelocInfo::CODE_TARGET); |
377 } else { | 386 } else { |
378 Register reg = i.InputRegister(0); | 387 Register reg = i.InputRegister(0); |
379 __ add(reg, Immediate(Code::kHeaderSize - kHeapObjectTag)); | 388 __ add(reg, Immediate(Code::kHeaderSize - kHeapObjectTag)); |
380 __ call(reg); | 389 __ call(reg); |
381 } | 390 } |
382 RecordCallPosition(instr); | 391 RecordCallPosition(instr); |
| 392 frame_access_state()->ClearSPDelta(); |
383 break; | 393 break; |
384 } | 394 } |
385 case kArchTailCallCodeObject: { | 395 case kArchTailCallCodeObject: { |
386 int stack_param_delta = i.InputInt32(instr->InputCount() - 1); | 396 int stack_param_delta = i.InputInt32(instr->InputCount() - 1); |
387 AssembleDeconstructActivationRecord(stack_param_delta); | 397 AssembleDeconstructActivationRecord(stack_param_delta); |
388 if (HasImmediateInput(instr, 0)) { | 398 if (HasImmediateInput(instr, 0)) { |
389 Handle<Code> code = Handle<Code>::cast(i.InputHeapObject(0)); | 399 Handle<Code> code = Handle<Code>::cast(i.InputHeapObject(0)); |
390 __ jmp(code, RelocInfo::CODE_TARGET); | 400 __ jmp(code, RelocInfo::CODE_TARGET); |
391 } else { | 401 } else { |
392 Register reg = i.InputRegister(0); | 402 Register reg = i.InputRegister(0); |
393 __ add(reg, Immediate(Code::kHeaderSize - kHeapObjectTag)); | 403 __ add(reg, Immediate(Code::kHeaderSize - kHeapObjectTag)); |
394 __ jmp(reg); | 404 __ jmp(reg); |
395 } | 405 } |
| 406 frame_access_state()->ClearSPDelta(); |
396 break; | 407 break; |
397 } | 408 } |
398 case kArchCallJSFunction: { | 409 case kArchCallJSFunction: { |
399 EnsureSpaceForLazyDeopt(); | 410 EnsureSpaceForLazyDeopt(); |
400 Register func = i.InputRegister(0); | 411 Register func = i.InputRegister(0); |
401 if (FLAG_debug_code) { | 412 if (FLAG_debug_code) { |
402 // Check the function's context matches the context argument. | 413 // Check the function's context matches the context argument. |
403 __ cmp(esi, FieldOperand(func, JSFunction::kContextOffset)); | 414 __ cmp(esi, FieldOperand(func, JSFunction::kContextOffset)); |
404 __ Assert(equal, kWrongFunctionContext); | 415 __ Assert(equal, kWrongFunctionContext); |
405 } | 416 } |
406 __ call(FieldOperand(func, JSFunction::kCodeEntryOffset)); | 417 __ call(FieldOperand(func, JSFunction::kCodeEntryOffset)); |
407 RecordCallPosition(instr); | 418 RecordCallPosition(instr); |
| 419 frame_access_state()->ClearSPDelta(); |
408 break; | 420 break; |
409 } | 421 } |
410 case kArchTailCallJSFunction: { | 422 case kArchTailCallJSFunction: { |
411 Register func = i.InputRegister(0); | 423 Register func = i.InputRegister(0); |
412 if (FLAG_debug_code) { | 424 if (FLAG_debug_code) { |
413 // Check the function's context matches the context argument. | 425 // Check the function's context matches the context argument. |
414 __ cmp(esi, FieldOperand(func, JSFunction::kContextOffset)); | 426 __ cmp(esi, FieldOperand(func, JSFunction::kContextOffset)); |
415 __ Assert(equal, kWrongFunctionContext); | 427 __ Assert(equal, kWrongFunctionContext); |
416 } | 428 } |
417 int stack_param_delta = i.InputInt32(instr->InputCount() - 1); | 429 int stack_param_delta = i.InputInt32(instr->InputCount() - 1); |
418 AssembleDeconstructActivationRecord(stack_param_delta); | 430 AssembleDeconstructActivationRecord(stack_param_delta); |
419 __ jmp(FieldOperand(func, JSFunction::kCodeEntryOffset)); | 431 __ jmp(FieldOperand(func, JSFunction::kCodeEntryOffset)); |
| 432 frame_access_state()->ClearSPDelta(); |
420 break; | 433 break; |
421 } | 434 } |
422 case kArchLazyBailout: { | 435 case kArchLazyBailout: { |
423 EnsureSpaceForLazyDeopt(); | 436 EnsureSpaceForLazyDeopt(); |
424 RecordCallPosition(instr); | 437 RecordCallPosition(instr); |
425 break; | 438 break; |
426 } | 439 } |
427 case kArchPrepareCallCFunction: { | 440 case kArchPrepareCallCFunction: { |
| 441 // Frame alignment requires using FP-relative frame addressing. |
| 442 frame_access_state()->UseFPToAccessFrame(); |
428 int const num_parameters = MiscField::decode(instr->opcode()); | 443 int const num_parameters = MiscField::decode(instr->opcode()); |
429 __ PrepareCallCFunction(num_parameters, i.TempRegister(0)); | 444 __ PrepareCallCFunction(num_parameters, i.TempRegister(0)); |
430 break; | 445 break; |
431 } | 446 } |
432 case kArchPrepareTailCall: | 447 case kArchPrepareTailCall: |
433 AssemblePrepareTailCall(i.InputInt32(instr->InputCount() - 1)); | 448 AssemblePrepareTailCall(i.InputInt32(instr->InputCount() - 1)); |
434 break; | 449 break; |
435 case kArchCallCFunction: { | 450 case kArchCallCFunction: { |
436 int const num_parameters = MiscField::decode(instr->opcode()); | 451 int const num_parameters = MiscField::decode(instr->opcode()); |
437 if (HasImmediateInput(instr, 0)) { | 452 if (HasImmediateInput(instr, 0)) { |
438 ExternalReference ref = i.InputExternalReference(0); | 453 ExternalReference ref = i.InputExternalReference(0); |
439 __ CallCFunction(ref, num_parameters); | 454 __ CallCFunction(ref, num_parameters); |
440 } else { | 455 } else { |
441 Register func = i.InputRegister(0); | 456 Register func = i.InputRegister(0); |
442 __ CallCFunction(func, num_parameters); | 457 __ CallCFunction(func, num_parameters); |
443 } | 458 } |
| 459 frame_access_state()->UseDefaultFrameAccess(); |
| 460 frame_access_state()->ClearSPDelta(); |
444 break; | 461 break; |
445 } | 462 } |
446 case kArchJmp: | 463 case kArchJmp: |
447 AssembleArchJump(i.InputRpo(0)); | 464 AssembleArchJump(i.InputRpo(0)); |
448 break; | 465 break; |
449 case kArchLookupSwitch: | 466 case kArchLookupSwitch: |
450 AssembleArchLookupSwitch(instr); | 467 AssembleArchLookupSwitch(instr); |
451 break; | 468 break; |
452 case kArchTableSwitch: | 469 case kArchTableSwitch: |
453 AssembleArchTableSwitch(instr); | 470 AssembleArchTableSwitch(instr); |
(...skipping 535 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
989 } | 1006 } |
990 } else { | 1007 } else { |
991 __ lea(i.OutputRegister(), i.MemoryOperand()); | 1008 __ lea(i.OutputRegister(), i.MemoryOperand()); |
992 } | 1009 } |
993 break; | 1010 break; |
994 } | 1011 } |
995 case kIA32Push: | 1012 case kIA32Push: |
996 if (instr->InputAt(0)->IsDoubleRegister()) { | 1013 if (instr->InputAt(0)->IsDoubleRegister()) { |
997 __ sub(esp, Immediate(kDoubleSize)); | 1014 __ sub(esp, Immediate(kDoubleSize)); |
998 __ movsd(Operand(esp, 0), i.InputDoubleRegister(0)); | 1015 __ movsd(Operand(esp, 0), i.InputDoubleRegister(0)); |
| 1016 frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize); |
999 } else if (HasImmediateInput(instr, 0)) { | 1017 } else if (HasImmediateInput(instr, 0)) { |
1000 __ push(i.InputImmediate(0)); | 1018 __ push(i.InputImmediate(0)); |
| 1019 frame_access_state()->IncreaseSPDelta(1); |
1001 } else { | 1020 } else { |
1002 __ push(i.InputOperand(0)); | 1021 __ push(i.InputOperand(0)); |
| 1022 frame_access_state()->IncreaseSPDelta(1); |
1003 } | 1023 } |
1004 break; | 1024 break; |
1005 case kIA32Poke: { | 1025 case kIA32Poke: { |
1006 int const slot = MiscField::decode(instr->opcode()); | 1026 int const slot = MiscField::decode(instr->opcode()); |
1007 if (HasImmediateInput(instr, 0)) { | 1027 if (HasImmediateInput(instr, 0)) { |
1008 __ mov(Operand(esp, slot * kPointerSize), i.InputImmediate(0)); | 1028 __ mov(Operand(esp, slot * kPointerSize), i.InputImmediate(0)); |
1009 } else { | 1029 } else { |
1010 __ mov(Operand(esp, slot * kPointerSize), i.InputRegister(0)); | 1030 __ mov(Operand(esp, slot * kPointerSize), i.InputRegister(0)); |
1011 } | 1031 } |
1012 break; | 1032 break; |
(...skipping 359 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1372 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); | 1392 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); |
1373 if (descriptor->kind() == CallDescriptor::kCallAddress) { | 1393 if (descriptor->kind() == CallDescriptor::kCallAddress) { |
1374 // Assemble a prologue similar to the cdecl calling convention. | 1394 // Assemble a prologue similar to the cdecl calling convention. |
1375 __ push(ebp); | 1395 __ push(ebp); |
1376 __ mov(ebp, esp); | 1396 __ mov(ebp, esp); |
1377 } else if (descriptor->IsJSFunctionCall()) { | 1397 } else if (descriptor->IsJSFunctionCall()) { |
1378 // TODO(turbofan): this prologue is redundant with OSR, but needed for | 1398 // TODO(turbofan): this prologue is redundant with OSR, but needed for |
1379 // code aging. | 1399 // code aging. |
1380 CompilationInfo* info = this->info(); | 1400 CompilationInfo* info = this->info(); |
1381 __ Prologue(info->IsCodePreAgingActive()); | 1401 __ Prologue(info->IsCodePreAgingActive()); |
1382 } else if (needs_frame_) { | 1402 } else if (frame()->needs_frame()) { |
1383 __ StubPrologue(); | 1403 __ StubPrologue(); |
1384 } else { | 1404 } else { |
1385 frame()->SetElidedFrameSizeInSlots(kPCOnStackSize / kPointerSize); | 1405 frame()->SetElidedFrameSizeInSlots(kPCOnStackSize / kPointerSize); |
1386 } | 1406 } |
| 1407 frame_access_state()->UseDefaultFrameAccess(); |
1387 | 1408 |
1388 int stack_shrink_slots = frame()->GetSpillSlotCount(); | 1409 int stack_shrink_slots = frame()->GetSpillSlotCount(); |
1389 if (info()->is_osr()) { | 1410 if (info()->is_osr()) { |
1390 // TurboFan OSR-compiled functions cannot be entered directly. | 1411 // TurboFan OSR-compiled functions cannot be entered directly. |
1391 __ Abort(kShouldNotDirectlyEnterOsrFunction); | 1412 __ Abort(kShouldNotDirectlyEnterOsrFunction); |
1392 | 1413 |
1393 // Unoptimized code jumps directly to this entrypoint while the unoptimized | 1414 // Unoptimized code jumps directly to this entrypoint while the unoptimized |
1394 // frame is still on the stack. Optimized code uses OSR values directly from | 1415 // frame is still on the stack. Optimized code uses OSR values directly from |
1395 // the unoptimized frame. Thus, all that needs to be done is to allocate the | 1416 // the unoptimized frame. Thus, all that needs to be done is to allocate the |
1396 // remaining stack slots. | 1417 // remaining stack slots. |
(...skipping 30 matching lines...) Expand all Loading... |
1427 if (saves != 0) { | 1448 if (saves != 0) { |
1428 for (int i = 0; i < Register::kNumRegisters; i++) { | 1449 for (int i = 0; i < Register::kNumRegisters; i++) { |
1429 if (!((1 << i) & saves)) continue; | 1450 if (!((1 << i) & saves)) continue; |
1430 __ pop(Register::from_code(i)); | 1451 __ pop(Register::from_code(i)); |
1431 } | 1452 } |
1432 } | 1453 } |
1433 | 1454 |
1434 if (descriptor->kind() == CallDescriptor::kCallAddress) { | 1455 if (descriptor->kind() == CallDescriptor::kCallAddress) { |
1435 __ mov(esp, ebp); // Move stack pointer back to frame pointer. | 1456 __ mov(esp, ebp); // Move stack pointer back to frame pointer. |
1436 __ pop(ebp); // Pop caller's frame pointer. | 1457 __ pop(ebp); // Pop caller's frame pointer. |
1437 } else if (descriptor->IsJSFunctionCall() || needs_frame_) { | 1458 } else if (descriptor->IsJSFunctionCall() || frame()->needs_frame()) { |
1438 // Canonicalize JSFunction return sites for now. | 1459 // Canonicalize JSFunction return sites for now. |
1439 if (return_label_.is_bound()) { | 1460 if (return_label_.is_bound()) { |
1440 __ jmp(&return_label_); | 1461 __ jmp(&return_label_); |
1441 return; | 1462 return; |
1442 } else { | 1463 } else { |
1443 __ bind(&return_label_); | 1464 __ bind(&return_label_); |
1444 __ mov(esp, ebp); // Move stack pointer back to frame pointer. | 1465 __ mov(esp, ebp); // Move stack pointer back to frame pointer. |
1445 __ pop(ebp); // Pop caller's frame pointer. | 1466 __ pop(ebp); // Pop caller's frame pointer. |
1446 } | 1467 } |
1447 } | 1468 } |
(...skipping 26 matching lines...) Expand all Loading... |
1474 __ pop(dst); | 1495 __ pop(dst); |
1475 } | 1496 } |
1476 } else if (source->IsConstant()) { | 1497 } else if (source->IsConstant()) { |
1477 Constant src_constant = g.ToConstant(source); | 1498 Constant src_constant = g.ToConstant(source); |
1478 if (src_constant.type() == Constant::kHeapObject) { | 1499 if (src_constant.type() == Constant::kHeapObject) { |
1479 Handle<HeapObject> src = src_constant.ToHeapObject(); | 1500 Handle<HeapObject> src = src_constant.ToHeapObject(); |
1480 int offset; | 1501 int offset; |
1481 if (IsMaterializableFromFrame(src, &offset)) { | 1502 if (IsMaterializableFromFrame(src, &offset)) { |
1482 if (destination->IsRegister()) { | 1503 if (destination->IsRegister()) { |
1483 Register dst = g.ToRegister(destination); | 1504 Register dst = g.ToRegister(destination); |
1484 __ mov(dst, Operand(ebp, offset)); | 1505 __ mov(dst, g.ToMaterializableOperand(offset)); |
1485 } else { | 1506 } else { |
1486 DCHECK(destination->IsStackSlot()); | 1507 DCHECK(destination->IsStackSlot()); |
1487 Operand dst = g.ToOperand(destination); | 1508 Operand dst = g.ToOperand(destination); |
1488 __ push(Operand(ebp, offset)); | 1509 __ push(g.ToMaterializableOperand(offset)); |
1489 __ pop(dst); | 1510 __ pop(dst); |
1490 } | 1511 } |
1491 } else if (destination->IsRegister()) { | 1512 } else if (destination->IsRegister()) { |
1492 Register dst = g.ToRegister(destination); | 1513 Register dst = g.ToRegister(destination); |
1493 __ LoadHeapObject(dst, src); | 1514 __ LoadHeapObject(dst, src); |
1494 } else { | 1515 } else { |
1495 DCHECK(destination->IsStackSlot()); | 1516 DCHECK(destination->IsStackSlot()); |
1496 Operand dst = g.ToOperand(destination); | 1517 Operand dst = g.ToOperand(destination); |
1497 AllowDeferredHandleDereference embedding_raw_address; | 1518 AllowDeferredHandleDereference embedding_raw_address; |
1498 if (isolate()->heap()->InNewSpace(*src)) { | 1519 if (isolate()->heap()->InNewSpace(*src)) { |
(...skipping 71 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1570 if (source->IsRegister() && destination->IsRegister()) { | 1591 if (source->IsRegister() && destination->IsRegister()) { |
1571 // Register-register. | 1592 // Register-register. |
1572 Register src = g.ToRegister(source); | 1593 Register src = g.ToRegister(source); |
1573 Register dst = g.ToRegister(destination); | 1594 Register dst = g.ToRegister(destination); |
1574 __ xchg(dst, src); | 1595 __ xchg(dst, src); |
1575 } else if (source->IsRegister() && destination->IsStackSlot()) { | 1596 } else if (source->IsRegister() && destination->IsStackSlot()) { |
1576 // Register-memory. | 1597 // Register-memory. |
1577 __ xchg(g.ToRegister(source), g.ToOperand(destination)); | 1598 __ xchg(g.ToRegister(source), g.ToOperand(destination)); |
1578 } else if (source->IsStackSlot() && destination->IsStackSlot()) { | 1599 } else if (source->IsStackSlot() && destination->IsStackSlot()) { |
1579 // Memory-memory. | 1600 // Memory-memory. |
1580 Operand src = g.ToOperand(source); | 1601 Operand dst1 = g.ToOperand(destination); |
1581 Operand dst = g.ToOperand(destination); | 1602 __ push(dst1); |
1582 __ push(dst); | 1603 frame_access_state()->IncreaseSPDelta(1); |
1583 __ push(src); | 1604 Operand src1 = g.ToOperand(source); |
1584 __ pop(dst); | 1605 __ push(src1); |
1585 __ pop(src); | 1606 Operand dst2 = g.ToOperand(destination); |
| 1607 __ pop(dst2); |
| 1608 frame_access_state()->IncreaseSPDelta(-1); |
| 1609 Operand src2 = g.ToOperand(source); |
| 1610 __ pop(src2); |
1586 } else if (source->IsDoubleRegister() && destination->IsDoubleRegister()) { | 1611 } else if (source->IsDoubleRegister() && destination->IsDoubleRegister()) { |
1587 // XMM register-register swap. | 1612 // XMM register-register swap. |
1588 XMMRegister src = g.ToDoubleRegister(source); | 1613 XMMRegister src = g.ToDoubleRegister(source); |
1589 XMMRegister dst = g.ToDoubleRegister(destination); | 1614 XMMRegister dst = g.ToDoubleRegister(destination); |
1590 __ movaps(kScratchDoubleReg, src); | 1615 __ movaps(kScratchDoubleReg, src); |
1591 __ movaps(src, dst); | 1616 __ movaps(src, dst); |
1592 __ movaps(dst, kScratchDoubleReg); | 1617 __ movaps(dst, kScratchDoubleReg); |
1593 } else if (source->IsDoubleRegister() && destination->IsDoubleStackSlot()) { | 1618 } else if (source->IsDoubleRegister() && destination->IsDoubleStackSlot()) { |
1594 // XMM register-memory swap. | 1619 // XMM register-memory swap. |
1595 XMMRegister reg = g.ToDoubleRegister(source); | 1620 XMMRegister reg = g.ToDoubleRegister(source); |
(...skipping 43 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1639 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc; | 1664 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc; |
1640 __ Nop(padding_size); | 1665 __ Nop(padding_size); |
1641 } | 1666 } |
1642 } | 1667 } |
1643 | 1668 |
1644 #undef __ | 1669 #undef __ |
1645 | 1670 |
1646 } // namespace compiler | 1671 } // namespace compiler |
1647 } // namespace internal | 1672 } // namespace internal |
1648 } // namespace v8 | 1673 } // namespace v8 |
OLD | NEW |