OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/compiler/code-generator.h" | 5 #include "src/compiler/code-generator.h" |
6 | 6 |
7 #include "src/compiler/code-generator-impl.h" | 7 #include "src/compiler/code-generator-impl.h" |
8 #include "src/compiler/gap-resolver.h" | 8 #include "src/compiler/gap-resolver.h" |
9 #include "src/compiler/node-matchers.h" | 9 #include "src/compiler/node-matchers.h" |
10 #include "src/compiler/osr.h" | 10 #include "src/compiler/osr.h" |
(...skipping 30 matching lines...) |
41 | 41 |
42 Operand ToOperand(InstructionOperand* op, int extra = 0) { | 42 Operand ToOperand(InstructionOperand* op, int extra = 0) { |
43 if (op->IsRegister()) { | 43 if (op->IsRegister()) { |
44 DCHECK(extra == 0); | 44 DCHECK(extra == 0); |
45 return Operand(ToRegister(op)); | 45 return Operand(ToRegister(op)); |
46 } else if (op->IsDoubleRegister()) { | 46 } else if (op->IsDoubleRegister()) { |
47 DCHECK(extra == 0); | 47 DCHECK(extra == 0); |
48 return Operand(ToDoubleRegister(op)); | 48 return Operand(ToDoubleRegister(op)); |
49 } | 49 } |
50 DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot()); | 50 DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot()); |
51 FrameOffset offset = | 51 FrameOffset offset = frame_access_state()->GetFrameOffset( |
52 linkage()->GetFrameOffset(AllocatedOperand::cast(op)->index(), frame()); | 52 AllocatedOperand::cast(op)->index()); |
53 return Operand(offset.from_stack_pointer() ? esp : ebp, | 53 return Operand(offset.from_stack_pointer() ? esp : ebp, |
54 offset.offset() + extra); | 54 offset.offset() + extra); |
55 } | 55 } |
56 | 56 |
| 57 Operand ToMaterializableOperand(int materializable_offset) { |
| 58 FrameOffset offset = frame_access_state()->GetFrameOffset( |
| 59 Frame::FPOffsetToSlot(materializable_offset)); |
| 60 return Operand(offset.from_stack_pointer() ? esp : ebp, offset.offset()); |
| 61 } |
| 62 |
57 Operand HighOperand(InstructionOperand* op) { | 63 Operand HighOperand(InstructionOperand* op) { |
58 DCHECK(op->IsDoubleStackSlot()); | 64 DCHECK(op->IsDoubleStackSlot()); |
59 return ToOperand(op, kPointerSize); | 65 return ToOperand(op, kPointerSize); |
60 } | 66 } |
61 | 67 |
62 Immediate ToImmediate(InstructionOperand* operand) { | 68 Immediate ToImmediate(InstructionOperand* operand) { |
63 Constant constant = ToConstant(operand); | 69 Constant constant = ToConstant(operand); |
64 switch (constant.type()) { | 70 switch (constant.type()) { |
65 case Constant::kInt32: | 71 case Constant::kInt32: |
66 return Immediate(constant.ToInt32()); | 72 return Immediate(constant.ToInt32()); |
(...skipping 257 matching lines...) |
324 } \ | 330 } \ |
325 __ bind(&done); \ | 331 __ bind(&done); \ |
326 } while (false) | 332 } while (false) |
327 | 333 |
328 | 334 |
329 void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) { | 335 void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) { |
330 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); | 336 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); |
331 if (sp_slot_delta > 0) { | 337 if (sp_slot_delta > 0) { |
332 __ add(esp, Immediate(sp_slot_delta * kPointerSize)); | 338 __ add(esp, Immediate(sp_slot_delta * kPointerSize)); |
333 } | 339 } |
334 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); | 340 if (frame()->needs_frame()) { |
335 int spill_slots = frame()->GetSpillSlotCount(); | |
336 bool has_frame = descriptor->IsJSFunctionCall() || spill_slots > 0; | |
337 if (has_frame) { | |
338 __ pop(ebp); | 341 __ pop(ebp); |
339 } | 342 } |
| 343 frame_access_state()->SetFrameAccessToDefault(); |
340 } | 344 } |
341 | 345 |
342 | 346 |
343 void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) { | 347 void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) { |
344 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); | 348 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); |
345 if (sp_slot_delta < 0) { | 349 if (sp_slot_delta < 0) { |
346 __ sub(esp, Immediate(-sp_slot_delta * kPointerSize)); | 350 __ sub(esp, Immediate(-sp_slot_delta * kPointerSize)); |
| 351 frame_access_state()->IncreaseSPDelta(-sp_slot_delta); |
347 } | 352 } |
| 353 frame_access_state()->SetFrameAccessToSP(); |
348 } | 354 } |
349 | 355 |
350 | 356 |
351 // Assembles an instruction after register allocation, producing machine code. | 357 // Assembles an instruction after register allocation, producing machine code. |
352 void CodeGenerator::AssembleArchInstruction(Instruction* instr) { | 358 void CodeGenerator::AssembleArchInstruction(Instruction* instr) { |
353 IA32OperandConverter i(this, instr); | 359 IA32OperandConverter i(this, instr); |
354 | 360 |
355 switch (ArchOpcodeField::decode(instr->opcode())) { | 361 switch (ArchOpcodeField::decode(instr->opcode())) { |
356 case kArchCallCodeObject: { | 362 case kArchCallCodeObject: { |
357 EnsureSpaceForLazyDeopt(); | 363 EnsureSpaceForLazyDeopt(); |
358 if (HasImmediateInput(instr, 0)) { | 364 if (HasImmediateInput(instr, 0)) { |
359 Handle<Code> code = Handle<Code>::cast(i.InputHeapObject(0)); | 365 Handle<Code> code = Handle<Code>::cast(i.InputHeapObject(0)); |
360 __ call(code, RelocInfo::CODE_TARGET); | 366 __ call(code, RelocInfo::CODE_TARGET); |
361 } else { | 367 } else { |
362 Register reg = i.InputRegister(0); | 368 Register reg = i.InputRegister(0); |
363 __ add(reg, Immediate(Code::kHeaderSize - kHeapObjectTag)); | 369 __ add(reg, Immediate(Code::kHeaderSize - kHeapObjectTag)); |
364 __ call(reg); | 370 __ call(reg); |
365 } | 371 } |
366 RecordCallPosition(instr); | 372 RecordCallPosition(instr); |
| 373 frame_access_state()->ClearSPDelta(); |
367 break; | 374 break; |
368 } | 375 } |
369 case kArchTailCallCodeObject: { | 376 case kArchTailCallCodeObject: { |
370 int stack_param_delta = i.InputInt32(instr->InputCount() - 1); | 377 int stack_param_delta = i.InputInt32(instr->InputCount() - 1); |
371 AssembleDeconstructActivationRecord(stack_param_delta); | 378 AssembleDeconstructActivationRecord(stack_param_delta); |
372 if (HasImmediateInput(instr, 0)) { | 379 if (HasImmediateInput(instr, 0)) { |
373 Handle<Code> code = Handle<Code>::cast(i.InputHeapObject(0)); | 380 Handle<Code> code = Handle<Code>::cast(i.InputHeapObject(0)); |
374 __ jmp(code, RelocInfo::CODE_TARGET); | 381 __ jmp(code, RelocInfo::CODE_TARGET); |
375 } else { | 382 } else { |
376 Register reg = i.InputRegister(0); | 383 Register reg = i.InputRegister(0); |
377 __ add(reg, Immediate(Code::kHeaderSize - kHeapObjectTag)); | 384 __ add(reg, Immediate(Code::kHeaderSize - kHeapObjectTag)); |
378 __ jmp(reg); | 385 __ jmp(reg); |
379 } | 386 } |
| 387 frame_access_state()->ClearSPDelta(); |
380 break; | 388 break; |
381 } | 389 } |
382 case kArchCallJSFunction: { | 390 case kArchCallJSFunction: { |
383 EnsureSpaceForLazyDeopt(); | 391 EnsureSpaceForLazyDeopt(); |
384 Register func = i.InputRegister(0); | 392 Register func = i.InputRegister(0); |
385 if (FLAG_debug_code) { | 393 if (FLAG_debug_code) { |
386 // Check the function's context matches the context argument. | 394 // Check the function's context matches the context argument. |
387 __ cmp(esi, FieldOperand(func, JSFunction::kContextOffset)); | 395 __ cmp(esi, FieldOperand(func, JSFunction::kContextOffset)); |
388 __ Assert(equal, kWrongFunctionContext); | 396 __ Assert(equal, kWrongFunctionContext); |
389 } | 397 } |
390 __ call(FieldOperand(func, JSFunction::kCodeEntryOffset)); | 398 __ call(FieldOperand(func, JSFunction::kCodeEntryOffset)); |
391 RecordCallPosition(instr); | 399 RecordCallPosition(instr); |
| 400 frame_access_state()->ClearSPDelta(); |
392 break; | 401 break; |
393 } | 402 } |
394 case kArchTailCallJSFunction: { | 403 case kArchTailCallJSFunction: { |
395 Register func = i.InputRegister(0); | 404 Register func = i.InputRegister(0); |
396 if (FLAG_debug_code) { | 405 if (FLAG_debug_code) { |
397 // Check the function's context matches the context argument. | 406 // Check the function's context matches the context argument. |
398 __ cmp(esi, FieldOperand(func, JSFunction::kContextOffset)); | 407 __ cmp(esi, FieldOperand(func, JSFunction::kContextOffset)); |
399 __ Assert(equal, kWrongFunctionContext); | 408 __ Assert(equal, kWrongFunctionContext); |
400 } | 409 } |
401 int stack_param_delta = i.InputInt32(instr->InputCount() - 1); | 410 int stack_param_delta = i.InputInt32(instr->InputCount() - 1); |
402 AssembleDeconstructActivationRecord(stack_param_delta); | 411 AssembleDeconstructActivationRecord(stack_param_delta); |
403 __ jmp(FieldOperand(func, JSFunction::kCodeEntryOffset)); | 412 __ jmp(FieldOperand(func, JSFunction::kCodeEntryOffset)); |
| 413 frame_access_state()->ClearSPDelta(); |
404 break; | 414 break; |
405 } | 415 } |
406 case kArchLazyBailout: { | 416 case kArchLazyBailout: { |
407 EnsureSpaceForLazyDeopt(); | 417 EnsureSpaceForLazyDeopt(); |
408 RecordCallPosition(instr); | 418 RecordCallPosition(instr); |
409 break; | 419 break; |
410 } | 420 } |
411 case kArchPrepareCallCFunction: { | 421 case kArchPrepareCallCFunction: { |
| 422 // Frame alignment requires using FP-relative frame addressing. |
| 423 frame_access_state()->SetFrameAccessToFP(); |
412 int const num_parameters = MiscField::decode(instr->opcode()); | 424 int const num_parameters = MiscField::decode(instr->opcode()); |
413 __ PrepareCallCFunction(num_parameters, i.TempRegister(0)); | 425 __ PrepareCallCFunction(num_parameters, i.TempRegister(0)); |
414 break; | 426 break; |
415 } | 427 } |
416 case kArchPrepareTailCall: | 428 case kArchPrepareTailCall: |
417 AssemblePrepareTailCall(i.InputInt32(instr->InputCount() - 1)); | 429 AssemblePrepareTailCall(i.InputInt32(instr->InputCount() - 1)); |
418 break; | 430 break; |
419 case kArchCallCFunction: { | 431 case kArchCallCFunction: { |
420 int const num_parameters = MiscField::decode(instr->opcode()); | 432 int const num_parameters = MiscField::decode(instr->opcode()); |
421 if (HasImmediateInput(instr, 0)) { | 433 if (HasImmediateInput(instr, 0)) { |
422 ExternalReference ref = i.InputExternalReference(0); | 434 ExternalReference ref = i.InputExternalReference(0); |
423 __ CallCFunction(ref, num_parameters); | 435 __ CallCFunction(ref, num_parameters); |
424 } else { | 436 } else { |
425 Register func = i.InputRegister(0); | 437 Register func = i.InputRegister(0); |
426 __ CallCFunction(func, num_parameters); | 438 __ CallCFunction(func, num_parameters); |
427 } | 439 } |
| 440 frame_access_state()->SetFrameAccessToDefault(); |
| 441 frame_access_state()->ClearSPDelta(); |
428 break; | 442 break; |
429 } | 443 } |
430 case kArchJmp: | 444 case kArchJmp: |
431 AssembleArchJump(i.InputRpo(0)); | 445 AssembleArchJump(i.InputRpo(0)); |
432 break; | 446 break; |
433 case kArchLookupSwitch: | 447 case kArchLookupSwitch: |
434 AssembleArchLookupSwitch(instr); | 448 AssembleArchLookupSwitch(instr); |
435 break; | 449 break; |
436 case kArchTableSwitch: | 450 case kArchTableSwitch: |
437 AssembleArchTableSwitch(instr); | 451 AssembleArchTableSwitch(instr); |
(...skipping 535 matching lines...) |
973 } | 987 } |
974 } else { | 988 } else { |
975 __ lea(i.OutputRegister(), i.MemoryOperand()); | 989 __ lea(i.OutputRegister(), i.MemoryOperand()); |
976 } | 990 } |
977 break; | 991 break; |
978 } | 992 } |
979 case kIA32Push: | 993 case kIA32Push: |
980 if (instr->InputAt(0)->IsDoubleRegister()) { | 994 if (instr->InputAt(0)->IsDoubleRegister()) { |
981 __ sub(esp, Immediate(kDoubleSize)); | 995 __ sub(esp, Immediate(kDoubleSize)); |
982 __ movsd(Operand(esp, 0), i.InputDoubleRegister(0)); | 996 __ movsd(Operand(esp, 0), i.InputDoubleRegister(0)); |
| 997 frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize); |
983 } else if (HasImmediateInput(instr, 0)) { | 998 } else if (HasImmediateInput(instr, 0)) { |
984 __ push(i.InputImmediate(0)); | 999 __ push(i.InputImmediate(0)); |
| 1000 frame_access_state()->IncreaseSPDelta(1); |
985 } else { | 1001 } else { |
986 __ push(i.InputOperand(0)); | 1002 __ push(i.InputOperand(0)); |
| 1003 frame_access_state()->IncreaseSPDelta(1); |
987 } | 1004 } |
988 break; | 1005 break; |
989 case kIA32Poke: { | 1006 case kIA32Poke: { |
990 int const slot = MiscField::decode(instr->opcode()); | 1007 int const slot = MiscField::decode(instr->opcode()); |
991 if (HasImmediateInput(instr, 0)) { | 1008 if (HasImmediateInput(instr, 0)) { |
992 __ mov(Operand(esp, slot * kPointerSize), i.InputImmediate(0)); | 1009 __ mov(Operand(esp, slot * kPointerSize), i.InputImmediate(0)); |
993 } else { | 1010 } else { |
994 __ mov(Operand(esp, slot * kPointerSize), i.InputRegister(0)); | 1011 __ mov(Operand(esp, slot * kPointerSize), i.InputRegister(0)); |
995 } | 1012 } |
996 break; | 1013 break; |
(...skipping 350 matching lines...) |
1347 // | FP | RET | args | caller frame | | 1364 // | FP | RET | args | caller frame | |
1348 // ^ esp,ebp | 1365 // ^ esp,ebp |
1349 | 1366 |
1350 // --{ pop ebp }---------------------------------------------------------------- | 1367 // --{ pop ebp }---------------------------------------------------------------- |
1351 // | RET | args | caller frame | | 1368 // | RET | args | caller frame | |
1352 // ^ esp ^ ebp | 1369 // ^ esp ^ ebp |
1353 | 1370 |
1354 | 1371 |
1355 void CodeGenerator::AssemblePrologue() { | 1372 void CodeGenerator::AssemblePrologue() { |
1356 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); | 1373 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); |
1357 if (descriptor->kind() == CallDescriptor::kCallAddress) { | 1374 if (descriptor->IsCFunctionCall()) { |
1358 // Assemble a prologue similar to the cdecl calling convention. | 1375 // Assemble a prologue similar to the cdecl calling convention. |
1359 __ push(ebp); | 1376 __ push(ebp); |
1360 __ mov(ebp, esp); | 1377 __ mov(ebp, esp); |
1361 } else if (descriptor->IsJSFunctionCall()) { | 1378 } else if (descriptor->IsJSFunctionCall()) { |
1362 // TODO(turbofan): this prologue is redundant with OSR, but needed for | 1379 // TODO(turbofan): this prologue is redundant with OSR, but needed for |
1363 // code aging. | 1380 // code aging. |
1364 CompilationInfo* info = this->info(); | 1381 CompilationInfo* info = this->info(); |
1365 __ Prologue(info->IsCodePreAgingActive()); | 1382 __ Prologue(info->IsCodePreAgingActive()); |
1366 } else if (needs_frame_) { | 1383 } else if (frame()->needs_frame()) { |
1367 __ StubPrologue(); | 1384 __ StubPrologue(); |
1368 } else { | 1385 } else { |
1369 frame()->SetElidedFrameSizeInSlots(kPCOnStackSize / kPointerSize); | 1386 frame()->SetElidedFrameSizeInSlots(kPCOnStackSize / kPointerSize); |
1370 } | 1387 } |
| 1388 frame_access_state()->SetFrameAccessToDefault(); |
1371 | 1389 |
1372 int stack_shrink_slots = frame()->GetSpillSlotCount(); | 1390 int stack_shrink_slots = frame()->GetSpillSlotCount(); |
1373 if (info()->is_osr()) { | 1391 if (info()->is_osr()) { |
1374 // TurboFan OSR-compiled functions cannot be entered directly. | 1392 // TurboFan OSR-compiled functions cannot be entered directly. |
1375 __ Abort(kShouldNotDirectlyEnterOsrFunction); | 1393 __ Abort(kShouldNotDirectlyEnterOsrFunction); |
1376 | 1394 |
1377 // Unoptimized code jumps directly to this entrypoint while the unoptimized | 1395 // Unoptimized code jumps directly to this entrypoint while the unoptimized |
1378 // frame is still on the stack. Optimized code uses OSR values directly from | 1396 // frame is still on the stack. Optimized code uses OSR values directly from |
1379 // the unoptimized frame. Thus, all that needs to be done is to allocate the | 1397 // the unoptimized frame. Thus, all that needs to be done is to allocate the |
1380 // remaining stack slots. | 1398 // remaining stack slots. |
(...skipping 27 matching lines...) |
1408 | 1426 |
1409 const RegList saves = descriptor->CalleeSavedRegisters(); | 1427 const RegList saves = descriptor->CalleeSavedRegisters(); |
1410 // Restore registers. | 1428 // Restore registers. |
1411 if (saves != 0) { | 1429 if (saves != 0) { |
1412 for (int i = 0; i < Register::kNumRegisters; i++) { | 1430 for (int i = 0; i < Register::kNumRegisters; i++) { |
1413 if (!((1 << i) & saves)) continue; | 1431 if (!((1 << i) & saves)) continue; |
1414 __ pop(Register::from_code(i)); | 1432 __ pop(Register::from_code(i)); |
1415 } | 1433 } |
1416 } | 1434 } |
1417 | 1435 |
1418 if (descriptor->kind() == CallDescriptor::kCallAddress) { | 1436 if (descriptor->IsCFunctionCall()) { |
1419 __ mov(esp, ebp); // Move stack pointer back to frame pointer. | 1437 __ mov(esp, ebp); // Move stack pointer back to frame pointer. |
1420 __ pop(ebp); // Pop caller's frame pointer. | 1438 __ pop(ebp); // Pop caller's frame pointer. |
1421 } else if (descriptor->IsJSFunctionCall() || needs_frame_) { | 1439 } else if (frame()->needs_frame()) { |
1422 // Canonicalize JSFunction return sites for now. | 1440 // Canonicalize JSFunction return sites for now. |
1423 if (return_label_.is_bound()) { | 1441 if (return_label_.is_bound()) { |
1424 __ jmp(&return_label_); | 1442 __ jmp(&return_label_); |
1425 return; | 1443 return; |
1426 } else { | 1444 } else { |
1427 __ bind(&return_label_); | 1445 __ bind(&return_label_); |
1428 __ mov(esp, ebp); // Move stack pointer back to frame pointer. | 1446 __ mov(esp, ebp); // Move stack pointer back to frame pointer. |
1429 __ pop(ebp); // Pop caller's frame pointer. | 1447 __ pop(ebp); // Pop caller's frame pointer. |
1430 } | 1448 } |
1431 } | 1449 } |
(...skipping 26 matching lines...) |
1458 __ pop(dst); | 1476 __ pop(dst); |
1459 } | 1477 } |
1460 } else if (source->IsConstant()) { | 1478 } else if (source->IsConstant()) { |
1461 Constant src_constant = g.ToConstant(source); | 1479 Constant src_constant = g.ToConstant(source); |
1462 if (src_constant.type() == Constant::kHeapObject) { | 1480 if (src_constant.type() == Constant::kHeapObject) { |
1463 Handle<HeapObject> src = src_constant.ToHeapObject(); | 1481 Handle<HeapObject> src = src_constant.ToHeapObject(); |
1464 int offset; | 1482 int offset; |
1465 if (IsMaterializableFromFrame(src, &offset)) { | 1483 if (IsMaterializableFromFrame(src, &offset)) { |
1466 if (destination->IsRegister()) { | 1484 if (destination->IsRegister()) { |
1467 Register dst = g.ToRegister(destination); | 1485 Register dst = g.ToRegister(destination); |
1468 __ mov(dst, Operand(ebp, offset)); | 1486 __ mov(dst, g.ToMaterializableOperand(offset)); |
1469 } else { | 1487 } else { |
1470 DCHECK(destination->IsStackSlot()); | 1488 DCHECK(destination->IsStackSlot()); |
1471 Operand dst = g.ToOperand(destination); | 1489 Operand dst = g.ToOperand(destination); |
1472 __ push(Operand(ebp, offset)); | 1490 __ push(g.ToMaterializableOperand(offset)); |
1473 __ pop(dst); | 1491 __ pop(dst); |
1474 } | 1492 } |
1475 } else if (destination->IsRegister()) { | 1493 } else if (destination->IsRegister()) { |
1476 Register dst = g.ToRegister(destination); | 1494 Register dst = g.ToRegister(destination); |
1477 __ LoadHeapObject(dst, src); | 1495 __ LoadHeapObject(dst, src); |
1478 } else { | 1496 } else { |
1479 DCHECK(destination->IsStackSlot()); | 1497 DCHECK(destination->IsStackSlot()); |
1480 Operand dst = g.ToOperand(destination); | 1498 Operand dst = g.ToOperand(destination); |
1481 AllowDeferredHandleDereference embedding_raw_address; | 1499 AllowDeferredHandleDereference embedding_raw_address; |
1482 if (isolate()->heap()->InNewSpace(*src)) { | 1500 if (isolate()->heap()->InNewSpace(*src)) { |
(...skipping 71 matching lines...) |
1554 if (source->IsRegister() && destination->IsRegister()) { | 1572 if (source->IsRegister() && destination->IsRegister()) { |
1555 // Register-register. | 1573 // Register-register. |
1556 Register src = g.ToRegister(source); | 1574 Register src = g.ToRegister(source); |
1557 Register dst = g.ToRegister(destination); | 1575 Register dst = g.ToRegister(destination); |
1558 __ xchg(dst, src); | 1576 __ xchg(dst, src); |
1559 } else if (source->IsRegister() && destination->IsStackSlot()) { | 1577 } else if (source->IsRegister() && destination->IsStackSlot()) { |
1560 // Register-memory. | 1578 // Register-memory. |
1561 __ xchg(g.ToRegister(source), g.ToOperand(destination)); | 1579 __ xchg(g.ToRegister(source), g.ToOperand(destination)); |
1562 } else if (source->IsStackSlot() && destination->IsStackSlot()) { | 1580 } else if (source->IsStackSlot() && destination->IsStackSlot()) { |
1563 // Memory-memory. | 1581 // Memory-memory. |
1564 Operand src = g.ToOperand(source); | 1582 Operand dst1 = g.ToOperand(destination); |
1565 Operand dst = g.ToOperand(destination); | 1583 __ push(dst1); |
1566 __ push(dst); | 1584 frame_access_state()->IncreaseSPDelta(1); |
1567 __ push(src); | 1585 Operand src1 = g.ToOperand(source); |
1568 __ pop(dst); | 1586 __ push(src1); |
1569 __ pop(src); | 1587 Operand dst2 = g.ToOperand(destination); |
| 1588 __ pop(dst2); |
| 1589 frame_access_state()->IncreaseSPDelta(-1); |
| 1590 Operand src2 = g.ToOperand(source); |
| 1591 __ pop(src2); |
1570 } else if (source->IsDoubleRegister() && destination->IsDoubleRegister()) { | 1592 } else if (source->IsDoubleRegister() && destination->IsDoubleRegister()) { |
1571 // XMM register-register swap. | 1593 // XMM register-register swap. |
1572 XMMRegister src = g.ToDoubleRegister(source); | 1594 XMMRegister src = g.ToDoubleRegister(source); |
1573 XMMRegister dst = g.ToDoubleRegister(destination); | 1595 XMMRegister dst = g.ToDoubleRegister(destination); |
1574 __ movaps(kScratchDoubleReg, src); | 1596 __ movaps(kScratchDoubleReg, src); |
1575 __ movaps(src, dst); | 1597 __ movaps(src, dst); |
1576 __ movaps(dst, kScratchDoubleReg); | 1598 __ movaps(dst, kScratchDoubleReg); |
1577 } else if (source->IsDoubleRegister() && destination->IsDoubleStackSlot()) { | 1599 } else if (source->IsDoubleRegister() && destination->IsDoubleStackSlot()) { |
1578 // XMM register-memory swap. | 1600 // XMM register-memory swap. |
1579 XMMRegister reg = g.ToDoubleRegister(source); | 1601 XMMRegister reg = g.ToDoubleRegister(source); |
(...skipping 43 matching lines...) |
1623 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc; | 1645 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc; |
1624 __ Nop(padding_size); | 1646 __ Nop(padding_size); |
1625 } | 1647 } |
1626 } | 1648 } |
1627 | 1649 |
1628 #undef __ | 1650 #undef __ |
1629 | 1651 |
1630 } // namespace compiler | 1652 } // namespace compiler |
1631 } // namespace internal | 1653 } // namespace internal |
1632 } // namespace v8 | 1654 } // namespace v8 |
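
Note on the new frame addressing: the rewrite routes all stack-slot addressing through frame_access_state(), which tracks whether slots are reached through ebp or esp and how far esp has drifted from its post-prologue position. The sketch below is a minimal, illustrative stand-in for that bookkeeping; the method names mirror the calls visible in the diff (SetFrameAccessToDefault/FP/SP, IncreaseSPDelta, ClearSPDelta, GetFrameOffset), but the offset arithmetic and the constructor are assumptions made for illustration, not V8's actual FrameAccessState implementation.

    // Illustrative sketch only; not the real V8 class.
    #include <cassert>

    class FrameAccessStateSketch {
     public:
      FrameAccessStateSketch(int frame_slot_count, bool has_frame)
          : frame_slot_count_(frame_slot_count), has_frame_(has_frame) {}

      // Default: address slots through ebp when a frame was built, esp otherwise.
      void SetFrameAccessToDefault() { via_sp_ = !has_frame_; sp_delta_ = 0; }
      void SetFrameAccessToFP() { via_sp_ = false; }
      void SetFrameAccessToSP() { via_sp_ = true; }

      // Pushes and pops between calls move esp; remembering the drift (in slots)
      // keeps esp-relative slot addressing correct (compare kIA32Push and the
      // memory-memory swap in the diff above).
      void IncreaseSPDelta(int slots) { sp_delta_ += slots; }
      void ClearSPDelta() { sp_delta_ = 0; }

      struct Offset {
        bool from_stack_pointer;
        int offset;  // byte offset to add to esp or ebp
      };

      // Translate a spill-slot index into an esp- or ebp-relative byte offset.
      Offset GetFrameOffset(int spill_slot) const {
        static const int kPointerSize = 4;  // ia32
        if (via_sp_) {
          // Assumed layout: slot 0 sits deepest in the frame, and esp has
          // drifted down by sp_delta_ additional slots.
          int slots_from_sp = frame_slot_count_ + sp_delta_ - spill_slot - 1;
          assert(slots_from_sp >= 0);
          return Offset{true, slots_from_sp * kPointerSize};
        }
        // ebp-relative: spill slots live below the saved ebp / return address
        // (assumed sign convention for this sketch).
        return Offset{false, -(spill_slot + 1) * kPointerSize};
      }

     private:
      int frame_slot_count_;
      bool has_frame_;
      bool via_sp_ = false;
      int sp_delta_ = 0;  // slots esp sits below its post-prologue position
    };

With bookkeeping along these lines, sequences such as AssemblePrepareTailCall and kArchPrepareCallCFunction can switch temporarily to esp- or ebp-relative addressing and push arguments without invalidating spill-slot operands, and ClearSPDelta() after each call reflects that esp has been restored by the call sequence.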