| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 388 matching lines...) |
| 399 // As the then-branch, but move double-value to result before shifting. | 399 // As the then-branch, but move double-value to result before shifting. |
| 400 __ xorl(result, double_value); | 400 __ xorl(result, double_value); |
| 401 __ leal(rcx, Operand(double_exponent, -HeapNumber::kMantissaBits - 1)); | 401 __ leal(rcx, Operand(double_exponent, -HeapNumber::kMantissaBits - 1)); |
| 402 __ shll_cl(result); | 402 __ shll_cl(result); |
| 403 } | 403 } |
| 404 | 404 |
| 405 __ bind(&done); | 405 __ bind(&done); |
| 406 } | 406 } |
| 407 | 407 |
| 408 | 408 |
| 409 Handle<Code> GetTypeRecordingUnaryOpStub(int key, | 409 Handle<Code> GetUnaryOpStub(int key, UnaryOpIC::TypeInfo type_info) { |
| 410 TRUnaryOpIC::TypeInfo type_info) { | 410 UnaryOpStub stub(key, type_info); |
| 411 TypeRecordingUnaryOpStub stub(key, type_info); | |
| 412 return stub.GetCode(); | 411 return stub.GetCode(); |
| 413 } | 412 } |
| 414 | 413 |
| 415 | 414 |
| 416 void TypeRecordingUnaryOpStub::Generate(MacroAssembler* masm) { | 415 void UnaryOpStub::Generate(MacroAssembler* masm) { |
| 417 switch (operand_type_) { | 416 switch (operand_type_) { |
| 418 case TRUnaryOpIC::UNINITIALIZED: | 417 case UnaryOpIC::UNINITIALIZED: |
| 419 GenerateTypeTransition(masm); | 418 GenerateTypeTransition(masm); |
| 420 break; | 419 break; |
| 421 case TRUnaryOpIC::SMI: | 420 case UnaryOpIC::SMI: |
| 422 GenerateSmiStub(masm); | 421 GenerateSmiStub(masm); |
| 423 break; | 422 break; |
| 424 case TRUnaryOpIC::HEAP_NUMBER: | 423 case UnaryOpIC::HEAP_NUMBER: |
| 425 GenerateHeapNumberStub(masm); | 424 GenerateHeapNumberStub(masm); |
| 426 break; | 425 break; |
| 427 case TRUnaryOpIC::GENERIC: | 426 case UnaryOpIC::GENERIC: |
| 428 GenerateGenericStub(masm); | 427 GenerateGenericStub(masm); |
| 429 break; | 428 break; |
| 430 } | 429 } |
| 431 } | 430 } |
| 432 | 431 |
| 433 | 432 |
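The switch above dispatches on the type feedback recorded for the operand, and an IC only ever moves toward more general states (UNINITIALIZED → SMI → HEAP_NUMBER → GENERIC). A minimal C++ model of that lattice — the enum mirrors the cases above, but the `Join` helper is purely illustrative, not V8's actual code:

```cpp
#include <algorithm>
#include <cassert>

// Model of the unary-op IC states dispatched on above. The declaration
// order encodes generality: a state never moves backwards.
enum UnaryOpState { UNINITIALIZED, SMI, HEAP_NUMBER, GENERIC };

// Hypothetical join: after seeing a new operand kind, the IC patches
// itself to the more general of the recorded and observed states.
UnaryOpState Join(UnaryOpState recorded, UnaryOpState observed) {
  return std::max(recorded, observed);
}

int main() {
  assert(Join(SMI, HEAP_NUMBER) == HEAP_NUMBER);  // smi stub saw a double
  assert(Join(GENERIC, SMI) == GENERIC);          // generic never narrows
  return 0;
}
```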
| 434 void TypeRecordingUnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { | 433 void UnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { |
| 435 __ pop(rcx); // Save return address. | 434 __ pop(rcx); // Save return address. |
| 436 __ push(rax); | 435 __ push(rax); |
| 437 // Left and right arguments are now on top. | 436 // The operand is now on top of the stack. |
| 438 // Push this stub's key. Although the operation and the type info are | 437 // Push this stub's key. Although the operation and the type info are |
| 439 // encoded into the key, the encoding is opaque, so push them too. | 438 // encoded into the key, the encoding is opaque, so push them too. |
| 440 __ Push(Smi::FromInt(MinorKey())); | 439 __ Push(Smi::FromInt(MinorKey())); |
| 441 __ Push(Smi::FromInt(op_)); | 440 __ Push(Smi::FromInt(op_)); |
| 442 __ Push(Smi::FromInt(operand_type_)); | 441 __ Push(Smi::FromInt(operand_type_)); |
| 443 | 442 |
| 444 __ push(rcx); // Push return address. | 443 __ push(rcx); // Push return address. |
| 445 | 444 |
| 446 // Patch the caller to an appropriate specialized stub and return the | 445 // Patch the caller to an appropriate specialized stub and return the |
| 447 // operation result to the caller of the stub. | 446 // operation result to the caller of the stub. |
| 448 __ TailCallExternalReference( | 447 __ TailCallExternalReference( |
| 449 ExternalReference(IC_Utility(IC::kTypeRecordingUnaryOp_Patch), | 448 ExternalReference(IC_Utility(IC::kUnaryOp_Patch), |
| 450 masm->isolate()), | 449 masm->isolate()), |
| 451 4, | 450 4, |
| 452 1); | 451 1); |
| 453 } | 452 } |
| 454 | 453 |
| 455 | 454 |
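GenerateTypeTransition needs its extra arguments (the saved operand, stub key, op and type info) to sit below the return address before the tail call, which is why the return address is popped first and re-pushed last. A toy sketch of that stack discipline, with a vector standing in for the machine stack (illustrative only):

```cpp
#include <cstdint>
#include <initializer_list>
#include <vector>

// Toy model of the shuffle in GenerateTypeTransition: pop the return
// address, push the call's arguments, then restore the return address on
// top so the tail-called runtime entry returns straight to our caller.
void PushArgsUnderReturnAddress(std::vector<uint64_t>* stack,
                                std::initializer_list<uint64_t> args) {
  uint64_t return_address = stack->back();          // __ pop(rcx)
  stack->pop_back();
  for (uint64_t arg : args) stack->push_back(arg);  // __ push / __ Push
  stack->push_back(return_address);                 // __ push(rcx)
}
```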
| 456 // TODO(svenpanne): Use virtual functions instead of switch. | 455 // TODO(svenpanne): Use virtual functions instead of switch. |
| 457 void TypeRecordingUnaryOpStub::GenerateSmiStub(MacroAssembler* masm) { | 456 void UnaryOpStub::GenerateSmiStub(MacroAssembler* masm) { |
| 458 switch (op_) { | 457 switch (op_) { |
| 459 case Token::SUB: | 458 case Token::SUB: |
| 460 GenerateSmiStubSub(masm); | 459 GenerateSmiStubSub(masm); |
| 461 break; | 460 break; |
| 462 case Token::BIT_NOT: | 461 case Token::BIT_NOT: |
| 463 GenerateSmiStubBitNot(masm); | 462 GenerateSmiStubBitNot(masm); |
| 464 break; | 463 break; |
| 465 default: | 464 default: |
| 466 UNREACHABLE(); | 465 UNREACHABLE(); |
| 467 } | 466 } |
| 468 } | 467 } |
| 469 | 468 |
| 470 | 469 |
| 471 void TypeRecordingUnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) { | 470 void UnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) { |
| 472 Label slow; | 471 Label slow; |
| 473 GenerateSmiCodeSub(masm, &slow, &slow, Label::kNear, Label::kNear); | 472 GenerateSmiCodeSub(masm, &slow, &slow, Label::kNear, Label::kNear); |
| 474 __ bind(&slow); | 473 __ bind(&slow); |
| 475 GenerateTypeTransition(masm); | 474 GenerateTypeTransition(masm); |
| 476 } | 475 } |
| 477 | 476 |
| 478 | 477 |
| 479 void TypeRecordingUnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) { | 478 void UnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) { |
| 480 Label non_smi; | 479 Label non_smi; |
| 481 GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear); | 480 GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear); |
| 482 __ bind(&non_smi); | 481 __ bind(&non_smi); |
| 483 GenerateTypeTransition(masm); | 482 GenerateTypeTransition(masm); |
| 484 } | 483 } |
| 485 | 484 |
| 486 | 485 |
| 487 void TypeRecordingUnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm, | 486 void UnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm, |
| 488 Label* non_smi, | 487 Label* non_smi, |
| 489 Label* slow, | 488 Label* slow, |
| 490 Label::Distance non_smi_near, | 489 Label::Distance non_smi_near, |
| 491 Label::Distance slow_near) { | 490 Label::Distance slow_near) { |
| 492 Label done; | 491 Label done; |
| 493 __ JumpIfNotSmi(rax, non_smi, non_smi_near); | 492 __ JumpIfNotSmi(rax, non_smi, non_smi_near); |
| 494 __ SmiNeg(rax, rax, &done, Label::kNear); | 493 __ SmiNeg(rax, rax, &done, Label::kNear); |
| 495 __ jmp(slow, slow_near); | 494 __ jmp(slow, slow_near); |
| 496 __ bind(&done); | 495 __ bind(&done); |
| 497 __ ret(0); | 496 __ ret(0); |
| 498 } | 497 } |
| 499 | 498 |
| 500 | 499 |
| 501 void TypeRecordingUnaryOpStub::GenerateSmiCodeBitNot( | 500 void UnaryOpStub::GenerateSmiCodeBitNot(MacroAssembler* masm, |
| 502 MacroAssembler* masm, | 501 Label* non_smi, |
| 503 Label* non_smi, | 502 Label::Distance non_smi_near) { |
| 504 Label::Distance non_smi_near) { | |
| 505 __ JumpIfNotSmi(rax, non_smi, non_smi_near); | 503 __ JumpIfNotSmi(rax, non_smi, non_smi_near); |
| 506 __ SmiNot(rax, rax); | 504 __ SmiNot(rax, rax); |
| 507 __ ret(0); | 505 __ ret(0); |
| 508 } | 506 } |
| 509 | 507 |
| 510 | 508 |
| 511 // TODO(svenpanne): Use virtual functions instead of switch. | 509 // TODO(svenpanne): Use virtual functions instead of switch. |
| 512 void TypeRecordingUnaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { | 510 void UnaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { |
| 513 switch (op_) { | 511 switch (op_) { |
| 514 case Token::SUB: | 512 case Token::SUB: |
| 515 GenerateHeapNumberStubSub(masm); | 513 GenerateHeapNumberStubSub(masm); |
| 516 break; | 514 break; |
| 517 case Token::BIT_NOT: | 515 case Token::BIT_NOT: |
| 518 GenerateHeapNumberStubBitNot(masm); | 516 GenerateHeapNumberStubBitNot(masm); |
| 519 break; | 517 break; |
| 520 default: | 518 default: |
| 521 UNREACHABLE(); | 519 UNREACHABLE(); |
| 522 } | 520 } |
| 523 } | 521 } |
| 524 | 522 |
| 525 | 523 |
| 526 void TypeRecordingUnaryOpStub::GenerateHeapNumberStubSub(MacroAssembler* masm) { | 524 void UnaryOpStub::GenerateHeapNumberStubSub(MacroAssembler* masm) { |
| 527 Label non_smi, slow, call_builtin; | 525 Label non_smi, slow, call_builtin; |
| 528 GenerateSmiCodeSub(masm, &non_smi, &call_builtin, Label::kNear); | 526 GenerateSmiCodeSub(masm, &non_smi, &call_builtin, Label::kNear); |
| 529 __ bind(&non_smi); | 527 __ bind(&non_smi); |
| 530 GenerateHeapNumberCodeSub(masm, &slow); | 528 GenerateHeapNumberCodeSub(masm, &slow); |
| 531 __ bind(&slow); | 529 __ bind(&slow); |
| 532 GenerateTypeTransition(masm); | 530 GenerateTypeTransition(masm); |
| 533 __ bind(&call_builtin); | 531 __ bind(&call_builtin); |
| 534 GenerateGenericCodeFallback(masm); | 532 GenerateGenericCodeFallback(masm); |
| 535 } | 533 } |
| 536 | 534 |
| 537 | 535 |
| 538 void TypeRecordingUnaryOpStub::GenerateHeapNumberStubBitNot( | 536 void UnaryOpStub::GenerateHeapNumberStubBitNot( |
| 539 MacroAssembler* masm) { | 537 MacroAssembler* masm) { |
| 540 Label non_smi, slow; | 538 Label non_smi, slow; |
| 541 GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear); | 539 GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear); |
| 542 __ bind(&non_smi); | 540 __ bind(&non_smi); |
| 543 GenerateHeapNumberCodeBitNot(masm, &slow); | 541 GenerateHeapNumberCodeBitNot(masm, &slow); |
| 544 __ bind(&slow); | 542 __ bind(&slow); |
| 545 GenerateTypeTransition(masm); | 543 GenerateTypeTransition(masm); |
| 546 } | 544 } |
| 547 | 545 |
| 548 | 546 |
| 549 void TypeRecordingUnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm, | 547 void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm, |
| 550 Label* slow) { | 548 Label* slow) { |
| 551 // Check if the operand is a heap number. | 549 // Check if the operand is a heap number. |
| 552 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), | 550 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), |
| 553 Heap::kHeapNumberMapRootIndex); | 551 Heap::kHeapNumberMapRootIndex); |
| 554 __ j(not_equal, slow); | 552 __ j(not_equal, slow); |
| 555 | 553 |
| 556 // Operand is a float, negate its value by flipping the sign bit. | 554 // Operand is a float, negate its value by flipping the sign bit. |
| 557 if (mode_ == UNARY_OVERWRITE) { | 555 if (mode_ == UNARY_OVERWRITE) { |
| 558 __ Set(kScratchRegister, 0x01); | 556 __ Set(kScratchRegister, 0x01); |
| 559 __ shl(kScratchRegister, Immediate(63)); | 557 __ shl(kScratchRegister, Immediate(63)); |
| 560 __ xor_(FieldOperand(rax, HeapNumber::kValueOffset), kScratchRegister); | 558 __ xor_(FieldOperand(rax, HeapNumber::kValueOffset), kScratchRegister); |
| (...skipping 19 matching lines...) |
| 580 __ Set(kScratchRegister, 0x01); | 578 __ Set(kScratchRegister, 0x01); |
| 581 __ shl(kScratchRegister, Immediate(63)); | 579 __ shl(kScratchRegister, Immediate(63)); |
| 582 __ xor_(rdx, kScratchRegister); // Flip sign. | 580 __ xor_(rdx, kScratchRegister); // Flip sign. |
| 583 __ movq(FieldOperand(rcx, HeapNumber::kValueOffset), rdx); | 581 __ movq(FieldOperand(rcx, HeapNumber::kValueOffset), rdx); |
| 584 __ movq(rax, rcx); | 582 __ movq(rax, rcx); |
| 585 } | 583 } |
| 586 __ ret(0); | 584 __ ret(0); |
| 587 } | 585 } |
| 588 | 586 |
| 589 | 587 |
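Negation of a heap number never touches the FPU: the stub builds the constant 1 << 63 and XORs it into the stored double, flipping only the IEEE-754 sign bit. The same trick in portable C++ (memcpy stands in for the raw 64-bit load/store on HeapNumber::kValueOffset):

```cpp
#include <cstdint>
#include <cstring>

// Mirror of the Set(0x01)/shl(63)/xor_ sequence above: negate a double
// by toggling bit 63 of its IEEE-754 bit pattern.
double NegateBySignBit(double value) {
  uint64_t bits;
  std::memcpy(&bits, &value, sizeof(bits));
  bits ^= uint64_t{1} << 63;  // kScratchRegister after Set + shl
  std::memcpy(&value, &bits, sizeof(value));
  return value;
}
// NegateBySignBit(1.5) == -1.5; it also maps -0.0 to 0.0 and flips a
// NaN's sign bit, which is exactly what Token::SUB on a number requires.
```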
| 590 void TypeRecordingUnaryOpStub::GenerateHeapNumberCodeBitNot( | 588 void UnaryOpStub::GenerateHeapNumberCodeBitNot(MacroAssembler* masm, |
| 591 MacroAssembler* masm, | 589 Label* slow) { |
| 592 Label* slow) { | |
| 593 // Check if the operand is a heap number. | 590 // Check if the operand is a heap number. |
| 594 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), | 591 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), |
| 595 Heap::kHeapNumberMapRootIndex); | 592 Heap::kHeapNumberMapRootIndex); |
| 596 __ j(not_equal, slow); | 593 __ j(not_equal, slow); |
| 597 | 594 |
| 598 // Convert the heap number in rax to an untagged integer in rcx. | 595 // Convert the heap number in rax to an untagged integer in rcx. |
| 599 IntegerConvert(masm, rax, rax); | 596 IntegerConvert(masm, rax, rax); |
| 600 | 597 |
| 601 // Do the bitwise operation and smi tag the result. | 598 // Do the bitwise operation and smi tag the result. |
| 602 __ notl(rax); | 599 __ notl(rax); |
| 603 __ Integer32ToSmi(rax, rax); | 600 __ Integer32ToSmi(rax, rax); |
| 604 __ ret(0); | 601 __ ret(0); |
| 605 } | 602 } |
| 606 | 603 |
| 607 | 604 |
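The BIT_NOT path truncates the heap number to a 32-bit integer (IntegerConvert), applies notl, and smi-tags the result. The ECMAScript semantics being implemented, sketched in C++ — this ToInt32 is a simplified stand-in for IntegerConvert, not V8's code path:

```cpp
#include <cmath>
#include <cstdint>

// Simplified ECMA-262 ToInt32: truncate toward zero, wrap modulo 2^32,
// reinterpret as signed. NaN and infinities map to 0.
int32_t ToInt32(double value) {
  if (!std::isfinite(value)) return 0;
  double wrapped = std::fmod(std::trunc(value), 4294967296.0);  // 2^32
  if (wrapped < 0) wrapped += 4294967296.0;
  return static_cast<int32_t>(static_cast<uint32_t>(wrapped));
}

// JavaScript's ~x for a heap-number operand, as generated above:
// IntegerConvert, then notl, then Integer32ToSmi.
int32_t BitNot(double value) { return ~ToInt32(value); }
```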
| 608 // TODO(svenpanne): Use virtual functions instead of switch. | 605 // TODO(svenpanne): Use virtual functions instead of switch. |
| 609 void TypeRecordingUnaryOpStub::GenerateGenericStub(MacroAssembler* masm) { | 606 void UnaryOpStub::GenerateGenericStub(MacroAssembler* masm) { |
| 610 switch (op_) { | 607 switch (op_) { |
| 611 case Token::SUB: | 608 case Token::SUB: |
| 612 GenerateGenericStubSub(masm); | 609 GenerateGenericStubSub(masm); |
| 613 break; | 610 break; |
| 614 case Token::BIT_NOT: | 611 case Token::BIT_NOT: |
| 615 GenerateGenericStubBitNot(masm); | 612 GenerateGenericStubBitNot(masm); |
| 616 break; | 613 break; |
| 617 default: | 614 default: |
| 618 UNREACHABLE(); | 615 UNREACHABLE(); |
| 619 } | 616 } |
| 620 } | 617 } |
| 621 | 618 |
| 622 | 619 |
| 623 void TypeRecordingUnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) { | 620 void UnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) { |
| 624 Label non_smi, slow; | 621 Label non_smi, slow; |
| 625 GenerateSmiCodeSub(masm, &non_smi, &slow, Label::kNear); | 622 GenerateSmiCodeSub(masm, &non_smi, &slow, Label::kNear); |
| 626 __ bind(&non_smi); | 623 __ bind(&non_smi); |
| 627 GenerateHeapNumberCodeSub(masm, &slow); | 624 GenerateHeapNumberCodeSub(masm, &slow); |
| 628 __ bind(&slow); | 625 __ bind(&slow); |
| 629 GenerateGenericCodeFallback(masm); | 626 GenerateGenericCodeFallback(masm); |
| 630 } | 627 } |
| 631 | 628 |
| 632 | 629 |
| 633 void TypeRecordingUnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) { | 630 void UnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) { |
| 634 Label non_smi, slow; | 631 Label non_smi, slow; |
| 635 GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear); | 632 GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear); |
| 636 __ bind(&non_smi); | 633 __ bind(&non_smi); |
| 637 GenerateHeapNumberCodeBitNot(masm, &slow); | 634 GenerateHeapNumberCodeBitNot(masm, &slow); |
| 638 __ bind(&slow); | 635 __ bind(&slow); |
| 639 GenerateGenericCodeFallback(masm); | 636 GenerateGenericCodeFallback(masm); |
| 640 } | 637 } |
| 641 | 638 |
| 642 | 639 |
| 643 void TypeRecordingUnaryOpStub::GenerateGenericCodeFallback( | 640 void UnaryOpStub::GenerateGenericCodeFallback(MacroAssembler* masm) { |
| 644 MacroAssembler* masm) { | |
| 645 // Handle the slow case by jumping to the JavaScript builtin. | 641 // Handle the slow case by jumping to the JavaScript builtin. |
| 646 __ pop(rcx); // pop return address | 642 __ pop(rcx); // pop return address |
| 647 __ push(rax); | 643 __ push(rax); |
| 648 __ push(rcx); // push return address | 644 __ push(rcx); // push return address |
| 649 switch (op_) { | 645 switch (op_) { |
| 650 case Token::SUB: | 646 case Token::SUB: |
| 651 __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION); | 647 __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION); |
| 652 break; | 648 break; |
| 653 case Token::BIT_NOT: | 649 case Token::BIT_NOT: |
| 654 __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION); | 650 __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION); |
| 655 break; | 651 break; |
| 656 default: | 652 default: |
| 657 UNREACHABLE(); | 653 UNREACHABLE(); |
| 658 } | 654 } |
| 659 } | 655 } |
| 660 | 656 |
| 661 | 657 |
| 662 const char* TypeRecordingUnaryOpStub::GetName() { | 658 const char* UnaryOpStub::GetName() { |
| 663 if (name_ != NULL) return name_; | 659 if (name_ != NULL) return name_; |
| 664 const int kMaxNameLength = 100; | 660 const int kMaxNameLength = 100; |
| 665 name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray( | 661 name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray( |
| 666 kMaxNameLength); | 662 kMaxNameLength); |
| 667 if (name_ == NULL) return "OOM"; | 663 if (name_ == NULL) return "OOM"; |
| 668 const char* op_name = Token::Name(op_); | 664 const char* op_name = Token::Name(op_); |
| 669 const char* overwrite_name = NULL; // Make g++ happy. | 665 const char* overwrite_name = NULL; // Make g++ happy. |
| 670 switch (mode_) { | 666 switch (mode_) { |
| 671 case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break; | 667 case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break; |
| 672 case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break; | 668 case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break; |
| 673 } | 669 } |
| 674 | 670 |
| 675 OS::SNPrintF(Vector<char>(name_, kMaxNameLength), | 671 OS::SNPrintF(Vector<char>(name_, kMaxNameLength), |
| 676 "TypeRecordingUnaryOpStub_%s_%s_%s", | 672 "UnaryOpStub_%s_%s_%s", |
| 677 op_name, | 673 op_name, |
| 678 overwrite_name, | 674 overwrite_name, |
| 679 TRUnaryOpIC::GetName(operand_type_)); | 675 UnaryOpIC::GetName(operand_type_)); |
| 680 return name_; | 676 return name_; |
| 681 } | 677 } |
| 682 | 678 |
| 683 | 679 |
| 684 Handle<Code> GetTypeRecordingBinaryOpStub(int key, | 680 Handle<Code> GetBinaryOpStub(int key, |
| 685 TRBinaryOpIC::TypeInfo type_info, | 681 BinaryOpIC::TypeInfo type_info, |
| 686 TRBinaryOpIC::TypeInfo result_type_info) { | 682 BinaryOpIC::TypeInfo result_type_info) { |
| 687 TypeRecordingBinaryOpStub stub(key, type_info, result_type_info); | 683 BinaryOpStub stub(key, type_info, result_type_info); |
| 688 return stub.GetCode(); | 684 return stub.GetCode(); |
| 689 } | 685 } |
| 690 | 686 |
| 691 | 687 |
| 692 void TypeRecordingBinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { | 688 void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { |
| 693 __ pop(rcx); // Save return address. | 689 __ pop(rcx); // Save return address. |
| 694 __ push(rdx); | 690 __ push(rdx); |
| 695 __ push(rax); | 691 __ push(rax); |
| 696 // Left and right arguments are now on top. | 692 // Left and right arguments are now on top. |
| 697 // Push this stub's key. Although the operation and the type info are | 693 // Push this stub's key. Although the operation and the type info are |
| 698 // encoded into the key, the encoding is opaque, so push them too. | 694 // encoded into the key, the encoding is opaque, so push them too. |
| 699 __ Push(Smi::FromInt(MinorKey())); | 695 __ Push(Smi::FromInt(MinorKey())); |
| 700 __ Push(Smi::FromInt(op_)); | 696 __ Push(Smi::FromInt(op_)); |
| 701 __ Push(Smi::FromInt(operands_type_)); | 697 __ Push(Smi::FromInt(operands_type_)); |
| 702 | 698 |
| 703 __ push(rcx); // Push return address. | 699 __ push(rcx); // Push return address. |
| 704 | 700 |
| 705 // Patch the caller to an appropriate specialized stub and return the | 701 // Patch the caller to an appropriate specialized stub and return the |
| 706 // operation result to the caller of the stub. | 702 // operation result to the caller of the stub. |
| 707 __ TailCallExternalReference( | 703 __ TailCallExternalReference( |
| 708 ExternalReference(IC_Utility(IC::kTypeRecordingBinaryOp_Patch), | 704 ExternalReference(IC_Utility(IC::kBinaryOp_Patch), |
| 709 masm->isolate()), | 705 masm->isolate()), |
| 710 5, | 706 5, |
| 711 1); | 707 1); |
| 712 } | 708 } |
| 713 | 709 |
| 714 | 710 |
| 715 void TypeRecordingBinaryOpStub::Generate(MacroAssembler* masm) { | 711 void BinaryOpStub::Generate(MacroAssembler* masm) { |
| 716 switch (operands_type_) { | 712 switch (operands_type_) { |
| 717 case TRBinaryOpIC::UNINITIALIZED: | 713 case BinaryOpIC::UNINITIALIZED: |
| 718 GenerateTypeTransition(masm); | 714 GenerateTypeTransition(masm); |
| 719 break; | 715 break; |
| 720 case TRBinaryOpIC::SMI: | 716 case BinaryOpIC::SMI: |
| 721 GenerateSmiStub(masm); | 717 GenerateSmiStub(masm); |
| 722 break; | 718 break; |
| 723 case TRBinaryOpIC::INT32: | 719 case BinaryOpIC::INT32: |
| 724 UNREACHABLE(); | 720 UNREACHABLE(); |
| 725 // The int32 case is identical to the Smi case. We avoid creating this | 721 // The int32 case is identical to the Smi case. We avoid creating this |
| 726 // ic state on x64. | 722 // ic state on x64. |
| 727 break; | 723 break; |
| 728 case TRBinaryOpIC::HEAP_NUMBER: | 724 case BinaryOpIC::HEAP_NUMBER: |
| 729 GenerateHeapNumberStub(masm); | 725 GenerateHeapNumberStub(masm); |
| 730 break; | 726 break; |
| 731 case TRBinaryOpIC::ODDBALL: | 727 case BinaryOpIC::ODDBALL: |
| 732 GenerateOddballStub(masm); | 728 GenerateOddballStub(masm); |
| 733 break; | 729 break; |
| 734 case TRBinaryOpIC::BOTH_STRING: | 730 case BinaryOpIC::BOTH_STRING: |
| 735 GenerateBothStringStub(masm); | 731 GenerateBothStringStub(masm); |
| 736 break; | 732 break; |
| 737 case TRBinaryOpIC::STRING: | 733 case BinaryOpIC::STRING: |
| 738 GenerateStringStub(masm); | 734 GenerateStringStub(masm); |
| 739 break; | 735 break; |
| 740 case TRBinaryOpIC::GENERIC: | 736 case BinaryOpIC::GENERIC: |
| 741 GenerateGeneric(masm); | 737 GenerateGeneric(masm); |
| 742 break; | 738 break; |
| 743 default: | 739 default: |
| 744 UNREACHABLE(); | 740 UNREACHABLE(); |
| 745 } | 741 } |
| 746 } | 742 } |
| 747 | 743 |
| 748 | 744 |
| 749 const char* TypeRecordingBinaryOpStub::GetName() { | 745 const char* BinaryOpStub::GetName() { |
| 750 if (name_ != NULL) return name_; | 746 if (name_ != NULL) return name_; |
| 751 const int kMaxNameLength = 100; | 747 const int kMaxNameLength = 100; |
| 752 name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray( | 748 name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray( |
| 753 kMaxNameLength); | 749 kMaxNameLength); |
| 754 if (name_ == NULL) return "OOM"; | 750 if (name_ == NULL) return "OOM"; |
| 755 const char* op_name = Token::Name(op_); | 751 const char* op_name = Token::Name(op_); |
| 756 const char* overwrite_name; | 752 const char* overwrite_name; |
| 757 switch (mode_) { | 753 switch (mode_) { |
| 758 case NO_OVERWRITE: overwrite_name = "Alloc"; break; | 754 case NO_OVERWRITE: overwrite_name = "Alloc"; break; |
| 759 case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break; | 755 case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break; |
| 760 case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break; | 756 case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break; |
| 761 default: overwrite_name = "UnknownOverwrite"; break; | 757 default: overwrite_name = "UnknownOverwrite"; break; |
| 762 } | 758 } |
| 763 | 759 |
| 764 OS::SNPrintF(Vector<char>(name_, kMaxNameLength), | 760 OS::SNPrintF(Vector<char>(name_, kMaxNameLength), |
| 765 "TypeRecordingBinaryOpStub_%s_%s_%s", | 761 "BinaryOpStub_%s_%s_%s", |
| 766 op_name, | 762 op_name, |
| 767 overwrite_name, | 763 overwrite_name, |
| 768 TRBinaryOpIC::GetName(operands_type_)); | 764 BinaryOpIC::GetName(operands_type_)); |
| 769 return name_; | 765 return name_; |
| 770 } | 766 } |
| 771 | 767 |
| 772 | 768 |
| 773 void TypeRecordingBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, | 769 void BinaryOpStub::GenerateSmiCode( |
| 770 MacroAssembler* masm, |
| 774 Label* slow, | 771 Label* slow, |
| 775 SmiCodeGenerateHeapNumberResults allow_heapnumber_results) { | 772 SmiCodeGenerateHeapNumberResults allow_heapnumber_results) { |
| 776 | 773 |
| 777 // Arguments to TypeRecordingBinaryOpStub are in rdx and rax. | 774 // Arguments to BinaryOpStub are in rdx and rax. |
| 778 Register left = rdx; | 775 Register left = rdx; |
| 779 Register right = rax; | 776 Register right = rax; |
| 780 | 777 |
| 781 // We only generate heapnumber answers for overflowing calculations | 778 // We only generate heapnumber answers for overflowing calculations |
| 782 // for the four basic arithmetic operations and logical right shift by 0. | 779 // for the four basic arithmetic operations and logical right shift by 0. |
| 783 bool generate_inline_heapnumber_results = | 780 bool generate_inline_heapnumber_results = |
| 784 (allow_heapnumber_results == ALLOW_HEAPNUMBER_RESULTS) && | 781 (allow_heapnumber_results == ALLOW_HEAPNUMBER_RESULTS) && |
| 785 (op_ == Token::ADD || op_ == Token::SUB || | 782 (op_ == Token::ADD || op_ == Token::SUB || |
| 786 op_ == Token::MUL || op_ == Token::DIV || op_ == Token::SHR); | 783 op_ == Token::MUL || op_ == Token::DIV || op_ == Token::SHR); |
| 787 | 784 |
| (...skipping 125 matching lines...) |
| 913 // values that could be smi. | 910 // values that could be smi. |
| 914 __ bind(¬_smis); | 911 __ bind(¬_smis); |
| 915 Comment done_comment(masm, "-- Enter non-smi code"); | 912 Comment done_comment(masm, "-- Enter non-smi code"); |
| 916 FloatingPointHelper::NumbersToSmis(masm, left, right, rbx, rdi, rcx, | 913 FloatingPointHelper::NumbersToSmis(masm, left, right, rbx, rdi, rcx, |
| 917 &smi_values, &fail); | 914 &smi_values, &fail); |
| 918 __ jmp(&smi_values); | 915 __ jmp(&smi_values); |
| 919 __ bind(&fail); | 916 __ bind(&fail); |
| 920 } | 917 } |
| 921 | 918 |
| 922 | 919 |
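As the comment in GenerateSmiCode notes, heap-number results are produced inline only when a smi arithmetic case overflows and the caller passed ALLOW_HEAPNUMBER_RESULTS. A hedged sketch of that policy for ADD (the variant/optional shapes are illustrative, not V8's tagging; __builtin_add_overflow assumes GCC/Clang):

```cpp
#include <cstdint>
#include <optional>
#include <variant>

// Illustrative value shape: a small integer ("smi") or a boxed double
// ("heap number"). Models the policy only, not V8's pointer tagging.
using Value = std::variant<int32_t, double>;

// GenerateSmiCode for Token::ADD: stay on the smi fast path unless the
// addition overflows; on overflow either box a double or bail out to the
// slow label (nullopt), matching NO_HEAPNUMBER_RESULTS.
std::optional<Value> SmiAdd(int32_t left, int32_t right,
                            bool allow_heapnumber_results) {
  int32_t result;
  if (!__builtin_add_overflow(left, right, &result)) {
    return Value{result};  // still a smi
  }
  if (allow_heapnumber_results) {
    return Value{static_cast<double>(left) + static_cast<double>(right)};
  }
  return std::nullopt;  // jump to *slow, e.g. GenerateTypeTransition
}
```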
| 923 void TypeRecordingBinaryOpStub::GenerateFloatingPointCode( | 920 void BinaryOpStub::GenerateFloatingPointCode(MacroAssembler* masm, |
| 924 MacroAssembler* masm, | 921 Label* allocation_failure, |
| 925 Label* allocation_failure, | 922 Label* non_numeric_failure) { |
| 926 Label* non_numeric_failure) { | |
| 927 switch (op_) { | 923 switch (op_) { |
| 928 case Token::ADD: | 924 case Token::ADD: |
| 929 case Token::SUB: | 925 case Token::SUB: |
| 930 case Token::MUL: | 926 case Token::MUL: |
| 931 case Token::DIV: { | 927 case Token::DIV: { |
| 932 FloatingPointHelper::LoadSSE2UnknownOperands(masm, non_numeric_failure); | 928 FloatingPointHelper::LoadSSE2UnknownOperands(masm, non_numeric_failure); |
| 933 | 929 |
| 934 switch (op_) { | 930 switch (op_) { |
| 935 case Token::ADD: __ addsd(xmm0, xmm1); break; | 931 case Token::ADD: __ addsd(xmm0, xmm1); break; |
| 936 case Token::SUB: __ subsd(xmm0, xmm1); break; | 932 case Token::SUB: __ subsd(xmm0, xmm1); break; |
| (...skipping 78 matching lines...) |
| 1015 __ Integer32ToSmi(rdx, rbx); | 1011 __ Integer32ToSmi(rdx, rbx); |
| 1016 __ jmp(allocation_failure); | 1012 __ jmp(allocation_failure); |
| 1017 } | 1013 } |
| 1018 break; | 1014 break; |
| 1019 } | 1015 } |
| 1020 default: UNREACHABLE(); break; | 1016 default: UNREACHABLE(); break; |
| 1021 } | 1017 } |
| 1022 // No fall-through from this generated code. | 1018 // No fall-through from this generated code. |
| 1023 if (FLAG_debug_code) { | 1019 if (FLAG_debug_code) { |
| 1024 __ Abort("Unexpected fall-through in " | 1020 __ Abort("Unexpected fall-through in " |
| 1025 "TypeRecordingBinaryStub::GenerateFloatingPointCode."); | 1021 "BinaryStub::GenerateFloatingPointCode."); |
| 1026 } | 1022 } |
| 1027 } | 1023 } |
| 1028 | 1024 |
| 1029 | 1025 |
| 1030 void TypeRecordingBinaryOpStub::GenerateStringAddCode(MacroAssembler* masm) { | 1026 void BinaryOpStub::GenerateStringAddCode(MacroAssembler* masm) { |
| 1031 ASSERT(op_ == Token::ADD); | 1027 ASSERT(op_ == Token::ADD); |
| 1032 Label left_not_string, call_runtime; | 1028 Label left_not_string, call_runtime; |
| 1033 | 1029 |
| 1034 // Registers containing left and right operands respectively. | 1030 // Registers containing left and right operands respectively. |
| 1035 Register left = rdx; | 1031 Register left = rdx; |
| 1036 Register right = rax; | 1032 Register right = rax; |
| 1037 | 1033 |
| 1038 // Test if left operand is a string. | 1034 // Test if left operand is a string. |
| 1039 __ JumpIfSmi(left, &left_not_string, Label::kNear); | 1035 __ JumpIfSmi(left, &left_not_string, Label::kNear); |
| 1040 __ CmpObjectType(left, FIRST_NONSTRING_TYPE, rcx); | 1036 __ CmpObjectType(left, FIRST_NONSTRING_TYPE, rcx); |
| (...skipping 10 matching lines...) |
| 1051 | 1047 |
| 1052 StringAddStub string_add_right_stub(NO_STRING_CHECK_RIGHT_IN_STUB); | 1048 StringAddStub string_add_right_stub(NO_STRING_CHECK_RIGHT_IN_STUB); |
| 1053 GenerateRegisterArgsPush(masm); | 1049 GenerateRegisterArgsPush(masm); |
| 1054 __ TailCallStub(&string_add_right_stub); | 1050 __ TailCallStub(&string_add_right_stub); |
| 1055 | 1051 |
| 1056 // Neither argument is a string. | 1052 // Neither argument is a string. |
| 1057 __ bind(&call_runtime); | 1053 __ bind(&call_runtime); |
| 1058 } | 1054 } |
| 1059 | 1055 |
| 1060 | 1056 |
| 1061 void TypeRecordingBinaryOpStub::GenerateCallRuntimeCode(MacroAssembler* masm) { | 1057 void BinaryOpStub::GenerateCallRuntimeCode(MacroAssembler* masm) { |
| 1062 GenerateRegisterArgsPush(masm); | 1058 GenerateRegisterArgsPush(masm); |
| 1063 switch (op_) { | 1059 switch (op_) { |
| 1064 case Token::ADD: | 1060 case Token::ADD: |
| 1065 __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION); | 1061 __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION); |
| 1066 break; | 1062 break; |
| 1067 case Token::SUB: | 1063 case Token::SUB: |
| 1068 __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION); | 1064 __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION); |
| 1069 break; | 1065 break; |
| 1070 case Token::MUL: | 1066 case Token::MUL: |
| 1071 __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION); | 1067 __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION); |
| (...skipping 21 matching lines...) |
| 1093 break; | 1089 break; |
| 1094 case Token::SHR: | 1090 case Token::SHR: |
| 1095 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION); | 1091 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION); |
| 1096 break; | 1092 break; |
| 1097 default: | 1093 default: |
| 1098 UNREACHABLE(); | 1094 UNREACHABLE(); |
| 1099 } | 1095 } |
| 1100 } | 1096 } |
| 1101 | 1097 |
| 1102 | 1098 |
| 1103 void TypeRecordingBinaryOpStub::GenerateSmiStub(MacroAssembler* masm) { | 1099 void BinaryOpStub::GenerateSmiStub(MacroAssembler* masm) { |
| 1104 Label call_runtime; | 1100 Label call_runtime; |
| 1105 if (result_type_ == TRBinaryOpIC::UNINITIALIZED || | 1101 if (result_type_ == BinaryOpIC::UNINITIALIZED || |
| 1106 result_type_ == TRBinaryOpIC::SMI) { | 1102 result_type_ == BinaryOpIC::SMI) { |
| 1107 // Only allow smi results. | 1103 // Only allow smi results. |
| 1108 GenerateSmiCode(masm, NULL, NO_HEAPNUMBER_RESULTS); | 1104 GenerateSmiCode(masm, NULL, NO_HEAPNUMBER_RESULTS); |
| 1109 } else { | 1105 } else { |
| 1110 // Allow heap number result and don't make a transition if a heap number | 1106 // Allow heap number result and don't make a transition if a heap number |
| 1111 // cannot be allocated. | 1107 // cannot be allocated. |
| 1112 GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS); | 1108 GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS); |
| 1113 } | 1109 } |
| 1114 | 1110 |
| 1115 // Code falls through if the result is not returned as either a smi or heap | 1111 // Code falls through if the result is not returned as either a smi or heap |
| 1116 // number. | 1112 // number. |
| 1117 GenerateTypeTransition(masm); | 1113 GenerateTypeTransition(masm); |
| 1118 | 1114 |
| 1119 if (call_runtime.is_linked()) { | 1115 if (call_runtime.is_linked()) { |
| 1120 __ bind(&call_runtime); | 1116 __ bind(&call_runtime); |
| 1121 GenerateCallRuntimeCode(masm); | 1117 GenerateCallRuntimeCode(masm); |
| 1122 } | 1118 } |
| 1123 } | 1119 } |
| 1124 | 1120 |
| 1125 | 1121 |
| 1126 void TypeRecordingBinaryOpStub::GenerateStringStub(MacroAssembler* masm) { | 1122 void BinaryOpStub::GenerateStringStub(MacroAssembler* masm) { |
| 1127 ASSERT(operands_type_ == TRBinaryOpIC::STRING); | 1123 ASSERT(operands_type_ == BinaryOpIC::STRING); |
| 1128 ASSERT(op_ == Token::ADD); | 1124 ASSERT(op_ == Token::ADD); |
| 1129 GenerateStringAddCode(masm); | 1125 GenerateStringAddCode(masm); |
| 1130 // Try to add arguments as strings, otherwise, transition to the generic | 1126 // Try to add arguments as strings, otherwise, transition to the generic |
| 1131 // TRBinaryOpIC type. | 1127 // BinaryOpIC type. |
| 1132 GenerateTypeTransition(masm); | 1128 GenerateTypeTransition(masm); |
| 1133 } | 1129 } |
| 1134 | 1130 |
| 1135 | 1131 |
| 1136 void TypeRecordingBinaryOpStub::GenerateBothStringStub(MacroAssembler* masm) { | 1132 void BinaryOpStub::GenerateBothStringStub(MacroAssembler* masm) { |
| 1137 Label call_runtime; | 1133 Label call_runtime; |
| 1138 ASSERT(operands_type_ == TRBinaryOpIC::BOTH_STRING); | 1134 ASSERT(operands_type_ == BinaryOpIC::BOTH_STRING); |
| 1139 ASSERT(op_ == Token::ADD); | 1135 ASSERT(op_ == Token::ADD); |
| 1140 // If both arguments are strings, call the string add stub. | 1136 // If both arguments are strings, call the string add stub. |
| 1141 // Otherwise, do a transition. | 1137 // Otherwise, do a transition. |
| 1142 | 1138 |
| 1143 // Registers containing left and right operands respectively. | 1139 // Registers containing left and right operands respectively. |
| 1144 Register left = rdx; | 1140 Register left = rdx; |
| 1145 Register right = rax; | 1141 Register right = rax; |
| 1146 | 1142 |
| 1147 // Test if left operand is a string. | 1143 // Test if left operand is a string. |
| 1148 __ JumpIfSmi(left, &call_runtime); | 1144 __ JumpIfSmi(left, &call_runtime); |
| 1149 __ CmpObjectType(left, FIRST_NONSTRING_TYPE, rcx); | 1145 __ CmpObjectType(left, FIRST_NONSTRING_TYPE, rcx); |
| 1150 __ j(above_equal, &call_runtime); | 1146 __ j(above_equal, &call_runtime); |
| 1151 | 1147 |
| 1152 // Test if right operand is a string. | 1148 // Test if right operand is a string. |
| 1153 __ JumpIfSmi(right, &call_runtime); | 1149 __ JumpIfSmi(right, &call_runtime); |
| 1154 __ CmpObjectType(right, FIRST_NONSTRING_TYPE, rcx); | 1150 __ CmpObjectType(right, FIRST_NONSTRING_TYPE, rcx); |
| 1155 __ j(above_equal, &call_runtime); | 1151 __ j(above_equal, &call_runtime); |
| 1156 | 1152 |
| 1157 StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB); | 1153 StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB); |
| 1158 GenerateRegisterArgsPush(masm); | 1154 GenerateRegisterArgsPush(masm); |
| 1159 __ TailCallStub(&string_add_stub); | 1155 __ TailCallStub(&string_add_stub); |
| 1160 | 1156 |
| 1161 __ bind(&call_runtime); | 1157 __ bind(&call_runtime); |
| 1162 GenerateTypeTransition(masm); | 1158 GenerateTypeTransition(masm); |
| 1163 } | 1159 } |
| 1164 | 1160 |
| 1165 | 1161 |
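Both operand checks above rely on V8's instance-type encoding: every string type is numerically below FIRST_NONSTRING_TYPE, so a single unsigned compare plus j(above_equal) classifies "not a string". A sketch of the predicate (the constant's value is a placeholder for illustration, not the real one):

```cpp
#include <cstdint>

// In V8's instance-type space, string types sort below the first
// non-string type, so one unsigned compare answers "is this a string?".
constexpr uint8_t kFirstNonstringType = 0x80;  // hypothetical value

bool IsString(bool is_smi, uint8_t instance_type) {
  if (is_smi) return false;                    // __ JumpIfSmi(..., &call_runtime)
  return instance_type < kFirstNonstringType;  // above_equal => bail out
}
```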
| 1166 void TypeRecordingBinaryOpStub::GenerateOddballStub(MacroAssembler* masm) { | 1162 void BinaryOpStub::GenerateOddballStub(MacroAssembler* masm) { |
| 1167 Label call_runtime; | 1163 Label call_runtime; |
| 1168 | 1164 |
| 1169 if (op_ == Token::ADD) { | 1165 if (op_ == Token::ADD) { |
| 1170 // Handle string addition here, because it is the only operation | 1166 // Handle string addition here, because it is the only operation |
| 1171 // that does not do a ToNumber conversion on the operands. | 1167 // that does not do a ToNumber conversion on the operands. |
| 1172 GenerateStringAddCode(masm); | 1168 GenerateStringAddCode(masm); |
| 1173 } | 1169 } |
| 1174 | 1170 |
| 1175 // Convert oddball arguments to numbers. | 1171 // Convert oddball arguments to numbers. |
| 1176 Label check, done; | 1172 Label check, done; |
| (...skipping 12 matching lines...) |
| 1189 __ xor_(rax, rax); | 1185 __ xor_(rax, rax); |
| 1190 } else { | 1186 } else { |
| 1191 __ LoadRoot(rax, Heap::kNanValueRootIndex); | 1187 __ LoadRoot(rax, Heap::kNanValueRootIndex); |
| 1192 } | 1188 } |
| 1193 __ bind(&done); | 1189 __ bind(&done); |
| 1194 | 1190 |
| 1195 GenerateHeapNumberStub(masm); | 1191 GenerateHeapNumberStub(masm); |
| 1196 } | 1192 } |
| 1197 | 1193 |
| 1198 | 1194 |
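GenerateOddballStub pre-converts oddball operands so GenerateHeapNumberStub can treat them as numbers: undefined becomes NaN, except that the truncating bitwise ops use 0 (the xor rax, rax branch), since ToInt32(NaN) is 0 anyway. For reference, ECMAScript's ToNumber on the oddballs, sketched in C++ (illustrative, not V8 code):

```cpp
#include <limits>

enum Oddball { kUndefined, kNull, kTrue, kFalse };

// ECMA-262 ToNumber for oddball values; GenerateOddballStub bakes the
// undefined case into the operand registers before re-dispatching.
double OddballToNumber(Oddball value) {
  switch (value) {
    case kUndefined: return std::numeric_limits<double>::quiet_NaN();
    case kNull:      return 0.0;
    case kTrue:      return 1.0;
    case kFalse:     return 0.0;
  }
  return 0.0;  // unreachable with a valid Oddball
}
```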
| 1199 void TypeRecordingBinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { | 1195 void BinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { |
| 1200 Label gc_required, not_number; | 1196 Label gc_required, not_number; |
| 1201 GenerateFloatingPointCode(masm, &gc_required, ¬_number); | 1197 GenerateFloatingPointCode(masm, &gc_required, ¬_number); |
| 1202 | 1198 |
| 1203 __ bind(¬_number); | 1199 __ bind(¬_number); |
| 1204 GenerateTypeTransition(masm); | 1200 GenerateTypeTransition(masm); |
| 1205 | 1201 |
| 1206 __ bind(&gc_required); | 1202 __ bind(&gc_required); |
| 1207 GenerateCallRuntimeCode(masm); | 1203 GenerateCallRuntimeCode(masm); |
| 1208 } | 1204 } |
| 1209 | 1205 |
| 1210 | 1206 |
| 1211 void TypeRecordingBinaryOpStub::GenerateGeneric(MacroAssembler* masm) { | 1207 void BinaryOpStub::GenerateGeneric(MacroAssembler* masm) { |
| 1212 Label call_runtime, call_string_add_or_runtime; | 1208 Label call_runtime, call_string_add_or_runtime; |
| 1213 | 1209 |
| 1214 GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS); | 1210 GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS); |
| 1215 | 1211 |
| 1216 GenerateFloatingPointCode(masm, &call_runtime, &call_string_add_or_runtime); | 1212 GenerateFloatingPointCode(masm, &call_runtime, &call_string_add_or_runtime); |
| 1217 | 1213 |
| 1218 __ bind(&call_string_add_or_runtime); | 1214 __ bind(&call_string_add_or_runtime); |
| 1219 if (op_ == Token::ADD) { | 1215 if (op_ == Token::ADD) { |
| 1220 GenerateStringAddCode(masm); | 1216 GenerateStringAddCode(masm); |
| 1221 } | 1217 } |
| 1222 | 1218 |
| 1223 __ bind(&call_runtime); | 1219 __ bind(&call_runtime); |
| 1224 GenerateCallRuntimeCode(masm); | 1220 GenerateCallRuntimeCode(masm); |
| 1225 } | 1221 } |
| 1226 | 1222 |
| 1227 | 1223 |
| 1228 void TypeRecordingBinaryOpStub::GenerateHeapResultAllocation( | 1224 void BinaryOpStub::GenerateHeapResultAllocation(MacroAssembler* masm, |
| 1229 MacroAssembler* masm, | 1225 Label* alloc_failure) { |
| 1230 Label* alloc_failure) { | |
| 1231 Label skip_allocation; | 1226 Label skip_allocation; |
| 1232 OverwriteMode mode = mode_; | 1227 OverwriteMode mode = mode_; |
| 1233 switch (mode) { | 1228 switch (mode) { |
| 1234 case OVERWRITE_LEFT: { | 1229 case OVERWRITE_LEFT: { |
| 1235 // If the argument in rdx is already an object, we skip the | 1230 // If the argument in rdx is already an object, we skip the |
| 1236 // allocation of a heap number. | 1231 // allocation of a heap number. |
| 1237 __ JumpIfNotSmi(rdx, &skip_allocation); | 1232 __ JumpIfNotSmi(rdx, &skip_allocation); |
| 1238 // Allocate a heap number for the result. Keep eax and edx intact | 1233 // Allocate a heap number for the result. Keep rax and rdx intact |
| 1239 // for the possible runtime call. | 1234 // for the possible runtime call. |
| 1240 __ AllocateHeapNumber(rbx, rcx, alloc_failure); | 1235 __ AllocateHeapNumber(rbx, rcx, alloc_failure); |
| (...skipping 17 matching lines...) |
| 1258 // Now rax can be overwritten losing one of the arguments as we are | 1253 // Now rax can be overwritten losing one of the arguments as we are |
| 1259 // now done and will not need it any more. | 1254 // now done and will not need it any more. |
| 1260 __ movq(rax, rbx); | 1255 __ movq(rax, rbx); |
| 1261 __ bind(&skip_allocation); | 1256 __ bind(&skip_allocation); |
| 1262 break; | 1257 break; |
| 1263 default: UNREACHABLE(); | 1258 default: UNREACHABLE(); |
| 1264 } | 1259 } |
| 1265 } | 1260 } |
| 1266 | 1261 |
| 1267 | 1262 |
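GenerateHeapResultAllocation avoids allocation where it can: when the overwrite mode marks an operand as dead and that operand is already a heap number, its box is reused for the result. A hedged sketch of the decision (the struct and pointer shapes are illustrative, not V8's representation):

```cpp
// Illustrative decision behind GenerateHeapResultAllocation: reuse the
// overwritable operand's heap-number box when it is not a smi; otherwise
// a fresh heap number must be allocated (alloc_failure handles OOM).
enum OverwriteMode { NO_OVERWRITE, OVERWRITE_LEFT, OVERWRITE_RIGHT };

struct TaggedValue {
  bool is_smi;        // smis are immediates and can never be overwritten
  void* heap_number;  // hypothetical stand-in for the operand's box
};

void* ResultBox(OverwriteMode mode, TaggedValue left, TaggedValue right) {
  if (mode == OVERWRITE_LEFT && !left.is_smi) return left.heap_number;
  if (mode == OVERWRITE_RIGHT && !right.is_smi) return right.heap_number;
  return nullptr;  // caller: __ AllocateHeapNumber(rbx, rcx, alloc_failure)
}
```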
| 1268 void TypeRecordingBinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) { | 1263 void BinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) { |
| 1269 __ pop(rcx); | 1264 __ pop(rcx); |
| 1270 __ push(rdx); | 1265 __ push(rdx); |
| 1271 __ push(rax); | 1266 __ push(rax); |
| 1272 __ push(rcx); | 1267 __ push(rcx); |
| 1273 } | 1268 } |
| 1274 | 1269 |
| 1275 | 1270 |
| 1276 void TranscendentalCacheStub::Generate(MacroAssembler* masm) { | 1271 void TranscendentalCacheStub::Generate(MacroAssembler* masm) { |
| 1277 // TAGGED case: | 1272 // TAGGED case: |
| 1278 // Input: | 1273 // Input: |
| (...skipping 3845 matching lines...) |
| 5124 __ Drop(1); | 5119 __ Drop(1); |
| 5125 __ ret(2 * kPointerSize); | 5120 __ ret(2 * kPointerSize); |
| 5126 } | 5121 } |
| 5127 | 5122 |
| 5128 | 5123 |
| 5129 #undef __ | 5124 #undef __ |
| 5130 | 5125 |
| 5131 } } // namespace v8::internal | 5126 } } // namespace v8::internal |
| 5132 | 5127 |
| 5133 #endif // V8_TARGET_ARCH_X64 | 5128 #endif // V8_TARGET_ARCH_X64 |