OLD | NEW |
---|---|
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 388 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
399 // As the then-branch, but move double-value to result before shifting. | 399 // As the then-branch, but move double-value to result before shifting. |
400 __ xorl(result, double_value); | 400 __ xorl(result, double_value); |
401 __ leal(rcx, Operand(double_exponent, -HeapNumber::kMantissaBits - 1)); | 401 __ leal(rcx, Operand(double_exponent, -HeapNumber::kMantissaBits - 1)); |
402 __ shll_cl(result); | 402 __ shll_cl(result); |
403 } | 403 } |
404 | 404 |
405 __ bind(&done); | 405 __ bind(&done); |
406 } | 406 } |
407 | 407 |
408 | 408 |
409 Handle<Code> GetTypeRecordingUnaryOpStub(int key, | 409 Handle<Code> GetUnaryOpStub(int key, |
Søren Thygesen Gjesse
2011/05/24 11:33:11
Indentation.
fschneider
2011/05/24 12:16:41
Done.
| |
410 TRUnaryOpIC::TypeInfo type_info) { | 410 UnaryOpIC::TypeInfo type_info) { |
411 TypeRecordingUnaryOpStub stub(key, type_info); | 411 UnaryOpStub stub(key, type_info); |
412 return stub.GetCode(); | 412 return stub.GetCode(); |
413 } | 413 } |
414 | 414 |
415 | 415 |
416 void TypeRecordingUnaryOpStub::Generate(MacroAssembler* masm) { | 416 void UnaryOpStub::Generate(MacroAssembler* masm) { |
417 switch (operand_type_) { | 417 switch (operand_type_) { |
418 case TRUnaryOpIC::UNINITIALIZED: | 418 case UnaryOpIC::UNINITIALIZED: |
419 GenerateTypeTransition(masm); | 419 GenerateTypeTransition(masm); |
420 break; | 420 break; |
421 case TRUnaryOpIC::SMI: | 421 case UnaryOpIC::SMI: |
422 GenerateSmiStub(masm); | 422 GenerateSmiStub(masm); |
423 break; | 423 break; |
424 case TRUnaryOpIC::HEAP_NUMBER: | 424 case UnaryOpIC::HEAP_NUMBER: |
425 GenerateHeapNumberStub(masm); | 425 GenerateHeapNumberStub(masm); |
426 break; | 426 break; |
427 case TRUnaryOpIC::GENERIC: | 427 case UnaryOpIC::GENERIC: |
428 GenerateGenericStub(masm); | 428 GenerateGenericStub(masm); |
429 break; | 429 break; |
430 } | 430 } |
431 } | 431 } |
432 | 432 |
433 | 433 |
434 void TypeRecordingUnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { | 434 void UnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { |
435 __ pop(rcx); // Save return address. | 435 __ pop(rcx); // Save return address. |
436 __ push(rax); | 436 __ push(rax); |
437 // Left and right arguments are now on top. | 437 // Left and right arguments are now on top. |
438 // Push this stub's key. Although the operation and the type info are | 438 // Push this stub's key. Although the operation and the type info are |
439 // encoded into the key, the encoding is opaque, so push them too. | 439 // encoded into the key, the encoding is opaque, so push them too. |
440 __ Push(Smi::FromInt(MinorKey())); | 440 __ Push(Smi::FromInt(MinorKey())); |
441 __ Push(Smi::FromInt(op_)); | 441 __ Push(Smi::FromInt(op_)); |
442 __ Push(Smi::FromInt(operand_type_)); | 442 __ Push(Smi::FromInt(operand_type_)); |
443 | 443 |
444 __ push(rcx); // Push return address. | 444 __ push(rcx); // Push return address. |
445 | 445 |
446 // Patch the caller to an appropriate specialized stub and return the | 446 // Patch the caller to an appropriate specialized stub and return the |
447 // operation result to the caller of the stub. | 447 // operation result to the caller of the stub. |
448 __ TailCallExternalReference( | 448 __ TailCallExternalReference( |
449 ExternalReference(IC_Utility(IC::kTypeRecordingUnaryOp_Patch), | 449 ExternalReference(IC_Utility(IC::kUnaryOp_Patch), |
450 masm->isolate()), | 450 masm->isolate()), |
451 4, | 451 4, |
452 1); | 452 1); |
453 } | 453 } |
454 | 454 |
455 | 455 |
456 // TODO(svenpanne): Use virtual functions instead of switch. | 456 // TODO(svenpanne): Use virtual functions instead of switch. |
457 void TypeRecordingUnaryOpStub::GenerateSmiStub(MacroAssembler* masm) { | 457 void UnaryOpStub::GenerateSmiStub(MacroAssembler* masm) { |
458 switch (op_) { | 458 switch (op_) { |
459 case Token::SUB: | 459 case Token::SUB: |
460 GenerateSmiStubSub(masm); | 460 GenerateSmiStubSub(masm); |
461 break; | 461 break; |
462 case Token::BIT_NOT: | 462 case Token::BIT_NOT: |
463 GenerateSmiStubBitNot(masm); | 463 GenerateSmiStubBitNot(masm); |
464 break; | 464 break; |
465 default: | 465 default: |
466 UNREACHABLE(); | 466 UNREACHABLE(); |
467 } | 467 } |
468 } | 468 } |
469 | 469 |
470 | 470 |
471 void TypeRecordingUnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) { | 471 void UnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) { |
472 Label slow; | 472 Label slow; |
473 GenerateSmiCodeSub(masm, &slow, &slow, Label::kNear, Label::kNear); | 473 GenerateSmiCodeSub(masm, &slow, &slow, Label::kNear, Label::kNear); |
474 __ bind(&slow); | 474 __ bind(&slow); |
475 GenerateTypeTransition(masm); | 475 GenerateTypeTransition(masm); |
476 } | 476 } |
477 | 477 |
478 | 478 |
479 void TypeRecordingUnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) { | 479 void UnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) { |
480 Label non_smi; | 480 Label non_smi; |
481 GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear); | 481 GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear); |
482 __ bind(&non_smi); | 482 __ bind(&non_smi); |
483 GenerateTypeTransition(masm); | 483 GenerateTypeTransition(masm); |
484 } | 484 } |
485 | 485 |
486 | 486 |
487 void TypeRecordingUnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm, | 487 void UnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm, |
Søren Thygesen Gjesse
2011/05/24 11:33:11
Indentation.
fschneider
2011/05/24 12:16:41
Done.
| |
488 Label* non_smi, | 488 Label* non_smi, |
489 Label* slow, | 489 Label* slow, |
490 Label::Distance non_smi_near, | 490 Label::Distance non_smi_near, |
491 Label::Distance slow_near) { | 491 Label::Distance slow_near) { |
492 Label done; | 492 Label done; |
493 __ JumpIfNotSmi(rax, non_smi, non_smi_near); | 493 __ JumpIfNotSmi(rax, non_smi, non_smi_near); |
494 __ SmiNeg(rax, rax, &done, Label::kNear); | 494 __ SmiNeg(rax, rax, &done, Label::kNear); |
495 __ jmp(slow, slow_near); | 495 __ jmp(slow, slow_near); |
496 __ bind(&done); | 496 __ bind(&done); |
497 __ ret(0); | 497 __ ret(0); |
498 } | 498 } |
499 | 499 |
500 | 500 |
501 void TypeRecordingUnaryOpStub::GenerateSmiCodeBitNot( | 501 void UnaryOpStub::GenerateSmiCodeBitNot( |
Søren Thygesen Gjesse
2011/05/24 11:33:11
Fits full indentation?
fschneider
2011/05/24 12:16:41
Done.
| |
502 MacroAssembler* masm, | 502 MacroAssembler* masm, |
503 Label* non_smi, | 503 Label* non_smi, |
504 Label::Distance non_smi_near) { | 504 Label::Distance non_smi_near) { |
505 __ JumpIfNotSmi(rax, non_smi, non_smi_near); | 505 __ JumpIfNotSmi(rax, non_smi, non_smi_near); |
506 __ SmiNot(rax, rax); | 506 __ SmiNot(rax, rax); |
507 __ ret(0); | 507 __ ret(0); |
508 } | 508 } |
509 | 509 |
510 | 510 |
511 // TODO(svenpanne): Use virtual functions instead of switch. | 511 // TODO(svenpanne): Use virtual functions instead of switch. |
512 void TypeRecordingUnaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { | 512 void UnaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { |
513 switch (op_) { | 513 switch (op_) { |
514 case Token::SUB: | 514 case Token::SUB: |
515 GenerateHeapNumberStubSub(masm); | 515 GenerateHeapNumberStubSub(masm); |
516 break; | 516 break; |
517 case Token::BIT_NOT: | 517 case Token::BIT_NOT: |
518 GenerateHeapNumberStubBitNot(masm); | 518 GenerateHeapNumberStubBitNot(masm); |
519 break; | 519 break; |
520 default: | 520 default: |
521 UNREACHABLE(); | 521 UNREACHABLE(); |
522 } | 522 } |
523 } | 523 } |
524 | 524 |
525 | 525 |
526 void TypeRecordingUnaryOpStub::GenerateHeapNumberStubSub(MacroAssembler* masm) { | 526 void UnaryOpStub::GenerateHeapNumberStubSub(MacroAssembler* masm) { |
527 Label non_smi, slow, call_builtin; | 527 Label non_smi, slow, call_builtin; |
528 GenerateSmiCodeSub(masm, &non_smi, &call_builtin, Label::kNear); | 528 GenerateSmiCodeSub(masm, &non_smi, &call_builtin, Label::kNear); |
529 __ bind(&non_smi); | 529 __ bind(&non_smi); |
530 GenerateHeapNumberCodeSub(masm, &slow); | 530 GenerateHeapNumberCodeSub(masm, &slow); |
531 __ bind(&slow); | 531 __ bind(&slow); |
532 GenerateTypeTransition(masm); | 532 GenerateTypeTransition(masm); |
533 __ bind(&call_builtin); | 533 __ bind(&call_builtin); |
534 GenerateGenericCodeFallback(masm); | 534 GenerateGenericCodeFallback(masm); |
535 } | 535 } |
536 | 536 |
537 | 537 |
538 void TypeRecordingUnaryOpStub::GenerateHeapNumberStubBitNot( | 538 void UnaryOpStub::GenerateHeapNumberStubBitNot( |
539 MacroAssembler* masm) { | 539 MacroAssembler* masm) { |
540 Label non_smi, slow; | 540 Label non_smi, slow; |
541 GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear); | 541 GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear); |
542 __ bind(&non_smi); | 542 __ bind(&non_smi); |
543 GenerateHeapNumberCodeBitNot(masm, &slow); | 543 GenerateHeapNumberCodeBitNot(masm, &slow); |
544 __ bind(&slow); | 544 __ bind(&slow); |
545 GenerateTypeTransition(masm); | 545 GenerateTypeTransition(masm); |
546 } | 546 } |
547 | 547 |
548 | 548 |
549 void TypeRecordingUnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm, | 549 void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm, |
Søren Thygesen Gjesse
2011/05/24 11:33:11
Indentation.
fschneider
2011/05/24 12:16:41
Done.
| |
550 Label* slow) { | 550 Label* slow) { |
551 // Check if the operand is a heap number. | 551 // Check if the operand is a heap number. |
552 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), | 552 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), |
553 Heap::kHeapNumberMapRootIndex); | 553 Heap::kHeapNumberMapRootIndex); |
554 __ j(not_equal, slow); | 554 __ j(not_equal, slow); |
555 | 555 |
556 // Operand is a float, negate its value by flipping the sign bit. | 556 // Operand is a float, negate its value by flipping the sign bit. |
557 if (mode_ == UNARY_OVERWRITE) { | 557 if (mode_ == UNARY_OVERWRITE) { |
558 __ Set(kScratchRegister, 0x01); | 558 __ Set(kScratchRegister, 0x01); |
559 __ shl(kScratchRegister, Immediate(63)); | 559 __ shl(kScratchRegister, Immediate(63)); |
(...skipping 20 matching lines...) Expand all Loading... | |
580 __ Set(kScratchRegister, 0x01); | 580 __ Set(kScratchRegister, 0x01); |
581 __ shl(kScratchRegister, Immediate(63)); | 581 __ shl(kScratchRegister, Immediate(63)); |
582 __ xor_(rdx, kScratchRegister); // Flip sign. | 582 __ xor_(rdx, kScratchRegister); // Flip sign. |
583 __ movq(FieldOperand(rcx, HeapNumber::kValueOffset), rdx); | 583 __ movq(FieldOperand(rcx, HeapNumber::kValueOffset), rdx); |
584 __ movq(rax, rcx); | 584 __ movq(rax, rcx); |
585 } | 585 } |
586 __ ret(0); | 586 __ ret(0); |
587 } | 587 } |
588 | 588 |
589 | 589 |
590 void TypeRecordingUnaryOpStub::GenerateHeapNumberCodeBitNot( | 590 void UnaryOpStub::GenerateHeapNumberCodeBitNot( |
Søren Thygesen Gjesse
2011/05/24 11:33:11
Fits full indentation?
fschneider
2011/05/24 12:16:41
Done.
| |
591 MacroAssembler* masm, | 591 MacroAssembler* masm, |
592 Label* slow) { | 592 Label* slow) { |
593 // Check if the operand is a heap number. | 593 // Check if the operand is a heap number. |
594 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), | 594 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), |
595 Heap::kHeapNumberMapRootIndex); | 595 Heap::kHeapNumberMapRootIndex); |
596 __ j(not_equal, slow); | 596 __ j(not_equal, slow); |
597 | 597 |
598 // Convert the heap number in rax to an untagged integer in rcx. | 598 // Convert the heap number in rax to an untagged integer in rcx. |
599 IntegerConvert(masm, rax, rax); | 599 IntegerConvert(masm, rax, rax); |
600 | 600 |
601 // Do the bitwise operation and smi tag the result. | 601 // Do the bitwise operation and smi tag the result. |
602 __ notl(rax); | 602 __ notl(rax); |
603 __ Integer32ToSmi(rax, rax); | 603 __ Integer32ToSmi(rax, rax); |
604 __ ret(0); | 604 __ ret(0); |
605 } | 605 } |
606 | 606 |
607 | 607 |
608 // TODO(svenpanne): Use virtual functions instead of switch. | 608 // TODO(svenpanne): Use virtual functions instead of switch. |
609 void TypeRecordingUnaryOpStub::GenerateGenericStub(MacroAssembler* masm) { | 609 void UnaryOpStub::GenerateGenericStub(MacroAssembler* masm) { |
610 switch (op_) { | 610 switch (op_) { |
611 case Token::SUB: | 611 case Token::SUB: |
612 GenerateGenericStubSub(masm); | 612 GenerateGenericStubSub(masm); |
613 break; | 613 break; |
614 case Token::BIT_NOT: | 614 case Token::BIT_NOT: |
615 GenerateGenericStubBitNot(masm); | 615 GenerateGenericStubBitNot(masm); |
616 break; | 616 break; |
617 default: | 617 default: |
618 UNREACHABLE(); | 618 UNREACHABLE(); |
619 } | 619 } |
620 } | 620 } |
621 | 621 |
622 | 622 |
623 void TypeRecordingUnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) { | 623 void UnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) { |
624 Label non_smi, slow; | 624 Label non_smi, slow; |
625 GenerateSmiCodeSub(masm, &non_smi, &slow, Label::kNear); | 625 GenerateSmiCodeSub(masm, &non_smi, &slow, Label::kNear); |
626 __ bind(&non_smi); | 626 __ bind(&non_smi); |
627 GenerateHeapNumberCodeSub(masm, &slow); | 627 GenerateHeapNumberCodeSub(masm, &slow); |
628 __ bind(&slow); | 628 __ bind(&slow); |
629 GenerateGenericCodeFallback(masm); | 629 GenerateGenericCodeFallback(masm); |
630 } | 630 } |
631 | 631 |
632 | 632 |
633 void TypeRecordingUnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) { | 633 void UnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) { |
634 Label non_smi, slow; | 634 Label non_smi, slow; |
635 GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear); | 635 GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear); |
636 __ bind(&non_smi); | 636 __ bind(&non_smi); |
637 GenerateHeapNumberCodeBitNot(masm, &slow); | 637 GenerateHeapNumberCodeBitNot(masm, &slow); |
638 __ bind(&slow); | 638 __ bind(&slow); |
639 GenerateGenericCodeFallback(masm); | 639 GenerateGenericCodeFallback(masm); |
640 } | 640 } |
641 | 641 |
642 | 642 |
643 void TypeRecordingUnaryOpStub::GenerateGenericCodeFallback( | 643 void UnaryOpStub::GenerateGenericCodeFallback( |
Søren Thygesen Gjesse
2011/05/24 11:33:11
Fits one line?
fschneider
2011/05/24 12:16:41
Done.
| |
644 MacroAssembler* masm) { | 644 MacroAssembler* masm) { |
645 // Handle the slow case by jumping to the JavaScript builtin. | 645 // Handle the slow case by jumping to the JavaScript builtin. |
646 __ pop(rcx); // pop return address | 646 __ pop(rcx); // pop return address |
647 __ push(rax); | 647 __ push(rax); |
648 __ push(rcx); // push return address | 648 __ push(rcx); // push return address |
649 switch (op_) { | 649 switch (op_) { |
650 case Token::SUB: | 650 case Token::SUB: |
651 __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION); | 651 __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION); |
652 break; | 652 break; |
653 case Token::BIT_NOT: | 653 case Token::BIT_NOT: |
654 __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION); | 654 __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION); |
655 break; | 655 break; |
656 default: | 656 default: |
657 UNREACHABLE(); | 657 UNREACHABLE(); |
658 } | 658 } |
659 } | 659 } |
660 | 660 |
661 | 661 |
662 const char* TypeRecordingUnaryOpStub::GetName() { | 662 const char* UnaryOpStub::GetName() { |
663 if (name_ != NULL) return name_; | 663 if (name_ != NULL) return name_; |
664 const int kMaxNameLength = 100; | 664 const int kMaxNameLength = 100; |
665 name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray( | 665 name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray( |
666 kMaxNameLength); | 666 kMaxNameLength); |
667 if (name_ == NULL) return "OOM"; | 667 if (name_ == NULL) return "OOM"; |
668 const char* op_name = Token::Name(op_); | 668 const char* op_name = Token::Name(op_); |
669 const char* overwrite_name = NULL; // Make g++ happy. | 669 const char* overwrite_name = NULL; // Make g++ happy. |
670 switch (mode_) { | 670 switch (mode_) { |
671 case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break; | 671 case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break; |
672 case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break; | 672 case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break; |
673 } | 673 } |
674 | 674 |
675 OS::SNPrintF(Vector<char>(name_, kMaxNameLength), | 675 OS::SNPrintF(Vector<char>(name_, kMaxNameLength), |
676 "TypeRecordingUnaryOpStub_%s_%s_%s", | 676 "UnaryOpStub_%s_%s_%s", |
677 op_name, | 677 op_name, |
678 overwrite_name, | 678 overwrite_name, |
679 TRUnaryOpIC::GetName(operand_type_)); | 679 UnaryOpIC::GetName(operand_type_)); |
680 return name_; | 680 return name_; |
681 } | 681 } |
682 | 682 |
683 | 683 |
684 Handle<Code> GetTypeRecordingBinaryOpStub(int key, | 684 Handle<Code> GetBinaryOpStub(int key, |
Søren Thygesen Gjesse
2011/05/24 11:33:11
Full indentation or int key on separate line.
fschneider
2011/05/24 12:16:41
Done.
| |
685 TRBinaryOpIC::TypeInfo type_info, | 685 BinaryOpIC::TypeInfo type_info, |
686 TRBinaryOpIC::TypeInfo result_type_info) { | 686 BinaryOpIC::TypeInfo result_type_info) { |
687 TypeRecordingBinaryOpStub stub(key, type_info, result_type_info); | 687 BinaryOpStub stub(key, type_info, result_type_info); |
688 return stub.GetCode(); | 688 return stub.GetCode(); |
689 } | 689 } |
690 | 690 |
691 | 691 |
692 void TypeRecordingBinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { | 692 void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { |
693 __ pop(rcx); // Save return address. | 693 __ pop(rcx); // Save return address. |
694 __ push(rdx); | 694 __ push(rdx); |
695 __ push(rax); | 695 __ push(rax); |
696 // Left and right arguments are now on top. | 696 // Left and right arguments are now on top. |
697 // Push this stub's key. Although the operation and the type info are | 697 // Push this stub's key. Although the operation and the type info are |
698 // encoded into the key, the encoding is opaque, so push them too. | 698 // encoded into the key, the encoding is opaque, so push them too. |
699 __ Push(Smi::FromInt(MinorKey())); | 699 __ Push(Smi::FromInt(MinorKey())); |
700 __ Push(Smi::FromInt(op_)); | 700 __ Push(Smi::FromInt(op_)); |
701 __ Push(Smi::FromInt(operands_type_)); | 701 __ Push(Smi::FromInt(operands_type_)); |
702 | 702 |
703 __ push(rcx); // Push return address. | 703 __ push(rcx); // Push return address. |
704 | 704 |
705 // Patch the caller to an appropriate specialized stub and return the | 705 // Patch the caller to an appropriate specialized stub and return the |
706 // operation result to the caller of the stub. | 706 // operation result to the caller of the stub. |
707 __ TailCallExternalReference( | 707 __ TailCallExternalReference( |
708 ExternalReference(IC_Utility(IC::kTypeRecordingBinaryOp_Patch), | 708 ExternalReference(IC_Utility(IC::kBinaryOp_Patch), |
709 masm->isolate()), | 709 masm->isolate()), |
710 5, | 710 5, |
711 1); | 711 1); |
712 } | 712 } |
713 | 713 |
714 | 714 |
715 void TypeRecordingBinaryOpStub::Generate(MacroAssembler* masm) { | 715 void BinaryOpStub::Generate(MacroAssembler* masm) { |
716 switch (operands_type_) { | 716 switch (operands_type_) { |
717 case TRBinaryOpIC::UNINITIALIZED: | 717 case BinaryOpIC::UNINITIALIZED: |
718 GenerateTypeTransition(masm); | 718 GenerateTypeTransition(masm); |
719 break; | 719 break; |
720 case TRBinaryOpIC::SMI: | 720 case BinaryOpIC::SMI: |
721 GenerateSmiStub(masm); | 721 GenerateSmiStub(masm); |
722 break; | 722 break; |
723 case TRBinaryOpIC::INT32: | 723 case BinaryOpIC::INT32: |
724 UNREACHABLE(); | 724 UNREACHABLE(); |
725 // The int32 case is identical to the Smi case. We avoid creating this | 725 // The int32 case is identical to the Smi case. We avoid creating this |
726 // ic state on x64. | 726 // ic state on x64. |
727 break; | 727 break; |
728 case TRBinaryOpIC::HEAP_NUMBER: | 728 case BinaryOpIC::HEAP_NUMBER: |
729 GenerateHeapNumberStub(masm); | 729 GenerateHeapNumberStub(masm); |
730 break; | 730 break; |
731 case TRBinaryOpIC::ODDBALL: | 731 case BinaryOpIC::ODDBALL: |
732 GenerateOddballStub(masm); | 732 GenerateOddballStub(masm); |
733 break; | 733 break; |
734 case TRBinaryOpIC::BOTH_STRING: | 734 case BinaryOpIC::BOTH_STRING: |
735 GenerateBothStringStub(masm); | 735 GenerateBothStringStub(masm); |
736 break; | 736 break; |
737 case TRBinaryOpIC::STRING: | 737 case BinaryOpIC::STRING: |
738 GenerateStringStub(masm); | 738 GenerateStringStub(masm); |
739 break; | 739 break; |
740 case TRBinaryOpIC::GENERIC: | 740 case BinaryOpIC::GENERIC: |
741 GenerateGeneric(masm); | 741 GenerateGeneric(masm); |
742 break; | 742 break; |
743 default: | 743 default: |
744 UNREACHABLE(); | 744 UNREACHABLE(); |
745 } | 745 } |
746 } | 746 } |
747 | 747 |
748 | 748 |
749 const char* TypeRecordingBinaryOpStub::GetName() { | 749 const char* BinaryOpStub::GetName() { |
750 if (name_ != NULL) return name_; | 750 if (name_ != NULL) return name_; |
751 const int kMaxNameLength = 100; | 751 const int kMaxNameLength = 100; |
752 name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray( | 752 name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray( |
753 kMaxNameLength); | 753 kMaxNameLength); |
754 if (name_ == NULL) return "OOM"; | 754 if (name_ == NULL) return "OOM"; |
755 const char* op_name = Token::Name(op_); | 755 const char* op_name = Token::Name(op_); |
756 const char* overwrite_name; | 756 const char* overwrite_name; |
757 switch (mode_) { | 757 switch (mode_) { |
758 case NO_OVERWRITE: overwrite_name = "Alloc"; break; | 758 case NO_OVERWRITE: overwrite_name = "Alloc"; break; |
759 case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break; | 759 case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break; |
760 case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break; | 760 case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break; |
761 default: overwrite_name = "UnknownOverwrite"; break; | 761 default: overwrite_name = "UnknownOverwrite"; break; |
762 } | 762 } |
763 | 763 |
764 OS::SNPrintF(Vector<char>(name_, kMaxNameLength), | 764 OS::SNPrintF(Vector<char>(name_, kMaxNameLength), |
765 "TypeRecordingBinaryOpStub_%s_%s_%s", | 765 "BinaryOpStub_%s_%s_%s", |
766 op_name, | 766 op_name, |
767 overwrite_name, | 767 overwrite_name, |
768 TRBinaryOpIC::GetName(operands_type_)); | 768 BinaryOpIC::GetName(operands_type_)); |
769 return name_; | 769 return name_; |
770 } | 770 } |
771 | 771 |
772 | 772 |
773 void TypeRecordingBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, | 773 void BinaryOpStub::GenerateSmiCode(MacroAssembler* masm, |
Søren Thygesen Gjesse
2011/05/24 11:33:11
MacroAssembler* masm, on separate line.
fschneider
2011/05/24 12:16:41
Done.
| |
774 Label* slow, | 774 Label* slow, |
775 SmiCodeGenerateHeapNumberResults allow_heapnumber_results) { | 775 SmiCodeGenerateHeapNumberResults allow_heapnumber_results) { |
776 | 776 |
777 // Arguments to TypeRecordingBinaryOpStub are in rdx and rax. | 777 // Arguments to BinaryOpStub are in rdx and rax. |
778 Register left = rdx; | 778 Register left = rdx; |
779 Register right = rax; | 779 Register right = rax; |
780 | 780 |
781 // We only generate heapnumber answers for overflowing calculations | 781 // We only generate heapnumber answers for overflowing calculations |
782 // for the four basic arithmetic operations and logical right shift by 0. | 782 // for the four basic arithmetic operations and logical right shift by 0. |
783 bool generate_inline_heapnumber_results = | 783 bool generate_inline_heapnumber_results = |
784 (allow_heapnumber_results == ALLOW_HEAPNUMBER_RESULTS) && | 784 (allow_heapnumber_results == ALLOW_HEAPNUMBER_RESULTS) && |
785 (op_ == Token::ADD || op_ == Token::SUB || | 785 (op_ == Token::ADD || op_ == Token::SUB || |
786 op_ == Token::MUL || op_ == Token::DIV || op_ == Token::SHR); | 786 op_ == Token::MUL || op_ == Token::DIV || op_ == Token::SHR); |
787 | 787 |
(...skipping 125 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
913 // values that could be smi. | 913 // values that could be smi. |
914 __ bind(¬_smis); | 914 __ bind(¬_smis); |
915 Comment done_comment(masm, "-- Enter non-smi code"); | 915 Comment done_comment(masm, "-- Enter non-smi code"); |
916 FloatingPointHelper::NumbersToSmis(masm, left, right, rbx, rdi, rcx, | 916 FloatingPointHelper::NumbersToSmis(masm, left, right, rbx, rdi, rcx, |
917 &smi_values, &fail); | 917 &smi_values, &fail); |
918 __ jmp(&smi_values); | 918 __ jmp(&smi_values); |
919 __ bind(&fail); | 919 __ bind(&fail); |
920 } | 920 } |
921 | 921 |
922 | 922 |
923 void TypeRecordingBinaryOpStub::GenerateFloatingPointCode( | 923 void BinaryOpStub::GenerateFloatingPointCode( |
Søren Thygesen Gjesse
2011/05/24 11:33:11
Fits full indentation?
fschneider
2011/05/24 12:16:41
Done.
| |
924 MacroAssembler* masm, | 924 MacroAssembler* masm, |
925 Label* allocation_failure, | 925 Label* allocation_failure, |
926 Label* non_numeric_failure) { | 926 Label* non_numeric_failure) { |
927 switch (op_) { | 927 switch (op_) { |
928 case Token::ADD: | 928 case Token::ADD: |
929 case Token::SUB: | 929 case Token::SUB: |
930 case Token::MUL: | 930 case Token::MUL: |
931 case Token::DIV: { | 931 case Token::DIV: { |
932 FloatingPointHelper::LoadSSE2UnknownOperands(masm, non_numeric_failure); | 932 FloatingPointHelper::LoadSSE2UnknownOperands(masm, non_numeric_failure); |
933 | 933 |
(...skipping 81 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1015 __ Integer32ToSmi(rdx, rbx); | 1015 __ Integer32ToSmi(rdx, rbx); |
1016 __ jmp(allocation_failure); | 1016 __ jmp(allocation_failure); |
1017 } | 1017 } |
1018 break; | 1018 break; |
1019 } | 1019 } |
1020 default: UNREACHABLE(); break; | 1020 default: UNREACHABLE(); break; |
1021 } | 1021 } |
1022 // No fall-through from this generated code. | 1022 // No fall-through from this generated code. |
1023 if (FLAG_debug_code) { | 1023 if (FLAG_debug_code) { |
1024 __ Abort("Unexpected fall-through in " | 1024 __ Abort("Unexpected fall-through in " |
1025 "TypeRecordingBinaryStub::GenerateFloatingPointCode."); | 1025 "BinaryStub::GenerateFloatingPointCode."); |
1026 } | 1026 } |
1027 } | 1027 } |
1028 | 1028 |
1029 | 1029 |
1030 void TypeRecordingBinaryOpStub::GenerateStringAddCode(MacroAssembler* masm) { | 1030 void BinaryOpStub::GenerateStringAddCode(MacroAssembler* masm) { |
1031 ASSERT(op_ == Token::ADD); | 1031 ASSERT(op_ == Token::ADD); |
1032 Label left_not_string, call_runtime; | 1032 Label left_not_string, call_runtime; |
1033 | 1033 |
1034 // Registers containing left and right operands respectively. | 1034 // Registers containing left and right operands respectively. |
1035 Register left = rdx; | 1035 Register left = rdx; |
1036 Register right = rax; | 1036 Register right = rax; |
1037 | 1037 |
1038 // Test if left operand is a string. | 1038 // Test if left operand is a string. |
1039 __ JumpIfSmi(left, &left_not_string, Label::kNear); | 1039 __ JumpIfSmi(left, &left_not_string, Label::kNear); |
1040 __ CmpObjectType(left, FIRST_NONSTRING_TYPE, rcx); | 1040 __ CmpObjectType(left, FIRST_NONSTRING_TYPE, rcx); |
(...skipping 10 matching lines...) Expand all Loading... | |
1051 | 1051 |
1052 StringAddStub string_add_right_stub(NO_STRING_CHECK_RIGHT_IN_STUB); | 1052 StringAddStub string_add_right_stub(NO_STRING_CHECK_RIGHT_IN_STUB); |
1053 GenerateRegisterArgsPush(masm); | 1053 GenerateRegisterArgsPush(masm); |
1054 __ TailCallStub(&string_add_right_stub); | 1054 __ TailCallStub(&string_add_right_stub); |
1055 | 1055 |
1056 // Neither argument is a string. | 1056 // Neither argument is a string. |
1057 __ bind(&call_runtime); | 1057 __ bind(&call_runtime); |
1058 } | 1058 } |
1059 | 1059 |
1060 | 1060 |
1061 void TypeRecordingBinaryOpStub::GenerateCallRuntimeCode(MacroAssembler* masm) { | 1061 void BinaryOpStub::GenerateCallRuntimeCode(MacroAssembler* masm) { |
1062 GenerateRegisterArgsPush(masm); | 1062 GenerateRegisterArgsPush(masm); |
1063 switch (op_) { | 1063 switch (op_) { |
1064 case Token::ADD: | 1064 case Token::ADD: |
1065 __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION); | 1065 __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION); |
1066 break; | 1066 break; |
1067 case Token::SUB: | 1067 case Token::SUB: |
1068 __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION); | 1068 __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION); |
1069 break; | 1069 break; |
1070 case Token::MUL: | 1070 case Token::MUL: |
1071 __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION); | 1071 __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION); |
(...skipping 21 matching lines...) Expand all Loading... | |
1093 break; | 1093 break; |
1094 case Token::SHR: | 1094 case Token::SHR: |
1095 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION); | 1095 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION); |
1096 break; | 1096 break; |
1097 default: | 1097 default: |
1098 UNREACHABLE(); | 1098 UNREACHABLE(); |
1099 } | 1099 } |
1100 } | 1100 } |
1101 | 1101 |
1102 | 1102 |
1103 void TypeRecordingBinaryOpStub::GenerateSmiStub(MacroAssembler* masm) { | 1103 void BinaryOpStub::GenerateSmiStub(MacroAssembler* masm) { |
1104 Label call_runtime; | 1104 Label call_runtime; |
1105 if (result_type_ == TRBinaryOpIC::UNINITIALIZED || | 1105 if (result_type_ == BinaryOpIC::UNINITIALIZED || |
1106 result_type_ == TRBinaryOpIC::SMI) { | 1106 result_type_ == BinaryOpIC::SMI) { |
1107 // Only allow smi results. | 1107 // Only allow smi results. |
1108 GenerateSmiCode(masm, NULL, NO_HEAPNUMBER_RESULTS); | 1108 GenerateSmiCode(masm, NULL, NO_HEAPNUMBER_RESULTS); |
1109 } else { | 1109 } else { |
1110 // Allow heap number result and don't make a transition if a heap number | 1110 // Allow heap number result and don't make a transition if a heap number |
1111 // cannot be allocated. | 1111 // cannot be allocated. |
1112 GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS); | 1112 GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS); |
1113 } | 1113 } |
1114 | 1114 |
1115 // Code falls through if the result is not returned as either a smi or heap | 1115 // Code falls through if the result is not returned as either a smi or heap |
1116 // number. | 1116 // number. |
1117 GenerateTypeTransition(masm); | 1117 GenerateTypeTransition(masm); |
1118 | 1118 |
1119 if (call_runtime.is_linked()) { | 1119 if (call_runtime.is_linked()) { |
1120 __ bind(&call_runtime); | 1120 __ bind(&call_runtime); |
1121 GenerateCallRuntimeCode(masm); | 1121 GenerateCallRuntimeCode(masm); |
1122 } | 1122 } |
1123 } | 1123 } |
1124 | 1124 |
1125 | 1125 |
1126 void TypeRecordingBinaryOpStub::GenerateStringStub(MacroAssembler* masm) { | 1126 void BinaryOpStub::GenerateStringStub(MacroAssembler* masm) { |
1127 ASSERT(operands_type_ == TRBinaryOpIC::STRING); | 1127 ASSERT(operands_type_ == BinaryOpIC::STRING); |
1128 ASSERT(op_ == Token::ADD); | 1128 ASSERT(op_ == Token::ADD); |
1129 GenerateStringAddCode(masm); | 1129 GenerateStringAddCode(masm); |
1130 // Try to add arguments as strings, otherwise, transition to the generic | 1130 // Try to add arguments as strings, otherwise, transition to the generic |
1131 // TRBinaryOpIC type. | 1131 // BinaryOpIC type. |
1132 GenerateTypeTransition(masm); | 1132 GenerateTypeTransition(masm); |
1133 } | 1133 } |
1134 | 1134 |
1135 | 1135 |
1136 void TypeRecordingBinaryOpStub::GenerateBothStringStub(MacroAssembler* masm) { | 1136 void BinaryOpStub::GenerateBothStringStub(MacroAssembler* masm) { |
1137 Label call_runtime; | 1137 Label call_runtime; |
1138 ASSERT(operands_type_ == TRBinaryOpIC::BOTH_STRING); | 1138 ASSERT(operands_type_ == BinaryOpIC::BOTH_STRING); |
1139 ASSERT(op_ == Token::ADD); | 1139 ASSERT(op_ == Token::ADD); |
1140 // If both arguments are strings, call the string add stub. | 1140 // If both arguments are strings, call the string add stub. |
1141 // Otherwise, do a transition. | 1141 // Otherwise, do a transition. |
1142 | 1142 |
1143 // Registers containing left and right operands respectively. | 1143 // Registers containing left and right operands respectively. |
1144 Register left = rdx; | 1144 Register left = rdx; |
1145 Register right = rax; | 1145 Register right = rax; |
1146 | 1146 |
1147 // Test if left operand is a string. | 1147 // Test if left operand is a string. |
1148 __ JumpIfSmi(left, &call_runtime); | 1148 __ JumpIfSmi(left, &call_runtime); |
1149 __ CmpObjectType(left, FIRST_NONSTRING_TYPE, rcx); | 1149 __ CmpObjectType(left, FIRST_NONSTRING_TYPE, rcx); |
1150 __ j(above_equal, &call_runtime); | 1150 __ j(above_equal, &call_runtime); |
1151 | 1151 |
1152 // Test if right operand is a string. | 1152 // Test if right operand is a string. |
1153 __ JumpIfSmi(right, &call_runtime); | 1153 __ JumpIfSmi(right, &call_runtime); |
1154 __ CmpObjectType(right, FIRST_NONSTRING_TYPE, rcx); | 1154 __ CmpObjectType(right, FIRST_NONSTRING_TYPE, rcx); |
1155 __ j(above_equal, &call_runtime); | 1155 __ j(above_equal, &call_runtime); |
1156 | 1156 |
1157 StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB); | 1157 StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB); |
1158 GenerateRegisterArgsPush(masm); | 1158 GenerateRegisterArgsPush(masm); |
1159 __ TailCallStub(&string_add_stub); | 1159 __ TailCallStub(&string_add_stub); |
1160 | 1160 |
1161 __ bind(&call_runtime); | 1161 __ bind(&call_runtime); |
1162 GenerateTypeTransition(masm); | 1162 GenerateTypeTransition(masm); |
1163 } | 1163 } |
1164 | 1164 |
1165 | 1165 |
1166 void TypeRecordingBinaryOpStub::GenerateOddballStub(MacroAssembler* masm) { | 1166 void BinaryOpStub::GenerateOddballStub(MacroAssembler* masm) { |
1167 Label call_runtime; | 1167 Label call_runtime; |
1168 | 1168 |
1169 if (op_ == Token::ADD) { | 1169 if (op_ == Token::ADD) { |
1170 // Handle string addition here, because it is the only operation | 1170 // Handle string addition here, because it is the only operation |
1171 // that does not do a ToNumber conversion on the operands. | 1171 // that does not do a ToNumber conversion on the operands. |
1172 GenerateStringAddCode(masm); | 1172 GenerateStringAddCode(masm); |
1173 } | 1173 } |
1174 | 1174 |
1175 // Convert oddball arguments to numbers. | 1175 // Convert oddball arguments to numbers. |
1176 Label check, done; | 1176 Label check, done; |
(...skipping 12 matching lines...) Expand all Loading... | |
1189 __ xor_(rax, rax); | 1189 __ xor_(rax, rax); |
1190 } else { | 1190 } else { |
1191 __ LoadRoot(rax, Heap::kNanValueRootIndex); | 1191 __ LoadRoot(rax, Heap::kNanValueRootIndex); |
1192 } | 1192 } |
1193 __ bind(&done); | 1193 __ bind(&done); |
1194 | 1194 |
1195 GenerateHeapNumberStub(masm); | 1195 GenerateHeapNumberStub(masm); |
1196 } | 1196 } |
1197 | 1197 |
1198 | 1198 |
1199 void TypeRecordingBinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { | 1199 void BinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { |
1200 Label gc_required, not_number; | 1200 Label gc_required, not_number; |
1201 GenerateFloatingPointCode(masm, &gc_required, &not_number); | 1201 GenerateFloatingPointCode(masm, &gc_required, &not_number); |
1202 | 1202 |
1203 __ bind(&not_number); | 1203 __ bind(&not_number); |
1204 GenerateTypeTransition(masm); | 1204 GenerateTypeTransition(masm); |
1205 | 1205 |
1206 __ bind(&gc_required); | 1206 __ bind(&gc_required); |
1207 GenerateCallRuntimeCode(masm); | 1207 GenerateCallRuntimeCode(masm); |
1208 } | 1208 } |
1209 | 1209 |
1210 | 1210 |
1211 void TypeRecordingBinaryOpStub::GenerateGeneric(MacroAssembler* masm) { | 1211 void BinaryOpStub::GenerateGeneric(MacroAssembler* masm) { |
1212 Label call_runtime, call_string_add_or_runtime; | 1212 Label call_runtime, call_string_add_or_runtime; |
1213 | 1213 |
1214 GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS); | 1214 GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS); |
1215 | 1215 |
1216 GenerateFloatingPointCode(masm, &call_runtime, &call_string_add_or_runtime); | 1216 GenerateFloatingPointCode(masm, &call_runtime, &call_string_add_or_runtime); |
1217 | 1217 |
1218 __ bind(&call_string_add_or_runtime); | 1218 __ bind(&call_string_add_or_runtime); |
1219 if (op_ == Token::ADD) { | 1219 if (op_ == Token::ADD) { |
1220 GenerateStringAddCode(masm); | 1220 GenerateStringAddCode(masm); |
1221 } | 1221 } |
1222 | 1222 |
1223 __ bind(&call_runtime); | 1223 __ bind(&call_runtime); |
1224 GenerateCallRuntimeCode(masm); | 1224 GenerateCallRuntimeCode(masm); |
1225 } | 1225 } |
1226 | 1226 |
1227 | 1227 |
1228 void TypeRecordingBinaryOpStub::GenerateHeapResultAllocation( | 1228 void BinaryOpStub::GenerateHeapResultAllocation( |
Søren Thygesen Gjesse
2011/05/24 11:33:11
Fits full indentation?
fschneider
2011/05/24 12:16:41
Done.
| |
1229 MacroAssembler* masm, | 1229 MacroAssembler* masm, |
1230 Label* alloc_failure) { | 1230 Label* alloc_failure) { |
1231 Label skip_allocation; | 1231 Label skip_allocation; |
1232 OverwriteMode mode = mode_; | 1232 OverwriteMode mode = mode_; |
1233 switch (mode) { | 1233 switch (mode) { |
1234 case OVERWRITE_LEFT: { | 1234 case OVERWRITE_LEFT: { |
1235 // If the argument in rdx is already an object, we skip the | 1235 // If the argument in rdx is already an object, we skip the |
1236 // allocation of a heap number. | 1236 // allocation of a heap number. |
1237 __ JumpIfNotSmi(rdx, &skip_allocation); | 1237 __ JumpIfNotSmi(rdx, &skip_allocation); |
1238 // Allocate a heap number for the result. Keep eax and edx intact | 1238 // Allocate a heap number for the result. Keep eax and edx intact |
(...skipping 19 matching lines...) Expand all Loading... | |
1258 // Now rax can be overwritten losing one of the arguments as we are | 1258 // Now rax can be overwritten losing one of the arguments as we are |
1259 // now done and will not need it any more. | 1259 // now done and will not need it any more. |
1260 __ movq(rax, rbx); | 1260 __ movq(rax, rbx); |
1261 __ bind(&skip_allocation); | 1261 __ bind(&skip_allocation); |
1262 break; | 1262 break; |
1263 default: UNREACHABLE(); | 1263 default: UNREACHABLE(); |
1264 } | 1264 } |
1265 } | 1265 } |
1266 | 1266 |
1267 | 1267 |
1268 void TypeRecordingBinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) { | 1268 void BinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) { |
1269 __ pop(rcx); | 1269 __ pop(rcx); |
1270 __ push(rdx); | 1270 __ push(rdx); |
1271 __ push(rax); | 1271 __ push(rax); |
1272 __ push(rcx); | 1272 __ push(rcx); |
1273 } | 1273 } |
1274 | 1274 |
1275 | 1275 |
1276 void TranscendentalCacheStub::Generate(MacroAssembler* masm) { | 1276 void TranscendentalCacheStub::Generate(MacroAssembler* masm) { |
1277 // TAGGED case: | 1277 // TAGGED case: |
1278 // Input: | 1278 // Input: |
(...skipping 3845 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
5124 __ Drop(1); | 5124 __ Drop(1); |
5125 __ ret(2 * kPointerSize); | 5125 __ ret(2 * kPointerSize); |
5126 } | 5126 } |
5127 | 5127 |
5128 | 5128 |
5129 #undef __ | 5129 #undef __ |
5130 | 5130 |
5131 } } // namespace v8::internal | 5131 } } // namespace v8::internal |
5132 | 5132 |
5133 #endif // V8_TARGET_ARCH_X64 | 5133 #endif // V8_TARGET_ARCH_X64 |
OLD | NEW |