OLD | NEW |
1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 323 matching lines...) |
334 } | 334 } |
335 | 335 |
336 void CodeGenerator::VisitCompareOperation(CompareOperation* a) { | 336 void CodeGenerator::VisitCompareOperation(CompareOperation* a) { |
337 UNIMPLEMENTED(); | 337 UNIMPLEMENTED(); |
338 } | 338 } |
339 | 339 |
340 void CodeGenerator::VisitThisFunction(ThisFunction* a) { | 340 void CodeGenerator::VisitThisFunction(ThisFunction* a) { |
341 UNIMPLEMENTED(); | 341 UNIMPLEMENTED(); |
342 } | 342 } |
343 | 343 |
344 void CodeGenerator::GenerateArgumentsAccess(ZoneList<Expression*>* a) { | 344 void CodeGenerator::GenerateArgumentsAccess(ZoneList<Expression*>* args) { |
345 UNIMPLEMENTED(); | 345 UNIMPLEMENTED(); |
346 } | 346 } |
347 | 347 |
348 void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* a) { | 348 void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) { |
349 UNIMPLEMENTED(); | 349 UNIMPLEMENTED(); |
350 } | 350 } |
351 | 350 |
352 void CodeGenerator::VisitFastCharCodeAt? No — GenerateFastCharCodeAt(ZoneList<Expression*>* a) { | 351 void CodeGenerator::GenerateFastCharCodeAt(ZoneList<Expression*>* args) { |
353 UNIMPLEMENTED(); | 352 UNIMPLEMENTED(); |
354 } | 353 } |
355 | 354 |
356 void CodeGenerator::GenerateIsArray(ZoneList<Expression*>* a) { | 355 void CodeGenerator::GenerateIsArray(ZoneList<Expression*>* args) { |
357 UNIMPLEMENTED(); | 356 UNIMPLEMENTED(); |
358 } | 357 } |
359 | 358 |
360 void CodeGenerator::GenerateIsNonNegativeSmi(ZoneList<Expression*>* a) { | 359 void CodeGenerator::GenerateIsNonNegativeSmi(ZoneList<Expression*>* args) { |
361 UNIMPLEMENTED(); | 360 UNIMPLEMENTED(); |
362 } | 361 } |
363 | 362 |
364 void CodeGenerator::GenerateIsSmi(ZoneList<Expression*>* a) { | 363 void CodeGenerator::GenerateIsSmi(ZoneList<Expression*>* args) { |
365 UNIMPLEMENTED(); | 364 UNIMPLEMENTED(); |
366 } | 365 } |
367 | 366 |
368 void CodeGenerator::GenerateLog(ZoneList<Expression*>* a) { | 367 void CodeGenerator::GenerateLog(ZoneList<Expression*>* args) { |
369 UNIMPLEMENTED(); | 368 UNIMPLEMENTED(); |
370 } | 369 } |
371 | 370 |
372 void CodeGenerator::GenerateObjectEquals(ZoneList<Expression*>* a) { | 371 void CodeGenerator::GenerateObjectEquals(ZoneList<Expression*>* args) { |
373 UNIMPLEMENTED(); | 372 UNIMPLEMENTED(); |
374 } | 373 } |
375 | 374 |
376 void CodeGenerator::GenerateRandomPositiveSmi(ZoneList<Expression*>* a) { | 375 void CodeGenerator::GenerateRandomPositiveSmi(ZoneList<Expression*>* args) { |
377 UNIMPLEMENTED(); | 376 UNIMPLEMENTED(); |
378 } | 377 } |
379 | 378 |
380 void CodeGenerator::GenerateFastMathOp(MathOp op, ZoneList<Expression*>* args) { | 379 void CodeGenerator::GenerateFastMathOp(MathOp op, ZoneList<Expression*>* args) { |
381 UNIMPLEMENTED(); | 380 UNIMPLEMENTED(); |
382 } | 381 } |
383 | 382 |
384 void CodeGenerator::GenerateSetValueOf(ZoneList<Expression*>* a) { | 383 void CodeGenerator::GenerateSetValueOf(ZoneList<Expression*>* args) { |
385 UNIMPLEMENTED(); | 384 UNIMPLEMENTED(); |
386 } | 385 } |
387 | 386 |
388 void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* a) { | 387 void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* args) { |
389 UNIMPLEMENTED(); | 388 UNIMPLEMENTED(); |
390 } | 389 } |
391 | 390 |
392 #undef __ | 391 #undef __ |
393 // End of CodeGenerator implementation. | 392 // End of CodeGenerator implementation. |
394 | 393 |
395 // ----------------------------------------------------------------------------- | 394 // ----------------------------------------------------------------------------- |
396 // Implementation of stubs. | 395 // Implementation of stubs. |
397 | 396 |
398 // Stub classes have a public member named masm, not masm_. | 397 // Stub classes have a public member named masm, not masm_. |
399 #define __ ACCESS_MASM(masm) | 398 #define __ ACCESS_MASM(masm) |
400 | 399 |
| 400 class ToBooleanStub: public CodeStub { |
| 401 public: |
| 402 ToBooleanStub() { } |
| 403 |
| 404 void Generate(MacroAssembler* masm); |
| 405 |
| 406 private: |
| 407 Major MajorKey() { return ToBoolean; } |
| 408 int MinorKey() { return 0; } |
| 409 }; |
| 410 |
| 411 |
| 412 void ToBooleanStub::Generate(MacroAssembler* masm) { |
| 413 Label false_result, true_result, not_string; |
| 414 __ movq(rax, Operand(rsp, 1 * kPointerSize)); |
| 415 |
| 416 // 'null' => false. |
| 417 __ movq(kScratchRegister, Factory::null_value(), RelocInfo::EMBEDDED_OBJECT); |
| 418 __ cmpq(rax, kScratchRegister); |
| 419 __ j(equal, &false_result); |
| 420 |
| 421 // Get the map and type of the heap object. |
| 422 __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset)); |
| 423 __ movzxbq(rcx, FieldOperand(rdx, Map::kInstanceTypeOffset)); |
| 424 |
| 425 // Undetectable => false. |
| 426 __ movzxbq(rbx, FieldOperand(rdx, Map::kBitFieldOffset)); |
| 427 __ and_(rbx, Immediate(1 << Map::kIsUndetectable)); |
| 428 __ j(not_zero, &false_result); |
| 429 |
| 430 // JavaScript object => true. |
| 431 __ cmpq(rcx, Immediate(FIRST_JS_OBJECT_TYPE)); |
| 432 __ j(above_equal, &true_result); |
| 433 |
| 434 // String value => false iff empty. |
| 435 __ cmpq(rcx, Immediate(FIRST_NONSTRING_TYPE)); |
| 436 __ j(above_equal, &not_string); |
| 437 __ and_(rcx, Immediate(kStringSizeMask)); |
| 438 __ cmpq(rcx, Immediate(kShortStringTag)); |
| 439 __ j(not_equal, &true_result); // Empty string is always short. |
| 440 __ movq(rdx, FieldOperand(rax, String::kLengthOffset)); |
| 441 __ shr(rdx, Immediate(String::kShortLengthShift)); |
| 442 __ j(zero, &false_result); |
| 443 __ jmp(&true_result); |
| 444 |
| 445 __ bind(&not_string); |
| 446 // HeapNumber => false iff +0, -0, or NaN. |
| 447 __ movq(kScratchRegister, |
| 448 Factory::heap_number_map(), |
| 449 RelocInfo::EMBEDDED_OBJECT); |
| 450 __ cmpq(rdx, kScratchRegister); |
| 451 __ j(not_equal, &true_result); |
| 452 // TODO(x64): Don't use fp stack, use MMX registers? |
| 453 __ fldz(); // Load zero onto fp stack |
| 454 // Load heap-number double value onto fp stack |
| 455 __ fld_d(FieldOperand(rax, HeapNumber::kValueOffset)); |
| 456 __ fucompp(); // Compare and pop both values. |
| 457 __ movq(kScratchRegister, rax); |
| 458 __ fnstsw_ax(); // Store fp status word in ax, no checking for exceptions. |
| 459 __ testb(rax, Immediate(0x08)); // Test FP condition flag C3. |
| 460 __ movq(rax, kScratchRegister); |
| 461 __ j(zero, &false_result); |
| 462 // Fall through to |true_result|. |
| 463 |
| 464 // Return 1/0 for true/false in rax. |
| 465 __ bind(&true_result); |
| 466 __ movq(rax, Immediate(1)); |
| 467 __ ret(1 * kPointerSize); |
| 468 __ bind(&false_result); |
| 469 __ xor_(rax, rax); |
| 470 __ ret(1 * kPointerSize); |
| 471 } |
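[Note] For reference, the heap-value truth table the stub above encodes, as a minimal host-side C++ sketch (not V8 code; the tagged-value struct and field names are illustrative):

  #include <cmath>
  #include <string>

  // Illustrative stand-in for a heap value; kinds mirror the stub's cases.
  struct HeapValue {
    enum Kind { kNull, kUndetectable, kJSObject, kString, kHeapNumber } kind;
    std::string string_value;  // valid when kind == kString
    double number_value;       // valid when kind == kHeapNumber
  };

  bool ToBooleanSketch(const HeapValue& v) {
    switch (v.kind) {
      case HeapValue::kNull:         return false;  // 'null' => false
      case HeapValue::kUndetectable: return false;  // undetectable => false
      case HeapValue::kJSObject:     return true;   // JS object => true
      case HeapValue::kString:       return !v.string_value.empty();  // false iff empty
      case HeapValue::kHeapNumber:   // false iff +0, -0, or NaN
        return v.number_value != 0.0 && !std::isnan(v.number_value);
    }
    return true;
  }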
| 472 |
| 473 |
| 474 // Flag that indicates whether the code that handles smi arguments should |
| 475 // be placed in the stub (SMI_CODE_IN_STUB) or inlined (SMI_CODE_INLINED). |
| 476 enum GenericBinaryFlags { |
| 477 SMI_CODE_IN_STUB, |
| 478 SMI_CODE_INLINED |
| 479 }; |
| 480 |
| 481 |
| 482 class GenericBinaryOpStub: public CodeStub { |
| 483 public: |
| 484 GenericBinaryOpStub(Token::Value op, |
| 485 OverwriteMode mode, |
| 486 GenericBinaryFlags flags) |
| 487 : op_(op), mode_(mode), flags_(flags) { |
| 488 ASSERT(OpBits::is_valid(Token::NUM_TOKENS)); |
| 489 } |
| 490 |
| 491 void GenerateSmiCode(MacroAssembler* masm, Label* slow); |
| 492 |
| 493 private: |
| 494 Token::Value op_; |
| 495 OverwriteMode mode_; |
| 496 GenericBinaryFlags flags_; |
| 497 |
| 498 const char* GetName(); |
| 499 |
| 500 #ifdef DEBUG |
| 501 void Print() { |
| 502 PrintF("GenericBinaryOpStub (op %s), (mode %d, flags %d)\n", |
| 503 Token::String(op_), |
| 504 static_cast<int>(mode_), |
| 505 static_cast<int>(flags_)); |
| 506 } |
| 507 #endif |
| 508 |
| 509 // Minor key encoding in 16 bits FOOOOOOOOOOOOOMM. |
| 510 class ModeBits: public BitField<OverwriteMode, 0, 2> {}; |
| 511 class OpBits: public BitField<Token::Value, 2, 13> {}; |
| 512 class FlagBits: public BitField<GenericBinaryFlags, 15, 1> {}; |
| 513 |
| 514 Major MajorKey() { return GenericBinaryOp; } |
| 515 int MinorKey() { |
| 516 // Encode the parameters in a unique 16 bit value. |
| 517 return OpBits::encode(op_) |
| 518 | ModeBits::encode(mode_) |
| 519 | FlagBits::encode(flags_); |
| 520 } |
| 521 void Generate(MacroAssembler* masm); |
| 522 }; |
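[Note] The FOOOOOOOOOOOOOMM layout above puts the overwrite mode in bits 0-1, the token in bits 2-14, and the flag in bit 15. A plain-shift sketch of the same 16-bit encoding (helper name assumed; the BitField classes perform the equivalent masking):

  // Equivalent of MinorKey() with explicit shifts and masks.
  int EncodeMinorKey(int op, int mode, int flags) {
    // mode: 2 bits, op: 13 bits, flag: 1 bit => 16 bits total.
    return (mode & 0x3) | ((op & 0x1fff) << 2) | ((flags & 0x1) << 15);
  }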
| 523 |
| 524 |
| 525 const char* GenericBinaryOpStub::GetName() { |
| 526 switch (op_) { |
| 527 case Token::ADD: return "GenericBinaryOpStub_ADD"; |
| 528 case Token::SUB: return "GenericBinaryOpStub_SUB"; |
| 529 case Token::MUL: return "GenericBinaryOpStub_MUL"; |
| 530 case Token::DIV: return "GenericBinaryOpStub_DIV"; |
| 531 case Token::BIT_OR: return "GenericBinaryOpStub_BIT_OR"; |
| 532 case Token::BIT_AND: return "GenericBinaryOpStub_BIT_AND"; |
| 533 case Token::BIT_XOR: return "GenericBinaryOpStub_BIT_XOR"; |
| 534 case Token::SAR: return "GenericBinaryOpStub_SAR"; |
| 535 case Token::SHL: return "GenericBinaryOpStub_SHL"; |
| 536 case Token::SHR: return "GenericBinaryOpStub_SHR"; |
| 537 default: return "GenericBinaryOpStub"; |
| 538 } |
| 539 } |
| 540 |
| 541 |
| 542 void GenericBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, Label* slow) { |
| 543 // Perform fast-case smi code for the operation (rax <op> rbx) and |
| 544 // leave result in register rax. |
| 545 |
| 546 // Prepare the smi check of both operands by or'ing them together |
| 547 // before checking against the smi mask. |
| 548 __ movq(rcx, rbx); |
| 549 __ or_(rcx, rax); |
| 550 |
| 551 switch (op_) { |
| 552 case Token::ADD: |
| 553 __ addl(rax, rbx); // add optimistically |
| 554 __ j(overflow, slow); |
| 555 __ movsxlq(rax, rax); // Sign extend eax into rax. |
| 556 break; |
| 557 |
| 558 case Token::SUB: |
| 559 __ subl(rax, rbx); // subtract optimistically |
| 560 __ j(overflow, slow); |
| 561 __ movsxlq(rax, rax); // Sign extend eax into rax. |
| 562 break; |
| 563 |
| 564 case Token::DIV: |
| 565 case Token::MOD: |
| 566 // Sign extend rax into rdx:rax |
| 567 // (also sign extends eax into edx if eax is Smi). |
| 568 __ cqo(); |
| 569 // Check for 0 divisor. |
| 570 __ testq(rbx, rbx); |
| 571 __ j(zero, slow); |
| 572 break; |
| 573 |
| 574 default: |
| 575 // Fall-through to smi check. |
| 576 break; |
| 577 } |
| 578 |
| 579 // Perform the actual smi check. |
| 580 ASSERT(kSmiTag == 0); // adjust zero check if not the case |
| 581 __ testl(rcx, Immediate(kSmiTagMask)); |
| 582 __ j(not_zero, slow); |
| 583 |
| 584 switch (op_) { |
| 585 case Token::ADD: |
| 586 case Token::SUB: |
| 587 // Do nothing here. |
| 588 break; |
| 589 |
| 590 case Token::MUL: |
| 591 // If the smi tag is 0 we can just leave the tag on one operand. |
| 592 ASSERT(kSmiTag == 0); // adjust code below if not the case |
| 593 // Remove tag from one of the operands (but keep sign). |
| 594 __ sar(rax, Immediate(kSmiTagSize)); |
| 595 // Do multiplication. |
| 596 __ imull(rax, rbx); // multiplication of smis; result in eax |
| 597 // Go slow on overflows. |
| 598 __ j(overflow, slow); |
| 599 // Check for negative zero result. |
| 600 __ movsxlq(rax, rax); // Sign extend eax into rax. |
| 601 __ NegativeZeroTest(rax, rcx, slow); // use rcx = x | y |
| 602 break; |
| 603 |
| 604 case Token::DIV: |
| 605 // Divide rdx:rax by rbx (where rdx:rax is equivalent to the smi in eax). |
| 606 __ idiv(rbx); |
| 607 // Check that the remainder is zero. |
| 608 __ testq(rdx, rdx); |
| 609 __ j(not_zero, slow); |
| 610 // Check for the corner case of dividing the most negative smi |
| 611 // by -1. We cannot use the overflow flag, since it is not set |
| 612 // by idiv instruction. |
| 613 ASSERT(kSmiTag == 0 && kSmiTagSize == 1); |
| 614 // TODO(X64):TODO(Smi): Smi implementation dependent constant. |
| 615 // Value is Smi::fromInt(-(1<<31)) / Smi::fromInt(-1) |
| 616 __ cmpq(rax, Immediate(0x40000000)); |
| 617 __ j(equal, slow); |
| 618 // Check for negative zero result. |
| 619 __ NegativeZeroTest(rax, rcx, slow); // use rcx = x | y |
| 620 // Tag the result and store it in register rax. |
| 621 ASSERT(kSmiTagSize == kTimes2); // adjust code if not the case |
| 622 __ lea(rax, Operand(rax, rax, kTimes1, kSmiTag)); |
| 623 break; |
| 624 |
| 625 case Token::MOD: |
| 626 // Divide rdx:rax by rbx. |
| 627 __ idiv(rbx); |
| 628 // Check for negative zero result. |
| 629 __ NegativeZeroTest(rdx, rcx, slow); // use rcx = x | y |
| 630 // Move remainder to register rax. |
| 631 __ movq(rax, rdx); |
| 632 break; |
| 633 |
| 634 case Token::BIT_OR: |
| 635 __ or_(rax, rbx); |
| 636 break; |
| 637 |
| 638 case Token::BIT_AND: |
| 639 __ and_(rax, rbx); |
| 640 break; |
| 641 |
| 642 case Token::BIT_XOR: |
| 643 __ xor_(rax, rbx); |
| 644 break; |
| 645 |
| 646 case Token::SHL: |
| 647 case Token::SHR: |
| 648 case Token::SAR: |
| 649 // Move the second operand into register rcx. |
| 650 __ movq(rcx, rbx); |
| 651 // Remove tags from operands (but keep sign). |
| 652 __ sar(rax, Immediate(kSmiTagSize)); |
| 653 __ sar(rcx, Immediate(kSmiTagSize)); |
| 654 // Perform the operation. |
| 655 switch (op_) { |
| 656 case Token::SAR: |
| 657 __ sar(rax); |
| 658 // No checks of result necessary |
| 659 break; |
| 660 case Token::SHR: |
| 661 __ shrl(rax); // cl is the implicit shift register |
| 662 // Check that the *unsigned* result fits in a smi. |
| 663 // Neither of the two high-order bits can be set: |
| 664 // - 0x80000000: high bit would be lost when smi tagging. |
| 665 // - 0x40000000: this number would convert to negative when |
| 666 // Smi tagging. These two cases can only happen with shifts |
| 667 // by 0 or 1 when handed a valid smi. |
| 668 __ testq(rax, Immediate(0xc0000000)); |
| 669 __ j(not_zero, slow); |
| 670 break; |
| 671 case Token::SHL: |
| 672 __ shll(rax); |
| 673 // TODO(Smi): Significant change if Smi changes. |
| 674 // Check that the *signed* result fits in a smi. |
| 675 // It does, if the 30th and 31st bits are equal, since then |
| 676 // shifting the SmiTag in at the bottom doesn't change the sign. |
| 677 ASSERT(kSmiTagSize == 1); |
| 678 __ cmpl(rax, Immediate(0xc0000000)); |
| 679 __ j(sign, slow); |
| 680 __ movsxlq(rax, rax); // Extend new sign of eax into rax. |
| 681 break; |
| 682 default: |
| 683 UNREACHABLE(); |
| 684 } |
| 685 // Tag the result and store it in register rax. |
| 686 ASSERT(kSmiTagSize == kTimes2); // adjust code if not the case |
| 687 __ lea(rax, Operand(rax, rax, kTimes1, kSmiTag)); |
| 688 break; |
| 689 |
| 690 default: |
| 691 UNREACHABLE(); |
| 692 break; |
| 693 } |
| 694 } |
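[Note] The fast path above relies on the 32-bit smi encoding of this era: kSmiTag == 0 and kSmiTagSize == 1, i.e. a smi is the integer shifted left once. A sketch of the invariants the ASSERTs pin down (two's complement assumed):

  #include <cstdint>

  int32_t SmiTag(int32_t value) { return value << 1; }       // what lea(rax, [rax+rax]) rebuilds
  int32_t SmiUntag(int32_t smi) { return smi >> 1; }         // arithmetic shift keeps the sign
  bool IsSmi(int32_t word)      { return (word & 1) == 0; }  // kSmiTagMask == 1, kSmiTag == 0

  // Why ADD and SUB work on still-tagged values and only need an overflow
  // check: (a << 1) + (b << 1) == (a + b) << 1, so the sum is already tagged.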
| 695 |
| 696 |
| 697 void GenericBinaryOpStub::Generate(MacroAssembler* masm) { |
| 698 } |
| 699 |
| 700 |
| 701 void UnarySubStub::Generate(MacroAssembler* masm) { |
| 702 } |
| 703 |
| 704 class CompareStub: public CodeStub { |
| 705 public: |
| 706 CompareStub(Condition cc, bool strict) : cc_(cc), strict_(strict) { } |
| 707 |
| 708 void Generate(MacroAssembler* masm); |
| 709 |
| 710 private: |
| 711 Condition cc_; |
| 712 bool strict_; |
| 713 |
| 714 Major MajorKey() { return Compare; } |
| 715 |
| 716 int MinorKey() { |
| 717 // Encode the two parameters in a unique 16 bit value. |
| 718 ASSERT(static_cast<int>(cc_) < (1 << 15)); |
| 719 return (static_cast<int>(cc_) << 1) | (strict_ ? 1 : 0); |
| 720 } |
| 721 |
| 722 #ifdef DEBUG |
| 723 void Print() { |
| 724 PrintF("CompareStub (cc %d), (strict %s)\n", |
| 725 static_cast<int>(cc_), |
| 726 strict_ ? "true" : "false"); |
| 727 } |
| 728 #endif |
| 729 }; |
| 730 |
| 731 |
| 732 void CompareStub::Generate(MacroAssembler* masm) { |
| 733 } |
| 734 |
| 735 |
| 736 void StackCheckStub::Generate(MacroAssembler* masm) { |
| 737 } |
| 738 |
| 739 |
| 740 class CallFunctionStub: public CodeStub { |
| 741 public: |
| 742 CallFunctionStub(int argc, InLoopFlag in_loop) |
| 743 : argc_(argc), in_loop_(in_loop) { } |
| 744 |
| 745 void Generate(MacroAssembler* masm); |
| 746 |
| 747 private: |
| 748 int argc_; |
| 749 InLoopFlag in_loop_; |
| 750 |
| 751 #ifdef DEBUG |
| 752 void Print() { PrintF("CallFunctionStub (args %d)\n", argc_); } |
| 753 #endif |
| 754 |
| 755 Major MajorKey() { return CallFunction; } |
| 756 int MinorKey() { return argc_; } |
| 757 InLoopFlag InLoop() { return in_loop_; } |
| 758 }; |
| 759 |
| 760 |
| 761 void CallFunctionStub::Generate(MacroAssembler* masm) { |
| 762 } |
| 763 |
| 764 |
| 765 void InstanceofStub::Generate(MacroAssembler* masm) { |
| 766 } |
| 767 |
| 768 |
| 769 |
| 770 void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) { |
| 771 // The displacement is used for skipping the return address and the |
| 772 // frame pointer on the stack. It is the offset of the last |
| 773 // parameter (if any) relative to the frame pointer. |
| 774 static const int kDisplacement = 2 * kPointerSize; |
| 775 |
| 776 // Check if the calling frame is an arguments adaptor frame. |
| 777 Label runtime; |
| 778 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); |
| 779 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset)); |
| 780 __ cmpq(rcx, Immediate(ArgumentsAdaptorFrame::SENTINEL)); |
| 781 __ j(not_equal, &runtime); |
| 782 // Value in rcx is Smi encoded. |
| 783 |
| 784 // Patch the arguments.length and the parameters pointer. |
| 785 __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
| 786 __ movq(Operand(rsp, 1 * kPointerSize), rcx); |
| 787 __ lea(rdx, Operand(rdx, rcx, kTimes4, kDisplacement)); |
| 788 __ movq(Operand(rsp, 2 * kPointerSize), rdx); |
| 789 |
| 790 // Do the runtime call to allocate the arguments object. |
| 791 __ bind(&runtime); |
| 792 __ TailCallRuntime(ExternalReference(Runtime::kNewArgumentsFast), 3); |
| 793 } |
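[Note] On the lea above: rcx holds the smi-encoded length (value << 1), so kTimes4 scaling multiplies the untagged length by 8, one x64 pointer per argument. A sketch of the address it computes (helper name and types are illustrative):

  #include <cstdint>

  // Address of the last argument in the adaptor frame, as computed by
  // lea(rdx, Operand(rdx, rcx, kTimes4, kDisplacement)).
  uintptr_t LastParameterAddress(uintptr_t adaptor_fp, intptr_t smi_length) {
    const intptr_t kPointerSize = 8;                  // x64
    const intptr_t kDisplacement = 2 * kPointerSize;  // skip return address and saved fp
    return adaptor_fp + smi_length * 4 + kDisplacement;  // smi_length == length << 1
  }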
| 794 |
| 795 void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) { |
| 796 // The key is in rdx and the parameter count is in rax. |
| 797 |
| 798 // The displacement is used for skipping the frame pointer on the |
| 799 // stack. It is the offset of the last parameter (if any) relative |
| 800 // to the frame pointer. |
| 801 static const int kDisplacement = 1 * kPointerSize; |
| 802 |
| 803 // Check that the key is a smi. |
| 804 Label slow; |
| 805 __ testl(rdx, Immediate(kSmiTagMask)); |
| 806 __ j(not_zero, &slow); |
| 807 |
| 808 // Check if the calling frame is an arguments adaptor frame. |
| 809 Label adaptor; |
| 810 __ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); |
| 811 __ movq(rcx, Operand(rbx, StandardFrameConstants::kContextOffset)); |
| 812 __ cmpq(rcx, Immediate(ArgumentsAdaptorFrame::SENTINEL)); |
| 813 __ j(equal, &adaptor); |
| 814 |
| 815 // Check index against formal parameters count limit passed in |
| 816 // through register rax. Use unsigned comparison to get negative |
| 817 // check for free. |
| 818 __ cmpq(rdx, rax); |
| 819 __ j(above_equal, &slow); |
| 820 |
| 821 // Read the argument from the stack and return it. |
| 822 // Shifting code depends on SmiEncoding being equivalent to left shift: |
| 823 // we multiply by four to get pointer alignment. |
| 824 ASSERT(kSmiTagSize == 1 && kSmiTag == 0); |
| 825 __ lea(rbx, Operand(rbp, rax, kTimes4, 0)); |
| 826 __ neg(rdx); |
| 827 __ movq(rax, Operand(rbx, rdx, kTimes4, kDisplacement)); |
| 828 __ Ret(); |
| 829 |
| 830 // Arguments adaptor case: Check index against actual arguments |
| 831 // limit found in the arguments adaptor frame. Use unsigned |
| 832 // comparison to get negative check for free. |
| 833 __ bind(&adaptor); |
| 834 __ movq(rcx, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
| 835 __ cmpq(rdx, rcx); |
| 836 __ j(above_equal, &slow); |
| 837 |
| 838 // Read the argument from the stack and return it. |
| 839 // Shifting code depends on SmiEncoding being equivalent to left shift: |
| 840 // we multiply by four to get pointer alignment. |
| 841 ASSERT(kSmiTagSize == 1 && kSmiTag == 0); |
| 842 __ lea(rbx, Operand(rbx, rcx, kTimes4, 0)); |
| 843 __ neg(rdx); |
| 844 __ movq(rax, Operand(rbx, rdx, kTimes4, kDisplacement)); |
| 845 __ Ret(); |
| 846 |
| 847 // Slow-case: Handle non-smi or out-of-bounds access to arguments |
| 848 // by calling the runtime system. |
| 849 __ bind(&slow); |
| 850 __ pop(rbx); // Return address. |
| 851 __ push(rdx); |
| 852 __ push(rbx); |
| 853 __ TailCallRuntime(ExternalReference(Runtime::kGetArgumentsProperty), 1); |
| 854 } |
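[Note] Why a single unsigned comparison suffices for the bounds checks above: a negative smi key reinterpreted as unsigned is larger than any plausible argument count, so one branch covers both key < 0 and key >= count. A sketch:

  #include <cstdint>

  bool SmiKeyInBounds(int64_t key, int64_t count) {
    // key < 0 wraps to a huge unsigned value, failing the comparison.
    return static_cast<uint64_t>(key) < static_cast<uint64_t>(count);
  }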
| 855 |
| 856 |
| 857 void ArgumentsAccessStub::GenerateReadLength(MacroAssembler* masm) { |
| 858 // Check if the calling frame is an arguments adaptor frame. |
| 859 Label adaptor; |
| 860 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); |
| 861 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset)); |
| 862 __ cmpq(rcx, Immediate(ArgumentsAdaptorFrame::SENTINEL)); |
| 863 __ j(equal, &adaptor); |
| 864 |
| 865 // Nothing to do: The formal number of parameters has already been |
| 866 // passed in register rax by the calling function. Just return it. |
| 867 __ ret(0); |
| 868 |
| 869 // Arguments adaptor case: Read the arguments length from the |
| 870 // adaptor frame and return it. |
| 871 __ bind(&adaptor); |
| 872 __ movq(rax, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
| 873 __ ret(0); |
| 874 } |
| 875 |
| 876 |
401 void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) { | 877 void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) { |
402 // Check that the stack contains frame pointer, code pointer, state and | 878 // Check that the stack contains frame pointer, code pointer, state and |
403 // return address in that order. | 879 // return address in that order. |
404 ASSERT_EQ(StackHandlerConstants::kFPOffset + kPointerSize, | 880 ASSERT_EQ(StackHandlerConstants::kFPOffset + kPointerSize, |
405 StackHandlerConstants::kStateOffset); | 881 StackHandlerConstants::kStateOffset); |
406 ASSERT_EQ(StackHandlerConstants::kStateOffset + kPointerSize, | 882 ASSERT_EQ(StackHandlerConstants::kStateOffset + kPointerSize, |
407 StackHandlerConstants::kPCOffset); | 883 StackHandlerConstants::kPCOffset); |
408 | 884 |
409 ExternalReference handler_address(Top::k_handler_address); | 885 ExternalReference handler_address(Top::k_handler_address); |
410 __ movq(kScratchRegister, handler_address); | 886 __ movq(kScratchRegister, handler_address); |
(...skipping 325 matching lines...) |
736 __ pop(r14); | 1212 __ pop(r14); |
737 __ pop(r13); | 1213 __ pop(r13); |
738 __ pop(r12); | 1214 __ pop(r12); |
739 __ addq(rsp, Immediate(2 * kPointerSize)); // remove markers | 1215 __ addq(rsp, Immediate(2 * kPointerSize)); // remove markers |
740 | 1216 |
741 // Restore frame pointer and return. | 1217 // Restore frame pointer and return. |
742 __ pop(rbp); | 1218 __ pop(rbp); |
743 __ ret(0); | 1219 __ ret(0); |
744 } | 1220 } |
745 | 1221 |
746 | |
747 #undef __ | 1222 #undef __ |
748 | 1223 |
749 } } // namespace v8::internal | 1224 } } // namespace v8::internal |