| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 435 matching lines...) |
| 446 case Token::BIT_NOT: | 446 case Token::BIT_NOT: |
| 447 GenerateSmiStubBitNot(masm); | 447 GenerateSmiStubBitNot(masm); |
| 448 break; | 448 break; |
| 449 default: | 449 default: |
| 450 UNREACHABLE(); | 450 UNREACHABLE(); |
| 451 } | 451 } |
| 452 } | 452 } |
| 453 | 453 |
| 454 | 454 |
| 455 void TypeRecordingUnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) { | 455 void TypeRecordingUnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) { |
| 456 NearLabel non_smi; | |
| 457 Label slow; | 456 Label slow; |
| 458 GenerateSmiCodeSub(masm, &non_smi, &slow); | 457 GenerateSmiCodeSub(masm, &slow, &slow, Label::kNear, Label::kNear); |
| 459 __ bind(&non_smi); | |
| 460 __ bind(&slow); | 458 __ bind(&slow); |
| 461 GenerateTypeTransition(masm); | 459 GenerateTypeTransition(masm); |
| 462 } | 460 } |
| 463 | 461 |
| 464 | 462 |
| 465 void TypeRecordingUnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) { | 463 void TypeRecordingUnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) { |
| 466 NearLabel non_smi; | 464 Label non_smi; |
| 467 GenerateSmiCodeBitNot(masm, &non_smi); | 465 GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear); |
| 468 __ bind(&non_smi); | 466 __ bind(&non_smi); |
| 469 GenerateTypeTransition(masm); | 467 GenerateTypeTransition(masm); |
| 470 } | 468 } |
| 471 | 469 |
| 472 | 470 |
| 473 void TypeRecordingUnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm, | 471 void TypeRecordingUnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm, |
| 474 NearLabel* non_smi, | 472 Label* non_smi, |
| 475 Label* slow) { | 473 Label* slow, |
| 476 NearLabel done; | 474 Label::Distance non_smi_near, |
| 477 __ JumpIfNotSmi(rax, non_smi); | 475 Label::Distance slow_near) { |
| 478 __ SmiNeg(rax, rax, &done); | 476 Label done; |
| 479 __ jmp(slow); | 477 __ JumpIfNotSmi(rax, non_smi, non_smi_near); |
| 478 __ SmiNeg(rax, rax, &done, Label::kNear); |
| 479 __ jmp(slow, slow_near); |
| 480 __ bind(&done); | 480 __ bind(&done); |
| 481 __ ret(0); | 481 __ ret(0); |
| 482 } | 482 } |
| 483 | 483 |
| 484 | 484 |
| 485 void TypeRecordingUnaryOpStub::GenerateSmiCodeBitNot(MacroAssembler* masm, | 485 void TypeRecordingUnaryOpStub::GenerateSmiCodeBitNot( |
| 486 NearLabel* non_smi) { | 486 MacroAssembler* masm, |
| 487 __ JumpIfNotSmi(rax, non_smi); | 487 Label* non_smi, |
| 488 Label::Distance non_smi_near) { |
| 489 __ JumpIfNotSmi(rax, non_smi, non_smi_near); |
| 488 __ SmiNot(rax, rax); | 490 __ SmiNot(rax, rax); |
| 489 __ ret(0); | 491 __ ret(0); |
| 490 } | 492 } |
| 491 | 493 |
| 492 | 494 |
| 493 // TODO(svenpanne): Use virtual functions instead of switch. | 495 // TODO(svenpanne): Use virtual functions instead of switch. |
| 494 void TypeRecordingUnaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { | 496 void TypeRecordingUnaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { |
| 495 switch (op_) { | 497 switch (op_) { |
| 496 case Token::SUB: | 498 case Token::SUB: |
| 497 GenerateHeapNumberStubSub(masm); | 499 GenerateHeapNumberStubSub(masm); |
| 498 break; | 500 break; |
| 499 case Token::BIT_NOT: | 501 case Token::BIT_NOT: |
| 500 GenerateHeapNumberStubBitNot(masm); | 502 GenerateHeapNumberStubBitNot(masm); |
| 501 break; | 503 break; |
| 502 default: | 504 default: |
| 503 UNREACHABLE(); | 505 UNREACHABLE(); |
| 504 } | 506 } |
| 505 } | 507 } |
| 506 | 508 |
| 507 | 509 |
| 508 void TypeRecordingUnaryOpStub::GenerateHeapNumberStubSub(MacroAssembler* masm) { | 510 void TypeRecordingUnaryOpStub::GenerateHeapNumberStubSub(MacroAssembler* masm) { |
| 509 NearLabel non_smi; | 511 Label non_smi, slow; |
| 510 Label slow; | 512 GenerateSmiCodeSub(masm, &non_smi, &slow, Label::kNear); |
| 511 GenerateSmiCodeSub(masm, &non_smi, &slow); | |
| 512 __ bind(&non_smi); | 513 __ bind(&non_smi); |
| 513 GenerateHeapNumberCodeSub(masm, &slow); | 514 GenerateHeapNumberCodeSub(masm, &slow); |
| 514 __ bind(&slow); | 515 __ bind(&slow); |
| 515 GenerateTypeTransition(masm); | 516 GenerateTypeTransition(masm); |
| 516 } | 517 } |
| 517 | 518 |
| 518 | 519 |
| 519 void TypeRecordingUnaryOpStub::GenerateHeapNumberStubBitNot( | 520 void TypeRecordingUnaryOpStub::GenerateHeapNumberStubBitNot( |
| 520 MacroAssembler* masm) { | 521 MacroAssembler* masm) { |
| 521 NearLabel non_smi; | 522 Label non_smi, slow; |
| 522 Label slow; | 523 GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear); |
| 523 GenerateSmiCodeBitNot(masm, &non_smi); | |
| 524 __ bind(&non_smi); | 524 __ bind(&non_smi); |
| 525 GenerateHeapNumberCodeBitNot(masm, &slow); | 525 GenerateHeapNumberCodeBitNot(masm, &slow); |
| 526 __ bind(&slow); | 526 __ bind(&slow); |
| 527 GenerateTypeTransition(masm); | 527 GenerateTypeTransition(masm); |
| 528 } | 528 } |
| 529 | 529 |
| 530 | 530 |
| 531 void TypeRecordingUnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm, | 531 void TypeRecordingUnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm, |
| 532 Label* slow) { | 532 Label* slow) { |
| 533 // Check if the operand is a heap number. | 533 // Check if the operand is a heap number. |
| (...skipping 62 matching lines...) |
| 596 case Token::BIT_NOT: | 596 case Token::BIT_NOT: |
| 597 GenerateGenericStubBitNot(masm); | 597 GenerateGenericStubBitNot(masm); |
| 598 break; | 598 break; |
| 599 default: | 599 default: |
| 600 UNREACHABLE(); | 600 UNREACHABLE(); |
| 601 } | 601 } |
| 602 } | 602 } |
| 603 | 603 |
| 604 | 604 |
| 605 void TypeRecordingUnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) { | 605 void TypeRecordingUnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) { |
| 606 NearLabel non_smi; | 606 Label non_smi, slow; |
| 607 Label slow; | 607 GenerateSmiCodeSub(masm, &non_smi, &slow, Label::kNear); |
| 608 GenerateSmiCodeSub(masm, &non_smi, &slow); | |
| 609 __ bind(&non_smi); | 608 __ bind(&non_smi); |
| 610 GenerateHeapNumberCodeSub(masm, &slow); | 609 GenerateHeapNumberCodeSub(masm, &slow); |
| 611 __ bind(&slow); | 610 __ bind(&slow); |
| 612 GenerateGenericCodeFallback(masm); | 611 GenerateGenericCodeFallback(masm); |
| 613 } | 612 } |
| 614 | 613 |
| 615 | 614 |
| 616 void TypeRecordingUnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) { | 615 void TypeRecordingUnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) { |
| 617 NearLabel non_smi; | 616 Label non_smi, slow; |
| 618 Label slow; | 617 GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear); |
| 619 GenerateSmiCodeBitNot(masm, &non_smi); | |
| 620 __ bind(&non_smi); | 618 __ bind(&non_smi); |
| 621 GenerateHeapNumberCodeBitNot(masm, &slow); | 619 GenerateHeapNumberCodeBitNot(masm, &slow); |
| 622 __ bind(&slow); | 620 __ bind(&slow); |
| 623 GenerateGenericCodeFallback(masm); | 621 GenerateGenericCodeFallback(masm); |
| 624 } | 622 } |
| 625 | 623 |
| 626 | 624 |
| 627 void TypeRecordingUnaryOpStub::GenerateGenericCodeFallback( | 625 void TypeRecordingUnaryOpStub::GenerateGenericCodeFallback( |
| 628 MacroAssembler* masm) { | 626 MacroAssembler* masm) { |
| 629 // Handle the slow case by jumping to the JavaScript builtin. | 627 // Handle the slow case by jumping to the JavaScript builtin. |
| (...skipping 376 matching lines...) |
| 1006 // No fall-through from this generated code. | 1004 // No fall-through from this generated code. |
| 1007 if (FLAG_debug_code) { | 1005 if (FLAG_debug_code) { |
| 1008 __ Abort("Unexpected fall-through in " | 1006 __ Abort("Unexpected fall-through in " |
| 1009 "TypeRecordingBinaryStub::GenerateFloatingPointCode."); | 1007 "TypeRecordingBinaryStub::GenerateFloatingPointCode."); |
| 1010 } | 1008 } |
| 1011 } | 1009 } |
| 1012 | 1010 |
| 1013 | 1011 |
| 1014 void TypeRecordingBinaryOpStub::GenerateStringAddCode(MacroAssembler* masm) { | 1012 void TypeRecordingBinaryOpStub::GenerateStringAddCode(MacroAssembler* masm) { |
| 1015 ASSERT(op_ == Token::ADD); | 1013 ASSERT(op_ == Token::ADD); |
| 1016 NearLabel left_not_string, call_runtime; | 1014 Label left_not_string, call_runtime; |
| 1017 | 1015 |
| 1018 // Registers containing left and right operands respectively. | 1016 // Registers containing left and right operands respectively. |
| 1019 Register left = rdx; | 1017 Register left = rdx; |
| 1020 Register right = rax; | 1018 Register right = rax; |
| 1021 | 1019 |
| 1022 // Test if left operand is a string. | 1020 // Test if left operand is a string. |
| 1023 __ JumpIfSmi(left, &left_not_string); | 1021 __ JumpIfSmi(left, &left_not_string, Label::kNear); |
| 1024 __ CmpObjectType(left, FIRST_NONSTRING_TYPE, rcx); | 1022 __ CmpObjectType(left, FIRST_NONSTRING_TYPE, rcx); |
| 1025 __ j(above_equal, &left_not_string); | 1023 __ j(above_equal, &left_not_string, Label::kNear); |
| 1026 StringAddStub string_add_left_stub(NO_STRING_CHECK_LEFT_IN_STUB); | 1024 StringAddStub string_add_left_stub(NO_STRING_CHECK_LEFT_IN_STUB); |
| 1027 GenerateRegisterArgsPush(masm); | 1025 GenerateRegisterArgsPush(masm); |
| 1028 __ TailCallStub(&string_add_left_stub); | 1026 __ TailCallStub(&string_add_left_stub); |
| 1029 | 1027 |
| 1030 // Left operand is not a string, test right. | 1028 // Left operand is not a string, test right. |
| 1031 __ bind(&left_not_string); | 1029 __ bind(&left_not_string); |
| 1032 __ JumpIfSmi(right, &call_runtime); | 1030 __ JumpIfSmi(right, &call_runtime, Label::kNear); |
| 1033 __ CmpObjectType(right, FIRST_NONSTRING_TYPE, rcx); | 1031 __ CmpObjectType(right, FIRST_NONSTRING_TYPE, rcx); |
| 1034 __ j(above_equal, &call_runtime); | 1032 __ j(above_equal, &call_runtime, Label::kNear); |
| 1035 | 1033 |
| 1036 StringAddStub string_add_right_stub(NO_STRING_CHECK_RIGHT_IN_STUB); | 1034 StringAddStub string_add_right_stub(NO_STRING_CHECK_RIGHT_IN_STUB); |
| 1037 GenerateRegisterArgsPush(masm); | 1035 GenerateRegisterArgsPush(masm); |
| 1038 __ TailCallStub(&string_add_right_stub); | 1036 __ TailCallStub(&string_add_right_stub); |
| 1039 | 1037 |
| 1040 // Neither argument is a string. | 1038 // Neither argument is a string. |
| 1041 __ bind(&call_runtime); | 1039 __ bind(&call_runtime); |
| 1042 } | 1040 } |
| 1043 | 1041 |
| 1044 | 1042 |
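For readers skimming the hunk above, the control flow generated by GenerateStringAddCode can be summarized in ordinary C++ as below (the enum and helper are purely illustrative, not part of V8): pick the StringAddStub variant that can skip re-checking the operand already known to be a string, and fall back to the runtime only when neither operand is a string.

enum class AddKind { kLeftIsString, kRightIsString, kRuntime };

// Mirrors the generated checks: the left operand is tested first, then the right.
AddKind ClassifyStringAdd(bool left_is_string, bool right_is_string) {
  if (left_is_string) return AddKind::kLeftIsString;    // NO_STRING_CHECK_LEFT_IN_STUB
  if (right_is_string) return AddKind::kRightIsString;  // NO_STRING_CHECK_RIGHT_IN_STUB
  return AddKind::kRuntime;                             // neither operand is a string
}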
| (...skipping 224 matching lines...) |
| 1269 // rsp[0]: return address. | 1267 // rsp[0]: return address. |
| 1270 // xmm1: untagged double input argument | 1268 // xmm1: untagged double input argument |
| 1271 // Output: | 1269 // Output: |
| 1272 // xmm1: untagged double result. | 1270 // xmm1: untagged double result. |
| 1273 | 1271 |
| 1274 Label runtime_call; | 1272 Label runtime_call; |
| 1275 Label runtime_call_clear_stack; | 1273 Label runtime_call_clear_stack; |
| 1276 Label skip_cache; | 1274 Label skip_cache; |
| 1277 const bool tagged = (argument_type_ == TAGGED); | 1275 const bool tagged = (argument_type_ == TAGGED); |
| 1278 if (tagged) { | 1276 if (tagged) { |
| 1279 NearLabel input_not_smi; | 1277 Label input_not_smi, loaded; |
| 1280 Label loaded; | |
| 1281 // Test that rax is a number. | 1278 // Test that rax is a number. |
| 1282 __ movq(rax, Operand(rsp, kPointerSize)); | 1279 __ movq(rax, Operand(rsp, kPointerSize)); |
| 1283 __ JumpIfNotSmi(rax, &input_not_smi); | 1280 __ JumpIfNotSmi(rax, &input_not_smi, Label::kNear); |
| 1284 // Input is a smi. Untag and load it onto the FPU stack. | 1281 // Input is a smi. Untag and load it onto the FPU stack. |
| 1285 // Then load the bits of the double into rbx. | 1282 // Then load the bits of the double into rbx. |
| 1286 __ SmiToInteger32(rax, rax); | 1283 __ SmiToInteger32(rax, rax); |
| 1287 __ subq(rsp, Immediate(kDoubleSize)); | 1284 __ subq(rsp, Immediate(kDoubleSize)); |
| 1288 __ cvtlsi2sd(xmm1, rax); | 1285 __ cvtlsi2sd(xmm1, rax); |
| 1289 __ movsd(Operand(rsp, 0), xmm1); | 1286 __ movsd(Operand(rsp, 0), xmm1); |
| 1290 __ movq(rbx, xmm1); | 1287 __ movq(rbx, xmm1); |
| 1291 __ movq(rdx, xmm1); | 1288 __ movq(rdx, xmm1); |
| 1292 __ fld_d(Operand(rsp, 0)); | 1289 __ fld_d(Operand(rsp, 0)); |
| 1293 __ addq(rsp, Immediate(kDoubleSize)); | 1290 __ addq(rsp, Immediate(kDoubleSize)); |
| (...skipping 397 matching lines...) |
| 1691 Register scratch2, | 1688 Register scratch2, |
| 1692 Register scratch3, | 1689 Register scratch3, |
| 1693 Label* on_success, | 1690 Label* on_success, |
| 1694 Label* on_not_smis) { | 1691 Label* on_not_smis) { |
| 1695 Register heap_number_map = scratch3; | 1692 Register heap_number_map = scratch3; |
| 1696 Register smi_result = scratch1; | 1693 Register smi_result = scratch1; |
| 1697 Label done; | 1694 Label done; |
| 1698 | 1695 |
| 1699 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex); | 1696 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex); |
| 1700 | 1697 |
| 1701 NearLabel first_smi; | 1698 Label first_smi; |
| 1702 __ JumpIfSmi(first, &first_smi); | 1699 __ JumpIfSmi(first, &first_smi, Label::kNear); |
| 1703 __ cmpq(FieldOperand(first, HeapObject::kMapOffset), heap_number_map); | 1700 __ cmpq(FieldOperand(first, HeapObject::kMapOffset), heap_number_map); |
| 1704 __ j(not_equal, on_not_smis); | 1701 __ j(not_equal, on_not_smis); |
| 1705 // Convert HeapNumber to smi if possible. | 1702 // Convert HeapNumber to smi if possible. |
| 1706 __ movsd(xmm0, FieldOperand(first, HeapNumber::kValueOffset)); | 1703 __ movsd(xmm0, FieldOperand(first, HeapNumber::kValueOffset)); |
| 1707 __ movq(scratch2, xmm0); | 1704 __ movq(scratch2, xmm0); |
| 1708 __ cvttsd2siq(smi_result, xmm0); | 1705 __ cvttsd2siq(smi_result, xmm0); |
| 1709 // Check if conversion was successful by converting back and | 1706 // Check if conversion was successful by converting back and |
| 1710 // comparing to the original double's bits. | 1707 // comparing to the original double's bits. |
| 1711 __ cvtlsi2sd(xmm1, smi_result); | 1708 __ cvtlsi2sd(xmm1, smi_result); |
| 1712 __ movq(kScratchRegister, xmm1); | 1709 __ movq(kScratchRegister, xmm1); |
| (...skipping 107 matching lines...) |
| 1820 // on doubles. | 1817 // on doubles. |
| 1821 __ bind(&exponent_nonsmi); | 1818 __ bind(&exponent_nonsmi); |
| 1822 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), | 1819 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), |
| 1823 Heap::kHeapNumberMapRootIndex); | 1820 Heap::kHeapNumberMapRootIndex); |
| 1824 __ j(not_equal, &call_runtime); | 1821 __ j(not_equal, &call_runtime); |
| 1825 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset)); | 1822 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset)); |
| 1826 // Test if exponent is nan. | 1823 // Test if exponent is nan. |
| 1827 __ ucomisd(xmm1, xmm1); | 1824 __ ucomisd(xmm1, xmm1); |
| 1828 __ j(parity_even, &call_runtime); | 1825 __ j(parity_even, &call_runtime); |
| 1829 | 1826 |
| 1830 NearLabel base_not_smi; | 1827 Label base_not_smi, handle_special_cases; |
| 1831 Label handle_special_cases; | 1828 __ JumpIfNotSmi(rdx, &base_not_smi, Label::kNear); |
| 1832 __ JumpIfNotSmi(rdx, &base_not_smi); | |
| 1833 __ SmiToInteger32(rdx, rdx); | 1829 __ SmiToInteger32(rdx, rdx); |
| 1834 __ cvtlsi2sd(xmm0, rdx); | 1830 __ cvtlsi2sd(xmm0, rdx); |
| 1835 __ jmp(&handle_special_cases, Label::kNear); | 1831 __ jmp(&handle_special_cases, Label::kNear); |
| 1836 | 1832 |
| 1837 __ bind(&base_not_smi); | 1833 __ bind(&base_not_smi); |
| 1838 __ CompareRoot(FieldOperand(rdx, HeapObject::kMapOffset), | 1834 __ CompareRoot(FieldOperand(rdx, HeapObject::kMapOffset), |
| 1839 Heap::kHeapNumberMapRootIndex); | 1835 Heap::kHeapNumberMapRootIndex); |
| 1840 __ j(not_equal, &call_runtime); | 1836 __ j(not_equal, &call_runtime); |
| 1841 __ movl(rcx, FieldOperand(rdx, HeapNumber::kExponentOffset)); | 1837 __ movl(rcx, FieldOperand(rdx, HeapNumber::kExponentOffset)); |
| 1842 __ andl(rcx, Immediate(HeapNumber::kExponentMask)); | 1838 __ andl(rcx, Immediate(HeapNumber::kExponentMask)); |
| (...skipping 2672 matching lines...) |
| 4515 Label compare_chars; | 4511 Label compare_chars; |
| 4516 __ bind(&check_zero_length); | 4512 __ bind(&check_zero_length); |
| 4517 STATIC_ASSERT(kSmiTag == 0); | 4513 STATIC_ASSERT(kSmiTag == 0); |
| 4518 __ SmiTest(length); | 4514 __ SmiTest(length); |
| 4519 __ j(not_zero, &compare_chars, Label::kNear); | 4515 __ j(not_zero, &compare_chars, Label::kNear); |
| 4520 __ Move(rax, Smi::FromInt(EQUAL)); | 4516 __ Move(rax, Smi::FromInt(EQUAL)); |
| 4521 __ ret(0); | 4517 __ ret(0); |
| 4522 | 4518 |
| 4523 // Compare characters. | 4519 // Compare characters. |
| 4524 __ bind(&compare_chars); | 4520 __ bind(&compare_chars); |
| 4525 NearLabel strings_not_equal; | 4521 Label strings_not_equal; |
| 4526 GenerateAsciiCharsCompareLoop(masm, left, right, length, scratch2, | 4522 GenerateAsciiCharsCompareLoop(masm, left, right, length, scratch2, |
| 4527 &strings_not_equal); | 4523 &strings_not_equal, Label::kNear); |
| 4528 | 4524 |
| 4529 // Characters are equal. | 4525 // Characters are equal. |
| 4530 __ Move(rax, Smi::FromInt(EQUAL)); | 4526 __ Move(rax, Smi::FromInt(EQUAL)); |
| 4531 __ ret(0); | 4527 __ ret(0); |
| 4532 | 4528 |
| 4533 // Characters are not equal. | 4529 // Characters are not equal. |
| 4534 __ bind(&strings_not_equal); | 4530 __ bind(&strings_not_equal); |
| 4535 __ Move(rax, Smi::FromInt(NOT_EQUAL)); | 4531 __ Move(rax, Smi::FromInt(NOT_EQUAL)); |
| 4536 __ ret(0); | 4532 __ ret(0); |
| 4537 } | 4533 } |
| (...skipping 27 matching lines...) |
| 4565 __ bind(&left_shorter); | 4561 __ bind(&left_shorter); |
| 4566 // Register scratch1 now holds Min(left.length, right.length). | 4562 // Register scratch1 now holds Min(left.length, right.length). |
| 4567 const Register min_length = scratch1; | 4563 const Register min_length = scratch1; |
| 4568 | 4564 |
| 4569 Label compare_lengths; | 4565 Label compare_lengths; |
| 4570 // If min-length is zero, go directly to comparing lengths. | 4566 // If min-length is zero, go directly to comparing lengths. |
| 4571 __ SmiTest(min_length); | 4567 __ SmiTest(min_length); |
| 4572 __ j(zero, &compare_lengths, Label::kNear); | 4568 __ j(zero, &compare_lengths, Label::kNear); |
| 4573 | 4569 |
| 4574 // Compare loop. | 4570 // Compare loop. |
| 4575 NearLabel result_not_equal; | 4571 Label result_not_equal; |
| 4576 GenerateAsciiCharsCompareLoop(masm, left, right, min_length, scratch2, | 4572 GenerateAsciiCharsCompareLoop(masm, left, right, min_length, scratch2, |
| 4577 &result_not_equal); | 4573 &result_not_equal, Label::kNear); |
| 4578 | 4574 |
| 4579 // Completed loop without finding different characters. | 4575 // Completed loop without finding different characters. |
| 4580 // Compare lengths (precomputed). | 4576 // Compare lengths (precomputed). |
| 4581 __ bind(&compare_lengths); | 4577 __ bind(&compare_lengths); |
| 4582 __ SmiTest(length_difference); | 4578 __ SmiTest(length_difference); |
| 4583 __ j(not_zero, &result_not_equal); | 4579 __ j(not_zero, &result_not_equal, Label::kNear); |
| 4584 | 4580 |
| 4585 // Result is EQUAL. | 4581 // Result is EQUAL. |
| 4586 __ Move(rax, Smi::FromInt(EQUAL)); | 4582 __ Move(rax, Smi::FromInt(EQUAL)); |
| 4587 __ ret(0); | 4583 __ ret(0); |
| 4588 | 4584 |
| 4589 Label result_greater; | 4585 Label result_greater; |
| 4590 __ bind(&result_not_equal); | 4586 __ bind(&result_not_equal); |
| 4591 // Unequal comparison of left to right, either character or length. | 4587 // Unequal comparison of left to right, either character or length. |
| 4592 __ j(greater, &result_greater, Label::kNear); | 4588 __ j(greater, &result_greater, Label::kNear); |
| 4593 | 4589 |
| 4594 // Result is LESS. | 4590 // Result is LESS. |
| 4595 __ Move(rax, Smi::FromInt(LESS)); | 4591 __ Move(rax, Smi::FromInt(LESS)); |
| 4596 __ ret(0); | 4592 __ ret(0); |
| 4597 | 4593 |
| 4598 // Result is GREATER. | 4594 // Result is GREATER. |
| 4599 __ bind(&result_greater); | 4595 __ bind(&result_greater); |
| 4600 __ Move(rax, Smi::FromInt(GREATER)); | 4596 __ Move(rax, Smi::FromInt(GREATER)); |
| 4601 __ ret(0); | 4597 __ ret(0); |
| 4602 } | 4598 } |
| 4603 | 4599 |
| 4604 | 4600 |
| 4605 void StringCompareStub::GenerateAsciiCharsCompareLoop( | 4601 void StringCompareStub::GenerateAsciiCharsCompareLoop( |
| 4606 MacroAssembler* masm, | 4602 MacroAssembler* masm, |
| 4607 Register left, | 4603 Register left, |
| 4608 Register right, | 4604 Register right, |
| 4609 Register length, | 4605 Register length, |
| 4610 Register scratch, | 4606 Register scratch, |
| 4611 NearLabel* chars_not_equal) { | 4607 Label* chars_not_equal, |
| 4608 Label::Distance near_jump) { |
| 4612 // Change index to run from -length to -1 by adding length to string | 4609 // Change index to run from -length to -1 by adding length to string |
| 4613 // start. This means that loop ends when index reaches zero, which | 4610 // start. This means that loop ends when index reaches zero, which |
| 4614 // doesn't need an additional compare. | 4611 // doesn't need an additional compare. |
| 4615 __ SmiToInteger32(length, length); | 4612 __ SmiToInteger32(length, length); |
| 4616 __ lea(left, | 4613 __ lea(left, |
| 4617 FieldOperand(left, length, times_1, SeqAsciiString::kHeaderSize)); | 4614 FieldOperand(left, length, times_1, SeqAsciiString::kHeaderSize)); |
| 4618 __ lea(right, | 4615 __ lea(right, |
| 4619 FieldOperand(right, length, times_1, SeqAsciiString::kHeaderSize)); | 4616 FieldOperand(right, length, times_1, SeqAsciiString::kHeaderSize)); |
| 4620 __ neg(length); | 4617 __ neg(length); |
| 4621 Register index = length; // index = -length; | 4618 Register index = length; // index = -length; |
| 4622 | 4619 |
| 4623 // Compare loop. | 4620 // Compare loop. |
| 4624 Label loop; | 4621 Label loop; |
| 4625 __ bind(&loop); | 4622 __ bind(&loop); |
| 4626 __ movb(scratch, Operand(left, index, times_1, 0)); | 4623 __ movb(scratch, Operand(left, index, times_1, 0)); |
| 4627 __ cmpb(scratch, Operand(right, index, times_1, 0)); | 4624 __ cmpb(scratch, Operand(right, index, times_1, 0)); |
| 4628 __ j(not_equal, chars_not_equal); | 4625 __ j(not_equal, chars_not_equal, near_jump); |
| 4629 __ addq(index, Immediate(1)); | 4626 __ addq(index, Immediate(1)); |
| 4630 __ j(not_zero, &loop); | 4627 __ j(not_zero, &loop); |
| 4631 } | 4628 } |
| 4632 | 4629 |
| 4633 | 4630 |
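The indexing trick in GenerateAsciiCharsCompareLoop (running the index from -length up to zero so the increment doubles as the loop-termination test) translates to plain C++ roughly as follows; this is an illustrative sketch, not the emitted code.

// Compare `length` ASCII characters of two strings, reporting inequality the
// same way the generated loop jumps to chars_not_equal.
bool AsciiCharsEqual(const char* left, const char* right, long length) {
  const char* left_end = left + length;    // points one past the last char
  const char* right_end = right + length;
  for (long index = -length; index != 0; ++index) {
    if (left_end[index] != right_end[index]) {
      return false;  // corresponds to jumping to chars_not_equal
    }
  }
  return true;  // loop fell through: all characters matched
}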
| 4634 void StringCompareStub::Generate(MacroAssembler* masm) { | 4631 void StringCompareStub::Generate(MacroAssembler* masm) { |
| 4635 Label runtime; | 4632 Label runtime; |
| 4636 | 4633 |
| 4637 // Stack frame on entry. | 4634 // Stack frame on entry. |
| 4638 // rsp[0]: return address | 4635 // rsp[0]: return address |
| (...skipping 27 matching lines...) |
| 4666 | 4663 |
| 4667 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) | 4664 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) |
| 4668 // tagged as a small integer. | 4665 // tagged as a small integer. |
| 4669 __ bind(&runtime); | 4666 __ bind(&runtime); |
| 4670 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); | 4667 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); |
| 4671 } | 4668 } |
| 4672 | 4669 |
| 4673 | 4670 |
| 4674 void ICCompareStub::GenerateSmis(MacroAssembler* masm) { | 4671 void ICCompareStub::GenerateSmis(MacroAssembler* masm) { |
| 4675 ASSERT(state_ == CompareIC::SMIS); | 4672 ASSERT(state_ == CompareIC::SMIS); |
| 4676 NearLabel miss; | 4673 Label miss; |
| 4677 __ JumpIfNotBothSmi(rdx, rax, &miss); | 4674 __ JumpIfNotBothSmi(rdx, rax, &miss, Label::kNear); |
| 4678 | 4675 |
| 4679 if (GetCondition() == equal) { | 4676 if (GetCondition() == equal) { |
| 4680 // For equality we do not care about the sign of the result. | 4677 // For equality we do not care about the sign of the result. |
| 4681 __ subq(rax, rdx); | 4678 __ subq(rax, rdx); |
| 4682 } else { | 4679 } else { |
| 4683 Label done; | 4680 Label done; |
| 4684 __ subq(rdx, rax); | 4681 __ subq(rdx, rax); |
| 4685 __ j(no_overflow, &done, Label::kNear); | 4682 __ j(no_overflow, &done, Label::kNear); |
| 4686 // Correct sign of result in case of overflow. | 4683 // Correct sign of result in case of overflow. |
| 4687 __ SmiNot(rdx, rdx); | 4684 __ SmiNot(rdx, rdx); |
| (...skipping 416 matching lines...) |
| 5104 __ Drop(1); | 5101 __ Drop(1); |
| 5105 __ ret(2 * kPointerSize); | 5102 __ ret(2 * kPointerSize); |
| 5106 } | 5103 } |
| 5107 | 5104 |
| 5108 | 5105 |
| 5109 #undef __ | 5106 #undef __ |
| 5110 | 5107 |
| 5111 } } // namespace v8::internal | 5108 } } // namespace v8::internal |
| 5112 | 5109 |
| 5113 #endif // V8_TARGET_ARCH_X64 | 5110 #endif // V8_TARGET_ARCH_X64 |
| OLD | NEW |