| OLD | NEW |
| 1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 23 matching lines...) Expand all Loading... |
| 34 #include "regexp-macro-assembler.h" | 34 #include "regexp-macro-assembler.h" |
| 35 | 35 |
| 36 namespace v8 { | 36 namespace v8 { |
| 37 namespace internal { | 37 namespace internal { |
| 38 | 38 |
| 39 | 39 |
| 40 #define __ ACCESS_MASM(masm) | 40 #define __ ACCESS_MASM(masm) |
| 41 | 41 |
| 42 static void EmitIdenticalObjectComparison(MacroAssembler* masm, | 42 static void EmitIdenticalObjectComparison(MacroAssembler* masm, |
| 43 Label* slow, | 43 Label* slow, |
| 44 Condition cc, | 44 Condition cond, |
| 45 bool never_nan_nan); | 45 bool never_nan_nan); |
| 46 static void EmitSmiNonsmiComparison(MacroAssembler* masm, | 46 static void EmitSmiNonsmiComparison(MacroAssembler* masm, |
| 47 Register lhs, | 47 Register lhs, |
| 48 Register rhs, | 48 Register rhs, |
| 49 Label* lhs_not_nan, | 49 Label* lhs_not_nan, |
| 50 Label* slow, | 50 Label* slow, |
| 51 bool strict); | 51 bool strict); |
| 52 static void EmitTwoNonNanDoubleComparison(MacroAssembler* masm, Condition cc); | 52 static void EmitTwoNonNanDoubleComparison(MacroAssembler* masm, Condition cond); |
| 53 static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm, | 53 static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm, |
| 54 Register lhs, | 54 Register lhs, |
| 55 Register rhs); | 55 Register rhs); |
| 56 | 56 |
| 57 | 57 |
| 58 void FastNewClosureStub::Generate(MacroAssembler* masm) { | 58 void FastNewClosureStub::Generate(MacroAssembler* masm) { |
| 59 // Create a new closure from the given function info in new | 59 // Create a new closure from the given function info in new |
| 60 // space. Set the context to the current context in cp. | 60 // space. Set the context to the current context in cp. |
| 61 Label gc; | 61 Label gc; |
| 62 | 62 |
| (...skipping 324 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 387 __ str(ip, FieldMemOperand(the_heap_number_, HeapNumber::kMantissaOffset)); | 387 __ str(ip, FieldMemOperand(the_heap_number_, HeapNumber::kMantissaOffset)); |
| 388 __ Ret(); | 388 __ Ret(); |
| 389 } | 389 } |
| 390 | 390 |
| 391 | 391 |
| 392 // Handle the case where the lhs and rhs are the same object. | 392 // Handle the case where the lhs and rhs are the same object. |
| 393 // Equality is almost reflexive (everything but NaN), so this is a test | 393 // Equality is almost reflexive (everything but NaN), so this is a test |
| 394 // for "identity and not NaN". | 394 // for "identity and not NaN". |
| 395 static void EmitIdenticalObjectComparison(MacroAssembler* masm, | 395 static void EmitIdenticalObjectComparison(MacroAssembler* masm, |
| 396 Label* slow, | 396 Label* slow, |
| 397 Condition cc, | 397 Condition cond, |
| 398 bool never_nan_nan) { | 398 bool never_nan_nan) { |
| 399 Label not_identical; | 399 Label not_identical; |
| 400 Label heap_number, return_equal; | 400 Label heap_number, return_equal; |
| 401 __ cmp(r0, r1); | 401 __ cmp(r0, r1); |
| 402 __ b(ne, &not_identical); | 402 __ b(ne, &not_identical); |
| 403 | 403 |
| 404 // The two objects are identical. If we know that one of them isn't NaN then | 404 // The two objects are identical. If we know that one of them isn't NaN then |
| 405 // we now know they test equal. | 405 // we now know they test equal. |
| 406 if (cc != eq || !never_nan_nan) { | 406 if (cond != eq || !never_nan_nan) { |
| 407 // Test for NaN. Sadly, we can't just compare to Factory::nan_value(), | 407 // Test for NaN. Sadly, we can't just compare to Factory::nan_value(), |
| 408 // so we do the second best thing - test it ourselves. | 408 // so we do the second best thing - test it ourselves. |
| 409 // They are both equal and they are not both Smis so both of them are not | 409 // They are both equal and they are not both Smis so both of them are not |
| 410 // Smis. If it's not a heap number, then return equal. | 410 // Smis. If it's not a heap number, then return equal. |
| 411 if (cc == lt || cc == gt) { | 411 if (cond == lt || cond == gt) { |
| 412 __ CompareObjectType(r0, r4, r4, FIRST_JS_OBJECT_TYPE); | 412 __ CompareObjectType(r0, r4, r4, FIRST_JS_OBJECT_TYPE); |
| 413 __ b(ge, slow); | 413 __ b(ge, slow); |
| 414 } else { | 414 } else { |
| 415 __ CompareObjectType(r0, r4, r4, HEAP_NUMBER_TYPE); | 415 __ CompareObjectType(r0, r4, r4, HEAP_NUMBER_TYPE); |
| 416 __ b(eq, &heap_number); | 416 __ b(eq, &heap_number); |
| 417 // Comparing JS objects with <=, >= is complicated. | 417 // Comparing JS objects with <=, >= is complicated. |
| 418 if (cc != eq) { | 418 if (cond != eq) { |
| 419 __ cmp(r4, Operand(FIRST_JS_OBJECT_TYPE)); | 419 __ cmp(r4, Operand(FIRST_JS_OBJECT_TYPE)); |
| 420 __ b(ge, slow); | 420 __ b(ge, slow); |
| 421 // Normally here we fall through to return_equal, but undefined is | 421 // Normally here we fall through to return_equal, but undefined is |
| 422 // special: (undefined == undefined) == true, but | 422 // special: (undefined == undefined) == true, but |
| 423 // (undefined <= undefined) == false! See ECMAScript 11.8.5. | 423 // (undefined <= undefined) == false! See ECMAScript 11.8.5. |
| 424 if (cc == le || cc == ge) { | 424 if (cond == le || cond == ge) { |
| 425 __ cmp(r4, Operand(ODDBALL_TYPE)); | 425 __ cmp(r4, Operand(ODDBALL_TYPE)); |
| 426 __ b(ne, &return_equal); | 426 __ b(ne, &return_equal); |
| 427 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); | 427 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); |
| 428 __ cmp(r0, r2); | 428 __ cmp(r0, r2); |
| 429 __ b(ne, &return_equal); | 429 __ b(ne, &return_equal); |
| 430 if (cc == le) { | 430 if (cond == le) { |
| 431 // undefined <= undefined should fail. | 431 // undefined <= undefined should fail. |
| 432 __ mov(r0, Operand(GREATER)); | 432 __ mov(r0, Operand(GREATER)); |
| 433 } else { | 433 } else { |
| 434 // undefined >= undefined should fail. | 434 // undefined >= undefined should fail. |
| 435 __ mov(r0, Operand(LESS)); | 435 __ mov(r0, Operand(LESS)); |
| 436 } | 436 } |
| 437 __ Ret(); | 437 __ Ret(); |
| 438 } | 438 } |
| 439 } | 439 } |
| 440 } | 440 } |
| 441 } | 441 } |
| 442 | 442 |
| 443 __ bind(&return_equal); | 443 __ bind(&return_equal); |
| 444 if (cc == lt) { | 444 if (cond == lt) { |
| 445 __ mov(r0, Operand(GREATER)); // Things aren't less than themselves. | 445 __ mov(r0, Operand(GREATER)); // Things aren't less than themselves. |
| 446 } else if (cc == gt) { | 446 } else if (cond == gt) { |
| 447 __ mov(r0, Operand(LESS)); // Things aren't greater than themselves. | 447 __ mov(r0, Operand(LESS)); // Things aren't greater than themselves. |
| 448 } else { | 448 } else { |
| 449 __ mov(r0, Operand(EQUAL)); // Things are <=, >=, ==, === themselves. | 449 __ mov(r0, Operand(EQUAL)); // Things are <=, >=, ==, === themselves. |
| 450 } | 450 } |
| 451 __ Ret(); | 451 __ Ret(); |
| 452 | 452 |
| 453 if (cc != eq || !never_nan_nan) { | 453 if (cond != eq || !never_nan_nan) { |
| 454 // For less and greater we don't have to check for NaN since the result of | 454 // For less and greater we don't have to check for NaN since the result of |
| 455 // x < x is false regardless. For the others here is some code to check | 455 // x < x is false regardless. For the others here is some code to check |
| 456 // for NaN. | 456 // for NaN. |
| 457 if (cc != lt && cc != gt) { | 457 if (cond != lt && cond != gt) { |
| 458 __ bind(&heap_number); | 458 __ bind(&heap_number); |
| 459 // It is a heap number, so return non-equal if it's NaN and equal if it's | 459 // It is a heap number, so return non-equal if it's NaN and equal if it's |
| 460 // not NaN. | 460 // not NaN. |
| 461 | 461 |
| 462 // The representation of NaN values has all exponent bits (52..62) set, | 462 // The representation of NaN values has all exponent bits (52..62) set, |
| 463 // and not all mantissa bits (0..51) clear. | 463 // and not all mantissa bits (0..51) clear. |
| 464 // Read top bits of double representation (second word of value). | 464 // Read top bits of double representation (second word of value). |
| 465 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset)); | 465 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset)); |
| 466 // Test that exponent bits are all set. | 466 // Test that exponent bits are all set. |
| 467 __ Sbfx(r3, r2, HeapNumber::kExponentShift, HeapNumber::kExponentBits); | 467 __ Sbfx(r3, r2, HeapNumber::kExponentShift, HeapNumber::kExponentBits); |
| 468 // NaNs have all-one exponents so they sign extend to -1. | 468 // NaNs have all-one exponents so they sign extend to -1. |
| 469 __ cmp(r3, Operand(-1)); | 469 __ cmp(r3, Operand(-1)); |
| 470 __ b(ne, &return_equal); | 470 __ b(ne, &return_equal); |
| 471 | 471 |
| 472 // Shift out flag and all exponent bits, retaining only mantissa. | 472 // Shift out flag and all exponent bits, retaining only mantissa. |
| 473 __ mov(r2, Operand(r2, LSL, HeapNumber::kNonMantissaBitsInTopWord)); | 473 __ mov(r2, Operand(r2, LSL, HeapNumber::kNonMantissaBitsInTopWord)); |
| 474 // Or with all low-bits of mantissa. | 474 // Or with all low-bits of mantissa. |
| 475 __ ldr(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset)); | 475 __ ldr(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset)); |
| 476 __ orr(r0, r3, Operand(r2), SetCC); | 476 __ orr(r0, r3, Operand(r2), SetCC); |
| 477 // For equal we already have the right value in r0: Return zero (equal) | 477 // For equal we already have the right value in r0: Return zero (equal) |
| 478 // if all bits in mantissa are zero (it's an Infinity) and non-zero if | 478 // if all bits in mantissa are zero (it's an Infinity) and non-zero if |
| 479 // not (it's a NaN). For <= and >= we need to load r0 with the failing | 479 // not (it's a NaN). For <= and >= we need to load r0 with the failing |
| 480 // value if it's a NaN. | 480 // value if it's a NaN. |
| 481 if (cc != eq) { | 481 if (cond != eq) { |
| 482 // All-zero means Infinity means equal. | 482 // All-zero means Infinity means equal. |
| 483 __ Ret(eq); | 483 __ Ret(eq); |
| 484 if (cc == le) { | 484 if (cond == le) { |
| 485 __ mov(r0, Operand(GREATER)); // NaN <= NaN should fail. | 485 __ mov(r0, Operand(GREATER)); // NaN <= NaN should fail. |
| 486 } else { | 486 } else { |
| 487 __ mov(r0, Operand(LESS)); // NaN >= NaN should fail. | 487 __ mov(r0, Operand(LESS)); // NaN >= NaN should fail. |
| 488 } | 488 } |
| 489 } | 489 } |
| 490 __ Ret(); | 490 __ Ret(); |
| 491 } | 491 } |
| 492 // No fall through here. | 492 // No fall through here. |
| 493 } | 493 } |
| 494 | 494 |
| (...skipping 86 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 581 // Convert rhs to a double in r0, r1. | 581 // Convert rhs to a double in r0, r1. |
| 582 __ mov(r7, Operand(rhs)); | 582 __ mov(r7, Operand(rhs)); |
| 583 ConvertToDoubleStub stub2(r1, r0, r7, r6); | 583 ConvertToDoubleStub stub2(r1, r0, r7, r6); |
| 584 __ Call(stub2.GetCode(), RelocInfo::CODE_TARGET); | 584 __ Call(stub2.GetCode(), RelocInfo::CODE_TARGET); |
| 585 __ pop(lr); | 585 __ pop(lr); |
| 586 } | 586 } |
| 587 // Fall through to both_loaded_as_doubles. | 587 // Fall through to both_loaded_as_doubles. |
| 588 } | 588 } |
| 589 | 589 |
| 590 | 590 |
| 591 void EmitNanCheck(MacroAssembler* masm, Label* lhs_not_nan, Condition cc) { | 591 void EmitNanCheck(MacroAssembler* masm, Label* lhs_not_nan, Condition cond) { |
| 592 bool exp_first = (HeapNumber::kExponentOffset == HeapNumber::kValueOffset); | 592 bool exp_first = (HeapNumber::kExponentOffset == HeapNumber::kValueOffset); |
| 593 Register rhs_exponent = exp_first ? r0 : r1; | 593 Register rhs_exponent = exp_first ? r0 : r1; |
| 594 Register lhs_exponent = exp_first ? r2 : r3; | 594 Register lhs_exponent = exp_first ? r2 : r3; |
| 595 Register rhs_mantissa = exp_first ? r1 : r0; | 595 Register rhs_mantissa = exp_first ? r1 : r0; |
| 596 Register lhs_mantissa = exp_first ? r3 : r2; | 596 Register lhs_mantissa = exp_first ? r3 : r2; |
| 597 Label one_is_nan, neither_is_nan; | 597 Label one_is_nan, neither_is_nan; |
| 598 | 598 |
| 599 __ Sbfx(r4, | 599 __ Sbfx(r4, |
| 600 lhs_exponent, | 600 lhs_exponent, |
| 601 HeapNumber::kExponentShift, | 601 HeapNumber::kExponentShift, |
| (...skipping 19 matching lines...) Expand all Loading... |
| 621 __ mov(r4, | 621 __ mov(r4, |
| 622 Operand(rhs_exponent, LSL, HeapNumber::kNonMantissaBitsInTopWord), | 622 Operand(rhs_exponent, LSL, HeapNumber::kNonMantissaBitsInTopWord), |
| 623 SetCC); | 623 SetCC); |
| 624 __ b(ne, &one_is_nan); | 624 __ b(ne, &one_is_nan); |
| 625 __ cmp(rhs_mantissa, Operand(0, RelocInfo::NONE)); | 625 __ cmp(rhs_mantissa, Operand(0, RelocInfo::NONE)); |
| 626 __ b(eq, &neither_is_nan); | 626 __ b(eq, &neither_is_nan); |
| 627 | 627 |
| 628 __ bind(&one_is_nan); | 628 __ bind(&one_is_nan); |
| 629 // NaN comparisons always fail. | 629 // NaN comparisons always fail. |
| 630 // Load whatever we need in r0 to make the comparison fail. | 630 // Load whatever we need in r0 to make the comparison fail. |
| 631 if (cc == lt || cc == le) { | 631 if (cond == lt || cond == le) { |
| 632 __ mov(r0, Operand(GREATER)); | 632 __ mov(r0, Operand(GREATER)); |
| 633 } else { | 633 } else { |
| 634 __ mov(r0, Operand(LESS)); | 634 __ mov(r0, Operand(LESS)); |
| 635 } | 635 } |
| 636 __ Ret(); | 636 __ Ret(); |
| 637 | 637 |
| 638 __ bind(&neither_is_nan); | 638 __ bind(&neither_is_nan); |
| 639 } | 639 } |
| 640 | 640 |
| 641 | 641 |
| 642 // See comment at call site. | 642 // See comment at call site. |
| 643 static void EmitTwoNonNanDoubleComparison(MacroAssembler* masm, Condition cc) { | 643 static void EmitTwoNonNanDoubleComparison(MacroAssembler* masm, |
| 644 Condition cond) { |
| 644 bool exp_first = (HeapNumber::kExponentOffset == HeapNumber::kValueOffset); | 645 bool exp_first = (HeapNumber::kExponentOffset == HeapNumber::kValueOffset); |
| 645 Register rhs_exponent = exp_first ? r0 : r1; | 646 Register rhs_exponent = exp_first ? r0 : r1; |
| 646 Register lhs_exponent = exp_first ? r2 : r3; | 647 Register lhs_exponent = exp_first ? r2 : r3; |
| 647 Register rhs_mantissa = exp_first ? r1 : r0; | 648 Register rhs_mantissa = exp_first ? r1 : r0; |
| 648 Register lhs_mantissa = exp_first ? r3 : r2; | 649 Register lhs_mantissa = exp_first ? r3 : r2; |
| 649 | 650 |
| 650 // r0, r1, r2, r3 have the two doubles. Neither is a NaN. | 651 // r0, r1, r2, r3 have the two doubles. Neither is a NaN. |
| 651 if (cc == eq) { | 652 if (cond == eq) { |
| 652 // Doubles are not equal unless they have the same bit pattern. | 653 // Doubles are not equal unless they have the same bit pattern. |
| 653 // Exception: 0 and -0. | 654 // Exception: 0 and -0. |
| 654 __ cmp(rhs_mantissa, Operand(lhs_mantissa)); | 655 __ cmp(rhs_mantissa, Operand(lhs_mantissa)); |
| 655 __ orr(r0, rhs_mantissa, Operand(lhs_mantissa), LeaveCC, ne); | 656 __ orr(r0, rhs_mantissa, Operand(lhs_mantissa), LeaveCC, ne); |
| 656 // Return non-zero if the numbers are unequal. | 657 // Return non-zero if the numbers are unequal. |
| 657 __ Ret(ne); | 658 __ Ret(ne); |
| 658 | 659 |
| 659 __ sub(r0, rhs_exponent, Operand(lhs_exponent), SetCC); | 660 __ sub(r0, rhs_exponent, Operand(lhs_exponent), SetCC); |
| 660 // If exponents are equal then return 0. | 661 // If exponents are equal then return 0. |
| 661 __ Ret(eq); | 662 __ Ret(eq); |
| (...skipping 268 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 930 __ orr(r2, r1, r0); | 931 __ orr(r2, r1, r0); |
| 931 __ tst(r2, Operand(kSmiTagMask)); | 932 __ tst(r2, Operand(kSmiTagMask)); |
| 932 __ b(ne, &not_two_smis); | 933 __ b(ne, &not_two_smis); |
| 933 __ mov(r1, Operand(r1, ASR, 1)); | 934 __ mov(r1, Operand(r1, ASR, 1)); |
| 934 __ sub(r0, r1, Operand(r0, ASR, 1)); | 935 __ sub(r0, r1, Operand(r0, ASR, 1)); |
| 935 __ Ret(); | 936 __ Ret(); |
| 936 __ bind(&not_two_smis); | 937 __ bind(&not_two_smis); |
| 937 } else if (FLAG_debug_code) { | 938 } else if (FLAG_debug_code) { |
| 938 __ orr(r2, r1, r0); | 939 __ orr(r2, r1, r0); |
| 939 __ tst(r2, Operand(kSmiTagMask)); | 940 __ tst(r2, Operand(kSmiTagMask)); |
| 940 __ Assert(nz, "CompareStub: unexpected smi operands."); | 941 __ Assert(ne, "CompareStub: unexpected smi operands."); |
| 941 } | 942 } |
| 942 | 943 |
| 943 // NOTICE! This code is only reached after a smi-fast-case check, so | 944 // NOTICE! This code is only reached after a smi-fast-case check, so |
| 944 // it is certain that at least one operand isn't a smi. | 945 // it is certain that at least one operand isn't a smi. |
| 945 | 946 |
| 946 // Handle the case where the objects are identical. Either returns the answer | 947 // Handle the case where the objects are identical. Either returns the answer |
| 947 // or goes to slow. Only falls through if the objects were not identical. | 948 // or goes to slow. Only falls through if the objects were not identical. |
| 948 EmitIdenticalObjectComparison(masm, &slow, cc_, never_nan_nan_); | 949 EmitIdenticalObjectComparison(masm, &slow, cc_, never_nan_nan_); |
| 949 | 950 |
| 950 // If either is a Smi (we know that not both are), then they can only | 951 // If either is a Smi (we know that not both are), then they can only |
| (...skipping 2327 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3278 STATIC_ASSERT(kSmiTag == 0); | 3279 STATIC_ASSERT(kSmiTag == 0); |
| 3279 __ tst(r0, Operand(kSmiTagMask)); | 3280 __ tst(r0, Operand(kSmiTagMask)); |
| 3280 __ b(eq, &runtime); | 3281 __ b(eq, &runtime); |
| 3281 __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE); | 3282 __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE); |
| 3282 __ b(ne, &runtime); | 3283 __ b(ne, &runtime); |
| 3283 | 3284 |
| 3284 // Check that the RegExp has been compiled (data contains a fixed array). | 3285 // Check that the RegExp has been compiled (data contains a fixed array). |
| 3285 __ ldr(regexp_data, FieldMemOperand(r0, JSRegExp::kDataOffset)); | 3286 __ ldr(regexp_data, FieldMemOperand(r0, JSRegExp::kDataOffset)); |
| 3286 if (FLAG_debug_code) { | 3287 if (FLAG_debug_code) { |
| 3287 __ tst(regexp_data, Operand(kSmiTagMask)); | 3288 __ tst(regexp_data, Operand(kSmiTagMask)); |
| 3288 __ Check(nz, "Unexpected type for RegExp data, FixedArray expected"); | 3289 __ Check(ne, "Unexpected type for RegExp data, FixedArray expected"); |
| 3289 __ CompareObjectType(regexp_data, r0, r0, FIXED_ARRAY_TYPE); | 3290 __ CompareObjectType(regexp_data, r0, r0, FIXED_ARRAY_TYPE); |
| 3290 __ Check(eq, "Unexpected type for RegExp data, FixedArray expected"); | 3291 __ Check(eq, "Unexpected type for RegExp data, FixedArray expected"); |
| 3291 } | 3292 } |
| 3292 | 3293 |
| 3293 // regexp_data: RegExp data (FixedArray) | 3294 // regexp_data: RegExp data (FixedArray) |
| 3294 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP. | 3295 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP. |
| 3295 __ ldr(r0, FieldMemOperand(regexp_data, JSRegExp::kDataTagOffset)); | 3296 __ ldr(r0, FieldMemOperand(regexp_data, JSRegExp::kDataTagOffset)); |
| 3296 __ cmp(r0, Operand(Smi::FromInt(JSRegExp::IRREGEXP))); | 3297 __ cmp(r0, Operand(Smi::FromInt(JSRegExp::IRREGEXP))); |
| 3297 __ b(ne, &runtime); | 3298 __ b(ne, &runtime); |
| 3298 | 3299 |
| (...skipping 82 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3381 __ ldr(r0, FieldMemOperand(subject, ConsString::kSecondOffset)); | 3382 __ ldr(r0, FieldMemOperand(subject, ConsString::kSecondOffset)); |
| 3382 __ LoadRoot(r1, Heap::kEmptyStringRootIndex); | 3383 __ LoadRoot(r1, Heap::kEmptyStringRootIndex); |
| 3383 __ cmp(r0, r1); | 3384 __ cmp(r0, r1); |
| 3384 __ b(ne, &runtime); | 3385 __ b(ne, &runtime); |
| 3385 __ ldr(subject, FieldMemOperand(subject, ConsString::kFirstOffset)); | 3386 __ ldr(subject, FieldMemOperand(subject, ConsString::kFirstOffset)); |
| 3386 __ ldr(r0, FieldMemOperand(subject, HeapObject::kMapOffset)); | 3387 __ ldr(r0, FieldMemOperand(subject, HeapObject::kMapOffset)); |
| 3387 __ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset)); | 3388 __ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset)); |
| 3388 // Is first part a flat string? | 3389 // Is first part a flat string? |
| 3389 STATIC_ASSERT(kSeqStringTag == 0); | 3390 STATIC_ASSERT(kSeqStringTag == 0); |
| 3390 __ tst(r0, Operand(kStringRepresentationMask)); | 3391 __ tst(r0, Operand(kStringRepresentationMask)); |
| 3391 __ b(nz, &runtime); | 3392 __ b(ne, &runtime); |
| 3392 | 3393 |
| 3393 __ bind(&seq_string); | 3394 __ bind(&seq_string); |
| 3394 // subject: Subject string | 3395 // subject: Subject string |
| 3395 // regexp_data: RegExp data (FixedArray) | 3396 // regexp_data: RegExp data (FixedArray) |
| 3396 // r0: Instance type of subject string | 3397 // r0: Instance type of subject string |
| 3397 STATIC_ASSERT(4 == kAsciiStringTag); | 3398 STATIC_ASSERT(4 == kAsciiStringTag); |
| 3398 STATIC_ASSERT(kTwoByteStringTag == 0); | 3399 STATIC_ASSERT(kTwoByteStringTag == 0); |
| 3399 // Find the code object based on the assumptions above. | 3400 // Find the code object based on the assumptions above. |
| 3400 __ and_(r0, r0, Operand(kStringEncodingMask)); | 3401 __ and_(r0, r0, Operand(kStringEncodingMask)); |
| 3401 __ mov(r3, Operand(r0, ASR, 2), SetCC); | 3402 __ mov(r3, Operand(r0, ASR, 2), SetCC); |
| (...skipping 427 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3829 __ LoadRoot(ip, Heap::kEmptyStringRootIndex); | 3830 __ LoadRoot(ip, Heap::kEmptyStringRootIndex); |
| 3830 __ cmp(result_, Operand(ip)); | 3831 __ cmp(result_, Operand(ip)); |
| 3831 __ b(ne, &call_runtime_); | 3832 __ b(ne, &call_runtime_); |
| 3832 // Get the first of the two strings and load its instance type. | 3833 // Get the first of the two strings and load its instance type. |
| 3833 __ ldr(object_, FieldMemOperand(object_, ConsString::kFirstOffset)); | 3834 __ ldr(object_, FieldMemOperand(object_, ConsString::kFirstOffset)); |
| 3834 __ ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset)); | 3835 __ ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset)); |
| 3835 __ ldrb(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset)); | 3836 __ ldrb(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset)); |
| 3836 // If the first cons component is also non-flat, then go to runtime. | 3837 // If the first cons component is also non-flat, then go to runtime. |
| 3837 STATIC_ASSERT(kSeqStringTag == 0); | 3838 STATIC_ASSERT(kSeqStringTag == 0); |
| 3838 __ tst(result_, Operand(kStringRepresentationMask)); | 3839 __ tst(result_, Operand(kStringRepresentationMask)); |
| 3839 __ b(nz, &call_runtime_); | 3840 __ b(ne, &call_runtime_); |
| 3840 | 3841 |
| 3841 // Check for 1-byte or 2-byte string. | 3842 // Check for 1-byte or 2-byte string. |
| 3842 __ bind(&flat_string); | 3843 __ bind(&flat_string); |
| 3843 STATIC_ASSERT(kAsciiStringTag != 0); | 3844 STATIC_ASSERT(kAsciiStringTag != 0); |
| 3844 __ tst(result_, Operand(kStringEncodingMask)); | 3845 __ tst(result_, Operand(kStringEncodingMask)); |
| 3845 __ b(nz, &ascii_string); | 3846 __ b(ne, &ascii_string); |
| 3846 | 3847 |
| 3847 // 2-byte string. | 3848 // 2-byte string. |
| 3848 // Load the 2-byte character code into the result register. We can | 3849 // Load the 2-byte character code into the result register. We can |
| 3849 // add without shifting since the smi tag size is the log2 of the | 3850 // add without shifting since the smi tag size is the log2 of the |
| 3850 // number of bytes in a two-byte character. | 3851 // number of bytes in a two-byte character. |
| 3851 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1 && kSmiShiftSize == 0); | 3852 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1 && kSmiShiftSize == 0); |
| 3852 __ add(scratch_, object_, Operand(scratch_)); | 3853 __ add(scratch_, object_, Operand(scratch_)); |
| 3853 __ ldrh(result_, FieldMemOperand(scratch_, SeqTwoByteString::kHeaderSize)); | 3854 __ ldrh(result_, FieldMemOperand(scratch_, SeqTwoByteString::kHeaderSize)); |
| 3854 __ jmp(&got_char_code); | 3855 __ jmp(&got_char_code); |
| 3855 | 3856 |
| (...skipping 64 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3920 // StringCharFromCodeGenerator | 3921 // StringCharFromCodeGenerator |
| 3921 | 3922 |
| 3922 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) { | 3923 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) { |
| 3923 // Fast case of Heap::LookupSingleCharacterStringFromCode. | 3924 // Fast case of Heap::LookupSingleCharacterStringFromCode. |
| 3924 STATIC_ASSERT(kSmiTag == 0); | 3925 STATIC_ASSERT(kSmiTag == 0); |
| 3925 STATIC_ASSERT(kSmiShiftSize == 0); | 3926 STATIC_ASSERT(kSmiShiftSize == 0); |
| 3926 ASSERT(IsPowerOf2(String::kMaxAsciiCharCode + 1)); | 3927 ASSERT(IsPowerOf2(String::kMaxAsciiCharCode + 1)); |
| 3927 __ tst(code_, | 3928 __ tst(code_, |
| 3928 Operand(kSmiTagMask | | 3929 Operand(kSmiTagMask | |
| 3929 ((~String::kMaxAsciiCharCode) << kSmiTagSize))); | 3930 ((~String::kMaxAsciiCharCode) << kSmiTagSize))); |
| 3930 __ b(nz, &slow_case_); | 3931 __ b(ne, &slow_case_); |
| 3931 | 3932 |
| 3932 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex); | 3933 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex); |
| 3933 // At this point code register contains smi tagged ascii char code. | 3934 // At this point code register contains smi tagged ascii char code. |
| 3934 STATIC_ASSERT(kSmiTag == 0); | 3935 STATIC_ASSERT(kSmiTag == 0); |
| 3935 __ add(result_, result_, Operand(code_, LSL, kPointerSizeLog2 - kSmiTagSize)); | 3936 __ add(result_, result_, Operand(code_, LSL, kPointerSizeLog2 - kSmiTagSize)); |
| 3936 __ ldr(result_, FieldMemOperand(result_, FixedArray::kHeaderSize)); | 3937 __ ldr(result_, FieldMemOperand(result_, FixedArray::kHeaderSize)); |
| 3937 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); | 3938 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); |
| 3938 __ cmp(result_, Operand(ip)); | 3939 __ cmp(result_, Operand(ip)); |
| 3939 __ b(eq, &slow_case_); | 3940 __ b(eq, &slow_case_); |
| 3940 __ bind(&exit_); | 3941 __ bind(&exit_); |
| (...skipping 426 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4367 void StringHelper::GenerateHashGetHash(MacroAssembler* masm, | 4368 void StringHelper::GenerateHashGetHash(MacroAssembler* masm, |
| 4368 Register hash) { | 4369 Register hash) { |
| 4369 // hash += hash << 3; | 4370 // hash += hash << 3; |
| 4370 __ add(hash, hash, Operand(hash, LSL, 3)); | 4371 __ add(hash, hash, Operand(hash, LSL, 3)); |
| 4371 // hash ^= hash >> 11; | 4372 // hash ^= hash >> 11; |
| 4372 __ eor(hash, hash, Operand(hash, ASR, 11)); | 4373 __ eor(hash, hash, Operand(hash, ASR, 11)); |
| 4373 // hash += hash << 15; | 4374 // hash += hash << 15; |
| 4374 __ add(hash, hash, Operand(hash, LSL, 15), SetCC); | 4375 __ add(hash, hash, Operand(hash, LSL, 15), SetCC); |
| 4375 | 4376 |
| 4376 // if (hash == 0) hash = 27; | 4377 // if (hash == 0) hash = 27; |
| 4377 __ mov(hash, Operand(27), LeaveCC, nz); | 4378 __ mov(hash, Operand(27), LeaveCC, ne); |
| 4378 } | 4379 } |
| 4379 | 4380 |
| 4380 | 4381 |
| 4381 void SubStringStub::Generate(MacroAssembler* masm) { | 4382 void SubStringStub::Generate(MacroAssembler* masm) { |
| 4382 Label runtime; | 4383 Label runtime; |
| 4383 | 4384 |
| 4384 // Stack frame on entry. | 4385 // Stack frame on entry. |
| 4385 // lr: return address | 4386 // lr: return address |
| 4386 // sp[0]: to | 4387 // sp[0]: to |
| 4387 // sp[4]: from | 4388 // sp[4]: from |
| (...skipping 718 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5106 __ pop(r1); | 5107 __ pop(r1); |
| 5107 __ Jump(r2); | 5108 __ Jump(r2); |
| 5108 } | 5109 } |
| 5109 | 5110 |
| 5110 | 5111 |
| 5111 #undef __ | 5112 #undef __ |
| 5112 | 5113 |
| 5113 } } // namespace v8::internal | 5114 } } // namespace v8::internal |
| 5114 | 5115 |
| 5115 #endif // V8_TARGET_ARCH_ARM | 5116 #endif // V8_TARGET_ARCH_ARM |
| OLD | NEW |