Chromium Code Reviews

Side by Side Diff: src/ia32/code-stubs-ia32.cc

Issue 7860035: Merge bleeding edge up to 9192 into the GC branch. (Closed)
Base URL: https://v8.googlecode.com/svn/branches/experimental/gc
Patch Set: Created 9 years, 3 months ago
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 508 matching lines...)
519 __ xor_(ecx, Operand(ecx)); 519 __ xor_(ecx, Operand(ecx));
520 // Check whether the exponent matches a 32 bit signed int that cannot be 520 // Check whether the exponent matches a 32 bit signed int that cannot be
521 // represented by a Smi. A non-smi 32 bit integer is 1.xxx * 2^30 so the 521 // represented by a Smi. A non-smi 32 bit integer is 1.xxx * 2^30 so the
522 // exponent is 30 (biased). This is the exponent that we are fastest at and 522 // exponent is 30 (biased). This is the exponent that we are fastest at and
523 // also the highest exponent we can handle here. 523 // also the highest exponent we can handle here.
524 const uint32_t non_smi_exponent = 524 const uint32_t non_smi_exponent =
525 (HeapNumber::kExponentBias + 30) << HeapNumber::kExponentShift; 525 (HeapNumber::kExponentBias + 30) << HeapNumber::kExponentShift;
526 __ cmp(Operand(scratch2), Immediate(non_smi_exponent)); 526 __ cmp(Operand(scratch2), Immediate(non_smi_exponent));
527 // If we have a match of the int32-but-not-Smi exponent then skip some 527 // If we have a match of the int32-but-not-Smi exponent then skip some
528 // logic. 528 // logic.
529 __ j(equal, &right_exponent); 529 __ j(equal, &right_exponent, Label::kNear);
530 // If the exponent is higher than that then go to slow case. This catches 530 // If the exponent is higher than that then go to slow case. This catches
531 // numbers that don't fit in a signed int32, infinities and NaNs. 531 // numbers that don't fit in a signed int32, infinities and NaNs.
532 __ j(less, &normal_exponent); 532 __ j(less, &normal_exponent, Label::kNear);
533 533
534 { 534 {
535 // Handle a big exponent. The only reason we have this code is that the 535 // Handle a big exponent. The only reason we have this code is that the
536 // >>> operator has a tendency to generate numbers with an exponent of 31. 536 // >>> operator has a tendency to generate numbers with an exponent of 31.
537 const uint32_t big_non_smi_exponent = 537 const uint32_t big_non_smi_exponent =
538 (HeapNumber::kExponentBias + 31) << HeapNumber::kExponentShift; 538 (HeapNumber::kExponentBias + 31) << HeapNumber::kExponentShift;
539 __ cmp(Operand(scratch2), Immediate(big_non_smi_exponent)); 539 __ cmp(Operand(scratch2), Immediate(big_non_smi_exponent));
540 __ j(not_equal, conversion_failure); 540 __ j(not_equal, conversion_failure);
541 // We have the big exponent, typically from >>>. This means the number is 541 // We have the big exponent, typically from >>>. This means the number is
542 // in the range 2^31 to 2^32 - 1. Get the top bits of the mantissa. 542 // in the range 2^31 to 2^32 - 1. Get the top bits of the mantissa.
543 __ mov(scratch2, scratch); 543 __ mov(scratch2, scratch);
544 __ and_(scratch2, HeapNumber::kMantissaMask); 544 __ and_(scratch2, HeapNumber::kMantissaMask);
545 // Put back the implicit 1. 545 // Put back the implicit 1.
546 __ or_(scratch2, 1 << HeapNumber::kExponentShift); 546 __ or_(scratch2, 1 << HeapNumber::kExponentShift);
547 // Shift up the mantissa bits to take up the space the exponent used to 547 // Shift up the mantissa bits to take up the space the exponent used to
548 // take. We just orred in the implicit bit so that took care of one and 548 // take. We just orred in the implicit bit so that took care of one and
549 // we want to use the full unsigned range so we subtract 1 bit from the 549 // we want to use the full unsigned range so we subtract 1 bit from the
550 // shift distance. 550 // shift distance.
551 const int big_shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 1; 551 const int big_shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 1;
552 __ shl(scratch2, big_shift_distance); 552 __ shl(scratch2, big_shift_distance);
553 // Get the second half of the double. 553 // Get the second half of the double.
554 __ mov(ecx, FieldOperand(source, HeapNumber::kMantissaOffset)); 554 __ mov(ecx, FieldOperand(source, HeapNumber::kMantissaOffset));
555 // Shift down 21 bits to get the most significant 11 bits or the low 555 // Shift down 21 bits to get the most significant 11 bits or the low
556 // mantissa word. 556 // mantissa word.
557 __ shr(ecx, 32 - big_shift_distance); 557 __ shr(ecx, 32 - big_shift_distance);
558 __ or_(ecx, Operand(scratch2)); 558 __ or_(ecx, Operand(scratch2));
559 // We have the answer in ecx, but we may need to negate it. 559 // We have the answer in ecx, but we may need to negate it.
560 __ test(scratch, Operand(scratch)); 560 __ test(scratch, Operand(scratch));
561 __ j(positive, &done); 561 __ j(positive, &done, Label::kNear);
562 __ neg(ecx); 562 __ neg(ecx);
563 __ jmp(&done); 563 __ jmp(&done, Label::kNear);
564 } 564 }
565 565
566 __ bind(&normal_exponent); 566 __ bind(&normal_exponent);
567 // Exponent word in scratch, exponent part of exponent word in scratch2. 567 // Exponent word in scratch, exponent part of exponent word in scratch2.
568 // Zero in ecx. 568 // Zero in ecx.
569 // We know the exponent is smaller than 30 (biased). If it is less than 569 // We know the exponent is smaller than 30 (biased). If it is less than
570 // 0 (biased) then the number is smaller in magnitude than 1.0 * 2^0, ie 570 // 0 (biased) then the number is smaller in magnitude than 1.0 * 2^0, ie
571 // it rounds to zero. 571 // it rounds to zero.
572 const uint32_t zero_exponent = 572 const uint32_t zero_exponent =
573 (HeapNumber::kExponentBias + 0) << HeapNumber::kExponentShift; 573 (HeapNumber::kExponentBias + 0) << HeapNumber::kExponentShift;
574 __ sub(Operand(scratch2), Immediate(zero_exponent)); 574 __ sub(Operand(scratch2), Immediate(zero_exponent));
575 // ecx already has a Smi zero. 575 // ecx already has a Smi zero.
576 __ j(less, &done); 576 __ j(less, &done, Label::kNear);
577 577
578 // We have a shifted exponent between 0 and 30 in scratch2. 578 // We have a shifted exponent between 0 and 30 in scratch2.
579 __ shr(scratch2, HeapNumber::kExponentShift); 579 __ shr(scratch2, HeapNumber::kExponentShift);
580 __ mov(ecx, Immediate(30)); 580 __ mov(ecx, Immediate(30));
581 __ sub(ecx, Operand(scratch2)); 581 __ sub(ecx, Operand(scratch2));
582 582
583 __ bind(&right_exponent); 583 __ bind(&right_exponent);
584 // Here ecx is the shift, scratch is the exponent word. 584 // Here ecx is the shift, scratch is the exponent word.
585 // Get the top bits of the mantissa. 585 // Get the top bits of the mantissa.
586 __ and_(scratch, HeapNumber::kMantissaMask); 586 __ and_(scratch, HeapNumber::kMantissaMask);
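
The chunk above is the non-SSE3 path of IntegerConvert: it truncates a heap number to an int32 by taking the double apart into exponent and mantissa words, with a special case for the exponent-31 values that >>> tends to produce (those wrap modulo 2^32). A rough scalar sketch of the same idea, written against plain IEEE-754 constants rather than V8's HeapNumber layout (the function name and structure are illustrative only):

#include <cstdint>
#include <cstring>
#include <cstdio>

// Illustrative sketch: bias 1023, 52 mantissa bits, exponents above 31
// (too big, infinity, NaN) are left to a slow path, just like the stub.
static bool DoubleToInt32Bits(double value, int32_t* out) {
  uint64_t bits;
  std::memcpy(&bits, &value, sizeof(bits));
  const bool negative = (bits >> 63) != 0;
  const int exponent = static_cast<int>((bits >> 52) & 0x7FF) - 1023;
  const uint64_t mantissa = (bits & ((uint64_t{1} << 52) - 1)) |
                            (uint64_t{1} << 52);   // put back the implicit 1
  if (exponent < 0) {                // |value| < 1.0: rounds to zero
    *out = 0;
    return true;
  }
  if (exponent > 31) return false;   // slow case
  // Line the mantissa up so its integer part occupies the low bits.
  uint32_t truncated = static_cast<uint32_t>(mantissa >> (52 - exponent));
  *out = static_cast<int32_t>(negative ? 0u - truncated : truncated);
  return true;
}

int main() {
  int32_t r;
  if (DoubleToInt32Bits(4294967295.0, &r))  // 2^32 - 1, as produced by >>>
    std::printf("%d\n", r);                 // prints -1 (wraps mod 2^32)
  return 0;
}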
(...skipping 204 matching lines...)
791 791
792 if (mode_ == UNARY_OVERWRITE) { 792 if (mode_ == UNARY_OVERWRITE) {
793 __ xor_(FieldOperand(eax, HeapNumber::kExponentOffset), 793 __ xor_(FieldOperand(eax, HeapNumber::kExponentOffset),
794 Immediate(HeapNumber::kSignMask)); // Flip sign. 794 Immediate(HeapNumber::kSignMask)); // Flip sign.
795 } else { 795 } else {
796 __ mov(edx, Operand(eax)); 796 __ mov(edx, Operand(eax));
797 // edx: operand 797 // edx: operand
798 798
799 Label slow_allocate_heapnumber, heapnumber_allocated; 799 Label slow_allocate_heapnumber, heapnumber_allocated;
800 __ AllocateHeapNumber(eax, ebx, ecx, &slow_allocate_heapnumber); 800 __ AllocateHeapNumber(eax, ebx, ecx, &slow_allocate_heapnumber);
801 __ jmp(&heapnumber_allocated); 801 __ jmp(&heapnumber_allocated, Label::kNear);
802 802
803 __ bind(&slow_allocate_heapnumber); 803 __ bind(&slow_allocate_heapnumber);
804 __ EnterInternalFrame(); 804 __ EnterInternalFrame();
805 __ push(edx); 805 __ push(edx);
806 __ CallRuntime(Runtime::kNumberAlloc, 0); 806 __ CallRuntime(Runtime::kNumberAlloc, 0);
807 __ pop(edx); 807 __ pop(edx);
808 __ LeaveInternalFrame(); 808 __ LeaveInternalFrame();
809 809
810 __ bind(&heapnumber_allocated); 810 __ bind(&heapnumber_allocated);
811 // eax: allocated 'empty' number 811 // eax: allocated 'empty' number
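
For context, the UNARY_OVERWRITE branch above negates the number in place by XOR-ing the sign bit of the heap number's exponent word (HeapNumber::kSignMask); only the non-overwrite path needs a fresh allocation. A minimal stand-alone illustration of that bit trick, in plain C++ rather than V8 types:

#include <cstdint>
#include <cstring>
#include <cstdio>

// Negate a double by toggling its IEEE-754 sign bit; nothing else changes.
static double NegateByBit(double value) {
  uint64_t bits;
  std::memcpy(&bits, &value, sizeof(bits));
  bits ^= uint64_t{1} << 63;
  std::memcpy(&value, &bits, sizeof(bits));
  return value;
}

int main() {
  std::printf("%g %g\n", NegateByBit(2.5), NegateByBit(-0.0));  // -2.5 0
  return 0;
}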
(...skipping 656 matching lines...)
1468 ASSERT(operands_type_ == BinaryOpIC::BOTH_STRING); 1468 ASSERT(operands_type_ == BinaryOpIC::BOTH_STRING);
1469 ASSERT(op_ == Token::ADD); 1469 ASSERT(op_ == Token::ADD);
1470 // If both arguments are strings, call the string add stub. 1470 // If both arguments are strings, call the string add stub.
1471 // Otherwise, do a transition. 1471 // Otherwise, do a transition.
1472 1472
1473 // Registers containing left and right operands respectively. 1473 // Registers containing left and right operands respectively.
1474 Register left = edx; 1474 Register left = edx;
1475 Register right = eax; 1475 Register right = eax;
1476 1476
1477 // Test if left operand is a string. 1477 // Test if left operand is a string.
1478 __ JumpIfSmi(left, &call_runtime); 1478 __ JumpIfSmi(left, &call_runtime, Label::kNear);
1479 __ CmpObjectType(left, FIRST_NONSTRING_TYPE, ecx); 1479 __ CmpObjectType(left, FIRST_NONSTRING_TYPE, ecx);
1480 __ j(above_equal, &call_runtime); 1480 __ j(above_equal, &call_runtime, Label::kNear);
1481 1481
1482 // Test if right operand is a string. 1482 // Test if right operand is a string.
1483 __ JumpIfSmi(right, &call_runtime); 1483 __ JumpIfSmi(right, &call_runtime, Label::kNear);
1484 __ CmpObjectType(right, FIRST_NONSTRING_TYPE, ecx); 1484 __ CmpObjectType(right, FIRST_NONSTRING_TYPE, ecx);
1485 __ j(above_equal, &call_runtime); 1485 __ j(above_equal, &call_runtime, Label::kNear);
1486 1486
1487 StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB); 1487 StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB);
1488 GenerateRegisterArgsPush(masm); 1488 GenerateRegisterArgsPush(masm);
1489 __ TailCallStub(&string_add_stub); 1489 __ TailCallStub(&string_add_stub);
1490 1490
1491 __ bind(&call_runtime); 1491 __ bind(&call_runtime);
1492 GenerateTypeTransition(masm); 1492 GenerateTypeTransition(masm);
1493 } 1493 }
1494 1494
1495 1495
(...skipping 93 matching lines...)
1589 case Token::SHR: __ shr_cl(eax); break; 1589 case Token::SHR: __ shr_cl(eax); break;
1590 default: UNREACHABLE(); 1590 default: UNREACHABLE();
1591 } 1591 }
1592 if (op_ == Token::SHR) { 1592 if (op_ == Token::SHR) {
1593 // Check if result is non-negative and fits in a smi. 1593 // Check if result is non-negative and fits in a smi.
1594 __ test(eax, Immediate(0xc0000000)); 1594 __ test(eax, Immediate(0xc0000000));
1595 __ j(not_zero, &call_runtime); 1595 __ j(not_zero, &call_runtime);
1596 } else { 1596 } else {
1597 // Check if result fits in a smi. 1597 // Check if result fits in a smi.
1598 __ cmp(eax, 0xc0000000); 1598 __ cmp(eax, 0xc0000000);
1599 __ j(negative, &non_smi_result); 1599 __ j(negative, &non_smi_result, Label::kNear);
1600 } 1600 }
1601 // Tag smi result and return. 1601 // Tag smi result and return.
1602 __ SmiTag(eax); 1602 __ SmiTag(eax);
1603 __ ret(2 * kPointerSize); // Drop two pushed arguments from the stack. 1603 __ ret(2 * kPointerSize); // Drop two pushed arguments from the stack.
1604 1604
1605 // All ops except SHR return a signed int32 that we load in 1605 // All ops except SHR return a signed int32 that we load in
1606 // a HeapNumber. 1606 // a HeapNumber.
1607 if (op_ != Token::SHR) { 1607 if (op_ != Token::SHR) {
1608 __ bind(&non_smi_result); 1608 __ bind(&non_smi_result);
1609 // Allocate a heap number if needed. 1609 // Allocate a heap number if needed.
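
The 0xc0000000 tests above are the "does this 32-bit result fit in a smi" checks: on ia32 a smi carries a 31-bit signed integer, i.e. values in [-2^30, 2^30 - 1]. A sketch of both checks in plain C++ (helper names are made up):

#include <cstdint>
#include <cstdio>

static bool UnsignedResultFitsInSmi(uint32_t r) {
  // SHR yields an unsigned value; it fits only if the top two bits are
  // clear, which is exactly what `test eax, 0xc0000000` verifies.
  return (r & 0xc0000000u) == 0;
}

static bool SignedResultFitsInSmi(int32_t r) {
  // `cmp eax, 0xc0000000` computes eax - 0xc0000000 == eax + 2^30 (mod 2^32)
  // and branches on its sign bit; the sum has the sign bit set exactly for
  // values outside [-2^30, 2^30 - 1].
  return ((static_cast<uint32_t>(r) + 0x40000000u) & 0x80000000u) == 0;
}

int main() {
  std::printf("%d %d %d\n",
              SignedResultFitsInSmi(0x3fffffff),      // 1: largest smi
              SignedResultFitsInSmi(0x40000000),      // 0: one too big
              UnsignedResultFitsInSmi(0x80000000u));  // 0: needs 32 bits
  return 0;
}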
(...skipping 193 matching lines...)
1803 case Token::SHR: __ shr_cl(eax); break; 1803 case Token::SHR: __ shr_cl(eax); break;
1804 default: UNREACHABLE(); 1804 default: UNREACHABLE();
1805 } 1805 }
1806 if (op_ == Token::SHR) { 1806 if (op_ == Token::SHR) {
1807 // Check if result is non-negative and fits in a smi. 1807 // Check if result is non-negative and fits in a smi.
1808 __ test(eax, Immediate(0xc0000000)); 1808 __ test(eax, Immediate(0xc0000000));
1809 __ j(not_zero, &call_runtime); 1809 __ j(not_zero, &call_runtime);
1810 } else { 1810 } else {
1811 // Check if result fits in a smi. 1811 // Check if result fits in a smi.
1812 __ cmp(eax, 0xc0000000); 1812 __ cmp(eax, 0xc0000000);
1813 __ j(negative, &non_smi_result); 1813 __ j(negative, &non_smi_result, Label::kNear);
1814 } 1814 }
1815 // Tag smi result and return. 1815 // Tag smi result and return.
1816 __ SmiTag(eax); 1816 __ SmiTag(eax);
1817 __ ret(2 * kPointerSize); // Drop two pushed arguments from the stack. 1817 __ ret(2 * kPointerSize); // Drop two pushed arguments from the stack.
1818 1818
1819 // All ops except SHR return a signed int32 that we load in 1819 // All ops except SHR return a signed int32 that we load in
1820 // a HeapNumber. 1820 // a HeapNumber.
1821 if (op_ != Token::SHR) { 1821 if (op_ != Token::SHR) {
1822 __ bind(&non_smi_result); 1822 __ bind(&non_smi_result);
1823 // Allocate a heap number if needed. 1823 // Allocate a heap number if needed.
(...skipping 178 matching lines...)
2002 case Token::SHR: __ shr_cl(eax); break; 2002 case Token::SHR: __ shr_cl(eax); break;
2003 default: UNREACHABLE(); 2003 default: UNREACHABLE();
2004 } 2004 }
2005 if (op_ == Token::SHR) { 2005 if (op_ == Token::SHR) {
2006 // Check if result is non-negative and fits in a smi. 2006 // Check if result is non-negative and fits in a smi.
2007 __ test(eax, Immediate(0xc0000000)); 2007 __ test(eax, Immediate(0xc0000000));
2008 __ j(not_zero, &call_runtime); 2008 __ j(not_zero, &call_runtime);
2009 } else { 2009 } else {
2010 // Check if result fits in a smi. 2010 // Check if result fits in a smi.
2011 __ cmp(eax, 0xc0000000); 2011 __ cmp(eax, 0xc0000000);
2012 __ j(negative, &non_smi_result); 2012 __ j(negative, &non_smi_result, Label::kNear);
2013 } 2013 }
2014 // Tag smi result and return. 2014 // Tag smi result and return.
2015 __ SmiTag(eax); 2015 __ SmiTag(eax);
2016 __ ret(2 * kPointerSize); // Drop the arguments from the stack. 2016 __ ret(2 * kPointerSize); // Drop the arguments from the stack.
2017 2017
2018 // All ops except SHR return a signed int32 that we load in 2018 // All ops except SHR return a signed int32 that we load in
2019 // a HeapNumber. 2019 // a HeapNumber.
2020 if (op_ != Token::SHR) { 2020 if (op_ != Token::SHR) {
2021 __ bind(&non_smi_result); 2021 __ bind(&non_smi_result);
2022 // Allocate a heap number if needed. 2022 // Allocate a heap number if needed.
(...skipping 454 matching lines...)
2477 // Output: eax, ecx are left and right integers for a bit op. 2477 // Output: eax, ecx are left and right integers for a bit op.
2478 void FloatingPointHelper::LoadUnknownsAsIntegers(MacroAssembler* masm, 2478 void FloatingPointHelper::LoadUnknownsAsIntegers(MacroAssembler* masm,
2479 bool use_sse3, 2479 bool use_sse3,
2480 Label* conversion_failure) { 2480 Label* conversion_failure) {
2481 // Check float operands. 2481 // Check float operands.
2482 Label arg1_is_object, check_undefined_arg1; 2482 Label arg1_is_object, check_undefined_arg1;
2483 Label arg2_is_object, check_undefined_arg2; 2483 Label arg2_is_object, check_undefined_arg2;
2484 Label load_arg2, done; 2484 Label load_arg2, done;
2485 2485
2486 // Test if arg1 is a Smi. 2486 // Test if arg1 is a Smi.
2487 __ JumpIfNotSmi(edx, &arg1_is_object); 2487 __ JumpIfNotSmi(edx, &arg1_is_object, Label::kNear);
2488 2488
2489 __ SmiUntag(edx); 2489 __ SmiUntag(edx);
2490 __ jmp(&load_arg2); 2490 __ jmp(&load_arg2);
2491 2491
2492 // If the argument is undefined it converts to zero (ECMA-262, section 9.5). 2492 // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
2493 __ bind(&check_undefined_arg1); 2493 __ bind(&check_undefined_arg1);
2494 Factory* factory = masm->isolate()->factory(); 2494 Factory* factory = masm->isolate()->factory();
2495 __ cmp(edx, factory->undefined_value()); 2495 __ cmp(edx, factory->undefined_value());
2496 __ j(not_equal, conversion_failure); 2496 __ j(not_equal, conversion_failure);
2497 __ mov(edx, Immediate(0)); 2497 __ mov(edx, Immediate(0));
2498 __ jmp(&load_arg2); 2498 __ jmp(&load_arg2);
2499 2499
2500 __ bind(&arg1_is_object); 2500 __ bind(&arg1_is_object);
2501 __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset)); 2501 __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
2502 __ cmp(ebx, factory->heap_number_map()); 2502 __ cmp(ebx, factory->heap_number_map());
2503 __ j(not_equal, &check_undefined_arg1); 2503 __ j(not_equal, &check_undefined_arg1);
2504 2504
2505 // Get the untagged integer version of the edx heap number in ecx. 2505 // Get the untagged integer version of the edx heap number in ecx.
2506 IntegerConvert(masm, edx, use_sse3, conversion_failure); 2506 IntegerConvert(masm, edx, use_sse3, conversion_failure);
2507 __ mov(edx, ecx); 2507 __ mov(edx, ecx);
2508 2508
2509 // Here edx has the untagged integer, eax has a Smi or a heap number. 2509 // Here edx has the untagged integer, eax has a Smi or a heap number.
2510 __ bind(&load_arg2); 2510 __ bind(&load_arg2);
2511 2511
2512 // Test if arg2 is a Smi. 2512 // Test if arg2 is a Smi.
2513 __ JumpIfNotSmi(eax, &arg2_is_object); 2513 __ JumpIfNotSmi(eax, &arg2_is_object, Label::kNear);
2514 2514
2515 __ SmiUntag(eax); 2515 __ SmiUntag(eax);
2516 __ mov(ecx, eax); 2516 __ mov(ecx, eax);
2517 __ jmp(&done); 2517 __ jmp(&done);
2518 2518
2519 // If the argument is undefined it converts to zero (ECMA-262, section 9.5). 2519 // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
2520 __ bind(&check_undefined_arg2); 2520 __ bind(&check_undefined_arg2);
2521 __ cmp(eax, factory->undefined_value()); 2521 __ cmp(eax, factory->undefined_value());
2522 __ j(not_equal, conversion_failure); 2522 __ j(not_equal, conversion_failure);
2523 __ mov(ecx, Immediate(0)); 2523 __ mov(ecx, Immediate(0));
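
LoadUnknownsAsIntegers reduces each operand to an untagged int32 for the bit operation: smis are simply untagged, undefined becomes 0 (ECMA-262, section 9.5), heap numbers go through IntegerConvert, and anything else falls out to conversion_failure. Very loosely, in C++ terms (the tagged Value type here is invented purely for illustration):

#include <cstdint>
#include <optional>

// Invented tag enum; V8's real value representation is pointer tagging.
struct Value {
  enum Kind { kSmi, kHeapNumber, kUndefined, kOther } kind;
  int32_t smi_value;
  double number_value;
};

static std::optional<int32_t> ToUntaggedInt32(const Value& v) {
  switch (v.kind) {
    case Value::kSmi:       return v.smi_value;   // SmiUntag
    case Value::kUndefined: return 0;             // undefined converts to zero
    case Value::kHeapNumber:
      // Stands in for IntegerConvert (see the bit-level sketch further up);
      // assume the double is already in int32 range for brevity.
      return static_cast<int32_t>(v.number_value);
    default:                return std::nullopt;  // conversion_failure
  }
}

int main() {
  Value undef{Value::kUndefined, 0, 0.0};
  return ToUntaggedInt32(undef).value_or(-1);     // exits with 0
}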
(...skipping 369 matching lines...)
2893 void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) { 2893 void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
2894 // The key is in edx and the parameter count is in eax. 2894 // The key is in edx and the parameter count is in eax.
2895 2895
2896 // The displacement is used for skipping the frame pointer on the 2896 // The displacement is used for skipping the frame pointer on the
2897 // stack. It is the offset of the last parameter (if any) relative 2897 // stack. It is the offset of the last parameter (if any) relative
2898 // to the frame pointer. 2898 // to the frame pointer.
2899 static const int kDisplacement = 1 * kPointerSize; 2899 static const int kDisplacement = 1 * kPointerSize;
2900 2900
2901 // Check that the key is a smi. 2901 // Check that the key is a smi.
2902 Label slow; 2902 Label slow;
2903 __ JumpIfNotSmi(edx, &slow); 2903 __ JumpIfNotSmi(edx, &slow, Label::kNear);
2904 2904
2905 // Check if the calling frame is an arguments adaptor frame. 2905 // Check if the calling frame is an arguments adaptor frame.
2906 Label adaptor; 2906 Label adaptor;
2907 __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset)); 2907 __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2908 __ mov(ecx, Operand(ebx, StandardFrameConstants::kContextOffset)); 2908 __ mov(ecx, Operand(ebx, StandardFrameConstants::kContextOffset));
2909 __ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 2909 __ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2910 __ j(equal, &adaptor, Label::kNear); 2910 __ j(equal, &adaptor, Label::kNear);
2911 2911
2912 // Check index against formal parameters count limit passed in 2912 // Check index against formal parameters count limit passed in
2913 // through register eax. Use unsigned comparison to get negative 2913 // through register eax. Use unsigned comparison to get negative
2914 // check for free. 2914 // check for free.
2915 __ cmp(edx, Operand(eax)); 2915 __ cmp(edx, Operand(eax));
2916 __ j(above_equal, &slow); 2916 __ j(above_equal, &slow, Label::kNear);
2917 2917
2918 // Read the argument from the stack and return it. 2918 // Read the argument from the stack and return it.
2919 STATIC_ASSERT(kSmiTagSize == 1); 2919 STATIC_ASSERT(kSmiTagSize == 1);
2920 STATIC_ASSERT(kSmiTag == 0); // Shifting code depends on these. 2920 STATIC_ASSERT(kSmiTag == 0); // Shifting code depends on these.
2921 __ lea(ebx, Operand(ebp, eax, times_2, 0)); 2921 __ lea(ebx, Operand(ebp, eax, times_2, 0));
2922 __ neg(edx); 2922 __ neg(edx);
2923 __ mov(eax, Operand(ebx, edx, times_2, kDisplacement)); 2923 __ mov(eax, Operand(ebx, edx, times_2, kDisplacement));
2924 __ ret(0); 2924 __ ret(0);
2925 2925
2926 // Arguments adaptor case: Check index against actual arguments 2926 // Arguments adaptor case: Check index against actual arguments
2927 // limit found in the arguments adaptor frame. Use unsigned 2927 // limit found in the arguments adaptor frame. Use unsigned
2928 // comparison to get negative check for free. 2928 // comparison to get negative check for free.
2929 __ bind(&adaptor); 2929 __ bind(&adaptor);
2930 __ mov(ecx, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset)); 2930 __ mov(ecx, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
2931 __ cmp(edx, Operand(ecx)); 2931 __ cmp(edx, Operand(ecx));
2932 __ j(above_equal, &slow); 2932 __ j(above_equal, &slow, Label::kNear);
2933 2933
2934 // Read the argument from the stack and return it. 2934 // Read the argument from the stack and return it.
2935 STATIC_ASSERT(kSmiTagSize == 1); 2935 STATIC_ASSERT(kSmiTagSize == 1);
2936 STATIC_ASSERT(kSmiTag == 0); // Shifting code depends on these. 2936 STATIC_ASSERT(kSmiTag == 0); // Shifting code depends on these.
2937 __ lea(ebx, Operand(ebx, ecx, times_2, 0)); 2937 __ lea(ebx, Operand(ebx, ecx, times_2, 0));
2938 __ neg(edx); 2938 __ neg(edx);
2939 __ mov(eax, Operand(ebx, edx, times_2, kDisplacement)); 2939 __ mov(eax, Operand(ebx, edx, times_2, kDisplacement));
2940 __ ret(0); 2940 __ ret(0);
2941 2941
2942 // Slow-case: Handle non-smi or out-of-bounds access to arguments 2942 // Slow-case: Handle non-smi or out-of-bounds access to arguments
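
Both bounds checks above use the classic trick of comparing unsigned so that a negative index fails the same test as an index past the end; one j(above_equal, &slow) covers both. In C++ terms:

#include <cstdint>
#include <cstdio>

// A negative index reinterpreted as unsigned is huge, so a single unsigned
// comparison rejects it along with any index >= count.
static bool InBounds(int32_t index, int32_t count) {
  return static_cast<uint32_t>(index) < static_cast<uint32_t>(count);
}

int main() {
  std::printf("%d %d %d\n", InBounds(2, 5), InBounds(-1, 5), InBounds(5, 5));
  // prints: 1 0 0
  return 0;
}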
(...skipping 258 matching lines...)
3201 // esp[0] : return address 3201 // esp[0] : return address
3202 // esp[4] : number of parameters 3202 // esp[4] : number of parameters
3203 // esp[8] : receiver displacement 3203 // esp[8] : receiver displacement
3204 // esp[12] : function 3204 // esp[12] : function
3205 3205
3206 // Check if the calling frame is an arguments adaptor frame. 3206 // Check if the calling frame is an arguments adaptor frame.
3207 Label adaptor_frame, try_allocate, runtime; 3207 Label adaptor_frame, try_allocate, runtime;
3208 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset)); 3208 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
3209 __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset)); 3209 __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
3210 __ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 3210 __ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3211 __ j(equal, &adaptor_frame); 3211 __ j(equal, &adaptor_frame, Label::kNear);
3212 3212
3213 // Get the length from the frame. 3213 // Get the length from the frame.
3214 __ mov(ecx, Operand(esp, 1 * kPointerSize)); 3214 __ mov(ecx, Operand(esp, 1 * kPointerSize));
3215 __ jmp(&try_allocate); 3215 __ jmp(&try_allocate, Label::kNear);
3216 3216
3217 // Patch the arguments.length and the parameters pointer. 3217 // Patch the arguments.length and the parameters pointer.
3218 __ bind(&adaptor_frame); 3218 __ bind(&adaptor_frame);
3219 __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset)); 3219 __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
3220 __ mov(Operand(esp, 1 * kPointerSize), ecx); 3220 __ mov(Operand(esp, 1 * kPointerSize), ecx);
3221 __ lea(edx, Operand(edx, ecx, times_2, 3221 __ lea(edx, Operand(edx, ecx, times_2,
3222 StandardFrameConstants::kCallerSPOffset)); 3222 StandardFrameConstants::kCallerSPOffset));
3223 __ mov(Operand(esp, 2 * kPointerSize), edx); 3223 __ mov(Operand(esp, 2 * kPointerSize), edx);
3224 3224
3225 // Try the new space allocation. Start out with computing the size of 3225 // Try the new space allocation. Start out with computing the size of
(...skipping 25 matching lines...)
3251 // Get the length (smi tagged) and set that as an in-object property too. 3251 // Get the length (smi tagged) and set that as an in-object property too.
3252 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0); 3252 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
3253 __ mov(ecx, Operand(esp, 1 * kPointerSize)); 3253 __ mov(ecx, Operand(esp, 1 * kPointerSize));
3254 __ mov(FieldOperand(eax, JSObject::kHeaderSize + 3254 __ mov(FieldOperand(eax, JSObject::kHeaderSize +
3255 Heap::kArgumentsLengthIndex * kPointerSize), 3255 Heap::kArgumentsLengthIndex * kPointerSize),
3256 ecx); 3256 ecx);
3257 3257
3258 // If there are no actual arguments, we're done. 3258 // If there are no actual arguments, we're done.
3259 Label done; 3259 Label done;
3260 __ test(ecx, Operand(ecx)); 3260 __ test(ecx, Operand(ecx));
3261 __ j(zero, &done); 3261 __ j(zero, &done, Label::kNear);
3262 3262
3263 // Get the parameters pointer from the stack. 3263 // Get the parameters pointer from the stack.
3264 __ mov(edx, Operand(esp, 2 * kPointerSize)); 3264 __ mov(edx, Operand(esp, 2 * kPointerSize));
3265 3265
3266 // Setup the elements pointer in the allocated arguments object and 3266 // Setup the elements pointer in the allocated arguments object and
3267 // initialize the header in the elements fixed array. 3267 // initialize the header in the elements fixed array.
3268 __ lea(edi, Operand(eax, Heap::kArgumentsObjectSizeStrict)); 3268 __ lea(edi, Operand(eax, Heap::kArgumentsObjectSizeStrict));
3269 __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi); 3269 __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
3270 __ mov(FieldOperand(edi, FixedArray::kMapOffset), 3270 __ mov(FieldOperand(edi, FixedArray::kMapOffset),
3271 Immediate(FACTORY->fixed_array_map())); 3271 Immediate(FACTORY->fixed_array_map()));
(...skipping 125 matching lines...)
3397 __ cmp(eax, factory->fixed_array_map()); 3397 __ cmp(eax, factory->fixed_array_map());
3398 __ j(not_equal, &runtime); 3398 __ j(not_equal, &runtime);
3399 // Check that the last match info has space for the capture registers and the 3399 // Check that the last match info has space for the capture registers and the
3400 // additional information. 3400 // additional information.
3401 __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset)); 3401 __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset));
3402 __ SmiUntag(eax); 3402 __ SmiUntag(eax);
3403 __ add(Operand(edx), Immediate(RegExpImpl::kLastMatchOverhead)); 3403 __ add(Operand(edx), Immediate(RegExpImpl::kLastMatchOverhead));
3404 __ cmp(edx, Operand(eax)); 3404 __ cmp(edx, Operand(eax));
3405 __ j(greater, &runtime); 3405 __ j(greater, &runtime);
3406 3406
3407 // Reset offset for possibly sliced string.
3408 __ Set(edi, Immediate(0));
3407 // ecx: RegExp data (FixedArray) 3409 // ecx: RegExp data (FixedArray)
3408 // Check the representation and encoding of the subject string. 3410 // Check the representation and encoding of the subject string.
3409 Label seq_ascii_string, seq_two_byte_string, check_code; 3411 Label seq_ascii_string, seq_two_byte_string, check_code;
3410 __ mov(eax, Operand(esp, kSubjectOffset)); 3412 __ mov(eax, Operand(esp, kSubjectOffset));
3411 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset)); 3413 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
3412 __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset)); 3414 __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
3413 // First check for flat two byte string. 3415 // First check for flat two byte string.
3414 __ and_(ebx, 3416 __ and_(ebx,
3415 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask); 3417 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
3416 STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0); 3418 STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
3417 __ j(zero, &seq_two_byte_string); 3419 __ j(zero, &seq_two_byte_string, Label::kNear);
3418 // Any other flat string must be a flat ascii string. 3420 // Any other flat string must be a flat ascii string.
3419 __ test(Operand(ebx), 3421 __ and_(Operand(ebx),
3420 Immediate(kIsNotStringMask | kStringRepresentationMask)); 3422 Immediate(kIsNotStringMask | kStringRepresentationMask));
3421 __ j(zero, &seq_ascii_string); 3423 __ j(zero, &seq_ascii_string, Label::kNear);
3422 3424
3423 // Check for flat cons string. 3425 // Check for flat cons string or sliced string.
3424 // A flat cons string is a cons string where the second part is the empty 3426 // A flat cons string is a cons string where the second part is the empty
3425 // string. In that case the subject string is just the first part of the cons 3427 // string. In that case the subject string is just the first part of the cons
3426 // string. Also in this case the first part of the cons string is known to be 3428 // string. Also in this case the first part of the cons string is known to be
3427 // a sequential string or an external string. 3429 // a sequential string or an external string.
3428 STATIC_ASSERT(kExternalStringTag != 0); 3430 // In the case of a sliced string its offset has to be taken into account.
3429 STATIC_ASSERT((kConsStringTag & kExternalStringTag) == 0); 3431 Label cons_string, check_encoding;
3430 __ test(Operand(ebx), 3432 STATIC_ASSERT(kConsStringTag < kExternalStringTag);
3431 Immediate(kIsNotStringMask | kExternalStringTag)); 3433 STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
3432 __ j(not_zero, &runtime); 3434 __ cmp(Operand(ebx), Immediate(kExternalStringTag));
3433 // String is a cons string. 3435 __ j(less, &cons_string);
3434 __ mov(edx, FieldOperand(eax, ConsString::kSecondOffset)); 3436 __ j(equal, &runtime);
3435 __ cmp(Operand(edx), factory->empty_string()); 3437
3438 // String is sliced.
3439 __ mov(edi, FieldOperand(eax, SlicedString::kOffsetOffset));
3440 __ mov(eax, FieldOperand(eax, SlicedString::kParentOffset));
3441 // edi: offset of sliced string, smi-tagged.
3442 // eax: parent string.
3443 __ jmp(&check_encoding, Label::kNear);
3444 // String is a cons string, check whether it is flat.
3445 __ bind(&cons_string);
3446 __ cmp(FieldOperand(eax, ConsString::kSecondOffset), factory->empty_string());
3436 __ j(not_equal, &runtime); 3447 __ j(not_equal, &runtime);
3437 __ mov(eax, FieldOperand(eax, ConsString::kFirstOffset)); 3448 __ mov(eax, FieldOperand(eax, ConsString::kFirstOffset));
3449 __ bind(&check_encoding);
3438 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset)); 3450 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
3439 // String is a cons string with empty second part. 3451 // eax: first part of cons string or parent of sliced string.
3440 // eax: first part of cons string. 3452 // ebx: map of first part of cons string or map of parent of sliced string.
3441 // ebx: map of first part of cons string. 3453 // Is first part of cons or parent of slice a flat two byte string?
3442 // Is first part a flat two byte string?
3443 __ test_b(FieldOperand(ebx, Map::kInstanceTypeOffset), 3454 __ test_b(FieldOperand(ebx, Map::kInstanceTypeOffset),
3444 kStringRepresentationMask | kStringEncodingMask); 3455 kStringRepresentationMask | kStringEncodingMask);
3445 STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0); 3456 STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0);
3446 __ j(zero, &seq_two_byte_string); 3457 __ j(zero, &seq_two_byte_string, Label::kNear);
3447 // Any other flat string must be ascii. 3458 // Any other flat string must be ascii.
3448 __ test_b(FieldOperand(ebx, Map::kInstanceTypeOffset), 3459 __ test_b(FieldOperand(ebx, Map::kInstanceTypeOffset),
3449 kStringRepresentationMask); 3460 kStringRepresentationMask);
3450 __ j(not_zero, &runtime); 3461 __ j(not_zero, &runtime);
3451 3462
3452 __ bind(&seq_ascii_string); 3463 __ bind(&seq_ascii_string);
3453 // eax: subject string (flat ascii) 3464 // eax: subject string (flat ascii)
3454 // ecx: RegExp data (FixedArray) 3465 // ecx: RegExp data (FixedArray)
3455 __ mov(edx, FieldOperand(ecx, JSRegExp::kDataAsciiCodeOffset)); 3466 __ mov(edx, FieldOperand(ecx, JSRegExp::kDataAsciiCodeOffset));
3456 __ Set(edi, Immediate(1)); // Type is ascii. 3467 __ Set(ecx, Immediate(1)); // Type is ascii.
3457 __ jmp(&check_code); 3468 __ jmp(&check_code, Label::kNear);
3458 3469
3459 __ bind(&seq_two_byte_string); 3470 __ bind(&seq_two_byte_string);
3460 // eax: subject string (flat two byte) 3471 // eax: subject string (flat two byte)
3461 // ecx: RegExp data (FixedArray) 3472 // ecx: RegExp data (FixedArray)
3462 __ mov(edx, FieldOperand(ecx, JSRegExp::kDataUC16CodeOffset)); 3473 __ mov(edx, FieldOperand(ecx, JSRegExp::kDataUC16CodeOffset));
3463 __ Set(edi, Immediate(0)); // Type is two byte. 3474 __ Set(ecx, Immediate(0)); // Type is two byte.
3464 3475
3465 __ bind(&check_code); 3476 __ bind(&check_code);
3466 // Check that the irregexp code has been generated for the actual string 3477 // Check that the irregexp code has been generated for the actual string
3467 // encoding. If it has, the field contains a code object otherwise it contains 3478 // encoding. If it has, the field contains a code object otherwise it contains
3468 // a smi (code flushing support). 3479 // a smi (code flushing support).
3469 __ JumpIfSmi(edx, &runtime); 3480 __ JumpIfSmi(edx, &runtime);
3470 3481
3471 // eax: subject string 3482 // eax: subject string
3472 // edx: code 3483 // edx: code
3473 // edi: encoding of subject string (1 if ascii, 0 if two_byte); 3484 // ecx: encoding of subject string (1 if ascii, 0 if two_byte);
3474 // Load used arguments before starting to push arguments for call to native 3485 // Load used arguments before starting to push arguments for call to native
3475 // RegExp code to avoid handling changing stack height. 3486 // RegExp code to avoid handling changing stack height.
3476 __ mov(ebx, Operand(esp, kPreviousIndexOffset)); 3487 __ mov(ebx, Operand(esp, kPreviousIndexOffset));
3477 __ SmiUntag(ebx); // Previous index from smi. 3488 __ SmiUntag(ebx); // Previous index from smi.
3478 3489
3479 // eax: subject string 3490 // eax: subject string
3480 // ebx: previous index 3491 // ebx: previous index
3481 // edx: code 3492 // edx: code
3482 // edi: encoding of subject string (1 if ascii 0 if two_byte); 3493 // ecx: encoding of subject string (1 if ascii 0 if two_byte);
3483 // All checks done. Now push arguments for native regexp code. 3494 // All checks done. Now push arguments for native regexp code.
3484 Counters* counters = masm->isolate()->counters(); 3495 Counters* counters = masm->isolate()->counters();
3485 __ IncrementCounter(counters->regexp_entry_native(), 1); 3496 __ IncrementCounter(counters->regexp_entry_native(), 1);
3486 3497
3487 // Isolates: note we add an additional parameter here (isolate pointer). 3498 // Isolates: note we add an additional parameter here (isolate pointer).
3488 static const int kRegExpExecuteArguments = 8; 3499 static const int kRegExpExecuteArguments = 8;
3489 __ EnterApiExitFrame(kRegExpExecuteArguments); 3500 __ EnterApiExitFrame(kRegExpExecuteArguments);
3490 3501
3491 // Argument 8: Pass current isolate address. 3502 // Argument 8: Pass current isolate address.
3492 __ mov(Operand(esp, 7 * kPointerSize), 3503 __ mov(Operand(esp, 7 * kPointerSize),
3493 Immediate(ExternalReference::isolate_address())); 3504 Immediate(ExternalReference::isolate_address()));
3494 3505
3495 // Argument 7: Indicate that this is a direct call from JavaScript. 3506 // Argument 7: Indicate that this is a direct call from JavaScript.
3496 __ mov(Operand(esp, 6 * kPointerSize), Immediate(1)); 3507 __ mov(Operand(esp, 6 * kPointerSize), Immediate(1));
3497 3508
3498 // Argument 6: Start (high end) of backtracking stack memory area. 3509 // Argument 6: Start (high end) of backtracking stack memory area.
3499 __ mov(ecx, Operand::StaticVariable(address_of_regexp_stack_memory_address)); 3510 __ mov(esi, Operand::StaticVariable(address_of_regexp_stack_memory_address));
3500 __ add(ecx, Operand::StaticVariable(address_of_regexp_stack_memory_size)); 3511 __ add(esi, Operand::StaticVariable(address_of_regexp_stack_memory_size));
3501 __ mov(Operand(esp, 5 * kPointerSize), ecx); 3512 __ mov(Operand(esp, 5 * kPointerSize), esi);
3502 3513
3503 // Argument 5: static offsets vector buffer. 3514 // Argument 5: static offsets vector buffer.
3504 __ mov(Operand(esp, 4 * kPointerSize), 3515 __ mov(Operand(esp, 4 * kPointerSize),
3505 Immediate(ExternalReference::address_of_static_offsets_vector( 3516 Immediate(ExternalReference::address_of_static_offsets_vector(
3506 masm->isolate()))); 3517 masm->isolate())));
3507 3518
3519 // Argument 2: Previous index.
3520 __ mov(Operand(esp, 1 * kPointerSize), ebx);
3521
3522 // Argument 1: Original subject string.
3523 // The original subject is in the previous stack frame. Therefore we have to
3524 // use ebp, which points exactly to one pointer size below the previous esp.
3525 // (Because creating a new stack frame pushes the previous ebp onto the stack
3526 // and thereby moves up esp by one kPointerSize.)
3527 __ mov(esi, Operand(ebp, kSubjectOffset + kPointerSize));
3528 __ mov(Operand(esp, 0 * kPointerSize), esi);
3529
3530 // esi: original subject string
3531 // eax: underlying subject string
3532 // ebx: previous index
3533 // ecx: encoding of subject string (1 if ascii 0 if two_byte);
3534 // edx: code
3508 // Argument 4: End of string data 3535 // Argument 4: End of string data
3509 // Argument 3: Start of string data 3536 // Argument 3: Start of string data
3537 // Prepare start and end index of the input.
3538 // Load the length from the original sliced string if that is the case.
3539 __ mov(esi, FieldOperand(esi, String::kLengthOffset));
3540 __ add(esi, Operand(edi)); // Calculate input end wrt offset.
3541 __ SmiUntag(edi);
3542 __ add(ebx, Operand(edi)); // Calculate input start wrt offset.
3543
3544 // ebx: start index of the input string
3545 // esi: end index of the input string
3510 Label setup_two_byte, setup_rest; 3546 Label setup_two_byte, setup_rest;
3511 __ test(edi, Operand(edi)); 3547 __ test(ecx, Operand(ecx));
3512 __ mov(edi, FieldOperand(eax, String::kLengthOffset));
3513 __ j(zero, &setup_two_byte, Label::kNear); 3548 __ j(zero, &setup_two_byte, Label::kNear);
3514 __ SmiUntag(edi); 3549 __ SmiUntag(esi);
3515 __ lea(ecx, FieldOperand(eax, edi, times_1, SeqAsciiString::kHeaderSize)); 3550 __ lea(ecx, FieldOperand(eax, esi, times_1, SeqAsciiString::kHeaderSize));
3516 __ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4. 3551 __ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4.
3517 __ lea(ecx, FieldOperand(eax, ebx, times_1, SeqAsciiString::kHeaderSize)); 3552 __ lea(ecx, FieldOperand(eax, ebx, times_1, SeqAsciiString::kHeaderSize));
3518 __ mov(Operand(esp, 2 * kPointerSize), ecx); // Argument 3. 3553 __ mov(Operand(esp, 2 * kPointerSize), ecx); // Argument 3.
3519 __ jmp(&setup_rest, Label::kNear); 3554 __ jmp(&setup_rest, Label::kNear);
3520 3555
3521 __ bind(&setup_two_byte); 3556 __ bind(&setup_two_byte);
3522 STATIC_ASSERT(kSmiTag == 0); 3557 STATIC_ASSERT(kSmiTag == 0);
3523 STATIC_ASSERT(kSmiTagSize == 1); // edi is smi (powered by 2). 3558 STATIC_ASSERT(kSmiTagSize == 1); // esi is smi (powered by 2).
3524 __ lea(ecx, FieldOperand(eax, edi, times_1, SeqTwoByteString::kHeaderSize)); 3559 __ lea(ecx, FieldOperand(eax, esi, times_1, SeqTwoByteString::kHeaderSize));
3525 __ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4. 3560 __ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4.
3526 __ lea(ecx, FieldOperand(eax, ebx, times_2, SeqTwoByteString::kHeaderSize)); 3561 __ lea(ecx, FieldOperand(eax, ebx, times_2, SeqTwoByteString::kHeaderSize));
3527 __ mov(Operand(esp, 2 * kPointerSize), ecx); // Argument 3. 3562 __ mov(Operand(esp, 2 * kPointerSize), ecx); // Argument 3.
3528 3563
3529 __ bind(&setup_rest); 3564 __ bind(&setup_rest);
3530 3565
3531 // Argument 2: Previous index.
3532 __ mov(Operand(esp, 1 * kPointerSize), ebx);
3533
3534 // Argument 1: Subject string.
3535 __ mov(Operand(esp, 0 * kPointerSize), eax);
3536
3537 // Locate the code entry and call it. 3566 // Locate the code entry and call it.
3538 __ add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag)); 3567 __ add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
3539 __ call(Operand(edx)); 3568 __ call(Operand(edx));
3540 3569
3541 // Drop arguments and come back to JS mode. 3570 // Drop arguments and come back to JS mode.
3542 __ LeaveApiExitFrame(); 3571 __ LeaveApiExitFrame();
3543 3572
3544 // Check the result. 3573 // Check the result.
3545 Label success; 3574 Label success;
3546 __ cmp(eax, NativeRegExpMacroAssembler::SUCCESS); 3575 __ cmp(eax, NativeRegExpMacroAssembler::SUCCESS);
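
The bulk of the new code in this chunk teaches RegExpExecStub about sliced strings: the slice offset is remembered in edi (smi-tagged), the parent string is scanned instead, and the offset is later added to both the previous index and the subject length when computing the start/end addresses passed to the native regexp code. A toy model of the unwrapping decision, with an invented Str hierarchy standing in for V8's string classes:

#include <string>

// Invented hierarchy: Seq holds the characters, Cons is a pair, Sliced is a
// (parent, offset) view; External exists only so the runtime fallback shows.
struct Str {
  enum Kind { kSeq, kCons, kSliced, kExternal } kind;
  std::string chars;   // kSeq
  const Str* first;    // kCons
  const Str* second;   // kCons
  const Str* parent;   // kSliced
  int offset;          // kSliced
};

// Returns the flat sequential string to hand to the regexp code, or nullptr
// for the cases the stub punts to the runtime (unflattened cons, external).
// *slice_offset is the value the stub keeps in edi.
static const Str* UnwrapSubject(const Str* s, int* slice_offset) {
  *slice_offset = 0;                                 // reset for non-slices
  if (s->kind == Str::kSliced) {
    *slice_offset = s->offset;                       // remember the offset
    s = s->parent;                                   // scan the parent
  } else if (s->kind == Str::kCons) {
    if (s->second->kind != Str::kSeq || !s->second->chars.empty())
      return nullptr;                                // not flat: runtime
    s = s->first;                                    // flat cons: use first
  }
  return s->kind == Str::kSeq ? s : nullptr;         // external: runtime
}

int main() {
  Str seq{Str::kSeq, "abcdef", nullptr, nullptr, nullptr, 0};
  Str slice{Str::kSliced, "", nullptr, nullptr, &seq, 2};
  int offset = 0;
  const Str* flat = UnwrapSubject(&slice, &offset);
  return (flat == &seq && offset == 2) ? 0 : 1;      // exits with 0
}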
(...skipping 18 matching lines...)
3565 __ j(equal, &runtime); 3594 __ j(equal, &runtime);
3566 // For exception, throw the exception again. 3595 // For exception, throw the exception again.
3567 3596
3568 // Clear the pending exception variable. 3597 // Clear the pending exception variable.
3569 __ mov(Operand::StaticVariable(pending_exception), edx); 3598 __ mov(Operand::StaticVariable(pending_exception), edx);
3570 3599
3571 // Special handling of termination exceptions which are uncatchable 3600 // Special handling of termination exceptions which are uncatchable
3572 // by javascript code. 3601 // by javascript code.
3573 __ cmp(eax, factory->termination_exception()); 3602 __ cmp(eax, factory->termination_exception());
3574 Label throw_termination_exception; 3603 Label throw_termination_exception;
3575 __ j(equal, &throw_termination_exception); 3604 __ j(equal, &throw_termination_exception, Label::kNear);
3576 3605
3577 // Handle normal exception by following handler chain. 3606 // Handle normal exception by following handler chain.
3578 __ Throw(eax); 3607 __ Throw(eax);
3579 3608
3580 __ bind(&throw_termination_exception); 3609 __ bind(&throw_termination_exception);
3581 __ ThrowUncatchable(TERMINATION, eax); 3610 __ ThrowUncatchable(TERMINATION, eax);
3582 3611
3583 __ bind(&failure); 3612 __ bind(&failure);
3584 // For failure to match, return null. 3613 // For failure to match, return null.
3585 __ mov(Operand(eax), factory->null_value()); 3614 __ mov(Operand(eax), factory->null_value());
(...skipping 268 matching lines...)
3854 static int NegativeComparisonResult(Condition cc) { 3883 static int NegativeComparisonResult(Condition cc) {
3855 ASSERT(cc != equal); 3884 ASSERT(cc != equal);
3856 ASSERT((cc == less) || (cc == less_equal) 3885 ASSERT((cc == less) || (cc == less_equal)
3857 || (cc == greater) || (cc == greater_equal)); 3886 || (cc == greater) || (cc == greater_equal));
3858 return (cc == greater || cc == greater_equal) ? LESS : GREATER; 3887 return (cc == greater || cc == greater_equal) ? LESS : GREATER;
3859 } 3888 }
3860 3889
3861 void CompareStub::Generate(MacroAssembler* masm) { 3890 void CompareStub::Generate(MacroAssembler* masm) {
3862 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg)); 3891 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
3863 3892
3864 Label check_unequal_objects, done; 3893 Label check_unequal_objects;
3865 3894
3866 // Compare two smis if required. 3895 // Compare two smis if required.
3867 if (include_smi_compare_) { 3896 if (include_smi_compare_) {
3868 Label non_smi, smi_done; 3897 Label non_smi, smi_done;
3869 __ mov(ecx, Operand(edx)); 3898 __ mov(ecx, Operand(edx));
3870 __ or_(ecx, Operand(eax)); 3899 __ or_(ecx, Operand(eax));
3871 __ JumpIfNotSmi(ecx, &non_smi); 3900 __ JumpIfNotSmi(ecx, &non_smi, Label::kNear);
3872 __ sub(edx, Operand(eax)); // Return on the result of the subtraction. 3901 __ sub(edx, Operand(eax)); // Return on the result of the subtraction.
3873 __ j(no_overflow, &smi_done); 3902 __ j(no_overflow, &smi_done, Label::kNear);
3874 __ not_(edx); // Correct sign in case of overflow. edx is never 0 here. 3903 __ not_(edx); // Correct sign in case of overflow. edx is never 0 here.
3875 __ bind(&smi_done); 3904 __ bind(&smi_done);
3876 __ mov(eax, edx); 3905 __ mov(eax, edx);
3877 __ ret(0); 3906 __ ret(0);
3878 __ bind(&non_smi); 3907 __ bind(&non_smi);
3879 } else if (FLAG_debug_code) { 3908 } else if (FLAG_debug_code) {
3880 __ mov(ecx, Operand(edx)); 3909 __ mov(ecx, Operand(edx));
3881 __ or_(ecx, Operand(eax)); 3910 __ or_(ecx, Operand(eax));
3882 __ test(ecx, Immediate(kSmiTagMask)); 3911 __ test(ecx, Immediate(kSmiTagMask));
3883 __ Assert(not_zero, "Unexpected smi operands."); 3912 __ Assert(not_zero, "Unexpected smi operands.");
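
The smi fast path above compares by doing edx - eax directly on the tagged words; when that subtraction overflows, the wrapped result has the wrong sign, and a bitwise NOT (which flips the sign bit and, for tagged smis, can never produce 0) repairs it. The same trick in portable C++, taking the raw tagged words (always even) as input:

#include <cstdint>
#include <cstdio>

static int32_t CompareBySubtraction(int32_t a, int32_t b) {
  int64_t exact = static_cast<int64_t>(a) - b;           // true difference
  int32_t wrapped = static_cast<int32_t>(static_cast<uint32_t>(a) -
                                         static_cast<uint32_t>(b));
  bool overflowed = (exact != wrapped);
  // On overflow the wrapped result's sign is inverted; NOT fixes the sign,
  // and for even (tagged smi) inputs it cannot yield 0, so the
  // negative / zero / positive answer stays reliable.
  return overflowed ? ~wrapped : wrapped;
}

int main() {
  // Tagged smis span the full 32-bit word, so the subtraction can wrap.
  std::printf("%d\n", CompareBySubtraction(INT32_MIN, 0x7ffffffe) < 0);  // 1
  return 0;
}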
(...skipping 101 matching lines...)
3985 __ mov(ebx, edx); 4014 __ mov(ebx, edx);
3986 __ xor_(ebx, Operand(eax)); 4015 __ xor_(ebx, Operand(eax));
3987 __ and_(ebx, Operand(ecx)); // ebx holds either 0 or eax ^ edx. 4016 __ and_(ebx, Operand(ecx)); // ebx holds either 0 or eax ^ edx.
3988 __ xor_(ebx, Operand(eax)); 4017 __ xor_(ebx, Operand(eax));
3989 // if eax was smi, ebx is now edx, else eax. 4018 // if eax was smi, ebx is now edx, else eax.
3990 4019
3991 // Check if the non-smi operand is a heap number. 4020 // Check if the non-smi operand is a heap number.
3992 __ cmp(FieldOperand(ebx, HeapObject::kMapOffset), 4021 __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
3993 Immediate(masm->isolate()->factory()->heap_number_map())); 4022 Immediate(masm->isolate()->factory()->heap_number_map()));
3994 // If heap number, handle it in the slow case. 4023 // If heap number, handle it in the slow case.
3995 __ j(equal, &slow); 4024 __ j(equal, &slow, Label::kNear);
3996 // Return non-equal (ebx is not zero) 4025 // Return non-equal (ebx is not zero)
3997 __ mov(eax, ebx); 4026 __ mov(eax, ebx);
3998 __ ret(0); 4027 __ ret(0);
3999 4028
4000 __ bind(&not_smis); 4029 __ bind(&not_smis);
4001 // If either operand is a JSObject or an oddball value, then they are not 4030 // If either operand is a JSObject or an oddball value, then they are not
4002 // equal since their pointers are different 4031 // equal since their pointers are different
4003 // There is no test for undetectability in strict equality. 4032 // There is no test for undetectability in strict equality.
4004 4033
4005 // Get the type of the first operand. 4034 // Get the type of the first operand.
(...skipping 30 matching lines...)
4036 Label non_number_comparison; 4065 Label non_number_comparison;
4037 Label unordered; 4066 Label unordered;
4038 if (CpuFeatures::IsSupported(SSE2)) { 4067 if (CpuFeatures::IsSupported(SSE2)) {
4039 CpuFeatures::Scope use_sse2(SSE2); 4068 CpuFeatures::Scope use_sse2(SSE2);
4040 CpuFeatures::Scope use_cmov(CMOV); 4069 CpuFeatures::Scope use_cmov(CMOV);
4041 4070
4042 FloatingPointHelper::LoadSSE2Operands(masm, &non_number_comparison); 4071 FloatingPointHelper::LoadSSE2Operands(masm, &non_number_comparison);
4043 __ ucomisd(xmm0, xmm1); 4072 __ ucomisd(xmm0, xmm1);
4044 4073
4045 // Don't base result on EFLAGS when a NaN is involved. 4074 // Don't base result on EFLAGS when a NaN is involved.
4046 __ j(parity_even, &unordered); 4075 __ j(parity_even, &unordered, Label::kNear);
4047 // Return a result of -1, 0, or 1, based on EFLAGS. 4076 // Return a result of -1, 0, or 1, based on EFLAGS.
4048 __ mov(eax, 0); // equal 4077 __ mov(eax, 0); // equal
4049 __ mov(ecx, Immediate(Smi::FromInt(1))); 4078 __ mov(ecx, Immediate(Smi::FromInt(1)));
4050 __ cmov(above, eax, Operand(ecx)); 4079 __ cmov(above, eax, Operand(ecx));
4051 __ mov(ecx, Immediate(Smi::FromInt(-1))); 4080 __ mov(ecx, Immediate(Smi::FromInt(-1)));
4052 __ cmov(below, eax, Operand(ecx)); 4081 __ cmov(below, eax, Operand(ecx));
4053 __ ret(0); 4082 __ ret(0);
4054 } else { 4083 } else {
4055 FloatingPointHelper::CheckFloatOperands( 4084 FloatingPointHelper::CheckFloatOperands(
4056 masm, &non_number_comparison, ebx); 4085 masm, &non_number_comparison, ebx);
4057 FloatingPointHelper::LoadFloatOperand(masm, eax); 4086 FloatingPointHelper::LoadFloatOperand(masm, eax);
4058 FloatingPointHelper::LoadFloatOperand(masm, edx); 4087 FloatingPointHelper::LoadFloatOperand(masm, edx);
4059 __ FCmp(); 4088 __ FCmp();
4060 4089
4061 // Don't base result on EFLAGS when a NaN is involved. 4090 // Don't base result on EFLAGS when a NaN is involved.
4062 __ j(parity_even, &unordered); 4091 __ j(parity_even, &unordered, Label::kNear);
4063 4092
4064 Label below_label, above_label; 4093 Label below_label, above_label;
4065 // Return a result of -1, 0, or 1, based on EFLAGS. 4094 // Return a result of -1, 0, or 1, based on EFLAGS.
4066 __ j(below, &below_label); 4095 __ j(below, &below_label, Label::kNear);
4067 __ j(above, &above_label); 4096 __ j(above, &above_label, Label::kNear);
4068 4097
4069 __ Set(eax, Immediate(0)); 4098 __ Set(eax, Immediate(0));
4070 __ ret(0); 4099 __ ret(0);
4071 4100
4072 __ bind(&below_label); 4101 __ bind(&below_label);
4073 __ mov(eax, Immediate(Smi::FromInt(-1))); 4102 __ mov(eax, Immediate(Smi::FromInt(-1)));
4074 __ ret(0); 4103 __ ret(0);
4075 4104
4076 __ bind(&above_label); 4105 __ bind(&above_label);
4077 __ mov(eax, Immediate(Smi::FromInt(1))); 4106 __ mov(eax, Immediate(Smi::FromInt(1)));
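
Both floating-point paths above first check the unordered case (a NaN on either side sets the parity flag after ucomisd / FCmp) and only then map the flags to -1, 0 or +1. A scalar equivalent of that three-way compare:

#include <cmath>
#include <cstdio>

// Mirrors the flag handling: report unordered separately (the parity_even
// branch), otherwise return -1 / 0 / +1 like the two cmovs do.
static int ThreeWayCompare(double a, double b, bool* unordered) {
  *unordered = std::isnan(a) || std::isnan(b);
  if (*unordered) return 0;   // caller handles NaN on its own path
  if (a > b) return 1;        // cmov(above)
  if (a < b) return -1;       // cmov(below)
  return 0;                   // equal
}

int main() {
  bool u;
  std::printf("%d\n", ThreeWayCompare(1.5, 2.5, &u));         // -1
  std::printf("%d %d\n", ThreeWayCompare(NAN, 0.0, &u), u);   // 0 1
  return 0;
}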
(...skipping 294 matching lines...)
4372 __ LeaveExitFrame(save_doubles_ == kSaveFPRegs); 4401 __ LeaveExitFrame(save_doubles_ == kSaveFPRegs);
4373 __ ret(0); 4402 __ ret(0);
4374 4403
4375 // Handling of failure. 4404 // Handling of failure.
4376 __ bind(&failure_returned); 4405 __ bind(&failure_returned);
4377 4406
4378 Label retry; 4407 Label retry;
4379 // If the returned exception is RETRY_AFTER_GC continue at retry label 4408 // If the returned exception is RETRY_AFTER_GC continue at retry label
4380 STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0); 4409 STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0);
4381 __ test(eax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize)); 4410 __ test(eax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
4382 __ j(zero, &retry); 4411 __ j(zero, &retry, Label::kNear);
4383 4412
4384 // Special handling of out of memory exceptions. 4413 // Special handling of out of memory exceptions.
4385 __ cmp(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException())); 4414 __ cmp(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException()));
4386 __ j(equal, throw_out_of_memory_exception); 4415 __ j(equal, throw_out_of_memory_exception);
4387 4416
4388 // Retrieve the pending exception and clear the variable. 4417 // Retrieve the pending exception and clear the variable.
4389 ExternalReference the_hole_location = 4418 ExternalReference the_hole_location =
4390 ExternalReference::the_hole_value_location(masm->isolate()); 4419 ExternalReference::the_hole_value_location(masm->isolate());
4391 __ mov(eax, Operand::StaticVariable(pending_exception_address)); 4420 __ mov(eax, Operand::StaticVariable(pending_exception_address));
4392 __ mov(edx, Operand::StaticVariable(the_hole_location)); 4421 __ mov(edx, Operand::StaticVariable(the_hole_location));
(...skipping 99 matching lines...)
4492 __ push(ebx); 4521 __ push(ebx);
4493 4522
4494 // Save copies of the top frame descriptor on the stack. 4523 // Save copies of the top frame descriptor on the stack.
4495 ExternalReference c_entry_fp(Isolate::k_c_entry_fp_address, masm->isolate()); 4524 ExternalReference c_entry_fp(Isolate::k_c_entry_fp_address, masm->isolate());
4496 __ push(Operand::StaticVariable(c_entry_fp)); 4525 __ push(Operand::StaticVariable(c_entry_fp));
4497 4526
4498 // If this is the outermost JS call, set js_entry_sp value. 4527 // If this is the outermost JS call, set js_entry_sp value.
4499 ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address, 4528 ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address,
4500 masm->isolate()); 4529 masm->isolate());
4501 __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0)); 4530 __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
4502 __ j(not_equal, &not_outermost_js); 4531 __ j(not_equal, &not_outermost_js, Label::kNear);
4503 __ mov(Operand::StaticVariable(js_entry_sp), ebp); 4532 __ mov(Operand::StaticVariable(js_entry_sp), ebp);
4504 __ push(Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME))); 4533 __ push(Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
4505 Label cont; 4534 Label cont;
4506 __ jmp(&cont); 4535 __ jmp(&cont, Label::kNear);
4507 __ bind(&not_outermost_js); 4536 __ bind(&not_outermost_js);
4508 __ push(Immediate(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME))); 4537 __ push(Immediate(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));
4509 __ bind(&cont); 4538 __ bind(&cont);
4510 4539
4511 // Call a faked try-block that does the invoke. 4540 // Call a faked try-block that does the invoke.
4512 __ call(&invoke); 4541 __ call(&invoke);
4513 4542
4514 // Caught exception: Store result (exception) in the pending 4543 // Caught exception: Store result (exception) in the pending
4515 // exception field in the JSEnv and return a failure sentinel. 4544 // exception field in the JSEnv and return a failure sentinel.
4516 ExternalReference pending_exception(Isolate::k_pending_exception_address, 4545 ExternalReference pending_exception(Isolate::k_pending_exception_address,
(...skipping 220 matching lines...)
4737 if (!ReturnTrueFalseObject()) { 4766 if (!ReturnTrueFalseObject()) {
4738 __ Set(eax, Immediate(Smi::FromInt(1))); 4767 __ Set(eax, Immediate(Smi::FromInt(1)));
4739 } 4768 }
4740 } 4769 }
4741 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); 4770 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
4742 4771
4743 Label object_not_null, object_not_null_or_smi; 4772 Label object_not_null, object_not_null_or_smi;
4744 __ bind(&not_js_object); 4773 __ bind(&not_js_object);
4745 // Before null, smi and string value checks, check that the rhs is a function 4774 // Before null, smi and string value checks, check that the rhs is a function
4746 // as for a non-function rhs an exception needs to be thrown. 4775 // as for a non-function rhs an exception needs to be thrown.
4747 __ JumpIfSmi(function, &slow); 4776 __ JumpIfSmi(function, &slow, Label::kNear);
4748 __ CmpObjectType(function, JS_FUNCTION_TYPE, scratch); 4777 __ CmpObjectType(function, JS_FUNCTION_TYPE, scratch);
4749 __ j(not_equal, &slow); 4778 __ j(not_equal, &slow, Label::kNear);
4750 4779
4751 // Null is not an instance of anything. 4780 // Null is not an instance of anything.
4752 __ cmp(object, factory->null_value()); 4781 __ cmp(object, factory->null_value());
4753 __ j(not_equal, &object_not_null); 4782 __ j(not_equal, &object_not_null, Label::kNear);
4754 __ Set(eax, Immediate(Smi::FromInt(1))); 4783 __ Set(eax, Immediate(Smi::FromInt(1)));
4755 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); 4784 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
4756 4785
4757 __ bind(&object_not_null); 4786 __ bind(&object_not_null);
4758 // Smi values are not instances of anything. 4787 // Smi values are not instances of anything.
4759 __ JumpIfNotSmi(object, &object_not_null_or_smi); 4788 __ JumpIfNotSmi(object, &object_not_null_or_smi, Label::kNear);
4760 __ Set(eax, Immediate(Smi::FromInt(1))); 4789 __ Set(eax, Immediate(Smi::FromInt(1)));
4761 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); 4790 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
4762 4791
4763 __ bind(&object_not_null_or_smi); 4792 __ bind(&object_not_null_or_smi);
4764 // String values are not instances of anything. 4793 // String values are not instances of anything.
4765 Condition is_string = masm->IsObjectStringType(object, scratch, scratch); 4794 Condition is_string = masm->IsObjectStringType(object, scratch, scratch);
4766 __ j(NegateCondition(is_string), &slow); 4795 __ j(NegateCondition(is_string), &slow, Label::kNear);
4767 __ Set(eax, Immediate(Smi::FromInt(1))); 4796 __ Set(eax, Immediate(Smi::FromInt(1)));
4768 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); 4797 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
4769 4798
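As a reading aid for the fast negative cases above, here is a hedged C++ sketch, not the stub itself: the rhs must be a JS function or the slow path throws, while null, smis and strings are never instances of anything, so the stub can answer immediately (a Smi result of 1 encodes "not an instance"). The ObjectView structure stands in for the tag and instance-type checks and is illustrative only.

// Illustrative stand-in for the JumpIfSmi/CmpObjectType/IsObjectStringType
// checks on the instance-type byte; not V8's object layout.
struct ObjectView {
  bool is_smi;
  bool is_null;
  bool is_string;
  bool is_js_function;
};

// Returns true if the fast path could decide the answer; *is_instance holds
// the result in that case. Otherwise the generic (slow) path takes over,
// which also throws when the rhs is not a function.
bool InstanceOfFastNegative(const ObjectView& object,
                            const ObjectView& function,
                            bool* is_instance) {
  if (function.is_smi || !function.is_js_function) return false;  // slow path
  if (object.is_null || object.is_smi || object.is_string) {
    *is_instance = false;   // the stub returns Smi 1 ("not an instance") here
    return true;
  }
  return false;             // general object case is handled elsewhere
}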
4770 // Slow-case: Go through the JavaScript implementation. 4799 // Slow-case: Go through the JavaScript implementation.
4771 __ bind(&slow); 4800 __ bind(&slow);
4772 if (!ReturnTrueFalseObject()) { 4801 if (!ReturnTrueFalseObject()) {
4773 // Tail call the builtin which returns 0 or 1. 4802 // Tail call the builtin which returns 0 or 1.
4774 if (HasArgsInRegisters()) { 4803 if (HasArgsInRegisters()) {
4775 // Push arguments below return address. 4804 // Push arguments below return address.
4776 __ pop(scratch); 4805 __ pop(scratch);
(...skipping 66 matching lines...)
4843 } 4872 }
4844 4873
4845 4874
4846 // ------------------------------------------------------------------------- 4875 // -------------------------------------------------------------------------
4847 // StringCharCodeAtGenerator 4876 // StringCharCodeAtGenerator
4848 4877
4849 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { 4878 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
4850 Label flat_string; 4879 Label flat_string;
4851 Label ascii_string; 4880 Label ascii_string;
4852 Label got_char_code; 4881 Label got_char_code;
4882 Label sliced_string;
4853 4883
4854 // If the receiver is a smi trigger the non-string case. 4884 // If the receiver is a smi trigger the non-string case.
4855 STATIC_ASSERT(kSmiTag == 0); 4885 STATIC_ASSERT(kSmiTag == 0);
4856 __ JumpIfSmi(object_, receiver_not_string_); 4886 __ JumpIfSmi(object_, receiver_not_string_);
4857 4887
4858 // Fetch the instance type of the receiver into result register. 4888 // Fetch the instance type of the receiver into result register.
4859 __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset)); 4889 __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
4860 __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset)); 4890 __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
4861 // If the receiver is not a string trigger the non-string case. 4891 // If the receiver is not a string trigger the non-string case.
4862 __ test(result_, Immediate(kIsNotStringMask)); 4892 __ test(result_, Immediate(kIsNotStringMask));
(...skipping 10 matching lines...)
4873 // Check for index out of range. 4903 // Check for index out of range.
4874 __ cmp(scratch_, FieldOperand(object_, String::kLengthOffset)); 4904 __ cmp(scratch_, FieldOperand(object_, String::kLengthOffset));
4875 __ j(above_equal, index_out_of_range_); 4905 __ j(above_equal, index_out_of_range_);
4876 4906
4877 // We need special handling for non-flat strings. 4907 // We need special handling for non-flat strings.
4878 STATIC_ASSERT(kSeqStringTag == 0); 4908 STATIC_ASSERT(kSeqStringTag == 0);
4879 __ test(result_, Immediate(kStringRepresentationMask)); 4909 __ test(result_, Immediate(kStringRepresentationMask));
4880 __ j(zero, &flat_string); 4910 __ j(zero, &flat_string);
4881 4911
4882 // Handle non-flat strings. 4912 // Handle non-flat strings.
4883 __ test(result_, Immediate(kIsConsStringMask)); 4913 __ and_(result_, kStringRepresentationMask);
4884 __ j(zero, &call_runtime_); 4914 STATIC_ASSERT(kConsStringTag < kExternalStringTag);
4915 STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
4916 __ cmp(result_, kExternalStringTag);
4917 __ j(greater, &sliced_string, Label::kNear);
4918 __ j(equal, &call_runtime_);
4885 4919
4886 // ConsString. 4920 // ConsString.
4887 // Check whether the right hand side is the empty string (i.e. if 4921 // Check whether the right hand side is the empty string (i.e. if
4888 // this is really a flat string in a cons string). If that is not 4922 // this is really a flat string in a cons string). If that is not
4889 // the case we would rather go to the runtime system now to flatten 4923 // the case we would rather go to the runtime system now to flatten
4890 // the string. 4924 // the string.
4925 Label assure_seq_string;
4891 __ cmp(FieldOperand(object_, ConsString::kSecondOffset), 4926 __ cmp(FieldOperand(object_, ConsString::kSecondOffset),
4892 Immediate(masm->isolate()->factory()->empty_string())); 4927 Immediate(masm->isolate()->factory()->empty_string()));
4893 __ j(not_equal, &call_runtime_); 4928 __ j(not_equal, &call_runtime_);
4894 // Get the first of the two strings and load its instance type. 4929 // Get the first of the two strings and load its instance type.
4895 __ mov(object_, FieldOperand(object_, ConsString::kFirstOffset)); 4930 __ mov(object_, FieldOperand(object_, ConsString::kFirstOffset));
4931 __ jmp(&assure_seq_string, Label::kNear);
4932
4933 // SlicedString, unpack and add offset.
4934 __ bind(&sliced_string);
4935 __ add(scratch_, FieldOperand(object_, SlicedString::kOffsetOffset));
4936 __ mov(object_, FieldOperand(object_, SlicedString::kParentOffset));
4937
4938 // Assure that we are dealing with a sequential string. Go to runtime if not.
4939 __ bind(&assure_seq_string);
4896 __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset)); 4940 __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
4897 __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset)); 4941 __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
4898 // If the first cons component is also non-flat, then go to runtime.
4899 STATIC_ASSERT(kSeqStringTag == 0); 4942 STATIC_ASSERT(kSeqStringTag == 0);
4900 __ test(result_, Immediate(kStringRepresentationMask)); 4943 __ test(result_, Immediate(kStringRepresentationMask));
4901 __ j(not_zero, &call_runtime_); 4944 __ j(not_zero, &call_runtime_);
4945 __ jmp(&flat_string, Label::kNear);
4902 4946
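To make the new control flow easier to follow, here is a hedged C++ sketch of the unwrapping the generator now performs before the flat-string load: a cons string is only handled when its second part is the empty string (all its characters live in the first part), a sliced string contributes its parent plus a stored offset, and anything that still is not sequential afterwards is left to the runtime. The StringView structure and field names are illustrative, not V8's heap layout.

// Illustrative representation kinds; the real code reads these from the
// map's instance-type bits tested above.
enum Kind { SEQUENTIAL, CONS, SLICED, EXTERNAL };

struct StringView {
  Kind kind;
  StringView* first;     // cons: first part
  bool second_is_empty;  // cons: whether the second part is ""
  StringView* parent;    // sliced: underlying string
  int offset;            // sliced: start index within the parent
};

// Returns false when the runtime has to take over (unflattened cons,
// external string, ...); otherwise *str / *index address a sequential
// string directly.
bool UnwrapForCharCodeAt(StringView** str, int* index) {
  StringView* s = *str;
  if (s->kind == CONS) {
    if (!s->second_is_empty) return false;   // needs flattening in runtime
    s = s->first;
  } else if (s->kind == SLICED) {
    *index += s->offset;                     // add the slice's start offset
    s = s->parent;
  }
  *str = s;
  return s->kind == SEQUENTIAL;              // external strings go to runtime
}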
4903 // Check for 1-byte or 2-byte string. 4947 // Check for 1-byte or 2-byte string.
4904 __ bind(&flat_string); 4948 __ bind(&flat_string);
4905 STATIC_ASSERT(kAsciiStringTag != 0); 4949 STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
4950 STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
4906 __ test(result_, Immediate(kStringEncodingMask)); 4951 __ test(result_, Immediate(kStringEncodingMask));
4907 __ j(not_zero, &ascii_string); 4952 __ j(not_zero, &ascii_string, Label::kNear);
4908 4953
4909 // 2-byte string. 4954 // 2-byte string.
4910 // Load the 2-byte character code into the result register. 4955 // Load the 2-byte character code into the result register.
4911 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1); 4956 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
4912 __ movzx_w(result_, FieldOperand(object_, 4957 __ movzx_w(result_, FieldOperand(object_,
4913 scratch_, times_1, // Scratch is smi-tagged. 4958 scratch_, times_1, // Scratch is smi-tagged.
4914 SeqTwoByteString::kHeaderSize)); 4959 SeqTwoByteString::kHeaderSize));
4915 __ jmp(&got_char_code); 4960 __ jmp(&got_char_code, Label::kNear);
4916 4961
4917 // ASCII string. 4962 // ASCII string.
4918 // Load the byte into the result register. 4963 // Load the byte into the result register.
4919 __ bind(&ascii_string); 4964 __ bind(&ascii_string);
4920 __ SmiUntag(scratch_); 4965 __ SmiUntag(scratch_);
4921 __ movzx_b(result_, FieldOperand(object_, 4966 __ movzx_b(result_, FieldOperand(object_,
4922 scratch_, times_1, 4967 scratch_, times_1,
4923 SeqAsciiString::kHeaderSize)); 4968 SeqAsciiString::kHeaderSize));
4924 __ bind(&got_char_code); 4969 __ bind(&got_char_code);
4925 __ SmiTag(result_); 4970 __ SmiTag(result_);
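The index register (scratch_) is still smi-tagged when the two-byte load happens, which is why times_1 scaling suffices there while the ascii path untags first. A small hedged sketch of that arithmetic, assuming kSmiTagSize == 1 as asserted above; the helper is illustrative only.

#include <cstdint>
#include <cstring>

// chars points at the string's character payload (after the header).
uint16_t LoadCharCode(const uint8_t* chars, int smi_tagged_index,
                      bool is_one_byte) {
  if (is_one_byte) {
    int index = smi_tagged_index >> 1;        // SmiUntag
    return chars[index];                      // one byte per character
  }
  // For a two-byte string the smi-tagged value (index * 2) is already the
  // correct byte offset, matching the times_1 addressing mode above.
  uint16_t code;
  std::memcpy(&code, chars + smi_tagged_index, sizeof(code));
  return code;
}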
(...skipping 240 matching lines...)
5166 __ j(below, &string_add_flat_result); 5211 __ j(below, &string_add_flat_result);
5167 5212
5168 // If result is not supposed to be flat allocate a cons string object. If both 5213 // If result is not supposed to be flat allocate a cons string object. If both
5169 // strings are ascii the result is an ascii cons string. 5214 // strings are ascii the result is an ascii cons string.
5170 Label non_ascii, allocated, ascii_data; 5215 Label non_ascii, allocated, ascii_data;
5171 __ mov(edi, FieldOperand(eax, HeapObject::kMapOffset)); 5216 __ mov(edi, FieldOperand(eax, HeapObject::kMapOffset));
5172 __ movzx_b(ecx, FieldOperand(edi, Map::kInstanceTypeOffset)); 5217 __ movzx_b(ecx, FieldOperand(edi, Map::kInstanceTypeOffset));
5173 __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset)); 5218 __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
5174 __ movzx_b(edi, FieldOperand(edi, Map::kInstanceTypeOffset)); 5219 __ movzx_b(edi, FieldOperand(edi, Map::kInstanceTypeOffset));
5175 __ and_(ecx, Operand(edi)); 5220 __ and_(ecx, Operand(edi));
5176 STATIC_ASSERT(kStringEncodingMask == kAsciiStringTag); 5221 STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
5177 __ test(ecx, Immediate(kAsciiStringTag)); 5222 STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
5223 __ test(ecx, Immediate(kStringEncodingMask));
5178 __ j(zero, &non_ascii); 5224 __ j(zero, &non_ascii);
5179 __ bind(&ascii_data); 5225 __ bind(&ascii_data);
5180 // Allocate an ascii cons string. 5226 // Allocate an ascii cons string.
5181 __ AllocateAsciiConsString(ecx, edi, no_reg, &string_add_runtime); 5227 __ AllocateAsciiConsString(ecx, edi, no_reg, &string_add_runtime);
5182 __ bind(&allocated); 5228 __ bind(&allocated);
5183 // Fill the fields of the cons string. 5229 // Fill the fields of the cons string.
5184 if (FLAG_debug_code) __ AbortIfNotSmi(ebx); 5230 if (FLAG_debug_code) __ AbortIfNotSmi(ebx);
5185 __ mov(FieldOperand(ecx, ConsString::kLengthOffset), ebx); 5231 __ mov(FieldOperand(ecx, ConsString::kLengthOffset), ebx);
5186 __ mov(FieldOperand(ecx, ConsString::kHashFieldOffset), 5232 __ mov(FieldOperand(ecx, ConsString::kHashFieldOffset),
5187 Immediate(String::kEmptyHashField)); 5233 Immediate(String::kEmptyHashField));
(...skipping 10 matching lines...)
5198 __ test(ecx, Immediate(kAsciiDataHintMask)); 5244 __ test(ecx, Immediate(kAsciiDataHintMask));
5199 __ j(not_zero, &ascii_data); 5245 __ j(not_zero, &ascii_data);
5200 __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset)); 5246 __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
5201 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset)); 5247 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
5202 __ xor_(edi, Operand(ecx)); 5248 __ xor_(edi, Operand(ecx));
5203 STATIC_ASSERT(kAsciiStringTag != 0 && kAsciiDataHintTag != 0); 5249 STATIC_ASSERT(kAsciiStringTag != 0 && kAsciiDataHintTag != 0);
5204 __ and_(edi, kAsciiStringTag | kAsciiDataHintTag); 5250 __ and_(edi, kAsciiStringTag | kAsciiDataHintTag);
5205 __ cmp(edi, kAsciiStringTag | kAsciiDataHintTag); 5251 __ cmp(edi, kAsciiStringTag | kAsciiDataHintTag);
5206 __ j(equal, &ascii_data); 5252 __ j(equal, &ascii_data);
5207 // Allocate a two byte cons string. 5253 // Allocate a two byte cons string.
5208 __ AllocateConsString(ecx, edi, no_reg, &string_add_runtime); 5254 __ AllocateTwoByteConsString(ecx, edi, no_reg, &string_add_runtime);
5209 __ jmp(&allocated); 5255 __ jmp(&allocated);
5210 5256
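The replacement of the kAsciiStringTag test with kStringEncodingMask, together with the hint checks that follow, amounts to the following decision about the new cons string's encoding. A hedged sketch with the same flag names; the constant values are assumptions for the sketch only and the helper is illustrative.

// type1/type2 are the two instance-type bytes loaded above. Flag values
// below are assumed for the sketch; the stub uses the real constants.
const int kStringEncodingMask = 0x4;   // set for ascii, clear for two-byte
const int kAsciiStringTag     = 0x4;
const int kAsciiDataHintTag   = 0x8;   // two-byte string known to hold ascii
const int kAsciiDataHintMask  = 0x8;

bool ResultConsStringIsAscii(int type1, int type2) {
  int both = type1 & type2;
  if ((both & kStringEncodingMask) != 0) return true;  // both ascii
  if ((both & kAsciiDataHintMask) != 0) return true;   // both hinted ascii
  // One ascii string plus one string carrying the ascii-data hint also
  // yields an ascii cons string (the xor/and/cmp sequence above).
  return ((type1 ^ type2) & (kAsciiStringTag | kAsciiDataHintTag)) ==
         (kAsciiStringTag | kAsciiDataHintTag);
}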
5211 // Handle creating a flat result. First check that both strings are not 5257 // Handle creating a flat result. First check that both strings are not
5212 // external strings. 5258 // external strings.
5213 // eax: first string 5259 // eax: first string
5214 // ebx: length of resulting flat string as a smi 5260 // ebx: length of resulting flat string as a smi
5215 // edx: second string 5261 // edx: second string
5216 __ bind(&string_add_flat_result); 5262 __ bind(&string_add_flat_result);
5217 __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset)); 5263 __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
5218 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset)); 5264 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
5219 __ and_(ecx, kStringRepresentationMask); 5265 __ and_(ecx, kStringRepresentationMask);
5220 __ cmp(ecx, kExternalStringTag); 5266 __ cmp(ecx, kExternalStringTag);
5221 __ j(equal, &string_add_runtime); 5267 __ j(equal, &string_add_runtime);
5222 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset)); 5268 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
5223 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset)); 5269 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
5224 __ and_(ecx, kStringRepresentationMask); 5270 __ and_(ecx, kStringRepresentationMask);
5225 __ cmp(ecx, kExternalStringTag); 5271 __ cmp(ecx, kExternalStringTag);
5226 __ j(equal, &string_add_runtime); 5272 __ j(equal, &string_add_runtime);
5273 // We cannot encounter sliced strings here since:
5274 STATIC_ASSERT(SlicedString::kMinLength >= String::kMinNonFlatLength);
5227 // Now check if both strings are ascii strings. 5275 // Now check if both strings are ascii strings.
5228 // eax: first string 5276 // eax: first string
5229 // ebx: length of resulting flat string as a smi 5277 // ebx: length of resulting flat string as a smi
5230 // edx: second string 5278 // edx: second string
5231 Label non_ascii_string_add_flat_result; 5279 Label non_ascii_string_add_flat_result;
5232 STATIC_ASSERT(kStringEncodingMask == kAsciiStringTag); 5280 STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
5281 STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
5233 __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset)); 5282 __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
5234 __ test_b(FieldOperand(ecx, Map::kInstanceTypeOffset), kAsciiStringTag); 5283 __ test_b(FieldOperand(ecx, Map::kInstanceTypeOffset), kStringEncodingMask);
5235 __ j(zero, &non_ascii_string_add_flat_result); 5284 __ j(zero, &non_ascii_string_add_flat_result);
5236 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset)); 5285 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
5237 __ test_b(FieldOperand(ecx, Map::kInstanceTypeOffset), kAsciiStringTag); 5286 __ test_b(FieldOperand(ecx, Map::kInstanceTypeOffset), kStringEncodingMask);
5238 __ j(zero, &string_add_runtime); 5287 __ j(zero, &string_add_runtime);
5239 5288
5240 // Both strings are ascii strings. As they are short they are both flat. 5289 // Both strings are ascii strings. As they are short they are both flat.
5241 // ebx: length of resulting flat string as a smi 5290 // ebx: length of resulting flat string as a smi
5242 __ SmiUntag(ebx); 5291 __ SmiUntag(ebx);
5243 __ AllocateAsciiString(eax, ebx, ecx, edx, edi, &string_add_runtime); 5292 __ AllocateAsciiString(eax, ebx, ecx, edx, edi, &string_add_runtime);
5244 // eax: result string 5293 // eax: result string
5245 __ mov(ecx, eax); 5294 __ mov(ecx, eax);
5246 // Locate first character of result. 5295 // Locate first character of result.
5247 __ add(Operand(ecx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag)); 5296 __ add(Operand(ecx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
(...skipping 19 matching lines...)
5267 StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, true); 5316 StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, true);
5268 __ IncrementCounter(counters->string_add_native(), 1); 5317 __ IncrementCounter(counters->string_add_native(), 1);
5269 __ ret(2 * kPointerSize); 5318 __ ret(2 * kPointerSize);
5270 5319
5271 // Handle creating a flat two byte result. 5320 // Handle creating a flat two byte result.
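The flat-result path above boils down to allocating one sequential ascii string and copying both inputs into it. A minimal C++ sketch under the assumption that both inputs are already sequential ascii strings, which the preceding checks guarantee before this point; the helper stands in for AllocateAsciiString plus the two GenerateCopyCharacters calls.

#include <cstdint>
#include <cstring>
#include <vector>

std::vector<uint8_t> ConcatFlatAscii(const uint8_t* a, int len_a,
                                     const uint8_t* b, int len_b) {
  std::vector<uint8_t> result(len_a + len_b);    // allocate the flat string
  std::memcpy(result.data(), a, len_a);          // copy first string
  std::memcpy(result.data() + len_a, b, len_b);  // copy second string
  return result;
}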
5272 // eax: first string - known to be two byte 5321 // eax: first string - known to be two byte
5273 // ebx: length of resulting flat string as a smi 5322 // ebx: length of resulting flat string as a smi
5274 // edx: second string 5323 // edx: second string
5275 __ bind(&non_ascii_string_add_flat_result); 5324 __ bind(&non_ascii_string_add_flat_result);
5276 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset)); 5325 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
5277 __ test_b(FieldOperand(ecx, Map::kInstanceTypeOffset), kAsciiStringTag); 5326 __ test_b(FieldOperand(ecx, Map::kInstanceTypeOffset), kStringEncodingMask);
5278 __ j(not_zero, &string_add_runtime); 5327 __ j(not_zero, &string_add_runtime);
5279 // Both strings are two byte strings. As they are short they are both 5328 // Both strings are two byte strings. As they are short they are both
5280 // flat. 5329 // flat.
5281 __ SmiUntag(ebx); 5330 __ SmiUntag(ebx);
5282 __ AllocateTwoByteString(eax, ebx, ecx, edx, edi, &string_add_runtime); 5331 __ AllocateTwoByteString(eax, ebx, ecx, edx, edi, &string_add_runtime);
5283 // eax: result string 5332 // eax: result string
5284 __ mov(ecx, eax); 5333 __ mov(ecx, eax);
5285 // Locate first character of result. 5334 // Locate first character of result.
5286 __ add(Operand(ecx), 5335 __ add(Operand(ecx),
5287 Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); 5336 Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
(...skipping 401 matching lines...)
5689 &make_two_character_string, &make_two_character_string); 5738 &make_two_character_string, &make_two_character_string);
5690 __ ret(3 * kPointerSize); 5739 __ ret(3 * kPointerSize);
5691 5740
5692 __ bind(&make_two_character_string); 5741 __ bind(&make_two_character_string);
5693 // Set up registers for allocating the two character string. 5742 // Set up registers for allocating the two character string.
5694 __ mov(eax, Operand(esp, 3 * kPointerSize)); 5743 __ mov(eax, Operand(esp, 3 * kPointerSize));
5695 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset)); 5744 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
5696 __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset)); 5745 __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
5697 __ Set(ecx, Immediate(2)); 5746 __ Set(ecx, Immediate(2));
5698 5747
5699 __ bind(&result_longer_than_two); 5748 if (FLAG_string_slices) {
5749 Label copy_routine;
5750 // If coming from the make_two_character_string path, the string
5751 // is too short to be sliced anyway.
5752 STATIC_ASSERT(2 < SlicedString::kMinLength);
5753 __ jmp(&copy_routine);
5754 __ bind(&result_longer_than_two);
5755
5756 // eax: string
5757 // ebx: instance type
5758 // ecx: sub string length
5759 // edx: from index (smi)
5760 Label allocate_slice, sliced_string, seq_string;
5761 __ cmp(ecx, SlicedString::kMinLength);
5762 // Short slice. Copy instead of slicing.
5763 __ j(less, &copy_routine);
5764 STATIC_ASSERT(kSeqStringTag == 0);
5765 __ test(ebx, Immediate(kStringRepresentationMask));
5766 __ j(zero, &seq_string, Label::kNear);
5767 STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
5768 STATIC_ASSERT(kIsIndirectStringMask != 0);
5769 __ test(ebx, Immediate(kIsIndirectStringMask));
5770 // External string. Jump to runtime.
5771 __ j(zero, &runtime);
5772
5773 Factory* factory = masm->isolate()->factory();
5774 __ test(ebx, Immediate(kSlicedNotConsMask));
5775 __ j(not_zero, &sliced_string, Label::kNear);
5776 // Cons string. Check whether it is flat, then fetch first part.
5777 __ cmp(FieldOperand(eax, ConsString::kSecondOffset),
5778 factory->empty_string());
5779 __ j(not_equal, &runtime);
5780 __ mov(edi, FieldOperand(eax, ConsString::kFirstOffset));
5781 __ jmp(&allocate_slice, Label::kNear);
5782
5783 __ bind(&sliced_string);
5784 // Sliced string. Fetch parent and correct start index by offset.
5785 __ add(edx, FieldOperand(eax, SlicedString::kOffsetOffset));
5786 __ mov(edi, FieldOperand(eax, SlicedString::kParentOffset));
5787 __ jmp(&allocate_slice, Label::kNear);
5788
5789 __ bind(&seq_string);
5790 // Sequential string. Just move string to the right register.
5791 __ mov(edi, eax);
5792
5793 __ bind(&allocate_slice);
5794 // edi: underlying subject string
5795 // ebx: instance type of original subject string
5796 // edx: offset
5797 // ecx: length
5798 // Allocate new sliced string. At this point we do not reload the instance
5799 // type including the string encoding because we simply rely on the info
5800 // provided by the original string. It does not matter if the original
5801 // string's encoding is wrong because we always have to recheck encoding of
5802 // the newly created string's parent anyway due to externalized strings.
5803 Label two_byte_slice, set_slice_header;
5804 STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
5805 STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
5806 __ test(ebx, Immediate(kStringEncodingMask));
5807 __ j(zero, &two_byte_slice, Label::kNear);
5808 __ AllocateAsciiSlicedString(eax, ebx, no_reg, &runtime);
5809 __ jmp(&set_slice_header, Label::kNear);
5810 __ bind(&two_byte_slice);
5811 __ AllocateTwoByteSlicedString(eax, ebx, no_reg, &runtime);
5812 __ bind(&set_slice_header);
5813 __ mov(FieldOperand(eax, SlicedString::kOffsetOffset), edx);
5814 __ SmiTag(ecx);
5815 __ mov(FieldOperand(eax, SlicedString::kLengthOffset), ecx);
5816 __ mov(FieldOperand(eax, SlicedString::kParentOffset), edi);
5817 __ mov(FieldOperand(eax, SlicedString::kHashFieldOffset),
5818 Immediate(String::kEmptyHashField));
5819 __ jmp(&return_eax);
5820
5821 __ bind(&copy_routine);
5822 } else {
5823 __ bind(&result_longer_than_two);
5824 }
5825
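Under FLAG_string_slices the new code above avoids copying for long enough substrings: it finds the underlying sequential string (unwrapping a flattened cons or an existing slice), folds any existing slice offset into the start index, and fills in a SlicedString header of parent, offset and length. A hedged sketch of that decision follows; the struct and the kMinSliceLength constant are illustrative stand-ins (the real threshold is SlicedString::kMinLength), not V8's heap layout.

// Illustrative only; the real layout and constants live elsewhere in V8.
const int kMinSliceLength = 13;  // assumed stand-in for SlicedString::kMinLength

struct SubjectString {
  bool is_sequential;
  bool is_external;
  bool is_slice;
  bool is_flat_cons;        // cons whose second part is the empty string
  SubjectString* first;     // cons: first part
  SubjectString* parent;    // slice: underlying string
  int slice_offset;         // slice: existing start offset
};

struct SliceHeader {
  SubjectString* parent;    // always an underlying sequential string
  int offset;
  int length;
};

// Returns false when the stub falls back to copying or to the runtime.
bool TryMakeSlice(SubjectString* subject, int from, int length,
                  SliceHeader* out) {
  if (length < kMinSliceLength) return false;       // too short: copy instead
  SubjectString* underlying = subject;
  if (!subject->is_sequential) {
    if (subject->is_external) return false;         // runtime path
    if (subject->is_slice) {
      from += subject->slice_offset;                // fold in existing offset
      underlying = subject->parent;
    } else if (subject->is_flat_cons) {
      underlying = subject->first;
    } else {
      return false;                                 // unflattened cons: runtime
    }
  }
  out->parent = underlying;
  out->offset = from;
  out->length = length;
  return true;
}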
5700 // eax: string 5826 // eax: string
5701 // ebx: instance type 5827 // ebx: instance type
5702 // ecx: result string length 5828 // ecx: result string length
5703 // Check for flat ascii string 5829 // Check for flat ascii string
5704 Label non_ascii_flat; 5830 Label non_ascii_flat;
5705 __ JumpIfInstanceTypeIsNotSequentialAscii(ebx, ebx, &non_ascii_flat); 5831 __ JumpIfInstanceTypeIsNotSequentialAscii(ebx, ebx, &non_ascii_flat);
5706 5832
5707 // Allocate the result. 5833 // Allocate the result.
5708 __ AllocateAsciiString(eax, ecx, ebx, edx, edi, &runtime); 5834 __ AllocateAsciiString(eax, ecx, ebx, edx, edi, &runtime);
5709 5835
(...skipping 877 matching lines...)
6587 6713
6588 // Fall through when we need to inform the incremental marker. 6714 // Fall through when we need to inform the incremental marker.
6589 } 6715 }
6590 6716
6591 6717
6592 #undef __ 6718 #undef __
6593 6719
6594 } } // namespace v8::internal 6720 } } // namespace v8::internal
6595 6721
6596 #endif // V8_TARGET_ARCH_IA32 6722 #endif // V8_TARGET_ARCH_IA32