OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 297 matching lines...)
308 __ j(equal, on_no_match); | 308 __ j(equal, on_no_match); |
309 __ bind(&loop_increment); | 309 __ bind(&loop_increment); |
310 // Increment pointers into match and capture strings. | 310 // Increment pointers into match and capture strings. |
311 __ addq(r11, Immediate(1)); | 311 __ addq(r11, Immediate(1)); |
312 __ addq(r9, Immediate(1)); | 312 __ addq(r9, Immediate(1)); |
313 // Compare to end of capture, and loop if not done. | 313 // Compare to end of capture, and loop if not done. |
314 __ cmpq(r9, rbx); | 314 __ cmpq(r9, rbx); |
315 __ j(below, &loop); | 315 __ j(below, &loop); |
316 | 316 |
317 // Compute new value of character position after the matched part. | 317 // Compute new value of character position after the matched part. |
318 __ movq(rdi, r11); | 318 __ movp(rdi, r11); |
319 __ subq(rdi, rsi); | 319 __ subq(rdi, rsi); |
320 } else { | 320 } else { |
321 ASSERT(mode_ == UC16); | 321 ASSERT(mode_ == UC16); |
322 // Save important/volatile registers before calling C function. | 322 // Save important/volatile registers before calling C function. |
323 #ifndef _WIN64 | 323 #ifndef _WIN64 |
324 // Caller save on Linux and callee save in Windows. | 324 // Caller save on Linux and callee save in Windows. |
325 __ push(rsi); | 325 __ push(rsi); |
326 __ push(rdi); | 326 __ push(rdi); |
327 #endif | 327 #endif |
328 __ push(backtrack_stackpointer()); | 328 __ push(backtrack_stackpointer()); |
329 | 329 |
330 static const int num_arguments = 4; | 330 static const int num_arguments = 4; |
331 __ PrepareCallCFunction(num_arguments); | 331 __ PrepareCallCFunction(num_arguments); |
332 | 332 |
333 // Put arguments into parameter registers. Parameters are | 333 // Put arguments into parameter registers. Parameters are |
334 // Address byte_offset1 - Address of captured substring's start. | 334 // Address byte_offset1 - Address of captured substring's start. |
335 // Address byte_offset2 - Address of current character position. | 335 // Address byte_offset2 - Address of current character position. |
336 // size_t byte_length - length of capture in bytes(!) | 336 // size_t byte_length - length of capture in bytes(!) |
337 // Isolate* isolate | 337 // Isolate* isolate |
338 #ifdef _WIN64 | 338 #ifdef _WIN64 |
339 // Compute and set byte_offset1 (start of capture). | 339 // Compute and set byte_offset1 (start of capture). |
340 __ lea(rcx, Operand(rsi, rdx, times_1, 0)); | 340 __ lea(rcx, Operand(rsi, rdx, times_1, 0)); |
341 // Set byte_offset2. | 341 // Set byte_offset2. |
342 __ lea(rdx, Operand(rsi, rdi, times_1, 0)); | 342 __ lea(rdx, Operand(rsi, rdi, times_1, 0)); |
343 // Set byte_length. | 343 // Set byte_length. |
344 __ movq(r8, rbx); | 344 __ movp(r8, rbx); |
345 // Isolate. | 345 // Isolate. |
346 __ LoadAddress(r9, ExternalReference::isolate_address(isolate())); | 346 __ LoadAddress(r9, ExternalReference::isolate_address(isolate())); |
347 #else // AMD64 calling convention | 347 #else // AMD64 calling convention |
348 // Compute byte_offset2 (current position = rsi+rdi). | 348 // Compute byte_offset2 (current position = rsi+rdi). |
349 __ lea(rax, Operand(rsi, rdi, times_1, 0)); | 349 __ lea(rax, Operand(rsi, rdi, times_1, 0)); |
350 // Compute and set byte_offset1 (start of capture). | 350 // Compute and set byte_offset1 (start of capture). |
351 __ lea(rdi, Operand(rsi, rdx, times_1, 0)); | 351 __ lea(rdi, Operand(rsi, rdx, times_1, 0)); |
352 // Set byte_offset2. | 352 // Set byte_offset2. |
353 __ movq(rsi, rax); | 353 __ movp(rsi, rax); |
354 // Set byte_length. | 354 // Set byte_length. |
355 __ movq(rdx, rbx); | 355 __ movp(rdx, rbx); |
356 // Isolate. | 356 // Isolate. |
357 __ LoadAddress(rcx, ExternalReference::isolate_address(isolate())); | 357 __ LoadAddress(rcx, ExternalReference::isolate_address(isolate())); |
358 #endif | 358 #endif |
359 | 359 |
360 { // NOLINT: Can't find a way to open this scope without confusing the | 360 { // NOLINT: Can't find a way to open this scope without confusing the |
361 // linter. | 361 // linter. |
362 AllowExternalCallThatCantCauseGC scope(&masm_); | 362 AllowExternalCallThatCantCauseGC scope(&masm_); |
363 ExternalReference compare = | 363 ExternalReference compare = |
364 ExternalReference::re_case_insensitive_compare_uc16(isolate()); | 364 ExternalReference::re_case_insensitive_compare_uc16(isolate()); |
365 __ CallCFunction(compare, num_arguments); | 365 __ CallCFunction(compare, num_arguments); |
(...skipping 68 matching lines...)
434 BranchOrBacktrack(not_equal, on_no_match); | 434 BranchOrBacktrack(not_equal, on_no_match); |
435 // Increment pointers into capture and match string. | 435 // Increment pointers into capture and match string. |
436 __ addq(rbx, Immediate(char_size())); | 436 __ addq(rbx, Immediate(char_size())); |
437 __ addq(rdx, Immediate(char_size())); | 437 __ addq(rdx, Immediate(char_size())); |
438 // Check if we have reached end of match area. | 438 // Check if we have reached end of match area. |
439 __ cmpq(rdx, r9); | 439 __ cmpq(rdx, r9); |
440 __ j(below, &loop); | 440 __ j(below, &loop); |
441 | 441 |
442 // Success. | 442 // Success. |
443 // Set current character position to position after match. | 443 // Set current character position to position after match. |
444 __ movq(rdi, rbx); | 444 __ movp(rdi, rbx); |
445 __ subq(rdi, rsi); | 445 __ subq(rdi, rsi); |
446 | 446 |
447 __ bind(&fallthrough); | 447 __ bind(&fallthrough); |
448 } | 448 } |
449 | 449 |
450 | 450 |
451 void RegExpMacroAssemblerX64::CheckNotCharacter(uint32_t c, | 451 void RegExpMacroAssemblerX64::CheckNotCharacter(uint32_t c, |
452 Label* on_not_equal) { | 452 Label* on_not_equal) { |
453 __ cmpl(current_character(), Immediate(c)); | 453 __ cmpl(current_character(), Immediate(c)); |
454 BranchOrBacktrack(not_equal, on_not_equal); | 454 BranchOrBacktrack(not_equal, on_not_equal); |
(...skipping 60 matching lines...)
515 BranchOrBacktrack(above, on_not_in_range); | 515 BranchOrBacktrack(above, on_not_in_range); |
516 } | 516 } |
517 | 517 |
518 | 518 |
519 void RegExpMacroAssemblerX64::CheckBitInTable( | 519 void RegExpMacroAssemblerX64::CheckBitInTable( |
520 Handle<ByteArray> table, | 520 Handle<ByteArray> table, |
521 Label* on_bit_set) { | 521 Label* on_bit_set) { |
522 __ Move(rax, table); | 522 __ Move(rax, table); |
523 Register index = current_character(); | 523 Register index = current_character(); |
524 if (mode_ != ASCII || kTableMask != String::kMaxOneByteCharCode) { | 524 if (mode_ != ASCII || kTableMask != String::kMaxOneByteCharCode) { |
525 __ movq(rbx, current_character()); | 525 __ movp(rbx, current_character()); |
526 __ and_(rbx, Immediate(kTableMask)); | 526 __ and_(rbx, Immediate(kTableMask)); |
527 index = rbx; | 527 index = rbx; |
528 } | 528 } |
529 __ cmpb(FieldOperand(rax, index, times_1, ByteArray::kHeaderSize), | 529 __ cmpb(FieldOperand(rax, index, times_1, ByteArray::kHeaderSize), |
530 Immediate(0)); | 530 Immediate(0)); |
531 BranchOrBacktrack(not_equal, on_bit_set); | 531 BranchOrBacktrack(not_equal, on_bit_set); |
532 } | 532 } |
533 | 533 |
534 | 534 |
535 bool RegExpMacroAssemblerX64::CheckSpecialCharacterClass(uc16 type, | 535 bool RegExpMacroAssemblerX64::CheckSpecialCharacterClass(uc16 type, |
(...skipping 132 matching lines...)
668 // registers we need. | 668 // registers we need. |
669 // Entry code: | 669 // Entry code: |
670 __ bind(&entry_label_); | 670 __ bind(&entry_label_); |
671 | 671 |
672 // Tell the system that we have a stack frame. Because the type is MANUAL, no | 672 // Tell the system that we have a stack frame. Because the type is MANUAL, no |
673 // is generated. | 673 // is generated. |
674 FrameScope scope(&masm_, StackFrame::MANUAL); | 674 FrameScope scope(&masm_, StackFrame::MANUAL); |
675 | 675 |
676 // Actually emit code to start a new stack frame. | 676 // Actually emit code to start a new stack frame. |
677 __ push(rbp); | 677 __ push(rbp); |
678 __ movq(rbp, rsp); | 678 __ movp(rbp, rsp); |
679 // Save parameters and callee-save registers. Order here should correspond | 679 // Save parameters and callee-save registers. Order here should correspond |
680 // to order of kBackup_rbx etc. | 680 // to order of kBackup_rbx etc. |
681 #ifdef _WIN64 | 681 #ifdef _WIN64 |
682 // MSVC passes arguments in rcx, rdx, r8, r9, with backing stack slots. | 682 // MSVC passes arguments in rcx, rdx, r8, r9, with backing stack slots. |
683 // Store register parameters in pre-allocated stack slots, | 683 // Store register parameters in pre-allocated stack slots, |
684 __ movq(Operand(rbp, kInputString), rcx); | 684 __ movq(Operand(rbp, kInputString), rcx); |
685 __ movq(Operand(rbp, kStartIndex), rdx); // Passed as int32 in edx. | 685 __ movq(Operand(rbp, kStartIndex), rdx); // Passed as int32 in edx. |
686 __ movq(Operand(rbp, kInputStart), r8); | 686 __ movq(Operand(rbp, kInputStart), r8); |
687 __ movq(Operand(rbp, kInputEnd), r9); | 687 __ movq(Operand(rbp, kInputEnd), r9); |
688 // Callee-save on Win64. | 688 // Callee-save on Win64. |
(...skipping 21 matching lines...)
710 | 710 |
711 __ push(Immediate(0)); // Number of successful matches in a global regexp. | 711 __ push(Immediate(0)); // Number of successful matches in a global regexp. |
712 __ push(Immediate(0)); // Make room for "input start - 1" constant. | 712 __ push(Immediate(0)); // Make room for "input start - 1" constant. |
713 | 713 |
714 // Check if we have space on the stack for registers. | 714 // Check if we have space on the stack for registers. |
715 Label stack_limit_hit; | 715 Label stack_limit_hit; |
716 Label stack_ok; | 716 Label stack_ok; |
717 | 717 |
718 ExternalReference stack_limit = | 718 ExternalReference stack_limit = |
719 ExternalReference::address_of_stack_limit(isolate()); | 719 ExternalReference::address_of_stack_limit(isolate()); |
720 __ movq(rcx, rsp); | 720 __ movp(rcx, rsp); |
721 __ Move(kScratchRegister, stack_limit); | 721 __ Move(kScratchRegister, stack_limit); |
722 __ subq(rcx, Operand(kScratchRegister, 0)); | 722 __ subq(rcx, Operand(kScratchRegister, 0)); |
723 // Handle it if the stack pointer is already below the stack limit. | 723 // Handle it if the stack pointer is already below the stack limit. |
724 __ j(below_equal, &stack_limit_hit); | 724 __ j(below_equal, &stack_limit_hit); |
725 // Check if there is room for the variable number of registers above | 725 // Check if there is room for the variable number of registers above |
726 // the stack limit. | 726 // the stack limit. |
727 __ cmpq(rcx, Immediate(num_registers_ * kPointerSize)); | 727 __ cmpq(rcx, Immediate(num_registers_ * kPointerSize)); |
728 __ j(above_equal, &stack_ok); | 728 __ j(above_equal, &stack_ok); |
729 // Exit with OutOfMemory exception. There is not enough space on the stack | 729 // Exit with OutOfMemory exception. There is not enough space on the stack |
730 // for our working registers. | 730 // for our working registers. |
731 __ Set(rax, EXCEPTION); | 731 __ Set(rax, EXCEPTION); |
732 __ jmp(&return_rax); | 732 __ jmp(&return_rax); |
733 | 733 |
734 __ bind(&stack_limit_hit); | 734 __ bind(&stack_limit_hit); |
735 __ Move(code_object_pointer(), masm_.CodeObject()); | 735 __ Move(code_object_pointer(), masm_.CodeObject()); |
736 CallCheckStackGuardState(); // Preserves no registers besides rbp and rsp. | 736 CallCheckStackGuardState(); // Preserves no registers besides rbp and rsp. |
737 __ testq(rax, rax); | 737 __ testq(rax, rax); |
738 // If returned value is non-zero, we exit with the returned value as result. | 738 // If returned value is non-zero, we exit with the returned value as result. |
739 __ j(not_zero, &return_rax); | 739 __ j(not_zero, &return_rax); |
740 | 740 |
741 __ bind(&stack_ok); | 741 __ bind(&stack_ok); |
742 | 742 |
743 // Allocate space on stack for registers. | 743 // Allocate space on stack for registers. |
744 __ subq(rsp, Immediate(num_registers_ * kPointerSize)); | 744 __ subq(rsp, Immediate(num_registers_ * kPointerSize)); |
745 // Load string length. | 745 // Load string length. |
746 __ movq(rsi, Operand(rbp, kInputEnd)); | 746 __ movp(rsi, Operand(rbp, kInputEnd)); |
747 // Load input position. | 747 // Load input position. |
748 __ movq(rdi, Operand(rbp, kInputStart)); | 748 __ movp(rdi, Operand(rbp, kInputStart)); |
749 // Set up rdi to be negative offset from string end. | 749 // Set up rdi to be negative offset from string end. |
750 __ subq(rdi, rsi); | 750 __ subq(rdi, rsi); |
751 // Set rax to address of char before start of the string | 751 // Set rax to address of char before start of the string |
752 // (effectively string position -1). | 752 // (effectively string position -1). |
753 __ movq(rbx, Operand(rbp, kStartIndex)); | 753 __ movp(rbx, Operand(rbp, kStartIndex)); |
754 __ neg(rbx); | 754 __ neg(rbx); |
755 if (mode_ == UC16) { | 755 if (mode_ == UC16) { |
756 __ lea(rax, Operand(rdi, rbx, times_2, -char_size())); | 756 __ lea(rax, Operand(rdi, rbx, times_2, -char_size())); |
757 } else { | 757 } else { |
758 __ lea(rax, Operand(rdi, rbx, times_1, -char_size())); | 758 __ lea(rax, Operand(rdi, rbx, times_1, -char_size())); |
759 } | 759 } |
760 // Store this value in a local variable, for use when clearing | 760 // Store this value in a local variable, for use when clearing |
761 // position registers. | 761 // position registers. |
762 __ movq(Operand(rbp, kInputStartMinusOne), rax); | 762 __ movp(Operand(rbp, kInputStartMinusOne), rax); |
763 | 763 |
764 #if V8_OS_WIN | 764 #if V8_OS_WIN |
765 // Ensure that we have written to each stack page, in order. Skipping a page | 765 // Ensure that we have written to each stack page, in order. Skipping a page |
766 // on Windows can cause segmentation faults. Assuming page size is 4k. | 766 // on Windows can cause segmentation faults. Assuming page size is 4k. |
767 const int kPageSize = 4096; | 767 const int kPageSize = 4096; |
768 const int kRegistersPerPage = kPageSize / kPointerSize; | 768 const int kRegistersPerPage = kPageSize / kPointerSize; |
769 for (int i = num_saved_registers_ + kRegistersPerPage - 1; | 769 for (int i = num_saved_registers_ + kRegistersPerPage - 1; |
770 i < num_registers_; | 770 i < num_registers_; |
771 i += kRegistersPerPage) { | 771 i += kRegistersPerPage) { |
772 __ movq(register_location(i), rax); // One write every page. | 772 __ movp(register_location(i), rax); // One write every page. |
773 } | 773 } |
774 #endif // V8_OS_WIN | 774 #endif // V8_OS_WIN |
775 | 775 |
776 // Initialize code object pointer. | 776 // Initialize code object pointer. |
777 __ Move(code_object_pointer(), masm_.CodeObject()); | 777 __ Move(code_object_pointer(), masm_.CodeObject()); |
778 | 778 |
779 Label load_char_start_regexp, start_regexp; | 779 Label load_char_start_regexp, start_regexp; |
780 // Load newline if index is at start, previous character otherwise. | 780 // Load newline if index is at start, previous character otherwise. |
781 __ cmpl(Operand(rbp, kStartIndex), Immediate(0)); | 781 __ cmpl(Operand(rbp, kStartIndex), Immediate(0)); |
782 __ j(not_equal, &load_char_start_regexp, Label::kNear); | 782 __ j(not_equal, &load_char_start_regexp, Label::kNear); |
783 __ Set(current_character(), '\n'); | 783 __ Set(current_character(), '\n'); |
784 __ jmp(&start_regexp, Label::kNear); | 784 __ jmp(&start_regexp, Label::kNear); |
785 | 785 |
786 // Global regexp restarts matching here. | 786 // Global regexp restarts matching here. |
787 __ bind(&load_char_start_regexp); | 787 __ bind(&load_char_start_regexp); |
788 // Load previous char as initial value of current character register. | 788 // Load previous char as initial value of current character register. |
789 LoadCurrentCharacterUnchecked(-1, 1); | 789 LoadCurrentCharacterUnchecked(-1, 1); |
790 __ bind(&start_regexp); | 790 __ bind(&start_regexp); |
791 | 791 |
792 // Initialize on-stack registers. | 792 // Initialize on-stack registers. |
793 if (num_saved_registers_ > 0) { | 793 if (num_saved_registers_ > 0) { |
794 // Fill saved registers with initial value = start offset - 1 | 794 // Fill saved registers with initial value = start offset - 1 |
795 // Fill in stack push order, to avoid accessing across an unwritten | 795 // Fill in stack push order, to avoid accessing across an unwritten |
796 // page (a problem on Windows). | 796 // page (a problem on Windows). |
797 if (num_saved_registers_ > 8) { | 797 if (num_saved_registers_ > 8) { |
798 __ Set(rcx, kRegisterZero); | 798 __ Set(rcx, kRegisterZero); |
799 Label init_loop; | 799 Label init_loop; |
800 __ bind(&init_loop); | 800 __ bind(&init_loop); |
801 __ movq(Operand(rbp, rcx, times_1, 0), rax); | 801 __ movp(Operand(rbp, rcx, times_1, 0), rax); |
802 __ subq(rcx, Immediate(kPointerSize)); | 802 __ subq(rcx, Immediate(kPointerSize)); |
803 __ cmpq(rcx, | 803 __ cmpq(rcx, |
804 Immediate(kRegisterZero - num_saved_registers_ * kPointerSize)); | 804 Immediate(kRegisterZero - num_saved_registers_ * kPointerSize)); |
805 __ j(greater, &init_loop); | 805 __ j(greater, &init_loop); |
806 } else { // Unroll the loop. | 806 } else { // Unroll the loop. |
807 for (int i = 0; i < num_saved_registers_; i++) { | 807 for (int i = 0; i < num_saved_registers_; i++) { |
808 __ movq(register_location(i), rax); | 808 __ movp(register_location(i), rax); |
809 } | 809 } |
810 } | 810 } |
811 } | 811 } |
812 | 812 |
813 // Initialize backtrack stack pointer. | 813 // Initialize backtrack stack pointer. |
814 __ movq(backtrack_stackpointer(), Operand(rbp, kStackHighEnd)); | 814 __ movp(backtrack_stackpointer(), Operand(rbp, kStackHighEnd)); |
815 | 815 |
816 __ jmp(&start_label_); | 816 __ jmp(&start_label_); |
817 | 817 |
818 // Exit code: | 818 // Exit code: |
819 if (success_label_.is_linked()) { | 819 if (success_label_.is_linked()) { |
820 // Save captures when successful. | 820 // Save captures when successful. |
821 __ bind(&success_label_); | 821 __ bind(&success_label_); |
822 if (num_saved_registers_ > 0) { | 822 if (num_saved_registers_ > 0) { |
823 // copy captures to output | 823 // copy captures to output |
824 __ movq(rdx, Operand(rbp, kStartIndex)); | 824 __ movp(rdx, Operand(rbp, kStartIndex)); |
825 __ movq(rbx, Operand(rbp, kRegisterOutput)); | 825 __ movp(rbx, Operand(rbp, kRegisterOutput)); |
826 __ movq(rcx, Operand(rbp, kInputEnd)); | 826 __ movp(rcx, Operand(rbp, kInputEnd)); |
827 __ subq(rcx, Operand(rbp, kInputStart)); | 827 __ subq(rcx, Operand(rbp, kInputStart)); |
828 if (mode_ == UC16) { | 828 if (mode_ == UC16) { |
829 __ lea(rcx, Operand(rcx, rdx, times_2, 0)); | 829 __ lea(rcx, Operand(rcx, rdx, times_2, 0)); |
830 } else { | 830 } else { |
831 __ addq(rcx, rdx); | 831 __ addq(rcx, rdx); |
832 } | 832 } |
833 for (int i = 0; i < num_saved_registers_; i++) { | 833 for (int i = 0; i < num_saved_registers_; i++) { |
834 __ movq(rax, register_location(i)); | 834 __ movq(rax, register_location(i)); |
835 if (i == 0 && global_with_zero_length_check()) { | 835 if (i == 0 && global_with_zero_length_check()) { |
836 // Keep capture start in rdx for the zero-length check later. | 836 // Keep capture start in rdx for the zero-length check later. |
837 __ movq(rdx, rax); | 837 __ movp(rdx, rax); |
838 } | 838 } |
839 __ addq(rax, rcx); // Convert to index from start, not end. | 839 __ addq(rax, rcx); // Convert to index from start, not end. |
840 if (mode_ == UC16) { | 840 if (mode_ == UC16) { |
841 __ sar(rax, Immediate(1)); // Convert byte index to character index. | 841 __ sar(rax, Immediate(1)); // Convert byte index to character index. |
842 } | 842 } |
843 __ movl(Operand(rbx, i * kIntSize), rax); | 843 __ movl(Operand(rbx, i * kIntSize), rax); |
844 } | 844 } |
845 } | 845 } |
846 | 846 |
847 if (global()) { | 847 if (global()) { |
848 // Restart matching if the regular expression is flagged as global. | 848 // Restart matching if the regular expression is flagged as global. |
849 // Increment success counter. | 849 // Increment success counter. |
850 __ incq(Operand(rbp, kSuccessfulCaptures)); | 850 __ incq(Operand(rbp, kSuccessfulCaptures)); |
851 // Capture results have been stored, so the number of remaining global | 851 // Capture results have been stored, so the number of remaining global |
852 // output registers is reduced by the number of stored captures. | 852 // output registers is reduced by the number of stored captures. |
853 __ movsxlq(rcx, Operand(rbp, kNumOutputRegisters)); | 853 __ movsxlq(rcx, Operand(rbp, kNumOutputRegisters)); |
854 __ subq(rcx, Immediate(num_saved_registers_)); | 854 __ subq(rcx, Immediate(num_saved_registers_)); |
855 // Check whether we have enough room for another set of capture results. | 855 // Check whether we have enough room for another set of capture results. |
856 __ cmpq(rcx, Immediate(num_saved_registers_)); | 856 __ cmpq(rcx, Immediate(num_saved_registers_)); |
857 __ j(less, &exit_label_); | 857 __ j(less, &exit_label_); |
858 | 858 |
859 __ movq(Operand(rbp, kNumOutputRegisters), rcx); | 859 __ movp(Operand(rbp, kNumOutputRegisters), rcx); |
860 // Advance the location for output. | 860 // Advance the location for output. |
861 __ addq(Operand(rbp, kRegisterOutput), | 861 __ addq(Operand(rbp, kRegisterOutput), |
862 Immediate(num_saved_registers_ * kIntSize)); | 862 Immediate(num_saved_registers_ * kIntSize)); |
863 | 863 |
864 // Prepare rax to initialize registers with its value in the next run. | 864 // Prepare rax to initialize registers with its value in the next run. |
865 __ movq(rax, Operand(rbp, kInputStartMinusOne)); | 865 __ movp(rax, Operand(rbp, kInputStartMinusOne)); |
866 | 866 |
867 if (global_with_zero_length_check()) { | 867 if (global_with_zero_length_check()) { |
868 // Special case for zero-length matches. | 868 // Special case for zero-length matches. |
869 // rdx: capture start index | 869 // rdx: capture start index |
870 __ cmpq(rdi, rdx); | 870 __ cmpq(rdi, rdx); |
871 // Not a zero-length match, restart. | 871 // Not a zero-length match, restart. |
872 __ j(not_equal, &load_char_start_regexp); | 872 __ j(not_equal, &load_char_start_regexp); |
873 // rdi (offset from the end) is zero if we already reached the end. | 873 // rdi (offset from the end) is zero if we already reached the end. |
874 __ testq(rdi, rdi); | 874 __ testq(rdi, rdi); |
875 __ j(zero, &exit_label_, Label::kNear); | 875 __ j(zero, &exit_label_, Label::kNear); |
876 // Advance current position after a zero-length match. | 876 // Advance current position after a zero-length match. |
877 if (mode_ == UC16) { | 877 if (mode_ == UC16) { |
878 __ addq(rdi, Immediate(2)); | 878 __ addq(rdi, Immediate(2)); |
879 } else { | 879 } else { |
880 __ incq(rdi); | 880 __ incq(rdi); |
881 } | 881 } |
882 } | 882 } |
883 | 883 |
884 __ jmp(&load_char_start_regexp); | 884 __ jmp(&load_char_start_regexp); |
885 } else { | 885 } else { |
886 __ movq(rax, Immediate(SUCCESS)); | 886 __ movp(rax, Immediate(SUCCESS)); |
887 } | 887 } |
888 } | 888 } |
889 | 889 |
890 __ bind(&exit_label_); | 890 __ bind(&exit_label_); |
891 if (global()) { | 891 if (global()) { |
892 // Return the number of successful captures. | 892 // Return the number of successful captures. |
893 __ movq(rax, Operand(rbp, kSuccessfulCaptures)); | 893 __ movp(rax, Operand(rbp, kSuccessfulCaptures)); |
894 } | 894 } |
895 | 895 |
896 __ bind(&return_rax); | 896 __ bind(&return_rax); |
897 #ifdef _WIN64 | 897 #ifdef _WIN64 |
898 // Restore callee save registers. | 898 // Restore callee save registers. |
899 __ lea(rsp, Operand(rbp, kLastCalleeSaveRegister)); | 899 __ lea(rsp, Operand(rbp, kLastCalleeSaveRegister)); |
900 __ pop(rbx); | 900 __ pop(rbx); |
901 __ pop(rdi); | 901 __ pop(rdi); |
902 __ pop(rsi); | 902 __ pop(rsi); |
903 // Stack now at rbp. | 903 // Stack now at rbp. |
904 #else | 904 #else |
905 // Restore callee save register. | 905 // Restore callee save register. |
906 __ movq(rbx, Operand(rbp, kBackup_rbx)); | 906 __ movp(rbx, Operand(rbp, kBackup_rbx)); |
907 // Skip rsp to rbp. | 907 // Skip rsp to rbp. |
908 __ movq(rsp, rbp); | 908 __ movp(rsp, rbp); |
909 #endif | 909 #endif |
910 // Exit function frame, restore previous one. | 910 // Exit function frame, restore previous one. |
911 __ pop(rbp); | 911 __ pop(rbp); |
912 __ ret(0); | 912 __ ret(0); |
913 | 913 |
914 // Backtrack code (branch target for conditional backtracks). | 914 // Backtrack code (branch target for conditional backtracks). |
915 if (backtrack_label_.is_linked()) { | 915 if (backtrack_label_.is_linked()) { |
916 __ bind(&backtrack_label_); | 916 __ bind(&backtrack_label_); |
917 Backtrack(); | 917 Backtrack(); |
918 } | 918 } |
(...skipping 11 matching lines...)
930 __ testq(rax, rax); | 930 __ testq(rax, rax); |
931 // If returning non-zero, we should end execution with the given | 931 // If returning non-zero, we should end execution with the given |
932 // result as return value. | 932 // result as return value. |
933 __ j(not_zero, &return_rax); | 933 __ j(not_zero, &return_rax); |
934 | 934 |
935 // Restore registers. | 935 // Restore registers. |
936 __ Move(code_object_pointer(), masm_.CodeObject()); | 936 __ Move(code_object_pointer(), masm_.CodeObject()); |
937 __ pop(rdi); | 937 __ pop(rdi); |
938 __ pop(backtrack_stackpointer()); | 938 __ pop(backtrack_stackpointer()); |
939 // String might have moved: Reload rsi from frame. | 939 // String might have moved: Reload rsi from frame. |
940 __ movq(rsi, Operand(rbp, kInputEnd)); | 940 __ movp(rsi, Operand(rbp, kInputEnd)); |
941 SafeReturn(); | 941 SafeReturn(); |
942 } | 942 } |
943 | 943 |
944 // Backtrack stack overflow code. | 944 // Backtrack stack overflow code. |
945 if (stack_overflow_label_.is_linked()) { | 945 if (stack_overflow_label_.is_linked()) { |
946 SafeCallTarget(&stack_overflow_label_); | 946 SafeCallTarget(&stack_overflow_label_); |
947 // Reached if the backtrack-stack limit has been hit. | 947 // Reached if the backtrack-stack limit has been hit. |
948 | 948 |
949 Label grow_failed; | 949 Label grow_failed; |
950 // Save registers before calling C function | 950 // Save registers before calling C function |
951 #ifndef _WIN64 | 951 #ifndef _WIN64 |
952 // Callee-save in Microsoft 64-bit ABI, but not in AMD64 ABI. | 952 // Callee-save in Microsoft 64-bit ABI, but not in AMD64 ABI. |
953 __ push(rsi); | 953 __ push(rsi); |
954 __ push(rdi); | 954 __ push(rdi); |
955 #endif | 955 #endif |
956 | 956 |
957 // Call GrowStack(backtrack_stackpointer()) | 957 // Call GrowStack(backtrack_stackpointer()) |
958 static const int num_arguments = 3; | 958 static const int num_arguments = 3; |
959 __ PrepareCallCFunction(num_arguments); | 959 __ PrepareCallCFunction(num_arguments); |
960 #ifdef _WIN64 | 960 #ifdef _WIN64 |
961 // Microsoft passes parameters in rcx, rdx, r8. | 961 // Microsoft passes parameters in rcx, rdx, r8. |
962 // First argument, backtrack stackpointer, is already in rcx. | 962 // First argument, backtrack stackpointer, is already in rcx. |
963 __ lea(rdx, Operand(rbp, kStackHighEnd)); // Second argument | 963 __ lea(rdx, Operand(rbp, kStackHighEnd)); // Second argument |
964 __ LoadAddress(r8, ExternalReference::isolate_address(isolate())); | 964 __ LoadAddress(r8, ExternalReference::isolate_address(isolate())); |
965 #else | 965 #else |
966 // AMD64 ABI passes parameters in rdi, rsi, rdx. | 966 // AMD64 ABI passes parameters in rdi, rsi, rdx. |
967 __ movq(rdi, backtrack_stackpointer()); // First argument. | 967 __ movp(rdi, backtrack_stackpointer()); // First argument. |
968 __ lea(rsi, Operand(rbp, kStackHighEnd)); // Second argument. | 968 __ lea(rsi, Operand(rbp, kStackHighEnd)); // Second argument. |
969 __ LoadAddress(rdx, ExternalReference::isolate_address(isolate())); | 969 __ LoadAddress(rdx, ExternalReference::isolate_address(isolate())); |
970 #endif | 970 #endif |
971 ExternalReference grow_stack = | 971 ExternalReference grow_stack = |
972 ExternalReference::re_grow_stack(isolate()); | 972 ExternalReference::re_grow_stack(isolate()); |
973 __ CallCFunction(grow_stack, num_arguments); | 973 __ CallCFunction(grow_stack, num_arguments); |
974 // If NULL is returned, we have failed to grow the stack, and | 974 // If NULL is returned, we have failed to grow the stack, and |
975 // must exit with a stack-overflow exception. | 975 // must exit with a stack-overflow exception. |
976 __ testq(rax, rax); | 976 __ testq(rax, rax); |
977 __ j(equal, &exit_with_exception); | 977 __ j(equal, &exit_with_exception); |
978 // Otherwise use return value as new stack pointer. | 978 // Otherwise use return value as new stack pointer. |
979 __ movq(backtrack_stackpointer(), rax); | 979 __ movp(backtrack_stackpointer(), rax); |
980 // Restore saved registers and continue. | 980 // Restore saved registers and continue. |
981 __ Move(code_object_pointer(), masm_.CodeObject()); | 981 __ Move(code_object_pointer(), masm_.CodeObject()); |
982 #ifndef _WIN64 | 982 #ifndef _WIN64 |
983 __ pop(rdi); | 983 __ pop(rdi); |
984 __ pop(rsi); | 984 __ pop(rsi); |
985 #endif | 985 #endif |
986 SafeReturn(); | 986 SafeReturn(); |
987 } | 987 } |
988 | 988 |
989 if (exit_with_exception.is_linked()) { | 989 if (exit_with_exception.is_linked()) { |
(...skipping 64 matching lines...)
1054 } | 1054 } |
1055 | 1055 |
1056 | 1056 |
1057 void RegExpMacroAssemblerX64::PopCurrentPosition() { | 1057 void RegExpMacroAssemblerX64::PopCurrentPosition() { |
1058 Pop(rdi); | 1058 Pop(rdi); |
1059 } | 1059 } |
1060 | 1060 |
1061 | 1061 |
1062 void RegExpMacroAssemblerX64::PopRegister(int register_index) { | 1062 void RegExpMacroAssemblerX64::PopRegister(int register_index) { |
1063 Pop(rax); | 1063 Pop(rax); |
1064 __ movq(register_location(register_index), rax); | 1064 __ movp(register_location(register_index), rax); |
1065 } | 1065 } |
1066 | 1066 |
1067 | 1067 |
1068 void RegExpMacroAssemblerX64::PushBacktrack(Label* label) { | 1068 void RegExpMacroAssemblerX64::PushBacktrack(Label* label) { |
1069 Push(label); | 1069 Push(label); |
1070 CheckStackLimit(); | 1070 CheckStackLimit(); |
1071 } | 1071 } |
1072 | 1072 |
1073 | 1073 |
1074 void RegExpMacroAssemblerX64::PushCurrentPosition() { | 1074 void RegExpMacroAssemblerX64::PushCurrentPosition() { |
1075 Push(rdi); | 1075 Push(rdi); |
1076 } | 1076 } |
1077 | 1077 |
1078 | 1078 |
1079 void RegExpMacroAssemblerX64::PushRegister(int register_index, | 1079 void RegExpMacroAssemblerX64::PushRegister(int register_index, |
1080 StackCheckFlag check_stack_limit) { | 1080 StackCheckFlag check_stack_limit) { |
1081 __ movq(rax, register_location(register_index)); | 1081 __ movp(rax, register_location(register_index)); |
1082 Push(rax); | 1082 Push(rax); |
1083 if (check_stack_limit) CheckStackLimit(); | 1083 if (check_stack_limit) CheckStackLimit(); |
1084 } | 1084 } |
1085 | 1085 |
1086 | 1086 |
1087 void RegExpMacroAssemblerX64::ReadCurrentPositionFromRegister(int reg) { | 1087 void RegExpMacroAssemblerX64::ReadCurrentPositionFromRegister(int reg) { |
1088 __ movq(rdi, register_location(reg)); | 1088 __ movq(rdi, register_location(reg)); |
1089 } | 1089 } |
1090 | 1090 |
1091 | 1091 |
(...skipping 11 matching lines...)
1103 // On RegExp code entry (where this operation is used), the character before | 1103 // On RegExp code entry (where this operation is used), the character before |
1104 // the current position is expected to be already loaded. | 1104 // the current position is expected to be already loaded. |
1105 // We have advanced the position, so it's safe to read backwards. | 1105 // We have advanced the position, so it's safe to read backwards. |
1106 LoadCurrentCharacterUnchecked(-1, 1); | 1106 LoadCurrentCharacterUnchecked(-1, 1); |
1107 __ bind(&after_position); | 1107 __ bind(&after_position); |
1108 } | 1108 } |
1109 | 1109 |
1110 | 1110 |
1111 void RegExpMacroAssemblerX64::SetRegister(int register_index, int to) { | 1111 void RegExpMacroAssemblerX64::SetRegister(int register_index, int to) { |
1112 ASSERT(register_index >= num_saved_registers_); // Reserved for positions! | 1112 ASSERT(register_index >= num_saved_registers_); // Reserved for positions! |
1113 __ movq(register_location(register_index), Immediate(to)); | 1113 __ movp(register_location(register_index), Immediate(to)); |
1114 } | 1114 } |
1115 | 1115 |
1116 | 1116 |
1117 bool RegExpMacroAssemblerX64::Succeed() { | 1117 bool RegExpMacroAssemblerX64::Succeed() { |
1118 __ jmp(&success_label_); | 1118 __ jmp(&success_label_); |
1119 return global(); | 1119 return global(); |
1120 } | 1120 } |
1121 | 1121 |
1122 | 1122 |
1123 void RegExpMacroAssemblerX64::WriteCurrentPositionToRegister(int reg, | 1123 void RegExpMacroAssemblerX64::WriteCurrentPositionToRegister(int reg, |
1124 int cp_offset) { | 1124 int cp_offset) { |
1125 if (cp_offset == 0) { | 1125 if (cp_offset == 0) { |
1126 __ movq(register_location(reg), rdi); | 1126 __ movp(register_location(reg), rdi); |
1127 } else { | 1127 } else { |
1128 __ lea(rax, Operand(rdi, cp_offset * char_size())); | 1128 __ lea(rax, Operand(rdi, cp_offset * char_size())); |
1129 __ movq(register_location(reg), rax); | 1129 __ movp(register_location(reg), rax); |
1130 } | 1130 } |
1131 } | 1131 } |
1132 | 1132 |
1133 | 1133 |
1134 void RegExpMacroAssemblerX64::ClearRegisters(int reg_from, int reg_to) { | 1134 void RegExpMacroAssemblerX64::ClearRegisters(int reg_from, int reg_to) { |
1135 ASSERT(reg_from <= reg_to); | 1135 ASSERT(reg_from <= reg_to); |
1136 __ movq(rax, Operand(rbp, kInputStartMinusOne)); | 1136 __ movp(rax, Operand(rbp, kInputStartMinusOne)); |
1137 for (int reg = reg_from; reg <= reg_to; reg++) { | 1137 for (int reg = reg_from; reg <= reg_to; reg++) { |
1138 __ movq(register_location(reg), rax); | 1138 __ movp(register_location(reg), rax); |
1139 } | 1139 } |
1140 } | 1140 } |
1141 | 1141 |
1142 | 1142 |
1143 void RegExpMacroAssemblerX64::WriteStackPointerToRegister(int reg) { | 1143 void RegExpMacroAssemblerX64::WriteStackPointerToRegister(int reg) { |
1144 __ movq(rax, backtrack_stackpointer()); | 1144 __ movp(rax, backtrack_stackpointer()); |
1145 __ subq(rax, Operand(rbp, kStackHighEnd)); | 1145 __ subq(rax, Operand(rbp, kStackHighEnd)); |
1146 __ movq(register_location(reg), rax); | 1146 __ movp(register_location(reg), rax); |
1147 } | 1147 } |
1148 | 1148 |
1149 | 1149 |
1150 // Private methods: | 1150 // Private methods: |
1151 | 1151 |
1152 void RegExpMacroAssemblerX64::CallCheckStackGuardState() { | 1152 void RegExpMacroAssemblerX64::CallCheckStackGuardState() { |
1153 // This function call preserves no register values. Caller should | 1153 // This function call preserves no register values. Caller should |
1154 // save anything volatile across a C call or overwritten by this function. | 1154 // save anything volatile across a C call or overwritten by this function. |
1155 static const int num_arguments = 3; | 1155 static const int num_arguments = 3; |
1156 __ PrepareCallCFunction(num_arguments); | 1156 __ PrepareCallCFunction(num_arguments); |
1157 #ifdef _WIN64 | 1157 #ifdef _WIN64 |
1158 // Second argument: Code* of self. (Do this before overwriting r8). | 1158 // Second argument: Code* of self. (Do this before overwriting r8). |
1159 __ movq(rdx, code_object_pointer()); | 1159 __ movp(rdx, code_object_pointer()); |
1160 // Third argument: RegExp code frame pointer. | 1160 // Third argument: RegExp code frame pointer. |
1161 __ movq(r8, rbp); | 1161 __ movp(r8, rbp); |
1162 // First argument: Next address on the stack (will be address of | 1162 // First argument: Next address on the stack (will be address of |
1163 // return address). | 1163 // return address). |
1164 __ lea(rcx, Operand(rsp, -kPointerSize)); | 1164 __ lea(rcx, Operand(rsp, -kPointerSize)); |
1165 #else | 1165 #else |
1166 // Third argument: RegExp code frame pointer. | 1166 // Third argument: RegExp code frame pointer. |
1167 __ movq(rdx, rbp); | 1167 __ movp(rdx, rbp); |
1168 // Second argument: Code* of self. | 1168 // Second argument: Code* of self. |
1169 __ movq(rsi, code_object_pointer()); | 1169 __ movp(rsi, code_object_pointer()); |
1170 // First argument: Next address on the stack (will be address of | 1170 // First argument: Next address on the stack (will be address of |
1171 // return address). | 1171 // return address). |
1172 __ lea(rdi, Operand(rsp, -kPointerSize)); | 1172 __ lea(rdi, Operand(rsp, -kPointerSize)); |
1173 #endif | 1173 #endif |
1174 ExternalReference stack_check = | 1174 ExternalReference stack_check = |
1175 ExternalReference::re_check_stack_guard_state(isolate()); | 1175 ExternalReference::re_check_stack_guard_state(isolate()); |
1176 __ CallCFunction(stack_check, num_arguments); | 1176 __ CallCFunction(stack_check, num_arguments); |
1177 } | 1177 } |
1178 | 1178 |
1179 | 1179 |
(...skipping 259 matching lines...)
1439 } | 1439 } |
1440 } | 1440 } |
1441 | 1441 |
1442 #undef __ | 1442 #undef __ |
1443 | 1443 |
1444 #endif // V8_INTERPRETED_REGEXP | 1444 #endif // V8_INTERPRETED_REGEXP |
1445 | 1445 |
1446 }} // namespace v8::internal | 1446 }} // namespace v8::internal |
1447 | 1447 |
1448 #endif // V8_TARGET_ARCH_X64 | 1448 #endif // V8_TARGET_ARCH_X64 |
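Note on the UC16 back-reference path above (new-file lines 333-365): the generated code marshals four arguments - the capture's start address, the current position address, the capture length in bytes, and the Isolate - and then calls out through ExternalReference::re_case_insensitive_compare_uc16. As orientation only, here is a minimal standalone sketch of the kind of comparison such a helper performs; the function name, the omitted Isolate* parameter, and the ASCII-only case folding are assumptions made for brevity, not V8's actual implementation.

#include <cstddef>
#include <cstdint>

// Returns 1 if the two UC16 ranges match case-insensitively, 0 otherwise.
// byte_offset1 points at the start of the captured substring, byte_offset2 at
// the current match position, and byte_length is the capture length in bytes.
static int CaseInsensitiveCompareSketch(const uint8_t* byte_offset1,
                                        const uint8_t* byte_offset2,
                                        size_t byte_length) {
  const char16_t* capture = reinterpret_cast<const char16_t*>(byte_offset1);
  const char16_t* current = reinterpret_cast<const char16_t*>(byte_offset2);
  size_t length = byte_length / sizeof(char16_t);
  for (size_t i = 0; i < length; i++) {
    char16_t c1 = capture[i];
    char16_t c2 = current[i];
    if (c1 != c2) {
      // Naive A-Z folding stands in for the real Unicode canonicalization.
      if (c1 >= u'A' && c1 <= u'Z') c1 = static_cast<char16_t>(c1 + ('a' - 'A'));
      if (c2 >= u'A' && c2 <= u'Z') c2 = static_cast<char16_t>(c2 + ('a' - 'A'));
      if (c1 != c2) return 0;  // Mismatch: the generated code backtracks.
    }
  }
  return 1;  // Match: the generated code advances past the capture.
}

A zero return is treated as "no match" by the generated code, which is why the sketch returns non-zero only when every character pair folds to the same value.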