Chromium Code Reviews

Side by Side Diff: src/a64/regexp-macro-assembler-a64.cc

Issue 194473005: A64: Rename k<Y>RegSize to k<Y>RegSizeInBits, and k<Y>RegSizeInBytes to k<Y>RegSize. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 6 years, 9 months ago
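
For orientation: after this patch, the unsuffixed k<Y>RegSize constants mean the register size in bytes, and the k<Y>RegSizeInBits constants mean the size in bits, so shift amounts and CPURegList widths below use the ...InBits names while stack and memory offsets use the byte-sized names. The concrete definitions live in src/a64/constants-a64.h, which is not part of this file's diff; a minimal sketch of the convention, assuming the usual A64 register widths, would be:

    // Sketch only -- the real constants are defined in src/a64/constants-a64.h.
    const unsigned kWRegSizeInBits = 32;              // W registers: 32 bits...
    const unsigned kWRegSize = kWRegSizeInBits >> 3;  // ...which is 4 bytes.
    const unsigned kWRegSizeLog2 = 2;                 // log2 of the byte size, for scaled offsets.
    const unsigned kXRegSizeInBits = 64;              // X registers: 64 bits...
    const unsigned kXRegSize = kXRegSizeInBits >> 3;  // ...which is 8 bytes.
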
1 // Copyright 2013 the V8 project authors. All rights reserved. 1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 181 matching lines...)
192 __ Ldr(w10, register_location(reg)); 192 __ Ldr(w10, register_location(reg));
193 __ Add(w10, w10, by); 193 __ Add(w10, w10, by);
194 __ Str(w10, register_location(reg)); 194 __ Str(w10, register_location(reg));
195 break; 195 break;
196 case CACHED_LSW: 196 case CACHED_LSW:
197 to_advance = GetCachedRegister(reg); 197 to_advance = GetCachedRegister(reg);
198 __ Add(to_advance, to_advance, by); 198 __ Add(to_advance, to_advance, by);
199 break; 199 break;
200 case CACHED_MSW: 200 case CACHED_MSW:
201 to_advance = GetCachedRegister(reg); 201 to_advance = GetCachedRegister(reg);
202 __ Add(to_advance, to_advance, static_cast<int64_t>(by) << kWRegSize); 202 __ Add(to_advance, to_advance,
203 static_cast<int64_t>(by) << kWRegSizeInBits);
203 break; 204 break;
204 default: 205 default:
205 UNREACHABLE(); 206 UNREACHABLE();
206 break; 207 break;
207 } 208 }
208 } 209 }
209 } 210 }
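
Background for the CACHED_MSW arithmetic above (context, not part of this change): the first kNumCachedRegisters regexp registers are kept packed two per X register, with the even-numbered register in the low 32 bits and the odd-numbered one in the high 32 bits, so advancing the high half adds `by` shifted up by one W-register width, now spelled kWRegSizeInBits. A rough C++ illustration of that packing, not V8 code:

    #include <cstdint>

    // packed holds regexp register 2k in bits [31:0] and register 2k+1 in bits [63:32].
    uint64_t AdvanceMswRegister(uint64_t packed, int by) {
      // Only the high word changes: the addend's low 32 bits are zero.
      return packed + (static_cast<uint64_t>(static_cast<int64_t>(by)) << 32);  // 32 == kWRegSizeInBits
    }
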
210 211
211 212
212 void RegExpMacroAssemblerA64::Backtrack() { 213 void RegExpMacroAssemblerA64::Backtrack() {
(...skipping 76 matching lines...)
289 CompareAndBranchOrBacktrack(w10, str[i], ne, on_failure); 290 CompareAndBranchOrBacktrack(w10, str[i], ne, on_failure);
290 } 291 }
291 } 292 }
292 293
293 294
294 void RegExpMacroAssemblerA64::CheckGreedyLoop(Label* on_equal) { 295 void RegExpMacroAssemblerA64::CheckGreedyLoop(Label* on_equal) {
295 __ Ldr(w10, MemOperand(backtrack_stackpointer())); 296 __ Ldr(w10, MemOperand(backtrack_stackpointer()));
296 __ Cmp(current_input_offset(), w10); 297 __ Cmp(current_input_offset(), w10);
297 __ Cset(x11, eq); 298 __ Cset(x11, eq);
298 __ Add(backtrack_stackpointer(), 299 __ Add(backtrack_stackpointer(),
299 backtrack_stackpointer(), Operand(x11, LSL, kWRegSizeInBytesLog2)); 300 backtrack_stackpointer(), Operand(x11, LSL, kWRegSizeLog2));
300 BranchOrBacktrack(eq, on_equal); 301 BranchOrBacktrack(eq, on_equal);
301 } 302 }
302 303
303 void RegExpMacroAssemblerA64::CheckNotBackReferenceIgnoreCase( 304 void RegExpMacroAssemblerA64::CheckNotBackReferenceIgnoreCase(
304 int start_reg, 305 int start_reg,
305 Label* on_no_match) { 306 Label* on_no_match) {
306 Label fallthrough; 307 Label fallthrough;
307 308
308 Register capture_start_offset = w10; 309 Register capture_start_offset = w10;
309 // Save the capture length in a callee-saved register so it will 310 // Save the capture length in a callee-saved register so it will
310 // be preserved if we call a C helper. 311 // be preserved if we call a C helper.
311 Register capture_length = w19; 312 Register capture_length = w19;
312 ASSERT(kCalleeSaved.IncludesAliasOf(capture_length)); 313 ASSERT(kCalleeSaved.IncludesAliasOf(capture_length));
313 314
314 // Find length of back-referenced capture. 315 // Find length of back-referenced capture.
315 ASSERT((start_reg % 2) == 0); 316 ASSERT((start_reg % 2) == 0);
316 if (start_reg < kNumCachedRegisters) { 317 if (start_reg < kNumCachedRegisters) {
317 __ Mov(capture_start_offset.X(), GetCachedRegister(start_reg)); 318 __ Mov(capture_start_offset.X(), GetCachedRegister(start_reg));
318 __ Lsr(x11, GetCachedRegister(start_reg), kWRegSize); 319 __ Lsr(x11, GetCachedRegister(start_reg), kWRegSizeInBits);
319 } else { 320 } else {
320 __ Ldp(w11, capture_start_offset, capture_location(start_reg, x10)); 321 __ Ldp(w11, capture_start_offset, capture_location(start_reg, x10));
321 } 322 }
322 __ Sub(capture_length, w11, capture_start_offset); // Length to check. 323 __ Sub(capture_length, w11, capture_start_offset); // Length to check.
323 // Succeed on empty capture (including no capture). 324 // Succeed on empty capture (including no capture).
324 __ Cbz(capture_length, &fallthrough); 325 __ Cbz(capture_length, &fallthrough);
325 326
326 // Check that there are enough characters left in the input. 327 // Check that there are enough characters left in the input.
327 __ Cmn(capture_length, current_input_offset()); 328 __ Cmn(capture_length, current_input_offset());
328 BranchOrBacktrack(gt, on_no_match); 329 BranchOrBacktrack(gt, on_no_match);
(...skipping 53 matching lines...)
382 __ Cmp(current_input_offset().X(), Operand(current_input_offset(), SXTW)); 383 __ Cmp(current_input_offset().X(), Operand(current_input_offset(), SXTW));
383 __ Ccmp(current_input_offset(), 0, NoFlag, eq); 384 __ Ccmp(current_input_offset(), 0, NoFlag, eq);
384 // The current input offset should be <= 0, and fit in a W register. 385 // The current input offset should be <= 0, and fit in a W register.
385 __ Check(le, kOffsetOutOfRange); 386 __ Check(le, kOffsetOutOfRange);
386 } 387 }
387 } else { 388 } else {
388 ASSERT(mode_ == UC16); 389 ASSERT(mode_ == UC16);
389 int argument_count = 4; 390 int argument_count = 4;
390 391
391 // The cached registers need to be retained. 392 // The cached registers need to be retained.
392 CPURegList cached_registers(CPURegister::kRegister, kXRegSize, 0, 7); 393 CPURegList cached_registers(CPURegister::kRegister, kXRegSizeInBits, 0, 7);
393 ASSERT((cached_registers.Count() * 2) == kNumCachedRegisters); 394 ASSERT((cached_registers.Count() * 2) == kNumCachedRegisters);
394 __ PushCPURegList(cached_registers); 395 __ PushCPURegList(cached_registers);
395 396
396 // Put arguments into arguments registers. 397 // Put arguments into arguments registers.
397 // Parameters are 398 // Parameters are
398 // x0: Address byte_offset1 - Address captured substring's start. 399 // x0: Address byte_offset1 - Address captured substring's start.
399 // x1: Address byte_offset2 - Address of current character position. 400 // x1: Address byte_offset2 - Address of current character position.
400 // w2: size_t byte_length - length of capture in bytes(!) 401 // w2: size_t byte_length - length of capture in bytes(!)
401 // x3: Isolate* isolate 402 // x3: Isolate* isolate
402 403
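
The x0-x3/w2 layout above is the AAPCS64 argument-register order for the case-insensitive comparison helper that the regexp macro assemblers use for UC16 back references (the call itself falls in the lines skipped below). Its C++ signature, declared in V8's generic regexp-macro-assembler header rather than in this file, is roughly:

    // Reference sketch of the callee; returns non-zero when the ranges match case-insensitively.
    static int CaseInsensitiveCompareUC16(Address byte_offset1,  // start of captured substring
                                          Address byte_offset2,  // current character position
                                          size_t byte_length,    // capture length in bytes
                                          Isolate* isolate);
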
(...skipping 31 matching lines...)
434 435
435 Register capture_start_address = x12; 436 Register capture_start_address = x12;
436 Register capture_end_address = x13; 437 Register capture_end_address = x13;
437 Register current_position_address = x14; 438 Register current_position_address = x14;
438 Register capture_length = w15; 439 Register capture_length = w15;
439 440
440 // Find length of back-referenced capture. 441 // Find length of back-referenced capture.
441 ASSERT((start_reg % 2) == 0); 442 ASSERT((start_reg % 2) == 0);
442 if (start_reg < kNumCachedRegisters) { 443 if (start_reg < kNumCachedRegisters) {
443 __ Mov(x10, GetCachedRegister(start_reg)); 444 __ Mov(x10, GetCachedRegister(start_reg));
444 __ Lsr(x11, GetCachedRegister(start_reg), kWRegSize); 445 __ Lsr(x11, GetCachedRegister(start_reg), kWRegSizeInBits);
445 } else { 446 } else {
446 __ Ldp(w11, w10, capture_location(start_reg, x10)); 447 __ Ldp(w11, w10, capture_location(start_reg, x10));
447 } 448 }
448 __ Sub(capture_length, w11, w10); // Length to check. 449 __ Sub(capture_length, w11, w10); // Length to check.
449 // Succeed on empty capture (including no capture). 450 // Succeed on empty capture (including no capture).
450 __ Cbz(capture_length, &fallthrough); 451 __ Cbz(capture_length, &fallthrough);
451 452
452 // Check that there are enough characters left in the input. 453 // Check that there are enough characters left in the input.
453 __ Cmn(capture_length, current_input_offset()); 454 __ Cmn(capture_length, current_input_offset());
454 BranchOrBacktrack(gt, on_no_match); 455 BranchOrBacktrack(gt, on_no_match);
(...skipping 264 matching lines...)
719 // - (num_registers_ - kNumCachedRegisters) (W registers) 720 // - (num_registers_ - kNumCachedRegisters) (W registers)
720 int num_wreg_to_allocate = num_registers_ - kNumCachedRegisters; 721 int num_wreg_to_allocate = num_registers_ - kNumCachedRegisters;
721 // Do not allocate registers on the stack if they can all be cached. 722 // Do not allocate registers on the stack if they can all be cached.
722 if (num_wreg_to_allocate < 0) { num_wreg_to_allocate = 0; } 723 if (num_wreg_to_allocate < 0) { num_wreg_to_allocate = 0; }
723 // Make room for the success_counter. 724 // Make room for the success_counter.
724 num_wreg_to_allocate += 2; 725 num_wreg_to_allocate += 2;
725 726
726 // Make sure the stack alignment will be respected. 727 // Make sure the stack alignment will be respected.
727 int alignment = masm_->ActivationFrameAlignment(); 728 int alignment = masm_->ActivationFrameAlignment();
728 ASSERT_EQ(alignment % 16, 0); 729 ASSERT_EQ(alignment % 16, 0);
729 int align_mask = (alignment / kWRegSizeInBytes) - 1; 730 int align_mask = (alignment / kWRegSize) - 1;
730 num_wreg_to_allocate = (num_wreg_to_allocate + align_mask) & ~align_mask; 731 num_wreg_to_allocate = (num_wreg_to_allocate + align_mask) & ~align_mask;
731 732
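
A worked example of the rounding above, assuming the asserted 16-byte activation frame alignment and 4-byte W registers (kWRegSize after this rename): align_mask = 16 / 4 - 1 = 3, so e.g. num_wreg_to_allocate = 5 becomes (5 + 3) & ~3 = 8, i.e. the stack claim below is always a whole number of 16-byte units.
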
732 // Check if we have space on the stack. 733 // Check if we have space on the stack.
733 Label stack_limit_hit; 734 Label stack_limit_hit;
734 Label stack_ok; 735 Label stack_ok;
735 736
736 ExternalReference stack_limit = 737 ExternalReference stack_limit =
737 ExternalReference::address_of_stack_limit(isolate()); 738 ExternalReference::address_of_stack_limit(isolate());
738 __ Mov(x10, Operand(stack_limit)); 739 __ Mov(x10, Operand(stack_limit));
739 __ Ldr(x10, MemOperand(x10)); 740 __ Ldr(x10, MemOperand(x10));
740 __ Subs(x10, csp, x10); 741 __ Subs(x10, csp, x10);
741 742
742 // Handle it if the stack pointer is already below the stack limit. 743 // Handle it if the stack pointer is already below the stack limit.
743 __ B(ls, &stack_limit_hit); 744 __ B(ls, &stack_limit_hit);
744 745
745 // Check if there is room for the variable number of registers above 746 // Check if there is room for the variable number of registers above
746 // the stack limit. 747 // the stack limit.
747 __ Cmp(x10, num_wreg_to_allocate * kWRegSizeInBytes); 748 __ Cmp(x10, num_wreg_to_allocate * kWRegSize);
748 __ B(hs, &stack_ok); 749 __ B(hs, &stack_ok);
749 750
750 // Exit with OutOfMemory exception. There is not enough space on the stack 751 // Exit with OutOfMemory exception. There is not enough space on the stack
751 // for our working registers. 752 // for our working registers.
752 __ Mov(w0, EXCEPTION); 753 __ Mov(w0, EXCEPTION);
753 __ B(&return_w0); 754 __ B(&return_w0);
754 755
755 __ Bind(&stack_limit_hit); 756 __ Bind(&stack_limit_hit);
756 CallCheckStackGuardState(x10); 757 CallCheckStackGuardState(x10);
757 // If returned value is non-zero, we exit with the returned value as result. 758 // If returned value is non-zero, we exit with the returned value as result.
758 __ Cbnz(w0, &return_w0); 759 __ Cbnz(w0, &return_w0);
759 760
760 __ Bind(&stack_ok); 761 __ Bind(&stack_ok);
761 762
762 // Allocate space on stack. 763 // Allocate space on stack.
763 __ Claim(num_wreg_to_allocate, kWRegSizeInBytes); 764 __ Claim(num_wreg_to_allocate, kWRegSize);
764 765
765 // Initialize success_counter with 0. 766 // Initialize success_counter with 0.
766 __ Str(wzr, MemOperand(frame_pointer(), kSuccessCounter)); 767 __ Str(wzr, MemOperand(frame_pointer(), kSuccessCounter));
767 768
768 // Find negative length (offset of start relative to end). 769 // Find negative length (offset of start relative to end).
769 __ Sub(x10, input_start(), input_end()); 770 __ Sub(x10, input_start(), input_end());
770 if (masm_->emit_debug_code()) { 771 if (masm_->emit_debug_code()) {
771 // Check that the input string length is < 2^30. 772 // Check that the input string length is < 2^30.
772 __ Neg(x11, x10); 773 __ Neg(x11, x10);
773 __ Cmp(x11, (1<<30) - 1); 774 __ Cmp(x11, (1<<30) - 1);
774 __ Check(ls, kInputStringTooLong); 775 __ Check(ls, kInputStringTooLong);
775 } 776 }
776 __ Mov(current_input_offset(), w10); 777 __ Mov(current_input_offset(), w10);
777 778
778 // The non-position value is used as a clearing value for the 779 // The non-position value is used as a clearing value for the
779 // capture registers, it corresponds to the position of the first character 780 // capture registers, it corresponds to the position of the first character
780 // minus one. 781 // minus one.
781 __ Sub(non_position_value(), current_input_offset(), char_size()); 782 __ Sub(non_position_value(), current_input_offset(), char_size());
782 __ Sub(non_position_value(), non_position_value(), 783 __ Sub(non_position_value(), non_position_value(),
783 Operand(start_offset(), LSL, (mode_ == UC16) ? 1 : 0)); 784 Operand(start_offset(), LSL, (mode_ == UC16) ? 1 : 0));
784 // We can store this value twice in an X register for initializing 785 // We can store this value twice in an X register for initializing
785 // on-stack registers later. 786 // on-stack registers later.
786 __ Orr(twice_non_position_value(), 787 __ Orr(twice_non_position_value(),
787 non_position_value().X(), 788 non_position_value().X(),
788 Operand(non_position_value().X(), LSL, kWRegSize)); 789 Operand(non_position_value().X(), LSL, kWRegSizeInBits));
789 790
790 // Initialize code pointer register. 791 // Initialize code pointer register.
791 __ Mov(code_pointer(), Operand(masm_->CodeObject())); 792 __ Mov(code_pointer(), Operand(masm_->CodeObject()));
792 793
793 Label load_char_start_regexp, start_regexp; 794 Label load_char_start_regexp, start_regexp;
794 // Load newline if index is at start, previous character otherwise. 795 // Load newline if index is at start, previous character otherwise.
795 __ Cbnz(start_offset(), &load_char_start_regexp); 796 __ Cbnz(start_offset(), &load_char_start_regexp);
796 __ Mov(current_character(), '\n'); 797 __ Mov(current_character(), '\n');
797 __ B(&start_regexp); 798 __ B(&start_regexp);
798 799
(...skipping 45 matching lines...)
844 __ Add(input_length, start_offset(), Operand(w10, LSR, 1)); 845 __ Add(input_length, start_offset(), Operand(w10, LSR, 1));
845 } else { 846 } else {
846 __ Add(input_length, start_offset(), w10); 847 __ Add(input_length, start_offset(), w10);
847 } 848 }
848 849
849 // Copy the results to the output array from the cached registers first. 850 // Copy the results to the output array from the cached registers first.
850 for (int i = 0; 851 for (int i = 0;
851 (i < num_saved_registers_) && (i < kNumCachedRegisters); 852 (i < num_saved_registers_) && (i < kNumCachedRegisters);
852 i += 2) { 853 i += 2) {
853 __ Mov(capture_start.X(), GetCachedRegister(i)); 854 __ Mov(capture_start.X(), GetCachedRegister(i));
854 __ Lsr(capture_end.X(), capture_start.X(), kWRegSize); 855 __ Lsr(capture_end.X(), capture_start.X(), kWRegSizeInBits);
855 if ((i == 0) && global_with_zero_length_check()) { 856 if ((i == 0) && global_with_zero_length_check()) {
856 // Keep capture start for the zero-length check later. 857 // Keep capture start for the zero-length check later.
857 __ Mov(first_capture_start, capture_start); 858 __ Mov(first_capture_start, capture_start);
858 } 859 }
859 // Offsets need to be relative to the start of the string. 860 // Offsets need to be relative to the start of the string.
860 if (mode_ == UC16) { 861 if (mode_ == UC16) {
861 __ Add(capture_start, input_length, Operand(capture_start, ASR, 1)); 862 __ Add(capture_start, input_length, Operand(capture_start, ASR, 1));
862 __ Add(capture_end, input_length, Operand(capture_end, ASR, 1)); 863 __ Add(capture_end, input_length, Operand(capture_end, ASR, 1));
863 } else { 864 } else {
864 __ Add(capture_start, input_length, capture_start); 865 __ Add(capture_start, input_length, capture_start);
(...skipping 134 matching lines...)
999 __ Mov(csp, fp); 1000 __ Mov(csp, fp);
1000 1001
1001 // Restore registers. 1002 // Restore registers.
1002 __ PopCPURegList(registers_to_retain); 1003 __ PopCPURegList(registers_to_retain);
1003 1004
1004 __ Ret(); 1005 __ Ret();
1005 1006
1006 Label exit_with_exception; 1007 Label exit_with_exception;
1007 // Registers x0 to x7 are used to store the first captures, they need to be 1008 // Registers x0 to x7 are used to store the first captures, they need to be
1008 // retained over calls to C++ code. 1009 // retained over calls to C++ code.
1009 CPURegList cached_registers(CPURegister::kRegister, kXRegSize, 0, 7); 1010 CPURegList cached_registers(CPURegister::kRegister, kXRegSizeInBits, 0, 7);
1010 ASSERT((cached_registers.Count() * 2) == kNumCachedRegisters); 1011 ASSERT((cached_registers.Count() * 2) == kNumCachedRegisters);
1011 1012
1012 if (check_preempt_label_.is_linked()) { 1013 if (check_preempt_label_.is_linked()) {
1013 __ Bind(&check_preempt_label_); 1014 __ Bind(&check_preempt_label_);
1014 SaveLinkRegister(); 1015 SaveLinkRegister();
1015 // The cached registers need to be retained. 1016 // The cached registers need to be retained.
1016 __ PushCPURegList(cached_registers); 1017 __ PushCPURegList(cached_registers);
1017 CallCheckStackGuardState(x10); 1018 CallCheckStackGuardState(x10);
1018 // Returning from the regexp code restores the stack (csp <- fp) 1019 // Returning from the regexp code restores the stack (csp <- fp)
1019 // so we don't need to drop the link register from it before exiting. 1020 // so we don't need to drop the link register from it before exiting.
(...skipping 141 matching lines...)
1161 switch (register_state) { 1162 switch (register_state) {
1162 case STACKED: 1163 case STACKED:
1163 __ Ldr(current_input_offset(), register_location(reg)); 1164 __ Ldr(current_input_offset(), register_location(reg));
1164 break; 1165 break;
1165 case CACHED_LSW: 1166 case CACHED_LSW:
1166 cached_register = GetCachedRegister(reg); 1167 cached_register = GetCachedRegister(reg);
1167 __ Mov(current_input_offset(), cached_register.W()); 1168 __ Mov(current_input_offset(), cached_register.W());
1168 break; 1169 break;
1169 case CACHED_MSW: 1170 case CACHED_MSW:
1170 cached_register = GetCachedRegister(reg); 1171 cached_register = GetCachedRegister(reg);
1171 __ Lsr(current_input_offset().X(), cached_register, kWRegSize); 1172 __ Lsr(current_input_offset().X(), cached_register, kWRegSizeInBits);
1172 break; 1173 break;
1173 default: 1174 default:
1174 UNREACHABLE(); 1175 UNREACHABLE();
1175 break; 1176 break;
1176 } 1177 }
1177 } 1178 }
1178 1179
1179 1180
1180 void RegExpMacroAssemblerA64::ReadStackPointerFromRegister(int reg) { 1181 void RegExpMacroAssemblerA64::ReadStackPointerFromRegister(int reg) {
1181 Register read_from = GetRegister(reg, w10); 1182 Register read_from = GetRegister(reg, w10);
(...skipping 74 matching lines...)
1256 ASSERT(reg_from >= kNumCachedRegisters); 1257 ASSERT(reg_from >= kNumCachedRegisters);
1257 1258
1258 // Move down the indexes of the registers on stack to get the correct offset 1259 // Move down the indexes of the registers on stack to get the correct offset
1259 // in memory. 1260 // in memory.
1260 reg_from -= kNumCachedRegisters; 1261 reg_from -= kNumCachedRegisters;
1261 reg_to -= kNumCachedRegisters; 1262 reg_to -= kNumCachedRegisters;
1262 // We should not unroll the loop for less than 2 registers. 1263 // We should not unroll the loop for less than 2 registers.
1263 STATIC_ASSERT(kNumRegistersToUnroll > 2); 1264 STATIC_ASSERT(kNumRegistersToUnroll > 2);
1264 // We position the base pointer to (reg_from + 1). 1265 // We position the base pointer to (reg_from + 1).
1265 int base_offset = kFirstRegisterOnStack - 1266 int base_offset = kFirstRegisterOnStack -
1266 kWRegSizeInBytes - (kWRegSizeInBytes * reg_from); 1267 kWRegSize - (kWRegSize * reg_from);
1267 if (num_registers > kNumRegistersToUnroll) { 1268 if (num_registers > kNumRegistersToUnroll) {
1268 Register base = x10; 1269 Register base = x10;
1269 __ Add(base, frame_pointer(), base_offset); 1270 __ Add(base, frame_pointer(), base_offset);
1270 1271
1271 Label loop; 1272 Label loop;
1272 __ Mov(x11, num_registers); 1273 __ Mov(x11, num_registers);
1273 __ Bind(&loop); 1274 __ Bind(&loop);
1274 __ Str(twice_non_position_value(), 1275 __ Str(twice_non_position_value(),
1275 MemOperand(base, -kPointerSize, PostIndex)); 1276 MemOperand(base, -kPointerSize, PostIndex));
1276 __ Sub(x11, x11, 2); 1277 __ Sub(x11, x11, 2);
1277 __ Cbnz(x11, &loop); 1278 __ Cbnz(x11, &loop);
1278 } else { 1279 } else {
1279 for (int i = reg_from; i <= reg_to; i += 2) { 1280 for (int i = reg_from; i <= reg_to; i += 2) {
1280 __ Str(twice_non_position_value(), 1281 __ Str(twice_non_position_value(),
1281 MemOperand(frame_pointer(), base_offset)); 1282 MemOperand(frame_pointer(), base_offset));
1282 base_offset -= kWRegSizeInBytes * 2; 1283 base_offset -= kWRegSize * 2;
1283 } 1284 }
1284 } 1285 }
1285 } 1286 }
1286 } 1287 }
1287 1288
1288 1289
1289 void RegExpMacroAssemblerA64::WriteStackPointerToRegister(int reg) { 1290 void RegExpMacroAssemblerA64::WriteStackPointerToRegister(int reg) {
1290 __ Ldr(x10, MemOperand(frame_pointer(), kStackBase)); 1291 __ Ldr(x10, MemOperand(frame_pointer(), kStackBase));
1291 __ Sub(x10, backtrack_stackpointer(), x10); 1292 __ Sub(x10, backtrack_stackpointer(), x10);
1292 if (masm_->emit_debug_code()) { 1293 if (masm_->emit_debug_code()) {
(...skipping 129 matching lines...)
1422 1423
1423 // Private methods: 1424 // Private methods:
1424 1425
1425 void RegExpMacroAssemblerA64::CallCheckStackGuardState(Register scratch) { 1426 void RegExpMacroAssemblerA64::CallCheckStackGuardState(Register scratch) {
1426 // Allocate space on the stack to store the return address. The 1427 // Allocate space on the stack to store the return address. The
1427 // CheckStackGuardState C++ function will override it if the code 1428 // CheckStackGuardState C++ function will override it if the code
1428 // moved. Allocate extra space for 2 arguments passed by pointers. 1429 // moved. Allocate extra space for 2 arguments passed by pointers.
1429 // AAPCS64 requires the stack to be 16 byte aligned. 1430 // AAPCS64 requires the stack to be 16 byte aligned.
1430 int alignment = masm_->ActivationFrameAlignment(); 1431 int alignment = masm_->ActivationFrameAlignment();
1431 ASSERT_EQ(alignment % 16, 0); 1432 ASSERT_EQ(alignment % 16, 0);
1432 int align_mask = (alignment / kXRegSizeInBytes) - 1; 1433 int align_mask = (alignment / kXRegSize) - 1;
1433 int xreg_to_claim = (3 + align_mask) & ~align_mask; 1434 int xreg_to_claim = (3 + align_mask) & ~align_mask;
1434 1435
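
Worked example of the claim size, under the same 16-byte alignment assumption: kXRegSize is 8 bytes, so align_mask = 16 / 8 - 1 = 1, and the three slots needed (return address plus the two pointer arguments) round up to xreg_to_claim = (3 + 1) & ~1 = 4.
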
1435 ASSERT(csp.Is(__ StackPointer())); 1436 ASSERT(csp.Is(__ StackPointer()));
1436 __ Claim(xreg_to_claim); 1437 __ Claim(xreg_to_claim);
1437 1438
1438 // CheckStackGuardState needs the end and start addresses of the input string. 1439 // CheckStackGuardState needs the end and start addresses of the input string.
1439 __ Poke(input_end(), 2 * kPointerSize); 1440 __ Poke(input_end(), 2 * kPointerSize);
1440 __ Add(x5, csp, 2 * kPointerSize); 1441 __ Add(x5, csp, 2 * kPointerSize);
1441 __ Poke(input_start(), kPointerSize); 1442 __ Poke(input_start(), kPointerSize);
1442 __ Add(x4, csp, kPointerSize); 1443 __ Add(x4, csp, kPointerSize);
(...skipping 91 matching lines...)
1534 __ Cmp(backtrack_stackpointer(), x10); 1535 __ Cmp(backtrack_stackpointer(), x10);
1535 CallIf(&stack_overflow_label_, ls); 1536 CallIf(&stack_overflow_label_, ls);
1536 } 1537 }
1537 1538
1538 1539
1539 void RegExpMacroAssemblerA64::Push(Register source) { 1540 void RegExpMacroAssemblerA64::Push(Register source) {
1540 ASSERT(source.Is32Bits()); 1541 ASSERT(source.Is32Bits());
1541 ASSERT(!source.is(backtrack_stackpointer())); 1542 ASSERT(!source.is(backtrack_stackpointer()));
1542 __ Str(source, 1543 __ Str(source,
1543 MemOperand(backtrack_stackpointer(), 1544 MemOperand(backtrack_stackpointer(),
1544 -static_cast<int>(kWRegSizeInBytes), 1545 -static_cast<int>(kWRegSize),
1545 PreIndex)); 1546 PreIndex));
1546 } 1547 }
1547 1548
1548 1549
1549 void RegExpMacroAssemblerA64::Pop(Register target) { 1550 void RegExpMacroAssemblerA64::Pop(Register target) {
1550 ASSERT(target.Is32Bits()); 1551 ASSERT(target.Is32Bits());
1551 ASSERT(!target.is(backtrack_stackpointer())); 1552 ASSERT(!target.is(backtrack_stackpointer()));
1552 __ Ldr(target, 1553 __ Ldr(target,
1553 MemOperand(backtrack_stackpointer(), kWRegSizeInBytes, PostIndex)); 1554 MemOperand(backtrack_stackpointer(), kWRegSize, PostIndex));
1554 } 1555 }
1555 1556
1556 1557
1557 Register RegExpMacroAssemblerA64::GetCachedRegister(int register_index) { 1558 Register RegExpMacroAssemblerA64::GetCachedRegister(int register_index) {
1558 ASSERT(register_index < kNumCachedRegisters); 1559 ASSERT(register_index < kNumCachedRegisters);
1559 return Register::Create(register_index / 2, kXRegSize); 1560 return Register::Create(register_index / 2, kXRegSizeInBits);
1560 } 1561 }
1561 1562
1562 1563
1563 Register RegExpMacroAssemblerA64::GetRegister(int register_index, 1564 Register RegExpMacroAssemblerA64::GetRegister(int register_index,
1564 Register maybe_result) { 1565 Register maybe_result) {
1565 ASSERT(maybe_result.Is32Bits()); 1566 ASSERT(maybe_result.Is32Bits());
1566 ASSERT(register_index >= 0); 1567 ASSERT(register_index >= 0);
1567 if (num_registers_ <= register_index) { 1568 if (num_registers_ <= register_index) {
1568 num_registers_ = register_index + 1; 1569 num_registers_ = register_index + 1;
1569 } 1570 }
1570 Register result; 1571 Register result;
1571 RegisterState register_state = GetRegisterState(register_index); 1572 RegisterState register_state = GetRegisterState(register_index);
1572 switch (register_state) { 1573 switch (register_state) {
1573 case STACKED: 1574 case STACKED:
1574 __ Ldr(maybe_result, register_location(register_index)); 1575 __ Ldr(maybe_result, register_location(register_index));
1575 result = maybe_result; 1576 result = maybe_result;
1576 break; 1577 break;
1577 case CACHED_LSW: 1578 case CACHED_LSW:
1578 result = GetCachedRegister(register_index).W(); 1579 result = GetCachedRegister(register_index).W();
1579 break; 1580 break;
1580 case CACHED_MSW: 1581 case CACHED_MSW:
1581 __ Lsr(maybe_result.X(), GetCachedRegister(register_index), kWRegSize); 1582 __ Lsr(maybe_result.X(), GetCachedRegister(register_index),
1583 kWRegSizeInBits);
1582 result = maybe_result; 1584 result = maybe_result;
1583 break; 1585 break;
1584 default: 1586 default:
1585 UNREACHABLE(); 1587 UNREACHABLE();
1586 break; 1588 break;
1587 } 1589 }
1588 ASSERT(result.Is32Bits()); 1590 ASSERT(result.Is32Bits());
1589 return result; 1591 return result;
1590 } 1592 }
1591 1593
1592 1594
1593 void RegExpMacroAssemblerA64::StoreRegister(int register_index, 1595 void RegExpMacroAssemblerA64::StoreRegister(int register_index,
1594 Register source) { 1596 Register source) {
1595 ASSERT(source.Is32Bits()); 1597 ASSERT(source.Is32Bits());
1596 ASSERT(register_index >= 0); 1598 ASSERT(register_index >= 0);
1597 if (num_registers_ <= register_index) { 1599 if (num_registers_ <= register_index) {
1598 num_registers_ = register_index + 1; 1600 num_registers_ = register_index + 1;
1599 } 1601 }
1600 1602
1601 Register cached_register; 1603 Register cached_register;
1602 RegisterState register_state = GetRegisterState(register_index); 1604 RegisterState register_state = GetRegisterState(register_index);
1603 switch (register_state) { 1605 switch (register_state) {
1604 case STACKED: 1606 case STACKED:
1605 __ Str(source, register_location(register_index)); 1607 __ Str(source, register_location(register_index));
1606 break; 1608 break;
1607 case CACHED_LSW: 1609 case CACHED_LSW:
1608 cached_register = GetCachedRegister(register_index); 1610 cached_register = GetCachedRegister(register_index);
1609 if (!source.Is(cached_register.W())) { 1611 if (!source.Is(cached_register.W())) {
1610 __ Bfi(cached_register, source.X(), 0, kWRegSize); 1612 __ Bfi(cached_register, source.X(), 0, kWRegSizeInBits);
1611 } 1613 }
1612 break; 1614 break;
1613 case CACHED_MSW: 1615 case CACHED_MSW:
1614 cached_register = GetCachedRegister(register_index); 1616 cached_register = GetCachedRegister(register_index);
1615 __ Bfi(cached_register, source.X(), kWRegSize, kWRegSize); 1617 __ Bfi(cached_register, source.X(), kWRegSizeInBits, kWRegSizeInBits);
1616 break; 1618 break;
1617 default: 1619 default:
1618 UNREACHABLE(); 1620 UNREACHABLE();
1619 break; 1621 break;
1620 } 1622 }
1621 } 1623 }
1622 1624
1623 1625
1624 void RegExpMacroAssemblerA64::CallIf(Label* to, Condition condition) { 1626 void RegExpMacroAssemblerA64::CallIf(Label* to, Condition condition) {
1625 Label skip_call; 1627 Label skip_call;
(...skipping 17 matching lines...)
1643 } 1645 }
1644 1646
1645 1647
1646 MemOperand RegExpMacroAssemblerA64::register_location(int register_index) { 1648 MemOperand RegExpMacroAssemblerA64::register_location(int register_index) {
1647 ASSERT(register_index < (1<<30)); 1649 ASSERT(register_index < (1<<30));
1648 ASSERT(register_index >= kNumCachedRegisters); 1650 ASSERT(register_index >= kNumCachedRegisters);
1649 if (num_registers_ <= register_index) { 1651 if (num_registers_ <= register_index) {
1650 num_registers_ = register_index + 1; 1652 num_registers_ = register_index + 1;
1651 } 1653 }
1652 register_index -= kNumCachedRegisters; 1654 register_index -= kNumCachedRegisters;
1653 int offset = kFirstRegisterOnStack - register_index * kWRegSizeInBytes; 1655 int offset = kFirstRegisterOnStack - register_index * kWRegSize;
1654 return MemOperand(frame_pointer(), offset); 1656 return MemOperand(frame_pointer(), offset);
1655 } 1657 }
1656 1658
1657 MemOperand RegExpMacroAssemblerA64::capture_location(int register_index, 1659 MemOperand RegExpMacroAssemblerA64::capture_location(int register_index,
1658 Register scratch) { 1660 Register scratch) {
1659 ASSERT(register_index < (1<<30)); 1661 ASSERT(register_index < (1<<30));
1660 ASSERT(register_index < num_saved_registers_); 1662 ASSERT(register_index < num_saved_registers_);
1661 ASSERT(register_index >= kNumCachedRegisters); 1663 ASSERT(register_index >= kNumCachedRegisters);
1662 ASSERT_EQ(register_index % 2, 0); 1664 ASSERT_EQ(register_index % 2, 0);
1663 register_index -= kNumCachedRegisters; 1665 register_index -= kNumCachedRegisters;
1664 int offset = kFirstCaptureOnStack - register_index * kWRegSizeInBytes; 1666 int offset = kFirstCaptureOnStack - register_index * kWRegSize;
1665 // capture_location is used with Stp instructions to load/store 2 registers. 1667 // capture_location is used with Stp instructions to load/store 2 registers.
1666 // The immediate field in the encoding is limited to 7 bits (signed). 1668 // The immediate field in the encoding is limited to 7 bits (signed).
1667 if (is_int7(offset)) { 1669 if (is_int7(offset)) {
1668 return MemOperand(frame_pointer(), offset); 1670 return MemOperand(frame_pointer(), offset);
1669 } else { 1671 } else {
1670 __ Add(scratch, frame_pointer(), offset); 1672 __ Add(scratch, frame_pointer(), offset);
1671 return MemOperand(scratch); 1673 return MemOperand(scratch);
1672 } 1674 }
1673 } 1675 }
1674 1676
(...skipping 44 matching lines...)
1719 __ Ldrh(current_character(), MemOperand(input_end(), offset, SXTW)); 1721 __ Ldrh(current_character(), MemOperand(input_end(), offset, SXTW));
1720 } 1722 }
1721 } 1723 }
1722 } 1724 }
1723 1725
1724 #endif // V8_INTERPRETED_REGEXP 1726 #endif // V8_INTERPRETED_REGEXP
1725 1727
1726 }} // namespace v8::internal 1728 }} // namespace v8::internal
1727 1729
1728 #endif // V8_TARGET_ARCH_A64 1730 #endif // V8_TARGET_ARCH_A64