| OLD | NEW |
| (Empty) | |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are |
| 4 // met: |
| 5 // |
| 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided |
| 11 // with the distribution. |
| 12 // * Neither the name of Google Inc. nor the names of its |
| 13 // contributors may be used to endorse or promote products derived |
| 14 // from this software without specific prior written permission. |
| 15 // |
| 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
| 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
| 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR |
| 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT |
| 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
| 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
| 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
| 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
| 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
| 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
| 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 27 |
| 28 #include "v8.h" |
| 29 |
| 30 #if V8_TARGET_ARCH_A64 |
| 31 |
| 32 #include "cpu-profiler.h" |
| 33 #include "unicode.h" |
| 34 #include "log.h" |
| 35 #include "code-stubs.h" |
| 36 #include "regexp-stack.h" |
| 37 #include "macro-assembler.h" |
| 38 #include "regexp-macro-assembler.h" |
| 39 #include "a64/regexp-macro-assembler-a64.h" |
| 40 |
| 41 namespace v8 { |
| 42 namespace internal { |
| 43 |
| 44 #ifndef V8_INTERPRETED_REGEXP |
| 45 /* |
| 46 * This assembler uses the following register assignment convention: |
| 47 * - w19 : Used to temporarily store a value before a call to C code.
| 48 * See CheckNotBackReferenceIgnoreCase. |
| 49 * - x20 : Pointer to the current code object (Code*), |
| 50 * it includes the heap object tag. |
| 51 * - w21 : Current position in input, as negative offset from |
| 52 * the end of the string. Please notice that this is |
| 53 * the byte offset, not the character offset! |
| 54 * - w22 : Currently loaded character. Must be loaded using |
| 55 * LoadCurrentCharacter before using any of the dispatch methods. |
| 56 * - x23 : Points to tip of backtrack stack. |
| 57 * - w24 : Position of the first character minus one: non_position_value. |
| 58 * Used to initialize capture registers. |
| 59 * - x25 : Address at the end of the input string: input_end. |
| 60 * Points to byte after last character in input. |
| 61 * - x26 : Address at the start of the input string: input_start. |
| 62 * - w27 : Where to start in the input string. |
| 63 * - x28 : Output array pointer. |
| 64 * - x29/fp : Frame pointer. Used to access arguments, local variables and |
| 65 * RegExp registers. |
| 66 * - x16/x17 : IP registers, used by assembler. Very volatile. |
| 67 * - csp : Points to tip of C stack. |
| 68 * |
| 69 * - x0-x7 : Used as a cache to store 32 bit capture registers. These |
| 70 * registers need to be retained every time a call to C code |
| 71 * is done. |
| 72 * |
| 73 * The remaining registers are free for computations. |
| 74 * Each call to a public method should retain this convention. |
| 75 * |
| 76 * The stack will have the following structure: |
| 77 * |
| 78 * Location Name Description |
| 79 * (as referred to in |
| 80 * the code) |
| 81 * |
| 82 * - fp[104] isolate Address of the current isolate. |
| 83 * - fp[96] return_address Secondary link/return address |
| 84 * used by an exit frame if this is a |
| 85 * native call. |
| 86 * ^^^ csp when called ^^^ |
| 87 * - fp[88] lr Return from the RegExp code. |
| 88 * - fp[80] r29 Old frame pointer (CalleeSaved). |
| 89 * - fp[0..72] r19-r28 Backup of CalleeSaved registers. |
| 90 * - fp[-8] direct_call 1 => Direct call from JavaScript code. |
| 91 * 0 => Call through the runtime system. |
| 92 * - fp[-16] stack_base High end of the memory area to use as |
| 93 * the backtracking stack. |
| 94 * - fp[-24] output_size Output may fit multiple sets of matches. |
| 95 * - fp[-32] input Handle containing the input string. |
| 96 * - fp[-40] success_counter |
| 97 * ^^^^^^^^^^^^^ From here and downwards we store 32 bit values ^^^^^^^^^^^^^ |
| 98 * - fp[-44] register N Capture registers initialized with |
| 99 * - fp[-48] register N + 1 non_position_value. |
| 100 * ... The first kNumCachedRegisters (N) registers |
| 101 * ... are cached in x0 to x7. |
| 102 * ... Only positions must be stored in the first |
| 103 * - ... num_saved_registers_ registers. |
| 104 * - ... |
| 105 * - register N + num_registers - 1 |
| 106 * ^^^^^^^^^ csp ^^^^^^^^^ |
| 107 * |
| 108 * The first num_saved_registers_ registers are initialized to point to |
| 109 * "character -1" in the string (i.e., char_size() bytes before the first |
| 110 * character of the string). The remaining registers start out as garbage. |
| 111 * |
| 112 * The data up to the return address must be placed there by the calling |
| 113 * code and the remaining arguments are passed in registers, e.g. by calling the |
| 114 * code entry as cast to a function with the signature: |
| 115 * int (*match)(String* input, |
| 116 * int start_offset, |
| 117 * Address input_start, |
| 118 * Address input_end, |
| 119 * int* output, |
| 120 * int output_size, |
| 121 * Address stack_base, |
| 122 * bool direct_call = false, |
| 123 * Address secondary_return_address, // Only used by native call. |
| 124 * Isolate* isolate) |
| 125 * The call is performed by NativeRegExpMacroAssembler::Execute() |
| 126 * (in regexp-macro-assembler.cc) via the CALL_GENERATED_REGEXP_CODE macro |
| 127 * in a64/simulator-a64.h. |
| 128 * When calling as a non-direct call (i.e., from C++ code), the return address |
| 129 * area is overwritten with the LR register by the RegExp code. When doing a |
| 130 * direct call from generated code, the return address is placed there by |
| 131 * the calling code, as in a normal exit frame. |
| 132 */ |
| 133 |
| 134 #define __ ACCESS_MASM(masm_) |
| 135 |
// Constructs a regexp assembler for |mode| (ASCII or UC16).
// |registers_to_save| is the number of capture registers the caller needs
// preserved in the output; captures use register pairs (start/end), hence
// the even-count assertion below.
RegExpMacroAssemblerA64::RegExpMacroAssemblerA64(
    Mode mode,
    int registers_to_save,
    Zone* zone)
    : NativeRegExpMacroAssembler(zone),
      masm_(new MacroAssembler(zone->isolate(), NULL, kRegExpCodeSize)),
      mode_(mode),
      num_registers_(registers_to_save),
      num_saved_registers_(registers_to_save),
      entry_label_(),
      start_label_(),
      success_label_(),
      backtrack_label_(),
      exit_label_() {
  __ SetStackPointer(csp);
  ASSERT_EQ(0, registers_to_save % 2);
  // We can cache at most 16 W registers in x0-x7.
  STATIC_ASSERT(kNumCachedRegisters <= 16);
  STATIC_ASSERT((kNumCachedRegisters % 2) == 0);
  // The entry code (frame setup) is emitted in GetCode(), once the number
  // of registers is known; jump over it for now.
  __ B(&entry_label_);   // We'll write the entry code later.
  __ Bind(&start_label_);  // And then continue from here.
}
| 158 |
| 159 |
// Destroys the owned MacroAssembler and detaches any still-linked labels.
RegExpMacroAssemblerA64::~RegExpMacroAssemblerA64() {
  delete masm_;
  // Unuse labels in case we throw away the assembler without calling GetCode.
  entry_label_.Unuse();
  start_label_.Unuse();
  success_label_.Unuse();
  backtrack_label_.Unuse();
  exit_label_.Unuse();
  check_preempt_label_.Unuse();
  stack_overflow_label_.Unuse();
}
| 171 |
// Returns the slack (in slots) the generated code may use on the backtrack
// stack between explicit stack-limit checks.
int RegExpMacroAssemblerA64::stack_limit_slack() {
  return RegExpStack::kStackLimitSlack;
}
| 175 |
| 176 |
| 177 void RegExpMacroAssemblerA64::AdvanceCurrentPosition(int by) { |
| 178 if (by != 0) { |
| 179 __ Add(current_input_offset(), |
| 180 current_input_offset(), by * char_size()); |
| 181 } |
| 182 } |
| 183 |
| 184 |
| 185 void RegExpMacroAssemblerA64::AdvanceRegister(int reg, int by) { |
| 186 ASSERT((reg >= 0) && (reg < num_registers_)); |
| 187 if (by != 0) { |
| 188 Register to_advance; |
| 189 RegisterState register_state = GetRegisterState(reg); |
| 190 switch (register_state) { |
| 191 case STACKED: |
| 192 __ Ldr(w10, register_location(reg)); |
| 193 __ Add(w10, w10, by); |
| 194 __ Str(w10, register_location(reg)); |
| 195 break; |
| 196 case CACHED_LSW: |
| 197 to_advance = GetCachedRegister(reg); |
| 198 __ Add(to_advance, to_advance, by); |
| 199 break; |
| 200 case CACHED_MSW: |
| 201 to_advance = GetCachedRegister(reg); |
| 202 __ Add(to_advance, to_advance, static_cast<int64_t>(by) << kWRegSize); |
| 203 break; |
| 204 default: |
| 205 UNREACHABLE(); |
| 206 break; |
| 207 } |
| 208 } |
| 209 } |
| 210 |
| 211 |
// Pops a code offset from the backtrack stack and jumps to that position
// in the regexp code. Checks for preemption first.
void RegExpMacroAssemblerA64::Backtrack() {
  CheckPreemption();
  // Pop the (32-bit) code offset, rebase it on the code object and branch.
  Pop(w10);
  __ Add(x10, code_pointer(), Operand(w10, UXTW));
  __ Br(x10);
}
| 218 |
| 219 |
// Binds |label| to the current position in the generated code.
void RegExpMacroAssemblerA64::Bind(Label* label) {
  __ Bind(label);
}
| 223 |
| 224 |
// Branches to |on_equal| if the current character equals |c|.
void RegExpMacroAssemblerA64::CheckCharacter(uint32_t c, Label* on_equal) {
  CompareAndBranchOrBacktrack(current_character(), c, eq, on_equal);
}
| 228 |
| 229 |
// Branches to |on_greater| if the current character is strictly greater
// than |limit| (unsigned comparison, 'hi' condition).
void RegExpMacroAssemblerA64::CheckCharacterGT(uc16 limit, Label* on_greater) {
  CompareAndBranchOrBacktrack(current_character(), limit, hi, on_greater);
}
| 233 |
| 234 |
// Branches to |on_at_start| if the current position is at the very start
// of the input string; otherwise falls through.
void RegExpMacroAssemblerA64::CheckAtStart(Label* on_at_start) {
  Label not_at_start;
  // Did we start the match at the start of the input string?
  CompareAndBranchOrBacktrack(start_offset(), 0, ne, &not_at_start);
  // If we did, are we still at the start of the input string?
  __ Add(x10, input_end(), Operand(current_input_offset(), SXTW));
  __ Cmp(x10, input_start());
  BranchOrBacktrack(eq, on_at_start);
  __ Bind(&not_at_start);
}
| 245 |
| 246 |
// Branches to |on_not_at_start| if the current position is not at the
// start of the input string.
void RegExpMacroAssemblerA64::CheckNotAtStart(Label* on_not_at_start) {
  // Did we start the match at the start of the input string?
  CompareAndBranchOrBacktrack(start_offset(), 0, ne, on_not_at_start);
  // If we did, are we still at the start of the input string?
  __ Add(x10, input_end(), Operand(current_input_offset(), SXTW));
  __ Cmp(x10, input_start());
  BranchOrBacktrack(ne, on_not_at_start);
}
| 255 |
| 256 |
// Branches to |on_less| if the current character is strictly less than
// |limit| (unsigned comparison, 'lo' condition).
void RegExpMacroAssemblerA64::CheckCharacterLT(uc16 limit, Label* on_less) {
  CompareAndBranchOrBacktrack(current_character(), limit, lo, on_less);
}
| 260 |
| 261 |
| 262 void RegExpMacroAssemblerA64::CheckCharacters(Vector<const uc16> str, |
| 263 int cp_offset, |
| 264 Label* on_failure, |
| 265 bool check_end_of_string) { |
| 266 // This method is only ever called from the cctests. |
| 267 |
| 268 if (check_end_of_string) { |
| 269 // Is last character of required match inside string. |
| 270 CheckPosition(cp_offset + str.length() - 1, on_failure); |
| 271 } |
| 272 |
| 273 Register characters_address = x11; |
| 274 |
| 275 __ Add(characters_address, |
| 276 input_end(), |
| 277 Operand(current_input_offset(), SXTW)); |
| 278 if (cp_offset != 0) { |
| 279 __ Add(characters_address, characters_address, cp_offset * char_size()); |
| 280 } |
| 281 |
| 282 for (int i = 0; i < str.length(); i++) { |
| 283 if (mode_ == ASCII) { |
| 284 __ Ldrb(w10, MemOperand(characters_address, 1, PostIndex)); |
| 285 ASSERT(str[i] <= String::kMaxOneByteCharCode); |
| 286 } else { |
| 287 __ Ldrh(w10, MemOperand(characters_address, 2, PostIndex)); |
| 288 } |
| 289 CompareAndBranchOrBacktrack(w10, str[i], ne, on_failure); |
| 290 } |
| 291 } |
| 292 |
| 293 |
// If the current position equals the position on top of the backtrack
// stack, pops that entry and branches to |on_equal|.
void RegExpMacroAssemblerA64::CheckGreedyLoop(Label* on_equal) {
  __ Ldr(w10, MemOperand(backtrack_stackpointer()));
  __ Cmp(current_input_offset(), w10);
  // x11 is 1 on equality, 0 otherwise: pop the entry only when equal.
  __ Cset(x11, eq);
  __ Add(backtrack_stackpointer(),
         backtrack_stackpointer(), Operand(x11, LSL, kWRegSizeInBytesLog2));
  BranchOrBacktrack(eq, on_equal);
}
| 302 |
| 303 void RegExpMacroAssemblerA64::CheckNotBackReferenceIgnoreCase( |
| 304 int start_reg, |
| 305 Label* on_no_match) { |
| 306 Label fallthrough; |
| 307 |
| 308 Register capture_start_offset = w10; |
| 309 // Save the capture length in a callee-saved register so it will |
| 310 // be preserved if we call a C helper. |
| 311 Register capture_length = w19; |
| 312 ASSERT(kCalleeSaved.IncludesAliasOf(capture_length)); |
| 313 |
| 314 // Find length of back-referenced capture. |
| 315 ASSERT((start_reg % 2) == 0); |
| 316 if (start_reg < kNumCachedRegisters) { |
| 317 __ Mov(capture_start_offset.X(), GetCachedRegister(start_reg)); |
| 318 __ Lsr(x11, GetCachedRegister(start_reg), kWRegSize); |
| 319 } else { |
| 320 __ Ldp(w11, capture_start_offset, capture_location(start_reg, x10)); |
| 321 } |
| 322 __ Sub(capture_length, w11, capture_start_offset); // Length to check. |
| 323 // Succeed on empty capture (including no capture). |
| 324 __ Cbz(capture_length, &fallthrough); |
| 325 |
| 326 // Check that there are enough characters left in the input. |
| 327 __ Cmn(capture_length, current_input_offset()); |
| 328 BranchOrBacktrack(gt, on_no_match); |
| 329 |
| 330 if (mode_ == ASCII) { |
| 331 Label success; |
| 332 Label fail; |
| 333 Label loop_check; |
| 334 |
| 335 Register capture_start_address = x12; |
| 336 Register capture_end_addresss = x13; |
| 337 Register current_position_address = x14; |
| 338 |
| 339 __ Add(capture_start_address, |
| 340 input_end(), |
| 341 Operand(capture_start_offset, SXTW)); |
| 342 __ Add(capture_end_addresss, |
| 343 capture_start_address, |
| 344 Operand(capture_length, SXTW)); |
| 345 __ Add(current_position_address, |
| 346 input_end(), |
| 347 Operand(current_input_offset(), SXTW)); |
| 348 |
| 349 Label loop; |
| 350 __ Bind(&loop); |
| 351 __ Ldrb(w10, MemOperand(capture_start_address, 1, PostIndex)); |
| 352 __ Ldrb(w11, MemOperand(current_position_address, 1, PostIndex)); |
| 353 __ Cmp(w10, w11); |
| 354 __ B(eq, &loop_check); |
| 355 |
| 356 // Mismatch, try case-insensitive match (converting letters to lower-case). |
| 357 __ Orr(w10, w10, 0x20); // Convert capture character to lower-case. |
| 358 __ Orr(w11, w11, 0x20); // Also convert input character. |
| 359 __ Cmp(w11, w10); |
| 360 __ B(ne, &fail); |
| 361 __ Sub(w10, w10, 'a'); |
| 362 __ Cmp(w10, 'z' - 'a'); // Is w10 a lowercase letter? |
| 363 __ B(ls, &loop_check); // In range 'a'-'z'. |
| 364 // Latin-1: Check for values in range [224,254] but not 247. |
| 365 __ Sub(w10, w10, 224 - 'a'); |
| 366 __ Cmp(w10, 254 - 224); |
| 367 __ Ccmp(w10, 247 - 224, ZFlag, ls); // Check for 247. |
| 368 __ B(eq, &fail); // Weren't Latin-1 letters. |
| 369 |
| 370 __ Bind(&loop_check); |
| 371 __ Cmp(capture_start_address, capture_end_addresss); |
| 372 __ B(lt, &loop); |
| 373 __ B(&success); |
| 374 |
| 375 __ Bind(&fail); |
| 376 BranchOrBacktrack(al, on_no_match); |
| 377 |
| 378 __ Bind(&success); |
| 379 // Compute new value of character position after the matched part. |
| 380 __ Sub(current_input_offset().X(), current_position_address, input_end()); |
| 381 if (masm_->emit_debug_code()) { |
| 382 __ Cmp(current_input_offset().X(), Operand(current_input_offset(), SXTW)); |
| 383 __ Ccmp(current_input_offset(), 0, NoFlag, eq); |
| 384 // The current input offset should be <= 0, and fit in a W register. |
| 385 __ Check(le, kOffsetOutOfRange); |
| 386 } |
| 387 } else { |
| 388 ASSERT(mode_ == UC16); |
| 389 int argument_count = 4; |
| 390 |
| 391 // The cached registers need to be retained. |
| 392 CPURegList cached_registers(CPURegister::kRegister, kXRegSize, 0, 7); |
| 393 ASSERT((cached_registers.Count() * 2) == kNumCachedRegisters); |
| 394 __ PushCPURegList(cached_registers); |
| 395 |
| 396 // Put arguments into arguments registers. |
| 397 // Parameters are |
| 398 // x0: Address byte_offset1 - Address captured substring's start. |
| 399 // x1: Address byte_offset2 - Address of current character position. |
| 400 // w2: size_t byte_length - length of capture in bytes(!) |
| 401 // x3: Isolate* isolate |
| 402 |
| 403 // Address of start of capture. |
| 404 __ Add(x0, input_end(), Operand(capture_start_offset, SXTW)); |
| 405 // Length of capture. |
| 406 __ Mov(w2, capture_length); |
| 407 // Address of current input position. |
| 408 __ Add(x1, input_end(), Operand(current_input_offset(), SXTW)); |
| 409 // Isolate. |
| 410 __ Mov(x3, Operand(ExternalReference::isolate_address(isolate()))); |
| 411 |
| 412 { |
| 413 AllowExternalCallThatCantCauseGC scope(masm_); |
| 414 ExternalReference function = |
| 415 ExternalReference::re_case_insensitive_compare_uc16(isolate()); |
| 416 __ CallCFunction(function, argument_count); |
| 417 } |
| 418 |
| 419 // Check if function returned non-zero for success or zero for failure. |
| 420 CompareAndBranchOrBacktrack(x0, 0, eq, on_no_match); |
| 421 // On success, increment position by length of capture. |
| 422 __ Add(current_input_offset(), current_input_offset(), capture_length); |
| 423 // Reset the cached registers. |
| 424 __ PopCPURegList(cached_registers); |
| 425 } |
| 426 |
| 427 __ Bind(&fallthrough); |
| 428 } |
| 429 |
| 430 void RegExpMacroAssemblerA64::CheckNotBackReference( |
| 431 int start_reg, |
| 432 Label* on_no_match) { |
| 433 Label fallthrough; |
| 434 |
| 435 Register capture_start_address = x12; |
| 436 Register capture_end_address = x13; |
| 437 Register current_position_address = x14; |
| 438 Register capture_length = w15; |
| 439 |
| 440 // Find length of back-referenced capture. |
| 441 ASSERT((start_reg % 2) == 0); |
| 442 if (start_reg < kNumCachedRegisters) { |
| 443 __ Mov(x10, GetCachedRegister(start_reg)); |
| 444 __ Lsr(x11, GetCachedRegister(start_reg), kWRegSize); |
| 445 } else { |
| 446 __ Ldp(w11, w10, capture_location(start_reg, x10)); |
| 447 } |
| 448 __ Sub(capture_length, w11, w10); // Length to check. |
| 449 // Succeed on empty capture (including no capture). |
| 450 __ Cbz(capture_length, &fallthrough); |
| 451 |
| 452 // Check that there are enough characters left in the input. |
| 453 __ Cmn(capture_length, current_input_offset()); |
| 454 BranchOrBacktrack(gt, on_no_match); |
| 455 |
| 456 // Compute pointers to match string and capture string |
| 457 __ Add(capture_start_address, input_end(), Operand(w10, SXTW)); |
| 458 __ Add(capture_end_address, |
| 459 capture_start_address, |
| 460 Operand(capture_length, SXTW)); |
| 461 __ Add(current_position_address, |
| 462 input_end(), |
| 463 Operand(current_input_offset(), SXTW)); |
| 464 |
| 465 Label loop; |
| 466 __ Bind(&loop); |
| 467 if (mode_ == ASCII) { |
| 468 __ Ldrb(w10, MemOperand(capture_start_address, 1, PostIndex)); |
| 469 __ Ldrb(w11, MemOperand(current_position_address, 1, PostIndex)); |
| 470 } else { |
| 471 ASSERT(mode_ == UC16); |
| 472 __ Ldrh(w10, MemOperand(capture_start_address, 2, PostIndex)); |
| 473 __ Ldrh(w11, MemOperand(current_position_address, 2, PostIndex)); |
| 474 } |
| 475 __ Cmp(w10, w11); |
| 476 BranchOrBacktrack(ne, on_no_match); |
| 477 __ Cmp(capture_start_address, capture_end_address); |
| 478 __ B(lt, &loop); |
| 479 |
| 480 // Move current character position to position after match. |
| 481 __ Sub(current_input_offset().X(), current_position_address, input_end()); |
| 482 if (masm_->emit_debug_code()) { |
| 483 __ Cmp(current_input_offset().X(), Operand(current_input_offset(), SXTW)); |
| 484 __ Ccmp(current_input_offset(), 0, NoFlag, eq); |
| 485 // The current input offset should be <= 0, and fit in a W register. |
| 486 __ Check(le, kOffsetOutOfRange); |
| 487 } |
| 488 __ Bind(&fallthrough); |
| 489 } |
| 490 |
| 491 |
// Branches to |on_not_equal| if the current character differs from |c|.
void RegExpMacroAssemblerA64::CheckNotCharacter(unsigned c,
                                                Label* on_not_equal) {
  CompareAndBranchOrBacktrack(current_character(), c, ne, on_not_equal);
}
| 496 |
| 497 |
// Branches to |on_equal| if (current_character() & mask) == c.
void RegExpMacroAssemblerA64::CheckCharacterAfterAnd(uint32_t c,
                                                     uint32_t mask,
                                                     Label* on_equal) {
  __ And(w10, current_character(), mask);
  CompareAndBranchOrBacktrack(w10, c, eq, on_equal);
}
| 504 |
| 505 |
// Branches to |on_not_equal| if (current_character() & mask) != c.
void RegExpMacroAssemblerA64::CheckNotCharacterAfterAnd(unsigned c,
                                                        unsigned mask,
                                                        Label* on_not_equal) {
  __ And(w10, current_character(), mask);
  CompareAndBranchOrBacktrack(w10, c, ne, on_not_equal);
}
| 512 |
| 513 |
// Branches to |on_not_equal| if ((current_character() - minus) & mask) != c.
void RegExpMacroAssemblerA64::CheckNotCharacterAfterMinusAnd(
    uc16 c,
    uc16 minus,
    uc16 mask,
    Label* on_not_equal) {
  ASSERT(minus < String::kMaxUtf16CodeUnit);
  __ Sub(w10, current_character(), minus);
  __ And(w10, w10, mask);
  CompareAndBranchOrBacktrack(w10, c, ne, on_not_equal);
}
| 524 |
| 525 |
// Branches to |on_in_range| if from <= current_character() <= to, using
// the single unsigned comparison (c - from) <= (to - from).
void RegExpMacroAssemblerA64::CheckCharacterInRange(
    uc16 from,
    uc16 to,
    Label* on_in_range) {
  __ Sub(w10, current_character(), from);
  // Unsigned lower-or-same condition.
  CompareAndBranchOrBacktrack(w10, to - from, ls, on_in_range);
}
| 534 |
| 535 |
// Branches to |on_not_in_range| if the current character falls outside
// [from, to], using the single unsigned comparison (c - from) > (to - from).
void RegExpMacroAssemblerA64::CheckCharacterNotInRange(
    uc16 from,
    uc16 to,
    Label* on_not_in_range) {
  __ Sub(w10, current_character(), from);
  // Unsigned higher condition.
  CompareAndBranchOrBacktrack(w10, to - from, hi, on_not_in_range);
}
| 544 |
| 545 |
// Looks the current character up in the boolean |table| (a ByteArray) and
// branches to |on_bit_set| if the entry is non-zero.
void RegExpMacroAssemblerA64::CheckBitInTable(
    Handle<ByteArray> table,
    Label* on_bit_set) {
  __ Mov(x11, Operand(table));
  if ((mode_ != ASCII) || (kTableMask != String::kMaxOneByteCharCode)) {
    // Mask the character into the table range before adding the (untagged)
    // offset of the ByteArray payload.
    __ And(w10, current_character(), kTableMask);
    __ Add(w10, w10, ByteArray::kHeaderSize - kHeapObjectTag);
  } else {
    // ASCII characters already fit the table, no masking needed.
    __ Add(w10, current_character(), ByteArray::kHeaderSize - kHeapObjectTag);
  }
  __ Ldrb(w11, MemOperand(x11, w10, UXTW));
  CompareAndBranchOrBacktrack(w11, 0, ne, on_bit_set);
}
| 559 |
| 560 |
// Emits specialised code for the character class |type| ('s', 'd', '.',
// 'w', etc.), branching to |on_no_match| when the current character is not
// in the class. Returns true if custom code was emitted; returns false to
// let the caller fall back to the generic character-class code.
bool RegExpMacroAssemblerA64::CheckSpecialCharacterClass(uc16 type,
                                                         Label* on_no_match) {
  // Range checks (c in min..max) are generally implemented by an unsigned
  // (c - min) <= (max - min) check
  switch (type) {
    case 's':
      // Match space-characters
      if (mode_ == ASCII) {
        // One byte space characters are '\t'..'\r', ' ' and \u00a0.
        Label success;
        // Check for ' ' or 0x00a0.
        __ Cmp(current_character(), ' ');
        __ Ccmp(current_character(), 0x00a0, ZFlag, ne);
        __ B(eq, &success);
        // Check range 0x09..0x0d.
        __ Sub(w10, current_character(), '\t');
        CompareAndBranchOrBacktrack(w10, '\r' - '\t', hi, on_no_match);
        __ Bind(&success);
        return true;
      }
      return false;
    case 'S':
      // The emitted code for generic character classes is good enough.
      return false;
    case 'd':
      // Match ASCII digits ('0'..'9').
      __ Sub(w10, current_character(), '0');
      CompareAndBranchOrBacktrack(w10, '9' - '0', hi, on_no_match);
      return true;
    case 'D':
      // Match ASCII non-digits.
      __ Sub(w10, current_character(), '0');
      CompareAndBranchOrBacktrack(w10, '9' - '0', ls, on_no_match);
      return true;
    case '.': {
      // Match non-newlines (not 0x0a('\n'), 0x0d('\r'), 0x2028 and 0x2029)
      // Here we emit the conditional branch only once at the end to make branch
      // prediction more efficient, even though we could branch out of here
      // as soon as a character matches.
      __ Cmp(current_character(), 0x0a);
      __ Ccmp(current_character(), 0x0d, ZFlag, ne);
      if (mode_ == UC16) {
        __ Sub(w10, current_character(), 0x2028);
        // If the Z flag was set we clear the flags to force a branch.
        __ Ccmp(w10, 0x2029 - 0x2028, NoFlag, ne);
        // ls -> !((C==1) && (Z==0))
        BranchOrBacktrack(ls, on_no_match);
      } else {
        BranchOrBacktrack(eq, on_no_match);
      }
      return true;
    }
    case 'n': {
      // Match newlines (0x0a('\n'), 0x0d('\r'), 0x2028 and 0x2029)
      // We have to check all 4 newline characters before emitting
      // the conditional branch.
      __ Cmp(current_character(), 0x0a);
      __ Ccmp(current_character(), 0x0d, ZFlag, ne);
      if (mode_ == UC16) {
        __ Sub(w10, current_character(), 0x2028);
        // If the Z flag was set we clear the flags to force a fall-through.
        __ Ccmp(w10, 0x2029 - 0x2028, NoFlag, ne);
        // hi -> (C==1) && (Z==0)
        BranchOrBacktrack(hi, on_no_match);
      } else {
        BranchOrBacktrack(ne, on_no_match);
      }
      return true;
    }
    case 'w': {
      if (mode_ != ASCII) {
        // Table is 128 entries, so all ASCII characters can be tested.
        CompareAndBranchOrBacktrack(current_character(), 'z', hi, on_no_match);
      }
      // Look the character up in the word-character table.
      ExternalReference map = ExternalReference::re_word_character_map();
      __ Mov(x10, Operand(map));
      __ Ldrb(w10, MemOperand(x10, current_character(), UXTW));
      CompareAndBranchOrBacktrack(w10, 0, eq, on_no_match);
      return true;
    }
    case 'W': {
      Label done;
      if (mode_ != ASCII) {
        // Table is 128 entries, so all ASCII characters can be tested.
        __ Cmp(current_character(), 'z');
        __ B(hi, &done);
      }
      // Look the character up in the word-character table.
      ExternalReference map = ExternalReference::re_word_character_map();
      __ Mov(x10, Operand(map));
      __ Ldrb(w10, MemOperand(x10, current_character(), UXTW));
      CompareAndBranchOrBacktrack(w10, 0, ne, on_no_match);
      __ Bind(&done);
      return true;
    }
    case '*':
      // Match any character.
      return true;
    // No custom implementation (yet): s(UC16), S(UC16).
    default:
      return false;
  }
}
| 663 |
| 664 |
// Sets the result code to FAILURE and jumps to the common exit.
void RegExpMacroAssemblerA64::Fail() {
  __ Mov(w0, FAILURE);
  __ B(&exit_label_);
}
| 669 |
| 670 |
| 671 Handle<HeapObject> RegExpMacroAssemblerA64::GetCode(Handle<String> source) { |
| 672 Label return_w0; |
| 673 // Finalize code - write the entry point code now we know how many |
| 674 // registers we need. |
| 675 |
| 676 // Entry code: |
| 677 __ Bind(&entry_label_); |
| 678 |
| 679 // Arguments on entry: |
| 680 // x0: String* input |
| 681 // x1: int start_offset |
| 682 // x2: byte* input_start |
| 683 // x3: byte* input_end |
| 684 // x4: int* output array |
| 685 // x5: int output array size |
| 686 // x6: Address stack_base |
| 687 // x7: int direct_call |
| 688 |
| 689 // The stack pointer should be csp on entry. |
| 690 // csp[8]: address of the current isolate |
| 691 // csp[0]: secondary link/return address used by native call |
| 692 |
| 693 // Tell the system that we have a stack frame. Because the type is MANUAL, no |
| 694 // code is generated. |
| 695 FrameScope scope(masm_, StackFrame::MANUAL); |
| 696 |
| 697 // Push registers on the stack, only push the argument registers that we need. |
| 698 CPURegList argument_registers(x0, x5, x6, x7); |
| 699 |
| 700 CPURegList registers_to_retain = kCalleeSaved; |
| 701 ASSERT(kCalleeSaved.Count() == 11); |
| 702 registers_to_retain.Combine(lr); |
| 703 |
| 704 ASSERT(csp.Is(__ StackPointer())); |
| 705 __ PushCPURegList(registers_to_retain); |
| 706 __ PushCPURegList(argument_registers); |
| 707 |
| 708 // Set frame pointer in place. |
| 709 __ Add(frame_pointer(), csp, argument_registers.Count() * kPointerSize); |
| 710 |
| 711 // Initialize callee-saved registers. |
| 712 __ Mov(start_offset(), w1); |
| 713 __ Mov(input_start(), x2); |
| 714 __ Mov(input_end(), x3); |
| 715 __ Mov(output_array(), x4); |
| 716 |
| 717 // Set the number of registers we will need to allocate, that is: |
| 718 // - success_counter (X register) |
| 719 // - (num_registers_ - kNumCachedRegisters) (W registers) |
| 720 int num_wreg_to_allocate = num_registers_ - kNumCachedRegisters; |
| 721 // Do not allocate registers on the stack if they can all be cached. |
| 722 if (num_wreg_to_allocate < 0) { num_wreg_to_allocate = 0; } |
| 723 // Make room for the success_counter. |
| 724 num_wreg_to_allocate += 2; |
| 725 |
| 726 // Make sure the stack alignment will be respected. |
| 727 int alignment = masm_->ActivationFrameAlignment(); |
| 728 ASSERT_EQ(alignment % 16, 0); |
| 729 int align_mask = (alignment / kWRegSizeInBytes) - 1; |
| 730 num_wreg_to_allocate = (num_wreg_to_allocate + align_mask) & ~align_mask; |
| 731 |
| 732 // Check if we have space on the stack. |
| 733 Label stack_limit_hit; |
| 734 Label stack_ok; |
| 735 |
| 736 ExternalReference stack_limit = |
| 737 ExternalReference::address_of_stack_limit(isolate()); |
| 738 __ Mov(x10, Operand(stack_limit)); |
| 739 __ Ldr(x10, MemOperand(x10)); |
| 740 __ Subs(x10, csp, x10); |
| 741 |
| 742 // Handle it if the stack pointer is already below the stack limit. |
| 743 __ B(ls, &stack_limit_hit); |
| 744 |
| 745 // Check if there is room for the variable number of registers above |
| 746 // the stack limit. |
| 747 __ Cmp(x10, num_wreg_to_allocate * kWRegSizeInBytes); |
| 748 __ B(hs, &stack_ok); |
| 749 |
| 750 // Exit with OutOfMemory exception. There is not enough space on the stack |
| 751 // for our working registers. |
| 752 __ Mov(w0, EXCEPTION); |
| 753 __ B(&return_w0); |
| 754 |
| 755 __ Bind(&stack_limit_hit); |
| 756 CallCheckStackGuardState(x10); |
| 757 // If returned value is non-zero, we exit with the returned value as result. |
| 758 __ Cbnz(w0, &return_w0); |
| 759 |
| 760 __ Bind(&stack_ok); |
| 761 |
| 762 // Allocate space on stack. |
| 763 __ Claim(num_wreg_to_allocate, kWRegSizeInBytes); |
| 764 |
| 765 // Initialize success_counter with 0. |
| 766 __ Str(wzr, MemOperand(frame_pointer(), kSuccessCounter)); |
| 767 |
| 768 // Find negative length (offset of start relative to end). |
| 769 __ Sub(x10, input_start(), input_end()); |
| 770 if (masm_->emit_debug_code()) { |
| 771 // Check that the input string length is < 2^30. |
| 772 __ Neg(x11, x10); |
| 773 __ Cmp(x11, (1<<30) - 1); |
| 774 __ Check(ls, kInputStringTooLong); |
| 775 } |
| 776 __ Mov(current_input_offset(), w10); |
| 777 |
| 778 // The non-position value is used as a clearing value for the |
| 779 // capture registers, it corresponds to the position of the first character |
| 780 // minus one. |
| 781 __ Sub(non_position_value(), current_input_offset(), char_size()); |
| 782 __ Sub(non_position_value(), non_position_value(), |
| 783 Operand(start_offset(), LSL, (mode_ == UC16) ? 1 : 0)); |
| 784 // We can store this value twice in an X register for initializing |
| 785 // on-stack registers later. |
| 786 __ Orr(twice_non_position_value(), |
| 787 non_position_value().X(), |
| 788 Operand(non_position_value().X(), LSL, kWRegSize)); |
| 789 |
| 790 // Initialize code pointer register. |
| 791 __ Mov(code_pointer(), Operand(masm_->CodeObject())); |
| 792 |
| 793 Label load_char_start_regexp, start_regexp; |
| 794 // Load newline if index is at start, previous character otherwise. |
| 795 __ Cbnz(start_offset(), &load_char_start_regexp); |
| 796 __ Mov(current_character(), '\n'); |
| 797 __ B(&start_regexp); |
| 798 |
| 799 // Global regexp restarts matching here. |
| 800 __ Bind(&load_char_start_regexp); |
| 801 // Load previous char as initial value of current character register. |
| 802 LoadCurrentCharacterUnchecked(-1, 1); |
| 803 __ Bind(&start_regexp); |
| 804 // Initialize on-stack registers. |
| 805 if (num_saved_registers_ > 0) { |
| 806 ClearRegisters(0, num_saved_registers_ - 1); |
| 807 } |
| 808 |
| 809 // Initialize backtrack stack pointer. |
| 810 __ Ldr(backtrack_stackpointer(), MemOperand(frame_pointer(), kStackBase)); |
| 811 |
| 812 // Execute |
| 813 __ B(&start_label_); |
| 814 |
| 815 if (backtrack_label_.is_linked()) { |
| 816 __ Bind(&backtrack_label_); |
| 817 Backtrack(); |
| 818 } |
| 819 |
| 820 if (success_label_.is_linked()) { |
| 821 Register first_capture_start = w15; |
| 822 |
| 823 // Save captures when successful. |
| 824 __ Bind(&success_label_); |
| 825 |
| 826 if (num_saved_registers_ > 0) { |
| 827 // V8 expects the output to be an int32_t array. |
| 828 Register capture_start = w12; |
| 829 Register capture_end = w13; |
| 830 Register input_length = w14; |
| 831 |
| 832 // Copy captures to output. |
| 833 |
| 834 // Get string length. |
| 835 __ Sub(x10, input_end(), input_start()); |
| 836 if (masm_->emit_debug_code()) { |
| 837 // Check that the input string length is < 2^30. |
| 838 __ Cmp(x10, (1<<30) - 1); |
| 839 __ Check(ls, kInputStringTooLong); |
| 840 } |
| 841 // input_start has a start_offset offset on entry. We need to include |
| 842 // it when computing the length of the whole string. |
| 843 if (mode_ == UC16) { |
| 844 __ Add(input_length, start_offset(), Operand(w10, LSR, 1)); |
| 845 } else { |
| 846 __ Add(input_length, start_offset(), w10); |
| 847 } |
| 848 |
| 849 // Copy the results to the output array from the cached registers first. |
| 850 for (int i = 0; |
| 851 (i < num_saved_registers_) && (i < kNumCachedRegisters); |
| 852 i += 2) { |
| 853 __ Mov(capture_start.X(), GetCachedRegister(i)); |
| 854 __ Lsr(capture_end.X(), capture_start.X(), kWRegSize); |
| 855 if ((i == 0) && global_with_zero_length_check()) { |
| 856 // Keep capture start for the zero-length check later. |
| 857 __ Mov(first_capture_start, capture_start); |
| 858 } |
| 859 // Offsets need to be relative to the start of the string. |
| 860 if (mode_ == UC16) { |
| 861 __ Add(capture_start, input_length, Operand(capture_start, ASR, 1)); |
| 862 __ Add(capture_end, input_length, Operand(capture_end, ASR, 1)); |
| 863 } else { |
| 864 __ Add(capture_start, input_length, capture_start); |
| 865 __ Add(capture_end, input_length, capture_end); |
| 866 } |
| 867 // The output pointer advances for a possible global match. |
| 868 __ Stp(capture_start, |
| 869 capture_end, |
| 870 MemOperand(output_array(), kPointerSize, PostIndex)); |
| 871 } |
| 872 |
| 873 // Only carry on if there are more than kNumCachedRegisters capture |
| 874 // registers. |
| 875 int num_registers_left_on_stack = |
| 876 num_saved_registers_ - kNumCachedRegisters; |
| 877 if (num_registers_left_on_stack > 0) { |
| 878 Register base = x10; |
| 879 // There are always an even number of capture registers. A couple of |
| 880 // registers determine one match with two offsets. |
| 881 ASSERT_EQ(0, num_registers_left_on_stack % 2); |
| 882 __ Add(base, frame_pointer(), kFirstCaptureOnStack); |
| 883 |
| 884 // We can unroll the loop here, we should not unroll for less than 2 |
| 885 // registers. |
| 886 STATIC_ASSERT(kNumRegistersToUnroll > 2); |
| 887 if (num_registers_left_on_stack <= kNumRegistersToUnroll) { |
| 888 for (int i = 0; i < num_registers_left_on_stack / 2; i++) { |
| 889 __ Ldp(capture_end, |
| 890 capture_start, |
| 891 MemOperand(base, -kPointerSize, PostIndex)); |
| 892 if ((i == 0) && global_with_zero_length_check()) { |
| 893 // Keep capture start for the zero-length check later. |
| 894 __ Mov(first_capture_start, capture_start); |
| 895 } |
| 896 // Offsets need to be relative to the start of the string. |
| 897 if (mode_ == UC16) { |
| 898 __ Add(capture_start, |
| 899 input_length, |
| 900 Operand(capture_start, ASR, 1)); |
| 901 __ Add(capture_end, input_length, Operand(capture_end, ASR, 1)); |
| 902 } else { |
| 903 __ Add(capture_start, input_length, capture_start); |
| 904 __ Add(capture_end, input_length, capture_end); |
| 905 } |
| 906 // The output pointer advances for a possible global match. |
| 907 __ Stp(capture_start, |
| 908 capture_end, |
| 909 MemOperand(output_array(), kPointerSize, PostIndex)); |
| 910 } |
| 911 } else { |
| 912 Label loop, start; |
| 913 __ Mov(x11, num_registers_left_on_stack); |
| 914 |
| 915 __ Ldp(capture_end, |
| 916 capture_start, |
| 917 MemOperand(base, -kPointerSize, PostIndex)); |
| 918 if (global_with_zero_length_check()) { |
| 919 __ Mov(first_capture_start, capture_start); |
| 920 } |
| 921 __ B(&start); |
| 922 |
| 923 __ Bind(&loop); |
| 924 __ Ldp(capture_end, |
| 925 capture_start, |
| 926 MemOperand(base, -kPointerSize, PostIndex)); |
| 927 __ Bind(&start); |
| 928 if (mode_ == UC16) { |
| 929 __ Add(capture_start, input_length, Operand(capture_start, ASR, 1)); |
| 930 __ Add(capture_end, input_length, Operand(capture_end, ASR, 1)); |
| 931 } else { |
| 932 __ Add(capture_start, input_length, capture_start); |
| 933 __ Add(capture_end, input_length, capture_end); |
| 934 } |
| 935 // The output pointer advances for a possible global match. |
| 936 __ Stp(capture_start, |
| 937 capture_end, |
| 938 MemOperand(output_array(), kPointerSize, PostIndex)); |
| 939 __ Sub(x11, x11, 2); |
| 940 __ Cbnz(x11, &loop); |
| 941 } |
| 942 } |
| 943 } |
| 944 |
| 945 if (global()) { |
| 946 Register success_counter = w0; |
| 947 Register output_size = x10; |
| 948 // Restart matching if the regular expression is flagged as global. |
| 949 |
| 950 // Increment success counter. |
| 951 __ Ldr(success_counter, MemOperand(frame_pointer(), kSuccessCounter)); |
| 952 __ Add(success_counter, success_counter, 1); |
| 953 __ Str(success_counter, MemOperand(frame_pointer(), kSuccessCounter)); |
| 954 |
| 955 // Capture results have been stored, so the number of remaining global |
| 956 // output registers is reduced by the number of stored captures. |
| 957 __ Ldr(output_size, MemOperand(frame_pointer(), kOutputSize)); |
| 958 __ Sub(output_size, output_size, num_saved_registers_); |
| 959 // Check whether we have enough room for another set of capture results. |
| 960 __ Cmp(output_size, num_saved_registers_); |
| 961 __ B(lt, &return_w0); |
| 962 |
| 963 // The output pointer is already set to the next field in the output |
| 964 // array. |
| 965 // Update output size on the frame before we restart matching. |
| 966 __ Str(output_size, MemOperand(frame_pointer(), kOutputSize)); |
| 967 |
| 968 if (global_with_zero_length_check()) { |
| 969 // Special case for zero-length matches. |
| 970 __ Cmp(current_input_offset(), first_capture_start); |
| 971 // Not a zero-length match, restart. |
| 972 __ B(ne, &load_char_start_regexp); |
| 973 // Offset from the end is zero if we already reached the end. |
| 974 __ Cbz(current_input_offset(), &return_w0); |
| 975 // Advance current position after a zero-length match. |
| 976 __ Add(current_input_offset(), |
| 977 current_input_offset(), |
| 978 Operand((mode_ == UC16) ? 2 : 1)); |
| 979 } |
| 980 |
| 981 __ B(&load_char_start_regexp); |
| 982 } else { |
| 983 __ Mov(w0, SUCCESS); |
| 984 } |
| 985 } |
| 986 |
| 987 if (exit_label_.is_linked()) { |
| 988 // Exit and return w0 |
| 989 __ Bind(&exit_label_); |
| 990 if (global()) { |
| 991 __ Ldr(w0, MemOperand(frame_pointer(), kSuccessCounter)); |
| 992 } |
| 993 } |
| 994 |
| 995 __ Bind(&return_w0); |
| 996 |
| 997 // Set stack pointer back to first register to retain |
| 998 ASSERT(csp.Is(__ StackPointer())); |
| 999 __ Mov(csp, fp); |
| 1000 |
| 1001 // Restore registers. |
| 1002 __ PopCPURegList(registers_to_retain); |
| 1003 |
| 1004 __ Ret(); |
| 1005 |
| 1006 Label exit_with_exception; |
| 1007 // Registers x0 to x7 are used to store the first captures, they need to be |
| 1008 // retained over calls to C++ code. |
| 1009 CPURegList cached_registers(CPURegister::kRegister, kXRegSize, 0, 7); |
| 1010 ASSERT((cached_registers.Count() * 2) == kNumCachedRegisters); |
| 1011 |
| 1012 if (check_preempt_label_.is_linked()) { |
| 1013 __ Bind(&check_preempt_label_); |
| 1014 SaveLinkRegister(); |
| 1015 // The cached registers need to be retained. |
| 1016 __ PushCPURegList(cached_registers); |
| 1017 CallCheckStackGuardState(x10); |
| 1018 // Returning from the regexp code restores the stack (csp <- fp) |
| 1019 // so we don't need to drop the link register from it before exiting. |
| 1020 __ Cbnz(w0, &return_w0); |
| 1021 // Reset the cached registers. |
| 1022 __ PopCPURegList(cached_registers); |
| 1023 RestoreLinkRegister(); |
| 1024 __ Ret(); |
| 1025 } |
| 1026 |
| 1027 if (stack_overflow_label_.is_linked()) { |
| 1028 __ Bind(&stack_overflow_label_); |
| 1029 SaveLinkRegister(); |
| 1030 // The cached registers need to be retained. |
| 1031 __ PushCPURegList(cached_registers); |
| 1032 // Call GrowStack(backtrack_stackpointer(), &stack_base) |
| 1033 __ Mov(x2, Operand(ExternalReference::isolate_address(isolate()))); |
| 1034 __ Add(x1, frame_pointer(), kStackBase); |
| 1035 __ Mov(x0, backtrack_stackpointer()); |
| 1036 ExternalReference grow_stack = |
| 1037 ExternalReference::re_grow_stack(isolate()); |
| 1038 __ CallCFunction(grow_stack, 3); |
| 1039 // If return NULL, we have failed to grow the stack, and |
| 1040 // must exit with a stack-overflow exception. |
| 1041 // Returning from the regexp code restores the stack (csp <- fp) |
| 1042 // so we don't need to drop the link register from it before exiting. |
| 1043 __ Cbz(w0, &exit_with_exception); |
| 1044 // Otherwise use return value as new stack pointer. |
| 1045 __ Mov(backtrack_stackpointer(), x0); |
| 1046 // Reset the cached registers. |
| 1047 __ PopCPURegList(cached_registers); |
| 1048 RestoreLinkRegister(); |
| 1049 __ Ret(); |
| 1050 } |
| 1051 |
| 1052 if (exit_with_exception.is_linked()) { |
| 1053 __ Bind(&exit_with_exception); |
| 1054 __ Mov(w0, EXCEPTION); |
| 1055 __ B(&return_w0); |
| 1056 } |
| 1057 |
| 1058 CodeDesc code_desc; |
| 1059 masm_->GetCode(&code_desc); |
| 1060 Handle<Code> code = isolate()->factory()->NewCode( |
| 1061 code_desc, Code::ComputeFlags(Code::REGEXP), masm_->CodeObject()); |
| 1062 PROFILE(masm_->isolate(), RegExpCodeCreateEvent(*code, *source)); |
| 1063 return Handle<HeapObject>::cast(code); |
| 1064 } |
| 1065 |
| 1066 |
// Emit an unconditional jump to |to|, or backtrack if |to| is NULL
// (BranchOrBacktrack treats the al condition as "always").
void RegExpMacroAssemblerA64::GoTo(Label* to) {
  BranchOrBacktrack(al, to);
}
| 1070 |
| 1071 void RegExpMacroAssemblerA64::IfRegisterGE(int reg, |
| 1072 int comparand, |
| 1073 Label* if_ge) { |
| 1074 Register to_compare = GetRegister(reg, w10); |
| 1075 CompareAndBranchOrBacktrack(to_compare, comparand, ge, if_ge); |
| 1076 } |
| 1077 |
| 1078 |
| 1079 void RegExpMacroAssemblerA64::IfRegisterLT(int reg, |
| 1080 int comparand, |
| 1081 Label* if_lt) { |
| 1082 Register to_compare = GetRegister(reg, w10); |
| 1083 CompareAndBranchOrBacktrack(to_compare, comparand, lt, if_lt); |
| 1084 } |
| 1085 |
| 1086 |
| 1087 void RegExpMacroAssemblerA64::IfRegisterEqPos(int reg, |
| 1088 Label* if_eq) { |
| 1089 Register to_compare = GetRegister(reg, w10); |
| 1090 __ Cmp(to_compare, current_input_offset()); |
| 1091 BranchOrBacktrack(eq, if_eq); |
| 1092 } |
| 1093 |
// Identify this backend so the irregexp engine knows which native
// implementation generated the code.
RegExpMacroAssembler::IrregexpImplementation
    RegExpMacroAssemblerA64::Implementation() {
  return kA64Implementation;
}
| 1098 |
| 1099 |
// Load |characters| characters starting at |cp_offset| from the current
// position into the current-character register. When |check_bounds| is set,
// first verify that the last character read is still inside the input and
// branch to |on_end_of_input| otherwise.
void RegExpMacroAssemblerA64::LoadCurrentCharacter(int cp_offset,
                                                   Label* on_end_of_input,
                                                   bool check_bounds,
                                                   int characters) {
  // TODO(pielan): Make sure long strings are caught before this, and not
  // just asserted in debug mode.
  ASSERT(cp_offset >= -1);  // ^ and \b can look behind one character.
  // Be sane! (And ensure that an int32_t can be used to index the string)
  ASSERT(cp_offset < (1<<30));
  if (check_bounds) {
    // Check the position of the last character to be loaded.
    CheckPosition(cp_offset + characters - 1, on_end_of_input);
  }
  LoadCurrentCharacterUnchecked(cp_offset, characters);
}
| 1114 |
| 1115 |
// Restore the current input position from the backtrack stack.
void RegExpMacroAssemblerA64::PopCurrentPosition() {
  Pop(current_input_offset());
}
| 1119 |
| 1120 |
// Pop a value from the backtrack stack into capture register
// |register_index|, going through the w10 scratch register.
void RegExpMacroAssemblerA64::PopRegister(int register_index) {
  Pop(w10);
  StoreRegister(register_index, w10);
}
| 1125 |
| 1126 |
| 1127 void RegExpMacroAssemblerA64::PushBacktrack(Label* label) { |
| 1128 if (label->is_bound()) { |
| 1129 int target = label->pos(); |
| 1130 __ Mov(w10, target + Code::kHeaderSize - kHeapObjectTag); |
| 1131 } else { |
| 1132 __ Adr(x10, label); |
| 1133 __ Sub(x10, x10, code_pointer()); |
| 1134 if (masm_->emit_debug_code()) { |
| 1135 __ Cmp(x10, kWRegMask); |
| 1136 // The code offset has to fit in a W register. |
| 1137 __ Check(ls, kOffsetOutOfRange); |
| 1138 } |
| 1139 } |
| 1140 Push(w10); |
| 1141 CheckStackLimit(); |
| 1142 } |
| 1143 |
| 1144 |
// Save the current input position on the backtrack stack.
void RegExpMacroAssemblerA64::PushCurrentPosition() {
  Push(current_input_offset());
}
| 1148 |
| 1149 |
| 1150 void RegExpMacroAssemblerA64::PushRegister(int register_index, |
| 1151 StackCheckFlag check_stack_limit) { |
| 1152 Register to_push = GetRegister(register_index, w10); |
| 1153 Push(to_push); |
| 1154 if (check_stack_limit) CheckStackLimit(); |
| 1155 } |
| 1156 |
| 1157 |
| 1158 void RegExpMacroAssemblerA64::ReadCurrentPositionFromRegister(int reg) { |
| 1159 Register cached_register; |
| 1160 RegisterState register_state = GetRegisterState(reg); |
| 1161 switch (register_state) { |
| 1162 case STACKED: |
| 1163 __ Ldr(current_input_offset(), register_location(reg)); |
| 1164 break; |
| 1165 case CACHED_LSW: |
| 1166 cached_register = GetCachedRegister(reg); |
| 1167 __ Mov(current_input_offset(), cached_register.W()); |
| 1168 break; |
| 1169 case CACHED_MSW: |
| 1170 cached_register = GetCachedRegister(reg); |
| 1171 __ Lsr(current_input_offset().X(), cached_register, kWRegSize); |
| 1172 break; |
| 1173 default: |
| 1174 UNREACHABLE(); |
| 1175 break; |
| 1176 } |
| 1177 } |
| 1178 |
| 1179 |
// Restore the backtrack stack pointer from capture register |reg|, which
// holds an offset relative to the stack base saved in the frame.
void RegExpMacroAssemblerA64::ReadStackPointerFromRegister(int reg) {
  Register read_from = GetRegister(reg, w10);
  __ Ldr(x11, MemOperand(frame_pointer(), kStackBase));
  // The stored offset is a signed 32-bit value; sign-extend before adding.
  __ Add(backtrack_stackpointer(), x11, Operand(read_from, SXTW));
}
| 1185 |
| 1186 |
// Move the current position to |by| characters back from the end of the
// input, unless it is already closer to the end than that.
void RegExpMacroAssemblerA64::SetCurrentPositionFromEnd(int by) {
  Label after_position;
  // current_input_offset() is negative (offset from the end); if it is
  // already >= -by * char_size, leave it unchanged.
  __ Cmp(current_input_offset(), -by * char_size());
  __ B(ge, &after_position);
  __ Mov(current_input_offset(), -by * char_size());
  // On RegExp code entry (where this operation is used), the character before
  // the current position is expected to be already loaded.
  // We have advanced the position, so it's safe to read backwards.
  LoadCurrentCharacterUnchecked(-1, 1);
  __ Bind(&after_position);
}
| 1198 |
| 1199 |
| 1200 void RegExpMacroAssemblerA64::SetRegister(int register_index, int to) { |
| 1201 ASSERT(register_index >= num_saved_registers_); // Reserved for positions! |
| 1202 Register set_to = wzr; |
| 1203 if (to != 0) { |
| 1204 set_to = w10; |
| 1205 __ Mov(set_to, to); |
| 1206 } |
| 1207 StoreRegister(register_index, set_to); |
| 1208 } |
| 1209 |
| 1210 |
// Jump to the success handler. Returns whether the regexp is global, i.e.
// whether matching should restart after a successful match.
bool RegExpMacroAssemblerA64::Succeed() {
  __ B(&success_label_);
  return global();
}
| 1215 |
| 1216 |
| 1217 void RegExpMacroAssemblerA64::WriteCurrentPositionToRegister(int reg, |
| 1218 int cp_offset) { |
| 1219 Register position = current_input_offset(); |
| 1220 if (cp_offset != 0) { |
| 1221 position = w10; |
| 1222 __ Add(position, current_input_offset(), cp_offset * char_size()); |
| 1223 } |
| 1224 StoreRegister(reg, position); |
| 1225 } |
| 1226 |
| 1227 |
// Reset capture registers [reg_from, reg_to] to the non-position value
// (position of the character before the start, see GetCode). Cached
// registers are cleared in 64-bit pairs where possible; the rest live on
// the stack frame.
void RegExpMacroAssemblerA64::ClearRegisters(int reg_from, int reg_to) {
  ASSERT(reg_from <= reg_to);
  int num_registers = reg_to - reg_from + 1;

  // If the first capture register is cached in a hardware register but not
  // aligned on a 64-bit one, we need to clear the first one specifically.
  if ((reg_from < kNumCachedRegisters) && ((reg_from % 2) != 0)) {
    StoreRegister(reg_from, non_position_value());
    num_registers--;
    reg_from++;
  }

  // Clear cached registers in pairs as far as possible.
  while ((num_registers >= 2) && (reg_from < kNumCachedRegisters)) {
    ASSERT(GetRegisterState(reg_from) == CACHED_LSW);
    // One 64-bit move clears both halves of the cached X register.
    __ Mov(GetCachedRegister(reg_from), twice_non_position_value());
    reg_from += 2;
    num_registers -= 2;
  }

  // An odd leftover register (cached MSW or first stacked one) is cleared
  // individually so the remaining count is even.
  if ((num_registers % 2) == 1) {
    StoreRegister(reg_from, non_position_value());
    num_registers--;
    reg_from++;
  }

  if (num_registers > 0) {
    // If there are some remaining registers, they are stored on the stack.
    ASSERT(reg_from >= kNumCachedRegisters);

    // Move down the indexes of the registers on stack to get the correct offset
    // in memory.
    reg_from -= kNumCachedRegisters;
    reg_to -= kNumCachedRegisters;
    // We should not unroll the loop for less than 2 registers.
    STATIC_ASSERT(kNumRegistersToUnroll > 2);
    // We position the base pointer to (reg_from + 1).
    int base_offset = kFirstRegisterOnStack -
        kWRegSizeInBytes - (kWRegSizeInBytes * reg_from);
    if (num_registers > kNumRegistersToUnroll) {
      // Too many registers to unroll: emit a counted store loop, clearing
      // two W registers (one X store) per iteration.
      Register base = x10;
      __ Add(base, frame_pointer(), base_offset);

      Label loop;
      __ Mov(x11, num_registers);
      __ Bind(&loop);
      __ Str(twice_non_position_value(),
             MemOperand(base, -kPointerSize, PostIndex));
      __ Sub(x11, x11, 2);
      __ Cbnz(x11, &loop);
    } else {
      // Small count: emit unrolled stores at decreasing frame offsets.
      for (int i = reg_from; i <= reg_to; i += 2) {
        __ Str(twice_non_position_value(),
               MemOperand(frame_pointer(), base_offset));
        base_offset -= kWRegSizeInBytes * 2;
      }
    }
  }
}
| 1287 |
| 1288 |
// Save the backtrack stack pointer into capture register |reg|, stored as an
// offset from the stack base kept in the frame (so it survives stack moves).
void RegExpMacroAssemblerA64::WriteStackPointerToRegister(int reg) {
  __ Ldr(x10, MemOperand(frame_pointer(), kStackBase));
  __ Sub(x10, backtrack_stackpointer(), x10);
  if (masm_->emit_debug_code()) {
    __ Cmp(x10, Operand(w10, SXTW));
    // The stack offset needs to fit in a W register.
    __ Check(eq, kOffsetOutOfRange);
  }
  StoreRegister(reg, w10);
}
| 1299 |
| 1300 |
// Helper function for reading (or, via the returned reference, writing) a
// value out of a stack frame at the given byte offset.
template <typename T>
static T& frame_entry(Address re_frame, int frame_offset) {
  return *reinterpret_cast<T*>(re_frame + frame_offset);
}
| 1306 |
| 1307 |
// Called from generated code when the stack guard is triggered. Handles real
// stack overflows, interrupts, and GC: if the code object or the subject
// string moved, the return address and the input pointers in the regexp
// frame are fixed up in place. Returns 0 to continue, or EXCEPTION/RETRY.
int RegExpMacroAssemblerA64::CheckStackGuardState(Address* return_address,
                                                  Code* re_code,
                                                  Address re_frame,
                                                  int start_offset,
                                                  const byte** input_start,
                                                  const byte** input_end) {
  Isolate* isolate = frame_entry<Isolate*>(re_frame, kIsolate);
  if (isolate->stack_guard()->IsStackOverflow()) {
    isolate->StackOverflow();
    return EXCEPTION;
  }

  // If not real stack overflow the stack guard was used to interrupt
  // execution for another purpose.

  // If this is a direct call from JavaScript retry the RegExp forcing the call
  // through the runtime system. Currently the direct call cannot handle a GC.
  if (frame_entry<int>(re_frame, kDirectCall) == 1) {
    return RETRY;
  }

  // Prepare for possible GC.
  HandleScope handles(isolate);
  Handle<Code> code_handle(re_code);

  Handle<String> subject(frame_entry<String*>(re_frame, kInput));

  // Current string.
  bool is_ascii = subject->IsOneByteRepresentationUnderneath();

  ASSERT(re_code->instruction_start() <= *return_address);
  ASSERT(*return_address <=
          re_code->instruction_start() + re_code->instruction_size());

  // This may trigger a GC that moves re_code and/or the subject string.
  MaybeObject* result = Execution::HandleStackGuardInterrupt(isolate);

  if (*code_handle != re_code) {  // Return address no longer valid
    int delta = code_handle->address() - re_code->address();
    // Overwrite the return address on the stack.
    *return_address += delta;
  }

  if (result->IsException()) {
    return EXCEPTION;
  }

  Handle<String> subject_tmp = subject;
  int slice_offset = 0;

  // Extract the underlying string and the slice offset.
  if (StringShape(*subject_tmp).IsCons()) {
    subject_tmp = Handle<String>(ConsString::cast(*subject_tmp)->first());
  } else if (StringShape(*subject_tmp).IsSliced()) {
    SlicedString* slice = SlicedString::cast(*subject_tmp);
    subject_tmp = Handle<String>(slice->parent());
    slice_offset = slice->offset();
  }

  // String might have changed.
  if (subject_tmp->IsOneByteRepresentation() != is_ascii) {
    // If we changed between an ASCII and an UC16 string, the specialized
    // code cannot be used, and we need to restart regexp matching from
    // scratch (including, potentially, compiling a new version of the code).
    return RETRY;
  }

  // Otherwise, the content of the string might have moved. It must still
  // be a sequential or external string with the same content.
  // Update the start and end pointers in the stack frame to the current
  // location (whether it has actually moved or not).
  ASSERT(StringShape(*subject_tmp).IsSequential() ||
          StringShape(*subject_tmp).IsExternal());

  // The original start address of the characters to match.
  const byte* start_address = *input_start;

  // Find the current start address of the same character at the current string
  // position.
  const byte* new_address = StringCharacterPosition(*subject_tmp,
      start_offset + slice_offset);

  if (start_address != new_address) {
    // If there is a difference, update the object pointer and start and end
    // addresses in the RegExp stack frame to match the new value.
    const byte* end_address = *input_end;
    int byte_length = static_cast<int>(end_address - start_address);
    frame_entry<const String*>(re_frame, kInput) = *subject;
    *input_start = new_address;
    *input_end = new_address + byte_length;
  } else if (frame_entry<const String*>(re_frame, kInput) != *subject) {
    // Subject string might have been a ConsString that underwent
    // short-circuiting during GC. That will not change start_address but
    // will change pointer inside the subject handle.
    frame_entry<const String*>(re_frame, kInput) = *subject;
  }

  return 0;
}
| 1406 |
| 1407 |
// Branch to |on_outside_input| (or backtrack if it is NULL) when reading at
// |cp_offset| would fall outside the input. current_input_offset() is
// negative (relative to the end), so "outside" means >= -cp_offset * size.
void RegExpMacroAssemblerA64::CheckPosition(int cp_offset,
                                            Label* on_outside_input) {
  CompareAndBranchOrBacktrack(current_input_offset(),
                              -cp_offset * char_size(),
                              ge,
                              on_outside_input);
}
| 1415 |
| 1416 |
// Whether multi-character loads may use unaligned accesses. Disabled in
// slow-safe mode.
bool RegExpMacroAssemblerA64::CanReadUnaligned() {
  // TODO(pielan): See whether or not we should disable unaligned accesses.
  return !slow_safe();
}
| 1421 |
| 1422 |
| 1423 // Private methods: |
| 1424 |
// Emit a call to the C++ CheckStackGuardState function through the
// DirectCEntry stub. Sets up the six arguments (x0-x5, w3) expected by
// CheckStackGuardState, passing input_start/input_end by pointer on the
// stack so the callee can rewrite them if the string moved.
void RegExpMacroAssemblerA64::CallCheckStackGuardState(Register scratch) {
  // Allocate space on the stack to store the return address. The
  // CheckStackGuardState C++ function will override it if the code
  // moved. Allocate extra space for 2 arguments passed by pointers.
  // AAPCS64 requires the stack to be 16 byte aligned.
  int alignment = masm_->ActivationFrameAlignment();
  ASSERT_EQ(alignment % 16, 0);
  int align_mask = (alignment / kXRegSizeInBytes) - 1;
  int xreg_to_claim = (3 + align_mask) & ~align_mask;

  ASSERT(csp.Is(__ StackPointer()));
  __ Claim(xreg_to_claim);

  // CheckStackGuardState needs the end and start addresses of the input string.
  __ Poke(input_end(), 2 * kPointerSize);
  __ Add(x5, csp, 2 * kPointerSize);
  __ Poke(input_start(), kPointerSize);
  __ Add(x4, csp, kPointerSize);

  __ Mov(w3, start_offset());
  // RegExp code frame pointer.
  __ Mov(x2, frame_pointer());
  // Code* of self.
  __ Mov(x1, Operand(masm_->CodeObject()));

  // We need to pass a pointer to the return address as first argument.
  // The DirectCEntry stub will place the return address on the stack before
  // calling so the stack pointer will point to it.
  __ Mov(x0, csp);

  ExternalReference check_stack_guard_state =
      ExternalReference::re_check_stack_guard_state(isolate());
  __ Mov(scratch, Operand(check_stack_guard_state));
  DirectCEntryStub stub;
  stub.GenerateCall(masm_, scratch);

  // The input string may have been moved in memory, we need to reload it.
  __ Peek(input_start(), kPointerSize);
  __ Peek(input_end(), 2 * kPointerSize);

  ASSERT(csp.Is(__ StackPointer()));
  __ Drop(xreg_to_claim);

  // Reload the Code pointer.
  __ Mov(code_pointer(), Operand(masm_->CodeObject()));
}
| 1471 |
| 1472 void RegExpMacroAssemblerA64::BranchOrBacktrack(Condition condition, |
| 1473 Label* to) { |
| 1474 if (condition == al) { // Unconditional. |
| 1475 if (to == NULL) { |
| 1476 Backtrack(); |
| 1477 return; |
| 1478 } |
| 1479 __ B(to); |
| 1480 return; |
| 1481 } |
| 1482 if (to == NULL) { |
| 1483 to = &backtrack_label_; |
| 1484 } |
| 1485 // TODO(ulan): do direct jump when jump distance is known and fits in imm19. |
| 1486 Condition inverted_condition = InvertCondition(condition); |
| 1487 Label no_branch; |
| 1488 __ B(inverted_condition, &no_branch); |
| 1489 __ B(to); |
| 1490 __ Bind(&no_branch); |
| 1491 } |
| 1492 |
| 1493 void RegExpMacroAssemblerA64::CompareAndBranchOrBacktrack(Register reg, |
| 1494 int immediate, |
| 1495 Condition condition, |
| 1496 Label* to) { |
| 1497 if ((immediate == 0) && ((condition == eq) || (condition == ne))) { |
| 1498 if (to == NULL) { |
| 1499 to = &backtrack_label_; |
| 1500 } |
| 1501 // TODO(ulan): do direct jump when jump distance is known and fits in imm19. |
| 1502 Label no_branch; |
| 1503 if (condition == eq) { |
| 1504 __ Cbnz(reg, &no_branch); |
| 1505 } else { |
| 1506 __ Cbz(reg, &no_branch); |
| 1507 } |
| 1508 __ B(to); |
| 1509 __ Bind(&no_branch); |
| 1510 } else { |
| 1511 __ Cmp(reg, immediate); |
| 1512 BranchOrBacktrack(condition, to); |
| 1513 } |
| 1514 } |
| 1515 |
| 1516 |
// Emit a check of the isolate's stack limit; call the preemption handler
// when the machine stack pointer is at or below it.
void RegExpMacroAssemblerA64::CheckPreemption() {
  // Check for preemption.
  ExternalReference stack_limit =
      ExternalReference::address_of_stack_limit(isolate());
  __ Mov(x10, Operand(stack_limit));
  __ Ldr(x10, MemOperand(x10));
  ASSERT(csp.Is(__ StackPointer()));
  __ Cmp(csp, x10);
  CallIf(&check_preempt_label_, ls);
}
| 1527 |
| 1528 |
// Emit a check of the regexp (backtrack) stack limit; call the overflow
// handler when the backtrack stack pointer is at or below it.
void RegExpMacroAssemblerA64::CheckStackLimit() {
  ExternalReference stack_limit =
      ExternalReference::address_of_regexp_stack_limit(isolate());
  __ Mov(x10, Operand(stack_limit));
  __ Ldr(x10, MemOperand(x10));
  __ Cmp(backtrack_stackpointer(), x10);
  CallIf(&stack_overflow_label_, ls);
}
| 1537 |
| 1538 |
// Push a 32-bit value onto the backtrack stack (grows downwards,
// pre-indexed store).
void RegExpMacroAssemblerA64::Push(Register source) {
  ASSERT(source.Is32Bits());
  ASSERT(!source.is(backtrack_stackpointer()));
  __ Str(source,
         MemOperand(backtrack_stackpointer(),
                    -static_cast<int>(kWRegSizeInBytes),
                    PreIndex));
}
| 1547 |
| 1548 |
// Pop a 32-bit value from the backtrack stack (post-indexed load).
void RegExpMacroAssemblerA64::Pop(Register target) {
  ASSERT(target.Is32Bits());
  ASSERT(!target.is(backtrack_stackpointer()));
  __ Ldr(target,
         MemOperand(backtrack_stackpointer(), kWRegSizeInBytes, PostIndex));
}
| 1555 |
| 1556 |
// Return the X register caching capture register |register_index|.
// Two consecutive 32-bit capture registers share one 64-bit register.
Register RegExpMacroAssemblerA64::GetCachedRegister(int register_index) {
  ASSERT(register_index < kNumCachedRegisters);
  return Register::Create(register_index / 2, kXRegSize);
}
| 1561 |
| 1562 |
| 1563 Register RegExpMacroAssemblerA64::GetRegister(int register_index, |
| 1564 Register maybe_result) { |
| 1565 ASSERT(maybe_result.Is32Bits()); |
| 1566 ASSERT(register_index >= 0); |
| 1567 if (num_registers_ <= register_index) { |
| 1568 num_registers_ = register_index + 1; |
| 1569 } |
| 1570 Register result; |
| 1571 RegisterState register_state = GetRegisterState(register_index); |
| 1572 switch (register_state) { |
| 1573 case STACKED: |
| 1574 __ Ldr(maybe_result, register_location(register_index)); |
| 1575 result = maybe_result; |
| 1576 break; |
| 1577 case CACHED_LSW: |
| 1578 result = GetCachedRegister(register_index).W(); |
| 1579 break; |
| 1580 case CACHED_MSW: |
| 1581 __ Lsr(maybe_result.X(), GetCachedRegister(register_index), kWRegSize); |
| 1582 result = maybe_result; |
| 1583 break; |
| 1584 default: |
| 1585 UNREACHABLE(); |
| 1586 break; |
| 1587 } |
| 1588 ASSERT(result.Is32Bits()); |
| 1589 return result; |
| 1590 } |
| 1591 |
| 1592 |
| 1593 void RegExpMacroAssemblerA64::StoreRegister(int register_index, |
| 1594 Register source) { |
| 1595 ASSERT(source.Is32Bits()); |
| 1596 ASSERT(register_index >= 0); |
| 1597 if (num_registers_ <= register_index) { |
| 1598 num_registers_ = register_index + 1; |
| 1599 } |
| 1600 |
| 1601 Register cached_register; |
| 1602 RegisterState register_state = GetRegisterState(register_index); |
| 1603 switch (register_state) { |
| 1604 case STACKED: |
| 1605 __ Str(source, register_location(register_index)); |
| 1606 break; |
| 1607 case CACHED_LSW: |
| 1608 cached_register = GetCachedRegister(register_index); |
| 1609 if (!source.Is(cached_register.W())) { |
| 1610 __ Bfi(cached_register, source.X(), 0, kWRegSize); |
| 1611 } |
| 1612 break; |
| 1613 case CACHED_MSW: |
| 1614 cached_register = GetCachedRegister(register_index); |
| 1615 __ Bfi(cached_register, source.X(), kWRegSize, kWRegSize); |
| 1616 break; |
| 1617 default: |
| 1618 UNREACHABLE(); |
| 1619 break; |
| 1620 } |
| 1621 } |
| 1622 |
| 1623 |
// Emit a conditional call: branch-and-link to |to| only when |condition|
// holds (an al condition calls unconditionally).
void RegExpMacroAssemblerA64::CallIf(Label* to, Condition condition) {
  Label skip_call;
  if (condition != al) __ B(&skip_call, InvertCondition(condition));
  __ Bl(to);
  __ Bind(&skip_call);
}
| 1630 |
| 1631 |
// Pop lr (saved as a code-object-relative offset by SaveLinkRegister) and
// rebase it on the current code object address.
void RegExpMacroAssemblerA64::RestoreLinkRegister() {
  ASSERT(csp.Is(__ StackPointer()));
  __ Pop(lr, xzr);
  __ Add(lr, lr, Operand(masm_->CodeObject()));
}
| 1637 |
| 1638 |
// Push lr as an offset relative to the code object, so the saved value stays
// valid if the code object moves. xzr pads the pair to keep csp aligned.
void RegExpMacroAssemblerA64::SaveLinkRegister() {
  ASSERT(csp.Is(__ StackPointer()));
  __ Sub(lr, lr, Operand(masm_->CodeObject()));
  __ Push(xzr, lr);
}
| 1644 |
| 1645 |
// Frame location of a capture register that is spilled on the stack (i.e.
// not one of the cached ones). Registers are laid out downwards from
// kFirstRegisterOnStack, one W slot each.
MemOperand RegExpMacroAssemblerA64::register_location(int register_index) {
  ASSERT(register_index < (1<<30));
  ASSERT(register_index >= kNumCachedRegisters);
  if (num_registers_ <= register_index) {
    num_registers_ = register_index + 1;
  }
  register_index -= kNumCachedRegisters;
  int offset = kFirstRegisterOnStack - register_index * kWRegSizeInBytes;
  return MemOperand(frame_pointer(), offset);
}
| 1656 |
| 1657 MemOperand RegExpMacroAssemblerA64::capture_location(int register_index, |
| 1658 Register scratch) { |
| 1659 ASSERT(register_index < (1<<30)); |
| 1660 ASSERT(register_index < num_saved_registers_); |
| 1661 ASSERT(register_index >= kNumCachedRegisters); |
| 1662 ASSERT_EQ(register_index % 2, 0); |
| 1663 register_index -= kNumCachedRegisters; |
| 1664 int offset = kFirstCaptureOnStack - register_index * kWRegSizeInBytes; |
| 1665 // capture_location is used with Stp instructions to load/store 2 registers. |
| 1666 // The immediate field in the encoding is limited to 7 bits (signed). |
| 1667 if (is_int7(offset)) { |
| 1668 return MemOperand(frame_pointer(), offset); |
| 1669 } else { |
| 1670 __ Add(scratch, frame_pointer(), offset); |
| 1671 return MemOperand(scratch); |
| 1672 } |
| 1673 } |
| 1674 |
| 1675 void RegExpMacroAssemblerA64::LoadCurrentCharacterUnchecked(int cp_offset, |
| 1676 int characters) { |
| 1677 Register offset = current_input_offset(); |
| 1678 |
| 1679 // The ldr, str, ldrh, strh instructions can do unaligned accesses, if the CPU |
| 1680 // and the operating system running on the target allow it. |
| 1681 // If unaligned load/stores are not supported then this function must only |
| 1682 // be used to load a single character at a time. |
| 1683 |
| 1684 // ARMv8 supports unaligned accesses but V8 or the kernel can decide to |
| 1685 // disable it. |
| 1686 // TODO(pielan): See whether or not we should disable unaligned accesses. |
| 1687 if (!CanReadUnaligned()) { |
| 1688 ASSERT(characters == 1); |
| 1689 } |
| 1690 |
| 1691 if (cp_offset != 0) { |
| 1692 if (masm_->emit_debug_code()) { |
| 1693 __ Mov(x10, cp_offset * char_size()); |
| 1694 __ Add(x10, x10, Operand(current_input_offset(), SXTW)); |
| 1695 __ Cmp(x10, Operand(w10, SXTW)); |
| 1696 // The offset needs to fit in a W register. |
| 1697 __ Check(eq, kOffsetOutOfRange); |
| 1698 } else { |
| 1699 __ Add(w10, current_input_offset(), cp_offset * char_size()); |
| 1700 } |
| 1701 offset = w10; |
| 1702 } |
| 1703 |
| 1704 if (mode_ == ASCII) { |
| 1705 if (characters == 4) { |
| 1706 __ Ldr(current_character(), MemOperand(input_end(), offset, SXTW)); |
| 1707 } else if (characters == 2) { |
| 1708 __ Ldrh(current_character(), MemOperand(input_end(), offset, SXTW)); |
| 1709 } else { |
| 1710 ASSERT(characters == 1); |
| 1711 __ Ldrb(current_character(), MemOperand(input_end(), offset, SXTW)); |
| 1712 } |
| 1713 } else { |
| 1714 ASSERT(mode_ == UC16); |
| 1715 if (characters == 2) { |
| 1716 __ Ldr(current_character(), MemOperand(input_end(), offset, SXTW)); |
| 1717 } else { |
| 1718 ASSERT(characters == 1); |
| 1719 __ Ldrh(current_character(), MemOperand(input_end(), offset, SXTW)); |
| 1720 } |
| 1721 } |
| 1722 } |
| 1723 |
| 1724 #endif // V8_INTERPRETED_REGEXP |
| 1725 |
| 1726 }} // namespace v8::internal |
| 1727 |
| 1728 #endif // V8_TARGET_ARCH_A64 |
| OLD | NEW |