Chromium Code Reviews

Side by Side Diff: src/a64/regexp-macro-assembler-a64.cc

Issue 148293020: Merge experimental/a64 to bleeding_edge. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Remove ARM from OWNERS Created 6 years, 10 months ago
1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28 #include "v8.h"
29
30 #if V8_TARGET_ARCH_A64
31
32 #include "cpu-profiler.h"
33 #include "unicode.h"
34 #include "log.h"
35 #include "code-stubs.h"
36 #include "regexp-stack.h"
37 #include "macro-assembler.h"
38 #include "regexp-macro-assembler.h"
39 #include "a64/regexp-macro-assembler-a64.h"
40
41 namespace v8 {
42 namespace internal {
43
44 #ifndef V8_INTERPRETED_REGEXP
45 /*
46 * This assembler uses the following register assignment convention:
47 * - w19 : Used to temporarily store a value before a call to C code.
48 * See CheckNotBackReferenceIgnoreCase.
49 * - x20 : Pointer to the current code object (Code*),
50 * it includes the heap object tag.
51 * - w21 : Current position in input, as negative offset from
52 * the end of the string. Please notice that this is
53 * the byte offset, not the character offset!
54 * - w22 : Currently loaded character. Must be loaded using
55 * LoadCurrentCharacter before using any of the dispatch methods.
56 * - x23 : Points to tip of backtrack stack.
57 * - w24 : Position of the first character minus one: non_position_value.
58 * Used to initialize capture registers.
59 * - x25 : Address at the end of the input string: input_end.
60 * Points to byte after last character in input.
61 * - x26 : Address at the start of the input string: input_start.
62 * - w27 : Where to start in the input string.
63 * - x28 : Output array pointer.
64 * - x29/fp : Frame pointer. Used to access arguments, local variables and
65 * RegExp registers.
66 * - x16/x17 : IP registers, used by assembler. Very volatile.
67 * - csp : Points to tip of C stack.
68 *
69 * - x0-x7 : Used as a cache to store 32 bit capture registers. These
70 * registers need to be preserved across every call to
71 * C code.
72 *
73 * The remaining registers are free for computations.
74 * Each call to a public method should retain this convention.
75 *
76 * The stack will have the following structure:
77 *
78 * Location Name Description
79 * (as referred to in
80 * the code)
81 *
82 * - fp[104] isolate Address of the current isolate.
83 * - fp[96] return_address Secondary link/return address
84 * used by an exit frame if this is a
85 * native call.
86 * ^^^ csp when called ^^^
87 * - fp[88] lr Return from the RegExp code.
88 * - fp[80] r29 Old frame pointer (CalleeSaved).
89 * - fp[0..72] r19-r28 Backup of CalleeSaved registers.
90 * - fp[-8] direct_call 1 => Direct call from JavaScript code.
91 * 0 => Call through the runtime system.
92 * - fp[-16] stack_base High end of the memory area to use as
93 * the backtracking stack.
94 * - fp[-24] output_size Output may fit multiple sets of matches.
95 * - fp[-32] input Handle containing the input string.
96 * - fp[-40] success_counter
97 * ^^^^^^^^^^^^^ From here and downwards we store 32 bit values ^^^^^^^^^^^^^
98 * - fp[-44] register N Capture registers initialized with
99 * - fp[-48] register N + 1 non_position_value.
100 * ... The first kNumCachedRegisters (N) registers
101 * ... are cached in x0 to x7.
102 * ... Only positions must be stored in the first
103 * - ... num_saved_registers_ registers.
104 * - ...
105 * - register N + num_registers - 1
106 * ^^^^^^^^^ csp ^^^^^^^^^
107 *
108 * The first num_saved_registers_ registers are initialized to point to
109 * "character -1" in the string (i.e., char_size() bytes before the first
110 * character of the string). The remaining registers start out as garbage.
111 *
112 * The data up to the return address must be placed there by the calling
113 * code and the remaining arguments are passed in registers, e.g. by calling the
114 * code entry as cast to a function with the signature:
115 * int (*match)(String* input,
116 * int start_offset,
117 * Address input_start,
118 * Address input_end,
119 * int* output,
120 * int output_size,
121 * Address stack_base,
122 * bool direct_call = false,
123 * Address secondary_return_address, // Only used by native call.
124 * Isolate* isolate)
125 * The call is performed by NativeRegExpMacroAssembler::Execute()
126 * (in regexp-macro-assembler.cc) via the CALL_GENERATED_REGEXP_CODE macro
127 * in a64/simulator-a64.h.
128 * When calling as a non-direct call (i.e., from C++ code), the return address
129 * area is overwritten with the LR register by the RegExp code. When doing a
130 * direct call from generated code, the return address is placed there by
131 * the calling code, as in a normal exit frame.
132 */
133
134 #define __ ACCESS_MASM(masm_)
135
136 RegExpMacroAssemblerA64::RegExpMacroAssemblerA64(
137 Mode mode,
138 int registers_to_save,
139 Zone* zone)
140 : NativeRegExpMacroAssembler(zone),
141 masm_(new MacroAssembler(zone->isolate(), NULL, kRegExpCodeSize)),
142 mode_(mode),
143 num_registers_(registers_to_save),
144 num_saved_registers_(registers_to_save),
145 entry_label_(),
146 start_label_(),
147 success_label_(),
148 backtrack_label_(),
149 exit_label_() {
150 __ SetStackPointer(csp);
151 ASSERT_EQ(0, registers_to_save % 2);
152 // We can cache at most 16 W registers in x0-x7.
153 STATIC_ASSERT(kNumCachedRegisters <= 16);
154 STATIC_ASSERT((kNumCachedRegisters % 2) == 0);
155 __ B(&entry_label_); // We'll write the entry code later.
156 __ Bind(&start_label_); // And then continue from here.
157 }
158
159
160 RegExpMacroAssemblerA64::~RegExpMacroAssemblerA64() {
161 delete masm_;
162 // Unuse labels in case we throw away the assembler without calling GetCode.
163 entry_label_.Unuse();
164 start_label_.Unuse();
165 success_label_.Unuse();
166 backtrack_label_.Unuse();
167 exit_label_.Unuse();
168 check_preempt_label_.Unuse();
169 stack_overflow_label_.Unuse();
170 }
171
172 int RegExpMacroAssemblerA64::stack_limit_slack() {
173 return RegExpStack::kStackLimitSlack;
174 }
175
176
177 void RegExpMacroAssemblerA64::AdvanceCurrentPosition(int by) {
178 if (by != 0) {
179 __ Add(current_input_offset(),
180 current_input_offset(), by * char_size());
181 }
182 }
183
184
185 void RegExpMacroAssemblerA64::AdvanceRegister(int reg, int by) {
186 ASSERT((reg >= 0) && (reg < num_registers_));
187 if (by != 0) {
188 Register to_advance;
189 RegisterState register_state = GetRegisterState(reg);
190 switch (register_state) {
191 case STACKED:
192 __ Ldr(w10, register_location(reg));
193 __ Add(w10, w10, by);
194 __ Str(w10, register_location(reg));
195 break;
196 case CACHED_LSW:
197 to_advance = GetCachedRegister(reg);
198 __ Add(to_advance, to_advance, by);
199 break;
200 case CACHED_MSW:
201 to_advance = GetCachedRegister(reg);
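        // This RegExp register is cached in the upper 32 bits of an X
        // register, so shift the increment into the upper word before adding.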
202 __ Add(to_advance, to_advance, static_cast<int64_t>(by) << kWRegSize);
203 break;
204 default:
205 UNREACHABLE();
206 break;
207 }
208 }
209 }
210
211
212 void RegExpMacroAssemblerA64::Backtrack() {
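      // Pop a 32-bit code offset from the backtrack stack and jump to it.
      // The offset is relative to code_pointer() (see PushBacktrack).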
213 CheckPreemption();
214 Pop(w10);
215 __ Add(x10, code_pointer(), Operand(w10, UXTW));
216 __ Br(x10);
217 }
218
219
220 void RegExpMacroAssemblerA64::Bind(Label* label) {
221 __ Bind(label);
222 }
223
224
225 void RegExpMacroAssemblerA64::CheckCharacter(uint32_t c, Label* on_equal) {
226 CompareAndBranchOrBacktrack(current_character(), c, eq, on_equal);
227 }
228
229
230 void RegExpMacroAssemblerA64::CheckCharacterGT(uc16 limit, Label* on_greater) {
231 CompareAndBranchOrBacktrack(current_character(), limit, hi, on_greater);
232 }
233
234
235 void RegExpMacroAssemblerA64::CheckAtStart(Label* on_at_start) {
236 Label not_at_start;
237 // Did we start the match at the start of the input string?
238 CompareAndBranchOrBacktrack(start_offset(), 0, ne, &not_at_start);
239 // If we did, are we still at the start of the input string?
240 __ Add(x10, input_end(), Operand(current_input_offset(), SXTW));
241 __ Cmp(x10, input_start());
242 BranchOrBacktrack(eq, on_at_start);
243 __ Bind(&not_at_start);
244 }
245
246
247 void RegExpMacroAssemblerA64::CheckNotAtStart(Label* on_not_at_start) {
248 // Did we start the match at the start of the input string?
249 CompareAndBranchOrBacktrack(start_offset(), 0, ne, on_not_at_start);
250 // If we did, are we still at the start of the input string?
251 __ Add(x10, input_end(), Operand(current_input_offset(), SXTW));
252 __ Cmp(x10, input_start());
253 BranchOrBacktrack(ne, on_not_at_start);
254 }
255
256
257 void RegExpMacroAssemblerA64::CheckCharacterLT(uc16 limit, Label* on_less) {
258 CompareAndBranchOrBacktrack(current_character(), limit, lo, on_less);
259 }
260
261
262 void RegExpMacroAssemblerA64::CheckCharacters(Vector<const uc16> str,
263 int cp_offset,
264 Label* on_failure,
265 bool check_end_of_string) {
266 // This method is only ever called from the cctests.
267
268 if (check_end_of_string) {
269 // Is the last character of the required match inside the string?
270 CheckPosition(cp_offset + str.length() - 1, on_failure);
271 }
272
273 Register characters_address = x11;
274
275 __ Add(characters_address,
276 input_end(),
277 Operand(current_input_offset(), SXTW));
278 if (cp_offset != 0) {
279 __ Add(characters_address, characters_address, cp_offset * char_size());
280 }
281
282 for (int i = 0; i < str.length(); i++) {
283 if (mode_ == ASCII) {
284 __ Ldrb(w10, MemOperand(characters_address, 1, PostIndex));
285 ASSERT(str[i] <= String::kMaxOneByteCharCode);
286 } else {
287 __ Ldrh(w10, MemOperand(characters_address, 2, PostIndex));
288 }
289 CompareAndBranchOrBacktrack(w10, str[i], ne, on_failure);
290 }
291 }
292
293
294 void RegExpMacroAssemblerA64::CheckGreedyLoop(Label* on_equal) {
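      // If the current input position equals the value on top of the backtrack
      // stack, pop that entry: the Cset/Add pair advances the backtrack stack
      // pointer by one 32-bit slot only when the comparison succeeded.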
295 __ Ldr(w10, MemOperand(backtrack_stackpointer()));
296 __ Cmp(current_input_offset(), w10);
297 __ Cset(x11, eq);
298 __ Add(backtrack_stackpointer(),
299 backtrack_stackpointer(), Operand(x11, LSL, kWRegSizeInBytesLog2));
300 BranchOrBacktrack(eq, on_equal);
301 }
302
303 void RegExpMacroAssemblerA64::CheckNotBackReferenceIgnoreCase(
304 int start_reg,
305 Label* on_no_match) {
306 Label fallthrough;
307
308 Register capture_start_offset = w10;
309 // Save the capture length in a callee-saved register so it will
310 // be preserved if we call a C helper.
311 Register capture_length = w19;
312 ASSERT(kCalleeSaved.IncludesAliasOf(capture_length));
313
314 // Find length of back-referenced capture.
315 ASSERT((start_reg % 2) == 0);
316 if (start_reg < kNumCachedRegisters) {
317 __ Mov(capture_start_offset.X(), GetCachedRegister(start_reg));
318 __ Lsr(x11, GetCachedRegister(start_reg), kWRegSize);
319 } else {
320 __ Ldp(w11, capture_start_offset, capture_location(start_reg, x10));
321 }
322 __ Sub(capture_length, w11, capture_start_offset); // Length to check.
323 // Succeed on empty capture (including no capture).
324 __ Cbz(capture_length, &fallthrough);
325
326 // Check that there are enough characters left in the input.
327 __ Cmn(capture_length, current_input_offset());
328 BranchOrBacktrack(gt, on_no_match);
329
330 if (mode_ == ASCII) {
331 Label success;
332 Label fail;
333 Label loop_check;
334
335 Register capture_start_address = x12;
336 Register capture_end_address = x13;
337 Register current_position_address = x14;
338
339 __ Add(capture_start_address,
340 input_end(),
341 Operand(capture_start_offset, SXTW));
342 __ Add(capture_end_address,
343 capture_start_address,
344 Operand(capture_length, SXTW));
345 __ Add(current_position_address,
346 input_end(),
347 Operand(current_input_offset(), SXTW));
348
349 Label loop;
350 __ Bind(&loop);
351 __ Ldrb(w10, MemOperand(capture_start_address, 1, PostIndex));
352 __ Ldrb(w11, MemOperand(current_position_address, 1, PostIndex));
353 __ Cmp(w10, w11);
354 __ B(eq, &loop_check);
355
356 // Mismatch, try case-insensitive match (converting letters to lower-case).
357 __ Orr(w10, w10, 0x20); // Convert capture character to lower-case.
358 __ Orr(w11, w11, 0x20); // Also convert input character.
359 __ Cmp(w11, w10);
360 __ B(ne, &fail);
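      // OR-ing in 0x20 maps 'A'-'Z' onto 'a'-'z', but it can also make
      // unrelated characters compare equal, so the match is only accepted if
      // the character really is a letter (checked below).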
361 __ Sub(w10, w10, 'a');
362 __ Cmp(w10, 'z' - 'a'); // Is w10 a lowercase letter?
363 __ B(ls, &loop_check); // In range 'a'-'z'.
364 // Latin-1: Check for values in range [224,254] but not 247.
365 __ Sub(w10, w10, 224 - 'a');
366 // TODO(jbramley): Use Ccmp here.
367 __ Cmp(w10, 254 - 224);
368 __ B(hi, &fail); // Weren't Latin-1 letters.
369 __ Cmp(w10, 247 - 224); // Check for 247.
370 __ B(eq, &fail);
371
372 __ Bind(&loop_check);
373 __ Cmp(capture_start_address, capture_end_address);
374 __ B(lt, &loop);
375 __ B(&success);
376
377 __ Bind(&fail);
378 BranchOrBacktrack(al, on_no_match);
379
380 __ Bind(&success);
381 // Compute new value of character position after the matched part.
382 __ Sub(current_input_offset().X(), current_position_address, input_end());
383 if (masm_->emit_debug_code()) {
384 __ Cmp(current_input_offset().X(), Operand(current_input_offset(), SXTW));
385 __ Ccmp(current_input_offset(), 0, NoFlag, eq);
386 // The current input offset should be <= 0, and fit in a W register.
387 __ Check(le, kOffsetOutOfRange);
388 }
389 } else {
390 ASSERT(mode_ == UC16);
391 int argument_count = 4;
392
393 // The cached registers need to be retained.
394 CPURegList cached_registers(CPURegister::kRegister, kXRegSize, 0, 7);
395 ASSERT((cached_registers.Count() * 2) == kNumCachedRegisters);
396 __ PushCPURegList(cached_registers);
397
398 // Put arguments into arguments registers.
399 // Parameters are
400 // x0: Address byte_offset1 - Address captured substring's start.
401 // x1: Address byte_offset2 - Address of current character position.
402 // w2: size_t byte_length - length of capture in bytes(!)
403 // x3: Isolate* isolate
404
405 // Address of start of capture.
406 __ Add(x0, input_end(), Operand(capture_start_offset, SXTW));
407 // Length of capture.
408 __ Mov(w2, capture_length);
409 // Address of current input position.
410 __ Add(x1, input_end(), Operand(current_input_offset(), SXTW));
411 // Isolate.
412 __ Mov(x3, Operand(ExternalReference::isolate_address(isolate())));
413
414 {
415 AllowExternalCallThatCantCauseGC scope(masm_);
416 ExternalReference function =
417 ExternalReference::re_case_insensitive_compare_uc16(isolate());
418 __ CallCFunction(function, argument_count);
419 }
420
421 // Check if function returned non-zero for success or zero for failure.
422 CompareAndBranchOrBacktrack(x0, 0, eq, on_no_match);
423 // On success, increment position by length of capture.
424 __ Add(current_input_offset(), current_input_offset(), capture_length);
425 // Reset the cached registers.
426 __ PopCPURegList(cached_registers);
427 }
428
429 __ Bind(&fallthrough);
430 }
431
432 void RegExpMacroAssemblerA64::CheckNotBackReference(
433 int start_reg,
434 Label* on_no_match) {
435 Label fallthrough;
436
437 Register capture_start_address = x12;
438 Register capture_end_address = x13;
439 Register current_position_address = x14;
440 Register capture_length = w15;
441
442 // Find length of back-referenced capture.
443 ASSERT((start_reg % 2) == 0);
444 if (start_reg < kNumCachedRegisters) {
445 __ Mov(x10, GetCachedRegister(start_reg));
446 __ Lsr(x11, GetCachedRegister(start_reg), kWRegSize);
447 } else {
448 __ Ldp(w11, w10, capture_location(start_reg, x10));
449 }
450 __ Sub(capture_length, w11, w10); // Length to check.
451 // Succeed on empty capture (including no capture).
452 __ Cbz(capture_length, &fallthrough);
453
454 // Check that there are enough characters left in the input.
455 __ Cmn(capture_length, current_input_offset());
456 BranchOrBacktrack(gt, on_no_match);
457
458 // Compute pointers to match string and capture string
459 __ Add(capture_start_address, input_end(), Operand(w10, SXTW));
460 __ Add(capture_end_address,
461 capture_start_address,
462 Operand(capture_length, SXTW));
463 __ Add(current_position_address,
464 input_end(),
465 Operand(current_input_offset(), SXTW));
466
467 Label loop;
468 __ Bind(&loop);
469 if (mode_ == ASCII) {
470 __ Ldrb(w10, MemOperand(capture_start_address, 1, PostIndex));
471 __ Ldrb(w11, MemOperand(current_position_address, 1, PostIndex));
472 } else {
473 ASSERT(mode_ == UC16);
474 __ Ldrh(w10, MemOperand(capture_start_address, 2, PostIndex));
475 __ Ldrh(w11, MemOperand(current_position_address, 2, PostIndex));
476 }
477 __ Cmp(w10, w11);
478 BranchOrBacktrack(ne, on_no_match);
479 __ Cmp(capture_start_address, capture_end_address);
480 __ B(lt, &loop);
481
482 // Move current character position to position after match.
483 __ Sub(current_input_offset().X(), current_position_address, input_end());
484 if (masm_->emit_debug_code()) {
485 __ Cmp(current_input_offset().X(), Operand(current_input_offset(), SXTW));
486 __ Ccmp(current_input_offset(), 0, NoFlag, eq);
487 // The current input offset should be <= 0, and fit in a W register.
488 __ Check(le, kOffsetOutOfRange);
489 }
490 __ Bind(&fallthrough);
491 }
492
493
494 void RegExpMacroAssemblerA64::CheckNotCharacter(unsigned c,
495 Label* on_not_equal) {
496 CompareAndBranchOrBacktrack(current_character(), c, ne, on_not_equal);
497 }
498
499
500 void RegExpMacroAssemblerA64::CheckCharacterAfterAnd(uint32_t c,
501 uint32_t mask,
502 Label* on_equal) {
503 __ And(w10, current_character(), mask);
504 CompareAndBranchOrBacktrack(w10, c, eq, on_equal);
505 }
506
507
508 void RegExpMacroAssemblerA64::CheckNotCharacterAfterAnd(unsigned c,
509 unsigned mask,
510 Label* on_not_equal) {
511 __ And(w10, current_character(), mask);
512 CompareAndBranchOrBacktrack(w10, c, ne, on_not_equal);
513 }
514
515
516 void RegExpMacroAssemblerA64::CheckNotCharacterAfterMinusAnd(
517 uc16 c,
518 uc16 minus,
519 uc16 mask,
520 Label* on_not_equal) {
521 ASSERT(minus < String::kMaxUtf16CodeUnit);
522 __ Sub(w10, current_character(), minus);
523 __ And(w10, w10, mask);
524 CompareAndBranchOrBacktrack(w10, c, ne, on_not_equal);
525 }
526
527
528 void RegExpMacroAssemblerA64::CheckCharacterInRange(
529 uc16 from,
530 uc16 to,
531 Label* on_in_range) {
532 __ Sub(w10, current_character(), from);
533 // Unsigned lower-or-same condition.
534 CompareAndBranchOrBacktrack(w10, to - from, ls, on_in_range);
535 }
536
537
538 void RegExpMacroAssemblerA64::CheckCharacterNotInRange(
539 uc16 from,
540 uc16 to,
541 Label* on_not_in_range) {
542 __ Sub(w10, current_character(), from);
543 // Unsigned higher condition.
544 CompareAndBranchOrBacktrack(w10, to - from, hi, on_not_in_range);
545 }
546
547
548 void RegExpMacroAssemblerA64::CheckBitInTable(
549 Handle<ByteArray> table,
550 Label* on_bit_set) {
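      // table is a ByteArray of flags indexed by (character & kTableMask); a
      // non-zero entry means the character is in the class. The masking can be
      // skipped for one-byte strings when the table covers all one-byte
      // characters.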
551 __ Mov(x11, Operand(table));
552 if ((mode_ != ASCII) || (kTableMask != String::kMaxOneByteCharCode)) {
553 __ And(w10, current_character(), kTableMask);
554 __ Add(w10, w10, ByteArray::kHeaderSize - kHeapObjectTag);
555 } else {
556 __ Add(w10, current_character(), ByteArray::kHeaderSize - kHeapObjectTag);
557 }
558 __ Ldrb(w11, MemOperand(x11, w10, UXTW));
559 CompareAndBranchOrBacktrack(w11, 0, ne, on_bit_set);
560 }
561
562
563 bool RegExpMacroAssemblerA64::CheckSpecialCharacterClass(uc16 type,
564 Label* on_no_match) {
565 // Range checks (c in min..max) are generally implemented by an unsigned
566 // (c - min) <= (max - min) check
567 switch (type) {
568 case 's':
569 // Match space-characters
570 if (mode_ == ASCII) {
571 // One byte space characters are '\t'..'\r', ' ' and \u00a0.
572 Label success;
573 // Check for ' ' or 0x00a0.
574 __ Cmp(current_character(), ' ');
575 __ Ccmp(current_character(), 0x00a0, ZFlag, ne);
576 __ B(eq, &success);
577 // Check range 0x09..0x0d.
578 __ Sub(w10, current_character(), '\t');
579 CompareAndBranchOrBacktrack(w10, '\r' - '\t', hi, on_no_match);
580 __ Bind(&success);
581 return true;
582 }
583 return false;
584 case 'S':
585 // The emitted code for generic character classes is good enough.
586 return false;
587 case 'd':
588 // Match ASCII digits ('0'..'9').
589 __ Sub(w10, current_character(), '0');
590 CompareAndBranchOrBacktrack(w10, '9' - '0', hi, on_no_match);
591 return true;
592 case 'D':
593 // Match ASCII non-digits.
594 __ Sub(w10, current_character(), '0');
595 CompareAndBranchOrBacktrack(w10, '9' - '0', ls, on_no_match);
596 return true;
597 case '.': {
598 // Match non-newlines (not 0x0a('\n'), 0x0d('\r'), 0x2028 and 0x2029)
599 // Here we emit the conditional branch only once at the end to make branch
600 // prediction more efficient, even though we could branch out of here
601 // as soon as a character matches.
602 __ Cmp(current_character(), 0x0a);
603 __ Ccmp(current_character(), 0x0d, ZFlag, ne);
604 if (mode_ == UC16) {
605 __ Sub(w10, current_character(), 0x2028);
606 // If the Z flag was set we clear the flags to force a branch.
607 __ Ccmp(w10, 0x2029 - 0x2028, NoFlag, ne);
608 // ls -> !((C==1) && (Z==0))
609 BranchOrBacktrack(ls, on_no_match);
610 } else {
611 BranchOrBacktrack(eq, on_no_match);
612 }
613 return true;
614 }
615 case 'n': {
616 // Match newlines (0x0a('\n'), 0x0d('\r'), 0x2028 and 0x2029)
617 // We have to check all 4 newline characters before emitting
618 // the conditional branch.
619 __ Cmp(current_character(), 0x0a);
620 __ Ccmp(current_character(), 0x0d, ZFlag, ne);
621 if (mode_ == UC16) {
622 __ Sub(w10, current_character(), 0x2028);
623 // If the Z flag was set we clear the flags to force a fall-through.
624 __ Ccmp(w10, 0x2029 - 0x2028, NoFlag, ne);
625 // hi -> (C==1) && (Z==0)
626 BranchOrBacktrack(hi, on_no_match);
627 } else {
628 BranchOrBacktrack(ne, on_no_match);
629 }
630 return true;
631 }
632 case 'w': {
633 if (mode_ != ASCII) {
634 // Table is 128 entries, so all ASCII characters can be tested.
635 CompareAndBranchOrBacktrack(current_character(), 'z', hi, on_no_match);
636 }
637 ExternalReference map = ExternalReference::re_word_character_map();
638 __ Mov(x10, Operand(map));
639 __ Ldrb(w10, MemOperand(x10, current_character(), UXTW));
640 CompareAndBranchOrBacktrack(w10, 0, eq, on_no_match);
641 return true;
642 }
643 case 'W': {
644 Label done;
645 if (mode_ != ASCII) {
646 // Table is 128 entries, so all ASCII characters can be tested.
647 __ Cmp(current_character(), 'z');
648 __ B(hi, &done);
649 }
650 ExternalReference map = ExternalReference::re_word_character_map();
651 __ Mov(x10, Operand(map));
652 __ Ldrb(w10, MemOperand(x10, current_character(), UXTW));
653 CompareAndBranchOrBacktrack(w10, 0, ne, on_no_match);
654 __ Bind(&done);
655 return true;
656 }
657 case '*':
658 // Match any character.
659 return true;
660 // No custom implementation (yet): s(UC16), S(UC16).
661 default:
662 return false;
663 }
664 }
665
666
667 void RegExpMacroAssemblerA64::Fail() {
668 __ Mov(w0, FAILURE);
669 __ B(&exit_label_);
670 }
671
672
673 Handle<HeapObject> RegExpMacroAssemblerA64::GetCode(Handle<String> source) {
674 Label return_w0;
675 // Finalize code - write the entry point code now that we know how many
676 // registers we need.
677
678 // Entry code:
679 __ Bind(&entry_label_);
680
681 // Arguments on entry:
682 // x0: String* input
683 // x1: int start_offset
684 // x2: byte* input_start
685 // x3: byte* input_end
686 // x4: int* output array
687 // x5: int output array size
688 // x6: Address stack_base
689 // x7: int direct_call
690
691 // The stack pointer should be csp on entry.
692 // csp[8]: address of the current isolate
693 // csp[0]: secondary link/return address used by native call
694
695 // Tell the system that we have a stack frame. Because the type is MANUAL, no
696 // code is generated.
697 FrameScope scope(masm_, StackFrame::MANUAL);
698
699 // Push registers on the stack; only push the argument registers that we need.
700 CPURegList argument_registers(x0, x5, x6, x7);
701
702 CPURegList registers_to_retain = kCalleeSaved;
703 ASSERT(kCalleeSaved.Count() == 11);
704 registers_to_retain.Combine(lr);
705
706 ASSERT(csp.Is(__ StackPointer()));
707 __ PushCPURegList(registers_to_retain);
708 __ PushCPURegList(argument_registers);
709
710 // Set frame pointer in place.
711 __ Add(frame_pointer(), csp, argument_registers.Count() * kPointerSize);
712
713 // Initialize callee-saved registers.
714 __ Mov(start_offset(), w1);
715 __ Mov(input_start(), x2);
716 __ Mov(input_end(), x3);
717 __ Mov(output_array(), x4);
718
719 // Set the number of registers we will need to allocate, that is:
720 // - success_counter (X register)
721 // - (num_registers_ - kNumCachedRegisters) (W registers)
722 int num_wreg_to_allocate = num_registers_ - kNumCachedRegisters;
723 // Do not allocate registers on the stack if they can all be cached.
724 if (num_wreg_to_allocate < 0) { num_wreg_to_allocate = 0; }
725 // Make room for the success_counter.
726 num_wreg_to_allocate += 2;
727
728 // Make sure the stack alignment will be respected.
729 int alignment = masm_->ActivationFrameAlignment();
730 ASSERT_EQ(alignment % 16, 0);
731 int align_mask = (alignment / kWRegSizeInBytes) - 1;
732 num_wreg_to_allocate = (num_wreg_to_allocate + align_mask) & ~align_mask;
733
734 // Check if we have space on the stack.
735 Label stack_limit_hit;
736 Label stack_ok;
737
738 ExternalReference stack_limit =
739 ExternalReference::address_of_stack_limit(isolate());
740 __ Mov(x10, Operand(stack_limit));
741 __ Ldr(x10, MemOperand(x10));
742 __ Subs(x10, csp, x10);
743
744 // Handle it if the stack pointer is already below the stack limit.
745 __ B(ls, &stack_limit_hit);
746
747 // Check if there is room for the variable number of registers above
748 // the stack limit.
749 __ Cmp(x10, num_wreg_to_allocate * kWRegSizeInBytes);
750 __ B(hs, &stack_ok);
751
752 // Exit with OutOfMemory exception. There is not enough space on the stack
753 // for our working registers.
754 __ Mov(w0, EXCEPTION);
755 __ B(&return_w0);
756
757 __ Bind(&stack_limit_hit);
758 CallCheckStackGuardState(x10);
759 // If returned value is non-zero, we exit with the returned value as result.
760 __ Cbnz(w0, &return_w0);
761
762 __ Bind(&stack_ok);
763
764 // Allocate space on stack.
765 __ Claim(num_wreg_to_allocate, kWRegSizeInBytes);
766
767 // Initialize success_counter with 0.
768 __ Str(wzr, MemOperand(frame_pointer(), kSuccessCounter));
769
770 // Find negative length (offset of start relative to end).
771 __ Sub(x10, input_start(), input_end());
772 if (masm_->emit_debug_code()) {
773 // Check that the input string length is < 2^30.
774 __ Neg(x11, x10);
775 __ Cmp(x11, (1<<30) - 1);
776 __ Check(ls, kInputStringTooLong);
777 }
778 __ Mov(current_input_offset(), w10);
779
780 // The non-position value is used as a clearing value for the
781 // capture registers; it corresponds to the position of the first character
782 // minus one.
783 __ Sub(non_position_value(), current_input_offset(), char_size());
784 __ Sub(non_position_value(), non_position_value(),
785 Operand(start_offset(), LSL, (mode_ == UC16) ? 1 : 0));
786 // We can store this value twice in an X register for initializing
787 // on-stack registers later.
788 __ Orr(twice_non_position_value(),
789 non_position_value().X(),
790 Operand(non_position_value().X(), LSL, kWRegSize));
791
792 // Initialize code pointer register.
793 __ Mov(code_pointer(), Operand(masm_->CodeObject()));
794
795 Label load_char_start_regexp, start_regexp;
796 // Load newline if index is at start, previous character otherwise.
797 __ Cbnz(start_offset(), &load_char_start_regexp);
798 __ Mov(current_character(), '\n');
799 __ B(&start_regexp);
800
801 // Global regexp restarts matching here.
802 __ Bind(&load_char_start_regexp);
803 // Load previous char as initial value of current character register.
804 LoadCurrentCharacterUnchecked(-1, 1);
805 __ Bind(&start_regexp);
806 // Initialize on-stack registers.
807 if (num_saved_registers_ > 0) {
808 ClearRegisters(0, num_saved_registers_ - 1);
809 }
810
811 // Initialize backtrack stack pointer.
812 __ Ldr(backtrack_stackpointer(), MemOperand(frame_pointer(), kStackBase));
813
814 // Execute
815 __ B(&start_label_);
816
817 if (backtrack_label_.is_linked()) {
818 __ Bind(&backtrack_label_);
819 Backtrack();
820 }
821
822 if (success_label_.is_linked()) {
823 Register first_capture_start = w15;
824
825 // Save captures when successful.
826 __ Bind(&success_label_);
827
828 if (num_saved_registers_ > 0) {
829 // V8 expects the output to be an int32_t array.
830 Register capture_start = w12;
831 Register capture_end = w13;
832 Register input_length = w14;
833
834 // Copy captures to output.
835
836 // Get string length.
837 __ Sub(x10, input_end(), input_start());
838 if (masm_->emit_debug_code()) {
839 // Check that the input string length is < 2^30.
840 __ Cmp(x10, (1<<30) - 1);
841 __ Check(ls, kInputStringTooLong);
842 }
843 // input_start is offset by start_offset on entry. We need to include
844 // that offset when computing the length of the whole string.
845 if (mode_ == UC16) {
846 __ Add(input_length, start_offset(), Operand(w10, LSR, 1));
847 } else {
848 __ Add(input_length, start_offset(), w10);
849 }
850
851 // Copy the results to the output array from the cached registers first.
852 for (int i = 0;
853 (i < num_saved_registers_) && (i < kNumCachedRegisters);
854 i += 2) {
855 __ Mov(capture_start.X(), GetCachedRegister(i));
856 __ Lsr(capture_end.X(), capture_start.X(), kWRegSize);
857 if ((i == 0) && global_with_zero_length_check()) {
858 // Keep capture start for the zero-length check later.
859 __ Mov(first_capture_start, capture_start);
860 }
861 // Offsets need to be relative to the start of the string.
862 if (mode_ == UC16) {
863 __ Add(capture_start, input_length, Operand(capture_start, ASR, 1));
864 __ Add(capture_end, input_length, Operand(capture_end, ASR, 1));
865 } else {
866 __ Add(capture_start, input_length, capture_start);
867 __ Add(capture_end, input_length, capture_end);
868 }
869 // The output pointer advances for a possible global match.
870 __ Stp(capture_start,
871 capture_end,
872 MemOperand(output_array(), kPointerSize, PostIndex));
873 }
874
875 // Only carry on if there are more than kNumCachedRegisters capture
876 // registers.
877 int num_registers_left_on_stack =
878 num_saved_registers_ - kNumCachedRegisters;
879 if (num_registers_left_on_stack > 0) {
880 Register base = x10;
881 // There is always an even number of capture registers. Each pair of
882 // registers holds the start and end offsets of one match.
883 ASSERT_EQ(0, num_registers_left_on_stack % 2);
884 __ Add(base, frame_pointer(), kFirstCaptureOnStack);
885
886 // We can unroll the loop here; we should not unroll for less than 2
887 // registers.
888 STATIC_ASSERT(kNumRegistersToUnroll > 2);
889 if (num_registers_left_on_stack <= kNumRegistersToUnroll) {
890 for (int i = 0; i < num_registers_left_on_stack / 2; i++) {
891 __ Ldp(capture_end,
892 capture_start,
893 MemOperand(base, -kPointerSize, PostIndex));
894 if ((i == 0) && global_with_zero_length_check()) {
895 // Keep capture start for the zero-length check later.
896 __ Mov(first_capture_start, capture_start);
897 }
898 // Offsets need to be relative to the start of the string.
899 if (mode_ == UC16) {
900 __ Add(capture_start,
901 input_length,
902 Operand(capture_start, ASR, 1));
903 __ Add(capture_end, input_length, Operand(capture_end, ASR, 1));
904 } else {
905 __ Add(capture_start, input_length, capture_start);
906 __ Add(capture_end, input_length, capture_end);
907 }
908 // The output pointer advances for a possible global match.
909 __ Stp(capture_start,
910 capture_end,
911 MemOperand(output_array(), kPointerSize, PostIndex));
912 }
913 } else {
914 Label loop, start;
915 __ Mov(x11, num_registers_left_on_stack);
916
917 __ Ldp(capture_end,
918 capture_start,
919 MemOperand(base, -kPointerSize, PostIndex));
920 if (global_with_zero_length_check()) {
921 __ Mov(first_capture_start, capture_start);
922 }
923 __ B(&start);
924
925 __ Bind(&loop);
926 __ Ldp(capture_end,
927 capture_start,
928 MemOperand(base, -kPointerSize, PostIndex));
929 __ Bind(&start);
930 if (mode_ == UC16) {
931 __ Add(capture_start, input_length, Operand(capture_start, ASR, 1));
932 __ Add(capture_end, input_length, Operand(capture_end, ASR, 1));
933 } else {
934 __ Add(capture_start, input_length, capture_start);
935 __ Add(capture_end, input_length, capture_end);
936 }
937 // The output pointer advances for a possible global match.
938 __ Stp(capture_start,
939 capture_end,
940 MemOperand(output_array(), kPointerSize, PostIndex));
941 __ Sub(x11, x11, 2);
942 __ Cbnz(x11, &loop);
943 }
944 }
945 }
946
947 if (global()) {
948 Register success_counter = w0;
949 Register output_size = x10;
950 // Restart matching if the regular expression is flagged as global.
951
952 // Increment success counter.
953 __ Ldr(success_counter, MemOperand(frame_pointer(), kSuccessCounter));
954 __ Add(success_counter, success_counter, 1);
955 __ Str(success_counter, MemOperand(frame_pointer(), kSuccessCounter));
956
957 // Capture results have been stored, so the number of remaining global
958 // output registers is reduced by the number of stored captures.
959 __ Ldr(output_size, MemOperand(frame_pointer(), kOutputSize));
960 __ Sub(output_size, output_size, num_saved_registers_);
961 // Check whether we have enough room for another set of capture results.
962 __ Cmp(output_size, num_saved_registers_);
963 __ B(lt, &return_w0);
964
965 // The output pointer is already set to the next field in the output
966 // array.
967 // Update output size on the frame before we restart matching.
968 __ Str(output_size, MemOperand(frame_pointer(), kOutputSize));
969
970 if (global_with_zero_length_check()) {
971 // Special case for zero-length matches.
972 __ Cmp(current_input_offset(), first_capture_start);
973 // Not a zero-length match, restart.
974 __ B(ne, &load_char_start_regexp);
975 // Offset from the end is zero if we already reached the end.
976 __ Cbz(current_input_offset(), &return_w0);
977 // Advance current position after a zero-length match.
978 __ Add(current_input_offset(),
979 current_input_offset(),
980 Operand((mode_ == UC16) ? 2 : 1));
981 }
982
983 __ B(&load_char_start_regexp);
984 } else {
985 __ Mov(w0, SUCCESS);
986 }
987 }
988
989 if (exit_label_.is_linked()) {
990 // Exit and return w0
991 __ Bind(&exit_label_);
992 if (global()) {
993 __ Ldr(w0, MemOperand(frame_pointer(), kSuccessCounter));
994 }
995 }
996
997 __ Bind(&return_w0);
998
999 // Set stack pointer back to first register to retain
1000 ASSERT(csp.Is(__ StackPointer()));
1001 __ Mov(csp, fp);
1002
1003 // Restore registers.
1004 __ PopCPURegList(registers_to_retain);
1005
1006 __ Ret();
1007
1008 Label exit_with_exception;
1009 // Registers x0 to x7 are used to store the first captures; they need to be
1010 // preserved across calls to C++ code.
1011 CPURegList cached_registers(CPURegister::kRegister, kXRegSize, 0, 7);
1012 ASSERT((cached_registers.Count() * 2) == kNumCachedRegisters);
1013
1014 if (check_preempt_label_.is_linked()) {
1015 __ Bind(&check_preempt_label_);
1016 SaveLinkRegister();
1017 // The cached registers need to be retained.
1018 __ PushCPURegList(cached_registers);
1019 CallCheckStackGuardState(x10);
1020 // Returning from the regexp code restores the stack (csp <- fp)
1021 // so we don't need to drop the link register from it before exiting.
1022 __ Cbnz(w0, &return_w0);
1023 // Reset the cached registers.
1024 __ PopCPURegList(cached_registers);
1025 RestoreLinkRegister();
1026 __ Ret();
1027 }
1028
1029 if (stack_overflow_label_.is_linked()) {
1030 __ Bind(&stack_overflow_label_);
1031 SaveLinkRegister();
1032 // The cached registers need to be retained.
1033 __ PushCPURegList(cached_registers);
1034 // Call GrowStack(backtrack_stackpointer(), &stack_base, isolate)
1035 __ Mov(x2, Operand(ExternalReference::isolate_address(isolate())));
1036 __ Add(x1, frame_pointer(), kStackBase);
1037 __ Mov(x0, backtrack_stackpointer());
1038 ExternalReference grow_stack =
1039 ExternalReference::re_grow_stack(isolate());
1040 __ CallCFunction(grow_stack, 3);
1041 // If NULL is returned, we have failed to grow the stack, and
1042 // must exit with a stack-overflow exception.
1043 // Returning from the regexp code restores the stack (csp <- fp)
1044 // so we don't need to drop the link register from it before exiting.
1045 __ Cbz(w0, &exit_with_exception);
1046 // Otherwise use return value as new stack pointer.
1047 __ Mov(backtrack_stackpointer(), x0);
1048 // Reset the cached registers.
1049 __ PopCPURegList(cached_registers);
1050 RestoreLinkRegister();
1051 __ Ret();
1052 }
1053
1054 if (exit_with_exception.is_linked()) {
1055 __ Bind(&exit_with_exception);
1056 __ Mov(w0, EXCEPTION);
1057 __ B(&return_w0);
1058 }
1059
1060 CodeDesc code_desc;
1061 masm_->GetCode(&code_desc);
1062 Handle<Code> code = isolate()->factory()->NewCode(
1063 code_desc, Code::ComputeFlags(Code::REGEXP), masm_->CodeObject());
1064 PROFILE(masm_->isolate(), RegExpCodeCreateEvent(*code, *source));
1065 return Handle<HeapObject>::cast(code);
1066 }
1067
1068
1069 void RegExpMacroAssemblerA64::GoTo(Label* to) {
1070 BranchOrBacktrack(al, to);
1071 }
1072
1073 void RegExpMacroAssemblerA64::IfRegisterGE(int reg,
1074 int comparand,
1075 Label* if_ge) {
1076 Register to_compare = GetRegister(reg, w10);
1077 CompareAndBranchOrBacktrack(to_compare, comparand, ge, if_ge);
1078 }
1079
1080
1081 void RegExpMacroAssemblerA64::IfRegisterLT(int reg,
1082 int comparand,
1083 Label* if_lt) {
1084 Register to_compare = GetRegister(reg, w10);
1085 CompareAndBranchOrBacktrack(to_compare, comparand, lt, if_lt);
1086 }
1087
1088
1089 void RegExpMacroAssemblerA64::IfRegisterEqPos(int reg,
1090 Label* if_eq) {
1091 Register to_compare = GetRegister(reg, w10);
1092 __ Cmp(to_compare, current_input_offset());
1093 BranchOrBacktrack(eq, if_eq);
1094 }
1095
1096 RegExpMacroAssembler::IrregexpImplementation
1097 RegExpMacroAssemblerA64::Implementation() {
1098 return kA64Implementation;
1099 }
1100
1101
1102 void RegExpMacroAssemblerA64::LoadCurrentCharacter(int cp_offset,
1103 Label* on_end_of_input,
1104 bool check_bounds,
1105 int characters) {
1106 // TODO(pielan): Make sure long strings are caught before this, and not
1107 // just asserted in debug mode.
1108 ASSERT(cp_offset >= -1); // ^ and \b can look behind one character.
1109 // Be sane! (And ensure that an int32_t can be used to index the string)
1110 ASSERT(cp_offset < (1<<30));
1111 if (check_bounds) {
1112 CheckPosition(cp_offset + characters - 1, on_end_of_input);
1113 }
1114 LoadCurrentCharacterUnchecked(cp_offset, characters);
1115 }
1116
1117
1118 void RegExpMacroAssemblerA64::PopCurrentPosition() {
1119 Pop(current_input_offset());
1120 }
1121
1122
1123 void RegExpMacroAssemblerA64::PopRegister(int register_index) {
1124 Pop(w10);
1125 StoreRegister(register_index, w10);
1126 }
1127
1128
1129 void RegExpMacroAssemblerA64::PushBacktrack(Label* label) {
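      // Backtrack targets are pushed as 32-bit offsets relative to
      // code_pointer() rather than as absolute addresses, so they fit the
      // W-sized backtrack stack slots; Backtrack() adds code_pointer() back
      // before jumping.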
1130 if (label->is_bound()) {
1131 int target = label->pos();
1132 __ Mov(w10, target + Code::kHeaderSize - kHeapObjectTag);
1133 } else {
1134 __ Adr(x10, label);
1135 __ Sub(x10, x10, code_pointer());
1136 if (masm_->emit_debug_code()) {
1137 __ Cmp(x10, kWRegMask);
1138 // The code offset has to fit in a W register.
1139 __ Check(ls, kOffsetOutOfRange);
1140 }
1141 }
1142 Push(w10);
1143 CheckStackLimit();
1144 }
1145
1146
1147 void RegExpMacroAssemblerA64::PushCurrentPosition() {
1148 Push(current_input_offset());
1149 }
1150
1151
1152 void RegExpMacroAssemblerA64::PushRegister(int register_index,
1153 StackCheckFlag check_stack_limit) {
1154 Register to_push = GetRegister(register_index, w10);
1155 Push(to_push);
1156 if (check_stack_limit) CheckStackLimit();
1157 }
1158
1159
1160 void RegExpMacroAssemblerA64::ReadCurrentPositionFromRegister(int reg) {
1161 Register cached_register;
1162 RegisterState register_state = GetRegisterState(reg);
1163 switch (register_state) {
1164 case STACKED:
1165 __ Ldr(current_input_offset(), register_location(reg));
1166 break;
1167 case CACHED_LSW:
1168 cached_register = GetCachedRegister(reg);
1169 __ Mov(current_input_offset(), cached_register.W());
1170 break;
1171 case CACHED_MSW:
1172 cached_register = GetCachedRegister(reg);
1173 __ Lsr(current_input_offset().X(), cached_register, kWRegSize);
1174 break;
1175 default:
1176 UNREACHABLE();
1177 break;
1178 }
1179 }
1180
1181
1182 void RegExpMacroAssemblerA64::ReadStackPointerFromRegister(int reg) {
1183 Register read_from = GetRegister(reg, w10);
1184 __ Ldr(x11, MemOperand(frame_pointer(), kStackBase));
1185 __ Add(backtrack_stackpointer(), x11, Operand(read_from, SXTW));
1186 }
1187
1188
1189 void RegExpMacroAssemblerA64::SetCurrentPositionFromEnd(int by) {
1190 Label after_position;
1191 __ Cmp(current_input_offset(), -by * char_size());
1192 __ B(ge, &after_position);
1193 __ Mov(current_input_offset(), -by * char_size());
1194 // On RegExp code entry (where this operation is used), the character before
1195 // the current position is expected to be already loaded.
1196 // We have advanced the position, so it's safe to read backwards.
1197 LoadCurrentCharacterUnchecked(-1, 1);
1198 __ Bind(&after_position);
1199 }
1200
1201
1202 void RegExpMacroAssemblerA64::SetRegister(int register_index, int to) {
1203 ASSERT(register_index >= num_saved_registers_); // Reserved for positions!
1204 Register set_to = wzr;
1205 if (to != 0) {
1206 set_to = w10;
1207 __ Mov(set_to, to);
1208 }
1209 StoreRegister(register_index, set_to);
1210 }
1211
1212
1213 bool RegExpMacroAssemblerA64::Succeed() {
1214 __ B(&success_label_);
1215 return global();
1216 }
1217
1218
1219 void RegExpMacroAssemblerA64::WriteCurrentPositionToRegister(int reg,
1220 int cp_offset) {
1221 Register position = current_input_offset();
1222 if (cp_offset != 0) {
1223 position = w10;
1224 __ Add(position, current_input_offset(), cp_offset * char_size());
1225 }
1226 StoreRegister(reg, position);
1227 }
1228
1229
1230 void RegExpMacroAssemblerA64::ClearRegisters(int reg_from, int reg_to) {
1231 ASSERT(reg_from <= reg_to);
1232 int num_registers = reg_to - reg_from + 1;
1233
1234 // If the first register to clear is cached in a hardware register but sits
1235 // in its upper half (odd index), clear that register individually first.
1236 if ((reg_from < kNumCachedRegisters) && ((reg_from % 2) != 0)) {
1237 StoreRegister(reg_from, non_position_value());
1238 num_registers--;
1239 reg_from++;
1240 }
1241
1242 // Clear cached registers in pairs as far as possible.
1243 while ((num_registers >= 2) && (reg_from < kNumCachedRegisters)) {
1244 ASSERT(GetRegisterState(reg_from) == CACHED_LSW);
1245 __ Mov(GetCachedRegister(reg_from), twice_non_position_value());
1246 reg_from += 2;
1247 num_registers -= 2;
1248 }
1249
1250 if ((num_registers % 2) == 1) {
1251 StoreRegister(reg_from, non_position_value());
1252 num_registers--;
1253 reg_from++;
1254 }
1255
1256 if (num_registers > 0) {
1257 // If there are some remaining registers, they are stored on the stack.
1258 ASSERT(reg_from >= kNumCachedRegisters);
1259
1260 // Move down the indexes of the registers on stack to get the correct offset
1261 // in memory.
1262 reg_from -= kNumCachedRegisters;
1263 reg_to -= kNumCachedRegisters;
1264 // We should not unroll the loop for less than 2 registers.
1265 STATIC_ASSERT(kNumRegistersToUnroll > 2);
1266 // We position the base pointer to (reg_from + 1).
1267 int base_offset = kFirstRegisterOnStack -
1268 kWRegSizeInBytes - (kWRegSizeInBytes * reg_from);
1269 if (num_registers > kNumRegistersToUnroll) {
1270 Register base = x10;
1271 __ Add(base, frame_pointer(), base_offset);
1272
1273 Label loop;
1274 __ Mov(x11, num_registers);
1275 __ Bind(&loop);
1276 __ Str(twice_non_position_value(),
1277 MemOperand(base, -kPointerSize, PostIndex));
1278 __ Sub(x11, x11, 2);
1279 __ Cbnz(x11, &loop);
1280 } else {
1281 for (int i = reg_from; i <= reg_to; i += 2) {
1282 __ Str(twice_non_position_value(),
1283 MemOperand(frame_pointer(), base_offset));
1284 base_offset -= kWRegSizeInBytes * 2;
1285 }
1286 }
1287 }
1288 }
1289
1290
1291 void RegExpMacroAssemblerA64::WriteStackPointerToRegister(int reg) {
1292 __ Ldr(x10, MemOperand(frame_pointer(), kStackBase));
1293 __ Sub(x10, backtrack_stackpointer(), x10);
1294 if (masm_->emit_debug_code()) {
1295 __ Cmp(x10, Operand(w10, SXTW));
1296 // The stack offset needs to fit in a W register.
1297 __ Check(eq, kOffsetOutOfRange);
1298 }
1299 StoreRegister(reg, w10);
1300 }
1301
1302
1303 // Helper function for reading a value out of a stack frame.
1304 template <typename T>
1305 static T& frame_entry(Address re_frame, int frame_offset) {
1306 return *reinterpret_cast<T*>(re_frame + frame_offset);
1307 }
1308
1309
1310 int RegExpMacroAssemblerA64::CheckStackGuardState(Address* return_address,
1311 Code* re_code,
1312 Address re_frame,
1313 int start_offset,
1314 const byte** input_start,
1315 const byte** input_end) {
1316 Isolate* isolate = frame_entry<Isolate*>(re_frame, kIsolate);
1317 if (isolate->stack_guard()->IsStackOverflow()) {
1318 isolate->StackOverflow();
1319 return EXCEPTION;
1320 }
1321
1322 // If it is not a real stack overflow, the stack guard was used to interrupt
1323 // execution for another purpose.
1324
1325 // If this is a direct call from JavaScript retry the RegExp forcing the call
1326 // through the runtime system. Currently the direct call cannot handle a GC.
1327 if (frame_entry<int>(re_frame, kDirectCall) == 1) {
1328 return RETRY;
1329 }
1330
1331 // Prepare for possible GC.
1332 HandleScope handles(isolate);
1333 Handle<Code> code_handle(re_code);
1334
1335 Handle<String> subject(frame_entry<String*>(re_frame, kInput));
1336
1337 // Current string.
1338 bool is_ascii = subject->IsOneByteRepresentationUnderneath();
1339
1340 ASSERT(re_code->instruction_start() <= *return_address);
1341 ASSERT(*return_address <=
1342 re_code->instruction_start() + re_code->instruction_size());
1343
1344 MaybeObject* result = Execution::HandleStackGuardInterrupt(isolate);
1345
1346 if (*code_handle != re_code) { // Return address no longer valid
1347 int delta = code_handle->address() - re_code->address();
1348 // Overwrite the return address on the stack.
1349 *return_address += delta;
1350 }
1351
1352 if (result->IsException()) {
1353 return EXCEPTION;
1354 }
1355
1356 Handle<String> subject_tmp = subject;
1357 int slice_offset = 0;
1358
1359 // Extract the underlying string and the slice offset.
1360 if (StringShape(*subject_tmp).IsCons()) {
1361 subject_tmp = Handle<String>(ConsString::cast(*subject_tmp)->first());
1362 } else if (StringShape(*subject_tmp).IsSliced()) {
1363 SlicedString* slice = SlicedString::cast(*subject_tmp);
1364 subject_tmp = Handle<String>(slice->parent());
1365 slice_offset = slice->offset();
1366 }
1367
1368 // String might have changed.
1369 if (subject_tmp->IsOneByteRepresentation() != is_ascii) {
1370 // If we changed between an ASCII and a UC16 string, the specialized
1371 // code cannot be used, and we need to restart regexp matching from
1372 // scratch (including, potentially, compiling a new version of the code).
1373 return RETRY;
1374 }
1375
1376 // Otherwise, the content of the string might have moved. It must still
1377 // be a sequential or external string with the same content.
1378 // Update the start and end pointers in the stack frame to the current
1379 // location (whether it has actually moved or not).
1380 ASSERT(StringShape(*subject_tmp).IsSequential() ||
1381 StringShape(*subject_tmp).IsExternal());
1382
1383 // The original start address of the characters to match.
1384 const byte* start_address = *input_start;
1385
1386 // Find the current start address of the same character at the current string
1387 // position.
1388 const byte* new_address = StringCharacterPosition(*subject_tmp,
1389 start_offset + slice_offset);
1390
1391 if (start_address != new_address) {
1392 // If there is a difference, update the object pointer and start and end
1393 // addresses in the RegExp stack frame to match the new value.
1394 const byte* end_address = *input_end;
1395 int byte_length = static_cast<int>(end_address - start_address);
1396 frame_entry<const String*>(re_frame, kInput) = *subject;
1397 *input_start = new_address;
1398 *input_end = new_address + byte_length;
1399 } else if (frame_entry<const String*>(re_frame, kInput) != *subject) {
1400 // Subject string might have been a ConsString that underwent
1401 // short-circuiting during GC. That will not change start_address but
1402 // will change the pointer inside the subject handle.
1403 frame_entry<const String*>(re_frame, kInput) = *subject;
1404 }
1405
1406 return 0;
1407 }
1408
1409
1410 void RegExpMacroAssemblerA64::CheckPosition(int cp_offset,
1411 Label* on_outside_input) {
1412 CompareAndBranchOrBacktrack(current_input_offset(),
1413 -cp_offset * char_size(),
1414 ge,
1415 on_outside_input);
1416 }
1417
1418
1419 bool RegExpMacroAssemblerA64::CanReadUnaligned() {
1420 // TODO(pielan): See whether or not we should disable unaligned accesses.
1421 return !slow_safe();
1422 }
1423
1424
1425 // Private methods:
1426
1427 void RegExpMacroAssemblerA64::CallCheckStackGuardState(Register scratch) {
1428 // Allocate space on the stack to store the return address. The
1429 // CheckStackGuardState C++ function will overwrite it if the code
1430 // has moved. Allocate extra space for 2 arguments passed by pointer.
1431 // AAPCS64 requires the stack to be 16 byte aligned.
1432 int alignment = masm_->ActivationFrameAlignment();
1433 ASSERT_EQ(alignment % 16, 0);
1434 int align_mask = (alignment / kXRegSizeInBytes) - 1;
1435 int xreg_to_claim = (3 + align_mask) & ~align_mask;
1436
1437 ASSERT(csp.Is(__ StackPointer()));
1438 __ Claim(xreg_to_claim);
1439
1440 // CheckStackGuardState needs the end and start addresses of the input string.
1441 __ Poke(input_end(), 2 * kPointerSize);
1442 __ Add(x5, csp, 2 * kPointerSize);
1443 __ Poke(input_start(), kPointerSize);
1444 __ Add(x4, csp, kPointerSize);
1445
1446 __ Mov(w3, start_offset());
1447 // RegExp code frame pointer.
1448 __ Mov(x2, frame_pointer());
1449 // Code* of self.
1450 __ Mov(x1, Operand(masm_->CodeObject()));
1451
1452 // We need to pass a pointer to the return address as first argument.
1453 // The DirectCEntry stub will place the return address on the stack before
1454 // calling so the stack pointer will point to it.
1455 __ Mov(x0, csp);
1456
1457 ExternalReference check_stack_guard_state =
1458 ExternalReference::re_check_stack_guard_state(isolate());
1459 __ Mov(scratch, Operand(check_stack_guard_state));
1460 DirectCEntryStub stub;
1461 stub.GenerateCall(masm_, scratch);
1462
1463 // The input string may have been moved in memory, so we need to reload it.
1464 __ Peek(input_start(), kPointerSize);
1465 __ Peek(input_end(), 2 * kPointerSize);
1466
1467 ASSERT(csp.Is(__ StackPointer()));
1468 __ Drop(xreg_to_claim);
1469
1470 // Reload the Code pointer.
1471 __ Mov(code_pointer(), Operand(masm_->CodeObject()));
1472 }
1473
1474 void RegExpMacroAssemblerA64::BranchOrBacktrack(Condition condition,
1475 Label* to) {
1476 if (condition == al) { // Unconditional.
1477 if (to == NULL) {
1478 Backtrack();
1479 return;
1480 }
1481 __ B(to);
1482 return;
1483 }
1484 if (to == NULL) {
1485 to = &backtrack_label_;
1486 }
1487 // TODO(ulan): do direct jump when jump distance is known and fits in imm19.
1488 Condition inverted_condition = InvertCondition(condition);
1489 Label no_branch;
1490 __ B(inverted_condition, &no_branch);
1491 __ B(to);
1492 __ Bind(&no_branch);
1493 }
1494
1495 void RegExpMacroAssemblerA64::CompareAndBranchOrBacktrack(Register reg,
1496 int immediate,
1497 Condition condition,
1498 Label* to) {
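      // Comparisons against zero for eq/ne can use Cbz/Cbnz directly and skip
      // the explicit Cmp.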
1499 if ((immediate == 0) && ((condition == eq) || (condition == ne))) {
1500 if (to == NULL) {
1501 to = &backtrack_label_;
1502 }
1503 // TODO(ulan): do direct jump when jump distance is known and fits in imm19.
1504 Label no_branch;
1505 if (condition == eq) {
1506 __ Cbnz(reg, &no_branch);
1507 } else {
1508 __ Cbz(reg, &no_branch);
1509 }
1510 __ B(to);
1511 __ Bind(&no_branch);
1512 } else {
1513 __ Cmp(reg, immediate);
1514 BranchOrBacktrack(condition, to);
1515 }
1516 }
1517
1518
1519 void RegExpMacroAssemblerA64::CheckPreemption() {
1520 // Check for preemption.
1521 ExternalReference stack_limit =
1522 ExternalReference::address_of_stack_limit(isolate());
1523 __ Mov(x10, Operand(stack_limit));
1524 __ Ldr(x10, MemOperand(x10));
1525 ASSERT(csp.Is(__ StackPointer()));
1526 __ Cmp(csp, x10);
1527 CallIf(&check_preempt_label_, ls);
1528 }
1529
1530
1531 void RegExpMacroAssemblerA64::CheckStackLimit() {
1532 ExternalReference stack_limit =
1533 ExternalReference::address_of_regexp_stack_limit(isolate());
1534 __ Mov(x10, Operand(stack_limit));
1535 __ Ldr(x10, MemOperand(x10));
1536 __ Cmp(backtrack_stackpointer(), x10);
1537 CallIf(&stack_overflow_label_, ls);
1538 }
1539
1540
1541 void RegExpMacroAssemblerA64::Push(Register source) {
1542 ASSERT(source.Is32Bits());
1543 ASSERT(!source.is(backtrack_stackpointer()));
1544 __ Str(source,
1545 MemOperand(backtrack_stackpointer(),
1546 -static_cast<int>(kWRegSizeInBytes),
1547 PreIndex));
1548 }
1549
1550
1551 void RegExpMacroAssemblerA64::Pop(Register target) {
1552 ASSERT(target.Is32Bits());
1553 ASSERT(!target.is(backtrack_stackpointer()));
1554 __ Ldr(target,
1555 MemOperand(backtrack_stackpointer(), kWRegSizeInBytes, PostIndex));
1556 }
1557
1558
1559 Register RegExpMacroAssemblerA64::GetCachedRegister(int register_index) {
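      // RegExp registers 0 to kNumCachedRegisters-1 are packed two per X
      // register: register i lives in the low (even i) or high (odd i) 32 bits
      // of x(i/2).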
1560 ASSERT(register_index < kNumCachedRegisters);
1561 return Register::Create(register_index / 2, kXRegSize);
1562 }
1563
1564
1565 Register RegExpMacroAssemblerA64::GetRegister(int register_index,
1566 Register maybe_result) {
1567 ASSERT(maybe_result.Is32Bits());
1568 ASSERT(register_index >= 0);
1569 if (num_registers_ <= register_index) {
1570 num_registers_ = register_index + 1;
1571 }
1572 Register result;
1573 RegisterState register_state = GetRegisterState(register_index);
1574 switch (register_state) {
1575 case STACKED:
1576 __ Ldr(maybe_result, register_location(register_index));
1577 result = maybe_result;
1578 break;
1579 case CACHED_LSW:
1580 result = GetCachedRegister(register_index).W();
1581 break;
1582 case CACHED_MSW:
1583 __ Lsr(maybe_result.X(), GetCachedRegister(register_index), kWRegSize);
1584 result = maybe_result;
1585 break;
1586 default:
1587 UNREACHABLE();
1588 break;
1589 }
1590 ASSERT(result.Is32Bits());
1591 return result;
1592 }
1593
1594
1595 void RegExpMacroAssemblerA64::StoreRegister(int register_index,
1596 Register source) {
1597 ASSERT(source.Is32Bits());
1598 ASSERT(register_index >= 0);
1599 if (num_registers_ <= register_index) {
1600 num_registers_ = register_index + 1;
1601 }
1602
1603 Register cached_register;
1604 RegisterState register_state = GetRegisterState(register_index);
1605 switch (register_state) {
1606 case STACKED:
1607 __ Str(source, register_location(register_index));
1608 break;
1609 case CACHED_LSW:
1610 cached_register = GetCachedRegister(register_index);
1611 if (!source.Is(cached_register.W())) {
1612 __ Bfi(cached_register, source.X(), 0, kWRegSize);
1613 }
1614 break;
1615 case CACHED_MSW:
1616 cached_register = GetCachedRegister(register_index);
1617 __ Bfi(cached_register, source.X(), kWRegSize, kWRegSize);
1618 break;
1619 default:
1620 UNREACHABLE();
1621 break;
1622 }
1623 }
1624
1625
1626 void RegExpMacroAssemblerA64::CallIf(Label* to, Condition condition) {
1627 Label skip_call;
1628 if (condition != al) __ B(&skip_call, InvertCondition(condition));
1629 __ Bl(to);
1630 __ Bind(&skip_call);
1631 }
1632
1633
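 // The link register is saved as an offset from the code object rather than
 // as an absolute address, presumably so the saved value stays correct if the
 // GC moves the Code object while C++ code runs; RestoreLinkRegister() adds
 // the code object base back before returning.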
1634 void RegExpMacroAssemblerA64::RestoreLinkRegister() {
1635 ASSERT(csp.Is(__ StackPointer()));
1636 __ Pop(lr, xzr);
1637 __ Add(lr, lr, Operand(masm_->CodeObject()));
1638 }
1639
1640
1641 void RegExpMacroAssemblerA64::SaveLinkRegister() {
1642 ASSERT(csp.Is(__ StackPointer()));
1643 __ Sub(lr, lr, Operand(masm_->CodeObject()));
1644 __ Push(xzr, lr);
1645 }
1646
1647
1648 MemOperand RegExpMacroAssemblerA64::register_location(int register_index) {
1649 ASSERT(register_index < (1<<30));
1650 ASSERT(register_index >= kNumCachedRegisters);
1651 if (num_registers_ <= register_index) {
1652 num_registers_ = register_index + 1;
1653 }
1654 register_index -= kNumCachedRegisters;
1655 int offset = kFirstRegisterOnStack - register_index * kWRegSizeInBytes;
1656 return MemOperand(frame_pointer(), offset);
1657 }
1658
1659 MemOperand RegExpMacroAssemblerA64::capture_location(int register_index,
1660 Register scratch) {
1661 ASSERT(register_index < (1<<30));
1662 ASSERT(register_index < num_saved_registers_);
1663 ASSERT(register_index >= kNumCachedRegisters);
1664 ASSERT_EQ(register_index % 2, 0);
1665 register_index -= kNumCachedRegisters;
1666 int offset = kFirstCaptureOnStack - register_index * kWRegSizeInBytes;
1667 // capture_location is used with Stp instructions to load/store 2 registers.
1668 // The immediate field in the encoding is limited to 7 bits (signed).
1669 if (is_int7(offset)) {
1670 return MemOperand(frame_pointer(), offset);
1671 } else {
1672 __ Add(scratch, frame_pointer(), offset);
1673 return MemOperand(scratch);
1674 }
1675 }
1676
1677 void RegExpMacroAssemblerA64::LoadCurrentCharacterUnchecked(int cp_offset,
1678 int characters) {
1679 Register offset = current_input_offset();
1680
1681 // The ldr, str, ldrh, strh instructions can do unaligned accesses, if the CPU
1682 // and the operating system running on the target allow it.
1683 // If unaligned load/stores are not supported then this function must only
1684 // be used to load a single character at a time.
1685
1686 // ARMv8 supports unaligned accesses but V8 or the kernel can decide to
1687 // disable them.
1688 // TODO(pielan): See whether or not we should disable unaligned accesses.
1689 if (!CanReadUnaligned()) {
1690 ASSERT(characters == 1);
1691 }
1692
1693 if (cp_offset != 0) {
1694 if (masm_->emit_debug_code()) {
1695 __ Mov(x10, cp_offset * char_size());
1696 __ Add(x10, x10, Operand(current_input_offset(), SXTW));
1697 __ Cmp(x10, Operand(w10, SXTW));
1698 // The offset needs to fit in a W register.
1699 __ Check(eq, kOffsetOutOfRange);
1700 } else {
1701 __ Add(w10, current_input_offset(), cp_offset * char_size());
1702 }
1703 offset = w10;
1704 }
1705
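      // When more than one character is requested, the characters are loaded
      // into current_character() as a single packed value (up to 4 one-byte or
      // 2 two-byte characters per load).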
1706 if (mode_ == ASCII) {
1707 if (characters == 4) {
1708 __ Ldr(current_character(), MemOperand(input_end(), offset, SXTW));
1709 } else if (characters == 2) {
1710 __ Ldrh(current_character(), MemOperand(input_end(), offset, SXTW));
1711 } else {
1712 ASSERT(characters == 1);
1713 __ Ldrb(current_character(), MemOperand(input_end(), offset, SXTW));
1714 }
1715 } else {
1716 ASSERT(mode_ == UC16);
1717 if (characters == 2) {
1718 __ Ldr(current_character(), MemOperand(input_end(), offset, SXTW));
1719 } else {
1720 ASSERT(characters == 1);
1721 __ Ldrh(current_character(), MemOperand(input_end(), offset, SXTW));
1722 }
1723 }
1724 }
1725
1726 #endif // V8_INTERPRETED_REGEXP
1727
1728 }} // namespace v8::internal
1729
1730 #endif // V8_TARGET_ARCH_A64