Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(39)

Unified Diff: src/arm64/code-stubs-arm64.cc

Issue 1695633003: [runtime] Turn ArgumentAccessStub into FastNewSloppyArgumentsStub. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Fix MIPS dead code Created 4 years, 10 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View side-by-side diff with in-line comments
Download patch
« no previous file with comments | « src/arm/interface-descriptors-arm.cc ('k') | src/arm64/interface-descriptors-arm64.cc » ('j') | no next file with comments »
Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
Index: src/arm64/code-stubs-arm64.cc
diff --git a/src/arm64/code-stubs-arm64.cc b/src/arm64/code-stubs-arm64.cc
index 124bc536a2583239aa209bb12122a89fd1901b87..b9eaa286010238016df1356c881b98a40570f690 100644
--- a/src/arm64/code-stubs-arm64.cc
+++ b/src/arm64/code-stubs-arm64.cc
@@ -1626,749 +1626,448 @@ void InstanceOfStub::Generate(MacroAssembler* masm) {
}
-void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
- // x1 : function
- // x2 : number of parameters (tagged)
- // x3 : parameters pointer
+void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) {
+ // Return address is in lr.
+ Label slow;
- DCHECK(x1.is(ArgumentsAccessNewDescriptor::function()));
- DCHECK(x2.is(ArgumentsAccessNewDescriptor::parameter_count()));
- DCHECK(x3.is(ArgumentsAccessNewDescriptor::parameter_pointer()));
+ Register receiver = LoadDescriptor::ReceiverRegister();
+ Register key = LoadDescriptor::NameRegister();
- // Check if the calling frame is an arguments adaptor frame.
- Label runtime;
- Register caller_fp = x10;
- __ Ldr(caller_fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
- // Load and untag the context.
- __ Ldr(w11, UntagSmiMemOperand(caller_fp,
- StandardFrameConstants::kContextOffset));
- __ Cmp(w11, StackFrame::ARGUMENTS_ADAPTOR);
- __ B(ne, &runtime);
+ // Check that the key is an array index, that is Uint32.
+ __ TestAndBranchIfAnySet(key, kSmiTagMask | kSmiSignMask, &slow);
- // Patch the arguments.length and parameters pointer in the current frame.
- __ Ldr(x2,
- MemOperand(caller_fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
- __ Add(x3, caller_fp, Operand::UntagSmiAndScale(x2, kPointerSizeLog2));
- __ Add(x3, x3, StandardFrameConstants::kCallerSPOffset);
+ // Everything is fine, call runtime.
+ __ Push(receiver, key);
+ __ TailCallRuntime(Runtime::kLoadElementWithInterceptor);
- __ Bind(&runtime);
- __ Push(x1, x3, x2);
- __ TailCallRuntime(Runtime::kNewSloppyArguments);
+ __ Bind(&slow);
+ PropertyAccessCompiler::TailCallBuiltin(
+ masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
}
-void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
- // x1 : function
- // x2 : number of parameters (tagged)
- // x3 : parameters pointer
- //
- // Returns pointer to result object in x0.
-
- DCHECK(x1.is(ArgumentsAccessNewDescriptor::function()));
- DCHECK(x2.is(ArgumentsAccessNewDescriptor::parameter_count()));
- DCHECK(x3.is(ArgumentsAccessNewDescriptor::parameter_pointer()));
-
- // Make an untagged copy of the parameter count.
- // Note: arg_count_smi is an alias of param_count_smi.
- Register function = x1;
- Register arg_count_smi = x2;
- Register param_count_smi = x2;
- Register recv_arg = x3;
- Register param_count = x7;
- __ SmiUntag(param_count, param_count_smi);
+void RegExpExecStub::Generate(MacroAssembler* masm) {
+#ifdef V8_INTERPRETED_REGEXP
+ __ TailCallRuntime(Runtime::kRegExpExec);
+#else // V8_INTERPRETED_REGEXP
- // Check if the calling frame is an arguments adaptor frame.
- Register caller_fp = x11;
- Register caller_ctx = x12;
+ // Stack frame on entry.
+ // jssp[0]: last_match_info (expected JSArray)
+ // jssp[8]: previous index
+ // jssp[16]: subject string
+ // jssp[24]: JSRegExp object
Label runtime;
- Label adaptor_frame, try_allocate;
- __ Ldr(caller_fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
- __ Ldr(caller_ctx, MemOperand(caller_fp,
- StandardFrameConstants::kContextOffset));
- __ Cmp(caller_ctx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
- __ B(eq, &adaptor_frame);
-
- // No adaptor, parameter count = argument count.
-
- // x1 function function pointer
- // x2 arg_count_smi number of function arguments (smi)
- // x3 recv_arg pointer to receiver arguments
- // x4 mapped_params number of mapped params, min(params, args) (uninit)
- // x7 param_count number of function parameters
- // x11 caller_fp caller's frame pointer
- // x14 arg_count number of function arguments (uninit)
- Register arg_count = x14;
- Register mapped_params = x4;
- __ Mov(arg_count, param_count);
- __ Mov(mapped_params, param_count);
- __ B(&try_allocate);
+ // Use of registers for this function.
- // We have an adaptor frame. Patch the parameters pointer.
- __ Bind(&adaptor_frame);
- __ Ldr(arg_count_smi,
- MemOperand(caller_fp,
- ArgumentsAdaptorFrameConstants::kLengthOffset));
- __ SmiUntag(arg_count, arg_count_smi);
- __ Add(x10, caller_fp, Operand(arg_count, LSL, kPointerSizeLog2));
- __ Add(recv_arg, x10, StandardFrameConstants::kCallerSPOffset);
+ // Variable registers:
+ // x10-x13 used as scratch registers
+ // w0 string_type type of subject string
+ // x2 jsstring_length subject string length
+ // x3 jsregexp_object JSRegExp object
+ // w4 string_encoding Latin1 or UC16
+ // w5 sliced_string_offset if the string is a SlicedString
+ // offset to the underlying string
+ // w6 string_representation groups attributes of the string:
+ // - is a string
+ // - type of the string
+ // - is a short external string
+ Register string_type = w0;
+ Register jsstring_length = x2;
+ Register jsregexp_object = x3;
+ Register string_encoding = w4;
+ Register sliced_string_offset = w5;
+ Register string_representation = w6;
- // Compute the mapped parameter count = min(param_count, arg_count)
- __ Cmp(param_count, arg_count);
- __ Csel(mapped_params, param_count, arg_count, lt);
+ // These are in callee save registers and will be preserved by the call
+ // to the native RegExp code, as this code is called using the normal
+ // C calling convention. When calling directly from generated code the
+ // native RegExp code will not do a GC and therefore the contents of
+ // these registers are safe to use after the call.
- __ Bind(&try_allocate);
+ // x19 subject subject string
+ // x20 regexp_data RegExp data (FixedArray)
+ // x21 last_match_info_elements info relative to the last match
+ // (FixedArray)
+ // x22 code_object generated regexp code
+ Register subject = x19;
+ Register regexp_data = x20;
+ Register last_match_info_elements = x21;
+ Register code_object = x22;
- // x0 alloc_obj pointer to allocated objects: param map, backing
- // store, arguments (uninit)
- // x1 function function pointer
- // x2 arg_count_smi number of function arguments (smi)
- // x3 recv_arg pointer to receiver arguments
- // x4 mapped_params number of mapped parameters, min(params, args)
- // x7 param_count number of function parameters
- // x10 size size of objects to allocate (uninit)
- // x14 arg_count number of function arguments
+ // Stack frame.
+ // jssp[00]: last_match_info (JSArray)
+ // jssp[08]: previous index
+ // jssp[16]: subject string
+ // jssp[24]: JSRegExp object
- // Compute the size of backing store, parameter map, and arguments object.
- // 1. Parameter map, has two extra words containing context and backing
- // store.
- const int kParameterMapHeaderSize =
- FixedArray::kHeaderSize + 2 * kPointerSize;
+ const int kLastMatchInfoOffset = 0 * kPointerSize;
+ const int kPreviousIndexOffset = 1 * kPointerSize;
+ const int kSubjectOffset = 2 * kPointerSize;
+ const int kJSRegExpOffset = 3 * kPointerSize;
- // Calculate the parameter map size, assuming it exists.
- Register size = x10;
- __ Mov(size, Operand(mapped_params, LSL, kPointerSizeLog2));
- __ Add(size, size, kParameterMapHeaderSize);
+ // Ensure that a RegExp stack is allocated.
+ ExternalReference address_of_regexp_stack_memory_address =
+ ExternalReference::address_of_regexp_stack_memory_address(isolate());
+ ExternalReference address_of_regexp_stack_memory_size =
+ ExternalReference::address_of_regexp_stack_memory_size(isolate());
+ __ Mov(x10, address_of_regexp_stack_memory_size);
+ __ Ldr(x10, MemOperand(x10));
+ __ Cbz(x10, &runtime);
- // If there are no mapped parameters, set the running size total to zero.
- // Otherwise, use the parameter map size calculated earlier.
- __ Cmp(mapped_params, 0);
- __ CzeroX(size, eq);
+ // Check that the first argument is a JSRegExp object.
+ DCHECK(jssp.Is(__ StackPointer()));
+ __ Peek(jsregexp_object, kJSRegExpOffset);
+ __ JumpIfSmi(jsregexp_object, &runtime);
+ __ JumpIfNotObjectType(jsregexp_object, x10, x10, JS_REGEXP_TYPE, &runtime);
- // 2. Add the size of the backing store and arguments object.
- __ Add(size, size, Operand(arg_count, LSL, kPointerSizeLog2));
- __ Add(size, size, FixedArray::kHeaderSize + JSSloppyArgumentsObject::kSize);
+ // Check that the RegExp has been compiled (data contains a fixed array).
+ __ Ldr(regexp_data, FieldMemOperand(jsregexp_object, JSRegExp::kDataOffset));
+ if (FLAG_debug_code) {
+ STATIC_ASSERT(kSmiTag == 0);
+ __ Tst(regexp_data, kSmiTagMask);
+ __ Check(ne, kUnexpectedTypeForRegExpDataFixedArrayExpected);
+ __ CompareObjectType(regexp_data, x10, x10, FIXED_ARRAY_TYPE);
+ __ Check(eq, kUnexpectedTypeForRegExpDataFixedArrayExpected);
+ }
- // Do the allocation of all three objects in one go. Assign this to x0, as it
- // will be returned to the caller.
- Register alloc_obj = x0;
- __ Allocate(size, alloc_obj, x11, x12, &runtime, TAG_OBJECT);
+ // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
+ __ Ldr(x10, FieldMemOperand(regexp_data, JSRegExp::kDataTagOffset));
+ __ Cmp(x10, Smi::FromInt(JSRegExp::IRREGEXP));
+ __ B(ne, &runtime);
- // Get the arguments boilerplate from the current (global) context.
+ // Check that the number of captures fit in the static offsets vector buffer.
+ // We always have at least one capture for the whole match, plus additional
+ // ones due to capturing parentheses. A capture takes 2 registers.
+ // The number of capture registers then is (number_of_captures + 1) * 2.
+ __ Ldrsw(x10,
+ UntagSmiFieldMemOperand(regexp_data,
+ JSRegExp::kIrregexpCaptureCountOffset));
+ // Check (number_of_captures + 1) * 2 <= offsets vector size
+ // number_of_captures * 2 <= offsets vector size - 2
+ STATIC_ASSERT(Isolate::kJSRegexpStaticOffsetsVectorSize >= 2);
+ __ Add(x10, x10, x10);
+ __ Cmp(x10, Isolate::kJSRegexpStaticOffsetsVectorSize - 2);
+ __ B(hi, &runtime);
- // x0 alloc_obj pointer to allocated objects (param map, backing
- // store, arguments)
- // x1 function function pointer
- // x2 arg_count_smi number of function arguments (smi)
- // x3 recv_arg pointer to receiver arguments
- // x4 mapped_params number of mapped parameters, min(params, args)
- // x7 param_count number of function parameters
- // x11 sloppy_args_map offset to args (or aliased args) map (uninit)
- // x14 arg_count number of function arguments
+ // Initialize offset for possibly sliced string.
+ __ Mov(sliced_string_offset, 0);
- Register global_ctx = x10;
- Register sloppy_args_map = x11;
- Register aliased_args_map = x10;
- __ Ldr(global_ctx, NativeContextMemOperand());
+ DCHECK(jssp.Is(__ StackPointer()));
+ __ Peek(subject, kSubjectOffset);
+ __ JumpIfSmi(subject, &runtime);
- __ Ldr(sloppy_args_map,
- ContextMemOperand(global_ctx, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
- __ Ldr(
- aliased_args_map,
- ContextMemOperand(global_ctx, Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX));
- __ Cmp(mapped_params, 0);
- __ CmovX(sloppy_args_map, aliased_args_map, ne);
+ __ Ldr(x10, FieldMemOperand(subject, HeapObject::kMapOffset));
+ __ Ldrb(string_type, FieldMemOperand(x10, Map::kInstanceTypeOffset));
- // Copy the JS object part.
- __ Str(sloppy_args_map, FieldMemOperand(alloc_obj, JSObject::kMapOffset));
- __ LoadRoot(x10, Heap::kEmptyFixedArrayRootIndex);
- __ Str(x10, FieldMemOperand(alloc_obj, JSObject::kPropertiesOffset));
- __ Str(x10, FieldMemOperand(alloc_obj, JSObject::kElementsOffset));
+ __ Ldr(jsstring_length, FieldMemOperand(subject, String::kLengthOffset));
- // Set up the callee in-object property.
- __ AssertNotSmi(function);
- __ Str(function,
- FieldMemOperand(alloc_obj, JSSloppyArgumentsObject::kCalleeOffset));
+ // Handle subject string according to its encoding and representation:
+ // (1) Sequential string? If yes, go to (5).
+ // (2) Anything but sequential or cons? If yes, go to (6).
+ // (3) Cons string. If the string is flat, replace subject with first string.
+ // Otherwise bailout.
+ // (4) Is subject external? If yes, go to (7).
+ // (5) Sequential string. Load regexp code according to encoding.
+ // (E) Carry on.
+ /// [...]
- // Use the length and set that as an in-object property.
- __ Str(arg_count_smi,
- FieldMemOperand(alloc_obj, JSSloppyArgumentsObject::kLengthOffset));
+ // Deferred code at the end of the stub:
+ // (6) Not a long external string? If yes, go to (8).
+ // (7) External string. Make it, offset-wise, look like a sequential string.
+ // Go to (5).
+ // (8) Short external string or not a string? If yes, bail out to runtime.
+ // (9) Sliced string. Replace subject with parent. Go to (4).
- // Set up the elements pointer in the allocated arguments object.
- // If we allocated a parameter map, "elements" will point there, otherwise
- // it will point to the backing store.
+ Label check_underlying; // (4)
+ Label seq_string; // (5)
+ Label not_seq_nor_cons; // (6)
+ Label external_string; // (7)
+ Label not_long_external; // (8)
- // x0 alloc_obj pointer to allocated objects (param map, backing
- // store, arguments)
- // x1 function function pointer
- // x2 arg_count_smi number of function arguments (smi)
- // x3 recv_arg pointer to receiver arguments
- // x4 mapped_params number of mapped parameters, min(params, args)
- // x5 elements pointer to parameter map or backing store (uninit)
- // x6 backing_store pointer to backing store (uninit)
- // x7 param_count number of function parameters
- // x14 arg_count number of function arguments
-
- Register elements = x5;
- __ Add(elements, alloc_obj, JSSloppyArgumentsObject::kSize);
- __ Str(elements, FieldMemOperand(alloc_obj, JSObject::kElementsOffset));
+ // (1) Sequential string? If yes, go to (5).
+ __ And(string_representation,
+ string_type,
+ kIsNotStringMask |
+ kStringRepresentationMask |
+ kShortExternalStringMask);
+ // We depend on the fact that Strings of type
+ // SeqString and not ShortExternalString are defined
+ // by the following pattern:
+ // string_type: 0XX0 XX00
+ // ^ ^ ^^
+ // | | ||
+ // | | is a SeqString
+ // | is not a short external String
+ // is a String
+ STATIC_ASSERT((kStringTag | kSeqStringTag) == 0);
+ STATIC_ASSERT(kShortExternalStringTag != 0);
+ __ Cbz(string_representation, &seq_string); // Go to (5).
- // Initialize parameter map. If there are no mapped arguments, we're done.
- Label skip_parameter_map;
- __ Cmp(mapped_params, 0);
- // Set up backing store address, because it is needed later for filling in
- // the unmapped arguments.
- Register backing_store = x6;
- __ CmovX(backing_store, elements, eq);
- __ B(eq, &skip_parameter_map);
+ // (2) Anything but sequential or cons? If yes, go to (6).
+ STATIC_ASSERT(kConsStringTag < kExternalStringTag);
+ STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
+ STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
+ STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
+ __ Cmp(string_representation, kExternalStringTag);
+ __ B(ge, &not_seq_nor_cons); // Go to (6).
- __ LoadRoot(x10, Heap::kSloppyArgumentsElementsMapRootIndex);
- __ Str(x10, FieldMemOperand(elements, FixedArray::kMapOffset));
- __ Add(x10, mapped_params, 2);
- __ SmiTag(x10);
- __ Str(x10, FieldMemOperand(elements, FixedArray::kLengthOffset));
- __ Str(cp, FieldMemOperand(elements,
- FixedArray::kHeaderSize + 0 * kPointerSize));
- __ Add(x10, elements, Operand(mapped_params, LSL, kPointerSizeLog2));
- __ Add(x10, x10, kParameterMapHeaderSize);
- __ Str(x10, FieldMemOperand(elements,
- FixedArray::kHeaderSize + 1 * kPointerSize));
+ // (3) Cons string. Check that it's flat.
+ __ Ldr(x10, FieldMemOperand(subject, ConsString::kSecondOffset));
+ __ JumpIfNotRoot(x10, Heap::kempty_stringRootIndex, &runtime);
+ // Replace subject with first string.
+ __ Ldr(subject, FieldMemOperand(subject, ConsString::kFirstOffset));
- // Copy the parameter slots and the holes in the arguments.
- // We need to fill in mapped_parameter_count slots. Then index the context,
- // where parameters are stored in reverse order, at:
- //
- // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS + parameter_count - 1
- //
- // The mapped parameter thus needs to get indices:
- //
- // MIN_CONTEXT_SLOTS + parameter_count - 1 ..
- // MIN_CONTEXT_SLOTS + parameter_count - mapped_parameter_count
- //
- // We loop from right to left.
+ // (4) Is subject external? If yes, go to (7).
+ __ Bind(&check_underlying);
+ // Reload the string type.
+ __ Ldr(x10, FieldMemOperand(subject, HeapObject::kMapOffset));
+ __ Ldrb(string_type, FieldMemOperand(x10, Map::kInstanceTypeOffset));
+ STATIC_ASSERT(kSeqStringTag == 0);
+ // The underlying external string is never a short external string.
+ STATIC_ASSERT(ExternalString::kMaxShortLength < ConsString::kMinLength);
+ STATIC_ASSERT(ExternalString::kMaxShortLength < SlicedString::kMinLength);
+ __ TestAndBranchIfAnySet(string_type.X(),
+ kStringRepresentationMask,
+ &external_string); // Go to (7).
- // x0 alloc_obj pointer to allocated objects (param map, backing
- // store, arguments)
- // x1 function function pointer
- // x2 arg_count_smi number of function arguments (smi)
- // x3 recv_arg pointer to receiver arguments
- // x4 mapped_params number of mapped parameters, min(params, args)
- // x5 elements pointer to parameter map or backing store (uninit)
- // x6 backing_store pointer to backing store (uninit)
- // x7 param_count number of function parameters
- // x11 loop_count parameter loop counter (uninit)
- // x12 index parameter index (smi, uninit)
- // x13 the_hole hole value (uninit)
- // x14 arg_count number of function arguments
+ // (5) Sequential string. Load regexp code according to encoding.
+ __ Bind(&seq_string);
- Register loop_count = x11;
- Register index = x12;
- Register the_hole = x13;
- Label parameters_loop, parameters_test;
- __ Mov(loop_count, mapped_params);
- __ Add(index, param_count, static_cast<int>(Context::MIN_CONTEXT_SLOTS));
- __ Sub(index, index, mapped_params);
- __ SmiTag(index);
- __ LoadRoot(the_hole, Heap::kTheHoleValueRootIndex);
- __ Add(backing_store, elements, Operand(loop_count, LSL, kPointerSizeLog2));
- __ Add(backing_store, backing_store, kParameterMapHeaderSize);
+ // Check that the third argument is a positive smi less than the subject
+ // string length. A negative value will be greater (unsigned comparison).
+ DCHECK(jssp.Is(__ StackPointer()));
+ __ Peek(x10, kPreviousIndexOffset);
+ __ JumpIfNotSmi(x10, &runtime);
+ __ Cmp(jsstring_length, x10);
+ __ B(ls, &runtime);
- __ B(&parameters_test);
+ // Argument 2 (x1): We need to load argument 2 (the previous index) into x1
+ // before entering the exit frame.
+ __ SmiUntag(x1, x10);
- __ Bind(&parameters_loop);
- __ Sub(loop_count, loop_count, 1);
- __ Mov(x10, Operand(loop_count, LSL, kPointerSizeLog2));
- __ Add(x10, x10, kParameterMapHeaderSize - kHeapObjectTag);
- __ Str(index, MemOperand(elements, x10));
- __ Sub(x10, x10, kParameterMapHeaderSize - FixedArray::kHeaderSize);
- __ Str(the_hole, MemOperand(backing_store, x10));
- __ Add(index, index, Smi::FromInt(1));
- __ Bind(&parameters_test);
- __ Cbnz(loop_count, &parameters_loop);
+ // The third bit determines the string encoding in string_type.
+ STATIC_ASSERT(kOneByteStringTag == 0x04);
+ STATIC_ASSERT(kTwoByteStringTag == 0x00);
+ STATIC_ASSERT(kStringEncodingMask == 0x04);
- __ Bind(&skip_parameter_map);
- // Copy arguments header and remaining slots (if there are any.)
- __ LoadRoot(x10, Heap::kFixedArrayMapRootIndex);
- __ Str(x10, FieldMemOperand(backing_store, FixedArray::kMapOffset));
- __ Str(arg_count_smi, FieldMemOperand(backing_store,
- FixedArray::kLengthOffset));
+ // Find the code object based on the assumptions above.
+ // kDataOneByteCodeOffset and kDataUC16CodeOffset are adjacent; add an offset
+ // of kPointerSize to reach the latter.
+ STATIC_ASSERT(JSRegExp::kDataOneByteCodeOffset + kPointerSize ==
+ JSRegExp::kDataUC16CodeOffset);
+ __ Mov(x10, kPointerSize);
+ // We will need the encoding later: Latin1 = 0x04
+ // UC16 = 0x00
+ __ Ands(string_encoding, string_type, kStringEncodingMask);
+ __ CzeroX(x10, ne);
+ __ Add(x10, regexp_data, x10);
+ __ Ldr(code_object, FieldMemOperand(x10, JSRegExp::kDataOneByteCodeOffset));
- // x0 alloc_obj pointer to allocated objects (param map, backing
- // store, arguments)
- // x1 function function pointer
- // x2 arg_count_smi number of function arguments (smi)
- // x3 recv_arg pointer to receiver arguments
- // x4 mapped_params number of mapped parameters, min(params, args)
- // x6 backing_store pointer to backing store (uninit)
- // x14 arg_count number of function arguments
+ // (E) Carry on. String handling is done.
- Label arguments_loop, arguments_test;
- __ Mov(x10, mapped_params);
- __ Sub(recv_arg, recv_arg, Operand(x10, LSL, kPointerSizeLog2));
- __ B(&arguments_test);
+ // Check that the irregexp code has been generated for the actual string
+ // encoding. If it has, the field contains a code object otherwise it contains
+ // a smi (code flushing support).
+ __ JumpIfSmi(code_object, &runtime);
- __ Bind(&arguments_loop);
- __ Sub(recv_arg, recv_arg, kPointerSize);
- __ Ldr(x11, MemOperand(recv_arg));
- __ Add(x12, backing_store, Operand(x10, LSL, kPointerSizeLog2));
- __ Str(x11, FieldMemOperand(x12, FixedArray::kHeaderSize));
- __ Add(x10, x10, 1);
+ // All checks done. Now push arguments for native regexp code.
+ __ IncrementCounter(isolate()->counters()->regexp_entry_native(), 1,
+ x10,
+ x11);
- __ Bind(&arguments_test);
- __ Cmp(x10, arg_count);
- __ B(lt, &arguments_loop);
+ // Isolates: note we add an additional parameter here (isolate pointer).
+ __ EnterExitFrame(false, x10, 1);
+ DCHECK(csp.Is(__ StackPointer()));
- __ Ret();
+ // We have 9 arguments to pass to the regexp code, therefore we have to pass
+ // one on the stack and the rest as registers.
- // Do the runtime call to allocate the arguments object.
- __ Bind(&runtime);
- __ Push(function, recv_arg, arg_count_smi);
- __ TailCallRuntime(Runtime::kNewSloppyArguments);
-}
+ // Note that the placement of the argument on the stack isn't standard
+ // AAPCS64:
+ // csp[0]: Space for the return address placed by DirectCEntryStub.
+ // csp[8]: Argument 9, the current isolate address.
+ __ Mov(x10, ExternalReference::isolate_address(isolate()));
+ __ Poke(x10, kPointerSize);
-void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) {
- // Return address is in lr.
- Label slow;
+ Register length = w11;
+ Register previous_index_in_bytes = w12;
+ Register start = x13;
- Register receiver = LoadDescriptor::ReceiverRegister();
- Register key = LoadDescriptor::NameRegister();
+ // Load start of the subject string.
+ __ Add(start, subject, SeqString::kHeaderSize - kHeapObjectTag);
+ // Load the length from the original subject string from the previous stack
+ // frame. Therefore we have to use fp, which points exactly to two pointer
+ // sizes below the previous sp. (Because creating a new stack frame pushes
+ // the previous fp onto the stack and decrements sp by 2 * kPointerSize.)
+ __ Ldr(subject, MemOperand(fp, kSubjectOffset + 2 * kPointerSize));
+ __ Ldr(length, UntagSmiFieldMemOperand(subject, String::kLengthOffset));
- // Check that the key is an array index, that is Uint32.
- __ TestAndBranchIfAnySet(key, kSmiTagMask | kSmiSignMask, &slow);
+ // Handle UC16 encoding, two bytes make one character.
+ // string_encoding: if Latin1: 0x04
+ // if UC16: 0x00
+ STATIC_ASSERT(kStringEncodingMask == 0x04);
+ __ Ubfx(string_encoding, string_encoding, 2, 1);
+ __ Eor(string_encoding, string_encoding, 1);
+ // string_encoding: if Latin1: 0
+ // if UC16: 1
- // Everything is fine, call runtime.
- __ Push(receiver, key);
- __ TailCallRuntime(Runtime::kLoadElementWithInterceptor);
+ // Convert string positions from characters to bytes.
+ // Previous index is in x1.
+ __ Lsl(previous_index_in_bytes, w1, string_encoding);
+ __ Lsl(length, length, string_encoding);
+ __ Lsl(sliced_string_offset, sliced_string_offset, string_encoding);
- __ Bind(&slow);
- PropertyAccessCompiler::TailCallBuiltin(
- masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
-}
+ // Argument 1 (x0): Subject string.
+ __ Mov(x0, subject);
+ // Argument 2 (x1): Previous index, already there.
-void RegExpExecStub::Generate(MacroAssembler* masm) {
-#ifdef V8_INTERPRETED_REGEXP
- __ TailCallRuntime(Runtime::kRegExpExec);
-#else // V8_INTERPRETED_REGEXP
+ // Argument 3 (x2): Get the start of input.
+ // Start of input = start of string + previous index + substring offset
+ // (0 if the string
+ // is not sliced).
+ __ Add(w10, previous_index_in_bytes, sliced_string_offset);
+ __ Add(x2, start, Operand(w10, UXTW));
- // Stack frame on entry.
- // jssp[0]: last_match_info (expected JSArray)
- // jssp[8]: previous index
- // jssp[16]: subject string
- // jssp[24]: JSRegExp object
- Label runtime;
-
- // Use of registers for this function.
-
- // Variable registers:
- // x10-x13 used as scratch registers
- // w0 string_type type of subject string
- // x2 jsstring_length subject string length
- // x3 jsregexp_object JSRegExp object
- // w4 string_encoding Latin1 or UC16
- // w5 sliced_string_offset if the string is a SlicedString
- // offset to the underlying string
- // w6 string_representation groups attributes of the string:
- // - is a string
- // - type of the string
- // - is a short external string
- Register string_type = w0;
- Register jsstring_length = x2;
- Register jsregexp_object = x3;
- Register string_encoding = w4;
- Register sliced_string_offset = w5;
- Register string_representation = w6;
+ // Argument 4 (x3):
+ // End of input = start of input + (length of input - previous index)
+ __ Sub(w10, length, previous_index_in_bytes);
+ __ Add(x3, x2, Operand(w10, UXTW));
- // These are in callee save registers and will be preserved by the call
- // to the native RegExp code, as this code is called using the normal
- // C calling convention. When calling directly from generated code the
- // native RegExp code will not do a GC and therefore the content of
- // these registers are safe to use after the call.
+ // Argument 5 (x4): static offsets vector buffer.
+ __ Mov(x4, ExternalReference::address_of_static_offsets_vector(isolate()));
- // x19 subject subject string
- // x20 regexp_data RegExp data (FixedArray)
- // x21 last_match_info_elements info relative to the last match
- // (FixedArray)
- // x22 code_object generated regexp code
- Register subject = x19;
- Register regexp_data = x20;
- Register last_match_info_elements = x21;
- Register code_object = x22;
+ // Argument 6 (x5): Set the number of capture registers to zero to force
+ // global regexps to behave as non-global. This stub is not used for global
+ // regexps.
+ __ Mov(x5, 0);
- // Stack frame.
- // jssp[00]: last_match_info (JSArray)
- // jssp[08]: previous index
- // jssp[16]: subject string
- // jssp[24]: JSRegExp object
+ // Argument 7 (x6): Start (high end) of backtracking stack memory area.
+ __ Mov(x10, address_of_regexp_stack_memory_address);
+ __ Ldr(x10, MemOperand(x10));
+ __ Mov(x11, address_of_regexp_stack_memory_size);
+ __ Ldr(x11, MemOperand(x11));
+ __ Add(x6, x10, x11);
- const int kLastMatchInfoOffset = 0 * kPointerSize;
- const int kPreviousIndexOffset = 1 * kPointerSize;
- const int kSubjectOffset = 2 * kPointerSize;
- const int kJSRegExpOffset = 3 * kPointerSize;
+ // Argument 8 (x7): Indicate that this is a direct call from JavaScript.
+ __ Mov(x7, 1);
- // Ensure that a RegExp stack is allocated.
- ExternalReference address_of_regexp_stack_memory_address =
- ExternalReference::address_of_regexp_stack_memory_address(isolate());
- ExternalReference address_of_regexp_stack_memory_size =
- ExternalReference::address_of_regexp_stack_memory_size(isolate());
- __ Mov(x10, address_of_regexp_stack_memory_size);
- __ Ldr(x10, MemOperand(x10));
- __ Cbz(x10, &runtime);
+ // Locate the code entry and call it.
+ __ Add(code_object, code_object, Code::kHeaderSize - kHeapObjectTag);
+ DirectCEntryStub stub(isolate());
+ stub.GenerateCall(masm, code_object);
- // Check that the first argument is a JSRegExp object.
- DCHECK(jssp.Is(__ StackPointer()));
- __ Peek(jsregexp_object, kJSRegExpOffset);
- __ JumpIfSmi(jsregexp_object, &runtime);
- __ JumpIfNotObjectType(jsregexp_object, x10, x10, JS_REGEXP_TYPE, &runtime);
+ __ LeaveExitFrame(false, x10, true);
- // Check that the RegExp has been compiled (data contains a fixed array).
- __ Ldr(regexp_data, FieldMemOperand(jsregexp_object, JSRegExp::kDataOffset));
- if (FLAG_debug_code) {
- STATIC_ASSERT(kSmiTag == 0);
- __ Tst(regexp_data, kSmiTagMask);
- __ Check(ne, kUnexpectedTypeForRegExpDataFixedArrayExpected);
- __ CompareObjectType(regexp_data, x10, x10, FIXED_ARRAY_TYPE);
- __ Check(eq, kUnexpectedTypeForRegExpDataFixedArrayExpected);
- }
+ // The generated regexp code returns an int32 in w0.
+ Label failure, exception;
+ __ CompareAndBranch(w0, NativeRegExpMacroAssembler::FAILURE, eq, &failure);
+ __ CompareAndBranch(w0,
+ NativeRegExpMacroAssembler::EXCEPTION,
+ eq,
+ &exception);
+ __ CompareAndBranch(w0, NativeRegExpMacroAssembler::RETRY, eq, &runtime);
- // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
- __ Ldr(x10, FieldMemOperand(regexp_data, JSRegExp::kDataTagOffset));
- __ Cmp(x10, Smi::FromInt(JSRegExp::IRREGEXP));
- __ B(ne, &runtime);
+ // Success: process the result from the native regexp code.
+ Register number_of_capture_registers = x12;
- // Check that the number of captures fit in the static offsets vector buffer.
- // We have always at least one capture for the whole match, plus additional
- // ones due to capturing parentheses. A capture takes 2 registers.
- // The number of capture registers then is (number_of_captures + 1) * 2.
+ // Calculate number of capture registers (number_of_captures + 1) * 2
+ // and store it in the last match info.
__ Ldrsw(x10,
UntagSmiFieldMemOperand(regexp_data,
JSRegExp::kIrregexpCaptureCountOffset));
- // Check (number_of_captures + 1) * 2 <= offsets vector size
- // number_of_captures * 2 <= offsets vector size - 2
- STATIC_ASSERT(Isolate::kJSRegexpStaticOffsetsVectorSize >= 2);
__ Add(x10, x10, x10);
- __ Cmp(x10, Isolate::kJSRegexpStaticOffsetsVectorSize - 2);
- __ B(hi, &runtime);
-
- // Initialize offset for possibly sliced string.
- __ Mov(sliced_string_offset, 0);
+ __ Add(number_of_capture_registers, x10, 2);
+ // Check that the fourth object is a JSArray object.
DCHECK(jssp.Is(__ StackPointer()));
- __ Peek(subject, kSubjectOffset);
- __ JumpIfSmi(subject, &runtime);
-
- __ Ldr(x10, FieldMemOperand(subject, HeapObject::kMapOffset));
- __ Ldrb(string_type, FieldMemOperand(x10, Map::kInstanceTypeOffset));
+ __ Peek(x10, kLastMatchInfoOffset);
+ __ JumpIfSmi(x10, &runtime);
+ __ JumpIfNotObjectType(x10, x11, x11, JS_ARRAY_TYPE, &runtime);
- __ Ldr(jsstring_length, FieldMemOperand(subject, String::kLengthOffset));
+ // Check that the JSArray is the fast case.
+ __ Ldr(last_match_info_elements,
+ FieldMemOperand(x10, JSArray::kElementsOffset));
+ __ Ldr(x10,
+ FieldMemOperand(last_match_info_elements, HeapObject::kMapOffset));
+ __ JumpIfNotRoot(x10, Heap::kFixedArrayMapRootIndex, &runtime);
- // Handle subject string according to its encoding and representation:
- // (1) Sequential string? If yes, go to (5).
- // (2) Anything but sequential or cons? If yes, go to (6).
- // (3) Cons string. If the string is flat, replace subject with first string.
- // Otherwise bailout.
- // (4) Is subject external? If yes, go to (7).
- // (5) Sequential string. Load regexp code according to encoding.
- // (E) Carry on.
- /// [...]
+ // Check that the last match info has space for the capture registers and the
+ // additional information (overhead).
+ // (number_of_captures + 1) * 2 + overhead <= last match info size
+ // (number_of_captures * 2) + 2 + overhead <= last match info size
+ // number_of_capture_registers + overhead <= last match info size
+ __ Ldrsw(x10,
+ UntagSmiFieldMemOperand(last_match_info_elements,
+ FixedArray::kLengthOffset));
+ __ Add(x11, number_of_capture_registers, RegExpImpl::kLastMatchOverhead);
+ __ Cmp(x11, x10);
+ __ B(gt, &runtime);
- // Deferred code at the end of the stub:
- // (6) Not a long external string? If yes, go to (8).
- // (7) External string. Make it, offset-wise, look like a sequential string.
- // Go to (5).
- // (8) Short external string or not a string? If yes, bail out to runtime.
- // (9) Sliced string. Replace subject with parent. Go to (4).
+ // Store the capture count.
+ __ SmiTag(x10, number_of_capture_registers);
+ __ Str(x10,
+ FieldMemOperand(last_match_info_elements,
+ RegExpImpl::kLastCaptureCountOffset));
+ // Store last subject and last input.
+ __ Str(subject,
+ FieldMemOperand(last_match_info_elements,
+ RegExpImpl::kLastSubjectOffset));
+ // Use x10 as the subject string in order to only need
+ // one RecordWriteStub.
+ __ Mov(x10, subject);
+ __ RecordWriteField(last_match_info_elements,
+ RegExpImpl::kLastSubjectOffset,
+ x10,
+ x11,
+ kLRHasNotBeenSaved,
+ kDontSaveFPRegs);
+ __ Str(subject,
+ FieldMemOperand(last_match_info_elements,
+ RegExpImpl::kLastInputOffset));
+ __ Mov(x10, subject);
+ __ RecordWriteField(last_match_info_elements,
+ RegExpImpl::kLastInputOffset,
+ x10,
+ x11,
+ kLRHasNotBeenSaved,
+ kDontSaveFPRegs);
- Label check_underlying; // (4)
- Label seq_string; // (5)
- Label not_seq_nor_cons; // (6)
- Label external_string; // (7)
- Label not_long_external; // (8)
+ Register last_match_offsets = x13;
+ Register offsets_vector_index = x14;
+ Register current_offset = x15;
- // (1) Sequential string? If yes, go to (5).
- __ And(string_representation,
- string_type,
- kIsNotStringMask |
- kStringRepresentationMask |
- kShortExternalStringMask);
- // We depend on the fact that Strings of type
- // SeqString and not ShortExternalString are defined
- // by the following pattern:
- // string_type: 0XX0 XX00
- // ^ ^ ^^
- // | | ||
- // | | is a SeqString
- // | is not a short external String
- // is a String
- STATIC_ASSERT((kStringTag | kSeqStringTag) == 0);
- STATIC_ASSERT(kShortExternalStringTag != 0);
- __ Cbz(string_representation, &seq_string); // Go to (5).
+ // Get the static offsets vector filled by the native regexp code
+ // and fill the last match info.
+ ExternalReference address_of_static_offsets_vector =
+ ExternalReference::address_of_static_offsets_vector(isolate());
+ __ Mov(offsets_vector_index, address_of_static_offsets_vector);
- // (2) Anything but sequential or cons? If yes, go to (6).
- STATIC_ASSERT(kConsStringTag < kExternalStringTag);
- STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
- STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
- STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
- __ Cmp(string_representation, kExternalStringTag);
- __ B(ge, &not_seq_nor_cons); // Go to (6).
-
- // (3) Cons string. Check that it's flat.
- __ Ldr(x10, FieldMemOperand(subject, ConsString::kSecondOffset));
- __ JumpIfNotRoot(x10, Heap::kempty_stringRootIndex, &runtime);
- // Replace subject with first string.
- __ Ldr(subject, FieldMemOperand(subject, ConsString::kFirstOffset));
-
- // (4) Is subject external? If yes, go to (7).
- __ Bind(&check_underlying);
- // Reload the string type.
- __ Ldr(x10, FieldMemOperand(subject, HeapObject::kMapOffset));
- __ Ldrb(string_type, FieldMemOperand(x10, Map::kInstanceTypeOffset));
- STATIC_ASSERT(kSeqStringTag == 0);
- // The underlying external string is never a short external string.
- STATIC_ASSERT(ExternalString::kMaxShortLength < ConsString::kMinLength);
- STATIC_ASSERT(ExternalString::kMaxShortLength < SlicedString::kMinLength);
- __ TestAndBranchIfAnySet(string_type.X(),
- kStringRepresentationMask,
- &external_string); // Go to (7).
-
- // (5) Sequential string. Load regexp code according to encoding.
- __ Bind(&seq_string);
-
- // Check that the third argument is a positive smi less than the subject
- // string length. A negative value will be greater (unsigned comparison).
- DCHECK(jssp.Is(__ StackPointer()));
- __ Peek(x10, kPreviousIndexOffset);
- __ JumpIfNotSmi(x10, &runtime);
- __ Cmp(jsstring_length, x10);
- __ B(ls, &runtime);
-
- // Argument 2 (x1): We need to load argument 2 (the previous index) into x1
- // before entering the exit frame.
- __ SmiUntag(x1, x10);
-
- // The third bit determines the string encoding in string_type.
- STATIC_ASSERT(kOneByteStringTag == 0x04);
- STATIC_ASSERT(kTwoByteStringTag == 0x00);
- STATIC_ASSERT(kStringEncodingMask == 0x04);
-
- // Find the code object based on the assumptions above.
- // kDataOneByteCodeOffset and kDataUC16CodeOffset are adjacent, adds an offset
- // of kPointerSize to reach the latter.
- STATIC_ASSERT(JSRegExp::kDataOneByteCodeOffset + kPointerSize ==
- JSRegExp::kDataUC16CodeOffset);
- __ Mov(x10, kPointerSize);
- // We will need the encoding later: Latin1 = 0x04
- // UC16 = 0x00
- __ Ands(string_encoding, string_type, kStringEncodingMask);
- __ CzeroX(x10, ne);
- __ Add(x10, regexp_data, x10);
- __ Ldr(code_object, FieldMemOperand(x10, JSRegExp::kDataOneByteCodeOffset));
-
- // (E) Carry on. String handling is done.
-
- // Check that the irregexp code has been generated for the actual string
- // encoding. If it has, the field contains a code object otherwise it contains
- // a smi (code flushing support).
- __ JumpIfSmi(code_object, &runtime);
-
- // All checks done. Now push arguments for native regexp code.
- __ IncrementCounter(isolate()->counters()->regexp_entry_native(), 1,
- x10,
- x11);
-
- // Isolates: note we add an additional parameter here (isolate pointer).
- __ EnterExitFrame(false, x10, 1);
- DCHECK(csp.Is(__ StackPointer()));
-
- // We have 9 arguments to pass to the regexp code, therefore we have to pass
- // one on the stack and the rest as registers.
-
- // Note that the placement of the argument on the stack isn't standard
- // AAPCS64:
- // csp[0]: Space for the return address placed by DirectCEntryStub.
- // csp[8]: Argument 9, the current isolate address.
-
- __ Mov(x10, ExternalReference::isolate_address(isolate()));
- __ Poke(x10, kPointerSize);
-
- Register length = w11;
- Register previous_index_in_bytes = w12;
- Register start = x13;
-
- // Load start of the subject string.
- __ Add(start, subject, SeqString::kHeaderSize - kHeapObjectTag);
- // Load the length from the original subject string from the previous stack
- // frame. Therefore we have to use fp, which points exactly to two pointer
- // sizes below the previous sp. (Because creating a new stack frame pushes
- // the previous fp onto the stack and decrements sp by 2 * kPointerSize.)
- __ Ldr(subject, MemOperand(fp, kSubjectOffset + 2 * kPointerSize));
- __ Ldr(length, UntagSmiFieldMemOperand(subject, String::kLengthOffset));
-
- // Handle UC16 encoding, two bytes make one character.
- // string_encoding: if Latin1: 0x04
- // if UC16: 0x00
- STATIC_ASSERT(kStringEncodingMask == 0x04);
- __ Ubfx(string_encoding, string_encoding, 2, 1);
- __ Eor(string_encoding, string_encoding, 1);
- // string_encoding: if Latin1: 0
- // if UC16: 1
-
- // Convert string positions from characters to bytes.
- // Previous index is in x1.
- __ Lsl(previous_index_in_bytes, w1, string_encoding);
- __ Lsl(length, length, string_encoding);
- __ Lsl(sliced_string_offset, sliced_string_offset, string_encoding);
-
- // Argument 1 (x0): Subject string.
- __ Mov(x0, subject);
-
- // Argument 2 (x1): Previous index, already there.
-
- // Argument 3 (x2): Get the start of input.
- // Start of input = start of string + previous index + substring offset
- // (0 if the string
- // is not sliced).
- __ Add(w10, previous_index_in_bytes, sliced_string_offset);
- __ Add(x2, start, Operand(w10, UXTW));
-
- // Argument 4 (x3):
- // End of input = start of input + (length of input - previous index)
- __ Sub(w10, length, previous_index_in_bytes);
- __ Add(x3, x2, Operand(w10, UXTW));
-
- // Argument 5 (x4): static offsets vector buffer.
- __ Mov(x4, ExternalReference::address_of_static_offsets_vector(isolate()));
-
- // Argument 6 (x5): Set the number of capture registers to zero to force
- // global regexps to behave as non-global. This stub is not used for global
- // regexps.
- __ Mov(x5, 0);
-
- // Argument 7 (x6): Start (high end) of backtracking stack memory area.
- __ Mov(x10, address_of_regexp_stack_memory_address);
- __ Ldr(x10, MemOperand(x10));
- __ Mov(x11, address_of_regexp_stack_memory_size);
- __ Ldr(x11, MemOperand(x11));
- __ Add(x6, x10, x11);
-
- // Argument 8 (x7): Indicate that this is a direct call from JavaScript.
- __ Mov(x7, 1);
-
- // Locate the code entry and call it.
- __ Add(code_object, code_object, Code::kHeaderSize - kHeapObjectTag);
- DirectCEntryStub stub(isolate());
- stub.GenerateCall(masm, code_object);
-
- __ LeaveExitFrame(false, x10, true);
-
- // The generated regexp code returns an int32 in w0.
- Label failure, exception;
- __ CompareAndBranch(w0, NativeRegExpMacroAssembler::FAILURE, eq, &failure);
- __ CompareAndBranch(w0,
- NativeRegExpMacroAssembler::EXCEPTION,
- eq,
- &exception);
- __ CompareAndBranch(w0, NativeRegExpMacroAssembler::RETRY, eq, &runtime);
-
- // Success: process the result from the native regexp code.
- Register number_of_capture_registers = x12;
-
- // Calculate number of capture registers (number_of_captures + 1) * 2
- // and store it in the last match info.
- __ Ldrsw(x10,
- UntagSmiFieldMemOperand(regexp_data,
- JSRegExp::kIrregexpCaptureCountOffset));
- __ Add(x10, x10, x10);
- __ Add(number_of_capture_registers, x10, 2);
-
- // Check that the fourth object is a JSArray object.
- DCHECK(jssp.Is(__ StackPointer()));
- __ Peek(x10, kLastMatchInfoOffset);
- __ JumpIfSmi(x10, &runtime);
- __ JumpIfNotObjectType(x10, x11, x11, JS_ARRAY_TYPE, &runtime);
-
- // Check that the JSArray is the fast case.
- __ Ldr(last_match_info_elements,
- FieldMemOperand(x10, JSArray::kElementsOffset));
- __ Ldr(x10,
- FieldMemOperand(last_match_info_elements, HeapObject::kMapOffset));
- __ JumpIfNotRoot(x10, Heap::kFixedArrayMapRootIndex, &runtime);
-
- // Check that the last match info has space for the capture registers and the
- // additional information (overhead).
- // (number_of_captures + 1) * 2 + overhead <= last match info size
- // (number_of_captures * 2) + 2 + overhead <= last match info size
- // number_of_capture_registers + overhead <= last match info size
- __ Ldrsw(x10,
- UntagSmiFieldMemOperand(last_match_info_elements,
- FixedArray::kLengthOffset));
- __ Add(x11, number_of_capture_registers, RegExpImpl::kLastMatchOverhead);
- __ Cmp(x11, x10);
- __ B(gt, &runtime);
-
- // Store the capture count.
- __ SmiTag(x10, number_of_capture_registers);
- __ Str(x10,
- FieldMemOperand(last_match_info_elements,
- RegExpImpl::kLastCaptureCountOffset));
- // Store last subject and last input.
- __ Str(subject,
- FieldMemOperand(last_match_info_elements,
- RegExpImpl::kLastSubjectOffset));
- // Use x10 as the subject string in order to only need
- // one RecordWriteStub.
- __ Mov(x10, subject);
- __ RecordWriteField(last_match_info_elements,
- RegExpImpl::kLastSubjectOffset,
- x10,
- x11,
- kLRHasNotBeenSaved,
- kDontSaveFPRegs);
- __ Str(subject,
- FieldMemOperand(last_match_info_elements,
- RegExpImpl::kLastInputOffset));
- __ Mov(x10, subject);
- __ RecordWriteField(last_match_info_elements,
- RegExpImpl::kLastInputOffset,
- x10,
- x11,
- kLRHasNotBeenSaved,
- kDontSaveFPRegs);
-
- Register last_match_offsets = x13;
- Register offsets_vector_index = x14;
- Register current_offset = x15;
-
- // Get the static offsets vector filled by the native regexp code
- // and fill the last match info.
- ExternalReference address_of_static_offsets_vector =
- ExternalReference::address_of_static_offsets_vector(isolate());
- __ Mov(offsets_vector_index, address_of_static_offsets_vector);
-
- Label next_capture, done;
- // Capture register counter starts from number of capture registers and
- // iterates down to zero (inclusive).
- __ Add(last_match_offsets,
- last_match_info_elements,
- RegExpImpl::kFirstCaptureOffset - kHeapObjectTag);
- __ Bind(&next_capture);
- __ Subs(number_of_capture_registers, number_of_capture_registers, 2);
- __ B(mi, &done);
- // Read two 32 bit values from the static offsets vector buffer into
- // an X register
- __ Ldr(current_offset,
- MemOperand(offsets_vector_index, kWRegSize * 2, PostIndex));
- // Store the smi values in the last match info.
- __ SmiTag(x10, current_offset);
- // Clearing the 32 bottom bits gives us a Smi.
- STATIC_ASSERT(kSmiTag == 0);
- __ Bic(x11, current_offset, kSmiShiftMask);
- __ Stp(x10,
- x11,
- MemOperand(last_match_offsets, kXRegSize * 2, PostIndex));
- __ B(&next_capture);
- __ Bind(&done);
+ Label next_capture, done;
+ // Capture register counter starts from number of capture registers and
+ // iterates down to zero (inclusive).
+ __ Add(last_match_offsets,
+ last_match_info_elements,
+ RegExpImpl::kFirstCaptureOffset - kHeapObjectTag);
+ __ Bind(&next_capture);
+ __ Subs(number_of_capture_registers, number_of_capture_registers, 2);
+ __ B(mi, &done);
+ // Read two 32 bit values from the static offsets vector buffer into
+ // an X register
+ __ Ldr(current_offset,
+ MemOperand(offsets_vector_index, kWRegSize * 2, PostIndex));
+ // Store the smi values in the last match info.
+ __ SmiTag(x10, current_offset);
+ // Clearing the 32 bottom bits gives us a Smi.
+ STATIC_ASSERT(kSmiTag == 0);
+ __ Bic(x11, current_offset, kSmiShiftMask);
+ __ Stp(x10,
+ x11,
+ MemOperand(last_match_offsets, kXRegSize * 2, PostIndex));
+ __ B(&next_capture);
+ __ Bind(&done);
// Return last match info.
__ Peek(x0, kLastMatchInfoOffset);
@@ -5301,6 +5000,288 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
}
+void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- x1 : function
+ // -- cp : context
+ // -- fp : frame pointer
+ // -- lr : return address
+ // -----------------------------------
+ __ AssertFunction(x1);
+
+ // TODO(bmeurer): Cleanup to match the FastNewStrictArgumentsStub.
+ __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
+ __ Ldrsw(
+ x2, FieldMemOperand(x2, SharedFunctionInfo::kFormalParameterCountOffset));
+ __ Add(x3, fp, Operand(x2, LSL, kPointerSizeLog2));
+ __ Add(x3, x3, Operand(StandardFrameConstants::kCallerSPOffset));
+ __ SmiTag(x2);
+
+ // x1 : function
+ // x2 : number of parameters (tagged)
+ // x3 : parameters pointer
+ //
+ // Returns pointer to result object in x0.
+
+ // Make an untagged copy of the parameter count.
+ // Note: arg_count_smi is an alias of param_count_smi.
+ Register function = x1;
+ Register arg_count_smi = x2;
+ Register param_count_smi = x2;
+ Register recv_arg = x3;
+ Register param_count = x7;
+ __ SmiUntag(param_count, param_count_smi);
+
+ // Check if the calling frame is an arguments adaptor frame.
+ Register caller_fp = x11;
+ Register caller_ctx = x12;
+ Label runtime;
+ Label adaptor_frame, try_allocate;
+ __ Ldr(caller_fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
+ __ Ldr(caller_ctx, MemOperand(caller_fp,
+ StandardFrameConstants::kContextOffset));
+ __ Cmp(caller_ctx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
+ __ B(eq, &adaptor_frame);
+
+ // No adaptor, parameter count = argument count.
+
+ // x1 function function pointer
+ // x2 arg_count_smi number of function arguments (smi)
+ // x3 recv_arg pointer to receiver arguments
+ // x4 mapped_params number of mapped params, min(params, args) (uninit)
+ // x7 param_count number of function parameters
+ // x11 caller_fp caller's frame pointer
+ // x14 arg_count number of function arguments (uninit)
+
+ Register arg_count = x14;
+ Register mapped_params = x4;
+ __ Mov(arg_count, param_count);
+ __ Mov(mapped_params, param_count);
+ __ B(&try_allocate);
+
+ // We have an adaptor frame. Patch the parameters pointer.
+ __ Bind(&adaptor_frame);
+ __ Ldr(arg_count_smi,
+ MemOperand(caller_fp,
+ ArgumentsAdaptorFrameConstants::kLengthOffset));
+ __ SmiUntag(arg_count, arg_count_smi);
+ __ Add(x10, caller_fp, Operand(arg_count, LSL, kPointerSizeLog2));
+ __ Add(recv_arg, x10, StandardFrameConstants::kCallerSPOffset);
+
+ // Compute the mapped parameter count = min(param_count, arg_count)
+ __ Cmp(param_count, arg_count);
+ __ Csel(mapped_params, param_count, arg_count, lt);
+
+ __ Bind(&try_allocate);
+
+ // x0 alloc_obj pointer to allocated objects: param map, backing
+ // store, arguments (uninit)
+ // x1 function function pointer
+ // x2 arg_count_smi number of function arguments (smi)
+ // x3 recv_arg pointer to receiver arguments
+ // x4 mapped_params number of mapped parameters, min(params, args)
+ // x7 param_count number of function parameters
+ // x10 size size of objects to allocate (uninit)
+ // x14 arg_count number of function arguments
+
+ // Compute the size of backing store, parameter map, and arguments object.
+ // 1. Parameter map, has two extra words containing context and backing
+ // store.
+ const int kParameterMapHeaderSize =
+ FixedArray::kHeaderSize + 2 * kPointerSize;
+
+ // Calculate the parameter map size, assuming it exists.
+ Register size = x10;
+ __ Mov(size, Operand(mapped_params, LSL, kPointerSizeLog2));
+ __ Add(size, size, kParameterMapHeaderSize);
+
+ // If there are no mapped parameters, set the running size total to zero.
+ // Otherwise, use the parameter map size calculated earlier.
+ __ Cmp(mapped_params, 0);
+ __ CzeroX(size, eq);
+
+ // 2. Add the size of the backing store and arguments object.
+ __ Add(size, size, Operand(arg_count, LSL, kPointerSizeLog2));
+ __ Add(size, size, FixedArray::kHeaderSize + JSSloppyArgumentsObject::kSize);
+
+ // Do the allocation of all three objects in one go. Assign this to x0, as it
+ // will be returned to the caller.
+ Register alloc_obj = x0;
+ __ Allocate(size, alloc_obj, x11, x12, &runtime, TAG_OBJECT);
+
+ // Get the arguments boilerplate from the current (global) context.
+
+ // x0 alloc_obj pointer to allocated objects (param map, backing
+ // store, arguments)
+ // x1 function function pointer
+ // x2 arg_count_smi number of function arguments (smi)
+ // x3 recv_arg pointer to receiver arguments
+ // x4 mapped_params number of mapped parameters, min(params, args)
+ // x7 param_count number of function parameters
+ // x11 sloppy_args_map offset to args (or aliased args) map (uninit)
+ // x14 arg_count number of function arguments
+
+ Register global_ctx = x10;
+ Register sloppy_args_map = x11;
+ Register aliased_args_map = x10;
+ __ Ldr(global_ctx, NativeContextMemOperand());
+
+ __ Ldr(sloppy_args_map,
+ ContextMemOperand(global_ctx, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
+ __ Ldr(
+ aliased_args_map,
+ ContextMemOperand(global_ctx, Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX));
+ __ Cmp(mapped_params, 0);
+ __ CmovX(sloppy_args_map, aliased_args_map, ne);
+
+ // Copy the JS object part.
+ __ Str(sloppy_args_map, FieldMemOperand(alloc_obj, JSObject::kMapOffset));
+ __ LoadRoot(x10, Heap::kEmptyFixedArrayRootIndex);
+ __ Str(x10, FieldMemOperand(alloc_obj, JSObject::kPropertiesOffset));
+ __ Str(x10, FieldMemOperand(alloc_obj, JSObject::kElementsOffset));
+
+ // Set up the callee in-object property.
+ __ AssertNotSmi(function);
+ __ Str(function,
+ FieldMemOperand(alloc_obj, JSSloppyArgumentsObject::kCalleeOffset));
+
+ // Use the length and set that as an in-object property.
+ __ Str(arg_count_smi,
+ FieldMemOperand(alloc_obj, JSSloppyArgumentsObject::kLengthOffset));
+
+ // Set up the elements pointer in the allocated arguments object.
+ // If we allocated a parameter map, "elements" will point there, otherwise
+ // it will point to the backing store.
+
+ // x0 alloc_obj pointer to allocated objects (param map, backing
+ // store, arguments)
+ // x1 function function pointer
+ // x2 arg_count_smi number of function arguments (smi)
+ // x3 recv_arg pointer to receiver arguments
+ // x4 mapped_params number of mapped parameters, min(params, args)
+ // x5 elements pointer to parameter map or backing store (uninit)
+ // x6 backing_store pointer to backing store (uninit)
+ // x7 param_count number of function parameters
+ // x14 arg_count number of function arguments
+
+ Register elements = x5;
+ __ Add(elements, alloc_obj, JSSloppyArgumentsObject::kSize);
+ __ Str(elements, FieldMemOperand(alloc_obj, JSObject::kElementsOffset));
+
+ // Initialize parameter map. If there are no mapped arguments, we're done.
+ Label skip_parameter_map;
+ __ Cmp(mapped_params, 0);
+ // Set up backing store address, because it is needed later for filling in
+ // the unmapped arguments.
+ Register backing_store = x6;
+ __ CmovX(backing_store, elements, eq);
+ __ B(eq, &skip_parameter_map);
+
+ __ LoadRoot(x10, Heap::kSloppyArgumentsElementsMapRootIndex);
+ __ Str(x10, FieldMemOperand(elements, FixedArray::kMapOffset));
+ __ Add(x10, mapped_params, 2);
+ __ SmiTag(x10);
+ __ Str(x10, FieldMemOperand(elements, FixedArray::kLengthOffset));
+ __ Str(cp, FieldMemOperand(elements,
+ FixedArray::kHeaderSize + 0 * kPointerSize));
+ __ Add(x10, elements, Operand(mapped_params, LSL, kPointerSizeLog2));
+ __ Add(x10, x10, kParameterMapHeaderSize);
+ __ Str(x10, FieldMemOperand(elements,
+ FixedArray::kHeaderSize + 1 * kPointerSize));
+
+ // Copy the parameter slots and the holes in the arguments.
+ // We need to fill in mapped_parameter_count slots. Then index the context,
+ // where parameters are stored in reverse order, at:
+ //
+ // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS + parameter_count - 1
+ //
+ // The mapped parameter thus needs to get indices:
+ //
+ // MIN_CONTEXT_SLOTS + parameter_count - 1 ..
+ // MIN_CONTEXT_SLOTS + parameter_count - mapped_parameter_count
+ //
+ // We loop from right to left.
+
+ // x0 alloc_obj pointer to allocated objects (param map, backing
+ // store, arguments)
+ // x1 function function pointer
+ // x2 arg_count_smi number of function arguments (smi)
+ // x3 recv_arg pointer to receiver arguments
+ // x4 mapped_params number of mapped parameters, min(params, args)
+ // x5 elements pointer to parameter map or backing store (uninit)
+ // x6 backing_store pointer to backing store (uninit)
+ // x7 param_count number of function parameters
+ // x11 loop_count parameter loop counter (uninit)
+ // x12 index parameter index (smi, uninit)
+ // x13 the_hole hole value (uninit)
+ // x14 arg_count number of function arguments
+
+ Register loop_count = x11;
+ Register index = x12;
+ Register the_hole = x13;
+ Label parameters_loop, parameters_test;
+ __ Mov(loop_count, mapped_params);
+ __ Add(index, param_count, static_cast<int>(Context::MIN_CONTEXT_SLOTS));
+ __ Sub(index, index, mapped_params);
+ __ SmiTag(index);
+ __ LoadRoot(the_hole, Heap::kTheHoleValueRootIndex);
+ __ Add(backing_store, elements, Operand(loop_count, LSL, kPointerSizeLog2));
+ __ Add(backing_store, backing_store, kParameterMapHeaderSize);
+
+ __ B(&parameters_test);
+
+ __ Bind(&parameters_loop);
+ __ Sub(loop_count, loop_count, 1);
+ __ Mov(x10, Operand(loop_count, LSL, kPointerSizeLog2));
+ __ Add(x10, x10, kParameterMapHeaderSize - kHeapObjectTag);
+ __ Str(index, MemOperand(elements, x10));
+ __ Sub(x10, x10, kParameterMapHeaderSize - FixedArray::kHeaderSize);
+ __ Str(the_hole, MemOperand(backing_store, x10));
+ __ Add(index, index, Smi::FromInt(1));
+ __ Bind(&parameters_test);
+ __ Cbnz(loop_count, &parameters_loop);
+
+ __ Bind(&skip_parameter_map);
+ // Copy arguments header and remaining slots (if there are any.)
+ __ LoadRoot(x10, Heap::kFixedArrayMapRootIndex);
+ __ Str(x10, FieldMemOperand(backing_store, FixedArray::kMapOffset));
+ __ Str(arg_count_smi, FieldMemOperand(backing_store,
+ FixedArray::kLengthOffset));
+
+ // x0 alloc_obj pointer to allocated objects (param map, backing
+ // store, arguments)
+ // x1 function function pointer
+ // x2 arg_count_smi number of function arguments (smi)
+ // x3 recv_arg pointer to receiver arguments
+ // x4 mapped_params number of mapped parameters, min(params, args)
+ // x6 backing_store pointer to backing store (uninit)
+ // x14 arg_count number of function arguments
+
+ Label arguments_loop, arguments_test;
+ __ Mov(x10, mapped_params);
+ __ Sub(recv_arg, recv_arg, Operand(x10, LSL, kPointerSizeLog2));
+ __ B(&arguments_test);
+
+ __ Bind(&arguments_loop);
+ __ Sub(recv_arg, recv_arg, kPointerSize);
+ __ Ldr(x11, MemOperand(recv_arg));
+ __ Add(x12, backing_store, Operand(x10, LSL, kPointerSizeLog2));
+ __ Str(x11, FieldMemOperand(x12, FixedArray::kHeaderSize));
+ __ Add(x10, x10, 1);
+
+ __ Bind(&arguments_test);
+ __ Cmp(x10, arg_count);
+ __ B(lt, &arguments_loop);
+
+ __ Ret();
+
+ // Do the runtime call to allocate the arguments object.
+ __ Bind(&runtime);
+ __ Push(function, recv_arg, arg_count_smi);
+ __ TailCallRuntime(Runtime::kNewSloppyArguments);
+}
+
+
void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- x1 : function
« no previous file with comments | « src/arm/interface-descriptors-arm.cc ('k') | src/arm64/interface-descriptors-arm64.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698