// Copyright 2013 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if V8_TARGET_ARCH_A64

#include "codegen.h"
#include "debug.h"
#include "deoptimizer.h"
#include "full-codegen.h"
#include "runtime.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)


// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the native context.
  __ Ldr(result, GlobalObjectMemOperand());
  __ Ldr(result,
         FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the Array function from the native context.
  __ Ldr(result,
         MemOperand(result,
                    Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
}


// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the native context.
  __ Ldr(result, GlobalObjectMemOperand());
  __ Ldr(result,
         FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the InternalArray function from the native context.
  __ Ldr(result, ContextMemOperand(result,
                                   Context::INTERNAL_ARRAY_FUNCTION_INDEX));
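  // (ContextMemOperand is presumably just shorthand for
  // MemOperand(reg, Context::SlotOffset(index)), so this helper and
  // GenerateLoadArrayFunction above use equivalent addressing.)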
}


void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- x0                 : number of arguments excluding receiver
  //  -- x1                 : called function (only guaranteed when
  //                          extra_args requires it)
  //  -- cp                 : context
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[8 * (argc - 1)] : first argument (argc == x0)
  //  -- sp[8 * argc]       : receiver
  // -----------------------------------

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    __ Push(x1);
  } else {
    ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects x0 to contain the number of arguments
  // including the receiver and the extra arguments.
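  // For example, a two-argument call that also needs the called function
  // arrives here with x0 == 2 and leaves with x0 == 2 + 1 + 1 == 4.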
  __ Add(x0, x0, num_extra_args + 1);
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}


void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_InternalArrayCode");
  Label generic_array_code;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, x1);

  if (FLAG_debug_code) {
    // The initial map for the builtin InternalArray function should be a map.
    __ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset));
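    // A smi has its low tag bit clear while a heap pointer has it set, so
    // the Tst/Assert(ne) pair below asserts that x10 is not a smi.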
    __ Tst(x10, kSmiTagMask);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction);
    __ CompareObjectType(x10, x11, x12, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_ArrayCode");
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, x1);

  if (FLAG_debug_code) {
    // The initial map for the builtin Array function should be a map.
    __ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset));
    __ Tst(x10, kSmiTagMask);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
    __ CompareObjectType(x10, x11, x12, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
  }

  // Run the native code for the Array function called as a normal function.
  __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0                     : number of arguments
  //  -- x1                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_StringConstructCode");
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_ctor_calls(), 1, x10, x11);

  Register argc = x0;
  Register function = x1;
  if (FLAG_debug_code) {
    __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, x10);
    __ Cmp(function, x10);
    __ Assert(eq, kUnexpectedStringFunction);
  }

  // Load the first argument into x0 and get rid of the rest.
  Label no_arguments;
  __ Cbz(argc, &no_arguments);
  // First arg = sp[(argc - 1) * 8].
  __ Sub(argc, argc, 1);
  __ Drop(argc, kXRegSize);
  // jssp now points to args[0]; load and drop args[0] + receiver.
  Register arg = argc;
  __ Ldr(arg, MemOperand(jssp, 2 * kPointerSize, PostIndex));
  argc = NoReg;

  Register argument = x2;
  Label not_cached, argument_is_string;
  __ LookupNumberStringCache(arg,       // Input.
                             argument,  // Result.
                             x10,       // Scratch.
                             x11,       // Scratch.
                             x12,       // Scratch.
                             &not_cached);
  __ IncrementCounter(counters->string_ctor_cached_number(), 1, x10, x11);
  __ Bind(&argument_is_string);

  // ----------- S t a t e -------------
  //  -- x2 : argument converted to string
  //  -- x1 : constructor function
  //  -- lr : return address
  // -----------------------------------

  Label gc_required;
  Register new_obj = x0;
  __ Allocate(JSValue::kSize, new_obj, x10, x11, &gc_required, TAG_OBJECT);

  // Initialize the String object.
  Register map = x3;
  __ LoadGlobalFunctionInitialMap(function, map, x10);
  if (FLAG_debug_code) {
    __ Ldrb(x4, FieldMemOperand(map, Map::kInstanceSizeOffset));
    __ Cmp(x4, JSValue::kSize >> kPointerSizeLog2);
    __ Assert(eq, kUnexpectedStringWrapperInstanceSize);
    __ Ldrb(x4, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
    __ Cmp(x4, 0);
    __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper);
  }
  __ Str(map, FieldMemOperand(new_obj, HeapObject::kMapOffset));

  Register empty = x3;
  __ LoadRoot(empty, Heap::kEmptyFixedArrayRootIndex);
  __ Str(empty, FieldMemOperand(new_obj, JSObject::kPropertiesOffset));
  __ Str(empty, FieldMemOperand(new_obj, JSObject::kElementsOffset));

  __ Str(argument, FieldMemOperand(new_obj, JSValue::kValueOffset));

  // Ensure the object is fully initialized.
  STATIC_ASSERT(JSValue::kSize == (4 * kPointerSize));
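  // (A JSValue is exactly four pointer-sized fields: map, properties,
  // elements and the wrapped value, all stored above, so no filler is
  // needed.)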

  __ Ret();

  // The argument was not found in the number to string cache. Check
  // if it's a string already before calling the conversion builtin.
  Label convert_argument;
  __ Bind(&not_cached);
  __ JumpIfSmi(arg, &convert_argument);

  // Is it a String?
  __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset));
  __ Ldrb(x11, FieldMemOperand(x10, Map::kInstanceTypeOffset));
  __ Tbnz(x11, MaskToBit(kIsNotStringMask), &convert_argument);
  __ Mov(argument, arg);
  __ IncrementCounter(counters->string_ctor_string_value(), 1, x10, x11);
  __ B(&argument_is_string);

  // Invoke the conversion builtin and put the result into x2.
  __ Bind(&convert_argument);
  __ Push(function);  // Preserve the function.
  __ IncrementCounter(counters->string_ctor_conversions(), 1, x10, x11);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(arg);
    __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
  }
  __ Pop(function);
  __ Mov(argument, x0);
  __ B(&argument_is_string);

  // Load the empty string into x2, remove the receiver from the
  // stack, and jump back to the case where the argument is a string.
  __ Bind(&no_arguments);
  __ LoadRoot(argument, Heap::kempty_stringRootIndex);
  __ Drop(1);
  __ B(&argument_is_string);

  // At this point the argument is already a string. Call runtime to create a
  // string wrapper.
  __ Bind(&gc_required);
  __ IncrementCounter(counters->string_ctor_gc_required(), 1, x10, x11);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(argument);
    __ CallRuntime(Runtime::kNewStringWrapper, 1);
  }
  __ Ret();
}


static void CallRuntimePassFunction(MacroAssembler* masm,
                                    Runtime::FunctionId function_id) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  // - Push a copy of the function onto the stack.
  // - Push another copy as a parameter to the runtime call.
  __ Push(x1, x1);

  __ CallRuntime(function_id, 1);

  // - Restore the function.
  __ Pop(x1);
}


static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(x2, FieldMemOperand(x2, SharedFunctionInfo::kCodeOffset));
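  // x2 now holds a tagged Code pointer; strip the heap-object tag and skip
  // the Code header to reach the first instruction.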
  __ Add(x2, x2, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x2);
}


static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
  __ Add(x0, x0, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x0);
}


void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However, not
  // checking may delay installing ready functions, and always checking would
  // be quite expensive. A good compromise is to first check against the stack
  // limit as a cue for an interrupt signal.
  Label ok;
  __ CompareRoot(masm->StackPointer(), Heap::kStackLimitRootIndex);
  __ B(hs, &ok);

  CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
  GenerateTailCallToReturnedCode(masm);

  __ Bind(&ok);
  GenerateTailCallToSharedCode(masm);
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool count_constructions,
                                           bool create_memento) {
  // ----------- S t a t e -------------
  //  -- x0     : number of arguments
  //  -- x1     : constructor function
  //  -- x2     : allocation site or undefined
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  ASM_LOCATION("Builtins::Generate_JSConstructStubHelper");
  // Should never count constructions for api objects.
  ASSERT(!is_api_function || !count_constructions);
  // Should never create mementos for api functions.
  ASSERT(!is_api_function || !create_memento);
  // Should never create mementos before slack tracking is finished.
  ASSERT(!count_constructions || !create_memento);

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the three incoming parameters on the stack.
    if (create_memento) {
      __ AssertUndefinedOrAllocationSite(x2, x10);
      __ Push(x2);
    }

    Register argc = x0;
    Register constructor = x1;
    // x1: constructor function
    __ SmiTag(argc);
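    // (On this 64-bit port a smi keeps its payload in the upper 32 bits, so
    // SmiTag is just a logical shift left by kSmiShift.)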
    __ Push(argc, constructor);
    // sp[0]: constructor function
    // sp[1]: number of arguments (smi-tagged)

    // Try to allocate the object without transitioning into C code. If any of
    // the preconditions is not met, the code bails out to the runtime call.
    Label rt_call, allocated;
    if (FLAG_inline_new) {
      Label undo_allocation;
#if ENABLE_DEBUGGER_SUPPORT
      ExternalReference debug_step_in_fp =
          ExternalReference::debug_step_in_fp_address(isolate);
      __ Mov(x2, Operand(debug_step_in_fp));
      __ Ldr(x2, MemOperand(x2));
      __ Cbnz(x2, &rt_call);
#endif
      // Load the initial map and verify that it is in fact a map.
      Register init_map = x2;
      __ Ldr(init_map,
             FieldMemOperand(constructor,
                             JSFunction::kPrototypeOrInitialMapOffset));
      __ JumpIfSmi(init_map, &rt_call);
      __ JumpIfNotObjectType(init_map, x10, x11, MAP_TYPE, &rt_call);

      // Check that the constructor is not constructing a JSFunction (see
      // comments in Runtime_NewObject in runtime.cc); in that case the
      // initial map's instance type would be JS_FUNCTION_TYPE.
      __ CompareInstanceType(init_map, x10, JS_FUNCTION_TYPE);
      __ B(eq, &rt_call);

      if (count_constructions) {
        Label allocate;
        // Decrease generous allocation count.
        __ Ldr(x3, FieldMemOperand(constructor,
                                   JSFunction::kSharedFunctionInfoOffset));
        MemOperand constructor_count =
            FieldMemOperand(x3, SharedFunctionInfo::kConstructionCountOffset);
        __ Ldrb(x4, constructor_count);
        __ Subs(x4, x4, 1);
        __ Strb(x4, constructor_count);
        __ B(ne, &allocate);

        // Push the constructor and map to the stack, and the constructor again
        // as argument to the runtime call.
        __ Push(constructor, init_map, constructor);
        // The call will replace the stub, so the countdown is only done once.
        __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
        __ Pop(init_map, constructor);
        __ Bind(&allocate);
      }

      // Now allocate the JSObject on the heap.
      Register obj_size = x3;
      Register new_obj = x4;
      __ Ldrb(obj_size, FieldMemOperand(init_map, Map::kInstanceSizeOffset));
      if (create_memento) {
        __ Add(x7, obj_size,
               Operand(AllocationMemento::kSize / kPointerSize));
        __ Allocate(x7, new_obj, x10, x11, &rt_call, SIZE_IN_WORDS);
      } else {
        __ Allocate(obj_size, new_obj, x10, x11, &rt_call, SIZE_IN_WORDS);
      }

      // Allocated the JSObject, now initialize the fields. Map is set to
      // initial map and properties and elements are set to empty fixed array.
      // NB. the object pointer is not tagged, so MemOperand is used.
      Register empty = x5;
      __ LoadRoot(empty, Heap::kEmptyFixedArrayRootIndex);
      __ Str(init_map, MemOperand(new_obj, JSObject::kMapOffset));
      STATIC_ASSERT(JSObject::kElementsOffset ==
                    (JSObject::kPropertiesOffset + kPointerSize));
      __ Stp(empty, empty, MemOperand(new_obj, JSObject::kPropertiesOffset));

      Register first_prop = x5;
      __ Add(first_prop, new_obj, JSObject::kHeaderSize);

      // Fill all of the in-object properties with the appropriate filler.
      Register undef = x7;
      __ LoadRoot(undef, Heap::kUndefinedValueRootIndex);

      // Obtain number of pre-allocated property fields and in-object
      // properties.
      Register prealloc_fields = x10;
      Register inobject_props = x11;
      Register inst_sizes = x11;
      __ Ldr(inst_sizes, FieldMemOperand(init_map, Map::kInstanceSizesOffset));
      __ Ubfx(prealloc_fields, inst_sizes,
              Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
              kBitsPerByte);
      __ Ubfx(inobject_props, inst_sizes,
              Map::kInObjectPropertiesByte * kBitsPerByte, kBitsPerByte);

      // Calculate number of property fields in the object.
      Register prop_fields = x6;
      __ Sub(prop_fields, obj_size, JSObject::kHeaderSize / kPointerSize);

      if (count_constructions) {
        // Fill the pre-allocated fields with undef.
        __ FillFields(first_prop, prealloc_fields, undef);

        // Register first_non_prealloc holds the address of the first field
        // after the pre-allocated fields.
        Register first_non_prealloc = x12;
        __ Add(first_non_prealloc, first_prop,
               Operand(prealloc_fields, LSL, kPointerSizeLog2));

        first_prop = NoReg;

        if (FLAG_debug_code) {
          Register obj_end = x5;
          __ Add(obj_end, new_obj, Operand(obj_size, LSL, kPointerSizeLog2));
          __ Cmp(first_non_prealloc, obj_end);
          __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields);
        }

        // Fill the remaining fields with one pointer filler map.
        Register one_pointer_filler = x5;
        Register non_prealloc_fields = x6;
        __ LoadRoot(one_pointer_filler, Heap::kOnePointerFillerMapRootIndex);
        __ Sub(non_prealloc_fields, prop_fields, prealloc_fields);
        __ FillFields(first_non_prealloc, non_prealloc_fields,
                      one_pointer_filler);
        prop_fields = NoReg;
      } else if (create_memento) {
        // Fill the pre-allocated fields with undef.
        __ FillFields(first_prop, prop_fields, undef);
        __ Add(first_prop, new_obj, Operand(obj_size, LSL, kPointerSizeLog2));
        __ LoadRoot(x14, Heap::kAllocationMementoMapRootIndex);
        ASSERT_EQ(0 * kPointerSize, AllocationMemento::kMapOffset);
        __ Str(x14, MemOperand(first_prop, kPointerSize, PostIndex));
        // Load the AllocationSite
        __ Peek(x14, 2 * kXRegSize);
        ASSERT_EQ(1 * kPointerSize, AllocationMemento::kAllocationSiteOffset);
        __ Str(x14, MemOperand(first_prop, kPointerSize, PostIndex));
        first_prop = NoReg;
      } else {
        // Fill all of the property fields with undef.
        __ FillFields(first_prop, prop_fields, undef);
        first_prop = NoReg;
        prop_fields = NoReg;
      }

      // Add the object tag to make the JSObject real, so that we can continue
      // and jump into the continuation code at any time from now on. Any
      // failures need to undo the allocation, so that the heap is in a
      // consistent state and verifiable.
      __ Add(new_obj, new_obj, kHeapObjectTag);

      // Check if a non-empty properties array is needed. Continue with
      // allocated object if not, or fall through to runtime call if it is.
      Register element_count = x3;
      __ Ldrb(element_count,
              FieldMemOperand(init_map, Map::kUnusedPropertyFieldsOffset));
      // The instance-sizes field counts both pre-allocated property fields
      // and in-object properties.
      __ Add(element_count, element_count, prealloc_fields);
      __ Subs(element_count, element_count, inobject_props);
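      // element_count is now unused + pre-allocated - in-object, i.e. the
      // number of property slots that do not fit inside the object itself.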

      // Done if no extra properties are to be allocated.
      __ B(eq, &allocated);
      __ Assert(pl, kPropertyAllocationCountFailed);

      // Scale the number of elements by pointer size and add the header for
      // FixedArrays to the start of the next object calculation from above.
      Register new_array = x5;
      Register array_size = x6;
      __ Add(array_size, element_count, FixedArray::kHeaderSize / kPointerSize);
      __ Allocate(array_size, new_array, x11, x12, &undo_allocation,
                  static_cast<AllocationFlags>(RESULT_CONTAINS_TOP |
                                               SIZE_IN_WORDS));

      Register array_map = x10;
      __ LoadRoot(array_map, Heap::kFixedArrayMapRootIndex);
      __ Str(array_map, MemOperand(new_array, FixedArray::kMapOffset));
      __ SmiTag(x0, element_count);
      __ Str(x0, MemOperand(new_array, FixedArray::kLengthOffset));

      // Initialize the fields to undefined.
      Register elements = x10;
      __ Add(elements, new_array, FixedArray::kHeaderSize);
      __ FillFields(elements, element_count, undef);

      // Store the initialized FixedArray into the properties field of the
      // JSObject.
      __ Add(new_array, new_array, kHeapObjectTag);
      __ Str(new_array, FieldMemOperand(new_obj, JSObject::kPropertiesOffset));

      // Continue with JSObject being successfully allocated.
      __ B(&allocated);

      // Undo the setting of the new top so that the heap is verifiable. For
      // example, the map's unused properties potentially do not match the
      // allocated object's unused properties.
      __ Bind(&undo_allocation);
      __ UndoAllocationInNewSpace(new_obj, x14);
    }

    // Allocate the new receiver object using the runtime call.
    __ Bind(&rt_call);
    Label count_incremented;
    if (create_memento) {
      // Get the cell or allocation site.
      __ Peek(x4, 2 * kXRegSize);
      __ Push(x4);
      __ Push(constructor);  // Argument for Runtime_NewObject.
      __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 2);
      __ Mov(x4, x0);
      // If we ended up using the runtime, and we want a memento, then the
      // runtime call made it for us, and we shouldn't do create count
      // increment.
      __ B(&count_incremented);
    } else {
      __ Push(constructor);  // Argument for Runtime_NewObject.
      __ CallRuntime(Runtime::kNewObject, 1);
      __ Mov(x4, x0);
    }

    // Receiver for constructor call allocated.
    // x4: JSObject
    __ Bind(&allocated);

    if (create_memento) {
      __ Peek(x10, 2 * kXRegSize);
      __ JumpIfRoot(x10, Heap::kUndefinedValueRootIndex, &count_incremented);
      // x10 is an AllocationSite. We are creating a memento from it, so we
      // need to increment the memento create count.
      __ Ldr(x5, FieldMemOperand(x10,
                                 AllocationSite::kPretenureCreateCountOffset));
      __ Add(x5, x5, Operand(Smi::FromInt(1)));
      __ Str(x5, FieldMemOperand(x10,
                                 AllocationSite::kPretenureCreateCountOffset));
      __ Bind(&count_incremented);
    }

    __ Push(x4, x4);

    // Reload the number of arguments from the stack.
    // Set it up in x0 for the function call below.
    // jssp[0]: receiver
    // jssp[1]: receiver
    // jssp[2]: constructor function
    // jssp[3]: number of arguments (smi-tagged)
    __ Peek(constructor, 2 * kXRegSize);  // Load constructor.
    __ Peek(argc, 3 * kXRegSize);  // Load number of arguments.
    __ SmiUntag(argc);

    // Set up pointer to last argument.
    __ Add(x2, fp, StandardFrameConstants::kCallerSPOffset);

    // Copy arguments and receiver to the expression stack.
    // Copy 2 values every loop to use ldp/stp.
    // x0: number of arguments
    // x1: constructor function
    // x2: address of last argument (caller sp)
    // jssp[0]: receiver
    // jssp[1]: receiver
    // jssp[2]: constructor function
    // jssp[3]: number of arguments (smi-tagged)
    // Compute the start address of the copy in x3.
    __ Add(x3, x2, Operand(argc, LSL, kPointerSizeLog2));
    Label loop, entry, done_copying_arguments;
    __ B(&entry);
    __ Bind(&loop);
    __ Ldp(x10, x11, MemOperand(x3, -2 * kPointerSize, PreIndex));
    __ Push(x11, x10);
    __ Bind(&entry);
    __ Cmp(x3, x2);
    __ B(gt, &loop);
    // Because we copied values 2 by 2 we may have copied one extra value.
    // Drop it if that is the case.
    __ B(eq, &done_copying_arguments);
    __ Drop(1);
    __ Bind(&done_copying_arguments);

    // Call the function.
    // x0: number of arguments
    // x1: constructor function
    if (is_api_function) {
      __ Ldr(cp, FieldMemOperand(constructor, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      __ Call(code, RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(argc);
      __ InvokeFunction(constructor, actual, CALL_FUNCTION, NullCallWrapper());
    }

    // Store offset of return address for deoptimizer.
    if (!is_api_function && !count_constructions) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore the context from the frame.
    // x0: result
    // jssp[0]: receiver
    // jssp[1]: constructor function
    // jssp[2]: number of arguments (smi-tagged)
    __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, exit;

    // If the result is a smi, it is *not* an object in the ECMA sense.
    // x0: result
    // jssp[0]: receiver (newly allocated object)
    // jssp[1]: constructor function
    // jssp[2]: number of arguments (smi-tagged)
    __ JumpIfSmi(x0, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
    __ JumpIfObjectType(x0, x1, x3, FIRST_SPEC_OBJECT_TYPE, &exit, ge);

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ Bind(&use_receiver);
    __ Peek(x0, 0);

    // Remove the receiver from the stack, remove caller arguments, and
    // return.
    __ Bind(&exit);
    // x0: result
    // jssp[0]: receiver (newly allocated object)
    // jssp[1]: constructor function
    // jssp[2]: number of arguments (smi-tagged)
    __ Peek(x1, 2 * kXRegSize);

    // Leave construct frame.
  }

  __ DropBySMI(x1);
  __ Drop(1);
  __ IncrementCounter(isolate->counters()->constructed_objects(), 1, x1, x2);
  __ Ret();
}


void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, false);
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, FLAG_pretenuring_call_new);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false, false);
}


// Input:
//   x0: code entry.
//   x1: function.
//   x2: receiver.
//   x3: argc.
//   x4: argv.
// Output:
//   x0: result.
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from JSEntryStub::GenerateBody().
  Register function = x1;
  Register receiver = x2;
  Register argc = x3;
  Register argv = x4;

  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Clear the context before we push it when entering the internal frame.
  __ Mov(cp, 0);

  {
    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context from the function argument.
    __ Ldr(cp, FieldMemOperand(function, JSFunction::kContextOffset));

    __ InitializeRootRegister();

    // Push the function and the receiver onto the stack.
    __ Push(function, receiver);

    // Copy arguments to the stack in a loop, in reverse order.
    // x3: argc.
    // x4: argv.
    Label loop, entry;
    // Compute the copy end address.
    __ Add(x10, argv, Operand(argc, LSL, kPointerSizeLog2));

    __ B(&entry);
    __ Bind(&loop);
    __ Ldr(x11, MemOperand(argv, kPointerSize, PostIndex));
    __ Ldr(x12, MemOperand(x11));  // Dereference the handle.
    __ Push(x12);  // Push the argument.
    __ Bind(&entry);
    __ Cmp(x10, argv);
    __ B(ne, &loop);

    // Initialize all JavaScript callee-saved registers, since they will be
    // seen by the garbage collector as part of handlers.
    // The original values have been saved in JSEntryStub::GenerateBody().
    __ LoadRoot(x19, Heap::kUndefinedValueRootIndex);
    __ Mov(x20, x19);
    __ Mov(x21, x19);
    __ Mov(x22, x19);
    __ Mov(x23, x19);
    __ Mov(x24, x19);
    __ Mov(x25, x19);
    // Don't initialize the reserved registers.
    // x26 : root register (root).
    // x27 : context pointer (cp).
    // x28 : JS stack pointer (jssp).
    // x29 : frame pointer (fp).

    __ Mov(x0, argc);
    if (is_construct) {
      // No type feedback cell is available.
      __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);

      CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
      __ CallStub(&stub);
    } else {
      ParameterCount actual(x0);
      __ InvokeFunction(function, actual, CALL_FUNCTION, NullCallWrapper());
    }
    // Exit the JS internal frame and remove the parameters (except function),
    // and return.
  }

  // Result is in x0. Return.
  __ Ret();
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}


void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kCompileUnoptimized);
  GenerateTailCallToReturnedCode(masm);
}


static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  Register function = x1;

  // Preserve function. At the same time, push arguments for
  // kCompileOptimized.
  __ LoadObject(x10, masm->isolate()->factory()->ToBoolean(concurrent));
  __ Push(function, function, x10);

  __ CallRuntime(Runtime::kCompileOptimized, 2);

  // Restore the function.
  __ Pop(function);
}


void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  CallCompileOptimized(masm, false);
  GenerateTailCallToReturnedCode(masm);
}


void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  CallCompileOptimized(masm, true);
  GenerateTailCallToReturnedCode(masm);
}


static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code fast, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // The following caller-saved registers must be saved and restored when
  // calling through to the runtime:
  //   x0 - The address from which to resume execution.
  //   x1 - isolate
  //   lr - The return address for the JSFunction itself. It has not yet been
  //        preserved on the stack because the frame setup code was replaced
  //        with a call to this stub, to handle code ageing.
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Push(x0, x1, fp, lr);
    __ Mov(x1, ExternalReference::isolate_address(masm->isolate()));
    __ CallCFunction(
        ExternalReference::get_make_code_young_function(masm->isolate()), 2);
    __ Pop(lr, fp, x1, x0);
  }

  // The calling function has been made young again, so return to execute the
  // real frame set-up code.
  __ Br(x0);
}

#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                  \
  void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }                                                           \
  void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(  \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR


void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the
  // fact that make_code_young doesn't do any garbage collection which allows
  // us to save/restore the registers without worrying about which of them
  // contain pointers.

  // The following caller-saved registers must be saved and restored when
  // calling through to the runtime:
  //   x0 - The address from which to resume execution.
  //   x1 - isolate
  //   lr - The return address for the JSFunction itself. It has not yet been
  //        preserved on the stack because the frame setup code was replaced
  //        with a call to this stub, to handle code ageing.
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Push(x0, x1, fp, lr);
    __ Mov(x1, ExternalReference::isolate_address(masm->isolate()));
    __ CallCFunction(
        ExternalReference::get_mark_code_as_executed_function(
            masm->isolate()), 2);
    __ Pop(lr, fp, x1, x0);

    // Perform prologue operations usually performed by the young code stub.
    __ EmitFrameSetupForCodeAgePatching(masm);
  }

  // Jump to point after the code-age stub.
  __ Add(x0, x0, kCodeAgeSequenceSize);
  __ Br(x0);
}


void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}


static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    // TODO(jbramley): Is it correct (and appropriate) to use safepoint
    // registers here? According to the comment above, we should only need to
    // preserve the registers with parameters.
    __ PushXRegList(kSafepointSavedRegisters);
    // Pass the function and deoptimization type to the runtime system.
    __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
    __ PopXRegList(kSafepointSavedRegisters);
  }

  // Ignore state (pushed by Deoptimizer::EntryGenerator::Generate).
  __ Drop(1);

  // Jump to the miss handler. Deoptimizer::EntryGenerator::Generate loads this
  // into lr before it jumps here.
  __ Br(lr);
}


void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}


void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}


static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass the deoptimization type to the runtime system.
    __ Mov(x0, Smi::FromInt(static_cast<int>(type)));
    __ Push(x0);
    __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
  }

  // Get the full codegen state from the stack and untag it.
  Register state = x6;
  __ Peek(state, 0);
  __ SmiUntag(state);

  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ CompareAndBranch(
      state, FullCodeGenerator::NO_REGISTERS, ne, &with_tos_register);
  __ Drop(1);  // Remove state.
  __ Ret();

  __ Bind(&with_tos_register);
  // Reload TOS register.
  __ Peek(x0, kPointerSize);
  __ CompareAndBranch(state, FullCodeGenerator::TOS_REG, ne, &unknown_state);
  __ Drop(2);  // Remove state and TOS.
  __ Ret();

  __ Bind(&unknown_state);
  __ Abort(kInvalidFullCodegenState);
}


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ Push(x0);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
  }

  // If the code object is null, just return to the unoptimized code.
  Label skip;
  __ CompareAndBranch(x0, Smi::FromInt(0), ne, &skip);
  __ Ret();

  __ Bind(&skip);

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ Ldr(x1, MemOperand(x0, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
  __ Ldrsw(x1, UntagSmiFieldMemOperand(x1, FixedArray::OffsetOfElementAt(
      DeoptimizationInputData::kOsrPcOffsetIndex)));
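  // (A smi keeps its payload in the upper 32 bits, so
  // UntagSmiFieldMemOperand addresses that half of the field directly and
  // the sign-extending 32-bit load reads and untags in one instruction.)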

  // Compute the target address = code_obj + header_size + osr_offset
  // <entry_addr> = <code_obj> + #header_size + <osr_offset>
  __ Add(x0, x0, x1);
  __ Add(lr, x0, Code::kHeaderSize - kHeapObjectTag);
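  // Ret() branches to lr by default, so loading lr here turns the return
  // below into a jump to the OSR entry point.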

  // And "return" to the OSR entry point of the function.
  __ Ret();
}


void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as indicator that recompilation might be done.
  Label ok;
  __ CompareRoot(jssp, Heap::kStackLimitRootIndex);
  __ B(hs, &ok);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kStackGuard, 0);
  }
  __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
          RelocInfo::CODE_TARGET);

  __ Bind(&ok);
  __ Ret();
}


void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  enum {
    call_type_JS_func = 0,
    call_type_func_proxy = 1,
    call_type_non_func = 2
  };
  Register argc = x0;
  Register function = x1;
  Register call_type = x4;
  Register scratch1 = x10;
  Register scratch2 = x11;
  Register receiver_type = x13;

  ASM_LOCATION("Builtins::Generate_FunctionCall");
  // 1. Make sure we have at least one argument.
  { Label done;
    __ Cbnz(argc, &done);
    __ LoadRoot(scratch1, Heap::kUndefinedValueRootIndex);
    __ Push(scratch1);
    __ Mov(argc, 1);
    __ Bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  Label slow, non_function;
  __ Peek(function, Operand(argc, LSL, kXRegSizeLog2));
  __ JumpIfSmi(function, &non_function);
  __ JumpIfNotObjectType(function, scratch1, receiver_type,
                         JS_FUNCTION_TYPE, &slow);

  // 3a. Patch the first argument if necessary when calling a function.
  Label shift_arguments;
  __ Mov(call_type, static_cast<int>(call_type_JS_func));
  { Label convert_to_object, use_global_receiver, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ Ldr(cp, FieldMemOperand(function, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    // Also do not transform the receiver for native functions (the compiler
    // hints are loaded into scratch2 below).
    __ Ldr(scratch1,
           FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
    __ Ldr(scratch2.W(),
           FieldMemOperand(scratch1, SharedFunctionInfo::kCompilerHintsOffset));
    __ TestAndBranchIfAnySet(
        scratch2.W(),
        (1 << SharedFunctionInfo::kStrictModeFunction) |
        (1 << SharedFunctionInfo::kNative),
        &shift_arguments);

    // Compute the receiver in sloppy mode.
    Register receiver = x2;
    __ Sub(scratch1, argc, 1);
    __ Peek(receiver, Operand(scratch1, LSL, kXRegSizeLog2));
    __ JumpIfSmi(receiver, &convert_to_object);

    __ JumpIfRoot(receiver, Heap::kUndefinedValueRootIndex,
                  &use_global_receiver);
    __ JumpIfRoot(receiver, Heap::kNullValueRootIndex, &use_global_receiver);

    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ JumpIfObjectType(receiver, scratch1, scratch2,
                        FIRST_SPEC_OBJECT_TYPE, &shift_arguments, ge);

    __ Bind(&convert_to_object);

    {
      // Enter an internal frame in order to preserve argument count.
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ SmiTag(argc);

      __ Push(argc, receiver);
      __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
      __ Mov(receiver, x0);

      __ Pop(argc);
      __ SmiUntag(argc);

      // Exit the internal frame.
    }

    // Restore the function and flag in the registers.
    __ Peek(function, Operand(argc, LSL, kXRegSizeLog2));
    __ Mov(call_type, static_cast<int>(call_type_JS_func));
    __ B(&patch_receiver);

    __ Bind(&use_global_receiver);
    __ Ldr(receiver, GlobalObjectMemOperand());
    __ Ldr(receiver,
           FieldMemOperand(receiver, GlobalObject::kGlobalReceiverOffset));

    __ Bind(&patch_receiver);
    __ Sub(scratch1, argc, 1);
    __ Poke(receiver, Operand(scratch1, LSL, kXRegSizeLog2));

    __ B(&shift_arguments);
  }

  // 3b. Check for function proxy.
  __ Bind(&slow);
  __ Mov(call_type, static_cast<int>(call_type_func_proxy));
  __ Cmp(receiver_type, JS_FUNCTION_PROXY_TYPE);
  __ B(eq, &shift_arguments);
  __ Bind(&non_function);
  __ Mov(call_type, static_cast<int>(call_type_non_func));

  // 3c. Patch the first argument when calling a non-function. The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  //     call type (0: JS function, 1: function proxy, 2: non-function)
  __ Sub(scratch1, argc, 1);
  __ Poke(function, Operand(scratch1, LSL, kXRegSizeLog2));

  // 4. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver). Adjust argument count to make
  //    the original first argument the new receiver.
  //    call type (0: JS function, 1: function proxy, 2: non-function)
  __ Bind(&shift_arguments);
  { Label loop;
    // Calculate the copy start address (destination). Copy end address is
    // jssp.
    __ Add(scratch2, jssp, Operand(argc, LSL, kPointerSizeLog2));
    __ Sub(scratch1, scratch2, kPointerSize);
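    // Each value is copied one slot towards higher addresses, so the first
    // argument ends up overwriting the old receiver slot.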

    __ Bind(&loop);
    __ Ldr(x12, MemOperand(scratch1, -kPointerSize, PostIndex));
    __ Str(x12, MemOperand(scratch2, -kPointerSize, PostIndex));
    __ Cmp(scratch1, jssp);
    __ B(ge, &loop);
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ Sub(argc, argc, 1);
    __ Drop(1);
  }

  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
  //     or a function proxy via CALL_FUNCTION_PROXY.
  //     call type (0: JS function, 1: function proxy, 2: non-function)
  { Label js_function, non_proxy;
    __ Cbz(call_type, &js_function);
    // Expected number of arguments is 0 for CALL_NON_FUNCTION.
    __ Mov(x2, 0);
    __ Cmp(call_type, static_cast<int>(call_type_func_proxy));
    __ B(ne, &non_proxy);

    __ Push(function);  // Re-add proxy object as additional argument.
    __ Add(argc, argc, 1);
    __ GetBuiltinFunction(function, Builtins::CALL_FUNCTION_PROXY);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    __ Bind(&non_proxy);
    __ GetBuiltinFunction(function, Builtins::CALL_NON_FUNCTION);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    __ Bind(&js_function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing. If so, jump
  //     (tail-call) to the code in x3 without checking arguments.
  __ Ldr(x3, FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
  __ Ldrsw(x2,
           FieldMemOperand(x3,
                           SharedFunctionInfo::kFormalParameterCountOffset));
  Label dont_adapt_args;
  __ Cmp(x2, argc);  // Check formal and actual parameter counts.
  __ B(eq, &dont_adapt_args);
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET);
  __ Bind(&dont_adapt_args);

  __ Ldr(x3, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
  ParameterCount expected(0);
  __ InvokeCode(x3, expected, expected, JUMP_FUNCTION, NullCallWrapper());
}


void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  ASM_LOCATION("Builtins::Generate_FunctionApply");
  const int kIndexOffset =
      StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
  const int kLimitOffset =
      StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
  const int kArgsOffset = 2 * kPointerSize;
  const int kReceiverOffset = 3 * kPointerSize;
  const int kFunctionOffset = 4 * kPointerSize;

  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);

    Register args = x12;
    Register receiver = x14;
    Register function = x15;

    // Get the length of the arguments via a builtin call.
    __ Ldr(function, MemOperand(fp, kFunctionOffset));
    __ Ldr(args, MemOperand(fp, kArgsOffset));
    __ Push(function, args);
    __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
    Register argc = x0;

    // Check the stack for overflow.
    // We are not trying to catch interruptions (e.g. debug break and
    // preemption) here, so the "real stack limit" is checked.
    Label enough_stack_space;
    __ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
    __ Ldr(function, MemOperand(fp, kFunctionOffset));
    // Make x10 the space we have left. The stack might already be overflowed
    // here which will cause x10 to become negative.
    // TODO(jbramley): Check that the stack usage here is safe.
    __ Sub(x10, jssp, x10);
    // Check if the arguments will overflow the stack.
    __ Cmp(x10, Operand(argc, LSR, kSmiShift - kPointerSizeLog2));
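    // (argc is a smi, i.e. the count shifted left by kSmiShift; shifting it
    // right by kSmiShift - kPointerSizeLog2 yields count * kPointerSize, the
    // number of bytes of stack the arguments will need.)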
    __ B(gt, &enough_stack_space);
    // There is not enough stack space, so use a builtin to throw an
    // appropriate error.
    __ Push(function, argc);
    __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
    // We should never return from the APPLY_OVERFLOW builtin.
    if (__ emit_debug_code()) {
      __ Unreachable();
    }

    __ Bind(&enough_stack_space);
    // Push current limit and index.
    __ Mov(x1, 0);  // Initial index.
    __ Push(argc, x1);

    Label push_receiver;
    __ Ldr(receiver, MemOperand(fp, kReceiverOffset));

    // Check that the function is a JS function. Otherwise it must be a proxy.
    // When it is not the function proxy will be invoked later.
    __ JumpIfNotObjectType(function, x10, x11, JS_FUNCTION_TYPE,
                           &push_receiver);

    // Change context eagerly to get the right global object if necessary.
    __ Ldr(cp, FieldMemOperand(function, JSFunction::kContextOffset));
    // Load the shared function info.
    __ Ldr(x2, FieldMemOperand(function,
                               JSFunction::kSharedFunctionInfoOffset));

    // Compute and push the receiver.
    // Do not transform the receiver for strict mode functions.
    Label convert_receiver_to_object, use_global_receiver;
    __ Ldr(w10, FieldMemOperand(x2, SharedFunctionInfo::kCompilerHintsOffset));
    __ Tbnz(x10, SharedFunctionInfo::kStrictModeFunction, &push_receiver);
    // Do not transform the receiver for native functions.
    __ Tbnz(x10, SharedFunctionInfo::kNative, &push_receiver);

    // Compute the receiver in sloppy mode.
    __ JumpIfSmi(receiver, &convert_receiver_to_object);
    __ JumpIfRoot(receiver, Heap::kNullValueRootIndex, &use_global_receiver);
    __ JumpIfRoot(receiver, Heap::kUndefinedValueRootIndex,
                  &use_global_receiver);

    // Check if the receiver is already a JavaScript object.
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ JumpIfObjectType(receiver, x10, x11, FIRST_SPEC_OBJECT_TYPE,
                        &push_receiver, ge);

    // Call a builtin to convert the receiver to a regular object.
    __ Bind(&convert_receiver_to_object);
    __ Push(receiver);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ Mov(receiver, x0);
    __ B(&push_receiver);

    __ Bind(&use_global_receiver);
    __ Ldr(x10, GlobalObjectMemOperand());
    __ Ldr(receiver, FieldMemOperand(x10, GlobalObject::kGlobalReceiverOffset));

    // Push the receiver
    __ Bind(&push_receiver);
    __ Push(receiver);

    // Copy all arguments from the array to the stack.
    Label entry, loop;
    Register current = x0;
    __ Ldr(current, MemOperand(fp, kIndexOffset));
    __ B(&entry);

    __ Bind(&loop);
    // Load the current argument from the arguments array and push it.
    // TODO(all): Couldn't we optimize this for JS arrays?

    __ Ldr(x1, MemOperand(fp, kArgsOffset));
    __ Push(x1, current);

    // Call the runtime to access the property in the arguments array.
    __ CallRuntime(Runtime::kGetProperty, 2);
    __ Push(x0);

    // Use inline caching to access the arguments.
    __ Ldr(current, MemOperand(fp, kIndexOffset));
    __ Add(current, current, Smi::FromInt(1));
    __ Str(current, MemOperand(fp, kIndexOffset));

    // Test if the copy loop has finished copying all the elements from the
    // arguments object.
    __ Bind(&entry);
    __ Ldr(x1, MemOperand(fp, kLimitOffset));
    __ Cmp(current, x1);
    __ B(ne, &loop);

    // At the end of the loop, the number of arguments is stored in 'current',
    // represented as a smi.

    function = x1;  // From now on we want the function to be kept in x1.
    __ Ldr(function, MemOperand(fp, kFunctionOffset));

    // Call the function.
    Label call_proxy;
    ParameterCount actual(current);
    __ SmiUntag(current);
    __ JumpIfNotObjectType(function, x10, x11, JS_FUNCTION_TYPE, &call_proxy);
    __ InvokeFunction(function, actual, CALL_FUNCTION, NullCallWrapper());
    frame_scope.GenerateLeaveFrame();
    __ Drop(3);
    __ Ret();

    // Call the function proxy.
    __ Bind(&call_proxy);
    // x0 : argc
    // x1 : function
    __ Push(function);  // Add function proxy as last argument.
    __ Add(x0, x0, 1);
    __ Mov(x2, 0);
    __ GetBuiltinFunction(x1, Builtins::CALL_FUNCTION_PROXY);
    __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
  }
  __ Drop(3);
  __ Ret();
}


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ SmiTag(x10, x0);
  __ Mov(x11, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ Push(lr, fp);
  __ Push(x11, x1, x10);
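  // The stack now holds, from jssp upwards: the smi-tagged argument count,
  // the function, the ARGUMENTS_ADAPTOR marker, the saved fp and the saved
  // lr. fp is set below so that LeaveArgumentsAdaptorFrame can reload the
  // count at fp - (kFixedFrameSizeFromFp + kPointerSize).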
  __ Add(fp, jssp,
         StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize);
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then drop the parameters and the receiver.
  __ Ldr(x10, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                               kPointerSize)));
  __ Mov(jssp, fp);
  __ Pop(fp, lr);
  __ DropBySMI(x10, kXRegSize);
  __ Drop(1);
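  // (DropBySMI presumably scales the still smi-tagged count in place of an
  // explicit untag; the final Drop(1) then removes the receiver.)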
}


void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  ASM_LOCATION("Builtins::Generate_ArgumentsAdaptorTrampoline");
  // ----------- S t a t e -------------
  //  -- x0 : actual number of arguments
  //  -- x1 : function (passed through to callee)
  //  -- x2 : expected number of arguments
  // -----------------------------------

  Register argc_actual = x0;  // Excluding the receiver.
  Register argc_expected = x2;  // Excluding the receiver.
  Register function = x1;
  Register code_entry = x3;

  Label invoke, dont_adapt_arguments;

  Label enough, too_few;
  __ Ldr(code_entry, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
  __ Cmp(argc_actual, argc_expected);
  __ B(lt, &too_few);
  __ Cmp(argc_expected, SharedFunctionInfo::kDontAdaptArgumentsSentinel);
  __ B(eq, &dont_adapt_arguments);

  {  // Enough parameters: actual >= expected
    EnterArgumentsAdaptorFrame(masm);

    Register copy_start = x10;
    Register copy_end = x11;
    Register copy_to = x12;
    Register scratch1 = x13, scratch2 = x14;

    __ Lsl(argc_expected, argc_expected, kPointerSizeLog2);

    // Adjust for fp, lr, and the receiver.
    __ Add(copy_start, fp, 3 * kPointerSize);
    __ Add(copy_start, copy_start, Operand(argc_actual, LSL, kPointerSizeLog2));
    __ Sub(copy_end, copy_start, argc_expected);
    __ Sub(copy_end, copy_end, kPointerSize);
    __ Mov(copy_to, jssp);

    // Claim space for the arguments, the receiver, and one extra slot.
    // The extra slot ensures we do not write under jssp. It will be popped
    // later.
    __ Add(scratch1, argc_expected, 2 * kPointerSize);
    __ Claim(scratch1, 1);

    // Copy the arguments (including the receiver) to the new stack frame.
    Label copy_2_by_2;
    __ Bind(&copy_2_by_2);
    __ Ldp(scratch1, scratch2,
           MemOperand(copy_start, -2 * kPointerSize, PreIndex));
    __ Stp(scratch1, scratch2,
           MemOperand(copy_to, -2 * kPointerSize, PreIndex));
    __ Cmp(copy_start, copy_end);
    __ B(hi, &copy_2_by_2);

    // Correct the space allocated for the extra slot.
    __ Drop(1);

    __ B(&invoke);
  }

  {  // Too few parameters: Actual < expected
    __ Bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);

    Register copy_from = x10;
    Register copy_end = x11;
    Register copy_to = x12;
    Register scratch1 = x13, scratch2 = x14;

    __ Lsl(argc_expected, argc_expected, kPointerSizeLog2);
    __ Lsl(argc_actual, argc_actual, kPointerSizeLog2);

    // Adjust for fp, lr, and the receiver.
    __ Add(copy_from, fp, 3 * kPointerSize);
    __ Add(copy_from, copy_from, argc_actual);
    __ Mov(copy_to, jssp);
    __ Sub(copy_end, copy_to, 1 * kPointerSize);  // Adjust for the receiver.
    __ Sub(copy_end, copy_end, argc_actual);

    // Claim space for the arguments, the receiver, and one extra slot.
    // The extra slot ensures we do not write under jssp. It will be popped
    // later.
    __ Add(scratch1, argc_expected, 2 * kPointerSize);
    __ Claim(scratch1, 1);

    // Copy the arguments (including the receiver) to the new stack frame.
    Label copy_2_by_2;
    __ Bind(&copy_2_by_2);
    __ Ldp(scratch1, scratch2,
           MemOperand(copy_from, -2 * kPointerSize, PreIndex));
    __ Stp(scratch1, scratch2,
           MemOperand(copy_to, -2 * kPointerSize, PreIndex));
    __ Cmp(copy_to, copy_end);
    __ B(hi, &copy_2_by_2);

    __ Mov(copy_to, copy_end);

    // Fill the remaining expected arguments with undefined.
    __ LoadRoot(scratch1, Heap::kUndefinedValueRootIndex);
    __ Add(copy_end, jssp, kPointerSize);

    Label fill;
    __ Bind(&fill);
    __ Stp(scratch1, scratch1,
           MemOperand(copy_to, -2 * kPointerSize, PreIndex));
    __ Cmp(copy_to, copy_end);
    __ B(hi, &fill);

    // Correct the space allocated for the extra slot.
    __ Drop(1);
  }

  // Arguments have been adapted. Now call the entry point.
  __ Bind(&invoke);
  __ Call(code_entry);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Ret();

  // Call the entry point without adapting the arguments.
  __ Bind(&dont_adapt_arguments);
  __ Jump(code_entry);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_A64